mirror of
https://github.com/prowler-cloud/prowler.git
synced 2026-04-13 05:17:59 +00:00
Compare commits
21 Commits
feat/prowl
...
improve-co
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
6ca837dfc0 | ||
|
|
b3c94cc54a | ||
|
|
ad36938717 | ||
|
|
10dd9460e9 | ||
|
|
c8d41745dd | ||
|
|
c6c000a369 | ||
|
|
a2b083e8c8 | ||
|
|
d2f7169537 | ||
|
|
632f2633c1 | ||
|
|
82d487a1e7 | ||
|
|
9a6a43637d | ||
|
|
c21cf0ac20 | ||
|
|
f3b142c0cf | ||
|
|
eda90c4673 | ||
|
|
def59a8cc2 | ||
|
|
1bfed74db5 | ||
|
|
baf1194824 | ||
|
|
b9270df3e6 | ||
|
|
379df7800d | ||
|
|
fcabe1f99e | ||
|
|
ad7a56d010 |
2
.github/workflows/api-code-quality.yml
vendored
2
.github/workflows/api-code-quality.yml
vendored
@@ -50,7 +50,7 @@ jobs:
|
||||
|
||||
- name: Check for API changes
|
||||
id: check-changes
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: |
|
||||
api/**
|
||||
|
||||
@@ -137,18 +137,18 @@ jobs:
|
||||
sed -i "s|prowler-cloud/prowler.git@master|prowler-cloud/prowler.git@${LATEST_SHA}|" api/pyproject.toml
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
|
||||
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
|
||||
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
|
||||
|
||||
- name: Build and push API container for ${{ matrix.arch }}
|
||||
id: container-push
|
||||
if: github.event_name == 'push' || github.event_name == 'release' || github.event_name == 'workflow_dispatch'
|
||||
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
|
||||
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
|
||||
with:
|
||||
context: ${{ env.WORKING_DIRECTORY }}
|
||||
push: true
|
||||
@@ -178,7 +178,7 @@ jobs:
|
||||
auth.docker.io:443
|
||||
production.cloudflare.docker.com:443
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
|
||||
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
8
.github/workflows/api-container-checks.yml
vendored
8
.github/workflows/api-container-checks.yml
vendored
@@ -42,7 +42,7 @@ jobs:
|
||||
|
||||
- name: Check if Dockerfile changed
|
||||
id: dockerfile-changed
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: api/Dockerfile
|
||||
|
||||
@@ -104,7 +104,7 @@ jobs:
|
||||
|
||||
- name: Check for API changes
|
||||
id: check-changes
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: api/**
|
||||
files_ignore: |
|
||||
@@ -115,11 +115,11 @@ jobs:
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
|
||||
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
|
||||
|
||||
- name: Build container for ${{ matrix.arch }}
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
|
||||
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
|
||||
with:
|
||||
context: ${{ env.API_WORKING_DIR }}
|
||||
push: false
|
||||
|
||||
2
.github/workflows/api-security.yml
vendored
2
.github/workflows/api-security.yml
vendored
@@ -53,7 +53,7 @@ jobs:
|
||||
|
||||
- name: Check for API changes
|
||||
id: check-changes
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: |
|
||||
api/**
|
||||
|
||||
2
.github/workflows/api-tests.yml
vendored
2
.github/workflows/api-tests.yml
vendored
@@ -99,7 +99,7 @@ jobs:
|
||||
|
||||
- name: Check for API changes
|
||||
id: check-changes
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: |
|
||||
api/**
|
||||
|
||||
2
.github/workflows/backport.yml
vendored
2
.github/workflows/backport.yml
vendored
@@ -46,7 +46,7 @@ jobs:
|
||||
|
||||
- name: Backport PR
|
||||
if: steps.label_check.outputs.label_check == 'success'
|
||||
uses: sorenlouv/backport-github-action@516854e7c9f962b9939085c9a92ea28411d1ae90 # v10.2.0
|
||||
uses: sorenlouv/backport-github-action@9460b7102fea25466026ce806c9ebf873ac48721 # v11.0.0
|
||||
with:
|
||||
github_token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
auto_backport_label_prefix: ${{ env.BACKPORT_LABEL_PREFIX }}
|
||||
|
||||
2
.github/workflows/ci-zizmor.yml
vendored
2
.github/workflows/ci-zizmor.yml
vendored
@@ -49,6 +49,6 @@ jobs:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Run zizmor
|
||||
uses: zizmorcore/zizmor-action@0dce2577a4760a2749d8cfb7a84b7d5585ebcb7d # v0.5.0
|
||||
uses: zizmorcore/zizmor-action@71321a20a9ded102f6e9ce5718a2fcec2c4f70d8 # v0.5.2
|
||||
with:
|
||||
token: ${{ github.token }}
|
||||
|
||||
4
.github/workflows/helm-chart-checks.yml
vendored
4
.github/workflows/helm-chart-checks.yml
vendored
@@ -36,12 +36,12 @@ jobs:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Set up Helm
|
||||
uses: azure/setup-helm@1a275c3b69536ee54be43f2070a358922e12c8d4 # v4.3.1
|
||||
uses: azure/setup-helm@dda3372f752e03dde6b3237bc9431cdc2f7a02a2 # v5.0.0
|
||||
|
||||
- name: Update chart dependencies
|
||||
run: helm dependency update ${{ env.CHART_PATH }}
|
||||
|
||||
4
.github/workflows/helm-chart-release.yml
vendored
4
.github/workflows/helm-chart-release.yml
vendored
@@ -29,12 +29,12 @@ jobs:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Set up Helm
|
||||
uses: azure/setup-helm@b9e51907a09c216f16ebe8536097933489208112 # v4.3.0
|
||||
uses: azure/setup-helm@dda3372f752e03dde6b3237bc9431cdc2f7a02a2 # v5.0.0
|
||||
|
||||
- name: Set appVersion from release tag
|
||||
run: |
|
||||
|
||||
18
.github/workflows/issue-triage.lock.yml
generated
vendored
18
.github/workflows/issue-triage.lock.yml
generated
vendored
@@ -772,7 +772,7 @@ jobs:
|
||||
SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Upload Safe Outputs
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: safe-output
|
||||
path: ${{ env.GH_AW_SAFE_OUTPUTS }}
|
||||
@@ -793,13 +793,13 @@ jobs:
|
||||
await main();
|
||||
- name: Upload sanitized agent output
|
||||
if: always() && env.GH_AW_AGENT_OUTPUT
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: agent-output
|
||||
path: ${{ env.GH_AW_AGENT_OUTPUT }}
|
||||
if-no-files-found: warn
|
||||
- name: Upload engine output files
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: agent_outputs
|
||||
path: |
|
||||
@@ -839,7 +839,7 @@ jobs:
|
||||
- name: Upload agent artifacts
|
||||
if: always()
|
||||
continue-on-error: true
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: agent-artifacts
|
||||
path: |
|
||||
@@ -880,7 +880,7 @@ jobs:
|
||||
destination: /opt/gh-aw/actions
|
||||
- name: Download agent output artifact
|
||||
continue-on-error: true
|
||||
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
|
||||
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
|
||||
with:
|
||||
name: agent-output
|
||||
path: /tmp/gh-aw/safeoutputs/
|
||||
@@ -992,13 +992,13 @@ jobs:
|
||||
destination: /opt/gh-aw/actions
|
||||
- name: Download agent artifacts
|
||||
continue-on-error: true
|
||||
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
|
||||
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
|
||||
with:
|
||||
name: agent-artifacts
|
||||
path: /tmp/gh-aw/threat-detection/
|
||||
- name: Download agent output artifact
|
||||
continue-on-error: true
|
||||
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
|
||||
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
|
||||
with:
|
||||
name: agent-output
|
||||
path: /tmp/gh-aw/threat-detection/
|
||||
@@ -1071,7 +1071,7 @@ jobs:
|
||||
await main();
|
||||
- name: Upload threat detection log
|
||||
if: always()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: threat-detection.log
|
||||
path: /tmp/gh-aw/threat-detection/detection.log
|
||||
@@ -1174,7 +1174,7 @@ jobs:
|
||||
destination: /opt/gh-aw/actions
|
||||
- name: Download agent output artifact
|
||||
continue-on-error: true
|
||||
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
|
||||
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
|
||||
with:
|
||||
name: agent-output
|
||||
path: /tmp/gh-aw/safeoutputs/
|
||||
|
||||
@@ -123,18 +123,18 @@ jobs:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
|
||||
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
|
||||
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
|
||||
|
||||
- name: Build and push MCP container for ${{ matrix.arch }}
|
||||
id: container-push
|
||||
if: github.event_name == 'push' || github.event_name == 'release' || github.event_name == 'workflow_dispatch'
|
||||
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
|
||||
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
|
||||
with:
|
||||
context: ${{ env.WORKING_DIRECTORY }}
|
||||
push: true
|
||||
@@ -173,7 +173,7 @@ jobs:
|
||||
release-assets.githubusercontent.com:443
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
|
||||
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
8
.github/workflows/mcp-container-checks.yml
vendored
8
.github/workflows/mcp-container-checks.yml
vendored
@@ -42,7 +42,7 @@ jobs:
|
||||
|
||||
- name: Check if Dockerfile changed
|
||||
id: dockerfile-changed
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: mcp_server/Dockerfile
|
||||
|
||||
@@ -96,7 +96,7 @@ jobs:
|
||||
|
||||
- name: Check for MCP changes
|
||||
id: check-changes
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: mcp_server/**
|
||||
files_ignore: |
|
||||
@@ -105,11 +105,11 @@ jobs:
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
|
||||
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
|
||||
|
||||
- name: Build MCP container for ${{ matrix.arch }}
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
|
||||
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
|
||||
with:
|
||||
context: ${{ env.MCP_WORKING_DIR }}
|
||||
push: false
|
||||
|
||||
2
.github/workflows/pr-check-changelog.yml
vendored
2
.github/workflows/pr-check-changelog.yml
vendored
@@ -45,7 +45,7 @@ jobs:
|
||||
|
||||
- name: Get changed files
|
||||
id: changed-files
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: |
|
||||
api/**
|
||||
|
||||
@@ -43,7 +43,7 @@ jobs:
|
||||
|
||||
- name: Get changed files
|
||||
id: changed-files
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: |
|
||||
prowler/providers/**/services/**/*.metadata.json
|
||||
|
||||
2
.github/workflows/pr-conflict-checker.yml
vendored
2
.github/workflows/pr-conflict-checker.yml
vendored
@@ -39,7 +39,7 @@ jobs:
|
||||
|
||||
- name: Get changed files
|
||||
id: changed-files
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: '**'
|
||||
|
||||
|
||||
2
.github/workflows/prepare-release.yml
vendored
2
.github/workflows/prepare-release.yml
vendored
@@ -380,7 +380,7 @@ jobs:
|
||||
no-changelog
|
||||
|
||||
- name: Create draft release
|
||||
uses: softprops/action-gh-release@a06a81a03ee405af7f2048a818ed3f03bbf83c7b # v2.5.0
|
||||
uses: softprops/action-gh-release@153bb8e04406b158c6c84fc1615b65b24149a1fe # v2.6.1
|
||||
with:
|
||||
tag_name: ${{ env.PROWLER_VERSION }}
|
||||
name: Prowler ${{ env.PROWLER_VERSION }}
|
||||
|
||||
2
.github/workflows/sdk-code-quality.yml
vendored
2
.github/workflows/sdk-code-quality.yml
vendored
@@ -46,7 +46,7 @@ jobs:
|
||||
|
||||
- name: Check for SDK changes
|
||||
id: check-changes
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: ./**
|
||||
files_ignore: |
|
||||
|
||||
16
.github/workflows/sdk-container-build-push.yml
vendored
16
.github/workflows/sdk-container-build-push.yml
vendored
@@ -197,13 +197,13 @@ jobs:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
|
||||
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Login to Public ECR
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
|
||||
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
|
||||
with:
|
||||
registry: public.ecr.aws
|
||||
username: ${{ secrets.PUBLIC_ECR_AWS_ACCESS_KEY_ID }}
|
||||
@@ -212,12 +212,12 @@ jobs:
|
||||
AWS_REGION: ${{ env.AWS_REGION }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
|
||||
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
|
||||
|
||||
- name: Build and push SDK container for ${{ matrix.arch }}
|
||||
id: container-push
|
||||
if: github.event_name == 'push' || github.event_name == 'release' || github.event_name == 'workflow_dispatch'
|
||||
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
|
||||
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
|
||||
with:
|
||||
context: .
|
||||
file: ${{ env.DOCKERFILE_PATH }}
|
||||
@@ -252,13 +252,13 @@ jobs:
|
||||
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
|
||||
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Login to Public ECR
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
|
||||
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
|
||||
with:
|
||||
registry: public.ecr.aws
|
||||
username: ${{ secrets.PUBLIC_ECR_AWS_ACCESS_KEY_ID }}
|
||||
@@ -295,7 +295,7 @@ jobs:
|
||||
# Push to toniblyx/prowler only for current version (latest/stable/release tags)
|
||||
- name: Login to DockerHub (toniblyx)
|
||||
if: needs.setup.outputs.latest_tag == 'latest'
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
|
||||
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
|
||||
with:
|
||||
username: ${{ secrets.TONIBLYX_DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.TONIBLYX_DOCKERHUB_PASSWORD }}
|
||||
@@ -320,7 +320,7 @@ jobs:
|
||||
# Re-login as prowlercloud for cleanup of intermediate tags
|
||||
- name: Login to DockerHub (prowlercloud)
|
||||
if: always()
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
|
||||
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
8
.github/workflows/sdk-container-checks.yml
vendored
8
.github/workflows/sdk-container-checks.yml
vendored
@@ -41,7 +41,7 @@ jobs:
|
||||
|
||||
- name: Check if Dockerfile changed
|
||||
id: dockerfile-changed
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: Dockerfile
|
||||
|
||||
@@ -102,7 +102,7 @@ jobs:
|
||||
|
||||
- name: Check for SDK changes
|
||||
id: check-changes
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: ./**
|
||||
files_ignore: |
|
||||
@@ -127,11 +127,11 @@ jobs:
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
|
||||
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
|
||||
|
||||
- name: Build SDK container for ${{ matrix.arch }}
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
|
||||
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
|
||||
with:
|
||||
context: .
|
||||
push: false
|
||||
|
||||
2
.github/workflows/sdk-security.yml
vendored
2
.github/workflows/sdk-security.yml
vendored
@@ -44,7 +44,7 @@ jobs:
|
||||
|
||||
- name: Check for SDK changes
|
||||
id: check-changes
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files:
|
||||
./**
|
||||
|
||||
32
.github/workflows/sdk-tests.yml
vendored
32
.github/workflows/sdk-tests.yml
vendored
@@ -67,7 +67,7 @@ jobs:
|
||||
|
||||
- name: Check for SDK changes
|
||||
id: check-changes
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: ./**
|
||||
files_ignore: |
|
||||
@@ -109,7 +109,7 @@ jobs:
|
||||
- name: Check if AWS files changed
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
id: changed-aws
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/**/aws/**
|
||||
@@ -239,7 +239,7 @@ jobs:
|
||||
- name: Check if Azure files changed
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
id: changed-azure
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/**/azure/**
|
||||
@@ -263,7 +263,7 @@ jobs:
|
||||
- name: Check if GCP files changed
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
id: changed-gcp
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/**/gcp/**
|
||||
@@ -287,7 +287,7 @@ jobs:
|
||||
- name: Check if Kubernetes files changed
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
id: changed-kubernetes
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/**/kubernetes/**
|
||||
@@ -311,7 +311,7 @@ jobs:
|
||||
- name: Check if GitHub files changed
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
id: changed-github
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/**/github/**
|
||||
@@ -335,7 +335,7 @@ jobs:
|
||||
- name: Check if NHN files changed
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
id: changed-nhn
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/**/nhn/**
|
||||
@@ -359,7 +359,7 @@ jobs:
|
||||
- name: Check if M365 files changed
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
id: changed-m365
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/**/m365/**
|
||||
@@ -383,7 +383,7 @@ jobs:
|
||||
- name: Check if IaC files changed
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
id: changed-iac
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/**/iac/**
|
||||
@@ -407,7 +407,7 @@ jobs:
|
||||
- name: Check if MongoDB Atlas files changed
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
id: changed-mongodbatlas
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/**/mongodbatlas/**
|
||||
@@ -431,7 +431,7 @@ jobs:
|
||||
- name: Check if OCI files changed
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
id: changed-oraclecloud
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/**/oraclecloud/**
|
||||
@@ -455,7 +455,7 @@ jobs:
|
||||
- name: Check if OpenStack files changed
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
id: changed-openstack
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/**/openstack/**
|
||||
@@ -479,7 +479,7 @@ jobs:
|
||||
- name: Check if Google Workspace files changed
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
id: changed-googleworkspace
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/**/googleworkspace/**
|
||||
@@ -503,7 +503,7 @@ jobs:
|
||||
- name: Check if Vercel files changed
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
id: changed-vercel
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/**/vercel/**
|
||||
@@ -527,7 +527,7 @@ jobs:
|
||||
- name: Check if Lib files changed
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
id: changed-lib
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/lib/**
|
||||
@@ -551,7 +551,7 @@ jobs:
|
||||
- name: Check if Config files changed
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
id: changed-config
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/config/**
|
||||
|
||||
2
.github/workflows/test-impact-analysis.yml
vendored
2
.github/workflows/test-impact-analysis.yml
vendored
@@ -66,7 +66,7 @@ jobs:
|
||||
|
||||
- name: Get changed files
|
||||
id: changed-files
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
|
||||
|
||||
@@ -127,18 +127,18 @@ jobs:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
|
||||
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
|
||||
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
|
||||
|
||||
- name: Build and push UI container for ${{ matrix.arch }}
|
||||
id: container-push
|
||||
if: github.event_name == 'push' || github.event_name == 'release' || github.event_name == 'workflow_dispatch'
|
||||
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
|
||||
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
|
||||
with:
|
||||
context: ${{ env.WORKING_DIRECTORY }}
|
||||
build-args: |
|
||||
@@ -172,7 +172,7 @@ jobs:
|
||||
production.cloudflare.docker.com:443
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
|
||||
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
8
.github/workflows/ui-container-checks.yml
vendored
8
.github/workflows/ui-container-checks.yml
vendored
@@ -42,7 +42,7 @@ jobs:
|
||||
|
||||
- name: Check if Dockerfile changed
|
||||
id: dockerfile-changed
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: ui/Dockerfile
|
||||
|
||||
@@ -98,7 +98,7 @@ jobs:
|
||||
|
||||
- name: Check for UI changes
|
||||
id: check-changes
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: ui/**
|
||||
files_ignore: |
|
||||
@@ -108,11 +108,11 @@ jobs:
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
|
||||
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
|
||||
|
||||
- name: Build UI container for ${{ matrix.arch }}
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
|
||||
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
|
||||
with:
|
||||
context: ${{ env.UI_WORKING_DIR }}
|
||||
target: prod
|
||||
|
||||
10
.github/workflows/ui-e2e-tests-v2.yml
vendored
10
.github/workflows/ui-e2e-tests-v2.yml
vendored
@@ -158,12 +158,12 @@ jobs:
|
||||
'
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
|
||||
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
|
||||
with:
|
||||
node-version: '24.13.0'
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
|
||||
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
|
||||
with:
|
||||
package_json_file: ui/package.json
|
||||
run_install: false
|
||||
@@ -172,7 +172,7 @@ jobs:
|
||||
run: echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
|
||||
|
||||
- name: Setup pnpm and Next.js cache
|
||||
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
|
||||
with:
|
||||
path: |
|
||||
${{ env.STORE_PATH }}
|
||||
@@ -192,7 +192,7 @@ jobs:
|
||||
run: pnpm run build
|
||||
|
||||
- name: Cache Playwright browsers
|
||||
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
|
||||
id: playwright-cache
|
||||
with:
|
||||
path: ~/.cache/ms-playwright
|
||||
@@ -259,7 +259,7 @@ jobs:
|
||||
fi
|
||||
|
||||
- name: Upload test reports
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
if: failure()
|
||||
with:
|
||||
name: playwright-report
|
||||
|
||||
12
.github/workflows/ui-tests.yml
vendored
12
.github/workflows/ui-tests.yml
vendored
@@ -49,7 +49,7 @@ jobs:
|
||||
|
||||
- name: Check for UI changes
|
||||
id: check-changes
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: |
|
||||
ui/**
|
||||
@@ -62,7 +62,7 @@ jobs:
|
||||
- name: Get changed source files for targeted tests
|
||||
id: changed-source
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: |
|
||||
ui/**/*.ts
|
||||
@@ -78,7 +78,7 @@ jobs:
|
||||
- name: Check for critical path changes (run all tests)
|
||||
id: critical-changes
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: |
|
||||
ui/lib/**
|
||||
@@ -90,13 +90,13 @@ jobs:
|
||||
|
||||
- name: Setup Node.js ${{ env.NODE_VERSION }}
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
|
||||
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
|
||||
- name: Setup pnpm
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
|
||||
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
|
||||
with:
|
||||
package_json_file: ui/package.json
|
||||
run_install: false
|
||||
@@ -108,7 +108,7 @@ jobs:
|
||||
|
||||
- name: Setup pnpm and Next.js cache
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
|
||||
with:
|
||||
path: |
|
||||
${{ env.STORE_PATH }}
|
||||
|
||||
@@ -36,6 +36,7 @@ All notable changes to the **Prowler API** are documented in this file.
|
||||
|
||||
- Pin all unpinned dependencies to exact versions to prevent supply chain attacks and ensure reproducible builds [(#10469)](https://github.com/prowler-cloud/prowler/pull/10469)
|
||||
- `authlib` bumped from 1.6.6 to 1.6.9 to fix CVE-2026-28802 (JWT `alg: none` validation bypass) [(#10579)](https://github.com/prowler-cloud/prowler/pull/10579)
|
||||
- `aiohttp` bumped from 3.13.3 to 3.13.5 to fix CVE-2026-34520 (the C parser accepted null bytes and control characters in response headers) [(#10538)](https://github.com/prowler-cloud/prowler/pull/10538)
|
||||
|
||||
---
|
||||
|
||||
|
||||
242
api/poetry.lock
generated
242
api/poetry.lock
generated
@@ -103,132 +103,132 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "aiohttp"
|
||||
version = "3.13.3"
|
||||
version = "3.13.5"
|
||||
description = "Async http client/server framework (asyncio)"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d5a372fd5afd301b3a89582817fdcdb6c34124787c70dbcc616f259013e7eef7"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:147e422fd1223005c22b4fe080f5d93ced44460f5f9c105406b753612b587821"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:859bd3f2156e81dd01432f5849fc73e2243d4a487c4fd26609b1299534ee1845"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dca68018bf48c251ba17c72ed479f4dafe9dbd5a73707ad8d28a38d11f3d42af"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fee0c6bc7db1de362252affec009707a17478a00ec69f797d23ca256e36d5940"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c048058117fd649334d81b4b526e94bde3ccaddb20463a815ced6ecbb7d11160"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:215a685b6fbbfcf71dfe96e3eba7a6f58f10da1dfdf4889c7dd856abe430dca7"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2c184bb1fe2cbd2cefba613e9db29a5ab559323f994b6737e370d3da0ac455"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:75ca857eba4e20ce9f546cd59c7007b33906a4cd48f2ff6ccf1ccfc3b646f279"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:81e97251d9298386c2b7dbeb490d3d1badbdc69107fb8c9299dd04eb39bddc0e"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c0e2d366af265797506f0283487223146af57815b388623f0357ef7eac9b209d"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4e239d501f73d6db1522599e14b9b321a7e3b1de66ce33d53a765d975e9f4808"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:0db318f7a6f065d84cb1e02662c526294450b314a02bd9e2a8e67f0d8564ce40"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:bfc1cc2fe31a6026a8a88e4ecfb98d7f6b1fec150cfd708adbfd1d2f42257c29"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:af71fff7bac6bb7508956696dce8f6eec2bbb045eceb40343944b1ae62b5ef11"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-win32.whl", hash = "sha256:37da61e244d1749798c151421602884db5270faf479cf0ef03af0ff68954c9dd"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-win_amd64.whl", hash = "sha256:7e63f210bc1b57ef699035f2b4b6d9ce096b5914414a49b0997c839b2bd2223c"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5b6073099fb654e0a068ae678b10feff95c5cae95bbfcbfa7af669d361a8aa6b"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cb93e166e6c28716c8c6aeb5f99dfb6d5ccf482d29fe9bf9a794110e6d0ab64"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:28e027cf2f6b641693a09f631759b4d9ce9165099d2b5d92af9bd4e197690eea"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3b61b7169ababd7802f9568ed96142616a9118dd2be0d1866e920e77ec8fa92a"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:80dd4c21b0f6237676449c6baaa1039abae86b91636b6c91a7f8e61c87f89540"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:65d2ccb7eabee90ce0503c17716fc77226be026dcc3e65cce859a30db715025b"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5b179331a481cb5529fca8b432d8d3c7001cb217513c94cd72d668d1248688a3"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d4c940f02f49483b18b079d1c27ab948721852b281f8b015c058100e9421dd1"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f9444f105664c4ce47a2a7171a2418bce5b7bae45fb610f4e2c36045d85911d3"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:694976222c711d1d00ba131904beb60534f93966562f64440d0c9d41b8cdb440"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f33ed1a2bf1997a36661874b017f5c4b760f41266341af36febaf271d179f6d7"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e636b3c5f61da31a92bf0d91da83e58fdfa96f178ba682f11d24f31944cdd28c"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:5d2d94f1f5fcbe40838ac51a6ab5704a6f9ea42e72ceda48de5e6b898521da51"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2be0e9ccf23e8a94f6f0650ce06042cefc6ac703d0d7ab6c7a917289f2539ad4"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9af5e68ee47d6534d36791bbe9b646d2a7c7deb6fc24d7943628edfbb3581f29"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-win32.whl", hash = "sha256:a2212ad43c0833a873d0fb3c63fa1bacedd4cf6af2fee62bf4b739ceec3ab239"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-win_amd64.whl", hash = "sha256:642f752c3eb117b105acbd87e2c143de710987e09860d674e068c4c2c441034f"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b903a4dfee7d347e2d87697d0713be59e0b87925be030c9178c5faa58ea58d5c"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a45530014d7a1e09f4a55f4f43097ba0fd155089372e105e4bff4ca76cb1b168"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:27234ef6d85c914f9efeb77ff616dbf4ad2380be0cda40b4db086ffc7ddd1b7d"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d32764c6c9aafb7fb55366a224756387cd50bfa720f32b88e0e6fa45b27dcf29"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b1a6102b4d3ebc07dad44fbf07b45bb600300f15b552ddf1851b5390202ea2e3"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c014c7ea7fb775dd015b2d3137378b7be0249a448a1612268b5a90c2d81de04d"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2b8d8ddba8f95ba17582226f80e2de99c7a7948e66490ef8d947e272a93e9463"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ae8dd55c8e6c4257eae3a20fd2c8f41edaea5992ed67156642493b8daf3cecc"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:01ad2529d4b5035578f5081606a465f3b814c542882804e2e8cda61adf5c71bf"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bb4f7475e359992b580559e008c598091c45b5088f28614e855e42d39c2f1033"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c19b90316ad3b24c69cd78d5c9b4f3aa4497643685901185b65166293d36a00f"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:96d604498a7c782cb15a51c406acaea70d8c027ee6b90c569baa6e7b93073679"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:084911a532763e9d3dd95adf78a78f4096cd5f58cdc18e6fdbc1b58417a45423"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7a4a94eb787e606d0a09404b9c38c113d3b099d508021faa615d70a0131907ce"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:87797e645d9d8e222e04160ee32aa06bc5c163e8499f24db719e7852ec23093a"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-win32.whl", hash = "sha256:b04be762396457bef43f3597c991e192ee7da460a4953d7e647ee4b1c28e7046"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-win_amd64.whl", hash = "sha256:e3531d63d3bdfa7e3ac5e9b27b2dd7ec9df3206a98e0b3445fa906f233264c57"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5dff64413671b0d3e7d5918ea490bdccb97a4ad29b3f311ed423200b2203e01c"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:87b9aab6d6ed88235aa2970294f496ff1a1f9adcd724d800e9b952395a80ffd9"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:425c126c0dc43861e22cb1c14ba4c8e45d09516d0a3ae0a3f7494b79f5f233a3"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f9120f7093c2a32d9647abcaf21e6ad275b4fbec5b55969f978b1a97c7c86bf"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:697753042d57f4bf7122cab985bf15d0cef23c770864580f5af4f52023a56bd6"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6de499a1a44e7de70735d0b39f67c8f25eb3d91eb3103be99ca0fa882cdd987d"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:37239e9f9a7ea9ac5bf6b92b0260b01f8a22281996da609206a84df860bc1261"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f76c1e3fe7d7c8afad7ed193f89a292e1999608170dcc9751a7462a87dfd5bc0"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fc290605db2a917f6e81b0e1e0796469871f5af381ce15c604a3c5c7e51cb730"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4021b51936308aeea0367b8f006dc999ca02bc118a0cc78c303f50a2ff6afb91"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:49a03727c1bba9a97d3e93c9f93ca03a57300f484b6e935463099841261195d3"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3d9908a48eb7416dc1f4524e69f1d32e5d90e3981e4e37eb0aa1cd18f9cfa2a4"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2712039939ec963c237286113c68dbad80a82a4281543f3abf766d9d73228998"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:7bfdc049127717581866fa4708791220970ce291c23e28ccf3922c700740fdc0"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8057c98e0c8472d8846b9c79f56766bcc57e3e8ac7bfd510482332366c56c591"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-win32.whl", hash = "sha256:1449ceddcdbcf2e0446957863af03ebaaa03f94c090f945411b61269e2cb5daf"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-win_amd64.whl", hash = "sha256:693781c45a4033d31d4187d2436f5ac701e7bbfe5df40d917736108c1cc7436e"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:ea37047c6b367fd4bd632bff8077449b8fa034b69e812a18e0132a00fae6e808"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6fc0e2337d1a4c3e6acafda6a78a39d4c14caea625124817420abceed36e2415"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c685f2d80bb67ca8c3837823ad76196b3694b0159d232206d1e461d3d434666f"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:48e377758516d262bde50c2584fc6c578af272559c409eecbdd2bae1601184d6"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:34749271508078b261c4abb1767d42b8d0c0cc9449c73a4df494777dc55f0687"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:82611aeec80eb144416956ec85b6ca45a64d76429c1ed46ae1b5f86c6e0c9a26"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2fff83cfc93f18f215896e3a190e8e5cb413ce01553901aca925176e7568963a"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bbe7d4cecacb439e2e2a8a1a7b935c25b812af7a5fd26503a66dadf428e79ec1"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b928f30fe49574253644b1ca44b1b8adbd903aa0da4b9054a6c20fc7f4092a25"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7b5e8fe4de30df199155baaf64f2fcd604f4c678ed20910db8e2c66dc4b11603"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:8542f41a62bcc58fc7f11cf7c90e0ec324ce44950003feb70640fc2a9092c32a"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:5e1d8c8b8f1d91cd08d8f4a3c2b067bfca6ec043d3ff36de0f3a715feeedf926"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:90455115e5da1c3c51ab619ac57f877da8fd6d73c05aacd125c5ae9819582aba"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:042e9e0bcb5fba81886c8b4fbb9a09d6b8a00245fd8d88e4d989c1f96c74164c"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2eb752b102b12a76ca02dff751a801f028b4ffbbc478840b473597fc91a9ed43"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-win32.whl", hash = "sha256:b556c85915d8efaed322bf1bdae9486aa0f3f764195a0fb6ee962e5c71ef5ce1"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-win_amd64.whl", hash = "sha256:9bf9f7a65e7aa20dd764151fb3d616c81088f91f8df39c3893a536e279b4b984"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:05861afbbec40650d8a07ea324367cb93e9e8cc7762e04dd4405df99fa65159c"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2fc82186fadc4a8316768d61f3722c230e2c1dcab4200d52d2ebdf2482e47592"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0add0900ff220d1d5c5ebbf99ed88b0c1bbf87aa7e4262300ed1376a6b13414f"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:568f416a4072fbfae453dcf9a99194bbb8bdeab718e08ee13dfa2ba0e4bebf29"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:add1da70de90a2569c5e15249ff76a631ccacfe198375eead4aadf3b8dc849dc"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:10b47b7ba335d2e9b1239fa571131a87e2d8ec96b333e68b2a305e7a98b0bae2"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3dd4dce1c718e38081c8f35f323209d4c1df7d4db4bab1b5c88a6b4d12b74587"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34bac00a67a812570d4a460447e1e9e06fae622946955f939051e7cc895cfab8"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a19884d2ee70b06d9204b2727a7b9f983d0c684c650254679e716b0b77920632"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5f8ca7f2bb6ba8348a3614c7918cc4bb73268c5ac2a207576b7afea19d3d9f64"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:b0d95340658b9d2f11d9697f59b3814a9d3bb4b7a7c20b131df4bcef464037c0"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:a1e53262fd202e4b40b70c3aff944a8155059beedc8a89bba9dc1f9ef06a1b56"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:d60ac9663f44168038586cab2157e122e46bdef09e9368b37f2d82d354c23f72"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:90751b8eed69435bac9ff4e3d2f6b3af1f57e37ecb0fbeee59c0174c9e2d41df"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fc353029f176fd2b3ec6cfc71be166aba1936fe5d73dd1992ce289ca6647a9aa"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-win32.whl", hash = "sha256:2e41b18a58da1e474a057b3d35248d8320029f61d70a37629535b16a0c8f3767"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-win_amd64.whl", hash = "sha256:44531a36aa2264a1860089ffd4dce7baf875ee5a6079d5fb42e261c704ef7344"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:31a83ea4aead760dfcb6962efb1d861db48c34379f2ff72db9ddddd4cda9ea2e"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:988a8c5e317544fdf0d39871559e67b6341065b87fceac641108c2096d5506b7"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9b174f267b5cfb9a7dba9ee6859cecd234e9a681841eb85068059bc867fb8f02"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:947c26539750deeaee933b000fb6517cc770bbd064bad6033f1cff4803881e43"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9ebf57d09e131f5323464bd347135a88622d1c0976e88ce15b670e7ad57e4bd6"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4ae5b5a0e1926e504c81c5b84353e7a5516d8778fbbff00429fe7b05bb25cbce"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2ba0eea45eb5cc3172dbfc497c066f19c41bac70963ea1a67d51fc92e4cf9a80"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bae5c2ed2eae26cc382020edad80d01f36cb8e746da40b292e68fec40421dc6a"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8a60e60746623925eab7d25823329941aee7242d559baa119ca2b253c88a7bd6"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e50a2e1404f063427c9d027378472316201a2290959a295169bcf25992d04558"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:9a9dc347e5a3dc7dfdbc1f82da0ef29e388ddb2ed281bfce9dd8248a313e62b7"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b46020d11d23fe16551466c77823df9cc2f2c1e63cc965daf67fa5eec6ca1877"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:69c56fbc1993fa17043e24a546959c0178fe2b5782405ad4559e6c13975c15e3"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:b99281b0704c103d4e11e72a76f1b543d4946fea7dd10767e7e1b5f00d4e5704"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:40c5e40ecc29ba010656c18052b877a1c28f84344825efa106705e835c28530f"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-win32.whl", hash = "sha256:56339a36b9f1fc708260c76c87e593e2afb30d26de9ae1eb445b5e051b98a7a1"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-win_amd64.whl", hash = "sha256:c6b8568a3bb5819a0ad087f16d40e5a3fb6099f39ea1d5625a3edc1e923fc538"},
|
||||
{file = "aiohttp-3.13.3.tar.gz", hash = "sha256:a949eee43d3782f2daae4f4a2819b2cb9b0c5d3b7f7a927067cc84dafdbb9f88"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:02222e7e233295f40e011c1b00e3b0bd451f22cf853a0304c3595633ee47da4b"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bace460460ed20614fa6bc8cb09966c0b8517b8c58ad8046828c6078d25333b5"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f546a4dc1e6a5edbb9fd1fd6ad18134550e096a5a43f4ad74acfbd834fc6670"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c86969d012e51b8e415a8c6ce96f7857d6a87d6207303ab02d5d11ef0cad2274"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b6f6cd1560c5fa427e3b6074bb24d2c64e225afbb7165008903bd42e4e33e28a"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:636bc362f0c5bbc7372bc3ae49737f9e3030dbce469f0f422c8f38079780363d"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6a7cbeb06d1070f1d14895eeeed4dac5913b22d7b456f2eb969f11f4b3993796"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bca9ef7517fd7874a1a08970ae88f497bf5c984610caa0bf40bd7e8450852b95"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:019a67772e034a0e6b9b17c13d0a8fe56ad9fb150fc724b7f3ffd3724288d9e5"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f34ecee82858e41dd217734f0c41a532bd066bcaab636ad830f03a30b2a96f2a"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:4eac02d9af4813ee289cd63a361576da36dba57f5a1ab36377bc2600db0cbb73"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4beac52e9fe46d6abf98b0176a88154b742e878fdf209d2248e99fcdf73cd297"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:c180f480207a9b2475f2b8d8bd7204e47aec952d084b2a2be58a782ffcf96074"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2837fb92951564d6339cedae4a7231692aa9f73cbc4fb2e04263b96844e03b4e"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d9010032a0b9710f58012a1e9c222528763d860ba2ee1422c03473eab47703e7"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-win32.whl", hash = "sha256:7c4b6668b2b2b9027f209ddf647f2a4407784b5d88b8be4efcc72036f365baf9"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-win_amd64.whl", hash = "sha256:cd3db5927bf9167d5a6157ddb2f036f6b6b0ad001ac82355d43e97a4bde76d76"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7ab7229b6f9b5c1ba4910d6c41a9eb11f543eadb3f384df1b4c293f4e73d44d6"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8f14c50708bb156b3a3ca7230b3d820199d56a48e3af76fa21c2d6087190fe3d"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7d2f8616f0ff60bd332022279011776c3ac0faa0f1b463f7bb12326fbc97a1c"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a2567b72e1ffc3ab25510db43f355b29eeada56c0a622e58dcdb19530eb0a3cb"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fb0540c854ac9c0c5ad495908fdfd3e332d553ec731698c0e29b1877ba0d2ec6"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c9883051c6972f58bfc4ebb2116345ee2aa151178e99c3f2b2bbe2af712abd13"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2294172ce08a82fb7c7273485895de1fa1186cc8294cfeb6aef4af42ad261174"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3a807cabd5115fb55af198b98178997a5e0e57dead43eb74a93d9c07d6d4a7dc"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:aa6d0d932e0f39c02b80744273cd5c388a2d9bc07760a03164f229c8e02662f6"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:60869c7ac4aaabe7110f26499f3e6e5696eae98144735b12a9c3d9eae2b51a49"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:26d2f8546f1dfa75efa50c3488215a903c0168d253b75fba4210f57ab77a0fb8"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1162a1492032c82f14271e831c8f4b49f2b6078f4f5fc74de2c912fa225d51d"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:8b14eb3262fad0dc2f89c1a43b13727e709504972186ff6a99a3ecaa77102b6c"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:ca9ac61ac6db4eb6c2a0cd1d0f7e1357647b638ccc92f7e9d8d133e71ed3c6ac"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7996023b2ed59489ae4762256c8516df9820f751cf2c5da8ed2fb20ee50abab3"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-win32.whl", hash = "sha256:77dfa48c9f8013271011e51c00f8ada19851f013cde2c48fca1ba5e0caf5bb06"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-win_amd64.whl", hash = "sha256:d3a4834f221061624b8887090637db9ad4f61752001eae37d56c52fddade2dc8"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:023ecba036ddd840b0b19bf195bfae970083fd7024ce1ac22e9bba90464620e9"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:15c933ad7920b7d9a20de151efcd05a6e38302cbf0e10c9b2acb9a42210a2416"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ab2899f9fa2f9f741896ebb6fa07c4c883bfa5c7f2ddd8cf2aafa86fa981b2d2"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a60eaa2d440cd4707696b52e40ed3e2b0f73f65be07fd0ef23b6b539c9c0b0b4"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:55b3bdd3292283295774ab585160c4004f4f2f203946997f49aac032c84649e9"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c2b2355dc094e5f7d45a7bb262fe7207aa0460b37a0d87027dcf21b5d890e7d5"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b38765950832f7d728297689ad78f5f2cf79ff82487131c4d26fe6ceecdc5f8e"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b18f31b80d5a33661e08c89e202edabf1986e9b49c42b4504371daeaa11b47c1"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:33add2463dde55c4f2d9635c6ab33ce154e5ecf322bd26d09af95c5f81cfa286"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:327cc432fdf1356fb4fbc6fe833ad4e9f6aacb71a8acaa5f1855e4b25910e4a9"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:7c35b0bf0b48a70b4cb4fc5d7bed9b932532728e124874355de1a0af8ec4bc88"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:df23d57718f24badef8656c49743e11a89fd6f5358fa8a7b96e728fda2abf7d3"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:02e048037a6501a5ec1f6fc9736135aec6eb8a004ce48838cb951c515f32c80b"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31cebae8b26f8a615d2b546fee45d5ffb76852ae6450e2a03f42c9102260d6fe"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:888e78eb5ca55a615d285c3c09a7a91b42e9dd6fc699b166ebd5dee87c9ccf14"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-win32.whl", hash = "sha256:8bd3ec6376e68a41f9f95f5ed170e2fcf22d4eb27a1f8cb361d0508f6e0557f3"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-win_amd64.whl", hash = "sha256:110e448e02c729bcebb18c60b9214a87ba33bac4a9fa5e9a5f139938b56c6cb1"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a5029cc80718bbd545123cd8fe5d15025eccaaaace5d0eeec6bd556ad6163d61"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4bb6bf5811620003614076bdc807ef3b5e38244f9d25ca5fe888eaccea2a9832"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a84792f8631bf5a94e52d9cc881c0b824ab42717165a5579c760b830d9392ac9"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:57653eac22c6a4c13eb22ecf4d673d64a12f266e72785ab1c8b8e5940d0e8090"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5e5f7debc7a57af53fdf5c5009f9391d9f4c12867049d509bf7bb164a6e295b"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c719f65bebcdf6716f10e9eff80d27567f7892d8988c06de12bbbd39307c6e3a"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d97f93fdae594d886c5a866636397e2bcab146fd7a132fd6bb9ce182224452f8"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3df334e39d4c2f899a914f1dba283c1aadc311790733f705182998c6f7cae665"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fe6970addfea9e5e081401bcbadf865d2b6da045472f58af08427e108d618540"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7becdf835feff2f4f335d7477f121af787e3504b48b449ff737afb35869ba7bb"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:676e5651705ad5d8a70aeb8eb6936c436d8ebbd56e63436cb7dd9bb36d2a9a46"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:9b16c653d38eb1a611cc898c41e76859ca27f119d25b53c12875fd0474ae31a8"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:999802d5fa0389f58decd24b537c54aa63c01c3219ce17d1214cbda3c2b22d2d"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:ec707059ee75732b1ba130ed5f9580fe10ff75180c812bc267ded039db5128c6"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2d6d44a5b48132053c2f6cd5c8cb14bc67e99a63594e336b0f2af81e94d5530c"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-win32.whl", hash = "sha256:329f292ed14d38a6c4c435e465f48bebb47479fd676a0411936cc371643225cc"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-win_amd64.whl", hash = "sha256:69f571de7500e0557801c0b51f4780482c0ec5fe2ac851af5a92cfce1af1cb83"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:eb4639f32fd4a9904ab8fb45bf3383ba71137f3d9d4ba25b3b3f3109977c5b8c"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:7e5dc4311bd5ac493886c63cbf76ab579dbe4641268e7c74e48e774c74b6f2be"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:756c3c304d394977519824449600adaf2be0ccee76d206ee339c5e76b70ded25"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ecc26751323224cf8186efcf7fbcbc30f4e1d8c7970659daf25ad995e4032a56"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10a75acfcf794edf9d8db50e5a7ec5fc818b2a8d3f591ce93bc7b1210df016d2"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0f7a18f258d124cd678c5fe072fe4432a4d5232b0657fca7c1847f599233c83a"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:df6104c009713d3a89621096f3e3e88cc323fd269dbd7c20afe18535094320be"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:241a94f7de7c0c3b616627aaad530fe2cb620084a8b144d3be7b6ecfe95bae3b"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c974fb66180e58709b6fc402846f13791240d180b74de81d23913abe48e96d94"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:6e27ea05d184afac78aabbac667450c75e54e35f62238d44463131bd3f96753d"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:a79a6d399cef33a11b6f004c67bb07741d91f2be01b8d712d52c75711b1e07c7"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c632ce9c0b534fbe25b52c974515ed674937c5b99f549a92127c85f771a78772"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:fceedde51fbd67ee2bcc8c0b33d0126cc8b51ef3bbde2f86662bd6d5a6f10ec5"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f92995dfec9420bb69ae629abf422e516923ba79ba4403bc750d94fb4a6c68c1"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:20ae0ff08b1f2c8788d6fb85afcb798654ae6ba0b747575f8562de738078457b"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-win32.whl", hash = "sha256:b20df693de16f42b2472a9c485e1c948ee55524786a0a34345511afdd22246f3"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-win_amd64.whl", hash = "sha256:f85c6f327bf0b8c29da7d93b1cabb6363fb5e4e160a32fa241ed2dce21b73162"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:1efb06900858bb618ff5cee184ae2de5828896c448403d51fb633f09e109be0a"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:fee86b7c4bd29bdaf0d53d14739b08a106fdda809ca5fe032a15f52fae5fe254"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:20058e23909b9e65f9da62b396b77dfa95965cbe840f8def6e572538b1d32e36"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8cf20a8d6868cb15a73cab329ffc07291ba8c22b1b88176026106ae39aa6df0f"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:330f5da04c987f1d5bdb8ae189137c77139f36bd1cb23779ca1a354a4b027800"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6f1cbf0c7926d315c3c26c2da41fd2b5d2fe01ac0e157b78caefc51a782196cf"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:53fc049ed6390d05423ba33103ded7281fe897cf97878f369a527070bd95795b"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:898703aa2667e3c5ca4c54ca36cd73f58b7a38ef87a5606414799ebce4d3fd3a"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0494a01ca9584eea1e5fbd6d748e61ecff218c51b576ee1999c23db7066417d8"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:6cf81fe010b8c17b09495cbd15c1d35afbc8fb405c0c9cf4738e5ae3af1d65be"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:c564dd5f09ddc9d8f2c2d0a301cd30a79a2cc1b46dd1a73bef8f0038863d016b"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:2994be9f6e51046c4f864598fd9abeb4fba6e88f0b2152422c9666dcd4aea9c6"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:157826e2fa245d2ef46c83ea8a5faf77ca19355d278d425c29fda0beb3318037"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:a8aca50daa9493e9e13c0f566201a9006f080e7c50e5e90d0b06f53146a54500"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3b13560160d07e047a93f23aaa30718606493036253d5430887514715b67c9d9"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-win32.whl", hash = "sha256:9a0f4474b6ea6818b41f82172d799e4b3d29e22c2c520ce4357856fced9af2f8"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-win_amd64.whl", hash = "sha256:18a2f6c1182c51baa1d28d68fea51513cb2a76612f038853c0ad3c145423d3d9"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:347542f0ea3f95b2a955ee6656461fa1c776e401ac50ebce055a6c38454a0adf"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:178c7b5e62b454c2bc790786e6058c3cc968613b4419251b478c153a4aec32b1"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af545c2cffdb0967a96b6249e6f5f7b0d92cdfd267f9d5238d5b9ca63e8edb10"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:206b7b3ef96e4ce211754f0cd003feb28b7d81f0ad26b8d077a5d5161436067f"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ee5e86776273de1795947d17bddd6bb19e0365fd2af4289c0d2c5454b6b1d36b"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:95d14ca7abefde230f7639ec136ade282655431fd5db03c343b19dda72dd1643"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:912d4b6af530ddb1338a66229dac3a25ff11d4448be3ec3d6340583995f56031"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e999f0c88a458c836d5fb521814e92ed2172c649200336a6df514987c1488258"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:39380e12bd1f2fdab4285b6e055ad48efbaed5c836433b142ed4f5b9be71036a"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9efcc0f11d850cefcafdd9275b9576ad3bfb539bed96807663b32ad99c4d4b88"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:147b4f501d0292077f29d5268c16bb7c864a1f054d7001c4c1812c0421ea1ed0"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:d147004fede1b12f6013a6dbb2a26a986a671a03c6ea740ddc76500e5f1c399f"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:9277145d36a01653863899c665243871434694bcc3431922c3b35c978061bdb8"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4e704c52438f66fdd89588346183d898bb42167cf88f8b7ff1c0f9fc957c348f"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a8a4d3427e8de1312ddf309cc482186466c79895b3a139fed3259fc01dfa9a5b"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-win32.whl", hash = "sha256:6f497a6876aa4b1a102b04996ce4c1170c7040d83faa9387dd921c16e30d5c83"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-win_amd64.whl", hash = "sha256:cb979826071c0986a5f08333a36104153478ce6018c58cba7f9caddaf63d5d67"},
|
||||
{file = "aiohttp-3.13.5.tar.gz", hash = "sha256:9d98cc980ecc96be6eb4c1994ce35d28d8b1f5e5208a23b421187d1209dbb7d1"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
||||
244
poetry.lock
generated
244
poetry.lock
generated
@@ -1,4 +1,4 @@
|
||||
# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand.
|
||||
# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "about-time"
|
||||
@@ -38,132 +38,132 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "aiohttp"
|
||||
version = "3.13.3"
|
||||
version = "3.13.5"
|
||||
description = "Async http client/server framework (asyncio)"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d5a372fd5afd301b3a89582817fdcdb6c34124787c70dbcc616f259013e7eef7"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:147e422fd1223005c22b4fe080f5d93ced44460f5f9c105406b753612b587821"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:859bd3f2156e81dd01432f5849fc73e2243d4a487c4fd26609b1299534ee1845"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dca68018bf48c251ba17c72ed479f4dafe9dbd5a73707ad8d28a38d11f3d42af"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fee0c6bc7db1de362252affec009707a17478a00ec69f797d23ca256e36d5940"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c048058117fd649334d81b4b526e94bde3ccaddb20463a815ced6ecbb7d11160"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:215a685b6fbbfcf71dfe96e3eba7a6f58f10da1dfdf4889c7dd856abe430dca7"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2c184bb1fe2cbd2cefba613e9db29a5ab559323f994b6737e370d3da0ac455"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:75ca857eba4e20ce9f546cd59c7007b33906a4cd48f2ff6ccf1ccfc3b646f279"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:81e97251d9298386c2b7dbeb490d3d1badbdc69107fb8c9299dd04eb39bddc0e"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c0e2d366af265797506f0283487223146af57815b388623f0357ef7eac9b209d"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4e239d501f73d6db1522599e14b9b321a7e3b1de66ce33d53a765d975e9f4808"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:0db318f7a6f065d84cb1e02662c526294450b314a02bd9e2a8e67f0d8564ce40"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:bfc1cc2fe31a6026a8a88e4ecfb98d7f6b1fec150cfd708adbfd1d2f42257c29"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:af71fff7bac6bb7508956696dce8f6eec2bbb045eceb40343944b1ae62b5ef11"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-win32.whl", hash = "sha256:37da61e244d1749798c151421602884db5270faf479cf0ef03af0ff68954c9dd"},
|
||||
{file = "aiohttp-3.13.3-cp310-cp310-win_amd64.whl", hash = "sha256:7e63f210bc1b57ef699035f2b4b6d9ce096b5914414a49b0997c839b2bd2223c"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5b6073099fb654e0a068ae678b10feff95c5cae95bbfcbfa7af669d361a8aa6b"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cb93e166e6c28716c8c6aeb5f99dfb6d5ccf482d29fe9bf9a794110e6d0ab64"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:28e027cf2f6b641693a09f631759b4d9ce9165099d2b5d92af9bd4e197690eea"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3b61b7169ababd7802f9568ed96142616a9118dd2be0d1866e920e77ec8fa92a"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:80dd4c21b0f6237676449c6baaa1039abae86b91636b6c91a7f8e61c87f89540"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:65d2ccb7eabee90ce0503c17716fc77226be026dcc3e65cce859a30db715025b"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5b179331a481cb5529fca8b432d8d3c7001cb217513c94cd72d668d1248688a3"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d4c940f02f49483b18b079d1c27ab948721852b281f8b015c058100e9421dd1"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f9444f105664c4ce47a2a7171a2418bce5b7bae45fb610f4e2c36045d85911d3"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:694976222c711d1d00ba131904beb60534f93966562f64440d0c9d41b8cdb440"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f33ed1a2bf1997a36661874b017f5c4b760f41266341af36febaf271d179f6d7"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e636b3c5f61da31a92bf0d91da83e58fdfa96f178ba682f11d24f31944cdd28c"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:5d2d94f1f5fcbe40838ac51a6ab5704a6f9ea42e72ceda48de5e6b898521da51"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2be0e9ccf23e8a94f6f0650ce06042cefc6ac703d0d7ab6c7a917289f2539ad4"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9af5e68ee47d6534d36791bbe9b646d2a7c7deb6fc24d7943628edfbb3581f29"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-win32.whl", hash = "sha256:a2212ad43c0833a873d0fb3c63fa1bacedd4cf6af2fee62bf4b739ceec3ab239"},
|
||||
{file = "aiohttp-3.13.3-cp311-cp311-win_amd64.whl", hash = "sha256:642f752c3eb117b105acbd87e2c143de710987e09860d674e068c4c2c441034f"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b903a4dfee7d347e2d87697d0713be59e0b87925be030c9178c5faa58ea58d5c"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a45530014d7a1e09f4a55f4f43097ba0fd155089372e105e4bff4ca76cb1b168"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:27234ef6d85c914f9efeb77ff616dbf4ad2380be0cda40b4db086ffc7ddd1b7d"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d32764c6c9aafb7fb55366a224756387cd50bfa720f32b88e0e6fa45b27dcf29"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b1a6102b4d3ebc07dad44fbf07b45bb600300f15b552ddf1851b5390202ea2e3"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c014c7ea7fb775dd015b2d3137378b7be0249a448a1612268b5a90c2d81de04d"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2b8d8ddba8f95ba17582226f80e2de99c7a7948e66490ef8d947e272a93e9463"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ae8dd55c8e6c4257eae3a20fd2c8f41edaea5992ed67156642493b8daf3cecc"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:01ad2529d4b5035578f5081606a465f3b814c542882804e2e8cda61adf5c71bf"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bb4f7475e359992b580559e008c598091c45b5088f28614e855e42d39c2f1033"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c19b90316ad3b24c69cd78d5c9b4f3aa4497643685901185b65166293d36a00f"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:96d604498a7c782cb15a51c406acaea70d8c027ee6b90c569baa6e7b93073679"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:084911a532763e9d3dd95adf78a78f4096cd5f58cdc18e6fdbc1b58417a45423"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7a4a94eb787e606d0a09404b9c38c113d3b099d508021faa615d70a0131907ce"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:87797e645d9d8e222e04160ee32aa06bc5c163e8499f24db719e7852ec23093a"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-win32.whl", hash = "sha256:b04be762396457bef43f3597c991e192ee7da460a4953d7e647ee4b1c28e7046"},
|
||||
{file = "aiohttp-3.13.3-cp312-cp312-win_amd64.whl", hash = "sha256:e3531d63d3bdfa7e3ac5e9b27b2dd7ec9df3206a98e0b3445fa906f233264c57"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5dff64413671b0d3e7d5918ea490bdccb97a4ad29b3f311ed423200b2203e01c"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:87b9aab6d6ed88235aa2970294f496ff1a1f9adcd724d800e9b952395a80ffd9"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:425c126c0dc43861e22cb1c14ba4c8e45d09516d0a3ae0a3f7494b79f5f233a3"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f9120f7093c2a32d9647abcaf21e6ad275b4fbec5b55969f978b1a97c7c86bf"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:697753042d57f4bf7122cab985bf15d0cef23c770864580f5af4f52023a56bd6"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6de499a1a44e7de70735d0b39f67c8f25eb3d91eb3103be99ca0fa882cdd987d"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:37239e9f9a7ea9ac5bf6b92b0260b01f8a22281996da609206a84df860bc1261"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f76c1e3fe7d7c8afad7ed193f89a292e1999608170dcc9751a7462a87dfd5bc0"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fc290605db2a917f6e81b0e1e0796469871f5af381ce15c604a3c5c7e51cb730"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4021b51936308aeea0367b8f006dc999ca02bc118a0cc78c303f50a2ff6afb91"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:49a03727c1bba9a97d3e93c9f93ca03a57300f484b6e935463099841261195d3"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3d9908a48eb7416dc1f4524e69f1d32e5d90e3981e4e37eb0aa1cd18f9cfa2a4"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2712039939ec963c237286113c68dbad80a82a4281543f3abf766d9d73228998"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:7bfdc049127717581866fa4708791220970ce291c23e28ccf3922c700740fdc0"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8057c98e0c8472d8846b9c79f56766bcc57e3e8ac7bfd510482332366c56c591"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-win32.whl", hash = "sha256:1449ceddcdbcf2e0446957863af03ebaaa03f94c090f945411b61269e2cb5daf"},
|
||||
{file = "aiohttp-3.13.3-cp313-cp313-win_amd64.whl", hash = "sha256:693781c45a4033d31d4187d2436f5ac701e7bbfe5df40d917736108c1cc7436e"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:ea37047c6b367fd4bd632bff8077449b8fa034b69e812a18e0132a00fae6e808"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6fc0e2337d1a4c3e6acafda6a78a39d4c14caea625124817420abceed36e2415"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c685f2d80bb67ca8c3837823ad76196b3694b0159d232206d1e461d3d434666f"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:48e377758516d262bde50c2584fc6c578af272559c409eecbdd2bae1601184d6"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:34749271508078b261c4abb1767d42b8d0c0cc9449c73a4df494777dc55f0687"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:82611aeec80eb144416956ec85b6ca45a64d76429c1ed46ae1b5f86c6e0c9a26"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2fff83cfc93f18f215896e3a190e8e5cb413ce01553901aca925176e7568963a"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bbe7d4cecacb439e2e2a8a1a7b935c25b812af7a5fd26503a66dadf428e79ec1"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b928f30fe49574253644b1ca44b1b8adbd903aa0da4b9054a6c20fc7f4092a25"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7b5e8fe4de30df199155baaf64f2fcd604f4c678ed20910db8e2c66dc4b11603"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:8542f41a62bcc58fc7f11cf7c90e0ec324ce44950003feb70640fc2a9092c32a"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:5e1d8c8b8f1d91cd08d8f4a3c2b067bfca6ec043d3ff36de0f3a715feeedf926"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:90455115e5da1c3c51ab619ac57f877da8fd6d73c05aacd125c5ae9819582aba"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:042e9e0bcb5fba81886c8b4fbb9a09d6b8a00245fd8d88e4d989c1f96c74164c"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2eb752b102b12a76ca02dff751a801f028b4ffbbc478840b473597fc91a9ed43"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-win32.whl", hash = "sha256:b556c85915d8efaed322bf1bdae9486aa0f3f764195a0fb6ee962e5c71ef5ce1"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314-win_amd64.whl", hash = "sha256:9bf9f7a65e7aa20dd764151fb3d616c81088f91f8df39c3893a536e279b4b984"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:05861afbbec40650d8a07ea324367cb93e9e8cc7762e04dd4405df99fa65159c"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2fc82186fadc4a8316768d61f3722c230e2c1dcab4200d52d2ebdf2482e47592"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0add0900ff220d1d5c5ebbf99ed88b0c1bbf87aa7e4262300ed1376a6b13414f"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:568f416a4072fbfae453dcf9a99194bbb8bdeab718e08ee13dfa2ba0e4bebf29"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:add1da70de90a2569c5e15249ff76a631ccacfe198375eead4aadf3b8dc849dc"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:10b47b7ba335d2e9b1239fa571131a87e2d8ec96b333e68b2a305e7a98b0bae2"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3dd4dce1c718e38081c8f35f323209d4c1df7d4db4bab1b5c88a6b4d12b74587"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34bac00a67a812570d4a460447e1e9e06fae622946955f939051e7cc895cfab8"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a19884d2ee70b06d9204b2727a7b9f983d0c684c650254679e716b0b77920632"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5f8ca7f2bb6ba8348a3614c7918cc4bb73268c5ac2a207576b7afea19d3d9f64"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:b0d95340658b9d2f11d9697f59b3814a9d3bb4b7a7c20b131df4bcef464037c0"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:a1e53262fd202e4b40b70c3aff944a8155059beedc8a89bba9dc1f9ef06a1b56"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:d60ac9663f44168038586cab2157e122e46bdef09e9368b37f2d82d354c23f72"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:90751b8eed69435bac9ff4e3d2f6b3af1f57e37ecb0fbeee59c0174c9e2d41df"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fc353029f176fd2b3ec6cfc71be166aba1936fe5d73dd1992ce289ca6647a9aa"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-win32.whl", hash = "sha256:2e41b18a58da1e474a057b3d35248d8320029f61d70a37629535b16a0c8f3767"},
|
||||
{file = "aiohttp-3.13.3-cp314-cp314t-win_amd64.whl", hash = "sha256:44531a36aa2264a1860089ffd4dce7baf875ee5a6079d5fb42e261c704ef7344"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:31a83ea4aead760dfcb6962efb1d861db48c34379f2ff72db9ddddd4cda9ea2e"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:988a8c5e317544fdf0d39871559e67b6341065b87fceac641108c2096d5506b7"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9b174f267b5cfb9a7dba9ee6859cecd234e9a681841eb85068059bc867fb8f02"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:947c26539750deeaee933b000fb6517cc770bbd064bad6033f1cff4803881e43"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9ebf57d09e131f5323464bd347135a88622d1c0976e88ce15b670e7ad57e4bd6"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4ae5b5a0e1926e504c81c5b84353e7a5516d8778fbbff00429fe7b05bb25cbce"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2ba0eea45eb5cc3172dbfc497c066f19c41bac70963ea1a67d51fc92e4cf9a80"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bae5c2ed2eae26cc382020edad80d01f36cb8e746da40b292e68fec40421dc6a"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8a60e60746623925eab7d25823329941aee7242d559baa119ca2b253c88a7bd6"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e50a2e1404f063427c9d027378472316201a2290959a295169bcf25992d04558"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:9a9dc347e5a3dc7dfdbc1f82da0ef29e388ddb2ed281bfce9dd8248a313e62b7"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b46020d11d23fe16551466c77823df9cc2f2c1e63cc965daf67fa5eec6ca1877"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:69c56fbc1993fa17043e24a546959c0178fe2b5782405ad4559e6c13975c15e3"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:b99281b0704c103d4e11e72a76f1b543d4946fea7dd10767e7e1b5f00d4e5704"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:40c5e40ecc29ba010656c18052b877a1c28f84344825efa106705e835c28530f"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-win32.whl", hash = "sha256:56339a36b9f1fc708260c76c87e593e2afb30d26de9ae1eb445b5e051b98a7a1"},
|
||||
{file = "aiohttp-3.13.3-cp39-cp39-win_amd64.whl", hash = "sha256:c6b8568a3bb5819a0ad087f16d40e5a3fb6099f39ea1d5625a3edc1e923fc538"},
|
||||
{file = "aiohttp-3.13.3.tar.gz", hash = "sha256:a949eee43d3782f2daae4f4a2819b2cb9b0c5d3b7f7a927067cc84dafdbb9f88"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:02222e7e233295f40e011c1b00e3b0bd451f22cf853a0304c3595633ee47da4b"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bace460460ed20614fa6bc8cb09966c0b8517b8c58ad8046828c6078d25333b5"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f546a4dc1e6a5edbb9fd1fd6ad18134550e096a5a43f4ad74acfbd834fc6670"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c86969d012e51b8e415a8c6ce96f7857d6a87d6207303ab02d5d11ef0cad2274"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b6f6cd1560c5fa427e3b6074bb24d2c64e225afbb7165008903bd42e4e33e28a"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:636bc362f0c5bbc7372bc3ae49737f9e3030dbce469f0f422c8f38079780363d"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6a7cbeb06d1070f1d14895eeeed4dac5913b22d7b456f2eb969f11f4b3993796"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bca9ef7517fd7874a1a08970ae88f497bf5c984610caa0bf40bd7e8450852b95"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:019a67772e034a0e6b9b17c13d0a8fe56ad9fb150fc724b7f3ffd3724288d9e5"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f34ecee82858e41dd217734f0c41a532bd066bcaab636ad830f03a30b2a96f2a"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:4eac02d9af4813ee289cd63a361576da36dba57f5a1ab36377bc2600db0cbb73"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4beac52e9fe46d6abf98b0176a88154b742e878fdf209d2248e99fcdf73cd297"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:c180f480207a9b2475f2b8d8bd7204e47aec952d084b2a2be58a782ffcf96074"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2837fb92951564d6339cedae4a7231692aa9f73cbc4fb2e04263b96844e03b4e"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d9010032a0b9710f58012a1e9c222528763d860ba2ee1422c03473eab47703e7"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-win32.whl", hash = "sha256:7c4b6668b2b2b9027f209ddf647f2a4407784b5d88b8be4efcc72036f365baf9"},
|
||||
{file = "aiohttp-3.13.5-cp310-cp310-win_amd64.whl", hash = "sha256:cd3db5927bf9167d5a6157ddb2f036f6b6b0ad001ac82355d43e97a4bde76d76"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7ab7229b6f9b5c1ba4910d6c41a9eb11f543eadb3f384df1b4c293f4e73d44d6"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8f14c50708bb156b3a3ca7230b3d820199d56a48e3af76fa21c2d6087190fe3d"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7d2f8616f0ff60bd332022279011776c3ac0faa0f1b463f7bb12326fbc97a1c"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a2567b72e1ffc3ab25510db43f355b29eeada56c0a622e58dcdb19530eb0a3cb"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fb0540c854ac9c0c5ad495908fdfd3e332d553ec731698c0e29b1877ba0d2ec6"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c9883051c6972f58bfc4ebb2116345ee2aa151178e99c3f2b2bbe2af712abd13"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2294172ce08a82fb7c7273485895de1fa1186cc8294cfeb6aef4af42ad261174"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3a807cabd5115fb55af198b98178997a5e0e57dead43eb74a93d9c07d6d4a7dc"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:aa6d0d932e0f39c02b80744273cd5c388a2d9bc07760a03164f229c8e02662f6"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:60869c7ac4aaabe7110f26499f3e6e5696eae98144735b12a9c3d9eae2b51a49"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:26d2f8546f1dfa75efa50c3488215a903c0168d253b75fba4210f57ab77a0fb8"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1162a1492032c82f14271e831c8f4b49f2b6078f4f5fc74de2c912fa225d51d"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:8b14eb3262fad0dc2f89c1a43b13727e709504972186ff6a99a3ecaa77102b6c"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:ca9ac61ac6db4eb6c2a0cd1d0f7e1357647b638ccc92f7e9d8d133e71ed3c6ac"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7996023b2ed59489ae4762256c8516df9820f751cf2c5da8ed2fb20ee50abab3"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-win32.whl", hash = "sha256:77dfa48c9f8013271011e51c00f8ada19851f013cde2c48fca1ba5e0caf5bb06"},
|
||||
{file = "aiohttp-3.13.5-cp311-cp311-win_amd64.whl", hash = "sha256:d3a4834f221061624b8887090637db9ad4f61752001eae37d56c52fddade2dc8"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:023ecba036ddd840b0b19bf195bfae970083fd7024ce1ac22e9bba90464620e9"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:15c933ad7920b7d9a20de151efcd05a6e38302cbf0e10c9b2acb9a42210a2416"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ab2899f9fa2f9f741896ebb6fa07c4c883bfa5c7f2ddd8cf2aafa86fa981b2d2"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a60eaa2d440cd4707696b52e40ed3e2b0f73f65be07fd0ef23b6b539c9c0b0b4"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:55b3bdd3292283295774ab585160c4004f4f2f203946997f49aac032c84649e9"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c2b2355dc094e5f7d45a7bb262fe7207aa0460b37a0d87027dcf21b5d890e7d5"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b38765950832f7d728297689ad78f5f2cf79ff82487131c4d26fe6ceecdc5f8e"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b18f31b80d5a33661e08c89e202edabf1986e9b49c42b4504371daeaa11b47c1"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:33add2463dde55c4f2d9635c6ab33ce154e5ecf322bd26d09af95c5f81cfa286"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:327cc432fdf1356fb4fbc6fe833ad4e9f6aacb71a8acaa5f1855e4b25910e4a9"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:7c35b0bf0b48a70b4cb4fc5d7bed9b932532728e124874355de1a0af8ec4bc88"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:df23d57718f24badef8656c49743e11a89fd6f5358fa8a7b96e728fda2abf7d3"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:02e048037a6501a5ec1f6fc9736135aec6eb8a004ce48838cb951c515f32c80b"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31cebae8b26f8a615d2b546fee45d5ffb76852ae6450e2a03f42c9102260d6fe"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:888e78eb5ca55a615d285c3c09a7a91b42e9dd6fc699b166ebd5dee87c9ccf14"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-win32.whl", hash = "sha256:8bd3ec6376e68a41f9f95f5ed170e2fcf22d4eb27a1f8cb361d0508f6e0557f3"},
|
||||
{file = "aiohttp-3.13.5-cp312-cp312-win_amd64.whl", hash = "sha256:110e448e02c729bcebb18c60b9214a87ba33bac4a9fa5e9a5f139938b56c6cb1"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a5029cc80718bbd545123cd8fe5d15025eccaaaace5d0eeec6bd556ad6163d61"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4bb6bf5811620003614076bdc807ef3b5e38244f9d25ca5fe888eaccea2a9832"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a84792f8631bf5a94e52d9cc881c0b824ab42717165a5579c760b830d9392ac9"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:57653eac22c6a4c13eb22ecf4d673d64a12f266e72785ab1c8b8e5940d0e8090"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5e5f7debc7a57af53fdf5c5009f9391d9f4c12867049d509bf7bb164a6e295b"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c719f65bebcdf6716f10e9eff80d27567f7892d8988c06de12bbbd39307c6e3a"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d97f93fdae594d886c5a866636397e2bcab146fd7a132fd6bb9ce182224452f8"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3df334e39d4c2f899a914f1dba283c1aadc311790733f705182998c6f7cae665"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fe6970addfea9e5e081401bcbadf865d2b6da045472f58af08427e108d618540"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7becdf835feff2f4f335d7477f121af787e3504b48b449ff737afb35869ba7bb"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:676e5651705ad5d8a70aeb8eb6936c436d8ebbd56e63436cb7dd9bb36d2a9a46"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:9b16c653d38eb1a611cc898c41e76859ca27f119d25b53c12875fd0474ae31a8"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:999802d5fa0389f58decd24b537c54aa63c01c3219ce17d1214cbda3c2b22d2d"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:ec707059ee75732b1ba130ed5f9580fe10ff75180c812bc267ded039db5128c6"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2d6d44a5b48132053c2f6cd5c8cb14bc67e99a63594e336b0f2af81e94d5530c"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-win32.whl", hash = "sha256:329f292ed14d38a6c4c435e465f48bebb47479fd676a0411936cc371643225cc"},
|
||||
{file = "aiohttp-3.13.5-cp313-cp313-win_amd64.whl", hash = "sha256:69f571de7500e0557801c0b51f4780482c0ec5fe2ac851af5a92cfce1af1cb83"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:eb4639f32fd4a9904ab8fb45bf3383ba71137f3d9d4ba25b3b3f3109977c5b8c"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:7e5dc4311bd5ac493886c63cbf76ab579dbe4641268e7c74e48e774c74b6f2be"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:756c3c304d394977519824449600adaf2be0ccee76d206ee339c5e76b70ded25"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ecc26751323224cf8186efcf7fbcbc30f4e1d8c7970659daf25ad995e4032a56"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10a75acfcf794edf9d8db50e5a7ec5fc818b2a8d3f591ce93bc7b1210df016d2"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0f7a18f258d124cd678c5fe072fe4432a4d5232b0657fca7c1847f599233c83a"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:df6104c009713d3a89621096f3e3e88cc323fd269dbd7c20afe18535094320be"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:241a94f7de7c0c3b616627aaad530fe2cb620084a8b144d3be7b6ecfe95bae3b"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c974fb66180e58709b6fc402846f13791240d180b74de81d23913abe48e96d94"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:6e27ea05d184afac78aabbac667450c75e54e35f62238d44463131bd3f96753d"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:a79a6d399cef33a11b6f004c67bb07741d91f2be01b8d712d52c75711b1e07c7"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c632ce9c0b534fbe25b52c974515ed674937c5b99f549a92127c85f771a78772"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:fceedde51fbd67ee2bcc8c0b33d0126cc8b51ef3bbde2f86662bd6d5a6f10ec5"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f92995dfec9420bb69ae629abf422e516923ba79ba4403bc750d94fb4a6c68c1"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:20ae0ff08b1f2c8788d6fb85afcb798654ae6ba0b747575f8562de738078457b"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-win32.whl", hash = "sha256:b20df693de16f42b2472a9c485e1c948ee55524786a0a34345511afdd22246f3"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314-win_amd64.whl", hash = "sha256:f85c6f327bf0b8c29da7d93b1cabb6363fb5e4e160a32fa241ed2dce21b73162"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:1efb06900858bb618ff5cee184ae2de5828896c448403d51fb633f09e109be0a"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:fee86b7c4bd29bdaf0d53d14739b08a106fdda809ca5fe032a15f52fae5fe254"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:20058e23909b9e65f9da62b396b77dfa95965cbe840f8def6e572538b1d32e36"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8cf20a8d6868cb15a73cab329ffc07291ba8c22b1b88176026106ae39aa6df0f"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:330f5da04c987f1d5bdb8ae189137c77139f36bd1cb23779ca1a354a4b027800"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6f1cbf0c7926d315c3c26c2da41fd2b5d2fe01ac0e157b78caefc51a782196cf"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:53fc049ed6390d05423ba33103ded7281fe897cf97878f369a527070bd95795b"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:898703aa2667e3c5ca4c54ca36cd73f58b7a38ef87a5606414799ebce4d3fd3a"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0494a01ca9584eea1e5fbd6d748e61ecff218c51b576ee1999c23db7066417d8"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:6cf81fe010b8c17b09495cbd15c1d35afbc8fb405c0c9cf4738e5ae3af1d65be"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:c564dd5f09ddc9d8f2c2d0a301cd30a79a2cc1b46dd1a73bef8f0038863d016b"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:2994be9f6e51046c4f864598fd9abeb4fba6e88f0b2152422c9666dcd4aea9c6"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:157826e2fa245d2ef46c83ea8a5faf77ca19355d278d425c29fda0beb3318037"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:a8aca50daa9493e9e13c0f566201a9006f080e7c50e5e90d0b06f53146a54500"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3b13560160d07e047a93f23aaa30718606493036253d5430887514715b67c9d9"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-win32.whl", hash = "sha256:9a0f4474b6ea6818b41f82172d799e4b3d29e22c2c520ce4357856fced9af2f8"},
|
||||
{file = "aiohttp-3.13.5-cp314-cp314t-win_amd64.whl", hash = "sha256:18a2f6c1182c51baa1d28d68fea51513cb2a76612f038853c0ad3c145423d3d9"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:347542f0ea3f95b2a955ee6656461fa1c776e401ac50ebce055a6c38454a0adf"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:178c7b5e62b454c2bc790786e6058c3cc968613b4419251b478c153a4aec32b1"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af545c2cffdb0967a96b6249e6f5f7b0d92cdfd267f9d5238d5b9ca63e8edb10"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:206b7b3ef96e4ce211754f0cd003feb28b7d81f0ad26b8d077a5d5161436067f"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ee5e86776273de1795947d17bddd6bb19e0365fd2af4289c0d2c5454b6b1d36b"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:95d14ca7abefde230f7639ec136ade282655431fd5db03c343b19dda72dd1643"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:912d4b6af530ddb1338a66229dac3a25ff11d4448be3ec3d6340583995f56031"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e999f0c88a458c836d5fb521814e92ed2172c649200336a6df514987c1488258"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:39380e12bd1f2fdab4285b6e055ad48efbaed5c836433b142ed4f5b9be71036a"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9efcc0f11d850cefcafdd9275b9576ad3bfb539bed96807663b32ad99c4d4b88"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:147b4f501d0292077f29d5268c16bb7c864a1f054d7001c4c1812c0421ea1ed0"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:d147004fede1b12f6013a6dbb2a26a986a671a03c6ea740ddc76500e5f1c399f"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:9277145d36a01653863899c665243871434694bcc3431922c3b35c978061bdb8"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4e704c52438f66fdd89588346183d898bb42167cf88f8b7ff1c0f9fc957c348f"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a8a4d3427e8de1312ddf309cc482186466c79895b3a139fed3259fc01dfa9a5b"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-win32.whl", hash = "sha256:6f497a6876aa4b1a102b04996ce4c1170c7040d83faa9387dd921c16e30d5c83"},
|
||||
{file = "aiohttp-3.13.5-cp39-cp39-win_amd64.whl", hash = "sha256:cb979826071c0986a5f08333a36104153478ce6018c58cba7f9caddaf63d5d67"},
|
||||
{file = "aiohttp-3.13.5.tar.gz", hash = "sha256:9d98cc980ecc96be6eb4c1994ce35d28d8b1f5e5208a23b421187d1209dbb7d1"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
||||
@@ -45,6 +45,7 @@ All notable changes to the **Prowler SDK** are documented in this file.
|
||||
- Sensitive CLI flag values (tokens, keys, passwords) in HTML output "Parameters used" field now redacted to prevent credential leaks [(#10518)](https://github.com/prowler-cloud/prowler/pull/10518)
|
||||
- `authlib` bumped from 1.6.5 to 1.6.9 to fix CVE-2026-28802 (JWT `alg: none` validation bypass) [(#10579)](https://github.com/prowler-cloud/prowler/pull/10579)
|
||||
- `cryptography` bumped from 44.0.3 to 46.0.6 ([CVE-2026-26007](https://github.com/pyca/cryptography/security/advisories/GHSA-r6ph-v2qm-q3c2), [CVE-2026-34073](https://github.com/pyca/cryptography/security/advisories/GHSA-m959-cc7f-wv43)), `oci` to 2.169.0, and `alibabacloud-tea-openapi` to 0.4.4 [(#10535)](https://github.com/prowler-cloud/prowler/pull/10535)
|
||||
- `aiohttp` bumped from 3.13.3 to 3.13.5 to fix CVE-2026-34520 (the C parser accepted null bytes and control characters in response headers) [(#10537)](https://github.com/prowler-cloud/prowler/pull/10537)
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -1,16 +1,28 @@
|
||||
---
|
||||
name: prowler-compliance
|
||||
description: >
|
||||
Creates and manages Prowler compliance frameworks.
|
||||
Trigger: When working with compliance frameworks (CIS, NIST, PCI-DSS, SOC2, GDPR, ISO27001, ENS, MITRE ATT&CK).
|
||||
Creates, syncs, audits and manages Prowler compliance frameworks end-to-end.
|
||||
Covers the four-layer architecture (SDK models → JSON catalogs → output
|
||||
formatters → API/UI), upstream sync workflows, cloud-auditor check-mapping
|
||||
reviews, output formatter creation, and framework-specific attribute models.
|
||||
Trigger: When working with compliance frameworks (CIS, NIST, PCI-DSS, SOC2,
|
||||
GDPR, ISO27001, ENS, MITRE ATT&CK, CCC, C5, CSA CCM, KISA ISMS-P,
|
||||
Prowler ThreatScore, FedRAMP, HIPAA), syncing with upstream catalogs,
|
||||
auditing check-to-requirement mappings, adding output formatters, or fixing
|
||||
compliance JSON bugs (duplicate IDs, empty Version, wrong Section, stale
|
||||
check refs).
|
||||
license: Apache-2.0
|
||||
metadata:
|
||||
author: prowler-cloud
|
||||
version: "1.1"
|
||||
version: "1.2"
|
||||
scope: [root, sdk]
|
||||
auto_invoke:
|
||||
- "Creating/updating compliance frameworks"
|
||||
- "Mapping checks to compliance controls"
|
||||
- "Syncing compliance framework with upstream catalog"
|
||||
- "Auditing check-to-requirement mappings as a cloud auditor"
|
||||
- "Adding a compliance output formatter (per-provider class + table dispatcher)"
|
||||
- "Fixing compliance JSON bugs (duplicate IDs, empty Section, stale refs)"
|
||||
allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, WebSearch, Task
|
||||
---
|
||||
|
||||
@@ -18,10 +30,82 @@ allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, WebSearch, Task
|
||||
|
||||
Use this skill when:
|
||||
- Creating a new compliance framework for any provider
|
||||
- **Syncing an existing framework with an upstream source of truth** (CIS, FINOS CCC, CSA CCM, NIST, ENS, etc.)
|
||||
- Adding requirements to existing frameworks
|
||||
- Mapping checks to compliance controls
|
||||
- **Auditing existing check mappings as a cloud auditor** (user asks "are these mappings correct?", "which checks apply to this requirement?", "review the mappings")
|
||||
- **Adding a new output formatter** (new framework needs a table dispatcher + per-provider classes + CSV models)
|
||||
- **Fixing JSON bugs**: duplicate IDs, empty Version, wrong Section, stale check refs, inconsistent FamilyName, padded tangential check mappings
|
||||
- **Registering a framework in the CLI table dispatcher or API export map**
|
||||
- Investigating why a finding/check isn't showing under the expected compliance framework in the UI
|
||||
- Understanding compliance framework structures and attributes
|
||||
|
||||
## Four-Layer Architecture (Mental Model)
|
||||
|
||||
Prowler compliance is a **four-layer system** hanging off one Pydantic model tree. Bugs usually happen where one layer doesn't match another, so know all four before touching anything.
|
||||
|
||||
### Layer 1: SDK / Core Models — `prowler/lib/check/`
|
||||
|
||||
- **`compliance_models.py`** — Pydantic **v1** model tree (`from pydantic.v1 import`). One `*_Requirement_Attribute` class per framework type + `Generic_Compliance_Requirement_Attribute` as fallback.
|
||||
- `Compliance_Requirement.Attributes: list[Union[...]]` — **`Generic_Compliance_Requirement_Attribute` MUST be LAST** in the Union or every framework-specific attribute falls through to Generic (Pydantic v1 tries union members in order).
|
||||
- **`compliance.py`** — runtime linker. `get_check_compliance()` builds the key as `f"{Framework}-{Version}"` **only if `Version` is non-empty**. An empty Version makes the key just `"{Framework}"` — this breaks downstream filters and tests that expect the versioned key.
|
||||
- `Compliance.get_bulk(provider)` walks `prowler/compliance/{provider}/` and parses every `.json` file. No central index — just directory scan.
|
||||
|
||||
### Layer 2: JSON Frameworks — `prowler/compliance/{provider}/`
|
||||
|
||||
See "Compliance Framework Location" and "Framework-Specific Attribute Structures" sections below.
|
||||
|
||||
### Layer 3: Output Formatters — `prowler/lib/outputs/compliance/{framework}/`
|
||||
|
||||
**Every framework directory follows this exact convention** — do not deviate:
|
||||
|
||||
```
|
||||
{framework}/
|
||||
├── __init__.py
|
||||
├── {framework}.py # ONLY get_{framework}_table() — NO function docstring
|
||||
├── {framework}_{provider}.py # One class per provider (e.g., CCC_AWS, CCC_Azure, CCC_GCP)
|
||||
└── models.py # One Pydantic v2 BaseModel per provider (CSV columns)
|
||||
```
|
||||
|
||||
- **`{framework}.py`** holds the **table dispatcher function** `get_{framework}_table()`. It prints the pass/fail/muted summary table. **Must NOT import `Finding` or `ComplianceOutput`** — doing so creates a circular import with `prowler/lib/outputs/compliance/compliance.py`. Only imports: `colorama`, `tabulate`, `prowler.config.config.orange_color`.
|
||||
- **`{framework}_{provider}.py`** holds a per-provider class like `CCC_AWS(ComplianceOutput)` with a `transform()` method that walks findings and emits rows. This file IS allowed to import `Finding` because it's not on the dispatcher import chain.
|
||||
- **`models.py`** holds one Pydantic v2 `BaseModel` per provider. Field names become CSV column headers (**public API** — renaming breaks downstream consumers).
|
||||
- **Never collapse per-provider files into a unified parameterized class**, even when DRY-tempting. Every framework in Prowler follows the per-provider file pattern and reviewers will reject the refactor. CSV columns differ per provider (`AccountId`/`Region` vs `SubscriptionId`/`Location` vs `ProjectId`/`Location`) — three classes is the convention.
|
||||
- **No function docstring on `get_{framework}_table()`** — no other framework has one; stay consistent.
|
||||
- Register in `prowler/lib/outputs/compliance/compliance.py` → `display_compliance_table()` with an `elif compliance_framework.startswith("{framework}_"):` branch. Import the table function at the top of the file.
|
||||
|
||||
### Layer 4: API / UI
|
||||
|
||||
- **API table dispatcher**: `api/src/backend/tasks/jobs/export.py` → `COMPLIANCE_CLASS_MAP` keyed by provider. Uses `startswith` predicates: `(lambda name: name.startswith("ccc_"), CCC_AWS)`. **Never use exact match** (`name == "ccc_aws"`) — it's inconsistent and breaks versioning.
|
||||
- **API lazy loader**: `api/src/backend/api/compliance.py` — `LazyComplianceTemplate` and `LazyChecksMapping` load compliance per provider on first access.
|
||||
- **UI mapper routing**: `ui/lib/compliance/compliance-mapper.ts` routes framework names → per-framework mapper.
|
||||
- **UI per-framework mapper**: `ui/lib/compliance/{framework}.tsx` flattens `Requirements` into a 3-level tree (Framework → Category → Control → Requirement) for the accordion view. Groups by `Attributes[0].FamilyName` and `Attributes[0].Section`.
|
||||
- **UI detail panel**: `ui/components/compliance/compliance-custom-details/{framework}-details.tsx`.
|
||||
- **UI types**: `ui/types/compliance.ts` — TypeScript mirrors of the attribute metadata.
|
||||
|
||||
### The CLI Pipeline (end-to-end)
|
||||
|
||||
```
|
||||
prowler aws --compliance ccc_aws
|
||||
↓
|
||||
Compliance.get_bulk("aws") → parses prowler/compliance/aws/*.json
|
||||
↓
|
||||
update_checks_metadata_with_compliance() → attaches compliance info to CheckMetadata
|
||||
↓
|
||||
execute_checks() → runs checks, produces Finding objects
|
||||
↓
|
||||
get_check_compliance(finding, "aws", bulk_checks_metadata)
|
||||
→ dict "{Framework}-{Version}" → [requirement_ids]
|
||||
↓
|
||||
CCC_AWS(findings, compliance).transform() → per-provider class builds CSV rows
|
||||
↓
|
||||
batch_write_data_to_file() → writes {output_filename}_ccc_aws.csv
|
||||
↓
|
||||
display_compliance_table() → get_ccc_table() → prints stdout summary
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Compliance Framework Location
|
||||
|
||||
Frameworks are JSON files located in: `prowler/compliance/{provider}/{framework_name}_{provider}.json`
|
||||
@@ -455,14 +539,453 @@ Prowler ThreatScore is a custom security scoring framework developed by Prowler
|
||||
- **M365:** `cis_4.0_m365.json`, `iso27001_2022_m365.json`
|
||||
- **NHN:** `iso27001_2022_nhn.json`
|
||||
|
||||
## Workflow A: Sync a Framework With an Upstream Catalog
|
||||
|
||||
Use when the framework is maintained upstream (CIS Benchmarks, FINOS CCC, CSA CCM, NIST, ENS, etc.) and Prowler needs to catch up.
|
||||
|
||||
### Step 1 — Cache the upstream source
|
||||
|
||||
Download every upstream file to a local cache so subsequent iterations don't hit the network. For FINOS CCC:
|
||||
|
||||
```bash
|
||||
mkdir -p /tmp/ccc_upstream
|
||||
catalogs="core/ccc storage/object management/auditlog management/logging ..."
|
||||
for p in $catalogs; do
|
||||
safe=$(echo "$p" | tr '/' '_')
|
||||
gh api "repos/finos/common-cloud-controls/contents/catalogs/$p/controls.yaml" \
|
||||
-H "Accept: application/vnd.github.raw" > "/tmp/ccc_upstream/${safe}.yaml"
|
||||
done
|
||||
```
|
||||
|
||||
### Step 2 — Run the generic sync runner against a framework config
|
||||
|
||||
The sync tooling is split into three layers so adding a new framework only takes a YAML config (and optionally a new parser module for an unfamiliar upstream format):
|
||||
|
||||
```
|
||||
skills/prowler-compliance/assets/
|
||||
├── sync_framework.py # generic runner — works for any framework
|
||||
├── configs/
|
||||
│ └── ccc.yaml # per-framework config (canonical example)
|
||||
└── parsers/
|
||||
├── __init__.py
|
||||
└── finos_ccc.py # parser module for FINOS CCC YAML
|
||||
```
|
||||
|
||||
**For frameworks that already have a config + parser** (today: FINOS CCC), run:
|
||||
|
||||
```bash
|
||||
python skills/prowler-compliance/assets/sync_framework.py \
|
||||
skills/prowler-compliance/assets/configs/ccc.yaml
|
||||
```
|
||||
|
||||
The runner loads the config, validates it, dynamically imports the parser declared in `parser.module`, calls `parser.parse_upstream(config) -> list[dict]`, then applies generic post-processing (id uniqueness safety net, `FamilyName` normalization, legacy check-mapping preservation) and writes the provider JSONs.
|
||||
|
||||
**To add a new framework sync**:
|
||||
|
||||
1. **Write a config file** at `skills/prowler-compliance/assets/configs/{framework}.yaml`. See `configs/ccc.yaml` as the canonical example. Required top-level sections:
|
||||
- `framework` — `name`, `display_name`, `version` (**never empty** — empty Version silently breaks `get_check_compliance()` key construction, so the runner refuses to start), `description_template` (accepts `{provider_display}`, `{provider_key}`, `{framework_name}`, `{framework_display}`, `{version}` placeholders).
|
||||
- `providers` — list of `{key, display}` pairs, one per Prowler provider the framework targets.
|
||||
- `output.path_template` — supports `{provider}`, `{framework}`, `{version}` placeholders. Examples: `"prowler/compliance/{provider}/ccc_{provider}.json"` for unversioned file names, `"prowler/compliance/{provider}/cis_{version}_{provider}.json"` for versioned ones.
|
||||
- `upstream.dir` — local cache directory (populate via Step 1).
|
||||
- `parser.module` — name of the module under `parsers/` to load (without `.py`). Everything else under `parser.` is opaque to the runner and passed to the parser as config.
|
||||
- `post_processing.check_preservation.primary_key` — top-level field name for the primary legacy-mapping lookup (almost always `Id`).
|
||||
- `post_processing.check_preservation.fallback_keys` — **config-driven fallback keys** for preserving check mappings when ids change. Each entry is a list of `Attributes[0]` field names composed into a tuple. Examples:
|
||||
- CCC: `- [Section, Applicability]` (because `Applicability` is a CCC-only attribute, verified in `compliance_models.py:213`).
|
||||
- CIS would use `- [Section, Profile]`.
|
||||
- NIST would use `- [ItemId]`.
|
||||
- List-valued fields (like `Applicability`) are automatically frozen to `frozenset` so the tuple is hashable.
|
||||
- `post_processing.family_name_normalization` (optional) — map of raw → canonical `FamilyName` values. The UI groups by `Attributes[0].FamilyName` exactly, so inconsistent upstream variants otherwise become separate tree branches.
|
||||
|
||||
2. **Reuse an existing parser** if the upstream format matches one (currently only `finos_ccc` exists). Otherwise, **write a new parser** at `parsers/{name}.py` implementing:

   ```python
   def parse_upstream(config: dict) -> list[dict]:
       """Return Prowler-format requirements {Id, Description, Attributes: [...], Checks: []}.

       Ids MUST be unique in the returned list. The runner raises ValueError
       on duplicates — it does NOT silently renumber, because mutating a
       canonical upstream id (e.g. CIS '1.1.1' or NIST 'AC-2(1)') would be
       catastrophic. The parser owns all upstream-format quirks: foreign-prefix
       rewriting, genuine collision renumbering, shape handling.
       """
   ```

   The parser reads its own settings from `config['upstream']` and `config['parser']`. It does NOT load existing Prowler JSONs (the runner does that for check preservation) and does NOT write output (the runner does that too).

**Gotchas the runner already handles for you** (learned from the FINOS CCC v2025.10 sync — they're documented here so you don't re-discover them):

- **Multiple upstream YAML shapes**. Most FINOS CCC catalogs use `control-families: [...]`, but `storage/object` uses a top-level `controls: [...]` with a `family: "CCC.X.Y"` reference id and no human-readable family name. A parser that only handles shape 1 silently drops the shape-2 catalog — this exact bug dropped ObjStor from Prowler for a full iteration. `parsers/finos_ccc.py` handles both shapes; if you write a new parser for a similar format, test with at least one file of each shape.
- **Whitespace collapse**. Upstream YAML multi-line block scalars (`|`) preserve newlines. Prowler stores descriptions single-line. Collapse with `" ".join(value.split())` before emitting (see `parsers/finos_ccc.py::clean()`).
- **Foreign-prefix AR id rewriting**. Upstream sometimes aliases requirements across catalogs by keeping the original prefix (e.g., `CCC.AuditLog.CN08.AR01` appears nested under `CCC.Logging.CN03`). Rewrite the foreign id to fit its parent control: `CCC.Logging.CN03.AR01`. This logic is parser-specific because the id structure varies per framework (CCC uses 3-dot depth; CIS uses numeric dots; NIST uses `AC-2(1)`).
- **Genuine upstream collision renumbering**. Sometimes upstream has a real typo where two different requirements share the same id (e.g., `CCC.Core.CN14.AR02` defined twice for 30-day and 14-day backup variants). Renumber the second copy to the next free AR number (`.AR03`). The parser handles this; the runner asserts the final list has unique ids as a safety net.
- **Existing check mapping preservation**. The runner uses the `primary_key` + `fallback_keys` declared in config to look up the old `Checks` list for each requirement. For CCC this means primary index by `Id` plus fallback index by `(Section, frozenset(Applicability))` — the fallback recovers mappings for requirements whose ids were rewritten or renumbered by the parser.
- **FamilyName normalization**. Configured via `post_processing.family_name_normalization` — no code changes needed to collapse upstream variants like `"Logging & Monitoring"` → `"Logging and Monitoring"`.
- **Populate `Version`**. The runner refuses to start on empty `framework.version` — fail-fast replaces the silent bug where `get_check_compliance()` would build the key as just `"{Framework}"`.

|
||||
### Step 3 — Validate before committing

```python
from prowler.lib.check.compliance_models import Compliance
for prov in ['aws', 'azure', 'gcp']:
    c = Compliance.parse_file(f"prowler/compliance/{prov}/ccc_{prov}.json")
    print(f"{prov}: {len(c.Requirements)} reqs, version={c.Version}")
```

Any `ValidationError` means the Attribute fields don't match the `*_Requirement_Attribute` model. Either fix the JSON or extend the model in `compliance_models.py` (remember: Generic stays last).

### Step 4 — Verify every check id exists

```python
import json
from pathlib import Path
for prov in ['aws', 'azure', 'gcp']:
    existing = {p.stem.replace('.metadata','')
                for p in Path(f'prowler/providers/{prov}/services').rglob('*.metadata.json')}
    with open(f'prowler/compliance/{prov}/ccc_{prov}.json') as f:
        data = json.load(f)
    refs = {c for r in data['Requirements'] for c in r['Checks']}
    missing = refs - existing
    assert not missing, f"{prov} missing: {missing}"
```

A stale check id silently becomes dead weight — no finding will ever map to it. This pre-validation **must run on every write**; bake it into the generator script.

### Step 5 — Add an attribute model if needed

Only if the framework has fields beyond `Generic_Compliance_Requirement_Attribute`. Add the class to `prowler/lib/check/compliance_models.py` and register it in `Compliance_Requirement.Attributes: list[Union[...]]`. **Generic stays last.**
|
||||
|
||||
---
|
||||
|
||||
## Workflow B: Audit Check Mappings as a Cloud Auditor

Use when the user asks to review existing mappings ("are these correct?", "verify that the checks apply", "audit the CCC mappings"). This is the highest-value compliance task — it surfaces padded mappings with zero actual coverage and missing mappings for legitimate coverage.

### The golden rule

> A Prowler check's title/risk MUST **literally describe what the requirement text says**. "Related" is not enough. If no check actually addresses the requirement, leave `Checks: []` (MANUAL) — **honest MANUAL is worth more than padded coverage**.
|
||||
|
||||
### Audit process

**Step 1 — Build a per-provider check inventory** (cache in `/tmp/`):

```python
import json
from pathlib import Path
for provider in ['aws', 'azure', 'gcp']:
    inv = {}
    for meta in Path(f'prowler/providers/{provider}/services').rglob('*.metadata.json'):
        with open(meta) as f:
            d = json.load(f)
        cid = d.get('CheckID') or meta.stem.replace('.metadata','')
        inv[cid] = {
            'service': d.get('ServiceName', ''),
            'title': d.get('CheckTitle', ''),
            'risk': d.get('Risk', ''),
            'description': d.get('Description', ''),
        }
    with open(f'/tmp/checks_{provider}.json', 'w') as f:
        json.dump(inv, f, indent=2)
```
|
||||
|
||||
**Step 2 — Keyword/service query helper** — see [assets/query_checks.py](assets/query_checks.py):

```bash
python assets/query_checks.py aws encryption transit        # keyword AND-search
python assets/query_checks.py aws --service iam             # all iam checks
python assets/query_checks.py aws --id kms_cmk_rotation_enabled  # full metadata
```

**Step 3 — Dump a framework section with current mappings** — see [assets/dump_section.py](assets/dump_section.py):

```bash
python assets/dump_section.py "CCC.Core."      # all Core ARs across 3 providers
python assets/dump_section.py "CCC.AuditLog."  # all AuditLog ARs
```
|
||||
|
||||
**Step 4 — Encode explicit REPLACE decisions** — see [assets/audit_framework_template.py](assets/audit_framework_template.py). Structure:

```python
DECISIONS = {}

DECISIONS["CCC.Core.CN01.AR01"] = {
    "aws": [
        "cloudfront_distributions_https_enabled",
        "cloudfront_distributions_origin_traffic_encrypted",
        # ...
    ],
    "azure": [
        "storage_secure_transfer_required_is_enabled",
        "app_minimum_tls_version_12",
        # ...
    ],
    "gcp": [
        "cloudsql_instance_ssl_connections",
    ],
    # Missing provider key = leave the legacy mapping untouched
}

# Empty list = EXPLICITLY MANUAL (overwrites legacy)
DECISIONS["CCC.Core.CN01.AR07"] = {
    "aws": [],  # Prowler has no IANA port/protocol check
    "azure": [],
    "gcp": [],
}
```

**REPLACE, not PATCH.** Encoding every mapping as a full list (not an add/remove delta) makes the audit reproducible and surfaces hidden assumptions from the legacy data.
|
||||
|
||||
**Step 5 — Pre-validation**. The audit script MUST validate every check id against the inventory and **abort with stderr listing typos**. Common typos caught during a real audit:

- `fsx_file_system_encryption_at_rest_using_kms` (doesn't exist)
- `cosmosdb_account_encryption_at_rest_with_cmk` (doesn't exist)
- `sqlserver_geo_replication` (doesn't exist)
- `redshift_cluster_audit_logging` (should be `redshift_cluster_encrypted_at_rest`)
- `postgresql_flexible_server_require_secure_transport` (should be `postgresql_flexible_server_enforce_ssl_enabled`)
- `storage_secure_transfer_required_enabled` (should be `storage_secure_transfer_required_is_enabled`)
- `sqlserver_minimum_tls_version_12` (should be `sqlserver_recommended_minimal_tls_version`)

**Step 6 — Apply + validate + test**:

```bash
python /path/to/audit_script.py   # applies decisions, pre-validates
python -m pytest tests/lib/outputs/compliance/ tests/lib/check/ -q
```
|
||||
|
||||
### Audit Reference Table: Requirement Text → Prowler Checks
|
||||
|
||||
Use this table to map CCC-style / NIST-style / ISO-style requirements to the checks that actually verify them. Built from a real audit of 172 CCC ARs × 3 providers.
|
||||
|
||||
| Requirement text | AWS checks | Azure checks | GCP checks |
|
||||
|---|---|---|---|
|
||||
| **TLS in transit enforced** | `cloudfront_distributions_https_enabled`, `s3_bucket_secure_transport_policy`, `elbv2_ssl_listeners`, `elbv2_insecure_ssl_ciphers`, `elb_ssl_listeners`, `elb_insecure_ssl_ciphers`, `opensearch_service_domains_https_communications_enforced`, `rds_instance_transport_encrypted`, `redshift_cluster_in_transit_encryption_enabled`, `elasticache_redis_cluster_in_transit_encryption_enabled`, `dynamodb_accelerator_cluster_in_transit_encryption_enabled`, `dms_endpoint_ssl_enabled`, `kafka_cluster_in_transit_encryption_enabled`, `transfer_server_in_transit_encryption_enabled`, `glue_database_connections_ssl_enabled`, `sns_subscription_not_using_http_endpoints` | `storage_secure_transfer_required_is_enabled`, `storage_ensure_minimum_tls_version_12`, `postgresql_flexible_server_enforce_ssl_enabled`, `mysql_flexible_server_ssl_connection_enabled`, `mysql_flexible_server_minimum_tls_version_12`, `sqlserver_recommended_minimal_tls_version`, `app_minimum_tls_version_12`, `app_ensure_http_is_redirected_to_https`, `app_ftp_deployment_disabled` | `cloudsql_instance_ssl_connections` (almost only option) |
|
||||
| **TLS 1.3 specifically** | Partial: `cloudfront_distributions_using_deprecated_ssl_protocols`, `elb*_insecure_ssl_ciphers`, `*_minimum_tls_version_12` | Partial: `*_minimum_tls_version_12` checks | None — accept as MANUAL |
|
||||
| **SSH / port 22 hardening** | `ec2_instance_port_ssh_exposed_to_internet`, `ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22`, `ec2_networkacl_allow_ingress_tcp_port_22` | `network_ssh_internet_access_restricted`, `vm_linux_enforce_ssh_authentication` | `compute_firewall_ssh_access_from_the_internet_allowed`, `compute_instance_block_project_wide_ssh_keys_disabled`, `compute_project_os_login_enabled`, `compute_project_os_login_2fa_enabled` |
|
||||
| **mTLS (mutual TLS)** | `kafka_cluster_mutual_tls_authentication_enabled`, `apigateway_restapi_client_certificate_enabled` | `app_client_certificates_on` | None — MANUAL |
|
||||
| **Data at rest encrypted** | `s3_bucket_default_encryption`, `s3_bucket_kms_encryption`, `ec2_ebs_default_encryption`, `ec2_ebs_volume_encryption`, `rds_instance_storage_encrypted`, `rds_cluster_storage_encrypted`, `rds_snapshots_encrypted`, `dynamodb_tables_kms_cmk_encryption_enabled`, `redshift_cluster_encrypted_at_rest`, `neptune_cluster_storage_encrypted`, `documentdb_cluster_storage_encrypted`, `opensearch_service_domains_encryption_at_rest_enabled`, `kinesis_stream_encrypted_at_rest`, `firehose_stream_encrypted_at_rest`, `sns_topics_kms_encryption_at_rest_enabled`, `sqs_queues_server_side_encryption_enabled`, `efs_encryption_at_rest_enabled`, `athena_workgroup_encryption`, `glue_data_catalogs_metadata_encryption_enabled`, `backup_vaults_encrypted`, `backup_recovery_point_encrypted`, `cloudtrail_kms_encryption_enabled`, `cloudwatch_log_group_kms_encryption_enabled`, `eks_cluster_kms_cmk_encryption_in_secrets_enabled`, `sagemaker_notebook_instance_encryption_enabled`, `apigateway_restapi_cache_encrypted`, `kafka_cluster_encryption_at_rest_uses_cmk`, `dynamodb_accelerator_cluster_encryption_enabled`, `storagegateway_fileshare_encryption_enabled` | `storage_infrastructure_encryption_is_enabled`, `storage_ensure_encryption_with_customer_managed_keys`, `vm_ensure_attached_disks_encrypted_with_cmk`, `vm_ensure_unattached_disks_encrypted_with_cmk`, `sqlserver_tde_encryption_enabled`, `sqlserver_tde_encrypted_with_cmk`, `databricks_workspace_cmk_encryption_enabled`, `monitor_storage_account_with_activity_logs_cmk_encrypted` | `compute_instance_encryption_with_csek_enabled`, `dataproc_encrypted_with_cmks_disabled`, `bigquery_dataset_cmk_encryption`, `bigquery_table_cmk_encryption` |
|
||||
| **CMEK required (customer-managed keys)** | `kms_cmk_are_used` | `storage_ensure_encryption_with_customer_managed_keys`, `vm_ensure_attached_disks_encrypted_with_cmk`, `vm_ensure_unattached_disks_encrypted_with_cmk`, `sqlserver_tde_encrypted_with_cmk`, `databricks_workspace_cmk_encryption_enabled` | `bigquery_dataset_cmk_encryption`, `bigquery_table_cmk_encryption`, `dataproc_encrypted_with_cmks_disabled`, `compute_instance_encryption_with_csek_enabled` |
|
||||
| **Key rotation enabled** | `kms_cmk_rotation_enabled` | `keyvault_key_rotation_enabled`, `storage_key_rotation_90_days` | `kms_key_rotation_enabled` |
|
||||
| **MFA for UI access** | `iam_root_mfa_enabled`, `iam_root_hardware_mfa_enabled`, `iam_user_mfa_enabled_console_access`, `iam_user_hardware_mfa_enabled`, `iam_administrator_access_with_mfa`, `cognito_user_pool_mfa_enabled` | `entra_privileged_user_has_mfa`, `entra_non_privileged_user_has_mfa`, `entra_user_with_vm_access_has_mfa`, `entra_security_defaults_enabled` | `compute_project_os_login_2fa_enabled` |
|
||||
| **API access / credentials** | `iam_no_root_access_key`, `iam_user_no_setup_initial_access_key`, `apigateway_restapi_authorizers_enabled`, `apigateway_restapi_public_with_authorizer`, `apigatewayv2_api_authorizers_enabled` | `entra_conditional_access_policy_require_mfa_for_management_api`, `app_function_access_keys_configured`, `app_function_identity_is_configured` | `apikeys_api_restrictions_configured`, `apikeys_key_exists`, `apikeys_key_rotated_in_90_days` |
|
||||
| **Log all admin/config changes** | `cloudtrail_multi_region_enabled`, `cloudtrail_multi_region_enabled_logging_management_events`, `cloudtrail_cloudwatch_logging_enabled`, `cloudtrail_log_file_validation_enabled`, `cloudwatch_log_metric_filter_*`, `cloudwatch_changes_to_*_alarm_configured`, `config_recorder_all_regions_enabled` | `monitor_diagnostic_settings_exists`, `monitor_diagnostic_setting_with_appropriate_categories`, `monitor_alert_*` | `iam_audit_logs_enabled`, `logging_log_metric_filter_and_alert_for_*`, `logging_sink_created` |
|
||||
| **Log integrity (digital signatures)** | `cloudtrail_log_file_validation_enabled` (exact) | None | None |
|
||||
| **Public access denied** | `s3_bucket_public_access`, `s3_bucket_public_list_acl`, `s3_bucket_public_write_acl`, `s3_account_level_public_access_blocks`, `apigateway_restapi_public`, `awslambda_function_url_public`, `awslambda_function_not_publicly_accessible`, `rds_instance_no_public_access`, `rds_snapshots_public_access`, `ec2_securitygroup_allow_ingress_from_internet_to_all_ports`, `sns_topics_not_publicly_accessible`, `sqs_queues_not_publicly_accessible` | `storage_blob_public_access_level_is_disabled`, `storage_ensure_private_endpoints_in_storage_accounts`, `containerregistry_not_publicly_accessible`, `keyvault_private_endpoints`, `app_function_not_publicly_accessible`, `aks_clusters_public_access_disabled`, `network_http_internet_access_restricted` | `cloudstorage_bucket_public_access`, `compute_instance_public_ip`, `cloudsql_instance_public_ip`, `compute_firewall_*_access_from_the_internet_allowed` |
|
||||
| **IAM least privilege** | `iam_*_no_administrative_privileges`, `iam_policy_allows_privilege_escalation`, `iam_inline_policy_allows_privilege_escalation`, `iam_role_administratoraccess_policy`, `iam_group_administrator_access_policy`, `iam_user_administrator_access_policy`, `iam_policy_attached_only_to_group_or_roles`, `iam_role_cross_service_confused_deputy_prevention` | `iam_role_user_access_admin_restricted`, `iam_subscription_roles_owner_custom_not_created`, `iam_custom_role_has_permissions_to_administer_resource_locks` | `iam_sa_no_administrative_privileges`, `iam_no_service_roles_at_project_level`, `iam_role_kms_enforce_separation_of_duties`, `iam_role_sa_enforce_separation_of_duties` |
|
||||
| **Password policy** | `iam_password_policy_minimum_length_14`, `iam_password_policy_uppercase`, `iam_password_policy_lowercase`, `iam_password_policy_symbol`, `iam_password_policy_number`, `iam_password_policy_expires_passwords_within_90_days_or_less`, `iam_password_policy_reuse_24` | None | None |
|
||||
| **Credential rotation / unused** | `iam_rotate_access_key_90_days`, `iam_user_accesskey_unused`, `iam_user_console_access_unused` | None | `iam_sa_user_managed_key_rotate_90_days`, `iam_sa_user_managed_key_unused`, `iam_service_account_unused` |
|
||||
| **VPC / flow logs** | `vpc_flow_logs_enabled` | `network_flow_log_captured_sent`, `network_watcher_enabled`, `network_flow_log_more_than_90_days` | `compute_subnet_flow_logs_enabled` |
|
||||
| **Backup / DR / Multi-AZ** | `backup_vaults_exist`, `backup_plans_exist`, `backup_reportplans_exist`, `rds_instance_backup_enabled`, `rds_*_protected_by_backup_plan`, `rds_cluster_multi_az`, `neptune_cluster_backup_enabled`, `documentdb_cluster_backup_enabled`, `efs_have_backup_enabled`, `s3_bucket_cross_region_replication`, `dynamodb_table_protected_by_backup_plan` | `vm_backup_enabled`, `vm_sufficient_daily_backup_retention_period`, `storage_geo_redundant_enabled` | `cloudsql_instance_automated_backups`, `cloudstorage_bucket_log_retention_policy_lock`, `cloudstorage_bucket_sufficient_retention_period` |
|
||||
| **Access analysis / discovery** | `accessanalyzer_enabled`, `accessanalyzer_enabled_without_findings` | None specific | `iam_account_access_approval_enabled`, `iam_cloud_asset_inventory_enabled` |
|
||||
| **Object lock / retention** | `s3_bucket_object_lock`, `s3_bucket_object_versioning`, `s3_bucket_lifecycle_enabled`, `cloudtrail_bucket_requires_mfa_delete`, `s3_bucket_no_mfa_delete` | `storage_ensure_soft_delete_is_enabled`, `storage_blob_versioning_is_enabled`, `storage_ensure_file_shares_soft_delete_is_enabled` | `cloudstorage_bucket_log_retention_policy_lock`, `cloudstorage_bucket_soft_delete_enabled`, `cloudstorage_bucket_versioning_enabled`, `cloudstorage_bucket_sufficient_retention_period` |
|
||||
| **Uniform bucket-level access** | `s3_bucket_acl_prohibited` | `storage_account_key_access_disabled`, `storage_default_to_entra_authorization_enabled` | `cloudstorage_bucket_uniform_bucket_level_access` |
|
||||
| **Container vulnerability scanning** | `ecr_registry_scan_images_on_push_enabled`, `ecr_repositories_scan_vulnerabilities_in_latest_image` | `defender_container_images_scan_enabled`, `defender_container_images_resolved_vulnerabilities` | `artifacts_container_analysis_enabled`, `gcr_container_scanning_enabled` |
|
||||
| **WAF / rate limiting** | `wafv2_webacl_with_rules`, `waf_*_webacl_with_rules`, `wafv2_webacl_logging_enabled`, `waf_global_webacl_logging_enabled` | None | None |
|
||||
| **Deployment region restriction** | `organizations_scp_check_deny_regions` | None | None |
|
||||
| **Secrets automatic rotation** | `secretsmanager_automatic_rotation_enabled`, `secretsmanager_secret_rotated_periodically` | `keyvault_rbac_secret_expiration_set`, `keyvault_non_rbac_secret_expiration_set` | None |
|
||||
| **Certificate management** | `acm_certificates_expiration_check`, `acm_certificates_with_secure_key_algorithms`, `acm_certificates_transparency_logs_enabled` | `keyvault_key_expiration_set_in_non_rbac`, `keyvault_rbac_key_expiration_set`, `keyvault_non_rbac_secret_expiration_set` | None |
|
||||
| **GenAI guardrails / input/output filtering** | `bedrock_guardrail_prompt_attack_filter_enabled`, `bedrock_guardrail_sensitive_information_filter_enabled`, `bedrock_agent_guardrail_enabled`, `bedrock_model_invocation_logging_enabled`, `bedrock_api_key_no_administrative_privileges`, `bedrock_api_key_no_long_term_credentials` | None | None |
|
||||
| **ML dev environment security** | `sagemaker_notebook_instance_root_access_disabled`, `sagemaker_notebook_instance_without_direct_internet_access_configured`, `sagemaker_notebook_instance_vpc_settings_configured`, `sagemaker_models_vpc_settings_configured`, `sagemaker_training_jobs_vpc_settings_configured`, `sagemaker_training_jobs_network_isolation_enabled`, `sagemaker_training_jobs_volume_and_output_encryption_enabled` | None | None |
|
||||
| **Threat detection / anomalous behavior** | `cloudtrail_threat_detection_enumeration`, `cloudtrail_threat_detection_privilege_escalation`, `cloudtrail_threat_detection_llm_jacking`, `guardduty_is_enabled`, `guardduty_no_high_severity_findings` | None | None |
|
||||
| **Serverless private access** | `awslambda_function_inside_vpc`, `awslambda_function_not_publicly_accessible`, `awslambda_function_url_public` | `app_function_not_publicly_accessible` | None |
|
||||
|
||||
### What Prowler Does NOT Cover (accept MANUAL honestly)

Don't pad mappings for these — mark `Checks: []` and move on:

- **TLS 1.3 version specifically** — Prowler verifies TLS is enforced, not always the exact version
- **IANA port-protocol consistency** — no check for "protocol running on its assigned port"
- **mTLS on most Azure/GCP services** — limited to App Service client certs on Azure, nothing on GCP
- **Rate limiting** on monitoring endpoints, load balancers, serverless invocations, vector ingestion
- **Session cookie expiry** (LB stickiness)
- **HTTP header scrubbing** (Server, X-Powered-By)
- **Certificate transparency verification for imports**
- **Model version pinning, red teaming, AI quality review**
- **Vector embedding validation, dimensional constraints, ANN vs exact search**
- **Secret region replication** (cross-region residency)
- **Lifecycle cleanup policies on container registries**
- **Row-level / column-level security in data warehouses**
- **Deployment region restriction on Azure/GCP** (AWS has `organizations_scp_check_deny_regions`, others don't)
- **Cross-tenant alert silencing permissions**
- **Field-level masking in logs**
- **Managed view enforcement for database access**
- **Automatic MFA delete on all S3 buckets** (only the CloudTrail bucket variant exists for some frameworks — AWS has the generic `s3_bucket_no_mfa_delete` though)
|
||||
---
|
||||
|
||||
## Workflow C: Add a New Output Formatter

Use when a new framework needs its own CSV columns or terminal table. Follow the c5/csa/ens layout exactly:

```bash
mkdir -p prowler/lib/outputs/compliance/{framework}
touch prowler/lib/outputs/compliance/{framework}/__init__.py
```

### Step 1 — Create `{framework}.py` (table dispatcher ONLY)

Copy from `prowler/lib/outputs/compliance/c5/c5.py` and change the function name + framework string. The `diff` between your file and `c5.py` should be just those two lines. **No function docstring** — other frameworks don't have one, stay consistent.
|
||||
|
||||
### Step 2 — Create `models.py`

One Pydantic v2 `BaseModel` per provider. Field names become CSV column headers (public API — don't rename later without a migration).

```python
from typing import Optional

from pydantic import BaseModel

class {Framework}_AWSModel(BaseModel):
    Provider: str
    Description: str
    AccountId: str
    Region: str
    AssessmentDate: str
    Requirements_Id: str
    Requirements_Description: str
    # ... provider-specific columns
    Status: str
    StatusExtended: str
    ResourceId: str
    ResourceName: str
    CheckId: str
    Muted: bool
```

### Step 3 — Create `{framework}_{provider}.py` for each provider

Copy from `prowler/lib/outputs/compliance/c5/c5_aws.py` etc. Contains the `{Framework}_AWS(ComplianceOutput)` class with `transform()` that walks findings and emits model rows. This file IS allowed to import `Finding`.
|
||||
|
||||
### Step 4 — Register everywhere

**`prowler/lib/outputs/compliance/compliance.py`** (CLI table dispatcher):
```python
from prowler.lib.outputs.compliance.{framework}.{framework} import get_{framework}_table

def display_compliance_table(...):
    ...
    elif compliance_framework.startswith("{framework}_"):
        get_{framework}_table(findings, bulk_checks_metadata,
                              compliance_framework, output_filename,
                              output_directory, compliance_overview)
```

**`prowler/__main__.py`** (CLI output writer per provider):
Add imports at the top:
```python
from prowler.lib.outputs.compliance.{framework}.{framework}_aws import {Framework}_AWS
from prowler.lib.outputs.compliance.{framework}.{framework}_azure import {Framework}_Azure
from prowler.lib.outputs.compliance.{framework}.{framework}_gcp import {Framework}_GCP
```
Add provider-specific `elif compliance_name.startswith("{framework}_"):` branches that instantiate the class and call `batch_write_data_to_file()`.

**`api/src/backend/tasks/jobs/export.py`** (API export dispatcher):
```python
from prowler.lib.outputs.compliance.{framework}.{framework}_aws import {Framework}_AWS
# ... azure, gcp

COMPLIANCE_CLASS_MAP = {
    "aws": [
        # ...
        (lambda name: name.startswith("{framework}_"), {Framework}_AWS),
    ],
    # ... azure, gcp
}
```

**Always use `startswith`**, never `name == "framework_aws"`. Exact match is a regression.
|
||||
|
||||
### Step 5 — Add tests
|
||||
|
||||
Create `tests/lib/outputs/compliance/{framework}/` with `{framework}_aws_test.py`, `{framework}_azure_test.py`, `{framework}_gcp_test.py`. See the test template in [references/test_template.md](references/test_template.md).
|
||||
|
||||
Add fixtures to `tests/lib/outputs/compliance/fixtures.py`: one `Compliance` object per provider with 1 evaluated + 1 manual requirement to exercise both code paths in `transform()`.
|
||||
|
||||
### Circular import warning
|
||||
|
||||
**The table dispatcher file (`{framework}.py`) MUST NOT import `Finding`** (directly or transitively). The cycle is:
|
||||
|
||||
```
|
||||
compliance.compliance imports get_{framework}_table
|
||||
→ {framework}.py imports ComplianceOutput
|
||||
→ compliance_output imports Finding
|
||||
→ finding imports get_check_compliance from compliance.compliance
|
||||
→ CIRCULAR
|
||||
```
|
||||
|
||||
Keep `{framework}.py` bare — only `colorama`, `tabulate`, `prowler.config.config`. Put anything that imports `Finding` in the per-provider `{framework}_{provider}.py` files.
|
||||
|
||||
---
|
||||
|
||||
## Conventions and Hard-Won Gotchas
|
||||
|
||||
These are lessons from the FINOS CCC v2025.10 sync + 172-AR audit pass (April 2026). Learn them once; save days of debugging.
|
||||
|
||||
1. **Per-provider files are non-negotiable.** Never collapse `{framework}_aws.py`, `{framework}_azure.py`, `{framework}_gcp.py` into a single parameterized class, no matter how DRY-tempting. Every other framework in the codebase follows the per-provider pattern and reviewers will reject the refactor. The CSV column names differ per provider — three classes is the convention.
|
||||
2. **`{framework}.py` has NO function docstring.** Other frameworks don't have them. Don't add one to be "helpful".
|
||||
3. **Circular import protection**: the table dispatcher file MUST NOT import `Finding` (directly or transitively). Split the code so `{framework}.py` only has `get_{framework}_table()` with bare imports, and `{framework}_{provider}.py` holds the class that needs `Finding`.
|
||||
4. **`Generic_Compliance_Requirement_Attribute` is the fallback** — in the `Compliance_Requirement.Attributes` Union in `compliance_models.py`, Generic MUST be LAST because Pydantic v1 tries union members in order. Putting Generic first means every framework-specific attribute falls through to Generic and the specific model is never used.
|
||||
5. **Pydantic v1 imports.** `from pydantic.v1 import BaseModel` in `compliance_models.py` — not v2. Mixing causes validation errors. Pydantic v2 is used in the CSV models (`models.py`) — that's fine because they're separate trees.
|
||||
6. **`get_check_compliance()` key format** is `f"{Framework}-{Version}"` ONLY if Version is set. Empty Version → key is `"{Framework}"` (no version suffix). Tests that mock compliance dicts must match this exact format — when a framework ships with `Version: ""`, downstream code and tests break silently.
|
||||
7. **CSV column names from `models.py` are public API.** Don't rename a field without migrating downstream consumers — CSV headers change.
|
||||
8. **Upstream YAML multi-line scalars** (`|` block scalars) preserve newlines. Collapse to single-line with `" ".join(value.split())` before writing to JSON.
|
||||
9. **Upstream catalogs can use multiple shapes.** FINOS CCC uses `control-families: [...]` in most catalogs but `controls: [...]` at the top level in `storage/object`. Any sync script must handle both or silently drop entire catalogs.
|
||||
10. **Foreign-prefix AR ids.** Upstream sometimes "imports" requirements from one catalog into another by keeping the original id prefix (e.g., `CCC.AuditLog.CN08.AR01` appearing under `CCC.Logging.CN03`). Prowler's compliance model requires unique ids within a catalog — rewrite the foreign id to fit the parent control: `CCC.AuditLog.CN08.AR01` (inside `CCC.Logging.CN03`) → `CCC.Logging.CN03.AR01`.
|
||||
11. **Genuine upstream id collisions.** Sometimes upstream has a real typo where two different requirements share the same id (e.g., `CCC.Core.CN14.AR02` defined twice for 30-day and 14-day backup variants). Renumber the second copy to the next free AR number. Preserve check mappings by matching on `(Section, frozenset(Applicability))` since the renumbered id won't match by id.
|
||||
12. **`COMPLIANCE_CLASS_MAP` in `export.py` uses `startswith` predicates** for all modern frameworks. Exact match (`name == "ccc_aws"`) is an anti-pattern — it was present for CCC until April 2026 and was the reason CCC couldn't have versioned variants.
|
||||
13. **Pre-validate every check id** against the per-provider inventory before writing the JSON. A typo silently creates an unreferenced check that will fail when findings try to map to it. The audit script MUST abort with stderr listing typos, not swallow them.
|
||||
14. **REPLACE is better than PATCH** for audit decisions. Encoding every mapping explicitly makes the audit reproducible and surfaces hidden assumptions from the legacy data. A PATCH system that adds/removes is too easy to forget.
|
||||
15. **When no check applies, MANUAL is correct.** Do not pad mappings with tangential checks "just in case". Prowler's compliance reports are meant to be actionable — padding them with noise breaks that. Honest manual reqs can be mapped later when new checks land.
|
||||
16. **UI groups by `Attributes[0].FamilyName` and `Attributes[0].Section`.** If FamilyName has inconsistent variants within the same JSON (e.g., "Logging & Monitoring" vs "Logging and Monitoring"), the UI renders them as separate categories. Section empty → the requirement falls into an orphan control with label "". Normalize before shipping.
|
||||
17. **Provider coverage is asymmetric.** AWS has dense coverage (~586 checks across 80+ services): in-transit encryption, IAM, database encryption, backup. Azure (~167 checks) and GCP (~102 checks) are thinner especially for in-transit encryption, mTLS, and ML/AI. Accept the asymmetry in mappings — don't force GCP parity where Prowler genuinely can't verify.
|
||||
|
||||
---
|
||||
|
||||
## Useful One-Liners
|
||||
|
||||
```bash
|
||||
# Count requirements per service prefix (CCC, CIS sections, etc.)
|
||||
jq -r '.Requirements[].Id | split(".")[1]' prowler/compliance/aws/ccc_aws.json | sort | uniq -c
|
||||
|
||||
# Find duplicate requirement IDs
|
||||
jq -r '.Requirements[].Id' file.json | sort | uniq -d
|
||||
|
||||
# Count manual requirements (no checks)
|
||||
jq '[.Requirements[] | select((.Checks | length) == 0)] | length' file.json
|
||||
|
||||
# List all unique check references in a framework
|
||||
jq -r '.Requirements[].Checks[]' file.json | sort -u
|
||||
|
||||
# List all unique Sections (to spot inconsistency)
|
||||
jq '[.Requirements[].Attributes[0].Section] | unique' file.json
|
||||
|
||||
# List all unique FamilyNames (to spot inconsistency)
|
||||
jq '[.Requirements[].Attributes[0].FamilyName] | unique' file.json
|
||||
|
||||
# Diff requirement ids between two versions of the same framework
|
||||
diff <(jq -r '.Requirements[].Id' a.json | sort) <(jq -r '.Requirements[].Id' b.json | sort)
|
||||
|
||||
# Find where a check id is used across all frameworks
|
||||
grep -rl "my_check_name" prowler/compliance/
|
||||
|
||||
# Check if a Prowler check exists
|
||||
find prowler/providers/aws/services -name "{check_id}.metadata.json"
|
||||
|
||||
# Validate a JSON with Pydantic
|
||||
python -c "from prowler.lib.check.compliance_models import Compliance; print(Compliance.parse_file('prowler/compliance/aws/ccc_aws.json').Framework)"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Requirement IDs**: Follow the original framework numbering exactly (e.g., "1.1", "A.5.1", "T1190", "ac_2_1")
|
||||
2. **Check Mapping**: Map to existing checks when possible. Use `Checks: []` for manual-only requirements
|
||||
2. **Check Mapping**: Map to existing checks when possible. Use `Checks: []` for manual-only requirements — honest MANUAL beats padded coverage
|
||||
3. **Completeness**: Include all framework requirements, even those without automated checks
|
||||
4. **Version Control**: Include framework version in `Name` and `Version` fields
|
||||
4. **Version Control**: Include framework version in `Name` and `Version` fields. **Never leave `Version: ""`** — it breaks `get_check_compliance()` key format
|
||||
5. **File Naming**: Use format `{framework}_{version}_{provider}.json`
|
||||
6. **Validation**: Prowler validates JSON against Pydantic models at startup - invalid JSON will cause errors
|
||||
6. **Validation**: Prowler validates JSON against Pydantic models at startup — invalid JSON will cause errors
|
||||
7. **Pre-validate check ids** against the provider's `*.metadata.json` inventory before every commit
|
||||
8. **Normalize FamilyName and Section** to avoid inconsistent UI tree branches
|
||||
9. **Register everywhere**: SDK model (if needed) → `compliance.py` dispatcher → `__main__.py` CLI writer → `export.py` API map → UI mapper. Skipping any layer results in silent failures
|
||||
10. **Audit, don't pad**: when reviewing mappings, apply the golden rule — the check's title/risk MUST literally describe what the requirement text says. Tangential relation doesn't count
|
||||
|
||||
## Commands
|
||||
|
||||
@@ -482,11 +1005,46 @@ prowler aws --compliance cis_5.0_aws -M csv json html
|
||||
|
||||
## Code References
|
||||
|
||||
- **Compliance Models:** `prowler/lib/check/compliance_models.py`
|
||||
- **Compliance Processing:** `prowler/lib/check/compliance.py`
|
||||
- **Compliance Output:** `prowler/lib/outputs/compliance/`
|
||||
### Layer 1 — SDK / Core
|
||||
- **Compliance Models:** `prowler/lib/check/compliance_models.py` (Pydantic v1 model tree)
|
||||
- **Compliance Processing / Linker:** `prowler/lib/check/compliance.py` (`get_check_compliance`, `update_checks_metadata_with_compliance`)
|
||||
- **Check Utils:** `prowler/lib/check/utils.py` (`list_compliance_modules`)
|
||||
|
||||
### Layer 2 — JSON Catalogs
|
||||
- **Framework JSONs:** `prowler/compliance/{provider}/` (auto-discovered via directory walk)
|
||||
|
||||
### Layer 3 — Output Formatters
|
||||
- **Per-framework folders:** `prowler/lib/outputs/compliance/{framework}/`
|
||||
- **Shared base class:** `prowler/lib/outputs/compliance/compliance_output.py` (`ComplianceOutput` + `batch_write_data_to_file`)
|
||||
- **CLI table dispatcher:** `prowler/lib/outputs/compliance/compliance.py` (`display_compliance_table`)
|
||||
- **Finding model:** `prowler/lib/outputs/finding.py` (**do not import transitively from table dispatcher files — circular import**)
|
||||
- **CLI writer:** `prowler/__main__.py` (per-provider `elif compliance_name.startswith(...)` branches that instantiate per-provider classes)
|
||||
|
||||
### Layer 4 — API / UI
|
||||
- **API lazy loader:** `api/src/backend/api/compliance.py` (`LazyComplianceTemplate`, `LazyChecksMapping`)
|
||||
- **API export dispatcher:** `api/src/backend/tasks/jobs/export.py` (`COMPLIANCE_CLASS_MAP` with `startswith` predicates)
|
||||
- **UI framework router:** `ui/lib/compliance/compliance-mapper.ts`
|
||||
- **UI per-framework mapper:** `ui/lib/compliance/{framework}.tsx`
|
||||
- **UI detail panel:** `ui/components/compliance/compliance-custom-details/{framework}-details.tsx`
|
||||
- **UI types:** `ui/types/compliance.ts`
|
||||
- **UI icon:** `ui/components/icons/compliance/{framework}.svg` + registration in `IconCompliance.tsx`
|
||||
|
||||
### Tests
|
||||
- **Output formatter tests:** `tests/lib/outputs/compliance/{framework}/{framework}_{provider}_test.py`
|
||||
- **Shared fixtures:** `tests/lib/outputs/compliance/fixtures.py`
|
||||
|
||||
## Resources
|
||||
|
||||
- **Templates:** See [assets/](assets/) for framework JSON templates
|
||||
- **JSON Templates:** See [assets/](assets/) for framework JSON templates (cis, ens, iso27001, mitre_attack, prowler_threatscore, generic)
|
||||
- **Config-driven compliance sync** (any upstream-backed framework):
|
||||
- [assets/sync_framework.py](assets/sync_framework.py) — generic runner. Loads a YAML config, dynamically imports the declared parser, applies generic post-processing (id uniqueness safety net, `FamilyName` normalization, legacy check-mapping preservation with config-driven fallback keys), and writes the provider JSONs with Pydantic post-validation. Framework-agnostic — works for any compliance framework.
|
||||
- [assets/configs/ccc.yaml](assets/configs/ccc.yaml) — canonical config example (FINOS CCC v2025.10). Copy and adapt for new frameworks.
|
||||
- [assets/parsers/finos_ccc.py](assets/parsers/finos_ccc.py) — FINOS CCC YAML parser. Handles both upstream shapes (`control-families` and top-level `controls`), foreign-prefix AR rewriting, and genuine collision renumbering. Exposes `parse_upstream(config) -> list[dict]`.
|
||||
- [assets/parsers/](assets/parsers/) — add new parser modules here for unfamiliar upstream formats (NIST OSCAL JSON, MITRE STIX, CIS Benchmarks, etc.). Each parser is a `{name}.py` file implementing `parse_upstream(config) -> list[dict]` with guaranteed-unique ids.
|
||||
- **Reusable audit tooling** (added April 2026 after the FINOS CCC v2025.10 sync):
|
||||
- [assets/audit_framework_template.py](assets/audit_framework_template.py) — explicit REPLACE decision ledger with pre-validation against the per-provider inventory. Drop-in template for auditing any framework.
|
||||
- [assets/query_checks.py](assets/query_checks.py) — keyword/service/id query helper over `/tmp/checks_{provider}.json`.
|
||||
- [assets/dump_section.py](assets/dump_section.py) — dumps every AR for a given id prefix across all 3 providers with current check mappings.
|
||||
- [assets/build_inventory.py](assets/build_inventory.py) — generates `/tmp/checks_{provider}.json` from `*.metadata.json` files.
|
||||
- **Documentation:** See [references/compliance-docs.md](references/compliance-docs.md) for additional resources
|
||||
- **Related skill:** [prowler-compliance-review](../prowler-compliance-review/SKILL.md) — PR review checklist and validator script for compliance framework PRs
|
||||
|
||||
188
skills/prowler-compliance/assets/audit_framework_template.py
Normal file
188
skills/prowler-compliance/assets/audit_framework_template.py
Normal file
@@ -0,0 +1,188 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Cloud-auditor pass template for any Prowler compliance framework.
|
||||
|
||||
Encode explicit REPLACE decisions per (requirement_id, provider) pair below.
|
||||
Each decision FULLY overwrites the legacy Checks list for that requirement.
|
||||
|
||||
Workflow:
|
||||
1. Run build_inventory.py first to cache per-provider check metadata.
|
||||
2. Run dump_section.py to see current mappings for the catalog you're auditing.
|
||||
3. Fill in DECISIONS below with explicit check lists.
|
||||
4. Run this script — it pre-validates every check id against the inventory
|
||||
and aborts with stderr listing typos before writing.
|
||||
|
||||
Decision rules (apply as a hostile cloud auditor):
|
||||
- The Prowler check's title/risk MUST literally describe what the AR text says.
|
||||
"Related" is not enough.
|
||||
- If no check actually addresses the requirement, leave `[]` (= MANUAL).
|
||||
HONEST MANUAL is worth more than padded coverage.
|
||||
- Missing provider key = leave the legacy mapping untouched.
|
||||
- Empty list `[]` = explicitly MANUAL (overwrites legacy).
|
||||
|
||||
Usage:
|
||||
# 1. Copy this file to /tmp/audit_<framework>.py and fill in DECISIONS
|
||||
# 2. Edit FRAMEWORK_KEY below to match your framework file naming
|
||||
# 3. Run:
|
||||
python /tmp/audit_<framework>.py
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Configure for your framework
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Framework file basename inside prowler/compliance/{provider}/.
|
||||
# If your framework is called "cis_5.0_aws.json", FRAMEWORK_KEY is "cis_5.0".
|
||||
# If the file is "ccc_aws.json", FRAMEWORK_KEY is "ccc".
|
||||
FRAMEWORK_KEY = "ccc"
|
||||
|
||||
# Which providers to apply decisions to.
|
||||
PROVIDERS = ["aws", "azure", "gcp"]
|
||||
|
||||
PROWLER_DIR = Path("prowler/compliance")
|
||||
CHECK_INV = {prov: Path(f"/tmp/checks_{prov}.json") for prov in PROVIDERS}
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# DECISIONS — encode one entry per requirement you want to audit
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# DECISIONS[requirement_id][provider] = list[str] of check ids
|
||||
# See SKILL.md → "Audit Reference Table: Requirement Text → Prowler Checks"
|
||||
# for a comprehensive mapping cheat sheet built from a 172-AR CCC audit.
|
||||
|
||||
DECISIONS: dict[str, dict[str, list[str]]] = {}
|
||||
|
||||
# ---- Example entries (delete and replace with your own) ----
|
||||
|
||||
# Example 1: TLS in transit enforced (non-SSH traffic)
|
||||
# DECISIONS["CCC.Core.CN01.AR01"] = {
|
||||
# "aws": [
|
||||
# "cloudfront_distributions_https_enabled",
|
||||
# "cloudfront_distributions_origin_traffic_encrypted",
|
||||
# "s3_bucket_secure_transport_policy",
|
||||
# "elbv2_ssl_listeners",
|
||||
# "rds_instance_transport_encrypted",
|
||||
# "kafka_cluster_in_transit_encryption_enabled",
|
||||
# "redshift_cluster_in_transit_encryption_enabled",
|
||||
# "opensearch_service_domains_https_communications_enforced",
|
||||
# ],
|
||||
# "azure": [
|
||||
# "storage_secure_transfer_required_is_enabled",
|
||||
# "app_minimum_tls_version_12",
|
||||
# "postgresql_flexible_server_enforce_ssl_enabled",
|
||||
# "sqlserver_recommended_minimal_tls_version",
|
||||
# ],
|
||||
# "gcp": [
|
||||
# "cloudsql_instance_ssl_connections",
|
||||
# ],
|
||||
# }
|
||||
|
||||
# Example 2: MANUAL — no Prowler check exists
|
||||
# DECISIONS["CCC.Core.CN01.AR07"] = {
|
||||
# "aws": [], # no IANA port/protocol check exists in Prowler
|
||||
# "azure": [],
|
||||
# "gcp": [],
|
||||
# }
|
||||
|
||||
# Example 3: Reuse a decision for multiple sibling ARs
|
||||
# DECISIONS["CCC.ObjStor.CN05.AR02"] = DECISIONS["CCC.ObjStor.CN05.AR01"]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Driver — do not edit below
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def load_inventory(provider: str) -> dict:
    """Return the cached check-metadata inventory for ``provider``.

    Reads the /tmp cache file recorded in CHECK_INV; aborts with
    instructions to run build_inventory.py when the cache is absent.
    """
    path = CHECK_INV[provider]
    if path.exists():
        return json.loads(path.read_text())
    raise SystemExit(
        f"Check inventory missing: {path}\n"
        f"Run: python skills/prowler-compliance/assets/build_inventory.py {provider}"
    )
|
||||
|
||||
|
||||
def resolve_json_path(provider: str) -> Path:
    """Locate the framework JSON file for ``provider``.

    Tries {FRAMEWORK_KEY}_{provider}.json first (e.g. ccc_aws.json), then
    bare {FRAMEWORK_KEY}.json for keys that already embed the provider
    suffix. Exits with the tried paths when neither exists.
    """
    provider_dir = PROWLER_DIR / provider
    candidates = [
        provider_dir / f"{FRAMEWORK_KEY}_{provider}.json",
        provider_dir / f"{FRAMEWORK_KEY}.json",
    ]
    found = next((p for p in candidates if p.exists()), None)
    if found is None:
        raise SystemExit(
            f"Could not find framework JSON for provider={provider} "
            f"with FRAMEWORK_KEY={FRAMEWORK_KEY}. Tried: {candidates}"
        )
    return found
|
||||
|
||||
|
||||
def apply_for_provider(provider: str) -> tuple[int, int, int]:
    """Apply DECISIONS to the JSON for one provider.

    REPLACE semantics: each decision fully overwrites the legacy Checks
    list of the matching requirement. Requirements without a decision for
    this provider are left untouched.

    Returns (touched, added, removed).
    """
    path = resolve_json_path(provider)
    with open(path) as f:
        data = json.load(f)
    inv = load_inventory(provider)

    touched = 0
    add_count = 0
    rm_count = 0
    # (requirement_id, check_id) pairs that are absent from the inventory.
    unknown: list[tuple[str, str]] = []

    for req in data["Requirements"]:
        rid = req["Id"]
        if rid not in DECISIONS or provider not in DECISIONS[rid]:
            continue
        # dict.fromkeys deduplicates while preserving the decision's order.
        new_checks = list(dict.fromkeys(DECISIONS[rid][provider]))
        # Pre-validate every decided check id against the cached inventory.
        for c in new_checks:
            if c not in inv:
                unknown.append((rid, c))
        before = set(req.get("Checks") or [])
        after = set(new_checks)
        rm_count += len(before - after)
        add_count += len(after - before)
        req["Checks"] = new_checks
        touched += 1

    # Abort BEFORE writing anything back so a typo never reaches disk.
    if unknown:
        print(f"\n!! {provider} — UNKNOWN CHECK IDS (typos?):", file=sys.stderr)
        for rid, c in unknown:
            print(f" {rid} -> {c}", file=sys.stderr)
        print("\nAborting: fix the check ids above and re-run.", file=sys.stderr)
        sys.exit(2)

    # ensure_ascii=False keeps non-ASCII text (em dashes etc.) readable;
    # the trailing newline matches the repo's JSON formatting convention.
    with open(path, "w") as f:
        json.dump(data, f, indent=2, ensure_ascii=False)
        f.write("\n")
    return touched, add_count, rm_count
|
||||
|
||||
|
||||
def main() -> int:
    """Drive the audit: refuse to run empty, then apply DECISIONS per provider."""
    if not DECISIONS:
        print("No DECISIONS encoded. Fill in the DECISIONS dict and re-run.")
        return 1
    print(f"Applying {len(DECISIONS)} decisions to framework '{FRAMEWORK_KEY}'...")
    for prov in PROVIDERS:
        touched, added, removed = apply_for_provider(prov)
        print(f" {prov}: touched={touched} added={added} removed={removed}")
    return 0
|
||||
|
||||
|
||||
# Script entry point: propagate main()'s return code as the exit status.
if __name__ == "__main__":
    sys.exit(main())
|
||||
82
skills/prowler-compliance/assets/build_inventory.py
Normal file
82
skills/prowler-compliance/assets/build_inventory.py
Normal file
@@ -0,0 +1,82 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Build a per-provider check inventory by scanning Prowler's check metadata files.
|
||||
|
||||
Outputs one JSON per provider at /tmp/checks_{provider}.json with the shape:
|
||||
{
|
||||
"check_id": {
|
||||
"service": "...",
|
||||
"subservice": "...",
|
||||
"resource": "...",
|
||||
"severity": "...",
|
||||
"title": "...",
|
||||
"description": "...",
|
||||
"risk": "..."
|
||||
},
|
||||
...
|
||||
}
|
||||
|
||||
This is the reference used by audit_framework_template.py for pre-validation
|
||||
(every check id in the audit ledger must exist in the inventory) and by
|
||||
query_checks.py for keyword/service lookup.
|
||||
|
||||
Usage:
|
||||
python skills/prowler-compliance/assets/build_inventory.py
|
||||
# Or for a specific provider:
|
||||
python skills/prowler-compliance/assets/build_inventory.py aws
|
||||
|
||||
Output:
|
||||
/tmp/checks_aws.json (~586 checks)
|
||||
/tmp/checks_azure.json (~167 checks)
|
||||
/tmp/checks_gcp.json (~102 checks)
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
DEFAULT_PROVIDERS = ["aws", "azure", "gcp", "kubernetes", "m365", "github",
|
||||
"oraclecloud", "alibabacloud", "mongodbatlas", "nhn",
|
||||
"iac", "llm", "googleworkspace", "cloudflare"]
|
||||
|
||||
|
||||
def build_for_provider(provider: str) -> dict:
    """Scan a provider's ``*.metadata.json`` files into an inventory dict.

    Returns {check_id: {service, subservice, resource, severity, title,
    description, risk}}. Missing providers and unparseable files are
    reported on stderr and skipped (best-effort, never fatal).
    """
    inventory: dict[str, dict] = {}
    base = Path(f"prowler/providers/{provider}/services")
    if not base.exists():
        print(f" skip {provider}: no services directory", file=sys.stderr)
        return inventory
    # Output key -> metadata field name in the *.metadata.json schema.
    field_map = {
        "service": "ServiceName",
        "subservice": "SubServiceName",
        "resource": "ResourceType",
        "severity": "Severity",
        "title": "CheckTitle",
        "description": "Description",
        "risk": "Risk",
    }
    for meta_path in base.rglob("*.metadata.json"):
        try:
            data = json.loads(meta_path.read_text())
        except Exception as exc:
            print(f" warn: cannot parse {meta_path}: {exc}", file=sys.stderr)
            continue
        # Fall back to the filename when CheckID is absent from the metadata.
        check_id = data.get("CheckID") or meta_path.stem.replace(".metadata", "")
        inventory[check_id] = {key: data.get(src, "") for key, src in field_map.items()}
    return inventory
|
||||
|
||||
|
||||
def main() -> int:
    """Write /tmp/checks_{provider}.json for each requested provider.

    Providers come from argv; with no arguments, DEFAULT_PROVIDERS is used.
    """
    selected = sys.argv[1:] or DEFAULT_PROVIDERS
    for provider in selected:
        inv = build_for_provider(provider)
        out_path = Path(f"/tmp/checks_{provider}.json")
        out_path.write_text(json.dumps(inv, indent=2))
        print(f" {provider}: {len(inv)} checks → {out_path}")
    return 0
|
||||
|
||||
|
||||
# Script entry point: propagate main()'s return code as the exit status.
if __name__ == "__main__":
    sys.exit(main())
|
||||
111
skills/prowler-compliance/assets/configs/ccc.yaml
Normal file
111
skills/prowler-compliance/assets/configs/ccc.yaml
Normal file
@@ -0,0 +1,111 @@
|
||||
# FINOS Common Cloud Controls (CCC) sync config for sync_framework.py.
|
||||
#
|
||||
# Usage:
|
||||
# python skills/prowler-compliance/assets/sync_framework.py \
|
||||
# skills/prowler-compliance/assets/configs/ccc.yaml
|
||||
#
|
||||
# Prerequisite: run the upstream fetch step from SKILL.md Workflow A Step 1 to
|
||||
# populate upstream.dir with the raw FINOS catalog YAML files.
|
||||
|
||||
framework:
|
||||
name: CCC
|
||||
display_name: Common Cloud Controls Catalog (CCC)
|
||||
version: v2025.10
|
||||
# The {provider_display} placeholder is replaced at output time with the
|
||||
# per-provider display string from the providers list below.
|
||||
description_template: "Common Cloud Controls Catalog (CCC) for {provider_display}"
|
||||
|
||||
providers:
|
||||
- key: aws
|
||||
display: AWS
|
||||
- key: azure
|
||||
display: Azure
|
||||
- key: gcp
|
||||
display: GCP
|
||||
|
||||
output:
|
||||
# Supported placeholders: {provider}, {framework}, {version}.
|
||||
# For versioned frameworks like CIS the template would be
|
||||
# "prowler/compliance/{provider}/cis_{version}_{provider}.json".
|
||||
path_template: "prowler/compliance/{provider}/ccc_{provider}.json"
|
||||
|
||||
upstream:
|
||||
# Directory containing the cached FINOS catalog YAMLs. Populate via
|
||||
# SKILL.md Workflow A Step 1 (gh api raw download commands).
|
||||
dir: /tmp/ccc_upstream
|
||||
fetch_docs: "See SKILL.md Workflow A Step 1 for gh api fetch commands"
|
||||
|
||||
parser:
|
||||
# Name of the parser module under parsers/ (loaded dynamically by the
|
||||
# runner). For FINOS CCC YAML this is always finos_ccc.
|
||||
module: finos_ccc
|
||||
|
||||
# FINOS CCC catalog files in load order. Core first so its ARs render
|
||||
# first in the output JSON.
|
||||
catalog_files:
|
||||
- core_ccc.yaml
|
||||
- management_auditlog.yaml
|
||||
- management_logging.yaml
|
||||
- management_monitoring.yaml
|
||||
- storage_object.yaml
|
||||
- networking_loadbalancer.yaml
|
||||
- networking_vpc.yaml
|
||||
- crypto_key.yaml
|
||||
- crypto_secrets.yaml
|
||||
- database_warehouse.yaml
|
||||
- database_vector.yaml
|
||||
- database_relational.yaml
|
||||
- devtools_build.yaml
|
||||
- devtools_container-registry.yaml
|
||||
- identity_iam.yaml
|
||||
- ai-ml_gen-ai.yaml
|
||||
- ai-ml_mlde.yaml
|
||||
- app-integration_message.yaml
|
||||
- compute_serverless-computing.yaml
|
||||
|
||||
# Shape-2 catalogs (storage/object) reference the family via id only
|
||||
# (e.g. "CCC.ObjStor.Data") with no human-readable title or description
|
||||
# in the YAML. Map the suffix (after the last dot) to a canonical title
|
||||
# and description so the generated JSON has consistent FamilyName fields
|
||||
# regardless of upstream shape.
|
||||
family_id_title:
|
||||
Data: Data
|
||||
IAM: Identity and Access Management
|
||||
Identity: Identity and Access Management
|
||||
Encryption: Encryption
|
||||
Logging: Logging and Monitoring
|
||||
Network: Network Security
|
||||
Availability: Availability
|
||||
Integrity: Integrity
|
||||
Confidentiality: Confidentiality
|
||||
family_id_description:
|
||||
Data: "The Data control family ensures the confidentiality, integrity, availability, and sovereignty of data across its lifecycle."
|
||||
IAM: "The Identity and Access Management control family ensures that only trusted and authenticated entities can access resources."
|
||||
|
||||
post_processing:
|
||||
# Collapse FamilyName variants that appear inconsistently across upstream
|
||||
# catalogs. The Prowler UI groups by Attributes[0].FamilyName exactly,
|
||||
# so each variant would otherwise become a separate tree branch.
|
||||
family_name_normalization:
|
||||
"Logging & Monitoring": "Logging and Monitoring"
|
||||
"Logging and Metrics Publication": "Logging and Monitoring"
|
||||
|
||||
# Preserve existing Checks lists from the legacy Prowler JSON when
|
||||
# regenerating. The runner builds two lookup tables from the legacy
|
||||
# output: a primary index by Id, and fallback indexes composed of
|
||||
# attribute field names.
|
||||
#
|
||||
# primary_key: the top-level requirement field to use as the primary
|
||||
# lookup key (almost always "Id")
|
||||
# fallback_keys: a list of composite keys. Each composite key is a list
|
||||
# of Attributes[0] field names to join into a tuple. List-valued fields
|
||||
# (like Applicability) are frozen to frozenset so the tuple is hashable.
|
||||
#
|
||||
# CCC uses (Section, Applicability) because Applicability is a CCC-only
|
||||
# top-level attribute field. CIS would use (Section, Profile). NIST would
|
||||
# use (ItemId,). The fallback is how renumbered or rewritten ids still
|
||||
# recover their check mappings.
|
||||
check_preservation:
|
||||
primary_key: Id
|
||||
fallback_keys:
|
||||
- [Section, Applicability]
|
||||
91
skills/prowler-compliance/assets/dump_section.py
Normal file
91
skills/prowler-compliance/assets/dump_section.py
Normal file
@@ -0,0 +1,91 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Dump every requirement of a compliance framework for a given id prefix across
|
||||
providers, with their current Check mappings.
|
||||
|
||||
Useful for reviewing a whole control family in one pass before encoding audit
|
||||
decisions in audit_framework_template.py.
|
||||
|
||||
Usage:
|
||||
# Dump all CCC.Core requirements across aws/azure/gcp
|
||||
python skills/prowler-compliance/assets/dump_section.py ccc "CCC.Core."
|
||||
|
||||
# Dump all CIS 5.0 section 1 requirements for AWS only
|
||||
python skills/prowler-compliance/assets/dump_section.py cis_5.0_aws "1."
|
||||
|
||||
Arguments:
|
||||
framework_key: file prefix inside prowler/compliance/{provider}/ without
|
||||
the provider suffix. Examples:
|
||||
- "ccc" → loads ccc_aws.json / ccc_azure.json / ccc_gcp.json
|
||||
- "cis_5.0_aws" → loads only that one file
|
||||
- "iso27001_2022" → loads all providers
|
||||
id_prefix: Requirement id prefix to filter by (e.g. "CCC.Core.",
|
||||
"1.1.", "A.5.").
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import sys
|
||||
from collections import defaultdict
|
||||
from pathlib import Path
|
||||
|
||||
PROWLER_COMPLIANCE_DIR = Path("prowler/compliance")
|
||||
|
||||
|
||||
def main() -> int:
    """Dump all requirements matching an id prefix, grouped across providers.

    Returns 0 on success, 1 on a usage error, 2 when no framework files
    match ``framework_key``.
    """
    if len(sys.argv) < 3:
        print(__doc__)
        return 1

    framework_key = sys.argv[1]
    id_prefix = sys.argv[2]

    # Collect matching JSON files across every provider directory. A stem
    # matches either exactly ("cis_5.0_aws") or as a prefix ("ccc" matches
    # "ccc_aws"). The prefix test already covers the
    # "{framework_key}_{provider}" form, so no third comparison is needed.
    candidates: list[tuple[str, Path]] = []
    for prov_dir in sorted(PROWLER_COMPLIANCE_DIR.iterdir()):
        if not prov_dir.is_dir():
            continue
        for json_path in prov_dir.glob("*.json"):
            stem = json_path.stem
            if stem == framework_key or stem.startswith(f"{framework_key}_"):
                candidates.append((prov_dir.name, json_path))

    if not candidates:
        print(f"No files matching '{framework_key}'", file=sys.stderr)
        return 2

    # by_id[requirement_id][provider] -> summary of that provider's copy.
    by_id: dict[str, dict] = defaultdict(dict)
    for prov, path in candidates:
        with open(path) as f:
            data = json.load(f)
        for req in data["Requirements"]:
            if req["Id"].startswith(id_prefix):
                attrs = (req.get("Attributes") or [{}])[0]
                by_id[req["Id"]][prov] = {
                    "desc": req.get("Description", ""),
                    "sec": attrs.get("Section", ""),
                    "obj": attrs.get("SubSectionObjective", ""),
                    "checks": req.get("Checks") or [],
                }

    # One section per requirement id. Shared metadata comes from an
    # arbitrary provider's copy (they are expected to agree); the check
    # mappings are printed per provider.
    for ar_id in sorted(by_id):
        rows = by_id[ar_id]
        sample = next(iter(rows.values()))
        print(f"\n### {ar_id}")
        print(f" desc: {sample['desc']}")
        if sample["sec"]:
            print(f" sec : {sample['sec']}")
        if sample["obj"]:
            print(f" obj : {sample['obj']}")
        for prov in ["aws", "azure", "gcp", "kubernetes", "m365", "github",
                     "oraclecloud", "alibabacloud"]:
            if prov in rows:
                checks = rows[prov]["checks"]
                print(f" {prov}: ({len(checks)}) {checks}")
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
219
skills/prowler-compliance/assets/parsers/finos_ccc.py
Normal file
219
skills/prowler-compliance/assets/parsers/finos_ccc.py
Normal file
@@ -0,0 +1,219 @@
|
||||
"""
|
||||
FINOS Common Cloud Controls (CCC) YAML parser.
|
||||
|
||||
Reads cached upstream YAML files and emits Prowler-format requirements
|
||||
(``{Id, Description, Attributes: [...], Checks: []}``). This module is
|
||||
agnostic to providers, JSON output paths, framework metadata and legacy
|
||||
check-mapping preservation — those are handled by ``sync_framework.py``.
|
||||
|
||||
Contract
|
||||
--------
|
||||
``parse_upstream(config: dict) -> list[dict]``
|
||||
Returns a list of Prowler-format requirement dicts with **guaranteed
|
||||
unique ids**. Foreign-prefix AR rewriting and genuine collision
|
||||
renumbering both happen inside this module — the runner treats id
|
||||
uniqueness as a contract violation, not as something to fix.
|
||||
|
||||
Config keys consumed
|
||||
--------------------
|
||||
This parser reads the following config entries (the rest of the config is
|
||||
opaque to it):
|
||||
|
||||
- ``upstream.dir`` — directory containing the cached YAMLs
|
||||
- ``parser.catalog_files`` — ordered list of YAML filenames to load
|
||||
- ``parser.family_id_title`` — suffix → canonical family title (shape 2)
|
||||
- ``parser.family_id_description`` — suffix → family description (shape 2)
|
||||
|
||||
Upstream shapes
|
||||
---------------
|
||||
FINOS CCC catalogs come in two shapes:
|
||||
|
||||
1. ``control-families: [{title, description, controls: [...]}]``
|
||||
(used by most catalogs)
|
||||
2. ``controls: [{id, family: "CCC.X.Y", ...}]`` (no families wrapper; used
|
||||
by ``storage/object``). The ``family`` field references a family id with
|
||||
no human-readable title in the file — the title/description come from
|
||||
``config.parser.family_id_title`` / ``family_id_description``.
|
||||
|
||||
Id rewriting rules
|
||||
------------------
|
||||
- **Foreign-prefix rewriting**: upstream intentionally aliases requirements
|
||||
across catalogs by keeping the original prefix (e.g. ``CCC.AuditLog.CN08.AR01``
|
||||
appears nested under ``CCC.Logging.CN03``). Prowler requires unique ids
|
||||
within a catalog file, so we rename the AR to fit its parent control:
|
||||
``CCC.Logging.CN03.AR01``. See ``rewrite_ar_id()``.
|
||||
- **Genuine collision renumbering**: sometimes upstream has a real typo
|
||||
where two distinct requirements share the same id (e.g.
|
||||
``CCC.Core.CN14.AR02`` appears twice for 30-day and 14-day backup variants).
|
||||
The second copy is renumbered to the next free AR number within the
|
||||
control. See the ``seen_ids`` logic in ``emit_requirement()``.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
import yaml
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def clean(value: str | None) -> str:
|
||||
"""Trim and collapse internal whitespace/newlines into single spaces.
|
||||
|
||||
Upstream YAML uses ``|`` block scalars that preserve newlines; Prowler
|
||||
stores descriptions as single-line text.
|
||||
"""
|
||||
if not value:
|
||||
return ""
|
||||
return " ".join(value.split())
|
||||
|
||||
|
||||
def flatten_mappings(mappings):
    """Convert upstream ``{reference-id, entries: [{reference-id, ...}]}``
    mappings into Prowler's ``{ReferenceId, Identifiers: [...]}`` shape.

    ``None`` or an empty input yields an empty list.
    """
    result = []
    for mapping in mappings or []:
        identifiers = [
            entry["reference-id"]
            for entry in (mapping.get("entries") or [])
            if entry.get("reference-id")
        ]
        result.append(
            {
                "ReferenceId": mapping.get("reference-id", ""),
                "Identifiers": identifiers,
            }
        )
    return result
|
||||
|
||||
|
||||
def ar_prefix(ar_id: str) -> str:
    """Return the parent control id: the first three dot-segments of an AR id.

    e.g. ``CCC.Core.CN01.AR01`` -> ``CCC.Core.CN01``.
    """
    segments = ar_id.split(".")[:3]
    return ".".join(segments)
|
||||
|
||||
|
||||
def rewrite_ar_id(parent_control_id: str, original_ar_id: str, ar_index: int) -> str:
    """Rename an AR whose id carries a foreign control prefix.

    Upstream intentionally aliases requirements across catalogs by keeping
    the original prefix; Prowler needs each AR nested under its actual
    parent control. When the prefix already matches, the id is returned
    untouched.

    Example: parent ``CCC.Logging.CN03`` with AR id ``CCC.AuditLog.CN08.AR01``
    at index 0 -> ``CCC.Logging.CN03.AR01``.
    """
    # First three dot-segments of the AR id name its (claimed) parent.
    claimed_parent = ".".join(original_ar_id.split(".")[:3])
    if claimed_parent == parent_control_id:
        return original_ar_id
    return f"{parent_control_id}.AR{ar_index + 1:02d}"
|
||||
|
||||
|
||||
def emit_requirement(
    control: dict,
    family_name: str,
    family_desc: str,
    seen_ids: set[str],
    requirements: list[dict],
) -> None:
    """Append Prowler-format requirement dicts for one FINOS control.

    Each assessment-requirement becomes one requirement entry. Foreign-prefix
    AR ids are rewritten to fit the parent control, and genuine upstream id
    collisions are renumbered to the next free AR number, so the output list
    carries unique ids. ``seen_ids`` and ``requirements`` are mutated in
    place.
    """
    control_id = clean(control.get("id"))
    control_title = clean(control.get("title"))
    section = f"{control_id} {control_title}".strip()
    objective = clean(control.get("objective"))
    threat_mappings = flatten_mappings(control.get("threat-mappings"))
    guideline_mappings = flatten_mappings(control.get("guideline-mappings"))

    for idx, ar in enumerate(control.get("assessment-requirements") or []):
        raw_id = clean(ar.get("id"))
        if not raw_id:
            continue
        new_id = rewrite_ar_id(control_id, raw_id, idx)
        if new_id in seen_ids:
            # Genuine upstream collision: scan for the next unused AR
            # number within the same control.
            stem = ".".join(new_id.split(".")[:-1])
            counter = 1
            while f"{stem}.AR{counter:02d}" in seen_ids:
                counter += 1
            new_id = f"{stem}.AR{counter:02d}"
        seen_ids.add(new_id)

        attributes = {
            "FamilyName": family_name,
            "FamilyDescription": family_desc,
            "Section": section,
            "SubSection": "",
            "SubSectionObjective": objective,
            "Applicability": list(ar.get("applicability") or []),
            "Recommendation": clean(ar.get("recommendation")),
            "SectionThreatMappings": threat_mappings,
            "SectionGuidelineMappings": guideline_mappings,
        }
        requirements.append(
            {
                "Id": new_id,
                "Description": clean(ar.get("text")),
                "Attributes": [attributes],
                "Checks": [],
            }
        )
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Public entry point
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def parse_upstream(config: dict) -> list[dict]:
    """Walk upstream YAMLs and emit Prowler-format requirements.

    Handles both top-level shapes (``control-families`` and ``controls``).
    Ids are guaranteed unique in the returned list.

    A missing catalog file is non-fatal: a warning naming the missing path
    is printed to stderr and that file is skipped.
    """
    import sys

    upstream_dir = Path(config["upstream"]["dir"])
    parser_cfg = config.get("parser") or {}
    catalog_files = parser_cfg.get("catalog_files") or []
    family_id_title = parser_cfg.get("family_id_title") or {}
    family_id_description = parser_cfg.get("family_id_description") or {}

    requirements: list[dict] = []
    seen_ids: set[str] = set()

    for filename in catalog_files:
        path = upstream_dir / filename
        if not path.exists():
            # The runner handles fatal errors; a missing optional catalog
            # file is surfaced as a warning. Name the path so the operator
            # knows exactly what to (re-)fetch.
            print(f"warn: missing upstream file {path}", file=sys.stderr)
            continue
        with open(path) as f:
            doc = yaml.safe_load(f) or {}

        # Shape 1: control-families wrapper
        for family in doc.get("control-families") or []:
            family_name = clean(family.get("title"))
            family_desc = clean(family.get("description"))
            for control in family.get("controls") or []:
                emit_requirement(
                    control, family_name, family_desc, seen_ids, requirements
                )

        # Shape 2: top-level controls with family reference id
        for control in doc.get("controls") or []:
            family_ref = clean(control.get("family"))
            suffix = family_ref.split(".")[-1] if family_ref else ""
            family_name = family_id_title.get(suffix, suffix or "Data")
            family_desc = family_id_description.get(suffix, "")
            emit_requirement(
                control, family_name, family_desc, seen_ids, requirements
            )

    return requirements
|
||||
86
skills/prowler-compliance/assets/query_checks.py
Normal file
86
skills/prowler-compliance/assets/query_checks.py
Normal file
@@ -0,0 +1,86 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Keyword/service/id lookup over a Prowler check inventory produced by
|
||||
build_inventory.py.
|
||||
|
||||
Usage:
|
||||
# Keyword AND-search across id + title + risk + description
|
||||
python skills/prowler-compliance/assets/query_checks.py aws encryption transit
|
||||
|
||||
# Show all checks for a service
|
||||
python skills/prowler-compliance/assets/query_checks.py aws --service iam
|
||||
|
||||
# Show full metadata for one check id
|
||||
python skills/prowler-compliance/assets/query_checks.py aws --id kms_cmk_rotation_enabled
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import sys
|
||||
|
||||
|
||||
def main() -> int:
    """CLI entry point: keyword AND-search, --service listing, or --id detail.

    Exit codes: 0 ok, 1 usage error, 2 missing inventory, 3 unknown check id.
    """
    if len(sys.argv) < 3:
        print(__doc__)
        return 1

    provider = sys.argv[1]
    try:
        with open(f"/tmp/checks_{provider}.json") as f:
            inv = json.load(f)
    except FileNotFoundError:
        print(
            f"No inventory for {provider}. Run build_inventory.py first.",
            file=sys.stderr,
        )
        return 2

    mode = sys.argv[2]
    if mode == "--service":
        if len(sys.argv) < 4:
            print("usage: --service <service_name>")
            return 1
        svc = sys.argv[3]
        matching = [cid for cid in sorted(inv) if inv[cid].get("service") == svc]
        for cid in matching:
            print(f" {cid}")
            print(f" {inv[cid].get('title', '')}")
        print(f"\n{len(matching)} checks in service '{svc}'")
    elif mode == "--id":
        if len(sys.argv) < 4:
            print("usage: --id <check_id>")
            return 1
        cid = sys.argv[3]
        if cid not in inv:
            print(f"NOT FOUND: {cid}")
            return 3
        meta = inv[cid]
        print(f"== {cid} ==")
        print(f"service : {meta.get('service')}")
        print(f"severity: {meta.get('severity')}")
        print(f"resource: {meta.get('resource')}")
        print(f"title : {meta.get('title')}")
        print(f"desc : {meta.get('description', '')[:500]}")
        print(f"risk : {meta.get('risk', '')[:500]}")
    else:
        # Keyword AND-search over id + title + risk + description.
        keywords = [word.lower() for word in sys.argv[2:]]
        match_count = 0
        for cid in sorted(inv):
            meta = inv[cid]
            haystack = " ".join(
                [
                    cid,
                    meta.get("title", ""),
                    meta.get("risk", ""),
                    meta.get("description", ""),
                ]
            ).lower()
            if all(word in haystack for word in keywords):
                match_count += 1
                print(f" {cid} [{meta.get('service', '')}]")
                print(f" {meta.get('title', '')[:120]}")
        print(f"\n{match_count} matches for {' + '.join(keywords)}")
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
478
skills/prowler-compliance/assets/sync_framework.py
Normal file
478
skills/prowler-compliance/assets/sync_framework.py
Normal file
@@ -0,0 +1,478 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Generic, config-driven compliance framework sync runner.
|
||||
|
||||
Usage:
|
||||
python skills/prowler-compliance/assets/sync_framework.py \
|
||||
skills/prowler-compliance/assets/configs/ccc.yaml
|
||||
|
||||
Pipeline:
|
||||
1. Load and validate the YAML config (fail fast on missing or empty
|
||||
required fields — notably ``framework.version``, which silently
|
||||
breaks ``get_check_compliance()`` key construction if empty).
|
||||
2. Dynamically import the parser module declared in ``parser.module``
|
||||
(resolved as ``parsers.{name}`` under this script's directory).
|
||||
3. Call ``parser.parse_upstream(config) -> list[dict]`` to get raw
|
||||
Prowler-format requirements. The parser owns all upstream-format
|
||||
quirks (foreign-prefix AR rewriting, collision renumbering, shape
|
||||
handling) and MUST return ids that are unique within the returned
|
||||
list.
|
||||
4. **Safety net**: assert id uniqueness. The runner raises
|
||||
``ValueError`` on any duplicate — it does NOT silently renumber,
|
||||
because mutating a canonical upstream id (e.g. CIS ``1.1.1`` or
|
||||
NIST ``AC-2(1)``) would be catastrophic.
|
||||
5. Apply generic ``FamilyName`` normalization from
|
||||
``post_processing.family_name_normalization`` (optional).
|
||||
6. Preserve legacy ``Checks`` lists from the existing Prowler JSON
|
||||
using a config-driven primary key + fallback key chain. CCC uses
|
||||
``(Section, Applicability)`` as fallback; CIS would use
|
||||
``(Section, Profile)``; NIST would use ``(ItemId,)``.
|
||||
7. Wrap each provider's requirements in the framework metadata dict
|
||||
built from the config templates.
|
||||
8. Write each provider's JSON to the path resolved from
|
||||
``output.path_template`` (supports ``{framework}``, ``{version}``
|
||||
and ``{provider}`` placeholders).
|
||||
9. Pydantic-validate the written JSON via ``Compliance.parse_file()``
|
||||
and report the load counts per provider.
|
||||
|
||||
The runner is strictly generic — it never mentions CCC, knows nothing
|
||||
about YAML shapes, and can handle any upstream-backed framework given a
|
||||
parser module and a config file.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import importlib
|
||||
import json
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
import yaml
|
||||
|
||||
# Make sibling `parsers/` package importable regardless of the runner's
|
||||
# invocation directory.
|
||||
_SCRIPT_DIR = Path(__file__).resolve().parent
|
||||
if str(_SCRIPT_DIR) not in sys.path:
|
||||
sys.path.insert(0, str(_SCRIPT_DIR))
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Config loading and validation
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class ConfigError(ValueError):
    """Raised when the sync config is malformed or missing required fields."""
    # NOTE: subclasses ValueError so callers catching ValueError broadly
    # still handle configuration problems.
|
||||
|
||||
|
||||
def _require(cfg: dict, dotted_path: str) -> Any:
|
||||
"""Fetch a dotted-path key from nested dicts. Raises ConfigError on
|
||||
missing or empty values (empty-string, empty-list, None)."""
|
||||
current: Any = cfg
|
||||
parts = dotted_path.split(".")
|
||||
for i, part in enumerate(parts):
|
||||
if not isinstance(current, dict) or part not in current:
|
||||
raise ConfigError(f"config: missing required field '{dotted_path}'")
|
||||
current = current[part]
|
||||
if current in ("", None, [], {}):
|
||||
raise ConfigError(f"config: field '{dotted_path}' must not be empty")
|
||||
return current
|
||||
|
||||
|
||||
def load_config(path: Path) -> dict:
    """Load and validate a sync config YAML, failing fast on bad input.

    Raises ConfigError for a missing file, a non-mapping root, any absent
    or empty required field, or a malformed ``providers`` list. An empty
    ``framework.version`` in particular would silently break
    ``get_check_compliance()`` key construction downstream.
    """
    if not path.exists():
        raise ConfigError(f"config file not found: {path}")
    with open(path) as f:
        cfg = yaml.safe_load(f) or {}
    if not isinstance(cfg, dict):
        raise ConfigError(f"config root must be a mapping, got {type(cfg).__name__}")

    # Required fields — fail fast, in a fixed order so the first missing
    # field reported is deterministic.
    required_fields = (
        "framework.name",
        "framework.display_name",
        "framework.version",
        "framework.description_template",
        "providers",
        "output.path_template",
        "upstream.dir",
        "parser.module",
        "post_processing.check_preservation.primary_key",
    )
    for dotted in required_fields:
        _require(cfg, dotted)

    providers = cfg["providers"]
    if not isinstance(providers, list) or not providers:
        raise ConfigError("config: 'providers' must be a non-empty list")
    for idx, entry in enumerate(providers):
        well_formed = isinstance(entry, dict) and "key" in entry and "display" in entry
        if not well_formed:
            raise ConfigError(
                f"config: providers[{idx}] must have 'key' and 'display' fields"
            )

    return cfg
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Parser loading
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def load_parser(parser_module_name: str):
    """Import ``parsers.<name>`` and return the module.

    Raises ConfigError when the module cannot be imported so the caller
    can report it as a configuration problem.
    """
    module_path = f"parsers.{parser_module_name}"
    try:
        return importlib.import_module(module_path)
    except ImportError as exc:
        raise ConfigError(f"cannot import parser '{module_path}': {exc}") from exc
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Post-processing: id uniqueness safety net
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def assert_unique_ids(requirements: list[dict]) -> None:
    """Enforce the parser contract: every requirement carries a unique Id.

    Raises ValueError on a missing Id or any duplicate. Duplicates are a
    parser bug — this runner never silently renumbers them.
    """
    observed: set[str] = set()
    duplicated: set[str] = set()
    for requirement in requirements:
        requirement_id = requirement.get("Id")
        if not requirement_id:
            raise ValueError(f"requirement missing Id: {requirement}")
        if requirement_id in observed:
            duplicated.add(requirement_id)
        observed.add(requirement_id)
    if duplicated:
        raise ValueError(
            f"parser returned duplicate requirement ids: {sorted(duplicated)}"
        )
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Post-processing: FamilyName normalization
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def normalize_family_names(requirements: list[dict], norm_map: dict[str, str]) -> None:
    """Rewrite ``Attributes[*].FamilyName`` in place via ``norm_map``.

    A no-op when the map is empty; names absent from the map are left
    untouched.
    """
    if not norm_map:
        return
    for requirement in requirements:
        for attribute in requirement.get("Attributes") or []:
            current = attribute.get("FamilyName")
            if current in norm_map:
                attribute["FamilyName"] = norm_map[current]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Post-processing: legacy check-mapping preservation
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _freeze(value: Any) -> Any:
|
||||
"""Make a value hashable for use in composite lookup keys.
|
||||
|
||||
Lists become frozensets (order-insensitive match). Scalars pass through.
|
||||
"""
|
||||
if isinstance(value, list):
|
||||
return frozenset(value)
|
||||
return value
|
||||
|
||||
|
||||
def _build_fallback_key(attrs: dict, field_names: list[str]) -> tuple | None:
|
||||
"""Build a composite tuple key from the given attribute field names.
|
||||
|
||||
Returns None if any field is missing or falsy — that key will be
|
||||
skipped (the lookup table just won't have an entry for it).
|
||||
"""
|
||||
parts = []
|
||||
for name in field_names:
|
||||
if name not in attrs:
|
||||
return None
|
||||
value = attrs[name]
|
||||
if value in ("", None, [], {}):
|
||||
return None
|
||||
parts.append(_freeze(value))
|
||||
return tuple(parts)
|
||||
|
||||
|
||||
def load_legacy_check_maps(
    legacy_path: Path,
    primary_key: str,
    fallback_keys: list[list[str]],
) -> tuple[dict[str, list[str]], list[dict[tuple, list[str]]]]:
    """Index the existing Prowler JSON for check-mapping preservation.

    Returns ``(by_primary, by_fallback)``: ``by_primary`` maps the primary
    key value (e.g. the requirement Id) to its checks, while ``by_fallback``
    holds one composite-key lookup dict per entry of ``fallback_keys``.
    Both are empty when ``legacy_path`` does not exist (first-time sync).
    """
    by_primary: dict[str, list[str]] = {}
    by_fallback: list[dict[tuple, list[str]]] = [{} for _ in fallback_keys]

    if not legacy_path.exists():
        return by_primary, by_fallback

    with open(legacy_path) as f:
        legacy = json.load(f)

    for requirement in legacy.get("Requirements") or []:
        checks = requirement.get("Checks") or []

        # Primary index — dedupe while preserving first-seen order.
        primary_value = requirement.get(primary_key)
        if primary_value:
            primary_bucket = by_primary.setdefault(primary_value, [])
            primary_bucket.extend(c for c in checks if c not in primary_bucket)

        # Fallback indexes read only the first Attributes entry.
        attribute_list = requirement.get("Attributes") or []
        if not attribute_list:
            continue
        first_attrs = attribute_list[0]
        for position, field_names in enumerate(fallback_keys):
            composite = _build_fallback_key(first_attrs, field_names)
            if composite is None:
                continue
            fallback_bucket = by_fallback[position].setdefault(composite, [])
            fallback_bucket.extend(c for c in checks if c not in fallback_bucket)

    return by_primary, by_fallback
|
||||
|
||||
|
||||
def lookup_preserved_checks(
    req: dict,
    by_primary: dict,
    by_fallback: list[dict],
    primary_key: str,
    fallback_keys: list[list[str]],
) -> list[str]:
    """Return preserved check ids for ``req``.

    The primary key wins; otherwise each fallback composite key is tried
    in order against the first Attributes entry. Always returns a fresh
    list (empty when nothing matches).
    """
    primary_value = req.get(primary_key)
    if primary_value and primary_value in by_primary:
        return list(by_primary[primary_value])

    attribute_list = req.get("Attributes") or []
    if not attribute_list:
        return []
    first_attrs = attribute_list[0]
    for position, field_names in enumerate(fallback_keys):
        composite = _build_fallback_key(first_attrs, field_names)
        if composite and composite in by_fallback[position]:
            return list(by_fallback[position][composite])
    return []
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Provider output assembly
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def resolve_output_path(template: str, framework: dict, provider_key: str) -> Path:
    """Expand ``output.path_template`` into a concrete Path.

    Supported placeholders: ``{provider}``, ``{framework}`` (lowercased
    framework name) and ``{version}``.
    """
    resolved = template.format(
        framework=framework["name"].lower(),
        version=framework["version"],
        provider=provider_key,
    )
    return Path(resolved)
|
||||
|
||||
|
||||
def build_provider_json(
    config: dict,
    provider: dict,
    base_requirements: list[dict],
) -> tuple[dict, dict[str, int]]:
    """Assemble one provider's JSON document plus match statistics.

    Loads the provider's existing JSON (if any) to preserve legacy Checks
    lists, resolving each requirement via the configured primary key first
    and then each fallback composite key in order. ``counts`` records how
    many requirements matched via "primary", "fallback", or "none".
    """
    framework = config["framework"]
    preservation = config["post_processing"]["check_preservation"]
    primary_key = preservation["primary_key"]
    fallback_keys = preservation.get("fallback_keys") or []

    # The current output file doubles as the legacy source of check
    # mappings for this provider.
    legacy_path = resolve_output_path(
        config["output"]["path_template"], framework, provider["key"]
    )
    by_primary, by_fallback = load_legacy_check_maps(
        legacy_path, primary_key, fallback_keys
    )

    def resolve_checks(req: dict) -> tuple[list[str], str]:
        # Primary key wins; otherwise try each fallback composite key in
        # config order against the first Attributes entry.
        primary_value = req.get(primary_key)
        if primary_value and primary_value in by_primary:
            return list(by_primary[primary_value]), "primary"
        attribute_list = req.get("Attributes") or []
        if attribute_list:
            first_attrs = attribute_list[0]
            for position, field_names in enumerate(fallback_keys):
                composite = _build_fallback_key(first_attrs, field_names)
                if composite and composite in by_fallback[position]:
                    return list(by_fallback[position][composite]), "fallback"
        return [], "none"

    counts = {"primary": 0, "fallback": 0, "none": 0}
    enriched: list[dict] = []
    for req in base_requirements:
        checks, source = resolve_checks(req)
        counts[source] += 1
        enriched.append(
            {
                "Id": req["Id"],
                "Description": req["Description"],
                # Shallow-copy attribute dicts so providers don't share refs
                "Attributes": [dict(a) for a in req.get("Attributes") or []],
                "Checks": checks,
            }
        )

    description = framework["description_template"].format(
        provider_display=provider["display"],
        provider_key=provider["key"],
        framework_name=framework["name"],
        framework_display=framework["display_name"],
        version=framework["version"],
    )
    document = {
        "Framework": framework["name"],
        "Version": framework["version"],
        "Provider": provider["display"],
        "Name": framework["display_name"],
        "Description": description,
        "Requirements": enriched,
    }
    return document, counts
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Pydantic post-validation
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def pydantic_validate(json_path: Path) -> int:
    """Validate a written JSON file against Prowler's Compliance model.

    Returns the number of requirements Prowler parsed, or -1 when the
    prowler package is not importable (validation skipped). Raises
    RuntimeError when the file fails Pydantic validation.
    """
    # Lazy import keeps the runner usable without Prowler installed.
    try:
        from prowler.lib.check.compliance_models import Compliance
    except ImportError:
        print(
            " note: prowler package not importable — skipping Pydantic validation",
            file=sys.stderr,
        )
        return -1

    try:
        parsed = Compliance.parse_file(str(json_path))
    except Exception as exc:
        raise RuntimeError(f"Pydantic validation failed for {json_path}: {exc}") from exc
    return len(parsed.Requirements)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Driver
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def main() -> int:
    """Drive the full sync pipeline for one config file.

    Exit codes: 0 success, 1 usage error, 2 bad config, 3 missing
    upstream cache, 4 unloadable parser, 5 parser contract violation
    (duplicate requirement ids).
    """
    if len(sys.argv) != 2:
        print("usage: sync_framework.py <config.yaml>", file=sys.stderr)
        return 1

    try:
        config = load_config(Path(sys.argv[1]))
    except ConfigError as exc:
        print(f"config error: {exc}", file=sys.stderr)
        return 2

    framework_name = config["framework"]["name"]
    upstream_dir = Path(config["upstream"]["dir"])
    if not upstream_dir.exists():
        hint = config["upstream"].get("fetch_docs", "(see SKILL.md Workflow A Step 1)")
        print(
            f"error: upstream cache dir {upstream_dir} not found\n"
            f" hint: {hint}",
            file=sys.stderr,
        )
        return 3

    parser_module_name = config["parser"]["module"]
    print(
        f"Sync: framework={framework_name} version={config['framework']['version']} "
        f"parser={parser_module_name}"
    )

    try:
        parser = load_parser(parser_module_name)
    except ConfigError as exc:
        print(f"parser error: {exc}", file=sys.stderr)
        return 4

    print(f"Parsing upstream from {upstream_dir}...")
    base_requirements = parser.parse_upstream(config)
    print(f" parser returned {len(base_requirements)} requirements")

    # Safety net: duplicate ids are a parser bug, never silently fixed here.
    try:
        assert_unique_ids(base_requirements)
    except ValueError as exc:
        print(f"parser contract violation: {exc}", file=sys.stderr)
        return 5

    # Optional FamilyName normalization.
    post_processing = config.get("post_processing", {})
    normalize_family_names(
        base_requirements, post_processing.get("family_name_normalization") or {}
    )

    # Per-provider output: build, write, validate, report.
    print()
    for provider in config["providers"]:
        provider_json, counts = build_provider_json(config, provider, base_requirements)
        out_path = resolve_output_path(
            config["output"]["path_template"], config["framework"], provider["key"]
        )
        out_path.parent.mkdir(parents=True, exist_ok=True)
        with open(out_path, "w") as f:
            json.dump(provider_json, f, indent=2, ensure_ascii=False)
            f.write("\n")

        validated = pydantic_validate(out_path)
        validated_msg = (
            f" pydantic_reqs={validated}" if validated >= 0 else " pydantic=skipped"
        )
        print(
            f" {provider['key']}: total={len(provider_json['Requirements'])} "
            f"matched_primary={counts['primary']} "
            f"matched_fallback={counts['fallback']} "
            f"new_or_unmatched={counts['none']}{validated_msg}"
        )
        print(f" wrote {out_path}")

    print("\nDone.")
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
@@ -6,6 +6,9 @@ All notable changes to the **Prowler UI** are documented in this file.
|
||||
|
||||
### 🚀 Added
|
||||
|
||||
- Invitation accept smart router for handling invitation flow routing [(#10573)](https://github.com/prowler-cloud/prowler/pull/10573)
|
||||
- Invitation link backward compatibility [(#10583)](https://github.com/prowler-cloud/prowler/pull/10583)
|
||||
- Updated invitation link to use smart router [(#10575)](https://github.com/prowler-cloud/prowler/pull/10575)
|
||||
- Multi-tenant organization management: create, switch, edit, and delete organizations from the profile page [(#10491)](https://github.com/prowler-cloud/prowler/pull/10491)
|
||||
- Findings grouped view with drill-down table showing resources per check, resource detail drawer, infinite scroll pagination, and bulk mute support [(#10425)](https://github.com/prowler-cloud/prowler/pull/10425)
|
||||
- Resource events tool to Lighthouse AI [(#10412)](https://github.com/prowler-cloud/prowler/pull/10412)
|
||||
@@ -18,6 +21,7 @@ All notable changes to the **Prowler UI** are documented in this file.
|
||||
|
||||
### 🐞 Fixed
|
||||
|
||||
- Preserve query parameters in callbackUrl during invitation flow [(#10571)](https://github.com/prowler-cloud/prowler/pull/10571)
|
||||
- Deleting the active organization now switches to the target org before deleting, preventing JWT rejection from the backend [(#10491)](https://github.com/prowler-cloud/prowler/pull/10491)
|
||||
- Clear Filters now resets all filters including muted findings and auto-applies, Clear all in pills only removes pill-visible sub-filters, and the discard icon is now an Undo text button [(#10446)](https://github.com/prowler-cloud/prowler/pull/10446)
|
||||
- Send to Jira modal now dynamically fetches and displays available issue types per project instead of hardcoding `"Task"`, fixing failures on non-English Jira instances [(#10534)](https://github.com/prowler-cloud/prowler/pull/10534)
|
||||
|
||||
@@ -163,6 +163,7 @@ describe("adaptFindingGroupResourcesResponse — malformed input", () => {
|
||||
alias: "production",
|
||||
},
|
||||
status: "FAIL",
|
||||
delta: "new",
|
||||
severity: "critical",
|
||||
first_seen_at: null,
|
||||
last_seen_at: "2024-01-01T00:00:00Z",
|
||||
@@ -178,5 +179,6 @@ describe("adaptFindingGroupResourcesResponse — malformed input", () => {
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].checkId).toBe("s3_check");
|
||||
expect(result[0].resourceName).toBe("my-bucket");
|
||||
expect(result[0].delta).toBe("new");
|
||||
});
|
||||
});
|
||||
|
||||
@@ -98,6 +98,7 @@ interface FindingGroupResourceAttributes {
|
||||
resource: ResourceInfo;
|
||||
provider: ProviderInfo;
|
||||
status: string;
|
||||
delta?: string | null;
|
||||
severity: string;
|
||||
first_seen_at: string | null;
|
||||
last_seen_at: string | null;
|
||||
@@ -137,14 +138,15 @@ export function adaptFindingGroupResourcesResponse(
|
||||
providerAlias: item.attributes.provider?.alias || "",
|
||||
providerUid: item.attributes.provider?.uid || "",
|
||||
resourceName: item.attributes.resource?.name || "-",
|
||||
resourceType: item.attributes.resource?.type || "-",
|
||||
resourceGroup: item.attributes.resource?.resource_group || "-",
|
||||
resourceUid: item.attributes.resource?.uid || "-",
|
||||
service: item.attributes.resource?.service || "-",
|
||||
region: item.attributes.resource?.region || "-",
|
||||
severity: (item.attributes.severity || "informational") as Severity,
|
||||
status: item.attributes.status,
|
||||
delta: item.attributes.delta || null,
|
||||
isMuted: item.attributes.status === "MUTED",
|
||||
// TODO: remove fallback once the API returns muted_reason in finding-group-resources
|
||||
mutedReason: item.attributes.muted_reason || undefined,
|
||||
firstSeenAt: item.attributes.first_seen_at,
|
||||
lastSeenAt: item.attributes.last_seen_at,
|
||||
|
||||
@@ -47,10 +47,6 @@ import {
|
||||
getLatestFindingGroupResources,
|
||||
} from "./finding-groups";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Blocker 1 + 2: FAIL-first sort and FAIL-only filter for drill-down resources
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
@@ -169,7 +165,7 @@ describe("getLatestFindingGroupResources — SSRF path traversal protection", ()
|
||||
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Blocker 1: Resources list must show FAIL first (sort=-status)
|
||||
// Resources list keeps FAIL-first sort but no longer forces FAIL-only filtering
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe("getFindingGroupResources — Blocker 1: FAIL-first sort", () => {
|
||||
@@ -181,30 +177,30 @@ describe("getFindingGroupResources — Blocker 1: FAIL-first sort", () => {
|
||||
fetchMock.mockResolvedValue(new Response("", { status: 200 }));
|
||||
});
|
||||
|
||||
it("should include sort=-status in the API call so FAIL resources appear first", async () => {
|
||||
it("should include the composite sort so FAIL resources appear first, then severity", async () => {
|
||||
// Given
|
||||
const checkId = "s3_bucket_public_access";
|
||||
|
||||
// When
|
||||
await getFindingGroupResources({ checkId });
|
||||
|
||||
// Then — the URL must contain sort=-status
|
||||
// Then — the URL must contain the composite sort
|
||||
const calledUrl = fetchMock.mock.calls[0][0] as string;
|
||||
const url = new URL(calledUrl);
|
||||
expect(url.searchParams.get("sort")).toBe("-status");
|
||||
expect(url.searchParams.get("sort")).toBe("-severity,-delta,-last_seen_at");
|
||||
});
|
||||
|
||||
it("should include filter[status]=FAIL in the API call so only impacted resources are shown", async () => {
|
||||
it("should not force filter[status]=FAIL so PASS resources can also be shown", async () => {
|
||||
// Given
|
||||
const checkId = "s3_bucket_public_access";
|
||||
|
||||
// When
|
||||
await getFindingGroupResources({ checkId });
|
||||
|
||||
// Then — the URL must contain filter[status]=FAIL
|
||||
// Then — the URL should not add a hardcoded status filter
|
||||
const calledUrl = fetchMock.mock.calls[0][0] as string;
|
||||
const url = new URL(calledUrl);
|
||||
expect(url.searchParams.get("filter[status]")).toBe("FAIL");
|
||||
expect(url.searchParams.get("filter[status]")).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -217,7 +213,7 @@ describe("getLatestFindingGroupResources — Blocker 1: FAIL-first sort", () =>
|
||||
fetchMock.mockResolvedValue(new Response("", { status: 200 }));
|
||||
});
|
||||
|
||||
it("should include sort=-status in the API call so FAIL resources appear first", async () => {
|
||||
it("should include the composite sort so FAIL resources appear first, then severity", async () => {
|
||||
// Given
|
||||
const checkId = "iam_user_mfa_enabled";
|
||||
|
||||
@@ -227,10 +223,10 @@ describe("getLatestFindingGroupResources — Blocker 1: FAIL-first sort", () =>
|
||||
// Then
|
||||
const calledUrl = fetchMock.mock.calls[0][0] as string;
|
||||
const url = new URL(calledUrl);
|
||||
expect(url.searchParams.get("sort")).toBe("-status");
|
||||
expect(url.searchParams.get("sort")).toBe("-severity,-delta,-last_seen_at");
|
||||
});
|
||||
|
||||
it("should include filter[status]=FAIL in the API call so only impacted resources are shown", async () => {
|
||||
it("should not force filter[status]=FAIL so PASS resources can also be shown", async () => {
|
||||
// Given
|
||||
const checkId = "iam_user_mfa_enabled";
|
||||
|
||||
@@ -240,7 +236,7 @@ describe("getLatestFindingGroupResources — Blocker 1: FAIL-first sort", () =>
|
||||
// Then
|
||||
const calledUrl = fetchMock.mock.calls[0][0] as string;
|
||||
const url = new URL(calledUrl);
|
||||
expect(url.searchParams.get("filter[status]")).toBe("FAIL");
|
||||
expect(url.searchParams.get("filter[status]")).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -257,7 +253,7 @@ describe("getFindingGroupResources — triangulation: params coexist", () => {
|
||||
fetchMock.mockResolvedValue(new Response("", { status: 200 }));
|
||||
});
|
||||
|
||||
it("should send sort=-status AND filter[status]=FAIL alongside pagination params", async () => {
|
||||
it("should send the composite sort alongside pagination params without forcing filter[status]", async () => {
|
||||
// Given
|
||||
const checkId = "s3_bucket_versioning";
|
||||
|
||||
@@ -269,8 +265,8 @@ describe("getFindingGroupResources — triangulation: params coexist", () => {
|
||||
const url = new URL(calledUrl);
|
||||
expect(url.searchParams.get("page[number]")).toBe("2");
|
||||
expect(url.searchParams.get("page[size]")).toBe("50");
|
||||
expect(url.searchParams.get("sort")).toBe("-status");
|
||||
expect(url.searchParams.get("filter[status]")).toBe("FAIL");
|
||||
expect(url.searchParams.get("sort")).toBe("-severity,-delta,-last_seen_at");
|
||||
expect(url.searchParams.get("filter[status]")).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -283,7 +279,7 @@ describe("getLatestFindingGroupResources — triangulation: params coexist", ()
|
||||
fetchMock.mockResolvedValue(new Response("", { status: 200 }));
|
||||
});
|
||||
|
||||
it("should send sort=-status AND filter[status]=FAIL alongside pagination params", async () => {
|
||||
it("should send the composite sort alongside pagination params without forcing filter[status]", async () => {
|
||||
// Given
|
||||
const checkId = "iam_root_mfa_enabled";
|
||||
|
||||
@@ -295,16 +291,16 @@ describe("getLatestFindingGroupResources — triangulation: params coexist", ()
|
||||
const url = new URL(calledUrl);
|
||||
expect(url.searchParams.get("page[number]")).toBe("3");
|
||||
expect(url.searchParams.get("page[size]")).toBe("20");
|
||||
expect(url.searchParams.get("sort")).toBe("-status");
|
||||
expect(url.searchParams.get("filter[status]")).toBe("FAIL");
|
||||
expect(url.searchParams.get("sort")).toBe("-severity,-delta,-last_seen_at");
|
||||
expect(url.searchParams.get("filter[status]")).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Blocker: Duplicate filter[status] — caller-supplied status must be stripped
|
||||
// Caller filters should propagate unchanged to the drill-down resources endpoint
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
describe("getFindingGroupResources — Blocker: caller filter[status] is always overridden to FAIL", () => {
|
||||
describe("getFindingGroupResources — caller filters are preserved", () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
vi.stubGlobal("fetch", fetchMock);
|
||||
@@ -313,23 +309,7 @@ describe("getFindingGroupResources — Blocker: caller filter[status] is always
|
||||
fetchMock.mockResolvedValue(new Response("", { status: 200 }));
|
||||
});
|
||||
|
||||
it("should use filter[status]=FAIL even when caller passes filter[status]=PASS", async () => {
|
||||
// Given — caller explicitly passes PASS, which must be ignored
|
||||
const checkId = "s3_bucket_public_access";
|
||||
const filters = { "filter[status]": "PASS" };
|
||||
|
||||
// When
|
||||
await getFindingGroupResources({ checkId, filters });
|
||||
|
||||
// Then — the final URL must have exactly one filter[status]=FAIL, not PASS
|
||||
const calledUrl = fetchMock.mock.calls[0][0] as string;
|
||||
const url = new URL(calledUrl);
|
||||
const allStatusValues = url.searchParams.getAll("filter[status]");
|
||||
expect(allStatusValues).toHaveLength(1);
|
||||
expect(allStatusValues[0]).toBe("FAIL");
|
||||
});
|
||||
|
||||
it("should not have duplicate filter[status] params when caller passes filter[status]", async () => {
|
||||
it("should preserve caller filter[status] when explicitly provided", async () => {
|
||||
// Given
|
||||
const checkId = "s3_bucket_public_access";
|
||||
const filters = { "filter[status]": "PASS" };
|
||||
@@ -337,14 +317,56 @@ describe("getFindingGroupResources — Blocker: caller filter[status] is always
|
||||
// When
|
||||
await getFindingGroupResources({ checkId, filters });
|
||||
|
||||
// Then — no duplicates
|
||||
// Then
|
||||
const calledUrl = fetchMock.mock.calls[0][0] as string;
|
||||
const url = new URL(calledUrl);
|
||||
expect(url.searchParams.getAll("filter[status]")).toHaveLength(1);
|
||||
const allStatusValues = url.searchParams.getAll("filter[status]");
|
||||
expect(allStatusValues).toHaveLength(1);
|
||||
expect(allStatusValues[0]).toBe("PASS");
|
||||
});
|
||||
|
||||
it("should translate a single group status__in filter into filter[status] for resources", async () => {
|
||||
// Given
|
||||
const checkId = "s3_bucket_public_access";
|
||||
const filters = {
|
||||
"filter[status__in]": "PASS",
|
||||
"filter[severity__in]": "medium",
|
||||
"filter[provider_type__in]": "aws",
|
||||
};
|
||||
|
||||
// When
|
||||
await getFindingGroupResources({ checkId, filters });
|
||||
|
||||
// Then
|
||||
const calledUrl = fetchMock.mock.calls[0][0] as string;
|
||||
const url = new URL(calledUrl);
|
||||
expect(url.searchParams.get("filter[status]")).toBe("PASS");
|
||||
expect(url.searchParams.get("filter[status__in]")).toBeNull();
|
||||
expect(url.searchParams.get("filter[severity__in]")).toBe("medium");
|
||||
expect(url.searchParams.get("filter[provider_type__in]")).toBe("aws");
|
||||
});
|
||||
|
||||
it("should keep the composite sort when the resource search filter is applied", async () => {
|
||||
// Given
|
||||
const checkId = "s3_bucket_public_access";
|
||||
const filters = {
|
||||
"filter[name__icontains]": "bucket-prod",
|
||||
"filter[severity__in]": "high",
|
||||
};
|
||||
|
||||
// When
|
||||
await getFindingGroupResources({ checkId, filters });
|
||||
|
||||
// Then
|
||||
const calledUrl = fetchMock.mock.calls[0][0] as string;
|
||||
const url = new URL(calledUrl);
|
||||
expect(url.searchParams.get("sort")).toBe("-severity,-delta,-last_seen_at");
|
||||
expect(url.searchParams.get("filter[name__icontains]")).toBe("bucket-prod");
|
||||
expect(url.searchParams.get("filter[severity__in]")).toBe("high");
|
||||
});
|
||||
});
|
||||
|
||||
describe("getLatestFindingGroupResources — Blocker: caller filter[status] is always overridden to FAIL", () => {
|
||||
describe("getLatestFindingGroupResources — caller filters are preserved", () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
vi.stubGlobal("fetch", fetchMock);
|
||||
@@ -353,23 +375,7 @@ describe("getLatestFindingGroupResources — Blocker: caller filter[status] is a
|
||||
fetchMock.mockResolvedValue(new Response("", { status: 200 }));
|
||||
});
|
||||
|
||||
it("should use filter[status]=FAIL even when caller passes filter[status]=PASS", async () => {
|
||||
// Given — caller explicitly passes PASS, which must be ignored
|
||||
const checkId = "iam_user_mfa_enabled";
|
||||
const filters = { "filter[status]": "PASS" };
|
||||
|
||||
// When
|
||||
await getLatestFindingGroupResources({ checkId, filters });
|
||||
|
||||
// Then — the final URL must have exactly one filter[status]=FAIL, not PASS
|
||||
const calledUrl = fetchMock.mock.calls[0][0] as string;
|
||||
const url = new URL(calledUrl);
|
||||
const allStatusValues = url.searchParams.getAll("filter[status]");
|
||||
expect(allStatusValues).toHaveLength(1);
|
||||
expect(allStatusValues[0]).toBe("FAIL");
|
||||
});
|
||||
|
||||
it("should not have duplicate filter[status] params when caller passes filter[status]", async () => {
|
||||
it("should preserve caller filter[status] when explicitly provided", async () => {
|
||||
// Given
|
||||
const checkId = "iam_user_mfa_enabled";
|
||||
const filters = { "filter[status]": "PASS" };
|
||||
@@ -377,9 +383,53 @@ describe("getLatestFindingGroupResources — Blocker: caller filter[status] is a
|
||||
// When
|
||||
await getLatestFindingGroupResources({ checkId, filters });
|
||||
|
||||
// Then — no duplicates
|
||||
// Then
|
||||
const calledUrl = fetchMock.mock.calls[0][0] as string;
|
||||
const url = new URL(calledUrl);
|
||||
expect(url.searchParams.getAll("filter[status]")).toHaveLength(1);
|
||||
const allStatusValues = url.searchParams.getAll("filter[status]");
|
||||
expect(allStatusValues).toHaveLength(1);
|
||||
expect(allStatusValues[0]).toBe("PASS");
|
||||
});
|
||||
|
||||
it("should translate a single group status__in filter into filter[status] for latest resources", async () => {
|
||||
// Given
|
||||
const checkId = "iam_user_mfa_enabled";
|
||||
const filters = {
|
||||
"filter[status__in]": "PASS",
|
||||
"filter[severity__in]": "low",
|
||||
"filter[provider_type__in]": "aws",
|
||||
};
|
||||
|
||||
// When
|
||||
await getLatestFindingGroupResources({ checkId, filters });
|
||||
|
||||
// Then
|
||||
const calledUrl = fetchMock.mock.calls[0][0] as string;
|
||||
const url = new URL(calledUrl);
|
||||
expect(url.searchParams.get("filter[status]")).toBe("PASS");
|
||||
expect(url.searchParams.get("filter[status__in]")).toBeNull();
|
||||
expect(url.searchParams.get("filter[severity__in]")).toBe("low");
|
||||
expect(url.searchParams.get("filter[provider_type__in]")).toBe("aws");
|
||||
});
|
||||
|
||||
it("should keep the composite sort when the resource search filter is applied", async () => {
|
||||
// Given
|
||||
const checkId = "iam_user_mfa_enabled";
|
||||
const filters = {
|
||||
"filter[name__icontains]": "instance-prod",
|
||||
"filter[status__in]": "PASS,FAIL",
|
||||
};
|
||||
|
||||
// When
|
||||
await getLatestFindingGroupResources({ checkId, filters });
|
||||
|
||||
// Then
|
||||
const calledUrl = fetchMock.mock.calls[0][0] as string;
|
||||
const url = new URL(calledUrl);
|
||||
expect(url.searchParams.get("sort")).toBe("-severity,-delta,-last_seen_at");
|
||||
expect(url.searchParams.get("filter[name__icontains]")).toBe(
|
||||
"instance-prod",
|
||||
);
|
||||
expect(url.searchParams.get("filter[status__in]")).toBe("PASS,FAIL");
|
||||
});
|
||||
});
|
||||
|
||||
@@ -23,17 +23,68 @@ function mapSearchFilter(
|
||||
return mapped;
|
||||
}
|
||||
|
||||
export const getFindingGroups = async ({
|
||||
page = 1,
|
||||
pageSize = 10,
|
||||
sort = "",
|
||||
filters = {},
|
||||
}) => {
|
||||
function splitCsvFilterValues(value: string | string[] | undefined): string[] {
|
||||
if (Array.isArray(value)) {
|
||||
return value
|
||||
.flatMap((item) => item.split(","))
|
||||
.map((item) => item.trim())
|
||||
.filter(Boolean);
|
||||
}
|
||||
|
||||
if (typeof value === "string") {
|
||||
return value
|
||||
.split(",")
|
||||
.map((item) => item.trim())
|
||||
.filter(Boolean);
|
||||
}
|
||||
|
||||
return [];
|
||||
}
|
||||
|
||||
function normalizeFindingGroupResourceFilters(
|
||||
filters: Record<string, string | string[] | undefined>,
|
||||
): Record<string, string | string[] | undefined> {
|
||||
const normalized = { ...filters };
|
||||
const exactStatusFilter = normalized["filter[status]"];
|
||||
|
||||
if (exactStatusFilter !== undefined) {
|
||||
delete normalized["filter[status__in]"];
|
||||
return normalized;
|
||||
}
|
||||
|
||||
const statusValues = splitCsvFilterValues(normalized["filter[status__in]"]);
|
||||
if (statusValues.length === 1) {
|
||||
normalized["filter[status]"] = statusValues[0];
|
||||
delete normalized["filter[status__in]"];
|
||||
}
|
||||
|
||||
return normalized;
|
||||
}
|
||||
|
||||
const DEFAULT_FINDING_GROUPS_SORT =
|
||||
"-severity,-delta,-fail_count,-last_seen_at";
|
||||
|
||||
interface FetchFindingGroupsParams {
|
||||
page?: number;
|
||||
pageSize?: number;
|
||||
sort?: string;
|
||||
filters?: Record<string, string | string[] | undefined>;
|
||||
}
|
||||
|
||||
async function fetchFindingGroupsEndpoint(
|
||||
endpoint: string,
|
||||
{
|
||||
page = 1,
|
||||
pageSize = 10,
|
||||
sort = DEFAULT_FINDING_GROUPS_SORT,
|
||||
filters = {},
|
||||
}: FetchFindingGroupsParams,
|
||||
) {
|
||||
const headers = await getAuthHeaders({ contentType: false });
|
||||
|
||||
if (isNaN(Number(page)) || page < 1) redirect("/findings");
|
||||
|
||||
const url = new URL(`${apiBaseUrl}/finding-groups`);
|
||||
const url = new URL(`${apiBaseUrl}/${endpoint}`);
|
||||
|
||||
if (page) url.searchParams.append("page[number]", page.toString());
|
||||
if (pageSize) url.searchParams.append("page[size]", pageSize.toString());
|
||||
@@ -45,120 +96,60 @@ export const getFindingGroups = async ({
|
||||
const response = await fetch(url.toString(), { headers });
|
||||
return handleApiResponse(response);
|
||||
} catch (error) {
|
||||
console.error("Error fetching finding groups:", error);
|
||||
console.error(`Error fetching ${endpoint}:`, error);
|
||||
return undefined;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
export const getLatestFindingGroups = async ({
|
||||
page = 1,
|
||||
pageSize = 10,
|
||||
sort = "",
|
||||
filters = {},
|
||||
}) => {
|
||||
export const getFindingGroups = async (params: FetchFindingGroupsParams = {}) =>
|
||||
fetchFindingGroupsEndpoint("finding-groups", params);
|
||||
|
||||
export const getLatestFindingGroups = async (
|
||||
params: FetchFindingGroupsParams = {},
|
||||
) => fetchFindingGroupsEndpoint("finding-groups/latest", params);
|
||||
|
||||
interface FetchFindingGroupResourcesParams {
|
||||
checkId: string;
|
||||
page?: number;
|
||||
pageSize?: number;
|
||||
filters?: Record<string, string | string[] | undefined>;
|
||||
}
|
||||
|
||||
async function fetchFindingGroupResourcesEndpoint(
|
||||
endpointPrefix: string,
|
||||
{
|
||||
checkId,
|
||||
page = 1,
|
||||
pageSize = 20,
|
||||
filters = {},
|
||||
}: FetchFindingGroupResourcesParams,
|
||||
) {
|
||||
const headers = await getAuthHeaders({ contentType: false });
|
||||
const normalizedFilters = normalizeFindingGroupResourceFilters(filters);
|
||||
|
||||
if (isNaN(Number(page)) || page < 1) redirect("/findings");
|
||||
|
||||
const url = new URL(`${apiBaseUrl}/finding-groups/latest`);
|
||||
const url = new URL(
|
||||
`${apiBaseUrl}/${endpointPrefix}/${encodeURIComponent(checkId)}/resources`,
|
||||
);
|
||||
|
||||
if (page) url.searchParams.append("page[number]", page.toString());
|
||||
if (pageSize) url.searchParams.append("page[size]", pageSize.toString());
|
||||
if (sort) url.searchParams.append("sort", sort);
|
||||
url.searchParams.append("sort", "-severity,-delta,-last_seen_at");
|
||||
|
||||
appendSanitizedProviderFilters(url, mapSearchFilter(filters));
|
||||
appendSanitizedProviderFilters(url, normalizedFilters);
|
||||
|
||||
try {
|
||||
const response = await fetch(url.toString(), { headers });
|
||||
return handleApiResponse(response);
|
||||
} catch (error) {
|
||||
console.error("Error fetching latest finding groups:", error);
|
||||
console.error(`Error fetching ${endpointPrefix} resources:`, error);
|
||||
return undefined;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
export const getFindingGroupResources = async ({
|
||||
checkId,
|
||||
page = 1,
|
||||
pageSize = 20,
|
||||
filters = {},
|
||||
}: {
|
||||
checkId: string;
|
||||
page?: number;
|
||||
pageSize?: number;
|
||||
filters?: Record<string, string | string[] | undefined>;
|
||||
}) => {
|
||||
const headers = await getAuthHeaders({ contentType: false });
|
||||
export const getFindingGroupResources = async (
|
||||
params: FetchFindingGroupResourcesParams,
|
||||
) => fetchFindingGroupResourcesEndpoint("finding-groups", params);
|
||||
|
||||
const url = new URL(
|
||||
`${apiBaseUrl}/finding-groups/${encodeURIComponent(checkId)}/resources`,
|
||||
);
|
||||
|
||||
if (page) url.searchParams.append("page[number]", page.toString());
|
||||
if (pageSize) url.searchParams.append("page[size]", pageSize.toString());
|
||||
// sort=-status is kept for future-proofing: if the filter[status]=FAIL
|
||||
// constraint is ever relaxed to allow multiple statuses, the sort ensures
|
||||
// FAIL resources still appear first in the result set.
|
||||
url.searchParams.append("sort", "-status");
|
||||
|
||||
appendSanitizedProviderFilters(url, filters);
|
||||
|
||||
// Use .set() AFTER appendSanitizedProviderFilters so our hardcoded FAIL
|
||||
// always wins, even if the caller passed a different filter[status] value.
|
||||
// Using .set() instead of .append() prevents duplicate filter[status] params.
|
||||
url.searchParams.set("filter[status]", "FAIL");
|
||||
|
||||
try {
|
||||
const response = await fetch(url.toString(), {
|
||||
headers,
|
||||
});
|
||||
|
||||
return handleApiResponse(response);
|
||||
} catch (error) {
|
||||
console.error("Error fetching finding group resources:", error);
|
||||
return undefined;
|
||||
}
|
||||
};
|
||||
|
||||
export const getLatestFindingGroupResources = async ({
|
||||
checkId,
|
||||
page = 1,
|
||||
pageSize = 20,
|
||||
filters = {},
|
||||
}: {
|
||||
checkId: string;
|
||||
page?: number;
|
||||
pageSize?: number;
|
||||
filters?: Record<string, string | string[] | undefined>;
|
||||
}) => {
|
||||
const headers = await getAuthHeaders({ contentType: false });
|
||||
|
||||
const url = new URL(
|
||||
`${apiBaseUrl}/finding-groups/latest/${encodeURIComponent(checkId)}/resources`,
|
||||
);
|
||||
|
||||
if (page) url.searchParams.append("page[number]", page.toString());
|
||||
if (pageSize) url.searchParams.append("page[size]", pageSize.toString());
|
||||
// sort=-status is kept for future-proofing: if the filter[status]=FAIL
|
||||
// constraint is ever relaxed to allow multiple statuses, the sort ensures
|
||||
// FAIL resources still appear first in the result set.
|
||||
url.searchParams.append("sort", "-status");
|
||||
|
||||
appendSanitizedProviderFilters(url, filters);
|
||||
|
||||
// Use .set() AFTER appendSanitizedProviderFilters so our hardcoded FAIL
|
||||
// always wins, even if the caller passed a different filter[status] value.
|
||||
// Using .set() instead of .append() prevents duplicate filter[status] params.
|
||||
url.searchParams.set("filter[status]", "FAIL");
|
||||
|
||||
try {
|
||||
const response = await fetch(url.toString(), {
|
||||
headers,
|
||||
});
|
||||
|
||||
return handleApiResponse(response);
|
||||
} catch (error) {
|
||||
console.error("Error fetching latest finding group resources:", error);
|
||||
return undefined;
|
||||
}
|
||||
};
|
||||
export const getLatestFindingGroupResources = async (
|
||||
params: FetchFindingGroupResourcesParams,
|
||||
) => fetchFindingGroupResourcesEndpoint("finding-groups/latest", params);
|
||||
|
||||
@@ -379,6 +379,9 @@ export const getLatestFindingsByResourceUid = async ({
|
||||
);
|
||||
|
||||
url.searchParams.append("filter[resource_uid]", resourceUid);
|
||||
url.searchParams.append("filter[status]", "FAIL");
|
||||
url.searchParams.append("filter[muted]", "include");
|
||||
url.searchParams.append("sort", "-severity,status,-updated_at");
|
||||
if (page) url.searchParams.append("page[number]", page.toString());
|
||||
if (pageSize) url.searchParams.append("page[size]", pageSize.toString());
|
||||
|
||||
|
||||
@@ -2,10 +2,13 @@
|
||||
|
||||
import { revalidatePath } from "next/cache";
|
||||
import { redirect } from "next/navigation";
|
||||
import { z } from "zod";
|
||||
|
||||
import { apiBaseUrl, getAuthHeaders } from "@/lib";
|
||||
import { handleApiError, handleApiResponse } from "@/lib/server-actions-helper";
|
||||
|
||||
const invitationTokenSchema = z.string().min(1).max(500);
|
||||
|
||||
export const getInvitations = async ({
|
||||
page = 1,
|
||||
query = "",
|
||||
@@ -195,3 +198,35 @@ export const revokeInvite = async (formData: FormData) => {
|
||||
handleApiError(error);
|
||||
}
|
||||
};
|
||||
|
||||
export const acceptInvitation = async (token: string) => {
|
||||
const parsed = invitationTokenSchema.safeParse(token);
|
||||
if (!parsed.success) {
|
||||
return { error: "Invalid invitation token" };
|
||||
}
|
||||
|
||||
const headers = await getAuthHeaders({ contentType: true });
|
||||
|
||||
const url = new URL(`${apiBaseUrl}/invitations/accept`);
|
||||
|
||||
const body = JSON.stringify({
|
||||
data: {
|
||||
type: "invitations",
|
||||
attributes: {
|
||||
invitation_token: parsed.data,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
try {
|
||||
const response = await fetch(url.toString(), {
|
||||
method: "POST",
|
||||
headers,
|
||||
body,
|
||||
});
|
||||
|
||||
return handleApiResponse(response);
|
||||
} catch (error) {
|
||||
return handleApiError(error);
|
||||
}
|
||||
};
|
||||
|
||||
18
ui/app/(auth)/(guest-only)/layout.tsx
Normal file
18
ui/app/(auth)/(guest-only)/layout.tsx
Normal file
@@ -0,0 +1,18 @@
|
||||
import { redirect } from "next/navigation";
|
||||
import { ReactNode } from "react";
|
||||
|
||||
import { auth } from "@/auth.config";
|
||||
|
||||
export default async function GuestOnlyLayout({
|
||||
children,
|
||||
}: {
|
||||
children: ReactNode;
|
||||
}) {
|
||||
const session = await auth();
|
||||
|
||||
if (session?.user) {
|
||||
redirect("/");
|
||||
}
|
||||
|
||||
return <>{children}</>;
|
||||
}
|
||||
219
ui/app/(auth)/invitation/accept/accept-invitation-client.tsx
Normal file
219
ui/app/(auth)/invitation/accept/accept-invitation-client.tsx
Normal file
@@ -0,0 +1,219 @@
|
||||
"use client";
|
||||
|
||||
import { Icon } from "@iconify/react";
|
||||
import Link from "next/link";
|
||||
import { useRouter } from "next/navigation";
|
||||
import { signOut } from "next-auth/react";
|
||||
import { useEffect, useRef, useState } from "react";
|
||||
|
||||
import { acceptInvitation } from "@/actions/invitations";
|
||||
import { Button } from "@/components/shadcn";
|
||||
import {
|
||||
INVITATION_ACTION_PARAM,
|
||||
INVITATION_SIGNUP_ACTION,
|
||||
} from "@/lib/invitation-routing";
|
||||
|
||||
type AcceptState =
|
||||
| { kind: "no-token" }
|
||||
| { kind: "accepting" }
|
||||
| { kind: "error"; message: string; canRetry: boolean; needsSignOut: boolean }
|
||||
| { kind: "choose" };
|
||||
|
||||
function mapApiError(status: number | undefined): {
|
||||
message: string;
|
||||
canRetry: boolean;
|
||||
needsSignOut: boolean;
|
||||
} {
|
||||
switch (status) {
|
||||
case 410:
|
||||
return {
|
||||
message:
|
||||
"This invitation has expired. Please contact your administrator for a new one.",
|
||||
canRetry: false,
|
||||
needsSignOut: false,
|
||||
};
|
||||
case 400:
|
||||
return {
|
||||
message: "This invitation has already been used.",
|
||||
canRetry: false,
|
||||
needsSignOut: false,
|
||||
};
|
||||
case 404:
|
||||
return {
|
||||
message:
|
||||
"This invitation was sent to a different email address. Please sign in with the correct account.",
|
||||
canRetry: false,
|
||||
needsSignOut: true,
|
||||
};
|
||||
default:
|
||||
return {
|
||||
message: "Something went wrong while accepting the invitation.",
|
||||
canRetry: true,
|
||||
needsSignOut: false,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export function AcceptInvitationClient({
|
||||
isAuthenticated,
|
||||
token,
|
||||
}: {
|
||||
isAuthenticated: boolean;
|
||||
token: string | null;
|
||||
}) {
|
||||
const router = useRouter();
|
||||
const [state, setState] = useState<AcceptState>(() => {
|
||||
if (!token) return { kind: "no-token" };
|
||||
if (!isAuthenticated) return { kind: "choose" };
|
||||
return { kind: "accepting" };
|
||||
});
|
||||
const hasStartedRef = useRef(false);
|
||||
|
||||
async function doAccept() {
|
||||
if (!token) return;
|
||||
setState({ kind: "accepting" });
|
||||
|
||||
const result = await acceptInvitation(token);
|
||||
|
||||
if (result?.error) {
|
||||
const { message, canRetry, needsSignOut } = mapApiError(result.status);
|
||||
setState({ kind: "error", message, canRetry, needsSignOut });
|
||||
} else {
|
||||
router.push("/");
|
||||
}
|
||||
}
|
||||
|
||||
async function handleSignOutAndRedirect() {
|
||||
if (!token) return;
|
||||
const callbackPath = `/invitation/accept?invitation_token=${encodeURIComponent(token)}`;
|
||||
await signOut({ redirect: false });
|
||||
router.push(`/sign-in?callbackUrl=${encodeURIComponent(callbackPath)}`);
|
||||
}
|
||||
|
||||
useEffect(() => {
|
||||
if (hasStartedRef.current) return;
|
||||
hasStartedRef.current = true;
|
||||
|
||||
if (!token) {
|
||||
setState({ kind: "no-token" });
|
||||
return;
|
||||
}
|
||||
|
||||
if (isAuthenticated) {
|
||||
doAccept();
|
||||
} else {
|
||||
setState({ kind: "choose" });
|
||||
}
|
||||
}, [token, isAuthenticated]); // eslint-disable-line react-hooks/exhaustive-deps
|
||||
|
||||
return (
|
||||
<div className="flex min-h-screen items-center justify-center p-4">
|
||||
<div className="w-full max-w-md space-y-6 text-center">
|
||||
{/* No token */}
|
||||
{state.kind === "no-token" && (
|
||||
<div className="flex flex-col items-center gap-4">
|
||||
<Icon
|
||||
icon="solar:danger-triangle-bold"
|
||||
className="text-warning"
|
||||
width={48}
|
||||
/>
|
||||
<h1 className="text-xl font-semibold">Invalid Invitation Link</h1>
|
||||
<p className="text-default-500">
|
||||
No invitation token was provided. Please check the link you
|
||||
received.
|
||||
</p>
|
||||
<Button asChild variant="outline">
|
||||
<Link href="/sign-in">Go to Sign In</Link>
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Accepting */}
|
||||
{state.kind === "accepting" && (
|
||||
<div className="flex flex-col items-center gap-4">
|
||||
<Icon
|
||||
icon="eos-icons:loading"
|
||||
className="text-default-500"
|
||||
width={48}
|
||||
/>
|
||||
<h1 className="text-xl font-semibold">Accepting Invitation...</h1>
|
||||
<p className="text-default-500">
|
||||
Please wait while we process your invitation.
|
||||
</p>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Error */}
|
||||
{state.kind === "error" && (
|
||||
<div className="flex flex-col items-center gap-4">
|
||||
<Icon
|
||||
icon="solar:danger-triangle-bold"
|
||||
className="text-danger"
|
||||
width={48}
|
||||
/>
|
||||
<h1 className="text-xl font-semibold">
|
||||
Could Not Accept Invitation
|
||||
</h1>
|
||||
<p className="text-default-500">{state.message}</p>
|
||||
<div className="flex gap-3">
|
||||
{state.canRetry && <Button onClick={doAccept}>Retry</Button>}
|
||||
{state.needsSignOut ? (
|
||||
<Button variant="outline" onClick={handleSignOutAndRedirect}>
|
||||
Sign in with a different account
|
||||
</Button>
|
||||
) : (
|
||||
<Button asChild variant="outline">
|
||||
<Link href="/sign-in">Go to Sign In</Link>
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Choice page for unauthenticated users */}
|
||||
{state.kind === "choose" && (
|
||||
<div className="flex flex-col items-center gap-6">
|
||||
<Icon
|
||||
icon="solar:letter-bold"
|
||||
className="text-primary"
|
||||
width={48}
|
||||
/>
|
||||
<div>
|
||||
<h1 className="text-xl font-semibold">
|
||||
You've Been Invited
|
||||
</h1>
|
||||
<p className="text-default-500 mt-2">
|
||||
You've been invited to join a tenant on Prowler. How would
|
||||
you like to continue?
|
||||
</p>
|
||||
</div>
|
||||
<div className="flex w-full flex-col gap-3">
|
||||
<Button
|
||||
className="w-full"
|
||||
onClick={() => {
|
||||
const callbackPath = `/invitation/accept?invitation_token=${encodeURIComponent(token!)}`;
|
||||
router.push(
|
||||
`/sign-in?callbackUrl=${encodeURIComponent(callbackPath)}`,
|
||||
);
|
||||
}}
|
||||
>
|
||||
I have an account — Sign in
|
||||
</Button>
|
||||
<Button
|
||||
variant="outline"
|
||||
className="w-full"
|
||||
onClick={() => {
|
||||
router.push(
|
||||
`/sign-up?invitation_token=${encodeURIComponent(token!)}&${INVITATION_ACTION_PARAM}=${INVITATION_SIGNUP_ACTION}`,
|
||||
);
|
||||
}}
|
||||
>
|
||||
I'm new — Create an account
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
22
ui/app/(auth)/invitation/accept/page.tsx
Normal file
22
ui/app/(auth)/invitation/accept/page.tsx
Normal file
@@ -0,0 +1,22 @@
|
||||
import { auth } from "@/auth.config";
|
||||
import { SearchParamsProps } from "@/types";
|
||||
|
||||
import { AcceptInvitationClient } from "./accept-invitation-client";
|
||||
|
||||
export default async function AcceptInvitationPage({
|
||||
searchParams,
|
||||
}: {
|
||||
searchParams: Promise<SearchParamsProps>;
|
||||
}) {
|
||||
const session = await auth();
|
||||
const resolvedSearchParams = await searchParams;
|
||||
|
||||
const token =
|
||||
typeof resolvedSearchParams?.invitation_token === "string"
|
||||
? resolvedSearchParams.invitation_token
|
||||
: null;
|
||||
|
||||
return (
|
||||
<AcceptInvitationClient isAuthenticated={!!session?.user} token={token} />
|
||||
);
|
||||
}
|
||||
@@ -2,10 +2,8 @@ import "@/styles/globals.css";
|
||||
|
||||
import { GoogleTagManager } from "@next/third-parties/google";
|
||||
import { Metadata, Viewport } from "next";
|
||||
import { redirect } from "next/navigation";
|
||||
import { ReactNode } from "react";
|
||||
import { ReactNode, Suspense } from "react";
|
||||
|
||||
import { auth } from "@/auth.config";
|
||||
import { NavigationProgress, Toaster } from "@/components/ui";
|
||||
import { fontSans } from "@/config/fonts";
|
||||
import { siteConfig } from "@/config/site";
|
||||
@@ -31,17 +29,7 @@ export const viewport: Viewport = {
|
||||
],
|
||||
};
|
||||
|
||||
export default async function RootLayout({
|
||||
children,
|
||||
}: {
|
||||
children: ReactNode;
|
||||
}) {
|
||||
const session = await auth();
|
||||
|
||||
if (session?.user) {
|
||||
redirect("/");
|
||||
}
|
||||
|
||||
export default function AuthLayout({ children }: { children: ReactNode }) {
|
||||
return (
|
||||
<html suppressHydrationWarning lang="en">
|
||||
<head />
|
||||
@@ -53,7 +41,9 @@ export default async function RootLayout({
|
||||
)}
|
||||
>
|
||||
<Providers themeProps={{ attribute: "class", defaultTheme: "dark" }}>
|
||||
<NavigationProgress />
|
||||
<Suspense>
|
||||
<NavigationProgress />
|
||||
</Suspense>
|
||||
{children}
|
||||
<Toaster />
|
||||
<GoogleTagManager
|
||||
|
||||
37
ui/app/(prowler)/findings/page.test.ts
Normal file
37
ui/app/(prowler)/findings/page.test.ts
Normal file
@@ -0,0 +1,37 @@
|
||||
import { readFileSync } from "node:fs";
|
||||
import path from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
|
||||
import { describe, expect, it } from "vitest";
|
||||
|
||||
/**
 * Source-level assertions for the findings page.
 *
 * Directly importing page.tsx triggers deep transitive imports
 * (next-auth → next/server) that vitest cannot resolve without the
 * full Next.js build pipeline. These tests verify key architectural
 * invariants via source analysis instead.
 *
 * NOTE(review): these are string-containment checks against page.tsx,
 * so they will break on refactors that rename the asserted identifiers
 * even when runtime behavior is preserved — that is intentional: each
 * string pins a specific fix in the page implementation.
 */
describe("findings page", () => {
  // Resolve page.tsx relative to this test file so the suite is independent
  // of the process working directory.
  const currentDir = path.dirname(fileURLToPath(import.meta.url));
  const pagePath = path.join(currentDir, "page.tsx");
  const source = readFileSync(pagePath, "utf8");

  it("only passes sort to fetchFindingGroups when the user has an explicit sort param", () => {
    // Conditional spread: omit the `sort` key entirely when no sort is set.
    expect(source).toContain("...(encodedSort && { sort: encodedSort })");
  });

  it("normalizes scan filters with the required inserted_at params before fetching historical finding groups", () => {
    expect(source).toContain("resolveFindingScanDateFilters");
  });

  it("uses getLatestFindingGroups for non-date/scan queries and getFindingGroups for historical", () => {
    expect(source).toContain("hasDateOrScan");
    expect(source).toContain("getFindingGroups");
    expect(source).toContain("getLatestFindingGroups");
  });

  it("guards errors array access with a length check", () => {
    expect(source).toContain("errors?.length > 0");
  });
});
|
||||
@@ -7,7 +7,7 @@ import {
|
||||
} from "@/actions/finding-groups";
|
||||
import { getLatestMetadataInfo, getMetadataInfo } from "@/actions/findings";
|
||||
import { getProviders } from "@/actions/providers";
|
||||
import { getScans } from "@/actions/scans";
|
||||
import { getScan, getScans } from "@/actions/scans";
|
||||
import { FindingsFilters } from "@/components/findings/findings-filters";
|
||||
import {
|
||||
FindingsGroupTable,
|
||||
@@ -21,6 +21,7 @@ import {
|
||||
extractSortAndKey,
|
||||
hasDateOrScanFilter,
|
||||
} from "@/lib";
|
||||
import { resolveFindingScanDateFilters } from "@/lib/findings-scan-filters";
|
||||
import { ScanEntity, ScanProps } from "@/types";
|
||||
import { SearchParamsProps } from "@/types/components";
|
||||
|
||||
@@ -39,16 +40,28 @@ export default async function Findings({
|
||||
// TODO: Re-implement deep link support (/findings?id=<uuid>) using the grouped view's resource detail drawer
|
||||
// once the legacy FindingDetailsSheet is fully deprecated (still used by /resources and overview dashboard).
|
||||
|
||||
const [metadataInfoData, providersData, scansData] = await Promise.all([
|
||||
(hasDateOrScan ? getMetadataInfo : getLatestMetadataInfo)({
|
||||
query,
|
||||
sort: encodedSort,
|
||||
filters,
|
||||
}),
|
||||
const [providersData, scansData] = await Promise.all([
|
||||
getProviders({ pageSize: 50 }),
|
||||
getScans({ pageSize: 50 }),
|
||||
]);
|
||||
|
||||
const filtersWithScanDates = await resolveFindingScanDateFilters({
|
||||
filters,
|
||||
scans: scansData?.data || [],
|
||||
loadScan: async (scanId: string) => {
|
||||
const response = await getScan(scanId);
|
||||
return response?.data;
|
||||
},
|
||||
});
|
||||
|
||||
const metadataInfoData = await (
|
||||
hasDateOrScan ? getMetadataInfo : getLatestMetadataInfo
|
||||
)({
|
||||
query,
|
||||
sort: encodedSort,
|
||||
filters: filtersWithScanDates,
|
||||
});
|
||||
|
||||
// Extract unique regions, services, categories, groups from the new endpoint
|
||||
const uniqueRegions = metadataInfoData?.data?.attributes?.regions || [];
|
||||
const uniqueServices = metadataInfoData?.data?.attributes?.services || [];
|
||||
@@ -88,7 +101,10 @@ export default async function Findings({
|
||||
/>
|
||||
</div>
|
||||
<Suspense fallback={<SkeletonTableFindings />}>
|
||||
<SSRDataTable searchParams={resolvedSearchParams} />
|
||||
<SSRDataTable
|
||||
searchParams={resolvedSearchParams}
|
||||
filters={filtersWithScanDates}
|
||||
/>
|
||||
</Suspense>
|
||||
</FilterTransitionWrapper>
|
||||
</ContentLayout>
|
||||
@@ -97,19 +113,15 @@ export default async function Findings({
|
||||
|
||||
const SSRDataTable = async ({
|
||||
searchParams,
|
||||
filters,
|
||||
}: {
|
||||
searchParams: SearchParamsProps;
|
||||
filters: Record<string, string>;
|
||||
}) => {
|
||||
const page = parseInt(searchParams.page?.toString() || "1", 10);
|
||||
const pageSize = parseInt(searchParams.pageSize?.toString() || "10", 10);
|
||||
const defaultSort = "-severity,-fail_count,-last_seen_at";
|
||||
|
||||
const { encodedSort } = extractSortAndKey({
|
||||
...searchParams,
|
||||
sort: searchParams.sort ?? defaultSort,
|
||||
});
|
||||
|
||||
const { filters } = extractFiltersAndQuery(searchParams);
|
||||
const { encodedSort } = extractSortAndKey(searchParams);
|
||||
// Check if the searchParams contain any date or scan filter
|
||||
const hasDateOrScan = hasDateOrScanFilter(searchParams);
|
||||
|
||||
@@ -119,7 +131,7 @@ const SSRDataTable = async ({
|
||||
|
||||
const findingGroupsData = await fetchFindingGroups({
|
||||
page,
|
||||
sort: encodedSort,
|
||||
...(encodedSort && { sort: encodedSort }),
|
||||
filters,
|
||||
pageSize,
|
||||
});
|
||||
@@ -131,7 +143,7 @@ const SSRDataTable = async ({
|
||||
|
||||
return (
|
||||
<>
|
||||
{findingGroupsData?.errors && (
|
||||
{findingGroupsData?.errors?.length > 0 && (
|
||||
<div className="text-small mb-4 flex rounded-lg border border-red-500 bg-red-100 p-2 text-red-700">
|
||||
<p className="mr-2 font-semibold">Error:</p>
|
||||
<p>{findingGroupsData.errors[0].detail}</p>
|
||||
|
||||
@@ -77,11 +77,7 @@ const SSRDataUser = async ({
|
||||
{},
|
||||
);
|
||||
|
||||
const firstUserMembership = membershipsIncluded.find(
|
||||
(m) => m.relationships?.user?.data?.id === userData.id,
|
||||
);
|
||||
|
||||
const userTenantId = firstUserMembership?.relationships?.tenant?.data?.id;
|
||||
const userTenantId = session?.tenantId;
|
||||
|
||||
const userRoleIds =
|
||||
userData.relationships?.roles?.data?.map((r) => r.id) || [];
|
||||
|
||||
@@ -18,7 +18,12 @@ import {
|
||||
createProviderDetailsMapping,
|
||||
extractProviderUIDs,
|
||||
} from "@/lib/provider-helpers";
|
||||
import { ProviderProps, ScanProps, SearchParamsProps } from "@/types";
|
||||
import {
|
||||
ExpandedScanData,
|
||||
ProviderProps,
|
||||
ScanProps,
|
||||
SearchParamsProps,
|
||||
} from "@/types";
|
||||
|
||||
export default async function Scans({
|
||||
searchParams,
|
||||
@@ -30,7 +35,34 @@ export default async function Scans({
|
||||
const filteredParams = { ...resolvedSearchParams };
|
||||
delete filteredParams.scanId;
|
||||
|
||||
const providersData = await getAllProviders();
|
||||
const [providersData, completedScansData] = await Promise.all([
|
||||
getAllProviders(),
|
||||
getScans({
|
||||
filters: { "filter[state]": "completed" },
|
||||
pageSize: 50,
|
||||
fields: { scans: "name,completed_at,provider" },
|
||||
include: "provider",
|
||||
}),
|
||||
]);
|
||||
|
||||
const completedScans: ExpandedScanData[] = (completedScansData?.data ?? [])
|
||||
.map((scan: ScanProps) => {
|
||||
const providerId = scan.relationships?.provider?.data?.id;
|
||||
const providerData = completedScansData?.included?.find(
|
||||
(item: { type: string; id: string }) =>
|
||||
item.type === "providers" && item.id === providerId,
|
||||
);
|
||||
if (!providerData) return null;
|
||||
return {
|
||||
...scan,
|
||||
providerInfo: {
|
||||
provider: providerData.attributes.provider,
|
||||
uid: providerData.attributes.uid,
|
||||
alias: providerData.attributes.alias,
|
||||
},
|
||||
};
|
||||
})
|
||||
.filter(Boolean) as ExpandedScanData[];
|
||||
|
||||
const providerInfo =
|
||||
providersData?.data
|
||||
@@ -90,6 +122,7 @@ export default async function Scans({
|
||||
<ScansFilters
|
||||
providerUIDs={providerUIDs}
|
||||
providerDetails={providerDetails}
|
||||
completedScans={completedScans}
|
||||
/>
|
||||
<div className="flex items-center justify-end">
|
||||
<MutedFindingsConfigButton />
|
||||
|
||||
@@ -281,15 +281,20 @@ export const authConfig = {
|
||||
const sessionError = auth?.error;
|
||||
const isSignUpPage = nextUrl.pathname === "/sign-up";
|
||||
const isSignInPage = nextUrl.pathname === "/sign-in";
|
||||
const isInvitationPage =
|
||||
nextUrl.pathname.startsWith("/invitation/accept");
|
||||
|
||||
// Allow access to sign-up and sign-in pages
|
||||
if (isSignUpPage || isSignInPage) return true;
|
||||
// Allow access to sign-up, sign-in, and invitation pages
|
||||
if (isSignUpPage || isSignInPage || isInvitationPage) return true;
|
||||
|
||||
// For all other routes, require authentication
|
||||
// Return NextResponse.redirect to preserve callbackUrl for post-login redirect
|
||||
if (!isLoggedIn) {
|
||||
const signInUrl = new URL("/sign-in", nextUrl.origin);
|
||||
signInUrl.searchParams.set("callbackUrl", nextUrl.pathname);
|
||||
signInUrl.searchParams.set(
|
||||
"callbackUrl",
|
||||
nextUrl.pathname + nextUrl.search,
|
||||
);
|
||||
// Include session error if present (e.g., RefreshAccessTokenError)
|
||||
if (sessionError) {
|
||||
signInUrl.searchParams.set("error", sessionError);
|
||||
|
||||
@@ -18,11 +18,12 @@ import { Button } from "@/components/shadcn";
|
||||
import { ExpandableSection } from "@/components/ui/expandable-section";
|
||||
import { DataTableFilterCustom } from "@/components/ui/table";
|
||||
import { useFilterBatch } from "@/hooks/use-filter-batch";
|
||||
import { formatLabel, getCategoryLabel, getGroupLabel } from "@/lib/categories";
|
||||
import { FilterType, FINDING_STATUS_DISPLAY_NAMES, ScanEntity } from "@/types";
|
||||
import { getCategoryLabel, getGroupLabel } from "@/lib/categories";
|
||||
import { FilterType, ScanEntity } from "@/types";
|
||||
import { DATA_TABLE_FILTER_MODE, FilterParam } from "@/types/filters";
|
||||
import { getProviderDisplayName, ProviderProps } from "@/types/providers";
|
||||
import { SEVERITY_DISPLAY_NAMES } from "@/types/severities";
|
||||
import { ProviderProps } from "@/types/providers";
|
||||
|
||||
import { getFindingsFilterDisplayValue } from "./findings-filters.utils";
|
||||
|
||||
interface FindingsFiltersProps {
|
||||
/** Provider data for ProviderTypeSelector and AccountsSelector */
|
||||
@@ -58,49 +59,6 @@ const FILTER_KEY_LABELS: Record<FilterParam, string> = {
|
||||
"filter[muted]": "Muted",
|
||||
};
|
||||
|
||||
/**
|
||||
* Formats a raw filter value into a human-readable display string.
|
||||
* - Provider types: uses shared getProviderDisplayName utility
|
||||
* - Severities: uses shared SEVERITY_DISPLAY_NAMES (e.g. "critical" → "Critical")
|
||||
* - Status: uses shared FINDING_STATUS_DISPLAY_NAMES (e.g. "FAIL" → "Fail")
|
||||
* - Categories: uses getCategoryLabel (handles IAM, EC2, IMDSv1, etc.)
|
||||
* - Resource groups: uses getGroupLabel (underscore-delimited)
|
||||
* - Date (filter[inserted_at]): returns the ISO date string as-is (YYYY-MM-DD)
|
||||
* - Other values: uses formatLabel as a generic fallback (avoids naive capitalisation)
|
||||
*/
|
||||
const formatFilterValue = (filterKey: string, value: string): string => {
|
||||
if (!value) return value;
|
||||
if (filterKey === "filter[provider_type__in]") {
|
||||
return getProviderDisplayName(value);
|
||||
}
|
||||
if (filterKey === "filter[severity__in]") {
|
||||
return (
|
||||
SEVERITY_DISPLAY_NAMES[
|
||||
value.toLowerCase() as keyof typeof SEVERITY_DISPLAY_NAMES
|
||||
] ?? formatLabel(value)
|
||||
);
|
||||
}
|
||||
if (filterKey === "filter[status__in]") {
|
||||
return (
|
||||
FINDING_STATUS_DISPLAY_NAMES[
|
||||
value as keyof typeof FINDING_STATUS_DISPLAY_NAMES
|
||||
] ?? formatLabel(value)
|
||||
);
|
||||
}
|
||||
if (filterKey === "filter[category__in]") {
|
||||
return getCategoryLabel(value);
|
||||
}
|
||||
if (filterKey === "filter[resource_groups__in]") {
|
||||
return getGroupLabel(value);
|
||||
}
|
||||
// Date filter: preserve ISO date string (YYYY-MM-DD) — do not run through formatLabel
|
||||
if (filterKey === "filter[inserted_at]") {
|
||||
return value;
|
||||
}
|
||||
// Generic fallback: handles hyphen/underscore-delimited IDs with smart capitalisation
|
||||
return formatLabel(value);
|
||||
};
|
||||
|
||||
export const FindingsFilters = ({
|
||||
providers,
|
||||
completedScanIds,
|
||||
@@ -185,7 +143,10 @@ export const FindingsFilters = ({
|
||||
key,
|
||||
label,
|
||||
value,
|
||||
displayValue: formatFilterValue(key, value),
|
||||
displayValue: getFindingsFilterDisplayValue(key, value, {
|
||||
providers,
|
||||
scans: scanDetails,
|
||||
}),
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
148
ui/components/findings/findings-filters.utils.test.ts
Normal file
148
ui/components/findings/findings-filters.utils.test.ts
Normal file
@@ -0,0 +1,148 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
|
||||
import { ProviderProps } from "@/types/providers";
|
||||
import { ScanEntity } from "@/types/scans";
|
||||
|
||||
import { getFindingsFilterDisplayValue } from "./findings-filters.utils";
|
||||
|
||||
// Test factory: builds a fully-populated ProviderProps fixture. `overrides`
// must supply the id and may replace any other top-level field. The trailing
// `as ProviderProps` cast keeps the factory terse; the base object is kept in
// sync with the real type by hand.
function makeProvider(
  overrides: Partial<ProviderProps> & { id: string },
): ProviderProps {
  return {
    type: "providers",
    attributes: {
      provider: "aws",
      uid: "123456789012",
      alias: "Production Account",
      status: "completed",
      resources: 10,
      connection: { connected: true, last_checked_at: "2026-04-07T10:00:00Z" },
      scanner_args: {
        only_logs: false,
        excluded_checks: [],
        aws_retries_max_attempts: 3,
      },
      inserted_at: "2026-04-07T10:00:00Z",
      updated_at: "2026-04-07T10:00:00Z",
      created_by: { object: "user", id: "user-1" },
    },
    relationships: {
      secret: { data: null },
      provider_groups: { meta: { count: 0 }, data: [] },
    },
    ...overrides,
  } as ProviderProps;
}

// Test factory: builds a single-entry map from scan id to ScanEntity,
// matching the `Array<{ [scanId: string]: ScanEntity }>` shape the display
// helper consumes.
function makeScanMap(
  scanId: string,
  overrides?: Partial<ScanEntity>,
): { [scanId: string]: ScanEntity } {
  return {
    [scanId]: {
      id: scanId,
      providerInfo: {
        provider: "aws",
        alias: "Scan Account",
        uid: "123456789012",
      },
      attributes: {
        name: "Nightly scan",
        completed_at: "2026-04-07T10:00:00Z",
      },
      ...overrides,
    },
  };
}

// Shared baseline fixtures; individual tests extend them with spreads.
const providers = [makeProvider({ id: "provider-1" })];
const scans = [makeScanMap("scan-1")];

describe("getFindingsFilterDisplayValue", () => {
  it("shows the account alias for provider_id filters instead of the raw provider id", () => {
    expect(
      getFindingsFilterDisplayValue("filter[provider_id__in]", "provider-1", {
        providers,
      }),
    ).toBe("Production Account");
  });

  it("falls back to the provider uid when the alias is empty", () => {
    expect(
      getFindingsFilterDisplayValue("filter[provider_id__in]", "provider-2", {
        providers: [
          ...providers,
          makeProvider({
            id: "provider-2",
            attributes: {
              ...providers[0].attributes,
              alias: "",
              uid: "210987654321",
            },
          }),
        ],
      }),
    ).toBe("210987654321");
  });

  it("keeps the raw value when the provider cannot be resolved", () => {
    expect(
      getFindingsFilterDisplayValue(
        "filter[provider_id__in]",
        "missing-provider",
        { providers },
      ),
    ).toBe("missing-provider");
  });

  it("shows the resolved scan badge label for scan filters instead of formatting the raw scan id", () => {
    expect(
      getFindingsFilterDisplayValue("filter[scan__in]", "scan-1", { scans }),
    ).toBe("Scan Account");
  });

  it("falls back to the scan provider uid when the alias is missing", () => {
    expect(
      getFindingsFilterDisplayValue("filter[scan__in]", "scan-2", {
        scans: [
          ...scans,
          makeScanMap("scan-2", {
            providerInfo: { provider: "aws", uid: "210987654321" },
            attributes: {
              name: "Weekly scan",
              completed_at: "2026-04-08T10:00:00Z",
            },
          }),
        ],
      }),
    ).toBe("210987654321");
  });

  it("keeps the raw scan value when the scan cannot be resolved", () => {
    expect(
      getFindingsFilterDisplayValue("filter[scan__in]", "missing-scan", {
        scans,
      }),
    ).toBe("missing-scan");
  });

  it("passes through date values for inserted_at__gte filters", () => {
    expect(
      getFindingsFilterDisplayValue(
        "filter[inserted_at__gte]",
        "2026-04-03",
        {},
      ),
    ).toBe("2026-04-03");
  });

  it("passes through date values for inserted_at__lte filters", () => {
    expect(
      getFindingsFilterDisplayValue(
        "filter[inserted_at__lte]",
        "2026-04-07",
        {},
      ),
    ).toBe("2026-04-07");
  });
});
|
||||
80
ui/components/findings/findings-filters.utils.ts
Normal file
80
ui/components/findings/findings-filters.utils.ts
Normal file
@@ -0,0 +1,80 @@
|
||||
import { formatLabel, getCategoryLabel, getGroupLabel } from "@/lib/categories";
|
||||
import { FINDING_STATUS_DISPLAY_NAMES } from "@/types";
|
||||
import { getProviderDisplayName, ProviderProps } from "@/types/providers";
|
||||
import { ScanEntity } from "@/types/scans";
|
||||
import { SEVERITY_DISPLAY_NAMES } from "@/types/severities";
|
||||
|
||||
/**
 * Optional lookup data used to resolve raw filter values (provider ids,
 * scan ids) into human-readable labels. Missing collections simply leave
 * the raw value unresolved.
 */
interface GetFindingsFilterDisplayValueOptions {
  // Providers available to the current tenant; searched for provider_id filters.
  providers?: ProviderProps[];
  // Each array entry maps a single scan id to its ScanEntity details.
  scans?: Array<{ [scanId: string]: ScanEntity }>;
}
|
||||
|
||||
function getProviderAccountDisplayValue(
|
||||
providerId: string,
|
||||
providers: ProviderProps[],
|
||||
): string {
|
||||
const provider = providers.find((item) => item.id === providerId);
|
||||
if (!provider) {
|
||||
return providerId;
|
||||
}
|
||||
|
||||
return provider.attributes.alias || provider.attributes.uid || providerId;
|
||||
}
|
||||
|
||||
function getScanDisplayValue(
|
||||
scanId: string,
|
||||
scans: Array<{ [scanId: string]: ScanEntity }>,
|
||||
): string {
|
||||
const scan = scans.find((item) => item[scanId])?.[scanId];
|
||||
if (!scan) {
|
||||
return scanId;
|
||||
}
|
||||
|
||||
return scan.providerInfo.alias || scan.providerInfo.uid || scanId;
|
||||
}
|
||||
|
||||
export function getFindingsFilterDisplayValue(
|
||||
filterKey: string,
|
||||
value: string,
|
||||
options: GetFindingsFilterDisplayValueOptions = {},
|
||||
): string {
|
||||
if (!value) return value;
|
||||
if (filterKey === "filter[provider_type__in]") {
|
||||
return getProviderDisplayName(value);
|
||||
}
|
||||
if (filterKey === "filter[provider_id__in]") {
|
||||
return getProviderAccountDisplayValue(value, options.providers || []);
|
||||
}
|
||||
if (filterKey === "filter[scan__in]") {
|
||||
return getScanDisplayValue(value, options.scans || []);
|
||||
}
|
||||
if (filterKey === "filter[severity__in]") {
|
||||
return (
|
||||
SEVERITY_DISPLAY_NAMES[
|
||||
value.toLowerCase() as keyof typeof SEVERITY_DISPLAY_NAMES
|
||||
] ?? formatLabel(value)
|
||||
);
|
||||
}
|
||||
if (filterKey === "filter[status__in]") {
|
||||
return (
|
||||
FINDING_STATUS_DISPLAY_NAMES[
|
||||
value as keyof typeof FINDING_STATUS_DISPLAY_NAMES
|
||||
] ?? formatLabel(value)
|
||||
);
|
||||
}
|
||||
if (filterKey === "filter[category__in]") {
|
||||
return getCategoryLabel(value);
|
||||
}
|
||||
if (filterKey === "filter[resource_groups__in]") {
|
||||
return getGroupLabel(value);
|
||||
}
|
||||
if (
|
||||
filterKey === "filter[inserted_at]" ||
|
||||
filterKey === "filter[inserted_at__gte]" ||
|
||||
filterKey === "filter[inserted_at__lte]"
|
||||
) {
|
||||
return value;
|
||||
}
|
||||
|
||||
return formatLabel(value);
|
||||
}
|
||||
@@ -17,11 +17,20 @@ vi.mock("next/navigation", () => ({
|
||||
vi.mock("@/components/shadcn", () => ({
|
||||
Checkbox: ({
|
||||
"aria-label": ariaLabel,
|
||||
onCheckedChange,
|
||||
...props
|
||||
}: InputHTMLAttributes<HTMLInputElement> & {
|
||||
"aria-label"?: string;
|
||||
size?: string;
|
||||
}) => <input type="checkbox" aria-label={ariaLabel} {...props} />,
|
||||
onCheckedChange?: (checked: boolean) => void;
|
||||
}) => (
|
||||
<input
|
||||
type="checkbox"
|
||||
aria-label={ariaLabel}
|
||||
onChange={(event) => onCheckedChange?.(event.target.checked)}
|
||||
{...props}
|
||||
/>
|
||||
),
|
||||
}));
|
||||
|
||||
vi.mock("@/components/ui/table", () => ({
|
||||
@@ -52,7 +61,13 @@ vi.mock("./impacted-providers-cell", () => ({
|
||||
}));
|
||||
|
||||
vi.mock("./impacted-resources-cell", () => ({
|
||||
ImpactedResourcesCell: () => null,
|
||||
ImpactedResourcesCell: ({
|
||||
impacted,
|
||||
total,
|
||||
}: {
|
||||
impacted: number;
|
||||
total: number;
|
||||
}) => <span>{`${impacted}/${total}`}</span>,
|
||||
}));
|
||||
|
||||
vi.mock("./notification-indicator", () => ({
|
||||
@@ -94,6 +109,7 @@ function makeGroup(overrides?: Partial<FindingGroupRow>): FindingGroupRow {
|
||||
function renderFindingCell(
|
||||
checkTitle: string,
|
||||
onDrillDown: (checkId: string, group: FindingGroupRow) => void,
|
||||
overrides?: Partial<FindingGroupRow>,
|
||||
) {
|
||||
const columns = getColumnFindingGroups({
|
||||
rowSelection: {},
|
||||
@@ -107,7 +123,7 @@ function renderFindingCell(
|
||||
);
|
||||
if (!findingColumn?.cell) throw new Error("finding column not found");
|
||||
|
||||
const group = makeGroup({ checkTitle });
|
||||
const group = makeGroup({ checkTitle, ...overrides });
|
||||
// Render the cell directly with a minimal row mock
|
||||
const CellComponent = findingColumn.cell as (props: {
|
||||
row: { original: FindingGroupRow };
|
||||
@@ -116,6 +132,67 @@ function renderFindingCell(
|
||||
render(<div>{CellComponent({ row: { original: group } })}</div>);
|
||||
}
|
||||
|
||||
function renderImpactedResourcesCell(overrides?: Partial<FindingGroupRow>) {
|
||||
const columns = getColumnFindingGroups({
|
||||
rowSelection: {},
|
||||
selectableRowCount: 1,
|
||||
onDrillDown: vi.fn(),
|
||||
});
|
||||
|
||||
const impactedResourcesColumn = columns.find(
|
||||
(col) => (col as { id?: string }).id === "impactedResources",
|
||||
);
|
||||
if (!impactedResourcesColumn?.cell) {
|
||||
throw new Error("impactedResources column not found");
|
||||
}
|
||||
|
||||
const group = makeGroup(overrides);
|
||||
const CellComponent = impactedResourcesColumn.cell as (props: {
|
||||
row: { original: FindingGroupRow };
|
||||
}) => ReactNode;
|
||||
|
||||
render(<div>{CellComponent({ row: { original: group } })}</div>);
|
||||
}
|
||||
|
||||
function renderSelectCell(overrides?: Partial<FindingGroupRow>) {
|
||||
const toggleSelected = vi.fn();
|
||||
const columns = getColumnFindingGroups({
|
||||
rowSelection: {},
|
||||
selectableRowCount: 1,
|
||||
onDrillDown: vi.fn(),
|
||||
});
|
||||
|
||||
const selectColumn = columns.find(
|
||||
(col) => (col as { id?: string }).id === "select",
|
||||
);
|
||||
if (!selectColumn?.cell) {
|
||||
throw new Error("select column not found");
|
||||
}
|
||||
|
||||
const group = makeGroup(overrides);
|
||||
const CellComponent = selectColumn.cell as (props: {
|
||||
row: {
|
||||
id: string;
|
||||
original: FindingGroupRow;
|
||||
toggleSelected: (selected: boolean) => void;
|
||||
};
|
||||
}) => ReactNode;
|
||||
|
||||
render(
|
||||
<div>
|
||||
{CellComponent({
|
||||
row: {
|
||||
id: "0",
|
||||
original: group,
|
||||
toggleSelected,
|
||||
},
|
||||
})}
|
||||
</div>,
|
||||
);
|
||||
|
||||
return { toggleSelected };
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Fix 5: Accessibility — <p onClick> → <button>
|
||||
// ---------------------------------------------------------------------------
|
||||
@@ -191,4 +268,60 @@ describe("column-finding-groups — accessibility of check title cell", () => {
|
||||
// Then — native button handles Enter natively
|
||||
expect(onDrillDown).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it("should allow expanding a group that only has PASS resources", async () => {
|
||||
// Given
|
||||
const user = userEvent.setup();
|
||||
const onDrillDown =
|
||||
vi.fn<(checkId: string, group: FindingGroupRow) => void>();
|
||||
|
||||
renderFindingCell("My Passing Check", onDrillDown, {
|
||||
resourcesTotal: 2,
|
||||
resourcesFail: 0,
|
||||
status: "PASS",
|
||||
});
|
||||
|
||||
// When
|
||||
await user.click(
|
||||
screen.getByRole("button", {
|
||||
name: "My Passing Check",
|
||||
}),
|
||||
);
|
||||
|
||||
// Then
|
||||
expect(onDrillDown).toHaveBeenCalledTimes(1);
|
||||
expect(onDrillDown).toHaveBeenCalledWith(
|
||||
"s3_check",
|
||||
expect.objectContaining({
|
||||
resourcesTotal: 2,
|
||||
resourcesFail: 0,
|
||||
status: "PASS",
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("column-finding-groups — impacted resources count", () => {
|
||||
it("should keep impacted resources based on failing resources only", () => {
|
||||
// Given/When
|
||||
renderImpactedResourcesCell({
|
||||
resourcesTotal: 5,
|
||||
resourcesFail: 3,
|
||||
});
|
||||
|
||||
// Then
|
||||
expect(screen.getByText("3/5")).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
describe("column-finding-groups — group selection", () => {
|
||||
it("should disable the row checkbox when the group has zero impacted resources", () => {
|
||||
renderSelectCell({
|
||||
resourcesTotal: 2,
|
||||
resourcesFail: 0,
|
||||
status: "PASS",
|
||||
});
|
||||
|
||||
expect(screen.getByRole("checkbox", { name: "Select row" })).toBeDisabled();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -13,6 +13,7 @@ import { cn } from "@/lib";
|
||||
import { FindingGroupRow, ProviderType } from "@/types";
|
||||
|
||||
import { DataTableRowActions } from "./data-table-row-actions";
|
||||
import { canMuteFindingGroup } from "./finding-group-selection";
|
||||
import { ImpactedProvidersCell } from "./impacted-providers-cell";
|
||||
import { ImpactedResourcesCell } from "./impacted-resources-cell";
|
||||
import { DeltaValues, NotificationIndicator } from "./notification-indicator";
|
||||
@@ -26,6 +27,9 @@ interface GetColumnFindingGroupsOptions {
|
||||
hasResourceSelection?: boolean;
|
||||
}
|
||||
|
||||
const VISIBLE_DISABLED_CHECKBOX_CLASS =
|
||||
"disabled:opacity-100 disabled:bg-bg-input-primary/60 disabled:border-border-input-primary/70";
|
||||
|
||||
export function getColumnFindingGroups({
|
||||
rowSelection,
|
||||
selectableRowCount,
|
||||
@@ -56,6 +60,7 @@ export function getColumnFindingGroups({
|
||||
<div className="w-4" />
|
||||
<Checkbox
|
||||
size="sm"
|
||||
className={VISIBLE_DISABLED_CHECKBOX_CLASS}
|
||||
checked={headerChecked}
|
||||
onCheckedChange={(checked) =>
|
||||
table.toggleAllPageRowsSelected(checked === true)
|
||||
@@ -80,7 +85,12 @@ export function getColumnFindingGroups({
|
||||
? DeltaValues.CHANGED
|
||||
: DeltaValues.NONE;
|
||||
|
||||
const canExpand = group.resourcesFail > 0;
|
||||
const canExpand = group.resourcesTotal > 0;
|
||||
const canSelect = canMuteFindingGroup({
|
||||
resourcesFail: group.resourcesFail,
|
||||
resourcesTotal: group.resourcesTotal,
|
||||
mutedCount: group.mutedCount,
|
||||
});
|
||||
|
||||
return (
|
||||
<div className="flex items-center gap-2">
|
||||
@@ -104,11 +114,13 @@ export function getColumnFindingGroups({
|
||||
)}
|
||||
<Checkbox
|
||||
size="sm"
|
||||
className={VISIBLE_DISABLED_CHECKBOX_CLASS}
|
||||
checked={
|
||||
rowSelection[row.id] && isExpanded && hasResourceSelection
|
||||
? "indeterminate"
|
||||
: !!rowSelection[row.id]
|
||||
}
|
||||
disabled={!canSelect}
|
||||
onCheckedChange={(checked) => {
|
||||
// When indeterminate (resources selected), clicking deselects the group
|
||||
if (
|
||||
@@ -155,7 +167,7 @@ export function getColumnFindingGroups({
|
||||
),
|
||||
cell: ({ row }) => {
|
||||
const group = row.original;
|
||||
const canExpand = group.resourcesFail > 0;
|
||||
const canExpand = group.resourcesTotal > 0;
|
||||
|
||||
return (
|
||||
<div>
|
||||
|
||||
203
ui/components/findings/table/column-finding-resources.test.tsx
Normal file
203
ui/components/findings/table/column-finding-resources.test.tsx
Normal file
@@ -0,0 +1,203 @@
|
||||
import { render, screen } from "@testing-library/react";
|
||||
import type { InputHTMLAttributes, ReactNode } from "react";
|
||||
import { describe, expect, it, vi } from "vitest";
|
||||
|
||||
vi.mock("@/components/shadcn", () => ({
|
||||
Checkbox: ({
|
||||
"aria-label": ariaLabel,
|
||||
onCheckedChange,
|
||||
...props
|
||||
}: InputHTMLAttributes<HTMLInputElement> & {
|
||||
"aria-label"?: string;
|
||||
size?: string;
|
||||
onCheckedChange?: (checked: boolean) => void;
|
||||
}) => (
|
||||
<input
|
||||
type="checkbox"
|
||||
aria-label={ariaLabel}
|
||||
onChange={(event) => onCheckedChange?.(event.target.checked)}
|
||||
{...props}
|
||||
/>
|
||||
),
|
||||
}));
|
||||
|
||||
vi.mock("@/components/findings/mute-findings-modal", () => ({
|
||||
MuteFindingsModal: () => null,
|
||||
}));
|
||||
|
||||
vi.mock("@/components/findings/send-to-jira-modal", () => ({
|
||||
SendToJiraModal: () => null,
|
||||
}));
|
||||
|
||||
vi.mock("@/components/icons/services/IconServices", () => ({
|
||||
JiraIcon: () => null,
|
||||
}));
|
||||
|
||||
vi.mock("@/components/shadcn/dropdown", () => ({
|
||||
ActionDropdown: ({ children }: { children: ReactNode }) => (
|
||||
<div>{children}</div>
|
||||
),
|
||||
ActionDropdownItem: ({ label }: { label: string }) => (
|
||||
<button>{label}</button>
|
||||
),
|
||||
}));
|
||||
|
||||
vi.mock("@/components/shadcn/info-field/info-field", () => ({
|
||||
InfoField: () => null,
|
||||
}));
|
||||
|
||||
vi.mock("@/components/shadcn/spinner/spinner", () => ({
|
||||
Spinner: () => null,
|
||||
}));
|
||||
|
||||
vi.mock("@/components/ui/entities", () => ({
|
||||
DateWithTime: () => null,
|
||||
}));
|
||||
|
||||
vi.mock("@/components/ui/entities/entity-info", () => ({
|
||||
EntityInfo: ({
|
||||
entityAlias,
|
||||
entityId,
|
||||
}: {
|
||||
entityAlias?: string;
|
||||
entityId?: string;
|
||||
}) => (
|
||||
<div>
|
||||
<span>{entityAlias}</span>
|
||||
<span>{entityId}</span>
|
||||
</div>
|
||||
),
|
||||
}));
|
||||
|
||||
vi.mock("@/components/ui/table", () => ({
|
||||
SeverityBadge: ({ severity }: { severity: string }) => (
|
||||
<span>{severity}</span>
|
||||
),
|
||||
}));
|
||||
|
||||
vi.mock("@/components/ui/table/data-table-column-header", () => ({
|
||||
DataTableColumnHeader: ({ title }: { title: string }) => <span>{title}</span>,
|
||||
}));
|
||||
|
||||
vi.mock("@/components/ui/table/status-finding-badge", () => ({
|
||||
StatusFindingBadge: ({ status }: { status: string }) => <span>{status}</span>,
|
||||
}));
|
||||
|
||||
vi.mock("@/lib/date-utils", () => ({
|
||||
getFailingForLabel: () => "2d",
|
||||
}));
|
||||
|
||||
const notificationIndicatorMock = vi.fn((_props: unknown) => null);
|
||||
|
||||
vi.mock("./notification-indicator", () => ({
|
||||
NotificationIndicator: (props: unknown) => {
|
||||
notificationIndicatorMock(props);
|
||||
return null;
|
||||
},
|
||||
}));
|
||||
|
||||
import type { FindingResourceRow } from "@/types";
|
||||
|
||||
import { getColumnFindingResources } from "./column-finding-resources";
|
||||
|
||||
function makeResource(
|
||||
overrides?: Partial<FindingResourceRow>,
|
||||
): FindingResourceRow {
|
||||
return {
|
||||
id: "resource-row-1",
|
||||
rowType: "resource",
|
||||
findingId: "finding-1",
|
||||
checkId: "s3_check",
|
||||
providerType: "aws",
|
||||
providerAlias: "production",
|
||||
providerUid: "123456789",
|
||||
resourceName: "my-bucket",
|
||||
resourceType: "bucket",
|
||||
resourceGroup: "default",
|
||||
resourceUid: "arn:aws:s3:::my-bucket",
|
||||
service: "s3",
|
||||
region: "us-east-1",
|
||||
severity: "critical",
|
||||
status: "FAIL",
|
||||
delta: "new",
|
||||
isMuted: false,
|
||||
firstSeenAt: null,
|
||||
lastSeenAt: "2024-01-01T00:00:00Z",
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
describe("column-finding-resources", () => {
|
||||
it("should pass delta to NotificationIndicator for resource rows", () => {
|
||||
const columns = getColumnFindingResources({
|
||||
rowSelection: {},
|
||||
selectableRowCount: 1,
|
||||
});
|
||||
|
||||
const selectColumn = columns.find(
|
||||
(col) => (col as { id?: string }).id === "select",
|
||||
);
|
||||
if (!selectColumn?.cell) {
|
||||
throw new Error("select column not found");
|
||||
}
|
||||
|
||||
const CellComponent = selectColumn.cell as (props: {
|
||||
row: {
|
||||
id: string;
|
||||
original: FindingResourceRow;
|
||||
toggleSelected: (selected: boolean) => void;
|
||||
};
|
||||
}) => ReactNode;
|
||||
|
||||
render(
|
||||
<div>
|
||||
{CellComponent({
|
||||
row: {
|
||||
id: "0",
|
||||
original: makeResource(),
|
||||
toggleSelected: vi.fn(),
|
||||
},
|
||||
})}
|
||||
</div>,
|
||||
);
|
||||
|
||||
expect(screen.getByLabelText("Select resource")).toBeInTheDocument();
|
||||
expect(notificationIndicatorMock).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
delta: "new",
|
||||
isMuted: false,
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it("should render the resource EntityInfo with resourceName as alias", () => {
|
||||
const columns = getColumnFindingResources({
|
||||
rowSelection: {},
|
||||
selectableRowCount: 1,
|
||||
});
|
||||
|
||||
const resourceColumn = columns.find(
|
||||
(col) => (col as { id?: string }).id === "resource",
|
||||
);
|
||||
if (!resourceColumn?.cell) {
|
||||
throw new Error("resource column not found");
|
||||
}
|
||||
|
||||
const CellComponent = resourceColumn.cell as (props: {
|
||||
row: { original: FindingResourceRow };
|
||||
}) => ReactNode;
|
||||
|
||||
render(
|
||||
<div>
|
||||
{CellComponent({
|
||||
row: {
|
||||
original: makeResource(),
|
||||
},
|
||||
})}
|
||||
</div>,
|
||||
);
|
||||
|
||||
expect(screen.getByText("my-bucket")).toBeInTheDocument();
|
||||
expect(screen.getByText("arn:aws:s3:::my-bucket")).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
@@ -25,11 +25,16 @@ import {
|
||||
import { getFailingForLabel } from "@/lib/date-utils";
|
||||
import { FindingResourceRow } from "@/types";
|
||||
|
||||
import { canMuteFindingResource } from "./finding-resource-selection";
|
||||
import { FindingsSelectionContext } from "./findings-selection-context";
|
||||
import { NotificationIndicator } from "./notification-indicator";
|
||||
import {
|
||||
type DeltaType,
|
||||
NotificationIndicator,
|
||||
} from "./notification-indicator";
|
||||
|
||||
const ResourceRowActions = ({ row }: { row: Row<FindingResourceRow> }) => {
|
||||
const resource = row.original;
|
||||
const canMute = canMuteFindingResource(resource);
|
||||
const [isMuteModalOpen, setIsMuteModalOpen] = useState(false);
|
||||
const [isJiraModalOpen, setIsJiraModalOpen] = useState(false);
|
||||
const [resolvedIds, setResolvedIds] = useState<string[]>([]);
|
||||
@@ -81,7 +86,7 @@ const ResourceRowActions = ({ row }: { row: Row<FindingResourceRow> }) => {
|
||||
|
||||
return (
|
||||
<>
|
||||
{!resource.isMuted && (
|
||||
{canMute && (
|
||||
<MuteFindingsModal
|
||||
isOpen={isMuteModalOpen}
|
||||
onOpenChange={setIsMuteModalOpen}
|
||||
@@ -111,7 +116,7 @@ const ResourceRowActions = ({ row }: { row: Row<FindingResourceRow> }) => {
|
||||
)
|
||||
}
|
||||
label={isResolving ? "Resolving..." : getMuteLabel()}
|
||||
disabled={resource.isMuted || isResolving}
|
||||
disabled={!canMute || isResolving}
|
||||
onSelect={handleMuteClick}
|
||||
/>
|
||||
<ActionDropdownItem
|
||||
@@ -171,6 +176,7 @@ export function getColumnFindingResources({
|
||||
cell: ({ row }) => (
|
||||
<div className="flex items-center gap-2">
|
||||
<NotificationIndicator
|
||||
delta={row.original.delta as DeltaType | undefined}
|
||||
isMuted={row.original.isMuted}
|
||||
mutedReason={row.original.mutedReason}
|
||||
/>
|
||||
@@ -178,7 +184,7 @@ export function getColumnFindingResources({
|
||||
<Checkbox
|
||||
size="sm"
|
||||
checked={!!rowSelection[row.id]}
|
||||
disabled={row.original.isMuted}
|
||||
disabled={!canMuteFindingResource(row.original)}
|
||||
onCheckedChange={(checked) => row.toggleSelected(checked === true)}
|
||||
onClick={(e) => e.stopPropagation()}
|
||||
aria-label="Select resource"
|
||||
@@ -198,7 +204,7 @@ export function getColumnFindingResources({
|
||||
<div className="max-w-[240px]">
|
||||
<EntityInfo
|
||||
nameIcon={<Container className="size-4" />}
|
||||
entityAlias={row.original.resourceGroup}
|
||||
entityAlias={row.original.resourceName}
|
||||
entityId={row.original.resourceUid}
|
||||
/>
|
||||
</div>
|
||||
@@ -213,8 +219,12 @@ export function getColumnFindingResources({
|
||||
),
|
||||
cell: ({ row }) => {
|
||||
const rawStatus = row.original.status;
|
||||
const status =
|
||||
rawStatus === "MUTED" ? "FAIL" : (rawStatus as FindingStatus);
|
||||
const status: FindingStatus =
|
||||
rawStatus === "MUTED" || rawStatus === "FAIL"
|
||||
? "FAIL"
|
||||
: rawStatus === "PASS"
|
||||
? "PASS"
|
||||
: "FAIL";
|
||||
return <StatusFindingBadge status={status} />;
|
||||
},
|
||||
enableSorting: false,
|
||||
|
||||
45
ui/components/findings/table/finding-group-selection.test.ts
Normal file
45
ui/components/findings/table/finding-group-selection.test.ts
Normal file
@@ -0,0 +1,45 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
|
||||
import { canMuteFindingGroup } from "./finding-group-selection";
|
||||
|
||||
describe("canMuteFindingGroup", () => {
|
||||
it("returns false when impacted resources is zero", () => {
|
||||
expect(
|
||||
canMuteFindingGroup({
|
||||
resourcesFail: 0,
|
||||
resourcesTotal: 2,
|
||||
mutedCount: 0,
|
||||
}),
|
||||
).toBe(false);
|
||||
});
|
||||
|
||||
it("returns false when all resources are already muted", () => {
|
||||
expect(
|
||||
canMuteFindingGroup({
|
||||
resourcesFail: 3,
|
||||
resourcesTotal: 3,
|
||||
mutedCount: 3,
|
||||
}),
|
||||
).toBe(false);
|
||||
});
|
||||
|
||||
it("returns false when all failing resources are muted even if PASS resources exist", () => {
|
||||
expect(
|
||||
canMuteFindingGroup({
|
||||
resourcesFail: 2,
|
||||
resourcesTotal: 5,
|
||||
mutedCount: 2,
|
||||
}),
|
||||
).toBe(false);
|
||||
});
|
||||
|
||||
it("returns true when the group still has failing resources to mute", () => {
|
||||
expect(
|
||||
canMuteFindingGroup({
|
||||
resourcesFail: 2,
|
||||
resourcesTotal: 5,
|
||||
mutedCount: 1,
|
||||
}),
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
13
ui/components/findings/table/finding-group-selection.ts
Normal file
13
ui/components/findings/table/finding-group-selection.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
interface FindingGroupSelectionState {
|
||||
resourcesFail: number;
|
||||
resourcesTotal: number;
|
||||
mutedCount: number;
|
||||
}
|
||||
|
||||
export function canMuteFindingGroup({
|
||||
resourcesFail,
|
||||
mutedCount,
|
||||
}: FindingGroupSelectionState): boolean {
|
||||
const allMuted = mutedCount > 0 && mutedCount === resourcesFail;
|
||||
return resourcesFail > 0 && !allMuted;
|
||||
}
|
||||
@@ -0,0 +1,47 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
|
||||
import type { FindingResourceRow } from "@/types";
|
||||
|
||||
import { canMuteFindingResource } from "./finding-resource-selection";
|
||||
|
||||
function makeResource(
|
||||
overrides?: Partial<FindingResourceRow>,
|
||||
): FindingResourceRow {
|
||||
return {
|
||||
id: "finding-1",
|
||||
rowType: "resource",
|
||||
findingId: "finding-1",
|
||||
checkId: "check-1",
|
||||
providerType: "aws",
|
||||
providerAlias: "prod",
|
||||
providerUid: "123456789012",
|
||||
resourceName: "bucket-a",
|
||||
resourceType: "Bucket",
|
||||
resourceGroup: "bucket-a",
|
||||
resourceUid: "arn:aws:s3:::bucket-a",
|
||||
service: "s3",
|
||||
region: "us-east-1",
|
||||
severity: "high",
|
||||
status: "FAIL",
|
||||
isMuted: false,
|
||||
firstSeenAt: null,
|
||||
lastSeenAt: null,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
describe("canMuteFindingResource", () => {
|
||||
it("should allow muting FAIL resources that are not muted", () => {
|
||||
expect(canMuteFindingResource(makeResource())).toBe(true);
|
||||
});
|
||||
|
||||
it("should disable muting for PASS resources", () => {
|
||||
expect(canMuteFindingResource(makeResource({ status: "PASS" }))).toBe(
|
||||
false,
|
||||
);
|
||||
});
|
||||
|
||||
it("should disable muting for already muted resources", () => {
|
||||
expect(canMuteFindingResource(makeResource({ isMuted: true }))).toBe(false);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,5 @@
|
||||
import { FindingResourceRow } from "@/types";
|
||||
|
||||
export function canMuteFindingResource(resource: FindingResourceRow): boolean {
|
||||
return resource.status === "FAIL" && !resource.isMuted;
|
||||
}
|
||||
@@ -28,6 +28,7 @@ import { FindingGroupRow, FindingResourceRow } from "@/types";
|
||||
|
||||
import { FloatingMuteButton } from "../floating-mute-button";
|
||||
import { getColumnFindingResources } from "./column-finding-resources";
|
||||
import { canMuteFindingResource } from "./finding-resource-selection";
|
||||
import { FindingsSelectionContext } from "./findings-selection-context";
|
||||
import { ImpactedResourcesCell } from "./impacted-resources-cell";
|
||||
import { DeltaValues, NotificationIndicator } from "./notification-indicator";
|
||||
@@ -82,7 +83,7 @@ export function FindingsGroupDrillDown({
|
||||
setIsLoading(loading);
|
||||
};
|
||||
|
||||
const { sentinelRef, refresh, loadMore } = useInfiniteResources({
|
||||
const { sentinelRef, refresh, loadMore, totalCount } = useInfiniteResources({
|
||||
checkId: group.checkId,
|
||||
hasDateOrScanFilter: hasDateOrScan,
|
||||
filters,
|
||||
@@ -95,7 +96,7 @@ export function FindingsGroupDrillDown({
|
||||
const drawer = useResourceDetailDrawer({
|
||||
resources,
|
||||
checkId: group.checkId,
|
||||
totalResourceCount: group.resourcesTotal,
|
||||
totalResourceCount: totalCount ?? group.resourcesTotal,
|
||||
onRequestMoreResources: loadMore,
|
||||
});
|
||||
|
||||
@@ -108,7 +109,7 @@ export function FindingsGroupDrillDown({
|
||||
const selectedFindingIds = Object.keys(rowSelection)
|
||||
.filter((key) => rowSelection[key])
|
||||
.map((idx) => resources[parseInt(idx)]?.findingId)
|
||||
.filter(Boolean);
|
||||
.filter((id): id is string => id !== null && id !== undefined && id !== "");
|
||||
|
||||
/** Converts resource_ids (display) → resourceUids → finding UUIDs via API. */
|
||||
const resolveResourceIds = async (ids: string[]) => {
|
||||
@@ -124,10 +125,10 @@ export function FindingsGroupDrillDown({
|
||||
});
|
||||
};
|
||||
|
||||
const selectableRowCount = resources.filter((r) => !r.isMuted).length;
|
||||
const selectableRowCount = resources.filter(canMuteFindingResource).length;
|
||||
|
||||
const getRowCanSelect = (row: Row<FindingResourceRow>): boolean => {
|
||||
return !row.original.isMuted;
|
||||
return canMuteFindingResource(row.original);
|
||||
};
|
||||
|
||||
const clearSelection = () => {
|
||||
|
||||
@@ -14,6 +14,7 @@ import { FindingGroupRow, MetaDataProps } from "@/types";
|
||||
|
||||
import { FloatingMuteButton } from "../floating-mute-button";
|
||||
import { getColumnFindingGroups } from "./column-finding-groups";
|
||||
import { canMuteFindingGroup } from "./finding-group-selection";
|
||||
import { FindingsSelectionContext } from "./findings-selection-context";
|
||||
import {
|
||||
InlineResourceContainer,
|
||||
@@ -88,13 +89,21 @@ export function FindingsGroupTable({
|
||||
.filter(Boolean);
|
||||
|
||||
// Count of selectable rows (groups where not ALL findings are muted)
|
||||
const selectableRowCount = safeData.filter(
|
||||
(g) => !(g.mutedCount > 0 && g.mutedCount === g.resourcesTotal),
|
||||
const selectableRowCount = safeData.filter((g) =>
|
||||
canMuteFindingGroup({
|
||||
resourcesFail: g.resourcesFail,
|
||||
resourcesTotal: g.resourcesTotal,
|
||||
mutedCount: g.mutedCount,
|
||||
}),
|
||||
).length;
|
||||
|
||||
const getRowCanSelect = (row: Row<FindingGroupRow>): boolean => {
|
||||
const group = row.original;
|
||||
return !(group.mutedCount > 0 && group.mutedCount === group.resourcesTotal);
|
||||
return canMuteFindingGroup({
|
||||
resourcesFail: group.resourcesFail,
|
||||
resourcesTotal: group.resourcesTotal,
|
||||
mutedCount: group.mutedCount,
|
||||
});
|
||||
};
|
||||
|
||||
const clearSelection = () => {
|
||||
@@ -136,8 +145,8 @@ export function FindingsGroupTable({
|
||||
};
|
||||
|
||||
const handleDrillDown = (checkId: string, group: FindingGroupRow) => {
|
||||
// No impacted resources → nothing to show, skip drill-down
|
||||
if (group.resourcesFail === 0) return;
|
||||
// No resources in the group → nothing to show, skip drill-down
|
||||
if (group.resourcesTotal === 0) return;
|
||||
|
||||
// Toggle: same group = collapse, different = switch
|
||||
if (expandedCheckId === checkId) {
|
||||
|
||||
@@ -22,6 +22,7 @@ import { hasDateOrScanFilter } from "@/lib";
|
||||
import { FindingGroupRow, FindingResourceRow } from "@/types";
|
||||
|
||||
import { getColumnFindingResources } from "./column-finding-resources";
|
||||
import { canMuteFindingResource } from "./finding-resource-selection";
|
||||
import { FindingsSelectionContext } from "./findings-selection-context";
|
||||
import {
|
||||
ResourceDetailDrawer,
|
||||
@@ -180,7 +181,7 @@ export function InlineResourceContainer({
|
||||
setIsLoading(loading);
|
||||
};
|
||||
|
||||
const { sentinelRef, refresh, loadMore } = useInfiniteResources({
|
||||
const { sentinelRef, refresh, loadMore, totalCount } = useInfiniteResources({
|
||||
checkId: group.checkId,
|
||||
hasDateOrScanFilter: hasDateOrScan,
|
||||
filters,
|
||||
@@ -194,7 +195,7 @@ export function InlineResourceContainer({
|
||||
const drawer = useResourceDetailDrawer({
|
||||
resources,
|
||||
checkId: group.checkId,
|
||||
totalResourceCount: group.resourcesTotal,
|
||||
totalResourceCount: totalCount ?? group.resourcesTotal,
|
||||
onRequestMoreResources: loadMore,
|
||||
});
|
||||
|
||||
@@ -222,10 +223,10 @@ export function InlineResourceContainer({
|
||||
});
|
||||
};
|
||||
|
||||
const selectableRowCount = resources.filter((r) => !r.isMuted).length;
|
||||
const selectableRowCount = resources.filter(canMuteFindingResource).length;
|
||||
|
||||
const getRowCanSelect = (row: Row<FindingResourceRow>): boolean => {
|
||||
return !row.original.isMuted;
|
||||
return canMuteFindingResource(row.original);
|
||||
};
|
||||
|
||||
const clearSelection = () => {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { render, screen } from "@testing-library/react";
|
||||
import { render, screen, within } from "@testing-library/react";
|
||||
import userEvent from "@testing-library/user-event";
|
||||
import type { ButtonHTMLAttributes, HTMLAttributes, ReactNode } from "react";
|
||||
import { createPortal } from "react-dom";
|
||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
@@ -10,17 +11,17 @@ import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
const {
|
||||
mockGetComplianceIcon,
|
||||
mockGetCompliancesOverview,
|
||||
mockRouterPush,
|
||||
mockWindowOpen,
|
||||
mockSearchParamsState,
|
||||
} = vi.hoisted(() => ({
|
||||
mockGetComplianceIcon: vi.fn((_: string) => null as string | null),
|
||||
mockGetCompliancesOverview: vi.fn(),
|
||||
mockRouterPush: vi.fn(),
|
||||
mockWindowOpen: vi.fn(),
|
||||
mockSearchParamsState: { value: "" },
|
||||
}));
|
||||
|
||||
vi.mock("next/navigation", () => ({
|
||||
useRouter: () => ({ push: mockRouterPush, refresh: vi.fn() }),
|
||||
useRouter: () => ({ refresh: vi.fn() }),
|
||||
usePathname: () => "/findings",
|
||||
useSearchParams: () => new URLSearchParams(mockSearchParamsState.value),
|
||||
redirect: vi.fn(),
|
||||
@@ -104,10 +105,30 @@ vi.mock("@/components/shadcn/card/card", () => ({
|
||||
}));
|
||||
|
||||
vi.mock("@/components/shadcn/dropdown", () => ({
|
||||
ActionDropdown: ({ children }: { children: ReactNode }) => (
|
||||
<div>{children}</div>
|
||||
ActionDropdown: ({
|
||||
children,
|
||||
ariaLabel,
|
||||
}: {
|
||||
children: ReactNode;
|
||||
ariaLabel?: string;
|
||||
}) => (
|
||||
<div role="menu" aria-label={ariaLabel}>
|
||||
{children}
|
||||
</div>
|
||||
),
|
||||
ActionDropdownItem: ({
|
||||
label,
|
||||
disabled,
|
||||
onSelect,
|
||||
}: {
|
||||
label: string;
|
||||
disabled?: boolean;
|
||||
onSelect?: () => void;
|
||||
}) => (
|
||||
<button type="button" disabled={disabled} onClick={onSelect}>
|
||||
{label}
|
||||
</button>
|
||||
),
|
||||
ActionDropdownItem: () => null,
|
||||
}));
|
||||
|
||||
vi.mock("@/components/shadcn/skeleton/skeleton", () => ({
|
||||
@@ -125,7 +146,25 @@ vi.mock("@/components/shadcn/tooltip", () => ({
|
||||
}));
|
||||
|
||||
vi.mock("@/components/findings/mute-findings-modal", () => ({
|
||||
MuteFindingsModal: () => null,
|
||||
MuteFindingsModal: ({
|
||||
isOpen,
|
||||
findingIds,
|
||||
onComplete,
|
||||
}: {
|
||||
isOpen: boolean;
|
||||
findingIds: string[];
|
||||
onComplete?: () => void;
|
||||
}) =>
|
||||
isOpen
|
||||
? globalThis.document?.body &&
|
||||
// Render into body to mirror the real modal portal behavior.
|
||||
createPortal(
|
||||
<button type="button" onClick={onComplete}>
|
||||
{`Confirm mute ${findingIds.join(",")}`}
|
||||
</button>,
|
||||
globalThis.document.body,
|
||||
)
|
||||
: null,
|
||||
}));
|
||||
|
||||
vi.mock("@/components/findings/send-to-jira-modal", () => ({
|
||||
@@ -547,9 +586,14 @@ describe("ResourceDetailDrawerContent — compliance icon styling", () => {
|
||||
});
|
||||
|
||||
describe("ResourceDetailDrawerContent — compliance navigation", () => {
|
||||
afterEach(() => {
|
||||
vi.unstubAllGlobals();
|
||||
});
|
||||
|
||||
it("should resolve the clicked framework against the selected scan and navigate to compliance detail", async () => {
|
||||
// Given
|
||||
const user = userEvent.setup();
|
||||
vi.stubGlobal("open", mockWindowOpen);
|
||||
mockSearchParamsState.value =
|
||||
"filter[scan__in]=scan-selected&filter[region__in]=eu-west-1";
|
||||
mockGetCompliancesOverview.mockResolvedValue({
|
||||
@@ -595,14 +639,17 @@ describe("ResourceDetailDrawerContent — compliance navigation", () => {
|
||||
expect(mockGetCompliancesOverview).toHaveBeenCalledWith({
|
||||
scanId: "scan-selected",
|
||||
});
|
||||
expect(mockRouterPush).toHaveBeenCalledWith(
|
||||
expect(mockWindowOpen).toHaveBeenCalledWith(
|
||||
"/compliance/PCI-DSS?complianceId=compliance-1&version=4.0&scanId=scan-selected&filter%5Bregion__in%5D=eu-west-1",
|
||||
"_blank",
|
||||
"noopener,noreferrer",
|
||||
);
|
||||
});
|
||||
|
||||
it("should use the current finding scan when no scan filter is active", async () => {
|
||||
// Given
|
||||
const user = userEvent.setup();
|
||||
vi.stubGlobal("open", mockWindowOpen);
|
||||
mockGetCompliancesOverview.mockResolvedValue({
|
||||
data: [
|
||||
{
|
||||
@@ -662,8 +709,134 @@ describe("ResourceDetailDrawerContent — compliance navigation", () => {
|
||||
expect(mockGetCompliancesOverview).toHaveBeenCalledWith({
|
||||
scanId: "scan-from-finding",
|
||||
});
|
||||
expect(mockRouterPush).toHaveBeenCalledWith(
|
||||
expect(mockWindowOpen).toHaveBeenCalledWith(
|
||||
"/compliance/PCI-DSS?complianceId=compliance-2&version=4.0&scanId=scan-from-finding&scanData=%7B%22id%22%3A%22scan-from-finding%22%2C%22providerInfo%22%3A%7B%22provider%22%3A%22aws%22%2C%22alias%22%3A%22prod%22%2C%22uid%22%3A%22123456789%22%7D%2C%22attributes%22%3A%7B%22name%22%3A%22Nightly+scan%22%2C%22completed_at%22%3A%222026-03-30T10%3A05%3A00Z%22%7D%7D",
|
||||
"_blank",
|
||||
"noopener,noreferrer",
|
||||
);
|
||||
});
|
||||
|
||||
it("should navigate when the finding framework is a short alias of the compliance overview framework", async () => {
|
||||
// Given
|
||||
const user = userEvent.setup();
|
||||
vi.stubGlobal("open", mockWindowOpen);
|
||||
mockGetComplianceIcon.mockImplementation((framework: string) =>
|
||||
framework.toLowerCase().includes("kisa") ? "/kisa.svg" : null,
|
||||
);
|
||||
mockGetCompliancesOverview.mockResolvedValue({
|
||||
data: [
|
||||
{
|
||||
id: "compliance-kisa",
|
||||
type: "compliance-overviews",
|
||||
attributes: {
|
||||
framework: "KISA-ISMS-P",
|
||||
version: "1.0",
|
||||
requirements_passed: 5,
|
||||
requirements_failed: 1,
|
||||
requirements_manual: 0,
|
||||
total_requirements: 6,
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
const findingWithScan = {
|
||||
...mockFinding,
|
||||
scan: {
|
||||
id: "scan-from-finding",
|
||||
name: "Nightly scan",
|
||||
trigger: "manual",
|
||||
state: "completed",
|
||||
uniqueResourceCount: 25,
|
||||
progress: 100,
|
||||
duration: 300,
|
||||
startedAt: "2026-03-30T10:00:00Z",
|
||||
completedAt: "2026-03-30T10:05:00Z",
|
||||
insertedAt: "2026-03-30T09:59:00Z",
|
||||
scheduledAt: null,
|
||||
},
|
||||
};
|
||||
|
||||
render(
|
||||
<ResourceDetailDrawerContent
|
||||
isLoading={false}
|
||||
isNavigating={false}
|
||||
checkMeta={{
|
||||
...mockCheckMeta,
|
||||
complianceFrameworks: ["KISA"],
|
||||
}}
|
||||
currentIndex={0}
|
||||
totalResources={1}
|
||||
currentFinding={findingWithScan}
|
||||
otherFindings={[]}
|
||||
onNavigatePrev={vi.fn()}
|
||||
onNavigateNext={vi.fn()}
|
||||
onMuteComplete={vi.fn()}
|
||||
/>,
|
||||
);
|
||||
|
||||
// When
|
||||
await user.click(
|
||||
screen.getByRole("button", {
|
||||
name: "Open KISA compliance details",
|
||||
}),
|
||||
);
|
||||
|
||||
// Then
|
||||
expect(mockGetCompliancesOverview).toHaveBeenCalledWith({
|
||||
scanId: "scan-from-finding",
|
||||
});
|
||||
expect(mockWindowOpen).toHaveBeenCalledWith(
|
||||
"/compliance/KISA-ISMS-P?complianceId=compliance-kisa&version=1.0&scanId=scan-from-finding&scanData=%7B%22id%22%3A%22scan-from-finding%22%2C%22providerInfo%22%3A%7B%22provider%22%3A%22aws%22%2C%22alias%22%3A%22prod%22%2C%22uid%22%3A%22123456789%22%7D%2C%22attributes%22%3A%7B%22name%22%3A%22Nightly+scan%22%2C%22completed_at%22%3A%222026-03-30T10%3A05%3A00Z%22%7D%7D",
|
||||
"_blank",
|
||||
"noopener,noreferrer",
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("ResourceDetailDrawerContent — other findings mute refresh", () => {
|
||||
it("should update only the muted other-finding row without refreshing the current finding group", async () => {
|
||||
// Given
|
||||
const user = userEvent.setup();
|
||||
const onMuteComplete = vi.fn();
|
||||
const otherFinding: ResourceDrawerFinding = {
|
||||
...mockFinding,
|
||||
id: "finding-2",
|
||||
uid: "uid-2",
|
||||
checkId: "ec2_check",
|
||||
checkTitle: "EC2 Check",
|
||||
updatedAt: "2026-03-30T10:05:00Z",
|
||||
};
|
||||
|
||||
render(
|
||||
<ResourceDetailDrawerContent
|
||||
isLoading={false}
|
||||
isNavigating={false}
|
||||
checkMeta={mockCheckMeta}
|
||||
currentIndex={0}
|
||||
totalResources={1}
|
||||
currentFinding={mockFinding}
|
||||
otherFindings={[otherFinding]}
|
||||
onNavigatePrev={vi.fn()}
|
||||
onNavigateNext={vi.fn()}
|
||||
onMuteComplete={onMuteComplete}
|
||||
/>,
|
||||
);
|
||||
|
||||
// When
|
||||
const row = screen.getByText("EC2 Check").closest("tr");
|
||||
expect(row).not.toBeNull();
|
||||
|
||||
await user.click(
|
||||
within(row as HTMLElement).getByRole("button", { name: "Mute" }),
|
||||
);
|
||||
await user.click(
|
||||
screen.getByRole("button", { name: "Confirm mute finding-2" }),
|
||||
);
|
||||
|
||||
// Then
|
||||
expect(
|
||||
within(row as HTMLElement).getByRole("button", { name: "Muted" }),
|
||||
).toBeDisabled();
|
||||
expect(onMuteComplete).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -12,7 +12,7 @@ import {
|
||||
} from "lucide-react";
|
||||
import Image from "next/image";
|
||||
import Link from "next/link";
|
||||
import { useRouter, useSearchParams } from "next/navigation";
|
||||
import { useSearchParams } from "next/navigation";
|
||||
import { useState } from "react";
|
||||
|
||||
import { getCompliancesOverview } from "@/actions/compliances";
|
||||
@@ -84,7 +84,90 @@ function normalizeComplianceFrameworkName(framework: string): string {
|
||||
return framework
|
||||
.trim()
|
||||
.toLowerCase()
|
||||
.replace(/[\s_]+/g, "-");
|
||||
.replace(/[\s_]+/g, "-")
|
||||
.replace(/-+/g, "-");
|
||||
}
|
||||
|
||||
function stripComplianceVersionSuffix(framework: string): string {
|
||||
return framework.replace(/-\d+(?:\.\d+)*$/g, "");
|
||||
}
|
||||
|
||||
function canonicalComplianceKey(framework: string): string {
|
||||
return stripComplianceVersionSuffix(
|
||||
normalizeComplianceFrameworkName(framework),
|
||||
)
|
||||
.replace(/[^a-z0-9]+/g, "")
|
||||
.trim();
|
||||
}
|
||||
|
||||
function complianceTokens(framework: string): string[] {
|
||||
return stripComplianceVersionSuffix(
|
||||
normalizeComplianceFrameworkName(framework),
|
||||
)
|
||||
.split("-")
|
||||
.map((token) => token.trim())
|
||||
.filter(Boolean)
|
||||
.filter((token) => !/^\d+(?:\.\d+)*$/.test(token));
|
||||
}
|
||||
|
||||
function complianceMatchScore(
|
||||
sourceFramework: string,
|
||||
targetFramework: string,
|
||||
): number {
|
||||
const normalizedSource = normalizeComplianceFrameworkName(sourceFramework);
|
||||
const normalizedTarget = normalizeComplianceFrameworkName(targetFramework);
|
||||
|
||||
if (normalizedSource === normalizedTarget) {
|
||||
return 5;
|
||||
}
|
||||
|
||||
const canonicalSource = canonicalComplianceKey(sourceFramework);
|
||||
const canonicalTarget = canonicalComplianceKey(targetFramework);
|
||||
|
||||
if (canonicalSource === canonicalTarget) {
|
||||
return 4;
|
||||
}
|
||||
|
||||
if (canonicalSource && canonicalTarget) {
|
||||
const sourceTokens = canonicalSource.split("-");
|
||||
const targetTokens = canonicalTarget.split("-");
|
||||
if (
|
||||
sourceTokens.length !== targetTokens.length &&
|
||||
(sourceTokens.every((t) => targetTokens.includes(t)) ||
|
||||
targetTokens.every((t) => sourceTokens.includes(t)))
|
||||
) {
|
||||
return 3;
|
||||
}
|
||||
}
|
||||
|
||||
const sourceTokens = complianceTokens(sourceFramework);
|
||||
const targetTokens = complianceTokens(targetFramework);
|
||||
if (!sourceTokens.length || !targetTokens.length) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
const sourceMatchesTarget = sourceTokens.every((token) =>
|
||||
targetTokens.includes(token),
|
||||
);
|
||||
const targetMatchesSource = targetTokens.every((token) =>
|
||||
sourceTokens.includes(token),
|
||||
);
|
||||
|
||||
if (sourceMatchesTarget || targetMatchesSource) {
|
||||
return 2;
|
||||
}
|
||||
|
||||
if (
|
||||
sourceTokens.some((token) => targetTokens.includes(token)) &&
|
||||
canonicalSource &&
|
||||
canonicalTarget &&
|
||||
(canonicalTarget.includes(canonicalSource) ||
|
||||
canonicalSource.includes(canonicalTarget))
|
||||
) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
function parseSelectedScanIds(scanFilterValue: string | null): string[] {
|
||||
@@ -110,12 +193,13 @@ function resolveComplianceMatch(
|
||||
return null;
|
||||
}
|
||||
|
||||
const normalizedFramework = normalizeComplianceFrameworkName(framework);
|
||||
const match = compliances.find(
|
||||
(compliance) =>
|
||||
normalizeComplianceFrameworkName(compliance.attributes.framework) ===
|
||||
normalizedFramework,
|
||||
);
|
||||
const match = compliances
|
||||
.map((compliance) => ({
|
||||
compliance,
|
||||
score: complianceMatchScore(framework, compliance.attributes.framework),
|
||||
}))
|
||||
.filter(({ score }) => score > 0)
|
||||
.sort((a, b) => b.score - a.score)[0]?.compliance;
|
||||
|
||||
if (!match) {
|
||||
return null;
|
||||
@@ -202,13 +286,15 @@ export function ResourceDetailDrawerContent({
|
||||
onNavigateNext,
|
||||
onMuteComplete,
|
||||
}: ResourceDetailDrawerContentProps) {
|
||||
const router = useRouter();
|
||||
const searchParams = useSearchParams();
|
||||
const [isMuteModalOpen, setIsMuteModalOpen] = useState(false);
|
||||
const [isJiraModalOpen, setIsJiraModalOpen] = useState(false);
|
||||
const [resolvingFramework, setResolvingFramework] = useState<string | null>(
|
||||
null,
|
||||
);
|
||||
const [optimisticallyMutedIds, setOptimisticallyMutedIds] = useState<
|
||||
Set<string>
|
||||
>(new Set());
|
||||
|
||||
// Initial load — no check metadata yet
|
||||
if (!checkMeta && isLoading) {
|
||||
@@ -284,7 +370,7 @@ export function ResourceDetailDrawerContent({
|
||||
return;
|
||||
}
|
||||
|
||||
router.push(
|
||||
window.open(
|
||||
buildComplianceDetailHref({
|
||||
complianceId: complianceMatch.complianceId,
|
||||
framework: complianceMatch.framework,
|
||||
@@ -294,6 +380,8 @@ export function ResourceDetailDrawerContent({
|
||||
currentFinding: f,
|
||||
includeScanData: f?.scan?.id === complianceScanId,
|
||||
}),
|
||||
"_blank",
|
||||
"noopener,noreferrer",
|
||||
);
|
||||
} catch (error) {
|
||||
console.error("Error resolving compliance detail:", error);
|
||||
@@ -428,10 +516,10 @@ export function ResourceDetailDrawerContent({
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Navigation: "Impacted Resource (X of N)" */}
|
||||
{/* Navigation: "Resource (X of N)" */}
|
||||
<div className="flex items-center justify-between">
|
||||
<Badge variant="tag" className="rounded text-sm">
|
||||
Impacted Resource
|
||||
Resource
|
||||
<span className="font-bold">{currentIndex + 1}</span>
|
||||
<span className="font-normal">of</span>
|
||||
<span className="font-bold">{totalResources}</span>
|
||||
@@ -477,7 +565,7 @@ export function ResourceDetailDrawerContent({
|
||||
/>
|
||||
<EntityInfo
|
||||
nameIcon={<Container className="size-4" />}
|
||||
entityAlias={f.resourceGroup}
|
||||
entityAlias={f.resourceName}
|
||||
entityId={f.resourceUid}
|
||||
idLabel="UID"
|
||||
/>
|
||||
@@ -505,7 +593,9 @@ export function ResourceDetailDrawerContent({
|
||||
<InfoField label="Failing for" variant="compact">
|
||||
{getFailingForLabel(f.firstSeenAt) || "-"}
|
||||
</InfoField>
|
||||
<div className="hidden md:block" />
|
||||
<InfoField label="Group" variant="compact">
|
||||
{f.resourceGroup || "-"}
|
||||
</InfoField>
|
||||
|
||||
{/* Row 3: IDs */}
|
||||
<InfoField label="Check ID" variant="compact">
|
||||
@@ -529,6 +619,11 @@ export function ResourceDetailDrawerContent({
|
||||
className="max-w-full text-sm"
|
||||
/>
|
||||
</InfoField>
|
||||
|
||||
{/* Row 4: Resource metadata */}
|
||||
<InfoField label="Resource type" variant="compact">
|
||||
{f.resourceType || "-"}
|
||||
</InfoField>
|
||||
</div>
|
||||
|
||||
{/* Actions button — fixed size, aligned with row 1 */}
|
||||
@@ -757,10 +852,7 @@ export function ResourceDetailDrawerContent({
|
||||
</div>
|
||||
) : (
|
||||
<>
|
||||
<div className="flex items-center justify-between">
|
||||
<h4 className="text-text-neutral-primary text-sm font-medium">
|
||||
Failed Findings For This Resource
|
||||
</h4>
|
||||
<div className="flex items-center justify-end">
|
||||
<span className="text-text-neutral-tertiary text-sm">
|
||||
{otherFindings.length} Total Entries
|
||||
</span>
|
||||
@@ -796,7 +888,18 @@ export function ResourceDetailDrawerContent({
|
||||
<TableBody>
|
||||
{otherFindings.length > 0 ? (
|
||||
otherFindings.map((finding) => (
|
||||
<OtherFindingRow key={finding.id} finding={finding} />
|
||||
<OtherFindingRow
|
||||
key={finding.id}
|
||||
finding={finding}
|
||||
isOptimisticallyMuted={optimisticallyMutedIds.has(
|
||||
finding.id,
|
||||
)}
|
||||
onMuted={() =>
|
||||
setOptimisticallyMutedIds((prev) =>
|
||||
new Set(prev).add(finding.id),
|
||||
)
|
||||
}
|
||||
/>
|
||||
))
|
||||
) : (
|
||||
<TableRow>
|
||||
@@ -908,19 +1011,32 @@ export function ResourceDetailDrawerContent({
|
||||
);
|
||||
}
|
||||
|
||||
function OtherFindingRow({ finding }: { finding: ResourceDrawerFinding }) {
|
||||
function OtherFindingRow({
|
||||
finding,
|
||||
isOptimisticallyMuted,
|
||||
onMuted,
|
||||
}: {
|
||||
finding: ResourceDrawerFinding;
|
||||
isOptimisticallyMuted: boolean;
|
||||
onMuted: () => void;
|
||||
}) {
|
||||
const [isMuteModalOpen, setIsMuteModalOpen] = useState(false);
|
||||
const [isJiraModalOpen, setIsJiraModalOpen] = useState(false);
|
||||
const isMuted = finding.isMuted || isOptimisticallyMuted;
|
||||
|
||||
const findingUrl = `/findings?filter%5Bcheck_id__in%5D=${encodeURIComponent(finding.checkId)}&filter%5Bmuted%5D=include`;
|
||||
|
||||
return (
|
||||
<>
|
||||
{!finding.isMuted && (
|
||||
{!isMuted && (
|
||||
<MuteFindingsModal
|
||||
isOpen={isMuteModalOpen}
|
||||
onOpenChange={setIsMuteModalOpen}
|
||||
findingIds={[finding.id]}
|
||||
onComplete={() => {
|
||||
setIsMuteModalOpen(false);
|
||||
onMuted();
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
<SendToJiraModal
|
||||
@@ -934,7 +1050,7 @@ function OtherFindingRow({ finding }: { finding: ResourceDrawerFinding }) {
|
||||
onClick={() => window.open(findingUrl, "_blank", "noopener,noreferrer")}
|
||||
>
|
||||
<TableCell className="w-10">
|
||||
<NotificationIndicator isMuted={finding.isMuted} />
|
||||
<NotificationIndicator isMuted={isMuted} />
|
||||
</TableCell>
|
||||
<TableCell>
|
||||
<StatusFindingBadge status={finding.status as FindingStatus} />
|
||||
@@ -955,14 +1071,14 @@ function OtherFindingRow({ finding }: { finding: ResourceDrawerFinding }) {
|
||||
<ActionDropdown ariaLabel="Finding actions">
|
||||
<ActionDropdownItem
|
||||
icon={
|
||||
finding.isMuted ? (
|
||||
isMuted ? (
|
||||
<VolumeOff className="size-5" />
|
||||
) : (
|
||||
<VolumeX className="size-5" />
|
||||
)
|
||||
}
|
||||
label={finding.isMuted ? "Muted" : "Mute"}
|
||||
disabled={finding.isMuted}
|
||||
label={isMuted ? "Muted" : "Mute"}
|
||||
disabled={isMuted}
|
||||
onSelect={() => setIsMuteModalOpen(true)}
|
||||
/>
|
||||
<ActionDropdownItem
|
||||
|
||||
@@ -0,0 +1,26 @@
|
||||
import { render, screen } from "@testing-library/react";
|
||||
import { describe, expect, it, vi } from "vitest";
|
||||
|
||||
vi.mock("@/components/shadcn/skeleton/skeleton", () => ({
|
||||
Skeleton: ({ className }: { className?: string }) => (
|
||||
<div data-testid="skeleton-block" data-class={className ?? ""} />
|
||||
),
|
||||
}));
|
||||
|
||||
import { ResourceDetailSkeleton } from "./resource-detail-skeleton";
|
||||
|
||||
describe("ResourceDetailSkeleton", () => {
|
||||
it("should include placeholders for group and resource type fields", () => {
|
||||
render(<ResourceDetailSkeleton />);
|
||||
|
||||
const blocks = screen.getAllByTestId("skeleton-block");
|
||||
const classes = blocks.map(
|
||||
(block) => block.getAttribute("data-class") ?? "",
|
||||
);
|
||||
|
||||
expect(classes).toContain("h-3.5 w-10 rounded");
|
||||
expect(classes).toContain("h-5 w-18 rounded");
|
||||
expect(classes).toContain("h-3.5 w-20 rounded");
|
||||
expect(classes).toContain("h-5 w-28 rounded");
|
||||
});
|
||||
});
|
||||
@@ -2,8 +2,8 @@ import { Skeleton } from "@/components/shadcn/skeleton/skeleton";
|
||||
|
||||
/**
|
||||
* Skeleton placeholder for the resource info grid in the detail drawer.
|
||||
* Mirrors the 4-column layout: EntityInfo × 2, InfoField × 2 per row,
|
||||
* plus the actions button.
|
||||
* Mirrors the drawer layout so added metadata fields don't leave visual gaps
|
||||
* while the next resource is loading.
|
||||
*/
|
||||
export function ResourceDetailSkeleton() {
|
||||
return (
|
||||
@@ -15,16 +15,19 @@ export function ResourceDetailSkeleton() {
|
||||
<InfoFieldSkeleton labelWidth="w-12" valueWidth="w-20" />
|
||||
<InfoFieldSkeleton labelWidth="w-12" valueWidth="w-24" />
|
||||
|
||||
{/* Row 2: Last detected, First seen, Failing for */}
|
||||
{/* Row 2: Last detected, First seen, Failing for, Group */}
|
||||
<InfoFieldSkeleton labelWidth="w-20" valueWidth="w-32" />
|
||||
<InfoFieldSkeleton labelWidth="w-16" valueWidth="w-32" />
|
||||
<InfoFieldSkeleton labelWidth="w-16" valueWidth="w-16" />
|
||||
<div className="hidden md:block" />
|
||||
<InfoFieldSkeleton labelWidth="w-10" valueWidth="w-18" />
|
||||
|
||||
{/* Row 3: Check ID, Finding ID, Finding UID */}
|
||||
<InfoFieldSkeleton labelWidth="w-14" valueWidth="w-36" />
|
||||
<InfoFieldSkeleton labelWidth="w-16" valueWidth="w-36" />
|
||||
<InfoFieldSkeleton labelWidth="w-20" valueWidth="w-36" />
|
||||
|
||||
{/* Row 4: Resource type */}
|
||||
<InfoFieldSkeleton labelWidth="w-20" valueWidth="w-28" />
|
||||
</div>
|
||||
|
||||
{/* Actions button */}
|
||||
|
||||
@@ -26,6 +26,7 @@ vi.mock("next/navigation", () => ({
|
||||
// Import after mocks
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
import type { ResourceDrawerFinding } from "@/actions/findings";
|
||||
import type { FindingResourceRow } from "@/types";
|
||||
|
||||
import { useResourceDetailDrawer } from "./use-resource-detail-drawer";
|
||||
@@ -60,6 +61,46 @@ function makeResource(
|
||||
} as FindingResourceRow;
|
||||
}
|
||||
|
||||
function makeDrawerFinding(
|
||||
overrides?: Partial<ResourceDrawerFinding>,
|
||||
): ResourceDrawerFinding {
|
||||
return {
|
||||
id: "finding-1",
|
||||
uid: "uid-1",
|
||||
checkId: "s3_check",
|
||||
checkTitle: "S3 Check",
|
||||
status: "FAIL",
|
||||
severity: "high",
|
||||
delta: null,
|
||||
isMuted: false,
|
||||
mutedReason: null,
|
||||
firstSeenAt: null,
|
||||
updatedAt: null,
|
||||
resourceId: "resource-1",
|
||||
resourceUid: "arn:aws:s3:::my-bucket",
|
||||
resourceName: "my-bucket",
|
||||
resourceService: "s3",
|
||||
resourceRegion: "us-east-1",
|
||||
resourceType: "bucket",
|
||||
resourceGroup: "default",
|
||||
providerType: "aws",
|
||||
providerAlias: "prod",
|
||||
providerUid: "123",
|
||||
risk: "high",
|
||||
description: "desc",
|
||||
statusExtended: "status",
|
||||
complianceFrameworks: [],
|
||||
categories: [],
|
||||
remediation: {
|
||||
recommendation: { text: "", url: "" },
|
||||
code: { cli: "", other: "", nativeiac: "", terraform: "" },
|
||||
},
|
||||
additionalUrls: [],
|
||||
scan: null,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Fix 2: AbortController cleanup on unmount
|
||||
// ---------------------------------------------------------------------------
|
||||
@@ -128,3 +169,212 @@ describe("useResourceDetailDrawer — unmount cleanup", () => {
|
||||
expect(abortSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe("useResourceDetailDrawer — other findings filtering", () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
it("should exclude the current finding from otherFindings and preserve API order", async () => {
|
||||
const resources = [makeResource()];
|
||||
|
||||
getLatestFindingsByResourceUidMock.mockResolvedValue({ data: [] });
|
||||
adaptFindingsByResourceResponseMock.mockReturnValue([
|
||||
makeDrawerFinding({
|
||||
id: "current",
|
||||
checkId: "s3_check",
|
||||
checkTitle: "Current",
|
||||
status: "FAIL",
|
||||
severity: "critical",
|
||||
}),
|
||||
makeDrawerFinding({
|
||||
id: "other-1",
|
||||
checkId: "check-other-1",
|
||||
checkTitle: "Other 1",
|
||||
status: "PASS",
|
||||
severity: "critical",
|
||||
}),
|
||||
makeDrawerFinding({
|
||||
id: "other-2",
|
||||
checkId: "check-other-2",
|
||||
checkTitle: "Other 2",
|
||||
status: "FAIL",
|
||||
severity: "medium",
|
||||
}),
|
||||
]);
|
||||
|
||||
const { result } = renderHook(() =>
|
||||
useResourceDetailDrawer({
|
||||
resources,
|
||||
checkId: "s3_check",
|
||||
}),
|
||||
);
|
||||
|
||||
await act(async () => {
|
||||
result.current.openDrawer(0);
|
||||
await Promise.resolve();
|
||||
});
|
||||
|
||||
expect(result.current.otherFindings.map((finding) => finding.id)).toEqual([
|
||||
"other-1",
|
||||
"other-2",
|
||||
]);
|
||||
});
|
||||
|
||||
it("should keep isNavigating true for a cached resource long enough to render skeletons", async () => {
|
||||
vi.useFakeTimers();
|
||||
|
||||
const resources = [
|
||||
makeResource({
|
||||
id: "row-1",
|
||||
findingId: "finding-1",
|
||||
resourceUid: "arn:aws:s3:::first-bucket",
|
||||
resourceName: "first-bucket",
|
||||
}),
|
||||
makeResource({
|
||||
id: "row-2",
|
||||
findingId: "finding-2",
|
||||
resourceUid: "arn:aws:s3:::second-bucket",
|
||||
resourceName: "second-bucket",
|
||||
}),
|
||||
];
|
||||
|
||||
getLatestFindingsByResourceUidMock.mockImplementation(
|
||||
async ({ resourceUid }: { resourceUid: string }) => ({
|
||||
data: [resourceUid],
|
||||
}),
|
||||
);
|
||||
adaptFindingsByResourceResponseMock.mockImplementation(
|
||||
(response: { data: string[] }) => [
|
||||
makeDrawerFinding({
|
||||
id: response.data[0].includes("first") ? "finding-1" : "finding-2",
|
||||
resourceUid: response.data[0],
|
||||
resourceName: response.data[0].includes("first")
|
||||
? "first-bucket"
|
||||
: "second-bucket",
|
||||
}),
|
||||
],
|
||||
);
|
||||
|
||||
const { result } = renderHook(() =>
|
||||
useResourceDetailDrawer({
|
||||
resources,
|
||||
checkId: "s3_check",
|
||||
}),
|
||||
);
|
||||
|
||||
await act(async () => {
|
||||
result.current.openDrawer(0);
|
||||
await Promise.resolve();
|
||||
});
|
||||
|
||||
await act(async () => {
|
||||
result.current.navigateNext();
|
||||
await Promise.resolve();
|
||||
});
|
||||
|
||||
expect(result.current.currentIndex).toBe(1);
|
||||
expect(result.current.currentFinding?.id).toBe("finding-2");
|
||||
|
||||
act(() => {
|
||||
result.current.navigatePrev();
|
||||
});
|
||||
|
||||
expect(result.current.currentIndex).toBe(0);
|
||||
expect(result.current.isNavigating).toBe(true);
|
||||
|
||||
await act(async () => {
|
||||
vi.runAllTimers();
|
||||
await Promise.resolve();
|
||||
});
|
||||
|
||||
expect(result.current.isNavigating).toBe(false);
|
||||
expect(result.current.currentFinding?.id).toBe("finding-1");
|
||||
|
||||
vi.useRealTimers();
|
||||
});
|
||||
|
||||
it("should keep isNavigating true for a fast uncached navigation long enough to avoid flicker", async () => {
|
||||
vi.useFakeTimers();
|
||||
vi.setSystemTime(new Date("2026-04-08T15:00:00.000Z"));
|
||||
|
||||
const resources = [
|
||||
makeResource({
|
||||
id: "row-1",
|
||||
findingId: "finding-1",
|
||||
resourceUid: "arn:aws:s3:::first-bucket",
|
||||
resourceName: "first-bucket",
|
||||
}),
|
||||
makeResource({
|
||||
id: "row-2",
|
||||
findingId: "finding-2",
|
||||
resourceUid: "arn:aws:s3:::second-bucket",
|
||||
resourceName: "second-bucket",
|
||||
}),
|
||||
];
|
||||
|
||||
getLatestFindingsByResourceUidMock.mockImplementation(
|
||||
async ({ resourceUid }: { resourceUid: string }) => ({
|
||||
data: [resourceUid],
|
||||
}),
|
||||
);
|
||||
adaptFindingsByResourceResponseMock.mockImplementation(
|
||||
(response: { data: string[] }) => [
|
||||
makeDrawerFinding({
|
||||
id: response.data[0].includes("first") ? "finding-1" : "finding-2",
|
||||
resourceUid: response.data[0],
|
||||
resourceName: response.data[0].includes("first")
|
||||
? "first-bucket"
|
||||
: "second-bucket",
|
||||
}),
|
||||
],
|
||||
);
|
||||
|
||||
const { result } = renderHook(() =>
|
||||
useResourceDetailDrawer({
|
||||
resources,
|
||||
checkId: "s3_check",
|
||||
}),
|
||||
);
|
||||
|
||||
await act(async () => {
|
||||
result.current.openDrawer(0);
|
||||
await Promise.resolve();
|
||||
});
|
||||
|
||||
act(() => {
|
||||
result.current.navigateNext();
|
||||
});
|
||||
|
||||
expect(result.current.currentIndex).toBe(1);
|
||||
expect(result.current.isNavigating).toBe(true);
|
||||
|
||||
await act(async () => {
|
||||
await Promise.resolve();
|
||||
});
|
||||
|
||||
expect(result.current.currentFinding?.id).toBe("finding-2");
|
||||
expect(result.current.isNavigating).toBe(true);
|
||||
|
||||
await act(async () => {
|
||||
vi.advanceTimersByTime(119);
|
||||
await Promise.resolve();
|
||||
});
|
||||
|
||||
expect(result.current.isNavigating).toBe(true);
|
||||
|
||||
await act(async () => {
|
||||
vi.advanceTimersByTime(1);
|
||||
await Promise.resolve();
|
||||
});
|
||||
|
||||
await act(async () => {
|
||||
vi.runOnlyPendingTimers();
|
||||
await Promise.resolve();
|
||||
});
|
||||
|
||||
expect(result.current.isNavigating).toBe(false);
|
||||
|
||||
vi.useRealTimers();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -9,6 +9,10 @@ import {
|
||||
} from "@/actions/findings";
|
||||
import { FindingResourceRow } from "@/types";
|
||||
|
||||
// Keep fast carousel navigations in a loading state for one short beat so
|
||||
// React doesn't batch away the skeleton frame when switching resources.
|
||||
const MIN_NAVIGATION_SKELETON_MS = 300;
|
||||
|
||||
/**
|
||||
* Check-level metadata that is identical across all resources for a given check.
|
||||
* Extracted once on first successful fetch and kept stable during navigation.
|
||||
@@ -83,18 +87,65 @@ export function useResourceDetailDrawer({
|
||||
const cacheRef = useRef<Map<string, ResourceDrawerFinding[]>>(new Map());
|
||||
const checkMetaRef = useRef<CheckMeta | null>(null);
|
||||
const fetchControllerRef = useRef<AbortController | null>(null);
|
||||
const navigationTimeoutRef = useRef<ReturnType<typeof setTimeout> | null>(
|
||||
null,
|
||||
);
|
||||
const navigationStartedAtRef = useRef<number | null>(null);
|
||||
|
||||
const clearNavigationTimeout = () => {
|
||||
if (navigationTimeoutRef.current !== null) {
|
||||
clearTimeout(navigationTimeoutRef.current);
|
||||
navigationTimeoutRef.current = null;
|
||||
}
|
||||
};
|
||||
|
||||
const finishNavigation = () => {
|
||||
clearNavigationTimeout();
|
||||
setIsLoading(false);
|
||||
|
||||
const navigationStartedAt = navigationStartedAtRef.current;
|
||||
if (navigationStartedAt === null) {
|
||||
navigationStartedAtRef.current = null;
|
||||
setIsNavigating(false);
|
||||
return;
|
||||
}
|
||||
|
||||
const elapsed = Date.now() - navigationStartedAt;
|
||||
const remaining = Math.max(0, MIN_NAVIGATION_SKELETON_MS - elapsed);
|
||||
|
||||
if (remaining === 0) {
|
||||
navigationStartedAtRef.current = null;
|
||||
setIsNavigating(false);
|
||||
return;
|
||||
}
|
||||
|
||||
navigationTimeoutRef.current = setTimeout(() => {
|
||||
setIsNavigating(false);
|
||||
navigationStartedAtRef.current = null;
|
||||
navigationTimeoutRef.current = null;
|
||||
}, remaining);
|
||||
};
|
||||
|
||||
const startNavigation = () => {
|
||||
clearNavigationTimeout();
|
||||
navigationStartedAtRef.current = Date.now();
|
||||
setIsNavigating(true);
|
||||
};
|
||||
|
||||
// Abort any in-flight request on unmount to prevent state updates
|
||||
// on an already-unmounted component.
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
fetchControllerRef.current?.abort();
|
||||
clearNavigationTimeout();
|
||||
navigationStartedAtRef.current = null;
|
||||
};
|
||||
}, []);
|
||||
|
||||
const fetchFindings = async (resourceUid: string) => {
|
||||
// Abort any in-flight request to prevent stale data from out-of-order responses
|
||||
fetchControllerRef.current?.abort();
|
||||
clearNavigationTimeout();
|
||||
const controller = new AbortController();
|
||||
fetchControllerRef.current = controller;
|
||||
|
||||
@@ -106,8 +157,7 @@ export function useResourceDetailDrawer({
|
||||
if (main) checkMetaRef.current = extractCheckMeta(main);
|
||||
}
|
||||
setFindings(cached);
|
||||
setIsLoading(false);
|
||||
setIsNavigating(false);
|
||||
finishNavigation();
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -135,8 +185,7 @@ export function useResourceDetailDrawer({
|
||||
}
|
||||
} finally {
|
||||
if (!controller.signal.aborted) {
|
||||
setIsLoading(false);
|
||||
setIsNavigating(false);
|
||||
finishNavigation();
|
||||
}
|
||||
}
|
||||
};
|
||||
@@ -145,8 +194,11 @@ export function useResourceDetailDrawer({
|
||||
const resource = resources[index];
|
||||
if (!resource) return;
|
||||
|
||||
clearNavigationTimeout();
|
||||
navigationStartedAtRef.current = null;
|
||||
setCurrentIndex(index);
|
||||
setIsOpen(true);
|
||||
setIsNavigating(false);
|
||||
setFindings([]);
|
||||
fetchFindings(resource.resourceUid);
|
||||
};
|
||||
@@ -159,7 +211,7 @@ export function useResourceDetailDrawer({
|
||||
const resource = resources[currentIndex];
|
||||
if (!resource) return;
|
||||
cacheRef.current.delete(resource.resourceUid);
|
||||
setIsNavigating(true);
|
||||
startNavigation();
|
||||
fetchFindings(resource.resourceUid);
|
||||
};
|
||||
|
||||
@@ -168,7 +220,7 @@ export function useResourceDetailDrawer({
|
||||
if (!resource) return;
|
||||
|
||||
setCurrentIndex(index);
|
||||
setIsNavigating(true);
|
||||
startNavigation();
|
||||
fetchFindings(resource.resourceUid);
|
||||
};
|
||||
|
||||
|
||||
@@ -53,7 +53,7 @@ export const InvitationDetails = ({ attributes }: InvitationDetailsProps) => {
|
||||
? window.location.origin
|
||||
: "http://localhost:3000";
|
||||
|
||||
const invitationLink = `${baseUrl}/sign-up?invitation_token=${attributes.token}`;
|
||||
const invitationLink = `${baseUrl}/invitation/accept?invitation_token=${attributes.token}`;
|
||||
|
||||
return (
|
||||
<div className="flex flex-col gap-x-4 gap-y-8">
|
||||
|
||||
@@ -3,20 +3,23 @@
|
||||
import { X } from "lucide-react";
|
||||
import { usePathname, useRouter, useSearchParams } from "next/navigation";
|
||||
|
||||
import { ScanSelector } from "@/components/compliance/compliance-header";
|
||||
import { filterScans } from "@/components/filters/data-filters";
|
||||
import { FilterControls } from "@/components/filters/filter-controls";
|
||||
import { Badge } from "@/components/shadcn/badge/badge";
|
||||
import { useRelatedFilters } from "@/hooks";
|
||||
import { FilterEntity, FilterType } from "@/types";
|
||||
import { ExpandedScanData, FilterEntity, FilterType } from "@/types";
|
||||
|
||||
interface ScansFiltersProps {
|
||||
providerUIDs: string[];
|
||||
providerDetails: { [uid: string]: FilterEntity }[];
|
||||
completedScans?: ExpandedScanData[];
|
||||
}
|
||||
|
||||
export const ScansFilters = ({
|
||||
providerUIDs,
|
||||
providerDetails,
|
||||
completedScans = [],
|
||||
}: ScansFiltersProps) => {
|
||||
const router = useRouter();
|
||||
const pathname = usePathname();
|
||||
@@ -36,24 +39,50 @@ export const ScansFilters = ({
|
||||
router.push(`${pathname}?${params.toString()}`);
|
||||
};
|
||||
|
||||
const scanIdChip = idFilter ? (
|
||||
<div className="flex items-center">
|
||||
<Badge
|
||||
variant="tag"
|
||||
className="max-w-[300px] shrink-0 cursor-default gap-1 truncate"
|
||||
>
|
||||
<span className="text-text-neutral-secondary mr-1 text-xs">Scan:</span>
|
||||
<span className="truncate">{idFilter}</span>
|
||||
const handleScanChange = (selectedScanId: string) => {
|
||||
const params = new URLSearchParams(searchParams.toString());
|
||||
params.set("filter[id__in]", selectedScanId);
|
||||
router.push(`${pathname}?${params.toString()}`);
|
||||
};
|
||||
|
||||
const scanIdElement = idFilter ? (
|
||||
completedScans.length > 0 ? (
|
||||
<div className="flex items-center gap-2">
|
||||
<ScanSelector
|
||||
scans={completedScans}
|
||||
selectedScanId={idFilter}
|
||||
onSelectionChange={handleScanChange}
|
||||
/>
|
||||
<button
|
||||
type="button"
|
||||
aria-label="Clear scan filter"
|
||||
className="hover:text-text-neutral-primary ml-0.5 shrink-0"
|
||||
className="text-text-neutral-secondary hover:text-text-neutral-primary shrink-0"
|
||||
onClick={handleDismissIdFilter}
|
||||
>
|
||||
<X className="size-3" />
|
||||
<X className="size-4" />
|
||||
</button>
|
||||
</Badge>
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
<div className="flex items-center">
|
||||
<Badge
|
||||
variant="tag"
|
||||
className="max-w-[300px] shrink-0 cursor-default gap-1 truncate"
|
||||
>
|
||||
<span className="text-text-neutral-secondary mr-1 text-xs">
|
||||
Scan:
|
||||
</span>
|
||||
<span className="truncate">{idFilter}</span>
|
||||
<button
|
||||
type="button"
|
||||
aria-label="Clear scan filter"
|
||||
className="hover:text-text-neutral-primary ml-0.5 shrink-0"
|
||||
onClick={handleDismissIdFilter}
|
||||
>
|
||||
<X className="size-3" />
|
||||
</button>
|
||||
</Badge>
|
||||
</div>
|
||||
)
|
||||
) : null;
|
||||
|
||||
return (
|
||||
@@ -68,7 +97,7 @@ export const ScansFilters = ({
|
||||
index: 1,
|
||||
},
|
||||
]}
|
||||
prependElement={scanIdChip}
|
||||
prependElement={scanIdElement}
|
||||
/>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -20,7 +20,7 @@ const cardVariants = cva("flex flex-col gap-6 rounded-xl border", {
|
||||
inner:
|
||||
"rounded-[12px] backdrop-blur-[46px] border-border-neutral-tertiary bg-bg-neutral-tertiary",
|
||||
danger:
|
||||
"gap-1 rounded-[12px] border-border-error-primary bg-bg-fail-secondary",
|
||||
"gap-1 rounded-[12px] border-[rgba(67,34,50,0.5)] bg-[rgba(67,34,50,0.2)] dark:border-[rgba(67,34,50,0.7)] dark:bg-[rgba(67,34,50,0.3)]",
|
||||
},
|
||||
padding: {
|
||||
default: "",
|
||||
|
||||
@@ -1,5 +1,10 @@
|
||||
import { format, parseISO } from "date-fns";
|
||||
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipContent,
|
||||
TooltipTrigger,
|
||||
} from "@/components/shadcn/tooltip";
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
interface DateWithTimeProps {
|
||||
@@ -33,25 +38,52 @@ export const DateWithTime = ({
|
||||
?.substring(0, 3)
|
||||
.toUpperCase() || "";
|
||||
|
||||
return (
|
||||
const fullText = showTime
|
||||
? `${formattedDate} ${formattedTime} ${timezone}`
|
||||
: formattedDate;
|
||||
|
||||
const content = (
|
||||
<div
|
||||
className={cn(
|
||||
"gap-1",
|
||||
inline
|
||||
? "inline-flex flex-row flex-wrap items-center"
|
||||
? "inline-flex flex-row items-center overflow-hidden"
|
||||
: "flex flex-col",
|
||||
)}
|
||||
>
|
||||
<span className="text-text-neutral-primary text-sm whitespace-nowrap">
|
||||
<span
|
||||
className={cn(
|
||||
"text-text-neutral-primary text-sm whitespace-nowrap",
|
||||
inline && "truncate",
|
||||
)}
|
||||
>
|
||||
{formattedDate}
|
||||
</span>
|
||||
{showTime && (
|
||||
<span className="text-text-neutral-tertiary text-xs font-medium whitespace-nowrap">
|
||||
<span
|
||||
className={cn(
|
||||
"text-text-neutral-tertiary text-xs font-medium whitespace-nowrap",
|
||||
inline && "truncate",
|
||||
)}
|
||||
>
|
||||
{formattedTime} {timezone}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
|
||||
if (inline) {
|
||||
return (
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<div className="min-w-0 overflow-hidden">{content}</div>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>{fullText}</TooltipContent>
|
||||
</Tooltip>
|
||||
);
|
||||
}
|
||||
|
||||
return content;
|
||||
} catch {
|
||||
return <span>-</span>;
|
||||
}
|
||||
|
||||
@@ -163,6 +163,38 @@ describe("useInfiniteResources", () => {
|
||||
findingGroupActionsMock.getLatestFindingGroupResources,
|
||||
).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("should forward the active finding-group filters to the resources endpoint", async () => {
|
||||
// Given
|
||||
const apiResponse = makeApiResponse([], { pages: 1 });
|
||||
const filters = {
|
||||
"filter[status__in]": "PASS",
|
||||
"filter[severity__in]": "medium",
|
||||
"filter[provider_type__in]": "aws",
|
||||
};
|
||||
findingGroupActionsMock.getLatestFindingGroupResources.mockResolvedValue(
|
||||
apiResponse,
|
||||
);
|
||||
findingGroupActionsMock.adaptFindingGroupResourcesResponse.mockReturnValue(
|
||||
[],
|
||||
);
|
||||
|
||||
// When
|
||||
renderHook(() => useInfiniteResources(defaultOptions({ filters })));
|
||||
await flushAsync();
|
||||
|
||||
// Then
|
||||
expect(
|
||||
findingGroupActionsMock.getLatestFindingGroupResources,
|
||||
).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
checkId: "check_1",
|
||||
page: 1,
|
||||
pageSize: 10,
|
||||
filters,
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("when all resources fit in one page", () => {
|
||||
|
||||
@@ -32,6 +32,8 @@ interface UseInfiniteResourcesReturn {
|
||||
refresh: () => void;
|
||||
/** Imperatively load the next page (e.g. from drawer navigation). */
|
||||
loadMore: () => void;
|
||||
/** Total number of resources matching current filters (from API pagination). */
|
||||
totalCount: number | null;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -60,6 +62,7 @@ export function useInfiniteResources({
|
||||
const currentCheckIdRef = useRef(checkId);
|
||||
const controllerRef = useRef<AbortController | null>(null);
|
||||
const observerRef = useRef<IntersectionObserver | null>(null);
|
||||
const totalCountRef = useRef<number | null>(null);
|
||||
|
||||
// Store latest values in refs so the fetch function always reads current values
|
||||
// without being recreated on every render
|
||||
@@ -70,6 +73,7 @@ export function useInfiniteResources({
|
||||
const onSetLoadingRef = useRef(onSetLoading);
|
||||
|
||||
// Keep refs in sync with latest props
|
||||
currentCheckIdRef.current = checkId;
|
||||
hasDateOrScanRef.current = hasDateOrScanFilter;
|
||||
filtersRef.current = filters;
|
||||
onSetResourcesRef.current = onSetResources;
|
||||
@@ -110,6 +114,7 @@ export function useInfiniteResources({
|
||||
);
|
||||
const totalPages = response?.meta?.pagination?.pages ?? 1;
|
||||
const hasMore = page < totalPages;
|
||||
totalCountRef.current = response?.meta?.pagination?.count ?? null;
|
||||
|
||||
// Commit the page number only after a successful (non-aborted) fetch.
|
||||
// This prevents a premature pageRef increment from loadNextPage being
|
||||
@@ -209,5 +214,10 @@ export function useInfiniteResources({
|
||||
fetchPage(1, false, currentCheckIdRef.current, controller.signal);
|
||||
}
|
||||
|
||||
return { sentinelRef, refresh, loadMore: loadNextPage };
|
||||
return {
|
||||
sentinelRef,
|
||||
refresh,
|
||||
loadMore: loadNextPage,
|
||||
totalCount: totalCountRef.current,
|
||||
};
|
||||
}
|
||||
|
||||
112
ui/lib/findings-scan-filters.test.ts
Normal file
112
ui/lib/findings-scan-filters.test.ts
Normal file
@@ -0,0 +1,112 @@
|
||||
import { describe, expect, it, vi } from "vitest";
|
||||
|
||||
import {
|
||||
buildFindingScanDateFilters,
|
||||
resolveFindingScanDateFilters,
|
||||
} from "./findings-scan-filters";
|
||||
|
||||
describe("buildFindingScanDateFilters", () => {
|
||||
it("uses an exact inserted_at filter when all selected scans belong to the same day", () => {
|
||||
expect(
|
||||
buildFindingScanDateFilters([
|
||||
"2026-04-07T10:00:00Z",
|
||||
"2026-04-07T18:30:00Z",
|
||||
]),
|
||||
).toEqual({
|
||||
"filter[inserted_at]": "2026-04-07",
|
||||
});
|
||||
});
|
||||
|
||||
it("ignores whitespace-only date strings", () => {
|
||||
expect(buildFindingScanDateFilters([" ", "2026-04-07T10:00:00Z"])).toEqual(
|
||||
{
|
||||
"filter[inserted_at]": "2026-04-07",
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
it("uses a date range when selected scans span multiple days", () => {
|
||||
expect(
|
||||
buildFindingScanDateFilters([
|
||||
"2026-04-03T10:00:00Z",
|
||||
"2026-04-07T18:30:00Z",
|
||||
"2026-04-05T12:00:00Z",
|
||||
]),
|
||||
).toEqual({
|
||||
"filter[inserted_at__gte]": "2026-04-03",
|
||||
"filter[inserted_at__lte]": "2026-04-07",
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("resolveFindingScanDateFilters", () => {
|
||||
it("adds the required inserted_at filter for a selected scan when the URL only contains scan__in", async () => {
|
||||
const result = await resolveFindingScanDateFilters({
|
||||
filters: {
|
||||
"filter[muted]": "false",
|
||||
"filter[scan__in]": "scan-1",
|
||||
},
|
||||
scans: [
|
||||
{
|
||||
id: "scan-1",
|
||||
attributes: {
|
||||
inserted_at: "2026-04-07T10:00:00Z",
|
||||
},
|
||||
},
|
||||
],
|
||||
loadScan: vi.fn(),
|
||||
});
|
||||
|
||||
expect(result).toEqual({
|
||||
"filter[muted]": "false",
|
||||
"filter[scan__in]": "scan-1",
|
||||
"filter[inserted_at]": "2026-04-07",
|
||||
});
|
||||
});
|
||||
|
||||
it("fetches missing scan details when the selected scan is not present in the prefetched scans list", async () => {
|
||||
const loadScan = vi.fn().mockResolvedValue({
|
||||
id: "scan-2",
|
||||
attributes: {
|
||||
inserted_at: "2026-04-05T08:00:00Z",
|
||||
},
|
||||
});
|
||||
|
||||
const result = await resolveFindingScanDateFilters({
|
||||
filters: {
|
||||
"filter[scan__in]": "scan-2",
|
||||
},
|
||||
scans: [],
|
||||
loadScan,
|
||||
});
|
||||
|
||||
expect(loadScan).toHaveBeenCalledWith("scan-2");
|
||||
expect(result).toEqual({
|
||||
"filter[scan__in]": "scan-2",
|
||||
"filter[inserted_at]": "2026-04-05",
|
||||
});
|
||||
});
|
||||
|
||||
it("does not override an explicit inserted_at filter already chosen in the frontend", async () => {
|
||||
const result = await resolveFindingScanDateFilters({
|
||||
filters: {
|
||||
"filter[scan__in]": "scan-1",
|
||||
"filter[inserted_at__gte]": "2026-04-01",
|
||||
},
|
||||
scans: [
|
||||
{
|
||||
id: "scan-1",
|
||||
attributes: {
|
||||
inserted_at: "2026-04-07T10:00:00Z",
|
||||
},
|
||||
},
|
||||
],
|
||||
loadScan: vi.fn(),
|
||||
});
|
||||
|
||||
expect(result).toEqual({
|
||||
"filter[scan__in]": "scan-1",
|
||||
"filter[inserted_at__gte]": "2026-04-01",
|
||||
});
|
||||
});
|
||||
});
|
||||
99
ui/lib/findings-scan-filters.ts
Normal file
99
ui/lib/findings-scan-filters.ts
Normal file
@@ -0,0 +1,99 @@
|
||||
interface ScanDateSource {
|
||||
id: string;
|
||||
attributes?: {
|
||||
inserted_at?: string;
|
||||
};
|
||||
}
|
||||
|
||||
interface ResolveFindingScanDateFiltersOptions {
|
||||
filters: Record<string, string>;
|
||||
scans: ScanDateSource[];
|
||||
loadScan: (scanId: string) => Promise<ScanDateSource | null | undefined>;
|
||||
}
|
||||
|
||||
const INSERTED_AT_FILTER_KEYS = [
|
||||
"filter[inserted_at]",
|
||||
"filter[inserted_at__date]",
|
||||
"filter[inserted_at__gte]",
|
||||
"filter[inserted_at__lte]",
|
||||
] as const;
|
||||
|
||||
function getScanFilterIds(filters: Record<string, string>): string[] {
|
||||
const scanIds = filters["filter[scan__in]"] || filters["filter[scan]"] || "";
|
||||
return Array.from(new Set(scanIds.split(",").filter(Boolean)));
|
||||
}
|
||||
|
||||
function formatScanDate(dateTime?: string): string | null {
|
||||
if (!dateTime) return null;
|
||||
const [date] = dateTime.split("T");
|
||||
return date?.trim() || null;
|
||||
}
|
||||
|
||||
function hasInsertedAtFilter(filters: Record<string, string>): boolean {
|
||||
return INSERTED_AT_FILTER_KEYS.some((key) => Boolean(filters[key]));
|
||||
}
|
||||
|
||||
export function buildFindingScanDateFilters(
|
||||
scanInsertedAtValues: string[],
|
||||
): Record<string, string> {
|
||||
const dates = Array.from(
|
||||
new Set(scanInsertedAtValues.map(formatScanDate).filter(Boolean)),
|
||||
).sort() as string[];
|
||||
|
||||
if (dates.length === 0) {
|
||||
return {};
|
||||
}
|
||||
|
||||
if (dates.length === 1) {
|
||||
return {
|
||||
"filter[inserted_at]": dates[0],
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
"filter[inserted_at__gte]": dates[0],
|
||||
"filter[inserted_at__lte]": dates[dates.length - 1],
|
||||
};
|
||||
}
|
||||
|
||||
export async function resolveFindingScanDateFilters({
|
||||
filters,
|
||||
scans,
|
||||
loadScan,
|
||||
}: ResolveFindingScanDateFiltersOptions): Promise<Record<string, string>> {
|
||||
const scanIds = getScanFilterIds(filters);
|
||||
|
||||
if (scanIds.length === 0 || hasInsertedAtFilter(filters)) {
|
||||
return filters;
|
||||
}
|
||||
|
||||
const scansById = new Map(scans.map((scan) => [scan.id, scan]));
|
||||
const missingScanIds = scanIds.filter((scanId) => !scansById.has(scanId));
|
||||
|
||||
if (missingScanIds.length > 0) {
|
||||
const missingScans = await Promise.all(
|
||||
missingScanIds.map((scanId) => loadScan(scanId)),
|
||||
);
|
||||
|
||||
missingScans.forEach((scan) => {
|
||||
if (scan) {
|
||||
scansById.set(scan.id, scan);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
const scanInsertedAtValues = scanIds
|
||||
.map((scanId) => scansById.get(scanId)?.attributes?.inserted_at)
|
||||
.filter((insertedAt): insertedAt is string => Boolean(insertedAt));
|
||||
|
||||
const dateFilters = buildFindingScanDateFilters(scanInsertedAtValues);
|
||||
|
||||
if (Object.keys(dateFilters).length === 0) {
|
||||
return filters;
|
||||
}
|
||||
|
||||
return {
|
||||
...filters,
|
||||
...dateFilters,
|
||||
};
|
||||
}
|
||||
10
ui/lib/invitation-routing.ts
Normal file
10
ui/lib/invitation-routing.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
/**
|
||||
* Query param name + value used to bypass the backward-compat redirect
|
||||
* in proxy.ts when the user explicitly chose "Create an account"
|
||||
* from the invitation smart router.
|
||||
*
|
||||
* Client sends: /sign-up?invitation_token=…&action=signup
|
||||
* Proxy skips redirect when "action" param is present.
|
||||
*/
|
||||
export const INVITATION_ACTION_PARAM = "action";
|
||||
export const INVITATION_SIGNUP_ACTION = "signup";
|
||||
22
ui/proxy.ts
22
ui/proxy.ts
@@ -1,10 +1,12 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
|
||||
import { auth } from "@/auth.config";
|
||||
import { INVITATION_ACTION_PARAM } from "@/lib/invitation-routing";
|
||||
|
||||
const publicRoutes = [
|
||||
"/sign-in",
|
||||
"/sign-up",
|
||||
"/invitation/accept",
|
||||
// In Cloud uncomment the following lines:
|
||||
// "/reset-password",
|
||||
// "/email-verification",
|
||||
@@ -18,6 +20,22 @@ const isPublicRoute = (pathname: string): boolean => {
|
||||
// NextAuth's auth() wrapper - renamed from middleware to proxy
|
||||
export default auth((req: NextRequest & { auth: any }) => {
|
||||
const { pathname } = req.nextUrl;
|
||||
|
||||
// Backward compatibility: redirect old invitation links to new smart router
|
||||
// Skip redirect when the user explicitly chose "Create an account" from the smart router
|
||||
if (
|
||||
pathname === "/sign-up" &&
|
||||
req.nextUrl.searchParams.has("invitation_token") &&
|
||||
!req.nextUrl.searchParams.has(INVITATION_ACTION_PARAM)
|
||||
) {
|
||||
const acceptUrl = new URL("/invitation/accept", req.url);
|
||||
acceptUrl.searchParams.set(
|
||||
"invitation_token",
|
||||
req.nextUrl.searchParams.get("invitation_token")!,
|
||||
);
|
||||
return NextResponse.redirect(acceptUrl);
|
||||
}
|
||||
|
||||
const user = req.auth?.user;
|
||||
const sessionError = req.auth?.error;
|
||||
|
||||
@@ -25,13 +43,13 @@ export default auth((req: NextRequest & { auth: any }) => {
|
||||
if (sessionError && !isPublicRoute(pathname)) {
|
||||
const signInUrl = new URL("/sign-in", req.url);
|
||||
signInUrl.searchParams.set("error", sessionError);
|
||||
signInUrl.searchParams.set("callbackUrl", pathname);
|
||||
signInUrl.searchParams.set("callbackUrl", pathname + req.nextUrl.search);
|
||||
return NextResponse.redirect(signInUrl);
|
||||
}
|
||||
|
||||
if (!user && !isPublicRoute(pathname)) {
|
||||
const signInUrl = new URL("/sign-in", req.url);
|
||||
signInUrl.searchParams.set("callbackUrl", pathname);
|
||||
signInUrl.searchParams.set("callbackUrl", pathname + req.nextUrl.search);
|
||||
return NextResponse.redirect(signInUrl);
|
||||
}
|
||||
|
||||
|
||||
@@ -65,7 +65,9 @@ test.describe("Middleware Error Handling", () => {
|
||||
await freshPage.goto(`/scans?e2e_mw=${cacheBuster}`, {
|
||||
waitUntil: "commit",
|
||||
});
|
||||
await freshSignInPage.verifyRedirectWithCallback("/scans");
|
||||
await freshSignInPage.verifyRedirectWithCallback(
|
||||
`/scans?e2e_mw=${cacheBuster}`,
|
||||
);
|
||||
} finally {
|
||||
await invalidSessionContext.close();
|
||||
}
|
||||
|
||||
@@ -69,4 +69,19 @@ test.describe("Session Error Messages", () => {
|
||||
await signInPage.verifyRedirectWithCallback("/providers");
|
||||
},
|
||||
);
|
||||
|
||||
test(
|
||||
"should preserve query parameters in callbackUrl",
|
||||
{ tag: ["@e2e", "@auth", "@session", "@AUTH-SESSION-E2E-005"] },
|
||||
async ({ page, context }) => {
|
||||
const signInPage = new SignInPage(page);
|
||||
await context.clearCookies();
|
||||
|
||||
// Navigate to a protected route with query params and assert they are preserved.
|
||||
await page.goto("/providers?ref=test", {
|
||||
waitUntil: "commit",
|
||||
});
|
||||
await signInPage.verifyRedirectWithCallback("/providers?ref=test");
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
@@ -34,12 +34,14 @@ export interface FindingResourceRow {
|
||||
providerAlias: string;
|
||||
providerUid: string;
|
||||
resourceName: string;
|
||||
resourceType: string;
|
||||
resourceGroup: string;
|
||||
resourceUid: string;
|
||||
service: string;
|
||||
region: string;
|
||||
severity: Severity;
|
||||
status: string;
|
||||
delta?: string | null;
|
||||
isMuted: boolean;
|
||||
mutedReason?: string;
|
||||
firstSeenAt: string | null;
|
||||
|
||||
Reference in New Issue
Block a user