Compare commits

...

13 Commits

Author SHA1 Message Date
dependabot[bot] 14604cf72d chore(deps): bump actions/github-script from 8.0.0 to 9.0.0
Bumps [actions/github-script](https://github.com/actions/github-script) from 8.0.0 to 9.0.0.
- [Release notes](https://github.com/actions/github-script/releases)
- [Commits](https://github.com/actions/github-script/compare/ed597411d8f924073f98dfc5c65a23a2325f34cd...3a2844b7e9c422d3c10d287c895573f7108da1b3)

---
updated-dependencies:
- dependency-name: actions/github-script
  dependency-version: 9.0.0
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-04-16 11:44:13 +00:00
Rubén De la Torre Vico 04fe3f65e0 chore(deps): enable Dependabot pre-commit ecosystem and bump hooks (#10732) 2026-04-16 13:38:11 +02:00
Andoni Alonso 297c9d0734 fix(sdk): move #10726 changelog entry to unreleased version (#10728) 2026-04-16 13:10:00 +02:00
Erich Blume a2a1a73749 fix(image): --registry-list crashes with AttributeError on global_provider (#10691)
Co-authored-by: Andoni A. <14891798+andoniaf@users.noreply.github.com>
2026-04-16 13:02:25 +02:00
lydiavilchez 08fbe17e29 fix(googleworkspace): treat secure Google defaults as PASS for Drive checks (#10727) 2026-04-16 13:01:55 +02:00
lydiavilchez d920f78059 fix(googleworkspace): treat secure Google defaults as PASS for Calendar checks (#10726) 2026-04-16 12:51:40 +02:00
Pepe Fagoaga 12bf3d5e70 fix(db): add missing tenant_id filter in queries (#10722) 2026-04-16 11:55:38 +02:00
Adrián Peña 4002c28b5d fix(api): add fallback handling for missing resources in findings (#10708) 2026-04-16 11:45:06 +02:00
Andoni Alonso 2439f54280 fix(sdk): allow account-scoped tokens in Cloudflare connection test (#10723) 2026-04-16 11:38:15 +02:00
Prowler Bot b0e59156e6 chore(ui): Bump version to v5.25.0 (#10711)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2026-04-15 20:14:46 +02:00
Prowler Bot f013bd4a53 docs: Update version to v5.24.0 (#10714)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2026-04-15 20:14:17 +02:00
Prowler Bot 6ad15f900f chore(release): Bump version to v5.25.0 (#10710)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2026-04-15 20:14:06 +02:00
Prowler Bot 1784bf38ab chore(api): Bump version to v1.26.0 (#10715)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2026-04-15 20:13:33 +02:00
32 changed files with 560 additions and 224 deletions
+1 -1
View File
@@ -145,7 +145,7 @@ SENTRY_RELEASE=local
NEXT_PUBLIC_SENTRY_ENVIRONMENT=${SENTRY_ENVIRONMENT}
#### Prowler release version ####
-NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.24.0
+NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.25.0
# Social login credentials
SOCIAL_GOOGLE_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/google"
+12
View File
@@ -66,6 +66,18 @@ updates:
cooldown:
default-days: 7
+- package-ecosystem: "pre-commit"
+directory: "/"
+schedule:
+interval: "monthly"
+open-pull-requests-limit: 25
+target-branch: master
+labels:
+- "dependencies"
+- "pre-commit"
+cooldown:
+default-days: 7
# Dependabot Updates are temporary disabled - 2025/04/15
# v4.6
# - package-ecosystem: "pip"
+1 -1
View File
@@ -29,7 +29,7 @@ jobs:
api.github.com:443
- name: Comment and lock issue
-uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
with:
script: |
const { owner, repo } = context.repo;
+24 -24
View File
@@ -75,7 +75,7 @@ jobs:
with:
destination: /opt/gh-aw/actions
- name: Check workflow file timestamps
-uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
env:
GH_AW_WORKFLOW_FILE: "issue-triage.lock.yml"
with:
@@ -86,7 +86,7 @@ jobs:
await main();
- name: Compute current body text
id: compute-text
-uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
with:
script: |
const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs');
@@ -96,7 +96,7 @@ jobs:
- name: Add comment with workflow run link
id: add-comment
if: github.event_name == 'issues' || github.event_name == 'issue_comment' || github.event_name == 'pull_request_review_comment' || github.event_name == 'discussion' || github.event_name == 'discussion_comment' || (github.event_name == 'pull_request') && (github.event.pull_request.head.repo.id == github.repository_id)
-uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
env:
GH_AW_WORKFLOW_NAME: "Issue Triage"
GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 🤖 Generated by [Prowler Issue Triage]({run_url}) [Experimental]\"}"
@@ -148,7 +148,7 @@ jobs:
with:
persist-credentials: false
- name: Merge remote .github folder
-uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
env:
GH_AW_AGENT_FILE: ".github/agents/issue-triage.md"
GH_AW_AGENT_IMPORT_SPEC: "../agents/issue-triage.md"
@@ -175,7 +175,7 @@ jobs:
id: checkout-pr
if: |
github.event.pull_request
-uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
env:
GH_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
with:
@@ -187,7 +187,7 @@ jobs:
await main();
- name: Generate agentic run info
id: generate_aw_info
-uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
with:
script: |
const fs = require('fs');
@@ -511,7 +511,7 @@ jobs:
}
GH_AW_MCP_CONFIG_EOF
- name: Generate workflow overview
-uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
with:
script: |
const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs');
@@ -606,7 +606,7 @@ jobs:
{{#runtime-import .github/workflows/issue-triage.md}}
GH_AW_PROMPT_EOF
- name: Substitute placeholders
-uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
GH_AW_GITHUB_ACTOR: ${{ github.actor }}
@@ -640,7 +640,7 @@ jobs:
}
});
- name: Interpolate variables and render templates
-uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
env:
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }}
@@ -757,7 +757,7 @@ jobs:
bash /opt/gh-aw/actions/stop_mcp_gateway.sh "$GATEWAY_PID"
- name: Redact secrets in logs
if: always()
-uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
with:
script: |
const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs');
@@ -779,7 +779,7 @@ jobs:
if-no-files-found: warn
- name: Ingest agent output
id: collect_output
-uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
env:
GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }}
GH_AW_ALLOWED_DOMAINS: "*.pythonhosted.org,anaconda.org,api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,binstar.org,bootstrap.pypa.io,conda.anaconda.org,conda.binstar.org,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,files.pythonhosted.org,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,mcp.context7.com,mcp.prowler.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,pip.pypa.io,ppa.launchpad.net,pypi.org,pypi.python.org,raw.githubusercontent.com,registry.npmjs.org,repo.anaconda.com,repo.continuum.io,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com"
@@ -808,7 +808,7 @@ jobs:
if-no-files-found: ignore
- name: Parse agent logs for step summary
if: always()
-uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
env:
GH_AW_AGENT_OUTPUT: /tmp/gh-aw/sandbox/agent/logs/
with:
@@ -819,7 +819,7 @@ jobs:
await main();
- name: Parse MCP gateway logs for step summary
if: always()
-uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
with:
script: |
const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs');
@@ -891,7 +891,7 @@ jobs:
echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
- name: Process No-Op Messages
id: noop
-uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
GH_AW_NOOP_MAX: 1
@@ -905,7 +905,7 @@ jobs:
await main();
- name: Record Missing Tool
id: missing_tool
-uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
GH_AW_WORKFLOW_NAME: "Issue Triage"
@@ -918,7 +918,7 @@ jobs:
await main();
- name: Handle Agent Failure
id: handle_agent_failure
-uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
GH_AW_WORKFLOW_NAME: "Issue Triage"
@@ -937,7 +937,7 @@ jobs:
await main();
- name: Handle No-Op Message
id: handle_noop_message
-uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
GH_AW_WORKFLOW_NAME: "Issue Triage"
@@ -954,7 +954,7 @@ jobs:
await main();
- name: Update reaction comment with completion status
id: conclusion
-uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
GH_AW_COMMENT_ID: ${{ needs.activation.outputs.comment_id }}
@@ -1008,7 +1008,7 @@ jobs:
run: |
echo "Agent output-types: $AGENT_OUTPUT_TYPES"
- name: Setup threat detection
-uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
env:
WORKFLOW_NAME: "Issue Triage"
WORKFLOW_DESCRIPTION: "[Experimental] AI-powered issue triage for Prowler - produces coding-agent-ready fix plans"
@@ -1062,7 +1062,7 @@ jobs:
XDG_CONFIG_HOME: /home/runner
- name: Parse threat detection results
id: parse_results
-uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
with:
script: |
const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs');
@@ -1102,7 +1102,7 @@ jobs:
- name: Add eyes reaction for immediate feedback
id: react
if: github.event_name == 'issues' || github.event_name == 'issue_comment' || github.event_name == 'pull_request_review_comment' || github.event_name == 'discussion' || github.event_name == 'discussion_comment' || (github.event_name == 'pull_request') && (github.event.pull_request.head.repo.id == github.repository_id)
-uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
env:
GH_AW_REACTION: "eyes"
with:
@@ -1114,7 +1114,7 @@ jobs:
await main();
- name: Check team membership for workflow
id: check_membership
-uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
env:
GH_AW_REQUIRED_ROLES: admin,maintainer,write
with:
@@ -1126,7 +1126,7 @@ jobs:
await main();
- name: Check user rate limit
id: check_rate_limit
-uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
env:
GH_AW_RATE_LIMIT_MAX: "5"
GH_AW_RATE_LIMIT_WINDOW: "60"
@@ -1185,7 +1185,7 @@ jobs:
echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
- name: Process Safe Outputs
id: process_safe_outputs
-uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v9.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_comment\":{\"hide_older_comments\":true,\"max\":1},\"missing_data\":{},\"missing_tool\":{}}"
+9 -9
View File
@@ -1,7 +1,7 @@
repos:
## GENERAL
- repo: https://github.com/pre-commit/pre-commit-hooks
-rev: v4.6.0
+rev: v6.0.0
hooks:
- id: check-merge-conflict
- id: check-yaml
@@ -16,7 +16,7 @@ repos:
## TOML
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
-rev: v2.13.0
+rev: v2.16.0
hooks:
- id: pretty-format-toml
args: [--autofix]
@@ -24,21 +24,21 @@ repos:
## GITHUB ACTIONS
- repo: https://github.com/zizmorcore/zizmor-pre-commit
-rev: v1.6.0
+rev: v1.24.1
hooks:
- id: zizmor
files: ^\.github/
## BASH
- repo: https://github.com/koalaman/shellcheck-precommit
-rev: v0.10.0
+rev: v0.11.0
hooks:
- id: shellcheck
exclude: contrib
## PYTHON
- repo: https://github.com/myint/autoflake
-rev: v2.3.1
+rev: v2.3.3
hooks:
- id: autoflake
exclude: ^skills/
@@ -50,20 +50,20 @@ repos:
]
- repo: https://github.com/pycqa/isort
-rev: 5.13.2
+rev: 8.0.1
hooks:
- id: isort
exclude: ^skills/
args: ["--profile", "black"]
- repo: https://github.com/psf/black
-rev: 24.4.2
+rev: 26.3.1
hooks:
- id: black
exclude: ^skills/
- repo: https://github.com/pycqa/flake8
-rev: 7.0.0
+rev: 7.3.0
hooks:
- id: flake8
exclude: (contrib|^skills/)
@@ -93,7 +93,7 @@ repos:
pass_filenames: false
- repo: https://github.com/hadolint/hadolint
-rev: v2.13.0-beta
+rev: v2.14.0
hooks:
- id: hadolint
args: ["--ignore=DL3013"]
+9
View File
@@ -2,6 +2,14 @@
All notable changes to the **Prowler API** are documented in this file.
+## [1.25.1] (Prowler v5.24.1)
+### 🐞 Fixed
+- Attack Paths: Missing `tenant_id` filter while getting related findings after scan completes [(#10722)](https://github.com/prowler-cloud/prowler/pull/10722)
+---
## [1.25.0] (Prowler v5.24.0)
### 🔄 Changed
@@ -13,6 +21,7 @@ All notable changes to the **Prowler API** are documented in this file.
- Worker-beat race condition on cold start: replaced `sleep 15` with API service healthcheck dependency (Docker Compose) and init containers (Helm), aligned Gunicorn default port to `8080` [(#10603)](https://github.com/prowler-cloud/prowler/pull/10603)
- API container startup crash on Linux due to root-owned bind-mount preventing JWT key generation [(#10646)](https://github.com/prowler-cloud/prowler/pull/10646)
+- Finding group resources endpoints now include findings without associated resources (orphan IaC findings) as simulated resource rows, and return one row per finding when multiple findings share a resource [(#10708)](https://github.com/prowler-cloud/prowler/pull/10708)
### 🔐 Security
+1 -1
View File
@@ -50,7 +50,7 @@ name = "prowler-api"
package-mode = false
# Needed for the SDK compatibility
requires-python = ">=3.11,<3.13"
version = "1.25.0"
version = "1.26.0"
[project.scripts]
celery = "src.backend.config.settings.celery"
+1 -1
View File
@@ -1,7 +1,7 @@
openapi: 3.0.3
info:
title: Prowler API
-version: 1.25.0
+version: 1.26.0
description: |-
Prowler API specification.
+31
View File
@@ -57,6 +57,7 @@ from api.models import (
ProviderGroupMembership,
ProviderSecret,
Resource,
+ResourceFindingMapping,
Role,
RoleProviderGroupRelationship,
SAMLConfiguration,
@@ -16030,6 +16031,36 @@ class TestFindingGroupViewSet:
# s3_bucket_public_access has 2 findings with 2 different resources
assert len(data) == 2
def test_resources_id_matches_resource_id_for_mapped_findings(
self, authenticated_client, finding_groups_fixture
):
"""Findings with a resource expose the resource id as row id (hot path contract)."""
response = authenticated_client.get(
reverse(
"finding-group-resources", kwargs={"pk": "s3_bucket_public_access"}
),
{"filter[inserted_at]": TODAY},
)
assert response.status_code == status.HTTP_200_OK
data = response.json()["data"]
assert data, "expected resources in response"
resource_ids = set(
ResourceFindingMapping.objects.filter(
finding__check_id="s3_bucket_public_access",
).values_list("resource_id", flat=True)
)
finding_ids = set(
Finding.objects.filter(
check_id="s3_bucket_public_access",
).values_list("id", flat=True)
)
returned_ids = {item["id"] for item in data}
assert returned_ids <= {str(rid) for rid in resource_ids}
assert returned_ids.isdisjoint({str(fid) for fid in finding_ids})
def test_resources_fields(self, authenticated_client, finding_groups_fixture):
"""Test resource fields (uid, name, service, region, type) have valid values."""
response = authenticated_client.get(
+3 -2
View File
@@ -4225,10 +4225,11 @@ class FindingGroupResourceSerializer(BaseSerializerV1):
Serializer for Finding Group Resources - resources within a finding group.
Returns individual resources with their current status, severity,
-and timing information.
+and timing information. Orphan findings (without any resource) expose the
+finding id as `id` so the row stays identifiable in the UI.
"""
id = serializers.UUIDField(source="resource_id")
id = serializers.UUIDField(source="row_id")
resource = serializers.SerializerMethodField()
provider = serializers.SerializerMethodField()
finding_id = serializers.UUIDField()
+247 -37
View File
@@ -35,11 +35,13 @@ from django.db.models import (
CharField,
Count,
DecimalField,
+Exists,
ExpressionWrapper,
F,
IntegerField,
Max,
Min,
+OuterRef,
Prefetch,
Q,
QuerySet,
@@ -415,7 +417,7 @@ class SchemaView(SpectacularAPIView):
def get(self, request, *args, **kwargs):
spectacular_settings.TITLE = "Prowler API"
spectacular_settings.VERSION = "1.25.0"
spectacular_settings.VERSION = "1.26.0"
spectacular_settings.DESCRIPTION = (
"Prowler API specification.\n\nThis file is auto-generated."
)
@@ -7578,6 +7580,53 @@ class FindingGroupViewSet(BaseRLSViewSet):
.order_by(*ordering)
)
def _orphan_findings_queryset(self, filtered_queryset, finding_ids=None):
"""Findings in the filtered set with no ResourceFindingMapping entries."""
orphan_qs = filtered_queryset.filter(
~Exists(ResourceFindingMapping.objects.filter(finding_id=OuterRef("pk")))
)
if finding_ids is not None:
orphan_qs = orphan_qs.filter(id__in=finding_ids)
return orphan_qs
def _has_orphan_findings(self, filtered_queryset) -> bool:
"""Return True if any finding in the filtered set has no resource mapping."""
return self._orphan_findings_queryset(filtered_queryset).exists()
def _orphan_aggregation_values(self, orphan_queryset):
"""Raw rows for orphan findings; resource payload synthesized from metadata.
check_metadata is stored with lowercase keys (see
`prowler.lib.outputs.finding.Finding.get_metadata`) and
`Finding.resource_groups` is already denormalized at ingest time.
"""
return orphan_queryset.annotate(
_provider_type=F("scan__provider__provider"),
_provider_uid=F("scan__provider__uid"),
_provider_alias=F("scan__provider__alias"),
_svc=KeyTextTransform("servicename", "check_metadata"),
_region=KeyTextTransform("region", "check_metadata"),
_rtype=KeyTextTransform("resourcetype", "check_metadata"),
_rgroup=F("resource_groups"),
).values(
"id",
"uid",
"status",
"severity",
"delta",
"muted",
"muted_reason",
"first_seen_at",
"inserted_at",
"_provider_type",
"_provider_uid",
"_provider_alias",
"_svc",
"_region",
"_rtype",
"_rgroup",
)
def _post_process_resources(self, resource_data):
"""Convert resource aggregation rows to API output."""
results = []
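The `~Exists(... OuterRef("pk"))` filter in `_orphan_findings_queryset` above compiles to a SQL `NOT EXISTS` anti-join. As a plain-Python sketch of the same selection (hypothetical data, illustration only, not Prowler code):

```python
# A finding is an "orphan" when its pk never appears in the
# resource-finding mapping table.
finding_pks = [101, 102, 103]
mapping_rows = [(101, "res-7"), (101, "res-8")]  # (finding_id, resource_id)

mapped_pks = {finding_id for finding_id, _ in mapping_rows}
orphan_pks = [pk for pk in finding_pks if pk not in mapped_pks]
assert orphan_pks == [102, 103]  # 102 and 103 have no mapped resource
```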
@@ -7599,9 +7648,13 @@ class FindingGroupViewSet(BaseRLSViewSet):
else:
delta = None
+resource_id = row["resource_id"]
+finding_id = str(row["finding_id"]) if row.get("finding_id") else None
results.append(
{
-"resource_id": row["resource_id"],
+"row_id": resource_id,
+"resource_id": resource_id,
"resource_uid": row["resource_uid"],
"resource_name": row["resource_name"],
"resource_service": row["resource_service"],
@@ -7620,9 +7673,46 @@ class FindingGroupViewSet(BaseRLSViewSet):
"muted": bool(row.get("muted", False)),
"muted_reason": row.get("muted_reason"),
"resource_group": row.get("resource_group", ""),
"finding_id": (
str(row["finding_id"]) if row.get("finding_id") else None
),
"finding_id": finding_id,
}
)
return results
def _post_process_orphans(self, orphan_rows):
"""Convert orphan finding rows into the same API shape as mapping rows."""
results = []
for row in orphan_rows:
status_val = row["status"]
status = status_val if status_val in ("FAIL", "PASS") else "MANUAL"
muted = bool(row["muted"])
delta_val = row.get("delta")
delta = delta_val if delta_val in ("new", "changed") and not muted else None
finding_id = str(row["id"])
results.append(
{
"row_id": finding_id,
"resource_id": None,
"resource_uid": row["uid"],
"resource_name": row["uid"],
"resource_service": row["_svc"] or "",
"resource_region": row["_region"] or "",
"resource_type": row["_rtype"] or "",
"provider_type": row["_provider_type"],
"provider_uid": row["_provider_uid"],
"provider_alias": row["_provider_alias"],
"status": status,
"severity": row["severity"],
"delta": delta,
"first_seen_at": row["first_seen_at"],
"last_seen_at": row["inserted_at"],
"muted": muted,
"muted_reason": row.get("muted_reason"),
"resource_group": row["_rgroup"] or "",
"finding_id": finding_id,
}
)
@@ -7731,41 +7821,64 @@ class FindingGroupViewSet(BaseRLSViewSet):
def _paginated_resource_response(
self, request, filtered_queryset, resource_ids, tenant_id
):
"""Paginate and return resources.
"""Paginate and return resources, appending orphan findings when present.
Without sort: paginate lightweight resource IDs first, aggregate only the page.
With sort: build a lightweight ordering subquery (resource_id + sort keys),
paginate that, then aggregate full details only for the page.
Hot path (no orphans, or resource filter applied): resources come from
ResourceFindingMapping aggregation. Untouched pre-existing behaviour.
Orphan fallback: findings without a mapping (e.g. IaC) are appended
after mapping rows as synthesised resource-like rows so they remain
visible in the UI without paying the aggregation cost on the hot path.
"""
sort_param = request.query_params.get("sort")
ordering = None
if sort_param:
-ordering = self._validate_sort_fields(sort_param, self._RESOURCE_SORT_MAP)
-if ordering:
-if "resource_id" not in {field.lstrip("-") for field in ordering}:
-ordering.append("resource_id")
+validated = self._validate_sort_fields(sort_param, self._RESOURCE_SORT_MAP)
+ordering = validated if validated else None
-# Phase 1: lightweight aggregation with only sort keys, paginate
-ordering_qs = self._build_resource_ordering_queryset(
-filtered_queryset,
-resource_ids=resource_ids,
-tenant_id=tenant_id,
-ordering=ordering,
-)
-page = self.paginate_queryset(ordering_qs)
-if page is not None:
-page_ids = [row["resource_id"] for row in page]
-resource_data = self._build_resource_aggregation(
-filtered_queryset, resource_ids=page_ids, tenant_id=tenant_id
-)
-# Re-sort to match the page ordering
-id_order = {rid: idx for idx, rid in enumerate(page_ids)}
-results = self._post_process_resources(resource_data)
-results.sort(key=lambda r: id_order.get(r["resource_id"], 0))
-serializer = FindingGroupResourceSerializer(results, many=True)
-return self.get_paginated_response(serializer.data)
+# Resource filters can only match findings with resources; skip orphan
+# detection entirely when they are present.
+if resource_ids is not None:
+return self._mapping_paginated_response(
+request, filtered_queryset, resource_ids, tenant_id, ordering
+)
-page_ids = [row["resource_id"] for row in ordering_qs]
+has_mappings = self._build_resource_mapping_queryset(
+filtered_queryset, resource_ids=None, tenant_id=tenant_id
+).exists()
+if has_mappings:
+# Normal or mixed group: serve only resource-mapped rows.
+# TODO: Orphan findings in mixed groups are intentionally excluded
+# until the ephemeral resources strategy is decided. When resolved,
+# route mixed groups to _combined_paginated_response instead.
+return self._mapping_paginated_response(
+request, filtered_queryset, resource_ids, tenant_id, ordering
+)
+# Pure orphan group (e.g. IaC): synthesize resource-like rows.
+return self._combined_paginated_response(
+request, filtered_queryset, tenant_id, ordering
+)
+def _mapping_paginated_response(
+self, request, filtered_queryset, resource_ids, tenant_id, ordering
+):
+"""Mapping-only paginated response (original fast path)."""
+if ordering:
+if "resource_id" not in {field.lstrip("-") for field in ordering}:
+ordering.append("resource_id")
+# Phase 1: lightweight aggregation with only sort keys, paginate
+ordering_qs = self._build_resource_ordering_queryset(
+filtered_queryset,
+resource_ids=resource_ids,
+tenant_id=tenant_id,
+ordering=ordering,
+)
+page = self.paginate_queryset(ordering_qs)
+if page is not None:
+page_ids = [row["resource_id"] for row in page]
+resource_data = self._build_resource_aggregation(
+filtered_queryset, resource_ids=page_ids, tenant_id=tenant_id
+)
@@ -7773,10 +7886,18 @@ class FindingGroupViewSet(BaseRLSViewSet):
results = self._post_process_resources(resource_data)
results.sort(key=lambda r: id_order.get(r["resource_id"], 0))
serializer = FindingGroupResourceSerializer(results, many=True)
-return Response(serializer.data)
+return self.get_paginated_response(serializer.data)
+page_ids = [row["resource_id"] for row in ordering_qs]
+resource_data = self._build_resource_aggregation(
+filtered_queryset, resource_ids=page_ids, tenant_id=tenant_id
+)
+id_order = {rid: idx for idx, rid in enumerate(page_ids)}
+results = self._post_process_resources(resource_data)
+results.sort(key=lambda r: id_order.get(r["resource_id"], 0))
+serializer = FindingGroupResourceSerializer(results, many=True)
+return Response(serializer.data)
+# No sort (or only empty sort fragments): paginate lightweight resource IDs
+# first, aggregate only the page.
mapping_qs = self._build_resource_mapping_queryset(
filtered_queryset, resource_ids=resource_ids, tenant_id=tenant_id
)
@@ -7804,6 +7925,95 @@ class FindingGroupViewSet(BaseRLSViewSet):
serializer = FindingGroupResourceSerializer(results, many=True)
return Response(serializer.data)
def _combined_paginated_response(
self, request, filtered_queryset, tenant_id, ordering
):
"""Mapping rows + orphan findings appended at end.
Orphans sit after mapping rows regardless of sort. This keeps the
mapping-only code path intact for checks that have no orphans (the
common case) and avoids paying UNION/coalesce costs there.
"""
mapping_qs = self._build_resource_mapping_queryset(
filtered_queryset, resource_ids=None, tenant_id=tenant_id
)
mapping_count = mapping_qs.values("resource_id").distinct().count()
orphan_ids = list(
self._orphan_findings_queryset(filtered_queryset)
.order_by("id")
.values_list("id", flat=True)
)
orphan_count = len(orphan_ids)
total = mapping_count + orphan_count
# Paginate a simple [0..total) index sequence so DRF produces proper
# links/meta; then slice mapping / orphan sources accordingly.
page = self.paginate_queryset(range(total))
page_indices = list(page) if page is not None else list(range(total))
mapping_indices = [i for i in page_indices if i < mapping_count]
orphan_positions = [
i - mapping_count for i in page_indices if i >= mapping_count
]
mapping_results = []
if mapping_indices:
start = mapping_indices[0]
stop = mapping_indices[-1] + 1
if ordering:
ordering_fields = list(ordering)
if "resource_id" not in {
field.lstrip("-") for field in ordering_fields
}:
ordering_fields.append("resource_id")
ordered_qs = self._build_resource_ordering_queryset(
filtered_queryset,
resource_ids=None,
tenant_id=tenant_id,
ordering=ordering_fields,
)
slice_rids = [row["resource_id"] for row in ordered_qs[start:stop]]
else:
slice_rids = list(
mapping_qs.values_list("resource_id", flat=True)
.distinct()
.order_by("resource_id")[start:stop]
)
if slice_rids:
resource_data = self._build_resource_aggregation(
filtered_queryset,
resource_ids=slice_rids,
tenant_id=tenant_id,
)
rows_by_rid = {row["resource_id"]: row for row in resource_data}
ordered_rows = [
rows_by_rid[rid] for rid in slice_rids if rid in rows_by_rid
]
mapping_results = self._post_process_resources(ordered_rows)
orphan_results = []
if orphan_positions:
slice_fids = [orphan_ids[pos] for pos in orphan_positions]
raw_rows = list(
self._orphan_aggregation_values(
self._orphan_findings_queryset(
filtered_queryset, finding_ids=slice_fids
)
)
)
rows_by_fid = {row["id"]: row for row in raw_rows}
ordered_rows = [
rows_by_fid[fid] for fid in slice_fids if fid in rows_by_fid
]
orphan_results = self._post_process_orphans(ordered_rows)
results = mapping_results + orphan_results
serializer = FindingGroupResourceSerializer(results, many=True)
if page is not None:
return self.get_paginated_response(serializer.data)
return Response(serializer.data)
def list(self, request, *args, **kwargs):
"""
List finding groups with aggregation and filtering.
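The `range(total)` pagination trick in `_combined_paginated_response` above is easier to see outside Django/DRF. A minimal sketch with made-up data: paginate a virtual index sequence, then resolve each index from whichever backing source it falls in.

```python
# Illustration only: mapping rows come first, orphan rows are appended,
# and a single page may straddle the boundary between the two sources.
mapping_rows = ["r1", "r2", "r3"]  # stand-ins for aggregated resource rows
orphan_rows = ["o1", "o2"]         # stand-ins for synthesized orphan rows
mapping_count = len(mapping_rows)
total = mapping_count + len(orphan_rows)

def page_slice(page_number: int, page_size: int) -> list:
    start = (page_number - 1) * page_size
    indices = range(start, min(start + page_size, total))
    mapping_indices = [i for i in indices if i < mapping_count]
    orphan_positions = [i - mapping_count for i in indices if i >= mapping_count]
    return [mapping_rows[i] for i in mapping_indices] + [
        orphan_rows[p] for p in orphan_positions
    ]

assert page_slice(1, 2) == ["r1", "r2"]
assert page_slice(2, 2) == ["r3", "o1"]  # page straddles both sources
assert page_slice(3, 2) == ["o2"]
```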
@@ -248,7 +248,9 @@ def _fetch_findings_batch(
with rls_transaction(tenant_id, using=READ_REPLICA_ALIAS):
# Use `all_objects` to get `Findings` even on soft-deleted `Providers`
# But even the provider is already validated as active in this context
-qs = FindingModel.all_objects.filter(scan_id=scan_id).order_by("id")
+qs = FindingModel.all_objects.filter(
+tenant_id=tenant_id, scan_id=scan_id
+).order_by("id")
if after_id is not None:
qs = qs.filter(id__gt=after_id)
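For context, this hunk sits inside a keyset-paginated batch loop. A simplified sketch of that pattern (hypothetical wrapper, not the actual function signature): the explicit `tenant_id` filter now backs up the row-level-security transaction scope instead of relying on it alone, and `id__gt` keeps batching cheap where `OFFSET` would degrade on large tables.

```python
# Simplified sketch: iterate a scan's findings in id-ordered batches.
def iter_findings_batches(FindingModel, tenant_id, scan_id, batch_size=1000):
    after_id = None
    while True:
        qs = FindingModel.all_objects.filter(
            tenant_id=tenant_id, scan_id=scan_id  # defense in depth vs. RLS only
        ).order_by("id")
        if after_id is not None:
            qs = qs.filter(id__gt=after_id)  # keyset pagination on the pk
        batch = list(qs[:batch_size])
        if not batch:
            break
        yield batch
        after_id = batch[-1].id  # resume strictly after the last row seen
```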
@@ -121,8 +121,8 @@ To update the environment file:
Edit the `.env` file and change version values:
```env
PROWLER_UI_VERSION="5.23.0"
PROWLER_API_VERSION="5.23.0"
PROWLER_UI_VERSION="5.24.0"
PROWLER_API_VERSION="5.24.0"
```
<Note>
+12 -1
View File
@@ -2,6 +2,16 @@
All notable changes to the **Prowler SDK** are documented in this file.
+## [5.24.1] (Prowler UNRELEASED)
+### 🐞 Fixed
+- Cloudflare account-scoped API tokens failing connection test in the App with `CloudflareUserTokenRequiredError` [(#10723)](https://github.com/prowler-cloud/prowler/pull/10723)
+- `prowler image --registry` failing with `ImageNoImagesProvidedError` due to registry arguments not being forwarded to `ImageProvider` in `init_global_provider` [(#10470)](https://github.com/prowler-cloud/prowler/pull/10470)
+- Google Workspace Calendar checks false FAIL on unconfigured settings with secure Google defaults [(#10726)](https://github.com/prowler-cloud/prowler/pull/10726)
+---
## [5.24.0] (Prowler v5.24.0)
### 🚀 Added
@@ -26,7 +36,9 @@ All notable changes to the **Prowler SDK** are documented in this file.
### 🐞 Fixed
- `prowler image --registry-list` crashes with `AttributeError` because `ImageProvider.__init__` returns early before registering the global provider [(#10691)](https://github.com/prowler-cloud/prowler/pull/10691)
- Vercel firewall config handling for team-scoped projects and current API response shapes [(#10695)](https://github.com/prowler-cloud/prowler/pull/10695)
+- Google Workspace Drive checks false FAIL on unconfigured settings with secure Google defaults [(#10727)](https://github.com/prowler-cloud/prowler/pull/10727)
---
@@ -77,7 +89,6 @@ All notable changes to the **Prowler SDK** are documented in this file.
- Oracle Cloud `kms_key_rotation_enabled` now checks current key version age to avoid false positives on vaults without auto-rotation support [(#10450)](https://github.com/prowler-cloud/prowler/pull/10450)
- OCI filestorage, blockstorage, KMS, and compute services now honor `--region` for scanning outside the tenancy home region [(#10472)](https://github.com/prowler-cloud/prowler/pull/10472)
- OCI provider now supports multi-region filtering via `--region` [(#10473)](https://github.com/prowler-cloud/prowler/pull/10473)
-- `prowler image --registry` failing with `ImageNoImagesProvidedError` due to registry arguments not being forwarded to `ImageProvider` in `init_global_provider` [(#10470)](https://github.com/prowler-cloud/prowler/pull/10470)
- OCI multi-region support for identity client configuration in blockstorage, identity, and filestorage services [(#10520)](https://github.com/prowler-cloud/prowler/pull/10520)
- Google Workspace Calendar checks now filter for customer-level policies only, skipping OU and group overrides that could produce incorrect audit results [(#10658)](https://github.com/prowler-cloud/prowler/pull/10658)
+4
View File
@@ -293,6 +293,10 @@ def prowler():
if not args.only_logs:
global_provider.print_credentials()
+# --registry-list: listing already printed during provider init, exit
+if getattr(global_provider, "_listing_only", False):
+sys.exit()
# Skip service and check loading for external-tool providers
if provider not in EXTERNAL_TOOL_PROVIDERS:
# Import custom checks from folder
+1 -1
View File
@@ -38,7 +38,7 @@ class _MutableTimestamp:
timestamp = _MutableTimestamp(datetime.today())
timestamp_utc = _MutableTimestamp(datetime.now(timezone.utc))
prowler_version = "5.24.0"
prowler_version = "5.25.0"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
square_logo_img = "https://raw.githubusercontent.com/prowler-cloud/prowler/dc7d2d5aeb92fdf12e8604f42ef6472cd3e8e889/docs/img/prowler-logo-black.png"
aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"
@@ -332,19 +332,16 @@ class CloudflareProvider(Provider):
return
except PermissionDeniedError as error:
error_str = str(error)
-# Check for user-level authentication required (code 9109)
-if "9109" in error_str:
-logger.error(f"CloudflareUserTokenRequiredError: {error}")
-raise CloudflareUserTokenRequiredError(
-file=os.path.basename(__file__),
-)
# Check for invalid API key or email (code 9103) - comes as 403
if "9103" in error_str or "Unknown X-Auth-Key" in error_str:
logger.error(f"CloudflareInvalidAPIKeyError: {error}")
raise CloudflareInvalidAPIKeyError(
file=os.path.basename(__file__),
)
-# For other permission errors, try accounts.list() as fallback
+# For permission errors (including 9109 account-scoped tokens),
+# try accounts.list() as fallback before failing.
+# Error 9109 means the token is account-scoped, not user-level,
+# which is valid for scanning — only fail if accounts.list() also fails.
logger.warning(
f"Unable to retrieve Cloudflare user info: {error}. "
"Trying accounts.list() as fallback."
@@ -35,21 +35,20 @@ class calendar_external_invitations_warning(Check):
f"External invitation warnings for Google Calendar are enabled "
f"in domain {calendar_client.provider.identity.domain}."
)
+elif warning_enabled is None:
+report.status = "PASS"
+report.status_extended = (
+f"External invitation warnings for Google Calendar use Google's "
+f"secure default configuration (enabled) "
+f"in domain {calendar_client.provider.identity.domain}."
+)
else:
report.status = "FAIL"
-if warning_enabled is None:
-report.status_extended = (
-f"External invitation warnings for Google Calendar are not "
-f"explicitly configured in domain "
-f"{calendar_client.provider.identity.domain}. "
-f"Users should be warned when inviting guests outside the organization."
-)
-else:
-report.status_extended = (
-f"External invitation warnings for Google Calendar are disabled "
-f"in domain {calendar_client.provider.identity.domain}. "
-f"Users should be warned when inviting guests outside the organization."
-)
+report.status_extended = (
+f"External invitation warnings for Google Calendar are disabled "
+f"in domain {calendar_client.provider.identity.domain}. "
+f"Users should be warned when inviting guests outside the organization."
+)
findings.append(report)
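The five check diffs that follow apply the same rewrite. The shared tri-state pattern, reduced to a standalone sketch (boolean-simplified; the primary-calendar and download/print/copy checks compare string settings instead):

```python
# Illustration: None means the admin never touched the setting, and since
# Google's default is the secure value, that case flips from FAIL to PASS.
def evaluate(setting):
    if setting:  # explicitly set to the secure value
        return "PASS"
    if setting is None:  # unset: Google's secure default applies
        return "PASS"
    return "FAIL"  # explicitly set to the insecure value

assert evaluate(True) == "PASS"
assert evaluate(None) == "PASS"  # was FAIL before this change
assert evaluate(False) == "FAIL"
```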
@@ -36,20 +36,20 @@ class calendar_external_sharing_primary_calendar(Check):
f"{calendar_client.provider.identity.domain} is restricted to "
f"free/busy information only."
)
+elif sharing is None:
+report.status = "PASS"
+report.status_extended = (
+f"Primary calendar external sharing uses Google's secure default "
+f"configuration (free/busy only) "
+f"in domain {calendar_client.provider.identity.domain}."
+)
else:
report.status = "FAIL"
-if sharing is None:
-report.status_extended = (
-f"Primary calendar external sharing is not explicitly configured "
-f"in domain {calendar_client.provider.identity.domain}. "
-f"External sharing should be restricted to free/busy information only."
-)
-else:
-report.status_extended = (
-f"Primary calendar external sharing in domain "
-f"{calendar_client.provider.identity.domain} is set to {sharing}. "
-f"External sharing should be restricted to free/busy information only."
-)
+report.status_extended = (
+f"Primary calendar external sharing in domain "
+f"{calendar_client.provider.identity.domain} is set to {sharing}. "
+f"External sharing should be restricted to free/busy information only."
+)
findings.append(report)
@@ -33,21 +33,20 @@ class drive_external_sharing_warn_users(Check):
f"External sharing warnings for Drive and Docs are enabled "
f"in domain {drive_client.provider.identity.domain}."
)
+elif warning_enabled is None:
+report.status = "PASS"
+report.status_extended = (
+f"External sharing warnings for Drive and Docs use Google's "
+f"secure default configuration (enabled) "
+f"in domain {drive_client.provider.identity.domain}."
+)
else:
report.status = "FAIL"
-if warning_enabled is None:
-report.status_extended = (
-f"External sharing warnings for Drive and Docs are not "
-f"explicitly configured in domain "
-f"{drive_client.provider.identity.domain}. "
-f"Users should be warned when sharing files outside the organization."
-)
-else:
-report.status_extended = (
-f"External sharing warnings for Drive and Docs are disabled "
-f"in domain {drive_client.provider.identity.domain}. "
-f"Users should be warned when sharing files outside the organization."
-)
+report.status_extended = (
+f"External sharing warnings for Drive and Docs are disabled "
+f"in domain {drive_client.provider.identity.domain}. "
+f"Users should be warned when sharing files outside the organization."
+)
findings.append(report)
@@ -35,22 +35,21 @@ class drive_shared_drive_creation_allowed(Check):
f"Users in domain {drive_client.provider.identity.domain} "
f"are allowed to create new shared drives."
)
+elif allow_creation is None:
+report.status = "PASS"
+report.status_extended = (
+f"Shared drive creation uses Google's secure default "
+f"configuration (allowed) "
+f"in domain {drive_client.provider.identity.domain}."
+)
else:
report.status = "FAIL"
-if allow_creation is None:
-report.status_extended = (
-f"Shared drive creation is not explicitly configured in "
-f"domain {drive_client.provider.identity.domain}. "
-f"Users should be allowed to create new shared drives to avoid "
-f"data loss when accounts are deleted."
-)
-else:
-report.status_extended = (
-f"Users in domain {drive_client.provider.identity.domain} "
-f"are prevented from creating new shared drives. "
-f"Users should be allowed to create new shared drives to avoid "
-f"data loss when accounts are deleted."
-)
+report.status_extended = (
+f"Users in domain {drive_client.provider.identity.domain} "
+f"are prevented from creating new shared drives. "
+f"Users should be allowed to create new shared drives to avoid "
+f"data loss when accounts are deleted."
+)
findings.append(report)
@@ -35,21 +35,21 @@ class drive_shared_drive_disable_download_print_copy(Check):
f"{drive_client.provider.identity.domain} is restricted to "
f"{allowed}."
)
+elif allowed is None:
+report.status = "PASS"
+report.status_extended = (
+f"Download, print, and copy restrictions for shared drives use "
+f"Google's secure default configuration (disabled for viewers "
+f"and commenters) "
+f"in domain {drive_client.provider.identity.domain}."
+)
else:
report.status = "FAIL"
-if allowed is None:
-report.status_extended = (
-f"Download, print, and copy restrictions for shared drive "
-f"viewers and commenters are not explicitly configured in "
-f"domain {drive_client.provider.identity.domain}. "
-f"These actions should be restricted to editors or managers only."
-)
-else:
-report.status_extended = (
-f"Download, print, and copy in shared drives in domain "
-f"{drive_client.provider.identity.domain} is set to {allowed}. "
-f"These actions should be restricted to editors or managers only."
-)
+report.status_extended = (
+f"Download, print, and copy in shared drives in domain "
+f"{drive_client.provider.identity.domain} is set to {allowed}. "
+f"These actions should be restricted to editors or managers only."
+)
findings.append(report)
@@ -36,21 +36,20 @@ class drive_warn_sharing_with_allowlisted_domains(Check):
f"Users are warned when sharing files with allowlisted "
f"domains in domain {drive_client.provider.identity.domain}."
)
+elif warn_enabled is None:
+report.status = "PASS"
+report.status_extended = (
+f"Warning when sharing with allowlisted domains uses Google's "
+f"secure default configuration (enabled) "
+f"in domain {drive_client.provider.identity.domain}."
+)
else:
report.status = "FAIL"
-if warn_enabled is None:
-report.status_extended = (
-f"Warning when sharing with allowlisted domains is not "
-f"explicitly configured in domain "
-f"{drive_client.provider.identity.domain}. "
-f"Users should be warned when sharing files with users in allowlisted domains."
-)
-else:
-report.status_extended = (
-f"Warning when sharing with allowlisted domains is disabled "
-f"in domain {drive_client.provider.identity.domain}. "
-f"Users should be warned when sharing files with users in allowlisted domains."
-)
+report.status_extended = (
+f"Warning when sharing with allowlisted domains is disabled "
+f"in domain {drive_client.provider.identity.domain}. "
+f"Users should be warned when sharing files with users in allowlisted domains."
+)
findings.append(report)
+41 -33
View File
@@ -163,42 +163,50 @@ class ImageProvider(Provider):
# Registry scan mode: enumerate images from registry
if self.registry:
self._enumerate_registry()
-if self._listing_only:
-return
-for image in self.images:
-self._validate_image_name(image)
-if not self.images:
-raise ImageNoImagesProvidedError(
-file=__file__,
-message="No images provided for scanning.",
-)
-# Audit Config
-if config_content:
-self._audit_config = config_content
-else:
-if not config_path:
-config_path = default_config_file_path
-self._audit_config = load_and_validate_config_file(self._type, config_path)
-# Fixer Config
-self._fixer_config = fixer_config if fixer_config is not None else {}
-# Mutelist (not needed for Image provider since Trivy has its own logic)
+# Safe defaults for listing-only mode (overwritten below in scan mode)
+self._audit_config = {}
+self._fixer_config = {}
self._mutelist = None
+self.audit_metadata = None
-self.audit_metadata = Audit_Metadata(
-provider=self._type,
-account_id=self.audited_account,
-account_name="image",
-region=self.region,
-services_scanned=0,
-expected_checks=[],
-completed_checks=0,
-audit_progress=0,
-)
+# Skip scan setup for listing-only mode
+if not self._listing_only:
+for image in self.images:
+self._validate_image_name(image)
+if not self.images:
+raise ImageNoImagesProvidedError(
+file=__file__,
+message="No images provided for scanning.",
+)
+# Audit Config
+if config_content:
+self._audit_config = config_content
+else:
+if not config_path:
+config_path = default_config_file_path
+self._audit_config = load_and_validate_config_file(
+self._type, config_path
+)
+# Fixer Config
+self._fixer_config = fixer_config if fixer_config is not None else {}
+# Mutelist (not needed for Image provider since Trivy has its own logic)
+self._mutelist = None
+self.audit_metadata = Audit_Metadata(
+provider=self._type,
+account_id=self.audited_account,
+account_name="image",
+region=self.region,
+services_scanned=0,
+expected_checks=[],
+completed_checks=0,
+audit_progress=0,
+)
Provider.set_global_provider(self)
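The shape of this fix, reduced to a toy reproduction (hypothetical class bodies): registration with the global provider now runs unconditionally, and only scan-specific setup is skipped in listing mode, so `Provider.get_global_provider()` can no longer return `None` after `--registry-list`.

```python
# Toy version of the bug and fix. Before: __init__ returned early in
# listing mode, so _global stayed None and the caller's
# global_provider.print_credentials() raised AttributeError.
class Provider:
    _global = None

    @classmethod
    def set_global_provider(cls, provider):
        cls._global = provider

    @classmethod
    def get_global_provider(cls):
        return cls._global

class ImageProvider(Provider):
    def __init__(self, listing_only: bool):
        self._listing_only = listing_only
        self.audit_metadata = None  # safe default for listing-only mode
        if not self._listing_only:
            self.audit_metadata = {}  # scan-only setup lives in this branch
        Provider.set_global_provider(self)  # now runs unconditionally

ImageProvider(listing_only=True)
assert Provider.get_global_provider() is not None
```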
+1 -1
View File
@@ -95,7 +95,7 @@ maintainers = [{name = "Prowler Engineering", email = "engineering@prowler.com"}
name = "prowler"
readme = "README.md"
requires-python = ">=3.10,<3.13"
version = "5.24.0"
version = "5.25.0"
[project.scripts]
prowler = "prowler.__main__:prowler"
@@ -73,8 +73,8 @@ class TestCalendarExternalInvitationsWarning:
assert findings[0].status == "FAIL"
assert "disabled" in findings[0].status_extended
-def test_fail_no_policy_set(self):
-"""Test FAIL when no explicit policy is set (None) but fetch succeeded"""
+def test_pass_using_default(self):
+"""Test PASS when no explicit policy is set (None) — Google default is secure (enabled)"""
mock_provider = set_mocked_googleworkspace_provider()
with (
@@ -100,8 +100,8 @@ class TestCalendarExternalInvitationsWarning:
findings = check.execute()
assert len(findings) == 1
assert findings[0].status == "FAIL"
assert "not explicitly configured" in findings[0].status_extended
assert findings[0].status == "PASS"
assert "secure default" in findings[0].status_extended
def test_no_findings_when_fetch_failed(self):
"""Test no findings returned when the API fetch failed"""
@@ -104,8 +104,8 @@ class TestCalendarExternalSharingPrimaryCalendar:
assert findings[0].status == "FAIL"
assert "EXTERNAL_ALL_INFO_READ_WRITE" in findings[0].status_extended
-def test_fail_no_policy_set(self):
-"""Test FAIL when no explicit policy is set (None) but fetch succeeded"""
+def test_pass_using_default(self):
+"""Test PASS when no explicit policy is set (None) — Google default is secure (free/busy only)"""
mock_provider = set_mocked_googleworkspace_provider()
with (
@@ -131,8 +131,8 @@ class TestCalendarExternalSharingPrimaryCalendar:
findings = check.execute()
assert len(findings) == 1
assert findings[0].status == "FAIL"
assert "not explicitly configured" in findings[0].status_extended
assert findings[0].status == "PASS"
assert "secure default" in findings[0].status_extended
def test_no_findings_when_fetch_failed(self):
"""Test no findings returned when the API fetch failed"""
@@ -67,8 +67,8 @@ class TestDriveExternalSharingWarnUsers:
assert findings[0].status == "FAIL"
assert "disabled" in findings[0].status_extended
-def test_fail_no_policy_set(self):
-"""Test FAIL when no explicit policy is set (None) but fetch succeeded"""
+def test_pass_using_default(self):
+"""Test PASS when no explicit policy is set (None) — Google default is secure"""
mock_provider = set_mocked_googleworkspace_provider()
with (
@@ -92,8 +92,8 @@ class TestDriveExternalSharingWarnUsers:
findings = check.execute()
assert len(findings) == 1
assert findings[0].status == "FAIL"
assert "not explicitly configured" in findings[0].status_extended
assert findings[0].status == "PASS"
assert "secure default" in findings[0].status_extended
def test_no_findings_when_fetch_failed(self):
"""Test no findings returned when the API fetch failed"""
@@ -69,8 +69,8 @@ class TestDriveSharedDriveCreationAllowed:
assert findings[0].status == "FAIL"
assert "prevented" in findings[0].status_extended
-def test_fail_no_policy_set(self):
-"""Test FAIL when no explicit policy is set (None) but fetch succeeded"""
+def test_pass_using_default(self):
+"""Test PASS when no explicit policy is set (None) — Google default is secure"""
mock_provider = set_mocked_googleworkspace_provider()
with (
@@ -94,8 +94,8 @@ class TestDriveSharedDriveCreationAllowed:
findings = check.execute()
assert len(findings) == 1
assert findings[0].status == "FAIL"
assert "not explicitly configured" in findings[0].status_extended
assert findings[0].status == "PASS"
assert "secure default" in findings[0].status_extended
def test_no_findings_when_fetch_failed(self):
"""Test no findings returned when the API fetch failed"""
@@ -101,8 +101,8 @@ class TestDriveSharedDriveDisableDownloadPrintCopy:
assert findings[0].status == "FAIL"
assert "ALL" in findings[0].status_extended
-def test_fail_no_policy_set(self):
-"""Test FAIL when no explicit policy is set (None) but fetch succeeded"""
+def test_pass_using_default(self):
+"""Test PASS when no explicit policy is set (None) — Google default is secure"""
mock_provider = set_mocked_googleworkspace_provider()
with (
@@ -128,8 +128,8 @@ class TestDriveSharedDriveDisableDownloadPrintCopy:
findings = check.execute()
assert len(findings) == 1
assert findings[0].status == "FAIL"
assert "not explicitly configured" in findings[0].status_extended
assert findings[0].status == "PASS"
assert "secure default" in findings[0].status_extended
def test_no_findings_when_fetch_failed(self):
"""Test no findings returned when the API fetch failed"""
@@ -71,8 +71,8 @@ class TestDriveWarnSharingWithAllowlistedDomains:
assert findings[0].status == "FAIL"
assert "disabled" in findings[0].status_extended
-def test_fail_no_policy_set(self):
-"""Test FAIL when no explicit policy is set (None) but fetch succeeded"""
+def test_pass_using_default(self):
+"""Test PASS when no explicit policy is set (None) — Google default is secure"""
mock_provider = set_mocked_googleworkspace_provider()
with (
@@ -98,8 +98,8 @@ class TestDriveWarnSharingWithAllowlistedDomains:
findings = check.execute()
assert len(findings) == 1
assert findings[0].status == "FAIL"
assert "not explicitly configured" in findings[0].status_extended
assert findings[0].status == "PASS"
assert "secure default" in findings[0].status_extended
def test_no_findings_when_fetch_failed(self):
"""Test no findings returned when the API fetch failed"""
@@ -1185,3 +1185,58 @@ class TestInitGlobalProviderRegistryEnumeration:
# The "other/lib" repo should be filtered out by --image-filter
assert not any("other/lib" in img for img in provider.images)
assert len(provider.images) == 3
class TestRegistryListMode:
"""Regression test: `prowler image --registry <url> --registry-list` crashes.
When --registry-list is passed, ImageProvider._enumerate_registry sets
_listing_only = True and __init__ returns early before calling
Provider.set_global_provider(self). The caller in __main__.py then calls
global_provider.print_credentials() on a None reference, raising
AttributeError: 'NoneType' object has no attribute 'print_credentials'.
"""
@patch("prowler.providers.image.image_provider.create_registry_adapter")
@patch("prowler.providers.common.provider.load_and_validate_config_file")
def test_registry_list_does_not_crash(self, mock_load_config, mock_adapter_factory):
"""Reproduce the --registry-list crash by running the same sequence
as __main__.py: init_global_provider, get_global_provider,
then print_credentials."""
mock_load_config.return_value = {}
adapter = MagicMock()
adapter.list_repositories.return_value = ["myorg/app"]
adapter.list_tags.return_value = ["v1.0", "latest"]
mock_adapter_factory.return_value = adapter
arguments = Namespace(
provider="image",
config_file=None,
fixer_config=None,
images=None,
image_list_file=None,
scanners=["vuln"],
image_config_scanners=None,
trivy_severity=None,
ignore_unfixed=False,
timeout="5m",
registry="myregistry.io",
image_filter=None,
tag_filter=None,
max_images=0,
registry_insecure=False,
registry_list_images=True,
)
# Reproduce the exact crash sequence from __main__.py lines 289-294:
# Provider.init_global_provider(args)
# global_provider = Provider.get_global_provider()
# global_provider.print_credentials()
with mock.patch.object(Provider, "_global", None):
Provider.init_global_provider(arguments)
global_provider = Provider.get_global_provider()
# This is the line that crashes: global_provider is None so
# .print_credentials() raises AttributeError.
global_provider.print_credentials()