mirror of
https://github.com/prowler-cloud/prowler.git
synced 2026-05-17 09:43:28 +00:00
Compare commits
10 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| bede10508f | |||
| b4fe70efa5 | |||
| 557284efb1 | |||
| 0204d1e02d | |||
| 9bd4e4b65c | |||
| 8ef326b276 | |||
| 68ffb2b219 | |||
| 739be07077 | |||
| 0abbb7fc59 | |||
| 0b4393776c |
@@ -36,6 +36,7 @@ Please add a detailed description of how to review this PR.
|
||||
|
||||
#### UI
|
||||
- [ ] All issue/task requirements work as expected on the UI
|
||||
- [ ] If this PR adds or updates npm dependencies, include package-health evidence (maintenance, popularity, known vulnerabilities, license, release age) and explain why existing/native alternatives are insufficient.
|
||||
- [ ] Screenshots/Video of the functionality flow (if applicable) - Mobile (X < 640px)
|
||||
- [ ] Screenshots/Video of the functionality flow (if applicable) - Table (640px > X < 1024px)
|
||||
- [ ] Screenshots/Video of the functionality flow (if applicable) - Desktop (X > 1024px)
|
||||
|
||||
@@ -126,7 +126,7 @@ jobs:
|
||||
|
||||
- name: Upload coverage reports to Codecov
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6.0.0
|
||||
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
|
||||
@@ -221,7 +221,7 @@ jobs:
|
||||
|
||||
- name: Upload AWS coverage to Codecov
|
||||
if: steps.changed-aws.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6.0.0
|
||||
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
@@ -245,7 +245,7 @@ jobs:
|
||||
|
||||
- name: Upload Azure coverage to Codecov
|
||||
if: steps.changed-azure.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6.0.0
|
||||
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
@@ -269,7 +269,7 @@ jobs:
|
||||
|
||||
- name: Upload GCP coverage to Codecov
|
||||
if: steps.changed-gcp.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6.0.0
|
||||
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
@@ -293,7 +293,7 @@ jobs:
|
||||
|
||||
- name: Upload Kubernetes coverage to Codecov
|
||||
if: steps.changed-kubernetes.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6.0.0
|
||||
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
@@ -317,7 +317,7 @@ jobs:
|
||||
|
||||
- name: Upload GitHub coverage to Codecov
|
||||
if: steps.changed-github.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6.0.0
|
||||
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
@@ -341,7 +341,7 @@ jobs:
|
||||
|
||||
- name: Upload Okta coverage to Codecov
|
||||
if: steps.changed-okta.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6.0.0
|
||||
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
@@ -365,7 +365,7 @@ jobs:
|
||||
|
||||
- name: Upload NHN coverage to Codecov
|
||||
if: steps.changed-nhn.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6.0.0
|
||||
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
@@ -389,7 +389,7 @@ jobs:
|
||||
|
||||
- name: Upload M365 coverage to Codecov
|
||||
if: steps.changed-m365.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6.0.0
|
||||
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
@@ -413,7 +413,7 @@ jobs:
|
||||
|
||||
- name: Upload IaC coverage to Codecov
|
||||
if: steps.changed-iac.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6.0.0
|
||||
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
@@ -437,7 +437,7 @@ jobs:
|
||||
|
||||
- name: Upload MongoDB Atlas coverage to Codecov
|
||||
if: steps.changed-mongodbatlas.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6.0.0
|
||||
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
@@ -461,7 +461,7 @@ jobs:
|
||||
|
||||
- name: Upload OCI coverage to Codecov
|
||||
if: steps.changed-oraclecloud.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6.0.0
|
||||
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
@@ -485,7 +485,7 @@ jobs:
|
||||
|
||||
- name: Upload OpenStack coverage to Codecov
|
||||
if: steps.changed-openstack.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6.0.0
|
||||
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
@@ -509,7 +509,7 @@ jobs:
|
||||
|
||||
- name: Upload Google Workspace coverage to Codecov
|
||||
if: steps.changed-googleworkspace.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6.0.0
|
||||
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
@@ -533,7 +533,7 @@ jobs:
|
||||
|
||||
- name: Upload Vercel coverage to Codecov
|
||||
if: steps.changed-vercel.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6.0.0
|
||||
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
@@ -557,7 +557,7 @@ jobs:
|
||||
|
||||
- name: Upload Lib coverage to Codecov
|
||||
if: steps.changed-lib.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6.0.0
|
||||
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
@@ -581,7 +581,7 @@ jobs:
|
||||
|
||||
- name: Upload Config coverage to Codecov
|
||||
if: steps.changed-config.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6.0.0
|
||||
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
|
||||
@@ -132,6 +132,10 @@ jobs:
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
run: pnpm run healthcheck
|
||||
|
||||
- name: Run pnpm audit
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
run: pnpm run audit
|
||||
|
||||
- name: Run unit tests (all - critical paths changed)
|
||||
if: steps.check-changes.outputs.any_changed == 'true' && steps.critical-changes.outputs.any_changed == 'true'
|
||||
run: |
|
||||
|
||||
@@ -121,6 +121,7 @@ Every AWS provider scan will enqueue an Attack Paths ingestion job automatically
|
||||
| OpenStack | 34 | 5 | 0 | 9 | Official | UI, API, CLI |
|
||||
| Vercel | 26 | 6 | 0 | 5 | Official | UI, API, CLI |
|
||||
| Okta | 1 | 1 | 0 | 1 | Official | CLI |
|
||||
| Scaleway [Contact us](https://prowler.com/contact) | 1 | 1 | 0 | 1 | Unofficial | CLI |
|
||||
| NHN | 6 | 2 | 1 | 0 | Unofficial | CLI |
|
||||
|
||||
> [!Note]
|
||||
|
||||
+2
-2
@@ -10,10 +10,10 @@ This repository contains the Prowler Open Source documentation powered by [Mintl
|
||||
|
||||
## Local Development
|
||||
|
||||
Install the [Mintlify CLI](https://www.npmjs.com/package/mint) to preview documentation changes locally:
|
||||
Install a reviewed version of the [Mintlify CLI](https://www.npmjs.com/package/mint) to preview documentation changes locally:
|
||||
|
||||
```bash
|
||||
npm i -g mint
|
||||
npm install --global mint@4.2.560
|
||||
```
|
||||
|
||||
Run the following command at the root of your documentation (where `mint.json` is located):
|
||||
|
||||
@@ -28,7 +28,7 @@ This includes the [AGENTS.md](https://github.com/prowler-cloud/prowler/blob/mast
|
||||
<Steps>
|
||||
<Step title="Install Mintlify CLI">
|
||||
```bash
|
||||
npm i -g mint
|
||||
npm install --global mint@4.2.560
|
||||
```
|
||||
For detailed instructions, check the [Mintlify documentation](https://www.mintlify.com/docs/installation).
|
||||
</Step>
|
||||
|
||||
@@ -326,6 +326,12 @@
|
||||
"user-guide/providers/openstack/authentication"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Scaleway",
|
||||
"pages": [
|
||||
"user-guide/providers/scaleway/getting-started-scaleway"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Vercel",
|
||||
"pages": [
|
||||
|
||||
@@ -10,7 +10,7 @@ Complete reference guide for all tools available in the Prowler MCP Server. Tool
|
||||
|----------|------------|------------------------|
|
||||
| Prowler Hub | 10 tools | No |
|
||||
| Prowler Documentation | 2 tools | No |
|
||||
| Prowler Cloud/App | 29 tools | Yes |
|
||||
| Prowler Cloud/App | 32 tools | Yes |
|
||||
|
||||
## Tool Naming Convention
|
||||
|
||||
@@ -36,6 +36,14 @@ Tools for searching, viewing, and analyzing security findings across all cloud p
|
||||
- **`prowler_app_get_finding_details`** - Get comprehensive details about a specific finding including remediation guidance, check metadata, and resource relationships
|
||||
- **`prowler_app_get_findings_overview`** - Get aggregate statistics and trends about security findings as a markdown report
|
||||
|
||||
### Finding Groups Management
|
||||
|
||||
Tools for listing finding groups aggregated by check ID, viewing complete group counters, and drilling down into affected resources.
|
||||
|
||||
- **`prowler_app_list_finding_groups`** - List latest or historical finding groups with filters for provider, region, service, resource, category, check, severity, status, muted state, delta, date range, and sorting
|
||||
- **`prowler_app_get_finding_group_details`** - Get complete details for a specific finding group including counters, description, timestamps, and impacted providers
|
||||
- **`prowler_app_list_finding_group_resources`** - List actionable unmuted resources affected by a finding group by default, including nested resource and provider data plus the `finding_id` for remediation details. Set `include_muted` to include suppressed resources
|
||||
|
||||
### Provider Management
|
||||
|
||||
Tools for managing cloud provider connections in Prowler.
|
||||
|
||||
@@ -44,13 +44,21 @@ Choose the configuration based on your deployment:
|
||||
|
||||
<Tab title="Generic without Native HTTP Support">
|
||||
**Configuration:**
|
||||
<Warning>
|
||||
Avoid configuring MCP clients to run `npx mcp-remote` directly. `npx` can download and execute a new package version on each run. Install a reviewed version of `mcp-remote` in a dedicated local workspace, then point the MCP client to the installed binary.
|
||||
</Warning>
|
||||
```bash
|
||||
mkdir -p ~/.local/share/prowler-mcp-bridge
|
||||
cd ~/.local/share/prowler-mcp-bridge
|
||||
npm init -y
|
||||
npm install --save-exact mcp-remote@0.1.38
|
||||
```
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"prowler": {
|
||||
"command": "npx",
|
||||
"command": "/absolute/path/to/.local/share/prowler-mcp-bridge/node_modules/.bin/mcp-remote",
|
||||
"args": [
|
||||
"mcp-remote",
|
||||
"https://mcp.prowler.com/mcp", // or your self-hosted Prowler MCP Server URL
|
||||
"--header",
|
||||
"Authorization: Bearer ${PROWLER_APP_API_KEY}"
|
||||
@@ -72,14 +80,20 @@ Choose the configuration based on your deployment:
|
||||
2. Go to "Developer" tab
|
||||
3. Click in "Edit Config" button
|
||||
4. Edit the `claude_desktop_config.json` file with your favorite editor
|
||||
5. Add the following configuration:
|
||||
5. Install a reviewed version of `mcp-remote` in a dedicated local workspace:
|
||||
```bash
|
||||
mkdir -p ~/.local/share/prowler-mcp-bridge
|
||||
cd ~/.local/share/prowler-mcp-bridge
|
||||
npm init -y
|
||||
npm install --save-exact mcp-remote@0.1.38
|
||||
```
|
||||
6. Add the following configuration:
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"prowler": {
|
||||
"command": "npx",
|
||||
"command": "/absolute/path/to/.local/share/prowler-mcp-bridge/node_modules/.bin/mcp-remote",
|
||||
"args": [
|
||||
"mcp-remote",
|
||||
"https://mcp.prowler.com/mcp",
|
||||
"--header",
|
||||
"Authorization: Bearer ${PROWLER_APP_API_KEY}"
|
||||
|
||||
@@ -38,7 +38,7 @@ Refer to the [Prowler App Tutorial](/user-guide/tutorials/prowler-app) for detai
|
||||
|
||||
- `git` installed.
|
||||
- `poetry` installed: [poetry installation](https://python-poetry.org/docs/#installation).
|
||||
- `npm` installed: [npm installation](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm).
|
||||
- `pnpm` installed through [Corepack](https://pnpm.io/installation#using-corepack) or the standalone [pnpm installation](https://pnpm.io/installation).
|
||||
- `Docker Compose` installed: https://docs.docker.com/compose/install/.
|
||||
|
||||
<Warning>
|
||||
@@ -97,9 +97,11 @@ Refer to the [Prowler App Tutorial](/user-guide/tutorials/prowler-app) for detai
|
||||
```bash
|
||||
git clone https://github.com/prowler-cloud/prowler \
|
||||
cd prowler/ui \
|
||||
npm install \
|
||||
npm run build \
|
||||
npm start
|
||||
corepack enable \
|
||||
corepack install \
|
||||
pnpm install --frozen-lockfile \
|
||||
pnpm run build \
|
||||
pnpm start
|
||||
```
|
||||
|
||||
> Enjoy Prowler App at http://localhost:3000 by signing up with your email and password.
|
||||
|
||||
@@ -35,6 +35,7 @@ Prowler supports a wide range of providers organized by category:
|
||||
| **NHN** | Unofficial | Tenants | CLI |
|
||||
| [OpenStack](/user-guide/providers/openstack/getting-started-openstack) | Official | Projects | UI, API, CLI |
|
||||
| [Oracle Cloud](/user-guide/providers/oci/getting-started-oci) | Official | Tenancies / Compartments | UI, API, CLI |
|
||||
| [Scaleway](/user-guide/providers/scaleway/getting-started-scaleway) [Contact us](https://prowler.com/contact) | Unofficial | Organizations | CLI |
|
||||
|
||||
### Infrastructure as Code Providers
|
||||
|
||||
|
||||
@@ -18,9 +18,11 @@ prowler <provider> --scan-unused-services
|
||||
|
||||
#### ACM (AWS Certificate Manager)
|
||||
|
||||
Certificates stored in ACM without active usage in AWS resources are excluded. By default, Prowler only scans actively used certificates. Unused certificates will not be checked if they are expired, if their expiring date is near or if they are good.
|
||||
Certificates stored in ACM without active usage in AWS resources are excluded. By default, Prowler only scans actively used certificates. Unused certificates are not evaluated for expiration, transparency logging, or weak key algorithms.
|
||||
|
||||
- `acm_certificates_expiration_check`
|
||||
- `acm_certificates_transparency_logs_enabled`
|
||||
- `acm_certificates_with_secure_key_algorithms`
|
||||
|
||||
#### Athena
|
||||
|
||||
@@ -28,6 +30,13 @@ Upon AWS account creation, Athena provisions a default primary workgroup for the
|
||||
|
||||
- `athena_workgroup_encryption`
|
||||
- `athena_workgroup_enforce_configuration`
|
||||
- `athena_workgroup_logging_enabled`
|
||||
|
||||
#### Amazon Bedrock
|
||||
|
||||
Generative AI workloads benefit from private VPC endpoint connectivity to keep prompt and model traffic off the public internet. Prowler only evaluates this configuration for VPCs in use (with active ENIs).
|
||||
|
||||
- `bedrock_vpc_endpoints_configured`
|
||||
|
||||
#### AWS CloudTrail
|
||||
|
||||
@@ -38,15 +47,23 @@ AWS CloudTrail should have at least one trail with a data event to record all S3
|
||||
|
||||
#### AWS Elastic Compute Cloud (EC2)
|
||||
|
||||
If Amazon Elastic Block Store (EBS) default encyption is not enabled, sensitive data at rest will remain unprotected in EC2. However, Prowler will only generate a finding if EBS volumes exist where default encryption could be enforced.
|
||||
If Amazon Elastic Block Store (EBS) default encryption is not enabled, sensitive data at rest remains unprotected in EC2. Prowler only generates a finding if EBS volumes exist where default encryption could be enforced.
|
||||
|
||||
- `ec2_ebs_default_encryption`
|
||||
|
||||
**EBS Snapshot Public Access**: Public EBS snapshots can leak data. Prowler only evaluates the account-level block setting if EBS snapshots exist in the account.
|
||||
|
||||
- `ec2_ebs_snapshot_account_block_public_access`
|
||||
|
||||
**EC2 Instance Metadata Service (IMDS)**: Enforcing IMDSv2 at the account level mitigates SSRF-based credential theft. Prowler only evaluates the account-level setting if EC2 instances exist in the account.
|
||||
|
||||
- `ec2_instance_account_imdsv2_enabled`
|
||||
|
||||
**Security Groups**: Misconfigured security groups increase the attack surface.
|
||||
|
||||
Prowler scans only attached security groups to report vulnerabilities in actively used configurations. Applies to:
|
||||
|
||||
- 15 security group-related checks, including open ports and ingress/egress traffic rules.
|
||||
- 20 security group-related checks, including open ports and ingress/egress traffic rules.
|
||||
|
||||
- `ec2_securitygroup_allow_ingress_from_internet_to_port_X`
|
||||
- `ec2_securitygroup_default_restrict_traffic`
|
||||
@@ -56,6 +73,18 @@ Prowler scans only attached security groups to report vulnerabilities in activel
|
||||
|
||||
- `ec2_networkacl_allow_ingress_X_port`
|
||||
|
||||
#### AWS Identity and Access Management (IAM)
|
||||
|
||||
Customer-managed IAM policies that are not attached to any user, group, or role grant no effective permissions until a principal is bound to them. Prowler treats such policies as dormant by default and skips the content-evaluation checks below when `--scan-unused-services` is not set. Enable the flag to surface findings on unattached policies as well.
|
||||
|
||||
- `iam_policy_allows_privilege_escalation`
|
||||
- `iam_policy_no_full_access_to_cloudtrail`
|
||||
- `iam_policy_no_full_access_to_kms`
|
||||
- `iam_policy_no_wildcard_marketplace_subscribe`
|
||||
- `iam_no_custom_policy_permissive_role_assumption`
|
||||
|
||||
The dedicated `iam_customer_unattached_policy_no_administrative_privileges` check still inspects unattached policies regardless of the flag, since its purpose is to highlight dormant administrator privileges.
|
||||
|
||||
#### AWS Glue
|
||||
|
||||
AWS Glue best practices recommend encrypting metadata and connection passwords in Data Catalogs.
|
||||
@@ -71,6 +100,12 @@ Amazon Inspector is a vulnerability discovery service that automates continuous
|
||||
|
||||
- `inspector2_is_enabled`
|
||||
|
||||
#### AWS Key Management Service (KMS)
|
||||
|
||||
Customer managed Customer Master Keys (CMKs) in the `Disabled` state cannot be used for cryptographic operations, so Prowler skips the unintentional-deletion check on them by default. Enable the flag to evaluate disabled CMKs as well.
|
||||
|
||||
- `kms_cmk_not_deleted_unintentionally`
|
||||
|
||||
#### Amazon Macie
|
||||
|
||||
Amazon Macie leverages machine learning to automatically discover, classify, and protect sensitive data in S3 buckets. Prowler only generates findings if Macie is disabled and there are S3 buckets in the AWS account.
|
||||
@@ -83,6 +118,15 @@ A network firewall is essential for monitoring and controlling traffic within a
|
||||
|
||||
- `networkfirewall_in_all_vpc`
|
||||
|
||||
#### Amazon Relational Database Service (RDS)
|
||||
|
||||
RDS event subscriptions notify operators of critical database events. Prowler only evaluates these subscription checks when RDS clusters or instances exist in the account.
|
||||
|
||||
- `rds_cluster_critical_event_subscription`
|
||||
- `rds_instance_critical_event_subscription`
|
||||
- `rds_instance_event_subscription_parameter_groups`
|
||||
- `rds_instance_event_subscription_security_groups`
|
||||
|
||||
#### Amazon S3
|
||||
|
||||
To prevent unintended data exposure:
|
||||
@@ -99,6 +143,10 @@ VPC settings directly impact network security and availability.
|
||||
|
||||
- `vpc_flow_logs_enabled`
|
||||
|
||||
- VPC Endpoint for EC2: Routes EC2 API calls through a private VPC endpoint to keep traffic off the public internet. Prowler only evaluates this configuration for VPCs in use, i.e., those with active ENIs.
|
||||
|
||||
- `vpc_endpoint_for_ec2_enabled`
|
||||
|
||||
- VPC Subnet Public IP Restrictions: Prevent unintended exposure of resources to the internet. Prowler only checks this configuration for VPCs in use, i.e., those with active ENIs.
|
||||
|
||||
- `vpc_subnet_no_public_ip_by_default`
|
||||
|
||||
@@ -22,7 +22,7 @@ Install promptfoo using one of the following methods:
|
||||
|
||||
**Using npm:**
|
||||
```bash
|
||||
npm install -g promptfoo
|
||||
npm install --global promptfoo@0.121.11
|
||||
```
|
||||
|
||||
**Using Homebrew (macOS):**
|
||||
|
||||
@@ -0,0 +1,51 @@
|
||||
---
|
||||
title: "Getting Started With Scaleway on Prowler"
|
||||
---
|
||||
|
||||
Prowler for Scaleway scans IAM resources in your Scaleway organization for security misconfigurations. The current release ships one check that flags API keys still owned by the account root user.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
1. A Scaleway organization with IAM access.
|
||||
2. A Scaleway API key with at least the `IAMReadOnly` policy bound to a dedicated IAM user (do not use the account root user).
|
||||
3. Your organization ID (visible at the top right of the Scaleway console).
|
||||
|
||||
## Authentication
|
||||
|
||||
Prowler reads credentials from the standard Scaleway environment variables:
|
||||
|
||||
| Variable | Purpose |
|
||||
|---|---|
|
||||
| `SCW_ACCESS_KEY` | API key access key |
|
||||
| `SCW_SECRET_KEY` | API key secret key |
|
||||
| `SCW_DEFAULT_ORGANIZATION_ID` | Optional, required when the key bearer is an application |
|
||||
| `SCW_DEFAULT_PROJECT_ID` | Optional, default project for project-scoped resources |
|
||||
| `SCW_DEFAULT_REGION` | Optional, defaults to `fr-par` |
|
||||
|
||||
Alternatively, pass them as CLI flags (`--access-key`, `--secret-key`, `--organization-id`, `--project-id`, `--region`). The CLI emits a warning when secrets are passed via the command line; environment variables are preferred.
|
||||
|
||||
## Run a scan
|
||||
|
||||
```bash
|
||||
export SCW_ACCESS_KEY="SCW..."
|
||||
export SCW_SECRET_KEY="..."
|
||||
export SCW_DEFAULT_ORGANIZATION_ID="..."
|
||||
|
||||
prowler scaleway
|
||||
```
|
||||
|
||||
To run only the IAM root-key check:
|
||||
|
||||
```bash
|
||||
prowler scaleway --check iam_no_root_api_keys
|
||||
```
|
||||
|
||||
## Checks shipped
|
||||
|
||||
| Check ID | Severity | Description |
|
||||
|---|---|---|
|
||||
| `iam_no_root_api_keys` | Critical | Fails when any Scaleway IAM API key is still owned by the account root user. |
|
||||
|
||||
## Required Scaleway permissions
|
||||
|
||||
The API key bearer needs read access to the IAM API in order to list users and API keys. The `IAMReadOnly` policy is sufficient. Refer to the [Scaleway IAM policy reference](https://www.scaleway.com/en/docs/identity-and-access-management/iam/reference-content/permission-sets/) for the full list of permissions.
|
||||
@@ -4,6 +4,10 @@ All notable changes to the **Prowler MCP Server** are documented in this file.
|
||||
|
||||
## [0.7.0] (Prowler UNRELEASED)
|
||||
|
||||
### 🚀 Added
|
||||
|
||||
- MCP Server tools for Prowler Finding Groups Management [(#11140)](https://github.com/prowler-cloud/prowler/pull/11140)
|
||||
|
||||
### 🔐 Security
|
||||
|
||||
- `cryptography` from 46.0.1 to 47.0.0 (transitive) for CVE-2026-39892 and CVE-2026-26007 / CVE-2026-34073 [(#10978)](https://github.com/prowler-cloud/prowler/pull/10978)
|
||||
|
||||
+11
-2
@@ -10,6 +10,7 @@
|
||||
|
||||
Full access to Prowler Cloud platform and self-managed Prowler App for:
|
||||
- **Findings Analysis**: Query, filter, and analyze security findings across all your cloud environments
|
||||
- **Finding Groups Analysis**: Triage findings grouped by check ID and drill down into affected resources
|
||||
- **Provider Management**: Create, configure, and manage your configured Prowler providers (AWS, Azure, GCP, etc.)
|
||||
- **Scan Orchestration**: Trigger on-demand scans and schedule recurring security assessments
|
||||
- **Resource Inventory**: Search and view detailed information about your audited resources
|
||||
@@ -56,13 +57,21 @@ Prowler MCP Server can be used in three ways:
|
||||
- Managed and maintained by Prowler team
|
||||
- Always up-to-date
|
||||
|
||||
Install a reviewed version of `mcp-remote` in a dedicated local workspace first. Avoid running `npx mcp-remote` directly because it can download and execute a new package version on each run.
|
||||
|
||||
```bash
|
||||
mkdir -p ~/.local/share/prowler-mcp-bridge
|
||||
cd ~/.local/share/prowler-mcp-bridge
|
||||
npm init -y
|
||||
npm install --save-exact mcp-remote@0.1.38
|
||||
```
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"prowler": {
|
||||
"command": "npx",
|
||||
"command": "/absolute/path/to/.local/share/prowler-mcp-bridge/node_modules/.bin/mcp-remote",
|
||||
"args": [
|
||||
"mcp-remote",
|
||||
"https://mcp.prowler.com/mcp",
|
||||
"--header",
|
||||
"Authorization: Bearer pk_YOUR_API_KEY_HERE"
|
||||
|
||||
@@ -0,0 +1,292 @@
|
||||
"""Pydantic models for Prowler Finding Groups responses."""
|
||||
|
||||
from typing import Literal
|
||||
|
||||
from pydantic import Field
|
||||
|
||||
from prowler_mcp_server.prowler_app.models.base import MinimalSerializerMixin
|
||||
|
||||
|
||||
FindingStatus = Literal["FAIL", "PASS", "MANUAL"]
|
||||
FindingSeverity = Literal["critical", "high", "medium", "low", "informational"]
|
||||
FindingDelta = Literal["new", "changed"]
|
||||
|
||||
|
||||
def _attributes(data: dict) -> dict:
|
||||
return data.get("attributes", {})
|
||||
|
||||
|
||||
def _counter(attributes: dict, key: str) -> int:
|
||||
return attributes.get(key) or 0
|
||||
|
||||
|
||||
def _simplified_group_kwargs(data: dict) -> dict:
|
||||
attributes = _attributes(data)
|
||||
return {
|
||||
"check_id": attributes.get("check_id", data.get("id", "")),
|
||||
"check_title": attributes.get("check_title"),
|
||||
"severity": attributes.get("severity", "informational"),
|
||||
"status": attributes.get("status", "MANUAL"),
|
||||
"muted": attributes.get("muted", False),
|
||||
"impacted_providers": attributes.get("impacted_providers") or [],
|
||||
"resources_fail": _counter(attributes, "resources_fail"),
|
||||
"resources_total": _counter(attributes, "resources_total"),
|
||||
"pass_count": _counter(attributes, "pass_count"),
|
||||
"fail_count": _counter(attributes, "fail_count"),
|
||||
"manual_count": _counter(attributes, "manual_count"),
|
||||
"muted_count": _counter(attributes, "muted_count"),
|
||||
"new_count": _counter(attributes, "new_count"),
|
||||
"changed_count": _counter(attributes, "changed_count"),
|
||||
"first_seen_at": attributes.get("first_seen_at"),
|
||||
"last_seen_at": attributes.get("last_seen_at"),
|
||||
"failing_since": attributes.get("failing_since"),
|
||||
}
|
||||
|
||||
|
||||
class SimplifiedFindingGroup(MinimalSerializerMixin):
|
||||
"""Finding group summary optimized for browsing many checks."""
|
||||
|
||||
check_id: str = Field(description="Public check ID that identifies this group")
|
||||
check_title: str | None = Field(
|
||||
default=None, description="Human-readable check title"
|
||||
)
|
||||
severity: FindingSeverity = Field(description="Highest severity in the group")
|
||||
status: FindingStatus = Field(description="Aggregated finding group status")
|
||||
muted: bool = Field(
|
||||
description="Whether all findings in this group are muted or accepted"
|
||||
)
|
||||
impacted_providers: list[str] = Field(
|
||||
default_factory=list,
|
||||
description="Provider types impacted by this finding group",
|
||||
)
|
||||
resources_fail: int = Field(
|
||||
description="Number of non-muted failing resources in this group", ge=0
|
||||
)
|
||||
resources_total: int = Field(
|
||||
description="Total number of resources in this group", ge=0
|
||||
)
|
||||
pass_count: int = Field(
|
||||
description="Number of non-muted PASS findings in this group", ge=0
|
||||
)
|
||||
fail_count: int = Field(
|
||||
description="Number of non-muted FAIL findings in this group", ge=0
|
||||
)
|
||||
manual_count: int = Field(
|
||||
description="Number of non-muted MANUAL findings in this group", ge=0
|
||||
)
|
||||
muted_count: int = Field(description="Total muted findings in this group", ge=0)
|
||||
new_count: int = Field(description="Number of new non-muted findings", ge=0)
|
||||
changed_count: int = Field(description="Number of changed non-muted findings", ge=0)
|
||||
first_seen_at: str | None = Field(
|
||||
default=None, description="First time this group was detected"
|
||||
)
|
||||
last_seen_at: str | None = Field(
|
||||
default=None, description="Last time this group was detected"
|
||||
)
|
||||
failing_since: str | None = Field(
|
||||
default=None, description="First time this group started failing"
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_api_response(cls, data: dict) -> "SimplifiedFindingGroup":
|
||||
"""Transform JSON:API finding group response to simplified format."""
|
||||
return cls(**_simplified_group_kwargs(data))
|
||||
|
||||
|
||||
class DetailedFindingGroup(SimplifiedFindingGroup):
    """Finding group with complete counters and descriptive context."""

    check_description: str | None = Field(
        default=None, description="Description of the check behind this group"
    )
    pass_muted_count: int = Field(description="Muted PASS findings", ge=0)
    fail_muted_count: int = Field(description="Muted FAIL findings", ge=0)
    manual_muted_count: int = Field(description="Muted MANUAL findings", ge=0)
    new_fail_count: int = Field(description="New non-muted FAIL findings", ge=0)
    new_fail_muted_count: int = Field(description="New muted FAIL findings", ge=0)
    new_pass_count: int = Field(description="New non-muted PASS findings", ge=0)
    new_pass_muted_count: int = Field(description="New muted PASS findings", ge=0)
    new_manual_count: int = Field(description="New non-muted MANUAL findings", ge=0)
    new_manual_muted_count: int = Field(description="New muted MANUAL findings", ge=0)
    changed_fail_count: int = Field(description="Changed non-muted FAIL findings", ge=0)
    changed_fail_muted_count: int = Field(
        description="Changed muted FAIL findings", ge=0
    )
    changed_pass_count: int = Field(description="Changed non-muted PASS findings", ge=0)
    changed_pass_muted_count: int = Field(
        description="Changed muted PASS findings", ge=0
    )
    changed_manual_count: int = Field(
        description="Changed non-muted MANUAL findings", ge=0
    )
    changed_manual_muted_count: int = Field(
        description="Changed muted MANUAL findings", ge=0
    )

    @classmethod
    def from_api_response(cls, data: dict) -> "DetailedFindingGroup":
        """Transform JSON:API finding group response to detailed format."""
        attributes = _attributes(data)

        # Every counter field is extracted the same way, so drive the
        # keyword arguments from the list of counter names instead of
        # spelling out fifteen identical _counter(...) calls.
        counter_names = (
            "pass_muted_count",
            "fail_muted_count",
            "manual_muted_count",
            "new_fail_count",
            "new_fail_muted_count",
            "new_pass_count",
            "new_pass_muted_count",
            "new_manual_count",
            "new_manual_muted_count",
            "changed_fail_count",
            "changed_fail_muted_count",
            "changed_pass_count",
            "changed_pass_muted_count",
            "changed_manual_count",
            "changed_manual_muted_count",
        )

        kwargs = dict(_simplified_group_kwargs(data))
        kwargs["check_description"] = attributes.get("check_description")
        for name in counter_names:
            kwargs[name] = _counter(attributes, name)

        return cls(**kwargs)
|
||||
|
||||
|
||||
class FindingGroupsListResponse(MinimalSerializerMixin):
    """Paginated response for finding group list queries."""

    groups: list[SimplifiedFindingGroup] = Field(
        description="Finding groups matching the query"
    )
    total_num_groups: int = Field(
        description="Total groups matching the query across all pages", ge=0
    )
    total_num_pages: int = Field(description="Total pages available", ge=0)
    current_page: int = Field(description="Current page number", ge=1)

    @classmethod
    def from_api_response(cls, response: dict) -> "FindingGroupsListResponse":
        """Transform JSON:API list response to simplified format.

        Args:
            response: Raw JSON:API payload with ``data`` and ``meta`` keys.

        Returns:
            FindingGroupsListResponse with null-safe pagination metadata.
        """
        # An explicit JSON null ("meta": null or "pagination": null) would
        # bypass .get() defaults and crash the nested lookup, so coerce
        # null containers to empty dicts before reading from them.
        pagination = (response.get("meta") or {}).get("pagination") or {}
        groups = [
            SimplifiedFindingGroup.from_api_response(item)
            for item in response.get("data", [])
        ]

        # Treat an explicit null counter the same as a missing key so a
        # sparse pagination block cannot fail integer validation.
        count = pagination.get("count")
        pages = pagination.get("pages")
        page = pagination.get("page")

        return cls(
            groups=groups,
            total_num_groups=count if count is not None else len(groups),
            total_num_pages=pages if pages is not None else 1,
            current_page=page if page is not None else 1,
        )
|
||||
|
||||
|
||||
class FindingGroupResourceInfo(MinimalSerializerMixin):
    """Nested resource information for a finding group row."""

    uid: str = Field(description="Provider-native resource UID")
    name: str = Field(description="Resource name")
    service: str = Field(description="Cloud service")
    region: str = Field(description="Cloud region")
    type: str = Field(description="Resource type")
    resource_group: str | None = Field(
        default=None, description="Provider resource group or equivalent"
    )

    @classmethod
    def from_api_response(cls, data: dict) -> "FindingGroupResourceInfo":
        """Transform nested resource data to simplified format."""
        # Required string fields fall back to "" when absent; the optional
        # resource_group keeps None as its missing value.
        required_fields = ("uid", "name", "service", "region", "type")
        return cls(
            resource_group=data.get("resource_group"),
            **{field: data.get(field, "") for field in required_fields},
        )
|
||||
|
||||
|
||||
class FindingGroupProviderInfo(MinimalSerializerMixin):
    """Nested provider information for a finding group resource row."""

    type: str = Field(description="Provider type")
    uid: str = Field(description="Provider-native account or subscription ID")
    alias: str | None = Field(default=None, description="Provider alias")

    @classmethod
    def from_api_response(cls, data: dict) -> "FindingGroupProviderInfo":
        """Transform nested provider data to simplified format."""
        # Required strings default to ""; alias stays None when missing.
        values = {key: data.get(key, "") for key in ("type", "uid")}
        values["alias"] = data.get("alias")
        return cls(**values)
|
||||
|
||||
|
||||
class FindingGroupResource(MinimalSerializerMixin):
    """Resource row affected by a finding group."""

    id: str = Field(description="Row identifier for this finding group resource")
    resource: FindingGroupResourceInfo = Field(description="Affected resource")
    provider: FindingGroupProviderInfo = Field(description="Affected provider")
    finding_id: str = Field(
        description="Finding UUID to use with prowler_app_get_finding_details"
    )
    status: FindingStatus = Field(description="Finding status for this resource")
    severity: FindingSeverity = Field(description="Finding severity")
    muted: bool = Field(description="Whether the finding is muted")
    delta: FindingDelta | None = Field(default=None, description="Change status")
    first_seen_at: str | None = Field(default=None, description="First seen time")
    last_seen_at: str | None = Field(default=None, description="Last seen time")
    muted_reason: str | None = Field(default=None, description="Mute reason")

    @classmethod
    def from_api_response(cls, data: dict) -> "FindingGroupResource":
        """Transform JSON:API finding group resource response.

        Explicit JSON nulls are normalized to the same defaults used for
        missing keys: previously ``str(attributes.get("finding_id", ""))``
        produced the literal string "None" when the API returned null, and
        null status/severity/muted values failed model validation.
        """
        attributes = _attributes(data)

        return cls(
            id=str(data.get("id") or ""),
            resource=FindingGroupResourceInfo.from_api_response(
                attributes.get("resource") or {}
            ),
            provider=FindingGroupProviderInfo.from_api_response(
                attributes.get("provider") or {}
            ),
            # `or ""` guards against an explicit null leaking "None".
            finding_id=str(attributes.get("finding_id") or ""),
            status=attributes.get("status") or "MANUAL",
            severity=attributes.get("severity") or "informational",
            muted=bool(attributes.get("muted")),
            delta=attributes.get("delta"),
            first_seen_at=attributes.get("first_seen_at"),
            last_seen_at=attributes.get("last_seen_at"),
            muted_reason=attributes.get("muted_reason"),
        )
|
||||
|
||||
|
||||
class FindingGroupResourcesListResponse(MinimalSerializerMixin):
    """Paginated response for finding group resource queries."""

    resources: list[FindingGroupResource] = Field(
        description="Resources matching the finding group query"
    )
    total_num_resources: int = Field(
        description="Total resources matching the query across all pages", ge=0
    )
    total_num_pages: int = Field(description="Total pages available", ge=0)
    current_page: int = Field(description="Current page number", ge=1)

    @classmethod
    def from_api_response(cls, response: dict) -> "FindingGroupResourcesListResponse":
        """Transform JSON:API resource list response to simplified format.

        Args:
            response: Raw JSON:API payload with ``data`` and ``meta`` keys.

        Returns:
            FindingGroupResourcesListResponse with null-safe pagination.
        """
        # Coerce explicit null containers to {} so nested .get() calls on
        # "meta"/"pagination" cannot raise on a null value.
        pagination = (response.get("meta") or {}).get("pagination") or {}
        resources = [
            FindingGroupResource.from_api_response(item)
            for item in response.get("data", [])
        ]

        # An explicit null counter is treated like a missing key so the
        # integer fields never receive None.
        count = pagination.get("count")
        pages = pagination.get("pages")
        page = pagination.get("page")

        return cls(
            resources=resources,
            total_num_resources=count if count is not None else len(resources),
            total_num_pages=pages if pages is not None else 1,
            current_page=page if page is not None else 1,
        )
|
||||
@@ -0,0 +1,471 @@
|
||||
"""Finding Groups tools for Prowler App MCP Server.
|
||||
|
||||
This module provides read-only tools for finding group triage and drill-downs.
|
||||
"""
|
||||
|
||||
from typing import Any, Literal
|
||||
from urllib.parse import quote
|
||||
|
||||
from pydantic import Field
|
||||
|
||||
from prowler_mcp_server.prowler_app.models.finding_groups import (
|
||||
DetailedFindingGroup,
|
||||
FindingGroupResourcesListResponse,
|
||||
FindingGroupsListResponse,
|
||||
)
|
||||
from prowler_mcp_server.prowler_app.tools.base import BaseTool
|
||||
|
||||
|
||||
# Literal value sets accepted by the filter parameters the tools expose.
StatusFilter = Literal["FAIL", "PASS", "MANUAL"]
SeverityFilter = Literal["critical", "high", "medium", "low", "informational"]
DeltaFilter = Literal["new", "changed"]

# Sparse-fieldset value for `fields[finding-groups]` on detail queries:
# every counter plus descriptive context for a single group.
GROUP_DETAIL_FIELDS = (
    "check_id,check_title,check_description,severity,status,muted,"
    "impacted_providers,resources_fail,resources_total,pass_count,fail_count,"
    "manual_count,pass_muted_count,fail_muted_count,manual_muted_count,"
    "muted_count,new_count,changed_count,new_fail_count,new_fail_muted_count,"
    "new_pass_count,new_pass_muted_count,new_manual_count,new_manual_muted_count,"
    "changed_fail_count,changed_fail_muted_count,changed_pass_count,"
    "changed_pass_muted_count,changed_manual_count,changed_manual_muted_count,"
    "first_seen_at,last_seen_at,failing_since"
)

# Sparse-fieldset value for list queries: summary counters only, keeping
# list payloads small for triage.
GROUP_LIST_FIELDS = (
    "check_id,check_title,severity,status,muted,impacted_providers,"
    "resources_fail,resources_total,pass_count,fail_count,manual_count,"
    "muted_count,new_count,changed_count,first_seen_at,last_seen_at,failing_since"
)

# Sparse-fieldset value for `fields[finding-group-resources]` drill-downs.
RESOURCE_FIELDS = (
    "resource,provider,finding_id,status,severity,muted,delta,"
    "first_seen_at,last_seen_at,muted_reason"
)
|
||||
|
||||
|
||||
class FindingGroupsTools(BaseTool):
    """Tools for Finding Groups operations."""

    @staticmethod
    def _bool_value(value: bool | str) -> bool:
        """Normalize bool-like MCP client values."""
        if isinstance(value, bool):
            return value
        # Some MCP clients serialize booleans as strings; only the literal
        # "true" (case-insensitive) maps to True, everything else to False.
        return value.lower() == "true"

    @staticmethod
    def _group_endpoint(date_range: tuple[str, str] | None) -> str:
        # No date window -> latest-snapshot endpoint; otherwise the
        # historical, date-filtered endpoint.
        return "/finding-groups/latest" if date_range is None else "/finding-groups"

    @staticmethod
    def _resource_endpoint(check_id: str, date_range: tuple[str, str] | None) -> str:
        # safe="" also percent-encodes "/" so the check ID always stays a
        # single URL path segment.
        escaped_check_id = quote(check_id, safe="")
        if date_range is None:
            return f"/finding-groups/latest/{escaped_check_id}/resources"
        return f"/finding-groups/{escaped_check_id}/resources"

    def _base_date_params(
        self, date_from: str | None, date_to: str | None
    ) -> tuple[tuple[str, str] | None, dict[str, Any]]:
        """Normalize the optional date window into inserted_at filters.

        Returns ``(None, {})`` when no dates were supplied, otherwise the
        normalized ``(start, end)`` tuple plus the matching filter params.
        The API client enforces the maximum 2-day window.
        """
        date_range = self.api_client.normalize_date_range(
            date_from, date_to, max_days=2
        )
        if date_range is None:
            return None, {}

        return date_range, {
            "filter[inserted_at__gte]": date_range[0],
            "filter[inserted_at__lte]": date_range[1],
        }

    def _apply_common_filters(
        self,
        params: dict[str, Any],
        provider: list[str],
        provider_type: list[str],
        provider_uid: list[str],
        provider_alias: str | None,
        region: list[str],
        service: list[str],
        resource_type: list[str],
        resource_name: str | None,
        resource_uid: str | None,
        resource_group: list[str],
        category: list[str],
        check_id: list[str],
        check_title: str | None,
        severity: list[SeverityFilter],
        status: list[StatusFilter],
        muted: bool | str | None,
        delta: list[DeltaFilter],
    ) -> None:
        # Mutates `params` in place. Empty lists and None values add no
        # filter; `muted` is the only tri-state value — an explicit False
        # must still be sent to the API.
        if provider:
            params["filter[provider__in]"] = provider
        if provider_type:
            params["filter[provider_type__in]"] = provider_type
        if provider_uid:
            params["filter[provider_uid__in]"] = provider_uid
        if provider_alias:
            params["filter[provider_alias__icontains]"] = provider_alias
        if region:
            params["filter[region__in]"] = region
        if service:
            params["filter[service__in]"] = service
        if resource_type:
            params["filter[resource_type__in]"] = resource_type
        if resource_name:
            params["filter[resource_name__icontains]"] = resource_name
        if resource_uid:
            params["filter[resource_uid__icontains]"] = resource_uid
        if resource_group:
            # Note: the API filter key is plural ("resource_groups").
            params["filter[resource_groups__in]"] = resource_group
        if category:
            params["filter[category__in]"] = category
        if check_id:
            params["filter[check_id__in]"] = check_id
        if check_title:
            params["filter[check_title__icontains]"] = check_title
        if severity:
            params["filter[severity__in]"] = severity
        if status:
            params["filter[status__in]"] = status
        if muted is not None:
            params["filter[muted]"] = self._bool_value(muted)
        if delta:
            params["filter[delta__in]"] = delta

    async def list_finding_groups(
        self,
        provider: list[str] = Field(
            default=[],
            description="Filter by provider UUIDs. Multiple values allowed. If empty, all visible providers are returned.",
        ),
        provider_type: list[str] = Field(
            default=[],
            description="Filter by provider type. Multiple values allowed, such as aws, azure, gcp, kubernetes, github, or m365.",
        ),
        provider_uid: list[str] = Field(
            default=[],
            description="Filter by provider-native account, subscription, or project IDs. Multiple values allowed.",
        ),
        provider_alias: str | None = Field(
            default=None,
            description="Filter by provider alias/name using partial matching.",
        ),
        region: list[str] = Field(
            default=[],
            description="Filter by cloud regions. Multiple values allowed.",
        ),
        service: list[str] = Field(
            default=[],
            description="Filter by cloud services. Multiple values allowed.",
        ),
        resource_type: list[str] = Field(
            default=[],
            description="Filter by resource types. Multiple values allowed.",
        ),
        resource_name: str | None = Field(
            default=None,
            description="Filter by resource name using partial matching.",
        ),
        resource_uid: str | None = Field(
            default=None,
            description="Filter by resource UID using partial matching.",
        ),
        resource_group: list[str] = Field(
            default=[],
            description="Filter by resource group values. Multiple values allowed.",
        ),
        category: list[str] = Field(
            default=[],
            description="Filter by finding categories. Multiple values allowed.",
        ),
        check_id: list[str] = Field(
            default=[],
            description="Filter by check IDs. Multiple values allowed.",
        ),
        check_title: str | None = Field(
            default=None,
            description="Filter by check title using partial matching.",
        ),
        severity: list[SeverityFilter] = Field(
            default=[],
            description="Filter by aggregated severity. Empty returns all severities.",
        ),
        status: list[StatusFilter] = Field(
            default=["FAIL"],
            description="Filter by aggregated status. Default returns failing groups. Pass [] to return all statuses.",
        ),
        muted: bool | str | None = Field(
            default=None,
            description="Filter by fully muted group state. Accepts true/false.",
        ),
        include_muted: bool | str = Field(
            default=False,
            description="When false, excludes fully muted groups. Set true to include fully muted groups.",
        ),
        delta: list[DeltaFilter] = Field(
            default=[],
            description="Filter by group delta values: new or changed.",
        ),
        date_from: str | None = Field(
            default=None,
            description="Start date for historical query in YYYY-MM-DD format. Maximum range is 2 days.",
        ),
        date_to: str | None = Field(
            default=None,
            description="End date for historical query in YYYY-MM-DD format. Maximum range is 2 days.",
        ),
        sort: str | None = Field(
            default=None,
            description="Optional sort expression supported by the finding-groups API, such as -fail_count,-severity,check_id.",
        ),
        page_size: int = Field(
            default=50, description="Number of groups to return per page"
        ),
        page_number: int = Field(
            default=1, description="Page number to retrieve (1-indexed)"
        ),
    ) -> dict[str, Any]:
        """List finding groups aggregated by check ID.

        Default behavior returns the latest non-muted FAIL groups for fast triage.
        Without dates this uses `/finding-groups/latest`. With `date_from` or
        `date_to`, this uses `/finding-groups` with a maximum 2-day date window.

        Use this tool to find noisy or high-impact checks, then call
        prowler_app_get_finding_group_details for complete counters or
        prowler_app_list_finding_group_resources to drill into affected resources.
        """
        try:
            self.api_client.validate_page_size(page_size)
            date_range, params = self._base_date_params(date_from, date_to)
            endpoint = self._group_endpoint(date_range)

            self._apply_common_filters(
                params,
                provider,
                provider_type,
                provider_uid,
                provider_alias,
                region,
                service,
                resource_type,
                resource_name,
                resource_uid,
                resource_group,
                category,
                check_id,
                check_title,
                severity,
                status,
                muted,
                delta,
            )

            # Pagination and sparse fieldsets are applied after the shared
            # filters; GROUP_LIST_FIELDS keeps list payloads summary-sized.
            params["filter[include_muted]"] = self._bool_value(include_muted)
            params["page[size]"] = page_size
            params["page[number]"] = page_number
            params["fields[finding-groups]"] = GROUP_LIST_FIELDS
            if sort:
                params["sort"] = sort

            clean_params = self.api_client.build_filter_params(params)
            api_response = await self.api_client.get(endpoint, params=clean_params)
            response = FindingGroupsListResponse.from_api_response(api_response)
            return response.model_dump()
        except Exception as e:
            # Errors are surfaced as a structured payload rather than
            # raised, so the MCP client always receives a response.
            self.logger.error(f"Error listing finding groups: {e}")
            return {"error": str(e), "status": "failed"}

    async def get_finding_group_details(
        self,
        check_id: str = Field(
            description="Public check ID that identifies the finding group. This is not a UUID."
        ),
        date_from: str | None = Field(
            default=None,
            description="Start date for historical query in YYYY-MM-DD format. Maximum range is 2 days.",
        ),
        date_to: str | None = Field(
            default=None,
            description="End date for historical query in YYYY-MM-DD format. Maximum range is 2 days.",
        ),
    ) -> dict[str, Any]:
        """Get complete details for one finding group by exact check ID.

        Uses `filter[check_id]` exact matching against latest data by default,
        or historical data when dates are provided. Fully muted groups are
        included by default so accepted risk does not look like a missing group.
        """
        try:
            date_range, params = self._base_date_params(date_from, date_to)
            endpoint = self._group_endpoint(date_range)

            # Exact check_id match; page size 1 because the ID is unique
            # within a snapshot.
            params.update(
                {
                    "filter[check_id]": check_id,
                    "filter[include_muted]": True,
                    "page[size]": 1,
                    "page[number]": 1,
                    "fields[finding-groups]": GROUP_DETAIL_FIELDS,
                }
            )

            clean_params = self.api_client.build_filter_params(params)
            api_response = await self.api_client.get(endpoint, params=clean_params)
            data = api_response.get("data", [])

            if not data:
                return {
                    "error": f"Finding group '{check_id}' not found.",
                    "status": "not_found",
                }

            group = DetailedFindingGroup.from_api_response(data[0])
            return group.model_dump()
        except Exception as e:
            self.logger.error(f"Error getting finding group details: {e}")
            return {"error": str(e), "status": "failed"}

    async def list_finding_group_resources(
        self,
        check_id: str = Field(
            description="Public check ID that identifies the finding group. This is not a UUID."
        ),
        provider: list[str] = Field(
            default=[],
            description="Filter by provider UUIDs. Multiple values allowed.",
        ),
        provider_type: list[str] = Field(
            default=[],
            description="Filter by provider type. Multiple values allowed.",
        ),
        provider_uid: list[str] = Field(
            default=[],
            description="Filter by provider-native account, subscription, or project IDs. Multiple values allowed.",
        ),
        provider_alias: str | None = Field(
            default=None,
            description="Filter by provider alias/name using partial matching.",
        ),
        region: list[str] = Field(
            default=[],
            description="Filter by cloud regions. Multiple values allowed.",
        ),
        service: list[str] = Field(
            default=[],
            description="Filter by cloud services. Multiple values allowed.",
        ),
        resource_type: list[str] = Field(
            default=[],
            description="Filter by resource types. Multiple values allowed.",
        ),
        resource_name: str | None = Field(
            default=None,
            description="Filter by resource name using partial matching.",
        ),
        resource_uid: str | None = Field(
            default=None,
            description="Filter by resource UID using partial matching.",
        ),
        resource_group: list[str] = Field(
            default=[],
            description="Filter by resource group values. Multiple values allowed.",
        ),
        category: list[str] = Field(
            default=[],
            description="Filter by finding categories. Multiple values allowed.",
        ),
        severity: list[SeverityFilter] = Field(
            default=[],
            description="Filter by severity. Empty returns all severities.",
        ),
        status: list[StatusFilter] = Field(
            default=["FAIL"],
            description="Filter by status. Default returns failing resources. Pass [] to return all statuses.",
        ),
        muted: bool | str | None = Field(
            default=None,
            description="Filter by muted state. Accepts true/false. Overrides include_muted when provided.",
        ),
        include_muted: bool | str = Field(
            default=False,
            description="When false, returns only actionable unmuted resources by applying muted=false. Set true to include muted and unmuted resources.",
        ),
        delta: list[DeltaFilter] = Field(
            default=[], description="Filter by delta values: new or changed."
        ),
        date_from: str | None = Field(
            default=None,
            description="Start date for historical query in YYYY-MM-DD format. Maximum range is 2 days.",
        ),
        date_to: str | None = Field(
            default=None,
            description="End date for historical query in YYYY-MM-DD format. Maximum range is 2 days.",
        ),
        sort: str | None = Field(
            default=None,
            description="Optional sort expression supported by the finding group resources API.",
        ),
        page_size: int = Field(
            default=50, description="Number of resources to return per page"
        ),
        page_number: int = Field(
            default=1, description="Page number to retrieve (1-indexed)"
        ),
    ) -> dict[str, Any]:
        """List resources affected by a finding group.

        Without dates this uses `/finding-groups/latest/{check_id}/resources`.
        With `date_from` or `date_to`, this uses
        `/finding-groups/{check_id}/resources` with a maximum 2-day date window.

        Default behavior returns FAIL, unmuted resources so the result is
        actionable. Set `include_muted=True` to include accepted/suppressed
        resources too. Each row includes nested resource and provider data plus
        `finding_id`. Use `prowler_app_get_finding_details(finding_id)` to
        retrieve complete remediation guidance for a specific resource finding.
        """
        try:
            self.api_client.validate_page_size(page_size)
            date_range, params = self._base_date_params(date_from, date_to)
            endpoint = self._resource_endpoint(check_id, date_range)

            # An explicit `muted` filter wins; otherwise include_muted=False
            # narrows the result to unmuted rows only.
            if muted is None and not self._bool_value(include_muted):
                muted = False

            # check_id/check_title slots are passed as []/None because the
            # check is already baked into the endpoint path.
            self._apply_common_filters(
                params,
                provider,
                provider_type,
                provider_uid,
                provider_alias,
                region,
                service,
                resource_type,
                resource_name,
                resource_uid,
                resource_group,
                category,
                [],
                None,
                severity,
                status,
                muted,
                delta,
            )

            params["page[size]"] = page_size
            params["page[number]"] = page_number
            params["fields[finding-group-resources]"] = RESOURCE_FIELDS
            if sort:
                params["sort"] = sort

            clean_params = self.api_client.build_filter_params(params)
            api_response = await self.api_client.get(endpoint, params=clean_params)
            response = FindingGroupResourcesListResponse.from_api_response(api_response)
            return response.model_dump()
        except Exception as e:
            self.logger.error(f"Error listing finding group resources: {e}")
            return {"error": str(e), "status": "failed"}
|
||||
Generated
+33
-1
@@ -6043,6 +6043,38 @@ pydantic = ">=2.6.0"
|
||||
ruamel-yaml = ">=0.17.21"
|
||||
typing-extensions = ">=4.7.1"
|
||||
|
||||
[[package]]
|
||||
name = "scaleway"
|
||||
version = "2.10.3"
|
||||
description = "Scaleway SDK for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.10"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "scaleway-2.10.3-py3-none-any.whl", hash = "sha256:dbf381440d6caf37c878cf16445a63f4969a4aac2257c9b72c744d10ff223a0c"},
|
||||
{file = "scaleway-2.10.3.tar.gz", hash = "sha256:b1f9dd1b1450767205234c6f5a345e5e25dc039c780253d698893b5c344ce594"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
scaleway-core = "2.10.3"
|
||||
|
||||
[[package]]
|
||||
name = "scaleway-core"
|
||||
version = "2.10.3"
|
||||
description = "Scaleway SDK for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.10"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "scaleway_core-2.10.3-py3-none-any.whl", hash = "sha256:fd4112144554d6adae22ff737555eeb0e38cb1063250b3e88c9aebc1b957793b"},
|
||||
{file = "scaleway_core-2.10.3.tar.gz", hash = "sha256:56432f755d694669429de51d51c1d0b3361b28dc2f939b28e4cb954610ee76be"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
python-dateutil = ">=2.8.2,<3.0.0"
|
||||
PyYAML = ">=6.0,<7.0"
|
||||
requests = ">=2.28.1,<3.0.0"
|
||||
|
||||
[[package]]
|
||||
name = "schema"
|
||||
version = "0.7.5"
|
||||
@@ -6885,4 +6917,4 @@ files = [
|
||||
[metadata]
|
||||
lock-version = "2.1"
|
||||
python-versions = ">=3.10,<3.13"
|
||||
content-hash = "96359a9bfe4031fb0747c22eb4b00f2a008e3fb6d07189fa0fe6ee3875b1f913"
|
||||
content-hash = "e158ae9902d799a82e7d91cb4c0eb404d811ae3460310192fbdd198727e647cd"
|
||||
|
||||
@@ -10,10 +10,12 @@ All notable changes to the **Prowler SDK** are documented in this file.
|
||||
- `iam_user_access_not_stale_to_sagemaker` check for AWS provider with configurable `max_unused_sagemaker_access_days` (default 90) [(#11000)](https://github.com/prowler-cloud/prowler/pull/11000)
|
||||
- `cloudtrail_bedrock_logging_enabled` check for AWS provider [(#10858)](https://github.com/prowler-cloud/prowler/pull/10858)
|
||||
- Okta provider with OAuth 2.0 authentication and `signon_global_session_idle_timeout_15min` check [(#11079)](https://github.com/prowler-cloud/prowler/pull/11079)
|
||||
- Scaleway provider with `iam_no_root_api_keys` check [(#11166)](https://github.com/prowler-cloud/prowler/pull/11166)
|
||||
|
||||
### 🔄 Changed
|
||||
|
||||
- `entra_emergency_access_exclusion` check for M365 provider now scopes the exclusion requirement to enabled Conditional Access policies with a `Block` grant control instead of every enabled policy, focusing on the lockout-relevant policy set [(#10849)](https://github.com/prowler-cloud/prowler/pull/10849)
|
||||
- AWS IAM customer-managed policy checks no longer emit `FAIL` on unattached policies unless `--scan-unused-services` is enabled [(#11150)](https://github.com/prowler-cloud/prowler/pull/11150)
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -157,6 +157,7 @@ from prowler.providers.nhn.models import NHNOutputOptions
|
||||
from prowler.providers.okta.models import OktaOutputOptions
|
||||
from prowler.providers.openstack.models import OpenStackOutputOptions
|
||||
from prowler.providers.oraclecloud.models import OCIOutputOptions
|
||||
from prowler.providers.scaleway.models import ScalewayOutputOptions
|
||||
from prowler.providers.vercel.models import VercelOutputOptions
|
||||
|
||||
|
||||
@@ -431,6 +432,10 @@ def prowler():
|
||||
output_options = OktaOutputOptions(
|
||||
args, bulk_checks_metadata, global_provider.identity
|
||||
)
|
||||
elif provider == "scaleway":
|
||||
output_options = ScalewayOutputOptions(
|
||||
args, bulk_checks_metadata, global_provider.identity
|
||||
)
|
||||
|
||||
# Run the quick inventory for the provider if available
|
||||
if hasattr(args, "quick_inventory") and args.quick_inventory:
|
||||
|
||||
@@ -75,6 +75,7 @@ class Provider(str, Enum):
|
||||
ALIBABACLOUD = "alibabacloud"
|
||||
OPENSTACK = "openstack"
|
||||
IMAGE = "image"
|
||||
SCALEWAY = "scaleway"
|
||||
VERCEL = "vercel"
|
||||
OKTA = "okta"
|
||||
|
||||
|
||||
@@ -741,6 +741,10 @@ def execute(
|
||||
is_finding_muted_args["team_id"] = (
|
||||
team.id if team else global_provider.identity.user_id
|
||||
)
|
||||
elif global_provider.type == "scaleway":
|
||||
is_finding_muted_args["organization_id"] = (
|
||||
global_provider.identity.organization_id
|
||||
)
|
||||
elif global_provider.type == "oraclecloud":
|
||||
is_finding_muted_args["tenancy_id"] = (
|
||||
global_provider.identity.tenancy_id
|
||||
|
||||
@@ -1318,6 +1318,53 @@ class CheckReportVercel(Check_Report):
|
||||
return "global"
|
||||
|
||||
|
||||
class CheckReportScaleway(Check_Report):
    """Contains the Scaleway Check's finding information.

    Scaleway scans run at the organization level. Most IAM/account-level
    resources are global; regional resources expose a ``region`` attribute
    on the underlying object, which we surface as the report ``region``.
    """

    # Resolved in __init__ from explicit overrides or resource attributes.
    resource_name: str
    resource_id: str
    organization_id: str

    def __init__(
        self,
        metadata: Dict,
        resource: Any,
        resource_name: str = None,
        resource_id: str = None,
        organization_id: str = None,
    ) -> None:
        """Initialize the Scaleway Check's finding information.

        Resolution order for each field: explicit keyword override first,
        then the resource's primary attribute (``name`` / ``id`` /
        ``organization_id``), then a fallback attribute, then "".

        Args:
            metadata: Check metadata dictionary.
            resource: The Scaleway resource being checked.
            resource_name: Override for resource name.
            resource_id: Override for resource ID.
            organization_id: Override for the organization ID.
        """
        super().__init__(metadata, resource)
        self.resource_name = resource_name or getattr(
            resource, "name", getattr(resource, "resource_name", "")
        )
        self.resource_id = resource_id or getattr(
            resource, "id", getattr(resource, "resource_id", "")
        )
        self.organization_id = organization_id or getattr(
            resource, "organization_id", ""
        )
        # A missing or falsy region (None / "") collapses to "global".
        self._region = getattr(resource, "region", None) or "global"

    @property
    def region(self) -> str:
        """Scaleway regional resources expose their own region; IAM is global."""
        return self._region
|
||||
|
||||
|
||||
# Testing Pending
|
||||
def load_check_metadata(metadata_file: str) -> CheckMetadata:
|
||||
"""
|
||||
|
||||
@@ -29,10 +29,10 @@ class ProwlerArgumentParser:
|
||||
self.parser = argparse.ArgumentParser(
|
||||
prog="prowler",
|
||||
formatter_class=RawTextHelpFormatter,
|
||||
usage="prowler [-h] [--version] {aws,azure,gcp,kubernetes,m365,github,googleworkspace,okta,nhn,mongodbatlas,oraclecloud,alibabacloud,cloudflare,openstack,vercel,dashboard,iac,image,llm} ...",
|
||||
usage="prowler [-h] [--version] {aws,azure,gcp,kubernetes,m365,github,googleworkspace,okta,nhn,mongodbatlas,oraclecloud,alibabacloud,cloudflare,openstack,scaleway,vercel,dashboard,iac,image,llm} ...",
|
||||
epilog="""
|
||||
Available Cloud Providers:
|
||||
{aws,azure,gcp,kubernetes,m365,github,googleworkspace,okta,iac,llm,image,nhn,mongodbatlas,oraclecloud,alibabacloud,cloudflare,openstack,vercel}
|
||||
{aws,azure,gcp,kubernetes,m365,github,googleworkspace,okta,iac,llm,image,nhn,mongodbatlas,oraclecloud,alibabacloud,cloudflare,openstack,scaleway,vercel}
|
||||
aws AWS Provider
|
||||
azure Azure Provider
|
||||
gcp GCP Provider
|
||||
@@ -50,6 +50,7 @@ Available Cloud Providers:
|
||||
image Container Image Provider
|
||||
nhn NHN Provider (Unofficial)
|
||||
mongodbatlas MongoDB Atlas Provider
|
||||
scaleway Scaleway Provider
|
||||
vercel Vercel Provider
|
||||
|
||||
Available components:
|
||||
|
||||
@@ -442,6 +442,18 @@ class Finding(BaseModel):
|
||||
output_data["resource_uid"] = check_output.resource_id
|
||||
output_data["region"] = "global"
|
||||
|
||||
elif provider.type == "scaleway":
|
||||
output_data["auth_method"] = "api_key"
|
||||
output_data["account_uid"] = get_nested_attribute(
|
||||
provider, "identity.organization_id"
|
||||
)
|
||||
output_data["account_name"] = get_nested_attribute(
|
||||
provider, "identity.bearer_email"
|
||||
) or get_nested_attribute(provider, "identity.organization_id")
|
||||
output_data["resource_name"] = check_output.resource_name
|
||||
output_data["resource_uid"] = check_output.resource_id
|
||||
output_data["region"] = check_output.region
|
||||
|
||||
elif provider.type == "alibabacloud":
|
||||
output_data["auth_method"] = get_nested_attribute(
|
||||
provider, "identity.identity_arn"
|
||||
|
||||
@@ -1450,6 +1450,77 @@ class HTML(Output):
|
||||
)
|
||||
return ""
|
||||
|
||||
@staticmethod
|
||||
def get_scaleway_assessment_summary(provider: Provider) -> str:
|
||||
"""
|
||||
get_scaleway_assessment_summary gets the HTML assessment summary for the Scaleway provider
|
||||
|
||||
Args:
|
||||
provider (Provider): the Scaleway provider object
|
||||
|
||||
Returns:
|
||||
str: HTML assessment summary for the Scaleway provider
|
||||
"""
|
||||
try:
|
||||
assessment_items = f"""
|
||||
<li class="list-group-item">
|
||||
<b>Organization ID:</b> {provider.identity.organization_id}
|
||||
</li>"""
|
||||
|
||||
credentials_items = """
|
||||
<li class="list-group-item">
|
||||
<b>Authentication:</b> API Key
|
||||
</li>"""
|
||||
|
||||
access_key = getattr(provider.session, "access_key", None)
|
||||
if access_key:
|
||||
credentials_items += f"""
|
||||
<li class="list-group-item">
|
||||
<b>Access Key:</b> {access_key}
|
||||
</li>"""
|
||||
|
||||
bearer_type = getattr(provider.identity, "bearer_type", None)
|
||||
bearer_email = getattr(provider.identity, "bearer_email", None)
|
||||
bearer_id = getattr(provider.identity, "bearer_id", None)
|
||||
if bearer_type:
|
||||
bearer_label = bearer_email or bearer_id or "-"
|
||||
credentials_items += f"""
|
||||
<li class="list-group-item">
|
||||
<b>Bearer:</b> {bearer_type} ({bearer_label})
|
||||
</li>"""
|
||||
|
||||
region = getattr(provider.session, "default_region", None)
|
||||
if region:
|
||||
credentials_items += f"""
|
||||
<li class="list-group-item">
|
||||
<b>Default Region:</b> {region}
|
||||
</li>"""
|
||||
|
||||
return f"""
|
||||
<div class="col-md-2">
|
||||
<div class="card">
|
||||
<div class="card-header">
|
||||
Scaleway Assessment Summary
|
||||
</div>
|
||||
<ul class="list-group list-group-flush">{assessment_items}
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-4">
|
||||
<div class="card">
|
||||
<div class="card-header">
|
||||
Scaleway Credentials
|
||||
</div>
|
||||
<ul class="list-group list-group-flush">{credentials_items}
|
||||
</ul>
|
||||
</div>
|
||||
</div>"""
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
|
||||
)
|
||||
return ""
|
||||
|
||||
@staticmethod
|
||||
def get_assessment_summary(provider: Provider) -> str:
|
||||
"""
|
||||
|
||||
@@ -42,6 +42,8 @@ def stdout_report(finding, color, verbose, status, fix):
|
||||
details = finding.region
|
||||
if finding.check_metadata.Provider == "okta":
|
||||
details = finding.region
|
||||
if finding.check_metadata.Provider == "scaleway":
|
||||
details = finding.region
|
||||
|
||||
if (verbose or fix) and (not status or finding.status in status):
|
||||
if finding.muted:
|
||||
|
||||
@@ -111,6 +111,9 @@ def display_summary_table(
|
||||
elif provider.type == "okta":
|
||||
entity_type = "Okta Org"
|
||||
audited_entities = provider.identity.org_domain
|
||||
elif provider.type == "scaleway":
|
||||
entity_type = "Organization"
|
||||
audited_entities = provider.identity.organization_id
|
||||
|
||||
# Check if there are findings and that they are not all MANUAL
|
||||
if findings and not all(finding.status == "MANUAL" for finding in findings):
|
||||
|
||||
+2
@@ -16,6 +16,8 @@ class iam_no_custom_policy_permissive_role_assumption(Check):
|
||||
for policy in iam_client.policies.values():
|
||||
# Check only custom policies
|
||||
if policy.type == "Custom":
|
||||
if not policy.attached and not iam_client.provider.scan_unused_services:
|
||||
continue
|
||||
report = Check_Report_AWS(metadata=self.metadata(), resource=policy)
|
||||
report.region = iam_client.region
|
||||
report.status = "PASS"
|
||||
|
||||
+2
@@ -11,6 +11,8 @@ class iam_policy_allows_privilege_escalation(Check):
|
||||
|
||||
for policy in iam_client.policies.values():
|
||||
if policy.type == "Custom":
|
||||
if not policy.attached and not iam_client.provider.scan_unused_services:
|
||||
continue
|
||||
report = Check_Report_AWS(metadata=self.metadata(), resource=policy)
|
||||
report.region = iam_client.region
|
||||
report.status = "PASS"
|
||||
|
||||
+2
@@ -11,6 +11,8 @@ class iam_policy_no_full_access_to_cloudtrail(Check):
|
||||
for policy in iam_client.policies.values():
|
||||
# Check only custom policies
|
||||
if policy.type == "Custom":
|
||||
if not policy.attached and not iam_client.provider.scan_unused_services:
|
||||
continue
|
||||
report = Check_Report_AWS(metadata=self.metadata(), resource=policy)
|
||||
report.region = iam_client.region
|
||||
report.status = "PASS"
|
||||
|
||||
+2
@@ -11,6 +11,8 @@ class iam_policy_no_full_access_to_kms(Check):
|
||||
for policy in iam_client.policies.values():
|
||||
# Check only custom policies
|
||||
if policy.type == "Custom":
|
||||
if not policy.attached and not iam_client.provider.scan_unused_services:
|
||||
continue
|
||||
report = Check_Report_AWS(metadata=self.metadata(), resource=policy)
|
||||
report.region = iam_client.region
|
||||
report.status = "PASS"
|
||||
|
||||
+2
@@ -10,6 +10,8 @@ class iam_policy_no_wildcard_marketplace_subscribe(Check):
|
||||
findings = []
|
||||
for policy in iam_client.policies.values():
|
||||
if policy.type == "Custom":
|
||||
if not policy.attached and not iam_client.provider.scan_unused_services:
|
||||
continue
|
||||
report = Check_Report_AWS(metadata=self.metadata(), resource=policy)
|
||||
report.region = iam_client.region
|
||||
report.status = "PASS"
|
||||
|
||||
@@ -416,6 +416,17 @@ class Provider(ABC):
|
||||
mutelist_path=arguments.mutelist_file,
|
||||
fixer_config=fixer_config,
|
||||
)
|
||||
elif "scaleway" in provider_class_name.lower():
|
||||
provider_class(
|
||||
access_key=getattr(arguments, "access_key", None),
|
||||
secret_key=getattr(arguments, "secret_key", None),
|
||||
organization_id=getattr(arguments, "organization_id", None),
|
||||
project_id=getattr(arguments, "project_id", None),
|
||||
region=getattr(arguments, "region", None),
|
||||
config_path=arguments.config_file,
|
||||
mutelist_path=arguments.mutelist_file,
|
||||
fixer_config=fixer_config,
|
||||
)
|
||||
|
||||
except TypeError as error:
|
||||
logger.critical(
|
||||
|
||||
@@ -0,0 +1,99 @@
|
||||
# Exceptions codes from 14000 to 14999 are reserved for Scaleway exceptions
|
||||
from prowler.exceptions.exceptions import ProwlerException
|
||||
|
||||
|
||||
class ScalewayBaseException(ProwlerException):
|
||||
"""Base exception for Scaleway provider errors."""
|
||||
|
||||
SCALEWAY_ERROR_CODES = {
|
||||
(14000, "ScalewayCredentialsError"): {
|
||||
"message": "Scaleway credentials not found or invalid.",
|
||||
"remediation": (
|
||||
"Set the SCW_ACCESS_KEY and SCW_SECRET_KEY environment variables "
|
||||
"with a valid Scaleway API key. Generate one at "
|
||||
"https://console.scaleway.com/iam/api-keys."
|
||||
),
|
||||
},
|
||||
(14001, "ScalewayAuthenticationError"): {
|
||||
"message": "Authentication to the Scaleway API failed.",
|
||||
"remediation": (
|
||||
"Verify your Scaleway API key is valid, has not expired, and that "
|
||||
"the bearer has IAM read permissions on the target organization."
|
||||
),
|
||||
},
|
||||
(14002, "ScalewaySessionError"): {
|
||||
"message": "Failed to create a Scaleway API session.",
|
||||
"remediation": (
|
||||
"Check network connectivity and ensure the Scaleway API is "
|
||||
"reachable at https://api.scaleway.com."
|
||||
),
|
||||
},
|
||||
(14003, "ScalewayIdentityError"): {
|
||||
"message": "Failed to retrieve Scaleway identity information.",
|
||||
"remediation": (
|
||||
"Ensure the API key has permissions to read IAM users and the "
|
||||
"owning organization metadata."
|
||||
),
|
||||
},
|
||||
(14004, "ScalewayAPIError"): {
|
||||
"message": "An error occurred while calling the Scaleway API.",
|
||||
"remediation": (
|
||||
"Check the Scaleway API status at https://status.scaleway.com "
|
||||
"and retry. Run with --log-level DEBUG for the full traceback."
|
||||
),
|
||||
},
|
||||
}
|
||||
|
||||
def __init__(self, code, file=None, original_exception=None, message=None):
|
||||
provider = "Scaleway"
|
||||
error_info = self.SCALEWAY_ERROR_CODES.get((code, self.__class__.__name__))
|
||||
if error_info is None:
|
||||
error_info = {
|
||||
"message": message or "Unknown Scaleway error.",
|
||||
"remediation": "Check the Scaleway API documentation for more details.",
|
||||
}
|
||||
elif message:
|
||||
error_info = error_info.copy()
|
||||
error_info["message"] = message
|
||||
super().__init__(
|
||||
code=code,
|
||||
source=provider,
|
||||
file=file,
|
||||
original_exception=original_exception,
|
||||
error_info=error_info,
|
||||
)
|
||||
|
||||
|
||||
class ScalewayCredentialsError(ScalewayBaseException):
|
||||
def __init__(self, file=None, original_exception=None, message=None):
|
||||
super().__init__(
|
||||
14000, file=file, original_exception=original_exception, message=message
|
||||
)
|
||||
|
||||
|
||||
class ScalewayAuthenticationError(ScalewayBaseException):
|
||||
def __init__(self, file=None, original_exception=None, message=None):
|
||||
super().__init__(
|
||||
14001, file=file, original_exception=original_exception, message=message
|
||||
)
|
||||
|
||||
|
||||
class ScalewaySessionError(ScalewayBaseException):
|
||||
def __init__(self, file=None, original_exception=None, message=None):
|
||||
super().__init__(
|
||||
14002, file=file, original_exception=original_exception, message=message
|
||||
)
|
||||
|
||||
|
||||
class ScalewayIdentityError(ScalewayBaseException):
|
||||
def __init__(self, file=None, original_exception=None, message=None):
|
||||
super().__init__(
|
||||
14003, file=file, original_exception=original_exception, message=message
|
||||
)
|
||||
|
||||
|
||||
class ScalewayAPIError(ScalewayBaseException):
|
||||
def __init__(self, file=None, original_exception=None, message=None):
|
||||
super().__init__(
|
||||
14004, file=file, original_exception=original_exception, message=message
|
||||
)
|
||||
@@ -0,0 +1,57 @@
|
||||
SENSITIVE_ARGUMENTS = frozenset({"--access-key", "--secret-key"})
|
||||
|
||||
|
||||
def init_parser(self):
|
||||
"""Init the Scaleway provider CLI parser."""
|
||||
scaleway_parser = self.subparsers.add_parser(
|
||||
"scaleway",
|
||||
parents=[self.common_providers_parser],
|
||||
help="Scaleway Provider",
|
||||
)
|
||||
|
||||
# Authentication
|
||||
auth_subparser = scaleway_parser.add_argument_group("Authentication")
|
||||
auth_subparser.add_argument(
|
||||
"--access-key",
|
||||
nargs="?",
|
||||
default=None,
|
||||
metavar="SCW_ACCESS_KEY",
|
||||
help=(
|
||||
"Scaleway API access key. Prefer the SCW_ACCESS_KEY env var "
|
||||
"instead of passing it on the command line."
|
||||
),
|
||||
)
|
||||
auth_subparser.add_argument(
|
||||
"--secret-key",
|
||||
nargs="?",
|
||||
default=None,
|
||||
metavar="SCW_SECRET_KEY",
|
||||
help=(
|
||||
"Scaleway API secret key. Prefer the SCW_SECRET_KEY env var "
|
||||
"instead of passing it on the command line."
|
||||
),
|
||||
)
|
||||
|
||||
# Scope
|
||||
scope_subparser = scaleway_parser.add_argument_group("Scope")
|
||||
scope_subparser.add_argument(
|
||||
"--organization-id",
|
||||
nargs="?",
|
||||
default=None,
|
||||
metavar="SCW_DEFAULT_ORGANIZATION_ID",
|
||||
help="Scaleway organization ID to scope the audit.",
|
||||
)
|
||||
scope_subparser.add_argument(
|
||||
"--project-id",
|
||||
nargs="?",
|
||||
default=None,
|
||||
metavar="SCW_DEFAULT_PROJECT_ID",
|
||||
help="Default Scaleway project ID for project-scoped resources.",
|
||||
)
|
||||
scope_subparser.add_argument(
|
||||
"--region",
|
||||
nargs="?",
|
||||
default=None,
|
||||
metavar="SCW_DEFAULT_REGION",
|
||||
help="Default Scaleway region (fr-par, nl-ams, pl-waw).",
|
||||
)
|
||||
@@ -0,0 +1,20 @@
|
||||
from prowler.lib.check.models import CheckReportScaleway
|
||||
from prowler.lib.mutelist.mutelist import Mutelist
|
||||
from prowler.lib.outputs.utils import unroll_dict, unroll_tags
|
||||
|
||||
|
||||
class ScalewayMutelist(Mutelist):
|
||||
"""Scaleway-specific mutelist helper."""
|
||||
|
||||
def is_finding_muted(
|
||||
self,
|
||||
finding: CheckReportScaleway,
|
||||
organization_id: str,
|
||||
) -> bool:
|
||||
return self.is_muted(
|
||||
organization_id,
|
||||
finding.check_metadata.CheckID,
|
||||
finding.region or "global",
|
||||
finding.resource_id or finding.resource_name,
|
||||
unroll_dict(unroll_tags(finding.resource_tags)),
|
||||
)
|
||||
@@ -0,0 +1,44 @@
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.providers.scaleway.exceptions.exceptions import ScalewayAPIError
|
||||
|
||||
|
||||
class ScalewayService:
|
||||
"""Base class for Scaleway services.
|
||||
|
||||
Centralizes the provider context (audit/fixer configuration, the
|
||||
scoping organization, the authenticated ``scaleway.Client``) so each
|
||||
service only worries about which Scaleway API to call.
|
||||
"""
|
||||
|
||||
def __init__(self, service: str, provider):
|
||||
self.provider = provider
|
||||
self.audit_config = provider.audit_config
|
||||
self.fixer_config = provider.fixer_config
|
||||
self.service = service.lower() if not service.islower() else service
|
||||
|
||||
# Shared authenticated client and the organization in scope
|
||||
self.client = provider.session.client
|
||||
self.organization_id = provider.identity.organization_id
|
||||
|
||||
def _safe_call(self, label: str, fn, *args, **kwargs):
|
||||
"""Run a Scaleway SDK call and surface failures as ScalewayAPIError.
|
||||
|
||||
Args:
|
||||
label: Human-readable label for the call (used in logs).
|
||||
fn: SDK function to invoke.
|
||||
|
||||
Returns:
|
||||
The SDK function result, or ``None`` if the call failed.
|
||||
"""
|
||||
try:
|
||||
return fn(*args, **kwargs)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{self.service} - {label} failed: "
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
raise ScalewayAPIError(
|
||||
file=__file__,
|
||||
original_exception=error,
|
||||
message=f"Scaleway API call '{label}' failed.",
|
||||
)
|
||||
@@ -0,0 +1,53 @@
|
||||
from typing import Any, Literal, Optional
|
||||
|
||||
from pydantic.v1 import BaseModel, Field
|
||||
|
||||
from prowler.config.config import output_file_timestamp
|
||||
from prowler.providers.common.models import ProviderOutputOptions
|
||||
|
||||
ScalewayBearerType = Literal["user", "application"]
|
||||
|
||||
|
||||
class ScalewaySession(BaseModel):
|
||||
"""Scaleway API session information.
|
||||
|
||||
Stores the credentials and the underlying ``scaleway.Client`` so every
|
||||
service can reuse the same authenticated client.
|
||||
"""
|
||||
|
||||
access_key: str
|
||||
secret_key: str
|
||||
organization_id: Optional[str] = None
|
||||
default_project_id: Optional[str] = None
|
||||
default_region: Optional[str] = None
|
||||
client: Any = Field(default=None, exclude=True)
|
||||
|
||||
class Config:
|
||||
arbitrary_types_allowed = True
|
||||
|
||||
|
||||
class ScalewayIdentityInfo(BaseModel):
|
||||
"""Scaleway identity and scoping information."""
|
||||
|
||||
organization_id: str
|
||||
bearer_id: Optional[str] = None
|
||||
bearer_type: Optional[ScalewayBearerType] = None
|
||||
bearer_email: Optional[str] = None
|
||||
account_root_user_id: Optional[str] = None
|
||||
|
||||
|
||||
class ScalewayOutputOptions(ProviderOutputOptions):
|
||||
"""Customize output filenames for Scaleway scans."""
|
||||
|
||||
def __init__(self, arguments, bulk_checks_metadata, identity: ScalewayIdentityInfo):
|
||||
super().__init__(arguments, bulk_checks_metadata)
|
||||
if (
|
||||
not hasattr(arguments, "output_filename")
|
||||
or arguments.output_filename is None
|
||||
):
|
||||
account_fragment = identity.organization_id or "scaleway"
|
||||
self.output_filename = (
|
||||
f"prowler-output-{account_fragment}-{output_file_timestamp}"
|
||||
)
|
||||
else:
|
||||
self.output_filename = arguments.output_filename
|
||||
@@ -0,0 +1,372 @@
|
||||
import os
|
||||
|
||||
from colorama import Fore, Style
|
||||
from scaleway import Client
|
||||
from scaleway.iam.v1alpha1 import IamV1Alpha1API
|
||||
|
||||
from prowler.config.config import (
|
||||
default_config_file_path,
|
||||
get_default_mute_file_path,
|
||||
load_and_validate_config_file,
|
||||
)
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.lib.utils.utils import print_boxes
|
||||
from prowler.providers.common.models import Audit_Metadata, Connection
|
||||
from prowler.providers.common.provider import Provider
|
||||
from prowler.providers.scaleway.exceptions.exceptions import (
|
||||
ScalewayAuthenticationError,
|
||||
ScalewayCredentialsError,
|
||||
ScalewayIdentityError,
|
||||
ScalewaySessionError,
|
||||
)
|
||||
from prowler.providers.scaleway.lib.mutelist.mutelist import ScalewayMutelist
|
||||
from prowler.providers.scaleway.models import (
|
||||
ScalewayIdentityInfo,
|
||||
ScalewaySession,
|
||||
)
|
||||
|
||||
|
||||
class ScalewayProvider(Provider):
|
||||
"""Scaleway provider.
|
||||
|
||||
Authenticates against the Scaleway API using an API key (access key +
|
||||
secret key) and exposes a single global session that every service
|
||||
reuses. Scaleway scopes everything to an organization, so the
|
||||
organization ID is the audit identity.
|
||||
"""
|
||||
|
||||
_type: str = "scaleway"
|
||||
_session: ScalewaySession
|
||||
_identity: ScalewayIdentityInfo
|
||||
_audit_config: dict
|
||||
_fixer_config: dict
|
||||
_mutelist: ScalewayMutelist
|
||||
audit_metadata: Audit_Metadata
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
# Authentication credentials
|
||||
access_key: str = None,
|
||||
secret_key: str = None,
|
||||
organization_id: str = None,
|
||||
project_id: str = None,
|
||||
region: str = None,
|
||||
# Provider configuration
|
||||
config_path: str = None,
|
||||
config_content: dict | None = None,
|
||||
fixer_config: dict = {},
|
||||
mutelist_path: str = None,
|
||||
mutelist_content: dict = None,
|
||||
):
|
||||
logger.info("Instantiating Scaleway provider...")
|
||||
|
||||
if config_content:
|
||||
self._audit_config = config_content
|
||||
else:
|
||||
if not config_path:
|
||||
config_path = default_config_file_path
|
||||
self._audit_config = load_and_validate_config_file(self._type, config_path)
|
||||
|
||||
self._session = ScalewayProvider.setup_session(
|
||||
access_key=access_key,
|
||||
secret_key=secret_key,
|
||||
organization_id=organization_id,
|
||||
project_id=project_id,
|
||||
region=region,
|
||||
)
|
||||
|
||||
self._identity = ScalewayProvider.setup_identity(self._session)
|
||||
|
||||
self._fixer_config = fixer_config
|
||||
|
||||
if mutelist_content:
|
||||
self._mutelist = ScalewayMutelist(mutelist_content=mutelist_content)
|
||||
else:
|
||||
if not mutelist_path:
|
||||
mutelist_path = get_default_mute_file_path(self.type)
|
||||
self._mutelist = ScalewayMutelist(mutelist_path=mutelist_path)
|
||||
|
||||
Provider.set_global_provider(self)
|
||||
|
||||
@property
|
||||
def type(self):
|
||||
return self._type
|
||||
|
||||
@property
|
||||
def session(self):
|
||||
return self._session
|
||||
|
||||
@property
|
||||
def identity(self):
|
||||
return self._identity
|
||||
|
||||
@property
|
||||
def audit_config(self):
|
||||
return self._audit_config
|
||||
|
||||
@property
|
||||
def fixer_config(self):
|
||||
return self._fixer_config
|
||||
|
||||
@property
|
||||
def mutelist(self) -> ScalewayMutelist:
|
||||
return self._mutelist
|
||||
|
||||
@staticmethod
|
||||
def setup_session(
|
||||
access_key: str = None,
|
||||
secret_key: str = None,
|
||||
organization_id: str = None,
|
||||
project_id: str = None,
|
||||
region: str = None,
|
||||
) -> ScalewaySession:
|
||||
"""Initialize the Scaleway API session.
|
||||
|
||||
Credentials can be provided as arguments (for API/SDK use) or read
|
||||
from the official Scaleway environment variables:
|
||||
|
||||
- ``SCW_ACCESS_KEY``
|
||||
- ``SCW_SECRET_KEY``
|
||||
- ``SCW_DEFAULT_ORGANIZATION_ID``
|
||||
- ``SCW_DEFAULT_PROJECT_ID``
|
||||
- ``SCW_DEFAULT_REGION``
|
||||
|
||||
Args:
|
||||
access_key: Scaleway API access key.
|
||||
secret_key: Scaleway API secret key.
|
||||
organization_id: Default organization ID to scope the audit.
|
||||
project_id: Default project ID for project-scoped resources.
|
||||
region: Default region.
|
||||
|
||||
Returns:
|
||||
ScalewaySession: The initialized session, holding the
|
||||
authenticated ``scaleway.Client``.
|
||||
|
||||
Raises:
|
||||
ScalewayCredentialsError: Access or secret key missing.
|
||||
ScalewaySessionError: Client instantiation failed.
|
||||
"""
|
||||
access = access_key or os.environ.get("SCW_ACCESS_KEY", "")
|
||||
secret = secret_key or os.environ.get("SCW_SECRET_KEY", "")
|
||||
org = organization_id or os.environ.get("SCW_DEFAULT_ORGANIZATION_ID") or None
|
||||
project = project_id or os.environ.get("SCW_DEFAULT_PROJECT_ID") or None
|
||||
default_region = region or os.environ.get("SCW_DEFAULT_REGION") or "fr-par"
|
||||
|
||||
if not access or not secret:
|
||||
raise ScalewayCredentialsError(
|
||||
file=os.path.basename(__file__),
|
||||
message=(
|
||||
"Scaleway credentials not found. Provide access_key and "
|
||||
"secret_key or set the SCW_ACCESS_KEY and SCW_SECRET_KEY "
|
||||
"environment variables."
|
||||
),
|
||||
)
|
||||
|
||||
try:
|
||||
client = Client(
|
||||
access_key=access,
|
||||
secret_key=secret,
|
||||
default_organization_id=org,
|
||||
default_project_id=project,
|
||||
default_region=default_region,
|
||||
)
|
||||
return ScalewaySession(
|
||||
access_key=access,
|
||||
secret_key=secret,
|
||||
organization_id=org,
|
||||
default_project_id=project,
|
||||
default_region=default_region,
|
||||
client=client,
|
||||
)
|
||||
except Exception as error:
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
|
||||
)
|
||||
raise ScalewaySessionError(
|
||||
file=os.path.basename(__file__),
|
||||
original_exception=error,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def setup_identity(session: ScalewaySession) -> ScalewayIdentityInfo:
|
||||
"""Resolve the audit identity by calling Scaleway IAM.
|
||||
|
||||
Uses ``iam.get_api_key`` on the current access key to discover the
|
||||
bearer (user vs application). When the bearer is a user, the
|
||||
owning organization is read from the user record; otherwise we
|
||||
require ``SCW_DEFAULT_ORGANIZATION_ID``.
|
||||
"""
|
||||
try:
|
||||
iam = IamV1Alpha1API(session.client)
|
||||
current_key = iam.get_api_key(access_key=session.access_key)
|
||||
|
||||
bearer_id = current_key.user_id or current_key.application_id
|
||||
bearer_type = (
|
||||
"user"
|
||||
if current_key.user_id
|
||||
else ("application" if current_key.application_id else None)
|
||||
)
|
||||
|
||||
organization_id = session.organization_id
|
||||
bearer_email = None
|
||||
account_root_user_id = None
|
||||
|
||||
# If the bearer is a user, resolve the org from the user record
|
||||
# and surface the email + root user id for the credentials banner.
|
||||
if current_key.user_id:
|
||||
user = iam.get_user(user_id=current_key.user_id)
|
||||
organization_id = organization_id or user.organization_id
|
||||
bearer_email = user.email
|
||||
account_root_user_id = user.account_root_user_id
|
||||
elif current_key.application_id and not organization_id:
|
||||
# Application keys do not expose the org directly without an
|
||||
# extra call. The default org from env is preferred.
|
||||
logger.warning(
|
||||
"Scaleway application-scoped API key without "
|
||||
"SCW_DEFAULT_ORGANIZATION_ID. Resource discovery may fail."
|
||||
)
|
||||
|
||||
if not organization_id:
|
||||
raise ScalewayIdentityError(
|
||||
file=os.path.basename(__file__),
|
||||
message=(
|
||||
"Could not determine the Scaleway organization ID. "
|
||||
"Set SCW_DEFAULT_ORGANIZATION_ID or use a user-scoped "
|
||||
"API key."
|
||||
),
|
||||
)
|
||||
|
||||
return ScalewayIdentityInfo(
|
||||
organization_id=organization_id,
|
||||
bearer_id=bearer_id,
|
||||
bearer_type=bearer_type,
|
||||
bearer_email=bearer_email,
|
||||
account_root_user_id=account_root_user_id,
|
||||
)
|
||||
except ScalewayIdentityError:
|
||||
raise
|
||||
except Exception as error:
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
|
||||
)
|
||||
raise ScalewayIdentityError(
|
||||
file=os.path.basename(__file__),
|
||||
original_exception=error,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def validate_credentials(session: ScalewaySession) -> None:
|
||||
"""Smoke-test credentials by resolving the current API key.
|
||||
|
||||
Uses ``iam.get_api_key`` because it does not require any prior
|
||||
knowledge of the bearer or the owning organization.
|
||||
|
||||
Args:
|
||||
session: The Scaleway session to validate.
|
||||
|
||||
Raises:
|
||||
ScalewayAuthenticationError: Authentication or authorization
|
||||
failed against the Scaleway IAM API.
|
||||
"""
|
||||
try:
|
||||
iam = IamV1Alpha1API(session.client)
|
||||
iam.get_api_key(access_key=session.access_key)
|
||||
except Exception as error:
|
||||
raise ScalewayAuthenticationError(
|
||||
file=os.path.basename(__file__),
|
||||
original_exception=error,
|
||||
)
|
||||
|
||||
def print_credentials(self) -> None:
|
||||
report_title = (
|
||||
f"{Style.BRIGHT}Using the Scaleway credentials below:{Style.RESET_ALL}"
|
||||
)
|
||||
report_lines = [
|
||||
f"Authentication: {Fore.YELLOW}API Key{Style.RESET_ALL}",
|
||||
f"Access Key: {Fore.YELLOW}{self._session.access_key}{Style.RESET_ALL}",
|
||||
f"Organization ID: {Fore.YELLOW}{self._identity.organization_id}{Style.RESET_ALL}",
|
||||
]
|
||||
if self._identity.bearer_type:
|
||||
report_lines.append(
|
||||
f"Bearer: {Fore.YELLOW}{self._identity.bearer_type}"
|
||||
f" ({self._identity.bearer_email or self._identity.bearer_id})"
|
||||
f"{Style.RESET_ALL}"
|
||||
)
|
||||
if self._session.default_region:
|
||||
report_lines.append(
|
||||
f"Default Region: {Fore.YELLOW}{self._session.default_region}{Style.RESET_ALL}"
|
||||
)
|
||||
|
||||
print_boxes(report_lines, report_title)
|
||||
|
||||
@staticmethod
|
||||
def test_connection(
|
||||
access_key: str = None,
|
||||
secret_key: str = None,
|
||||
organization_id: str = None,
|
||||
raise_on_exception: bool = True,
|
||||
provider_id: str = None,
|
||||
) -> Connection:
|
||||
"""Test connection to Scaleway.
|
||||
|
||||
Args:
|
||||
access_key: Scaleway access key (falls back to SCW_ACCESS_KEY).
|
||||
secret_key: Scaleway secret key (falls back to SCW_SECRET_KEY).
|
||||
organization_id: Organization ID to scope the audit.
|
||||
raise_on_exception: Whether to raise or return errors.
|
||||
provider_id: Expected Scaleway organization ID. When provided,
|
||||
the resolved identity must match it; otherwise the test
|
||||
fails with ``ScalewayAuthenticationError``.
|
||||
|
||||
Returns:
|
||||
Connection: Connection object with is_connected status.
|
||||
"""
|
||||
try:
|
||||
session = ScalewayProvider.setup_session(
|
||||
access_key=access_key,
|
||||
secret_key=secret_key,
|
||||
organization_id=organization_id,
|
||||
)
|
||||
ScalewayProvider.validate_credentials(session)
|
||||
|
||||
# Guard for API callers that already know the expected
|
||||
# organization: the credentials must point to that exact org.
|
||||
if provider_id:
|
||||
identity = ScalewayProvider.setup_identity(session)
|
||||
if identity.organization_id != provider_id:
|
||||
raise ScalewayAuthenticationError(
|
||||
file=os.path.basename(__file__),
|
||||
message=(
|
||||
"The provided credentials do not have access to "
|
||||
f"the Scaleway organization with ID: {provider_id}"
|
||||
),
|
||||
)
|
||||
|
||||
return Connection(is_connected=True)
|
||||
|
||||
except (
|
||||
ScalewayCredentialsError,
|
||||
ScalewaySessionError,
|
||||
ScalewayAuthenticationError,
|
||||
) as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
if raise_on_exception:
|
||||
raise error
|
||||
return Connection(is_connected=False, error=error)
|
||||
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
formatted_error = ScalewayAuthenticationError(
|
||||
file=os.path.basename(__file__),
|
||||
original_exception=error,
|
||||
)
|
||||
if raise_on_exception:
|
||||
raise formatted_error
|
||||
return Connection(is_connected=False, error=formatted_error)
|
||||
|
||||
def validate_arguments(self) -> None:
|
||||
return None
|
||||
@@ -0,0 +1,4 @@
|
||||
from prowler.providers.common.provider import Provider
|
||||
from prowler.providers.scaleway.services.iam.iam_service import IAM
|
||||
|
||||
iam_client = IAM(Provider.get_global_provider())
|
||||
+38
@@ -0,0 +1,38 @@
|
||||
{
|
||||
"Provider": "scaleway",
|
||||
"CheckID": "iam_no_root_api_keys",
|
||||
"CheckTitle": "Scaleway IAM API keys must not be owned by the account root user",
|
||||
"CheckType": [],
|
||||
"ServiceName": "iam",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "critical",
|
||||
"ResourceType": "ScalewayAPIKey",
|
||||
"ResourceGroup": "IAM",
|
||||
"Description": "**Scaleway API keys** are checked to ensure none is bound to the **account root user**. The account root user is the original Scaleway account owner; its credentials bypass IAM policies and grant unrestricted access to the entire organization.",
|
||||
"Risk": "API keys owned by the **account root user** cannot be scoped down with IAM policies. Leaking one of these keys yields immediate full control over every project, resource and billing setting in the organization, and rotating them disrupts every automation depending on root credentials.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://www.scaleway.com/en/docs/identity-and-access-management/iam/concepts/#root-account",
|
||||
"https://www.scaleway.com/en/docs/identity-and-access-management/iam/how-to/create-api-keys/",
|
||||
"https://www.scaleway.com/en/docs/identity-and-access-management/iam/reference-content/users-and-applications/"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "scw iam api-key delete <ACCESS_KEY>",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Sign in to the Scaleway console as a user with IAM admin permissions.\n2. Create a dedicated IAM user or application scoped with the minimum required policy.\n3. Generate a new API key for that bearer and roll it out to the workloads currently using the root key.\n4. Delete the API key owned by the account root user from the IAM > API keys page.",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Never use API keys owned by the account root user for automation. Create scoped IAM users or applications, attach the least-privilege policies, and rotate any existing root API keys to that new bearer.",
|
||||
"Url": "https://hub.prowler.com/check/iam_no_root_api_keys"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"identity-access"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -0,0 +1,87 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportScaleway
|
||||
from prowler.providers.scaleway.services.iam.iam_client import iam_client
|
||||
|
||||
|
||||
class iam_no_root_api_keys(Check):
    """Ensure no Scaleway IAM API key is owned by the account root user.

    The account root user is the original Scaleway account owner. API keys
    bound to that bearer bypass IAM policies and grant unrestricted access
    to the entire organization; rotating or losing them is a critical
    incident. Day-to-day automation should rely on IAM users or
    applications scoped through policies instead.
    """

    def execute(self) -> List[CheckReportScaleway]:
        """Iterate over the API keys cached by the IAM service.

        The check degrades to ``MANUAL`` when the IAM service could not
        load the prerequisite data (users or API keys) — emitting ``PASS``
        in those cases would silently mask the very condition the check
        exists to detect.

        Returns:
            One ``CheckReportScaleway`` per discovered API key. ``FAIL``
            when the bearer is the account root user, ``PASS`` otherwise.
            A single ``MANUAL`` report is emitted when underlying IAM data
            is unavailable.
        """
        findings: List[CheckReportScaleway] = []

        # If we could not even load the users we cannot tell who the root
        # bearer is, so every API key would falsely PASS. Surface MANUAL
        # explicitly so the operator investigates.
        if not iam_client.users_loaded or not iam_client.api_keys_loaded:
            placeholder = _IAMDataUnavailableResource(
                organization_id=iam_client.organization_id
            )
            report = CheckReportScaleway(metadata=self.metadata(), resource=placeholder)
            report.status = "MANUAL"
            report.status_extended = (
                "Could not retrieve Scaleway IAM users or API keys for "
                f"organization {iam_client.organization_id}. Verify the "
                "API key has the IAMReadOnly policy and rerun."
            )
            findings.append(report)
            return findings

        root_user_id = iam_client.account_root_user_id

        for api_key in iam_client.api_keys:
            report = CheckReportScaleway(metadata=self.metadata(), resource=api_key)

            if root_user_id and api_key.user_id == root_user_id:
                report.status = "FAIL"
                report.status_extended = (
                    f"Scaleway API key {api_key.access_key} is owned by the "
                    f"account root user ({root_user_id}). Replace it with an "
                    # Fixed: trailing segments are plain literals, not
                    # f-strings — the originals had no placeholders (F541).
                    "API key bound to a dedicated IAM user or application."
                )
            else:
                report.status = "PASS"
                report.status_extended = (
                    f"Scaleway API key {api_key.access_key} is not owned by "
                    "the account root user."
                )

            findings.append(report)

        return findings
||||
|
||||
|
||||
class _IAMDataUnavailableResource:
|
||||
"""Minimal stand-in resource used when the IAM service failed to load.
|
||||
|
||||
``CheckReportScaleway`` derives ``resource_name``/``resource_id``/
|
||||
``region``/``organization_id`` from the resource via ``getattr`` with
|
||||
defaults, so this lightweight object is enough to materialize a
|
||||
MANUAL finding without polluting the real domain models.
|
||||
"""
|
||||
|
||||
def __init__(self, organization_id: str):
|
||||
self.name = "iam-data-unavailable"
|
||||
self.id = "iam-data-unavailable"
|
||||
self.organization_id = organization_id
|
||||
self.region = "global"
|
||||
@@ -0,0 +1,142 @@
|
||||
from typing import Optional
|
||||
|
||||
from pydantic.v1 import BaseModel
|
||||
from scaleway.iam.v1alpha1 import IamV1Alpha1API
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.providers.scaleway.lib.service.service import ScalewayService
|
||||
|
||||
|
||||
class IAM(ScalewayService):
    """Scaleway IAM service.

    Loads the users in scope plus every API key tied to the current
    organization. Checks consume the materialized lists; nothing in this
    class is lazy. Each load operation tracks success/failure separately
    so checks can degrade to ``MANUAL`` when data is incomplete instead of
    falsely passing.
    """

    def __init__(self, provider):
        # NOTE(review): presumably ScalewayService sets self.client and
        # self.organization_id — confirm against the base class.
        super().__init__("iam", provider)
        self._api = IamV1Alpha1API(self.client)

        # Cached state — populated eagerly during construction
        self.users: list[ScalewayUser] = []
        self.api_keys: list[ScalewayAPIKey] = []
        self.account_root_user_id: Optional[str] = None

        # Load status flags — checks consult these to surface MANUAL when
        # the underlying API call failed rather than reporting empty lists
        # as a clean PASS.
        self.users_loaded: bool = False
        self.api_keys_loaded: bool = False

        self._load_users()
        self._load_api_keys()

    def _load_users(self) -> None:
        """List every IAM user in the audited organization.

        On success, populates ``self.users``, derives
        ``self.account_root_user_id`` from the first user, and flips
        ``users_loaded`` to True. Any exception is logged and leaves
        ``users_loaded`` False (and ``self.users`` possibly partial).
        """
        try:
            users = self._api.list_users_all(organization_id=self.organization_id)
            for user in users:
                self.users.append(
                    ScalewayUser(
                        id=user.id,
                        email=user.email,
                        username=user.username,
                        organization_id=user.organization_id,
                        account_root_user_id=user.account_root_user_id,
                        # getattr guards: mfa/type_/status are read
                        # defensively — presumably they may be absent on
                        # some SDK models; TODO confirm against the SDK.
                        mfa=bool(getattr(user, "mfa", False)),
                        type_=(
                            str(user.type_) if getattr(user, "type_", None) else None
                        ),
                        status=(
                            str(user.status) if getattr(user, "status", None) else None
                        ),
                    )
                )

            # All users in the same org share the same account_root_user_id.
            if self.users and self.users[0].account_root_user_id:
                self.account_root_user_id = self.users[0].account_root_user_id

            self.users_loaded = True

        except Exception as error:
            logger.error(
                f"{self.service} - Error listing users: "
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )

    def _load_api_keys(self) -> None:
        """List every API key in the audited organization.

        On success, populates ``self.api_keys`` and sets
        ``api_keys_loaded``; failures are logged and leave the flag False
        so checks can distinguish "no keys" from "could not list keys".
        """
        try:
            api_keys = self._api.list_api_keys_all(organization_id=self.organization_id)
            for key in api_keys:
                self.api_keys.append(
                    ScalewayAPIKey(
                        access_key=key.access_key,
                        description=key.description,
                        user_id=key.user_id,
                        application_id=key.application_id,
                        default_project_id=key.default_project_id,
                        editable=bool(key.editable),
                        managed=bool(getattr(key, "managed", False)),
                        creation_ip=key.creation_ip,
                        # Timestamps are normalized to strings (or None)
                        # for the reporting layer.
                        created_at=str(key.created_at) if key.created_at else None,
                        updated_at=str(key.updated_at) if key.updated_at else None,
                        expires_at=str(key.expires_at) if key.expires_at else None,
                    )
                )

            self.api_keys_loaded = True

        except Exception as error:
            logger.error(
                f"{self.service} - Error listing API keys: "
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
|
||||
|
||||
|
||||
class ScalewayUser(BaseModel):
    """Subset of a Scaleway IAM user surface that the checks need."""

    id: str
    email: Optional[str] = None
    username: Optional[str] = None
    organization_id: Optional[str] = None
    account_root_user_id: Optional[str] = None
    # True when the user has multi-factor authentication enabled.
    mfa: bool = False
    # Trailing underscore avoids shadowing the builtin `type`.
    type_: Optional[str] = None
    status: Optional[str] = None
    # Provide name/id for CheckReportScaleway; overwritten post-init below.
    name: str = ""

    def __init__(self, **data):
        super().__init__(**data)
        # Prefer the most human-readable identifier available. Relies on
        # pydantic v1's default allow_mutation=True for post-init assignment.
        self.name = self.email or self.username or self.id
|
||||
|
||||
|
||||
class ScalewayAPIKey(BaseModel):
    """Subset of a Scaleway IAM API key surface that the checks need."""

    access_key: str
    description: Optional[str] = None
    # Exactly one of user_id / application_id identifies the bearer —
    # TODO confirm against the Scaleway IAM API (both are Optional here).
    user_id: Optional[str] = None
    application_id: Optional[str] = None
    default_project_id: Optional[str] = None
    editable: bool = False
    managed: bool = False
    creation_ip: Optional[str] = None
    # Timestamps are carried as pre-stringified values (see IAM._load_api_keys).
    created_at: Optional[str] = None
    updated_at: Optional[str] = None
    expires_at: Optional[str] = None
    # Provide name/id for CheckReportScaleway; overwritten post-init below.
    name: str = ""
    id: str = ""

    def __init__(self, **data):
        super().__init__(**data)
        # The access key doubles as the stable resource id; the description
        # (when present) is the friendlier display name.
        self.id = self.access_key
        self.name = self.description or self.access_key
|
||||
+2
-1
@@ -88,7 +88,8 @@ dependencies = [
|
||||
"alibabacloud_actiontrail20200706==2.4.1",
|
||||
"alibabacloud_cs20151215==6.1.0",
|
||||
"alibabacloud-rds20140815==12.0.0",
|
||||
"alibabacloud-sls20201230==5.9.0"
|
||||
"alibabacloud-sls20201230==5.9.0",
|
||||
"scaleway==2.10.3"
|
||||
]
|
||||
description = "Prowler is an Open Source security tool to perform AWS, GCP and Azure security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness. It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, FedRAMP, PCI-DSS, GDPR, HIPAA, FFIEC, SOC2, GXP, AWS Well-Architected Framework Security Pillar, AWS Foundational Technical Review (FTR), ENS (Spanish National Security Scheme) and your custom security frameworks."
|
||||
license = "Apache-2.0"
|
||||
|
||||
+80
@@ -408,3 +408,83 @@ class Test_iam_no_custom_policy_permissive_role_assumption:
|
||||
assert search(
|
||||
"allows permissive STS Role assumption", result[0].status_extended
|
||||
)
|
||||
|
||||
@mock_aws
def test_unattached_policy_skipped_when_scan_unused_services_disabled(self):
    """An unattached permissive policy yields no finding when unused-service scanning is off."""
    # Arrange: create a customer-managed policy that allows sts:AssumeRole on *
    # but leave it unattached.
    iam_client = client("iam")
    policy_name = "unattached_permissive_assume_role"
    policy_document = {
        "Version": "2012-10-17",
        "Statement": [
            {"Effect": "Allow", "Action": "sts:AssumeRole", "Resource": "*"},
        ],
    }
    iam_client.create_policy(
        PolicyName=policy_name, PolicyDocument=dumps(policy_document)
    )

    from prowler.providers.aws.services.iam.iam_service import IAM

    aws_provider = set_mocked_aws_provider(
        [AWS_REGION_US_EAST_1], scan_unused_services=False
    )

    with mock.patch(
        "prowler.providers.common.provider.Provider.get_global_provider",
        return_value=aws_provider,
    ):
        with mock.patch(
            "prowler.providers.aws.services.iam.iam_no_custom_policy_permissive_role_assumption.iam_no_custom_policy_permissive_role_assumption.iam_client",
            new=IAM(aws_provider),
        ):
            from prowler.providers.aws.services.iam.iam_no_custom_policy_permissive_role_assumption.iam_no_custom_policy_permissive_role_assumption import (
                iam_no_custom_policy_permissive_role_assumption,
            )

            check = iam_no_custom_policy_permissive_role_assumption()
            result = check.execute()
            # Assert: the unattached policy is skipped entirely.
            assert result == []
|
||||
|
||||
@mock_aws
def test_attached_policy_fails_when_scan_unused_services_disabled(self):
    """An attached permissive policy still FAILs when unused-service scanning is off."""
    # Arrange: same risky policy, but attached to a user so it is "in use".
    iam_client = client("iam")
    user_name = "test_user_assume_role"
    policy_name = "attached_permissive_assume_role"
    policy_document = {
        "Version": "2012-10-17",
        "Statement": [
            {"Effect": "Allow", "Action": "sts:AssumeRole", "Resource": "*"},
        ],
    }
    arn = iam_client.create_policy(
        PolicyName=policy_name, PolicyDocument=dumps(policy_document)
    )["Policy"]["Arn"]
    iam_client.create_user(UserName=user_name)
    iam_client.attach_user_policy(UserName=user_name, PolicyArn=arn)

    from prowler.providers.aws.services.iam.iam_service import IAM

    aws_provider = set_mocked_aws_provider(
        [AWS_REGION_US_EAST_1], scan_unused_services=False
    )

    with mock.patch(
        "prowler.providers.common.provider.Provider.get_global_provider",
        return_value=aws_provider,
    ):
        with mock.patch(
            "prowler.providers.aws.services.iam.iam_no_custom_policy_permissive_role_assumption.iam_no_custom_policy_permissive_role_assumption.iam_client",
            new=IAM(aws_provider),
        ):
            from prowler.providers.aws.services.iam.iam_no_custom_policy_permissive_role_assumption.iam_no_custom_policy_permissive_role_assumption import (
                iam_no_custom_policy_permissive_role_assumption,
            )

            check = iam_no_custom_policy_permissive_role_assumption()
            result = check.execute()
            # Assert: exactly one FAIL finding for the attached policy.
            assert len(result) == 1
            assert result[0].status == "FAIL"
            assert result[0].resource_arn == arn
            assert search(
                "allows permissive STS Role assumption", result[0].status_extended
            )
|
||||
|
||||
+83
@@ -1261,3 +1261,86 @@ class Test_iam_policy_allows_privilege_escalation:
|
||||
permissions
|
||||
]:
|
||||
assert search(permission, finding.status_extended)
|
||||
|
||||
@mock_aws
def test_unattached_policy_skipped_when_scan_unused_services_disabled(self):
    """An unattached privilege-escalation policy is skipped when unused-service scanning is off."""
    # Arrange: iam:CreateAccessKey on * is a known privilege-escalation vector,
    # but the policy is left unattached.
    iam_client = client("iam", region_name=AWS_REGION_US_EAST_1)
    policy_name = "unattached_privilege_escalation"
    policy_document = {
        "Version": "2012-10-17",
        "Statement": [
            {"Effect": "Allow", "Action": "iam:CreateAccessKey", "Resource": "*"},
        ],
    }
    iam_client.create_policy(
        PolicyName=policy_name, PolicyDocument=dumps(policy_document)
    )

    aws_provider = set_mocked_aws_provider(
        [AWS_REGION_US_EAST_1], scan_unused_services=False
    )
    from prowler.providers.aws.services.iam.iam_service import IAM

    with (
        mock.patch(
            "prowler.providers.common.provider.Provider.get_global_provider",
            return_value=aws_provider,
        ),
        mock.patch(
            "prowler.providers.aws.services.iam.iam_policy_allows_privilege_escalation.iam_policy_allows_privilege_escalation.iam_client",
            new=IAM(aws_provider),
        ),
    ):
        from prowler.providers.aws.services.iam.iam_policy_allows_privilege_escalation.iam_policy_allows_privilege_escalation import (
            iam_policy_allows_privilege_escalation,
        )

        check = iam_policy_allows_privilege_escalation()
        result = check.execute()
        # Assert: no findings for the unattached policy.
        assert result == []
|
||||
|
||||
@mock_aws
def test_attached_policy_fails_when_scan_unused_services_disabled(self):
    """An attached privilege-escalation policy still FAILs when unused-service scanning is off."""
    # Arrange: attach the risky policy to a user so it counts as "in use".
    iam_client = client("iam", region_name=AWS_REGION_US_EAST_1)
    user_name = "test_user_privesc"
    policy_name = "attached_privilege_escalation"
    policy_document = {
        "Version": "2012-10-17",
        "Statement": [
            {"Effect": "Allow", "Action": "iam:CreateAccessKey", "Resource": "*"},
        ],
    }
    policy_arn = iam_client.create_policy(
        PolicyName=policy_name, PolicyDocument=dumps(policy_document)
    )["Policy"]["Arn"]
    iam_client.create_user(UserName=user_name)
    iam_client.attach_user_policy(UserName=user_name, PolicyArn=policy_arn)

    aws_provider = set_mocked_aws_provider(
        [AWS_REGION_US_EAST_1], scan_unused_services=False
    )
    from prowler.providers.aws.services.iam.iam_service import IAM

    with (
        mock.patch(
            "prowler.providers.common.provider.Provider.get_global_provider",
            return_value=aws_provider,
        ),
        mock.patch(
            "prowler.providers.aws.services.iam.iam_policy_allows_privilege_escalation.iam_policy_allows_privilege_escalation.iam_client",
            new=IAM(aws_provider),
        ),
    ):
        from prowler.providers.aws.services.iam.iam_policy_allows_privilege_escalation.iam_policy_allows_privilege_escalation import (
            iam_policy_allows_privilege_escalation,
        )

        check = iam_policy_allows_privilege_escalation()
        result = check.execute()
        # Assert: exactly one FAIL finding pointing at the attached policy.
        assert len(result) == 1
        assert result[0].status == "FAIL"
        assert result[0].resource_arn == policy_arn
        assert search(
            f"Custom Policy {policy_arn} allows privilege escalation",
            result[0].status_extended,
        )
|
||||
|
||||
+75
@@ -207,3 +207,78 @@ class Test_iam_policy_no_full_access_to_cloudtrail:
|
||||
assert result[0].resource_id == "policy_no_cloudtrail_full_no_actions"
|
||||
assert result[0].resource_arn == arn
|
||||
assert result[0].region == "us-east-1"
|
||||
|
||||
@mock_aws
def test_unattached_policy_skipped_when_scan_unused_services_disabled(self):
    """An unattached cloudtrail:* policy yields no finding when unused-service scanning is off."""
    aws_provider = set_mocked_aws_provider(
        [AWS_REGION_US_EAST_1], scan_unused_services=False
    )
    # Arrange: full CloudTrail access on *, but the policy stays unattached.
    iam_client = client("iam", region_name=AWS_REGION_US_EAST_1)
    policy_name = "unattached_cloudtrail_full"
    policy_document_full_access = {
        "Version": "2012-10-17",
        "Statement": [
            {"Effect": "Allow", "Action": "cloudtrail:*", "Resource": "*"},
        ],
    }
    iam_client.create_policy(
        PolicyName=policy_name, PolicyDocument=dumps(policy_document_full_access)
    )

    with mock.patch(
        "prowler.providers.common.provider.Provider.get_global_provider",
        return_value=aws_provider,
    ):
        with mock.patch(
            "prowler.providers.aws.services.iam.iam_policy_no_full_access_to_cloudtrail.iam_policy_no_full_access_to_cloudtrail.iam_client",
            new=IAM(aws_provider),
        ):
            from prowler.providers.aws.services.iam.iam_policy_no_full_access_to_cloudtrail.iam_policy_no_full_access_to_cloudtrail import (
                iam_policy_no_full_access_to_cloudtrail,
            )

            check = iam_policy_no_full_access_to_cloudtrail()
            result = check.execute()
            # Assert: unattached policy is skipped entirely.
            assert result == []
|
||||
|
||||
@mock_aws
def test_attached_policy_fails_when_scan_unused_services_disabled(self):
    """An attached cloudtrail:* policy still FAILs when unused-service scanning is off."""
    aws_provider = set_mocked_aws_provider(
        [AWS_REGION_US_EAST_1], scan_unused_services=False
    )
    # Arrange: attach the full-access policy to a user so it is "in use".
    iam_client = client("iam", region_name=AWS_REGION_US_EAST_1)
    user_name = "test_user_cloudtrail"
    policy_name = "attached_cloudtrail_full"
    policy_document_full_access = {
        "Version": "2012-10-17",
        "Statement": [
            {"Effect": "Allow", "Action": "cloudtrail:*", "Resource": "*"},
        ],
    }
    arn = iam_client.create_policy(
        PolicyName=policy_name, PolicyDocument=dumps(policy_document_full_access)
    )["Policy"]["Arn"]
    iam_client.create_user(UserName=user_name)
    iam_client.attach_user_policy(UserName=user_name, PolicyArn=arn)

    with mock.patch(
        "prowler.providers.common.provider.Provider.get_global_provider",
        return_value=aws_provider,
    ):
        with mock.patch(
            "prowler.providers.aws.services.iam.iam_policy_no_full_access_to_cloudtrail.iam_policy_no_full_access_to_cloudtrail.iam_client",
            new=IAM(aws_provider),
        ):
            from prowler.providers.aws.services.iam.iam_policy_no_full_access_to_cloudtrail.iam_policy_no_full_access_to_cloudtrail import (
                iam_policy_no_full_access_to_cloudtrail,
            )

            check = iam_policy_no_full_access_to_cloudtrail()
            result = check.execute()
            # Assert: a single FAIL finding with the exact extended status.
            assert len(result) == 1
            assert result[0].status == "FAIL"
            assert (
                result[0].status_extended
                == f"Custom Policy {policy_name} allows 'cloudtrail:*' privileges."
            )
            assert result[0].resource_arn == arn
|
||||
|
||||
+75
@@ -329,6 +329,81 @@ class Test_iam_policy_no_full_access_to_kms_with_unicode:
|
||||
assert result[0].resource_arn == arn
|
||||
assert result[0].region == "us-east-1"
|
||||
|
||||
@mock_aws
def test_unattached_policy_skipped_when_scan_unused_services_disabled(self):
    """An unattached kms:* policy yields no finding when unused-service scanning is off."""
    aws_provider = set_mocked_aws_provider(
        [AWS_REGION_US_EAST_1], scan_unused_services=False
    )
    # Arrange: full KMS access on *, but the policy stays unattached.
    iam_client = client("iam")
    policy_name = "unattached_kms_full"
    policy_document_full_access = {
        "Version": "2012-10-17",
        "Statement": [
            {"Effect": "Allow", "Action": "kms:*", "Resource": "*"},
        ],
    }
    iam_client.create_policy(
        PolicyName=policy_name, PolicyDocument=dumps(policy_document_full_access)
    )

    with mock.patch(
        "prowler.providers.common.provider.Provider.get_global_provider",
        return_value=aws_provider,
    ):
        with mock.patch(
            "prowler.providers.aws.services.iam.iam_policy_no_full_access_to_kms.iam_policy_no_full_access_to_kms.iam_client",
            new=IAM(aws_provider),
        ):
            from prowler.providers.aws.services.iam.iam_policy_no_full_access_to_kms.iam_policy_no_full_access_to_kms import (
                iam_policy_no_full_access_to_kms,
            )

            check = iam_policy_no_full_access_to_kms()
            result = check.execute()
            # Assert: unattached policy is skipped entirely.
            assert result == []
|
||||
|
||||
@mock_aws
def test_attached_policy_fails_when_scan_unused_services_disabled(self):
    """An attached kms:* policy still FAILs when unused-service scanning is off."""
    aws_provider = set_mocked_aws_provider(
        [AWS_REGION_US_EAST_1], scan_unused_services=False
    )
    # Arrange: attach the full-access KMS policy to a user so it is "in use".
    iam_client = client("iam")
    user_name = "test_user_kms"
    policy_name = "attached_kms_full"
    policy_document_full_access = {
        "Version": "2012-10-17",
        "Statement": [
            {"Effect": "Allow", "Action": "kms:*", "Resource": "*"},
        ],
    }
    arn = iam_client.create_policy(
        PolicyName=policy_name, PolicyDocument=dumps(policy_document_full_access)
    )["Policy"]["Arn"]
    iam_client.create_user(UserName=user_name)
    iam_client.attach_user_policy(UserName=user_name, PolicyArn=arn)

    with mock.patch(
        "prowler.providers.common.provider.Provider.get_global_provider",
        return_value=aws_provider,
    ):
        with mock.patch(
            "prowler.providers.aws.services.iam.iam_policy_no_full_access_to_kms.iam_policy_no_full_access_to_kms.iam_client",
            new=IAM(aws_provider),
        ):
            from prowler.providers.aws.services.iam.iam_policy_no_full_access_to_kms.iam_policy_no_full_access_to_kms import (
                iam_policy_no_full_access_to_kms,
            )

            check = iam_policy_no_full_access_to_kms()
            result = check.execute()
            # Assert: a single FAIL finding with the exact extended status.
            assert len(result) == 1
            assert result[0].status == "FAIL"
            assert (
                result[0].status_extended
                == f"Custom Policy {policy_name} allows 'kms:*' privileges."
            )
            assert result[0].resource_arn == arn
|
||||
|
||||
@mock_aws
|
||||
def test_policy_full_access_and_full_deny_to_kms(self):
|
||||
aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
|
||||
|
||||
+81
@@ -507,3 +507,84 @@ class Test_iam_policy_no_wildcard_marketplace_subscribe:
|
||||
check = iam_policy_no_wildcard_marketplace_subscribe()
|
||||
result = check.execute()
|
||||
assert len(result) == 0
|
||||
|
||||
@mock_aws
def test_unattached_policy_skipped_when_scan_unused_services_disabled(self):
    """No FAIL for an unattached risky policy when --scan-unused-services is off."""
    aws_provider = set_mocked_aws_provider(
        [AWS_REGION_US_EAST_1], scan_unused_services=False
    )
    # Arrange: wildcard aws-marketplace:Subscribe, left unattached.
    iam_client = client("iam")
    policy_name = "unattached_marketplace_subscribe"
    policy_document = {
        "Version": "2012-10-17",
        "Statement": [
            {
                "Effect": "Allow",
                "Action": "aws-marketplace:Subscribe",
                "Resource": "*",
            },
        ],
    }
    iam_client.create_policy(
        PolicyName=policy_name, PolicyDocument=dumps(policy_document)
    )

    with mock.patch(
        "prowler.providers.common.provider.Provider.get_global_provider",
        return_value=aws_provider,
    ):
        with mock.patch(
            f"{CHECK_MODULE_PATH}.iam_client",
            new=IAM(aws_provider),
        ):
            from prowler.providers.aws.services.iam.iam_policy_no_wildcard_marketplace_subscribe.iam_policy_no_wildcard_marketplace_subscribe import (
                iam_policy_no_wildcard_marketplace_subscribe,
            )

            check = iam_policy_no_wildcard_marketplace_subscribe()
            result = check.execute()
            # Assert: unattached policy is skipped entirely.
            assert result == []
|
||||
|
||||
@mock_aws
def test_attached_policy_fails_when_scan_unused_services_disabled(self):
    """Attached risky policy still FAILs when --scan-unused-services is off."""
    aws_provider = set_mocked_aws_provider(
        [AWS_REGION_US_EAST_1], scan_unused_services=False
    )
    # Arrange: attach the wildcard-subscribe policy to a user so it is "in use".
    iam_client = client("iam")
    user_name = "test_user_marketplace"
    policy_name = "attached_marketplace_subscribe"
    policy_document = {
        "Version": "2012-10-17",
        "Statement": [
            {
                "Effect": "Allow",
                "Action": "aws-marketplace:Subscribe",
                "Resource": "*",
            },
        ],
    }
    arn = iam_client.create_policy(
        PolicyName=policy_name, PolicyDocument=dumps(policy_document)
    )["Policy"]["Arn"]
    iam_client.create_user(UserName=user_name)
    iam_client.attach_user_policy(UserName=user_name, PolicyArn=arn)

    with mock.patch(
        "prowler.providers.common.provider.Provider.get_global_provider",
        return_value=aws_provider,
    ):
        with mock.patch(
            f"{CHECK_MODULE_PATH}.iam_client",
            new=IAM(aws_provider),
        ):
            from prowler.providers.aws.services.iam.iam_policy_no_wildcard_marketplace_subscribe.iam_policy_no_wildcard_marketplace_subscribe import (
                iam_policy_no_wildcard_marketplace_subscribe,
            )

            check = iam_policy_no_wildcard_marketplace_subscribe()
            result = check.execute()
            # Assert: exactly one FAIL finding for the attached policy.
            assert len(result) == 1
            assert result[0].status == "FAIL"
            assert result[0].resource_arn == arn
|
||||
|
||||
@@ -0,0 +1,96 @@
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
from prowler.providers.scaleway.models import (
|
||||
ScalewayIdentityInfo,
|
||||
ScalewaySession,
|
||||
)
|
||||
from prowler.providers.scaleway.services.iam.iam_service import (
|
||||
ScalewayAPIKey,
|
||||
ScalewayUser,
|
||||
)
|
||||
|
||||
# Deterministic fake identifiers shared across the Scaleway test-suite.
# None of these correspond to real Scaleway resources.

# Scaleway Identity
ORGANIZATION_ID = "b4ce0bfc-38fc-4c53-8757-548be64add26"
ROOT_USER_ID = "00000000-0000-0000-0000-000000000001"
MEMBER_USER_ID = "00000000-0000-0000-0000-000000000002"
APPLICATION_ID = "00000000-0000-0000-0000-000000000003"
BEARER_EMAIL = "pedro@prowler.com"

# Scaleway Credentials (dummy values matching the SCW key formats)
ACCESS_KEY = "SCWAE000000000000000"
SECRET_KEY = "00000000-0000-0000-0000-000000000000"

# API Key Constants — one access key per bearer kind (root / member / app)
ROOT_API_KEY = "SCWROOT00000000000000"
USER_API_KEY = "SCWUSER00000000000000"
APP_API_KEY = "SCWAPP000000000000000"
|
||||
|
||||
|
||||
def set_mocked_scaleway_provider(
    access_key: str = ACCESS_KEY,
    secret_key: str = SECRET_KEY,
    identity: ScalewayIdentityInfo = None,
    audit_config: dict = None,
):
    """Create a mocked ScalewayProvider for testing.

    Args:
        access_key: Access key placed on the mocked session.
        secret_key: Secret key placed on the mocked session.
        identity: Optional identity override; defaults to a root-user
            bearer in the fixture organization.
        audit_config: Optional audit configuration mapping (defaults to {}).

    Returns:
        A MagicMock mimicking the ScalewayProvider attributes read by
        services: type, session, identity, audit_config and fixer_config.
    """
    provider = MagicMock()
    provider.type = "scaleway"
    provider.session = ScalewaySession(
        access_key=access_key,
        secret_key=secret_key,
        organization_id=ORGANIZATION_ID,
        default_project_id=None,
        default_region="fr-par",
        # The real SDK client is never used in tests — a MagicMock suffices.
        client=MagicMock(),
    )
    provider.identity = identity or ScalewayIdentityInfo(
        organization_id=ORGANIZATION_ID,
        bearer_id=ROOT_USER_ID,
        bearer_type="user",
        bearer_email=BEARER_EMAIL,
        account_root_user_id=ROOT_USER_ID,
    )
    provider.audit_config = audit_config or {}
    provider.fixer_config = {}

    return provider
|
||||
|
||||
|
||||
def make_user(
    user_id: str = ROOT_USER_ID,
    email: str = BEARER_EMAIL,
    account_root_user_id: str = ROOT_USER_ID,
    mfa: bool = True,
) -> ScalewayUser:
    """Build a ScalewayUser fixture.

    Defaults produce the organization's root user; pass a different
    ``user_id`` (e.g. MEMBER_USER_ID) for an ordinary member.
    """
    return ScalewayUser(
        id=user_id,
        email=email,
        # Derive a username from the email local part when one is given.
        username=email.split("@")[0] if email else None,
        organization_id=ORGANIZATION_ID,
        account_root_user_id=account_root_user_id,
        mfa=mfa,
        # A user whose id equals the root id is modeled as the "owner".
        type_="owner" if user_id == account_root_user_id else "member",
        status="activated",
    )
|
||||
|
||||
|
||||
def make_api_key(
    access_key: str = USER_API_KEY,
    user_id: str = MEMBER_USER_ID,
    application_id: str = None,
    description: str = "test key",
    expires_at: str = None,
) -> ScalewayAPIKey:
    """Build a ScalewayAPIKey fixture.

    Defaults produce a non-expiring key owned by the member user; set
    ``user_id=ROOT_USER_ID`` or ``application_id`` to model other bearers.
    """
    return ScalewayAPIKey(
        access_key=access_key,
        description=description,
        user_id=user_id,
        application_id=application_id,
        default_project_id=None,
        editable=True,
        managed=False,
        creation_ip=None,
        # Fixed timestamps keep the fixtures deterministic.
        created_at="2026-01-01T00:00:00Z",
        updated_at="2026-01-01T00:00:00Z",
        expires_at=expires_at,
    )
|
||||
@@ -0,0 +1,106 @@
|
||||
import os
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
|
||||
from prowler.providers.scaleway.exceptions.exceptions import (
|
||||
ScalewayAuthenticationError,
|
||||
ScalewayCredentialsError,
|
||||
ScalewayIdentityError,
|
||||
)
|
||||
from prowler.providers.scaleway.models import ScalewaySession
|
||||
from prowler.providers.scaleway.scaleway_provider import ScalewayProvider
|
||||
from tests.providers.scaleway.scaleway_fixtures import (
|
||||
ACCESS_KEY,
|
||||
BEARER_EMAIL,
|
||||
ORGANIZATION_ID,
|
||||
ROOT_USER_ID,
|
||||
SECRET_KEY,
|
||||
)
|
||||
|
||||
|
||||
class Test_ScalewayProvider_setup_session:
    """Unit tests for ScalewayProvider.setup_session."""

    def test_missing_access_key_raises_credentials_error(self):
        """Without SCW_ACCESS_KEY/SCW_SECRET_KEY and no args, setup_session must raise."""
        # Patch then pop so the env vars are genuinely absent (not just empty)
        # and are restored after the context exits.
        with mock.patch.dict(
            os.environ, {"SCW_ACCESS_KEY": "", "SCW_SECRET_KEY": ""}, clear=False
        ):
            os.environ.pop("SCW_ACCESS_KEY", None)
            os.environ.pop("SCW_SECRET_KEY", None)
            with pytest.raises(ScalewayCredentialsError):
                ScalewayProvider.setup_session()

    def test_returns_session_with_credentials(self):
        """Explicit credentials yield a ScalewaySession carrying them and the default region."""
        session = ScalewayProvider.setup_session(
            access_key=ACCESS_KEY,
            secret_key=SECRET_KEY,
            organization_id=ORGANIZATION_ID,
        )
        assert isinstance(session, ScalewaySession)
        assert session.access_key == ACCESS_KEY
        assert session.organization_id == ORGANIZATION_ID
        assert session.default_region == "fr-par"
|
||||
|
||||
|
||||
class Test_ScalewayProvider_setup_identity:
|
||||
def _build_session(self):
|
||||
return ScalewaySession(
|
||||
access_key=ACCESS_KEY,
|
||||
secret_key=SECRET_KEY,
|
||||
organization_id=ORGANIZATION_ID,
|
||||
default_region="fr-par",
|
||||
client=mock.MagicMock(),
|
||||
)
|
||||
|
||||
def test_resolves_user_bearer_identity(self):
|
||||
session = self._build_session()
|
||||
api_key = mock.MagicMock(user_id=ROOT_USER_ID, application_id=None)
|
||||
user = mock.MagicMock(
|
||||
email=BEARER_EMAIL,
|
||||
organization_id=ORGANIZATION_ID,
|
||||
account_root_user_id=ROOT_USER_ID,
|
||||
)
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.scaleway.scaleway_provider.IamV1Alpha1API"
|
||||
) as iam_cls:
|
||||
iam = iam_cls.return_value
|
||||
iam.get_api_key.return_value = api_key
|
||||
iam.get_user.return_value = user
|
||||
|
||||
identity = ScalewayProvider.setup_identity(session)
|
||||
|
||||
assert identity.organization_id == ORGANIZATION_ID
|
||||
assert identity.bearer_type == "user"
|
||||
assert identity.bearer_id == ROOT_USER_ID
|
||||
assert identity.bearer_email == BEARER_EMAIL
|
||||
assert identity.account_root_user_id == ROOT_USER_ID
|
||||
|
||||
def test_missing_organization_raises_identity_error(self):
|
||||
session = self._build_session()
|
||||
session.organization_id = None
|
||||
api_key = mock.MagicMock(user_id=None, application_id="app-id")
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.scaleway.scaleway_provider.IamV1Alpha1API"
|
||||
) as iam_cls:
|
||||
iam = iam_cls.return_value
|
||||
iam.get_api_key.return_value = api_key
|
||||
|
||||
with pytest.raises(ScalewayIdentityError):
|
||||
ScalewayProvider.setup_identity(session)
|
||||
|
||||
|
||||
class Test_ScalewayProvider_validate_credentials:
|
||||
def test_invalid_credentials_raise_authentication_error(self):
|
||||
session = ScalewaySession(
|
||||
access_key=ACCESS_KEY,
|
||||
secret_key=SECRET_KEY,
|
||||
organization_id=ORGANIZATION_ID,
|
||||
client=mock.MagicMock(),
|
||||
)
|
||||
with mock.patch(
|
||||
"prowler.providers.scaleway.scaleway_provider.IamV1Alpha1API"
|
||||
) as iam_cls:
|
||||
iam_cls.return_value.get_api_key.side_effect = Exception("expired")
|
||||
with pytest.raises(ScalewayAuthenticationError):
|
||||
ScalewayProvider.validate_credentials(session)
|
||||
+172
@@ -0,0 +1,172 @@
|
||||
from unittest import mock
|
||||
|
||||
from tests.providers.scaleway.scaleway_fixtures import (
|
||||
APP_API_KEY,
|
||||
APPLICATION_ID,
|
||||
MEMBER_USER_ID,
|
||||
ORGANIZATION_ID,
|
||||
ROOT_API_KEY,
|
||||
ROOT_USER_ID,
|
||||
USER_API_KEY,
|
||||
make_api_key,
|
||||
set_mocked_scaleway_provider,
|
||||
)
|
||||
|
||||
|
||||
def _patch_clients(iam_client_mock):
|
||||
"""Patch both the provider and the iam_client singleton."""
|
||||
return [
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_scaleway_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.scaleway.services.iam.iam_no_root_api_keys.iam_no_root_api_keys.iam_client",
|
||||
new=iam_client_mock,
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
class Test_iam_no_root_api_keys:
|
||||
def test_no_api_keys_returns_empty_findings(self):
|
||||
iam_client = mock.MagicMock()
|
||||
iam_client.users_loaded = True
|
||||
iam_client.api_keys_loaded = True
|
||||
iam_client.account_root_user_id = ROOT_USER_ID
|
||||
iam_client.api_keys = []
|
||||
iam_client.organization_id = ORGANIZATION_ID
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_scaleway_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.scaleway.services.iam.iam_no_root_api_keys.iam_no_root_api_keys.iam_client",
|
||||
new=iam_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.scaleway.services.iam.iam_no_root_api_keys.iam_no_root_api_keys import (
|
||||
iam_no_root_api_keys,
|
||||
)
|
||||
|
||||
result = iam_no_root_api_keys().execute()
|
||||
assert result == []
|
||||
|
||||
def test_root_api_key_fails(self):
|
||||
iam_client = mock.MagicMock()
|
||||
iam_client.users_loaded = True
|
||||
iam_client.api_keys_loaded = True
|
||||
iam_client.account_root_user_id = ROOT_USER_ID
|
||||
iam_client.api_keys = [
|
||||
make_api_key(access_key=ROOT_API_KEY, user_id=ROOT_USER_ID)
|
||||
]
|
||||
iam_client.organization_id = ORGANIZATION_ID
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_scaleway_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.scaleway.services.iam.iam_no_root_api_keys.iam_no_root_api_keys.iam_client",
|
||||
new=iam_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.scaleway.services.iam.iam_no_root_api_keys.iam_no_root_api_keys import (
|
||||
iam_no_root_api_keys,
|
||||
)
|
||||
|
||||
result = iam_no_root_api_keys().execute()
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
assert result[0].resource_id == ROOT_API_KEY
|
||||
assert ROOT_USER_ID in result[0].status_extended
|
||||
|
||||
def test_user_api_key_passes(self):
|
||||
iam_client = mock.MagicMock()
|
||||
iam_client.users_loaded = True
|
||||
iam_client.api_keys_loaded = True
|
||||
iam_client.account_root_user_id = ROOT_USER_ID
|
||||
iam_client.api_keys = [
|
||||
make_api_key(access_key=USER_API_KEY, user_id=MEMBER_USER_ID)
|
||||
]
|
||||
iam_client.organization_id = ORGANIZATION_ID
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_scaleway_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.scaleway.services.iam.iam_no_root_api_keys.iam_no_root_api_keys.iam_client",
|
||||
new=iam_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.scaleway.services.iam.iam_no_root_api_keys.iam_no_root_api_keys import (
|
||||
iam_no_root_api_keys,
|
||||
)
|
||||
|
||||
result = iam_no_root_api_keys().execute()
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "PASS"
|
||||
assert result[0].resource_id == USER_API_KEY
|
||||
|
||||
def test_application_api_key_passes(self):
|
||||
iam_client = mock.MagicMock()
|
||||
iam_client.users_loaded = True
|
||||
iam_client.api_keys_loaded = True
|
||||
iam_client.account_root_user_id = ROOT_USER_ID
|
||||
iam_client.api_keys = [
|
||||
make_api_key(
|
||||
access_key=APP_API_KEY, user_id=None, application_id=APPLICATION_ID
|
||||
)
|
||||
]
|
||||
iam_client.organization_id = ORGANIZATION_ID
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_scaleway_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.scaleway.services.iam.iam_no_root_api_keys.iam_no_root_api_keys.iam_client",
|
||||
new=iam_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.scaleway.services.iam.iam_no_root_api_keys.iam_no_root_api_keys import (
|
||||
iam_no_root_api_keys,
|
||||
)
|
||||
|
||||
result = iam_no_root_api_keys().execute()
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "PASS"
|
||||
|
||||
def test_users_load_failure_returns_manual(self):
|
||||
iam_client = mock.MagicMock()
|
||||
iam_client.users_loaded = False
|
||||
iam_client.api_keys_loaded = True
|
||||
iam_client.account_root_user_id = None
|
||||
iam_client.api_keys = [
|
||||
make_api_key(access_key=ROOT_API_KEY, user_id=ROOT_USER_ID)
|
||||
]
|
||||
iam_client.organization_id = ORGANIZATION_ID
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_scaleway_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.scaleway.services.iam.iam_no_root_api_keys.iam_no_root_api_keys.iam_client",
|
||||
new=iam_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.scaleway.services.iam.iam_no_root_api_keys.iam_no_root_api_keys import (
|
||||
iam_no_root_api_keys,
|
||||
)
|
||||
|
||||
result = iam_no_root_api_keys().execute()
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "MANUAL"
|
||||
assert "Could not retrieve" in result[0].status_extended
|
||||
@@ -0,0 +1,84 @@
|
||||
from unittest import mock
|
||||
|
||||
from prowler.providers.scaleway.services.iam.iam_service import IAM
|
||||
from tests.providers.scaleway.scaleway_fixtures import (
|
||||
APPLICATION_ID,
|
||||
MEMBER_USER_ID,
|
||||
ORGANIZATION_ID,
|
||||
ROOT_USER_ID,
|
||||
USER_API_KEY,
|
||||
set_mocked_scaleway_provider,
|
||||
)
|
||||
|
||||
|
||||
def _mock_user(
|
||||
user_id: str, account_root_user_id: str = ROOT_USER_ID, email: str = "u@example.com"
|
||||
):
|
||||
user = mock.MagicMock()
|
||||
user.id = user_id
|
||||
user.email = email
|
||||
user.username = email.split("@")[0]
|
||||
user.organization_id = ORGANIZATION_ID
|
||||
user.account_root_user_id = account_root_user_id
|
||||
user.mfa = True
|
||||
user.type_ = "owner" if user_id == account_root_user_id else "member"
|
||||
user.status = "activated"
|
||||
return user
|
||||
|
||||
|
||||
def _mock_api_key(access_key: str, user_id: str = None, application_id: str = None):
|
||||
key = mock.MagicMock()
|
||||
key.access_key = access_key
|
||||
key.description = "test"
|
||||
key.user_id = user_id
|
||||
key.application_id = application_id
|
||||
key.default_project_id = None
|
||||
key.editable = True
|
||||
key.managed = False
|
||||
key.creation_ip = None
|
||||
key.created_at = None
|
||||
key.updated_at = None
|
||||
key.expires_at = None
|
||||
return key
|
||||
|
||||
|
||||
class Test_IAM_service:
|
||||
def test_loads_users_and_api_keys(self):
|
||||
provider = set_mocked_scaleway_provider()
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.scaleway.services.iam.iam_service.IamV1Alpha1API"
|
||||
) as iam_cls:
|
||||
api = iam_cls.return_value
|
||||
api.list_users_all.return_value = [
|
||||
_mock_user(ROOT_USER_ID),
|
||||
_mock_user(MEMBER_USER_ID, email="m@example.com"),
|
||||
]
|
||||
api.list_api_keys_all.return_value = [
|
||||
_mock_api_key(USER_API_KEY, user_id=MEMBER_USER_ID),
|
||||
_mock_api_key("SCWAPP", application_id=APPLICATION_ID),
|
||||
]
|
||||
|
||||
iam = IAM(provider)
|
||||
|
||||
assert iam.users_loaded is True
|
||||
assert iam.api_keys_loaded is True
|
||||
assert iam.account_root_user_id == ROOT_USER_ID
|
||||
assert len(iam.users) == 2
|
||||
assert len(iam.api_keys) == 2
|
||||
|
||||
def test_marks_users_unloaded_on_error(self):
|
||||
provider = set_mocked_scaleway_provider()
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.scaleway.services.iam.iam_service.IamV1Alpha1API"
|
||||
) as iam_cls:
|
||||
api = iam_cls.return_value
|
||||
api.list_users_all.side_effect = Exception("denied")
|
||||
api.list_api_keys_all.return_value = []
|
||||
|
||||
iam = IAM(provider)
|
||||
|
||||
assert iam.users_loaded is False
|
||||
assert iam.api_keys_loaded is True
|
||||
assert iam.account_root_user_id is None
|
||||
@@ -226,5 +226,6 @@ pnpm run test:e2e:ui
|
||||
- [ ] Relevant E2E tests pass
|
||||
- [ ] All UI states handled (loading, error, empty)
|
||||
- [ ] No secrets in code (use `.env.local`)
|
||||
- [ ] New npm dependencies include package-health evidence (maintenance, popularity, known vulnerabilities, license, release age) and a rationale for not using existing/native alternatives.
|
||||
- [ ] Error messages sanitized
|
||||
- [ ] Server-side validation present
|
||||
|
||||
@@ -11,9 +11,14 @@ All notable changes to the **Prowler UI** are documented in this file.
|
||||
### 🔄 Changed
|
||||
|
||||
- Trimmed unused npm dependencies [(#11115)](https://github.com/prowler-cloud/prowler/pull/11115)
|
||||
- Lighthouse now accepts Prowler App Finding Groups MCP tools [(#11140)](https://github.com/prowler-cloud/prowler/pull/11140)
|
||||
- Attack Paths graph now uses React Flow with improved layout, interactions, export, minimap, and browser test coverage [(#10686)](https://github.com/prowler-cloud/prowler/pull/10686)
|
||||
- SAML ACS URL is only shown if the email domain is configured [(#11144)](https://github.com/prowler-cloud/prowler/pull/11144)
|
||||
|
||||
### 🐞 Fixed
|
||||
|
||||
- Mute Findings modal now enforces the 100-character limit on the rule name input with a live counter and inline error, matching the existing reason field behaviour [(#11158)](https://github.com/prowler-cloud/prowler/pull/11158)
|
||||
|
||||
---
|
||||
|
||||
## [1.26.2] (Prowler 5.26.2)
|
||||
|
||||
@@ -106,6 +106,11 @@ describe("MuteFindingsModal", () => {
|
||||
expect(
|
||||
screen.getByText("Explain why these findings are being muted"),
|
||||
).toBeInTheDocument();
|
||||
expect(screen.getByText("0/100 characters")).toBeInTheDocument();
|
||||
expect(screen.getByLabelText("Rule Name")).toHaveAttribute(
|
||||
"maxLength",
|
||||
"100",
|
||||
);
|
||||
expect(screen.getByText("0/500 characters")).toBeInTheDocument();
|
||||
expect(screen.getByLabelText("Reason")).toHaveAttribute("maxLength", "500");
|
||||
});
|
||||
@@ -183,4 +188,23 @@ describe("MuteFindingsModal", () => {
|
||||
screen.getByText("Reason must be 500 characters or fewer"),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it("clamps oversized rule name input and shows a local validation error", () => {
|
||||
render(
|
||||
<MuteFindingsModal
|
||||
isOpen
|
||||
onOpenChange={vi.fn()}
|
||||
findingIds={["finding-1"]}
|
||||
/>,
|
||||
);
|
||||
|
||||
fireEvent.change(screen.getByLabelText("Rule Name"), {
|
||||
target: { value: "a".repeat(101) },
|
||||
});
|
||||
|
||||
expect(screen.getByText("100/100 characters")).toBeInTheDocument();
|
||||
expect(
|
||||
screen.getByText("Name must be 100 characters or fewer"),
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -11,8 +11,12 @@ import { FormButtons } from "@/components/ui/form";
|
||||
import { Label } from "@/components/ui/form/Label";
|
||||
import { useMuteRuleAction } from "@/hooks/use-mute-rule-action";
|
||||
import {
|
||||
enforceMuteRuleNameLimit,
|
||||
enforceMuteRuleReasonLimit,
|
||||
getMuteRuleNameCounterText,
|
||||
getMuteRuleReasonCounterText,
|
||||
MAX_MUTE_RULE_NAME_LENGTH,
|
||||
MAX_MUTE_RULE_REASON_LENGTH,
|
||||
} from "@/lib/mute-rules";
|
||||
|
||||
interface MuteFindingsModalProps {
|
||||
@@ -35,6 +39,8 @@ export function MuteFindingsModal({
|
||||
preparationError = null,
|
||||
}: MuteFindingsModalProps) {
|
||||
const [state, setState] = useState<MuteRuleActionState | null>(null);
|
||||
const [name, setName] = useState("");
|
||||
const [nameLengthError, setNameLengthError] = useState<string>();
|
||||
const [reason, setReason] = useState("");
|
||||
const [reasonLengthError, setReasonLengthError] = useState<string>();
|
||||
const { isPending, runAction } = useMuteRuleAction();
|
||||
@@ -48,9 +54,16 @@ export function MuteFindingsModal({
|
||||
isPreparing ||
|
||||
findingIds.length === 0 ||
|
||||
Boolean(preparationError);
|
||||
const nameError = state?.errors?.name;
|
||||
const nameError = nameLengthError || state?.errors?.name;
|
||||
const reasonError = reasonLengthError || state?.errors?.reason;
|
||||
|
||||
const handleNameChange = (event: React.ChangeEvent<HTMLInputElement>) => {
|
||||
const nextName = enforceMuteRuleNameLimit(event.target.value);
|
||||
|
||||
setName(nextName.value);
|
||||
setNameLengthError(nextName.error);
|
||||
};
|
||||
|
||||
const handleReasonChange = (
|
||||
event: React.ChangeEvent<HTMLTextAreaElement>,
|
||||
) => {
|
||||
@@ -77,8 +90,15 @@ export function MuteFindingsModal({
|
||||
}
|
||||
|
||||
const formData = new FormData(e.currentTarget);
|
||||
formData.set("name", name);
|
||||
formData.set("reason", reason);
|
||||
|
||||
const nextName = enforceMuteRuleNameLimit(name);
|
||||
if (nextName.error) {
|
||||
setNameLengthError(nextName.error);
|
||||
return;
|
||||
}
|
||||
|
||||
const nextReason = enforceMuteRuleReasonLimit(reason);
|
||||
if (nextReason.error) {
|
||||
setReasonLengthError(nextReason.error);
|
||||
@@ -211,6 +231,9 @@ export function MuteFindingsModal({
|
||||
placeholder="e.g., Ignore dev environment S3 buckets"
|
||||
required
|
||||
disabled={isPending}
|
||||
value={name}
|
||||
onChange={handleNameChange}
|
||||
maxLength={MAX_MUTE_RULE_NAME_LENGTH}
|
||||
aria-invalid={nameError ? "true" : "false"}
|
||||
aria-describedby={
|
||||
nameError
|
||||
@@ -218,12 +241,17 @@ export function MuteFindingsModal({
|
||||
: "mute-rule-name-description"
|
||||
}
|
||||
/>
|
||||
<p
|
||||
id="mute-rule-name-description"
|
||||
className="text-text-neutral-tertiary text-xs"
|
||||
>
|
||||
A descriptive name for this mute rule
|
||||
</p>
|
||||
<div className="flex items-center justify-between gap-3">
|
||||
<p
|
||||
id="mute-rule-name-description"
|
||||
className="text-text-neutral-tertiary text-xs"
|
||||
>
|
||||
A descriptive name for this mute rule
|
||||
</p>
|
||||
<p className="text-text-neutral-tertiary shrink-0 text-xs">
|
||||
{getMuteRuleNameCounterText(name)}
|
||||
</p>
|
||||
</div>
|
||||
{nameError ? (
|
||||
<p
|
||||
id="mute-rule-name-error"
|
||||
@@ -250,7 +278,7 @@ export function MuteFindingsModal({
|
||||
value={reason}
|
||||
onChange={handleReasonChange}
|
||||
rows={4}
|
||||
maxLength={500}
|
||||
maxLength={MAX_MUTE_RULE_REASON_LENGTH}
|
||||
aria-invalid={reasonError ? "true" : "false"}
|
||||
aria-describedby={
|
||||
reasonError
|
||||
|
||||
@@ -109,10 +109,10 @@ export function MyComponent() {
|
||||
|
||||
## Adding New shadcn Components
|
||||
|
||||
When adding new shadcn components using the CLI:
|
||||
When adding new shadcn components using the CLI, pin the reviewed CLI version instead of using `@latest`:
|
||||
|
||||
```bash
|
||||
npx shadcn@latest add [component-name]
|
||||
pnpm dlx shadcn@4.7.0 add [component-name]
|
||||
```
|
||||
|
||||
The component will be automatically added to this directory due to the configuration in `components.json`:
|
||||
|
||||
@@ -67,6 +67,10 @@ const ALLOWED_TOOLS = new Set([
|
||||
"prowler_app_search_security_findings",
|
||||
"prowler_app_get_finding_details",
|
||||
"prowler_app_get_findings_overview",
|
||||
// Finding Groups
|
||||
"prowler_app_list_finding_groups",
|
||||
"prowler_app_get_finding_group_details",
|
||||
"prowler_app_list_finding_group_resources",
|
||||
// Providers
|
||||
"prowler_app_search_providers",
|
||||
// Scans
|
||||
|
||||
@@ -1,11 +1,31 @@
|
||||
export const MAX_MUTE_RULE_NAME_LENGTH = 100;
|
||||
export const MAX_MUTE_RULE_REASON_LENGTH = 500;
|
||||
|
||||
export const MUTE_RULE_NAME_TOO_LONG_MESSAGE = `Name must be ${MAX_MUTE_RULE_NAME_LENGTH} characters or fewer`;
|
||||
export const MUTE_RULE_REASON_TOO_LONG_MESSAGE = `Reason must be ${MAX_MUTE_RULE_REASON_LENGTH} characters or fewer`;
|
||||
|
||||
export function getMuteRuleNameCounterText(name: string): string {
|
||||
return `${name.length}/${MAX_MUTE_RULE_NAME_LENGTH} characters`;
|
||||
}
|
||||
|
||||
export function getMuteRuleReasonCounterText(reason: string): string {
|
||||
return `${reason.length}/${MAX_MUTE_RULE_REASON_LENGTH} characters`;
|
||||
}
|
||||
|
||||
export function enforceMuteRuleNameLimit(name: string): {
|
||||
value: string;
|
||||
error?: string;
|
||||
} {
|
||||
if (name.length <= MAX_MUTE_RULE_NAME_LENGTH) {
|
||||
return { value: name };
|
||||
}
|
||||
|
||||
return {
|
||||
value: name.slice(0, MAX_MUTE_RULE_NAME_LENGTH),
|
||||
error: MUTE_RULE_NAME_TOO_LONG_MESSAGE,
|
||||
};
|
||||
}
|
||||
|
||||
export function enforceMuteRuleReasonLimit(reason: string): {
|
||||
value: string;
|
||||
error?: string;
|
||||
|
||||
@@ -28,6 +28,8 @@
|
||||
"test:e2e:headed": "playwright test --project=auth --project=sign-up --project=providers --project=invitations --project=scans --headed",
|
||||
"test:e2e:report": "playwright show-report",
|
||||
"test:e2e:install": "playwright install",
|
||||
"audit": "pnpm audit --audit-level critical",
|
||||
"audit:high": "pnpm audit --audit-level high",
|
||||
"audit:fix": "pnpm audit fix"
|
||||
},
|
||||
"dependencies": {
|
||||
|
||||
@@ -14,20 +14,21 @@ minimumReleaseAge: 1440
|
||||
|
||||
# --- Level 2: Explicit Build Script Allow-list ---
|
||||
# Only these packages may run install/postinstall lifecycle scripts.
|
||||
# Any unlisted package with lifecycle scripts will have them silently skipped.
|
||||
onlyBuiltDependencies:
|
||||
# Any unlisted package with lifecycle scripts fails the install.
|
||||
strictDepBuilds: true
|
||||
allowBuilds:
|
||||
# sharp: Native image processing (libvips). Installs platform-specific pre-built binary or compiles from source.
|
||||
- sharp
|
||||
sharp: true
|
||||
# @sentry/cli: Downloads the sentry-cli native binary for the current platform. Validates integrity via SHA256.
|
||||
- "@sentry/cli"
|
||||
"@sentry/cli": true
|
||||
# esbuild: Go binary. Downloads the pre-compiled binary matching the current platform/architecture.
|
||||
- esbuild
|
||||
esbuild: true
|
||||
# @heroui/shared-utils: Demi pattern — detects React/Next.js version at install time and copies the compatible bundle (React 18 vs 19).
|
||||
- "@heroui/shared-utils"
|
||||
"@heroui/shared-utils": true
|
||||
# unrs-resolver: Rust module resolver (NAPI-RS). Verifies the correct native binding is available for the platform.
|
||||
- unrs-resolver
|
||||
unrs-resolver: true
|
||||
# msw: Copies mockServiceWorker.js into the directories listed in package.json's `msw.workerDirectory` (here: `public/`) so the runtime worker stays in sync with the installed msw version. Pure file copy — no native binary, no network access. Required for vitest browser tests to intercept fetches via the service worker.
|
||||
- msw
|
||||
msw: true
|
||||
|
||||
# --- Level 3: Trust Policy + Exotic Subdeps ---
|
||||
# Fail when a package's trust evidence is downgraded (e.g., new publisher).
|
||||
|
||||
Reference in New Issue
Block a user