mirror of
https://github.com/prowler-cloud/prowler.git
synced 2026-04-03 05:55:54 +00:00
Compare commits
6 Commits
dependabot
...
chore/bump
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
763185a9b4 | ||
|
|
dfe06a1077 | ||
|
|
4f86667433 | ||
|
|
4bb1e5cff7 | ||
|
|
99b80ebbd9 | ||
|
|
d18c5a8974 |
7
.github/labeler.yml
vendored
7
.github/labeler.yml
vendored
@@ -67,6 +67,11 @@ provider/googleworkspace:
|
||||
- any-glob-to-any-file: "prowler/providers/googleworkspace/**"
|
||||
- any-glob-to-any-file: "tests/providers/googleworkspace/**"
|
||||
|
||||
provider/vercel:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "prowler/providers/vercel/**"
|
||||
- any-glob-to-any-file: "tests/providers/vercel/**"
|
||||
|
||||
github_actions:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: ".github/workflows/*"
|
||||
@@ -102,6 +107,8 @@ mutelist:
|
||||
- any-glob-to-any-file: "tests/providers/openstack/lib/mutelist/**"
|
||||
- any-glob-to-any-file: "prowler/providers/googleworkspace/lib/mutelist/**"
|
||||
- any-glob-to-any-file: "tests/providers/googleworkspace/lib/mutelist/**"
|
||||
- any-glob-to-any-file: "prowler/providers/vercel/lib/mutelist/**"
|
||||
- any-glob-to-any-file: "tests/providers/vercel/lib/mutelist/**"
|
||||
|
||||
integration/s3:
|
||||
- changed-files:
|
||||
|
||||
8
.github/test-impact.yml
vendored
8
.github/test-impact.yml
vendored
@@ -177,6 +177,14 @@ modules:
|
||||
- tests/providers/llm/**
|
||||
e2e: []
|
||||
|
||||
- name: sdk-vercel
|
||||
match:
|
||||
- prowler/providers/vercel/**
|
||||
- prowler/compliance/vercel/**
|
||||
tests:
|
||||
- tests/providers/vercel/**
|
||||
e2e: []
|
||||
|
||||
# ============================================
|
||||
# SDK - Lib modules
|
||||
# ============================================
|
||||
|
||||
180
.github/workflows/pr-check-compliance-mapping.yml
vendored
Normal file
180
.github/workflows/pr-check-compliance-mapping.yml
vendored
Normal file
@@ -0,0 +1,180 @@
|
||||
name: 'Tools: Check Compliance Mapping'
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types:
|
||||
- 'opened'
|
||||
- 'synchronize'
|
||||
- 'reopened'
|
||||
- 'labeled'
|
||||
- 'unlabeled'
|
||||
branches:
|
||||
- 'master'
|
||||
- 'v5.*'
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
check-compliance-mapping:
|
||||
if: contains(github.event.pull_request.labels.*.name, 'no-compliance-check') == false
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- name: Harden Runner
|
||||
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
|
||||
with:
|
||||
egress-policy: block
|
||||
allowed-endpoints: >
|
||||
api.github.com:443
|
||||
github.com:443
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
# zizmor: ignore[artipacked]
|
||||
persist-credentials: true # Required by tj-actions/changed-files to fetch PR branch
|
||||
|
||||
- name: Get changed files
|
||||
id: changed-files
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
with:
|
||||
files: |
|
||||
prowler/providers/**/services/**/*.metadata.json
|
||||
prowler/compliance/**/*.json
|
||||
|
||||
- name: Check if new checks are mapped in compliance
|
||||
id: compliance-check
|
||||
run: |
|
||||
ADDED_METADATA="${STEPS_CHANGED_FILES_OUTPUTS_ADDED_FILES}"
|
||||
ALL_CHANGED="${STEPS_CHANGED_FILES_OUTPUTS_ALL_CHANGED_FILES}"
|
||||
|
||||
# Filter only new metadata files (new checks)
|
||||
new_checks=""
|
||||
for f in $ADDED_METADATA; do
|
||||
case "$f" in *.metadata.json) new_checks="$new_checks $f" ;; esac
|
||||
done
|
||||
|
||||
if [ -z "$(echo "$new_checks" | tr -d ' ')" ]; then
|
||||
echo "No new checks detected."
|
||||
echo "has_new_checks=false" >> "$GITHUB_OUTPUT"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Collect compliance files changed in this PR
|
||||
changed_compliance=""
|
||||
for f in $ALL_CHANGED; do
|
||||
case "$f" in prowler/compliance/*.json) changed_compliance="$changed_compliance $f" ;; esac
|
||||
done
|
||||
|
||||
UNMAPPED=""
|
||||
MAPPED=""
|
||||
|
||||
for metadata_file in $new_checks; do
|
||||
check_dir=$(dirname "$metadata_file")
|
||||
check_id=$(basename "$check_dir")
|
||||
provider=$(echo "$metadata_file" | cut -d'/' -f3)
|
||||
|
||||
# Read CheckID from the metadata JSON for accuracy
|
||||
if [ -f "$metadata_file" ]; then
|
||||
json_check_id=$(python3 -c "import json; print(json.load(open('$metadata_file')).get('CheckID', ''))" 2>/dev/null || echo "")
|
||||
if [ -n "$json_check_id" ]; then
|
||||
check_id="$json_check_id"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Search for the check ID in compliance files changed in this PR
|
||||
found_in=""
|
||||
for comp_file in $changed_compliance; do
|
||||
if grep -q "\"${check_id}\"" "$comp_file" 2>/dev/null; then
|
||||
found_in="${found_in}$(basename "$comp_file" .json), "
|
||||
fi
|
||||
done
|
||||
|
||||
if [ -n "$found_in" ]; then
|
||||
found_in=$(echo "$found_in" | sed 's/, $//')
|
||||
MAPPED="${MAPPED}- \`${check_id}\` (\`${provider}\`): ${found_in}"$'\n'
|
||||
else
|
||||
UNMAPPED="${UNMAPPED}- \`${check_id}\` (\`${provider}\`)"$'\n'
|
||||
fi
|
||||
done
|
||||
|
||||
echo "has_new_checks=true" >> "$GITHUB_OUTPUT"
|
||||
|
||||
if [ -n "$UNMAPPED" ]; then
|
||||
echo "has_unmapped=true" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "has_unmapped=false" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
{
|
||||
echo "unmapped<<EOF"
|
||||
echo -e "${UNMAPPED}"
|
||||
echo "EOF"
|
||||
} >> "$GITHUB_OUTPUT"
|
||||
|
||||
{
|
||||
echo "mapped<<EOF"
|
||||
echo -e "${MAPPED}"
|
||||
echo "EOF"
|
||||
} >> "$GITHUB_OUTPUT"
|
||||
env:
|
||||
STEPS_CHANGED_FILES_OUTPUTS_ADDED_FILES: ${{ steps.changed-files.outputs.added_files }}
|
||||
STEPS_CHANGED_FILES_OUTPUTS_ALL_CHANGED_FILES: ${{ steps.changed-files.outputs.all_changed_files }}
|
||||
|
||||
- name: Manage compliance review label
|
||||
if: steps.compliance-check.outputs.has_new_checks == 'true'
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
PR_NUMBER: ${{ github.event.pull_request.number }}
|
||||
HAS_UNMAPPED: ${{ steps.compliance-check.outputs.has_unmapped }}
|
||||
run: |
|
||||
LABEL_NAME="needs-compliance-review"
|
||||
|
||||
if [ "$HAS_UNMAPPED" = "true" ]; then
|
||||
echo "Adding compliance review label to PR #${PR_NUMBER}..."
|
||||
gh pr edit "$PR_NUMBER" --add-label "$LABEL_NAME" --repo "${{ github.repository }}" || true
|
||||
else
|
||||
echo "Removing compliance review label from PR #${PR_NUMBER}..."
|
||||
gh pr edit "$PR_NUMBER" --remove-label "$LABEL_NAME" --repo "${{ github.repository }}" || true
|
||||
fi
|
||||
|
||||
- name: Find existing compliance comment
|
||||
if: steps.compliance-check.outputs.has_new_checks == 'true' && github.event.pull_request.head.repo.full_name == github.repository
|
||||
id: find-comment
|
||||
uses: peter-evans/find-comment@b30e6a3c0ed37e7c023ccd3f1db5c6c0b0c23aad # v4.0.0
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-author: 'github-actions[bot]'
|
||||
body-includes: '<!-- compliance-mapping-check -->'
|
||||
|
||||
- name: Create or update compliance comment
|
||||
if: steps.compliance-check.outputs.has_new_checks == 'true' && github.event.pull_request.head.repo.full_name == github.repository
|
||||
uses: peter-evans/create-or-update-comment@e8674b075228eee787fea43ef493e45ece1004c9 # v5.0.0
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-id: ${{ steps.find-comment.outputs.comment-id }}
|
||||
edit-mode: replace
|
||||
body: |
|
||||
<!-- compliance-mapping-check -->
|
||||
## Compliance Mapping Review
|
||||
|
||||
This PR adds new checks. Please verify that they have been mapped to the relevant compliance framework requirements.
|
||||
|
||||
${{ steps.compliance-check.outputs.unmapped != '' && format('### New checks not mapped to any compliance framework in this PR
|
||||
|
||||
{0}
|
||||
|
||||
> Please review whether these checks should be added to compliance framework requirements in `prowler/compliance/<provider>/`. Each compliance JSON has a `Checks` array inside each requirement — add the check ID there if it satisfies that requirement.', steps.compliance-check.outputs.unmapped) || '' }}
|
||||
|
||||
${{ steps.compliance-check.outputs.mapped != '' && format('### New checks already mapped in this PR
|
||||
|
||||
{0}', steps.compliance-check.outputs.mapped) || '' }}
|
||||
|
||||
Use the `no-compliance-check` label to skip this check.
|
||||
24
.github/workflows/sdk-tests.yml
vendored
24
.github/workflows/sdk-tests.yml
vendored
@@ -499,6 +499,30 @@ jobs:
|
||||
flags: prowler-py${{ matrix.python-version }}-googleworkspace
|
||||
files: ./googleworkspace_coverage.xml
|
||||
|
||||
# Vercel Provider
|
||||
- name: Check if Vercel files changed
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
id: changed-vercel
|
||||
uses: tj-actions/changed-files@7dee1b0c1557f278e5c7dc244927139d78c0e22a # v47.0.4
|
||||
with:
|
||||
files: |
|
||||
./prowler/**/vercel/**
|
||||
./tests/**/vercel/**
|
||||
./poetry.lock
|
||||
|
||||
- name: Run Vercel tests
|
||||
if: steps.changed-vercel.outputs.any_changed == 'true'
|
||||
run: poetry run pytest -n auto --cov=./prowler/providers/vercel --cov-report=xml:vercel_coverage.xml tests/providers/vercel
|
||||
|
||||
- name: Upload Vercel coverage to Codecov
|
||||
if: steps.changed-vercel.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
flags: prowler-py${{ matrix.python-version }}-vercel
|
||||
files: ./vercel_coverage.xml
|
||||
|
||||
# Lib
|
||||
- name: Check if Lib files changed
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
|
||||
@@ -119,6 +119,7 @@ Every AWS provider scan will enqueue an Attack Paths ingestion job automatically
|
||||
| Image | N/A | N/A | N/A | N/A | Official | CLI, API |
|
||||
| Google Workspace | 1 | 1 | 0 | 1 | Official | CLI |
|
||||
| OpenStack | 27 | 4 | 0 | 8 | Official | UI, API, CLI |
|
||||
| Vercel | 30 | 6 | 0 | 5 | Official | CLI |
|
||||
| NHN | 6 | 2 | 1 | 0 | Unofficial | CLI |
|
||||
|
||||
> [!Note]
|
||||
|
||||
@@ -296,6 +296,13 @@
|
||||
"user-guide/providers/openstack/getting-started-openstack",
|
||||
"user-guide/providers/openstack/authentication"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Vercel",
|
||||
"pages": [
|
||||
"user-guide/providers/vercel/getting-started-vercel",
|
||||
"user-guide/providers/vercel/authentication"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
|
||||
@@ -37,6 +37,7 @@ The supported providers right now are:
|
||||
| [Infra as Code](/user-guide/providers/iac/getting-started-iac) | Official | Repositories | UI, API, CLI |
|
||||
| [MongoDB Atlas](/user-guide/providers/mongodbatlas/getting-started-mongodbatlas) | Official | Organizations | UI, API, CLI |
|
||||
| [OpenStack](/user-guide/providers/openstack/getting-started-openstack) | Official | Projects | UI, API, CLI |
|
||||
| [Vercel](/user-guide/providers/vercel/getting-started-vercel) | Official | Teams / Projects | CLI |
|
||||
| [LLM](/user-guide/providers/llm/getting-started-llm) | Official | Models | CLI |
|
||||
| [Image](/user-guide/providers/image/getting-started-image) | Official | Container Images | CLI, API |
|
||||
| [Google Workspace](/user-guide/providers/googleworkspace/getting-started-googleworkspace) | Official | Domains | CLI |
|
||||
|
||||
@@ -141,6 +141,22 @@ The following list includes all the GitHub checks with configurable variables th
|
||||
|--------------------------------------------|---------------------------------------------|---------|
|
||||
| `repository_inactive_not_archived` | `inactive_not_archived_days_threshold` | Integer |
|
||||
|
||||
## Vercel
|
||||
|
||||
### Configurable Checks
|
||||
The following list includes all the Vercel checks with configurable variables that can be changed in the configuration YAML file:
|
||||
|
||||
| Check Name | Value | Type |
|
||||
|-----------------------------------------------------|------------------------------------|-----------------|
|
||||
| `authentication_no_stale_tokens` | `stale_token_threshold_days` | Integer |
|
||||
| `authentication_token_not_expired` | `days_to_expire_threshold` | Integer |
|
||||
| `deployment_production_uses_stable_target` | `stable_branches` | List of Strings |
|
||||
| `domain_ssl_certificate_valid` | `days_to_expire_threshold` | Integer |
|
||||
| `project_environment_no_secrets_in_plain_type` | `secret_suffixes` | List of Strings |
|
||||
| `team_member_role_least_privilege` | `max_owner_percentage` | Integer |
|
||||
| `team_member_role_least_privilege` | `max_owners` | Integer |
|
||||
| `team_no_stale_invitations` | `stale_invitation_threshold_days` | Integer |
|
||||
|
||||
## Config YAML File Structure
|
||||
|
||||
<Note>
|
||||
@@ -624,5 +640,29 @@ github:
|
||||
# github.repository_inactive_not_archived
|
||||
inactive_not_archived_days_threshold: 180
|
||||
|
||||
# Vercel Configuration
|
||||
vercel:
|
||||
# vercel.deployment_production_uses_stable_target
|
||||
stable_branches:
|
||||
- "main"
|
||||
- "master"
|
||||
# vercel.authentication_token_not_expired & vercel.domain_ssl_certificate_valid
|
||||
days_to_expire_threshold: 7
|
||||
# vercel.authentication_no_stale_tokens
|
||||
stale_token_threshold_days: 90
|
||||
# vercel.team_no_stale_invitations
|
||||
stale_invitation_threshold_days: 30
|
||||
# vercel.team_member_role_least_privilege
|
||||
max_owner_percentage: 20
|
||||
max_owners: 3
|
||||
# vercel.project_environment_no_secrets_in_plain_type
|
||||
secret_suffixes:
|
||||
- "_KEY"
|
||||
- "_SECRET"
|
||||
- "_TOKEN"
|
||||
- "_PASSWORD"
|
||||
- "_API_KEY"
|
||||
- "_PRIVATE_KEY"
|
||||
|
||||
|
||||
```
|
||||
|
||||
137
docs/user-guide/providers/vercel/authentication.mdx
Normal file
137
docs/user-guide/providers/vercel/authentication.mdx
Normal file
@@ -0,0 +1,137 @@
|
||||
---
|
||||
title: "Vercel Authentication in Prowler"
|
||||
---
|
||||
|
||||
import { VersionBadge } from "/snippets/version-badge.mdx"
|
||||
|
||||
<VersionBadge version="5.21.0" />
|
||||
|
||||
Prowler for Vercel authenticates using an **API Token**.
|
||||
|
||||
## Required Permissions
|
||||
|
||||
Prowler requires read-only access to Vercel teams, projects, deployments, domains, and security settings. The API Token must have access to the target team scope.
|
||||
|
||||
<Note>
|
||||
Vercel API Tokens inherit the permissions of the user that created them. Ensure the user has at least a **Viewer** role on the team to be scanned.
|
||||
</Note>
|
||||
|
||||
| Resource | Access | Description |
|
||||
|----------|--------|-------------|
|
||||
| Teams | Read | Required to list teams, members, and SSO configuration |
|
||||
| Projects | Read | Required to list projects, environment variables, and deployment protection settings |
|
||||
| Deployments | Read | Required to list deployments and protection status |
|
||||
| Domains | Read | Required to list domains, DNS records, and SSL certificates |
|
||||
| Firewall | Read | Required to read WAF rules, rate limiting, and IP blocking configuration |
|
||||
|
||||
---
|
||||
|
||||
## API Token
|
||||
|
||||
### Step 1: Create an API Token
|
||||
|
||||
1. Log into the [Vercel Dashboard](https://vercel.com/dashboard).
|
||||
2. Click the account avatar in the bottom-left corner and select "Settings".
|
||||
|
||||

|
||||
|
||||
3. In the left sidebar, click "Tokens".
|
||||
4. Under **Create Token**, enter a descriptive name (e.g., "Prowler Scan").
|
||||
5. Select the **Scope** — choose the team to be scanned or "Full Account" for all teams.
|
||||
6. Set an **Expiration** date, or select "No expiration" for continuous scanning.
|
||||
7. Click **Create**.
|
||||
|
||||

|
||||
|
||||
8. Copy the token immediately.
|
||||
|
||||
<Warning>
|
||||
Vercel only displays the token once. Copy it immediately and store it securely. If lost, a new token must be created.
|
||||
</Warning>
|
||||
|
||||
### Step 2: Provide the Token to Prowler
|
||||
|
||||
Export the token as an environment variable:
|
||||
|
||||
```console
|
||||
export VERCEL_TOKEN="your-api-token-here"
|
||||
prowler vercel
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Team Scoping (Optional)
|
||||
|
||||
By default, Prowler auto-discovers all teams the authenticated user belongs to and scans each one. To restrict the scan to a specific team, provide the Team ID.
|
||||
|
||||
### Locate the Team ID
|
||||
|
||||
1. In the Vercel Dashboard, navigate to "Settings" for the target team.
|
||||
2. Scroll down to the **Team ID** section and copy the value.
|
||||
|
||||

|
||||
|
||||
### Provide the Team ID to Prowler
|
||||
|
||||
Export the Team ID as an environment variable:
|
||||
|
||||
```console
|
||||
export VERCEL_TOKEN="your-api-token-here"
|
||||
export VERCEL_TEAM="team_yourteamid"
|
||||
prowler vercel
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Environment Variables Reference
|
||||
|
||||
| Variable | Required | Description |
|
||||
|----------|----------|-------------|
|
||||
| `VERCEL_TOKEN` | Yes | Vercel API Bearer Token |
|
||||
| `VERCEL_TEAM` | No | Team ID or slug to scope the scan to a single team |
|
||||
|
||||
---
|
||||
|
||||
## Best Practices
|
||||
|
||||
- **Create a dedicated token for Prowler** — Avoid reusing tokens shared with other integrations.
|
||||
- **Use environment variables** — Never hardcode credentials in scripts or commands.
|
||||
- **Scope tokens to specific teams** — When possible, limit token access to the team being scanned.
|
||||
- **Set token expiration** — Use time-limited tokens and rotate them regularly.
|
||||
- **Use least privilege** — Assign the Viewer role to the user creating the token unless write access is explicitly needed.
|
||||
|
||||
---
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### "Vercel credentials not found" Error
|
||||
|
||||
This error occurs when no API Token is provided. Ensure the `VERCEL_TOKEN` environment variable is set:
|
||||
|
||||
```console
|
||||
export VERCEL_TOKEN="your-api-token-here"
|
||||
```
|
||||
|
||||
### "Invalid or expired Vercel API token" Error
|
||||
|
||||
- Verify the API Token is correct and has not expired.
|
||||
- Check that the token has not been revoked in the Vercel Dashboard under "Settings" > "Tokens".
|
||||
|
||||
### "Insufficient permissions" Error
|
||||
|
||||
- Ensure the user that created the token has at least a **Viewer** role on the target team.
|
||||
- If scanning a specific team, verify the token scope includes that team.
|
||||
|
||||
### "Team not found or not accessible" Error
|
||||
|
||||
This error occurs when the provided `VERCEL_TEAM` value does not match an accessible team. Verify the Team ID is correct:
|
||||
|
||||
1. Navigate to the team "Settings" in the Vercel Dashboard.
|
||||
2. Copy the exact **Team ID** value from the settings page.
|
||||
|
||||
### "Rate limit exceeded" Error
|
||||
|
||||
Vercel applies rate limits to API requests. Prowler automatically retries rate-limited requests up to 3 times with exponential backoff. If this error persists:
|
||||
|
||||
- Reduce the number of projects being scanned in a single run using the `--project` argument.
|
||||
- Wait a few minutes and retry the scan.
|
||||
108
docs/user-guide/providers/vercel/getting-started-vercel.mdx
Normal file
108
docs/user-guide/providers/vercel/getting-started-vercel.mdx
Normal file
@@ -0,0 +1,108 @@
|
||||
---
|
||||
title: "Getting Started With Vercel on Prowler"
|
||||
---
|
||||
|
||||
import { VersionBadge } from "/snippets/version-badge.mdx"
|
||||
|
||||
Prowler for Vercel scans teams and projects for security misconfigurations, including deployment protection, environment variable exposure, WAF rules, domain configuration, team access controls, and more.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Set up authentication for Vercel with the [Vercel Authentication](/user-guide/providers/vercel/authentication) guide before starting:
|
||||
|
||||
- Create a Vercel API Token with access to the target team
|
||||
- Identify the Team ID (optional, required to scope the scan to a single team)
|
||||
|
||||
## Prowler CLI
|
||||
|
||||
<VersionBadge version="5.22.0" />
|
||||
|
||||
### Step 1: Set Up Authentication
|
||||
|
||||
Follow the [Vercel Authentication](/user-guide/providers/vercel/authentication) guide to create an API Token, then export it:
|
||||
|
||||
```console
|
||||
export VERCEL_TOKEN="your-api-token-here"
|
||||
```
|
||||
|
||||
Optionally, scope the scan to a specific team:
|
||||
|
||||
```console
|
||||
export VERCEL_TEAM="team_yourteamid"
|
||||
```
|
||||
|
||||
### Step 2: Run the First Scan
|
||||
|
||||
Run a baseline scan after credentials are configured:
|
||||
|
||||
```console
|
||||
prowler vercel
|
||||
```
|
||||
|
||||
Prowler automatically discovers all teams accessible with the provided token and runs security checks against them.
|
||||
|
||||
### Step 3: Filter the Scan Scope (Optional)
|
||||
|
||||
#### Filter by Team
|
||||
|
||||
To scan a specific team, set the `VERCEL_TEAM` environment variable with the Team ID or slug:
|
||||
|
||||
```console
|
||||
export VERCEL_TEAM="team_yourteamid"
|
||||
prowler vercel
|
||||
```
|
||||
|
||||
<Note>
|
||||
When no team is specified, Prowler auto-discovers all teams the authenticated user belongs to and scans each one.
|
||||
</Note>
|
||||
|
||||
#### Filter by Project
|
||||
|
||||
To scan only specific projects, use the `--project` argument:
|
||||
|
||||
```console
|
||||
prowler vercel --project my-project-name
|
||||
```
|
||||
|
||||
Multiple projects can be specified:
|
||||
|
||||
```console
|
||||
prowler vercel --project my-project-name another-project
|
||||
```
|
||||
|
||||
Project IDs are also supported:
|
||||
|
||||
```console
|
||||
prowler vercel --project prj_abc123def456
|
||||
```
|
||||
|
||||
### Step 4: Use a Custom Configuration (Optional)
|
||||
|
||||
Prowler uses a configuration file to customize provider behavior. The Vercel configuration includes:
|
||||
|
||||
```yaml
|
||||
vercel:
|
||||
# Maximum number of retries for API requests (default is 3)
|
||||
max_retries: 3
|
||||
```
|
||||
|
||||
To use a custom configuration:
|
||||
|
||||
```console
|
||||
prowler vercel --config-file /path/to/config.yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Supported Services
|
||||
|
||||
Prowler for Vercel includes security checks across the following services:
|
||||
|
||||
| Service | Description |
|
||||
|---------|-------------|
|
||||
| **Authentication** | Token expiration and staleness checks |
|
||||
| **Deployment** | Preview deployment access and production stability |
|
||||
| **Domain** | DNS configuration, SSL certificates, and wildcard exposure |
|
||||
| **Project** | Deployment protection, environment variable security, fork protection, and skew protection |
|
||||
| **Security** | Web Application Firewall (WAF), rate limiting, IP blocking, and managed rulesets |
|
||||
| **Team** | SSO enforcement, directory sync, member access, and invitation hygiene |
|
||||
Binary file not shown.
|
After Width: | Height: | Size: 226 KiB |
BIN
docs/user-guide/providers/vercel/images/vercel-create-token.png
Normal file
BIN
docs/user-guide/providers/vercel/images/vercel-create-token.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 284 KiB |
BIN
docs/user-guide/providers/vercel/images/vercel-team-id.png
Normal file
BIN
docs/user-guide/providers/vercel/images/vercel-team-id.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 310 KiB |
146
poetry.lock
generated
146
poetry.lock
generated
@@ -1,4 +1,4 @@
|
||||
# This file is automatically @generated by Poetry 2.3.2 and should not be changed by hand.
|
||||
# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "about-time"
|
||||
@@ -605,21 +605,21 @@ requests = ">=2.21.0,<3.0.0"
|
||||
|
||||
[[package]]
|
||||
name = "alibabacloud-tea-openapi"
|
||||
version = "0.4.1"
|
||||
version = "0.4.4"
|
||||
description = "Alibaba Cloud openapi SDK Library for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "alibabacloud_tea_openapi-0.4.1-py3-none-any.whl", hash = "sha256:e46bfa3ca34086d2c357d217a0b7284ecbd4b3bab5c88e075e73aec637b0e4a0"},
|
||||
{file = "alibabacloud_tea_openapi-0.4.1.tar.gz", hash = "sha256:2384b090870fdb089c3c40f3fb8cf0145b8c7d6c14abbac521f86a01abb5edaf"},
|
||||
{file = "alibabacloud_tea_openapi-0.4.4-py3-none-any.whl", hash = "sha256:cea6bc1fe35b0319a8752cb99eb0ecb0dab7ca1a71b99c12970ba0867410995f"},
|
||||
{file = "alibabacloud_tea_openapi-0.4.4.tar.gz", hash = "sha256:1b0917bc03cd49417da64945e92731716d53e2eb8707b235f54e45b7473221ce"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
alibabacloud-credentials = ">=1.0.2,<2.0.0"
|
||||
alibabacloud-gateway-spi = ">=0.0.2,<1.0.0"
|
||||
alibabacloud-tea-util = ">=0.3.13,<1.0.0"
|
||||
cryptography = ">=3.0.0,<45.0.0"
|
||||
cryptography = {version = ">=3.0.0,<47.0.0", markers = "python_version >= \"3.8\""}
|
||||
darabonba-core = ">=1.0.3,<2.0.0"
|
||||
|
||||
[[package]]
|
||||
@@ -1888,7 +1888,6 @@ files = [
|
||||
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
|
||||
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
|
||||
]
|
||||
markers = {dev = "platform_system == \"Windows\" or sys_platform == \"win32\""}
|
||||
|
||||
[[package]]
|
||||
name = "contextlib2"
|
||||
@@ -1983,62 +1982,75 @@ toml = ["tomli ; python_full_version <= \"3.11.0a6\""]
|
||||
|
||||
[[package]]
|
||||
name = "cryptography"
|
||||
version = "44.0.3"
|
||||
version = "46.0.6"
|
||||
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
|
||||
optional = false
|
||||
python-versions = "!=3.9.0,!=3.9.1,>=3.7"
|
||||
python-versions = "!=3.9.0,!=3.9.1,>=3.8"
|
||||
groups = ["main", "dev"]
|
||||
files = [
|
||||
{file = "cryptography-44.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:962bc30480a08d133e631e8dfd4783ab71cc9e33d5d7c1e192f0b7c06397bb88"},
|
||||
{file = "cryptography-44.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffc61e8f3bf5b60346d89cd3d37231019c17a081208dfbbd6e1605ba03fa137"},
|
||||
{file = "cryptography-44.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58968d331425a6f9eedcee087f77fd3c927c88f55368f43ff7e0a19891f2642c"},
|
||||
{file = "cryptography-44.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e28d62e59a4dbd1d22e747f57d4f00c459af22181f0b2f787ea83f5a876d7c76"},
|
||||
{file = "cryptography-44.0.3-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af653022a0c25ef2e3ffb2c673a50e5a0d02fecc41608f4954176f1933b12359"},
|
||||
{file = "cryptography-44.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:157f1f3b8d941c2bd8f3ffee0af9b049c9665c39d3da9db2dc338feca5e98a43"},
|
||||
{file = "cryptography-44.0.3-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:c6cd67722619e4d55fdb42ead64ed8843d64638e9c07f4011163e46bc512cf01"},
|
||||
{file = "cryptography-44.0.3-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:b424563394c369a804ecbee9b06dfb34997f19d00b3518e39f83a5642618397d"},
|
||||
{file = "cryptography-44.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c91fc8e8fd78af553f98bc7f2a1d8db977334e4eea302a4bfd75b9461c2d8904"},
|
||||
{file = "cryptography-44.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:25cd194c39fa5a0aa4169125ee27d1172097857b27109a45fadc59653ec06f44"},
|
||||
{file = "cryptography-44.0.3-cp37-abi3-win32.whl", hash = "sha256:3be3f649d91cb182c3a6bd336de8b61a0a71965bd13d1a04a0e15b39c3d5809d"},
|
||||
{file = "cryptography-44.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:3883076d5c4cc56dbef0b898a74eb6992fdac29a7b9013870b34efe4ddb39a0d"},
|
||||
{file = "cryptography-44.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:5639c2b16764c6f76eedf722dbad9a0914960d3489c0cc38694ddf9464f1bb2f"},
|
||||
{file = "cryptography-44.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ffef566ac88f75967d7abd852ed5f182da252d23fac11b4766da3957766759"},
|
||||
{file = "cryptography-44.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:192ed30fac1728f7587c6f4613c29c584abdc565d7417c13904708db10206645"},
|
||||
{file = "cryptography-44.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7d5fe7195c27c32a64955740b949070f21cba664604291c298518d2e255931d2"},
|
||||
{file = "cryptography-44.0.3-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3f07943aa4d7dad689e3bb1638ddc4944cc5e0921e3c227486daae0e31a05e54"},
|
||||
{file = "cryptography-44.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cb90f60e03d563ca2445099edf605c16ed1d5b15182d21831f58460c48bffb93"},
|
||||
{file = "cryptography-44.0.3-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:ab0b005721cc0039e885ac3503825661bd9810b15d4f374e473f8c89b7d5460c"},
|
||||
{file = "cryptography-44.0.3-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:3bb0847e6363c037df8f6ede57d88eaf3410ca2267fb12275370a76f85786a6f"},
|
||||
{file = "cryptography-44.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b0cc66c74c797e1db750aaa842ad5b8b78e14805a9b5d1348dc603612d3e3ff5"},
|
||||
{file = "cryptography-44.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6866df152b581f9429020320e5eb9794c8780e90f7ccb021940d7f50ee00ae0b"},
|
||||
{file = "cryptography-44.0.3-cp39-abi3-win32.whl", hash = "sha256:c138abae3a12a94c75c10499f1cbae81294a6f983b3af066390adee73f433028"},
|
||||
{file = "cryptography-44.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:5d186f32e52e66994dce4f766884bcb9c68b8da62d61d9d215bfe5fb56d21334"},
|
||||
{file = "cryptography-44.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:cad399780053fb383dc067475135e41c9fe7d901a97dd5d9c5dfb5611afc0d7d"},
|
||||
{file = "cryptography-44.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:21a83f6f35b9cc656d71b5de8d519f566df01e660ac2578805ab245ffd8523f8"},
|
||||
{file = "cryptography-44.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fc3c9babc1e1faefd62704bb46a69f359a9819eb0292e40df3fb6e3574715cd4"},
|
||||
{file = "cryptography-44.0.3-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:e909df4053064a97f1e6565153ff8bb389af12c5c8d29c343308760890560aff"},
|
||||
{file = "cryptography-44.0.3-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:dad80b45c22e05b259e33ddd458e9e2ba099c86ccf4e88db7bbab4b747b18d06"},
|
||||
{file = "cryptography-44.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:479d92908277bed6e1a1c69b277734a7771c2b78633c224445b5c60a9f4bc1d9"},
|
||||
{file = "cryptography-44.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:896530bc9107b226f265effa7ef3f21270f18a2026bc09fed1ebd7b66ddf6375"},
|
||||
{file = "cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9b4d4a5dbee05a2c390bf212e78b99434efec37b17a4bff42f50285c5c8c9647"},
|
||||
{file = "cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02f55fb4f8b79c1221b0961488eaae21015b69b210e18c386b69de182ebb1259"},
|
||||
{file = "cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:dd3db61b8fe5be220eee484a17233287d0be6932d056cf5738225b9c05ef4fff"},
|
||||
{file = "cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:978631ec51a6bbc0b7e58f23b68a8ce9e5f09721940933e9c217068388789fe5"},
|
||||
{file = "cryptography-44.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:5d20cc348cca3a8aa7312f42ab953a56e15323800ca3ab0706b8cd452a3a056c"},
|
||||
{file = "cryptography-44.0.3.tar.gz", hash = "sha256:fe19d8bc5536a91a24a8133328880a41831b6c5df54599a8417b62fe015d3053"},
|
||||
{file = "cryptography-46.0.6-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:64235194bad039a10bb6d2d930ab3323baaec67e2ce36215fd0952fad0930ca8"},
|
||||
{file = "cryptography-46.0.6-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:26031f1e5ca62fcb9d1fcb34b2b60b390d1aacaa15dc8b895a9ed00968b97b30"},
|
||||
{file = "cryptography-46.0.6-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9a693028b9cbe51b5a1136232ee8f2bc242e4e19d456ded3fa7c86e43c713b4a"},
|
||||
{file = "cryptography-46.0.6-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:67177e8a9f421aa2d3a170c3e56eca4e0128883cf52a071a7cbf53297f18b175"},
|
||||
{file = "cryptography-46.0.6-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:d9528b535a6c4f8ff37847144b8986a9a143585f0540fbcb1a98115b543aa463"},
|
||||
{file = "cryptography-46.0.6-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:22259338084d6ae497a19bae5d4c66b7ca1387d3264d1c2c0e72d9e9b6a77b97"},
|
||||
{file = "cryptography-46.0.6-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:760997a4b950ff00d418398ad73fbc91aa2894b5c1db7ccb45b4f68b42a63b3c"},
|
||||
{file = "cryptography-46.0.6-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3dfa6567f2e9e4c5dceb8ccb5a708158a2a871052fa75c8b78cb0977063f1507"},
|
||||
{file = "cryptography-46.0.6-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:cdcd3edcbc5d55757e5f5f3d330dd00007ae463a7e7aa5bf132d1f22a4b62b19"},
|
||||
{file = "cryptography-46.0.6-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:d4e4aadb7fc1f88687f47ca20bb7227981b03afaae69287029da08096853b738"},
|
||||
{file = "cryptography-46.0.6-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2b417edbe8877cda9022dde3a008e2deb50be9c407eef034aeeb3a8b11d9db3c"},
|
||||
{file = "cryptography-46.0.6-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:380343e0653b1c9d7e1f55b52aaa2dbb2fdf2730088d48c43ca1c7c0abb7cc2f"},
|
||||
{file = "cryptography-46.0.6-cp311-abi3-win32.whl", hash = "sha256:bcb87663e1f7b075e48c3be3ecb5f0b46c8fc50b50a97cf264e7f60242dca3f2"},
|
||||
{file = "cryptography-46.0.6-cp311-abi3-win_amd64.whl", hash = "sha256:6739d56300662c468fddb0e5e291f9b4d084bead381667b9e654c7dd81705124"},
|
||||
{file = "cryptography-46.0.6-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:2ef9e69886cbb137c2aef9772c2e7138dc581fad4fcbcf13cc181eb5a3ab6275"},
|
||||
{file = "cryptography-46.0.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7f417f034f91dcec1cb6c5c35b07cdbb2ef262557f701b4ecd803ee8cefed4f4"},
|
||||
{file = "cryptography-46.0.6-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d24c13369e856b94892a89ddf70b332e0b70ad4a5c43cf3e9cb71d6d7ffa1f7b"},
|
||||
{file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:aad75154a7ac9039936d50cf431719a2f8d4ed3d3c277ac03f3339ded1a5e707"},
|
||||
{file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:3c21d92ed15e9cfc6eb64c1f5a0326db22ca9c2566ca46d845119b45b4400361"},
|
||||
{file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:4668298aef7cddeaf5c6ecc244c2302a2b8e40f384255505c22875eebb47888b"},
|
||||
{file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:8ce35b77aaf02f3b59c90b2c8a05c73bac12cea5b4e8f3fbece1f5fddea5f0ca"},
|
||||
{file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:c89eb37fae9216985d8734c1afd172ba4927f5a05cfd9bf0e4863c6d5465b013"},
|
||||
{file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:ed418c37d095aeddf5336898a132fba01091f0ac5844e3e8018506f014b6d2c4"},
|
||||
{file = "cryptography-46.0.6-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:69cf0056d6947edc6e6760e5f17afe4bea06b56a9ac8a06de9d2bd6b532d4f3a"},
|
||||
{file = "cryptography-46.0.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e7304c4f4e9490e11efe56af6713983460ee0780f16c63f219984dab3af9d2d"},
|
||||
{file = "cryptography-46.0.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b928a3ca837c77a10e81a814a693f2295200adb3352395fad024559b7be7a736"},
|
||||
{file = "cryptography-46.0.6-cp314-cp314t-win32.whl", hash = "sha256:97c8115b27e19e592a05c45d0dd89c57f81f841cc9880e353e0d3bf25b2139ed"},
|
||||
{file = "cryptography-46.0.6-cp314-cp314t-win_amd64.whl", hash = "sha256:c797e2517cb7880f8297e2c0f43bb910e91381339336f75d2c1c2cbf811b70b4"},
|
||||
{file = "cryptography-46.0.6-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:12cae594e9473bca1a7aceb90536060643128bb274fcea0fc459ab90f7d1ae7a"},
|
||||
{file = "cryptography-46.0.6-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:639301950939d844a9e1c4464d7e07f902fe9a7f6b215bb0d4f28584729935d8"},
|
||||
{file = "cryptography-46.0.6-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ed3775295fb91f70b4027aeba878d79b3e55c0b3e97eaa4de71f8f23a9f2eb77"},
|
||||
{file = "cryptography-46.0.6-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8927ccfbe967c7df312ade694f987e7e9e22b2425976ddbf28271d7e58845290"},
|
||||
{file = "cryptography-46.0.6-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:b12c6b1e1651e42ab5de8b1e00dc3b6354fdfd778e7fa60541ddacc27cd21410"},
|
||||
{file = "cryptography-46.0.6-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:063b67749f338ca9c5a0b7fe438a52c25f9526b851e24e6c9310e7195aad3b4d"},
|
||||
{file = "cryptography-46.0.6-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:02fad249cb0e090b574e30b276a3da6a149e04ee2f049725b1f69e7b8351ec70"},
|
||||
{file = "cryptography-46.0.6-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:7e6142674f2a9291463e5e150090b95a8519b2fb6e6aaec8917dd8d094ce750d"},
|
||||
{file = "cryptography-46.0.6-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:456b3215172aeefb9284550b162801d62f5f264a081049a3e94307fe20792cfa"},
|
||||
{file = "cryptography-46.0.6-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:341359d6c9e68834e204ceaf25936dffeafea3829ab80e9503860dcc4f4dac58"},
|
||||
{file = "cryptography-46.0.6-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9a9c42a2723999a710445bc0d974e345c32adfd8d2fac6d8a251fa829ad31cfb"},
|
||||
{file = "cryptography-46.0.6-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6617f67b1606dfd9fe4dbfa354a9508d4a6d37afe30306fe6c101b7ce3274b72"},
|
||||
{file = "cryptography-46.0.6-cp38-abi3-win32.whl", hash = "sha256:7f6690b6c55e9c5332c0b59b9c8a3fb232ebf059094c17f9019a51e9827df91c"},
|
||||
{file = "cryptography-46.0.6-cp38-abi3-win_amd64.whl", hash = "sha256:79e865c642cfc5c0b3eb12af83c35c5aeff4fa5c672dc28c43721c2c9fdd2f0f"},
|
||||
{file = "cryptography-46.0.6-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:2ea0f37e9a9cf0df2952893ad145fd9627d326a59daec9b0802480fa3bcd2ead"},
|
||||
{file = "cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a3e84d5ec9ba01f8fd03802b2147ba77f0c8f2617b2aff254cedd551844209c8"},
|
||||
{file = "cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:12f0fa16cc247b13c43d56d7b35287ff1569b5b1f4c5e87e92cc4fcc00cd10c0"},
|
||||
{file = "cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:50575a76e2951fe7dbd1f56d181f8c5ceeeb075e9ff88e7ad997d2f42af06e7b"},
|
||||
{file = "cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:90e5f0a7b3be5f40c3a0a0eafb32c681d8d2c181fc2a1bdabe9b3f611d9f6b1a"},
|
||||
{file = "cryptography-46.0.6-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6728c49e3b2c180ef26f8e9f0a883a2c585638db64cf265b49c9ba10652d430e"},
|
||||
{file = "cryptography-46.0.6.tar.gz", hash = "sha256:27550628a518c5c6c903d84f637fbecf287f6cb9ced3804838a1295dc1fd0759"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""}
|
||||
cffi = {version = ">=2.0.0", markers = "python_full_version >= \"3.9.0\" and platform_python_implementation != \"PyPy\""}
|
||||
typing-extensions = {version = ">=4.13.2", markers = "python_full_version < \"3.11.0\""}
|
||||
|
||||
[package.extras]
|
||||
docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0) ; python_version >= \"3.8\""]
|
||||
docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs", "sphinx-rtd-theme (>=3.0.0)"]
|
||||
docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"]
|
||||
nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_version >= \"3.8\""]
|
||||
pep8test = ["check-sdist ; python_version >= \"3.8\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"]
|
||||
nox = ["nox[uv] (>=2024.4.15)"]
|
||||
pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.14)", "ruff (>=0.11.11)"]
|
||||
sdist = ["build (>=1.0.0)"]
|
||||
ssh = ["bcrypt (>=3.1.5)"]
|
||||
test = ["certifi (>=2024)", "cryptography-vectors (==44.0.3)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
|
||||
test = ["certifi (>=2024)", "cryptography-vectors (==46.0.6)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
|
||||
test-randomorder = ["pytest-randomly"]
|
||||
|
||||
[[package]]
|
||||
@@ -3071,7 +3083,7 @@ files = [
|
||||
|
||||
[package.dependencies]
|
||||
attrs = ">=22.2.0"
|
||||
jsonschema-specifications = ">=2023.3.6"
|
||||
jsonschema-specifications = ">=2023.03.6"
|
||||
referencing = ">=0.28.4"
|
||||
rpds-py = ">=0.7.1"
|
||||
|
||||
@@ -3151,7 +3163,7 @@ files = [
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
certifi = ">=14.5.14"
|
||||
certifi = ">=14.05.14"
|
||||
durationpy = ">=0.7"
|
||||
google-auth = ">=1.0.1"
|
||||
oauthlib = ">=3.2.2"
|
||||
@@ -4074,23 +4086,24 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"]
|
||||
|
||||
[[package]]
|
||||
name = "oci"
|
||||
version = "2.160.3"
|
||||
version = "2.169.0"
|
||||
description = "Oracle Cloud Infrastructure Python SDK"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "oci-2.160.3-py3-none-any.whl", hash = "sha256:858bff3e697098bdda44833d2476bfb4632126f0182178e7dbde4dbd156d71f0"},
|
||||
{file = "oci-2.160.3.tar.gz", hash = "sha256:57514889be3b713a8385d86e3ba8a33cf46e3563c2a7e29a93027fb30b8a2537"},
|
||||
{file = "oci-2.169.0-py3-none-any.whl", hash = "sha256:c71bb5143f307791082b3e33cc1545c2490a518cfed85ab1948ef5107c36d30b"},
|
||||
{file = "oci-2.169.0.tar.gz", hash = "sha256:f3c5fff00b01783b5325ea7b13bf140053ec1e9f41da20bfb9c8a349ee7662fa"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
certifi = "*"
|
||||
circuitbreaker = {version = ">=1.3.1,<3.0.0", markers = "python_version >= \"3.7\""}
|
||||
cryptography = ">=3.2.1,<46.0.0"
|
||||
pyOpenSSL = ">=17.5.0,<25.0.0"
|
||||
cryptography = ">=3.2.1,<47.0.0"
|
||||
pyOpenSSL = ">=17.5.0,<27.0.0"
|
||||
python-dateutil = ">=2.5.3,<3.0.0"
|
||||
pytz = ">=2016.10"
|
||||
urllib3 = {version = ">=2.6.3", markers = "python_version >= \"3.10.0\""}
|
||||
|
||||
[package.extras]
|
||||
adk = ["docstring-parser (>=0.16) ; python_version >= \"3.10\" and python_version < \"4\"", "mcp (>=1.6.0) ; python_version >= \"3.10\" and python_version < \"4\"", "pydantic (>=2.10.6) ; python_version >= \"3.10\" and python_version < \"4\"", "rich (>=13.9.4) ; python_version >= \"3.10\" and python_version < \"4\""]
|
||||
@@ -4963,7 +4976,7 @@ files = [
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
astroid = ">=3.3.8,<=3.4.0.dev0"
|
||||
astroid = ">=3.3.8,<=3.4.0-dev0"
|
||||
colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
|
||||
dill = [
|
||||
{version = ">=0.2", markers = "python_version < \"3.11\""},
|
||||
@@ -5024,18 +5037,19 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=7.4.0)", "pytest-cov (>=2.10.1)", "
|
||||
|
||||
[[package]]
|
||||
name = "pyopenssl"
|
||||
version = "24.3.0"
|
||||
version = "26.0.0"
|
||||
description = "Python wrapper module around the OpenSSL library"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "pyOpenSSL-24.3.0-py3-none-any.whl", hash = "sha256:e474f5a473cd7f92221cc04976e48f4d11502804657a08a989fb3be5514c904a"},
|
||||
{file = "pyopenssl-24.3.0.tar.gz", hash = "sha256:49f7a019577d834746bc55c5fce6ecbcec0f2b4ec5ce1cf43a9a173b8138bb36"},
|
||||
{file = "pyopenssl-26.0.0-py3-none-any.whl", hash = "sha256:df94d28498848b98cc1c0ffb8ef1e71e40210d3b0a8064c9d29571ed2904bf81"},
|
||||
{file = "pyopenssl-26.0.0.tar.gz", hash = "sha256:f293934e52936f2e3413b89c6ce36df66a0b34ae1ea3a053b8c5020ff2f513fc"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
cryptography = ">=41.0.5,<45"
|
||||
cryptography = ">=46.0.0,<47"
|
||||
typing-extensions = {version = ">=4.9", markers = "python_version < \"3.13\" and python_version >= \"3.8\""}
|
||||
|
||||
[package.extras]
|
||||
docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx_rtd_theme"]
|
||||
@@ -5808,10 +5822,10 @@ files = [
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
botocore = ">=1.37.4,<2.0a0"
|
||||
botocore = ">=1.37.4,<2.0a.0"
|
||||
|
||||
[package.extras]
|
||||
crt = ["botocore[crt] (>=1.37.4,<2.0a0)"]
|
||||
crt = ["botocore[crt] (>=1.37.4,<2.0a.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "safety"
|
||||
@@ -6729,4 +6743,4 @@ files = [
|
||||
[metadata]
|
||||
lock-version = "2.1"
|
||||
python-versions = ">=3.10,<3.13"
|
||||
content-hash = "65f1f9833d61f90f1f89ed70b3677f76c0693bae275dd39699df01c05050bbe6"
|
||||
content-hash = "91739ee5e383337160f9f08b76944ab4e8629c94084c8a9d115246862557f7c5"
|
||||
|
||||
@@ -14,12 +14,11 @@ All notable changes to the **Prowler SDK** are documented in this file.
|
||||
- `ec2_securitygroup_allow_ingress_from_internet_to_any_port_from_ip` check for AWS provider using `ipaddress.is_global` for accurate public IP detection [(#10335)](https://github.com/prowler-cloud/prowler/pull/10335)
|
||||
- `entra_conditional_access_policy_block_o365_elevated_insider_risk` check for M365 provider [(#10232)](https://github.com/prowler-cloud/prowler/pull/10232)
|
||||
- `--resource-group` and `--list-resource-groups` CLI flags to filter checks by resource group across all providers [(#10479)](https://github.com/prowler-cloud/prowler/pull/10479)
|
||||
- `apikeys_api_restricted_with_gemini_api` check for GCP provider [(#10280)](https://github.com/prowler-cloud/prowler/pull/10280)
|
||||
- `gemini_api_disabled` check for GCP provider [(#10280)](https://github.com/prowler-cloud/prowler/pull/10280)
|
||||
- CISA SCuBA Google Workspace Baselines compliance [(#10466)](https://github.com/prowler-cloud/prowler/pull/10466)
|
||||
- CIS Google Workspace Foundations Benchmark v1.3.0 compliance [(#10462)](https://github.com/prowler-cloud/prowler/pull/10462)
|
||||
- `entra_conditional_access_policy_device_registration_mfa_required` check and `entra_intune_enrollment_sign_in_frequency_every_time` enhancement for M365 provider [(#10222)](https://github.com/prowler-cloud/prowler/pull/10222)
|
||||
- `entra_conditional_access_policy_block_elevated_insider_risk` check for M365 provider [(#10234)](https://github.com/prowler-cloud/prowler/pull/10234)
|
||||
- `Vercel` provider support with 30 checks [(#10189)](https://github.com/prowler-cloud/prowler/pull/10189)
|
||||
|
||||
### 🔄 Changed
|
||||
|
||||
@@ -30,6 +29,11 @@ All notable changes to the **Prowler SDK** are documented in this file.
|
||||
|
||||
- `return` statements in `finally` blocks replaced across IAM, Organizations, GCP provider, and custom checks metadata to stop silently swallowing exceptions [(#10102)](https://github.com/prowler-cloud/prowler/pull/10102)
|
||||
|
||||
### 🔐 Security
|
||||
|
||||
- Sensitive CLI flag values (tokens, keys, passwords) in HTML output "Parameters used" field now redacted to prevent credential leaks [(#10518)](https://github.com/prowler-cloud/prowler/pull/10518)
|
||||
- `cryptography` bumped from 44.0.3 to 46.0.6 ([CVE-2026-26007](https://github.com/pyca/cryptography/security/advisories/GHSA-r6ph-v2qm-q3c2), [CVE-2026-34073](https://github.com/pyca/cryptography/security/advisories/GHSA-m959-cc7f-wv43)), `oci` to 2.169.0, and `alibabacloud-tea-openapi` to 0.4.4 [(#10535)](https://github.com/prowler-cloud/prowler/pull/10535)
|
||||
|
||||
---
|
||||
|
||||
## [5.22.1] (Prowler UNRELEASED)
|
||||
@@ -43,6 +47,7 @@ All notable changes to the **Prowler SDK** are documented in this file.
|
||||
- Oracle Cloud patch for filestorage, blockstorage, kms, and compute services in OCI to allow for region scanning outside home [(#10455)](https://github.com/prowler-cloud/prowler/pull/10472)
|
||||
- Oracle cloud provider now supports multi-region filtering [(#10435)](https://github.com/prowler-cloud/prowler/pull/10473)
|
||||
- `prowler image --registry` failing with `ImageNoImagesProvidedError` due to registry arguments not being forwarded to `ImageProvider` in `init_global_provider` [(#10457)](https://github.com/prowler-cloud/prowler/issues/10457)
|
||||
- Oracle Cloud multi-region support for identity client configuration in blockstorage, identity, and filestorage services [(#10519)](https://github.com/prowler-cloud/prowler/pull/10520)
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -145,6 +145,7 @@ from prowler.providers.mongodbatlas.models import MongoDBAtlasOutputOptions
|
||||
from prowler.providers.nhn.models import NHNOutputOptions
|
||||
from prowler.providers.openstack.models import OpenStackOutputOptions
|
||||
from prowler.providers.oraclecloud.models import OCIOutputOptions
|
||||
from prowler.providers.vercel.models import VercelOutputOptions
|
||||
|
||||
|
||||
def prowler():
|
||||
@@ -398,6 +399,10 @@ def prowler():
|
||||
output_options = OpenStackOutputOptions(
|
||||
args, bulk_checks_metadata, global_provider.identity
|
||||
)
|
||||
elif provider == "vercel":
|
||||
output_options = VercelOutputOptions(
|
||||
args, bulk_checks_metadata, global_provider.identity
|
||||
)
|
||||
|
||||
# Run the quick inventory for the provider if available
|
||||
if hasattr(args, "quick_inventory") and args.quick_inventory:
|
||||
|
||||
@@ -65,6 +65,7 @@ class Provider(str, Enum):
|
||||
ALIBABACLOUD = "alibabacloud"
|
||||
OPENSTACK = "openstack"
|
||||
IMAGE = "image"
|
||||
VERCEL = "vercel"
|
||||
|
||||
|
||||
# Providers that delegate scanning to an external tool (e.g. Trivy, promptfoo)
|
||||
|
||||
@@ -609,3 +609,34 @@ cloudflare:
|
||||
# Maximum number of retries for API requests (default is 2)
|
||||
# Set to 0 to disable retries
|
||||
max_retries: 3
|
||||
|
||||
# Vercel Configuration
|
||||
vercel:
|
||||
# vercel.deployment_production_uses_stable_target
|
||||
# Branches considered stable for production deployments
|
||||
stable_branches:
|
||||
- "main"
|
||||
- "master"
|
||||
# vercel.authentication_token_not_expired & vercel.domain_ssl_certificate_valid
|
||||
# Number of days before expiration to flag a token/certificate as about to expire
|
||||
days_to_expire_threshold: 7
|
||||
# vercel.authentication_no_stale_tokens
|
||||
# Number of days of inactivity before a token is considered stale
|
||||
stale_token_threshold_days: 90
|
||||
# vercel.team_no_stale_invitations
|
||||
# Number of days before a pending invitation is considered stale
|
||||
stale_invitation_threshold_days: 30
|
||||
# vercel.team_member_role_least_privilege
|
||||
# Maximum percentage of team members that can have the OWNER role
|
||||
max_owner_percentage: 20
|
||||
# Maximum number of owners allowed (overrides percentage for large teams)
|
||||
max_owners: 3
|
||||
# vercel.project_environment_no_secrets_in_plain_type
|
||||
# Suffixes that identify secret-like environment variable names
|
||||
secret_suffixes:
|
||||
- "_KEY"
|
||||
- "_SECRET"
|
||||
- "_TOKEN"
|
||||
- "_PASSWORD"
|
||||
- "_API_KEY"
|
||||
- "_PRIVATE_KEY"
|
||||
|
||||
50
prowler/config/vercel_mutelist_example.yaml
Normal file
50
prowler/config/vercel_mutelist_example.yaml
Normal file
@@ -0,0 +1,50 @@
|
||||
### Account, Check and/or Region can be * to apply for all the cases.
|
||||
### Account == <Vercel Team ID>
|
||||
### Region == * (Vercel is a global service, region is always "global")
|
||||
### Resources and tags are lists that can have either Regex or Keywords.
|
||||
### Tags is an optional list that matches on tuples of 'key=value' and are "ANDed" together.
|
||||
### Use an alternation Regex to match one of multiple tags with "ORed" logic.
|
||||
### For each check you can except Accounts, Regions, Resources and/or Tags.
|
||||
########################### MUTELIST EXAMPLE ###########################
|
||||
Mutelist:
|
||||
Accounts:
|
||||
"team_example123":
|
||||
Checks:
|
||||
"project_deployment_protection_enabled":
|
||||
Regions:
|
||||
- "*"
|
||||
Resources:
|
||||
- "prj_internal001"
|
||||
- "prj_internal002"
|
||||
Description: "Mute deployment protection check for internal-only projects"
|
||||
"project_environment_*":
|
||||
Regions:
|
||||
- "*"
|
||||
Resources:
|
||||
- "prj_staging.*"
|
||||
Description: "Mute all environment variable checks for staging projects"
|
||||
"*":
|
||||
Regions:
|
||||
- "*"
|
||||
Resources:
|
||||
- "prj_sandbox"
|
||||
Tags:
|
||||
- "environment=sandbox"
|
||||
Description: "Mute all checks for sandbox project with matching tag"
|
||||
|
||||
"*":
|
||||
Checks:
|
||||
"security_waf_enabled":
|
||||
Regions:
|
||||
- "*"
|
||||
Resources:
|
||||
- "prj_static.*"
|
||||
Description: "Mute WAF check for static-only projects across all teams"
|
||||
"*":
|
||||
Regions:
|
||||
- "*"
|
||||
Resources:
|
||||
- "*"
|
||||
Tags:
|
||||
- "prowler-ignore=true"
|
||||
Description: "Global mute for resources tagged with prowler-ignore=true"
|
||||
@@ -713,6 +713,11 @@ def execute(
|
||||
is_finding_muted_args["project_id"] = (
|
||||
global_provider.identity.project_id
|
||||
)
|
||||
elif global_provider.type == "vercel":
|
||||
team = getattr(global_provider.identity, "team", None)
|
||||
is_finding_muted_args["team_id"] = (
|
||||
team.id if team else global_provider.identity.user_id
|
||||
)
|
||||
for finding in check_findings:
|
||||
if global_provider.type == "cloudflare":
|
||||
is_finding_muted_args["account_id"] = finding.account_id
|
||||
|
||||
@@ -1240,6 +1240,50 @@ class CheckReportMongoDBAtlas(Check_Report):
|
||||
self.location = getattr(resource, "location", self.project_id)
|
||||
|
||||
|
||||
@dataclass
|
||||
class CheckReportVercel(Check_Report):
|
||||
"""Contains the Vercel Check's finding information.
|
||||
|
||||
Vercel is a global platform - team_id is the scoping context.
|
||||
All resource-related attributes are derived from the resource object.
|
||||
"""
|
||||
|
||||
resource_name: str
|
||||
resource_id: str
|
||||
team_id: str
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
metadata: Dict,
|
||||
resource: Any,
|
||||
resource_name: str = None,
|
||||
resource_id: str = None,
|
||||
team_id: str = None,
|
||||
) -> None:
|
||||
"""Initialize the Vercel Check's finding information.
|
||||
|
||||
Args:
|
||||
metadata: Check metadata dictionary
|
||||
resource: The Vercel resource being checked
|
||||
resource_name: Override for resource name
|
||||
resource_id: Override for resource ID
|
||||
team_id: Override for team ID
|
||||
"""
|
||||
super().__init__(metadata, resource)
|
||||
self.resource_name = resource_name or getattr(
|
||||
resource, "name", getattr(resource, "resource_name", "")
|
||||
)
|
||||
self.resource_id = resource_id or getattr(
|
||||
resource, "id", getattr(resource, "resource_id", "")
|
||||
)
|
||||
self.team_id = team_id or getattr(resource, "team_id", "")
|
||||
|
||||
@property
|
||||
def region(self) -> str:
|
||||
"""Vercel is global - return 'global'."""
|
||||
return "global"
|
||||
|
||||
|
||||
# Testing Pending
|
||||
def load_check_metadata(metadata_file: str) -> CheckMetadata:
|
||||
"""
|
||||
|
||||
@@ -19,6 +19,8 @@ from prowler.providers.common.arguments import (
|
||||
validate_provider_arguments,
|
||||
)
|
||||
|
||||
SENSITIVE_ARGUMENTS = frozenset({"--shodan"})
|
||||
|
||||
|
||||
class ProwlerArgumentParser:
|
||||
# Set the default parser
|
||||
@@ -27,10 +29,10 @@ class ProwlerArgumentParser:
|
||||
self.parser = argparse.ArgumentParser(
|
||||
prog="prowler",
|
||||
formatter_class=RawTextHelpFormatter,
|
||||
usage="prowler [-h] [--version] {aws,azure,gcp,kubernetes,m365,github,googleworkspace,nhn,mongodbatlas,oraclecloud,alibabacloud,cloudflare,openstack,dashboard,iac,image} ...",
|
||||
usage="prowler [-h] [--version] {aws,azure,gcp,kubernetes,m365,github,googleworkspace,nhn,mongodbatlas,oraclecloud,alibabacloud,cloudflare,openstack,vercel,dashboard,iac,image} ...",
|
||||
epilog="""
|
||||
Available Cloud Providers:
|
||||
{aws,azure,gcp,kubernetes,m365,github,googleworkspace,iac,llm,image,nhn,mongodbatlas,oraclecloud,alibabacloud,cloudflare,openstack}
|
||||
{aws,azure,gcp,kubernetes,m365,github,googleworkspace,iac,llm,image,nhn,mongodbatlas,oraclecloud,alibabacloud,cloudflare,openstack,vercel}
|
||||
aws AWS Provider
|
||||
azure Azure Provider
|
||||
gcp GCP Provider
|
||||
@@ -47,6 +49,7 @@ Available Cloud Providers:
|
||||
image Container Image Provider
|
||||
nhn NHN Provider (Unofficial)
|
||||
mongodbatlas MongoDB Atlas Provider (Beta)
|
||||
vercel Vercel Provider
|
||||
|
||||
Available components:
|
||||
dashboard Local dashboard
|
||||
|
||||
68
prowler/lib/cli/redact.py
Normal file
68
prowler/lib/cli/redact.py
Normal file
@@ -0,0 +1,68 @@
|
||||
from functools import lru_cache
|
||||
from importlib import import_module
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.providers.common.provider import Provider, providers_path
|
||||
|
||||
REDACTED_VALUE = "REDACTED"
|
||||
|
||||
|
||||
@lru_cache(maxsize=None)
|
||||
def get_sensitive_arguments() -> frozenset:
|
||||
"""Collect SENSITIVE_ARGUMENTS from all provider argument modules and the common parser."""
|
||||
sensitive: set[str] = set()
|
||||
|
||||
# Common parser sensitive arguments (e.g., --shodan)
|
||||
try:
|
||||
parser_module = import_module("prowler.lib.cli.parser")
|
||||
sensitive.update(getattr(parser_module, "SENSITIVE_ARGUMENTS", frozenset()))
|
||||
except Exception as error:
|
||||
logger.debug(f"Could not load SENSITIVE_ARGUMENTS from parser: {error}")
|
||||
|
||||
# Provider-specific sensitive arguments
|
||||
for provider in Provider.get_available_providers():
|
||||
try:
|
||||
module = import_module(
|
||||
f"{providers_path}.{provider}.lib.arguments.arguments"
|
||||
)
|
||||
sensitive.update(getattr(module, "SENSITIVE_ARGUMENTS", frozenset()))
|
||||
except Exception as error:
|
||||
logger.debug(f"Could not load SENSITIVE_ARGUMENTS from {provider}: {error}")
|
||||
|
||||
return frozenset(sensitive)
|
||||
|
||||
|
||||
def redact_argv(argv: list[str]) -> str:
|
||||
"""Redact values of sensitive CLI flags from an argument list.
|
||||
|
||||
Handles both ``--flag value`` and ``--flag=value`` syntax.
|
||||
Returns a single joined string suitable for display.
|
||||
"""
|
||||
sensitive = get_sensitive_arguments()
|
||||
result: list[str] = []
|
||||
skip_next = False
|
||||
|
||||
for i, arg in enumerate(argv):
|
||||
if skip_next:
|
||||
result.append(REDACTED_VALUE)
|
||||
skip_next = False
|
||||
continue
|
||||
|
||||
# Handle --flag=value syntax
|
||||
if "=" in arg:
|
||||
flag = arg.split("=", 1)[0]
|
||||
if flag in sensitive:
|
||||
result.append(f"{flag}={REDACTED_VALUE}")
|
||||
continue
|
||||
|
||||
# Handle --flag value syntax
|
||||
if arg in sensitive:
|
||||
result.append(arg)
|
||||
# Only redact the next token if it exists and is not another flag
|
||||
if i + 1 < len(argv) and not argv[i + 1].startswith("-"):
|
||||
skip_next = True
|
||||
continue
|
||||
|
||||
result.append(arg)
|
||||
|
||||
return " ".join(result)
|
||||
@@ -404,6 +404,23 @@ class Finding(BaseModel):
|
||||
output_data["resource_uid"] = check_output.resource_id
|
||||
output_data["region"] = check_output.zone_name
|
||||
|
||||
elif provider.type == "vercel":
|
||||
output_data["auth_method"] = "api_token"
|
||||
team = get_nested_attribute(provider, "identity.team")
|
||||
output_data["account_uid"] = (
|
||||
team.id
|
||||
if team
|
||||
else get_nested_attribute(provider, "identity.user_id")
|
||||
)
|
||||
output_data["account_name"] = (
|
||||
team.name
|
||||
if team
|
||||
else get_nested_attribute(provider, "identity.username")
|
||||
)
|
||||
output_data["resource_name"] = check_output.resource_name
|
||||
output_data["resource_uid"] = check_output.resource_id
|
||||
output_data["region"] = "global"
|
||||
|
||||
elif provider.type == "alibabacloud":
|
||||
output_data["auth_method"] = get_nested_attribute(
|
||||
provider, "identity.identity_arn"
|
||||
|
||||
@@ -9,6 +9,7 @@ from prowler.config.config import (
|
||||
square_logo_img,
|
||||
timestamp,
|
||||
)
|
||||
from prowler.lib.cli.redact import redact_argv
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.lib.outputs.output import Finding, Output
|
||||
from prowler.lib.outputs.utils import parse_html_string, unroll_dict
|
||||
@@ -196,7 +197,7 @@ class HTML(Output):
|
||||
</div>
|
||||
</li>
|
||||
<li class="list-group-item">
|
||||
<b>Parameters used:</b> {" ".join(sys.argv[1:]) if from_cli else ""}
|
||||
<b>Parameters used:</b> {redact_argv(sys.argv[1:]) if from_cli else ""}
|
||||
</li>
|
||||
<li class="list-group-item">
|
||||
<b>Date:</b> {timestamp.isoformat()}
|
||||
@@ -1331,6 +1332,71 @@ class HTML(Output):
|
||||
)
|
||||
return ""
|
||||
|
||||
@staticmethod
|
||||
def get_vercel_assessment_summary(provider: Provider) -> str:
|
||||
"""
|
||||
get_vercel_assessment_summary gets the HTML assessment summary for the Vercel provider
|
||||
|
||||
Args:
|
||||
provider (Provider): the Vercel provider object
|
||||
|
||||
Returns:
|
||||
str: HTML assessment summary for the Vercel provider
|
||||
"""
|
||||
try:
|
||||
assessment_items = ""
|
||||
|
||||
team = getattr(provider.identity, "team", None)
|
||||
if team:
|
||||
assessment_items += f"""
|
||||
<li class="list-group-item">
|
||||
<b>Team:</b> {team.name} ({team.id})
|
||||
</li>"""
|
||||
|
||||
credentials_items = """
|
||||
<li class="list-group-item">
|
||||
<b>Authentication:</b> API Token
|
||||
</li>"""
|
||||
|
||||
email = getattr(provider.identity, "email", None)
|
||||
if email:
|
||||
credentials_items += f"""
|
||||
<li class="list-group-item">
|
||||
<b>Email:</b> {email}
|
||||
</li>"""
|
||||
|
||||
username = getattr(provider.identity, "username", None)
|
||||
if username:
|
||||
credentials_items += f"""
|
||||
<li class="list-group-item">
|
||||
<b>Username:</b> {username}
|
||||
</li>"""
|
||||
|
||||
return f"""
|
||||
<div class="col-md-2">
|
||||
<div class="card">
|
||||
<div class="card-header">
|
||||
Vercel Assessment Summary
|
||||
</div>
|
||||
<ul class="list-group list-group-flush">{assessment_items}
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-4">
|
||||
<div class="card">
|
||||
<div class="card-header">
|
||||
Vercel Credentials
|
||||
</div>
|
||||
<ul class="list-group list-group-flush">{credentials_items}
|
||||
</ul>
|
||||
</div>
|
||||
</div>"""
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
|
||||
)
|
||||
return ""
|
||||
|
||||
@staticmethod
|
||||
def get_assessment_summary(provider: Provider) -> str:
|
||||
"""
|
||||
|
||||
@@ -38,6 +38,8 @@ def stdout_report(finding, color, verbose, status, fix):
|
||||
details = finding.zone_name
|
||||
if finding.check_metadata.Provider == "googleworkspace":
|
||||
details = finding.location
|
||||
if finding.check_metadata.Provider == "vercel":
|
||||
details = finding.region
|
||||
|
||||
if (verbose or fix) and (not status or finding.status in status):
|
||||
if finding.muted:
|
||||
|
||||
@@ -99,6 +99,14 @@ def display_summary_table(
|
||||
elif provider.type == "image":
|
||||
entity_type = "Image"
|
||||
audited_entities = ", ".join(provider.images)
|
||||
elif provider.type == "vercel":
|
||||
entity_type = "Team"
|
||||
if provider.identity.team:
|
||||
audited_entities = (
|
||||
f"{provider.identity.team.name} ({provider.identity.team.slug})"
|
||||
)
|
||||
else:
|
||||
audited_entities = provider.identity.username or "Personal Account"
|
||||
|
||||
# Check if there are findings and that they are not all MANUAL
|
||||
if findings and not all(finding.status == "MANUAL" for finding in findings):
|
||||
|
||||
@@ -371,6 +371,13 @@ class Provider(ABC):
|
||||
mutelist_path=arguments.mutelist_file,
|
||||
fixer_config=fixer_config,
|
||||
)
|
||||
elif "vercel" in provider_class_name.lower():
|
||||
provider_class(
|
||||
projects=getattr(arguments, "project", None),
|
||||
config_path=arguments.config_file,
|
||||
mutelist_path=arguments.mutelist_file,
|
||||
fixer_config=fixer_config,
|
||||
)
|
||||
|
||||
except TypeError as error:
|
||||
logger.critical(
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
SENSITIVE_ARGUMENTS = frozenset({"--personal-access-token", "--oauth-app-token"})
|
||||
|
||||
|
||||
def init_parser(self):
|
||||
"""Init the Github Provider CLI parser"""
|
||||
github_parser = self.subparsers.add_parser(
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
import re
|
||||
|
||||
SENSITIVE_ARGUMENTS = frozenset({"--personal-access-token", "--oauth-app-token"})
|
||||
|
||||
SCANNERS_CHOICES = [
|
||||
"vuln",
|
||||
"misconfig",
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
SENSITIVE_ARGUMENTS = frozenset({"--atlas-private-key"})
|
||||
|
||||
|
||||
def init_parser(self):
|
||||
"""Initialize the MongoDB Atlas Provider CLI parser"""
|
||||
mongodbatlas_parser = self.subparsers.add_parser(
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
SENSITIVE_ARGUMENTS = frozenset({"--nhn-password"})
|
||||
|
||||
|
||||
def init_parser(self):
|
||||
"""Init the NHN Provider CLI parser"""
|
||||
nhn_parser = self.subparsers.add_parser(
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
from argparse import Namespace
|
||||
|
||||
SENSITIVE_ARGUMENTS = frozenset({"--os-password"})
|
||||
|
||||
|
||||
def init_parser(self):
|
||||
"""Initialize the OpenStack provider CLI parser."""
|
||||
|
||||
@@ -111,7 +111,8 @@ class BlockStorage(OCIService):
|
||||
try:
|
||||
# Get availability domains for this compartment
|
||||
identity_client = self._create_oci_client(
|
||||
oci.identity.IdentityClient
|
||||
oci.identity.IdentityClient,
|
||||
config_overrides={"region": regional_client.region},
|
||||
)
|
||||
availability_domains = identity_client.list_availability_domains(
|
||||
compartment_id=compartment.id
|
||||
|
||||
@@ -39,7 +39,8 @@ class Filestorage(OCIService):
|
||||
try:
|
||||
# Get availability domains for this compartment
|
||||
identity_client = self._create_oci_client(
|
||||
oci.identity.IdentityClient
|
||||
oci.identity.IdentityClient,
|
||||
config_overrides={"region": regional_client.region},
|
||||
)
|
||||
availability_domains = identity_client.list_availability_domains(
|
||||
compartment_id=compartment.id
|
||||
|
||||
@@ -35,7 +35,7 @@ class Identity(OCIService):
|
||||
self.__threading_call__(self.__list_dynamic_groups__)
|
||||
self.__threading_call__(self.__list_domains__)
|
||||
self.__threading_call__(self.__list_domain_password_policies__)
|
||||
self.__get_password_policy__()
|
||||
self.__threading_call__(self.__get_password_policy__)
|
||||
self.__threading_call__(self.__search_root_compartment_resources__)
|
||||
self.__threading_call__(self.__search_active_non_root_compartments__)
|
||||
|
||||
@@ -49,10 +49,9 @@ class Identity(OCIService):
|
||||
Returns:
|
||||
Identity client instance
|
||||
"""
|
||||
client_region = self.regional_clients.get(region)
|
||||
if client_region:
|
||||
return self._create_oci_client(oci.identity.IdentityClient)
|
||||
return None
|
||||
return self._create_oci_client(
|
||||
oci.identity.IdentityClient, config_overrides={"region": region}
|
||||
)
|
||||
|
||||
def __list_users__(self, regional_client):
|
||||
"""
|
||||
@@ -66,7 +65,7 @@ class Identity(OCIService):
|
||||
if regional_client.region not in self.provider.identity.region:
|
||||
return
|
||||
|
||||
identity_client = self._create_oci_client(oci.identity.IdentityClient)
|
||||
identity_client = self.__get_client__(regional_client.region)
|
||||
|
||||
logger.info("Identity - Listing Users...")
|
||||
|
||||
@@ -316,7 +315,7 @@ class Identity(OCIService):
|
||||
if regional_client.region not in self.provider.identity.region:
|
||||
return
|
||||
|
||||
identity_client = self._create_oci_client(oci.identity.IdentityClient)
|
||||
identity_client = self.__get_client__(regional_client.region)
|
||||
|
||||
logger.info("Identity - Listing Groups...")
|
||||
|
||||
@@ -359,7 +358,7 @@ class Identity(OCIService):
|
||||
if regional_client.region not in self.provider.identity.region:
|
||||
return
|
||||
|
||||
identity_client = self._create_oci_client(oci.identity.IdentityClient)
|
||||
identity_client = self.__get_client__(regional_client.region)
|
||||
|
||||
logger.info("Identity - Listing Policies...")
|
||||
|
||||
@@ -404,7 +403,7 @@ class Identity(OCIService):
|
||||
if regional_client.region not in self.provider.identity.region:
|
||||
return
|
||||
|
||||
identity_client = self._create_oci_client(oci.identity.IdentityClient)
|
||||
identity_client = self.__get_client__(regional_client.region)
|
||||
|
||||
logger.info("Identity - Listing Dynamic Groups...")
|
||||
|
||||
@@ -452,7 +451,7 @@ class Identity(OCIService):
|
||||
if regional_client.region not in self.provider.identity.region:
|
||||
return
|
||||
|
||||
identity_client = self._create_oci_client(oci.identity.IdentityClient)
|
||||
identity_client = self.__get_client__(regional_client.region)
|
||||
|
||||
logger.info("Identity - Listing Identity Domains...")
|
||||
|
||||
@@ -549,10 +548,13 @@ class Identity(OCIService):
|
||||
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
def __get_password_policy__(self):
|
||||
def __get_password_policy__(self, regional_client):
|
||||
"""Get the password policy for the tenancy."""
|
||||
try:
|
||||
identity_client = self._create_oci_client(oci.identity.IdentityClient)
|
||||
if regional_client.region not in self.provider.identity.region:
|
||||
return
|
||||
|
||||
identity_client = self.__get_client__(regional_client.region)
|
||||
|
||||
logger.info("Identity - Getting Password Policy...")
|
||||
|
||||
@@ -584,7 +586,8 @@ class Identity(OCIService):
|
||||
|
||||
# Create search client using the helper method for proper authentication
|
||||
search_client = self._create_oci_client(
|
||||
oci.resource_search.ResourceSearchClient
|
||||
oci.resource_search.ResourceSearchClient,
|
||||
config_overrides={"region": regional_client.region},
|
||||
)
|
||||
|
||||
# Query to search for resources in root compartment
|
||||
@@ -631,7 +634,8 @@ class Identity(OCIService):
|
||||
|
||||
# Create search client using the helper method for proper authentication
|
||||
search_client = self._create_oci_client(
|
||||
oci.resource_search.ResourceSearchClient
|
||||
oci.resource_search.ResourceSearchClient,
|
||||
config_overrides={"region": regional_client.region},
|
||||
)
|
||||
|
||||
# Query to search for active compartments in the tenancy (excluding root)
|
||||
|
||||
0
prowler/providers/vercel/__init__.py
Normal file
0
prowler/providers/vercel/__init__.py
Normal file
0
prowler/providers/vercel/exceptions/__init__.py
Normal file
0
prowler/providers/vercel/exceptions/__init__.py
Normal file
127
prowler/providers/vercel/exceptions/exceptions.py
Normal file
127
prowler/providers/vercel/exceptions/exceptions.py
Normal file
@@ -0,0 +1,127 @@
|
||||
# Exceptions codes from 13000 to 13999 are reserved for Vercel exceptions
|
||||
from prowler.exceptions.exceptions import ProwlerException
|
||||
|
||||
|
||||
class VercelBaseException(ProwlerException):
|
||||
"""Base exception for Vercel provider errors."""
|
||||
|
||||
VERCEL_ERROR_CODES = {
|
||||
(13000, "VercelCredentialsError"): {
|
||||
"message": "Vercel credentials not found or invalid.",
|
||||
"remediation": "Set the VERCEL_TOKEN environment variable with a valid Vercel API token. Generate one at https://vercel.com/account/tokens.",
|
||||
},
|
||||
(13001, "VercelAuthenticationError"): {
|
||||
"message": "Authentication to Vercel API failed.",
|
||||
"remediation": "Verify your Vercel API token is valid and has not expired. Check at https://vercel.com/account/tokens.",
|
||||
},
|
||||
(13002, "VercelSessionError"): {
|
||||
"message": "Failed to create a Vercel API session.",
|
||||
"remediation": "Check network connectivity and ensure the Vercel API is reachable at https://api.vercel.com.",
|
||||
},
|
||||
(13003, "VercelIdentityError"): {
|
||||
"message": "Failed to retrieve Vercel identity information.",
|
||||
"remediation": "Ensure the API token has permissions to read user and team information.",
|
||||
},
|
||||
(13004, "VercelInvalidTeamError"): {
|
||||
"message": "The specified Vercel team was not found or is not accessible.",
|
||||
"remediation": "Verify the team ID or slug is correct and that your token has access to the team.",
|
||||
},
|
||||
(13005, "VercelInvalidProviderIdError"): {
|
||||
"message": "The provided Vercel provider ID is invalid.",
|
||||
"remediation": "Ensure the provider UID matches a valid Vercel team ID or user ID format.",
|
||||
},
|
||||
(13006, "VercelAPIError"): {
|
||||
"message": "An error occurred while calling the Vercel API.",
|
||||
"remediation": "Check the Vercel API status at https://www.vercel-status.com/ and retry the request.",
|
||||
},
|
||||
(13007, "VercelRateLimitError"): {
|
||||
"message": "Rate limited by the Vercel API.",
|
||||
"remediation": "Wait and retry. Vercel API rate limits vary by endpoint. See https://vercel.com/docs/rest-api#rate-limits.",
|
||||
},
|
||||
(13008, "VercelPlanLimitationError"): {
|
||||
"message": "This feature requires a higher Vercel plan.",
|
||||
"remediation": "Some security features (e.g., WAF managed rulesets) require Vercel Enterprise. Upgrade your plan or skip these checks.",
|
||||
},
|
||||
}
|
||||
|
||||
def __init__(self, code, file=None, original_exception=None, message=None):
|
||||
provider = "Vercel"
|
||||
error_info = self.VERCEL_ERROR_CODES.get((code, self.__class__.__name__))
|
||||
if error_info is None:
|
||||
error_info = {
|
||||
"message": message or "Unknown Vercel error.",
|
||||
"remediation": "Check the Vercel API documentation for more details.",
|
||||
}
|
||||
elif message:
|
||||
error_info = error_info.copy()
|
||||
error_info["message"] = message
|
||||
super().__init__(
|
||||
code=code,
|
||||
source=provider,
|
||||
file=file,
|
||||
original_exception=original_exception,
|
||||
error_info=error_info,
|
||||
)
|
||||
|
||||
|
||||
class VercelCredentialsError(VercelBaseException):
|
||||
def __init__(self, file=None, original_exception=None, message=None):
|
||||
super().__init__(
|
||||
13000, file=file, original_exception=original_exception, message=message
|
||||
)
|
||||
|
||||
|
||||
class VercelAuthenticationError(VercelBaseException):
|
||||
def __init__(self, file=None, original_exception=None, message=None):
|
||||
super().__init__(
|
||||
13001, file=file, original_exception=original_exception, message=message
|
||||
)
|
||||
|
||||
|
||||
class VercelSessionError(VercelBaseException):
|
||||
def __init__(self, file=None, original_exception=None, message=None):
|
||||
super().__init__(
|
||||
13002, file=file, original_exception=original_exception, message=message
|
||||
)
|
||||
|
||||
|
||||
class VercelIdentityError(VercelBaseException):
|
||||
def __init__(self, file=None, original_exception=None, message=None):
|
||||
super().__init__(
|
||||
13003, file=file, original_exception=original_exception, message=message
|
||||
)
|
||||
|
||||
|
||||
class VercelInvalidTeamError(VercelBaseException):
|
||||
def __init__(self, file=None, original_exception=None, message=None):
|
||||
super().__init__(
|
||||
13004, file=file, original_exception=original_exception, message=message
|
||||
)
|
||||
|
||||
|
||||
class VercelInvalidProviderIdError(VercelBaseException):
|
||||
def __init__(self, file=None, original_exception=None, message=None):
|
||||
super().__init__(
|
||||
13005, file=file, original_exception=original_exception, message=message
|
||||
)
|
||||
|
||||
|
||||
class VercelAPIError(VercelBaseException):
|
||||
def __init__(self, file=None, original_exception=None, message=None):
|
||||
super().__init__(
|
||||
13006, file=file, original_exception=original_exception, message=message
|
||||
)
|
||||
|
||||
|
||||
class VercelRateLimitError(VercelBaseException):
|
||||
def __init__(self, file=None, original_exception=None, message=None):
|
||||
super().__init__(
|
||||
13007, file=file, original_exception=original_exception, message=message
|
||||
)
|
||||
|
||||
|
||||
class VercelPlanLimitationError(VercelBaseException):
|
||||
def __init__(self, file=None, original_exception=None, message=None):
|
||||
super().__init__(
|
||||
13008, file=file, original_exception=original_exception, message=message
|
||||
)
|
||||
0
prowler/providers/vercel/lib/__init__.py
Normal file
0
prowler/providers/vercel/lib/__init__.py
Normal file
0
prowler/providers/vercel/lib/arguments/__init__.py
Normal file
0
prowler/providers/vercel/lib/arguments/__init__.py
Normal file
18
prowler/providers/vercel/lib/arguments/arguments.py
Normal file
18
prowler/providers/vercel/lib/arguments/arguments.py
Normal file
@@ -0,0 +1,18 @@
|
||||
def init_parser(self):
|
||||
"""Init the Vercel provider CLI parser."""
|
||||
vercel_parser = self.subparsers.add_parser(
|
||||
"vercel",
|
||||
parents=[self.common_providers_parser],
|
||||
help="Vercel Provider",
|
||||
)
|
||||
|
||||
# Scope
|
||||
scope_group = vercel_parser.add_argument_group("Scope")
|
||||
scope_group.add_argument(
|
||||
"--project",
|
||||
"--projects",
|
||||
nargs="*",
|
||||
default=None,
|
||||
metavar="PROJECT",
|
||||
help="Filter scan to specific Vercel project names or IDs.",
|
||||
)
|
||||
0
prowler/providers/vercel/lib/mutelist/__init__.py
Normal file
0
prowler/providers/vercel/lib/mutelist/__init__.py
Normal file
20
prowler/providers/vercel/lib/mutelist/mutelist.py
Normal file
20
prowler/providers/vercel/lib/mutelist/mutelist.py
Normal file
@@ -0,0 +1,20 @@
|
||||
from prowler.lib.check.models import CheckReportVercel
|
||||
from prowler.lib.mutelist.mutelist import Mutelist
|
||||
from prowler.lib.outputs.utils import unroll_dict, unroll_tags
|
||||
|
||||
|
||||
class VercelMutelist(Mutelist):
|
||||
"""Vercel-specific mutelist helper."""
|
||||
|
||||
def is_finding_muted(
|
||||
self,
|
||||
finding: CheckReportVercel,
|
||||
team_id: str,
|
||||
) -> bool:
|
||||
return self.is_muted(
|
||||
team_id,
|
||||
finding.check_metadata.CheckID,
|
||||
"global", # Vercel is a global service
|
||||
finding.resource_id or finding.resource_name,
|
||||
unroll_dict(unroll_tags(finding.resource_tags)),
|
||||
)
|
||||
0
prowler/providers/vercel/lib/service/__init__.py
Normal file
0
prowler/providers/vercel/lib/service/__init__.py
Normal file
177
prowler/providers/vercel/lib/service/service.py
Normal file
177
prowler/providers/vercel/lib/service/service.py
Normal file
@@ -0,0 +1,177 @@
|
||||
import time
|
||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||
|
||||
import requests
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.providers.vercel.exceptions.exceptions import (
|
||||
VercelAPIError,
|
||||
VercelRateLimitError,
|
||||
)
|
||||
|
||||
MAX_WORKERS = 10
|
||||
|
||||
|
||||
class VercelService:
|
||||
"""Base class for Vercel services to share provider context and HTTP client."""
|
||||
|
||||
def __init__(self, service: str, provider):
|
||||
self.provider = provider
|
||||
self.audit_config = provider.audit_config
|
||||
self.fixer_config = provider.fixer_config
|
||||
self.service = service.lower() if not service.islower() else service
|
||||
|
||||
# Set up HTTP session with Bearer token
|
||||
self._http_session = requests.Session()
|
||||
self._http_session.headers.update(
|
||||
{
|
||||
"Authorization": f"Bearer {provider.session.token}",
|
||||
"Content-Type": "application/json",
|
||||
}
|
||||
)
|
||||
self._base_url = provider.session.base_url
|
||||
self._team_id = provider.session.team_id
|
||||
|
||||
# Thread pool for parallel API calls
|
||||
self.thread_pool = ThreadPoolExecutor(max_workers=MAX_WORKERS)
|
||||
|
||||
@property
|
||||
def _all_team_ids(self) -> list[str]:
|
||||
"""Return team IDs to scan: explicit team_id, or all auto-discovered teams."""
|
||||
if self._team_id:
|
||||
return [self._team_id]
|
||||
return [t.id for t in self.provider.identity.teams]
|
||||
|
||||
def _get(self, path: str, params: dict = None) -> dict:
|
||||
"""Make a rate-limit-aware GET request to the Vercel API.
|
||||
|
||||
Args:
|
||||
path: API path (e.g., "/v9/projects").
|
||||
params: Query parameters.
|
||||
|
||||
Returns:
|
||||
Parsed JSON response as dict.
|
||||
|
||||
Raises:
|
||||
VercelRateLimitError: If rate limited after retries.
|
||||
VercelAPIError: If the API returns an error.
|
||||
"""
|
||||
if params is None:
|
||||
params = {}
|
||||
|
||||
# Append teamId if operating in team scope
|
||||
if self._team_id and "teamId" not in params:
|
||||
params["teamId"] = self._team_id
|
||||
|
||||
url = f"{self._base_url}{path}"
|
||||
max_retries = self.audit_config.get("max_retries", 3)
|
||||
|
||||
for attempt in range(max_retries + 1):
|
||||
try:
|
||||
response = self._http_session.get(url, params=params, timeout=30)
|
||||
|
||||
if response.status_code == 429:
|
||||
retry_after = int(response.headers.get("Retry-After", 5))
|
||||
if attempt < max_retries:
|
||||
logger.warning(
|
||||
f"{self.service} - Rate limited, retrying after {retry_after}s (attempt {attempt + 1}/{max_retries})"
|
||||
)
|
||||
time.sleep(retry_after)
|
||||
continue
|
||||
raise VercelRateLimitError(
|
||||
file=__file__,
|
||||
message=f"Rate limited on {path} after {max_retries} retries.",
|
||||
)
|
||||
|
||||
if response.status_code == 403:
|
||||
# Plan limitation or permission error — return None for graceful handling
|
||||
logger.warning(
|
||||
f"{self.service} - Access denied for {path} (403). "
|
||||
"This may be a plan limitation."
|
||||
)
|
||||
return None
|
||||
|
||||
response.raise_for_status()
|
||||
return response.json()
|
||||
|
||||
except VercelRateLimitError:
|
||||
raise
|
||||
except requests.exceptions.HTTPError as error:
|
||||
raise VercelAPIError(
|
||||
file=__file__,
|
||||
original_exception=error,
|
||||
message=f"HTTP error on {path}: {error}",
|
||||
)
|
||||
except requests.exceptions.RequestException as error:
|
||||
if attempt < max_retries:
|
||||
logger.warning(
|
||||
f"{self.service} - Request error on {path}, retrying (attempt {attempt + 1}/{max_retries}): {error}"
|
||||
)
|
||||
time.sleep(2**attempt)
|
||||
continue
|
||||
raise VercelAPIError(
|
||||
file=__file__,
|
||||
original_exception=error,
|
||||
message=f"Request failed on {path} after {max_retries} retries: {error}",
|
||||
)
|
||||
|
||||
return {}
|
||||
|
||||
def _paginate(self, path: str, key: str, params: dict = None) -> list:
|
||||
"""Paginate through a Vercel API list endpoint.
|
||||
|
||||
Vercel uses cursor-based pagination with a `pagination.next` field.
|
||||
|
||||
Args:
|
||||
path: API path.
|
||||
key: JSON key containing the list of items.
|
||||
params: Additional query parameters.
|
||||
|
||||
Returns:
|
||||
Combined list of all items across pages.
|
||||
"""
|
||||
if params is None:
|
||||
params = {}
|
||||
|
||||
params["limit"] = params.get("limit", 100)
|
||||
all_items = []
|
||||
|
||||
while True:
|
||||
data = self._get(path, params)
|
||||
if data is None:
|
||||
break
|
||||
|
||||
items = data.get(key, [])
|
||||
all_items.extend(items)
|
||||
|
||||
# Check for next page cursor
|
||||
pagination = data.get("pagination", {})
|
||||
next_cursor = pagination.get("next")
|
||||
if not next_cursor:
|
||||
break
|
||||
|
||||
params["until"] = next_cursor
|
||||
|
||||
return all_items
|
||||
|
||||
def __threading_call__(self, call, iterator):
|
||||
"""Execute a function across multiple items using threading."""
|
||||
items = list(iterator) if not isinstance(iterator, list) else iterator
|
||||
|
||||
futures = {self.thread_pool.submit(call, item): item for item in items}
|
||||
results = []
|
||||
|
||||
for future in as_completed(futures):
|
||||
try:
|
||||
result = future.result()
|
||||
if result is not None:
|
||||
results.append(result)
|
||||
except Exception as error:
|
||||
item = futures[future]
|
||||
item_id = getattr(item, "id", str(item))
|
||||
logger.error(
|
||||
f"{self.service} - Threading error processing {item_id}: "
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
return results
|
||||
52
prowler/providers/vercel/models.py
Normal file
52
prowler/providers/vercel/models.py
Normal file
@@ -0,0 +1,52 @@
|
||||
from typing import Any, Optional
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from prowler.config.config import output_file_timestamp
|
||||
from prowler.providers.common.models import ProviderOutputOptions
|
||||
|
||||
|
||||
class VercelSession(BaseModel):
|
||||
"""Vercel API session information."""
|
||||
|
||||
token: str
|
||||
team_id: Optional[str] = None
|
||||
base_url: str = "https://api.vercel.com"
|
||||
http_session: Any = Field(default=None, exclude=True)
|
||||
|
||||
|
||||
class VercelTeamInfo(BaseModel):
|
||||
"""Vercel team metadata."""
|
||||
|
||||
id: str
|
||||
name: str
|
||||
slug: str
|
||||
|
||||
|
||||
class VercelIdentityInfo(BaseModel):
|
||||
"""Vercel identity and scoping information."""
|
||||
|
||||
user_id: Optional[str] = None
|
||||
username: Optional[str] = None
|
||||
email: Optional[str] = None
|
||||
team: Optional[VercelTeamInfo] = None
|
||||
teams: list[VercelTeamInfo] = Field(default_factory=list)
|
||||
|
||||
|
||||
class VercelOutputOptions(ProviderOutputOptions):
|
||||
"""Customize output filenames for Vercel scans."""
|
||||
|
||||
def __init__(self, arguments, bulk_checks_metadata, identity: VercelIdentityInfo):
|
||||
super().__init__(arguments, bulk_checks_metadata)
|
||||
if (
|
||||
not hasattr(arguments, "output_filename")
|
||||
or arguments.output_filename is None
|
||||
):
|
||||
account_fragment = (
|
||||
identity.team.slug if identity.team else identity.username or "vercel"
|
||||
)
|
||||
self.output_filename = (
|
||||
f"prowler-output-{account_fragment}-{output_file_timestamp}"
|
||||
)
|
||||
else:
|
||||
self.output_filename = arguments.output_filename
|
||||
0
prowler/providers/vercel/services/__init__.py
Normal file
0
prowler/providers/vercel/services/__init__.py
Normal file
@@ -0,0 +1,6 @@
|
||||
from prowler.providers.common.provider import Provider
|
||||
from prowler.providers.vercel.services.authentication.authentication_service import (
|
||||
Authentication,
|
||||
)
|
||||
|
||||
authentication_client = Authentication(Provider.get_global_provider())
|
||||
@@ -0,0 +1,38 @@
|
||||
{
|
||||
"Provider": "vercel",
|
||||
"CheckID": "authentication_no_stale_tokens",
|
||||
"CheckTitle": "Vercel API tokens are not stale or unused for over 90 days",
|
||||
"CheckType": [],
|
||||
"ServiceName": "authentication",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "NotDefined",
|
||||
"ResourceGroup": "governance",
|
||||
"Description": "**Vercel API tokens** are assessed for **staleness** by checking whether each token has been active within the last 90 days. Stale tokens that remain unused for extended periods represent unnecessary access credentials that increase the attack surface. Tokens with no recorded activity are also flagged.",
|
||||
"Risk": "Stale tokens that have not been used for over **90 days** may belong to decommissioned integrations, former team members, or forgotten automation. These tokens remain **valid** and could be compromised or misused without detection, as their inactivity makes suspicious usage harder to notice in access logs.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://vercel.com/docs/rest-api#authentication"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Sign in to the Vercel dashboard\n2. Navigate to Account Settings > Tokens\n3. Review the last active date for each token\n4. Revoke or delete tokens that have not been used in over 90 days\n5. Contact token owners to confirm whether the token is still needed\n6. Implement a regular token review process (e.g., quarterly)",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Regularly audit API tokens and revoke any that have not been used within 90 days. Implement a token lifecycle management process that includes periodic reviews, automatic expiration dates, and documentation of each token's purpose and owner.",
|
||||
"Url": "https://hub.prowler.com/checks/vercel/authentication_no_stale_tokens"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"trust-boundaries"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [
|
||||
"authentication_token_not_expired"
|
||||
],
|
||||
"Notes": "The stale threshold is configurable via ``stale_token_threshold_days`` in audit_config (default: 90 days). Tokens with no recorded activity (active_at is None) are considered stale."
|
||||
}
|
||||
@@ -0,0 +1,69 @@
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportVercel
|
||||
from prowler.providers.vercel.services.authentication.authentication_client import (
|
||||
authentication_client,
|
||||
)
|
||||
|
||||
|
||||
class authentication_no_stale_tokens(Check):
    """Flag Vercel API tokens that have not been used recently.

    A token is considered stale when its last activity timestamp is older
    than the configured threshold (audit_config key
    ``stale_token_threshold_days``, default 90 days) or when it has no
    recorded activity at all. Stale tokens may belong to decommissioned
    integrations or former team members.
    """

    def execute(self) -> List[CheckReportVercel]:
        """Produce one PASS/FAIL report per token.

        Returns:
            List[CheckReportVercel]: One report for every token known to
            the authentication service.
        """
        reports: List[CheckReportVercel] = []
        current_time = datetime.now(timezone.utc)
        threshold_days = authentication_client.audit_config.get(
            "stale_token_threshold_days", 90
        )
        cutoff = current_time - timedelta(days=threshold_days)

        for token in authentication_client.tokens.values():
            report = CheckReportVercel(
                metadata=self.metadata(),
                resource=token,
                resource_name=token.name,
                resource_id=token.id,
            )

            if token.active_at is None:
                # No activity ever recorded -> treat the token as stale.
                report.status = "FAIL"
                report.status_extended = (
                    f"Token '{token.name}' ({token.id}) has no recorded activity "
                    f"and is considered stale."
                )
            elif token.active_at < cutoff:
                inactive_days = (current_time - token.active_at).days
                report.status = "FAIL"
                report.status_extended = (
                    f"Token '{token.name}' ({token.id}) has not been used for "
                    f"{inactive_days} days (last active: "
                    f"{token.active_at.strftime('%Y-%m-%d %H:%M UTC')}). "
                    f"Threshold is {threshold_days} days."
                )
            else:
                report.status = "PASS"
                report.status_extended = (
                    f"Token '{token.name}' ({token.id}) was last active on "
                    f"{token.active_at.strftime('%Y-%m-%d %H:%M UTC')} "
                    f"(within the last {threshold_days} days)."
                )

            reports.append(report)

        return reports
|
||||
@@ -0,0 +1,99 @@
|
||||
from datetime import datetime, timezone
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.providers.vercel.lib.service.service import VercelService
|
||||
|
||||
|
||||
class Authentication(VercelService):
    """Retrieve Vercel API token metadata for hygiene checks.

    Populates ``self.tokens`` (keyed by token id) with one
    :class:`VercelAuthToken` per unique token visible to the authenticated
    user, across the personal scope and every team scope.
    """

    def __init__(self, provider):
        # Service name "Authentication" is passed to the VercelService base
        # class (used there for logging/identification).
        super().__init__("Authentication", provider)
        # Tokens indexed by token id; populated once at construction time.
        self.tokens: dict[str, VercelAuthToken] = {}
        self._list_tokens()

    def _list_tokens(self):
        """List all API tokens for the authenticated user and their teams."""
        # Always fetch personal tokens (no teamId filter)
        self._fetch_tokens_for_scope(team_id=None)

        # Also fetch tokens scoped to each team.
        # NOTE(review): _all_team_ids comes from the VercelService base class —
        # assumed to be an iterable of team id strings; confirm upstream.
        for tid in self._all_team_ids:
            self._fetch_tokens_for_scope(team_id=tid)

        logger.info(f"Authentication - Found {len(self.tokens)} token(s)")

    def _fetch_tokens_for_scope(self, team_id: str = None):
        """Fetch tokens for a specific scope (personal or team).

        Errors are logged, not raised, so one failing scope does not abort
        collection for the remaining scopes.

        Args:
            team_id: Team ID to fetch tokens for. None for personal tokens.
        """
        try:
            # Always set teamId key explicitly — _get won't auto-inject when key
            # is present, and requests skips None values from query params.
            params = {"teamId": team_id}
            data = self._get("/v5/user/tokens", params=params)
            if not data:
                return

            tokens = data.get("tokens", [])

            for token in tokens:
                token_id = token.get("id", "")
                # Skip malformed entries and tokens already collected from a
                # previously queried scope (dedup across personal/team scopes).
                if not token_id or token_id in self.tokens:
                    continue

                # Vercel timestamps are epoch milliseconds; convert each of the
                # optional activity/creation/expiry fields to aware UTC datetimes.
                active_at = None
                if token.get("activeAt"):
                    active_at = datetime.fromtimestamp(
                        token["activeAt"] / 1000, tz=timezone.utc
                    )

                created_at = None
                if token.get("createdAt"):
                    created_at = datetime.fromtimestamp(
                        token["createdAt"] / 1000, tz=timezone.utc
                    )

                expires_at = None
                if token.get("expiresAt"):
                    expires_at = datetime.fromtimestamp(
                        token["expiresAt"] / 1000, tz=timezone.utc
                    )

                self.tokens[token_id] = VercelAuthToken(
                    id=token_id,
                    name=token.get("name", "Unnamed Token"),
                    type=token.get("type"),
                    active_at=active_at,
                    created_at=created_at,
                    expires_at=expires_at,
                    scopes=token.get("scopes", []),
                    origin=token.get("origin"),
                    # Prefer the teamId reported on the token itself; fall back
                    # to the scope we queried with.
                    team_id=token.get("teamId") or team_id,
                )

        except Exception as error:
            scope = f"team {team_id}" if team_id else "personal"
            logger.error(
                f"Authentication - Error listing tokens for {scope}: "
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
|
||||
|
||||
|
||||
class VercelAuthToken(BaseModel):
    """Vercel API token representation.

    Datetime fields are timezone-aware UTC values converted from the
    millisecond epoch timestamps returned by the Vercel API; any of them
    may be None when the API omits the field.
    """

    id: str  # Token identifier (unique; used as the dict key in the service)
    name: str  # Human-readable token name ("Unnamed Token" when missing)
    type: Optional[str] = None  # Token type as reported by the API
    active_at: Optional[datetime] = None  # Last recorded activity (UTC)
    created_at: Optional[datetime] = None  # Creation time (UTC)
    expires_at: Optional[datetime] = None  # Expiry time (UTC); None = no expiry
    scopes: list[dict] = Field(default_factory=list)  # Raw scope entries from the API
    origin: Optional[str] = None  # Where the token was created, per the API
    team_id: Optional[str] = None  # Owning team id, or None for personal tokens
|
||||
@@ -0,0 +1,38 @@
|
||||
{
|
||||
"Provider": "vercel",
|
||||
"CheckID": "authentication_token_not_expired",
|
||||
"CheckTitle": "Vercel API tokens have not expired",
|
||||
"CheckType": [],
|
||||
"ServiceName": "authentication",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "high",
|
||||
"ResourceType": "NotDefined",
|
||||
"ResourceGroup": "IAM",
|
||||
"Description": "**Vercel API tokens** are assessed for **expiration status** to identify expired tokens or those about to expire within a configurable threshold (default: 7 days). Tokens about to expire are flagged proactively so they can be rotated before causing disruptions. Tokens without an expiration date are considered valid.",
|
||||
"Risk": "Expired tokens indicate poor **token lifecycle management**. Tokens about to expire risk **imminent service disruption** if not rotated in time. Integrations or **CI/CD pipelines** relying on expired or soon-to-expire tokens will fail silently.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://vercel.com/docs/rest-api#authentication"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Sign in to the Vercel dashboard\n2. Navigate to Account Settings > Tokens\n3. Identify any expired tokens\n4. Delete expired tokens that are no longer needed\n5. Create new tokens with appropriate expiration dates to replace expired ones\n6. Update any integrations or CI/CD pipelines that used the expired tokens",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Remove expired tokens and create new ones with appropriate expiration dates. Implement a token rotation schedule to ensure tokens are refreshed before they expire. Update all integrations and automation that depend on the replaced tokens.",
|
||||
"Url": "https://hub.prowler.com/checks/vercel/authentication_token_not_expired"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"trust-boundaries"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [
|
||||
"authentication_no_stale_tokens"
|
||||
],
|
||||
"Notes": "Tokens without an expiration date (expires_at is None) are treated as valid since they have no defined expiry. The days_to_expire_threshold is configurable via audit_config (default: 7 days). Tokens expiring within the threshold are reported with medium severity; already expired tokens are reported with high severity."
|
||||
}
|
||||
@@ -0,0 +1,75 @@
|
||||
from datetime import datetime, timezone
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportVercel, Severity
|
||||
from prowler.providers.vercel.services.authentication.authentication_client import (
|
||||
authentication_client,
|
||||
)
|
||||
|
||||
|
||||
class authentication_token_not_expired(Check):
    """Report expired and soon-to-expire Vercel API tokens.

    Tokens without an expiration date are treated as valid (no expiry set).
    Already expired tokens are reported with high severity; tokens expiring
    within ``days_to_expire_threshold`` days (audit_config, default 7) are
    reported with medium severity.
    """

    def execute(self) -> List[CheckReportVercel]:
        """Produce one PASS/FAIL report per token.

        Returns:
            List[CheckReportVercel]: One report for every token known to
            the authentication service.
        """
        reports: List[CheckReportVercel] = []
        current_time = datetime.now(timezone.utc)
        expiry_threshold = authentication_client.audit_config.get(
            "days_to_expire_threshold", 7
        )
        for token in authentication_client.tokens.values():
            report = CheckReportVercel(
                metadata=self.metadata(),
                resource=token,
                resource_name=token.name,
                resource_id=token.id,
            )

            expiry = token.expires_at
            if expiry is None:
                # No expiry configured -> the token cannot expire.
                report.status = "PASS"
                report.status_extended = (
                    f"Token '{token.name}' ({token.id}) does not have an expiration "
                    f"date set and is currently valid."
                )
            elif expiry <= current_time:
                report.status = "FAIL"
                report.check_metadata.Severity = Severity.high
                report.status_extended = (
                    f"Token '{token.name}' ({token.id}) has expired "
                    f"on {expiry.strftime('%Y-%m-%d %H:%M UTC')}."
                )
            elif (expiry - current_time).days <= expiry_threshold:
                days_left = (expiry - current_time).days
                report.status = "FAIL"
                report.check_metadata.Severity = Severity.medium
                report.status_extended = (
                    f"Token '{token.name}' ({token.id}) is about to expire "
                    f"in {days_left} days "
                    f"on {expiry.strftime('%Y-%m-%d %H:%M UTC')}."
                )
            else:
                report.status = "PASS"
                report.status_extended = (
                    f"Token '{token.name}' ({token.id}) is valid and expires "
                    f"on {expiry.strftime('%Y-%m-%d %H:%M UTC')}."
                )

            reports.append(report)

        return reports
|
||||
@@ -0,0 +1,4 @@
|
||||
from prowler.providers.common.provider import Provider
from prowler.providers.vercel.services.deployment.deployment_service import Deployment

# Module-level singleton: one Deployment service instance shared by all
# deployment checks, built against the globally registered Vercel provider.
deployment_client = Deployment(Provider.get_global_provider())
|
||||
@@ -0,0 +1,36 @@
|
||||
{
|
||||
"Provider": "vercel",
|
||||
"CheckID": "deployment_production_uses_stable_target",
|
||||
"CheckTitle": "Vercel production deployments originate from a stable branch",
|
||||
"CheckType": [],
|
||||
"ServiceName": "deployment",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "NotDefined",
|
||||
"ResourceGroup": "devops",
|
||||
"Description": "**Vercel production deployments** are assessed for **source branch stability** by verifying they are sourced from a stable branch (`main` or `master`). Deploying to production from feature branches bypasses standard CI/CD review processes and may introduce untested or incomplete code into the production environment.",
|
||||
"Risk": "Production deployments from **feature branches** may contain untested, incomplete, or unapproved code changes. This bypasses the standard **code review and merge workflow**, increasing the risk of shipping bugs, security vulnerabilities, or breaking changes to end users.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://vercel.com/docs/deployments/git"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Sign in to the Vercel dashboard\n2. Navigate to the project Settings > Git\n3. Ensure the Production Branch is set to 'main' or 'master'\n4. Review recent production deployments and revert any that originated from feature branches",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Configure the production branch to main or master and ensure all production deployments go through the standard merge workflow. Use branch protection rules in your Git provider to prevent direct pushes to the production branch.",
|
||||
"Url": "https://hub.prowler.com/checks/vercel/deployment_production_uses_stable_target"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"trust-boundaries"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": "Deployments without git source information are skipped as they may be manual deployments or CLI-triggered builds."
|
||||
}
|
||||
@@ -0,0 +1,57 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportVercel
|
||||
from prowler.providers.vercel.services.deployment.deployment_client import (
|
||||
deployment_client,
|
||||
)
|
||||
|
||||
|
||||
class deployment_production_uses_stable_target(Check):
    """Check if production deployments are sourced from a stable branch.

    This class verifies whether each Vercel production deployment originates
    from a configured stable branch rather than a feature branch. The list of
    stable branches is configurable via audit_config key ``stable_branches``
    (default: ``["main", "master"]``).
    """

    def execute(self) -> List[CheckReportVercel]:
        """Execute the Vercel Production Deployment Stable Target check.

        Iterates over all deployments, filters for production targets with
        git source information, and checks the source branch against the
        configured stable-branch list.

        Returns:
            List[CheckReportVercel]: A list of reports for each production
            deployment that carries git metadata.
        """
        findings = []
        # Fix: the audit_config lookup is loop-invariant — read it once here
        # instead of on every deployment iteration (it was previously fetched
        # inside the loop body).
        stable_branches = deployment_client.audit_config.get(
            "stable_branches", ["main", "master"]
        )

        for deployment in deployment_client.deployments.values():
            # Only production deployments are in scope for this check.
            if deployment.target != "production":
                continue

            # Deployments without git metadata (manual or CLI-triggered
            # builds) cannot be evaluated and are skipped by design.
            if not deployment.git_source:
                continue

            report = CheckReportVercel(metadata=self.metadata(), resource=deployment)

            branch = deployment.git_source.get("branch") or ""
            if branch in stable_branches:
                report.status = "PASS"
                report.status_extended = (
                    f"Production deployment {deployment.name} ({deployment.id}) "
                    f"is sourced from stable branch '{branch}'."
                )
            else:
                report.status = "FAIL"
                report.status_extended = (
                    f"Production deployment {deployment.name} ({deployment.id}) "
                    f"is sourced from branch '{branch}' instead of a "
                    f"configured stable branch ({', '.join(stable_branches)})."
                )

            findings.append(report)

        return findings
|
||||
@@ -0,0 +1,103 @@
|
||||
from datetime import datetime, timezone
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.providers.vercel.lib.service.service import VercelService
|
||||
|
||||
|
||||
class Deployment(VercelService):
    """Retrieve recent Vercel deployments.

    Populates ``self.deployments`` (keyed by deployment uid/id) with one
    :class:`VercelDeployment` per unique recent deployment, optionally
    restricted by the provider's project filter.
    """

    def __init__(self, provider):
        super().__init__("Deployment", provider)
        # Deployments indexed by deployment uid/id; populated at construction.
        self.deployments: dict[str, VercelDeployment] = {}
        self._list_deployments()

    def _list_deployments(self):
        """List recent deployments across all projects.

        Errors are logged, not raised, leaving ``self.deployments`` with
        whatever was collected before the failure.
        """
        try:
            params = {"limit": 100}
            # Fetch only recent deployments (first page is sufficient for security checks)
            raw_deployments = self._paginate("/v6/deployments", "deployments", params)

            seen_ids: set[str] = set()
            filter_projects = self.provider.filter_projects

            for dep in raw_deployments:
                # The API exposes the identifier as "uid"; fall back to "id".
                dep_id = dep.get("uid", dep.get("id", ""))
                if not dep_id or dep_id in seen_ids:
                    continue
                seen_ids.add(dep_id)

                project_id = dep.get("projectId", "")

                # Apply project filter if specified — a deployment is kept when
                # either its project id or its name appears in the filter.
                if filter_projects and project_id not in filter_projects:
                    project_name = dep.get("name", "")
                    if project_name not in filter_projects:
                        continue

                # Timestamps arrive as epoch milliseconds; convert to aware UTC.
                created_at = None
                if dep.get("createdAt"):
                    created_at = datetime.fromtimestamp(
                        dep["createdAt"] / 1000, tz=timezone.utc
                    )

                ready_at = None
                if dep.get("ready"):
                    ready_at = datetime.fromtimestamp(
                        dep["ready"] / 1000, tz=timezone.utc
                    )

                # Build git source info only when commit metadata is present
                # (GitHub or GitLab); manual/CLI deployments have none.
                git_source = None
                meta = dep.get("meta", {}) or {}
                if meta.get("githubCommitSha") or meta.get("gitlabCommitSha"):
                    git_source = {
                        "commit_sha": meta.get("githubCommitSha")
                        or meta.get("gitlabCommitSha"),
                        "branch": meta.get("githubCommitRef")
                        or meta.get("gitlabCommitRef"),
                        "repo": meta.get("githubRepo") or meta.get("gitlabRepo"),
                    }

                self.deployments[dep_id] = VercelDeployment(
                    id=dep_id,
                    name=dep.get("name", ""),
                    url=dep.get("url", ""),
                    # "state" on newer responses, "readyState" on older ones.
                    state=dep.get("state", dep.get("readyState", "")),
                    target=dep.get("target"),
                    created_at=created_at,
                    ready_at=ready_at,
                    project_id=project_id,
                    # NOTE(review): "name" is used for both the deployment name
                    # and project_name here — confirm the API exposes a distinct
                    # project name field.
                    project_name=dep.get("name", ""),
                    team_id=dep.get("teamId") or self.provider.session.team_id,
                    git_source=git_source,
                    deployment_protection=dep.get("deploymentProtection"),
                )

            logger.info(f"Deployment - Found {len(self.deployments)} deployment(s)")

        except Exception as error:
            logger.error(
                f"Deployment - Error listing deployments: "
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
|
||||
|
||||
|
||||
class VercelDeployment(BaseModel):
    """Vercel deployment representation.

    Datetime fields are timezone-aware UTC values converted from the
    millisecond epoch timestamps returned by the Vercel API.
    """

    id: str  # Deployment uid/id (unique; dict key in the service)
    name: str  # Deployment name as reported by the API
    url: str = ""  # Deployment URL
    state: str = ""  # Deployment state ("state" or legacy "readyState")
    target: Optional[str] = None  # "production" | "preview"
    created_at: Optional[datetime] = None  # Creation time (UTC)
    ready_at: Optional[datetime] = None  # Time the deployment became ready (UTC)
    project_id: Optional[str] = None  # Owning project id
    project_name: Optional[str] = None  # Owning project name
    team_id: Optional[str] = None  # Owning team id, if any
    # Keys: "commit_sha", "branch", "repo" — present only when the deployment
    # originated from a GitHub/GitLab commit.
    git_source: Optional[dict] = None
    deployment_protection: Optional[dict] = None  # Raw protection settings from the API
|
||||
@@ -0,0 +1,4 @@
|
||||
from prowler.providers.common.provider import Provider
from prowler.providers.vercel.services.domain.domain_service import Domain

# Module-level singleton: one Domain service instance shared by all domain
# checks, built against the globally registered Vercel provider.
domain_client = Domain(Provider.get_global_provider())
|
||||
@@ -0,0 +1,39 @@
|
||||
{
|
||||
"Provider": "vercel",
|
||||
"CheckID": "domain_dns_properly_configured",
|
||||
"CheckTitle": "Vercel domain DNS records are properly configured",
|
||||
"CheckType": [],
|
||||
"ServiceName": "domain",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "NotDefined",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Vercel domains** are assessed for **DNS configuration** to verify records properly point to Vercel's infrastructure. Misconfigured DNS can result in domains that fail to serve content, SSL certificate provisioning failures, and degraded user experience.",
|
||||
"Risk": "**Misconfigured DNS records** can cause the domain to be unreachable, preventing users from accessing the application. It can also prevent **SSL certificate provisioning**, resulting in browser security warnings. Stale DNS configurations may point to decommissioned infrastructure, creating a risk of **subdomain takeover**.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://vercel.com/docs/projects/domains"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Sign in to the Vercel dashboard\n2. Navigate to the project Settings > Domains\n3. Review the DNS configuration status for each domain\n4. Update DNS records at your domain registrar to match the values shown in the Vercel dashboard\n5. Wait for DNS propagation (typically 24-48 hours)",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Update DNS records at your domain registrar to correctly point to Vercel. Use a CNAME record for subdomains or an A record for apex domains. Verify the configuration in the Vercel dashboard after making changes.",
|
||||
"Url": "https://hub.prowler.com/checks/vercel/domain_dns_properly_configured"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"trust-boundaries"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [
|
||||
"domain_verified",
|
||||
"domain_ssl_certificate_valid"
|
||||
],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -0,0 +1,45 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportVercel
|
||||
from prowler.providers.vercel.services.domain.domain_client import domain_client
|
||||
|
||||
|
||||
class domain_dns_properly_configured(Check):
    """Verify that each Vercel domain's DNS points at Vercel.

    Relies on the ``configured`` flag gathered by the domain service; a
    domain whose DNS is not configured may fail to resolve to Vercel's
    infrastructure.
    """

    def execute(self) -> List[CheckReportVercel]:
        """Produce one PASS/FAIL report per known domain.

        Returns:
            List[CheckReportVercel]: One report for every domain.
        """
        reports: List[CheckReportVercel] = []
        for domain in domain_client.domains.values():
            report = CheckReportVercel(
                metadata=self.metadata(),
                resource=domain,
                resource_name=domain.name,
                resource_id=domain.id or domain.name,
            )

            if domain.configured:
                status = "PASS"
                detail = f"Domain {domain.name} has DNS properly configured."
            else:
                status = "FAIL"
                detail = (
                    f"Domain {domain.name} does not have DNS properly configured. "
                    f"The domain may not be resolving to Vercel's infrastructure."
                )

            report.status = status
            report.status_extended = detail
            reports.append(report)

        return reports
|
||||
124
prowler/providers/vercel/services/domain/domain_service.py
Normal file
124
prowler/providers/vercel/services/domain/domain_service.py
Normal file
@@ -0,0 +1,124 @@
|
||||
from datetime import datetime, timezone
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.providers.vercel.lib.service.service import VercelService
|
||||
|
||||
|
||||
class Domain(VercelService):
    """Retrieve Vercel domains with DNS and SSL information.

    Populates ``self.domains`` (keyed by domain name) with one
    :class:`VercelDomain` per unique domain, then enriches each entry with
    its DNS records and SSL certificate via threaded API calls.
    """

    def __init__(self, provider):
        super().__init__("Domain", provider)
        # Domains indexed by domain name; populated at construction time.
        self.domains: dict[str, VercelDomain] = {}
        self._list_domains()
        # Enrich each domain concurrently with DNS records and SSL data.
        self.__threading_call__(self._fetch_dns_records, list(self.domains.values()))
        self.__threading_call__(
            self._fetch_ssl_certificate, list(self.domains.values())
        )

    def _list_domains(self):
        """List all domains.

        Errors are logged, not raised, leaving ``self.domains`` with
        whatever was collected before the failure.
        """
        try:
            raw_domains = self._paginate("/v5/domains", "domains")

            seen_names: set[str] = set()

            for domain in raw_domains:
                domain_name = domain.get("name", "")
                # Skip unnamed entries and duplicates across pages.
                if not domain_name or domain_name in seen_names:
                    continue
                seen_names.add(domain_name)

                self.domains[domain_name] = VercelDomain(
                    name=domain_name,
                    id=domain.get("id", domain_name),
                    apex_name=domain.get("apexName"),
                    verified=domain.get("verified", False),
                    # When the response omits "configured", fall back to the
                    # verification flag as a best-effort approximation.
                    configured=(
                        domain.get("configured", False)
                        if "configured" in domain
                        else domain.get("verified", False)
                    ),
                    redirect=domain.get("redirect"),
                    team_id=self.provider.session.team_id,
                )

            logger.info(f"Domain - Found {len(self.domains)} domain(s)")

        except Exception as error:
            logger.error(
                f"Domain - Error listing domains: "
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )

    def _fetch_dns_records(self, domain: "VercelDomain"):
        """Fetch DNS records for a single domain.

        Mutates ``domain.dns_records`` in place; errors are logged only.
        """
        try:
            data = self._get(f"/v4/domains/{domain.name}/records")
            if data and "records" in data:
                domain.dns_records = data["records"]
                logger.debug(
                    f"Domain - Fetched {len(domain.dns_records)} DNS records for {domain.name}"
                )
        except Exception as error:
            logger.error(
                f"Domain - Error fetching DNS records for {domain.name}: "
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )

    def _fetch_ssl_certificate(self, domain: "VercelDomain"):
        """Fetch SSL certificate for a domain via the certs endpoint.

        Mutates ``domain.ssl_certificate`` in place; errors are logged only,
        leaving the field None when no certificate could be retrieved.
        """
        try:
            data = self._get(f"/v8/certs/{domain.name}")
            if data:
                # Certificate timestamps are epoch milliseconds (or absent).
                expires_at_ms = data.get("expiresAt")
                created_at_ms = data.get("createdAt")
                domain.ssl_certificate = VercelSSLCertificate(
                    id=data.get("id", ""),
                    created_at=(
                        datetime.fromtimestamp(created_at_ms / 1000, tz=timezone.utc)
                        if created_at_ms
                        else None
                    ),
                    expires_at=(
                        datetime.fromtimestamp(expires_at_ms / 1000, tz=timezone.utc)
                        if expires_at_ms
                        else None
                    ),
                    auto_renew=data.get("autoRenew", False),
                    cns=data.get("cns", []),
                )
                logger.debug(f"Domain - Fetched SSL certificate for {domain.name}")
        except Exception as error:
            logger.error(
                f"Domain - Error fetching SSL certificate for {domain.name}: "
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
|
||||
|
||||
|
||||
class VercelSSLCertificate(BaseModel):
    """Vercel SSL certificate representation.

    Datetime fields are timezone-aware UTC values converted from the
    millisecond epoch timestamps of the certs endpoint; either may be None.
    """

    id: str = ""  # Certificate identifier from the API
    created_at: Optional[datetime] = None  # Issuance time (UTC)
    expires_at: Optional[datetime] = None  # Expiry time (UTC); None = not reported
    auto_renew: bool = False  # Whether Vercel auto-renews this certificate
    cns: list[str] = Field(default_factory=list)  # Common names covered by the cert
|
||||
|
||||
|
||||
class VercelDomain(BaseModel):
    """Vercel domain representation.

    ``dns_records`` and ``ssl_certificate`` start empty/None and are filled
    in by the Domain service's threaded enrichment calls.
    """

    name: str  # Domain name (unique; dict key in the service)
    id: str = ""  # Domain id from the API (falls back to the name)
    apex_name: Optional[str] = None  # Apex (root) domain name
    verified: bool = False  # Ownership verification flag from the API
    configured: bool = False  # DNS-configured flag (may fall back to `verified`)
    ssl_certificate: Optional[VercelSSLCertificate] = None  # Filled by enrichment
    redirect: Optional[str] = None  # Redirect target, if configured
    dns_records: list[dict] = Field(default_factory=list)  # Raw records from the API
    team_id: Optional[str] = None  # Owning team id, if any
    project_id: Optional[str] = None  # Associated project id, if any
||||
@@ -0,0 +1,39 @@
|
||||
{
|
||||
"Provider": "vercel",
|
||||
"CheckID": "domain_ssl_certificate_valid",
|
||||
"CheckTitle": "Vercel domains have a valid, non-expired SSL certificate",
|
||||
"CheckType": [],
|
||||
"ServiceName": "domain",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "critical",
|
||||
"ResourceType": "NotDefined",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Vercel domains** are assessed for **SSL certificate validity** including provisioning, expiration, and upcoming expiry. Vercel automatically provisions and renews SSL certificates for properly configured domains. A missing, expired, or soon-to-expire certificate indicates a configuration issue that may leave traffic unencrypted.",
|
||||
"Risk": "Without an **SSL certificate**, traffic between users and the domain is transmitted in **plain text**. This exposes sensitive data such as authentication tokens, form submissions, and personal information to interception via **man-in-the-middle attacks**. Search engines also penalize non-HTTPS sites, reducing visibility.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://vercel.com/docs/security/encryption"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Sign in to the Vercel dashboard\n2. Navigate to the project Settings > Domains\n3. Verify the domain's DNS records point to Vercel correctly\n4. Vercel will automatically provision an SSL certificate once DNS is properly configured\n5. If issues persist, remove and re-add the domain",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Ensure domain DNS records are properly configured to point to Vercel. Once DNS is validated, Vercel automatically provisions and renews SSL/TLS certificates. Check the domain configuration in the Vercel dashboard if the certificate is not being issued.",
|
||||
"Url": "https://hub.prowler.com/checks/vercel/domain_ssl_certificate_valid"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"encryption"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [
|
||||
"domain_verified",
|
||||
"domain_dns_properly_configured"
|
||||
],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -0,0 +1,78 @@
|
||||
from datetime import datetime, timezone
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportVercel, Severity
|
||||
from prowler.providers.vercel.services.domain.domain_client import domain_client
|
||||
|
||||
|
||||
class domain_ssl_certificate_valid(Check):
    """Verify each Vercel domain carries a valid, unexpired SSL certificate.

    A missing certificate fails with high severity, an expired one with
    critical severity, and one expiring within ``days_to_expire_threshold``
    days (audit_config, default 7) with high severity. Certificates with no
    recorded expiry pass.
    """

    def execute(self) -> List[CheckReportVercel]:
        """Produce one PASS/FAIL report per known domain.

        Returns:
            List[CheckReportVercel]: One report for every domain.
        """
        reports: List[CheckReportVercel] = []
        current_time = datetime.now(timezone.utc)
        expiry_threshold = domain_client.audit_config.get(
            "days_to_expire_threshold", 7
        )

        for domain in domain_client.domains.values():
            report = CheckReportVercel(
                metadata=self.metadata(),
                resource=domain,
                resource_name=domain.name,
                resource_id=domain.id or domain.name,
            )

            cert = domain.ssl_certificate
            if cert is None:
                report.status = "FAIL"
                report.check_metadata.Severity = Severity.high
                report.status_extended = f"Domain {domain.name} does not have an SSL certificate provisioned."
            elif cert.expires_at is None:
                # Certificate exists but carries no expiry information.
                report.status = "PASS"
                report.status_extended = (
                    f"Domain {domain.name} has an SSL certificate provisioned."
                )
            else:
                expiry_label = cert.expires_at.strftime('%Y-%m-%d %H:%M UTC')
                if cert.expires_at <= current_time:
                    report.status = "FAIL"
                    report.check_metadata.Severity = Severity.critical
                    report.status_extended = (
                        f"Domain {domain.name} has an SSL certificate that expired "
                        f"on {expiry_label}."
                    )
                elif (cert.expires_at - current_time).days <= expiry_threshold:
                    days_left = (cert.expires_at - current_time).days
                    report.status = "FAIL"
                    report.check_metadata.Severity = Severity.high
                    report.status_extended = (
                        f"Domain {domain.name} has an SSL certificate expiring "
                        f"in {days_left} days "
                        f"on {expiry_label}."
                    )
                else:
                    report.status = "PASS"
                    report.status_extended = (
                        f"Domain {domain.name} has a valid SSL certificate expiring "
                        f"on {expiry_label}."
                    )

            reports.append(report)

        return reports
|
||||
@@ -0,0 +1,39 @@
|
||||
{
|
||||
"Provider": "vercel",
|
||||
"CheckID": "domain_verified",
|
||||
"CheckTitle": "Vercel domains are verified",
|
||||
"CheckType": [],
|
||||
"ServiceName": "domain",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "high",
|
||||
"ResourceType": "NotDefined",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Vercel domains** are assessed for **ownership verification** status. Unverified domains may not serve traffic correctly and could indicate a pending or incomplete domain setup. Domain verification confirms that the domain owner has authorized Vercel to manage the domain.",
|
||||
"Risk": "**Unverified domains** may fail to resolve or serve content, causing **downtime** for users. An unverified domain could also indicate a stale or orphaned configuration, or a domain that was added but never properly transferred, creating potential for **domain takeover** if the ownership verification is left incomplete.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://vercel.com/docs/projects/domains"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Sign in to the Vercel dashboard\n2. Navigate to the project Settings > Domains\n3. For any unverified domain, follow the verification steps shown\n4. Add the required DNS records (CNAME or A record) at your domain registrar\n5. Wait for DNS propagation and verify the domain",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Complete domain verification by configuring the required DNS records at your domain registrar. Remove any domains that are no longer needed to reduce the attack surface. Regularly audit domain configurations to ensure all domains remain verified.",
|
||||
"Url": "https://hub.prowler.com/checks/vercel/domain_verified"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"trust-boundaries"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [
|
||||
"domain_dns_properly_configured",
|
||||
"domain_ssl_certificate_valid"
|
||||
],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -0,0 +1,44 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportVercel
|
||||
from prowler.providers.vercel.services.domain.domain_client import domain_client
|
||||
|
||||
|
||||
class domain_verified(Check):
    """Ensure every Vercel domain has passed ownership verification.

    Unverified domains may fail to serve traffic and can indicate an
    incomplete setup or a hijacking attempt, so a domain is reported as
    PASS only when Vercel has confirmed ownership.
    """

    def execute(self) -> List[CheckReportVercel]:
        """Run the verification check against every collected domain.

        Returns:
            List[CheckReportVercel]: One report per Vercel domain.
        """
        reports = []
        for domain in domain_client.domains.values():
            finding = CheckReportVercel(
                metadata=self.metadata(),
                resource=domain,
                resource_name=domain.name,
                # Fall back to the name when the API returned no id.
                resource_id=domain.id or domain.name,
            )

            if not domain.verified:
                finding.status = "FAIL"
                finding.status_extended = (
                    f"Domain {domain.name} is not verified. "
                    f"The domain may not be serving traffic correctly."
                )
            else:
                finding.status = "PASS"
                finding.status_extended = f"Domain {domain.name} is verified."

            reports.append(finding)

        return reports
|
||||
@@ -0,0 +1,36 @@
|
||||
{
|
||||
"Provider": "vercel",
|
||||
"CheckID": "project_auto_expose_system_env_disabled",
|
||||
"CheckTitle": "Vercel project has automatic exposure of system environment variables disabled",
|
||||
"CheckType": [],
|
||||
"ServiceName": "project",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "NotDefined",
|
||||
"ResourceGroup": "security",
|
||||
"Description": "**Vercel projects** are assessed for **automatic system environment variable exposure** (`VERCEL_URL`, `VERCEL_ENV`, `VERCEL_GIT_COMMIT_SHA`). When enabled, these variables are injected into every deployment and may be accessible in client-side JavaScript bundles if not handled carefully, leaking internal infrastructure details.",
|
||||
"Risk": "Automatically exposed **system environment variables** can reveal deployment URLs, Git metadata, environment names, and other internal details. If these values are inadvertently included in **client-side bundles**, attackers can use them to map infrastructure, identify staging environments, or craft targeted attacks against specific deployment instances.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://vercel.com/docs/projects/environment-variables/system-environment-variables"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Sign in to the Vercel dashboard\n2. Navigate to the project Settings > Environment Variables\n3. Locate the 'Automatically expose System Environment Variables' toggle\n4. Disable the toggle\n5. Manually add only the specific system variables your application needs",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Disable automatic exposure of system environment variables and explicitly define only the variables required by your application. This follows the principle of least privilege and reduces the risk of leaking internal infrastructure details through client-side code.",
|
||||
"Url": "https://hub.prowler.com/checks/vercel/project_auto_expose_system_env_disabled"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"trust-boundaries"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -0,0 +1,42 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportVercel
|
||||
from prowler.providers.vercel.services.project.project_client import project_client
|
||||
|
||||
|
||||
class project_auto_expose_system_env_disabled(Check):
    """Ensure projects do not auto-expose Vercel system environment variables.

    Automatically injected system variables can leak deployment details,
    so each project passes only when the automatic exposure setting is
    turned off.
    """

    def execute(self) -> List[CheckReportVercel]:
        """Run the check against every collected project.

        Returns:
            List[CheckReportVercel]: One report per Vercel project.
        """
        reports = []
        for project in project_client.projects.values():
            finding = CheckReportVercel(metadata=self.metadata(), resource=project)

            if project.auto_expose_system_envs:
                finding.status = "FAIL"
                finding.status_extended = (
                    f"Project {project.name} automatically exposes system "
                    f"environment variables to the build process."
                )
            else:
                finding.status = "PASS"
                finding.status_extended = (
                    f"Project {project.name} does not automatically expose "
                    f"system environment variables to the build process."
                )

            reports.append(finding)

        return reports
|
||||
@@ -0,0 +1,4 @@
|
||||
from prowler.providers.common.provider import Provider
from prowler.providers.vercel.services.project.project_service import Project

# Module-level singleton: the Project service is instantiated once against
# the globally configured provider so that every project check imported
# elsewhere shares the same client (and its cached project inventory).
project_client = Project(Provider.get_global_provider())
|
||||
@@ -0,0 +1,38 @@
|
||||
{
|
||||
"Provider": "vercel",
|
||||
"CheckID": "project_deployment_protection_enabled",
|
||||
"CheckTitle": "Vercel project has deployment protection enabled on preview deployments",
|
||||
"CheckType": [],
|
||||
"ServiceName": "project",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "high",
|
||||
"ResourceType": "NotDefined",
|
||||
"ResourceGroup": "security",
|
||||
"Description": "**Vercel projects** are assessed for **deployment protection** configuration, which restricts access to preview deployments by requiring authentication before visitors can view them. When disabled, anyone with the preview URL can access in-progress or staging versions of the application, potentially exposing unreleased features, debug information, or internal endpoints.",
|
||||
"Risk": "Without **deployment protection** on preview deployments, any person who obtains or guesses a preview URL can view **unreleased application code**, test data, or internal API endpoints. This increases the attack surface and may leak sensitive business logic or credentials embedded in preview builds.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://vercel.com/docs/security/deployment-protection"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Sign in to the Vercel dashboard\n2. Navigate to the project Settings > General\n3. Scroll to Deployment Protection\n4. Under Preview deployments, select 'Standard Protection' or 'Vercel Authentication'\n5. Click Save",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Enable deployment protection on preview deployments to require authentication before visitors can access preview URLs. Use 'Standard Protection' for Vercel Authentication or configure trusted IP ranges for more granular control.",
|
||||
"Url": "https://hub.prowler.com/checks/vercel/project_deployment_protection_enabled"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"internet-exposed"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [
|
||||
"project_production_deployment_protection_enabled"
|
||||
],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -0,0 +1,45 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportVercel
|
||||
from prowler.providers.vercel.services.project.project_client import project_client
|
||||
|
||||
|
||||
class project_deployment_protection_enabled(Check):
    """Ensure preview deployments are protected by an authentication layer.

    A project passes when deployment protection is configured with any
    level other than "none"; otherwise preview URLs are reachable by
    anyone who obtains them.
    """

    def execute(self) -> List[CheckReportVercel]:
        """Run the check against every collected project.

        Returns:
            List[CheckReportVercel]: One report per Vercel project.
        """
        reports = []
        for project in project_client.projects.values():
            finding = CheckReportVercel(metadata=self.metadata(), resource=project)

            protection = project.deployment_protection
            # Missing configuration and an explicit "none" level both mean
            # the preview environment is unprotected.
            if protection is None or protection.level == "none":
                finding.status = "FAIL"
                finding.status_extended = (
                    f"Project {project.name} does not have deployment protection "
                    f"enabled on preview deployments."
                )
            else:
                finding.status = "PASS"
                finding.status_extended = (
                    f"Project {project.name} has deployment protection enabled "
                    f"with level '{protection.level}' on preview deployments."
                )

            reports.append(finding)

        return reports
|
||||
@@ -0,0 +1,36 @@
|
||||
{
|
||||
"Provider": "vercel",
|
||||
"CheckID": "project_directory_listing_disabled",
|
||||
"CheckTitle": "Vercel project has directory listing disabled",
|
||||
"CheckType": [],
|
||||
"ServiceName": "project",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "NotDefined",
|
||||
"ResourceGroup": "security",
|
||||
"Description": "**Vercel projects** are assessed for **directory listing** configuration. When enabled, this feature allows visitors to browse the file structure of a deployment when no index file is present in a directory, potentially exposing source files, configuration files, and other assets that should not be publicly accessible.",
|
||||
"Risk": "Enabled **directory listing** allows attackers to enumerate the file structure of the deployment, potentially discovering backup files, configuration files, source maps, or other **sensitive assets**. This information disclosure can be leveraged to identify attack vectors or access files that were not intended to be public.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://vercel.com/docs/projects/project-configuration"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Sign in to the Vercel dashboard\n2. Navigate to the project Settings > General\n3. Locate the 'Directory Listing' option\n4. Ensure it is disabled\n5. Click Save",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Disable directory listing to prevent visitors from browsing the file structure of your deployments. Ensure that all directories either contain an index file or return a 404 response when accessed directly.",
|
||||
"Url": "https://hub.prowler.com/checks/vercel/project_directory_listing_disabled"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"internet-exposed"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -0,0 +1,40 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportVercel
|
||||
from prowler.providers.vercel.services.project.project_client import project_client
|
||||
|
||||
|
||||
class project_directory_listing_disabled(Check):
    """Ensure directory listing is turned off for every project.

    When directory listing is enabled, visitors can browse a deployment's
    file structure wherever a directory lacks an index file, so a project
    passes only when the setting is disabled.
    """

    def execute(self) -> List[CheckReportVercel]:
        """Run the check against every collected project.

        Returns:
            List[CheckReportVercel]: One report per Vercel project.
        """
        reports = []
        for project in project_client.projects.values():
            finding = CheckReportVercel(metadata=self.metadata(), resource=project)

            if project.directory_listing:
                finding.status = "FAIL"
                finding.status_extended = (
                    f"Project {project.name} has directory listing enabled, "
                    f"which may expose the project's file structure to visitors."
                )
            else:
                finding.status = "PASS"
                finding.status_extended = (
                    f"Project {project.name} has directory listing disabled."
                )

            reports.append(finding)

        return reports
|
||||
@@ -0,0 +1,38 @@
|
||||
{
|
||||
"Provider": "vercel",
|
||||
"CheckID": "project_environment_no_overly_broad_target",
|
||||
"CheckTitle": "Vercel project has no environment variables targeting all three environments",
|
||||
"CheckType": [],
|
||||
"ServiceName": "project",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "NotDefined",
|
||||
"ResourceGroup": "security",
|
||||
"Description": "**Vercel project environment variables** are assessed for **overly broad targeting** by checking whether any variable targets all three environments (production, preview, development) simultaneously, which violates the principle of least privilege.",
|
||||
"Risk": "Environment variables targeting **all environments** share the same values across production, preview, and development, increasing **blast radius** if credentials are compromised. Production secrets are exposed to weaker environments, making it harder to isolate and track unauthorized changes.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://vercel.com/docs/environment-variables"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Navigate to your Vercel project dashboard\n2. Go to Settings > Environment Variables\n3. Identify variables that target all three environments (Production, Preview, Development)\n4. Edit each variable to target only the specific environments where it is needed\n5. Create separate variables with environment-specific values where different credentials are needed per environment",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Follow the **principle of least privilege** for environment variable targeting.\n- Assign each variable to only the environments where it is actually needed\n- Use different credentials for production, preview, and development environments\n- Non-sensitive configuration (e.g. feature flags, public URLs) may be acceptable in multiple environments but should still be reviewed\n- Regularly audit environment variable targets to prevent scope creep",
|
||||
"Url": "https://hub.prowler.com/checks/vercel/project_environment_no_overly_broad_target"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"secrets"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [
|
||||
"project_environment_production_vars_not_in_preview"
|
||||
],
|
||||
"Notes": "This check flags any variable targeting all three environments regardless of its type. Even non-sensitive configuration shared across all environments may indicate a lack of environment-specific configuration management."
|
||||
}
|
||||
@@ -0,0 +1,54 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportVercel
|
||||
from prowler.providers.vercel.services.project.project_client import project_client
|
||||
|
||||
# Targets that, when all present on one variable, violate least privilege.
ALL_ENVIRONMENTS = {"production", "preview", "development"}


class project_environment_no_overly_broad_target(Check):
    """Ensure no environment variable targets every deployment environment.

    A variable aimed at production, preview and development at once shares
    a single value across trust boundaries, so any such variable causes
    the project to fail the check.
    """

    def execute(self) -> List[CheckReportVercel]:
        """Run the check against every collected project.

        Returns:
            List[CheckReportVercel]: One report per Vercel project.
        """
        reports = []
        for project in project_client.projects.values():
            finding = CheckReportVercel(metadata=self.metadata(), resource=project)

            # Keys whose (case-insensitive) targets cover all three environments.
            broad_keys = [
                env_var.key
                for env_var in project.environment_variables
                if ALL_ENVIRONMENTS.issubset({t.lower() for t in env_var.target})
            ]

            if broad_keys:
                finding.status = "FAIL"
                finding.status_extended = (
                    f"Project {project.name} has {len(broad_keys)} environment "
                    f"variable(s) targeting all three environments: "
                    f"{', '.join(broad_keys)}."
                )
            else:
                finding.status = "PASS"
                finding.status_extended = (
                    f"Project {project.name} has no environment variables targeting "
                    f"all three environments simultaneously."
                )

            reports.append(finding)

        return reports
|
||||
@@ -0,0 +1,38 @@
|
||||
{
|
||||
"Provider": "vercel",
|
||||
"CheckID": "project_environment_no_secrets_in_plain_type",
|
||||
"CheckTitle": "Vercel project has no secret-like environment variables stored as plain text",
|
||||
"CheckType": [],
|
||||
"ServiceName": "project",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "high",
|
||||
"ResourceType": "NotDefined",
|
||||
"ResourceGroup": "security",
|
||||
"Description": "**Vercel project environment variables** are assessed for **secret exposure** by checking whether variables with secret-like name suffixes (`*_KEY`, `*_SECRET`, `*_TOKEN`, `*_PASSWORD`, `*_API_KEY`, `*_PRIVATE_KEY`) are stored using the `plain` type, which makes their values readable.",
|
||||
"Risk": "Secrets stored as **plain text** environment variables are visible to all team members with project access and may appear in API responses. Plaintext secrets can be read through the Vercel dashboard or API, enabling **unauthorized modification** of connected services or disruption of integrations.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://vercel.com/docs/environment-variables"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Navigate to your Vercel project dashboard\n2. Go to Settings > Environment Variables\n3. Identify any variables ending in _KEY, _SECRET, _TOKEN, _PASSWORD, _API_KEY, or _PRIVATE_KEY that are stored as 'Plain'\n4. Delete the plain-text variable\n5. Re-create it using the 'Sensitive' type to ensure the value is encrypted and write-only",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Use the **Sensitive** type for all environment variables that contain secrets, keys, tokens, or passwords.\n- Sensitive variables are never exposed in the dashboard or API responses after creation\n- Rotate all credentials that were previously stored as plain text\n- Implement naming conventions that make it easy to identify secret variables",
|
||||
"Url": "https://hub.prowler.com/checks/vercel/project_environment_no_secrets_in_plain_type"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"secrets"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [
|
||||
"project_environment_production_vars_not_in_preview"
|
||||
],
|
||||
"Notes": "This check uses suffix-based matching on variable names (_KEY, _SECRET, _TOKEN, _PASSWORD, _API_KEY, _PRIVATE_KEY) to identify likely secrets."
|
||||
}
|
||||
@@ -0,0 +1,68 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportVercel
|
||||
from prowler.providers.vercel.services.project.project_client import project_client
|
||||
|
||||
# Name suffixes that usually indicate a credential; the effective list is
# overridable through the "secret_suffixes" key in audit_config.
DEFAULT_SECRET_SUFFIXES = [
    "_KEY",
    "_SECRET",
    "_TOKEN",
    "_PASSWORD",
    "_API_KEY",
    "_PRIVATE_KEY",
]


class project_environment_no_secrets_in_plain_type(Check):
    """Ensure secret-like environment variables are not stored as plain text.

    Variables whose names end with a known secret suffix must not use the
    "plain" type, because plain values remain readable in the dashboard
    and in API responses.
    """

    def execute(self) -> List[CheckReportVercel]:
        """Run the check against every collected project.

        Returns:
            List[CheckReportVercel]: One report per Vercel project.
        """
        reports = []
        configured_suffixes = project_client.audit_config.get(
            "secret_suffixes", DEFAULT_SECRET_SUFFIXES
        )
        # str.endswith accepts a tuple, so one call covers every suffix;
        # comparison is done in uppercase on both sides.
        suffixes = tuple(suffix.upper() for suffix in configured_suffixes)

        for project in project_client.projects.values():
            finding = CheckReportVercel(metadata=self.metadata(), resource=project)

            plain_secret_keys = [
                env_var.key
                for env_var in project.environment_variables
                if env_var.key.upper().endswith(suffixes) and env_var.type == "plain"
            ]

            if plain_secret_keys:
                finding.status = "FAIL"
                finding.status_extended = (
                    f"Project {project.name} has {len(plain_secret_keys)} secret-like "
                    f"environment variable(s) stored as plain text: "
                    f"{', '.join(plain_secret_keys)}."
                )
            else:
                finding.status = "PASS"
                finding.status_extended = (
                    f"Project {project.name} has no secret-like environment variables "
                    f"stored as plain text."
                )

            reports.append(finding)

        return reports
|
||||
@@ -0,0 +1,39 @@
|
||||
{
|
||||
"Provider": "vercel",
|
||||
"CheckID": "project_environment_production_vars_not_in_preview",
|
||||
"CheckTitle": "Vercel sensitive production environment variables do not target preview",
|
||||
"CheckType": [],
|
||||
"ServiceName": "project",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "NotDefined",
|
||||
"ResourceGroup": "security",
|
||||
"Description": "**Vercel project environment variables** are assessed for **environment separation** by checking whether sensitive variables (type `secret` or `encrypted`) that target the `production` environment also target `preview`, which could expose production credentials to untrusted preview builds.",
|
||||
"Risk": "Preview deployments are often triggered by **pull requests**, including those from external contributors or forks. Sharing **production secrets** with preview environments can lead to credential theft. Production API keys and database credentials could be exfiltrated by malicious code in preview builds and used to modify or disrupt live services.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://vercel.com/docs/environment-variables"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Navigate to your Vercel project dashboard\n2. Go to Settings > Environment Variables\n3. Identify sensitive variables (type Secret or Encrypted) that target both Production and Preview\n4. Edit each variable to remove the Preview target\n5. If preview builds require credentials, create separate variables with limited-scope preview-only credentials",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Maintain strict **environment separation** between production and preview deployments.\n- Use dedicated, limited-scope credentials for preview environments\n- Never share production database credentials, API keys, or signing keys with preview builds\n- Enable Vercel's deployment protection features to further restrict access to preview deployments\n- Regularly audit which environment variables target multiple environments",
|
||||
"Url": "https://hub.prowler.com/checks/vercel/project_environment_production_vars_not_in_preview"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"secrets"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [
|
||||
"project_environment_no_secrets_in_plain_type",
|
||||
"project_environment_no_overly_broad_target"
|
||||
],
|
||||
"Notes": "This check only inspects variables with type 'secret' or 'encrypted' since these are the ones most likely to contain actual credentials. Plain-text variables with sensitive names should be caught by the project_environment_no_secrets_in_plain_type check."
|
||||
}
|
||||
@@ -0,0 +1,55 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportVercel
|
||||
from prowler.providers.vercel.services.project.project_client import project_client
|
||||
|
||||
# Variable types treated as credential-bearing.
SENSITIVE_TYPES = {"secret", "encrypted"}


class project_environment_production_vars_not_in_preview(Check):
    """Ensure sensitive production variables are not shared with preview builds.

    Secret/encrypted variables that target "production" must not also
    target "preview", because preview deployments can run untrusted
    pull-request code with access to those values.
    """

    def execute(self) -> List[CheckReportVercel]:
        """Run the check against every collected project.

        Returns:
            List[CheckReportVercel]: One report per Vercel project.
        """
        reports = []
        for project in project_client.projects.values():
            finding = CheckReportVercel(metadata=self.metadata(), resource=project)

            leaking_keys = []
            for env_var in project.environment_variables:
                # Only sensitive variable types are in scope for this check.
                if env_var.type not in SENSITIVE_TYPES:
                    continue
                targets = {t.lower() for t in env_var.target}
                if {"production", "preview"} <= targets:
                    leaking_keys.append(env_var.key)

            if leaking_keys:
                finding.status = "FAIL"
                finding.status_extended = (
                    f"Project {project.name} has {len(leaking_keys)} sensitive "
                    f"production environment variable(s) also targeting preview: "
                    f"{', '.join(leaking_keys)}."
                )
            else:
                finding.status = "PASS"
                finding.status_extended = (
                    f"Project {project.name} has no sensitive production environment "
                    f"variables leaking to preview deployments."
                )

            reports.append(finding)

        return reports
|
||||
@@ -0,0 +1,36 @@
|
||||
{
|
||||
"Provider": "vercel",
|
||||
"CheckID": "project_git_fork_protection_enabled",
|
||||
"CheckTitle": "Vercel project has Git fork protection enabled to prevent untrusted forks from accessing secrets",
|
||||
"CheckType": [],
|
||||
"ServiceName": "project",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "high",
|
||||
"ResourceType": "NotDefined",
|
||||
"ResourceGroup": "devops",
|
||||
"Description": "**Vercel projects** are assessed for **Git fork protection** configuration, which controls whether pull requests from forked repositories can trigger deployments and access environment variables. When disabled, anyone who forks a public repository can submit a pull request that triggers a Vercel build with access to the project's environment variables, including secrets and API keys.",
|
||||
"Risk": "Without **Git fork protection**, an attacker can fork a public repository, modify the build process to **exfiltrate environment variables** (API keys, database credentials, third-party tokens), and submit a pull request. The Vercel build triggered by the PR would execute the attacker's code with access to the project's secrets, leading to **credential theft** and potential full system compromise.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://vercel.com/docs/security/deployment-protection/managing-deployment-protection#git-fork-protection"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Sign in to the Vercel dashboard\n2. Navigate to the project Settings > General\n3. Scroll to the 'Git Fork Protection' section\n4. Enable the option to require authorization for fork pull requests\n5. Click Save",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Enable Git fork protection to require explicit authorization before pull requests from forked repositories can trigger deployments. This prevents untrusted contributors from accessing environment variables and secrets through the build process. For open-source projects, review fork PRs manually before allowing builds.",
|
||||
"Url": "https://hub.prowler.com/checks/vercel/project_git_fork_protection_enabled"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"internet-exposed"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -0,0 +1,43 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportVercel
|
||||
from prowler.providers.vercel.services.project.project_client import project_client
|
||||
|
||||
|
||||
class project_git_fork_protection_enabled(Check):
    """Verify that Git fork protection is enabled on each Vercel project.

    Projects without fork protection allow pull requests from untrusted
    forks to trigger builds that can read the project's environment
    variables, so each project is reported PASS only when the
    ``git_fork_protection`` flag is set.
    """

    def execute(self) -> List[CheckReportVercel]:
        """Run the check against every project known to the project client.

        Returns:
            List[CheckReportVercel]: One report per project, with status
            PASS when Git fork protection is enabled and FAIL otherwise.
        """
        reports: List[CheckReportVercel] = []

        for vercel_project in project_client.projects.values():
            report = CheckReportVercel(
                metadata=self.metadata(), resource=vercel_project
            )

            # Truthiness of the flag decides the outcome, matching the
            # provider model's representation of the setting.
            protected = vercel_project.git_fork_protection
            report.status = "PASS" if protected else "FAIL"
            report.status_extended = (
                (
                    f"Project {vercel_project.name} has Git fork protection "
                    "enabled, preventing untrusted forks from accessing secrets."
                )
                if protected
                else (
                    f"Project {vercel_project.name} does not have Git fork "
                    "protection enabled, allowing forks to access environment "
                    "variables and trigger deployments."
                )
            )

            reports.append(report)

        return reports
|
||||
@@ -0,0 +1,38 @@
|
||||
{
|
||||
"Provider": "vercel",
|
||||
"CheckID": "project_password_protection_enabled",
|
||||
"CheckTitle": "Vercel project has password protection configured for deployments",
|
||||
"CheckType": [],
|
||||
"ServiceName": "project",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "NotDefined",
|
||||
"ResourceGroup": "security",
|
||||
"Description": "**Vercel projects** are assessed for **password protection** configuration, which adds a shared-password gate in front of deployments requiring visitors to enter a password before they can access the application. This provides an additional layer of access control beyond Vercel Authentication, useful for sharing preview deployments with external stakeholders who do not have Vercel accounts.",
|
||||
"Risk": "Without **password protection**, deployments are accessible to anyone who has the URL. For projects that contain pre-release features, client work, or sensitive content, this means **unauthorized individuals** can view and interact with the application without any authentication barrier.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://vercel.com/docs/security/deployment-protection/methods-to-protect-deployments/password-protection"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Sign in to the Vercel dashboard\n2. Navigate to the project Settings > General\n3. Scroll to the 'Password Protection' section\n4. Enable Password Protection and set a strong shared password\n5. Click Save\n6. Share the password only with authorized stakeholders",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Enable password protection to add a shared-password gate to your deployments. This is especially recommended for preview deployments shared with external clients or stakeholders who do not have Vercel accounts. Combine with Vercel Authentication for defense-in-depth.",
|
||||
"Url": "https://hub.prowler.com/checks/vercel/project_password_protection_enabled"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"internet-exposed"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [
|
||||
"project_deployment_protection_enabled"
|
||||
],
|
||||
"Notes": ""
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user