Compare commits
1 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 81de824a5d |
@@ -72,11 +72,6 @@ provider/vercel:
|
||||
- any-glob-to-any-file: "prowler/providers/vercel/**"
|
||||
- any-glob-to-any-file: "tests/providers/vercel/**"
|
||||
|
||||
provider/okta:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "prowler/providers/okta/**"
|
||||
- any-glob-to-any-file: "tests/providers/okta/**"
|
||||
|
||||
github_actions:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: ".github/workflows/*"
|
||||
@@ -114,8 +109,6 @@ mutelist:
|
||||
- any-glob-to-any-file: "tests/providers/googleworkspace/lib/mutelist/**"
|
||||
- any-glob-to-any-file: "prowler/providers/vercel/lib/mutelist/**"
|
||||
- any-glob-to-any-file: "tests/providers/vercel/lib/mutelist/**"
|
||||
- any-glob-to-any-file: "prowler/providers/okta/lib/mutelist/**"
|
||||
- any-glob-to-any-file: "tests/providers/okta/lib/mutelist/**"
|
||||
|
||||
integration/s3:
|
||||
- changed-files:
|
||||
|
||||
@@ -36,7 +36,6 @@ Please add a detailed description of how to review this PR.
|
||||
|
||||
#### UI
|
||||
- [ ] All issue/task requirements work as expected on the UI
|
||||
- [ ] If this PR adds or updates npm dependencies, include package-health evidence (maintenance, popularity, known vulnerabilities, license, release age) and explain why existing/native alternatives are insufficient.
|
||||
- [ ] Screenshots/Video of the functionality flow (if applicable) - Mobile (X < 640px)
|
||||
- [ ] Screenshots/Video of the functionality flow (if applicable) - Table (640px > X < 1024px)
|
||||
- [ ] Screenshots/Video of the functionality flow (if applicable) - Desktop (X > 1024px)
|
||||
|
||||
@@ -324,30 +324,6 @@ jobs:
|
||||
flags: prowler-py${{ matrix.python-version }}-github
|
||||
files: ./github_coverage.xml
|
||||
|
||||
# Okta Provider
|
||||
- name: Check if Okta files changed
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
id: changed-okta
|
||||
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 # v47.0.5
|
||||
with:
|
||||
files: |
|
||||
./prowler/**/okta/**
|
||||
./tests/**/okta/**
|
||||
./poetry.lock
|
||||
|
||||
- name: Run Okta tests
|
||||
if: steps.changed-okta.outputs.any_changed == 'true'
|
||||
run: poetry run pytest -n auto --cov=./prowler/providers/okta --cov-report=xml:okta_coverage.xml tests/providers/okta
|
||||
|
||||
- name: Upload Okta coverage to Codecov
|
||||
if: steps.changed-okta.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
flags: prowler-py${{ matrix.python-version }}-okta
|
||||
files: ./okta_coverage.xml
|
||||
|
||||
# NHN Provider
|
||||
- name: Check if NHN files changed
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
|
||||
@@ -178,7 +178,7 @@ jobs:
|
||||
run: echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
|
||||
|
||||
- name: Setup pnpm and Next.js cache
|
||||
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
|
||||
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
|
||||
with:
|
||||
path: |
|
||||
${{ env.STORE_PATH }}
|
||||
@@ -198,7 +198,7 @@ jobs:
|
||||
run: pnpm run build
|
||||
|
||||
- name: Cache Playwright browsers
|
||||
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
|
||||
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
|
||||
id: playwright-cache
|
||||
with:
|
||||
path: ~/.cache/ms-playwright
|
||||
|
||||
@@ -113,7 +113,7 @@ jobs:
|
||||
|
||||
- name: Setup pnpm and Next.js cache
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
|
||||
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
|
||||
with:
|
||||
path: |
|
||||
${{ env.STORE_PATH }}
|
||||
@@ -132,10 +132,6 @@ jobs:
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
run: pnpm run healthcheck
|
||||
|
||||
- name: Run pnpm audit
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
run: pnpm run audit
|
||||
|
||||
- name: Run unit tests (all - critical paths changed)
|
||||
if: steps.check-changes.outputs.any_changed == 'true' && steps.critical-changes.outputs.any_changed == 'true'
|
||||
run: |
|
||||
@@ -162,7 +158,7 @@ jobs:
|
||||
- name: Cache Playwright browsers
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
id: playwright-cache
|
||||
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
|
||||
uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
|
||||
with:
|
||||
path: ~/.cache/ms-playwright
|
||||
key: ${{ runner.os }}-playwright-chromium-${{ hashFiles('ui/pnpm-lock.yaml') }}
|
||||
|
||||
@@ -15,7 +15,7 @@ Use these skills for detailed patterns on-demand:
|
||||
|-------|-------------|-----|
|
||||
| `typescript` | Const types, flat interfaces, utility types | [SKILL.md](skills/typescript/SKILL.md) |
|
||||
| `react-19` | No useMemo/useCallback, React Compiler | [SKILL.md](skills/react-19/SKILL.md) |
|
||||
| `nextjs-16` | App Router, Server Actions, proxy.ts, streaming | [SKILL.md](skills/nextjs-16/SKILL.md) |
|
||||
| `nextjs-15` | App Router, Server Actions, streaming | [SKILL.md](skills/nextjs-15/SKILL.md) |
|
||||
| `tailwind-4` | cn() utility, no var() in className | [SKILL.md](skills/tailwind-4/SKILL.md) |
|
||||
| `playwright` | Page Object Model, MCP workflow, selectors | [SKILL.md](skills/playwright/SKILL.md) |
|
||||
| `pytest` | Fixtures, mocking, markers, parametrize | [SKILL.md](skills/pytest/SKILL.md) |
|
||||
@@ -60,14 +60,11 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
|
||||
|--------|-------|
|
||||
| Add changelog entry for a PR or feature | `prowler-changelog` |
|
||||
| Adding DRF pagination or permissions | `django-drf` |
|
||||
| Adding a compliance output formatter (per-provider class + table dispatcher) | `prowler-compliance` |
|
||||
| Adding indexes or constraints to database tables | `django-migration-psql` |
|
||||
| Adding new providers | `prowler-provider` |
|
||||
| Adding privilege escalation detection queries | `prowler-attack-paths-query` |
|
||||
| Adding services to existing providers | `prowler-provider` |
|
||||
| After creating/modifying a skill | `skill-sync` |
|
||||
| App Router / Server Actions | `nextjs-16` |
|
||||
| Auditing check-to-requirement mappings as a cloud auditor | `prowler-compliance` |
|
||||
| App Router / Server Actions | `nextjs-15` |
|
||||
| Building AI chat features | `ai-sdk-5` |
|
||||
| Committing changes | `prowler-commit` |
|
||||
| Configuring MCP servers in agentic workflows | `gh-aw` |
|
||||
@@ -81,7 +78,6 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
|
||||
| Creating a git commit | `prowler-commit` |
|
||||
| Creating new checks | `prowler-sdk-check` |
|
||||
| Creating new skills | `skill-creator` |
|
||||
| Creating or reviewing Django migrations | `django-migration-psql` |
|
||||
| Creating/modifying Prowler UI components | `prowler-ui` |
|
||||
| Creating/modifying models, views, serializers | `prowler-api` |
|
||||
| Creating/updating compliance frameworks | `prowler-compliance` |
|
||||
@@ -89,7 +85,6 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
|
||||
| Debugging gh-aw compilation errors | `gh-aw` |
|
||||
| Fill .github/pull_request_template.md (Context/Description/Steps to review/Checklist) | `prowler-pr` |
|
||||
| Fixing bug | `tdd` |
|
||||
| Fixing compliance JSON bugs (duplicate IDs, empty Section, stale refs) | `prowler-compliance` |
|
||||
| General Prowler development questions | `prowler` |
|
||||
| Implementing JSON:API endpoints | `django-drf` |
|
||||
| Implementing feature | `tdd` |
|
||||
@@ -107,8 +102,6 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
|
||||
| Review changelog format and conventions | `prowler-changelog` |
|
||||
| Reviewing JSON:API compliance | `jsonapi` |
|
||||
| Reviewing compliance framework PRs | `prowler-compliance-review` |
|
||||
| Running makemigrations or pgmakemigrations | `django-migration-psql` |
|
||||
| Syncing compliance framework with upstream catalog | `prowler-compliance` |
|
||||
| Testing RLS tenant isolation | `prowler-test-api` |
|
||||
| Testing hooks or utilities | `vitest` |
|
||||
| Troubleshoot why a skill is missing from AGENTS.md auto-invoke | `skill-sync` |
|
||||
@@ -136,7 +129,6 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
|
||||
| Writing React components | `react-19` |
|
||||
| Writing TypeScript types/interfaces | `typescript` |
|
||||
| Writing Vitest tests | `vitest` |
|
||||
| Writing data backfill or data migration | `django-migration-psql` |
|
||||
| Writing documentation | `prowler-docs` |
|
||||
| Writing unit tests for UI | `vitest` |
|
||||
|
||||
@@ -150,7 +142,7 @@ Prowler is an open-source cloud security assessment tool supporting AWS, Azure,
|
||||
|-----------|----------|------------|
|
||||
| SDK | `prowler/` | Python 3.10+, Poetry 2.3+ |
|
||||
| API | `api/` | Django 5.1, DRF, Celery |
|
||||
| UI | `ui/` | Next.js 16, React 19, Tailwind 4 |
|
||||
| UI | `ui/` | Next.js 15, React 19, Tailwind 4 |
|
||||
| MCP Server | `mcp_server/` | FastMCP, Python 3.12+ |
|
||||
| Dashboard | `dashboard/` | Dash, Plotly |
|
||||
|
||||
|
||||
@@ -117,10 +117,9 @@ Every AWS provider scan will enqueue an Attack Paths ingestion job automatically
|
||||
| MongoDB Atlas | 10 | 3 | 0 | 8 | Official | UI, API, CLI |
|
||||
| LLM | [See `promptfoo` docs.](https://www.promptfoo.dev/docs/red-team/plugins/) | N/A | N/A | N/A | Official | CLI |
|
||||
| Image | N/A | N/A | N/A | N/A | Official | CLI, API |
|
||||
| Google Workspace | 25 | 4 | 2 | 4 | Official | UI, API, CLI |
|
||||
| Google Workspace | 25 | 4 | 2 | 4 | Official | CLI |
|
||||
| OpenStack | 34 | 5 | 0 | 9 | Official | UI, API, CLI |
|
||||
| Vercel | 26 | 6 | 0 | 5 | Official | UI, API, CLI |
|
||||
| Okta | 1 | 1 | 0 | 1 | Official | CLI |
|
||||
| Vercel | 26 | 6 | 0 | 5 | Official | CLI |
|
||||
| NHN | 6 | 2 | 1 | 0 | Unofficial | CLI |
|
||||
|
||||
> [!Note]
|
||||
|
||||
@@ -14,14 +14,6 @@ All notable changes to the **Prowler API** are documented in this file.
|
||||
|
||||
---
|
||||
|
||||
## [1.27.2] (Prowler UNRELEASED)
|
||||
|
||||
### 🐞 Fixed
|
||||
|
||||
- Attack Paths: BEDROCK-001 and BEDROCK-002 now target roles trusting `bedrock-agentcore.amazonaws.com` instead of `bedrock.amazonaws.com`, eliminating false positives against regular Bedrock service roles (Agents, Knowledge Bases, model invocation) [(#11141)](https://github.com/prowler-cloud/prowler/pull/11141)
|
||||
|
||||
---
|
||||
|
||||
## [1.27.1] (Prowler v5.26.1)
|
||||
|
||||
### 🐞 Fixed
|
||||
|
||||
@@ -484,8 +484,8 @@ AWS_BEDROCK_PRIVESC_PASSROLE_CODE_INTERPRETER = AttackPathsQueryDefinition(
|
||||
OR action = '*'
|
||||
)
|
||||
|
||||
// Find roles that trust the Bedrock AgentCore service (can be passed to a code interpreter)
|
||||
MATCH path_target = (aws)--(target_role:AWSRole)-[:TRUSTS_AWS_PRINCIPAL]->(:AWSPrincipal {{arn: 'bedrock-agentcore.amazonaws.com'}})
|
||||
// Find roles that trust Bedrock service (can be passed to Bedrock)
|
||||
MATCH path_target = (aws)--(target_role:AWSRole)-[:TRUSTS_AWS_PRINCIPAL]->(:AWSPrincipal {{arn: 'bedrock.amazonaws.com'}})
|
||||
WHERE any(resource IN stmt_passrole.resource WHERE
|
||||
resource = '*'
|
||||
OR target_role.arn CONTAINS resource
|
||||
@@ -536,8 +536,8 @@ AWS_BEDROCK_PRIVESC_INVOKE_CODE_INTERPRETER = AttackPathsQueryDefinition(
|
||||
OR action = '*'
|
||||
)
|
||||
|
||||
// Find roles that trust the Bedrock AgentCore service (already attached to existing code interpreters)
|
||||
MATCH path_target = (aws)--(target_role:AWSRole)-[:TRUSTS_AWS_PRINCIPAL]->(:AWSPrincipal {{arn: 'bedrock-agentcore.amazonaws.com'}})
|
||||
// Find roles that trust Bedrock service (already attached to existing code interpreters)
|
||||
MATCH path_target = (aws)--(target_role:AWSRole)-[:TRUSTS_AWS_PRINCIPAL]->(:AWSPrincipal {{arn: 'bedrock.amazonaws.com'}})
|
||||
|
||||
WITH collect(path_principal) + collect(path_target) AS paths
|
||||
UNWIND paths AS p
|
||||
|
||||
@@ -1,335 +0,0 @@
|
||||
# AWS Inventory Connectivity Graph
|
||||
|
||||
A community-contributed tool that generates interactive connectivity graphs from Prowler AWS scans, visualizing relationships between AWS resources with zero additional API calls.
|
||||
|
||||
## Overview
|
||||
|
||||
This tool extends Prowler by producing two artifacts after a scan completes:
|
||||
|
||||
- **`<output>.inventory.json`** – Machine-readable graph (nodes + edges)
|
||||
- **`<output>.inventory.html`** – Interactive D3.js force-directed visualization
|
||||
|
||||
### Why?
|
||||
|
||||
Prowler's existing outputs (CSV, ASFF, OCSF, HTML) report individual check findings but provide no cross-service topology view. Security engineers need to understand **how** resources are connected—which Lambda functions sit inside which VPC, which IAM roles can be assumed by which services, which event sources trigger which functions—before they can reason about attack paths, blast-radius, or lateral-movement risk.
|
||||
|
||||
This tool fills that gap by building a connectivity graph from the service clients that are already loaded during a Prowler scan.
|
||||
|
||||
## Features
|
||||
|
||||
### Supported AWS Services
|
||||
|
||||
The tool currently extracts connectivity information from:
|
||||
|
||||
- **Lambda** – Functions, VPC/subnet/SG edges, event source mappings, layers, DLQ, KMS
|
||||
- **EC2** – Instances, security groups, subnet/VPC edges
|
||||
- **VPC** – VPCs, subnets, peering connections
|
||||
- **RDS** – DB instances, VPC/SG/cluster/KMS edges
|
||||
- **ELBv2** – ALB/NLB load balancers, SG and VPC edges
|
||||
- **S3** – Buckets, replication targets, logging buckets, KMS keys
|
||||
- **IAM** – Roles, trust-relationship edges (who can assume what)
|
||||
|
||||
### Edge Semantic Types
|
||||
|
||||
Edges are typed for downstream filtering and attack-path analysis:
|
||||
|
||||
- `network` – Resources share a network path (VPC/subnet/SG)
|
||||
- `iam` – IAM trust or permission relationship
|
||||
- `triggers` – One resource can invoke another (event source → Lambda)
|
||||
- `data_flow` – Data is written/read (Lambda → SQS dead-letter queue)
|
||||
- `depends_on` – Soft dependency (Lambda layer, subnet belongs to VPC)
|
||||
- `routes_to` – Traffic routing (LB → target)
|
||||
- `replicates_to` – S3 replication
|
||||
- `encrypts` – KMS key encrypts the resource
|
||||
- `logs_to` – Logging relationship
|
||||
|
||||
### Interactive HTML Graph Features
|
||||
|
||||
- Force-directed layout with drag-and-drop node pinning
|
||||
- Zoom / pan (mouse wheel + click-drag on background)
|
||||
- Per-service color-coded nodes with a legend
|
||||
- Hover tooltips showing ARN + all metadata properties
|
||||
- Service filter dropdown (show only Lambda, EC2, RDS, etc.)
|
||||
- Adjustable link-distance and charge-strength physics sliders
|
||||
- Edge labels on every arrow
|
||||
|
||||
## Installation
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Python 3.9.1 or higher
|
||||
- Prowler installed and configured (see [Prowler documentation](https://docs.prowler.com/))
|
||||
|
||||
### Setup
|
||||
|
||||
1. Clone or download this directory to your local machine
|
||||
2. Ensure Prowler is installed and working
|
||||
3. No additional dependencies required beyond Prowler's existing requirements
|
||||
|
||||
## Usage
|
||||
|
||||
### Basic Usage
|
||||
|
||||
Run Prowler with your desired checks, then use the inventory graph script:
|
||||
|
||||
```bash
|
||||
# Run Prowler scan (example)
|
||||
prowler aws --output-formats csv
|
||||
|
||||
# Generate inventory graph from the scan
|
||||
python contrib/inventory-graph/inventory_graph.py --output-directory ./output
|
||||
```
|
||||
|
||||
### Command-Line Options
|
||||
|
||||
```bash
|
||||
python contrib/inventory-graph/inventory_graph.py [OPTIONS]
|
||||
|
||||
Options:
|
||||
--output-directory DIR Directory to save output files (default: ./output)
|
||||
--output-filename NAME Base filename without extension (default: prowler-inventory-<timestamp>)
|
||||
--help Show this help message and exit
|
||||
```
|
||||
|
||||
### Example Workflow
|
||||
|
||||
```bash
|
||||
# 1. Run a Prowler scan on your AWS account
|
||||
prowler aws --profile my-aws-profile --output-formats csv html
|
||||
|
||||
# 2. Generate the inventory graph
|
||||
python contrib/inventory-graph/inventory_graph.py \
|
||||
--output-directory ./output \
|
||||
--output-filename my-aws-inventory
|
||||
|
||||
# 3. Open the HTML file in your browser
|
||||
open output/my-aws-inventory.inventory.html
|
||||
```
|
||||
|
||||
### Integration with Prowler Scans
|
||||
|
||||
The tool reads from already-loaded AWS service clients in memory (via `sys.modules`). This means:
|
||||
|
||||
- **Zero extra AWS API calls** – Uses data already collected during the Prowler scan
|
||||
- **Graceful degradation** – Services not scanned are silently skipped
|
||||
- **Flexible** – Works with any subset of Prowler checks
|
||||
|
||||
## Output Files
|
||||
|
||||
### JSON Output (`*.inventory.json`)
|
||||
|
||||
Machine-readable graph structure:
|
||||
|
||||
```json
|
||||
{
|
||||
"generated_at": "2026-03-19T12:34:56Z",
|
||||
"nodes": [
|
||||
{
|
||||
"id": "arn:aws:lambda:us-east-1:123456789012:function:my-function",
|
||||
"type": "lambda_function",
|
||||
"name": "my-function",
|
||||
"service": "lambda",
|
||||
"region": "us-east-1",
|
||||
"account_id": "123456789012",
|
||||
"properties": {
|
||||
"runtime": "python3.9",
|
||||
"vpc_id": "vpc-abc123"
|
||||
}
|
||||
}
|
||||
],
|
||||
"edges": [
|
||||
{
|
||||
"source_id": "arn:aws:lambda:...",
|
||||
"target_id": "arn:aws:ec2:...:vpc/vpc-abc123",
|
||||
"edge_type": "network",
|
||||
"label": "in-vpc"
|
||||
}
|
||||
],
|
||||
"stats": {
|
||||
"node_count": 42,
|
||||
"edge_count": 87
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### HTML Output (`*.inventory.html`)
|
||||
|
||||
Self-contained interactive visualization that opens in any modern browser. No server or build step required.
|
||||
|
||||
## Architecture
|
||||
|
||||
### Design Decisions
|
||||
|
||||
| Decision | Rationale |
|
||||
|----------|-----------|
|
||||
| **Read from sys.modules** | Zero extra AWS API calls; services not scanned are silently skipped |
|
||||
| **Self-contained HTML** | D3.js v7 via CDN; no server, no build step; opens in any browser |
|
||||
| **One extractor per service** | Each extractor is independently testable; adding a new service = one new file + one line in the registry |
|
||||
| **Typed edges** | Semantic types allow downstream consumers (attack-path tools, Neo4j import) to filter by relationship class |
|
||||
|
||||
### Project Structure
|
||||
|
||||
```
|
||||
contrib/inventory-graph/
|
||||
├── README.md # This file
|
||||
├── inventory_graph.py # Main entry point script
|
||||
├── lib/
|
||||
│ ├── __init__.py
|
||||
│ ├── models.py # ResourceNode, ResourceEdge, ConnectivityGraph dataclasses
|
||||
│ ├── graph_builder.py # Reads loaded service clients from sys.modules
|
||||
│ ├── inventory_output.py # write_json(), write_html()
|
||||
│ └── extractors/
|
||||
│ ├── __init__.py
|
||||
│ ├── lambda_extractor.py # Lambda functions → VPC/subnet/SG/event-sources/layers/DLQ/KMS
|
||||
│ ├── ec2_extractor.py # EC2 instances + security groups → subnet/VPC
|
||||
│ ├── vpc_extractor.py # VPCs, subnets, peering connections
|
||||
│ ├── rds_extractor.py # RDS instances → VPC/SG/cluster/KMS
|
||||
│ ├── elbv2_extractor.py # ALB/NLB load balancers → SG/VPC
|
||||
│ ├── s3_extractor.py # S3 buckets → replication targets/logging buckets/KMS keys
|
||||
│ └── iam_extractor.py # IAM roles + trust-relationship edges
|
||||
└── examples/
|
||||
└── sample_output.html # Example output (optional)
|
||||
```
|
||||
|
||||
## Testing
|
||||
|
||||
### Smoke Test (No AWS Credentials Needed)
|
||||
|
||||
```python
|
||||
import sys
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
# Wire a fake Lambda client
|
||||
mock_module = MagicMock()
|
||||
mock_fn = MagicMock()
|
||||
mock_fn.arn = "arn:aws:lambda:us-east-1:123:function:test"
|
||||
mock_fn.name = "test"
|
||||
mock_fn.region = "us-east-1"
|
||||
mock_fn.vpc_id = "vpc-abc"
|
||||
mock_fn.security_groups = ["sg-111"]
|
||||
mock_fn.subnet_ids = {"subnet-aaa"}
|
||||
mock_fn.environment = None
|
||||
mock_fn.kms_key_arn = None
|
||||
mock_fn.layers = []
|
||||
mock_fn.dead_letter_config = None
|
||||
mock_fn.event_source_mappings = []
|
||||
mock_module.awslambda_client.functions = {mock_fn.arn: mock_fn}
|
||||
mock_module.awslambda_client.audited_account = "123"
|
||||
sys.modules["prowler.providers.aws.services.awslambda.awslambda_client"] = mock_module
|
||||
|
||||
from contrib.inventory_graph.lib.graph_builder import build_graph
|
||||
from contrib.inventory_graph.lib.inventory_output import write_json, write_html
|
||||
|
||||
graph = build_graph()
|
||||
write_json(graph, "/tmp/test.inventory.json")
|
||||
write_html(graph, "/tmp/test.inventory.html")
|
||||
# Open /tmp/test.inventory.html in a browser
|
||||
```
|
||||
|
||||
## Extending
|
||||
|
||||
### Adding a New Service
|
||||
|
||||
1. Create a new extractor file in `lib/extractors/` (e.g., `dynamodb_extractor.py`)
|
||||
2. Implement the `extract(client)` function that returns `(nodes, edges)`
|
||||
3. Register it in `lib/graph_builder.py` in the `_SERVICE_REGISTRY` tuple
|
||||
|
||||
Example extractor template:
|
||||
|
||||
```python
|
||||
from typing import List, Tuple
|
||||
from prowler.lib.outputs.inventory.models import ResourceNode, ResourceEdge
|
||||
|
||||
def extract(client) -> Tuple[List[ResourceNode], List[ResourceEdge]]:
|
||||
"""Extract DynamoDB tables and their relationships."""
|
||||
nodes = []
|
||||
edges = []
|
||||
|
||||
for table in client.tables:
|
||||
nodes.append(
|
||||
ResourceNode(
|
||||
id=table.arn,
|
||||
type="dynamodb_table",
|
||||
name=table.name,
|
||||
service="dynamodb",
|
||||
region=table.region,
|
||||
account_id=client.audited_account,
|
||||
properties={"billing_mode": table.billing_mode}
|
||||
)
|
||||
)
|
||||
|
||||
# Add edges for KMS encryption, streams, etc.
|
||||
if table.kms_key_arn:
|
||||
edges.append(
|
||||
ResourceEdge(
|
||||
source_id=table.kms_key_arn,
|
||||
target_id=table.arn,
|
||||
edge_type="encrypts",
|
||||
label="encrypts"
|
||||
)
|
||||
)
|
||||
|
||||
return nodes, edges
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### No nodes discovered
|
||||
|
||||
**Problem:** The tool reports "no nodes discovered" after running.
|
||||
|
||||
**Solution:** Ensure you've run a Prowler scan first. The tool reads from in-memory service clients loaded during the scan. If no services were scanned, no nodes will be discovered.
|
||||
|
||||
### Missing services in the graph
|
||||
|
||||
**Problem:** Some AWS services are not appearing in the graph.
|
||||
|
||||
**Solution:** The tool only includes services that have been scanned by Prowler. Run Prowler with the services you want to include, or run without service filters to scan all available services.
|
||||
|
||||
### HTML file doesn't display properly
|
||||
|
||||
**Problem:** The HTML visualization doesn't load or shows errors.
|
||||
|
||||
**Solution:**
|
||||
- Ensure you're opening the file in a modern browser (Chrome, Firefox, Safari, Edge)
|
||||
- Check your browser's console for JavaScript errors
|
||||
- Verify the file was generated completely (check file size > 0)
|
||||
- The HTML requires internet access to load D3.js from CDN
|
||||
|
||||
## Roadmap
|
||||
|
||||
Potential future enhancements:
|
||||
|
||||
- [ ] Support for additional AWS services (DynamoDB, SQS, SNS, etc.)
|
||||
- [ ] Export to Neo4j / Cartography format
|
||||
- [ ] Attack path analysis integration
|
||||
- [ ] Multi-account/multi-region aggregation
|
||||
- [ ] Custom edge type filtering in HTML UI
|
||||
- [ ] Graph diff between two scans
|
||||
|
||||
## Contributing
|
||||
|
||||
This is a community contribution. If you'd like to enhance it:
|
||||
|
||||
1. Fork the Prowler repository
|
||||
2. Make your changes in `contrib/inventory-graph/`
|
||||
3. Test thoroughly
|
||||
4. Submit a pull request with a clear description
|
||||
|
||||
## License
|
||||
|
||||
This tool is part of the Prowler project and is licensed under the Apache License 2.0.
|
||||
|
||||
## Credits
|
||||
|
||||
- **Author:** [@sandiyochristan](https://github.com/sandiyochristan)
|
||||
- **Related PR:** [#10382](https://github.com/prowler-cloud/prowler/pull/10382)
|
||||
- **Prowler Project:** [prowler-cloud/prowler](https://github.com/prowler-cloud/prowler)
|
||||
|
||||
## Support
|
||||
|
||||
For issues or questions:
|
||||
|
||||
- Open an issue in the [Prowler repository](https://github.com/prowler-cloud/prowler/issues)
|
||||
- Join the [Prowler Community Slack](https://goto.prowler.com/slack)
|
||||
- Tag your issue with `contrib:inventory-graph`
|
||||
@@ -1,181 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Example: Generate AWS Inventory Graph with Mock Data
|
||||
|
||||
This example demonstrates how to use the inventory graph tool with mock AWS data.
|
||||
No AWS credentials required.
|
||||
"""
|
||||
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
# Add parent directory to path
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent))
|
||||
|
||||
from lib.graph_builder import build_graph
|
||||
from lib.inventory_output import write_json, write_html
|
||||
|
||||
|
||||
def create_mock_lambda_client():
|
||||
"""Create a mock Lambda client with sample data."""
|
||||
mock_module = MagicMock()
|
||||
|
||||
# Create a mock Lambda function
|
||||
mock_fn = MagicMock()
|
||||
mock_fn.arn = "arn:aws:lambda:us-east-1:123456789012:function:my-test-function"
|
||||
mock_fn.name = "my-test-function"
|
||||
mock_fn.region = "us-east-1"
|
||||
mock_fn.vpc_id = "vpc-abc123"
|
||||
mock_fn.security_groups = ["sg-111222"]
|
||||
mock_fn.subnet_ids = {"subnet-aaa111", "subnet-bbb222"}
|
||||
mock_fn.environment = {"Variables": {"ENV": "production"}}
|
||||
mock_fn.kms_key_arn = (
|
||||
"arn:aws:kms:us-east-1:123456789012:key/12345678-1234-1234-1234-123456789012"
|
||||
)
|
||||
mock_fn.layers = []
|
||||
mock_fn.dead_letter_config = None
|
||||
mock_fn.event_source_mappings = []
|
||||
|
||||
mock_module.awslambda_client.functions = {mock_fn.arn: mock_fn}
|
||||
mock_module.awslambda_client.audited_account = "123456789012"
|
||||
|
||||
return mock_module
|
||||
|
||||
|
||||
def create_mock_ec2_client():
|
||||
"""Create a mock EC2 client with sample data."""
|
||||
mock_module = MagicMock()
|
||||
|
||||
# Create a mock EC2 instance
|
||||
mock_instance = MagicMock()
|
||||
mock_instance.arn = (
|
||||
"arn:aws:ec2:us-east-1:123456789012:instance/i-1234567890abcdef0"
|
||||
)
|
||||
mock_instance.id = "i-1234567890abcdef0"
|
||||
mock_instance.region = "us-east-1"
|
||||
mock_instance.vpc_id = "vpc-abc123"
|
||||
mock_instance.subnet_id = "subnet-aaa111"
|
||||
mock_instance.security_groups = [MagicMock(id="sg-111222")]
|
||||
mock_instance.state = "running"
|
||||
mock_instance.type = "t3.micro"
|
||||
mock_instance.tags = [{"Key": "Name", "Value": "test-instance"}]
|
||||
|
||||
# Create a mock security group
|
||||
mock_sg = MagicMock()
|
||||
mock_sg.arn = "arn:aws:ec2:us-east-1:123456789012:security-group/sg-111222"
|
||||
mock_sg.id = "sg-111222"
|
||||
mock_sg.name = "test-security-group"
|
||||
mock_sg.region = "us-east-1"
|
||||
mock_sg.vpc_id = "vpc-abc123"
|
||||
|
||||
mock_module.ec2_client.instances = [mock_instance]
|
||||
mock_module.ec2_client.security_groups = [mock_sg]
|
||||
mock_module.ec2_client.audited_account = "123456789012"
|
||||
|
||||
return mock_module
|
||||
|
||||
|
||||
def create_mock_vpc_client():
|
||||
"""Create a mock VPC client with sample data."""
|
||||
mock_module = MagicMock()
|
||||
|
||||
# Create a mock VPC
|
||||
mock_vpc = MagicMock()
|
||||
mock_vpc.arn = "arn:aws:ec2:us-east-1:123456789012:vpc/vpc-abc123"
|
||||
mock_vpc.id = "vpc-abc123"
|
||||
mock_vpc.region = "us-east-1"
|
||||
mock_vpc.cidr_block = "10.0.0.0/16"
|
||||
mock_vpc.tags = [{"Key": "Name", "Value": "test-vpc"}]
|
||||
|
||||
# Create mock subnets
|
||||
mock_subnet1 = MagicMock()
|
||||
mock_subnet1.arn = "arn:aws:ec2:us-east-1:123456789012:subnet/subnet-aaa111"
|
||||
mock_subnet1.id = "subnet-aaa111"
|
||||
mock_subnet1.region = "us-east-1"
|
||||
mock_subnet1.vpc_id = "vpc-abc123"
|
||||
mock_subnet1.cidr_block = "10.0.1.0/24"
|
||||
mock_subnet1.availability_zone = "us-east-1a"
|
||||
|
||||
mock_subnet2 = MagicMock()
|
||||
mock_subnet2.arn = "arn:aws:ec2:us-east-1:123456789012:subnet/subnet-bbb222"
|
||||
mock_subnet2.id = "subnet-bbb222"
|
||||
mock_subnet2.region = "us-east-1"
|
||||
mock_subnet2.vpc_id = "vpc-abc123"
|
||||
mock_subnet2.cidr_block = "10.0.2.0/24"
|
||||
mock_subnet2.availability_zone = "us-east-1b"
|
||||
|
||||
mock_module.vpc_client.vpcs = [mock_vpc]
|
||||
mock_module.vpc_client.subnets = [mock_subnet1, mock_subnet2]
|
||||
mock_module.vpc_client.vpc_peering_connections = []
|
||||
mock_module.vpc_client.audited_account = "123456789012"
|
||||
|
||||
return mock_module
|
||||
|
||||
|
||||
def main():
|
||||
"""Main function to demonstrate the inventory graph generation."""
|
||||
print("=" * 70)
|
||||
print("AWS Inventory Graph - Mock Data Example")
|
||||
print("=" * 70)
|
||||
print()
|
||||
|
||||
# Create mock clients and inject them into sys.modules
|
||||
print("Creating mock AWS service clients...")
|
||||
sys.modules["prowler.providers.aws.services.awslambda.awslambda_client"] = (
|
||||
create_mock_lambda_client()
|
||||
)
|
||||
sys.modules["prowler.providers.aws.services.ec2.ec2_client"] = (
|
||||
create_mock_ec2_client()
|
||||
)
|
||||
sys.modules["prowler.providers.aws.services.vpc.vpc_client"] = (
|
||||
create_mock_vpc_client()
|
||||
)
|
||||
print("✓ Mock clients created")
|
||||
print()
|
||||
|
||||
# Build the graph
|
||||
print("Building connectivity graph...")
|
||||
graph = build_graph()
|
||||
print(f"✓ Graph built: {len(graph.nodes)} nodes, {len(graph.edges)} edges")
|
||||
print()
|
||||
|
||||
# Display discovered nodes
|
||||
print("Discovered nodes:")
|
||||
for node in graph.nodes:
|
||||
print(f" - {node.type}: {node.name} ({node.region})")
|
||||
print()
|
||||
|
||||
# Display discovered edges
|
||||
print("Discovered edges:")
|
||||
for edge in graph.edges:
|
||||
source_node = next((n for n in graph.nodes if n.id == edge.source_id), None)
|
||||
target_node = next((n for n in graph.nodes if n.id == edge.target_id), None)
|
||||
source_name = source_node.name if source_node else edge.source_id
|
||||
target_name = target_node.name if target_node else edge.target_id
|
||||
print(f" - {source_name} --[{edge.edge_type}]--> {target_name}")
|
||||
print()
|
||||
|
||||
# Write outputs
|
||||
output_dir = Path(__file__).parent
|
||||
json_path = output_dir / "example_output.inventory.json"
|
||||
html_path = output_dir / "example_output.inventory.html"
|
||||
|
||||
print("Writing output files...")
|
||||
write_json(graph, str(json_path))
|
||||
write_html(graph, str(html_path))
|
||||
print(f"✓ JSON written to: {json_path}")
|
||||
print(f"✓ HTML written to: {html_path}")
|
||||
print()
|
||||
|
||||
print("=" * 70)
|
||||
print("✓ Example complete!")
|
||||
print("=" * 70)
|
||||
print()
|
||||
print(f"Open the HTML file to view the interactive graph:")
|
||||
print(f" open {html_path}")
|
||||
print()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -1,158 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
AWS Inventory Connectivity Graph Generator
|
||||
|
||||
A standalone tool that generates interactive connectivity graphs from Prowler AWS scans.
|
||||
This tool reads from already-loaded AWS service clients in memory and produces:
|
||||
- JSON graph (nodes + edges)
|
||||
- Interactive HTML visualization
|
||||
|
||||
Usage:
|
||||
python inventory_graph.py --output-directory ./output --output-filename my-inventory
|
||||
|
||||
For more information, see README.md
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
# Add the contrib directory to the path so we can import the lib modules
|
||||
CONTRIB_DIR = Path(__file__).parent
|
||||
sys.path.insert(0, str(CONTRIB_DIR))
|
||||
|
||||
from lib.graph_builder import build_graph
|
||||
from lib.inventory_output import write_json, write_html
|
||||
|
||||
|
||||
def parse_arguments(argv=None):
    """Parse command-line arguments for the inventory graph generator.

    Args:
        argv: Optional explicit list of argument strings. Defaults to None,
            in which case argparse falls back to ``sys.argv[1:]``. Accepting
            an explicit list keeps the existing call sites working unchanged
            while making the parser testable without mutating global state.

    Returns:
        argparse.Namespace with ``output_directory``, ``output_filename``
        and ``verbose`` attributes.
    """
    parser = argparse.ArgumentParser(
        description="Generate AWS inventory connectivity graph from Prowler scan data",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  # Generate graph with default settings
  python inventory_graph.py

  # Specify custom output directory and filename
  python inventory_graph.py --output-directory ./my-output --output-filename aws-inventory

  # After running a Prowler scan
  prowler aws --profile my-profile
  python inventory_graph.py --output-directory ./output

For more information, see README.md
""",
    )

    parser.add_argument(
        "--output-directory",
        "-o",
        default="./output",
        help="Directory to save output files (default: ./output)",
    )

    parser.add_argument(
        "--output-filename",
        "-f",
        default=None,
        help="Base filename without extension (default: prowler-inventory-<timestamp>)",
    )

    parser.add_argument(
        "--verbose",
        "-v",
        action="store_true",
        help="Enable verbose output",
    )

    return parser.parse_args(argv)
|
||||
|
||||
|
||||
def main():
    """Main entry point for the inventory graph generator.

    Builds the connectivity graph from AWS service clients already loaded in
    this Python process (i.e. after a Prowler scan) and writes a JSON and an
    interactive HTML rendering of it. Exits with status 1 when no nodes are
    discovered, since that means no scan data is available in memory.
    """
    args = parse_arguments()

    # Set up output paths; create the directory tree up front so writes succeed.
    output_dir = Path(args.output_directory)
    output_dir.mkdir(parents=True, exist_ok=True)

    # Generate filename with timestamp if not provided by the user.
    if args.output_filename:
        base_filename = args.output_filename
    else:
        timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
        base_filename = f"prowler-inventory-{timestamp}"

    json_path = output_dir / f"{base_filename}.inventory.json"
    html_path = output_dir / f"{base_filename}.inventory.html"

    print("=" * 70)
    print("AWS Inventory Connectivity Graph Generator")
    print("=" * 70)
    print()

    # Build the graph from loaded service clients (see lib.graph_builder).
    if args.verbose:
        print("Building connectivity graph from loaded AWS service clients...")

    graph = build_graph()

    # Check if any nodes were discovered; an empty graph means no service
    # client modules were found in sys.modules, so there is nothing to draw.
    if not graph.nodes:
        print("⚠️ WARNING: No nodes discovered!")
        print()
        print("This usually means:")
        print(" 1. No Prowler scan has been run yet in this Python session")
        print(" 2. No AWS service clients are loaded in memory")
        print()
        print("To fix this:")
        print(" 1. Run a Prowler scan first: prowler aws --output-formats csv")
        print(" 2. Then run this script in the same session")
        print()
        print(
            "Alternatively, integrate this tool directly into Prowler's output pipeline."
        )
        sys.exit(1)

    print(f"✓ Discovered {len(graph.nodes)} nodes and {len(graph.edges)} edges")
    print()

    # Write outputs (JSON first, then the HTML visualization).
    if args.verbose:
        print(f"Writing JSON output to: {json_path}")
    write_json(graph, str(json_path))

    if args.verbose:
        print(f"Writing HTML output to: {html_path}")
    write_html(graph, str(html_path))

    print()
    print("=" * 70)
    print("✓ Graph generation complete!")
    print("=" * 70)
    print()
    print(f"📄 JSON: {json_path}")
    print(f"🌐 HTML: {html_path}")
    print()
    print(f"Open the HTML file in your browser to explore the interactive graph:")
    print(f" open {html_path}")
    print()
|
||||
|
||||
|
||||
if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        # 130 is the conventional exit status for termination by SIGINT (128 + 2).
        print("\n\nInterrupted by user. Exiting...")
        sys.exit(130)
    except Exception as e:
        # Top-level boundary: report the error; full traceback only in verbose mode.
        print(f"\n❌ Error: {e}", file=sys.stderr)
        if "--verbose" in sys.argv or "-v" in sys.argv:
            import traceback

            traceback.print_exc()
        sys.exit(1)
|
||||
@@ -1,94 +0,0 @@
|
||||
from typing import List, Tuple
|
||||
|
||||
from lib.models import ResourceEdge, ResourceNode
|
||||
|
||||
|
||||
def extract(client) -> Tuple[List[ResourceNode], List[ResourceEdge]]:
    """
    Extract EC2 instance and security-group nodes with their edges.

    Edges produced:
      - instance → security-group [network]
      - instance → subnet [network]
      - security-group → VPC [network]
    """
    nodes: List[ResourceNode] = []
    edges: List[ResourceEdge] = []

    account = client.audited_account

    # EC2 Instances
    for instance in client.instances:
        # Prefer the "Name" tag for display; fall back to the instance id.
        display_name = instance.id
        for tag in instance.tags or []:
            if tag.get("Key") == "Name":
                display_name = tag["Value"]
                break

        raw_props = {
            "instance_type": getattr(instance, "type", None),
            "state": getattr(instance, "state", None),
            "vpc_id": getattr(instance, "vpc_id", None),
            "subnet_id": getattr(instance, "subnet_id", None),
            "public_ip": getattr(instance, "public_ip_address", None),
            "private_ip": getattr(instance, "private_ip_address", None),
        }

        nodes.append(
            ResourceNode(
                id=instance.arn,
                type="ec2_instance",
                name=display_name,
                service="ec2",
                region=instance.region,
                account_id=account,
                # Drop unset attributes so properties only carry real data.
                properties={k: v for k, v in raw_props.items() if v is not None},
            )
        )

        # One network edge per attached security group.
        edges.extend(
            ResourceEdge(
                source_id=instance.arn,
                target_id=sg_id,
                edge_type="network",
                label="sg",
            )
            for sg_id in instance.security_groups or []
        )

        if instance.subnet_id:
            edges.append(
                ResourceEdge(
                    source_id=instance.arn,
                    target_id=instance.subnet_id,
                    edge_type="network",
                    label="subnet",
                )
            )

    # Security Groups
    for sg in client.security_groups.values():
        if hasattr(sg, "name"):
            sg_label = sg.name
        elif hasattr(sg, "id"):
            sg_label = sg.id
        else:
            sg_label = sg.arn

        nodes.append(
            ResourceNode(
                id=sg.arn,
                type="security_group",
                name=sg_label,
                service="ec2",
                region=sg.region,
                account_id=account,
                properties={"vpc_id": sg.vpc_id},
            )
        )

        if sg.vpc_id:
            edges.append(
                ResourceEdge(
                    source_id=sg.arn,
                    target_id=sg.vpc_id,
                    edge_type="network",
                    label="in-vpc",
                )
            )

    return nodes, edges
|
||||
@@ -1,60 +0,0 @@
|
||||
from typing import List, Tuple
|
||||
|
||||
from lib.models import ResourceEdge, ResourceNode
|
||||
|
||||
|
||||
def extract(client) -> Tuple[List[ResourceNode], List[ResourceEdge]]:
    """
    Extract ELBv2 (ALB/NLB) load balancer nodes and their edges.

    Edges produced:
      - load_balancer → security-group [network]
      - load_balancer → VPC [network]
    """
    nodes: List[ResourceNode] = []
    edges: List[ResourceEdge] = []

    for lb in client.loadbalancersv2.values():
        attributes = {
            "type": getattr(lb, "type", None),
            "scheme": getattr(lb, "scheme", None),
            "dns_name": getattr(lb, "dns", None),
            "vpc_id": getattr(lb, "vpc_id", None),
        }

        # Fall back to the name segment of the ARN when no name attribute exists.
        arn_fallback = lb.arn.split("/")[-2] if "/" in lb.arn else lb.arn
        lb_name = getattr(lb, "name", arn_fallback)

        nodes.append(
            ResourceNode(
                id=lb.arn,
                type="load_balancer",
                name=lb_name,
                service="elbv2",
                region=lb.region,
                account_id=client.audited_account,
                properties={
                    key: value
                    for key, value in attributes.items()
                    if value is not None
                },
            )
        )

        # One network edge per attached security group.
        edges.extend(
            ResourceEdge(
                source_id=lb.arn,
                target_id=sg_id,
                edge_type="network",
                label="sg",
            )
            for sg_id in lb.security_groups or []
        )

        lb_vpc = getattr(lb, "vpc_id", None)
        if lb_vpc:
            edges.append(
                ResourceEdge(
                    source_id=lb.arn,
                    target_id=lb_vpc,
                    edge_type="network",
                    label="in-vpc",
                )
            )

    return nodes, edges
|
||||
@@ -1,84 +0,0 @@
|
||||
import json
|
||||
from typing import Any, Dict, List, Tuple
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
from lib.models import ResourceEdge, ResourceNode
|
||||
|
||||
|
||||
def _parse_trust_principals(assume_role_policy: Any) -> List[str]:
|
||||
"""
|
||||
Return a flat list of principal strings from an IAM assume-role policy document.
|
||||
The policy may be a dict already or a JSON string.
|
||||
"""
|
||||
if not assume_role_policy:
|
||||
return []
|
||||
|
||||
if isinstance(assume_role_policy, str):
|
||||
try:
|
||||
assume_role_policy = json.loads(assume_role_policy)
|
||||
except (json.JSONDecodeError, ValueError):
|
||||
return []
|
||||
|
||||
principals = []
|
||||
for statement in assume_role_policy.get("Statement", []):
|
||||
principal = statement.get("Principal", {})
|
||||
if isinstance(principal, str):
|
||||
principals.append(principal)
|
||||
elif isinstance(principal, dict):
|
||||
for v in principal.values():
|
||||
if isinstance(v, list):
|
||||
principals.extend(v)
|
||||
else:
|
||||
principals.append(v)
|
||||
elif isinstance(principal, list):
|
||||
principals.extend(principal)
|
||||
|
||||
return principals
|
||||
|
||||
|
||||
def extract(client) -> Tuple[List[ResourceNode], List[ResourceEdge]]:
    """
    Extract IAM role nodes and their trust-relationship edges.

    IAM is a global service, so every node is emitted with region "global".

    Edges produced:
      - trusted-principal → role [iam] (who can assume this role)
    """
    nodes: List[ResourceNode] = []
    edges: List[ResourceEdge] = []

    for role in client.roles:
        props: Dict[str, Any] = {
            "path": getattr(role, "path", None),
            # Stringified so the value is always JSON-serialisable downstream.
            "create_date": str(getattr(role, "create_date", "") or ""),
        }

        nodes.append(
            ResourceNode(
                id=role.arn,
                type="iam_role",
                name=role.name,
                service="iam",
                region="global",
                account_id=client.audited_account,
                # Falsy values (None, "") are dropped from the node properties.
                properties={k: v for k, v in props.items() if v},
            )
        )

        # Trust-relationship edges: principal → role (principal CAN assume role)
        # The "*" wildcard principal is skipped — it would point at no real node.
        try:
            for principal in _parse_trust_principals(role.assume_role_policy):
                if principal and principal != "*":
                    edges.append(
                        ResourceEdge(
                            source_id=principal,
                            target_id=role.arn,
                            edge_type="iam",
                            label="can-assume",
                        )
                    )
        except Exception as e:
            # Best-effort: a malformed trust policy must not abort the whole
            # extraction, so only log at debug level and continue.
            logger.debug(
                f"inventory iam_extractor: could not parse trust policy for {role.arn}: {e}"
            )

    return nodes, edges
|
||||
@@ -1,118 +0,0 @@
|
||||
from typing import List, Tuple
|
||||
|
||||
from lib.models import ResourceEdge, ResourceNode
|
||||
|
||||
|
||||
def extract(client) -> Tuple[List[ResourceNode], List[ResourceEdge]]:
    """
    Extract Lambda function nodes and their edges from an awslambda_client.

    Edges produced:
      - lambda → VPC [network]
      - lambda → subnet [network]
      - lambda → sg [network]
      - lambda → event-source [triggers] (from EventSourceMapping)
      - lambda → layer ARN [depends_on]
      - lambda → DLQ target [data_flow]
      - lambda → KMS key [encrypts]
    """
    nodes: List[ResourceNode] = []
    edges: List[ResourceEdge] = []

    for fn in client.functions.values():
        props = {
            "runtime": fn.runtime,
            "vpc_id": fn.vpc_id,
        }
        # Only flag the presence of env vars — never copy their (possibly
        # secret) values into the output.
        if fn.environment:
            props["has_env_vars"] = True
        if fn.kms_key_arn:
            props["kms_key_arn"] = fn.kms_key_arn

        nodes.append(
            ResourceNode(
                id=fn.arn,
                type="lambda_function",
                name=fn.name,
                service="lambda",
                region=fn.region,
                account_id=client.audited_account,
                properties=props,
            )
        )

        # Network edges → VPC, subnets, security groups
        if fn.vpc_id:
            edges.append(
                ResourceEdge(
                    source_id=fn.arn,
                    target_id=fn.vpc_id,
                    edge_type="network",
                    label="in-vpc",
                )
            )
        for sg_id in fn.security_groups or []:
            edges.append(
                ResourceEdge(
                    source_id=fn.arn,
                    target_id=sg_id,
                    edge_type="network",
                    label="sg",
                )
            )
        # subnet_ids is presumably a set on the model — hence the `or set()`
        # fallback; any iterable works here.
        for subnet_id in fn.subnet_ids or set():
            edges.append(
                ResourceEdge(
                    source_id=fn.arn,
                    target_id=subnet_id,
                    edge_type="network",
                    label="subnet",
                )
            )

        # Trigger edges from event source mappings (source service → function)
        for esm in getattr(fn, "event_source_mappings", []):
            edges.append(
                ResourceEdge(
                    source_id=esm.event_source_arn,
                    target_id=fn.arn,
                    edge_type="triggers",
                    label=f"esm:{esm.state}",
                )
            )

        # Layer dependency edges
        for layer in getattr(fn, "layers", []):
            edges.append(
                ResourceEdge(
                    source_id=fn.arn,
                    target_id=layer.arn,
                    edge_type="depends_on",
                    label="layer",
                )
            )

        # Dead-letter queue data-flow edge (SQS queue or SNS topic ARN)
        dlq = getattr(fn, "dead_letter_config", None)
        if dlq and dlq.target_arn:
            edges.append(
                ResourceEdge(
                    source_id=fn.arn,
                    target_id=dlq.target_arn,
                    edge_type="data_flow",
                    label="dlq",
                )
            )

        # KMS encryption edge — note the direction: key encrypts function.
        if fn.kms_key_arn:
            edges.append(
                ResourceEdge(
                    source_id=fn.kms_key_arn,
                    target_id=fn.arn,
                    edge_type="encrypts",
                    label="kms",
                )
            )

    return nodes, edges
|
||||
@@ -1,86 +0,0 @@
|
||||
from typing import List, Tuple
|
||||
|
||||
from lib.models import ResourceEdge, ResourceNode
|
||||
|
||||
|
||||
def extract(client) -> Tuple[List[ResourceNode], List[ResourceEdge]]:
    """
    Extract RDS DB instance nodes and their edges.

    Edges produced:
      - db_instance → security-group [network]
      - db_instance → VPC [network]
      - db_instance → cluster [depends_on]
      - db_instance → KMS key [encrypts]
    """
    nodes: List[ResourceNode] = []
    edges: List[ResourceEdge] = []

    for db in client.db_instances.values():
        attributes = {
            "engine": getattr(db, "engine", None),
            "engine_version": getattr(db, "engine_version", None),
            "instance_class": getattr(db, "db_instance_class", None),
            "vpc_id": getattr(db, "vpc_id", None),
            "multi_az": getattr(db, "multi_az", None),
            "publicly_accessible": getattr(db, "publicly_accessible", None),
            "storage_encrypted": getattr(db, "storage_encrypted", None),
        }

        nodes.append(
            ResourceNode(
                id=db.arn,
                type="rds_instance",
                name=db.id,
                service="rds",
                region=db.region,
                account_id=client.audited_account,
                # Keep only attributes that are actually set on the model.
                properties={
                    key: value
                    for key, value in attributes.items()
                    if value is not None
                },
            )
        )

        # Security-group entries may be bare ids or objects — normalise first.
        for sg in getattr(db, "security_groups", []):
            if isinstance(sg, str):
                sg_id = sg
            else:
                sg_id = getattr(sg, "id", str(sg))
            edges.append(
                ResourceEdge(
                    source_id=db.arn,
                    target_id=sg_id,
                    edge_type="network",
                    label="sg",
                )
            )

        db_vpc = getattr(db, "vpc_id", None)
        if db_vpc:
            edges.append(
                ResourceEdge(
                    source_id=db.arn,
                    target_id=db_vpc,
                    edge_type="network",
                    label="in-vpc",
                )
            )

        # Aurora-style cluster membership.
        owning_cluster = getattr(db, "cluster_arn", None)
        if owning_cluster:
            edges.append(
                ResourceEdge(
                    source_id=db.arn,
                    target_id=owning_cluster,
                    edge_type="depends_on",
                    label="cluster-member",
                )
            )

        # Storage-encryption edge — direction is key → instance.
        encryption_key = getattr(db, "kms_key_id", None)
        if encryption_key:
            edges.append(
                ResourceEdge(
                    source_id=encryption_key,
                    target_id=db.arn,
                    edge_type="encrypts",
                    label="kms",
                )
            )

    return nodes, edges
|
||||
@@ -1,92 +0,0 @@
|
||||
from typing import List, Tuple
|
||||
|
||||
from lib.models import ResourceEdge, ResourceNode
|
||||
|
||||
|
||||
def extract(client) -> Tuple[List[ResourceNode], List[ResourceEdge]]:
    """
    Extract S3 bucket nodes and their edges.

    Edges produced:
      - bucket → replication-target bucket [replicates_to]
      - KMS key → bucket [encrypts]
      - bucket → logging bucket [logs_to]
    """
    nodes: List[ResourceNode] = []
    edges: List[ResourceEdge] = []

    for bucket in client.buckets.values():
        encryption = getattr(bucket, "encryption", None)
        versioning = getattr(bucket, "versioning_enabled", None)
        logging = getattr(bucket, "logging", None)
        # NOTE: the original also read bucket.public_access_block into an
        # unused local; dropped here because nothing consumed it.

        props = {}
        if versioning is not None:
            props["versioning"] = versioning
        if encryption:
            # Encryption may be a structured object or a plain marker value.
            enc_type = getattr(encryption, "type", str(encryption))
            props["encryption"] = enc_type

        nodes.append(
            ResourceNode(
                id=bucket.arn,
                type="s3_bucket",
                name=bucket.name,
                service="s3",
                region=bucket.region,
                account_id=client.audited_account,
                properties=props,
            )
        )

        # Replication edges — destinations may be names or full ARNs.
        for rule in getattr(bucket, "replication_rules", None) or []:
            dest_bucket = getattr(rule, "destination_bucket", None)
            if dest_bucket:
                dest_arn = (
                    dest_bucket
                    if dest_bucket.startswith("arn:")
                    else f"arn:aws:s3:::{dest_bucket}"
                )
                edges.append(
                    ResourceEdge(
                        source_id=bucket.arn,
                        target_id=dest_arn,
                        edge_type="replicates_to",
                        label="s3-replication",
                    )
                )

        # Logging edges — server access logs delivered to a target bucket.
        if logging:
            target_bucket = getattr(logging, "target_bucket", None)
            if target_bucket:
                target_arn = (
                    target_bucket
                    if target_bucket.startswith("arn:")
                    else f"arn:aws:s3:::{target_bucket}"
                )
                edges.append(
                    ResourceEdge(
                        source_id=bucket.arn,
                        target_id=target_arn,
                        edge_type="logs_to",
                        label="access-logs",
                    )
                )

        # KMS encryption edges — direction is key → bucket.
        if encryption:
            kms_arn = getattr(encryption, "kms_master_key_id", None)
            if kms_arn:
                edges.append(
                    ResourceEdge(
                        source_id=kms_arn,
                        target_id=bucket.arn,
                        edge_type="encrypts",
                        label="kms",
                    )
                )

    return nodes, edges
|
||||
@@ -1,92 +0,0 @@
|
||||
from typing import List, Tuple
|
||||
|
||||
from lib.models import ResourceEdge, ResourceNode
|
||||
|
||||
|
||||
def extract(client) -> Tuple[List[ResourceNode], List[ResourceEdge]]:
    """
    Extract VPC and subnet nodes with their edges.

    Edges produced:
      - subnet → VPC [depends_on]
      - peering connection between VPCs [network]
    """
    nodes: List[ResourceNode] = []
    edges: List[ResourceEdge] = []

    # VPCs
    for vpc in client.vpcs.values():
        # Prefer the "Name" tag for display; fall back to the VPC id/ARN.
        name = vpc.id if hasattr(vpc, "id") else vpc.arn
        for tag in vpc.tags or []:
            if isinstance(tag, dict) and tag.get("Key") == "Name":
                name = tag["Value"]
                break

        nodes.append(
            ResourceNode(
                id=vpc.arn,
                type="vpc",
                name=name,
                service="vpc",
                region=vpc.region,
                account_id=client.audited_account,
                properties={
                    "cidr_block": getattr(vpc, "cidr_block", None),
                    "is_default": getattr(vpc, "is_default", None),
                },
            )
        )

    # VPC Subnets
    for subnet in client.vpc_subnets.values():
        name = subnet.id if hasattr(subnet, "id") else subnet.arn
        for tag in getattr(subnet, "tags", None) or []:
            if isinstance(tag, dict) and tag.get("Key") == "Name":
                name = tag["Value"]
                break

        nodes.append(
            ResourceNode(
                id=subnet.arn,
                type="subnet",
                name=name,
                service="vpc",
                region=subnet.region,
                account_id=client.audited_account,
                properties={
                    "vpc_id": getattr(subnet, "vpc_id", None),
                    "cidr_block": getattr(subnet, "cidr_block", None),
                    "availability_zone": getattr(subnet, "availability_zone", None),
                    "public": getattr(subnet, "public", None),
                },
            )
        )

        vpc_id = getattr(subnet, "vpc_id", None)
        if vpc_id:
            # Find the VPC ARN for this vpc_id; edges use ARNs as node ids,
            # so fall back to the raw id only when no matching VPC was scanned.
            vpc_arn = next(
                (v.arn for v in client.vpcs.values() if v.id == vpc_id),
                vpc_id,
            )
            edges.append(
                ResourceEdge(
                    source_id=subnet.arn,
                    target_id=vpc_arn,
                    edge_type="depends_on",
                    label="subnet-of",
                )
            )

    # VPC Peering Connections
    # NOTE(review): the target falls back to the peering's own ARN when no
    # accepter_vpc_id is present, producing a self-loop edge — presumably a
    # deliberate "unknown peer" marker; confirm against the renderer.
    for peering in getattr(client, "vpc_peering_connections", {}).values():
        edges.append(
            ResourceEdge(
                source_id=peering.arn,
                target_id=getattr(peering, "accepter_vpc_id", peering.arn),
                edge_type="network",
                label="vpc-peer",
            )
        )

    return nodes, edges
|
||||
@@ -1,106 +0,0 @@
|
||||
"""
|
||||
graph_builder.py
|
||||
----------------
|
||||
Builds a ConnectivityGraph by reading already-loaded AWS service clients from
|
||||
sys.modules. Only services that were actually scanned (i.e. whose client
|
||||
module is already imported) contribute nodes and edges. Unknown / unloaded
|
||||
services are silently skipped, so the output degrades gracefully when only a
|
||||
subset of checks has been run.
|
||||
"""
|
||||
|
||||
import sys
|
||||
from typing import Tuple
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
from lib.models import ConnectivityGraph
|
||||
|
||||
# Registry: (sys.modules key, attribute name inside that module, extractor module path)
|
||||
_SERVICE_REGISTRY: Tuple[Tuple[str, str, str], ...] = (
    # Each entry: (client module key in sys.modules, attribute inside that
    # module holding the instantiated service client, import path of the
    # extractor that knows how to turn it into nodes/edges).
    (
        "prowler.providers.aws.services.awslambda.awslambda_client",
        "awslambda_client",
        "lib.extractors.lambda_extractor",
    ),
    (
        "prowler.providers.aws.services.ec2.ec2_client",
        "ec2_client",
        "lib.extractors.ec2_extractor",
    ),
    (
        "prowler.providers.aws.services.vpc.vpc_client",
        "vpc_client",
        "lib.extractors.vpc_extractor",
    ),
    (
        "prowler.providers.aws.services.rds.rds_client",
        "rds_client",
        "lib.extractors.rds_extractor",
    ),
    (
        "prowler.providers.aws.services.elbv2.elbv2_client",
        "elbv2_client",
        "lib.extractors.elbv2_extractor",
    ),
    (
        "prowler.providers.aws.services.s3.s3_client",
        "s3_client",
        "lib.extractors.s3_extractor",
    ),
    (
        "prowler.providers.aws.services.iam.iam_client",
        "iam_client",
        "lib.extractors.iam_extractor",
    ),
)
|
||||
|
||||
|
||||
def build_graph() -> ConnectivityGraph:
    """
    Iterate over every registered service, check whether its client module is
    already loaded, and call the corresponding extractor.

    Returns a ConnectivityGraph with all discovered nodes and edges.
    Duplicate node IDs are silently deduplicated (first occurrence wins).
    """
    import importlib

    graph = ConnectivityGraph()
    known_ids: set = set()

    for module_key, attr_name, extractor_key in _SERVICE_REGISTRY:
        # Only services scanned in this session have their client module
        # in sys.modules — everything else is skipped silently.
        client_module = sys.modules.get(module_key)
        if client_module is None:
            continue

        service_client = getattr(client_module, attr_name, None)
        if service_client is None:
            continue

        # Load the matching extractor lazily, reusing it if already imported.
        extractor = sys.modules.get(extractor_key)
        if extractor is None:
            try:
                extractor = importlib.import_module(extractor_key)
            except ImportError as e:
                logger.debug(
                    f"inventory graph_builder: cannot import extractor {extractor_key}: {e}"
                )
                continue

        try:
            nodes, edges = extractor.extract(service_client)
        except Exception as e:
            # A failing extractor must not abort the whole graph build.
            logger.error(
                f"inventory graph_builder: extractor {extractor_key} failed: "
                f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}]: {e}"
            )
            continue

        for node in nodes:
            if node.id in known_ids:
                continue
            graph.add_node(node)
            known_ids.add(node.id)

        for edge in edges:
            graph.add_edge(edge)

    return graph
|
||||
@@ -1,502 +0,0 @@
|
||||
"""
|
||||
inventory_output.py
|
||||
-------------------
|
||||
Writes the ConnectivityGraph produced by graph_builder to two files:
|
||||
|
||||
<output_path>.inventory.json – machine-readable graph (nodes + edges)
|
||||
<output_path>.inventory.html – interactive D3.js force-directed graph
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
from dataclasses import asdict
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
from lib.models import ConnectivityGraph
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# JSON output
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def write_json(graph: ConnectivityGraph, file_path: str) -> None:
    """Serialise the graph to a JSON file.

    The output carries the node list, the edge list and a small stats block,
    plus a UTC generation timestamp. Errors are logged rather than raised so
    a failed write does not abort the surrounding run (best-effort, matching
    the rest of the output layer).

    Args:
        graph: The ConnectivityGraph to serialise.
        file_path: Destination path; parent directories are created on demand.
    """
    try:
        # os.path.dirname() returns "" for a bare filename and makedirs("")
        # raises — only create parent directories when there actually are any.
        parent_dir = os.path.dirname(file_path)
        if parent_dir:
            os.makedirs(parent_dir, exist_ok=True)
        data = {
            "generated_at": datetime.utcnow().isoformat() + "Z",
            "nodes": [asdict(n) for n in graph.nodes],
            "edges": [asdict(e) for e in graph.edges],
            "stats": {
                "node_count": len(graph.nodes),
                "edge_count": len(graph.edges),
            },
        }
        with open(file_path, "w", encoding="utf-8") as fh:
            # default=str keeps non-JSON-native values (datetimes, sets)
            # from aborting the dump.
            json.dump(data, fh, indent=2, default=str)
        logger.info(f"Inventory graph JSON written to {file_path}")
    except Exception as e:
        logger.error(
            f"inventory_output.write_json: {e.__class__.__name__}[{e.__traceback__.tb_lineno}]: {e}"
        )
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# HTML output (self-contained, D3.js CDN)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Colour palette per node type
|
||||
# Hex colours keyed by ResourceNode.type; "default" is the fallback used by
# the HTML renderer for any type not listed here.
_NODE_COLOURS = {
    "lambda_function": "#f59e0b",
    "ec2_instance": "#3b82f6",
    "security_group": "#6366f1",
    "vpc": "#10b981",
    "subnet": "#34d399",
    "rds_instance": "#ef4444",
    "load_balancer": "#8b5cf6",
    "s3_bucket": "#06b6d4",
    "iam_role": "#f97316",
    "default": "#94a3b8",
}

# Edge stroke colours per edge type; unknown types fall back to a neutral
# grey inside the HTML template's edgeColour() helper.
_EDGE_COLOURS = {
    "network": "#64748b",
    "iam": "#f97316",
    "triggers": "#a855f7",
    "data_flow": "#0ea5e9",
    "depends_on": "#94a3b8",
    "routes_to": "#22c55e",
    "replicates_to": "#ec4899",
    "encrypts": "#eab308",
    "logs_to": "#78716c",
}
|
||||
|
||||
_HTML_TEMPLATE = """\
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8"/>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0"/>
|
||||
<title>Prowler – AWS Connectivity Graph</title>
|
||||
<script src="https://d3js.org/d3.v7.min.js"></script>
|
||||
<style>
|
||||
*, *::before, *::after {{ box-sizing: border-box; }}
|
||||
body {{
|
||||
margin: 0;
|
||||
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif;
|
||||
background: #0f172a;
|
||||
color: #e2e8f0;
|
||||
}}
|
||||
#header {{
|
||||
padding: 12px 20px;
|
||||
background: #1e293b;
|
||||
border-bottom: 1px solid #334155;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 16px;
|
||||
}}
|
||||
#header h1 {{ margin: 0; font-size: 18px; font-weight: 700; }}
|
||||
#header .stats {{ font-size: 13px; color: #94a3b8; }}
|
||||
#controls {{
|
||||
padding: 8px 20px;
|
||||
background: #1e293b;
|
||||
border-bottom: 1px solid #334155;
|
||||
display: flex;
|
||||
gap: 12px;
|
||||
align-items: center;
|
||||
flex-wrap: wrap;
|
||||
}}
|
||||
#controls label {{ font-size: 12px; color: #94a3b8; }}
|
||||
#controls select, #controls input[type=range] {{
|
||||
background: #0f172a;
|
||||
color: #e2e8f0;
|
||||
border: 1px solid #334155;
|
||||
border-radius: 4px;
|
||||
padding: 3px 6px;
|
||||
font-size: 12px;
|
||||
}}
|
||||
#graph-container {{ width: 100%; height: calc(100vh - 100px); position: relative; }}
|
||||
svg {{ width: 100%; height: 100%; }}
|
||||
.node circle {{
|
||||
stroke: #1e293b;
|
||||
stroke-width: 1.5px;
|
||||
cursor: pointer;
|
||||
transition: r 0.15s;
|
||||
}}
|
||||
.node circle:hover {{ stroke-width: 3px; }}
|
||||
.node text {{
|
||||
font-size: 10px;
|
||||
fill: #e2e8f0;
|
||||
pointer-events: none;
|
||||
text-shadow: 0 0 4px #0f172a;
|
||||
}}
|
||||
.link {{
|
||||
stroke-opacity: 0.6;
|
||||
stroke-width: 1.5px;
|
||||
}}
|
||||
.link-label {{
|
||||
font-size: 8px;
|
||||
fill: #94a3b8;
|
||||
pointer-events: none;
|
||||
}}
|
||||
#tooltip {{
|
||||
position: fixed;
|
||||
background: #1e293b;
|
||||
border: 1px solid #334155;
|
||||
border-radius: 6px;
|
||||
padding: 10px 14px;
|
||||
font-size: 12px;
|
||||
pointer-events: none;
|
||||
max-width: 320px;
|
||||
word-break: break-all;
|
||||
z-index: 9999;
|
||||
display: none;
|
||||
}}
|
||||
#tooltip strong {{ color: #f8fafc; }}
|
||||
#tooltip .prop {{ color: #94a3b8; margin-top: 4px; }}
|
||||
#legend {{
|
||||
position: absolute;
|
||||
top: 10px;
|
||||
right: 10px;
|
||||
background: rgba(30,41,59,0.9);
|
||||
border: 1px solid #334155;
|
||||
border-radius: 6px;
|
||||
padding: 10px 14px;
|
||||
font-size: 11px;
|
||||
}}
|
||||
#legend h3 {{ margin: 0 0 6px; font-size: 12px; }}
|
||||
.legend-row {{ display: flex; align-items: center; gap: 6px; margin: 3px 0; }}
|
||||
.legend-dot {{ width: 12px; height: 12px; border-radius: 50%; flex-shrink: 0; }}
|
||||
.legend-line {{ width: 20px; height: 2px; flex-shrink: 0; }}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div id="header">
|
||||
<h1>🔗 AWS Connectivity Graph</h1>
|
||||
<span class="stats" id="stat-label">Generated: {generated_at}</span>
|
||||
</div>
|
||||
<div id="controls">
|
||||
<label>Filter service:
|
||||
<select id="filter-service">
|
||||
<option value="">All services</option>
|
||||
</select>
|
||||
</label>
|
||||
<label>Link distance:
|
||||
<input type="range" id="link-distance" min="40" max="300" value="120"/>
|
||||
</label>
|
||||
<label>Charge strength:
|
||||
<input type="range" id="charge-strength" min="-800" max="-20" value="-250"/>
|
||||
</label>
|
||||
<span class="stats" id="visible-count"></span>
|
||||
</div>
|
||||
<div id="graph-container">
|
||||
<svg id="graph-svg"></svg>
|
||||
<div id="tooltip"></div>
|
||||
<div id="legend">
|
||||
<h3>Node types</h3>
|
||||
{legend_nodes_html}
|
||||
<h3 style="margin-top:8px">Edge types</h3>
|
||||
{legend_edges_html}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
const RAW_NODES = {nodes_json};
|
||||
const RAW_EDGES = {edges_json};
|
||||
const NODE_COLOURS = {node_colours_json};
|
||||
const EDGE_COLOURS = {edge_colours_json};
|
||||
|
||||
// ── helpers ──────────────────────────────────────────────────────────────
|
||||
function nodeColour(d) {{
|
||||
return NODE_COLOURS[d.type] || NODE_COLOURS["default"];
|
||||
}}
|
||||
function edgeColour(d) {{
|
||||
return EDGE_COLOURS[d.edge_type] || "#94a3b8";
|
||||
}}
|
||||
function nodeRadius(d) {{
|
||||
const base = {{
|
||||
lambda_function: 9, ec2_instance: 10, vpc: 14, subnet: 8,
|
||||
security_group: 7, rds_instance: 11, load_balancer: 12,
|
||||
s3_bucket: 9, iam_role: 9
|
||||
}};
|
||||
return base[d.type] || 8;
|
||||
}}
|
||||
|
||||
// ── filter controls ───────────────────────────────────────────────────────
|
||||
const services = [...new Set(RAW_NODES.map(n => n.service))].sort();
|
||||
const sel = document.getElementById("filter-service");
|
||||
services.forEach(s => {{
|
||||
const o = document.createElement("option");
|
||||
o.value = s; o.textContent = s;
|
||||
sel.appendChild(o);
|
||||
}});
|
||||
|
||||
// ── D3 setup ──────────────────────────────────────────────────────────────
|
||||
const svg = d3.select("#graph-svg");
|
||||
const container = svg.append("g");
|
||||
|
||||
// zoom
|
||||
svg.call(
|
||||
d3.zoom().scaleExtent([0.05, 8])
|
||||
.on("zoom", e => container.attr("transform", e.transform))
|
||||
);
|
||||
|
||||
// arrowhead marker
|
||||
const defs = svg.append("defs");
|
||||
defs.append("marker")
|
||||
.attr("id", "arrow")
|
||||
.attr("viewBox", "0 -5 10 10")
|
||||
.attr("refX", 20).attr("refY", 0)
|
||||
.attr("markerWidth", 6).attr("markerHeight", 6)
|
||||
.attr("orient", "auto")
|
||||
.append("path")
|
||||
.attr("d", "M0,-5L10,0L0,5")
|
||||
.attr("fill", "#94a3b8");
|
||||
|
||||
// tooltip
|
||||
const tooltip = document.getElementById("tooltip");
|
||||
|
||||
// ── simulation ────────────────────────────────────────────────────────────
|
||||
let simulation, linkSel, nodeSel, labelSel;
|
||||
|
||||
function buildGraph(nodeFilter) {{
|
||||
// Determine which nodes to show
|
||||
const visibleNodes = nodeFilter
|
||||
? RAW_NODES.filter(n => n.service === nodeFilter)
|
||||
: RAW_NODES;
|
||||
const visibleIds = new Set(visibleNodes.map(n => n.id));
|
||||
|
||||
// Only show edges where BOTH endpoints are visible
|
||||
const visibleEdges = RAW_EDGES.filter(
|
||||
e => visibleIds.has(e.source_id) && visibleIds.has(e.target_id)
|
||||
);
|
||||
|
||||
document.getElementById("visible-count").textContent =
|
||||
`Showing ${{visibleNodes.length}} nodes · ${{visibleEdges.length}} edges`;
|
||||
|
||||
container.selectAll("*").remove();
|
||||
|
||||
if (simulation) simulation.stop();
|
||||
|
||||
const nodes = visibleNodes.map(n => ({{ ...n }}));
|
||||
const nodeIndex = Object.fromEntries(nodes.map(n => [n.id, n]));
|
||||
|
||||
const links = visibleEdges.map(e => ({{
|
||||
...e,
|
||||
source: nodeIndex[e.source_id] || e.source_id,
|
||||
target: nodeIndex[e.target_id] || e.target_id,
|
||||
}}));
|
||||
|
||||
const dist = +document.getElementById("link-distance").value;
|
||||
const charge = +document.getElementById("charge-strength").value;
|
||||
|
||||
simulation = d3.forceSimulation(nodes)
|
||||
.force("link", d3.forceLink(links).id(d => d.id).distance(dist))
|
||||
.force("charge", d3.forceManyBody().strength(charge))
|
||||
.force("center", d3.forceCenter(
|
||||
document.getElementById("graph-container").clientWidth / 2,
|
||||
document.getElementById("graph-container").clientHeight / 2
|
||||
))
|
||||
.force("collision", d3.forceCollide().radius(d => nodeRadius(d) + 6));
|
||||
|
||||
// Edges
|
||||
linkSel = container.append("g").attr("class", "links")
|
||||
.selectAll("line")
|
||||
.data(links)
|
||||
.join("line")
|
||||
.attr("class", "link")
|
||||
.attr("stroke", edgeColour)
|
||||
.attr("marker-end", "url(#arrow)");
|
||||
|
||||
// Edge labels
|
||||
labelSel = container.append("g").attr("class", "link-labels")
|
||||
.selectAll("text")
|
||||
.data(links)
|
||||
.join("text")
|
||||
.attr("class", "link-label")
|
||||
.text(d => d.label || "");
|
||||
|
||||
// Nodes
|
||||
nodeSel = container.append("g").attr("class", "nodes")
|
||||
.selectAll("g")
|
||||
.data(nodes)
|
||||
.join("g")
|
||||
.attr("class", "node")
|
||||
.call(
|
||||
d3.drag()
|
||||
.on("start", (event, d) => {{
|
||||
if (!event.active) simulation.alphaTarget(0.3).restart();
|
||||
d.fx = d.x; d.fy = d.y;
|
||||
}})
|
||||
.on("drag", (event, d) => {{ d.fx = event.x; d.fy = event.y; }})
|
||||
.on("end", (event, d) => {{
|
||||
if (!event.active) simulation.alphaTarget(0);
|
||||
d.fx = null; d.fy = null;
|
||||
}})
|
||||
)
|
||||
.on("mouseover", (event, d) => {{
|
||||
const props = Object.entries(d.properties || {{}})
|
||||
.map(([k, v]) => `<div class="prop"><b>${{k}}</b>: ${{v}}</div>`)
|
||||
.join("");
|
||||
tooltip.innerHTML = `
|
||||
<strong>${{d.name}}</strong>
|
||||
<div class="prop"><b>type</b>: ${{d.type}}</div>
|
||||
<div class="prop"><b>service</b>: ${{d.service}}</div>
|
||||
<div class="prop"><b>region</b>: ${{d.region}}</div>
|
||||
<div class="prop"><b>account</b>: ${{d.account_id}}</div>
|
||||
<div class="prop" style="word-break:break-all"><b>arn</b>: ${{d.id}}</div>
|
||||
${{props}}
|
||||
`;
|
||||
tooltip.style.display = "block";
|
||||
tooltip.style.left = (event.clientX + 12) + "px";
|
||||
tooltip.style.top = (event.clientY - 10) + "px";
|
||||
}})
|
||||
.on("mousemove", event => {{
|
||||
tooltip.style.left = (event.clientX + 12) + "px";
|
||||
tooltip.style.top = (event.clientY - 10) + "px";
|
||||
}})
|
||||
.on("mouseout", () => {{ tooltip.style.display = "none"; }});
|
||||
|
||||
nodeSel.append("circle")
|
||||
.attr("r", nodeRadius)
|
||||
.attr("fill", nodeColour);
|
||||
|
||||
nodeSel.append("text")
|
||||
.attr("dx", d => nodeRadius(d) + 3)
|
||||
.attr("dy", "0.35em")
|
||||
.text(d => d.name.length > 24 ? d.name.slice(0, 22) + "…" : d.name);
|
||||
|
||||
simulation.on("tick", () => {{
|
||||
linkSel
|
||||
.attr("x1", d => d.source.x)
|
||||
.attr("y1", d => d.source.y)
|
||||
.attr("x2", d => d.target.x)
|
||||
.attr("y2", d => d.target.y);
|
||||
|
||||
labelSel
|
||||
.attr("x", d => (d.source.x + d.target.x) / 2)
|
||||
.attr("y", d => (d.source.y + d.target.y) / 2);
|
||||
|
||||
nodeSel.attr("transform", d => `translate(${{d.x}},${{d.y}})`);
|
||||
}});
|
||||
}}
|
||||
|
||||
// Initial render
|
||||
buildGraph(null);
|
||||
|
||||
// Filter change
|
||||
sel.addEventListener("change", () => buildGraph(sel.value || null));
|
||||
|
||||
// Simulation control sliders — restart on change
|
||||
document.getElementById("link-distance").addEventListener("input", () => buildGraph(sel.value || null));
|
||||
document.getElementById("charge-strength").addEventListener("input", () => buildGraph(sel.value || null));
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
||||
|
||||
|
||||
def _build_legend_html(colours: dict, shape: str) -> str:
|
||||
rows = []
|
||||
for key, colour in sorted(colours.items()):
|
||||
if shape == "dot":
|
||||
rows.append(
|
||||
f'<div class="legend-row">'
|
||||
f'<div class="legend-dot" style="background:{colour}"></div>'
|
||||
f"<span>{key}</span></div>"
|
||||
)
|
||||
else:
|
||||
rows.append(
|
||||
f'<div class="legend-row">'
|
||||
f'<div class="legend-line" style="background:{colour}"></div>'
|
||||
f"<span>{key}</span></div>"
|
||||
)
|
||||
return "\n".join(rows)
|
||||
|
||||
|
||||
def write_html(graph: ConnectivityGraph, file_path: str) -> None:
    """Render the graph as a self-contained interactive HTML page.

    Serialises the graph's nodes and edges to JSON, substitutes them (plus
    the colour maps and legend markup) into ``_HTML_TEMPLATE``, and writes
    the result to ``file_path``. Any failure is logged and swallowed so that
    output generation never aborts the surrounding scan.

    Args:
        graph: the connectivity graph to render.
        file_path: destination path of the HTML file; the parent directory
            is created if missing.
    """
    # datetime.utcnow() is deprecated (Python 3.12+) and returns a naive
    # datetime; an aware UTC timestamp renders the exact same string.
    from datetime import timezone

    try:
        # os.makedirs("") raises FileNotFoundError, so only create the
        # directory when the path actually contains one.
        directory = os.path.dirname(file_path)
        if directory:
            os.makedirs(directory, exist_ok=True)

        nodes_json = json.dumps(
            [
                {
                    "id": n.id,
                    "type": n.type,
                    "name": n.name,
                    "service": n.service,
                    "region": n.region,
                    "account_id": n.account_id,
                    "properties": n.properties,
                }
                for n in graph.nodes
            ],
            indent=None,
            # Deliberate: stringify any non-JSON-native property value
            # rather than failing the whole report.
            default=str,
        )
        edges_json = json.dumps(
            [
                {
                    "source_id": e.source_id,
                    "target_id": e.target_id,
                    "edge_type": e.edge_type,
                    # The template renders labels unconditionally; normalise
                    # None to "" so the JS side never sees null.
                    "label": e.label or "",
                }
                for e in graph.edges
            ],
            indent=None,
            default=str,
        )

        html = _HTML_TEMPLATE.format(
            generated_at=datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M UTC"),
            nodes_json=nodes_json,
            edges_json=edges_json,
            node_colours_json=json.dumps(_NODE_COLOURS),
            edge_colours_json=json.dumps(_EDGE_COLOURS),
            legend_nodes_html=_build_legend_html(_NODE_COLOURS, "dot"),
            legend_edges_html=_build_legend_html(_EDGE_COLOURS, "line"),
        )

        with open(file_path, "w", encoding="utf-8") as fh:
            fh.write(html)

        logger.info(f"Inventory graph HTML written to {file_path}")
    except Exception as e:
        logger.error(
            f"inventory_output.write_html: {e.__class__.__name__}[{e.__traceback__.tb_lineno}]: {e}"
        )
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Convenience entry-point called from __main__.py
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def generate_inventory_outputs(output_path: str) -> None:
    """
    Build the connectivity graph from currently-loaded service clients and
    write both JSON and HTML outputs.

    Args:
        output_path: base file path WITHOUT extension, e.g.
                     "output/prowler-output-20240101120000".
                     The function appends .inventory.json and .inventory.html.
    """
    # Imported lazily to avoid a module-level import cycle with the builder.
    from lib.graph_builder import build_graph

    connectivity = build_graph()

    if not connectivity.nodes:
        # Still emit the (empty) outputs below — only warn the user.
        logger.warning(
            "Inventory graph: no nodes discovered. "
            "Make sure at least one AWS service was scanned before generating the inventory."
        )

    for extension, writer in (
        (".inventory.json", write_json),
        (".inventory.html", write_html),
    ):
        writer(connectivity, f"{output_path}{extension}")
|
||||
@@ -1,71 +0,0 @@
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
|
||||
@dataclass
class ResourceNode:
    """
    Represents a single AWS resource as a node in the connectivity graph.

    Attributes:
        id: globally unique identifier — always the resource ARN.
        type: coarse resource type used for grouping/colour in the HTML
            rendering, e.g. "lambda_function".
        name: human-readable label shown on the graph.
        service: AWS service name, e.g. "lambda", "ec2", "rds".
        region: AWS region the resource lives in.
        account_id: AWS account ID.
        properties: additional resource-specific metadata (runtime, vpc_id,
            etc.); serialised verbatim into the JSON/HTML outputs.
    """

    id: str
    type: str
    name: str
    service: str
    region: str
    account_id: str
    # default_factory keeps each instance's dict independent (a shared
    # mutable default would leak properties across nodes).
    properties: Dict[str, Any] = field(default_factory=dict)
|
||||
|
||||
|
||||
@dataclass
class ResourceEdge:
    """
    Represents a directional relationship between two resource nodes.

    Attributes:
        source_id: ARN of the source node.
        target_id: ARN of the target node.
        edge_type: semantic type of the relationship, e.g.:
            "network"    – resources share a network path (VPC/subnet/SG)
            "iam"        – IAM trust or permission relationship
            "triggers"   – one resource can invoke another (event source → Lambda)
            "data_flow"  – data is written/read (Lambda → SQS dead-letter queue)
            "depends_on" – soft dependency (Lambda layer, subnet belongs to VPC)
            "routes_to"  – traffic routing (LB → target)
            "encrypts"   – KMS key encrypts the resource
        label: optional short label rendered on the edge in the HTML graph;
            None is normalised to "" at serialisation time.
    """

    source_id: str
    target_id: str
    edge_type: str
    label: Optional[str] = None
|
||||
|
||||
|
||||
@dataclass
class ConnectivityGraph:
    """
    Container for the full inventory connectivity graph.

    Attributes:
        nodes: all discovered resource nodes.
        edges: all discovered edges between nodes.
    """

    nodes: List[ResourceNode] = field(default_factory=list)
    edges: List[ResourceEdge] = field(default_factory=list)

    def add_node(self, node: ResourceNode) -> None:
        """Register *node* in the graph."""
        self.nodes.append(node)

    def add_edge(self, edge: ResourceEdge) -> None:
        """Register *edge* in the graph."""
        self.edges.append(edge)

    def node_ids(self) -> set:
        """Return the set of ARNs (node ids) currently in the graph."""
        return set(node.id for node in self.nodes)
|
||||
@@ -10,10 +10,10 @@ This repository contains the Prowler Open Source documentation powered by [Mintl
|
||||
|
||||
## Local Development
|
||||
|
||||
Install a reviewed version of the [Mintlify CLI](https://www.npmjs.com/package/mint) to preview documentation changes locally:
|
||||
Install the [Mintlify CLI](https://www.npmjs.com/package/mint) to preview documentation changes locally:
|
||||
|
||||
```bash
|
||||
npm install --global mint@4.2.560
|
||||
npm i -g mint
|
||||
```
|
||||
|
||||
Run the following command at the root of your documentation (where `mint.json` is located):
|
||||
|
||||
@@ -28,7 +28,7 @@ This includes the [AGENTS.md](https://github.com/prowler-cloud/prowler/blob/mast
|
||||
<Steps>
|
||||
<Step title="Install Mintlify CLI">
|
||||
```bash
|
||||
npm install --global mint@4.2.560
|
||||
npm i -g mint
|
||||
```
|
||||
For detailed instructions, check the [Mintlify documentation](https://www.mintlify.com/docs/installation).
|
||||
</Step>
|
||||
|
||||
@@ -332,13 +332,6 @@
|
||||
"user-guide/providers/vercel/getting-started-vercel",
|
||||
"user-guide/providers/vercel/authentication"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Okta",
|
||||
"pages": [
|
||||
"user-guide/providers/okta/getting-started-okta",
|
||||
"user-guide/providers/okta/authentication"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
|
||||
@@ -44,21 +44,13 @@ Choose the configuration based on your deployment:
|
||||
|
||||
<Tab title="Generic without Native HTTP Support">
|
||||
**Configuration:**
|
||||
<Warning>
|
||||
Avoid configuring MCP clients to run `npx mcp-remote` directly. `npx` can download and execute a new package version on each run. Install a reviewed version of `mcp-remote` in a dedicated local workspace, then point the MCP client to the installed binary.
|
||||
</Warning>
|
||||
```bash
|
||||
mkdir -p ~/.local/share/prowler-mcp-bridge
|
||||
cd ~/.local/share/prowler-mcp-bridge
|
||||
npm init -y
|
||||
npm install --save-exact mcp-remote@0.1.38
|
||||
```
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"prowler": {
|
||||
"command": "/absolute/path/to/.local/share/prowler-mcp-bridge/node_modules/.bin/mcp-remote",
|
||||
"command": "npx",
|
||||
"args": [
|
||||
"mcp-remote",
|
||||
"https://mcp.prowler.com/mcp", // or your self-hosted Prowler MCP Server URL
|
||||
"--header",
|
||||
"Authorization: Bearer ${PROWLER_APP_API_KEY}"
|
||||
@@ -80,20 +72,14 @@ Choose the configuration based on your deployment:
|
||||
2. Go to "Developer" tab
|
||||
3. Click in "Edit Config" button
|
||||
4. Edit the `claude_desktop_config.json` file with your favorite editor
|
||||
5. Install a reviewed version of `mcp-remote` in a dedicated local workspace:
|
||||
```bash
|
||||
mkdir -p ~/.local/share/prowler-mcp-bridge
|
||||
cd ~/.local/share/prowler-mcp-bridge
|
||||
npm init -y
|
||||
npm install --save-exact mcp-remote@0.1.38
|
||||
```
|
||||
6. Add the following configuration:
|
||||
5. Add the following configuration:
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"prowler": {
|
||||
"command": "/absolute/path/to/.local/share/prowler-mcp-bridge/node_modules/.bin/mcp-remote",
|
||||
"command": "npx",
|
||||
"args": [
|
||||
"mcp-remote",
|
||||
"https://mcp.prowler.com/mcp",
|
||||
"--header",
|
||||
"Authorization: Bearer ${PROWLER_APP_API_KEY}"
|
||||
|
||||
@@ -38,7 +38,7 @@ Refer to the [Prowler App Tutorial](/user-guide/tutorials/prowler-app) for detai
|
||||
|
||||
- `git` installed.
|
||||
- `poetry` installed: [poetry installation](https://python-poetry.org/docs/#installation).
|
||||
- `pnpm` installed through [Corepack](https://pnpm.io/installation#using-corepack) or the standalone [pnpm installation](https://pnpm.io/installation).
|
||||
- `npm` installed: [npm installation](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm).
|
||||
- `Docker Compose` installed: https://docs.docker.com/compose/install/.
|
||||
|
||||
<Warning>
|
||||
@@ -97,11 +97,9 @@ Refer to the [Prowler App Tutorial](/user-guide/tutorials/prowler-app) for detai
|
||||
```bash
|
||||
git clone https://github.com/prowler-cloud/prowler \
|
||||
cd prowler/ui \
|
||||
corepack enable \
|
||||
corepack install \
|
||||
pnpm install --frozen-lockfile \
|
||||
pnpm run build \
|
||||
pnpm start
|
||||
npm install \
|
||||
npm run build \
|
||||
npm start
|
||||
```
|
||||
|
||||
> Enjoy Prowler App at http://localhost:3000 by signing up with your email and password.
|
||||
|
||||
@@ -47,12 +47,11 @@ Prowler supports a wide range of providers organized by category:
|
||||
| Provider | Support | Audit Scope/Entities | Interface |
|
||||
| ----------------------------------------------------------------------------------------- | -------- | ---------------------------- | ------------ |
|
||||
| [GitHub](/user-guide/providers/github/getting-started-github) | Official | Organizations / Repositories | UI, API, CLI |
|
||||
| [Google Workspace](/user-guide/providers/googleworkspace/getting-started-googleworkspace) | Official | Domains | UI, API, CLI |
|
||||
| [Google Workspace](/user-guide/providers/googleworkspace/getting-started-googleworkspace) | Official | Domains | CLI |
|
||||
| [LLM](/user-guide/providers/llm/getting-started-llm) | Official | Models | CLI |
|
||||
| [M365](/user-guide/providers/microsoft365/getting-started-m365) | Official | Tenants | UI, API, CLI |
|
||||
| [MongoDB Atlas](/user-guide/providers/mongodbatlas/getting-started-mongodbatlas) | Official | Organizations | UI, API, CLI |
|
||||
| [Okta](/user-guide/providers/okta/getting-started-okta) | Official | Organizations | CLI |
|
||||
| [Vercel](/user-guide/providers/vercel/getting-started-vercel) | Official | Teams / Projects | UI, API, CLI |
|
||||
| [Vercel](/user-guide/providers/vercel/getting-started-vercel) | Official | Teams / Projects | CLI |
|
||||
|
||||
### Kubernetes
|
||||
|
||||
|
||||
@@ -158,15 +158,6 @@ The following list includes all the Vercel checks with configurable variables th
|
||||
| `team_member_role_least_privilege` | `max_owners` | Integer |
|
||||
| `team_no_stale_invitations` | `stale_invitation_threshold_days` | Integer |
|
||||
|
||||
## Okta
|
||||
|
||||
### Configurable Checks
|
||||
The following list includes all the Okta checks with configurable variables that can be changed in the configuration YAML file:
|
||||
|
||||
| Check Name | Value | Type |
|
||||
|---------------------------------------------------------------|------------------------------------|---------|
|
||||
| `signon_global_session_idle_timeout_15min` | `okta_max_session_idle_minutes` | Integer |
|
||||
|
||||
## Config YAML File Structure
|
||||
|
||||
<Note>
|
||||
|
||||
@@ -22,7 +22,7 @@ Install promptfoo using one of the following methods:
|
||||
|
||||
**Using npm:**
|
||||
```bash
|
||||
npm install --global promptfoo@0.121.11
|
||||
npm install -g promptfoo
|
||||
```
|
||||
|
||||
**Using Homebrew (macOS):**
|
||||
|
||||
@@ -1,186 +0,0 @@
|
||||
---
|
||||
title: 'Okta Authentication in Prowler'
|
||||
---
|
||||
|
||||
import { VersionBadge } from "/snippets/version-badge.mdx"
|
||||
|
||||
<VersionBadge version="5.27.0" />
|
||||
|
||||
Prowler authenticates to Okta as a **service application** using **OAuth 2.0 with a private-key JWT** (Client Credentials grant). The integration is read-only by scope and follows DISA STIG guidance for least-privilege access.
|
||||
|
||||
## Common Setup
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- An Okta organization. The UI examples below use **Identity Engine** terminology such as **Global Session Policy**; Classic Engine exposes equivalent sign-on policy concepts under older naming.
|
||||
- A **Super Administrator** account on that organization for the one-time service-app setup.
|
||||
- An **API Services** app integration created in the Okta Admin Console.
|
||||
|
||||
### Authentication Method Overview
|
||||
|
||||
| Method | Status | Use Case |
|
||||
|---|---|---|
|
||||
| **OAuth 2.0 (private-key JWT)** | Supported | Production scans, CI/CD, Prowler App. |
|
||||
|
||||
The private-key JWT flow is the only supported authentication method in the initial release. The service application proves possession of a private key on every token request; Okta returns a short-lived access token, refreshed automatically by the SDK.
|
||||
|
||||
<Note>
|
||||
If a different authentication method is needed (SSWS API token, OAuth with user delegation, etc.), please open a [feature request](https://github.com/prowler-cloud/prowler/issues/new?template=feature-request.yml) describing the use case.
|
||||
</Note>
|
||||
|
||||
### Required OAuth Scopes
|
||||
|
||||
For the initial check (`signon_global_session_idle_timeout_15min`) only one scope is required:
|
||||
|
||||
- `okta.policies.read`
|
||||
|
||||
Additional scopes will be needed as more services and checks are added; these are the ones currently required:
|
||||
|
||||
| Scope | Used by |
|
||||
|---|---|
|
||||
| `okta.policies.read` | Sign-on / password / authentication policies |
|
||||
|
||||
### Required Admin Role
|
||||
|
||||
The service application must be assigned the built-in **Read-Only Administrator** role.
|
||||
|
||||
Okta's Management API enforces a two-layer authorization model: an OAuth **scope** decides which API endpoints the token can call, and an **admin role** decides whether the call returns data. With only a scope granted, the token mint succeeds but every read returns `403 Forbidden`. The Read-Only Administrator role is the minimum that lets the granted `okta.*.read` scopes actually return configuration data to Prowler's checks — without it, the credential probe at provider startup fails and the scan never gets to evaluate any check.
|
||||
|
||||
Read-Only Administrator is intentionally the narrowest role that satisfies this requirement and aligns with the least-privilege guidance in DISA STIG.
|
||||
|
||||
## Step-by-Step Setup
|
||||
|
||||
### 1. Go to the admin console
|
||||
|
||||

|
||||
|
||||
### 2. [Optional] - Disable the privilege-escalation bypass (org-wide, one-time)
|
||||
|
||||
In the Okta Admin Console, go to **Settings → Account → Public client app admins** and ensure it is **off**. When enabled, every API Services app can be auto-assigned the Super Administrator role after scopes are granted, which would invalidate the read-only premise of this integration.
|
||||
|
||||

|
||||
|
||||
### 3. Create the API Services app
|
||||
|
||||
1. Go to **Applications → Applications**.
|
||||
|
||||

|
||||
|
||||
2. **Create App Integration**
|
||||
|
||||

|
||||
|
||||
3. Sign-in method: **API Services**. Click **Next**.
|
||||
4. Name the app (for example, `Prowler Scanner`) and click **Save**.
|
||||
5. Copy the displayed **Client ID** — you'll use it as `OKTA_CLIENT_ID`.
|
||||
|
||||

|
||||
|
||||
### 4. Switch to private-key authentication and generate a keypair
|
||||
|
||||
On the new app's **General** tab, scroll to **Client Credentials**:
|
||||
|
||||
1. Click **Edit**.
|
||||
2. Set **Client authentication** to **Public key / Private key**.
|
||||
3. Under **Public Keys**, click **Add key**.
|
||||
4. In the modal, click **Generate new key**. Okta creates a JWK pair.
|
||||
5. Click the **PEM** tab to switch the displayed format (or keep JWK — Prowler accepts both).
|
||||
6. Copy the entire `-----BEGIN PRIVATE KEY-----` block (or the JWK JSON).
|
||||
7. Click **Done**, then **Save**.
|
||||
|
||||
<Warning>
|
||||
Okta displays the private key **only once**. If you close the modal without copying, you must generate a new key.
|
||||
</Warning>
|
||||
|
||||

|
||||
|
||||
### 5. Grant the required OAuth scopes
|
||||
|
||||
On the app, open the **Okta API Scopes** tab and click **Grant** on every scope Prowler needs. For the initial release, granting only `okta.policies.read` is sufficient.
|
||||
|
||||

|
||||
|
||||
### 6. Assign the Read-Only Administrator role
|
||||
|
||||
On the app, open the **Admin roles** tab and click **Edit assignments → Add assignment**:
|
||||
|
||||
- **Role:** Read-Only Administrator
|
||||
- **Resources:** All resources
|
||||
|
||||
Save the changes.
|
||||
|
||||

|
||||
|
||||
### 7. [Optional] Verify DPoP setting
|
||||
|
||||
Prowler sends DPoP (Demonstrating Proof of Possession) proofs on every token request. The integration works whether the **Require Demonstrating Proof of Possession (DPoP) header in token requests** setting on the service app is on or off — but enabling it is the more secure default.
|
||||
|
||||
## Prowler CLI Authentication
|
||||
|
||||
### Using Environment Variables (Required for Secrets)
|
||||
|
||||
Private key material **must** be supplied via environment variables — Prowler does not accept secrets through CLI flags.
|
||||
|
||||
```bash
|
||||
export OKTA_ORG_DOMAIN="YOUR-ORG.okta.com"
|
||||
export OKTA_CLIENT_ID="0oa1234567890abcdef"
|
||||
|
||||
# Either of the two — content takes precedence over file when both are set.
|
||||
export OKTA_PRIVATE_KEY_FILE="/secure/path/to/prowler-okta.pem"
|
||||
# or
|
||||
export OKTA_PRIVATE_KEY="$(cat /secure/path/to/prowler-okta.pem)"
|
||||
|
||||
# Optional — defaults to "okta.policies.read"
|
||||
export OKTA_SCOPES="okta.policies.read"
|
||||
|
||||
poetry run python prowler-cli.py okta
|
||||
```
|
||||
|
||||
### Non-Secret CLI Flags
|
||||
|
||||
Non-secret values are also available as CLI flags for ergonomic overrides:
|
||||
|
||||
| Flag | Equivalent env var |
|
||||
|---|---|
|
||||
| `--okta-org-domain` | `OKTA_ORG_DOMAIN` |
|
||||
| `--okta-client-id` | `OKTA_CLIENT_ID` |
|
||||
| `--okta-scopes` | `OKTA_SCOPES` |
|
||||
|
||||
Run a single check directly:
|
||||
|
||||
```bash
|
||||
poetry run python prowler-cli.py okta --check signon_global_session_idle_timeout_15min
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### `OktaInvalidOrgDomainError`
|
||||
|
||||
The org domain must be `<org>.okta.com` (or `.oktapreview.com` / `.okta-emea.com` / `.okta-gov.com` / `.okta.mil` / `.okta-miltest.com` / `.trex-govcloud.com`). Pass the bare hostname only — no `https://` scheme, no path, no trailing slash. Custom (vanity) domains are not currently accepted.
|
||||
|
||||
### `OktaPrivateKeyFileError`
|
||||
|
||||
The file at `OKTA_PRIVATE_KEY_FILE` is missing, unreadable, or empty. Confirm the path and that the file contains a non-empty PEM block or JWK JSON document.
|
||||
|
||||
### `OktaInvalidCredentialsError` at provider init
|
||||
|
||||
Prowler validates credentials at startup by listing one sign-on policy. This error indicates the credential material itself was rejected:
|
||||
|
||||
- **`invalid_client`** — the public key registered in Okta does not match the private key on disk. Generate a fresh keypair and try again.
|
||||
|
||||
### `OktaInsufficientPermissionsError` at provider init
|
||||
|
||||
Raised when the credential probe succeeds at the OAuth layer but the request is rejected because the service app lacks the required scope or admin role:
|
||||
|
||||
- **`invalid_scope`** — the `okta.policies.read` scope is not granted on the service app. Grant it from **Okta API Scopes**.
|
||||
- **`Forbidden` / `not authorized`** — the **Read-Only Administrator** role is not assigned to the service app. Assign it from **Admin roles**.
|
||||
|
||||
### `invalid_dpop_proof`
|
||||
|
||||
The org or the service app requires DPoP. The provider always sends DPoP proofs, so this error indicates the SDK could not build a valid proof — typically because the private key on disk does not match the public key uploaded to Okta. Regenerate the keypair.
|
||||
|
||||
## Additional Resources
|
||||
|
||||
- [Implement OAuth 2.0 for an Okta service app](https://developer.okta.com/docs/guides/implement-oauth-for-okta-serviceapp/main/)
|
||||
- [Okta Policy API reference](https://developer.okta.com/docs/api/openapi/okta-management/management/tag/Policy/)
|
||||
- [DISA STIG for Okta (V-273186)](https://stigviewer.com/stigs/okta/)
|
||||
@@ -1,144 +0,0 @@
|
||||
---
|
||||
title: 'Getting Started With Okta on Prowler'
|
||||
---
|
||||
|
||||
import { VersionBadge } from "/snippets/version-badge.mdx"
|
||||
|
||||
Prowler for Okta scans an Okta organization for identity and session-management misconfigurations. The provider authenticates as a service application using **OAuth 2.0 with a private-key JWT** (Client Credentials grant) — no end-user login, read-only by scope.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Set up authentication for Okta with the [Okta Authentication](/user-guide/providers/okta/authentication) guide before starting:
|
||||
|
||||
- An Okta organization. The UI examples below use **Identity Engine** terminology such as **Global Session Policy**; Classic Engine exposes the equivalent sign-on policy concepts under older names.
|
||||
- A **Super Administrator** account on that organization for the one-time service-app setup.
|
||||
- An **API Services** app integration in the Okta Admin Console with the `okta.policies.read` scope granted and the **Read-Only Administrator** role assigned.
|
||||
- Python 3.10+ and Prowler 5.27.0 or later installed locally.
|
||||
|
||||
<CardGroup cols={2}>
|
||||
<Card title="Prowler Cloud" icon="cloud" href="#prowler-cloud">
|
||||
Onboard Okta using Prowler Cloud
|
||||
</Card>
|
||||
<Card title="Prowler CLI" icon="terminal" href="#prowler-cli">
|
||||
Onboard Okta using Prowler CLI
|
||||
</Card>
|
||||
</CardGroup>
|
||||
|
||||
## Prowler Cloud
|
||||
|
||||
<Note>
|
||||
Prowler Cloud onboarding for Okta is coming soon. Track the [Prowler GitHub repository](https://github.com/prowler-cloud/prowler) for release updates. Use the [Prowler CLI](#prowler-cli) workflow below in the meantime.
|
||||
</Note>
|
||||
|
||||
---
|
||||
|
||||
## Prowler CLI
|
||||
|
||||
<VersionBadge version="5.27.0" />
|
||||
|
||||
### Step 1: Set Up Authentication
|
||||
|
||||
Follow the [Okta Authentication](/user-guide/providers/okta/authentication) guide to create the service application, generate a keypair, grant scopes, and assign the Read-Only Administrator role. Then export the credentials:
|
||||
|
||||
```bash
|
||||
export OKTA_ORG_DOMAIN="acme.okta.com"
|
||||
export OKTA_CLIENT_ID="0oa1234567890abcdef"
|
||||
export OKTA_PRIVATE_KEY_FILE="/secure/path/to/prowler-okta.pem"
|
||||
# Optional — defaults to "okta.policies.read"
|
||||
export OKTA_SCOPES="okta.policies.read"
|
||||
```
|
||||
|
||||
The private key file may contain either a PEM-encoded RSA key or a JWK JSON document.
|
||||
|
||||
#### Supplying the Private Key as Content
|
||||
|
||||
For automated environments where writing the key to disk is not desirable (CI runners, container secrets, etc.), the private key may be passed directly as a string:
|
||||
|
||||
```bash
|
||||
export OKTA_ORG_DOMAIN="acme.okta.com"
|
||||
export OKTA_CLIENT_ID="0oa1234567890abcdef"
|
||||
export OKTA_PRIVATE_KEY="$(cat /secure/path/to/prowler-okta.pem)"
|
||||
```
|
||||
|
||||
`OKTA_PRIVATE_KEY` takes precedence over `OKTA_PRIVATE_KEY_FILE` when both are set. The private key is intentionally not exposed as a CLI flag — secrets must be supplied via environment variables only.
|
||||
|
||||
### Step 2: Run the First Scan
|
||||
|
||||
Run a baseline scan after credentials are configured:
|
||||
|
||||
```bash
|
||||
prowler okta
|
||||
```
|
||||
|
||||
Or run a specific check directly:
|
||||
|
||||
```bash
|
||||
prowler okta --check signon_global_session_idle_timeout_15min
|
||||
```
|
||||
|
||||
Prowler prints a summary table; full findings are written to the configured output formats.
|
||||
|
||||
### Step 3: Use a Custom Configuration (Optional)
|
||||
|
||||
Prowler uses a configuration file to customize check thresholds. The Okta configuration currently includes:
|
||||
|
||||
```yaml
|
||||
okta:
|
||||
# okta.signon_global_session_idle_timeout_15min
|
||||
# Defaults to 15 minutes per DISA STIG V-273186.
|
||||
okta_max_session_idle_minutes: 15
|
||||
```
|
||||
|
||||
To use a custom configuration:
|
||||
|
||||
```bash
|
||||
prowler okta --config-file /path/to/config.yaml
|
||||
```
|
||||
|
||||
## Supported Services
|
||||
|
||||
Prowler for Okta includes security checks across the following services:
|
||||
|
||||
| Service | Description |
|
||||
| ----------- | ----------------------------------------------------------------------------------- |
|
||||
| **Sign-On** | Global session policy controls (idle timeout, lifetime, rule priority and ordering) |
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### STIG Rule Ordering
|
||||
|
||||
The initial check is mapped to DISA STIG `V-273186` / `OKTA-APP-000020`. Prowler implements the STIG procedure as written: the **Default Policy** must have a **Priority 1** rule that is **not** `Default Rule`, and that rule must set **Maximum Okta global session idle time** to 15 minutes or less.
|
||||
|
||||
This is stricter than simply finding the same timeout value somewhere else in the policy set. A compliant custom rule in another policy, or a compliant timeout on the built-in `Default Rule`, does not satisfy this STIG procedure.
|
||||
|
||||
### Default Scopes
|
||||
|
||||
Prowler requests a fixed set of OAuth scopes on every token exchange. The default is a single scope that covers the bundled initial check:
|
||||
|
||||
- `okta.policies.read`
|
||||
|
||||
The service app must have that scope granted in the **Okta API Scopes** tab. When the granted set is narrower than the requested set, the token request fails with an `invalid_scope` error and the scan stops at provider initialization.
|
||||
|
||||
When additional checks are enabled — or when running against a service app that exposes a different scope set — override the default with `OKTA_SCOPES` (comma-separated string for the env var) or `--okta-scopes` (space-separated list for the CLI):
|
||||
|
||||
```bash
|
||||
# Environment variable — comma-separated
|
||||
export OKTA_SCOPES="okta.policies.read,okta.apps.read,okta.users.read"
|
||||
|
||||
# CLI flag — space-separated
|
||||
prowler okta --okta-scopes okta.policies.read okta.apps.read okta.users.read
|
||||
```
|
||||
|
||||
For the full catalog of OAuth scopes exposed by the Okta Management API, refer to the [Okta OAuth 2.0 scopes documentation](https://developer.okta.com/docs/api/oauth2/).
|
||||
|
||||
<Note>
|
||||
As new services and checks land in the Okta provider, the default scope list grows alongside them. Re-check the granted scopes on the service app after each Prowler upgrade and grant any newly required `okta.*.read` scopes in the Admin Console.
|
||||
</Note>
|
||||
|
||||
### Common Errors
|
||||
|
||||
- **`OktaInvalidOrgDomainError`** — the org domain must be `<org>.okta.com` (or `.oktapreview.com` / `.okta-emea.com` / `.okta-gov.com` / `.okta.mil` / `.okta-miltest.com` / `.trex-govcloud.com`). Pass the bare hostname only — no `https://` scheme, no path, no trailing slash.
|
||||
- **`OktaPrivateKeyFileError`** — confirm the file is readable and contains a non-empty PEM or JWK body.
|
||||
- **`OktaInsufficientPermissionsError`** — the credential probe reached Okta but the service app cannot perform the request. The error string carries `invalid_scope`, `Forbidden`, `not authorized`, or `permission`. Fix by granting the missing `okta.*.read` scope from **Okta API Scopes** and confirming the **Read-Only Administrator** role is assigned to the service app.
|
||||
- **`OktaInvalidCredentialsError`** — the credential probe reached Okta but Okta rejected the JWT. Typically the private key on disk does not match the public JWK uploaded to the service app, or the JWT signing parameters are wrong. Regenerate the keypair and re-upload the public JWK.
|
||||
- **Token requests failing for an unknown scope** — the app was granted a narrower scope set than `OKTA_SCOPES` requests. Either narrow `OKTA_SCOPES` or grant the missing scopes in the Admin Console.
|
||||
|
Before Width: | Height: | Size: 159 KiB |
|
Before Width: | Height: | Size: 134 KiB |
|
Before Width: | Height: | Size: 173 KiB |
|
Before Width: | Height: | Size: 127 KiB |
|
Before Width: | Height: | Size: 83 KiB |
|
Before Width: | Height: | Size: 78 KiB |
|
Before Width: | Height: | Size: 216 KiB |
|
Before Width: | Height: | Size: 56 KiB |
@@ -56,21 +56,13 @@ Prowler MCP Server can be used in three ways:
|
||||
- Managed and maintained by Prowler team
|
||||
- Always up-to-date
|
||||
|
||||
Install a reviewed version of `mcp-remote` in a dedicated local workspace first. Avoid running `npx mcp-remote` directly because it can download and execute a new package version on each run.
|
||||
|
||||
```bash
|
||||
mkdir -p ~/.local/share/prowler-mcp-bridge
|
||||
cd ~/.local/share/prowler-mcp-bridge
|
||||
npm init -y
|
||||
npm install --save-exact mcp-remote@0.1.38
|
||||
```
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"prowler": {
|
||||
"command": "/absolute/path/to/.local/share/prowler-mcp-bridge/node_modules/.bin/mcp-remote",
|
||||
"command": "npx",
|
||||
"args": [
|
||||
"mcp-remote",
|
||||
"https://mcp.prowler.com/mcp",
|
||||
"--header",
|
||||
"Authorization: Bearer pk_YOUR_API_KEY_HERE"
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# This file is automatically @generated by Poetry 2.3.4 and should not be changed by hand.
|
||||
# This file is automatically @generated by Poetry 2.3.2 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "about-time"
|
||||
@@ -12,19 +12,6 @@ files = [
|
||||
{file = "about_time-4.2.1-py3-none-any.whl", hash = "sha256:8bbf4c75fe13cbd3d72f49a03b02c5c7dca32169b6d49117c257e7eb3eaee341"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "aenum"
|
||||
version = "3.1.17"
|
||||
description = "Advanced Enumerations (compatible with Python's stdlib Enum), NamedTuples, and NamedConstants"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "aenum-3.1.17-py2-none-any.whl", hash = "sha256:0dad0421b2fbe30e3fb623b2a0a23eff823407df53829d6a72595e7f76f3d872"},
|
||||
{file = "aenum-3.1.17-py3-none-any.whl", hash = "sha256:8b883a37a04e74cc838ac442bdd28c266eae5bbf13e1342c7ef123ed25230139"},
|
||||
{file = "aenum-3.1.17.tar.gz", hash = "sha256:a969a4516b194895de72c875ece355f17c0d272146f7fda346ef74f93cf4d5ba"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "aiofiles"
|
||||
version = "24.1.0"
|
||||
@@ -3157,22 +3144,6 @@ files = [
|
||||
[package.dependencies]
|
||||
referencing = ">=0.31.0"
|
||||
|
||||
[[package]]
|
||||
name = "jwcrypto"
|
||||
version = "1.5.7"
|
||||
description = "Implementation of JOSE Web standards"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "jwcrypto-1.5.7-py3-none-any.whl", hash = "sha256:729463fefe28b6de5cf1ebfda3e94f1a1b41d2799148ef98a01cb9678ebe2bb0"},
|
||||
{file = "jwcrypto-1.5.7.tar.gz", hash = "sha256:70204d7cca406eda8c82352e3c41ba2d946610dafd19e54403f0a1f4f18633c6"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
cryptography = ">=3.4"
|
||||
typing_extensions = ">=4.5.0"
|
||||
|
||||
[[package]]
|
||||
name = "keystoneauth1"
|
||||
version = "5.13.0"
|
||||
@@ -4141,35 +4112,6 @@ urllib3 = {version = ">=2.6.3", markers = "python_version >= \"3.10.0\""}
|
||||
[package.extras]
|
||||
adk = ["docstring-parser (>=0.16) ; python_version >= \"3.10\" and python_version < \"4\"", "mcp (>=1.6.0) ; python_version >= \"3.10\" and python_version < \"4\"", "pydantic (>=2.10.6) ; python_version >= \"3.10\" and python_version < \"4\"", "rich (>=13.9.4) ; python_version >= \"3.10\" and python_version < \"4\""]
|
||||
|
||||
[[package]]
|
||||
name = "okta"
|
||||
version = "3.4.2"
|
||||
description = "Python SDK for the Okta Management API"
|
||||
optional = false
|
||||
python-versions = ">=3.10"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "okta-3.4.2-py3-none-any.whl", hash = "sha256:b67bcff31de65223c5848894a202153236d0c99e3a8541a54bf7065f81676637"},
|
||||
{file = "okta-3.4.2.tar.gz", hash = "sha256:b05201056f3f028c5d2d16394f9b47024a689080f5a993c11d4d80f0e1b5ba1e"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
aenum = ">=3.1.16"
|
||||
aiohttp = ">=3.13.4"
|
||||
blinker = ">=1.9.0"
|
||||
jwcrypto = ">=1.5.6"
|
||||
pycryptodomex = ">=3.23.0"
|
||||
pydantic = ">=2.11.3"
|
||||
pydash = ">=8.0.6"
|
||||
PyJWT = ">=2.12.0"
|
||||
python-dateutil = ">=2.9.0.post0"
|
||||
PyYAML = ">=6.0.3"
|
||||
requests = ">=2.33.0"
|
||||
xmltodict = ">=1.0.2"
|
||||
|
||||
[package.extras]
|
||||
images = ["pillow (>=9.0.0,<12)"]
|
||||
|
||||
[[package]]
|
||||
name = "openapi-schema-validator"
|
||||
version = "0.6.3"
|
||||
@@ -4810,57 +4752,6 @@ files = [
|
||||
{file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pycryptodomex"
|
||||
version = "3.23.0"
|
||||
description = "Cryptographic library for Python"
|
||||
optional = false
|
||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "pycryptodomex-3.23.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:add243d204e125f189819db65eed55e6b4713f70a7e9576c043178656529cec7"},
|
||||
{file = "pycryptodomex-3.23.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1c6d919fc8429e5cb228ba8c0d4d03d202a560b421c14867a65f6042990adc8e"},
|
||||
{file = "pycryptodomex-3.23.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:1c3a65ad441746b250d781910d26b7ed0a396733c6f2dbc3327bd7051ec8a541"},
|
||||
{file = "pycryptodomex-3.23.0-cp27-cp27m-win32.whl", hash = "sha256:47f6d318fe864d02d5e59a20a18834819596c4ed1d3c917801b22b92b3ffa648"},
|
||||
{file = "pycryptodomex-3.23.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:d9825410197a97685d6a1fa2a86196430b01877d64458a20e95d4fd00d739a08"},
|
||||
{file = "pycryptodomex-3.23.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:267a3038f87a8565bd834317dbf053a02055915acf353bf42ededb9edaf72010"},
|
||||
{file = "pycryptodomex-3.23.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:7b37e08e3871efe2187bc1fd9320cc81d87caf19816c648f24443483005ff886"},
|
||||
{file = "pycryptodomex-3.23.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:91979028227543010d7b2ba2471cf1d1e398b3f183cb105ac584df0c36dac28d"},
|
||||
{file = "pycryptodomex-3.23.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b8962204c47464d5c1c4038abeadd4514a133b28748bcd9fa5b6d62e3cec6fa"},
|
||||
{file = "pycryptodomex-3.23.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a33986a0066860f7fcf7c7bd2bc804fa90e434183645595ae7b33d01f3c91ed8"},
|
||||
{file = "pycryptodomex-3.23.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7947ab8d589e3178da3d7cdeabe14f841b391e17046954f2fbcd941705762b5"},
|
||||
{file = "pycryptodomex-3.23.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c25e30a20e1b426e1f0fa00131c516f16e474204eee1139d1603e132acffc314"},
|
||||
{file = "pycryptodomex-3.23.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:da4fa650cef02db88c2b98acc5434461e027dce0ae8c22dd5a69013eaf510006"},
|
||||
{file = "pycryptodomex-3.23.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:58b851b9effd0d072d4ca2e4542bf2a4abcf13c82a29fd2c93ce27ee2a2e9462"},
|
||||
{file = "pycryptodomex-3.23.0-cp313-cp313t-win32.whl", hash = "sha256:a9d446e844f08299236780f2efa9898c818fe7e02f17263866b8550c7d5fb328"},
|
||||
{file = "pycryptodomex-3.23.0-cp313-cp313t-win_amd64.whl", hash = "sha256:bc65bdd9fc8de7a35a74cab1c898cab391a4add33a8fe740bda00f5976ca4708"},
|
||||
{file = "pycryptodomex-3.23.0-cp313-cp313t-win_arm64.whl", hash = "sha256:c885da45e70139464f082018ac527fdaad26f1657a99ee13eecdce0f0ca24ab4"},
|
||||
{file = "pycryptodomex-3.23.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:06698f957fe1ab229a99ba2defeeae1c09af185baa909a31a5d1f9d42b1aaed6"},
|
||||
{file = "pycryptodomex-3.23.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:b2c2537863eccef2d41061e82a881dcabb04944c5c06c5aa7110b577cc487545"},
|
||||
{file = "pycryptodomex-3.23.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43c446e2ba8df8889e0e16f02211c25b4934898384c1ec1ec04d7889c0333587"},
|
||||
{file = "pycryptodomex-3.23.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f489c4765093fb60e2edafdf223397bc716491b2b69fe74367b70d6999257a5c"},
|
||||
{file = "pycryptodomex-3.23.0-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdc69d0d3d989a1029df0eed67cc5e8e5d968f3724f4519bd03e0ec68df7543c"},
|
||||
{file = "pycryptodomex-3.23.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6bbcb1dd0f646484939e142462d9e532482bc74475cecf9c4903d4e1cd21f003"},
|
||||
{file = "pycryptodomex-3.23.0-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:8a4fcd42ccb04c31268d1efeecfccfd1249612b4de6374205376b8f280321744"},
|
||||
{file = "pycryptodomex-3.23.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:55ccbe27f049743a4caf4f4221b166560d3438d0b1e5ab929e07ae1702a4d6fd"},
|
||||
{file = "pycryptodomex-3.23.0-cp37-abi3-win32.whl", hash = "sha256:189afbc87f0b9f158386bf051f720e20fa6145975f1e76369303d0f31d1a8d7c"},
|
||||
{file = "pycryptodomex-3.23.0-cp37-abi3-win_amd64.whl", hash = "sha256:52e5ca58c3a0b0bd5e100a9fbc8015059b05cffc6c66ce9d98b4b45e023443b9"},
|
||||
{file = "pycryptodomex-3.23.0-cp37-abi3-win_arm64.whl", hash = "sha256:02d87b80778c171445d67e23d1caef279bf4b25c3597050ccd2e13970b57fd51"},
|
||||
{file = "pycryptodomex-3.23.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:febec69c0291efd056c65691b6d9a339f8b4bc43c6635b8699471248fe897fea"},
|
||||
{file = "pycryptodomex-3.23.0-pp27-pypy_73-win32.whl", hash = "sha256:c84b239a1f4ec62e9c789aafe0543f0594f0acd90c8d9e15bcece3efe55eca66"},
|
||||
{file = "pycryptodomex-3.23.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ebfff755c360d674306e5891c564a274a47953562b42fb74a5c25b8fc1fb1cb5"},
|
||||
{file = "pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eca54f4bb349d45afc17e3011ed4264ef1cc9e266699874cdd1349c504e64798"},
|
||||
{file = "pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2596e643d4365e14d0879dc5aafe6355616c61c2176009270f3048f6d9a61f"},
|
||||
{file = "pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fdfac7cda115bca3a5abb2f9e43bc2fb66c2b65ab074913643803ca7083a79ea"},
|
||||
{file = "pycryptodomex-3.23.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:14c37aaece158d0ace436f76a7bb19093db3b4deade9797abfc39ec6cd6cc2fe"},
|
||||
{file = "pycryptodomex-3.23.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7de1e40a41a5d7f1ac42b6569b10bcdded34339950945948529067d8426d2785"},
|
||||
{file = "pycryptodomex-3.23.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bffc92138d75664b6d543984db7893a628559b9e78658563b0395e2a5fb47ed9"},
|
||||
{file = "pycryptodomex-3.23.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df027262368334552db2c0ce39706b3fb32022d1dce34673d0f9422df004b96a"},
|
||||
{file = "pycryptodomex-3.23.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e79f1aaff5a3a374e92eb462fa9e598585452135012e2945f96874ca6eeb1ff"},
|
||||
{file = "pycryptodomex-3.23.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:27e13c80ac9a0a1d050ef0a7e0a18cc04c8850101ec891815b6c5a0375e8a245"},
|
||||
{file = "pycryptodomex-3.23.0.tar.gz", hash = "sha256:71909758f010c82bc99b0abf4ea12012c98962fbf0583c2164f8b84533c2e4da"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pydantic"
|
||||
version = "2.12.5"
|
||||
@@ -5017,24 +4908,6 @@ files = [
|
||||
[package.dependencies]
|
||||
typing-extensions = ">=4.14.1"
|
||||
|
||||
[[package]]
|
||||
name = "pydash"
|
||||
version = "8.0.6"
|
||||
description = "The kitchen sink of Python utility libraries for doing \"stuff\" in a functional way. Based on the Lo-Dash Javascript library."
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "pydash-8.0.6-py3-none-any.whl", hash = "sha256:ee70a81a5b292c007f28f03a4ee8e75c1f5d7576df5457b836ec7ab2839cc5d0"},
|
||||
{file = "pydash-8.0.6.tar.gz", hash = "sha256:b2821547e9723f69cf3a986be4db64de41730be149b2641947ecd12e1e11025a"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
typing-extensions = ">3.10,<4.6.0 || >4.6.0"
|
||||
|
||||
[package.extras]
|
||||
dev = ["build", "coverage", "furo", "invoke", "mypy", "pytest", "pytest-cov", "pytest-mypy-testing", "ruff", "sphinx", "sphinx-autodoc-typehints", "tox", "twine", "wheel"]
|
||||
|
||||
[[package]]
|
||||
name = "pyflakes"
|
||||
version = "3.2.0"
|
||||
@@ -5367,85 +5240,65 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "pyyaml"
|
||||
version = "6.0.3"
|
||||
version = "6.0.2"
|
||||
description = "YAML parser and emitter for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main", "dev"]
|
||||
files = [
|
||||
{file = "PyYAML-6.0.3-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f"},
|
||||
{file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4"},
|
||||
{file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efd7b85f94a6f21e4932043973a7ba2613b059c4a000551892ac9f1d11f5baf3"},
|
||||
{file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22ba7cfcad58ef3ecddc7ed1db3409af68d023b7f940da23c6c2a1890976eda6"},
|
||||
{file = "PyYAML-6.0.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6344df0d5755a2c9a276d4473ae6b90647e216ab4757f8426893b5dd2ac3f369"},
|
||||
{file = "PyYAML-6.0.3-cp38-cp38-win32.whl", hash = "sha256:3ff07ec89bae51176c0549bc4c63aa6202991da2d9a6129d7aef7f1407d3f295"},
|
||||
{file = "PyYAML-6.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:5cf4e27da7e3fbed4d6c3d8e797387aaad68102272f8f9752883bc32d61cb87b"},
|
||||
{file = "pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b"},
|
||||
{file = "pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956"},
|
||||
{file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8"},
|
||||
{file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198"},
|
||||
{file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b"},
|
||||
{file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0"},
|
||||
{file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69"},
|
||||
{file = "pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e"},
|
||||
{file = "pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c"},
|
||||
{file = "pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e"},
|
||||
{file = "pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824"},
|
||||
{file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c"},
|
||||
{file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00"},
|
||||
{file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d"},
|
||||
{file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a"},
|
||||
{file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4"},
|
||||
{file = "pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b"},
|
||||
{file = "pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf"},
|
||||
{file = "pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196"},
|
||||
{file = "pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0"},
|
||||
{file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28"},
|
||||
{file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c"},
|
||||
{file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc"},
|
||||
{file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e"},
|
||||
{file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea"},
|
||||
{file = "pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5"},
|
||||
{file = "pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b"},
|
||||
{file = "pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd"},
|
||||
{file = "pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8"},
|
||||
{file = "pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1"},
|
||||
{file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c"},
|
||||
{file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5"},
|
||||
{file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6"},
|
||||
{file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6"},
|
||||
{file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be"},
|
||||
{file = "pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26"},
|
||||
{file = "pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c"},
|
||||
{file = "pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9"},
|
||||
{file = "pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b"},
|
||||
{file = "pyyaml-6.0.3-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:b865addae83924361678b652338317d1bd7e79b1f4596f96b96c77a5a34b34da"},
|
||||
{file = "pyyaml-6.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c3355370a2c156cffb25e876646f149d5d68f5e0a3ce86a5084dd0b64a994917"},
|
||||
{file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3c5677e12444c15717b902a5798264fa7909e41153cdf9ef7ad571b704a63dd9"},
|
||||
{file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5ed875a24292240029e4483f9d4a4b8a1ae08843b9c54f43fcc11e404532a8a5"},
|
||||
{file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0150219816b6a1fa26fb4699fb7daa9caf09eb1999f3b70fb6e786805e80375a"},
|
||||
{file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fa160448684b4e94d80416c0fa4aac48967a969efe22931448d853ada8baf926"},
|
||||
{file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:27c0abcb4a5dac13684a37f76e701e054692a9b2d3064b70f5e4eb54810553d7"},
|
||||
{file = "pyyaml-6.0.3-cp39-cp39-win32.whl", hash = "sha256:1ebe39cb5fc479422b83de611d14e2c0d3bb2a18bbcb01f229ab3cfbd8fee7a0"},
|
||||
{file = "pyyaml-6.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:2e71d11abed7344e42a8849600193d15b6def118602c4c176f748e4583246007"},
|
||||
{file = "pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f"},
|
||||
{file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"},
|
||||
{file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"},
|
||||
{file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"},
|
||||
{file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"},
|
||||
{file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"},
|
||||
{file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"},
|
||||
{file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"},
|
||||
{file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"},
|
||||
{file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"},
|
||||
{file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"},
|
||||
{file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"},
|
||||
{file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"},
|
||||
{file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"},
|
||||
{file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"},
|
||||
{file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"},
|
||||
{file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"},
|
||||
{file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"},
|
||||
{file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"},
|
||||
{file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"},
|
||||
{file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"},
|
||||
{file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"},
|
||||
{file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"},
|
||||
{file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"},
|
||||
{file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"},
|
||||
{file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"},
|
||||
{file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"},
|
||||
{file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"},
|
||||
{file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"},
|
||||
{file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"},
|
||||
{file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"},
|
||||
{file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"},
|
||||
{file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"},
|
||||
{file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"},
|
||||
{file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"},
|
||||
{file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"},
|
||||
{file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"},
|
||||
{file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"},
|
||||
{file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"},
|
||||
{file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"},
|
||||
{file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"},
|
||||
{file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"},
|
||||
{file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"},
|
||||
{file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"},
|
||||
{file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"},
|
||||
{file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"},
|
||||
{file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"},
|
||||
{file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"},
|
||||
{file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"},
|
||||
{file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"},
|
||||
{file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"},
|
||||
{file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"},
|
||||
{file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"},
|
||||
{file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -6594,19 +6447,16 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "xmltodict"
|
||||
version = "1.0.4"
|
||||
version = "0.14.2"
|
||||
description = "Makes working with XML feel like you are working with JSON"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main", "dev"]
|
||||
python-versions = ">=3.6"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "xmltodict-1.0.4-py3-none-any.whl", hash = "sha256:a4a00d300b0e1c59fc2bfccb53d7b2e88c32f200df138a0dd2229f842497026a"},
|
||||
{file = "xmltodict-1.0.4.tar.gz", hash = "sha256:6d94c9f834dd9e44514162799d344d815a3a4faec913717a9ecbfa5be1bb8e61"},
|
||||
{file = "xmltodict-0.14.2-py2.py3-none-any.whl", hash = "sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac"},
|
||||
{file = "xmltodict-0.14.2.tar.gz", hash = "sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
test = ["pytest", "pytest-cov"]
|
||||
|
||||
[[package]]
|
||||
name = "yarl"
|
||||
version = "1.20.1"
|
||||
@@ -6885,4 +6735,4 @@ files = [
|
||||
[metadata]
|
||||
lock-version = "2.1"
|
||||
python-versions = ">=3.10,<3.13"
|
||||
content-hash = "96359a9bfe4031fb0747c22eb4b00f2a008e3fb6d07189fa0fe6ee3875b1f913"
|
||||
content-hash = "d7e2ad41783a864bb845f63ccc10c88ae1e4ac36d61993ea106bbb4a5f58a843"
|
||||
|
||||
@@ -14,19 +14,15 @@ When performing these actions, ALWAYS invoke the corresponding skill FIRST:
|
||||
| Action | Skill |
|
||||
|--------|-------|
|
||||
| Add changelog entry for a PR or feature | `prowler-changelog` |
|
||||
| Adding a compliance output formatter (per-provider class + table dispatcher) | `prowler-compliance` |
|
||||
| Adding new providers | `prowler-provider` |
|
||||
| Adding services to existing providers | `prowler-provider` |
|
||||
| Auditing check-to-requirement mappings as a cloud auditor | `prowler-compliance` |
|
||||
| Create PR that requires changelog entry | `prowler-changelog` |
|
||||
| Creating new checks | `prowler-sdk-check` |
|
||||
| Creating/updating compliance frameworks | `prowler-compliance` |
|
||||
| Fixing compliance JSON bugs (duplicate IDs, empty Section, stale refs) | `prowler-compliance` |
|
||||
| Mapping checks to compliance controls | `prowler-compliance` |
|
||||
| Mocking AWS with moto in tests | `prowler-test-sdk` |
|
||||
| Review changelog format and conventions | `prowler-changelog` |
|
||||
| Reviewing compliance framework PRs | `prowler-compliance-review` |
|
||||
| Syncing compliance framework with upstream catalog | `prowler-compliance` |
|
||||
| Update CHANGELOG.md in any component | `prowler-changelog` |
|
||||
| Updating existing checks and metadata | `prowler-sdk-check` |
|
||||
| Writing Prowler SDK tests | `prowler-test-sdk` |
|
||||
|
||||
@@ -9,7 +9,6 @@ All notable changes to the **Prowler SDK** are documented in this file.
|
||||
- `entra_service_principal_no_secrets_for_permanent_tier0_roles` check for M365 provider [(#10788)](https://github.com/prowler-cloud/prowler/pull/10788)
|
||||
- `iam_user_access_not_stale_to_sagemaker` check for AWS provider with configurable `max_unused_sagemaker_access_days` (default 90) [(#11000)](https://github.com/prowler-cloud/prowler/pull/11000)
|
||||
- `cloudtrail_bedrock_logging_enabled` check for AWS provider [(#10858)](https://github.com/prowler-cloud/prowler/pull/10858)
|
||||
- Okta provider with OAuth 2.0 authentication and `signon_global_session_idle_timeout_15min` check [(#11079)](https://github.com/prowler-cloud/prowler/pull/11079)
|
||||
|
||||
### 🔄 Changed
|
||||
|
||||
@@ -17,14 +16,6 @@ All notable changes to the **Prowler SDK** are documented in this file.
|
||||
|
||||
---
|
||||
|
||||
## [5.26.2] (Prowler UNRELEASED)
|
||||
|
||||
### 🐞 Fixed
|
||||
|
||||
- `entra_users_mfa_capable` and `entra_break_glass_account_fido2_security_key_registered` report a preventive FAIL per affected user (with the missing permission named) when the M365 service principal lacks `AuditLog.Read.All`, instead of mass false positives [(#10907)](https://github.com/prowler-cloud/prowler/pull/10907)
|
||||
|
||||
---
|
||||
|
||||
## [5.26.1] (Prowler v5.26.1)
|
||||
|
||||
### 🐞 Fixed
|
||||
|
||||
@@ -154,7 +154,6 @@ from prowler.providers.llm.models import LLMOutputOptions
|
||||
from prowler.providers.m365.models import M365OutputOptions
|
||||
from prowler.providers.mongodbatlas.models import MongoDBAtlasOutputOptions
|
||||
from prowler.providers.nhn.models import NHNOutputOptions
|
||||
from prowler.providers.okta.models import OktaOutputOptions
|
||||
from prowler.providers.openstack.models import OpenStackOutputOptions
|
||||
from prowler.providers.oraclecloud.models import OCIOutputOptions
|
||||
from prowler.providers.vercel.models import VercelOutputOptions
|
||||
@@ -427,10 +426,6 @@ def prowler():
|
||||
output_options = VercelOutputOptions(
|
||||
args, bulk_checks_metadata, global_provider.identity
|
||||
)
|
||||
elif provider == "okta":
|
||||
output_options = OktaOutputOptions(
|
||||
args, bulk_checks_metadata, global_provider.identity
|
||||
)
|
||||
|
||||
# Run the quick inventory for the provider if available
|
||||
if hasattr(args, "quick_inventory") and args.quick_inventory:
|
||||
|
||||
@@ -76,7 +76,6 @@ class Provider(str, Enum):
|
||||
OPENSTACK = "openstack"
|
||||
IMAGE = "image"
|
||||
VERCEL = "vercel"
|
||||
OKTA = "okta"
|
||||
|
||||
|
||||
# Compliance
|
||||
|
||||
@@ -649,11 +649,3 @@ vercel:
|
||||
- "_PASSWORD"
|
||||
- "_API_KEY"
|
||||
- "_PRIVATE_KEY"
|
||||
|
||||
okta:
|
||||
# Okta Sign-On Policies
|
||||
# okta.signon_global_session_idle_timeout_15min
|
||||
# Maximum acceptable Global Session idle timeout, in minutes. Defaults to
|
||||
# 15 per DISA STIG V-273186 (OKTA-APP-000020); raise it only with an
|
||||
# explicit risk acceptance.
|
||||
okta_max_session_idle_minutes: 15
|
||||
|
||||
@@ -1,19 +0,0 @@
|
||||
### Account, Check and/or Region can be * to apply for all the cases.
|
||||
### Account == <Okta organization domain, e.g. acme.okta.com>
|
||||
### Bare domain only — no scheme, no path, no trailing slash.
|
||||
### Region is always "*" — Okta has no regional concept.
|
||||
### Resources matches against the policy name (e.g. "Default Policy"), not the id.
|
||||
### Resources and tags are lists that can have either Regex or Keywords.
|
||||
### Tags is an optional list that matches on tuples of 'key=value' and are "ANDed" together.
|
||||
### Use an alternation Regex to match one of multiple tags with "ORed" logic.
|
||||
### For each check you can except Accounts, Regions, Resources and/or Tags.
|
||||
########################### MUTELIST EXAMPLE ###########################
|
||||
Mutelist:
|
||||
Accounts:
|
||||
"acme.okta.com":
|
||||
Checks:
|
||||
"signon_global_session_idle_timeout_15min":
|
||||
Regions:
|
||||
- "*"
|
||||
Resources:
|
||||
- "Default Policy"
|
||||
@@ -745,10 +745,6 @@ def execute(
|
||||
is_finding_muted_args["tenancy_id"] = (
|
||||
global_provider.identity.tenancy_id
|
||||
)
|
||||
elif global_provider.type == "okta":
|
||||
is_finding_muted_args["org_domain"] = (
|
||||
global_provider.identity.org_domain
|
||||
)
|
||||
for finding in check_findings:
|
||||
if global_provider.type == "cloudflare":
|
||||
is_finding_muted_args["account_id"] = finding.account_id
|
||||
|
||||
@@ -933,41 +933,6 @@ class CheckReportGithub(Check_Report):
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class CheckReportOkta(Check_Report):
|
||||
"""Contains the Okta Check's finding information."""
|
||||
|
||||
resource_name: str
|
||||
resource_id: str
|
||||
org_domain: str
|
||||
region: str
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
metadata: Dict,
|
||||
resource: Any,
|
||||
resource_name: str = None,
|
||||
resource_id: str = None,
|
||||
org_domain: str = None,
|
||||
region: str = "global",
|
||||
) -> None:
|
||||
"""Initialize the Okta Check's finding information.
|
||||
|
||||
Args:
|
||||
metadata: The metadata of the check.
|
||||
resource: Basic information about the resource.
|
||||
resource_name: The name of the resource related with the finding.
|
||||
resource_id: The id of the resource related with the finding.
|
||||
org_domain: The Okta organization domain related with the finding.
|
||||
region: Always "global" — Okta has no regional concept.
|
||||
"""
|
||||
super().__init__(metadata, resource)
|
||||
self.resource_name = resource_name or getattr(resource, "name", "")
|
||||
self.resource_id = resource_id or getattr(resource, "id", "")
|
||||
self.org_domain = org_domain or getattr(resource, "org_domain", "")
|
||||
self.region = region
|
||||
|
||||
|
||||
@dataclass
|
||||
class CheckReportGoogleWorkspace(Check_Report):
|
||||
"""Contains the Google Workspace Check's finding information."""
|
||||
|
||||
@@ -29,10 +29,10 @@ class ProwlerArgumentParser:
|
||||
self.parser = argparse.ArgumentParser(
|
||||
prog="prowler",
|
||||
formatter_class=RawTextHelpFormatter,
|
||||
usage="prowler [-h] [--version] {aws,azure,gcp,kubernetes,m365,github,googleworkspace,okta,nhn,mongodbatlas,oraclecloud,alibabacloud,cloudflare,openstack,vercel,dashboard,iac,image,llm} ...",
|
||||
usage="prowler [-h] [--version] {aws,azure,gcp,kubernetes,m365,github,googleworkspace,nhn,mongodbatlas,oraclecloud,alibabacloud,cloudflare,openstack,vercel,dashboard,iac,image,llm} ...",
|
||||
epilog="""
|
||||
Available Cloud Providers:
|
||||
{aws,azure,gcp,kubernetes,m365,github,googleworkspace,okta,iac,llm,image,nhn,mongodbatlas,oraclecloud,alibabacloud,cloudflare,openstack,vercel}
|
||||
{aws,azure,gcp,kubernetes,m365,github,googleworkspace,iac,llm,image,nhn,mongodbatlas,oraclecloud,alibabacloud,cloudflare,openstack,vercel}
|
||||
aws AWS Provider
|
||||
azure Azure Provider
|
||||
gcp GCP Provider
|
||||
@@ -40,7 +40,6 @@ Available Cloud Providers:
|
||||
m365 Microsoft 365 Provider
|
||||
github GitHub Provider
|
||||
googleworkspace Google Workspace Provider
|
||||
okta Okta Provider
|
||||
cloudflare Cloudflare Provider
|
||||
oraclecloud Oracle Cloud Infrastructure Provider
|
||||
openstack OpenStack Provider
|
||||
|
||||
@@ -427,21 +427,6 @@ class Finding(BaseModel):
|
||||
output_data["resource_uid"] = check_output.resource_id
|
||||
output_data["region"] = "global"
|
||||
|
||||
elif provider.type == "okta":
|
||||
output_data["auth_method"] = provider.auth_method
|
||||
output_data["account_uid"] = get_nested_attribute(
|
||||
provider, "identity.org_domain"
|
||||
)
|
||||
output_data["account_name"] = get_nested_attribute(
|
||||
provider, "identity.org_domain"
|
||||
)
|
||||
output_data["account_organization_uid"] = get_nested_attribute(
|
||||
provider, "identity.client_id"
|
||||
)
|
||||
output_data["resource_name"] = check_output.resource_name
|
||||
output_data["resource_uid"] = check_output.resource_id
|
||||
output_data["region"] = "global"
|
||||
|
||||
elif provider.type == "alibabacloud":
|
||||
output_data["auth_method"] = get_nested_attribute(
|
||||
provider, "identity.identity_arn"
|
||||
|
||||
@@ -1400,56 +1400,6 @@ class HTML(Output):
|
||||
)
|
||||
return ""
|
||||
|
||||
@staticmethod
|
||||
def get_okta_assessment_summary(provider: Provider) -> str:
|
||||
"""
|
||||
get_okta_assessment_summary gets the HTML assessment summary for the Okta provider
|
||||
|
||||
Args:
|
||||
provider (Provider): the Okta provider object
|
||||
|
||||
Returns:
|
||||
str: HTML assessment summary for the Okta provider
|
||||
"""
|
||||
try:
|
||||
assessment_items = f"""
|
||||
<li class="list-group-item">
|
||||
<b>Okta Domain:</b> {provider.identity.org_domain}
|
||||
</li>"""
|
||||
|
||||
credentials_items = f"""
|
||||
<li class="list-group-item">
|
||||
<b>Authentication:</b> {provider.auth_method}
|
||||
</li>
|
||||
<li class="list-group-item">
|
||||
<b>Client ID:</b> {provider.identity.client_id}
|
||||
</li>"""
|
||||
|
||||
return f"""
|
||||
<div class="col-md-2">
|
||||
<div class="card">
|
||||
<div class="card-header">
|
||||
Okta Assessment Summary
|
||||
</div>
|
||||
<ul class="list-group list-group-flush">{assessment_items}
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-4">
|
||||
<div class="card">
|
||||
<div class="card-header">
|
||||
Okta Credentials
|
||||
</div>
|
||||
<ul class="list-group list-group-flush">{credentials_items}
|
||||
</ul>
|
||||
</div>
|
||||
</div>"""
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
|
||||
)
|
||||
return ""
|
||||
|
||||
@staticmethod
|
||||
def get_assessment_summary(provider: Provider) -> str:
|
||||
"""
|
||||
|
||||
@@ -40,8 +40,6 @@ def stdout_report(finding, color, verbose, status, fix):
|
||||
details = finding.location
|
||||
if finding.check_metadata.Provider == "vercel":
|
||||
details = finding.region
|
||||
if finding.check_metadata.Provider == "okta":
|
||||
details = finding.region
|
||||
|
||||
if (verbose or fix) and (not status or finding.status in status):
|
||||
if finding.muted:
|
||||
|
||||
@@ -108,9 +108,6 @@ def display_summary_table(
|
||||
)
|
||||
else:
|
||||
audited_entities = provider.identity.username or "Personal Account"
|
||||
elif provider.type == "okta":
|
||||
entity_type = "Okta Org"
|
||||
audited_entities = provider.identity.org_domain
|
||||
|
||||
# Check if there are findings and that they are not all MANUAL
|
||||
if findings and not all(finding.status == "MANUAL" for finding in findings):
|
||||
|
||||
@@ -403,19 +403,6 @@ class Provider(ABC):
|
||||
mutelist_path=arguments.mutelist_file,
|
||||
fixer_config=fixer_config,
|
||||
)
|
||||
elif "okta" in provider_class_name.lower():
|
||||
provider_class(
|
||||
okta_org_domain=getattr(arguments, "okta_org_domain", ""),
|
||||
okta_client_id=getattr(arguments, "okta_client_id", ""),
|
||||
okta_private_key=getattr(arguments, "okta_private_key", ""),
|
||||
okta_private_key_file=getattr(
|
||||
arguments, "okta_private_key_file", ""
|
||||
),
|
||||
okta_scopes=getattr(arguments, "okta_scopes", None),
|
||||
config_path=arguments.config_file,
|
||||
mutelist_path=arguments.mutelist_file,
|
||||
fixer_config=fixer_config,
|
||||
)
|
||||
|
||||
except TypeError as error:
|
||||
logger.critical(
|
||||
|
||||
@@ -85,15 +85,6 @@ class entra_break_glass_account_fido2_security_key_registered(Check):
|
||||
resource_id=user.id,
|
||||
)
|
||||
|
||||
if entra_client.user_registration_details_error:
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
f"Cannot verify FIDO2 security key registration for break glass account {user.name}: "
|
||||
f"{entra_client.user_registration_details_error}."
|
||||
)
|
||||
findings.append(report)
|
||||
continue
|
||||
|
||||
auth_methods = set(user.authentication_methods)
|
||||
has_fido2 = "fido2SecurityKey" in auth_methods
|
||||
has_passkey_device_bound = "passKeyDeviceBound" in auth_methods
|
||||
|
||||
@@ -3,7 +3,7 @@ import json
|
||||
from asyncio import gather
|
||||
from datetime import datetime, timezone
|
||||
from enum import Enum
|
||||
from typing import Any, Dict, List, Optional, Tuple
|
||||
from typing import Dict, List, Optional
|
||||
from uuid import UUID
|
||||
|
||||
from kiota_abstractions.base_request_configuration import RequestConfiguration
|
||||
@@ -76,7 +76,6 @@ class Entra(M365Service):
|
||||
|
||||
self.tenant_domain = provider.identity.tenant_domain
|
||||
self.tenant_id = getattr(provider.identity, "tenant_id", None)
|
||||
self.user_registration_details_error: Optional[str] = None
|
||||
attributes = loop.run_until_complete(
|
||||
gather(
|
||||
self._get_authorization_policy(),
|
||||
@@ -855,9 +854,7 @@ class Entra(M365Service):
|
||||
for member in members:
|
||||
user_roles_map.setdefault(member.id, []).append(role_template_id)
|
||||
|
||||
registration_details, self.user_registration_details_error = (
|
||||
await self._get_user_registration_details()
|
||||
)
|
||||
registration_details = await self._get_user_registration_details()
|
||||
|
||||
while users_response:
|
||||
for user in getattr(users_response, "value", []) or []:
|
||||
@@ -900,24 +897,18 @@ class Entra(M365Service):
|
||||
)
|
||||
return users
|
||||
|
||||
async def _get_user_registration_details(
|
||||
self,
|
||||
) -> Tuple[Dict[str, Dict[str, Any]], Optional[str]]:
|
||||
async def _get_user_registration_details(self):
|
||||
"""Retrieve user authentication method registration details.
|
||||
|
||||
Fetches registration details from the Microsoft Graph API, including
|
||||
MFA capability and the specific authentication methods each user has registered.
|
||||
|
||||
Returns:
|
||||
A tuple containing:
|
||||
- A dictionary mapping user IDs to their registration details,
|
||||
where each value is a dict with 'is_mfa_capable' (bool) and
|
||||
'authentication_methods' (list of str), or an empty dict if
|
||||
retrieval fails.
|
||||
- An error message string if there was an access error, None otherwise.
|
||||
dict: A dictionary mapping user IDs to their registration details,
|
||||
where each value is a dict with 'is_mfa_capable' (bool) and
|
||||
'authentication_methods' (list of str).
|
||||
"""
|
||||
registration_details = {}
|
||||
error_message = None
|
||||
try:
|
||||
registration_builder = (
|
||||
self.client.reports.authentication_methods.user_registration_details
|
||||
@@ -942,25 +933,16 @@ class Entra(M365Service):
|
||||
next_link
|
||||
).get()
|
||||
|
||||
except ODataError as error:
|
||||
error_code = getattr(error.error, "code", None) if error.error else None
|
||||
if error_code == "Authorization_RequestDenied":
|
||||
error_message = "Insufficient privileges to read user registration details. Required permission: AuditLog.Read.All"
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error_message}"
|
||||
)
|
||||
else:
|
||||
except Exception as error:
|
||||
if (
|
||||
error.__class__.__name__ == "ODataError"
|
||||
and error.__dict__.get("response_status_code", None) == 403
|
||||
):
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
error_message = str(error)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
error_message = f"Failed to retrieve user registration details: {error}"
|
||||
|
||||
return registration_details, error_message
|
||||
return registration_details
|
||||
|
||||
async def _get_oauth_apps(self) -> Optional[Dict[str, "OAuthApp"]]:
|
||||
"""
|
||||
|
||||
@@ -13,10 +13,6 @@ class entra_users_mfa_capable(Check):
|
||||
("Ensure all member users are 'MFA capable'").
|
||||
|
||||
Guest users and disabled accounts are excluded from the evaluation.
|
||||
|
||||
- PASS: The member user is MFA capable.
|
||||
- FAIL: The member user is not MFA capable, or MFA capability cannot be
|
||||
verified due to insufficient permissions to read user registration details.
|
||||
"""
|
||||
|
||||
def execute(self) -> List[CheckReportM365]:
|
||||
@@ -46,13 +42,7 @@ class entra_users_mfa_capable(Check):
|
||||
resource_id=user.id,
|
||||
)
|
||||
|
||||
if entra_client.user_registration_details_error:
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
f"Cannot verify MFA capability for user {user.name}: "
|
||||
f"{entra_client.user_registration_details_error}."
|
||||
)
|
||||
elif not user.is_mfa_capable:
|
||||
if not user.is_mfa_capable:
|
||||
report.status = "FAIL"
|
||||
report.status_extended = f"User {user.name} is not MFA capable."
|
||||
else:
|
||||
|
||||
@@ -1,112 +0,0 @@
|
||||
from prowler.exceptions.exceptions import ProwlerException
|
||||
|
||||
|
||||
# Exceptions codes from 14000 to 14999 are reserved for Okta exceptions
|
||||
class OktaBaseException(ProwlerException):
|
||||
"""Base class for Okta Errors."""
|
||||
|
||||
OKTA_ERROR_CODES = {
|
||||
(14000, "OktaEnvironmentVariableError"): {
|
||||
"message": "Okta environment variable error",
|
||||
"remediation": "Check the Okta environment variables and ensure they are properly set.",
|
||||
},
|
||||
(14001, "OktaSetUpSessionError"): {
|
||||
"message": "Error setting up Okta session",
|
||||
"remediation": "Check the OAuth credentials (org URL, client ID, private key, scopes) and ensure they are properly configured.",
|
||||
},
|
||||
(14002, "OktaSetUpIdentityError"): {
|
||||
"message": "Okta identity setup error due to bad credentials",
|
||||
"remediation": "Check the OAuth credentials and confirm the service app has been granted the required read scopes.",
|
||||
},
|
||||
(14003, "OktaInvalidCredentialsError"): {
|
||||
"message": "Okta credentials are not valid",
|
||||
"remediation": "Check the client ID and private key for the Okta service app.",
|
||||
},
|
||||
(14004, "OktaInvalidOrgDomainError"): {
|
||||
"message": "Okta organization domain is not valid",
|
||||
"remediation": "Provide an Okta-managed domain such as <org>.okta.com (or .oktapreview.com / .okta-emea.com / .okta-gov.com / .okta.mil / .okta-miltest.com / .trex-govcloud.com), with no scheme and no trailing slash.",
|
||||
},
|
||||
(14005, "OktaPrivateKeyFileError"): {
|
||||
"message": "Okta private key file could not be read",
|
||||
"remediation": "Check the file path and permissions, and ensure the file contains a PEM-encoded RSA key or a JWK JSON document.",
|
||||
},
|
||||
(14006, "OktaInsufficientPermissionsError"): {
|
||||
"message": "Okta service app is missing required scopes",
|
||||
"remediation": "Have a Super Admin grant the required *.read scopes to the service app and assign the Read-Only Administrator role.",
|
||||
},
|
||||
}
|
||||
|
||||
def __init__(self, code, file=None, original_exception=None, message=None):
|
||||
provider = "Okta"
|
||||
error_info = self.OKTA_ERROR_CODES.get((code, self.__class__.__name__))
|
||||
if error_info is None:
|
||||
error_info = {
|
||||
"message": message or "Unknown Okta error.",
|
||||
"remediation": "Check the Okta API documentation for more details.",
|
||||
}
|
||||
elif message:
|
||||
error_info = error_info.copy()
|
||||
error_info["message"] = message
|
||||
super().__init__(
|
||||
code=code,
|
||||
source=provider,
|
||||
file=file,
|
||||
original_exception=original_exception,
|
||||
error_info=error_info,
|
||||
)
|
||||
|
||||
|
||||
class OktaCredentialsError(OktaBaseException):
|
||||
"""Base class for Okta credentials errors."""
|
||||
|
||||
def __init__(self, code, file=None, original_exception=None, message=None):
|
||||
super().__init__(code, file, original_exception, message)
|
||||
|
||||
|
||||
class OktaEnvironmentVariableError(OktaCredentialsError):
|
||||
def __init__(self, file=None, original_exception=None, message=None):
|
||||
super().__init__(
|
||||
14000, file=file, original_exception=original_exception, message=message
|
||||
)
|
||||
|
||||
|
||||
class OktaSetUpSessionError(OktaCredentialsError):
|
||||
def __init__(self, file=None, original_exception=None, message=None):
|
||||
super().__init__(
|
||||
14001, file=file, original_exception=original_exception, message=message
|
||||
)
|
||||
|
||||
|
||||
class OktaSetUpIdentityError(OktaCredentialsError):
|
||||
def __init__(self, file=None, original_exception=None, message=None):
|
||||
super().__init__(
|
||||
14002, file=file, original_exception=original_exception, message=message
|
||||
)
|
||||
|
||||
|
||||
class OktaInvalidCredentialsError(OktaCredentialsError):
|
||||
def __init__(self, file=None, original_exception=None, message=None):
|
||||
super().__init__(
|
||||
14003, file=file, original_exception=original_exception, message=message
|
||||
)
|
||||
|
||||
|
||||
class OktaInvalidOrgDomainError(OktaCredentialsError):
|
||||
def __init__(self, file=None, original_exception=None, message=None):
|
||||
super().__init__(
|
||||
14004, file=file, original_exception=original_exception, message=message
|
||||
)
|
||||
|
||||
|
||||
class OktaPrivateKeyFileError(OktaCredentialsError):
|
||||
def __init__(self, file=None, original_exception=None, message=None):
|
||||
super().__init__(
|
||||
14005, file=file, original_exception=original_exception, message=message
|
||||
)
|
||||
|
||||
|
||||
class OktaInsufficientPermissionsError(OktaCredentialsError):
|
||||
def __init__(self, file=None, original_exception=None, message=None):
|
||||
super().__init__(
|
||||
14006, file=file, original_exception=original_exception, message=message
|
||||
)
|
||||
@@ -1,43 +0,0 @@
|
||||
def init_parser(self):
    """Init the Okta Provider CLI parser.

    The Okta provider authenticates with OAuth 2.0 (private-key JWT). The
    private key is intentionally not exposed as a CLI flag — secrets must
    be supplied via the `OKTA_PRIVATE_KEY` or `OKTA_PRIVATE_KEY_FILE`
    environment variable. Non-secret values (org URL, client ID, scopes)
    are flag-configurable.
    """
    parser = self.subparsers.add_parser(
        "okta", parents=[self.common_providers_parser], help="Okta Provider"
    )
    auth_group = parser.add_argument_group("Authentication")
    # Org domain is validated later by the provider; only Okta-managed
    # domains are accepted, so the help text enumerates the allowed TLDs.
    auth_group.add_argument(
        "--okta-org-domain",
        nargs="?",
        default=None,
        metavar="OKTA_ORG_DOMAIN",
        help=(
            "Okta organization domain (e.g. acme.okta.com). Must be an "
            "Okta-managed domain (.okta.com / .oktapreview.com / "
            ".okta-emea.com / .okta-gov.com / .okta.mil / "
            ".okta-miltest.com / .trex-govcloud.com), without scheme or path."
        ),
    )
    auth_group.add_argument(
        "--okta-client-id",
        nargs="?",
        default=None,
        metavar="OKTA_CLIENT_ID",
        help="Okta service app Client ID for OAuth 2.0 (private-key JWT)",
    )
    # nargs="+" lets users pass several scopes space-separated on the CLI.
    auth_group.add_argument(
        "--okta-scopes",
        nargs="+",
        default=None,
        metavar="OKTA_SCOPES",
        help=(
            "OAuth scopes to request, space-separated "
            "(e.g. okta.policies.read okta.users.read). Defaults to the "
            "read scopes required by the bundled checks."
        ),
    )
|
||||
@@ -1,14 +0,0 @@
|
||||
from prowler.lib.check.models import CheckReportOkta
|
||||
from prowler.lib.mutelist.mutelist import Mutelist
|
||||
from prowler.lib.outputs.utils import unroll_dict, unroll_tags
|
||||
|
||||
|
||||
class OktaMutelist(Mutelist):
    """Mutelist implementation for the Okta provider."""

    def is_finding_muted(self, finding: CheckReportOkta, org_domain: str) -> bool:
        """Return True when the finding matches a mutelist entry for this org.

        Okta has no region concept, so "*" is passed in the region slot.
        """
        check_id = finding.check_metadata.CheckID
        flattened_tags = unroll_dict(unroll_tags(finding.resource_tags))
        return self.is_muted(
            org_domain, check_id, "*", finding.resource_name, flattened_tags
        )
|
||||
@@ -1,34 +0,0 @@
|
||||
import asyncio
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from okta.client import Client as OktaSDKClient
|
||||
|
||||
from prowler.providers.okta.models import OktaSession
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from prowler.providers.okta.okta_provider import OktaProvider
|
||||
|
||||
|
||||
class OktaService:
    """Base class for Okta service implementations.

    Wraps the async okta-sdk-python `Client` so that subclasses can stay
    synchronous like the other Prowler providers. The SDK auto-refreshes
    the OAuth access token; nothing to manage here.
    """

    def __init__(self, service: str, provider: "OktaProvider"):
        # Keep a handle to the provider so subclasses can reach identity,
        # session and configuration without re-plumbing them.
        self.provider = provider
        self.service = service
        self.client = self.__set_client__(provider.session)
        self.audit_config = provider.audit_config
        self.fixer_config = provider.fixer_config

    @staticmethod
    def __set_client__(session: OktaSession) -> OktaSDKClient:
        """Build the async SDK client from the shared session config."""
        sdk_config = session.to_sdk_config()
        return OktaSDKClient(sdk_config)

    @staticmethod
    def _run(coro):
        """Run an okta-sdk-python coroutine from synchronous code."""
        return asyncio.run(coro)
|
||||
@@ -1,48 +0,0 @@
|
||||
from pydantic import BaseModel
|
||||
|
||||
from prowler.config.config import output_file_timestamp
|
||||
from prowler.providers.common.models import ProviderOutputOptions
|
||||
|
||||
|
||||
class OktaSession(BaseModel):
    """Validated Okta connection settings shared across the provider."""

    org_domain: str
    client_id: str
    scopes: list[str]
    private_key: str

    def to_sdk_config(self) -> dict:
        """Build the okta-sdk-python client configuration dict.

        Shared by the credential probe (OktaProvider.setup_identity) and
        the service-level client (OktaService.__set_client__). Keeping the
        builder in one place stops the two SDK config dicts from drifting.
        The Okta SDK expects a fully-qualified `orgUrl`; we build it from
        the validated domain so user input stays scheme-free.
        DPoP proofs are sent on every token request — required by tenants
        with "Demonstrating Proof of Possession" enabled on the service
        app (or org-wide), harmless on tenants that don't.
        """
        org_url = f"https://{self.org_domain}"
        return {
            "orgUrl": org_url,
            "authorizationMode": "PrivateKey",
            "clientId": self.client_id,
            "scopes": self.scopes,
            "privateKey": self.private_key,
            "dpopEnabled": True,
        }
|
||||
|
||||
|
||||
class OktaIdentityInfo(BaseModel):
    """Identity reported for the scan: service apps have no human user, so
    the org domain plus the service-app client ID is the identity."""

    # Okta-managed organization domain, e.g. acme.okta.com (scheme-free).
    org_domain: str
    # OAuth service-app Client ID used for private-key JWT authentication.
    client_id: str
|
||||
|
||||
|
||||
class OktaOutputOptions(ProviderOutputOptions):
    """Output options for the Okta provider.

    Derives a default output filename from the org domain when the user
    did not supply one on the command line.
    """

    def __init__(self, arguments, bulk_checks_metadata, identity):
        super().__init__(arguments, bulk_checks_metadata)
        # getattr covers both "attribute absent" and "attribute is None".
        filename = getattr(arguments, "output_filename", None)
        if filename is None:
            filename = f"prowler-output-{identity.org_domain}-{output_file_timestamp}"
        self.output_filename = filename
|
||||
@@ -1,375 +0,0 @@
|
||||
import asyncio
|
||||
import os
|
||||
import re
|
||||
from os import environ
|
||||
from typing import Optional, Union
|
||||
|
||||
from colorama import Fore, Style
|
||||
from okta.client import Client as OktaSDKClient
|
||||
|
||||
from prowler.config.config import (
|
||||
default_config_file_path,
|
||||
get_default_mute_file_path,
|
||||
load_and_validate_config_file,
|
||||
)
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.lib.mutelist.mutelist import Mutelist
|
||||
from prowler.lib.utils.utils import print_boxes
|
||||
from prowler.providers.common.models import Audit_Metadata, Connection
|
||||
from prowler.providers.common.provider import Provider
|
||||
from prowler.providers.okta.exceptions.exceptions import (
|
||||
OktaEnvironmentVariableError,
|
||||
OktaInsufficientPermissionsError,
|
||||
OktaInvalidCredentialsError,
|
||||
OktaInvalidOrgDomainError,
|
||||
OktaPrivateKeyFileError,
|
||||
OktaSetUpIdentityError,
|
||||
OktaSetUpSessionError,
|
||||
)
|
||||
from prowler.providers.okta.lib.mutelist.mutelist import OktaMutelist
|
||||
from prowler.providers.okta.models import OktaIdentityInfo, OktaSession
|
||||
|
||||
# Scopes requested when the user supplies none — the minimum read scope
# needed by the bundled signon checks.
DEFAULT_SCOPES = ["okta.policies.read"]
# Accept only Okta-managed domains. Custom (vanity) domains are rejected on
# purpose — they're a recurring source of typos and silent misconfig and
# Prowler's audience overwhelmingly uses Okta-managed hosts. The TLDs below
# match the set the Okta SDK whitelists in `okta.config.config_validator`,
# which includes the commercial, preview, EMEA and US gov/mil environments.
# If a customer with a custom domain shows up, lift this guard behind an
# explicit opt-in.
ORG_DOMAIN_RE = re.compile(
    r"^[a-z0-9][a-z0-9-]*\.("
    r"okta\.com|oktapreview\.com|okta-emea\.com|"
    r"okta-gov\.com|okta\.mil|okta-miltest\.com|trex-govcloud\.com"
    r")$"
)
|
||||
|
||||
|
||||
class OktaProvider(Provider):
    """Okta Provider class.

    Authenticates against an Okta organization using OAuth 2.0 with a
    private-key JWT (Client Credentials grant). The SDK requests and
    refreshes the access token internally.

    Attributes:
        _type (str): The type of the provider.
        _auth_method (Optional[str]): The authentication method used by the provider.
        _session (OktaSession): The session object for the provider.
        _identity (OktaIdentityInfo): The identity information for the provider.
        _audit_config (dict): The audit configuration for the provider.
        _fixer_config (dict): The fixer configuration for the provider.
        _mutelist (Mutelist): The mutelist for the provider.
        audit_metadata (Audit_Metadata): The audit metadata for the provider.
    """

    _type: str = "okta"
    # None until __init__ completes; then a human-readable method string.
    _auth_method: Optional[str] = None
    _session: OktaSession
    _identity: OktaIdentityInfo
    _audit_config: dict
    _fixer_config: dict
    _mutelist: Mutelist
    audit_metadata: Audit_Metadata

    def __init__(
        self,
        okta_org_domain: str = "",
        okta_client_id: str = "",
        okta_private_key: str = "",
        okta_private_key_file: str = "",
        okta_scopes: Optional[Union[str, list[str]]] = None,
        config_path: str = None,
        config_content: dict = None,
        fixer_config: dict = None,
        mutelist_path: str = None,
        mutelist_content: dict = None,
    ):
        """Okta Provider constructor.

        Note: `fixer_config` defaults to None (previously a shared mutable
        `{}` — a classic Python pitfall) and is normalized to a fresh dict
        per instance; behavior for callers is unchanged.
        """
        logger.info("Instantiating Okta Provider...")

        # Fail fast with one combined error if any credential is missing.
        OktaProvider.validate_arguments(
            okta_org_domain=okta_org_domain,
            okta_client_id=okta_client_id,
            okta_private_key=okta_private_key,
            okta_private_key_file=okta_private_key_file,
        )
        self._session = OktaProvider.setup_session(
            org_domain=okta_org_domain,
            client_id=okta_client_id,
            private_key=okta_private_key,
            private_key_file=okta_private_key_file,
            scopes=okta_scopes,
        )
        # setup_identity also performs the live credential probe.
        self._identity = OktaProvider.setup_identity(self._session)
        self._auth_method = "OAuth 2.0 (private-key JWT)"

        if config_content:
            self._audit_config = config_content
        else:
            if not config_path:
                config_path = default_config_file_path
            self._audit_config = load_and_validate_config_file(self._type, config_path)
        # Fresh dict per instance — never share a mutable default.
        self._fixer_config = fixer_config if fixer_config is not None else {}

        if mutelist_content:
            self._mutelist = OktaMutelist(mutelist_content=mutelist_content)
        else:
            if not mutelist_path:
                mutelist_path = get_default_mute_file_path(self.type)
            self._mutelist = OktaMutelist(mutelist_path=mutelist_path)

        Provider.set_global_provider(self)

    @property
    def auth_method(self):
        return self._auth_method

    @property
    def session(self):
        return self._session

    @property
    def identity(self):
        return self._identity

    @property
    def type(self):
        return self._type

    @property
    def audit_config(self):
        return self._audit_config

    @property
    def fixer_config(self):
        return self._fixer_config

    @property
    def mutelist(self) -> OktaMutelist:
        return self._mutelist

    @staticmethod
    def validate_arguments(
        okta_org_domain: str = "",
        okta_client_id: str = "",
        okta_private_key: str = "",
        okta_private_key_file: str = "",
    ):
        """Validate that all required OAuth credentials are provided.

        Falls back to the matching `OKTA_*` environment variables when a CLI
        argument is not supplied. The private key may be supplied as raw
        content (preferred for API/UI integrations) or as a file path.

        Raises:
            OktaEnvironmentVariableError: single combined error listing every
                missing credential.
        """
        org_domain = okta_org_domain or environ.get("OKTA_ORG_DOMAIN", "")
        client_id = okta_client_id or environ.get("OKTA_CLIENT_ID", "")
        private_key = okta_private_key or environ.get("OKTA_PRIVATE_KEY", "")
        private_key_file = okta_private_key_file or environ.get(
            "OKTA_PRIVATE_KEY_FILE", ""
        )

        missing = []
        if not org_domain:
            missing.append("--okta-org-domain / OKTA_ORG_DOMAIN")
        if not client_id:
            missing.append("--okta-client-id / OKTA_CLIENT_ID")
        if not private_key and not private_key_file:
            missing.append("OKTA_PRIVATE_KEY (or OKTA_PRIVATE_KEY_FILE)")
        if missing:
            raise OktaEnvironmentVariableError(
                file=os.path.basename(__file__),
                message=(
                    "Okta provider requires all OAuth credentials. Missing: "
                    + ", ".join(missing)
                ),
            )

    @staticmethod
    def setup_session(
        org_domain: str = "",
        client_id: str = "",
        private_key: str = "",
        private_key_file: str = "",
        scopes: Optional[Union[str, list[str]]] = None,
    ) -> OktaSession:
        """Build an OktaSession from CLI args, falling back to environment variables.

        Accepts the private key as raw content (`private_key` /
        `OKTA_PRIVATE_KEY`) or as a file path (`private_key_file` /
        `OKTA_PRIVATE_KEY_FILE`). Content takes precedence when both are
        supplied — this matches the GitHub provider pattern and keeps the
        API/UI integrations from having to write keys to disk.

        Raises:
            OktaInvalidOrgDomainError: domain is not an accepted Okta-managed domain.
            OktaPrivateKeyFileError: key file unreadable or key content empty.
            OktaSetUpSessionError: any other unexpected failure.
        """
        try:
            org_domain = org_domain or environ.get("OKTA_ORG_DOMAIN", "")
            client_id = client_id or environ.get("OKTA_CLIENT_ID", "")
            private_key = private_key or environ.get("OKTA_PRIVATE_KEY", "")
            private_key_file = private_key_file or environ.get(
                "OKTA_PRIVATE_KEY_FILE", ""
            )
            if not scopes:
                scopes = environ.get("OKTA_SCOPES", "")

            # Normalize before matching: the regex is lowercase-only.
            org_domain = org_domain.strip().lower()
            if not ORG_DOMAIN_RE.match(org_domain):
                raise OktaInvalidOrgDomainError(
                    file=os.path.basename(__file__),
                    message=(
                        f"Invalid Okta org domain: '{org_domain}'. Expected "
                        "an Okta-managed domain such as <org>.okta.com "
                        "(or .oktapreview.com / .okta-emea.com / "
                        ".okta-gov.com / .okta.mil / .okta-miltest.com / "
                        ".trex-govcloud.com), with no scheme and no path."
                    ),
                )

            if private_key:
                private_key = private_key.strip()
            else:
                try:
                    with open(private_key_file, "r") as fh:
                        private_key = fh.read().strip()
                except OSError as error:
                    raise OktaPrivateKeyFileError(
                        file=os.path.basename(__file__),
                        original_exception=error,
                        message=f"Could not read private key file '{private_key_file}': {error}",
                    )
            if not private_key:
                raise OktaPrivateKeyFileError(
                    file=os.path.basename(__file__),
                    message=(
                        f"Private key file '{private_key_file}' is empty."
                        if private_key_file
                        else "Private key content is empty."
                    ),
                )

            # Accept either a CSV string (from env var / legacy callers) or
            # a list[str] (from programmatic callers and the CLI's nargs="+").
            # List elements may themselves contain commas (e.g. "a,b") and
            # are flattened to support mixed input.
            if isinstance(scopes, str):
                raw_items = scopes.split(",")
            elif isinstance(scopes, list):
                raw_items = [item for s in scopes for item in str(s).split(",")]
            else:
                raw_items = []
            scope_list = [s.strip() for s in raw_items if s and s.strip()]
            if not scope_list:
                # Copy so callers can't mutate the module-level default.
                scope_list = list(DEFAULT_SCOPES)

            return OktaSession(
                org_domain=org_domain,
                client_id=client_id,
                scopes=scope_list,
                private_key=private_key,
            )

        except (OktaInvalidOrgDomainError, OktaPrivateKeyFileError):
            # Already specific — don't wrap these in the generic error.
            raise
        except Exception as error:
            logger.critical(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
            raise OktaSetUpSessionError(original_exception=error)

    @staticmethod
    def setup_identity(session: OktaSession) -> OktaIdentityInfo:
        """Synthesize identity from the session and verify credentials.

        Service apps don't represent a human user, so the identity is the
        org URL plus the service-app client ID. We still hit the cheapest
        scope-covered endpoint (`list_policies` with limit=1) to fail loud
        when credentials, scopes, or the granted admin role are wrong.

        Raises:
            OktaInsufficientPermissionsError: probe failed with a scope/role error.
            OktaInvalidCredentialsError: probe failed with any other API error.
            OktaSetUpIdentityError: unexpected failure running the probe.
        """

        async def _probe():
            client = OktaSDKClient(session.to_sdk_config())
            return await client.list_policies(type="OKTA_SIGN_ON", limit="1")

        try:
            result = asyncio.run(_probe())
            # SDK returns (items, resp, err) on the normal path and (items, err)
            # only on early request-creation errors. The error is always last.
            err = result[-1]
            if err is not None:
                err_text = str(err).lower()
                # Distinguish scope/role failures from generic credential
                # failures — different remediation paths in the docs.
                permission_signals = (
                    "invalid_scope",
                    "forbidden",
                    "not authorized",
                    "permission",
                )
                if any(signal in err_text for signal in permission_signals):
                    raise OktaInsufficientPermissionsError(
                        file=os.path.basename(__file__),
                        message=(
                            "Okta rejected the credential probe with a "
                            f"permission-related error: {err}"
                        ),
                    )
                raise OktaInvalidCredentialsError(
                    file=os.path.basename(__file__),
                    message=f"Failed to authenticate against Okta: {err}",
                )
            return OktaIdentityInfo(
                org_domain=session.org_domain,
                client_id=session.client_id,
            )
        except (OktaInvalidCredentialsError, OktaInsufficientPermissionsError):
            raise
        except Exception as error:
            logger.critical(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
            raise OktaSetUpIdentityError(original_exception=error)

    def print_credentials(self):
        """Print the effective (non-secret) credentials in a console box."""
        report_lines = [
            f"Okta Domain: {Fore.YELLOW}{self.identity.org_domain}{Style.RESET_ALL}",
            f"Okta Client ID: {Fore.YELLOW}{self.identity.client_id}{Style.RESET_ALL}",
            f"Authentication Method: {Fore.YELLOW}{self.auth_method}{Style.RESET_ALL}",
        ]
        report_title = (
            f"{Style.BRIGHT}Using the Okta credentials below:{Style.RESET_ALL}"
        )
        print_boxes(report_lines, report_title)

    @staticmethod
    def test_connection(
        okta_org_domain: str = "",
        okta_client_id: str = "",
        okta_private_key: str = "",
        okta_private_key_file: str = "",
        okta_scopes: Optional[Union[str, list[str]]] = None,
        raise_on_exception: bool = True,
    ) -> Connection:
        """Test the connection to Okta with the provided OAuth credentials.

        Returns a Connection(is_connected=True) on success. On failure,
        re-raises when `raise_on_exception` is True, otherwise returns a
        Connection carrying the error.
        """
        try:
            OktaProvider.validate_arguments(
                okta_org_domain=okta_org_domain,
                okta_client_id=okta_client_id,
                okta_private_key=okta_private_key,
                okta_private_key_file=okta_private_key_file,
            )
            session = OktaProvider.setup_session(
                org_domain=okta_org_domain,
                client_id=okta_client_id,
                private_key=okta_private_key,
                private_key_file=okta_private_key_file,
                scopes=okta_scopes,
            )
            OktaProvider.setup_identity(session)
            return Connection(is_connected=True)
        except Exception as error:
            logger.critical(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
            if raise_on_exception:
                # Bare raise preserves the full original traceback.
                raise
            return Connection(error=error)
|
||||
@@ -1,4 +0,0 @@
|
||||
from prowler.providers.common.provider import Provider
from prowler.providers.okta.services.signon.signon_service import Signon

# Module-level singleton: instantiating Signon here fetches the
# OKTA_SIGN_ON policies once, and every signon check imports this
# shared client instead of re-querying the API.
signon_client = Signon(Provider.get_global_provider())
|
||||
@@ -1,37 +0,0 @@
|
||||
{
|
||||
"Provider": "okta",
|
||||
"CheckID": "signon_global_session_idle_timeout_15min",
|
||||
"CheckTitle": "Default Global Session Policy has a Priority 1 non-default rule enforcing 15-minute idle timeout",
|
||||
"CheckType": [],
|
||||
"ServiceName": "signon",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "NotDefined",
|
||||
"ResourceGroup": "governance",
|
||||
"Description": "The **Default Global Session Policy** must have a **Priority 1** rule that is **not** the built-in `Default Rule`, and that rule must set **Maximum Okta global session idle time** to 15 minutes or less. The threshold defaults to 15 minutes and is overridable via the `okta_max_session_idle_minutes` key in the audit config.",
|
||||
"Risk": "Without a 15-minute idle timeout, an unattended workstation leaves an authenticated Okta session open indefinitely, allowing an attacker physical or remote access to take over the user's identity and pivot into every downstream application that trusts Okta SSO.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://help.okta.com/oie/en-us/content/topics/identity-engine/policies/about-okta-sign-on-policies.htm",
|
||||
"https://developer.okta.com/docs/api/openapi/okta-management/management/tag/Policy/"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Sign in to the Okta Admin Console as a Super Admin\n2. Go to Security > Global Session Policy\n3. Open the Default Policy\n4. Add or edit a non-default rule\n5. Move that rule to Priority 1 so it is evaluated before the built-in Default Rule\n6. Set 'Maximum Okta global session idle time' to 15 minutes or less\n7. Save the rule",
|
||||
"Terraform": "resource \"okta_policy_rule_signon\" \"prowler_idle_timeout_15min\" {\n policy_id = okta_policy_signon.default.id\n name = \"Prowler-enforced idle timeout\"\n status = \"ACTIVE\"\n session_idle = 15\n session_persistent = false\n}\n"
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Configure the Default Global Session Policy so its Priority 1 non-default rule sets the Maximum Okta global session idle time to 15 minutes or less.",
|
||||
"Url": "https://hub.prowler.com/check/signon_global_session_idle_timeout_15min"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"identity-access"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -1,126 +0,0 @@
|
||||
from prowler.lib.check.models import Check, CheckReportOkta
|
||||
from prowler.providers.okta.services.signon.signon_client import signon_client
|
||||
from prowler.providers.okta.services.signon.signon_service import GlobalSessionPolicy
|
||||
|
||||
# STIG-mandated default idle-timeout threshold, in minutes; overridable
# via `okta_max_session_idle_minutes` in the audit config.
DEFAULT_THRESHOLD_MINUTES = 15


class signon_global_session_idle_timeout_15min(Check):
    """STIG V-273186 / OKTA-APP-000020.

    The DISA STIG requires the Okta Default Policy to have an active
    Priority 1 rule that is not the built-in Default Rule, and that
    rule must set the maximum Okta global session idle time to the
    configured threshold or lower (defaults to 15 minutes per STIG;
    override via `okta_max_session_idle_minutes` in the audit config).
    """

    def execute(self) -> list[CheckReportOkta]:
        """Run the check and return exactly one report for the Default Policy."""
        audit_config = signon_client.audit_config or {}
        threshold = audit_config.get(
            "okta_max_session_idle_minutes", DEFAULT_THRESHOLD_MINUTES
        )
        org_domain = signon_client.provider.identity.org_domain
        policy = self._get_default_policy()
        report = CheckReportOkta(
            metadata=self.metadata(), resource=policy, org_domain=org_domain
        )

        # Sentinel id produced by _get_default_policy when no default
        # Global Session Policy was found in the tenant.
        if policy.id == "default-policy-missing":
            report.status = "FAIL"
            report.status_extended = (
                "Default Global Session Policy was not found. STIG V-273186 "
                "requires the Default Policy to contain an active Priority 1 "
                f"non-default rule with idle timeout <= {threshold} minutes."
            )
            return [report]

        # An empty status is treated as active; any other non-ACTIVE value fails.
        if policy.status and policy.status.upper() != "ACTIVE":
            report.status = "FAIL"
            report.status_extended = (
                f"Default Global Session Policy '{policy.name}' is in "
                f"status '{policy.status}'. STIG V-273186 requires an active "
                "Default Policy with an active Priority 1 non-default rule."
            )
            return [report]

        # Keep only active rules, ordered by priority (None sorts last),
        # breaking ties by name for deterministic output.
        active_rules = sorted(
            [
                rule
                for rule in policy.rules
                if not rule.status or rule.status.upper() == "ACTIVE"
            ],
            key=lambda rule: (
                rule.priority if rule.priority is not None else float("inf"),
                rule.name,
            ),
        )
        if not active_rules:
            report.status = "FAIL"
            report.status_extended = (
                f"Default Global Session Policy '{policy.name}' has no active "
                "rules. STIG V-273186 requires an active Priority 1 non-default "
                f"rule with idle timeout <= {threshold} minutes."
            )
            return [report]

        # The best-ranked active rule must actually hold priority 1.
        priority_one_rule = active_rules[0]
        if priority_one_rule.priority != 1:
            report.status = "FAIL"
            report.status_extended = (
                f"Default Global Session Policy '{policy.name}' has no active "
                f"Priority 1 rule. The first active rule is '{priority_one_rule.name}' "
                f"at priority {priority_one_rule.priority}."
            )
            return [report]

        # The built-in Default Rule at priority 1 does not satisfy the STIG.
        if priority_one_rule.is_default or priority_one_rule.name == "Default Rule":
            report.status = "FAIL"
            report.status_extended = (
                f"Default Global Session Policy '{policy.name}' uses "
                f"'{priority_one_rule.name}' as its active Priority 1 rule. "
                "The STIG requires a non-default Priority 1 rule."
            )
            return [report]

        idle_timeout = priority_one_rule.max_session_idle_minutes
        if idle_timeout is None:
            report.status = "FAIL"
            report.status_extended = (
                f"Priority 1 non-default rule '{priority_one_rule.name}' in "
                f"Default Global Session Policy '{policy.name}' does not define "
                "a maximum Okta global session idle time."
            )
            return [report]

        if idle_timeout <= threshold:
            report.status = "PASS"
            report.status_extended = (
                f"Priority 1 non-default rule '{priority_one_rule.name}' in "
                f"Default Global Session Policy '{policy.name}' sets the "
                f"maximum Okta global session idle time to {idle_timeout} "
                f"minutes, meeting the configured threshold of {threshold} minutes."
            )
        else:
            report.status = "FAIL"
            report.status_extended = (
                f"Priority 1 non-default rule '{priority_one_rule.name}' in "
                f"Default Global Session Policy '{policy.name}' sets the "
                f"maximum Okta global session idle time to {idle_timeout} "
                f"minutes, exceeding the configured threshold of {threshold} minutes."
            )
        return [report]

    @staticmethod
    def _get_default_policy() -> GlobalSessionPolicy:
        """Find the default Global Session Policy, or a MISSING sentinel.

        Matches either the SDK's `system` flag (mapped to `is_default`) or
        the well-known "Default Policy" name; returns a synthetic policy
        with id "default-policy-missing" when neither is present.
        """
        for policy in signon_client.global_session_policies.values():
            if policy.is_default or policy.name == "Default Policy":
                return policy
        return GlobalSessionPolicy(
            id="default-policy-missing",
            name="Default Policy",
            priority=1,
            status="MISSING",
            is_default=True,
            rules=[],
        )
|
||||
@@ -1,178 +0,0 @@
|
||||
from typing import Optional
|
||||
from urllib.parse import parse_qs, urlparse
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.providers.okta.lib.service.service import OktaService
|
||||
|
||||
|
||||
def _next_after_cursor(resp) -> Optional[str]:
|
||||
"""Extract the `after` cursor from a `Link: ...; rel="next"` header.
|
||||
|
||||
Returns None when there is no next page. Header format follows RFC 5988
|
||||
and Okta's pagination guide.
|
||||
"""
|
||||
if resp is None:
|
||||
return None
|
||||
headers = getattr(resp, "headers", None) or {}
|
||||
link = headers.get("link") or headers.get("Link") or ""
|
||||
if not link:
|
||||
return None
|
||||
for part in link.split(","):
|
||||
if 'rel="next"' not in part:
|
||||
continue
|
||||
url_segment = part.split(";", 1)[0].strip().lstrip("<").rstrip(">")
|
||||
cursor = parse_qs(urlparse(url_segment).query).get("after", [None])[0]
|
||||
if cursor:
|
||||
return cursor
|
||||
return None
|
||||
|
||||
|
||||
class Signon(OktaService):
    """Fetches OKTA_SIGN_ON policies and their rules.

    Populates `self.global_session_policies` keyed by policy id. Each
    policy carries its rules; downstream checks read directly from this
    structure.
    """

    def __init__(self, provider):
        super().__init__(__class__.__name__, provider)
        # Fetched eagerly at construction; checks read this dict directly.
        self.global_session_policies: dict[str, GlobalSessionPolicy] = (
            self._list_global_session_policies()
        )

    def _list_global_session_policies(self) -> dict:
        """Synchronous entry point: run the async fetch, never raise.

        Returns an empty dict on failure so checks degrade gracefully
        instead of crashing the whole scan.
        """
        logger.info("Signon - Listing OKTA_SIGN_ON policies and rules...")
        try:
            return self._run(self._fetch_all())
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
            return {}

    async def _fetch_all(self) -> dict:
        """Fetch every OKTA_SIGN_ON policy (paginated) plus its rules."""
        result: dict[str, GlobalSessionPolicy] = {}
        all_policies, err = await self._paginate(
            lambda after: self.client.list_policies(type="OKTA_SIGN_ON", after=after)
        )
        if err is not None:
            logger.error(f"Error listing OKTA_SIGN_ON policies: {err}")
            return result

        for policy in all_policies:
            rules = await self._fetch_rules(policy.id)
            # getattr with defaults: SDK model attributes may be absent/None.
            result[policy.id] = GlobalSessionPolicy(
                id=policy.id,
                name=getattr(policy, "name", "") or "",
                priority=getattr(policy, "priority", None),
                status=getattr(policy, "status", "") or "",
                is_default=bool(getattr(policy, "system", False)),
                rules=rules,
            )
        return result

    async def _fetch_rules(self, policy_id: str) -> list:
        """Fetch the rules of one policy (single call, bounded by limit)."""
        # Okta's `list_policy_rules` endpoint does not expose an `after`
        # cursor in the SDK signature, so we call once with a generous
        # `limit`. Tenants with more rules per policy than the limit would
        # silently truncate; this is rare (most policies have <10 rules).
        rule_fetch_limit = 100
        rules_out: list[GlobalSessionPolicyRule] = []
        result = await self.client.list_policy_rules(
            policy_id, limit=str(rule_fetch_limit)
        )
        # SDK tuple shape varies; the error element is always last.
        err = result[-1]
        if err is not None:
            logger.error(f"Error listing rules for policy {policy_id}: {err}")
            return rules_out
        all_rules = list(result[0] or [])
        if len(all_rules) >= rule_fetch_limit:
            logger.warning(
                f"Policy {policy_id} returned {len(all_rules)} rules — the "
                f"per-policy fetch limit ({rule_fetch_limit}) was hit; any "
                "rules beyond this limit are not evaluated by Prowler. "
                "Review the policy in the Okta Admin Console."
            )

        for rule in all_rules:
            # Walk the nested SDK model defensively — any level can be None.
            actions = getattr(rule, "actions", None)
            signon = getattr(actions, "signon", None) if actions else None
            session = getattr(signon, "session", None) if signon else None
            conditions = getattr(rule, "conditions", None)
            network = getattr(conditions, "network", None) if conditions else None
            rules_out.append(
                GlobalSessionPolicyRule(
                    id=getattr(rule, "id", "") or "",
                    name=getattr(rule, "name", "") or "",
                    priority=getattr(rule, "priority", None),
                    status=getattr(rule, "status", "") or "",
                    is_default=bool(getattr(rule, "system", False)),
                    max_session_idle_minutes=getattr(
                        session, "max_session_idle_minutes", None
                    ),
                    max_session_lifetime_minutes=getattr(
                        session, "max_session_lifetime_minutes", None
                    ),
                    use_persistent_cookie=getattr(
                        session, "use_persistent_cookie", None
                    ),
                    network_zones_include=list(getattr(network, "include", None) or []),
                    network_zones_exclude=list(getattr(network, "exclude", None) or []),
                )
            )
        return rules_out

    @staticmethod
    async def _paginate(fetch):
        """Drain all pages of an SDK list call.

        `fetch` is a callable that takes the `after` cursor (or None for
        the first page) and returns the SDK's standard `(items, resp, err)`
        tuple. We follow `Link: rel="next"` headers until exhausted.

        Returns:
            (all_items, err): items collected so far and the first error
            encountered, or None when every page succeeded.
        """
        all_items = []
        result = await fetch(None)
        # Defensive against the SDK's 2-tuple early-error path: error is last.
        err = result[-1]
        if err is not None:
            return [], err
        items = result[0]
        resp = result[1] if len(result) >= 3 else None
        all_items.extend(items or [])
        while True:
            cursor = _next_after_cursor(resp)
            if not cursor:
                break
            result = await fetch(cursor)
            err = result[-1]
            if err is not None:
                # Return the partial page set alongside the error.
                return all_items, err
            items = result[0]
            resp = result[1] if len(result) >= 3 else None
            all_items.extend(items or [])
        return all_items, None
|
||||
|
||||
|
||||
class GlobalSessionPolicyRule(BaseModel):
    """A single rule inside an Okta global session (sign-on) policy.

    Holds the session controls that Prowler checks evaluate: idle and
    lifetime timeouts, persistent-cookie usage, and the network zones
    the rule is scoped to. Populated from the Okta SDK rule object by
    the service's rule-fetching code.
    """

    # Rule identifier and display name as returned by the Okta API
    # (empty string when the SDK object lacks the attribute).
    id: str
    name: str
    # Evaluation priority as reported by the API; None when absent.
    priority: Optional[int] = None
    # Lifecycle status string from the API (empty when absent).
    status: str = ""
    # Set from the SDK rule's `system` flag — True for the
    # system-managed (default) rule.
    is_default: bool = False
    # Session timeout controls in minutes, read from the rule's
    # actions.signon.session settings; None when not set.
    max_session_idle_minutes: Optional[int] = None
    max_session_lifetime_minutes: Optional[int] = None
    # Whether the rule allows a persistent session cookie; None when
    # the session settings are absent.
    use_persistent_cookie: Optional[bool] = None
    # Network zone identifiers from the rule's network conditions
    # (include/exclude lists); empty when no network condition is set.
    network_zones_include: list[str] = []
    network_zones_exclude: list[str] = []
|
||||
|
||||
|
||||
class GlobalSessionPolicy(BaseModel):
    """An Okta global session (sign-on) policy together with its rules."""

    # Policy identifier and display name as returned by the Okta API.
    id: str
    name: str
    # Evaluation priority as reported by the API; None when absent.
    priority: Optional[int] = None
    # Lifecycle status string from the API (empty when absent).
    status: str = ""
    # True for the system-managed (default) policy.
    is_default: bool = False
    # Rules fetched for this policy; empty when none were retrieved
    # (including the per-policy error path, which returns an empty list).
    rules: list[GlobalSessionPolicyRule] = []
|
||||
@@ -59,7 +59,6 @@ dependencies = [
|
||||
"microsoft-kiota-abstractions==1.9.2",
|
||||
"msgraph-sdk==1.55.0",
|
||||
"numpy==2.0.2",
|
||||
"okta==3.4.2",
|
||||
"openstacksdk==4.2.0",
|
||||
"pandas==2.2.3",
|
||||
"py-ocsf-models==0.8.1",
|
||||
|
||||
@@ -50,7 +50,7 @@ Reusable patterns for common technologies:
|
||||
|-------|-------------|
|
||||
| `typescript` | Const types, flat interfaces, utility types |
|
||||
| `react-19` | React 19 patterns, React Compiler |
|
||||
| `nextjs-16` | App Router, Server Actions, proxy.ts, streaming |
|
||||
| `nextjs-15` | App Router, Server Actions, streaming |
|
||||
| `tailwind-4` | cn() utility, Tailwind 4 patterns |
|
||||
| `playwright` | Page Object Model, selectors |
|
||||
| `vitest` | Unit testing, React Testing Library |
|
||||
|
||||
@@ -0,0 +1,150 @@
|
||||
---
|
||||
name: nextjs-15
|
||||
description: >
|
||||
Next.js 15 App Router patterns.
|
||||
Trigger: When working in Next.js App Router (app/), Server Components vs Client Components, Server Actions, Route Handlers, caching/revalidation, and streaming/Suspense.
|
||||
license: Apache-2.0
|
||||
metadata:
|
||||
author: prowler-cloud
|
||||
version: "1.0"
|
||||
scope: [root, ui]
|
||||
auto_invoke: "App Router / Server Actions"
|
||||
allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, WebSearch, Task
|
||||
---
|
||||
|
||||
## App Router File Conventions
|
||||
|
||||
```
|
||||
app/
|
||||
├── layout.tsx # Root layout (required)
|
||||
├── page.tsx # Home page (/)
|
||||
├── loading.tsx # Loading UI (Suspense)
|
||||
├── error.tsx # Error boundary
|
||||
├── not-found.tsx # 404 page
|
||||
├── (auth)/ # Route group (no URL impact)
|
||||
│ ├── login/page.tsx # /login
|
||||
│ └── signup/page.tsx # /signup
|
||||
├── api/
|
||||
│ └── route.ts # API handler
|
||||
└── _components/ # Private folder (not routed)
|
||||
```
|
||||
|
||||
## Server Components (Default)
|
||||
|
||||
```typescript
|
||||
// No directive needed - async by default
|
||||
export default async function Page() {
|
||||
const data = await db.query();
|
||||
return <Component data={data} />;
|
||||
}
|
||||
```
|
||||
|
||||
## Server Actions
|
||||
|
||||
```typescript
|
||||
// app/actions.ts
|
||||
"use server";
|
||||
|
||||
import { revalidatePath } from "next/cache";
|
||||
import { redirect } from "next/navigation";
|
||||
|
||||
export async function createUser(formData: FormData) {
|
||||
const name = formData.get("name") as string;
|
||||
|
||||
await db.users.create({ data: { name } });
|
||||
|
||||
revalidatePath("/users");
|
||||
redirect("/users");
|
||||
}
|
||||
|
||||
// Usage
|
||||
<form action={createUser}>
|
||||
<input name="name" required />
|
||||
<button type="submit">Create</button>
|
||||
</form>
|
||||
```
|
||||
|
||||
## Data Fetching
|
||||
|
||||
```typescript
|
||||
// Parallel
|
||||
async function Page() {
|
||||
const [users, posts] = await Promise.all([
|
||||
getUsers(),
|
||||
getPosts(),
|
||||
]);
|
||||
return <Dashboard users={users} posts={posts} />;
|
||||
}
|
||||
|
||||
// Streaming with Suspense
|
||||
<Suspense fallback={<Loading />}>
|
||||
<SlowComponent />
|
||||
</Suspense>
|
||||
```
|
||||
|
||||
## Route Handlers (API)
|
||||
|
||||
```typescript
|
||||
// app/api/users/route.ts
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
const users = await db.users.findMany();
|
||||
return NextResponse.json(users);
|
||||
}
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const body = await request.json();
|
||||
const user = await db.users.create({ data: body });
|
||||
return NextResponse.json(user, { status: 201 });
|
||||
}
|
||||
```
|
||||
|
||||
## Middleware
|
||||
|
||||
```typescript
|
||||
// middleware.ts (root level)
|
||||
import { NextResponse } from "next/server";
|
||||
import type { NextRequest } from "next/server";
|
||||
|
||||
export function middleware(request: NextRequest) {
|
||||
const token = request.cookies.get("token");
|
||||
|
||||
if (!token && request.nextUrl.pathname.startsWith("/dashboard")) {
|
||||
return NextResponse.redirect(new URL("/login", request.url));
|
||||
}
|
||||
|
||||
return NextResponse.next();
|
||||
}
|
||||
|
||||
export const config = {
|
||||
matcher: ["/dashboard/:path*"],
|
||||
};
|
||||
```
|
||||
|
||||
## Metadata
|
||||
|
||||
```typescript
|
||||
// Static
|
||||
export const metadata = {
|
||||
title: "My App",
|
||||
description: "Description",
|
||||
};
|
||||
|
||||
// Dynamic
|
||||
export async function generateMetadata({ params }) {
|
||||
const product = await getProduct(params.id);
|
||||
return { title: product.name };
|
||||
}
|
||||
```
|
||||
|
||||
## server-only Package
|
||||
|
||||
```typescript
|
||||
import "server-only";
|
||||
|
||||
// This will error if imported in client component
|
||||
export async function getSecretData() {
|
||||
return db.secrets.findMany();
|
||||
}
|
||||
```
|
||||
@@ -1,160 +0,0 @@
|
||||
---
|
||||
name: nextjs-16
|
||||
description: >
|
||||
Next.js 16 App Router patterns.
|
||||
Trigger: When working in Next.js App Router (app/), Server Components vs Client Components, Server Actions, Route Handlers, proxy.ts, caching/revalidation, Cache Components, and streaming/Suspense.
|
||||
license: Apache-2.0
|
||||
metadata:
|
||||
author: prowler-cloud
|
||||
version: "1.0"
|
||||
scope: [root, ui]
|
||||
auto_invoke: "App Router / Server Actions"
|
||||
allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, WebSearch, Task
|
||||
---
|
||||
|
||||
## App Router File Conventions
|
||||
|
||||
```
|
||||
app/
|
||||
├── layout.tsx # Root layout (required)
|
||||
├── page.tsx # Home page (/)
|
||||
├── loading.tsx # Loading UI (Suspense)
|
||||
├── error.tsx # Error boundary
|
||||
├── not-found.tsx # 404 page
|
||||
├── (auth)/ # Route group (no URL impact)
|
||||
│ ├── login/page.tsx # /login
|
||||
│ └── signup/page.tsx # /signup
|
||||
├── api/
|
||||
│ └── route.ts # API handler
|
||||
└── _components/ # Private folder (not routed)
|
||||
```
|
||||
|
||||
## Next.js 16 Notes
|
||||
|
||||
- Use `proxy.ts` for request-boundary logic. `middleware.ts` is deprecated in Next.js 16.
|
||||
- `proxy.ts` runs on the Node.js runtime and cannot be configured for Edge.
|
||||
- Keep `proxy.ts` matchers narrow. Exclude `api`, static files, and image assets unless the route explicitly needs proxy logic.
|
||||
- Route Handlers in `app/api/**/route.ts` are the right fit for health checks, webhooks, backend-for-frontend endpoints, and server-only proxy calls.
|
||||
|
||||
## Server Components (Default)
|
||||
|
||||
```typescript
|
||||
// No directive needed - async by default
|
||||
export default async function Page() {
|
||||
const data = await db.query();
|
||||
return <Component data={data} />;
|
||||
}
|
||||
```
|
||||
|
||||
## Server Actions
|
||||
|
||||
```typescript
|
||||
"use server";
|
||||
|
||||
import { revalidatePath } from "next/cache";
|
||||
import { redirect } from "next/navigation";
|
||||
|
||||
export async function createUser(formData: FormData) {
|
||||
const name = formData.get("name") as string;
|
||||
|
||||
await db.users.create({ data: { name } });
|
||||
|
||||
revalidatePath("/users");
|
||||
redirect("/users");
|
||||
}
|
||||
```
|
||||
|
||||
## Data Fetching
|
||||
|
||||
```typescript
|
||||
async function Page() {
|
||||
const [users, posts] = await Promise.all([getUsers(), getPosts()]);
|
||||
|
||||
return <Dashboard users={users} posts={posts} />;
|
||||
}
|
||||
|
||||
<Suspense fallback={<Loading />}>
|
||||
<SlowComponent />
|
||||
</Suspense>;
|
||||
```
|
||||
|
||||
## Caching and Revalidation
|
||||
|
||||
```typescript
|
||||
import { revalidatePath, revalidateTag } from "next/cache";
|
||||
|
||||
export async function refreshDashboard() {
|
||||
"use server";
|
||||
|
||||
revalidatePath("/");
|
||||
revalidateTag("dashboard");
|
||||
}
|
||||
```
|
||||
|
||||
- Use `revalidatePath` for route-level invalidation after mutations.
|
||||
- Use `revalidateTag` when data fetches share a cache tag across routes.
|
||||
- With Cache Components enabled, put `"use cache"` only in pure server-side cached functions. Do not cache auth, tenant-scoped, or per-user responses unless the cache key explicitly isolates them.
|
||||
|
||||
## Route Handlers (API)
|
||||
|
||||
```typescript
|
||||
// app/api/users/route.ts
|
||||
import { NextResponse } from "next/server";
|
||||
|
||||
export async function GET() {
|
||||
const users = await db.users.findMany();
|
||||
return NextResponse.json(users);
|
||||
}
|
||||
|
||||
export async function POST(request: Request) {
|
||||
const body = await request.json();
|
||||
const user = await db.users.create({ data: body });
|
||||
return NextResponse.json(user, { status: 201 });
|
||||
}
|
||||
```
|
||||
|
||||
## Proxy
|
||||
|
||||
```typescript
|
||||
// proxy.ts (root level)
|
||||
import { NextResponse } from "next/server";
|
||||
import type { NextRequest } from "next/server";
|
||||
|
||||
export function proxy(request: NextRequest) {
|
||||
const token = request.cookies.get("token");
|
||||
|
||||
if (!token && request.nextUrl.pathname.startsWith("/dashboard")) {
|
||||
return NextResponse.redirect(new URL("/login", request.url));
|
||||
}
|
||||
|
||||
return NextResponse.next();
|
||||
}
|
||||
|
||||
export const config = {
|
||||
matcher: ["/dashboard/:path*"],
|
||||
};
|
||||
```
|
||||
|
||||
## Metadata
|
||||
|
||||
```typescript
|
||||
export const metadata = {
|
||||
title: "My App",
|
||||
description: "Description",
|
||||
};
|
||||
|
||||
export async function generateMetadata() {
|
||||
const product = await getProduct();
|
||||
return { title: product.name };
|
||||
}
|
||||
```
|
||||
|
||||
## server-only Package
|
||||
|
||||
```typescript
|
||||
import "server-only";
|
||||
|
||||
export async function getSecretData() {
|
||||
return db.secrets.findMany();
|
||||
}
|
||||
```
|
||||
@@ -77,7 +77,6 @@ allowed-tools: Read, Edit, Write, Glob, Grep, Bash
|
||||
- No period at the end
|
||||
- Do NOT start with redundant verbs (section header already provides the action)
|
||||
- **CRITICAL: Preserve section order** — when adding a new section to the UNRELEASED block, insert it in the correct position relative to existing sections (Added → Changed → Deprecated → Removed → Fixed → Security). Never append a new section at the top or bottom without checking order
|
||||
- **CRITICAL: ALWAYS link to the PR, NEVER to the issue.** Every entry MUST use `https://github.com/prowler-cloud/prowler/pull/N`. Linking to `/issues/N` is FORBIDDEN, even when the PR fixes an issue. The issue↔PR relationship belongs in the PR body (`Fixes #N`), not in the changelog. If a fix has no PR yet, do not add the entry until the PR exists.
|
||||
|
||||
### Semantic Versioning Rules
|
||||
|
||||
@@ -164,8 +163,6 @@ git diff main...HEAD --name-only
|
||||
|
||||
**CRITICAL:** Add new entries at the BOTTOM of each section, NOT at the top.
|
||||
|
||||
**CRITICAL:** The link MUST point to the PR (`/pull/N`). Linking to `/issues/N` is FORBIDDEN. If the PR closes an issue, that mapping goes in the PR body via `Fixes #N` — never in the changelog entry.
|
||||
|
||||
```markdown
|
||||
## [1.17.0] (Prowler UNRELEASED)
|
||||
|
||||
@@ -218,7 +215,6 @@ This maintains chronological order within each section (oldest at top, newest at
|
||||
- Added new feature for users # Missing PR link, redundant verb
|
||||
- Add search bar [(#123)] # Redundant verb (section already says "Added")
|
||||
- This PR adds a cool new thing (#123) # Wrong link format, conversational
|
||||
- Some bug fix [(#123)](https://github.com/prowler-cloud/prowler/issues/123) # FORBIDDEN: must link to /pull/N, never /issues/N
|
||||
- POST /api/v1/scanswas intermittently failing withScan matching query does not existin thescan-performworker (#11122) # Missing spaces/backticks, unreadable
|
||||
- entra_users_mfa_capable no longer flags disabled guest users by requesting accountEnabled and userType from Microsoft Graph via $select and using Graph as the source of truth for account_enabled (EXO Get-User does not return guest users) (#11002) # Run-on sentence, identifiers not formatted
|
||||
```
|
||||
|
||||
@@ -20,8 +20,6 @@ This maintains chronological order: oldest entries at top, newest at bottom.
|
||||
## Entry Patterns
|
||||
|
||||
> **Note:** Section headers already provide the verb. Entries describe WHAT, not the action.
|
||||
>
|
||||
> **Link target rule:** Every entry MUST link to the PR (`https://github.com/prowler-cloud/prowler/pull/N`). Linking to `/issues/N` is FORBIDDEN — even when the PR fixes an issue. The issue↔PR mapping belongs in the PR body (`Fixes #N`), not here.
|
||||
|
||||
### Feature Addition (🚀 Added)
|
||||
```markdown
|
||||
@@ -42,8 +40,6 @@ This maintains chronological order: oldest entries at top, newest at bottom.
|
||||
- {What was broken} in {component} [(#XXXX)](https://github.com/prowler-cloud/prowler/pull/XXXX)
|
||||
```
|
||||
|
||||
> When a PR fixes a reported issue, the link still goes to the PR (`/pull/N`), never the issue (`/issues/N`). Reference the issue from the PR body with `Fixes #N`.
|
||||
|
||||
### Security Patch (🔐 Security)
|
||||
```markdown
|
||||
- Node.js from 20.x to 24.13.0 LTS, patching 8 CVEs [(#XXXX)](https://github.com/prowler-cloud/prowler/pull/XXXX)
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
---
|
||||
name: prowler-ui
|
||||
description: >
|
||||
Prowler UI-specific patterns. For generic patterns, see: typescript, react-19, nextjs-16, tailwind-4.
|
||||
Prowler UI-specific patterns. For generic patterns, see: typescript, react-19, nextjs-15, tailwind-4.
|
||||
Trigger: When working inside ui/ on Prowler-specific conventions (shadcn vs HeroUI legacy, folder placement, actions/adapters, shared types/hooks/lib).
|
||||
license: Apache-2.0
|
||||
metadata:
|
||||
@@ -18,7 +18,7 @@ allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, WebSearch, Task
|
||||
|
||||
- `typescript` - Const types, flat interfaces
|
||||
- `react-19` - No useMemo/useCallback, compiler
|
||||
- `nextjs-16` - App Router, Server Actions
|
||||
- `nextjs-15` - App Router, Server Actions
|
||||
- `tailwind-4` - cn() utility, styling rules
|
||||
- `zod-4` - Schema validation
|
||||
- `zustand-5` - State management
|
||||
@@ -28,7 +28,7 @@ allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, WebSearch, Task
|
||||
## Tech Stack (Versions)
|
||||
|
||||
```
|
||||
Next.js 16.2.3 | React 19.2.5 | Tailwind 4.1.18 | shadcn/ui
|
||||
Next.js 15.5.9 | React 19.2.2 | Tailwind 4.1.13 | shadcn/ui
|
||||
Zod 4.1.11 | React Hook Form 7.62.0 | Zustand 5.0.8
|
||||
NextAuth 5.0.0-beta.30 | Recharts 2.15.4
|
||||
HeroUI 2.8.4 (LEGACY - do not add new components)
|
||||
|
||||
@@ -18,7 +18,7 @@ allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, WebSearch, Task
|
||||
|-----------|-------|----------|
|
||||
| SDK | Python 3.10+, Poetry | `prowler/` |
|
||||
| API | Django 5.1, DRF, Celery | `api/` |
|
||||
| UI | Next.js 16, React 19, Tailwind 4 | `ui/` |
|
||||
| UI | Next.js 15, React 19, Tailwind 4 | `ui/` |
|
||||
| MCP | FastMCP 2.13.1 | `mcp_server/` |
|
||||
|
||||
## Quick Commands
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
name: react-19
|
||||
description: >
|
||||
React 19 patterns with React Compiler.
|
||||
Trigger: When writing React 19 components/hooks in .tsx (React Compiler rules, hook patterns, refs as props). If using Next.js App Router/Server Actions, also use nextjs-16.
|
||||
Trigger: When writing React 19 components/hooks in .tsx (React Compiler rules, hook patterns, refs as props). If using Next.js App Router/Server Actions, also use nextjs-15.
|
||||
license: Apache-2.0
|
||||
metadata:
|
||||
author: prowler-cloud
|
||||
|
||||
@@ -488,8 +488,3 @@ m365:
|
||||
# Exchange Mailbox Settings
|
||||
# m365.exchange_mailbox_properties_auditing_enabled
|
||||
audit_log_age: 90 # maximum number of days to keep audit logs
|
||||
|
||||
okta:
|
||||
# Okta Sign-On Policies
|
||||
# okta.signon_global_session_idle_timeout_15min
|
||||
okta_max_session_idle_minutes: 15
|
||||
|
||||
@@ -17,7 +17,7 @@ prowler_command = "prowler"
|
||||
|
||||
# capsys
|
||||
# https://docs.pytest.org/en/7.1.x/how-to/capture-stdout-stderr.html
|
||||
prowler_default_usage_error = "usage: prowler [-h] [--version] {aws,azure,gcp,kubernetes,m365,github,googleworkspace,okta,nhn,mongodbatlas,oraclecloud,alibabacloud,cloudflare,openstack,vercel,dashboard,iac,image,llm} ..."
|
||||
prowler_default_usage_error = "usage: prowler [-h] [--version] {aws,azure,gcp,kubernetes,m365,github,googleworkspace,nhn,mongodbatlas,oraclecloud,alibabacloud,cloudflare,openstack,vercel,dashboard,iac,image,llm} ..."
|
||||
|
||||
|
||||
def mock_get_available_providers():
|
||||
|
||||
@@ -67,7 +67,6 @@ class Test_entra_break_glass_account_fido2_security_key_registered:
|
||||
entra_client = mock.MagicMock
|
||||
entra_client.audited_tenant = "audited_tenant"
|
||||
entra_client.audited_domain = DOMAIN
|
||||
entra_client.user_registration_details_error = None
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
@@ -105,7 +104,6 @@ class Test_entra_break_glass_account_fido2_security_key_registered:
|
||||
entra_client = mock.MagicMock
|
||||
entra_client.audited_tenant = "audited_tenant"
|
||||
entra_client.audited_domain = DOMAIN
|
||||
entra_client.user_registration_details_error = None
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
@@ -144,7 +142,6 @@ class Test_entra_break_glass_account_fido2_security_key_registered:
|
||||
entra_client = mock.MagicMock
|
||||
entra_client.audited_tenant = "audited_tenant"
|
||||
entra_client.audited_domain = DOMAIN
|
||||
entra_client.user_registration_details_error = None
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
@@ -181,7 +178,6 @@ class Test_entra_break_glass_account_fido2_security_key_registered:
|
||||
entra_client = mock.MagicMock
|
||||
entra_client.audited_tenant = "audited_tenant"
|
||||
entra_client.audited_domain = DOMAIN
|
||||
entra_client.user_registration_details_error = None
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
@@ -232,7 +228,6 @@ class Test_entra_break_glass_account_fido2_security_key_registered:
|
||||
entra_client = mock.MagicMock
|
||||
entra_client.audited_tenant = "audited_tenant"
|
||||
entra_client.audited_domain = DOMAIN
|
||||
entra_client.user_registration_details_error = None
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
@@ -280,7 +275,6 @@ class Test_entra_break_glass_account_fido2_security_key_registered:
|
||||
entra_client = mock.MagicMock
|
||||
entra_client.audited_tenant = "audited_tenant"
|
||||
entra_client.audited_domain = DOMAIN
|
||||
entra_client.user_registration_details_error = None
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
@@ -327,7 +321,6 @@ class Test_entra_break_glass_account_fido2_security_key_registered:
|
||||
entra_client = mock.MagicMock
|
||||
entra_client.audited_tenant = "audited_tenant"
|
||||
entra_client.audited_domain = DOMAIN
|
||||
entra_client.user_registration_details_error = None
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
@@ -375,7 +368,6 @@ class Test_entra_break_glass_account_fido2_security_key_registered:
|
||||
entra_client = mock.MagicMock
|
||||
entra_client.audited_tenant = "audited_tenant"
|
||||
entra_client.audited_domain = DOMAIN
|
||||
entra_client.user_registration_details_error = None
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
@@ -430,7 +422,6 @@ class Test_entra_break_glass_account_fido2_security_key_registered:
|
||||
entra_client = mock.MagicMock
|
||||
entra_client.audited_tenant = "audited_tenant"
|
||||
entra_client.audited_domain = DOMAIN
|
||||
entra_client.user_registration_details_error = None
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
@@ -466,7 +457,6 @@ class Test_entra_break_glass_account_fido2_security_key_registered:
|
||||
entra_client = mock.MagicMock
|
||||
entra_client.audited_tenant = "audited_tenant"
|
||||
entra_client.audited_domain = DOMAIN
|
||||
entra_client.user_registration_details_error = None
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
@@ -510,117 +500,3 @@ class Test_entra_break_glass_account_fido2_security_key_registered:
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "PASS"
|
||||
assert result[0].resource_name == "BreakGlass1"
|
||||
|
||||
def test_user_registration_details_permission_error(self):
|
||||
"""Test FAIL when there's a permission error reading user registration details."""
|
||||
entra_client = mock.MagicMock
|
||||
entra_client.audited_tenant = "audited_tenant"
|
||||
entra_client.audited_domain = DOMAIN
|
||||
entra_client.user_registration_details_error = "Insufficient privileges to read user registration details. Required permission: AuditLog.Read.All"
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_m365_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
f"{CHECK_MODULE_PATH}.entra_client",
|
||||
new=entra_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.m365.services.entra.entra_break_glass_account_fido2_security_key_registered.entra_break_glass_account_fido2_security_key_registered import (
|
||||
entra_break_glass_account_fido2_security_key_registered,
|
||||
)
|
||||
|
||||
policy_id = str(uuid4())
|
||||
bg_user_id = str(uuid4())
|
||||
|
||||
entra_client.conditional_access_policies = {
|
||||
policy_id: _make_policy(policy_id, excluded_users=[bg_user_id]),
|
||||
}
|
||||
entra_client.users = {
|
||||
bg_user_id: User(
|
||||
id=bg_user_id,
|
||||
name="BreakGlass1",
|
||||
on_premises_sync_enabled=False,
|
||||
authentication_methods=[],
|
||||
),
|
||||
}
|
||||
|
||||
check = entra_break_glass_account_fido2_security_key_registered()
|
||||
result = check.execute()
|
||||
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
assert (
|
||||
"Cannot verify FIDO2 security key registration for break glass account BreakGlass1"
|
||||
in result[0].status_extended
|
||||
)
|
||||
assert "AuditLog.Read.All" in result[0].status_extended
|
||||
assert result[0].resource_name == "BreakGlass1"
|
||||
assert result[0].resource_id == bg_user_id
|
||||
|
||||
def test_user_registration_details_permission_error_with_missing_user(self):
|
||||
"""Per-user emission and missing-user short-circuit on the error path.
|
||||
|
||||
Two break-glass user IDs are excluded from all CAPs, but only one is
|
||||
present in ``entra_client.users``. With ``user_registration_details_error``
|
||||
set, the present user must produce one preventive FAIL anchored to the
|
||||
real user; the missing user must be skipped by the existing
|
||||
``if not user: continue`` guard rather than crash or yield a synthetic
|
||||
finding.
|
||||
"""
|
||||
entra_client = mock.MagicMock
|
||||
entra_client.audited_tenant = "audited_tenant"
|
||||
entra_client.audited_domain = DOMAIN
|
||||
entra_client.user_registration_details_error = "Insufficient privileges to read user registration details. Required permission: AuditLog.Read.All"
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_m365_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
f"{CHECK_MODULE_PATH}.entra_client",
|
||||
new=entra_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.m365.services.entra.entra_break_glass_account_fido2_security_key_registered.entra_break_glass_account_fido2_security_key_registered import (
|
||||
entra_break_glass_account_fido2_security_key_registered,
|
||||
)
|
||||
|
||||
policy_id = str(uuid4())
|
||||
present_user_id = str(uuid4())
|
||||
missing_user_id = str(uuid4())
|
||||
|
||||
entra_client.conditional_access_policies = {
|
||||
policy_id: _make_policy(
|
||||
policy_id,
|
||||
excluded_users=[present_user_id, missing_user_id],
|
||||
),
|
||||
}
|
||||
entra_client.users = {
|
||||
present_user_id: User(
|
||||
id=present_user_id,
|
||||
name="BreakGlass1",
|
||||
on_premises_sync_enabled=False,
|
||||
authentication_methods=[],
|
||||
),
|
||||
# missing_user_id intentionally absent — exercises the
|
||||
# `if not user: continue` short-circuit inside the loop.
|
||||
}
|
||||
|
||||
check = entra_break_glass_account_fido2_security_key_registered()
|
||||
result = check.execute()
|
||||
|
||||
# One finding for the present user; the missing one is skipped.
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
assert (
|
||||
"Cannot verify FIDO2 security key registration for break glass account BreakGlass1"
|
||||
in result[0].status_extended
|
||||
)
|
||||
assert "AuditLog.Read.All" in result[0].status_extended
|
||||
assert result[0].resource == entra_client.users[present_user_id]
|
||||
assert result[0].resource_name == "BreakGlass1"
|
||||
assert result[0].resource_id == present_user_id
|
||||
|
||||
@@ -11,7 +11,6 @@ class Test_entra_users_mfa_capable:
|
||||
entra_client = mock.MagicMock
|
||||
entra_client.audited_tenant = "audited_tenant"
|
||||
entra_client.audited_domain = DOMAIN
|
||||
entra_client.user_registration_details_error = None
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
@@ -54,7 +53,6 @@ class Test_entra_users_mfa_capable:
|
||||
entra_client = mock.MagicMock
|
||||
entra_client.audited_tenant = "audited_tenant"
|
||||
entra_client.audited_domain = DOMAIN
|
||||
entra_client.user_registration_details_error = None
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
@@ -97,7 +95,6 @@ class Test_entra_users_mfa_capable:
|
||||
entra_client = mock.MagicMock
|
||||
entra_client.audited_tenant = "audited_tenant"
|
||||
entra_client.audited_domain = DOMAIN
|
||||
entra_client.user_registration_details_error = None
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
@@ -156,7 +153,6 @@ class Test_entra_users_mfa_capable:
|
||||
entra_client = mock.MagicMock
|
||||
entra_client.audited_tenant = "audited_tenant"
|
||||
entra_client.audited_domain = DOMAIN
|
||||
entra_client.user_registration_details_error = None
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
@@ -195,7 +191,6 @@ class Test_entra_users_mfa_capable:
|
||||
entra_client = mock.MagicMock
|
||||
entra_client.audited_tenant = "audited_tenant"
|
||||
entra_client.audited_domain = DOMAIN
|
||||
entra_client.user_registration_details_error = None
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
@@ -253,7 +248,6 @@ class Test_entra_users_mfa_capable:
|
||||
entra_client = mock.MagicMock
|
||||
entra_client.audited_tenant = "audited_tenant"
|
||||
entra_client.audited_domain = DOMAIN
|
||||
entra_client.user_registration_details_error = None
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
@@ -292,7 +286,6 @@ class Test_entra_users_mfa_capable:
|
||||
entra_client = mock.MagicMock
|
||||
entra_client.audited_tenant = "audited_tenant"
|
||||
entra_client.audited_domain = DOMAIN
|
||||
entra_client.user_registration_details_error = None
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
@@ -331,7 +324,6 @@ class Test_entra_users_mfa_capable:
|
||||
entra_client = mock.MagicMock
|
||||
entra_client.audited_tenant = "audited_tenant"
|
||||
entra_client.audited_domain = DOMAIN
|
||||
entra_client.user_registration_details_error = None
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
@@ -391,7 +383,6 @@ class Test_entra_users_mfa_capable:
|
||||
entra_client = mock.MagicMock
|
||||
entra_client.audited_tenant = "audited_tenant"
|
||||
entra_client.audited_domain = DOMAIN
|
||||
entra_client.user_registration_details_error = None
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
@@ -429,125 +420,3 @@ class Test_entra_users_mfa_capable:
|
||||
assert result[0].resource == entra_client.users[user_id]
|
||||
assert result[0].resource_name == "Test User"
|
||||
assert result[0].resource_id == user_id
|
||||
|
||||
def test_user_registration_details_permission_error(self):
|
||||
"""Test FAIL when there's a permission error reading user registration details."""
|
||||
entra_client = mock.MagicMock
|
||||
entra_client.audited_tenant = "audited_tenant"
|
||||
entra_client.audited_domain = DOMAIN
|
||||
entra_client.user_registration_details_error = "Insufficient privileges to read user registration details. Required permission: AuditLog.Read.All"
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_m365_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.m365.services.entra.entra_users_mfa_capable.entra_users_mfa_capable.entra_client",
|
||||
new=entra_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.m365.services.entra.entra_users_mfa_capable.entra_users_mfa_capable import (
|
||||
entra_users_mfa_capable,
|
||||
)
|
||||
|
||||
user_id = str(uuid4())
|
||||
entra_client.users = {
|
||||
user_id: User(
|
||||
id=user_id,
|
||||
name="Test User",
|
||||
on_premises_sync_enabled=False,
|
||||
directory_roles_ids=[],
|
||||
is_mfa_capable=False,
|
||||
account_enabled=True,
|
||||
)
|
||||
}
|
||||
|
||||
check = entra_users_mfa_capable()
|
||||
result = check.execute()
|
||||
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
assert (
|
||||
"Cannot verify MFA capability for user Test User"
|
||||
in result[0].status_extended
|
||||
)
|
||||
assert "AuditLog.Read.All" in result[0].status_extended
|
||||
assert result[0].resource == entra_client.users[user_id]
|
||||
assert result[0].resource_name == "Test User"
|
||||
assert result[0].resource_id == user_id
|
||||
|
||||
def test_user_registration_details_permission_error_skips_guest_and_disabled(self):
|
||||
"""CIS-scope skip (Guest, disabled) still applies on the permission-error path.
|
||||
|
||||
With ``user_registration_details_error`` set, only enabled member users
|
||||
should receive a per-user "Cannot verify MFA capability" FAIL — guests
|
||||
and disabled members are filtered out before the error branch runs.
|
||||
"""
|
||||
entra_client = mock.MagicMock
|
||||
entra_client.audited_tenant = "audited_tenant"
|
||||
entra_client.audited_domain = DOMAIN
|
||||
entra_client.user_registration_details_error = "Insufficient privileges to read user registration details. Required permission: AuditLog.Read.All"
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_m365_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.m365.services.entra.entra_users_mfa_capable.entra_users_mfa_capable.entra_client",
|
||||
new=entra_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.m365.services.entra.entra_users_mfa_capable.entra_users_mfa_capable import (
|
||||
entra_users_mfa_capable,
|
||||
)
|
||||
|
||||
member_id = str(uuid4())
|
||||
guest_id = str(uuid4())
|
||||
disabled_member_id = str(uuid4())
|
||||
entra_client.users = {
|
||||
member_id: User(
|
||||
id=member_id,
|
||||
name="Enabled Member",
|
||||
on_premises_sync_enabled=False,
|
||||
directory_roles_ids=[],
|
||||
is_mfa_capable=False,
|
||||
account_enabled=True,
|
||||
user_type="Member",
|
||||
),
|
||||
guest_id: User(
|
||||
id=guest_id,
|
||||
name="Guest User",
|
||||
on_premises_sync_enabled=False,
|
||||
directory_roles_ids=[],
|
||||
is_mfa_capable=False,
|
||||
account_enabled=True,
|
||||
user_type="Guest",
|
||||
),
|
||||
disabled_member_id: User(
|
||||
id=disabled_member_id,
|
||||
name="Disabled Member",
|
||||
on_premises_sync_enabled=False,
|
||||
directory_roles_ids=[],
|
||||
is_mfa_capable=False,
|
||||
account_enabled=False,
|
||||
user_type="Member",
|
||||
),
|
||||
}
|
||||
|
||||
check = entra_users_mfa_capable()
|
||||
result = check.execute()
|
||||
|
||||
# Only the enabled member should be reported — Guest and
|
||||
# disabled member are skipped before the error branch.
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
assert (
|
||||
"Cannot verify MFA capability for user Enabled Member"
|
||||
in result[0].status_extended
|
||||
)
|
||||
assert "AuditLog.Read.All" in result[0].status_extended
|
||||
assert result[0].resource == entra_client.users[member_id]
|
||||
assert result[0].resource_name == "Enabled Member"
|
||||
assert result[0].resource_id == member_id
|
||||
|
||||
@@ -665,11 +665,10 @@ class Test_Entra_Service:
|
||||
)
|
||||
)
|
||||
|
||||
registration_details, error_message = asyncio.run(
|
||||
registration_details = asyncio.run(
|
||||
entra_service._get_user_registration_details()
|
||||
)
|
||||
|
||||
assert error_message is None
|
||||
assert registration_details == {
|
||||
"user-1": {
|
||||
"is_mfa_capable": True,
|
||||
@@ -687,37 +686,6 @@ class Test_Entra_Service:
|
||||
registration_builder.with_url.assert_called_once_with("next-link")
|
||||
registration_builder_next.get.assert_awaited()
|
||||
|
||||
def test__get_user_registration_details_returns_error_on_permission_denied(self):
|
||||
"""Test that 403 Authorization_RequestDenied returns an empty dict and
|
||||
a descriptive error message naming the missing AuditLog.Read.All permission.
|
||||
"""
|
||||
from msgraph.generated.models.o_data_errors.main_error import MainError
|
||||
from msgraph.generated.models.o_data_errors.o_data_error import ODataError
|
||||
|
||||
odata_error = ODataError()
|
||||
odata_error.error = MainError()
|
||||
odata_error.error.code = "Authorization_RequestDenied"
|
||||
|
||||
registration_builder = SimpleNamespace(get=AsyncMock(side_effect=odata_error))
|
||||
|
||||
entra_service = Entra.__new__(Entra)
|
||||
entra_service.client = SimpleNamespace(
|
||||
reports=SimpleNamespace(
|
||||
authentication_methods=SimpleNamespace(
|
||||
user_registration_details=registration_builder
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
registration_details, error_message = asyncio.run(
|
||||
entra_service._get_user_registration_details()
|
||||
)
|
||||
|
||||
assert registration_details == {}
|
||||
assert error_message is not None
|
||||
assert "AuditLog.Read.All" in error_message
|
||||
assert "user registration details" in error_message
|
||||
|
||||
def test__get_service_principals_filters_third_party_owners(self):
|
||||
"""Service principals owned by another tenant must not be returned."""
|
||||
# Mixed-case input to verify the service normalizes both sides before
|
||||
|
||||
@@ -1,76 +0,0 @@
|
||||
import pytest
|
||||
|
||||
from prowler.providers.okta.exceptions.exceptions import (
|
||||
OktaBaseException,
|
||||
OktaCredentialsError,
|
||||
OktaEnvironmentVariableError,
|
||||
OktaInsufficientPermissionsError,
|
||||
OktaInvalidCredentialsError,
|
||||
OktaInvalidOrgDomainError,
|
||||
OktaPrivateKeyFileError,
|
||||
OktaSetUpIdentityError,
|
||||
OktaSetUpSessionError,
|
||||
)
|
||||
|
||||
EXPECTED_CODES = {
|
||||
OktaEnvironmentVariableError: 14000,
|
||||
OktaSetUpSessionError: 14001,
|
||||
OktaSetUpIdentityError: 14002,
|
||||
OktaInvalidCredentialsError: 14003,
|
||||
OktaInvalidOrgDomainError: 14004,
|
||||
OktaPrivateKeyFileError: 14005,
|
||||
OktaInsufficientPermissionsError: 14006,
|
||||
}
|
||||
|
||||
|
||||
class Test_OktaExceptions:
|
||||
def test_all_codes_in_reserved_range(self):
|
||||
codes = [c for c, _ in OktaBaseException.OKTA_ERROR_CODES.keys()]
|
||||
assert all(14000 <= c <= 14999 for c in codes)
|
||||
assert len(codes) == len(set(codes)) # unique
|
||||
|
||||
def test_all_subclasses_inherit_from_credentials_error(self):
|
||||
for exc_cls in EXPECTED_CODES:
|
||||
assert issubclass(exc_cls, OktaCredentialsError)
|
||||
assert issubclass(exc_cls, OktaBaseException)
|
||||
|
||||
@pytest.mark.parametrize("exc_cls,code", list(EXPECTED_CODES.items()))
|
||||
def test_each_exception_carries_its_code(self, exc_cls, code):
|
||||
exc = exc_cls()
|
||||
assert exc.code == code
|
||||
assert exc.source == "Okta"
|
||||
assert exc.message # populated from OKTA_ERROR_CODES
|
||||
assert exc.remediation # populated from OKTA_ERROR_CODES
|
||||
|
||||
@pytest.mark.parametrize("exc_cls", list(EXPECTED_CODES.keys()))
|
||||
def test_custom_message_overrides_default(self, exc_cls):
|
||||
custom = "specific error context"
|
||||
exc = exc_cls(message=custom)
|
||||
assert exc.message == custom
|
||||
|
||||
def test_str_format_includes_class_code_and_message(self):
|
||||
exc = OktaInvalidOrgDomainError(message="bad url")
|
||||
rendered = str(exc)
|
||||
assert "OktaInvalidOrgDomainError" in rendered
|
||||
assert "[14004]" in rendered
|
||||
assert "bad url" in rendered
|
||||
|
||||
def test_original_exception_appended_to_str(self):
|
||||
original = ValueError("network down")
|
||||
exc = OktaSetUpIdentityError(original_exception=original)
|
||||
rendered = str(exc)
|
||||
assert "network down" in rendered
|
||||
|
||||
def test_can_be_raised_and_caught(self):
|
||||
with pytest.raises(OktaInvalidCredentialsError) as info:
|
||||
raise OktaInvalidCredentialsError(message="bad token")
|
||||
assert info.value.code == 14003
|
||||
assert "bad token" in str(info.value)
|
||||
|
||||
def test_caught_as_credentials_error_base(self):
|
||||
with pytest.raises(OktaCredentialsError):
|
||||
raise OktaPrivateKeyFileError(message="empty")
|
||||
|
||||
def test_caught_as_okta_base_exception(self):
|
||||
with pytest.raises(OktaBaseException):
|
||||
raise OktaEnvironmentVariableError(message="missing org url")
|
||||
@@ -1,62 +0,0 @@
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
from prowler.providers.okta.lib.arguments import arguments
|
||||
|
||||
|
||||
class TestOktaArguments:
|
||||
def setup_method(self):
|
||||
self.mock_parser = MagicMock()
|
||||
self.mock_subparsers = MagicMock()
|
||||
self.mock_okta_parser = MagicMock()
|
||||
|
||||
self.mock_parser.add_subparsers.return_value = self.mock_subparsers
|
||||
self.mock_subparsers.add_parser.return_value = self.mock_okta_parser
|
||||
|
||||
def test_init_parser_creates_subparser(self):
|
||||
mock_args = MagicMock()
|
||||
mock_args.subparsers = self.mock_subparsers
|
||||
mock_args.common_providers_parser = MagicMock()
|
||||
|
||||
arguments.init_parser(mock_args)
|
||||
|
||||
self.mock_subparsers.add_parser.assert_called_once_with(
|
||||
"okta",
|
||||
parents=[mock_args.common_providers_parser],
|
||||
help="Okta Provider",
|
||||
)
|
||||
|
||||
def test_init_parser_registers_non_secret_flags(self):
|
||||
mock_args = MagicMock()
|
||||
mock_args.subparsers = self.mock_subparsers
|
||||
mock_args.common_providers_parser = MagicMock()
|
||||
|
||||
auth_group = MagicMock()
|
||||
self.mock_okta_parser.add_argument_group.return_value = auth_group
|
||||
|
||||
arguments.init_parser(mock_args)
|
||||
|
||||
registered = {call.args[0] for call in auth_group.add_argument.call_args_list}
|
||||
assert registered == {
|
||||
"--okta-org-domain",
|
||||
"--okta-client-id",
|
||||
"--okta-scopes",
|
||||
}
|
||||
|
||||
def test_secret_flags_not_registered(self):
|
||||
"""Private key material must never be a CLI flag — env-only."""
|
||||
mock_args = MagicMock()
|
||||
mock_args.subparsers = self.mock_subparsers
|
||||
mock_args.common_providers_parser = MagicMock()
|
||||
|
||||
auth_group = MagicMock()
|
||||
self.mock_okta_parser.add_argument_group.return_value = auth_group
|
||||
|
||||
arguments.init_parser(mock_args)
|
||||
|
||||
registered = {call.args[0] for call in auth_group.add_argument.call_args_list}
|
||||
assert "--okta-private-key" not in registered
|
||||
assert "--okta-private-key-file" not in registered
|
||||
|
||||
def test_no_sensitive_arguments_constant(self):
|
||||
"""No SENSITIVE_ARGUMENTS frozenset needed — no secret flags exist."""
|
||||
assert not hasattr(arguments, "SENSITIVE_ARGUMENTS")
|
||||
@@ -1,9 +0,0 @@
|
||||
Mutelist:
|
||||
Accounts:
|
||||
"acme.okta.com":
|
||||
Checks:
|
||||
"signon_global_session_idle_timeout_15min":
|
||||
Regions:
|
||||
- "*"
|
||||
Resources:
|
||||
- "pol-default"
|
||||
@@ -1,104 +0,0 @@
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import yaml
|
||||
|
||||
from prowler.providers.okta.lib.mutelist.mutelist import OktaMutelist
|
||||
|
||||
MUTELIST_FIXTURE_PATH = "tests/providers/okta/lib/mutelist/fixtures/okta_mutelist.yaml"
|
||||
|
||||
|
||||
class TestOktaMutelist:
|
||||
def test_get_mutelist_file_from_local_file(self):
|
||||
mutelist = OktaMutelist(mutelist_path=MUTELIST_FIXTURE_PATH)
|
||||
|
||||
with open(MUTELIST_FIXTURE_PATH) as f:
|
||||
mutelist_fixture = yaml.safe_load(f)["Mutelist"]
|
||||
|
||||
assert mutelist.mutelist == mutelist_fixture
|
||||
assert mutelist.mutelist_file_path == MUTELIST_FIXTURE_PATH
|
||||
|
||||
def test_get_mutelist_file_from_local_file_non_existent(self):
|
||||
mutelist_path = "tests/providers/okta/lib/mutelist/fixtures/not_present"
|
||||
mutelist = OktaMutelist(mutelist_path=mutelist_path)
|
||||
|
||||
assert mutelist.mutelist == {}
|
||||
assert mutelist.mutelist_file_path == mutelist_path
|
||||
|
||||
def test_validate_mutelist_not_valid_key(self):
|
||||
with open(MUTELIST_FIXTURE_PATH) as f:
|
||||
mutelist_fixture = yaml.safe_load(f)["Mutelist"]
|
||||
|
||||
mutelist_fixture["Accounts1"] = mutelist_fixture["Accounts"]
|
||||
del mutelist_fixture["Accounts"]
|
||||
|
||||
mutelist = OktaMutelist(mutelist_content=mutelist_fixture)
|
||||
|
||||
assert len(mutelist.validate_mutelist(mutelist_fixture)) == 0
|
||||
assert mutelist.mutelist == {}
|
||||
assert mutelist.mutelist_file_path is None
|
||||
|
||||
def test_is_finding_muted_match(self):
|
||||
mutelist_content = {
|
||||
"Accounts": {
|
||||
"acme.okta.com": {
|
||||
"Checks": {
|
||||
"signon_global_session_idle_timeout_15min": {
|
||||
"Regions": ["*"],
|
||||
"Resources": ["Default Policy"],
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
mutelist = OktaMutelist(mutelist_content=mutelist_content)
|
||||
|
||||
finding = MagicMock()
|
||||
finding.check_metadata.CheckID = "signon_global_session_idle_timeout_15min"
|
||||
finding.resource_name = "Default Policy"
|
||||
finding.resource_tags = []
|
||||
|
||||
assert mutelist.is_finding_muted(finding, org_domain="acme.okta.com") is True
|
||||
|
||||
def test_is_finding_muted_no_match(self):
|
||||
mutelist_content = {
|
||||
"Accounts": {
|
||||
"acme.okta.com": {
|
||||
"Checks": {
|
||||
"signon_global_session_idle_timeout_15min": {
|
||||
"Regions": ["*"],
|
||||
"Resources": ["Default Policy"],
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
mutelist = OktaMutelist(mutelist_content=mutelist_content)
|
||||
|
||||
finding = MagicMock()
|
||||
finding.check_metadata.CheckID = "signon_global_session_idle_timeout_15min"
|
||||
finding.resource_name = "Some Other Policy"
|
||||
finding.resource_tags = []
|
||||
|
||||
assert mutelist.is_finding_muted(finding, org_domain="acme.okta.com") is False
|
||||
|
||||
def test_is_finding_muted_no_match_on_different_org(self):
|
||||
mutelist_content = {
|
||||
"Accounts": {
|
||||
"acme.okta.com": {
|
||||
"Checks": {
|
||||
"signon_global_session_idle_timeout_15min": {
|
||||
"Regions": ["*"],
|
||||
"Resources": ["*"],
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
mutelist = OktaMutelist(mutelist_content=mutelist_content)
|
||||
|
||||
finding = MagicMock()
|
||||
finding.check_metadata.CheckID = "signon_global_session_idle_timeout_15min"
|
||||
finding.resource_name = "Default Policy"
|
||||
finding.resource_tags = []
|
||||
|
||||
assert mutelist.is_finding_muted(finding, org_domain="other.okta.com") is False
|
||||
@@ -1,34 +0,0 @@
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
from prowler.providers.okta.models import OktaIdentityInfo, OktaSession
|
||||
|
||||
OKTA_ORG_DOMAIN = "acme.okta.com"
|
||||
OKTA_CLIENT_ID = "0oa1234567890abcdef"
|
||||
OKTA_PRIVATE_KEY = "-----BEGIN PRIVATE KEY-----\nMOCK\n-----END PRIVATE KEY-----"
|
||||
|
||||
|
||||
def set_mocked_okta_provider(
|
||||
session: OktaSession = None,
|
||||
identity: OktaIdentityInfo = None,
|
||||
audit_config: dict = None,
|
||||
):
|
||||
if session is None:
|
||||
session = OktaSession(
|
||||
org_domain=OKTA_ORG_DOMAIN,
|
||||
client_id=OKTA_CLIENT_ID,
|
||||
scopes=["okta.policies.read"],
|
||||
private_key=OKTA_PRIVATE_KEY,
|
||||
)
|
||||
if identity is None:
|
||||
identity = OktaIdentityInfo(
|
||||
org_domain=OKTA_ORG_DOMAIN,
|
||||
client_id=OKTA_CLIENT_ID,
|
||||
)
|
||||
|
||||
provider = MagicMock()
|
||||
provider.type = "okta"
|
||||
provider.auth_method = "OAuth 2.0 (private-key JWT)"
|
||||
provider.session = session
|
||||
provider.identity = identity
|
||||
provider.audit_config = audit_config or {}
|
||||
return provider
|
||||
@@ -1,422 +0,0 @@
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
|
||||
from prowler.providers.okta.exceptions.exceptions import (
|
||||
OktaEnvironmentVariableError,
|
||||
OktaInsufficientPermissionsError,
|
||||
OktaInvalidCredentialsError,
|
||||
OktaInvalidOrgDomainError,
|
||||
OktaPrivateKeyFileError,
|
||||
OktaSetUpIdentityError,
|
||||
)
|
||||
from prowler.providers.okta.models import OktaIdentityInfo, OktaSession
|
||||
from prowler.providers.okta.okta_provider import DEFAULT_SCOPES, OktaProvider
|
||||
from tests.providers.okta.okta_fixtures import (
|
||||
OKTA_CLIENT_ID,
|
||||
OKTA_ORG_DOMAIN,
|
||||
OKTA_PRIVATE_KEY,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _clear_okta_env(monkeypatch):
|
||||
for var in (
|
||||
"OKTA_ORG_DOMAIN",
|
||||
"OKTA_CLIENT_ID",
|
||||
"OKTA_PRIVATE_KEY",
|
||||
"OKTA_PRIVATE_KEY_FILE",
|
||||
"OKTA_SCOPES",
|
||||
):
|
||||
monkeypatch.delenv(var, raising=False)
|
||||
|
||||
|
||||
class Test_OktaProvider_validate_arguments:
|
||||
def test_missing_all_three_raises_combined(self, _clear_okta_env):
|
||||
with pytest.raises(OktaEnvironmentVariableError) as exc:
|
||||
OktaProvider.validate_arguments()
|
||||
msg = str(exc.value)
|
||||
assert "OKTA_ORG_DOMAIN" in msg
|
||||
assert "OKTA_CLIENT_ID" in msg
|
||||
assert "OKTA_PRIVATE_KEY" in msg
|
||||
|
||||
def test_only_org_domain_missing(self, _clear_okta_env, tmp_path):
|
||||
key_file = tmp_path / "key.pem"
|
||||
key_file.write_text(OKTA_PRIVATE_KEY)
|
||||
with pytest.raises(OktaEnvironmentVariableError) as exc:
|
||||
OktaProvider.validate_arguments(
|
||||
okta_client_id=OKTA_CLIENT_ID,
|
||||
okta_private_key_file=str(key_file),
|
||||
)
|
||||
assert "OKTA_ORG_DOMAIN" in str(exc.value)
|
||||
|
||||
def test_accepts_private_key_content_in_place_of_file(self, _clear_okta_env):
|
||||
OktaProvider.validate_arguments(
|
||||
okta_org_domain=OKTA_ORG_DOMAIN,
|
||||
okta_client_id=OKTA_CLIENT_ID,
|
||||
okta_private_key=OKTA_PRIVATE_KEY,
|
||||
)
|
||||
|
||||
def test_all_present_via_args(self, _clear_okta_env, tmp_path):
|
||||
key_file = tmp_path / "key.pem"
|
||||
key_file.write_text(OKTA_PRIVATE_KEY)
|
||||
OktaProvider.validate_arguments(
|
||||
okta_org_domain=OKTA_ORG_DOMAIN,
|
||||
okta_client_id=OKTA_CLIENT_ID,
|
||||
okta_private_key_file=str(key_file),
|
||||
)
|
||||
|
||||
def test_all_present_via_env(self, monkeypatch, tmp_path):
|
||||
key_file = tmp_path / "key.pem"
|
||||
key_file.write_text(OKTA_PRIVATE_KEY)
|
||||
monkeypatch.setenv("OKTA_ORG_DOMAIN", OKTA_ORG_DOMAIN)
|
||||
monkeypatch.setenv("OKTA_CLIENT_ID", OKTA_CLIENT_ID)
|
||||
monkeypatch.setenv("OKTA_PRIVATE_KEY_FILE", str(key_file))
|
||||
OktaProvider.validate_arguments()
|
||||
|
||||
|
||||
class Test_OktaProvider_setup_session:
|
||||
def test_rejects_domain_with_scheme(self, _clear_okta_env, tmp_path):
|
||||
key_file = tmp_path / "key.pem"
|
||||
key_file.write_text(OKTA_PRIVATE_KEY)
|
||||
with pytest.raises(OktaInvalidOrgDomainError):
|
||||
OktaProvider.setup_session(
|
||||
org_domain="https://acme.okta.com",
|
||||
client_id=OKTA_CLIENT_ID,
|
||||
private_key_file=str(key_file),
|
||||
)
|
||||
|
||||
def test_rejects_domain_with_trailing_slash(self, _clear_okta_env, tmp_path):
|
||||
key_file = tmp_path / "key.pem"
|
||||
key_file.write_text(OKTA_PRIVATE_KEY)
|
||||
with pytest.raises(OktaInvalidOrgDomainError):
|
||||
OktaProvider.setup_session(
|
||||
org_domain="acme.okta.com/",
|
||||
client_id=OKTA_CLIENT_ID,
|
||||
private_key_file=str(key_file),
|
||||
)
|
||||
|
||||
def test_rejects_non_okta_tld(self, _clear_okta_env, tmp_path):
|
||||
key_file = tmp_path / "key.pem"
|
||||
key_file.write_text(OKTA_PRIVATE_KEY)
|
||||
with pytest.raises(OktaInvalidOrgDomainError):
|
||||
OktaProvider.setup_session(
|
||||
org_domain="login.example.com",
|
||||
client_id=OKTA_CLIENT_ID,
|
||||
private_key_file=str(key_file),
|
||||
)
|
||||
|
||||
def test_accepts_all_okta_managed_tlds(self, _clear_okta_env, tmp_path):
|
||||
# Mirrors the domain whitelist used by the Okta SDK
|
||||
# (okta.config.config_validator) so that gov/mil tenants — exactly the
|
||||
# audience most likely to care about the DISA STIG check — are not
|
||||
# turned away at provider init.
|
||||
key_file = tmp_path / "key.pem"
|
||||
key_file.write_text(OKTA_PRIVATE_KEY)
|
||||
for domain in (
|
||||
"acme.oktapreview.com",
|
||||
"acme.okta-emea.com",
|
||||
"acme.okta-gov.com",
|
||||
"acme.okta.mil",
|
||||
"acme.okta-miltest.com",
|
||||
"acme.trex-govcloud.com",
|
||||
):
|
||||
session = OktaProvider.setup_session(
|
||||
org_domain=domain,
|
||||
client_id=OKTA_CLIENT_ID,
|
||||
private_key_file=str(key_file),
|
||||
)
|
||||
assert session.org_domain == domain
|
||||
|
||||
def test_unreadable_private_key_file_raises(self, _clear_okta_env):
|
||||
with pytest.raises(OktaPrivateKeyFileError):
|
||||
OktaProvider.setup_session(
|
||||
org_domain=OKTA_ORG_DOMAIN,
|
||||
client_id=OKTA_CLIENT_ID,
|
||||
private_key_file="/nonexistent/path.pem",
|
||||
)
|
||||
|
||||
def test_happy_path_uses_default_scopes(self, _clear_okta_env, tmp_path):
|
||||
key_file = tmp_path / "key.pem"
|
||||
key_file.write_text(OKTA_PRIVATE_KEY)
|
||||
session = OktaProvider.setup_session(
|
||||
org_domain=OKTA_ORG_DOMAIN,
|
||||
client_id=OKTA_CLIENT_ID,
|
||||
private_key_file=str(key_file),
|
||||
)
|
||||
assert session.org_domain == OKTA_ORG_DOMAIN
|
||||
assert session.client_id == OKTA_CLIENT_ID
|
||||
assert session.private_key == OKTA_PRIVATE_KEY
|
||||
assert session.scopes == DEFAULT_SCOPES
|
||||
|
||||
def test_custom_scopes_parsed_from_csv(self, _clear_okta_env, tmp_path):
|
||||
key_file = tmp_path / "key.pem"
|
||||
key_file.write_text(OKTA_PRIVATE_KEY)
|
||||
session = OktaProvider.setup_session(
|
||||
org_domain=OKTA_ORG_DOMAIN,
|
||||
client_id=OKTA_CLIENT_ID,
|
||||
private_key_file=str(key_file),
|
||||
scopes="okta.policies.read, okta.apps.read ,okta.users.read",
|
||||
)
|
||||
assert session.scopes == [
|
||||
"okta.policies.read",
|
||||
"okta.apps.read",
|
||||
"okta.users.read",
|
||||
]
|
||||
|
||||
def test_custom_scopes_accepts_list_input(self, _clear_okta_env, tmp_path):
|
||||
key_file = tmp_path / "key.pem"
|
||||
key_file.write_text(OKTA_PRIVATE_KEY)
|
||||
session = OktaProvider.setup_session(
|
||||
org_domain=OKTA_ORG_DOMAIN,
|
||||
client_id=OKTA_CLIENT_ID,
|
||||
private_key_file=str(key_file),
|
||||
scopes=["okta.policies.read", "okta.apps.read", "okta.users.read"],
|
||||
)
|
||||
assert session.scopes == [
|
||||
"okta.policies.read",
|
||||
"okta.apps.read",
|
||||
"okta.users.read",
|
||||
]
|
||||
|
||||
def test_custom_scopes_flattens_mixed_list_and_csv(self, _clear_okta_env, tmp_path):
|
||||
# Mirrors how argparse nargs="+" delivers values when a user
|
||||
# passes "--okta-scopes a,b c" — a list whose first element still
|
||||
# contains a comma.
|
||||
key_file = tmp_path / "key.pem"
|
||||
key_file.write_text(OKTA_PRIVATE_KEY)
|
||||
session = OktaProvider.setup_session(
|
||||
org_domain=OKTA_ORG_DOMAIN,
|
||||
client_id=OKTA_CLIENT_ID,
|
||||
private_key_file=str(key_file),
|
||||
scopes=["okta.policies.read,okta.apps.read", "okta.users.read"],
|
||||
)
|
||||
assert session.scopes == [
|
||||
"okta.policies.read",
|
||||
"okta.apps.read",
|
||||
"okta.users.read",
|
||||
]
|
||||
|
||||
def test_org_domain_normalized_lowercase_and_trimmed(
|
||||
self, _clear_okta_env, tmp_path
|
||||
):
|
||||
# The provider lowercases and strips whitespace so that
|
||||
# " ACME.okta.com " is accepted as "acme.okta.com".
|
||||
key_file = tmp_path / "key.pem"
|
||||
key_file.write_text(OKTA_PRIVATE_KEY)
|
||||
session = OktaProvider.setup_session(
|
||||
org_domain=" ACME.okta.com ",
|
||||
client_id=OKTA_CLIENT_ID,
|
||||
private_key_file=str(key_file),
|
||||
)
|
||||
assert session.org_domain == OKTA_ORG_DOMAIN
|
||||
|
||||
def test_accepts_private_key_via_content_arg(self, _clear_okta_env):
|
||||
session = OktaProvider.setup_session(
|
||||
org_domain=OKTA_ORG_DOMAIN,
|
||||
client_id=OKTA_CLIENT_ID,
|
||||
private_key=OKTA_PRIVATE_KEY,
|
||||
)
|
||||
assert session.private_key == OKTA_PRIVATE_KEY
|
||||
|
||||
def test_accepts_private_key_via_env_var(self, monkeypatch):
|
||||
monkeypatch.setenv("OKTA_PRIVATE_KEY", OKTA_PRIVATE_KEY)
|
||||
monkeypatch.delenv("OKTA_PRIVATE_KEY_FILE", raising=False)
|
||||
session = OktaProvider.setup_session(
|
||||
org_domain=OKTA_ORG_DOMAIN,
|
||||
client_id=OKTA_CLIENT_ID,
|
||||
)
|
||||
assert session.private_key == OKTA_PRIVATE_KEY
|
||||
|
||||
def test_content_takes_precedence_over_file(self, _clear_okta_env, tmp_path):
|
||||
# File has stale content; explicit content arg should win.
|
||||
key_file = tmp_path / "stale.pem"
|
||||
key_file.write_text("STALE CONTENT FROM FILE")
|
||||
fresh_key = "-----BEGIN PRIVATE KEY-----\nFRESH\n-----END PRIVATE KEY-----"
|
||||
session = OktaProvider.setup_session(
|
||||
org_domain=OKTA_ORG_DOMAIN,
|
||||
client_id=OKTA_CLIENT_ID,
|
||||
private_key=fresh_key,
|
||||
private_key_file=str(key_file),
|
||||
)
|
||||
assert session.private_key == fresh_key
|
||||
|
||||
|
||||
class Test_OktaProvider_setup_identity:
|
||||
def _session(self, tmp_path):
|
||||
key_file = tmp_path / "key.pem"
|
||||
key_file.write_text(OKTA_PRIVATE_KEY)
|
||||
return OktaProvider.setup_session(
|
||||
org_domain=OKTA_ORG_DOMAIN,
|
||||
client_id=OKTA_CLIENT_ID,
|
||||
private_key_file=str(key_file),
|
||||
)
|
||||
|
||||
def test_synthesizes_identity_and_probes_successfully(
|
||||
self, _clear_okta_env, tmp_path
|
||||
):
|
||||
session = self._session(tmp_path)
|
||||
|
||||
async def fake_list_policies(*_a, **_k):
|
||||
return ([], mock.MagicMock(headers={}), None)
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.okta.okta_provider.OktaSDKClient"
|
||||
) as mocked_client_cls:
|
||||
mocked = mock.MagicMock()
|
||||
mocked.list_policies = fake_list_policies
|
||||
mocked_client_cls.return_value = mocked
|
||||
identity = OktaProvider.setup_identity(session)
|
||||
|
||||
assert identity.org_domain == OKTA_ORG_DOMAIN
|
||||
assert identity.client_id == OKTA_CLIENT_ID
|
||||
|
||||
def test_raises_invalid_credentials_when_probe_returns_error(
|
||||
self, _clear_okta_env, tmp_path
|
||||
):
|
||||
session = self._session(tmp_path)
|
||||
|
||||
async def failing_list_policies(*_a, **_k):
|
||||
return ([], None, Exception("E0000011: Invalid token"))
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.okta.okta_provider.OktaSDKClient"
|
||||
) as mocked_client_cls:
|
||||
mocked = mock.MagicMock()
|
||||
mocked.list_policies = failing_list_policies
|
||||
mocked_client_cls.return_value = mocked
|
||||
with pytest.raises(OktaInvalidCredentialsError):
|
||||
OktaProvider.setup_identity(session)
|
||||
|
||||
def test_raises_insufficient_permissions_on_scope_error(
|
||||
self, _clear_okta_env, tmp_path
|
||||
):
|
||||
session = self._session(tmp_path)
|
||||
|
||||
async def failing_list_policies(*_a, **_k):
|
||||
return ([], None, Exception("invalid_scope: policies.read missing"))
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.okta.okta_provider.OktaSDKClient"
|
||||
) as mocked_client_cls:
|
||||
mocked = mock.MagicMock()
|
||||
mocked.list_policies = failing_list_policies
|
||||
mocked_client_cls.return_value = mocked
|
||||
with pytest.raises(OktaInsufficientPermissionsError):
|
||||
OktaProvider.setup_identity(session)
|
||||
|
||||
def test_raises_insufficient_permissions_on_forbidden(
|
||||
self, _clear_okta_env, tmp_path
|
||||
):
|
||||
session = self._session(tmp_path)
|
||||
|
||||
async def failing_list_policies(*_a, **_k):
|
||||
return ([], None, Exception("403 Forbidden"))
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.okta.okta_provider.OktaSDKClient"
|
||||
) as mocked_client_cls:
|
||||
mocked = mock.MagicMock()
|
||||
mocked.list_policies = failing_list_policies
|
||||
mocked_client_cls.return_value = mocked
|
||||
with pytest.raises(OktaInsufficientPermissionsError):
|
||||
OktaProvider.setup_identity(session)
|
||||
|
||||
def test_wraps_unexpected_errors_in_setup_identity_error(
|
||||
self, _clear_okta_env, tmp_path
|
||||
):
|
||||
session = self._session(tmp_path)
|
||||
|
||||
async def boom(*_a, **_k):
|
||||
raise RuntimeError("network down")
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.okta.okta_provider.OktaSDKClient"
|
||||
) as mocked_client_cls:
|
||||
mocked = mock.MagicMock()
|
||||
mocked.list_policies = boom
|
||||
mocked_client_cls.return_value = mocked
|
||||
with pytest.raises(OktaSetUpIdentityError):
|
||||
OktaProvider.setup_identity(session)
|
||||
|
||||
|
||||
def _mock_setup_paths():
|
||||
"""Patches that bypass the real SDK during provider construction."""
|
||||
session = OktaSession(
|
||||
org_domain=OKTA_ORG_DOMAIN,
|
||||
client_id=OKTA_CLIENT_ID,
|
||||
scopes=list(DEFAULT_SCOPES),
|
||||
private_key=OKTA_PRIVATE_KEY,
|
||||
)
|
||||
identity = OktaIdentityInfo(org_domain=OKTA_ORG_DOMAIN, client_id=OKTA_CLIENT_ID)
|
||||
return (
|
||||
mock.patch.object(OktaProvider, "validate_arguments"),
|
||||
mock.patch.object(OktaProvider, "setup_session", return_value=session),
|
||||
mock.patch.object(OktaProvider, "setup_identity", return_value=identity),
|
||||
)
|
||||
|
||||
|
||||
class Test_OktaProvider_init:
|
||||
def test_init_end_to_end(self, _clear_okta_env, tmp_path):
|
||||
validate_p, session_p, identity_p = _mock_setup_paths()
|
||||
with validate_p, session_p, identity_p:
|
||||
provider = OktaProvider(
|
||||
okta_org_domain=OKTA_ORG_DOMAIN,
|
||||
okta_client_id=OKTA_CLIENT_ID,
|
||||
okta_private_key_file="/tmp/key.pem",
|
||||
)
|
||||
|
||||
assert provider.type == "okta"
|
||||
assert provider.auth_method == "OAuth 2.0 (private-key JWT)"
|
||||
assert provider.identity.org_domain == OKTA_ORG_DOMAIN
|
||||
assert provider.identity.client_id == OKTA_CLIENT_ID
|
||||
assert provider.session.scopes == DEFAULT_SCOPES
|
||||
assert provider.audit_config is not None
|
||||
assert provider.mutelist is not None
|
||||
|
||||
|
||||
class Test_OktaProvider_test_connection:
|
||||
def test_success(self, _clear_okta_env, tmp_path):
|
||||
validate_p, session_p, identity_p = _mock_setup_paths()
|
||||
with validate_p, session_p, identity_p:
|
||||
connection = OktaProvider.test_connection(
|
||||
okta_org_domain=OKTA_ORG_DOMAIN,
|
||||
okta_client_id=OKTA_CLIENT_ID,
|
||||
okta_private_key_file="/tmp/key.pem",
|
||||
)
|
||||
assert connection.is_connected is True
|
||||
assert connection.error is None
|
||||
|
||||
def test_returns_error_when_raise_disabled(self, _clear_okta_env):
|
||||
connection = OktaProvider.test_connection(raise_on_exception=False)
|
||||
assert connection.is_connected is False
|
||||
assert connection.error is not None
|
||||
|
||||
def test_raises_when_raise_enabled(self, _clear_okta_env):
|
||||
with pytest.raises(OktaEnvironmentVariableError):
|
||||
OktaProvider.test_connection()
|
||||
|
||||
|
||||
class Test_OktaProvider_print_credentials:
|
||||
def test_invokes_print_boxes_with_org_and_client(self, _clear_okta_env, tmp_path):
|
||||
validate_p, session_p, identity_p = _mock_setup_paths()
|
||||
with (
|
||||
validate_p,
|
||||
session_p,
|
||||
identity_p,
|
||||
mock.patch(
|
||||
"prowler.providers.okta.okta_provider.print_boxes"
|
||||
) as mock_print,
|
||||
):
|
||||
provider = OktaProvider(
|
||||
okta_org_domain=OKTA_ORG_DOMAIN,
|
||||
okta_client_id=OKTA_CLIENT_ID,
|
||||
okta_private_key_file="/tmp/key.pem",
|
||||
)
|
||||
provider.print_credentials()
|
||||
|
||||
mock_print.assert_called_once()
|
||||
rendered = " ".join(mock_print.call_args.args[0])
|
||||
assert OKTA_ORG_DOMAIN in rendered
|
||||
assert OKTA_CLIENT_ID in rendered
|
||||
assert "OAuth 2.0" in rendered
|
||||