diff --git a/.github/workflows/sdk-code-quality.yml b/.github/workflows/sdk-code-quality.yml index 49f01f7fab..068b482f1c 100644 --- a/.github/workflows/sdk-code-quality.yml +++ b/.github/workflows/sdk-code-quality.yml @@ -79,11 +79,11 @@ jobs: - name: Lint with flake8 if: steps.check-changes.outputs.any_changed == 'true' - run: poetry run flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib,ui,api + run: poetry run flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib,ui,api,skills - name: Check format with black if: steps.check-changes.outputs.any_changed == 'true' - run: poetry run black --exclude api ui --check . + run: poetry run black --exclude api ui skills --check . - name: Lint with pylint if: steps.check-changes.outputs.any_changed == 'true' diff --git a/.gitignore b/.gitignore index e1f49be87c..ffad8a7d37 100644 --- a/.gitignore +++ b/.gitignore @@ -82,6 +82,9 @@ continue.json .continuerc .continuerc.json +# AI Coding Assistants - OpenCode +opencode.json + # AI Coding Assistants - GitHub Copilot .copilot/ .github/copilot/ @@ -152,3 +155,9 @@ CLAUDE.md # Compliance report *.pdf + +# AI Skills symlinks (generated by skills/setup.sh) +.claude/skills +.codex/skills +.github/skills +.gemini/skills diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ea9954f082..24f8f0f211 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -34,6 +34,7 @@ repos: rev: v2.3.1 hooks: - id: autoflake + exclude: ^skills/ args: [ "--in-place", @@ -45,18 +46,20 @@ repos: rev: 5.13.2 hooks: - id: isort + exclude: ^skills/ args: ["--profile", "black"] - repo: https://github.com/psf/black rev: 24.4.2 hooks: - id: black + exclude: ^skills/ - repo: https://github.com/pycqa/flake8 rev: 7.0.0 hooks: - id: flake8 - exclude: contrib + exclude: (contrib|^skills/) args: ["--ignore=E266,W503,E203,E501,W605"] - repo: https://github.com/python-poetry/poetry @@ -109,7 +112,7 @@ repos: - id: bandit name: bandit description: "Bandit is a tool 
for finding common security issues in Python code" - entry: bash -c 'bandit -q -lll -x '*_test.py,./contrib/,./.venv/' -r .' + entry: bash -c 'bandit -q -lll -x '*_test.py,./contrib/,./.venv/,./skills/' -r .' language: system files: '.*\.py' @@ -123,7 +126,7 @@ repos: - id: vulture name: vulture description: "Vulture finds unused code in Python programs." - entry: bash -c 'vulture --exclude "contrib,.venv,api/src/backend/api/tests/,api/src/backend/conftest.py,api/src/backend/tasks/tests/" --min-confidence 100 .' + entry: bash -c 'vulture --exclude "contrib,.venv,api/src/backend/api/tests/,api/src/backend/conftest.py,api/src/backend/tasks/tests/,skills/" --min-confidence 100 .' language: system files: '.*\.py' diff --git a/AGENTS.md b/AGENTS.md index c6a6027c18..a982b82fae 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -2,109 +2,83 @@ ## How to Use This Guide -- Start here for cross-project norms, Prowler is a monorepo with several components. Every component should have an `AGENTS.md` file that contains the guidelines for the agents in that component. The file is located beside the code you are touching (e.g. `api/AGENTS.md`, `ui/AGENTS.md`, `prowler/AGENTS.md`). -- Follow the stricter rule when guidance conflicts; component docs override this file for their scope. -- Keep instructions synchronized. When you add new workflows or scripts, update both, the relevant component `AGENTS.md` and this file if they apply broadly. +- Start here for cross-project norms. Prowler is a monorepo with several components. +- Each component has an `AGENTS.md` file with specific guidelines (e.g., `api/AGENTS.md`, `ui/AGENTS.md`). +- Component docs override this file when guidance conflicts. 
+ +## Available Skills + +Use these skills for detailed patterns on-demand: + +### Generic Skills (Any Project) +| Skill | Description | URL | +|-------|-------------|-----| +| `typescript` | Const types, flat interfaces, utility types | [SKILL.md](skills/typescript/SKILL.md) | +| `react-19` | No useMemo/useCallback, React Compiler | [SKILL.md](skills/react-19/SKILL.md) | +| `nextjs-15` | App Router, Server Actions, streaming | [SKILL.md](skills/nextjs-15/SKILL.md) | +| `tailwind-4` | cn() utility, no var() in className | [SKILL.md](skills/tailwind-4/SKILL.md) | +| `playwright` | Page Object Model, MCP workflow, selectors | [SKILL.md](skills/playwright/SKILL.md) | +| `pytest` | Fixtures, mocking, markers, parametrize | [SKILL.md](skills/pytest/SKILL.md) | +| `django-drf` | ViewSets, Serializers, Filters | [SKILL.md](skills/django-drf/SKILL.md) | +| `zod-4` | New API (z.email(), z.uuid()) | [SKILL.md](skills/zod-4/SKILL.md) | +| `zustand-5` | Persist, selectors, slices | [SKILL.md](skills/zustand-5/SKILL.md) | +| `ai-sdk-5` | UIMessage, streaming, LangChain | [SKILL.md](skills/ai-sdk-5/SKILL.md) | + +### Prowler-Specific Skills +| Skill | Description | URL | +|-------|-------------|-----| +| `prowler` | Project overview, component navigation | [SKILL.md](skills/prowler/SKILL.md) | +| `prowler-api` | Django + RLS + JSON:API patterns | [SKILL.md](skills/prowler-api/SKILL.md) | +| `prowler-ui` | Next.js + shadcn conventions | [SKILL.md](skills/prowler-ui/SKILL.md) | +| `prowler-sdk-check` | Create new security checks | [SKILL.md](skills/prowler-sdk-check/SKILL.md) | +| `prowler-mcp` | MCP server tools and models | [SKILL.md](skills/prowler-mcp/SKILL.md) | +| `prowler-test-sdk` | SDK testing (pytest + moto) | [SKILL.md](skills/prowler-test-sdk/SKILL.md) | +| `prowler-test-api` | API testing (pytest-django + RLS) | [SKILL.md](skills/prowler-test-api/SKILL.md) | +| `prowler-test-ui` | E2E testing (Playwright) | [SKILL.md](skills/prowler-test-ui/SKILL.md) | +| 
`prowler-compliance` | Compliance framework structure | [SKILL.md](skills/prowler-compliance/SKILL.md) | +| `prowler-provider` | Add new cloud providers | [SKILL.md](skills/prowler-provider/SKILL.md) | +| `prowler-pr` | Pull request conventions | [SKILL.md](skills/prowler-pr/SKILL.md) | +| `prowler-docs` | Documentation style guide | [SKILL.md](skills/prowler-docs/SKILL.md) | +| `skill-creator` | Create new AI agent skills | [SKILL.md](skills/skill-creator/SKILL.md) | + +--- ## Project Overview -Prowler is an open-source cloud security assessment tool that supports multiple cloud providers (AWS, Azure, GCP, Kubernetes, GitHub, M365, etc.). The project consists in a monorepo with the following main components: +Prowler is an open-source cloud security assessment tool supporting AWS, Azure, GCP, Kubernetes, GitHub, M365, and more. -- **Prowler SDK**: Python SDK, includes the Prowler CLI, providers, services, checks, compliances, config, etc. (`prowler/`) -- **Prowler API**: Django-based REST API backend (`api/`) -- **Prowler UI**: Next.js frontend application (`ui/`) -- **Prowler MCP Server**: Model Context Protocol server that gives access to the entire Prowler ecosystem for LLMs (`mcp_server/`) -- **Prowler Dashboard**: Prowler CLI feature that allows to visualize the results of the scans in a simple dashboard (`dashboard/`) +| Component | Location | Tech Stack | +|-----------|----------|------------| +| SDK | `prowler/` | Python 3.9+, Poetry | +| API | `api/` | Django 5.1, DRF, Celery | +| UI | `ui/` | Next.js 15, React 19, Tailwind 4 | +| MCP Server | `mcp_server/` | FastMCP, Python 3.12+ | +| Dashboard | `dashboard/` | Dash, Plotly | -### Project Structure (Key Folders & Files) - -- `prowler/`: Main source code for Prowler SDK (CLI, providers, services, checks, compliances, config, etc.) 
-- `api/`: Django-based REST API backend components -- `ui/`: Next.js frontend application -- `mcp_server/`: Model Context Protocol server that gives access to the entire Prowler ecosystem for LLMs -- `dashboard/`: Prowler CLI feature that allows to visualize the results of the scans in a simple dashboard -- `docs/`: Documentation -- `examples/`: Example output formats for providers and scripts -- `permissions/`: Permission-related files and policies -- `contrib/`: Community-contributed scripts or modules -- `tests/`: Prowler SDK test suite -- `docker-compose.yml`: Docker compose file to run the Prowler App (API + UI) production environment -- `docker-compose-dev.yml`: Docker compose file to run the Prowler App (API + UI) development environment -- `pyproject.toml`: Poetry Prowler SDK project file -- `.pre-commit-config.yaml`: Pre-commit hooks configuration -- `Makefile`: Makefile to run the project -- `LICENSE`: License file -- `README.md`: README file -- `CONTRIBUTING.md`: Contributing guide +--- ## Python Development -Most of the code is written in Python, so the main files in the root are focused on Python code. - -### Poetry Dev Environment - -For developing in Python we recommend using `poetry` to manage the dependencies. The minimal version is `2.1.1`. So it is recommended to run all commands using `poetry run ...`. - -To install the core dependencies to develop it is needed to run `poetry install --with dev`. - -### Pre-commit hooks - -The project has pre-commit hooks to lint and format the code. They are installed by running `poetry run pre-commit install`. - -When commiting a change, the hooks will be run automatically. 
Some of them are: - -- Code formatting (black, isort) -- Linting (flake8, pylint) -- Security checks (bandit, safety, trufflehog) -- YAML/JSON validation -- Poetry lock file validation - - -### Linting and Formatting - -We use the following tools to lint and format the code: - -- `flake8`: for linting the code -- `black`: for formatting the code -- `pylint`: for linting the code - -You can run all using the `make` command: ```bash +# Setup +poetry install --with dev +poetry run pre-commit install + +# Code quality poetry run make lint poetry run make format +poetry run pre-commit run --all-files ``` -Or they will be run automatically when you commit your changes using pre-commit hooks. +--- ## Commit & Pull Request Guidelines -For the commit messages and pull requests name follow the conventional-commit style. +Follow conventional-commit style: `[scope]: ` -Befire creating a pull request, complete the checklist in `.github/pull_request_template.md`. Summaries should explain deployment impact, highlight review steps, and note changelog or permission updates. Run all relevant tests and linters before requesting review and link screenshots for UI or dashboard changes. +**Types:** `feat`, `fix`, `docs`, `chore`, `perf`, `refactor`, `style`, `test` -### Conventional Commit Style - -The Conventional Commits specification is a lightweight convention on top of commit messages. It provides an easy set of rules for creating an explicit commit history; which makes it easier to write automated tools on top of. - -The commit message should be structured as follows: - -``` -[optional scope]: - -[optional body] - -[optional footer(s)] -``` - -Any line of the commit message cannot be longer 100 characters! 
This allows the message to be easier to read on GitHub as well as in various git tools - -#### Commit Types - -- **feat**: code change introuce new functionality to the application -- **fix**: code change that solve a bug in the codebase -- **docs**: documentation only changes -- **chore**: changes related to the build process or auxiliary tools and libraries, that do not affect the application's functionality -- **perf**: code change that improves performance -- **refactor**: code change that neither fixes a bug nor adds a feature -- **style**: changes that do not affect the meaning of the code (white-space, formatting, missing semi-colons, etc) -- **test**: adding missing tests or correcting existing tests +Before creating a PR: +1. Complete checklist in `.github/pull_request_template.md` +2. Run all relevant tests and linters +3. Link screenshots for UI changes diff --git a/README.md b/README.md index a1e71e23be..bdf08ee20a 100644 --- a/README.md +++ b/README.md @@ -310,6 +310,45 @@ And many more environments. ![Architecture](docs/img/architecture.png) +# 🤖 AI Skills for Development + +Prowler includes a comprehensive set of **AI Skills** that help AI coding assistants understand Prowler's codebase patterns and conventions. + +## What are AI Skills? + +Skills are structured instructions that give AI assistants the context they need to write code that follows Prowler's standards. 
They include: + +- **Coding patterns** for each component (SDK, API, UI, MCP Server) +- **Testing conventions** (pytest, Playwright) +- **Architecture guidelines** (Clean Architecture, RLS patterns) +- **Framework-specific rules** (React 19, Next.js 15, Django DRF, Tailwind 4) + +## Available Skills + +| Category | Skills | +|----------|--------| +| **Generic** | `typescript`, `react-19`, `nextjs-15`, `tailwind-4`, `playwright`, `pytest`, `django-drf`, `zod-4`, `zustand-5`, `ai-sdk-5` | +| **Prowler** | `prowler`, `prowler-api`, `prowler-ui`, `prowler-mcp`, `prowler-sdk-check`, `prowler-test-ui`, `prowler-test-api`, `prowler-test-sdk`, `prowler-compliance`, `prowler-provider`, `prowler-pr`, `prowler-docs` | + +## Setup + +```bash +./skills/setup.sh +``` + +This configures skills for AI coding assistants that follow the [agentskills.io](https://agentskills.io) standard: + +| Tool | Configuration | +|------|---------------| +| **Claude Code** | `.claude/skills/` (symlink) | +| **OpenCode** | `.claude/skills/` (symlink) | +| **Codex (OpenAI)** | `.codex/skills/` (symlink) | +| **GitHub Copilot** | `.github/skills/` (symlink) | +| **Gemini CLI** | `.gemini/skills/` (symlink) | + +> **Note:** Restart your AI coding assistant after running setup to load the skills. +> Gemini CLI requires `experimental.skills` enabled in settings. 
+ # 📖 Documentation For installation instructions, usage details, tutorials, and the Developer Guide, visit https://docs.prowler.com/ diff --git a/api/AGENTS.md b/api/AGENTS.md new file mode 100644 index 0000000000..9ae73401c6 --- /dev/null +++ b/api/AGENTS.md @@ -0,0 +1,137 @@ +# Prowler API - AI Agent Ruleset + +> **Skills Reference**: For detailed patterns, use these skills: +> - [`prowler-api`](../skills/prowler-api/SKILL.md) - Models, Serializers, Views, RLS patterns +> - [`prowler-test-api`](../skills/prowler-test-api/SKILL.md) - Testing patterns (pytest-django) +> - [`django-drf`](../skills/django-drf/SKILL.md) - Generic DRF patterns +> - [`pytest`](../skills/pytest/SKILL.md) - Generic pytest patterns + +## CRITICAL RULES - NON-NEGOTIABLE + +### Models +- ALWAYS: UUIDv4 PKs, `inserted_at`/`updated_at` timestamps, `JSONAPIMeta` class +- ALWAYS: Inherit from `RowLevelSecurityProtectedModel` for tenant-scoped data +- NEVER: Auto-increment integer PKs, models without tenant isolation + +### Serializers +- ALWAYS: Separate serializers for Create/Update operations +- ALWAYS: Inherit from `RLSSerializer` for tenant-scoped models +- NEVER: Write logic in serializers (use services/utils) + +### Views +- ALWAYS: Inherit from `BaseRLSViewSet` for tenant-scoped resources +- ALWAYS: Define `filterset_class`, use `@extend_schema` for OpenAPI +- NEVER: Raw SQL queries, business logic in views + +### Row-Level Security (RLS) +- ALWAYS: Use `rls_transaction(tenant_id)` context manager +- NEVER: Query across tenants, trust client-provided tenant_id + +### Celery Tasks +- ALWAYS: `@shared_task` with `name`, `queue`, `RLSTask` base class +- NEVER: Long-running ops in views, request context in tasks + +--- + +## DECISION TREES + +### Serializer Selection +``` +Read → Serializer +Create → CreateSerializer +Update → UpdateSerializer +Nested read → IncludeSerializer +``` + +### Task vs View +``` +< 100ms → View +> 100ms or external API → Celery task +Needs retry → Celery task +``` 
+ +--- + +## TECH STACK + +Django 5.1.x | DRF 3.15.x | djangorestframework-jsonapi 7.x | Celery 5.4.x | PostgreSQL 16 | pytest 8.x + +--- + +## PROJECT STRUCTURE + +``` +api/src/backend/ +├── api/ # Main Django app +│ ├── v1/ # API version 1 (views, serializers, urls) +│ ├── models.py # Django models +│ ├── filters.py # FilterSet classes +│ ├── base_views.py # Base ViewSet classes +│ ├── rls.py # Row-Level Security +│ └── tests/ # Unit tests +├── config/ # Django configuration +└── tasks/ # Celery tasks +``` + +--- + +## COMMANDS + +```bash +# Development +poetry run python src/backend/manage.py runserver +poetry run celery -A config.celery worker -l INFO + +# Database +poetry run python src/backend/manage.py makemigrations +poetry run python src/backend/manage.py migrate + +# Testing & Linting +poetry run pytest -x --tb=short +poetry run make lint +``` + +--- + +## QA CHECKLIST + +- [ ] `poetry run pytest` passes +- [ ] `poetry run make lint` passes +- [ ] Migrations created if models changed +- [ ] New endpoints have `@extend_schema` decorators +- [ ] RLS properly applied for tenant data +- [ ] Tests cover success and error cases + +--- + +## NAMING CONVENTIONS + +| Entity | Pattern | Example | +|--------|---------|---------| +| Serializer (read) | `Serializer` | `ProviderSerializer` | +| Serializer (create) | `CreateSerializer` | `ProviderCreateSerializer` | +| Serializer (update) | `UpdateSerializer` | `ProviderUpdateSerializer` | +| Filter | `Filter` | `ProviderFilter` | +| ViewSet | `ViewSet` | `ProviderViewSet` | +| Task | `__task` | `sync_provider_resources_task` | + +--- + +## API CONVENTIONS (JSON:API) + +```json +{ + "data": { + "type": "providers", + "id": "uuid", + "attributes": { "name": "value" }, + "relationships": { "tenant": { "data": { "type": "tenants", "id": "uuid" } } } + } +} +``` + +- Content-Type: `application/vnd.api+json` +- Pagination: `?page[number]=1&page[size]=20` +- Filtering: `?filter[field]=value`, `?filter[field__in]=val1,val2` +- 
Sorting: `?sort=field`, `?sort=-field` +- Including: `?include=provider,findings` diff --git a/docs/developer-guide/ai-skills.mdx b/docs/developer-guide/ai-skills.mdx new file mode 100644 index 0000000000..d595c3ab11 --- /dev/null +++ b/docs/developer-guide/ai-skills.mdx @@ -0,0 +1,216 @@ +--- +title: 'AI Skills System' +--- + +This guide explains the AI Skills system that provides on-demand context and patterns to AI agents working with the Prowler codebase. + + +**What are AI Skills?** Skills are structured instructions that help AI agents (Claude Code, Cursor, Copilot, etc.) understand Prowler's conventions, patterns, and best practices. + + +## Architecture Overview + +```mermaid +graph LR + subgraph FLOW["AI Skills Architecture"] + A["AI Agent"] -->|"1. matches trigger"| B["AGENTS.md"] + B -->|"2. loads"| C["Skill"] + C -->|"3. provides"| D["Patterns
Templates
Commands"] + C -->|"4. references"| E["Local Docs"] + D --> F["Correct Output"] + E --> F + end + + style A fill:#1e3a5f,stroke:#4a9eff,color:#fff + style B fill:#5c4d1a,stroke:#ffd700,color:#fff + style C fill:#1a4d1a,stroke:#4caf50,color:#fff + style E fill:#4a1a4d,stroke:#ba68c8,color:#fff + style F fill:#1a4d2e,stroke:#66bb6a,color:#fff +``` + +## How It Works + +```mermaid +sequenceDiagram + participant U as User + participant A as AI Agent + participant R as AGENTS.md + participant S as Skill + participant AS as assets/ + participant RF as references/ + participant D as Local Docs + + U->>A: "Create an AWS security check" + + Note over A: Analyze request context + + A->>R: Find matching skill trigger + R-->>A: prowler-sdk-check matches + + A->>S: Load SKILL.md + S-->>A: Patterns, rules, templates, commands + + Note over A: Need code template? + + A->>AS: Read assets/aws_check.py + AS-->>A: Check implementation template + + Note over A: Need more details? + + A->>RF: Read references/metadata-docs.md + RF-->>A: Points to local docs + + A->>D: Read docs/developer-guide/checks.mdx + D-->>A: Full documentation + + Note over A: Execute with full context + + A->>U: Creates check with correct patterns +``` + +## Before vs After + +```mermaid +graph TD + subgraph COMPARISON["BEFORE vs AFTER"] + direction LR + + subgraph BEFORE["Without Skills"] + B1["AI guesses conventions"] + B2["Wrong structure"] + B3["Multiple iterations"] + B4["Web searches for docs"] + B5["Inconsistent patterns"] + end + + subgraph AFTER["With Skills"] + A1["AI loads exact patterns"] + A2["Correct structure"] + A3["First-time right"] + A4["Local docs referenced"] + A5["Consistent patterns"] + end + end + + style BEFORE fill:#5c1a1a,stroke:#ef5350,color:#fff + style AFTER fill:#1a4d1a,stroke:#66bb6a,color:#fff +``` + +## Complete Architecture + +```mermaid +flowchart TB + subgraph ENTRY["ENTRY POINT"] + AGENTS["AGENTS.md
━━━━━━━━━━━━━━━━━
• Available skills registry
• Skill → Trigger mapping
• Component navigation"] + end + + subgraph SKILLS["SKILLS LIBRARY"] + direction TB + + subgraph GENERIC["Generic Skills"] + G1["typescript"] + G2["react-19"] + G3["nextjs-15"] + G4["tailwind-4"] + G5["pytest"] + G6["playwright"] + G7["django-drf"] + G8["zod-4"] + G9["zustand-5"] + G10["ai-sdk-5"] + end + + subgraph PROWLER["Prowler Skills"] + P1["prowler"] + P2["prowler-sdk-check"] + P3["prowler-api"] + P4["prowler-ui"] + P5["prowler-mcp"] + P6["prowler-provider"] + P7["prowler-compliance"] + P8["prowler-docs"] + P9["prowler-pr"] + end + + subgraph TESTING["Testing Skills"] + T1["prowler-test-sdk"] + T2["prowler-test-api"] + T3["prowler-test-ui"] + end + + subgraph META["Meta Skills"] + M1["skill-creator"] + end + end + + subgraph STRUCTURE["SKILL STRUCTURE"] + direction LR + + SKILLMD["SKILL.md
━━━━━━━━━━━━━━
• Frontmatter
• Critical patterns
• Decision trees
• Code examples
• Commands
• Keywords"] + + ASSETS["assets/
━━━━━━━━━━━━━━
• Code templates
• JSON schemas
• Config examples"] + + REFS["references/
━━━━━━━━━━━━━━
• Local doc paths
• No web URLs
• Single source"] + end + + subgraph DOCS["DOCUMENTATION"] + direction TB + DD["docs/developer-guide/"] + D1["checks.mdx"] + D2["unit-testing.mdx"] + D3["provider.mdx"] + D4["mcp-server.mdx"] + D5["..."] + + DD --> D1 + DD --> D2 + DD --> D3 + DD --> D4 + DD --> D5 + end + + ENTRY --> SKILLS + SKILLS --> STRUCTURE + SKILLMD --> ASSETS + SKILLMD --> REFS + REFS -.->|"points to"| DOCS + + style ENTRY fill:#1e3a5f,stroke:#4a9eff,color:#fff + style GENERIC fill:#5c4d1a,stroke:#ffd700,color:#fff + style PROWLER fill:#1a4d1a,stroke:#66bb6a,color:#fff + style TESTING fill:#4d1a3d,stroke:#f06292,color:#fff + style META fill:#4a1a4d,stroke:#ba68c8,color:#fff + style STRUCTURE fill:#5c3d1a,stroke:#ffb74d,color:#fff + style DOCS fill:#1a3d4d,stroke:#4dd0e1,color:#fff +``` + +## Skills Included + +| Type | Skills | +|------|--------| +| **Generic** | typescript, react-19, nextjs-15, tailwind-4, pytest, playwright, django-drf, zod-4, zustand-5, ai-sdk-5 | +| **Prowler** | prowler, prowler-sdk-check, prowler-api, prowler-ui, prowler-mcp, prowler-provider, prowler-compliance, prowler-docs, prowler-pr | +| **Testing** | prowler-test-sdk, prowler-test-api, prowler-test-ui | +| **Meta** | skill-creator | + +## Skill Structure + +Each skill follows the [Agent Skills spec](https://agentskills.io): + +``` +skills/{skill-name}/ +├── SKILL.md # Patterns, rules, decision trees +├── assets/ # Code templates, schemas +└── references/ # Links to local docs (single source of truth) +``` + +## Key Design Decisions + +1. **Self-contained skills** - Critical patterns inline for fast loading +2. **Local doc references** - No web URLs, points to `docs/developer-guide/*.mdx` +3. **Single source of truth** - Skills reference docs, no duplication +4. **On-demand loading** - AI loads only what's needed for the task + +## Creating New Skills + +Use the `skill-creator` meta-skill to create new skills that follow the Agent Skills spec. 
See `AGENTS.md` for the full list of available skills and their triggers. diff --git a/docs/docs.json b/docs/docs.json index 65cf7d3070..f6e5876bb7 100644 --- a/docs/docs.json +++ b/docs/docs.json @@ -285,7 +285,8 @@ "developer-guide/integrations", "developer-guide/security-compliance-framework", "developer-guide/lighthouse", - "developer-guide/mcp-server" + "developer-guide/mcp-server", + "developer-guide/ai-skills" ] }, { diff --git a/mcp_server/AGENTS.md b/mcp_server/AGENTS.md index fdc1ce5535..bbefa2db57 100644 --- a/mcp_server/AGENTS.md +++ b/mcp_server/AGENTS.md @@ -1,310 +1,88 @@ # Prowler MCP Server - AI Agent Ruleset -**Complete guide for AI agents and developers working on the Prowler MCP Server - the Model Context Protocol server that provides AI agents access to the Prowler ecosystem.** +> **Skills Reference**: For detailed patterns, use the [`prowler-mcp`](../skills/prowler-mcp/SKILL.md) skill. ## Project Overview -The Prowler MCP Server brings the entire Prowler ecosystem to AI assistants through -the Model Context Protocol (MCP). It enables seamless integration with AI tools -like Claude Desktop, Cursor, and other MCP hosts, allowing interaction with -Prowler's security capabilities through natural language. +The Prowler MCP Server provides AI agents access to the Prowler ecosystem through the Model Context Protocol (MCP). It integrates with Claude Desktop, Cursor, and other MCP hosts. 
--- -## Critical Rules +## CRITICAL RULES ### Tool Implementation - -- **ALWAYS**: Extend `BaseTool` ABC for new Prowler App tools (auto-registration) -- **ALWAYS**: Use `@mcp.tool()` decorator for Hub/Docs tools (manual registration) -- **NEVER**: Manually register BaseTool subclasses (auto-discovered via `load_all_tools()`) -- **NEVER**: Import tools directly in server.py (tool_loader handles discovery) +- ALWAYS: Extend `BaseTool` ABC for Prowler App tools (auto-registration) +- ALWAYS: Use `@mcp.tool()` decorator for Hub/Docs tools +- NEVER: Manually register BaseTool subclasses +- NEVER: Import tools directly in server.py ### Models - -- **ALWAYS**: Use `MinimalSerializerMixin` for LLM-optimized responses -- **ALWAYS**: Implement `from_api_response()` factory method for API transformations -- **ALWAYS**: Use two-tier models (Simplified for lists, Detailed for single items) -- **NEVER**: Return raw API responses (transform to simplified models) +- ALWAYS: Use `MinimalSerializerMixin` for LLM-optimized responses +- ALWAYS: Implement `from_api_response()` factory method +- ALWAYS: Two-tier models (Simplified for lists, Detailed for single items) +- NEVER: Return raw API responses ### API Client - -- **ALWAYS**: Use singleton `ProwlerAPIClient` via `self.api_client` in tools -- **ALWAYS**: Use `build_filter_params()` for query parameter normalization -- **NEVER**: Create new httpx clients in tools (use shared client) +- ALWAYS: Use singleton `ProwlerAPIClient` via `self.api_client` +- ALWAYS: Use `build_filter_params()` for query parameters +- NEVER: Create new httpx clients in tools --- -## Architecture +## ARCHITECTURE -### Three Sub-Servers Pattern - -The main server (`server.py`) orchestrates three independent sub-servers with prefixed tool namespacing: +### Three Sub-Servers ```python -# server.py imports sub-servers with prefixes await prowler_mcp_server.import_server(hub_mcp_server, prefix="prowler_hub") await 
prowler_mcp_server.import_server(app_mcp_server, prefix="prowler_app") await prowler_mcp_server.import_server(docs_mcp_server, prefix="prowler_docs") ``` -This pattern ensures: -- Failures in one sub-server do not block others -- Clear tool namespacing for LLM disambiguation -- Independent development and testing - -### Tool Naming Convention - -All tools follow a consistent naming pattern with prefixes: -- `prowler_hub_*` - Prowler Hub catalog and compliance tools -- `prowler_docs_*` - Prowler documentation search and retrieval -- `prowler_app_*` - Prowler Cloud and App (Self-Managed) management tools - -### Tool Registration Patterns - -**Pattern 1: Prowler Hub/Docs (Direct Decorators)** - -```python -# prowler_hub/server.py or prowler_documentation/server.py -hub_mcp_server = FastMCP("prowler-hub") - -@hub_mcp_server.tool() -async def get_checks(providers: str | None = None) -> dict: - """Tool docstring becomes LLM description.""" - # Direct implementation - response = prowler_hub_client.get("/check", params=params) - return response.json() -``` - -**Pattern 2: Prowler App (BaseTool Auto-Registration)** - -```python -# prowler_app/tools/findings.py -class FindingsTools(BaseTool): - async def search_security_findings( - self, - severity: list[str] = Field(default=[], description="Filter by severity") - ) -> dict: - """Docstring becomes LLM description.""" - response = await self.api_client.get("/api/v1/findings") - return SimplifiedFinding.from_api_response(response).model_dump() -``` - -NOTE: Only public methods of `BaseTool` subclasses are registered as tools. 
+### Tool Naming +- `prowler_hub_*` - Catalog and compliance (no auth) +- `prowler_docs_*` - Documentation search (no auth) +- `prowler_app_*` - Cloud/App management (auth required) --- -## Tech Stack +## TECH STACK -- **Language**: Python 3.12+ -- **MCP Framework**: FastMCP 2.13.1 -- **HTTP Client**: httpx (async) -- **Validation**: Pydantic with MinimalSerializerMixin -- **Package Manager**: uv +Python 3.12+ | FastMCP 2.13.1 | httpx (async) | Pydantic | uv --- -## Project Structure +## PROJECT STRUCTURE ``` -mcp_server/ -├── README.md # User documentation -├── AGENTS.md # This file - AI agent guidelines -├── CHANGELOG.md # Version history -├── pyproject.toml # Project metadata and dependencies -├── Dockerfile # Container image definition -├── entrypoint.sh # Docker entrypoint script -└── prowler_mcp_server/ - ├── __init__.py # Version info - ├── main.py # CLI entry point - ├── server.py # Main FastMCP server orchestration - ├── lib/ - │ └── logger.py # Structured logging - ├── prowler_hub/ - │ └── server.py # Hub tools (10 tools, no auth) - ├── prowler_app/ - │ ├── server.py # App server initialization - │ ├── tools/ - │ │ ├── base.py # BaseTool abstract class - │ │ ├── findings.py # Findings tools - │ │ ├── providers.py # Provider tools - │ │ ├── scans.py # Scan tools - │ │ ├── resources.py # Resource tools - │ │ └── muting.py # Muting tools - │ ├── models/ - │ │ ├── base.py # MinimalSerializerMixin - │ │ ├── findings.py # Finding models - │ │ ├── providers.py # Provider models - │ │ ├── scans.py # Scan models - │ │ ├── resources.py # Resource models - │ │ └── muting.py # Muting models - │ └── utils/ - │ ├── api_client.py # ProwlerAPIClient singleton - │ ├── auth.py # ProwlerAppAuth (STDIO/HTTP) - │ └── tool_loader.py # Auto-discovery and registration - └── prowler_documentation/ - ├── server.py # Documentation tools (2 tools, no auth) - └── search_engine.py # Mintlify API integration +mcp_server/prowler_mcp_server/ +├── server.py # Main orchestration +├── 
prowler_hub/server.py # Hub tools (no auth) +├── prowler_app/ +│ ├── server.py +│ ├── tools/{feature}.py # BaseTool subclasses +│ ├── models/{feature}.py # Pydantic models +│ └── utils/api_client.py # ProwlerAPIClient +└── prowler_documentation/ + └── server.py # Docs tools (no auth) ``` --- -## Commands - -NOTE: To run a python command always use `uv run ` from within the `mcp_server/` directory. - -### Development +## COMMANDS ```bash -# Navigate to MCP server directory -cd mcp_server - -# Run in STDIO mode (default) -uv run prowler-mcp - -# Run in HTTP mode -uv run prowler-mcp --transport http --host 0.0.0.0 --port 8000 - -# Run from anywhere using uvx -uvx /path/to/prowler/mcp_server/ +cd mcp_server && uv run prowler-mcp # STDIO mode +cd mcp_server && uv run prowler-mcp --transport http --port 8000 # HTTP mode ``` --- -## Development Patterns +## QA CHECKLIST -### Adding New Tools to Prowler App - -1. **Create or extend a tool class** in `prowler_app/tools/`: - -```python -# prowler_app/tools/new_feature.py -from pydantic import Field -from prowler_mcp_server.prowler_app.tools.base import BaseTool -from prowler_mcp_server.prowler_app.models.new_feature import FeatureResponse - -class NewFeatureTools(BaseTool): - async def list_features( - self, - status: str | None = Field(default=None, description="Filter by status") - ) -> dict: - """List all features with optional filtering. - - Returns a simplified list of features optimized for LLM consumption. - """ - params = {} - if status: - params["filter[status]"] = status - - clean_params = self.api_client.build_filter_params(params) - response = await self.api_client.get("/api/v1/features", params=clean_params) - - return FeatureResponse.from_api_response(response).model_dump() -``` - -2. 
**Create corresponding models** in `prowler_app/models/`: - -```python -# prowler_app/models/new_feature.py -from pydantic import Field -from prowler_mcp_server.prowler_app.models.base import MinimalSerializerMixin - -class SimplifiedFeature(MinimalSerializerMixin): - """Lightweight feature for list operations.""" - id: str - name: str - status: str - -class DetailedFeature(SimplifiedFeature): - """Extended feature with complete details.""" - description: str | None = None - created_at: str - updated_at: str - - @classmethod - def from_api_response(cls, data: dict) -> "DetailedFeature": - """Transform API response to model.""" - attributes = data.get("attributes", {}) - return cls( - id=data["id"], - name=attributes["name"], - status=attributes["status"], - description=attributes.get("description"), - created_at=attributes["created_at"], - updated_at=attributes["updated_at"], - ) -``` - -3. **No registration needed** - the tool loader auto-discovers BaseTool subclasses - -### Adding Tools to Prowler Hub/Docs - -Use the `@mcp.tool()` decorator directly: - -```python -# prowler_hub/server.py -@hub_mcp_server.tool() -async def new_hub_tool(param: str) -> dict: - """Tool description for LLM.""" - response = prowler_hub_client.get("/endpoint") - return response.json() -``` - ---- - -## Code Quality Standards - -### Tool Docstrings - -Tool docstrings become AI agent descriptions. Write them in a clear, concise manner focusing on LLM-relevant behavior: - -```python -async def search_security_findings( - self, - severity: list[str] = Field(default=[], description="Filter by severity levels") -) -> dict: - """Search security findings with advanced filtering. - - Returns a lightweight list of findings optimized for LLM consumption. - Use get_finding_details for complete information about a specific finding. 
- """ -``` - -### Model Design - -- Use `MinimalSerializerMixin` to exclude None/empty values -- Implement `from_api_response()` for consistent API transformation -- Create two-tier models: Simplified (lists) and Detailed (single items) - -### Error Handling - -Return structured error responses rather than raising exceptions: - -```python -try: - response = await self.api_client.get(f"/api/v1/items/{item_id}") - return DetailedItem.from_api_response(response["data"]).model_dump() -except Exception as e: - self.logger.error(f"Failed to get item {item_id}: {e}") - return {"error": str(e), "status": "failed"} -``` - ---- - -## QA Checklist Before Commit - -- [ ] Tool docstrings are clear and describe LLM-relevant behavior -- [ ] Models use `MinimalSerializerMixin` for LLM optimization -- [ ] API responses are transformed to simplified models -- [ ] No hardcoded secrets or API keys +- [ ] Tool docstrings describe LLM-relevant behavior +- [ ] Models use `MinimalSerializerMixin` +- [ ] API responses transformed to simplified models +- [ ] No hardcoded secrets - [ ] Error handling returns structured responses -- [ ] New tools are auto-discovered (BaseTool subclass) or properly decorated -- [ ] Parameter descriptions use Pydantic `Field()` with clear descriptions - ---- - -## References - -- **Root Project Guide**: `../AGENTS.md` -- **FastMCP Documentation**: https://gofastmcp.com/llms.txt -- **Prowler API Documentation**: https://api.prowler.com/api/v1/docs +- [ ] Parameter descriptions use Pydantic `Field()` diff --git a/prowler/AGENTS.md b/prowler/AGENTS.md index 0a9fb455e8..86c2244edf 100644 --- a/prowler/AGENTS.md +++ b/prowler/AGENTS.md @@ -1,366 +1,125 @@ # Prowler SDK Agent Guide -**Complete guide for AI agents and developers working on the Prowler SDK - the core Python security scanning engine.** +> **Skills Reference**: For detailed patterns, use these skills: +> - [`prowler-sdk-check`](../skills/prowler-sdk-check/SKILL.md) - Create new security checks 
(step-by-step) +> - [`prowler-provider`](../skills/prowler-provider/SKILL.md) - Add new cloud providers +> - [`prowler-test-sdk`](../skills/prowler-test-sdk/SKILL.md) - pytest patterns for SDK +> - [`prowler-compliance`](../skills/prowler-compliance/SKILL.md) - Compliance framework structure +> - [`pytest`](../skills/pytest/SKILL.md) - Generic pytest patterns ## Project Overview -The Prowler SDK is the core Python engine that powers Prowler's cloud security assessment capabilities. It provides: - -- **Multi-cloud Security Scanning**: AWS, Azure, GCP, Kubernetes, GitHub, M365, Oracle Cloud, MongoDB Atlas, and more -- **Compliance Frameworks**: 30+ frameworks including CIS, NIST, PCI-DSS, SOC2, GDPR -- **1000+ Security Checks**: Comprehensive coverage across all supported providers -- **Multiple Output Formats**: JSON, CSV, HTML, ASFF, OCSF, and compliance-specific formats - -## Mission & Scope - -- Maintain and enhance the core Prowler SDK functionality with security and stability as top priorities -- Follow best practices for Python patterns, code style, security, and comprehensive testing -- To get more information about development guidelines, please refer to the Prowler Developer Guide in `docs/developer-guide/` +The Prowler SDK is the core Python engine powering cloud security assessments across AWS, Azure, GCP, Kubernetes, GitHub, M365, and more. It includes 1000+ security checks and 30+ compliance frameworks. --- -## Architecture Rules +## CRITICAL RULES -### 1. 
Provider Architecture Pattern - -All Prowler providers MUST follow the established pattern: +### Provider Architecture ``` prowler/providers/{provider}/ -├── {provider}_provider.py # Main provider class -├── models.py # Provider-specific models -├── config.py # Provider configuration -├── exceptions/ # Provider-specific exceptions -├── lib/ # Provider libraries (as minimun it should have implemented the next folders: service, arguments, mutelist) -│ ├── service/ # Provider-specific service class to be inherited by all services of the provider -│ ├── arguments/ # Provider-specific CLI arguments parser -│ └── mutelist/ # Provider-specific mutelist functionality -└── services/ # All provider services to be audited - └── {service}/ # Individual service - ├── {service}_service.py # Class to fetch the needed resources from the API and store them to be used by the checks - ├── {service}_client.py # Python instance of the service class to be used by the checks - └── {check_name}/ # Individual check folder - ├── {check_name}.py # Python class to implement the check logic - └── {check_name}.metadata.json # JSON file to store the check metadata - └── {check_name_2}/ # Other checks can be added to the same service folder - ├── {check_name_2}.py - └── {check_name_2}.metadata.json - ... - └── {service_2}/ # Other services can be added to the same provider folder - ... +├── {provider}_provider.py # Main provider class +├── models.py # Provider-specific models +├── lib/ # service/, arguments/, mutelist/ +└── services/{service}/ + ├── {service}_service.py # Resource fetcher + ├── {service}_client.py # Singleton instance + └── {check_name}/ # Individual checks + ├── {check_name}.py + └── {check_name}.metadata.json ``` -### 2. 
Check Implementation Standards - -Every security check MUST implement: +### Check Implementation ```python -from prowler.lib.check.models import Check, CheckReport -from prowler.providers..services.._client import _client +from prowler.lib.check.models import Check, CheckReport{Provider} +from prowler.providers.{provider}.services.{service}.{service}_client import {service}_client -class check_name(Check): - """Ensure that meets .""" - def execute(self) -> list[CheckReport]: - """Execute the check logic. - - Returns: - A list of reports containing the result of the check. - """ +class {check_name}(Check): + def execute(self) -> list[CheckReport{Provider}]: findings = [] - # Check implementation here - for resource in _client.: - # Security validation logic - report = CheckReport(metadata=self.metadata(), resource=resource) - report.status = "PASS" | "FAIL" + for resource in {service}_client.{resources}: + report = CheckReport{Provider}(metadata=self.metadata(), resource=resource) + report.status = "PASS" if resource.is_compliant else "FAIL" report.status_extended = "Detailed explanation" - findings.append(report) # Add the report to the list of findings + findings.append(report) return findings ``` -### 3. 
Compliance Framework Integration +### Code Style -All compliance frameworks must be defined in: -- `prowler/compliance/{provider}/{framework}.json` -- Follow the established Compliance model structure -- Include proper requirement mappings and metadata +- Type hints required for all public functions +- Docstrings required for classes and methods (Google style) +- PEP 8 compliance enforced by black/flake8 +- Import order: standard → third-party → local --- -## Tech Stack +## TECH STACK -- **Language**: Python 3.9+ -- **Dependency Management**: Poetry 2+ -- **CLI Framework**: Custom argument parser with provider-specific subcommands -- **Testing**: Pytest with extensive unit and integration tests -- **Code Quality**: Pre-commit hooks for Black, Flake8, Pylint, Bandit for security scanning +Python 3.9+ | Poetry 2+ | pytest | moto (AWS mocking) | Pre-commit hooks (black, flake8, pylint, bandit) -## Commands +--- -### Development Environment - -```bash -# Core development setup -poetry install --with dev # Install all dependencies -poetry run pre-commit install # Install pre-commit hooks - -# Code quality -poetry run pre-commit run --all-files - -# Run tests -poetry run pytest -n auto -vvv -s -x tests/ -``` - -### Running Prowler CLI - -```bash -# Run Prowler -poetry run python prowler-cli.py --help - -# Run Prowler with a specific provider -poetry run python prowler-cli.py - -# Run Prowler with error logging -poetry run python prowler-cli.py --log-level ERROR --verbose - -# Run specific checks -poetry run python prowler-cli.py --checks -``` - -## Project Structure +## PROJECT STRUCTURE ``` prowler/ -├── __main__.py # Main CLI entry point -├── config/ # Global configuration -│ ├── config.py # Core configuration settings -│ └── __init__.py -├── lib/ # Core library functions -│ ├── check/ # Check execution engine -│ │ ├── check.py # Check execution logic -│ │ ├── checks_loader.py # Dynamic check loading -│ │ ├── compliance.py # Compliance framework handling -│ │ └── 
models.py # Check and report models -│ ├── cli/ # Command-line interface -│ │ └── parser.py # Argument parsing -│ ├── outputs/ # Output format handlers -│ │ ├── csv/ # CSV output -│ │ ├── html/ # HTML reports -│ │ ├── json/ # JSON formats -│ │ └── compliance/ # Compliance reports -│ ├── scan/ # Scan orchestration -│ ├── utils/ # Utility functions -│ └── mutelist/ # Mute list functionality -├── providers/ # Cloud provider implementations -│ ├── aws/ # AWS provider -│ ├── azure/ # Azure provider -│ ├── gcp/ # Google Cloud provider -│ ├── kubernetes/ # Kubernetes provider -│ ├── github/ # GitHub provider -│ ├── m365/ # Microsoft 365 provider -│ ├── mongodbatlas/ # MongoDB Atlas provider -│ ├── oci/ # Oracle Cloud provider -│ ├── ... -│ └── common/ # Shared provider utilities -├── compliance/ # Compliance framework definitions -│ ├── aws/ # AWS compliance frameworks -│ ├── azure/ # Azure compliance frameworks -│ ├── gcp/ # GCP compliance frameworks -│ ├── ... -└── exceptions/ # Global exception definitions +├── __main__.py # CLI entry point +├── config/ # Global configuration +├── lib/ +│ ├── check/ # Check execution engine +│ ├── cli/ # Command-line interface +│ ├── outputs/ # Output format handlers (JSON, CSV, HTML, ASFF, OCSF) +│ └── mutelist/ # Mute list functionality +├── providers/ # Cloud providers (aws, azure, gcp, kubernetes, github, m365...) +│ └── common/ # Shared provider utilities +├── compliance/ # Compliance framework definitions (CIS, NIST, PCI-DSS, SOC2...) +└── exceptions/ # Global exceptions ``` -## Key Components +--- -### 1. 
Provider System +## COMMANDS -Each cloud provider implements: - -```python -class Provider: - """Base provider class""" - - def __init__(self, arguments): - self.session = self._setup_session(arguments) - self.regions = self._get_regions() - # Initialize all services - - def _setup_session(self, arguments): - """Provider-specific authentication""" - pass - - def _get_regions(self): - """Get available regions for provider""" - pass -``` - -### 2. Check Engine - -The check execution system: - -- **Dynamic Loading**: Automatically discovers and loads checks -- **Parallel Execution**: Runs checks in parallel for performance -- **Error Isolation**: Individual check failures don't affect others -- **Comprehensive Reporting**: Detailed findings with remediation guidance - -### 3. Compliance Framework Engine - -Compliance frameworks are defined as JSON files mapping checks to requirements: - -```json -{ - "Framework": "CIS", - "Name": "CIS Amazon Web Services Foundations Benchmark v2.0.0", - "Version": "2.0", - "Provider": "AWS", - "Description": "The CIS Amazon Web Services Foundations Benchmark provides prescriptive guidance for configuring security options for a subset of Amazon Web Services with an emphasis on foundational, testable, and architecture agnostic settings.", - "Requirements": [ - { - "Id": "1.1", - "Description": "Maintain current contact details", - "Checks": ["account_contact_details_configured"] - } - ] -} -``` - -### 4. Output System - -Multiple output formats supported: - -- **JSON**: Machine-readable findings -- **CSV**: Spreadsheet-compatible format -- **HTML**: Interactive web reports -- **ASFF**: AWS Security Finding Format -- **OCSF**: Open Cybersecurity Schema Framework - -## Development Patterns - -### Adding New Cloud Providers - -1. 
**Create Provider Structure**: ```bash -mkdir -p prowler/providers/{provider} -mkdir -p prowler/providers/{provider}/services -mkdir -p prowler/providers/{provider}/lib/{service,arguments,mutelist} -mkdir -p prowler/providers/{provider}/exceptions +# Setup +poetry install --with dev +poetry run pre-commit install + +# Run Prowler +poetry run python prowler-cli.py {provider} +poetry run python prowler-cli.py {provider} --check {check_name} +poetry run python prowler-cli.py {provider} --list-checks + +# Testing +poetry run pytest -n auto -vvv tests/ +poetry run pytest tests/providers/{provider}/services/{service}/ -v + +# Code Quality +poetry run pre-commit run --all-files ``` -2. **Implement Provider Class**: -```python -from prowler.providers.common.provider import Provider +--- -class NewProvider(Provider): - def __init__(self, arguments): - super().__init__(arguments) - # Provider-specific initialization -``` +## CREATING NEW CHECKS (Quick Reference) -3. **Add Provider to CLI**: -Update `prowler/lib/cli/parser.py` to include new provider arguments. +1. Verify check doesn't exist: `--list-checks | grep {check_name}` +2. Create folder: `prowler/providers/{provider}/services/{service}/{check_name}/` +3. Create files: `__init__.py`, `{check_name}.py`, `{check_name}.metadata.json` +4. Implement check logic +5. Test locally: `--check {check_name}` +6. Write tests -### Adding New Security Checks +**For detailed guidance, use the `prowler-sdk-check` skill.** -The most common high level steps to create a new check are: +--- -1. Prerequisites: - - Verify the check does not already exist by searching in the same service folder as `prowler/providers//services///`. - - Ensure required provider and service exist. If not, you will need to create them first. - - Confirm the service has implemented all required methods and attributes for the check (in most cases, you will need to add or modify some methods in the service to get the data you need for the check). -2. 
Navigate to the service directory. The path should be as follows: `prowler/providers//services/`. -3. Create a check-specific folder. The path should follow this pattern: `prowler/providers//services//`. Adhere to the [Naming Format for Checks](/developer-guide/checks#naming-format-for-checks). -4. Create the check files, you can use next commands: -```bash -mkdir -p prowler/providers//services// -touch prowler/providers//services///__init__.py -touch prowler/providers//services///.py -touch prowler/providers//services///.metadata.json -``` -5. Run the check locally to ensure it works as expected. For checking you can use the CLI in the next way: - - To ensure the check has been detected by Prowler: `poetry run python prowler-cli.py --list-checks | grep `. - - To run the check, to find possible issues: `poetry run python prowler-cli.py --log-level ERROR --verbose --check `. -6. Create comprehensive tests for the check that cover multiple scenarios including both PASS (compliant) and FAIL (non-compliant) cases. For detailed information about test structure and implementation guidelines, refer to the [Testing](/developer-guide/unit-testing) documentation. -7. If the check and its corresponding tests are working as expected, you can submit a PR to Prowler. +## QA CHECKLIST -### Adding Compliance Frameworks - -1. **Create Framework File**: -```bash -# Create prowler/compliance/{provider}/{framework}.json -``` - -2. **Define Requirements**: -Map framework requirements to existing checks. - -3. **Test Compliance**: -```bash -poetry run python -m prowler {provider} --compliance {framework} -``` - -## Code Quality Standards - -### 1. 
Python Style - -- **PEP 8 Compliance**: Enforced by black and flake8 -- **Type Hints**: Required for all public functions -- **Docstrings**: Required for all classes and methods -- **Import Organization**: Use isort for consistent import ordering - -```python -import standard_library - -from third_party import library - -from prowler.lib import internal_module - -class ExampleClass: - """Class docstring.""" - - def method(self, param: str) -> dict | list | None: - """Method docstring. - - Args: - param: Description of parameter - - Returns: - Description of return value - """ - return None -``` - -### 2. Error Handling - -```python -from prowler.lib.logger import logger - -try: - # Risky operation - result = api_call() -except ProviderSpecificException as e: - logger.error(f"Provider error: {e}") - # Graceful handling -except Exception as e: - logger.error(f"Unexpected error: {e}") - # Never let checks crash the entire scan -``` - -### 3. Security Practices - -- **No Hardcoded Secrets**: Use environment variables or secure credential management -- **Input Validation**: Validate all external inputs -- **Principle of Least Privilege**: Request minimal necessary permissions -- **Secure Defaults**: Default to secure configurations - -## Testing Guidelines - -### Unit Tests - -- **100% Coverage Goal**: Aim for complete test coverage -- **Mock External Services**: Use mock objects to simulate the external services -- **Test Edge Cases**: Include error conditions and boundary cases - -## References - -- **Root Project Guide**: `../AGENTS.md` (takes priority for cross-component guidance) -- **Provider Examples**: Reference existing providers for implementation patterns -- **Check Examples**: Study existing checks for proper implementation patterns -- **Compliance Framework Examples**: Review existing frameworks for structure +- [ ] `poetry run pytest` passes +- [ ] `poetry run pre-commit run --all-files` passes +- [ ] Check metadata JSON is valid +- [ ] Tests cover PASS, 
FAIL, and empty resource scenarios +- [ ] Docstrings follow Google style diff --git a/prowler/CHANGELOG.md b/prowler/CHANGELOG.md index 7fd5b6b734..b202455fa0 100644 --- a/prowler/CHANGELOG.md +++ b/prowler/CHANGELOG.md @@ -5,6 +5,7 @@ All notable changes to the **Prowler SDK** are documented in this file. ## [5.17.0] (Prowler UNRELEASED) ### Added +- AI Skills pack for AI coding assistants (Claude Code, OpenCode, Codex) following agentskills.io standard [(#9728)](https://github.com/prowler-cloud/prowler/pull/9728) - Add Prowler ThreatScore for the Alibaba Cloud provider [(#9511)](https://github.com/prowler-cloud/prowler/pull/9511) - `compute_instance_group_multiple_zones` check for GCP provider [(#9566)](https://github.com/prowler-cloud/prowler/pull/9566) - `compute_instance_group_autohealing_enabled` check for GCP provider [(#9690)](https://github.com/prowler-cloud/prowler/pull/9690) diff --git a/skills/README.md b/skills/README.md new file mode 100644 index 0000000000..becdffaf9b --- /dev/null +++ b/skills/README.md @@ -0,0 +1,127 @@ +# AI Agent Skills + +This directory contains **Agent Skills** following the [Agent Skills open standard](https://agentskills.io). Skills provide domain-specific patterns, conventions, and guardrails that help AI coding assistants (Claude Code, OpenCode, Cursor, etc.) understand project-specific requirements. + +## What Are Skills? + +[Agent Skills](https://agentskills.io) is an open standard format for extending AI agent capabilities with specialized knowledge. Originally developed by Anthropic and released as an open standard, it is now adopted by multiple agent products. + +Skills teach AI assistants how to perform specific tasks. 
When an AI loads a skill, it gains context about: + +- Critical rules (what to always/never do) +- Code patterns and conventions +- Project-specific workflows +- References to detailed documentation + +## Setup + +Run the setup script to configure skills for all supported AI coding assistants: + +```bash +./skills/setup.sh +``` + +This creates symlinks so each tool finds skills in its expected location: + +| Tool | Symlink Created | +|------|-----------------| +| Claude Code / OpenCode | `.claude/skills/` | +| Codex (OpenAI) | `.codex/skills/` | +| GitHub Copilot | `.github/skills/` | +| Gemini CLI | `.gemini/skills/` | + +After running setup, restart your AI coding assistant to load the skills. + +## How to Use Skills + +Skills are automatically discovered by the AI agent. To manually load a skill during a session: + +``` +Read skills/{skill-name}/SKILL.md +``` + +## Available Skills + +### Generic Skills + +Reusable patterns for common technologies: + +| Skill | Description | +|-------|-------------| +| `typescript` | Const types, flat interfaces, utility types | +| `react-19` | React 19 patterns, React Compiler | +| `nextjs-15` | App Router, Server Actions, streaming | +| `tailwind-4` | cn() utility, Tailwind 4 patterns | +| `playwright` | Page Object Model, selectors | +| `pytest` | Fixtures, mocking, markers | +| `django-drf` | ViewSets, Serializers, Filters | +| `zod-4` | Zod 4 API patterns | +| `zustand-5` | Persist, selectors, slices | +| `ai-sdk-5` | Vercel AI SDK patterns | + +### Prowler-Specific Skills + +Patterns tailored for Prowler development: + +| Skill | Description | +|-------|-------------| +| `prowler` | Project overview, component navigation | +| `prowler-api` | Django + RLS + JSON:API patterns | +| `prowler-ui` | Next.js + shadcn conventions | +| `prowler-sdk-check` | Create new security checks | +| `prowler-mcp` | MCP server tools and models | +| `prowler-test-sdk` | SDK testing (pytest + moto) | +| `prowler-test-api` | API testing 
(pytest-django + RLS) | +| `prowler-test-ui` | E2E testing (Playwright) | +| `prowler-compliance` | Compliance framework structure | +| `prowler-provider` | Add new cloud providers | +| `prowler-pr` | Pull request conventions | +| `prowler-docs` | Documentation style guide | + +### Meta Skills + +| Skill | Description | +|-------|-------------| +| `skill-creator` | Create new AI agent skills | + +## Directory Structure + +``` +skills/ +├── {skill-name}/ +│ ├── SKILL.md # Required - main instruction and metadata +│ ├── scripts/ # Optional - executable code +│ ├── assets/ # Optional - templates, schemas, resources +│ └── references/ # Optional - links to local docs +└── README.md # This file +``` + +## Creating New Skills + +Use the `skill-creator` skill for guidance: + +``` +Read skills/skill-creator/SKILL.md +``` + +### Quick Checklist + +1. Create directory: `skills/{skill-name}/` +2. Add `SKILL.md` with required frontmatter +3. Keep content concise (under 500 lines) +4. Reference existing docs instead of duplicating +5. Add to `AGENTS.md` skills table + +## Design Principles + +- **Concise**: Only include what AI doesn't already know +- **Progressive disclosure**: Point to detailed docs, don't duplicate +- **Critical rules first**: Lead with ALWAYS/NEVER patterns +- **Minimal examples**: Show patterns, not tutorials + +## Resources + +- [Agent Skills Standard](https://agentskills.io) - Open standard specification +- [Agent Skills GitHub](https://github.com/anthropics/skills) - Example skills +- [Claude Code Best Practices](https://platform.claude.com/docs/en/agents-and-tools/agent-skills/best-practices) - Skill authoring guide +- [Prowler AGENTS.md](../AGENTS.md) - AI agent general rules diff --git a/skills/ai-sdk-5/SKILL.md b/skills/ai-sdk-5/SKILL.md new file mode 100644 index 0000000000..3b1c979f73 --- /dev/null +++ b/skills/ai-sdk-5/SKILL.md @@ -0,0 +1,234 @@ +--- +name: ai-sdk-5 +description: > + Vercel AI SDK 5 patterns.
+ Trigger: When building AI chat features - breaking changes from v4. +license: Apache-2.0 +metadata: + author: prowler-cloud + version: "1.0" +allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, WebSearch, Task +--- + +## Breaking Changes from AI SDK 4 + +```typescript +// ❌ AI SDK 4 (OLD) +import { useChat } from "ai"; +const { messages, handleSubmit, input, handleInputChange } = useChat({ + api: "/api/chat", +}); + +// ✅ AI SDK 5 (NEW) +import { useChat } from "@ai-sdk/react"; +import { DefaultChatTransport } from "ai"; + +const { messages, sendMessage } = useChat({ + transport: new DefaultChatTransport({ api: "/api/chat" }), +}); +``` + +## Client Setup + +```typescript +import { useChat } from "@ai-sdk/react"; +import { DefaultChatTransport } from "ai"; +import { useState } from "react"; + +export function Chat() { + const [input, setInput] = useState(""); + + const { messages, sendMessage, isLoading, error } = useChat({ + transport: new DefaultChatTransport({ api: "/api/chat" }), + }); + + const handleSubmit = (e: React.FormEvent) => { + e.preventDefault(); + if (!input.trim()) return; + sendMessage({ text: input }); + setInput(""); + }; + + return ( +
+
+ {messages.map((message) => ( + + ))} +
+ +
+ setInput(e.target.value)} + placeholder="Type a message..." + disabled={isLoading} + /> + +
+ + {error &&
Error: {error.message}
} +
+ ); +} +``` + +## UIMessage Structure (v5) + +```typescript +// ❌ Old: message.content was a string +// ✅ New: message.parts is an array + +interface UIMessage { + id: string; + role: "user" | "assistant" | "system"; + parts: MessagePart[]; +} + +type MessagePart = + | { type: "text"; text: string } + | { type: "image"; image: string } + | { type: "tool-call"; toolCallId: string; toolName: string; args: unknown } + | { type: "tool-result"; toolCallId: string; result: unknown }; + +// Extract text from parts +function getMessageText(message: UIMessage): string { + return message.parts + .filter((part): part is { type: "text"; text: string } => part.type === "text") + .map((part) => part.text) + .join(""); +} + +// Render message +function Message({ message }: { message: UIMessage }) { + return ( +
+ {message.parts.map((part, index) => { + if (part.type === "text") { + return

{part.text}

; + } + if (part.type === "image") { + return ; + } + return null; + })} +
+ ); +} +``` + +## Server-Side (Route Handler) + +```typescript +// app/api/chat/route.ts +import { openai } from "@ai-sdk/openai"; +import { streamText } from "ai"; + +export async function POST(req: Request) { + const { messages } = await req.json(); + + const result = await streamText({ + model: openai("gpt-4o"), + messages, + system: "You are a helpful assistant.", + }); + + return result.toDataStreamResponse(); +} +``` + +## With LangChain + +```typescript +// app/api/chat/route.ts +import { toUIMessageStream } from "@ai-sdk/langchain"; +import { ChatOpenAI } from "@langchain/openai"; +import { HumanMessage, AIMessage } from "@langchain/core/messages"; + +export async function POST(req: Request) { + const { messages } = await req.json(); + + const model = new ChatOpenAI({ + modelName: "gpt-4o", + streaming: true, + }); + + // Convert UI messages to LangChain format + const langchainMessages = messages.map((m) => { + const text = m.parts + .filter((p) => p.type === "text") + .map((p) => p.text) + .join(""); + return m.role === "user" + ? 
new HumanMessage(text) + : new AIMessage(text); + }); + + const stream = await model.stream(langchainMessages); + + return toUIMessageStream(stream).toDataStreamResponse(); +} +``` + +## Streaming with Tools + +```typescript +import { openai } from "@ai-sdk/openai"; +import { streamText, tool } from "ai"; +import { z } from "zod"; + +const result = await streamText({ + model: openai("gpt-4o"), + messages, + tools: { + getWeather: tool({ + description: "Get weather for a location", + parameters: z.object({ + location: z.string().describe("City name"), + }), + execute: async ({ location }) => { + // Fetch weather data + return { temperature: 72, condition: "sunny" }; + }, + }), + }, +}); +``` + +## useCompletion (Text Generation) + +```typescript +import { useCompletion } from "@ai-sdk/react"; +import { DefaultCompletionTransport } from "ai"; + +const { completion, complete, isLoading } = useCompletion({ + transport: new DefaultCompletionTransport({ api: "/api/complete" }), +}); + +// Trigger completion +await complete("Write a haiku about"); +``` + +## Error Handling + +```typescript +const { error, messages, sendMessage } = useChat({ + transport: new DefaultChatTransport({ api: "/api/chat" }), + onError: (error) => { + console.error("Chat error:", error); + toast.error("Failed to send message"); + }, +}); + +// Display error +{error && ( +
+ {error.message} + +
+)} +``` diff --git a/skills/django-drf/SKILL.md b/skills/django-drf/SKILL.md new file mode 100644 index 0000000000..03b43fc64e --- /dev/null +++ b/skills/django-drf/SKILL.md @@ -0,0 +1,184 @@ +--- +name: django-drf +description: > + Django REST Framework patterns. + Trigger: When building REST APIs with Django - ViewSets, Serializers, Filters. +license: Apache-2.0 +metadata: + author: prowler-cloud + version: "1.0" +allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, WebSearch, Task +--- + +## ViewSet Pattern + +```python +from rest_framework import viewsets, status +from rest_framework.response import Response +from rest_framework.decorators import action + +class UserViewSet(viewsets.ModelViewSet): + queryset = User.objects.all() + serializer_class = UserSerializer + filterset_class = UserFilter + permission_classes = [IsAuthenticated] + + def get_serializer_class(self): + if self.action == "create": + return UserCreateSerializer + if self.action in ["update", "partial_update"]: + return UserUpdateSerializer + return UserSerializer + + @action(detail=True, methods=["post"]) + def activate(self, request, pk=None): + user = self.get_object() + user.is_active = True + user.save() + return Response({"status": "activated"}) +``` + +## Serializer Patterns + +```python +from rest_framework import serializers + +# Read Serializer +class UserSerializer(serializers.ModelSerializer): + full_name = serializers.SerializerMethodField() + + class Meta: + model = User + fields = ["id", "email", "full_name", "created_at"] + read_only_fields = ["id", "created_at"] + + def get_full_name(self, obj): + return f"{obj.first_name} {obj.last_name}" + +# Create Serializer +class UserCreateSerializer(serializers.ModelSerializer): + password = serializers.CharField(write_only=True) + + class Meta: + model = User + fields = ["email", "password", "first_name", "last_name"] + + def create(self, validated_data): + password = validated_data.pop("password") + user = 
User(**validated_data) + user.set_password(password) + user.save() + return user + +# Update Serializer +class UserUpdateSerializer(serializers.ModelSerializer): + class Meta: + model = User + fields = ["first_name", "last_name"] +``` + +## Filters + +```python +from django_filters import rest_framework as filters + +class UserFilter(filters.FilterSet): + email = filters.CharFilter(lookup_expr="icontains") + is_active = filters.BooleanFilter() + created_after = filters.DateTimeFilter( + field_name="created_at", + lookup_expr="gte" + ) + created_before = filters.DateTimeFilter( + field_name="created_at", + lookup_expr="lte" + ) + + class Meta: + model = User + fields = ["email", "is_active"] +``` + +## Permissions + +```python +from rest_framework.permissions import BasePermission + +class IsOwner(BasePermission): + def has_object_permission(self, request, view, obj): + return obj.owner == request.user + +class IsAdminOrReadOnly(BasePermission): + def has_permission(self, request, view): + if request.method in ["GET", "HEAD", "OPTIONS"]: + return True + return request.user.is_staff +``` + +## Pagination + +```python +from rest_framework.pagination import PageNumberPagination + +class StandardPagination(PageNumberPagination): + page_size = 20 + page_size_query_param = "page_size" + max_page_size = 100 + +# settings.py +REST_FRAMEWORK = { + "DEFAULT_PAGINATION_CLASS": "api.pagination.StandardPagination", +} +``` + +## URL Routing + +```python +from rest_framework.routers import DefaultRouter + +router = DefaultRouter() +router.register(r"users", UserViewSet, basename="user") +router.register(r"posts", PostViewSet, basename="post") + +urlpatterns = [ + path("api/v1/", include(router.urls)), +] +``` + +## Testing + +```python +import pytest +from rest_framework import status +from rest_framework.test import APIClient + +@pytest.fixture +def api_client(): + return APIClient() + +@pytest.fixture +def authenticated_client(api_client, user): + 
api_client.force_authenticate(user=user) + return api_client + +@pytest.mark.django_db +class TestUserViewSet: + def test_list_users(self, authenticated_client): + response = authenticated_client.get("/api/v1/users/") + assert response.status_code == status.HTTP_200_OK + + def test_create_user(self, authenticated_client): + data = {"email": "new@test.com", "password": "pass123"} + response = authenticated_client.post("/api/v1/users/", data) + assert response.status_code == status.HTTP_201_CREATED +``` + +## Commands + +```bash +python manage.py runserver +python manage.py makemigrations +python manage.py migrate +python manage.py createsuperuser +python manage.py shell +``` diff --git a/skills/nextjs-15/SKILL.md b/skills/nextjs-15/SKILL.md new file mode 100644 index 0000000000..4fa47fd619 --- /dev/null +++ b/skills/nextjs-15/SKILL.md @@ -0,0 +1,148 @@ +--- +name: nextjs-15 +description: > + Next.js 15 App Router patterns. + Trigger: When working with Next.js - routing, Server Actions, data fetching. 
+license: Apache-2.0 +metadata: + author: prowler-cloud + version: "1.0" +allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, WebSearch, Task +--- + +## App Router File Conventions + +``` +app/ +├── layout.tsx # Root layout (required) +├── page.tsx # Home page (/) +├── loading.tsx # Loading UI (Suspense) +├── error.tsx # Error boundary +├── not-found.tsx # 404 page +├── (auth)/ # Route group (no URL impact) +│ ├── login/page.tsx # /login +│ └── signup/page.tsx # /signup +├── api/ +│ └── route.ts # API handler +└── _components/ # Private folder (not routed) +``` + +## Server Components (Default) + +```typescript +// No directive needed - async by default +export default async function Page() { + const data = await db.query(); + return ; +} +``` + +## Server Actions + +```typescript +// app/actions.ts +"use server"; + +import { revalidatePath } from "next/cache"; +import { redirect } from "next/navigation"; + +export async function createUser(formData: FormData) { + const name = formData.get("name") as string; + + await db.users.create({ data: { name } }); + + revalidatePath("/users"); + redirect("/users"); +} + +// Usage +
+<form action={createUser}>
+  <input name="name" required /> <button type="submit">Create</button>
+</form>
+``` + +## Data Fetching + +```typescript +// Parallel +async function Page() { + const [users, posts] = await Promise.all([ + getUsers(), + getPosts(), + ]); + return ; +} + +// Streaming with Suspense +}> + + +``` + +## Route Handlers (API) + +```typescript +// app/api/users/route.ts +import { NextRequest, NextResponse } from "next/server"; + +export async function GET(request: NextRequest) { + const users = await db.users.findMany(); + return NextResponse.json(users); +} + +export async function POST(request: NextRequest) { + const body = await request.json(); + const user = await db.users.create({ data: body }); + return NextResponse.json(user, { status: 201 }); +} +``` + +## Middleware + +```typescript +// middleware.ts (root level) +import { NextResponse } from "next/server"; +import type { NextRequest } from "next/server"; + +export function middleware(request: NextRequest) { + const token = request.cookies.get("token"); + + if (!token && request.nextUrl.pathname.startsWith("/dashboard")) { + return NextResponse.redirect(new URL("/login", request.url)); + } + + return NextResponse.next(); +} + +export const config = { + matcher: ["/dashboard/:path*"], +}; +``` + +## Metadata + +```typescript +// Static +export const metadata = { + title: "My App", + description: "Description", +}; + +// Dynamic +export async function generateMetadata({ params }) { + const product = await getProduct(params.id); + return { title: product.name }; +} +``` + +## server-only Package + +```typescript +import "server-only"; + +// This will error if imported in client component +export async function getSecretData() { + return db.secrets.findMany(); +} +``` diff --git a/skills/playwright/SKILL.md b/skills/playwright/SKILL.md new file mode 100644 index 0000000000..34e71aadd1 --- /dev/null +++ b/skills/playwright/SKILL.md @@ -0,0 +1,324 @@ +--- +name: playwright +description: > + Playwright E2E testing patterns. + Trigger: When writing E2E tests - Page Objects, selectors, MCP workflow. 
+license: Apache-2.0 +metadata: + author: prowler-cloud + version: "1.0" +allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, WebSearch, Task +--- + +## MCP Workflow (MANDATORY If Available) + +**⚠️ If you have Playwright MCP tools, ALWAYS use them BEFORE creating any test:** + +1. **Navigate** to target page +2. **Take snapshot** to see page structure and elements +3. **Interact** with forms/elements to verify exact user flow +4. **Take screenshots** to document expected states +5. **Verify page transitions** through complete flow (loading, success, error) +6. **Document actual selectors** from snapshots (use real refs and labels) +7. **Only after exploring** create test code with verified selectors + +**If MCP NOT available:** Proceed with test creation based on docs and code analysis. + +**Why This Matters:** +- ✅ Precise tests - exact steps needed, no assumptions +- ✅ Accurate selectors - real DOM structure, not imagined +- ✅ Real flow validation - verify journey actually works +- ✅ Avoid over-engineering - minimal tests for what exists +- ✅ Prevent flaky tests - real exploration = stable tests +- ❌ Never assume how UI "should" work + +## File Structure + +``` +tests/ +├── base-page.ts # Parent class for ALL pages +├── helpers.ts # Shared utilities +└── {page-name}/ + ├── {page-name}-page.ts # Page Object Model + ├── {page-name}.spec.ts # ALL tests here (NO separate files!) + └── {page-name}.md # Test documentation +``` + +**File Naming:** +- ✅ `sign-up.spec.ts` (all sign-up tests) +- ✅ `sign-up-page.ts` (page object) +- ✅ `sign-up.md` (documentation) +- ❌ `sign-up-critical-path.spec.ts` (WRONG - no separate files) +- ❌ `sign-up-validation.spec.ts` (WRONG) + +## Selector Priority (REQUIRED) + +```typescript +// 1. BEST - getByRole for interactive elements +this.submitButton = page.getByRole("button", { name: "Submit" }); +this.navLink = page.getByRole("link", { name: "Dashboard" }); + +// 2. 
BEST - getByLabel for form controls +this.emailInput = page.getByLabel("Email"); +this.passwordInput = page.getByLabel("Password"); + +// 3. SPARINGLY - getByText for static content only +this.errorMessage = page.getByText("Invalid credentials"); +this.pageTitle = page.getByText("Welcome"); + +// 4. LAST RESORT - getByTestId when above fail +this.customWidget = page.getByTestId("date-picker"); + +// ❌ AVOID fragile selectors +this.button = page.locator(".btn-primary"); // NO +this.input = page.locator("#email"); // NO +``` + +## Scope Detection (ASK IF AMBIGUOUS) + +| User Says | Action | +|-----------|--------| +| "a test", "one test", "new test", "add test" | Create ONE test() in existing spec | +| "comprehensive tests", "all tests", "test suite", "generate tests" | Create full suite | + +**Examples:** +- "Create a test for user sign-up" → ONE test only +- "Generate E2E tests for login page" → Full suite +- "Add a test to verify form validation" → ONE test to existing spec + +## Page Object Pattern + +```typescript +import { Page, Locator, expect } from "@playwright/test"; + +// BasePage - ALL pages extend this +export class BasePage { + constructor(protected page: Page) {} + + async goto(path: string): Promise { + await this.page.goto(path); + await this.page.waitForLoadState("networkidle"); + } + + // Common methods go here (see Refactoring Guidelines) + async waitForNotification(): Promise { + await this.page.waitForSelector('[role="status"]'); + } + + async verifyNotificationMessage(message: string): Promise { + const notification = this.page.locator('[role="status"]'); + await expect(notification).toContainText(message); + } +} + +// Page-specific implementation +export interface LoginData { + email: string; + password: string; +} + +export class LoginPage extends BasePage { + readonly emailInput: Locator; + readonly passwordInput: Locator; + readonly submitButton: Locator; + + constructor(page: Page) { + super(page); + this.emailInput = 
page.getByLabel("Email"); + this.passwordInput = page.getByLabel("Password"); + this.submitButton = page.getByRole("button", { name: "Sign in" }); + } + + async goto(): Promise { + await super.goto("/login"); + } + + async login(data: LoginData): Promise { + await this.emailInput.fill(data.email); + await this.passwordInput.fill(data.password); + await this.submitButton.click(); + } + + async verifyCriticalOutcome(): Promise { + await expect(this.page).toHaveURL("/dashboard"); + } +} +``` + +## Page Object Reuse (CRITICAL) + +**Always check existing page objects before creating new ones!** + +```typescript +// ✅ GOOD: Reuse existing page objects +import { SignInPage } from "../sign-in/sign-in-page"; +import { HomePage } from "../home/home-page"; + +test("User can sign up and login", async ({ page }) => { + const signUpPage = new SignUpPage(page); + const signInPage = new SignInPage(page); // REUSE + const homePage = new HomePage(page); // REUSE + + await signUpPage.signUp(userData); + await homePage.verifyPageLoaded(); // REUSE method + await homePage.signOut(); // REUSE method + await signInPage.login(credentials); // REUSE method +}); + +// ❌ BAD: Recreating existing functionality +export class SignUpPage extends BasePage { + async logout() { /* ... */ } // ❌ HomePage already has this + async login() { /* ... 
*/ } // ❌ SignInPage already has this +} +``` + +**Guidelines:** +- Check `tests/` for existing page objects first +- Import and reuse existing pages +- Create page objects only when page doesn't exist +- If test requires multiple pages, ensure all page objects exist (create if needed) + +## Refactoring Guidelines + +### Move to `BasePage` when: +- ✅ Navigation helpers used by multiple pages (`waitForPageLoad()`, `getCurrentUrl()`) +- ✅ Common UI interactions (notifications, modals, theme toggles) +- ✅ Verification patterns repeated across pages (`isVisible()`, `waitForVisible()`) +- ✅ Error handling that applies to all pages +- ✅ Screenshot utilities for debugging + +### Move to `helpers.ts` when: +- ✅ Test data generation (`generateUniqueEmail()`, `generateTestUser()`) +- ✅ Setup/teardown utilities (`createTestUser()`, `cleanupTestData()`) +- ✅ Custom assertions (`expectNotificationToContain()`) +- ✅ API helpers for test setup (`seedDatabase()`, `resetState()`) +- ✅ Time utilities (`waitForCondition()`, `retryAction()`) + +**Before (BAD):** +```typescript +// Repeated in multiple page objects +export class SignUpPage extends BasePage { + async waitForNotification(): Promise { + await this.page.waitForSelector('[role="status"]'); + } +} +export class SignInPage extends BasePage { + async waitForNotification(): Promise { + await this.page.waitForSelector('[role="status"]'); // DUPLICATED! 
+ } +} +``` + +**After (GOOD):** +```typescript +// BasePage - shared across all pages +export class BasePage { + async waitForNotification(): Promise { + await this.page.waitForSelector('[role="status"]'); + } +} + +// helpers.ts - data generation +export function generateUniqueEmail(): string { + return `test.${Date.now()}@example.com`; +} + +export function generateTestUser() { + return { + name: "Test User", + email: generateUniqueEmail(), + password: "TestPassword123!", + }; +} +``` + +## Test Pattern with Tags + +```typescript +import { test, expect } from "@playwright/test"; +import { LoginPage } from "./login-page"; + +test.describe("Login", () => { + test("User can login successfully", + { tag: ["@critical", "@e2e", "@login", "@LOGIN-E2E-001"] }, + async ({ page }) => { + const loginPage = new LoginPage(page); + + await loginPage.goto(); + await loginPage.login({ email: "user@test.com", password: "pass123" }); + + await expect(page).toHaveURL("/dashboard"); + } + ); +}); +``` + +**Tag Categories:** +- Priority: `@critical`, `@high`, `@medium`, `@low` +- Type: `@e2e` +- Feature: `@signup`, `@signin`, `@dashboard` +- Test ID: `@SIGNUP-E2E-001`, `@LOGIN-E2E-002` + +## Test Documentation Format ({page-name}.md) + +```markdown +### E2E Tests: {Feature Name} + +**Suite ID:** `{SUITE-ID}` +**Feature:** {Feature description} + +--- + +## Test Case: `{TEST-ID}` - {Test case title} + +**Priority:** `{critical|high|medium|low}` + +**Tags:** +- type → @e2e +- feature → @{feature-name} + +**Description/Objective:** {Brief description} + +**Preconditions:** +- {Prerequisites for test to run} +- {Required data or state} + +### Flow Steps: +1. {Step 1} +2. {Step 2} +3. 
{Step 3} + +### Expected Result: +- {Expected outcome 1} +- {Expected outcome 2} + +### Key verification points: +- {Assertion 1} +- {Assertion 2} + +### Notes: +- {Additional considerations} +``` + +**Documentation Rules:** +- ❌ NO general test running instructions +- ❌ NO file structure explanations +- ❌ NO code examples or tutorials +- ❌ NO troubleshooting sections +- ✅ Focus ONLY on specific test case +- ✅ Keep under 60 lines when possible + +## Commands + +```bash +npx playwright test # Run all +npx playwright test --grep "login" # Filter by name +npx playwright test --ui # Interactive UI +npx playwright test --debug # Debug mode +npx playwright test tests/login/ # Run specific folder +``` + +## Prowler-Specific Patterns + +For Prowler UI E2E testing with authentication setup, environment variables, and test IDs, see: +- **Documentation**: [references/prowler-e2e.md](references/prowler-e2e.md) diff --git a/skills/playwright/references/prowler-e2e.md b/skills/playwright/references/prowler-e2e.md new file mode 100644 index 0000000000..bc9efaf78e --- /dev/null +++ b/skills/playwright/references/prowler-e2e.md @@ -0,0 +1,16 @@ +# Prowler-Specific E2E Patterns + +## Local Documentation + +For Prowler-specific Playwright patterns, see: + +- `docs/developer-guide/end2end-testing.mdx` - Complete E2E testing guide + +## Contents + +The Prowler documentation covers patterns NOT in the generic playwright skill: +- Authentication setup projects (`admin.auth.setup`, `member.auth.setup`, etc.) +- Environment variables (`E2E_AWS_PROVIDER_ACCOUNT_ID`, etc.) 
+- Page Object location (`ui/tests/`) +- Test ID conventions (`@PROVIDER-E2E-001`, `@SCANS-E2E-001`) +- Serial test requirements for data-dependent tests diff --git a/skills/prowler-api/SKILL.md b/skills/prowler-api/SKILL.md new file mode 100644 index 0000000000..62f3c15156 --- /dev/null +++ b/skills/prowler-api/SKILL.md @@ -0,0 +1,137 @@ +--- +name: prowler-api +description: > + Prowler API patterns: RLS, RBAC, providers, Celery tasks. + Trigger: When working on api/ - models, serializers, views, filters, tasks. +license: Apache-2.0 +metadata: + author: prowler-cloud + version: "1.0" +allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, WebSearch, Task +--- + +## Critical Rules + +- ALWAYS use `rls_transaction(tenant_id)` when querying outside ViewSet context +- ALWAYS use `get_role()` before checking permissions (returns FIRST role only) +- NEVER access `Provider.objects` without RLS context in Celery tasks +- ALWAYS use `@set_tenant` then `@handle_provider_deletion` decorator order + +--- + +## 1. Providers (10 Supported) + +UID validation is dynamic: `getattr(self, f"validate_{self.provider}_uid")(self.uid)` + +| Provider | UID Format | Example | +|----------|-----------|---------| +| AWS | 12 digits | `123456789012` | +| Azure | UUID v4 | `a1b2c3d4-e5f6-...` | +| GCP | 6-30 chars, lowercase, letter start | `my-gcp-project` | +| M365 | Valid domain | `contoso.onmicrosoft.com` | +| Kubernetes | 2-251 chars | `arn:aws:eks:...` | +| GitHub | 1-39 chars | `my-org` | +| IaC | Git URL | `https://github.com/user/repo.git` | +| Oracle Cloud | OCID format | `ocid1.tenancy.oc1..` | +| MongoDB Atlas | 24-char hex | `507f1f77bcf86cd799439011` | +| Alibaba Cloud | 16 digits | `1234567890123456` | + +**Adding new provider**: Add to `ProviderChoices` enum + create `validate__uid()` staticmethod. + +--- + +## 2. 
Row-Level Security (RLS) + +```python +from api.db_utils import rls_transaction + +with rls_transaction(tenant_id): + providers = Provider.objects.filter(connected=True) + # PostgreSQL enforces tenant_id automatically +``` + +Models inherit from `RowLevelSecurityProtectedModel` with `RowLevelSecurityConstraint`. + +--- + +## 3. Managers + +```python +Provider.objects.all() # Only is_deleted=False +Provider.all_objects.all() # All including deleted +Finding.objects.all() # Only from active providers +``` + +--- + +## 4. RBAC + +```python +from api.rbac.permissions import get_role, get_providers, Permissions + +user_role = get_role(self.request.user) # Returns FIRST role only + +if user_role.unlimited_visibility: + queryset = Provider.objects.filter(tenant_id=tenant_id) +else: + queryset = get_providers(user_role) # Filtered by provider_groups +``` + +**Permissions**: `MANAGE_USERS`, `MANAGE_ACCOUNT`, `MANAGE_BILLING`, `MANAGE_PROVIDERS`, `MANAGE_INTEGRATIONS`, `MANAGE_SCANS`, `UNLIMITED_VISIBILITY` + +--- + +## 5. Celery Tasks + +```python +@shared_task(base=RLSTask, name="task-name", queue="scans") +@set_tenant +@handle_provider_deletion +def my_task(tenant_id: str, provider_id: str): + pass +``` + +**Queues**: Check `tasks/tasks.py`. Common: `scans`, `overview`, `compliance`, `integrations`. + +**Orchestration**: Use `chain()` for sequential, `group()` for parallel. + +--- + +## 6. JSON:API Format + +```python +content_type = "application/vnd.api+json" + +# Request +{"data": {"type": "providers", "attributes": {"provider": "aws", "uid": "123456789012"}}} + +# Response access +response.json()["data"]["attributes"]["alias"] +``` + +--- + +## 7. 
Serializers + +| Pattern | Usage | +|---------|-------| +| `ProviderSerializer` | Read (list/retrieve) | +| `ProviderCreateSerializer` | POST | +| `ProviderUpdateSerializer` | PATCH | +| `RLSSerializer` | Auto-injects tenant_id | + +--- + +## Commands + +```bash +cd api && poetry run python manage.py migrate # Run migrations +cd api && poetry run python manage.py shell # Django shell +cd api && poetry run celery -A config.celery worker -l info # Start worker +``` + +--- + +## Resources + +- **Documentation**: See [references/api-docs.md](references/api-docs.md) for local file paths and documentation diff --git a/skills/prowler-api/references/api-docs.md b/skills/prowler-api/references/api-docs.md new file mode 100644 index 0000000000..72bc7990fd --- /dev/null +++ b/skills/prowler-api/references/api-docs.md @@ -0,0 +1,21 @@ +# API Documentation + +## Local Documentation + +For API-related patterns, see: + +- `api/src/backend/api/models.py` - Models, Providers, UID validation +- `api/src/backend/api/v1/views.py` - ViewSets, RBAC patterns +- `api/src/backend/api/v1/serializers.py` - Serializers +- `api/src/backend/api/rbac/permissions.py` - RBAC functions +- `api/src/backend/tasks/tasks.py` - Celery tasks +- `api/src/backend/api/db_utils.py` - rls_transaction + +## Contents + +The documentation covers: +- Row-Level Security (RLS) implementation +- RBAC permission system +- Provider validation patterns +- Celery task orchestration +- JSON:API serialization format diff --git a/skills/prowler-compliance/SKILL.md b/skills/prowler-compliance/SKILL.md new file mode 100644 index 0000000000..8b82617d6e --- /dev/null +++ b/skills/prowler-compliance/SKILL.md @@ -0,0 +1,113 @@ +--- +name: prowler-compliance +description: > + Creates and manages Prowler compliance frameworks. + Trigger: When working with compliance frameworks (CIS, NIST, PCI-DSS, SOC2, GDPR). 
+license: Apache-2.0 +metadata: + author: prowler-cloud + version: "1.0" +allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, WebSearch, Task +--- + +## When to Use + +Use this skill when: +- Creating a new compliance framework for any provider +- Adding requirements to existing frameworks +- Mapping checks to compliance controls + +## Compliance Framework Structure + +Frameworks are JSON files in: `prowler/compliance/{provider}/{framework}.json` + +```json +{ + "Framework": "CIS", + "Name": "CIS Amazon Web Services Foundations Benchmark v2.0.0", + "Version": "2.0", + "Provider": "AWS", + "Description": "The CIS Amazon Web Services Foundations Benchmark provides prescriptive guidance...", + "Requirements": [ + { + "Id": "1.1", + "Name": "Requirement name", + "Description": "Detailed description of the requirement", + "Attributes": [ + { + "Section": "1. Identity and Access Management", + "Profile": "Level 1", + "AssessmentStatus": "Automated", + "Description": "Attribute description" + } + ], + "Checks": ["check_name_1", "check_name_2"] + } + ] +} +``` + +## Supported Frameworks + +**Industry standards:** +- CIS (Center for Internet Security) +- NIST 800-53, NIST CSF +- CISA + +**Regulatory compliance:** +- PCI-DSS +- HIPAA +- GDPR +- FedRAMP +- SOC2 + +**Cloud-specific:** +- AWS Well-Architected Framework (Security Pillar) +- AWS Foundational Technical Review (FTR) +- Azure Security Benchmark +- GCP Security Best Practices + +## Framework Requirement Mapping + +Each requirement maps to one or more checks: + +```json +{ + "Id": "2.1.1", + "Name": "Ensure MFA is enabled for all IAM users", + "Description": "Multi-Factor Authentication adds an extra layer of protection...", + "Checks": [ + "iam_user_mfa_enabled", + "iam_root_mfa_enabled", + "iam_user_hardware_mfa_enabled" + ] +} +``` + +## Best Practices + +1. **Requirement IDs**: Follow the original framework numbering (e.g., "1.1", "2.3.4") +2. 
**Check Mapping**: Map to existing checks when possible, create new checks only if needed +3. **Completeness**: Include all framework requirements, even if no check exists (document as manual) +4. **Version Control**: Include framework version in the name and file + +## Commands + +```bash +# List available frameworks for a provider +poetry run python prowler-cli.py {provider} --list-compliance + +# Run scan with specific compliance framework +poetry run python prowler-cli.py {provider} --compliance {framework} + +# Run scan with multiple frameworks +poetry run python prowler-cli.py {provider} --compliance cis_aws_benchmark_v2 pci_dss_3.2.1 + +# Output compliance report +poetry run python prowler-cli.py {provider} --compliance {framework} -M csv json html +``` + +## Resources + +- **Templates**: See [assets/](assets/) for complete CIS framework JSON template +- **Documentation**: See [references/compliance-docs.md](references/compliance-docs.md) for official Prowler Developer Guide links diff --git a/skills/prowler-compliance/assets/cis_framework.json b/skills/prowler-compliance/assets/cis_framework.json new file mode 100644 index 0000000000..817c0ca6aa --- /dev/null +++ b/skills/prowler-compliance/assets/cis_framework.json @@ -0,0 +1,76 @@ +{ + "Framework": "CIS", + "Name": "CIS Amazon Web Services Foundations Benchmark v5.0.0", + "Version": "5.0", + "Provider": "AWS", + "Description": "The CIS Amazon Web Services Foundations Benchmark provides prescriptive guidance for configuring security options for a subset of Amazon Web Services.", + "Requirements": [ + { + "Id": "1.1", + "Description": "Maintain current contact details", + "Checks": [ + "account_maintain_current_contact_details" + ], + "Attributes": [ + { + "Section": "1 Identity and Access Management", + "Profile": "Level 1", + "AssessmentStatus": "Manual", + "Description": "Ensure contact email and telephone details for AWS accounts are current and map to more than one individual in your organization.", + 
"RationaleStatement": "If an AWS account is observed to be behaving in a prohibited or suspicious manner, AWS will attempt to contact the account owner by email and phone using the contact details listed.", + "ImpactStatement": "", + "RemediationProcedure": "This activity can only be performed via the AWS Console. Navigate to Account Settings and update contact information.", + "AuditProcedure": "This activity can only be performed via the AWS Console. Navigate to Account Settings and verify contact information is current.", + "AdditionalInformation": "", + "References": "https://docs.aws.amazon.com/accounts/latest/reference/manage-acct-update-contact.html", + "DefaultValue": "" + } + ] + }, + { + "Id": "1.3", + "Description": "Ensure no 'root' user account access key exists", + "Checks": [ + "iam_no_root_access_key" + ], + "Attributes": [ + { + "Section": "1 Identity and Access Management", + "Profile": "Level 1", + "AssessmentStatus": "Automated", + "Description": "The 'root' user account is the most privileged user in an AWS account. 
AWS Access Keys provide programmatic access to a given AWS account.", + "RationaleStatement": "Deleting access keys associated with the 'root' user account limits vectors by which the account can be compromised.", + "ImpactStatement": "", + "RemediationProcedure": "Navigate to IAM console, select root user, Security credentials tab, and delete any access keys.", + "AuditProcedure": "Run: aws iam get-account-summary | grep 'AccountAccessKeysPresent'", + "AdditionalInformation": "IAM User account root for us-gov cloud regions is not enabled by default.", + "References": "https://docs.aws.amazon.com/general/latest/gr/aws-access-keys-best-practices.html", + "DefaultValue": "" + } + ] + }, + { + "Id": "1.11", + "Description": "Ensure credentials unused for 45 days or more are disabled", + "Checks": [ + "iam_user_accesskey_unused", + "iam_user_console_access_unused" + ], + "Attributes": [ + { + "Section": "1 Identity and Access Management", + "Profile": "Level 1", + "AssessmentStatus": "Automated", + "Description": "AWS IAM users can access AWS resources using different types of credentials. 
It is recommended that all credentials unused for 45 days or more be deactivated or removed.", + "RationaleStatement": "Disabling or removing unnecessary credentials reduces the window of opportunity for compromised accounts.", + "ImpactStatement": "Users with deactivated credentials will lose access until re-enabled.", + "RemediationProcedure": "Use IAM console or CLI to deactivate unused access keys and remove unused passwords.", + "AuditProcedure": "Generate credential report and review password_last_used and access_key_last_used fields.", + "AdditionalInformation": "", + "References": "https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_getting-report.html", + "DefaultValue": "" + } + ] + } + ] +} diff --git a/skills/prowler-compliance/references/compliance-docs.md b/skills/prowler-compliance/references/compliance-docs.md new file mode 100644 index 0000000000..6d53252cfe --- /dev/null +++ b/skills/prowler-compliance/references/compliance-docs.md @@ -0,0 +1,15 @@ +# Compliance Framework Documentation + +## Local Documentation + +For detailed compliance framework patterns, see: + +- `docs/developer-guide/security-compliance-framework.mdx` - Complete guide for creating compliance frameworks (CIS, NIST, PCI-DSS, SOC2, GDPR) + +## Contents + +The documentation covers: +- Framework JSON structure +- Framework metadata (name, version, provider) +- Requirements array with IDs, descriptions, and attributes +- Check mappings for each requirement diff --git a/skills/prowler-docs/SKILL.md b/skills/prowler-docs/SKILL.md new file mode 100644 index 0000000000..d8fd8f9cb2 --- /dev/null +++ b/skills/prowler-docs/SKILL.md @@ -0,0 +1,122 @@ +--- +name: prowler-docs +description: > + Prowler documentation style guide and writing standards. + Trigger: When writing documentation for Prowler features, tutorials, or guides. 
+license: Apache-2.0 +metadata: + author: prowler-cloud + version: "1.0" +allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, WebSearch, Task +--- + +## When to Use + +Use this skill when writing Prowler documentation for: +- Feature documentation +- API/SDK references +- Tutorials and guides +- Release notes + +## Brand Voice + +### Unbiased Communication +- Avoid gendered pronouns (use "you/your" or "they/them") +- Use inclusive alternatives: businessman → businessperson, mankind → humanity +- No generalizations about gender, race, nationality, culture +- Avoid militaristic language: fight → address, kill chain → cyberattack chain + +### Technical Terminology +- Define key terms and acronyms on first use: "Identity and Access Management (IAM)" +- Prefer verbal over nominal constructions: "The report was created" not "The creation of the report" +- Use clear, accessible language; minimize jargon + +## Formatting Standards + +### Title Case Capitalization +Use Title Case for all headers: +- Good: "How to Configure Security Scanning" +- Bad: "How to configure security scanning" + +### Hyphenation +- Prenominal position: "world-leading company" +- Postnominal position: "features built in" + +### Bullet Points +Use when information can be logically divided: +```markdown +Prowler CLI includes: +* **Industry standards:** CIS, NIST 800, NIST CSF +* **Regulatory compliance:** RBI, FedRAMP, PCI-DSS +* **Privacy frameworks:** GDPR, HIPAA, FFIEC +``` + +### Interaction Verbs +- Desktop: Click, Double-click, Right-click, Drag, Scroll +- Touch: Tap, Double-tap, Press and hold, Swipe, Pinch + +## SEO Optimization + +### Sentence Structure +Place keywords at the beginning: +- Good: "To create a custom role, open a terminal..." +- Bad: "Open a terminal to create a custom role..." 
+ +### Headers +- H1: Primary (unique, descriptive) +- H2-H6: Subheadings (logical hierarchy) +- Include keywords naturally + +## MDX Components + +### Version Badge +```mdx +import { VersionBadge } from "/snippets/version-badge.mdx" + +## New Feature Name + + + +Description of the feature... +``` + +### Warnings and Danger Calls +```mdx + +Disabling encryption may expose sensitive data to unauthorized access. + + + +Running this command will **permanently delete all data**. + +``` + +## Prowler Features (Proper Nouns) + +Reference without articles: +- Prowler App, Prowler CLI, Prowler SDK +- Prowler Cloud, Prowler Studio, Prowler Registry +- Built-in Compliance Checks +- Multi-cloud Security Scanning +- Autonomous Cloud Security Analyst (AI) + +## Documentation Structure + +``` +docs/ +├── getting-started/ +├── tutorials/ +├── providers/ +│ ├── aws/ +│ ├── azure/ +│ ├── gcp/ +│ └── ... +├── api/ +├── sdk/ +├── compliance/ +└── developer-guide/ +``` + +## Resources + +- **Documentation**: See [references/](references/) for links to local developer guide diff --git a/skills/prowler-docs/references/documentation-docs.md b/skills/prowler-docs/references/documentation-docs.md new file mode 100644 index 0000000000..727e4b0773 --- /dev/null +++ b/skills/prowler-docs/references/documentation-docs.md @@ -0,0 +1,15 @@ +# Documentation Style Guide + +## Local Documentation + +For documentation writing standards, see: + +- `docs/developer-guide/documentation.mdx` - Mintlify-based documentation and local development setup + +## Contents + +The documentation covers: +- Mintlify documentation system +- Local documentation development +- Style guide and conventions +- MDX file structure diff --git a/skills/prowler-mcp/SKILL.md b/skills/prowler-mcp/SKILL.md new file mode 100644 index 0000000000..4bf3eec7a0 --- /dev/null +++ b/skills/prowler-mcp/SKILL.md @@ -0,0 +1,78 @@ +--- +name: prowler-mcp +description: > + Creates MCP tools for Prowler MCP Server. 
Covers BaseTool pattern, model design, + and API client usage. Use when working on mcp_server/ directory. +license: Apache-2.0 +metadata: + author: prowler-cloud + version: "1.0" +allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, WebSearch, Task +--- + +## Overview + +The Prowler MCP Server uses three sub-servers with prefixed namespacing: + +| Sub-Server | Prefix | Auth | Purpose | +|------------|--------|------|---------| +| Prowler App | `prowler_app_*` | Required | Cloud management tools | +| Prowler Hub | `prowler_hub_*` | No | Security checks catalog | +| Prowler Docs | `prowler_docs_*` | No | Documentation search | + +For complete architecture, patterns, and examples, see [docs/developer-guide/mcp-server.mdx](../../../docs/developer-guide/mcp-server.mdx). + +--- + +## Critical Rules (Prowler App Only) + +### Tool Implementation + +- **ALWAYS**: Extend `BaseTool` (auto-registered via `tool_loader.py`, only public methods from the class are exposed as a tool) +- **NEVER**: Manually register BaseTool subclasses +- **NEVER**: Import tools directly in server.py + +### Models + +- **ALWAYS**: Use `MinimalSerializerMixin` for responses +- **ALWAYS**: Implement `from_api_response()` factory method +- **ALWAYS**: Use two-tier models (Simplified for lists, Detailed for single items) +- **NEVER**: Return raw API responses + +### API Client + +- **ALWAYS**: Use `self.api_client` singleton +- **ALWAYS**: Use `build_filter_params()` for query parameters +- **NEVER**: Create new httpx clients + +--- + +## Hub/Docs Tools + +Use `@mcp.tool()` decorator directly—no BaseTool or models required. + +--- + +## Quick Reference: New Prowler App Tool + +1. Create tool class in `prowler_app/tools/` extending `BaseTool` +2. Create models in `prowler_app/models/` using `MinimalSerializerMixin` +3. 
Tools auto-register via `tool_loader.py` + +--- + +## QA Checklist (Prowler App) + +- [ ] Tool docstrings describe LLM-relevant behavior +- [ ] Models use `MinimalSerializerMixin` +- [ ] API responses transformed to simplified models +- [ ] Error handling returns `{"error": str, "status": "failed"}` +- [ ] Parameters use `Field()` with descriptions +- [ ] No hardcoded secrets + +--- + +## Resources + +- **Full Guide**: [docs/developer-guide/mcp-server.mdx](../../../docs/developer-guide/mcp-server.mdx) +- **Templates**: See [assets/](assets/) for tool and model templates diff --git a/skills/prowler-mcp/assets/base_tool.py b/skills/prowler-mcp/assets/base_tool.py new file mode 100644 index 0000000000..7c176a22eb --- /dev/null +++ b/skills/prowler-mcp/assets/base_tool.py @@ -0,0 +1,62 @@ +# Example: BaseTool Abstract Class +# Source: mcp_server/prowler_mcp_server/prowler_app/tools/base.py + +import inspect +from abc import ABC +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from fastmcp import FastMCP + +from prowler_mcp_server.lib.logger import logger +from prowler_mcp_server.prowler_app.utils.api_client import ProwlerAPIClient + + +class BaseTool(ABC): + """ + Abstract base class for MCP tools. + + Key patterns: + 1. Auto-registers all public async methods as tools + 2. Provides shared api_client and logger via properties + 3. Subclasses just define async methods with Field() parameters + """ + + def __init__(self): + self._api_client = ProwlerAPIClient() + self._logger = logger + + @property + def api_client(self) -> ProwlerAPIClient: + """Shared API client for making authenticated requests.""" + return self._api_client + + @property + def logger(self): + """Logger for structured logging.""" + return self._logger + + def register_tools(self, mcp: "FastMCP") -> None: + """ + Auto-register all public async methods as MCP tools. + + Subclasses don't need to override this - just define async methods. 
+ """ + registered_count = 0 + + for name, method in inspect.getmembers(self, predicate=inspect.ismethod): + # Skip private/protected methods + if name.startswith("_"): + continue + # Skip inherited methods + if name in ["register_tools", "api_client", "logger"]: + continue + # Only register async methods + if inspect.iscoroutinefunction(method): + mcp.tool(method) + registered_count += 1 + self.logger.debug(f"Auto-registered tool: {name}") + + self.logger.info( + f"Auto-registered {registered_count} tools from {self.__class__.__name__}" + ) diff --git a/skills/prowler-mcp/assets/models.py b/skills/prowler-mcp/assets/models.py new file mode 100644 index 0000000000..32669e6c84 --- /dev/null +++ b/skills/prowler-mcp/assets/models.py @@ -0,0 +1,146 @@ +# Example: MCP Models with MinimalSerializerMixin +# Source: mcp_server/prowler_mcp_server/prowler_app/models/ + +from typing import Any, Literal + +from pydantic import ( + BaseModel, + ConfigDict, + Field, + SerializerFunctionWrapHandler, + model_serializer, +) + + +class MinimalSerializerMixin(BaseModel): + """ + Mixin that excludes empty values from serialization. + + Key pattern: Reduces token usage by removing None, empty strings, empty lists/dicts. + Use this for all LLM-facing models. 
+ """ + + @model_serializer(mode="wrap") + def _serialize(self, handler: SerializerFunctionWrapHandler) -> dict[str, Any]: + data = handler(self) + return {k: v for k, v in data.items() if not self._should_exclude(k, v)} + + def _should_exclude(self, key: str, value: Any) -> bool: + """Override in subclasses for custom exclusion logic.""" + if value is None: + return True + if value == "": + return True + if isinstance(value, list) and not value: + return True + if isinstance(value, dict) and not value: + return True + return False + + +class CheckRemediation(MinimalSerializerMixin, BaseModel): + """Remediation information - uses mixin to strip empty fields.""" + + model_config = ConfigDict(frozen=True) + + cli: str | None = Field(default=None, description="CLI command for remediation") + terraform: str | None = Field(default=None, description="Terraform code") + other: str | None = Field(default=None, description="Other remediation steps") + recommendation: str | None = Field( + default=None, description="Best practice recommendation" + ) + + +class SimplifiedFinding(MinimalSerializerMixin, BaseModel): + """ + Lightweight finding for list responses. 
+ + Key pattern: Two-tier serialization + - SimplifiedFinding: minimal fields for lists (fast, low tokens) + - DetailedFinding: full fields for single item (complete info) + """ + + model_config = ConfigDict(frozen=True) + + id: str = Field(description="Finding UUID") + uid: str = Field(description="Unique finding identifier") + status: Literal["FAIL", "PASS", "MANUAL"] = Field(description="Finding status") + severity: str = Field(description="Severity level") + check_id: str = Field(description="Check ID that generated this finding") + resource_name: str | None = Field(default=None, description="Affected resource") + + @classmethod + def from_api_response(cls, data: dict) -> "SimplifiedFinding": + """Transform JSON:API response to model.""" + attributes = data["attributes"] + return cls( + id=data["id"], + uid=attributes["uid"], + status=attributes["status"], + severity=attributes["severity"], + check_id=attributes["check_id"], + resource_name=attributes.get("resource_name"), + ) + + +class DetailedFinding(SimplifiedFinding): + """ + Full finding details - extends SimplifiedFinding. + + Key pattern: Inheritance for two-tier serialization. 
+ """ + + status_extended: str = Field(description="Detailed status message") + region: str | None = Field(default=None, description="Cloud region") + remediation: CheckRemediation | None = Field(default=None, description="How to fix") + + @classmethod + def from_api_response(cls, data: dict) -> "DetailedFinding": + """Transform JSON:API response to detailed model.""" + attributes = data["attributes"] + check_metadata = attributes.get("check_metadata", {}) + remediation_data = check_metadata.get("Remediation", {}) + + return cls( + id=data["id"], + uid=attributes["uid"], + status=attributes["status"], + severity=attributes["severity"], + check_id=attributes["check_id"], + resource_name=attributes.get("resource_name"), + status_extended=attributes.get("status_extended", ""), + region=attributes.get("region"), + remediation=( + CheckRemediation( + cli=remediation_data.get("Code", {}).get("CLI"), + terraform=remediation_data.get("Code", {}).get("Terraform"), + recommendation=remediation_data.get("Recommendation", {}).get( + "Text" + ), + ) + if remediation_data + else None + ), + ) + + +class FindingsListResponse(BaseModel): + """Wrapper for list responses with pagination.""" + + findings: list[SimplifiedFinding] + total: int + page: int + page_size: int + + @classmethod + def from_api_response(cls, data: dict) -> "FindingsListResponse": + findings = [ + SimplifiedFinding.from_api_response(f) for f in data.get("data", []) + ] + meta = data.get("meta", {}).get("pagination", {}) + return cls( + findings=findings, + total=meta.get("count", len(findings)), + page=meta.get("page", 1), + page_size=meta.get("page_size", len(findings)), + ) diff --git a/skills/prowler-mcp/assets/tool_implementation.py b/skills/prowler-mcp/assets/tool_implementation.py new file mode 100644 index 0000000000..87daa4b5e5 --- /dev/null +++ b/skills/prowler-mcp/assets/tool_implementation.py @@ -0,0 +1,95 @@ +# Example: Tool Implementation (FindingsTools) +# Source: 
mcp_server/prowler_mcp_server/prowler_app/tools/findings.py + +from typing import Any, Literal + +from prowler_mcp_server.prowler_app.models.findings import ( + DetailedFinding, + FindingsListResponse, +) +from prowler_mcp_server.prowler_app.tools.base import BaseTool +from pydantic import Field + + +class FindingsTools(BaseTool): + """ + MCP tools for security findings. + + Key patterns: + 1. Extends BaseTool (no need to override register_tools) + 2. Each async method becomes a tool automatically + 3. Use pydantic.Field() for parameter documentation + 4. Return dict from model_dump() for serialization + """ + + async def search_security_findings( + self, + severity: list[ + Literal["critical", "high", "medium", "low", "informational"] + ] = Field( + default=[], + description="Filter by severity levels. Multiple values allowed.", + ), + status: list[Literal["FAIL", "PASS", "MANUAL"]] = Field( + default=["FAIL"], + description="Filter by finding status. Default: ['FAIL'].", + ), + provider_type: list[str] = Field( + default=[], + description="Filter by cloud provider (aws, azure, gcp, etc.).", + ), + page_size: int = Field( + default=50, + description="Number of results per page.", + ), + page_number: int = Field( + default=1, + description="Page number (1-indexed).", + ), + ) -> dict[str, Any]: + """ + Search security findings with rich filtering. + + Returns simplified finding data optimized for LLM consumption. 
+ """ + # Validate page size + self.api_client.validate_page_size(page_size) + + # Build query parameters + params = { + "page[size]": page_size, + "page[number]": page_number, + } + if severity: + params["filter[severity__in]"] = ",".join(severity) + if status: + params["filter[status__in]"] = ",".join(status) + if provider_type: + params["filter[provider_type__in]"] = ",".join(provider_type) + + # Make API request + api_response = await self.api_client.get("/findings", params=params) + + # Transform to simplified model and return + simplified_response = FindingsListResponse.from_api_response(api_response) + return simplified_response.model_dump() + + async def get_finding_details( + self, + finding_id: str = Field( + description="UUID of the finding to retrieve.", + ), + ) -> dict[str, Any]: + """ + Get comprehensive details for a specific finding. + + Returns full finding data including remediation steps. + """ + params = {"include": "resources,scan"} + api_response = await self.api_client.get( + f"/findings/{finding_id}", params=params + ) + detailed_finding = DetailedFinding.from_api_response( + api_response.get("data", {}) + ) + return detailed_finding.model_dump() diff --git a/skills/prowler-pr/SKILL.md b/skills/prowler-pr/SKILL.md new file mode 100644 index 0000000000..d5e743e18e --- /dev/null +++ b/skills/prowler-pr/SKILL.md @@ -0,0 +1,125 @@ +--- +name: prowler-pr +description: > + Creates Pull Requests for Prowler following the project template and conventions. + Trigger: When user asks to create a PR, submit changes, or open a pull request. +license: Apache-2.0 +metadata: + author: prowler-cloud + version: "1.0" +allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, WebSearch, Task +--- + +## PR Creation Process + +1. **Analyze changes**: `git diff main...HEAD` to understand ALL commits +2. **Determine affected components**: SDK, API, UI, MCP, Docs +3. **Fill template sections** based on changes +4. 
**Create PR** with `gh pr create` + +## PR Template Structure + +```markdown +### Context + +{Why this change? Link issues with `Fix #XXXX`} + +### Description + +{Summary of changes and dependencies} + +### Steps to review + +{How to test/verify the changes} + +### Checklist + +
+<details>
+<summary>Community Checklist</summary>
+
+- [ ] This feature/issue is listed in [here](https://github.com/prowler-cloud/prowler/issues?q=sort%3Aupdated-desc+is%3Aissue+is%3Aopen) or roadmap.prowler.com
+- [ ] Is it assigned to me, if not, request it via the issue/feature in [here](https://github.com/prowler-cloud/prowler/issues?q=sort%3Aupdated-desc+is%3Aissue+is%3Aopen) or [Prowler Community Slack](https://goto.prowler.com/slack)
+
+</details>
+ +- Are there new checks included in this PR? Yes / No + - If so, do we need to update permissions for the provider? +- [ ] Review if the code is being covered by tests. +- [ ] Review if code is being documented following https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings +- [ ] Review if backport is needed. +- [ ] Review if is needed to change the Readme.md +- [ ] Ensure new entries are added to CHANGELOG.md, if applicable. + +#### SDK/CLI +- Are there new checks included in this PR? Yes / No + - If so, do we need to update permissions for the provider? Please review this carefully. + +#### UI (if applicable) +- [ ] All issue/task requirements work as expected on the UI +- [ ] Screenshots/Video - Mobile (X < 640px) +- [ ] Screenshots/Video - Tablet (640px > X < 1024px) +- [ ] Screenshots/Video - Desktop (X > 1024px) +- [ ] Ensure new entries are added to ui/CHANGELOG.md + +#### API (if applicable) +- [ ] Verify if API specs need to be regenerated. +- [ ] Check if version updates are required. +- [ ] Ensure new entries are added to api/CHANGELOG.md + +### License + +By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. +``` + +## Component-Specific Rules + +| Component | CHANGELOG | Extra Checks | +|-----------|-----------|--------------| +| SDK | `prowler/CHANGELOG.md` | New checks → permissions update? | +| API | `api/CHANGELOG.md` | API specs regeneration, version bump | +| UI | `ui/CHANGELOG.md` | Screenshots for Mobile/Tablet/Desktop | +| MCP | `mcp_server/CHANGELOG.md` | N/A | + +## Commands + +```bash +# Check current branch status +git status +git log main..HEAD --oneline + +# View full diff +git diff main...HEAD + +# Create PR with heredoc for body +gh pr create --title "feat: description" --body "$(cat <<'EOF' +### Context +... 
+EOF +)" + +# Create draft PR +gh pr create --draft --title "feat: description" +``` + +## Title Conventions + +Follow conventional commits: +- `feat:` New feature +- `fix:` Bug fix +- `docs:` Documentation +- `chore:` Maintenance +- `refactor:` Code restructure +- `test:` Tests + +## Before Creating PR + +1. ✅ All tests pass locally +2. ✅ Linting passes (`make lint` or component-specific) +3. ✅ CHANGELOG updated (if applicable) +4. ✅ Branch is up to date with main +5. ✅ Commits are clean and descriptive + +## Resources + +- **Documentation**: See [references/](references/) for links to local developer guide diff --git a/skills/prowler-pr/references/pr-docs.md b/skills/prowler-pr/references/pr-docs.md new file mode 100644 index 0000000000..3fb1a7ce83 --- /dev/null +++ b/skills/prowler-pr/references/pr-docs.md @@ -0,0 +1,15 @@ +# Pull Request Documentation + +## Local Documentation + +For PR conventions and workflow, see: + +- `docs/developer-guide/introduction.mdx` - "Sending the Pull Request" section + +## Contents + +The documentation covers: +- PR template requirements +- Commit message conventions +- Review process +- CI/CD checks diff --git a/skills/prowler-provider/SKILL.md b/skills/prowler-provider/SKILL.md new file mode 100644 index 0000000000..493c393f1e --- /dev/null +++ b/skills/prowler-provider/SKILL.md @@ -0,0 +1,143 @@ +--- +name: prowler-provider +description: > + Creates new Prowler cloud providers or adds services to existing providers. + Trigger: When adding a new cloud provider or service to Prowler SDK. 
+license: Apache-2.0 +metadata: + author: prowler-cloud + version: "1.0" +allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, WebSearch, Task +--- + +## When to Use + +Use this skill when: +- Adding a new cloud provider to Prowler +- Adding a new service to an existing provider +- Understanding the provider architecture pattern + +## Provider Architecture Pattern + +Every provider MUST follow this structure: + +``` +prowler/providers/{provider}/ +├── __init__.py +├── {provider}_provider.py # Main provider class +├── models.py # Provider-specific models +├── config.py # Provider configuration +├── exceptions/ # Provider-specific exceptions +├── lib/ +│ ├── service/ # Base service class +│ ├── arguments/ # CLI arguments parser +│ └── mutelist/ # Mutelist functionality +└── services/ + └── {service}/ + ├── {service}_service.py # Resource fetcher + ├── {service}_client.py # Python singleton instance + └── {check_name}/ # Individual checks + ├── {check_name}.py + └── {check_name}.metadata.json +``` + +## Provider Class Template + +```python +from prowler.providers.common.provider import Provider + +class {Provider}Provider(Provider): + """Provider class for {Provider} cloud platform.""" + + def __init__(self, arguments): + super().__init__(arguments) + self.session = self._setup_session(arguments) + self.regions = self._get_regions() + + def _setup_session(self, arguments): + """Provider-specific authentication.""" + # Implement credential handling + pass + + def _get_regions(self): + """Get available regions for provider.""" + # Return list of regions + pass +``` + +## Service Class Template + +```python +from prowler.providers.{provider}.lib.service.service import {Provider}Service + +class {Service}({Provider}Service): + """Service class for {service} resources.""" + + def __init__(self, provider): + super().__init__(provider) + self.{resources} = [] + self._fetch_{resources}() + + def _fetch_{resources}(self): + """Fetch {resource} data from API.""" + try: 
+ response = self.client.list_{resources}() + for item in response: + self.{resources}.append( + {Resource}( + id=item["id"], + name=item["name"], + region=item.get("region"), + ) + ) + except Exception as e: + logger.error(f"Error fetching {resources}: {e}") +``` + +## Service Client Template + +```python +from prowler.providers.{provider}.services.{service}.{service}_service import {Service} + +{service}_client = {Service} +``` + +## Supported Providers + +Current providers: +- AWS (Amazon Web Services) +- Azure (Microsoft Azure) +- GCP (Google Cloud Platform) +- Kubernetes +- GitHub +- M365 (Microsoft 365) +- OracleCloud (Oracle Cloud Infrastructure) +- AlibabaCloud +- MongoDB Atlas +- NHN (NHN Cloud) +- LLM (Language Model providers) +- IaC (Infrastructure as Code) + +## Commands + +```bash +# Run provider +poetry run python prowler-cli.py {provider} + +# List services for provider +poetry run python prowler-cli.py {provider} --list-services + +# List checks for provider +poetry run python prowler-cli.py {provider} --list-checks + +# Run specific service +poetry run python prowler-cli.py {provider} --services {service} + +# Debug mode +poetry run python prowler-cli.py {provider} --log-level DEBUG +``` + +## Resources + +- **Templates**: See [assets/](assets/) for Provider, Service, and Client singleton templates +- **Documentation**: See [references/provider-docs.md](references/provider-docs.md) for official Prowler Developer Guide links diff --git a/skills/prowler-provider/assets/client.py b/skills/prowler-provider/assets/client.py new file mode 100644 index 0000000000..ed47f5bc40 --- /dev/null +++ b/skills/prowler-provider/assets/client.py @@ -0,0 +1,56 @@ +# Example: Singleton Client Pattern +# Source: prowler/providers/github/services/repository/repository_client.py + +""" +Singleton Client Pattern + +This pattern is CRITICAL for how Prowler checks access service data. + +How it works: +1. When this module is imported, the service is instantiated ONCE +2. 
The service fetches all data during __init__ (eager loading) +3. All checks import this singleton and access pre-fetched data +4. No additional API calls needed during check execution + +File: prowler/providers/github/services/repository/repository_client.py +""" + +from prowler.providers.common.provider import Provider +from prowler.providers.github.services.repository.repository_service import Repository + +# SINGLETON: Instantiated once when module is first imported +# Provider.get_global_provider() returns the provider set in __init__ +repository_client = Repository(Provider.get_global_provider()) + + +""" +Usage in checks: + +from prowler.providers.github.services.repository.repository_client import ( + repository_client, +) + +class repository_secret_scanning_enabled(Check): + def execute(self): + findings = [] + for repo in repository_client.repositories.values(): + # Access pre-fetched repository data + report = CheckReportGithub(metadata=self.metadata(), resource=repo) + if repo.secret_scanning_enabled: + report.status = "PASS" + else: + report.status = "FAIL" + findings.append(report) + return findings +""" + + +# Another example for organization service +# File: prowler/providers/github/services/organization/organization_client.py + +# from prowler.providers.common.provider import Provider +# from prowler.providers.github.services.organization.organization_service import ( +# Organization, +# ) +# +# organization_client = Organization(Provider.get_global_provider()) diff --git a/skills/prowler-provider/assets/provider.py b/skills/prowler-provider/assets/provider.py new file mode 100644 index 0000000000..27bc8ff0b9 --- /dev/null +++ b/skills/prowler-provider/assets/provider.py @@ -0,0 +1,143 @@ +# Example: Provider Class Template (GitHub Provider) +# Source: prowler/providers/github/github_provider.py + + +from prowler.config.config import ( + default_config_file_path, + get_default_mute_file_path, + load_and_validate_config_file, +) +from 
prowler.lib.logger import logger +from prowler.lib.mutelist.mutelist import Mutelist +from prowler.providers.common.models import Audit_Metadata, Connection +from prowler.providers.common.provider import Provider + + +class GithubProvider(Provider): + """ + GitHub Provider - Template for creating new providers. + + Required attributes (from abstract Provider): + - _type: str - Provider identifier + - _session: Session model - Authentication credentials + - _identity: Identity model - Authenticated user info + - _audit_config: dict - Check configuration + - _mutelist: Mutelist - Finding filtering + """ + + _type: str = "github" + _auth_method: str = None + _session: "GithubSession" + _identity: "GithubIdentityInfo" + _audit_config: dict + _mutelist: Mutelist + audit_metadata: Audit_Metadata + + def __init__( + self, + # Authentication credentials + personal_access_token: str = "", + # Provider configuration + config_path: str = None, + config_content: dict = None, + fixer_config: dict = {}, + mutelist_path: str = None, + mutelist_content: dict = None, + # Provider scoping + repositories: list = None, + organizations: list = None, + ): + logger.info("Instantiating GitHub Provider...") + + # Store scoping configuration + self._repositories = repositories or [] + self._organizations = organizations or [] + + # Step 1: Setup session (authentication) + self._session = self.setup_session(personal_access_token) + self._auth_method = "Personal Access Token" + + # Step 2: Setup identity (who is authenticated) + self._identity = self.setup_identity(self._session) + + # Step 3: Load audit config + if config_content: + self._audit_config = config_content + else: + if not config_path: + config_path = default_config_file_path + self._audit_config = load_and_validate_config_file(self._type, config_path) + + # Step 4: Load fixer config + self._fixer_config = fixer_config + + # Step 5: Load mutelist + if mutelist_content: + self._mutelist = 
GithubMutelist(mutelist_content=mutelist_content) + else: + if not mutelist_path: + mutelist_path = get_default_mute_file_path(self.type) + self._mutelist = GithubMutelist(mutelist_path=mutelist_path) + + # CRITICAL: Register as global provider + Provider.set_global_provider(self) + + # Required property implementations + @property + def type(self) -> str: + return self._type + + @property + def session(self) -> "GithubSession": + return self._session + + @property + def identity(self) -> "GithubIdentityInfo": + return self._identity + + @property + def audit_config(self) -> dict: + return self._audit_config + + @property + def mutelist(self) -> Mutelist: + return self._mutelist + + @staticmethod + def setup_session(personal_access_token: str) -> "GithubSession": + """Create authenticated session from credentials.""" + if not personal_access_token: + raise ValueError("Personal access token required") + return GithubSession(token=personal_access_token) + + @staticmethod + def setup_identity(session: "GithubSession") -> "GithubIdentityInfo": + """Get identity info for authenticated user.""" + # Make API call to get user info + # g = Github(auth=Auth.Token(session.token)) + # user = g.get_user() + return GithubIdentityInfo( + account_id="user-id", + account_name="username", + account_url="https://github.com/username", + ) + + def print_credentials(self): + """Display credentials in CLI output.""" + print(f"GitHub Account: {self.identity.account_name}") + print(f"Auth Method: {self._auth_method}") + + @staticmethod + def test_connection( + personal_access_token: str = None, + raise_on_exception: bool = True, + ) -> Connection: + """Test if credentials can connect to the provider.""" + try: + session = GithubProvider.setup_session(personal_access_token) + GithubProvider.setup_identity(session) + return Connection(is_connected=True) + except Exception as e: + if raise_on_exception: + raise + return Connection(is_connected=False, error=str(e)) diff --git 
a/skills/prowler-provider/assets/service.py b/skills/prowler-provider/assets/service.py new file mode 100644 index 0000000000..0ad73a9142 --- /dev/null +++ b/skills/prowler-provider/assets/service.py @@ -0,0 +1,119 @@ +# Example: Service Base Class and Implementation +# Source: prowler/providers/github/lib/service/service.py +# Source: prowler/providers/github/services/repository/repository_service.py + +from typing import Optional + +from pydantic.v1 import BaseModel + +from prowler.lib.logger import logger + +# ============================================================ +# Base Service Class +# ============================================================ + + +class GithubService: + """ + Base service class for all GitHub services. + + Key patterns: + 1. Receives provider in __init__ + 2. Creates API clients in __set_clients__ + 3. Stores audit_config and fixer_config for check access + """ + + def __init__(self, service: str, provider: "GithubProvider"): + self.provider = provider + self.clients = self.__set_clients__(provider.session) + self.audit_config = provider.audit_config + self.fixer_config = provider.fixer_config + + def __set_clients__(self, session: "GithubSession") -> list: + """Create API clients based on authentication type.""" + clients = [] + try: + # Create client(s) based on session credentials + # For token auth: single client + # For GitHub App: multiple clients (one per installation) + pass + except Exception as error: + logger.error(f"{error.__class__.__name__}: {error}") + return clients + + +# ============================================================ +# Service Implementation +# ============================================================ + + +class Repository(GithubService): + """ + Repository service - fetches and stores repository data. + + Key patterns: + 1. Inherits from GithubService + 2. Fetches all data in __init__ (eager loading) + 3. Stores data in attributes for check access + 4. 
Defines Pydantic models for data structures + """ + + def __init__(self, provider: "GithubProvider"): + super().__init__(__class__.__name__, provider) + # Fetch and store data during initialization + self.repositories = self._list_repositories() + + def _list_repositories(self) -> dict: + """List repositories based on provider scoping.""" + logger.info("Repository - Listing Repositories...") + repos = {} + + try: + for client in self.clients: + # Get repos from specified repositories + for repo_name in self.provider.repositories: + repo = client.get_repo(repo_name) + self._process_repository(repo, repos) + + # Get repos from specified organizations + for org_name in self.provider.organizations: + org = client.get_organization(org_name) + for repo in org.get_repos(): + self._process_repository(repo, repos) + except Exception as error: + logger.error(f"{error.__class__.__name__}: {error}") + + return repos + + def _process_repository(self, repo, repos: dict): + """Process a single repository and add to repos dict.""" + repos[repo.id] = Repo( + id=repo.id, + name=repo.name, + owner=repo.owner.login, + full_name=repo.full_name, + private=repo.private, + archived=repo.archived, + ) + + +# ============================================================ +# Pydantic Models for Service Data +# ============================================================ + + +class Repo(BaseModel): + """Model for GitHub Repository.""" + + id: int + name: str + owner: str + full_name: str + private: bool + archived: bool + secret_scanning_enabled: Optional[bool] = None + dependabot_enabled: Optional[bool] = None + + class Config: + # Make model hashable for use as dict key + frozen = True diff --git a/skills/prowler-provider/references/provider-docs.md b/skills/prowler-provider/references/provider-docs.md new file mode 100644 index 0000000000..02261aaad0 --- /dev/null +++ b/skills/prowler-provider/references/provider-docs.md @@ -0,0 +1,28 @@ +# Provider Documentation + +## Local Documentation + 
+For detailed provider development patterns, see: + +### Core Documentation +- `docs/developer-guide/provider.mdx` - Provider architecture and creation guide +- `docs/developer-guide/services.mdx` - Adding services to existing providers + +### Provider-Specific Details +- `docs/developer-guide/aws-details.mdx` - AWS provider implementation +- `docs/developer-guide/azure-details.mdx` - Azure provider implementation +- `docs/developer-guide/gcp-details.mdx` - GCP provider implementation +- `docs/developer-guide/kubernetes-details.mdx` - Kubernetes provider implementation +- `docs/developer-guide/github-details.mdx` - GitHub provider implementation +- `docs/developer-guide/m365-details.mdx` - Microsoft 365 provider implementation +- `docs/developer-guide/alibabacloud-details.mdx` - Alibaba Cloud provider implementation +- `docs/developer-guide/llm-details.mdx` - LLM provider implementation + +## Contents + +The documentation covers: +- Provider types (SDK, API, Tool/Wrapper) +- Provider class structure and identity +- Service creation patterns +- Client singleton implementation +- Provider-specific authentication and API patterns diff --git a/skills/prowler-sdk-check/SKILL.md b/skills/prowler-sdk-check/SKILL.md new file mode 100644 index 0000000000..f032f6b590 --- /dev/null +++ b/skills/prowler-sdk-check/SKILL.md @@ -0,0 +1,257 @@ +--- +name: prowler-sdk-check +description: > + Creates Prowler security checks following SDK architecture patterns. + Trigger: When user asks to create a new security check for any provider (AWS, Azure, GCP, K8s, GitHub, etc.) +license: Apache-2.0 +metadata: + author: prowler-cloud + version: "1.0" +allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, WebSearch, Task +--- + +## Check Structure + +``` +prowler/providers/{provider}/services/{service}/{check_name}/ +├── __init__.py +├── {check_name}.py +└── {check_name}.metadata.json +``` + +--- + +## Step-by-Step Creation Process + +### 1. 
Prerequisites + +- **Verify check doesn't exist**: Search `prowler/providers/{provider}/services/{service}/` +- **Ensure provider and service exist** - create them first if not +- **Confirm service has required methods** - may need to add/modify service methods to get data + +### 2. Create Check Files + +```bash +mkdir -p prowler/providers/{provider}/services/{service}/{check_name} +touch prowler/providers/{provider}/services/{service}/{check_name}/__init__.py +touch prowler/providers/{provider}/services/{service}/{check_name}/{check_name}.py +touch prowler/providers/{provider}/services/{service}/{check_name}/{check_name}.metadata.json +``` + +### 3. Implement Check Logic + +```python +from prowler.lib.check.models import Check, Check_Report_{Provider} +from prowler.providers.{provider}.services.{service}.{service}_client import {service}_client + +class {check_name}(Check): + """Ensure that {resource} meets {security_requirement}.""" + def execute(self) -> list[Check_Report_{Provider}]: + """Execute the check logic. + + Returns: + A list of reports containing the result of the check. + """ + findings = [] + for resource in {service}_client.{resources}: + report = Check_Report_{Provider}(metadata=self.metadata(), resource=resource) + report.status = "PASS" if resource.is_compliant else "FAIL" + report.status_extended = f"Resource {resource.name} compliance status." + findings.append(report) + return findings +``` + +### 4. Create Metadata File + +See complete schema below and `assets/` folder for complete templates. +For detailed field documentation, see `references/metadata-docs.md`. + +### 5. Verify Check Detection + +```bash +poetry run python prowler-cli.py {provider} --list-checks | grep {check_name} +``` + +### 6. Run Check Locally + +```bash +poetry run python prowler-cli.py {provider} --log-level ERROR --verbose --check {check_name} +``` + +### 7. Create Tests + +See `prowler-test-sdk` skill for test patterns (PASS, FAIL, no resources, error handling). 
+ +--- + +## Check Naming Convention + +``` +{service}_{resource}_{security_control} +``` + +Examples: +- `ec2_instance_public_ip_disabled` +- `s3_bucket_encryption_enabled` +- `iam_user_mfa_enabled` + +--- + +## Metadata Schema (COMPLETE) + +```json +{ + "Provider": "aws", + "CheckID": "{check_name}", + "CheckTitle": "Human-readable title", + "CheckType": [ + "Software and Configuration Checks/AWS Security Best Practices", + "Software and Configuration Checks/Industry and Regulatory Standards/AWS Foundational Security Best Practices" + ], + "ServiceName": "{service}", + "SubServiceName": "", + "ResourceIdTemplate": "", + "Severity": "low|medium|high|critical", + "ResourceType": "AwsEc2Instance|Other", + "ResourceGroup": "security|compute|storage|network", + "Description": "**Bold resource name**. Detailed explanation of what this check evaluates and why it matters.", + "Risk": "What happens if non-compliant. Explain attack vectors, data exposure risks, compliance impact.", + "RelatedUrl": "", + "AdditionalURLs": [ + "https://docs.aws.amazon.com/..." + ], + "Remediation": { + "Code": { + "CLI": "aws {service} {command} --option value", + "NativeIaC": "```yaml\nResources:\n Resource:\n Type: AWS::{Service}::{Resource}\n Properties:\n Key: value # This line fixes the issue\n```", + "Other": "1. Console steps\n2. 
Step by step", + "Terraform": "```hcl\nresource \"aws_{service}_{resource}\" \"example\" {\n key = \"value\" # This line fixes the issue\n}\n```" + }, + "Recommendation": { + "Text": "Detailed recommendation for remediation.", + "Url": "https://hub.prowler.com/check/{check_name}" + } + }, + "Categories": [ + "identity-access", + "encryption", + "logging", + "forensics-ready", + "internet-exposed", + "trust-boundaries" + ], + "DependsOn": [], + "RelatedTo": [], + "Notes": "" +} +``` + +### Required Fields + +| Field | Description | +|-------|-------------| +| `Provider` | Provider name: aws, azure, gcp, kubernetes, github, m365 | +| `CheckID` | Must match class name and folder name | +| `CheckTitle` | Human-readable title | +| `Severity` | `low`, `medium`, `high`, `critical` | +| `ServiceName` | Service being checked | +| `Description` | What the check evaluates | +| `Risk` | Security impact of non-compliance | +| `Remediation.Code.CLI` | CLI fix command | +| `Remediation.Recommendation.Text` | How to fix | + +### Severity Guidelines + +| Severity | When to Use | +|----------|-------------| +| `critical` | Direct data exposure, RCE, privilege escalation | +| `high` | Significant security risk, compliance violation | +| `medium` | Defense-in-depth, best practice | +| `low` | Informational, minor hardening | + +--- + +## Check Report Statuses + +| Status | When to Use | +|--------|-------------| +| `PASS` | Resource is compliant | +| `FAIL` | Resource is non-compliant | +| `MANUAL` | Requires human verification | + +--- + +## Common Patterns + +### AWS Check with Regional Resources + +```python +from prowler.lib.check.models import Check, Check_Report_AWS +from prowler.providers.aws.services.s3.s3_client import s3_client + +class s3_bucket_encryption_enabled(Check): + def execute(self) -> list[Check_Report_AWS]: + findings = [] + for bucket in s3_client.buckets.values(): + report = Check_Report_AWS(metadata=self.metadata(), resource=bucket) + if bucket.encryption: + 
report.status = "PASS" + report.status_extended = f"S3 bucket {bucket.name} has encryption enabled." + else: + report.status = "FAIL" + report.status_extended = f"S3 bucket {bucket.name} does not have encryption enabled." + findings.append(report) + return findings +``` + +### Check with Multiple Conditions + +```python +from prowler.lib.check.models import Check, Check_Report_AWS +from prowler.providers.aws.services.ec2.ec2_client import ec2_client + +class ec2_instance_hardened(Check): + def execute(self) -> list[Check_Report_AWS]: + findings = [] + for instance in ec2_client.instances: + report = Check_Report_AWS(metadata=self.metadata(), resource=instance) + + issues = [] + if instance.public_ip: + issues.append("has public IP") + if not instance.metadata_options.http_tokens == "required": + issues.append("IMDSv2 not enforced") + + if issues: + report.status = "FAIL" + report.status_extended = f"Instance {instance.id} {', '.join(issues)}." + else: + report.status = "PASS" + report.status_extended = f"Instance {instance.id} is properly hardened." 
+ + findings.append(report) + return findings +``` + +--- + +## Commands + +```bash +# Verify detection +poetry run python prowler-cli.py {provider} --list-checks | grep {check_name} + +# Run check +poetry run python prowler-cli.py {provider} --log-level ERROR --verbose --check {check_name} + +# Run with specific profile/credentials +poetry run python prowler-cli.py aws --profile myprofile --check {check_name} + +# Run multiple checks +poetry run python prowler-cli.py {provider} --check {check1} {check2} {check3} +``` + +## Resources + +- **Templates**: See [assets/](assets/) for complete check and metadata templates (AWS, Azure, GCP) +- **Documentation**: See [references/metadata-docs.md](references/metadata-docs.md) for official Prowler Developer Guide links diff --git a/skills/prowler-sdk-check/assets/aws_check.py b/skills/prowler-sdk-check/assets/aws_check.py new file mode 100644 index 0000000000..9dd1f5c109 --- /dev/null +++ b/skills/prowler-sdk-check/assets/aws_check.py @@ -0,0 +1,20 @@ +# Example: AWS S3 Bucket Encryption Check +# Source: prowler/providers/aws/services/s3/s3_bucket_default_encryption/ + +from prowler.lib.check.models import Check, Check_Report_AWS +from prowler.providers.aws.services.s3.s3_client import s3_client + + +class s3_bucket_default_encryption(Check): + def execute(self): + findings = [] + for bucket in s3_client.buckets.values(): + report = Check_Report_AWS(metadata=self.metadata(), resource=bucket) + if bucket.encryption: + report.status = "PASS" + report.status_extended = f"S3 Bucket {bucket.name} has Server Side Encryption with {bucket.encryption}." + else: + report.status = "FAIL" + report.status_extended = f"S3 Bucket {bucket.name} does not have Server Side Encryption enabled." 
+ findings.append(report) + return findings diff --git a/skills/prowler-sdk-check/assets/aws_metadata.json b/skills/prowler-sdk-check/assets/aws_metadata.json new file mode 100644 index 0000000000..fbf1e657dd --- /dev/null +++ b/skills/prowler-sdk-check/assets/aws_metadata.json @@ -0,0 +1,35 @@ +{ + "Provider": "aws", + "CheckID": "s3_bucket_default_encryption", + "CheckTitle": "Check if S3 buckets have default encryption (SSE) enabled or use a bucket policy to enforce it.", + "CheckType": [ + "Data Protection" + ], + "ServiceName": "s3", + "SubServiceName": "", + "ResourceIdTemplate": "arn:partition:s3:::bucket_name", + "Severity": "medium", + "ResourceType": "AwsS3Bucket", + "ResourceGroup": "storage", + "Description": "Check if S3 buckets have default encryption (SSE) enabled or use a bucket policy to enforce it.", + "Risk": "Amazon S3 default encryption provides a way to set the default encryption behavior for an S3 bucket. This will ensure data-at-rest is encrypted.", + "RelatedUrl": "", + "Remediation": { + "Code": { + "CLI": "aws s3api put-bucket-encryption --bucket --server-side-encryption-configuration '{\"Rules\": [{\"ApplyServerSideEncryptionByDefault\": {\"SSEAlgorithm\": \"AES256\"}}]}'", + "NativeIaC": "https://docs.prowler.com/checks/aws/s3-policies/s3_14-data-encrypted-at-rest#cloudformation", + "Other": "", + "Terraform": "https://docs.prowler.com/checks/aws/s3-policies/s3_14-data-encrypted-at-rest#terraform" + }, + "Recommendation": { + "Text": "Ensure that S3 buckets have encryption at rest enabled.", + "Url": "https://aws.amazon.com/blogs/security/how-to-prevent-uploads-of-unencrypted-objects-to-amazon-s3/" + } + }, + "Categories": [ + "encryption" + ], + "DependsOn": [], + "RelatedTo": [], + "Notes": "" +} diff --git a/skills/prowler-sdk-check/assets/azure_check.py b/skills/prowler-sdk-check/assets/azure_check.py new file mode 100644 index 0000000000..f7e036e7a5 --- /dev/null +++ b/skills/prowler-sdk-check/assets/azure_check.py @@ -0,0 +1,25 @@ 
+# Example: Azure Storage Secure Transfer Check +# Source: prowler/providers/azure/services/storage/storage_secure_transfer_required_is_enabled/ + +from prowler.lib.check.models import Check, Check_Report_Azure +from prowler.providers.azure.services.storage.storage_client import storage_client + + +class storage_secure_transfer_required_is_enabled(Check): + def execute(self) -> list[Check_Report_Azure]: + findings = [] + for subscription, storage_accounts in storage_client.storage_accounts.items(): + for storage_account in storage_accounts: + report = Check_Report_Azure( + metadata=self.metadata(), resource=storage_account + ) + report.subscription = subscription + report.status = "PASS" + report.status_extended = f"Storage account {storage_account.name} from subscription {subscription} has secure transfer required enabled." + if not storage_account.enable_https_traffic_only: + report.status = "FAIL" + report.status_extended = f"Storage account {storage_account.name} from subscription {subscription} has secure transfer required disabled." 
+ + findings.append(report) + + return findings diff --git a/skills/prowler-sdk-check/assets/azure_metadata.json b/skills/prowler-sdk-check/assets/azure_metadata.json new file mode 100644 index 0000000000..e278849592 --- /dev/null +++ b/skills/prowler-sdk-check/assets/azure_metadata.json @@ -0,0 +1,33 @@ +{ + "Provider": "azure", + "CheckID": "storage_secure_transfer_required_is_enabled", + "CheckTitle": "Ensure that all data transferred between clients and your Azure Storage account is encrypted using the HTTPS protocol.", + "CheckType": [], + "ServiceName": "storage", + "SubServiceName": "", + "ResourceIdTemplate": "", + "Severity": "medium", + "ResourceType": "AzureStorageAccount", + "ResourceGroup": "storage", + "Description": "Ensure that all data transferred between clients and your Azure Storage account is encrypted using the HTTPS protocol.", + "Risk": "Requests to the storage account sent outside of a secure connection can be eavesdropped", + "RelatedUrl": "", + "Remediation": { + "Code": { + "CLI": "az storage account update --name --https-only true", + "NativeIaC": "", + "Other": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/azure/StorageAccounts/secure-transfer-required.html", + "Terraform": "https://docs.prowler.com/checks/azure/azure-networking-policies/ensure-that-storage-account-enables-secure-transfer" + }, + "Recommendation": { + "Text": "Enable data encryption in transit.", + "Url": "" + } + }, + "Categories": [ + "encryption" + ], + "DependsOn": [], + "RelatedTo": [], + "Notes": "" +} diff --git a/skills/prowler-sdk-check/assets/gcp_check.py b/skills/prowler-sdk-check/assets/gcp_check.py new file mode 100644 index 0000000000..164a2b0a3d --- /dev/null +++ b/skills/prowler-sdk-check/assets/gcp_check.py @@ -0,0 +1,29 @@ +# Example: GCP Cloud Storage Bucket Versioning Check +# Source: prowler/providers/gcp/services/cloudstorage/cloudstorage_bucket_versioning_enabled/ + +from prowler.lib.check.models import Check, Check_Report_GCP 
+from prowler.providers.gcp.services.cloudstorage.cloudstorage_client import ( + cloudstorage_client, +) + + +class cloudstorage_bucket_versioning_enabled(Check): + """Ensure Cloud Storage buckets have Object Versioning enabled.""" + + def execute(self) -> list[Check_Report_GCP]: + findings = [] + for bucket in cloudstorage_client.buckets: + report = Check_Report_GCP(metadata=self.metadata(), resource=bucket) + report.status = "FAIL" + report.status_extended = ( + f"Bucket {bucket.name} does not have Object Versioning enabled." + ) + + if bucket.versioning_enabled: + report.status = "PASS" + report.status_extended = ( + f"Bucket {bucket.name} has Object Versioning enabled." + ) + + findings.append(report) + return findings diff --git a/skills/prowler-sdk-check/assets/gcp_metadata.json b/skills/prowler-sdk-check/assets/gcp_metadata.json new file mode 100644 index 0000000000..b6e28b0e17 --- /dev/null +++ b/skills/prowler-sdk-check/assets/gcp_metadata.json @@ -0,0 +1,37 @@ +{ + "Provider": "gcp", + "CheckID": "cloudstorage_bucket_versioning_enabled", + "CheckTitle": "Cloud Storage buckets have Object Versioning enabled", + "CheckType": [], + "ServiceName": "cloudstorage", + "SubServiceName": "", + "ResourceIdTemplate": "", + "Severity": "medium", + "ResourceType": "storage.googleapis.com/Bucket", + "ResourceGroup": "storage", + "Description": "Google Cloud Storage buckets are evaluated to ensure that Object Versioning is enabled.", + "Risk": "Buckets without Object Versioning enabled cannot recover previous object versions after accidental deletion or overwrites.", + "RelatedUrl": "", + "AdditionalURLs": [ + "https://www.trendmicro.com/cloudoneconformity/knowledge-base/gcp/CloudStorage/enable-versioning.html", + "https://cloud.google.com/storage/docs/object-versioning" + ], + "Remediation": { + "Code": { + "CLI": "gcloud storage buckets update gs:// --versioning", + "NativeIaC": "", + "Other": "1) Open Google Cloud Console -> Storage -> Buckets\n2) Select the 
bucket\n3) Click 'Edit bucket' -> 'Protection'\n4) Enable 'Object versioning'\n5) Save", + "Terraform": "resource \"google_storage_bucket\" \"example\" {\n versioning {\n enabled = true\n }\n}" + }, + "Recommendation": { + "Text": "Enable Object Versioning on Cloud Storage buckets to protect against accidental data loss.", + "Url": "https://hub.prowler.com/check/cloudstorage_bucket_versioning_enabled" + } + }, + "Categories": [ + "resilience" + ], + "DependsOn": [], + "RelatedTo": [], + "Notes": "" +} diff --git a/skills/prowler-sdk-check/references/metadata-docs.md b/skills/prowler-sdk-check/references/metadata-docs.md new file mode 100644 index 0000000000..63a702a9d0 --- /dev/null +++ b/skills/prowler-sdk-check/references/metadata-docs.md @@ -0,0 +1,19 @@ +# Check Documentation + +## Local Documentation + +For detailed check development patterns, see: + +- `docs/developer-guide/checks.mdx` - Complete guide for creating security checks +- `docs/developer-guide/check-metadata-guidelines.mdx` - Metadata writing standards and best practices +- `docs/developer-guide/configurable-checks.mdx` - Using audit_config for configurable checks +- `docs/developer-guide/renaming-checks.mdx` - Guidelines for renaming existing checks + +## Contents + +The documentation covers: +- Check structure and naming conventions +- Metadata schema and field descriptions +- Check implementation patterns per provider +- Configurable check parameters +- Check renaming procedures diff --git a/skills/prowler-test-api/SKILL.md b/skills/prowler-test-api/SKILL.md new file mode 100644 index 0000000000..b53eddc44b --- /dev/null +++ b/skills/prowler-test-api/SKILL.md @@ -0,0 +1,118 @@ +--- +name: prowler-test-api +description: > + Testing patterns for Prowler API: ViewSets, Celery tasks, RLS isolation, RBAC. + Trigger: When writing tests for api/ - viewsets, serializers, tasks, models. 
+license: Apache-2.0 +metadata: + author: prowler-cloud + version: "1.0" +allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, WebSearch, Task +--- + +## Critical Rules + +- ALWAYS use `response.json()["data"]` not `response.data` +- ALWAYS use `content_type = "application/vnd.api+json"` in requests +- ALWAYS test cross-tenant isolation with `other_tenant_provider` fixture +- NEVER skip RLS isolation tests when adding new endpoints + +--- + +## 1. JSON:API Format (Critical) + +```python +content_type = "application/vnd.api+json" + +payload = { + "data": { + "type": "providers", # Plural, kebab-case + "id": str(resource.id), # Required for PATCH + "attributes": {"alias": "updated"}, + } +} + +response.json()["data"]["attributes"]["alias"] +``` + +--- + +## 2. RLS Isolation Tests + +```python +def test_cross_tenant_access_denied(self, authenticated_client, other_tenant_provider): + """User cannot see resources from other tenants.""" + response = authenticated_client.get( + reverse("provider-detail", args=[other_tenant_provider.id]) + ) + assert response.status_code == status.HTTP_404_NOT_FOUND +``` + +--- + +## 3. RBAC Tests + +```python +def test_unlimited_visibility_sees_all(self, authenticated_client_admin, providers_fixture): + response = authenticated_client_admin.get(reverse("provider-list")) + assert len(response.json()["data"]) == len(providers_fixture) + +def test_limited_visibility_sees_only_assigned(self, authenticated_client_limited): + # User with unlimited_visibility=False sees only providers in their provider_groups + pass + +def test_permission_required(self, authenticated_client_readonly): + response = authenticated_client_readonly.post(reverse("provider-list"), ...) + assert response.status_code == status.HTTP_403_FORBIDDEN +``` + +--- + +## 4. 
Managers (objects vs all_objects) + +```python +def test_objects_excludes_deleted(self): + deleted_provider = Provider.objects.create(..., is_deleted=True) + assert deleted_provider not in Provider.objects.all() + assert deleted_provider in Provider.all_objects.all() +``` + +--- + +## 5. Celery Task Tests + +```python +@patch("tasks.tasks.perform_prowler_scan") +def test_task_success(self, mock_scan): + mock_scan.return_value = {"findings_count": 100} + result = perform_scan_task(tenant_id="...", scan_id="...", provider_id="...") + assert result["findings_count"] == 100 +``` + +--- + +## 6. Key Fixtures + +| Fixture | Description | +|---------|-------------| +| `create_test_user` | Session user (dev@prowler.com) | +| `tenants_fixture` | 3 tenants (2 with membership, 1 isolated) | +| `providers_fixture` | Providers in tenant 1 | +| `other_tenant_provider` | Provider in isolated tenant (RLS tests) | +| `authenticated_client` | Client with JWT for tenant 1 | + +--- + +## Commands + +```bash +cd api && poetry run pytest -x --tb=short +cd api && poetry run pytest -k "test_provider" +cd api && poetry run pytest -k "TestRBAC" +``` + +--- + +## Resources + +- **Documentation**: See [references/test-api-docs.md](references/test-api-docs.md) for local file paths and documentation diff --git a/skills/prowler-test-api/references/test-api-docs.md b/skills/prowler-test-api/references/test-api-docs.md new file mode 100644 index 0000000000..2aa3e0134f --- /dev/null +++ b/skills/prowler-test-api/references/test-api-docs.md @@ -0,0 +1,18 @@ +# API Test Documentation + +## Local Documentation + +For API testing patterns, see: + +- `api/src/backend/conftest.py` - All fixtures +- `api/src/backend/api/tests/` - API tests +- `api/src/backend/tasks/tests/` - Task tests + +## Contents + +The documentation covers: +- JSON:API format for requests/responses +- RLS isolation test patterns +- RBAC permission tests +- Celery task mocking +- Test fixtures and their usage diff --git 
a/skills/prowler-test-sdk/SKILL.md b/skills/prowler-test-sdk/SKILL.md new file mode 100644 index 0000000000..f9e08b5811 --- /dev/null +++ b/skills/prowler-test-sdk/SKILL.md @@ -0,0 +1,321 @@ +--- +name: prowler-test-sdk +description: > + Testing patterns for Prowler SDK (Python). + Trigger: When writing tests for checks, services, or providers. +license: Apache-2.0 +metadata: + author: prowler-cloud + version: "1.0" +allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, WebSearch, Task +--- + +> **Generic Patterns**: For base pytest patterns (fixtures, mocking, parametrize, markers), see the `pytest` skill. +> This skill covers **Prowler-specific** conventions only. +> +> **Full Documentation**: `docs/developer-guide/unit-testing.mdx` + +## CRITICAL: Provider-Specific Testing + +| Provider | Mocking Approach | Decorator | +|----------|------------------|-----------| +| **AWS** | `moto` library | `@mock_aws` | +| **Azure, GCP, K8s, others** | `MagicMock` | None | + +**NEVER use moto for non-AWS providers. 
NEVER use MagicMock for AWS.** + +--- + +## AWS Check Test Pattern + +```python +from unittest import mock +from boto3 import client +from moto import mock_aws +from tests.providers.aws.utils import AWS_REGION_US_EAST_1, set_mocked_aws_provider + + +class Test_{check_name}: + @mock_aws + def test_no_resources(self): + from prowler.providers.aws.services.{service}.{service}_service import {ServiceClass} + + aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1]) + + with mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=aws_provider, + ): + with mock.patch( + "prowler.providers.aws.services.{service}.{check_name}.{check_name}.{service}_client", + new={ServiceClass}(aws_provider), + ): + from prowler.providers.aws.services.{service}.{check_name}.{check_name} import ( + {check_name}, + ) + + check = {check_name}() + result = check.execute() + + assert len(result) == 0 + + @mock_aws + def test_{check_name}_pass(self): + # Setup AWS resources with moto + {service}_client = client("{service}", region_name=AWS_REGION_US_EAST_1) + # Create compliant resource... + + from prowler.providers.aws.services.{service}.{service}_service import {ServiceClass} + + aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1]) + + with mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=aws_provider, + ): + with mock.patch( + "prowler.providers.aws.services.{service}.{check_name}.{check_name}.{service}_client", + new={ServiceClass}(aws_provider), + ): + from prowler.providers.aws.services.{service}.{check_name}.{check_name} import ( + {check_name}, + ) + + check = {check_name}() + result = check.execute() + + assert len(result) == 1 + assert result[0].status == "PASS" + + @mock_aws + def test_{check_name}_fail(self): + # Setup AWS resources with moto + {service}_client = client("{service}", region_name=AWS_REGION_US_EAST_1) + # Create non-compliant resource... 
+ + from prowler.providers.aws.services.{service}.{service}_service import {ServiceClass} + + aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1]) + + with mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=aws_provider, + ): + with mock.patch( + "prowler.providers.aws.services.{service}.{check_name}.{check_name}.{service}_client", + new={ServiceClass}(aws_provider), + ): + from prowler.providers.aws.services.{service}.{check_name}.{check_name} import ( + {check_name}, + ) + + check = {check_name}() + result = check.execute() + + assert len(result) == 1 + assert result[0].status == "FAIL" +``` + +> **Critical**: Always import the check INSIDE the mock.patch context to ensure proper client mocking. + +--- + +## Azure Check Test Pattern + +**NO moto decorator. Use MagicMock to mock the service client directly.** + +```python +from unittest import mock +from uuid import uuid4 + +from prowler.providers.azure.services.{service}.{service}_service import {ResourceModel} +from tests.providers.azure.azure_fixtures import ( + AZURE_SUBSCRIPTION_ID, + set_mocked_azure_provider, +) + + +class Test_{check_name}: + def test_no_resources(self): + {service}_client = mock.MagicMock + {service}_client.{resources} = {} + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=set_mocked_azure_provider(), + ), + mock.patch( + "prowler.providers.azure.services.{service}.{check_name}.{check_name}.{service}_client", + new={service}_client, + ), + ): + from prowler.providers.azure.services.{service}.{check_name}.{check_name} import ( + {check_name}, + ) + + check = {check_name}() + result = check.execute() + assert len(result) == 0 + + def test_{check_name}_pass(self): + resource_id = str(uuid4()) + resource_name = "Test Resource" + + {service}_client = mock.MagicMock + {service}_client.{resources} = { + AZURE_SUBSCRIPTION_ID: { + resource_id: {ResourceModel}( + id=resource_id, + 
name=resource_name, + location="westeurope", + # ... compliant attributes + ) + } + } + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=set_mocked_azure_provider(), + ), + mock.patch( + "prowler.providers.azure.services.{service}.{check_name}.{check_name}.{service}_client", + new={service}_client, + ), + ): + from prowler.providers.azure.services.{service}.{check_name}.{check_name} import ( + {check_name}, + ) + + check = {check_name}() + result = check.execute() + + assert len(result) == 1 + assert result[0].status == "PASS" + assert result[0].subscription == AZURE_SUBSCRIPTION_ID + assert result[0].resource_name == resource_name + + def test_{check_name}_fail(self): + resource_id = str(uuid4()) + resource_name = "Test Resource" + + {service}_client = mock.MagicMock + {service}_client.{resources} = { + AZURE_SUBSCRIPTION_ID: { + resource_id: {ResourceModel}( + id=resource_id, + name=resource_name, + location="westeurope", + # ... non-compliant attributes + ) + } + } + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=set_mocked_azure_provider(), + ), + mock.patch( + "prowler.providers.azure.services.{service}.{check_name}.{check_name}.{service}_client", + new={service}_client, + ), + ): + from prowler.providers.azure.services.{service}.{check_name}.{check_name} import ( + {check_name}, + ) + + check = {check_name}() + result = check.execute() + + assert len(result) == 1 + assert result[0].status == "FAIL" +``` + +--- + +## GCP/Kubernetes/Other Providers + +Follow the same MagicMock pattern as Azure: + +```python +from tests.providers.gcp.gcp_fixtures import set_mocked_gcp_provider, GCP_PROJECT_ID +from tests.providers.kubernetes.kubernetes_fixtures import set_mocked_kubernetes_provider +``` + +**Key difference**: Each provider has its own fixtures file with `set_mocked_{provider}_provider`. 
+ +--- + +## Provider Fixtures Reference + +| Provider | Fixtures File | Key Constants | +|----------|---------------|---------------| +| AWS | `tests/providers/aws/utils.py` | `AWS_REGION_US_EAST_1`, `AWS_ACCOUNT_NUMBER` | +| Azure | `tests/providers/azure/azure_fixtures.py` | `AZURE_SUBSCRIPTION_ID` | +| GCP | `tests/providers/gcp/gcp_fixtures.py` | `GCP_PROJECT_ID` | +| K8s | `tests/providers/kubernetes/kubernetes_fixtures.py` | - | + +--- + +## Test File Structure + +``` +tests/providers/{provider}/services/{service}/ +├── {service}_service_test.py # Service tests +└── {check_name}/ + └── {check_name}_test.py # Check tests +``` + +--- + +## Required Test Scenarios + +Every check MUST test: + +| Scenario | Expected | +|----------|----------| +| Resource compliant | `status == "PASS"` | +| Resource non-compliant | `status == "FAIL"` | +| No resources | `len(results) == 0` | + +--- + +## Assertions to Include + +```python +# Always verify these +assert result[0].status == "PASS" # or "FAIL" +assert result[0].status_extended == "Expected message..." 
+assert result[0].resource_id == expected_id +assert result[0].resource_name == expected_name + +# Provider-specific +assert result[0].region == "us-east-1" # AWS +assert result[0].subscription == AZURE_SUBSCRIPTION_ID # Azure +assert result[0].project_id == GCP_PROJECT_ID # GCP +``` + +--- + +## Commands + +```bash +# All SDK tests +poetry run pytest -n auto -vvv tests/ + +# Specific provider +poetry run pytest tests/providers/{provider}/ -v + +# Specific check +poetry run pytest tests/providers/{provider}/services/{service}/{check_name}/ -v + +# Stop on first failure +poetry run pytest -x tests/ +``` + +## Resources + +- **Templates**: See [assets/](assets/) for complete test templates (AWS with moto, Azure/GCP with MagicMock) +- **Documentation**: See [references/testing-docs.md](references/testing-docs.md) for official Prowler Developer Guide links diff --git a/skills/prowler-test-sdk/assets/aws_test.py b/skills/prowler-test-sdk/assets/aws_test.py new file mode 100644 index 0000000000..2137c5651f --- /dev/null +++ b/skills/prowler-test-sdk/assets/aws_test.py @@ -0,0 +1,149 @@ +# Example: AWS KMS Key Rotation Test +# Source: tests/providers/aws/services/kms/kms_cmk_rotation_enabled/ + +from unittest import mock + +import pytest +from boto3 import client +from moto import mock_aws + +from tests.providers.aws.utils import AWS_REGION_US_EAST_1, set_mocked_aws_provider + + +class Test_kms_cmk_rotation_enabled: + @mock_aws + def test_kms_no_key(self): + """Test when no KMS keys exist.""" + from prowler.providers.aws.services.kms.kms_service import KMS + + aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1]) + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=aws_provider, + ), + mock.patch( + "prowler.providers.aws.services.kms.kms_cmk_rotation_enabled.kms_cmk_rotation_enabled.kms_client", + new=KMS(aws_provider), + ), + ): + from 
prowler.providers.aws.services.kms.kms_cmk_rotation_enabled.kms_cmk_rotation_enabled import ( + kms_cmk_rotation_enabled, + ) + + check = kms_cmk_rotation_enabled() + result = check.execute() + + assert len(result) == 0 + + @mock_aws + def test_kms_cmk_rotation_enabled(self): + """Test PASS: KMS key with rotation enabled.""" + # Create mocked AWS resources using boto3 + kms_client = client("kms", region_name=AWS_REGION_US_EAST_1) + key = kms_client.create_key()["KeyMetadata"] + kms_client.enable_key_rotation(KeyId=key["KeyId"]) + + from prowler.providers.aws.services.kms.kms_service import KMS + + aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1]) + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=aws_provider, + ), + mock.patch( + "prowler.providers.aws.services.kms.kms_cmk_rotation_enabled.kms_cmk_rotation_enabled.kms_client", + new=KMS(aws_provider), + ), + ): + from prowler.providers.aws.services.kms.kms_cmk_rotation_enabled.kms_cmk_rotation_enabled import ( + kms_cmk_rotation_enabled, + ) + + check = kms_cmk_rotation_enabled() + result = check.execute() + + assert len(result) == 1 + assert result[0].status == "PASS" + assert result[0].resource_id == key["KeyId"] + assert result[0].resource_arn == key["Arn"] + + @mock_aws + def test_kms_cmk_rotation_disabled(self): + """Test FAIL: KMS key without rotation enabled.""" + kms_client = client("kms", region_name=AWS_REGION_US_EAST_1) + key = kms_client.create_key()["KeyMetadata"] + # Note: rotation NOT enabled + + from prowler.providers.aws.services.kms.kms_service import KMS + + aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1]) + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=aws_provider, + ), + mock.patch( + "prowler.providers.aws.services.kms.kms_cmk_rotation_enabled.kms_cmk_rotation_enabled.kms_client", + new=KMS(aws_provider), + ), + ): + from 
prowler.providers.aws.services.kms.kms_cmk_rotation_enabled.kms_cmk_rotation_enabled import ( + kms_cmk_rotation_enabled, + ) + + check = kms_cmk_rotation_enabled() + result = check.execute() + + assert len(result) == 1 + assert result[0].status == "FAIL" + assert result[0].resource_id == key["KeyId"] + + @pytest.mark.parametrize( + "no_of_keys_created,expected_no_of_passes", + [ + (5, 3), + (7, 5), + (10, 8), + ], + ) + @mock_aws + def test_kms_rotation_parametrized( + self, no_of_keys_created: int, expected_no_of_passes: int + ) -> None: + """Parametrized test demonstrating multiple scenarios.""" + kms_client = client("kms", region_name=AWS_REGION_US_EAST_1) + + for i in range(no_of_keys_created): + key = kms_client.create_key()["KeyMetadata"] + if i not in [2, 4]: # Skip enabling rotation for some keys + kms_client.enable_key_rotation(KeyId=key["KeyId"]) + + from prowler.providers.aws.services.kms.kms_service import KMS + + aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1]) + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=aws_provider, + ), + mock.patch( + "prowler.providers.aws.services.kms.kms_cmk_rotation_enabled.kms_cmk_rotation_enabled.kms_client", + new=KMS(aws_provider), + ), + ): + from prowler.providers.aws.services.kms.kms_cmk_rotation_enabled.kms_cmk_rotation_enabled import ( + kms_cmk_rotation_enabled, + ) + + check = kms_cmk_rotation_enabled() + result = check.execute() + + assert len(result) == no_of_keys_created + statuses = [r.status for r in result] + assert statuses.count("PASS") == expected_no_of_passes diff --git a/skills/prowler-test-sdk/assets/azure_test.py b/skills/prowler-test-sdk/assets/azure_test.py new file mode 100644 index 0000000000..4bf85d65e6 --- /dev/null +++ b/skills/prowler-test-sdk/assets/azure_test.py @@ -0,0 +1,137 @@ +# Example: Azure Storage Network Access Rule Test +# Source: 
tests/providers/azure/services/storage/storage_default_network_access_rule_is_denied/ + +from unittest import mock +from uuid import uuid4 + +from prowler.providers.azure.services.storage.storage_service import ( + Account, + NetworkRuleSet, +) +from tests.providers.azure.azure_fixtures import ( + AZURE_SUBSCRIPTION_ID, + set_mocked_azure_provider, +) + + +class Test_storage_default_network_access_rule_is_denied: + def test_storage_no_storage_accounts(self): + """Test when no storage accounts exist.""" + storage_client = mock.MagicMock + storage_client.storage_accounts = {} + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=set_mocked_azure_provider(), + ), + mock.patch( + "prowler.providers.azure.services.storage.storage_default_network_access_rule_is_denied.storage_default_network_access_rule_is_denied.storage_client", + new=storage_client, + ), + ): + from prowler.providers.azure.services.storage.storage_default_network_access_rule_is_denied.storage_default_network_access_rule_is_denied import ( + storage_default_network_access_rule_is_denied, + ) + + check = storage_default_network_access_rule_is_denied() + result = check.execute() + assert len(result) == 0 + + def test_storage_network_access_rule_allowed(self): + """Test FAIL: Network access rule set to Allow.""" + storage_account_id = str(uuid4()) + storage_account_name = "Test Storage Account" + storage_client = mock.MagicMock + storage_client.storage_accounts = { + AZURE_SUBSCRIPTION_ID: [ + Account( + id=storage_account_id, + name=storage_account_name, + resouce_group_name="rg", + enable_https_traffic_only=False, + infrastructure_encryption=False, + allow_blob_public_access=False, + network_rule_set=NetworkRuleSet( + bypass="AzureServices", default_action="Allow" + ), + encryption_type="None", + minimum_tls_version="TLS1_2", + key_expiration_period_in_days=None, + location="westeurope", + private_endpoint_connections=[], + ) + ] + } + + with ( + 
mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=set_mocked_azure_provider(), + ), + mock.patch( + "prowler.providers.azure.services.storage.storage_default_network_access_rule_is_denied.storage_default_network_access_rule_is_denied.storage_client", + new=storage_client, + ), + ): + from prowler.providers.azure.services.storage.storage_default_network_access_rule_is_denied.storage_default_network_access_rule_is_denied import ( + storage_default_network_access_rule_is_denied, + ) + + check = storage_default_network_access_rule_is_denied() + result = check.execute() + + assert len(result) == 1 + assert result[0].status == "FAIL" + assert result[0].subscription == AZURE_SUBSCRIPTION_ID + assert result[0].resource_name == storage_account_name + assert result[0].resource_id == storage_account_id + assert result[0].location == "westeurope" + + def test_storage_network_access_rule_denied(self): + """Test PASS: Network access rule set to Deny.""" + storage_account_id = str(uuid4()) + storage_account_name = "Test Storage Account" + storage_client = mock.MagicMock + storage_client.storage_accounts = { + AZURE_SUBSCRIPTION_ID: [ + Account( + id=storage_account_id, + name=storage_account_name, + resouce_group_name="rg", + enable_https_traffic_only=False, + infrastructure_encryption=False, + allow_blob_public_access=False, + network_rule_set=NetworkRuleSet( + default_action="Deny", bypass="AzureServices" + ), + encryption_type="None", + minimum_tls_version="TLS1_2", + key_expiration_period_in_days=None, + location="westeurope", + private_endpoint_connections=[], + ) + ] + } + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=set_mocked_azure_provider(), + ), + mock.patch( + "prowler.providers.azure.services.storage.storage_default_network_access_rule_is_denied.storage_default_network_access_rule_is_denied.storage_client", + new=storage_client, + ), + ): + from 
prowler.providers.azure.services.storage.storage_default_network_access_rule_is_denied.storage_default_network_access_rule_is_denied import ( + storage_default_network_access_rule_is_denied, + ) + + check = storage_default_network_access_rule_is_denied() + result = check.execute() + + assert len(result) == 1 + assert result[0].status == "PASS" + assert result[0].subscription == AZURE_SUBSCRIPTION_ID + assert result[0].resource_name == storage_account_name diff --git a/skills/prowler-test-sdk/assets/gcp_test.py b/skills/prowler-test-sdk/assets/gcp_test.py new file mode 100644 index 0000000000..9f71a82121 --- /dev/null +++ b/skills/prowler-test-sdk/assets/gcp_test.py @@ -0,0 +1,126 @@ +# Example: GCP Cloud Storage Bucket Public Access Test +# Source: tests/providers/gcp/services/cloudstorage/cloudstorage_bucket_public_access/ + +from unittest import mock + +from tests.providers.gcp.gcp_fixtures import ( + GCP_PROJECT_ID, + GCP_US_CENTER1_LOCATION, + set_mocked_gcp_provider, +) + + +class TestCloudStorageBucketPublicAccess: + def test_bucket_public_access(self): + """Test FAIL: Bucket is publicly accessible.""" + cloudstorage_client = mock.MagicMock() + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=set_mocked_gcp_provider(), + ), + mock.patch( + "prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_public_access.cloudstorage_bucket_public_access.cloudstorage_client", + new=cloudstorage_client, + ), + ): + from prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_public_access.cloudstorage_bucket_public_access import ( + cloudstorage_bucket_public_access, + ) + from prowler.providers.gcp.services.cloudstorage.cloudstorage_service import ( + Bucket, + ) + + cloudstorage_client.project_ids = [GCP_PROJECT_ID] + cloudstorage_client.region = GCP_US_CENTER1_LOCATION + + cloudstorage_client.buckets = [ + Bucket( + name="example-bucket", + id="example-bucket", + region=GCP_US_CENTER1_LOCATION, + 
uniform_bucket_level_access=True, + public=True, + project_id=GCP_PROJECT_ID, + ) + ] + + check = cloudstorage_bucket_public_access() + result = check.execute() + + assert len(result) == 1 + assert result[0].status == "FAIL" + assert result[0].resource_id == "example-bucket" + assert result[0].resource_name == "example-bucket" + assert result[0].location == GCP_US_CENTER1_LOCATION + assert result[0].project_id == GCP_PROJECT_ID + + def test_bucket_no_public_access(self): + """Test PASS: Bucket is not publicly accessible.""" + cloudstorage_client = mock.MagicMock() + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=set_mocked_gcp_provider(), + ), + mock.patch( + "prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_public_access.cloudstorage_bucket_public_access.cloudstorage_client", + new=cloudstorage_client, + ), + ): + from prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_public_access.cloudstorage_bucket_public_access import ( + cloudstorage_bucket_public_access, + ) + from prowler.providers.gcp.services.cloudstorage.cloudstorage_service import ( + Bucket, + ) + + cloudstorage_client.project_ids = [GCP_PROJECT_ID] + cloudstorage_client.region = GCP_US_CENTER1_LOCATION + + cloudstorage_client.buckets = [ + Bucket( + name="example-bucket", + id="example-bucket", + region=GCP_US_CENTER1_LOCATION, + uniform_bucket_level_access=True, + public=False, + project_id=GCP_PROJECT_ID, + ) + ] + + check = cloudstorage_bucket_public_access() + result = check.execute() + + assert len(result) == 1 + assert result[0].status == "PASS" + assert result[0].resource_id == "example-bucket" + + def test_no_buckets(self): + """Test when no buckets exist.""" + cloudstorage_client = mock.MagicMock() + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=set_mocked_gcp_provider(), + ), + mock.patch( + 
"prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_public_access.cloudstorage_bucket_public_access.cloudstorage_client", + new=cloudstorage_client, + ), + ): + from prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_public_access.cloudstorage_bucket_public_access import ( + cloudstorage_bucket_public_access, + ) + + cloudstorage_client.project_ids = [GCP_PROJECT_ID] + cloudstorage_client.region = GCP_US_CENTER1_LOCATION + cloudstorage_client.buckets = [] + + check = cloudstorage_bucket_public_access() + result = check.execute() + + assert len(result) == 0 diff --git a/skills/prowler-test-sdk/references/testing-docs.md b/skills/prowler-test-sdk/references/testing-docs.md new file mode 100644 index 0000000000..75ec547372 --- /dev/null +++ b/skills/prowler-test-sdk/references/testing-docs.md @@ -0,0 +1,17 @@ +# SDK Testing Documentation + +## Local Documentation + +For detailed SDK testing patterns, see: + +- `docs/developer-guide/unit-testing.mdx` - Complete guide for writing check tests + +## Contents + +The documentation covers: +- AWS testing with moto (`@mock_aws` decorator) +- Azure testing with MagicMock +- GCP testing with MagicMock +- Provider-specific fixtures (`set_mocked_aws_provider`, etc.) +- Service dependency table for CI optimization +- Test structure and required scenarios diff --git a/skills/prowler-test-ui/SKILL.md b/skills/prowler-test-ui/SKILL.md new file mode 100644 index 0000000000..fbff0f9310 --- /dev/null +++ b/skills/prowler-test-ui/SKILL.md @@ -0,0 +1,190 @@ +--- +name: prowler-test-ui +description: > + E2E testing patterns for Prowler UI (Playwright). + Trigger: When writing E2E tests for the Next.js frontend. +license: Apache-2.0 +metadata: + author: prowler-cloud + version: "1.0" +allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, WebSearch, Task +--- + +> **Generic Patterns**: For base Playwright patterns (Page Object Model, selectors, helpers), see the `playwright` skill. 
+> This skill covers **Prowler-specific** conventions only. + +## Prowler UI Test Structure + +``` +ui/tests/ +├── base-page.ts # Prowler-specific base page +├── helpers.ts # Prowler test utilities +└── {page-name}/ + ├── {page-name}-page.ts # Page Object Model + ├── {page-name}.spec.ts # ALL tests (single file per feature) + └── {page-name}.md # Test documentation +``` + +--- + +## MCP Workflow - CRITICAL + +**⚠️ MANDATORY: If Playwright MCP tools are available, ALWAYS use them BEFORE creating tests.** + +1. **Navigate** to target page +2. **Take snapshot** to see actual DOM structure +3. **Interact** with forms/elements to verify real flow +4. **Document actual selectors** from snapshots +5. **Only then** write test code + +**Why**: Prevents tests based on assumptions. Real exploration = stable tests. + +--- + +## Prowler Base Page + +```typescript +import { Page, Locator, expect } from "@playwright/test"; + +export class BasePage { + constructor(protected page: Page) {} + + async goto(path: string): Promise { + await this.page.goto(path); + await this.page.waitForLoadState("networkidle"); + } + + async waitForPageLoad(): Promise { + await this.page.waitForLoadState("networkidle"); + } + + // Prowler-specific: notification handling + async waitForNotification(): Promise { + const notification = this.page.locator('[role="status"]'); + await notification.waitFor({ state: "visible" }); + return notification; + } + + async verifyNotificationMessage(message: string): Promise { + const notification = await this.waitForNotification(); + await expect(notification).toContainText(message); + } +} +``` + +--- + +## Prowler-Specific Pages + +### Providers Page + +```typescript +import { BasePage } from "../base-page"; + +export class ProvidersPage extends BasePage { + readonly addButton = this.page.getByRole("button", { name: "Add Provider" }); + readonly providerTable = this.page.getByRole("table"); + + async goto(): Promise { + await super.goto("/providers"); + } + + async 
addProvider(type: string, alias: string): Promise { + await this.addButton.click(); + await this.page.getByLabel("Provider Type").selectOption(type); + await this.page.getByLabel("Alias").fill(alias); + await this.page.getByRole("button", { name: "Create" }).click(); + } +} +``` + +### Scans Page + +```typescript +export class ScansPage extends BasePage { + readonly newScanButton = this.page.getByRole("button", { name: "New Scan" }); + readonly scanTable = this.page.getByRole("table"); + + async goto(): Promise { + await super.goto("/scans"); + } + + async startScan(providerAlias: string): Promise { + await this.newScanButton.click(); + await this.page.getByRole("combobox", { name: "Provider" }).click(); + await this.page.getByRole("option", { name: providerAlias }).click(); + await this.page.getByRole("button", { name: "Start Scan" }).click(); + } +} +``` + +--- + +## Test Tags for Prowler + +```typescript +test("Provider CRUD operations", + { tag: ["@critical", "@e2e", "@providers", "@PROV-E2E-001"] }, + async ({ page }) => { + // ... + } +); +``` + +| Category | Tags | +|----------|------| +| Priority | `@critical`, `@high`, `@medium`, `@low` | +| Type | `@e2e`, `@smoke`, `@regression` | +| Feature | `@providers`, `@scans`, `@findings`, `@compliance`, `@signin`, `@signup` | +| Test ID | `@PROV-E2E-001`, `@SCAN-E2E-002` | + +--- + +## Prowler Test Documentation Template + +**Keep under 60 lines. Focus on flow, preconditions, expected results only.** + +```markdown +### E2E Tests: {Feature Name} + +**Suite ID:** `{SUITE-ID}` +**Feature:** {Feature description} + +--- + +## Test Case: `{TEST-ID}` - {Test case title} + +**Priority:** `{critical|high|medium|low}` +**Tags:** @e2e, @{feature-name} + +**Preconditions:** +- {Prerequisites} + +### Flow Steps: +1. {Step} +2. 
{Step} + +### Expected Result: +- {Outcome} + +### Key Verification Points: +- {Assertion} +``` + +--- + +## Commands + +```bash +cd ui && pnpm run test:e2e # All tests +cd ui && pnpm run test:e2e tests/providers/ # Specific folder +cd ui && pnpm run test:e2e --grep "provider" # By pattern +cd ui && pnpm run test:e2e:ui # With UI +cd ui && pnpm run test:e2e:debug # Debug mode +cd ui && pnpm run test:e2e:headed # See browser +cd ui && pnpm run test:e2e:report # Generate report +``` + +## Resources + +- **Documentation**: See [references/](references/) for links to local developer guide diff --git a/skills/prowler-test-ui/references/e2e-docs.md b/skills/prowler-test-ui/references/e2e-docs.md new file mode 100644 index 0000000000..3ea6f4db47 --- /dev/null +++ b/skills/prowler-test-ui/references/e2e-docs.md @@ -0,0 +1,17 @@ +# E2E Testing Documentation + +## Local Documentation + +For Playwright E2E testing patterns, see: + +- `docs/developer-guide/end2end-testing.mdx` - Complete E2E testing guide + +## Contents + +The documentation covers: +- Playwright setup and configuration +- Page Object Model patterns +- Authentication states (`admin.auth.setup`, etc.) +- Environment variables (`E2E_*`) +- Test tagging conventions (`@PROVIDER-E2E-001`) +- Serial test requirements diff --git a/skills/prowler-ui/SKILL.md b/skills/prowler-ui/SKILL.md new file mode 100644 index 0000000000..0c9e338e91 --- /dev/null +++ b/skills/prowler-ui/SKILL.md @@ -0,0 +1,210 @@ +--- +name: prowler-ui +description: > + Prowler UI-specific patterns. For generic patterns, see: typescript, react-19, nextjs-15, tailwind-4. + Trigger: When working on ui/ directory - components, pages, actions, hooks. 
+license: Apache-2.0 +metadata: + author: prowler-cloud + version: "1.0" +allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, WebSearch, Task +--- + +## Related Generic Skills + +- `typescript` - Const types, flat interfaces +- `react-19` - No useMemo/useCallback, compiler +- `nextjs-15` - App Router, Server Actions +- `tailwind-4` - cn() utility, styling rules +- `zod-4` - Schema validation +- `zustand-5` - State management +- `ai-sdk-5` - Chat/AI features +- `playwright` - E2E testing (see also `prowler-test-ui`) + +## Tech Stack (Versions) + +``` +Next.js 15.5.9 | React 19.2.2 | Tailwind 4.1.13 | shadcn/ui +Zod 4.1.11 | React Hook Form 7.62.0 | Zustand 5.0.8 +NextAuth 5.0.0-beta.30 | Recharts 2.15.4 +HeroUI 2.8.4 (LEGACY - do not add new components) +``` + +## CRITICAL: Component Library Rule + +- **ALWAYS**: Use `shadcn/ui` + Tailwind (`components/shadcn/`) +- **NEVER**: Add new HeroUI components (`components/ui/` is legacy only) + +## DECISION TREES + +### Component Placement + +``` +New feature UI? → shadcn/ui + Tailwind +Existing HeroUI feature? → Keep HeroUI (don't mix) +Used 1 feature? → features/{feature}/components/ +Used 2+ features? → components/shared/ +Needs state/hooks? → "use client" +Server component? → No directive needed +``` + +### Code Location + +``` +Server action → actions/{feature}/{feature}.ts +Data transform → actions/{feature}/{feature}.adapter.ts +Types (shared 2+) → types/{domain}.ts +Types (local 1) → {feature}/types.ts +Utils (shared 2+) → lib/ +Utils (local 1) → {feature}/utils/ +Hooks (shared 2+) → hooks/ +Hooks (local 1) → {feature}/hooks.ts +shadcn components → components/shadcn/ +HeroUI components → components/ui/ (LEGACY) +``` + +### Styling Decision + +``` +Tailwind class exists? → className +Dynamic value? → style prop +Conditional styles? → cn() +Static only? → className (no cn()) +Recharts/library? 
→ CHART_COLORS constant + var() +``` + +### Scope Rule (ABSOLUTE) + +- Used 2+ places → `lib/` or `types/` or `hooks/` (components go in `components/{domain}/`) +- Used 1 place → keep local in feature directory +- **This determines ALL folder structure decisions** + +## Project Structure + +``` +ui/ +├── app/ +│ ├── (auth)/ # Auth pages (login, signup) +│ └── (prowler)/ # Main app +│ ├── compliance/ +│ ├── findings/ +│ ├── providers/ +│ ├── scans/ +│ ├── services/ +│ └── integrations/ +├── components/ +│ ├── shadcn/ # shadcn/ui (USE THIS) +│ ├── ui/ # HeroUI (LEGACY) +│ ├── {domain}/ # Domain-specific (compliance, findings, providers, etc.) +│ ├── filters/ # Filter components +│ ├── graphs/ # Chart components +│ └── icons/ # Icon components +├── actions/ # Server actions +├── types/ # Shared types +├── hooks/ # Shared hooks +├── lib/ # Utilities +├── store/ # Zustand state +├── tests/ # Playwright E2E +└── styles/ # Global CSS +``` + +## Recharts (Special Case) + +For Recharts props that don't accept className: + +```typescript +const CHART_COLORS = { + primary: "var(--color-primary)", + secondary: "var(--color-secondary)", + text: "var(--color-text)", + gridLine: "var(--color-border)", +}; + +// Only use var() for library props, NEVER in className + + +``` + +## Form + Validation Pattern + +```typescript +"use client"; +import { useForm } from "react-hook-form"; +import { zodResolver } from "@hookform/resolvers/zod"; +import { z } from "zod"; + +const schema = z.object({ + email: z.email(), // Zod 4 syntax + name: z.string().min(1), +}); + +type FormData = z.infer; + +export function MyForm() { + const { register, handleSubmit, formState: { errors } } = useForm({ + resolver: zodResolver(schema), + }); + + const onSubmit = async (data: FormData) => { + await serverAction(data); + }; + + return ( +
+ + {errors.email && {errors.email.message}} + +
+ ); +} +``` + +## Commands + +```bash +# Development +cd ui && pnpm install +cd ui && pnpm run dev + +# Code Quality +cd ui && pnpm run typecheck +cd ui && pnpm run lint:fix +cd ui && pnpm run format:write +cd ui && pnpm run healthcheck # typecheck + lint + +# Testing +cd ui && pnpm run test:e2e +cd ui && pnpm run test:e2e:ui +cd ui && pnpm run test:e2e:debug + +# Build +cd ui && pnpm run build +cd ui && pnpm start +``` + +## QA Checklist Before Commit + +- [ ] `pnpm run typecheck` passes +- [ ] `pnpm run lint:fix` passes +- [ ] `pnpm run format:write` passes +- [ ] Relevant E2E tests pass +- [ ] All UI states handled (loading, error, empty) +- [ ] No secrets in code (use `.env.local`) +- [ ] Error messages sanitized (no stack traces to users) +- [ ] Server-side validation present (don't trust client) +- [ ] Accessibility: keyboard navigation, ARIA labels +- [ ] Mobile responsive (if applicable) + +## Migrations Reference + +| From | To | Key Changes | +|------|-----|-------------| +| React 18 | 19.1 | Async components, React Compiler (no useMemo/useCallback) | +| Next.js 14 | 15.5 | Improved App Router, better streaming | +| NextUI | HeroUI 2.8.4 | Package rename only, same API | +| Zod 3 | 4 | `z.email()` not `z.string().email()`, `error` not `message` | +| AI SDK 4 | 5 | `@ai-sdk/react`, `sendMessage` not `handleSubmit`, `parts` not `content` | + +## Resources + +- **Documentation**: See [references/](references/) for links to local developer guide diff --git a/skills/prowler-ui/references/ui-docs.md b/skills/prowler-ui/references/ui-docs.md new file mode 100644 index 0000000000..81efd37ea8 --- /dev/null +++ b/skills/prowler-ui/references/ui-docs.md @@ -0,0 +1,14 @@ +# UI Documentation + +## Local Documentation + +For UI-related patterns, see: + +- `docs/developer-guide/lighthouse.mdx` - AI agent integration and Lighthouse patterns + +## Contents + +The documentation covers: +- AI agent integration in the UI +- Lighthouse performance patterns +- Component 
optimization diff --git a/skills/prowler/SKILL.md b/skills/prowler/SKILL.md new file mode 100644 index 0000000000..bb520fe42b --- /dev/null +++ b/skills/prowler/SKILL.md @@ -0,0 +1,63 @@ +--- +name: prowler +description: > + Main entry point for Prowler development - quick reference for all components. + Trigger: General Prowler development questions, project overview, component navigation. +license: Apache-2.0 +metadata: + author: prowler-cloud + version: "1.0" +allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, WebSearch, Task +--- + +## Components + +| Component | Stack | Location | +|-----------|-------|----------| +| SDK | Python 3.9+, Poetry | `prowler/` | +| API | Django 5.1, DRF, Celery | `api/` | +| UI | Next.js 15, React 19, Tailwind 4 | `ui/` | +| MCP | FastMCP 2.13.1 | `mcp_server/` | + +## Quick Commands + +```bash +# SDK +poetry install --with dev +poetry run python prowler-cli.py aws --check check_name +poetry run pytest tests/ + +# API +cd api && poetry run python src/backend/manage.py runserver +cd api && poetry run pytest + +# UI +cd ui && pnpm run dev +cd ui && pnpm run healthcheck + +# MCP +cd mcp_server && uv run prowler-mcp + +# Full Stack +docker-compose up -d +``` + +## Providers + +AWS, Azure, GCP, Kubernetes, GitHub, M365, OCI, AlibabaCloud, MongoDB Atlas, IaC + +## Commit Style + +`feat:`, `fix:`, `docs:`, `chore:`, `perf:`, `refactor:`, `test:` + +## Related Skills + +- `prowler-sdk-check` - Create security checks +- `prowler-api` - Django/DRF patterns +- `prowler-ui` - Next.js/React patterns +- `prowler-mcp` - MCP server tools +- `prowler-test` - Testing patterns + +## Resources + +- **Documentation**: See [references/](references/) for links to local developer guide diff --git a/skills/prowler/references/prowler-docs.md b/skills/prowler/references/prowler-docs.md new file mode 100644 index 0000000000..1b09c832af --- /dev/null +++ b/skills/prowler/references/prowler-docs.md @@ -0,0 +1,14 @@ +# Prowler Documentation + +## 
Local Documentation + +For project overview and development setup, see: + +- `docs/developer-guide/introduction.mdx` - Repository structure, setup, and development environment + +## Contents + +The documentation covers: +- Project structure overview +- Development environment setup +- Repository conventions diff --git a/skills/pytest/SKILL.md b/skills/pytest/SKILL.md new file mode 100644 index 0000000000..bb2328cc41 --- /dev/null +++ b/skills/pytest/SKILL.md @@ -0,0 +1,192 @@ +--- +name: pytest +description: > + Pytest testing patterns for Python. + Trigger: When writing Python tests - fixtures, mocking, markers. +license: Apache-2.0 +metadata: + author: prowler-cloud + version: "1.0" +allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, WebSearch, Task +--- + +## Basic Test Structure + +```python +import pytest + +class TestUserService: + def test_create_user_success(self): + user = create_user(name="John", email="john@test.com") + assert user.name == "John" + assert user.email == "john@test.com" + + def test_create_user_invalid_email_fails(self): + with pytest.raises(ValueError, match="Invalid email"): + create_user(name="John", email="invalid") +``` + +## Fixtures + +```python +import pytest + +@pytest.fixture +def user(): + """Create a test user.""" + return User(name="Test User", email="test@example.com") + +@pytest.fixture +def authenticated_client(client, user): + """Client with authenticated user.""" + client.force_login(user) + return client + +# Fixture with teardown +@pytest.fixture +def temp_file(): + path = Path("/tmp/test_file.txt") + path.write_text("test content") + yield path # Test runs here + path.unlink() # Cleanup after test + +# Fixture scopes +@pytest.fixture(scope="module") # Once per module +@pytest.fixture(scope="class") # Once per class +@pytest.fixture(scope="session") # Once per test session +``` + +## conftest.py + +```python +# tests/conftest.py - Shared fixtures +import pytest + +@pytest.fixture +def db_session(): + session 
= create_session() + yield session + session.rollback() + +@pytest.fixture +def api_client(): + return TestClient(app) +``` + +## Mocking + +```python +from unittest.mock import patch, MagicMock + +class TestPaymentService: + def test_process_payment_success(self): + with patch("services.payment.stripe_client") as mock_stripe: + mock_stripe.charge.return_value = {"id": "ch_123", "status": "succeeded"} + + result = process_payment(amount=100) + + assert result["status"] == "succeeded" + mock_stripe.charge.assert_called_once_with(amount=100) + + def test_process_payment_failure(self): + with patch("services.payment.stripe_client") as mock_stripe: + mock_stripe.charge.side_effect = PaymentError("Card declined") + + with pytest.raises(PaymentError): + process_payment(amount=100) + +# MagicMock for complex objects +def test_with_mock_object(): + mock_user = MagicMock() + mock_user.id = "user-123" + mock_user.name = "Test User" + mock_user.is_active = True + + result = get_user_info(mock_user) + assert result["name"] == "Test User" +``` + +## Parametrize + +```python +@pytest.mark.parametrize("input,expected", [ + ("hello", "HELLO"), + ("world", "WORLD"), + ("pytest", "PYTEST"), +]) +def test_uppercase(input, expected): + assert input.upper() == expected + +@pytest.mark.parametrize("email,is_valid", [ + ("user@example.com", True), + ("invalid-email", False), + ("", False), + ("user@.com", False), +]) +def test_email_validation(email, is_valid): + assert validate_email(email) == is_valid +``` + +## Markers + +```python +# pytest.ini or pyproject.toml +[tool.pytest.ini_options] +markers = [ + "slow: marks tests as slow", + "integration: marks integration tests", +] + +# Usage +@pytest.mark.slow +def test_large_data_processing(): + ... + +@pytest.mark.integration +def test_database_connection(): + ... + +@pytest.mark.skip(reason="Not implemented yet") +def test_future_feature(): + ... 
+ +@pytest.mark.skipif(sys.platform == "win32", reason="Unix only") +def test_unix_specific(): + ... + +# Run specific markers +# pytest -m "not slow" +# pytest -m "integration" +``` + +## Async Tests + +```python +import pytest + +@pytest.mark.asyncio +async def test_async_function(): + result = await async_fetch_data() + assert result is not None +``` + +## Commands + +```bash +pytest # Run all tests +pytest -v # Verbose output +pytest -x # Stop on first failure +pytest -k "test_user" # Filter by name +pytest -m "not slow" # Filter by marker +pytest --cov=src # With coverage +pytest -n auto # Parallel (pytest-xdist) +pytest --tb=short # Short traceback +``` + +## References + +For general pytest documentation, see: +- **Official Docs**: https://docs.pytest.org/en/stable/ + +For Prowler SDK testing with provider-specific patterns (moto, MagicMock), see: +- **Documentation**: [references/prowler-testing.md](references/prowler-testing.md) diff --git a/skills/pytest/references/prowler-testing.md b/skills/pytest/references/prowler-testing.md new file mode 100644 index 0000000000..c26c8104dc --- /dev/null +++ b/skills/pytest/references/prowler-testing.md @@ -0,0 +1,16 @@ +# Prowler-Specific Testing Patterns + +## Local Documentation + +For Prowler-specific pytest patterns, see: + +- `docs/developer-guide/unit-testing.mdx` - Complete SDK testing guide + +## Contents + +The Prowler documentation covers patterns NOT in the generic pytest skill: +- `set_mocked_aws_provider()` fixture pattern +- `@mock_aws` decorator usage with moto +- `mock_make_api_call` pattern +- Service dependency table for CI optimization +- Provider-specific mocking (AWS uses moto, Azure/GCP use MagicMock) diff --git a/skills/react-19/SKILL.md b/skills/react-19/SKILL.md new file mode 100644 index 0000000000..53ab6b0aea --- /dev/null +++ b/skills/react-19/SKILL.md @@ -0,0 +1,122 @@ +--- +name: react-19 +description: > + React 19 patterns with React Compiler. 
+ Trigger: When writing React components - no useMemo/useCallback needed. +license: Apache-2.0 +metadata: + author: prowler-cloud + version: "1.0" +allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, WebSearch, Task +--- + +## No Manual Memoization (REQUIRED) + +```typescript +// ✅ React Compiler handles optimization automatically +function Component({ items }) { + const filtered = items.filter(x => x.active); + const sorted = filtered.sort((a, b) => a.name.localeCompare(b.name)); + + const handleClick = (id) => { + console.log(id); + }; + + return ; +} + +// ❌ NEVER: Manual memoization +const filtered = useMemo(() => items.filter(x => x.active), [items]); +const handleClick = useCallback((id) => console.log(id), []); +``` + +## Imports (REQUIRED) + +```typescript +// ✅ ALWAYS: Named imports +import { useState, useEffect, useRef } from "react"; + +// ❌ NEVER +import React from "react"; +import * as React from "react"; +``` + +## Server Components First + +```typescript +// ✅ Server Component (default) - no directive +export default async function Page() { + const data = await fetchData(); + return ; +} + +// ✅ Client Component - only when needed +"use client"; +export function Interactive() { + const [state, setState] = useState(false); + return ; +} +``` + +## When to use "use client" + +- useState, useEffect, useRef, useContext +- Event handlers (onClick, onChange) +- Browser APIs (window, localStorage) + +## use() Hook + +```typescript +import { use } from "react"; + +// Read promises (suspends until resolved) +function Comments({ promise }) { + const comments = use(promise); + return comments.map(c =>
{c.text}
); +} + +// Conditional context (not possible with useContext!) +function Theme({ showTheme }) { + if (showTheme) { + const theme = use(ThemeContext); + return
Themed
; + } + return
Plain
; +} +``` + +## Actions & useActionState + +```typescript +"use server"; +async function submitForm(formData: FormData) { + await saveToDatabase(formData); + revalidatePath("/"); +} + +// With pending state +import { useActionState } from "react"; + +function Form() { + const [state, action, isPending] = useActionState(submitForm, null); + return ( +
+ +
+ ); +} +``` + +## ref as Prop (No forwardRef) + +```typescript +// ✅ React 19: ref is just a prop +function Input({ ref, ...props }) { + return ; +} + +// ❌ Old way (unnecessary now) +const Input = forwardRef((props, ref) => ); +``` diff --git a/skills/setup.sh b/skills/setup.sh new file mode 100755 index 0000000000..8f0fc57197 --- /dev/null +++ b/skills/setup.sh @@ -0,0 +1,119 @@ +#!/bin/bash +# Setup AI Skills for Prowler development +# Configures AI coding assistants that follow agentskills.io standard: +# - Claude Code: .claude/skills/ symlink (auto-discovery) +# - Gemini CLI: .gemini/skills/ symlink (auto-discovery) +# - Codex (OpenAI): .codex/skills/ symlink + AGENTS.md +# - GitHub Copilot: reads AGENTS.md from repo root (no symlink needed) + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +REPO_ROOT="$(dirname "$SCRIPT_DIR")" +SKILLS_SOURCE="$SCRIPT_DIR" + +# Target locations +CLAUDE_SKILLS_TARGET="$REPO_ROOT/.claude/skills" +CODEX_SKILLS_TARGET="$REPO_ROOT/.codex/skills" +GEMINI_SKILLS_TARGET="$REPO_ROOT/.gemini/skills" + +# Colors for output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +NC='\033[0m' # No Color + +echo "🤖 Prowler AI Skills Setup" +echo "==========================" +echo "" + +# Count skills (directories with SKILL.md) +SKILL_COUNT=$(find "$SKILLS_SOURCE" -maxdepth 2 -name "SKILL.md" | wc -l | tr -d ' ') + +if [ "$SKILL_COUNT" -eq 0 ]; then + echo -e "${RED}No skills found in $SKILLS_SOURCE${NC}" + exit 1 +fi + +echo -e "${BLUE}Found $SKILL_COUNT skills to configure${NC}" +echo "" + +# ============================================================================= +# CLAUDE CODE SETUP (.claude/skills symlink - auto-discovery) +# ============================================================================= +echo -e "${YELLOW}[1/3] Setting up Claude Code...${NC}" + +if [ ! 
-d "$REPO_ROOT/.claude" ]; then + mkdir -p "$REPO_ROOT/.claude" +fi + +if [ -L "$CLAUDE_SKILLS_TARGET" ]; then + rm "$CLAUDE_SKILLS_TARGET" +elif [ -d "$CLAUDE_SKILLS_TARGET" ]; then + mv "$CLAUDE_SKILLS_TARGET" "$REPO_ROOT/.claude/skills.backup.$(date +%s)" +fi + +ln -s "$SKILLS_SOURCE" "$CLAUDE_SKILLS_TARGET" +echo -e "${GREEN} ✓ .claude/skills -> skills/${NC}" + +# ============================================================================= +# CODEX (OPENAI) SETUP (.codex/skills symlink) +# ============================================================================= +echo -e "${YELLOW}[2/3] Setting up Codex (OpenAI)...${NC}" + +if [ ! -d "$REPO_ROOT/.codex" ]; then + mkdir -p "$REPO_ROOT/.codex" +fi + +if [ -L "$CODEX_SKILLS_TARGET" ]; then + rm "$CODEX_SKILLS_TARGET" +elif [ -d "$CODEX_SKILLS_TARGET" ]; then + mv "$CODEX_SKILLS_TARGET" "$REPO_ROOT/.codex/skills.backup.$(date +%s)" +fi + +ln -s "$SKILLS_SOURCE" "$CODEX_SKILLS_TARGET" +echo -e "${GREEN} ✓ .codex/skills -> skills/${NC}" + +# ============================================================================= +# GEMINI CLI SETUP (.gemini/skills symlink - auto-discovery) +# ============================================================================= +echo -e "${YELLOW}[3/3] Setting up Gemini CLI...${NC}" + +if [ ! 
-d "$REPO_ROOT/.gemini" ]; then + mkdir -p "$REPO_ROOT/.gemini" +fi + +if [ -L "$GEMINI_SKILLS_TARGET" ]; then + rm "$GEMINI_SKILLS_TARGET" +elif [ -d "$GEMINI_SKILLS_TARGET" ]; then + mv "$GEMINI_SKILLS_TARGET" "$REPO_ROOT/.gemini/skills.backup.$(date +%s)" +fi + +ln -s "$SKILLS_SOURCE" "$GEMINI_SKILLS_TARGET" +echo -e "${GREEN} ✓ .gemini/skills -> skills/${NC}" + +# ============================================================================= +# SUMMARY +# ============================================================================= +echo "" +echo -e "${GREEN}✅ Successfully configured $SKILL_COUNT AI skills!${NC}" +echo "" +echo "Configuration created:" +echo " • Claude Code: .claude/skills/ (symlink, auto-discovery)" +echo " • Codex (OpenAI): .codex/skills/ (symlink, reads AGENTS.md)" +echo " • Gemini CLI: .gemini/skills/ (symlink, auto-discovery)" +echo " • GitHub Copilot: reads AGENTS.md from repo root (no setup needed)" +echo "" +echo "Available skills:" +echo " Generic: typescript, react-19, nextjs-15, playwright, pytest," +echo " django-drf, zod-4, zustand-5, tailwind-4, ai-sdk-5" +echo "" +echo " Prowler: prowler, prowler-api, prowler-ui, prowler-mcp," +echo " prowler-sdk-check, prowler-test-ui, prowler-test-api," +echo " prowler-test-sdk, prowler-compliance, prowler-docs," +echo " prowler-provider, prowler-pr" +echo "" +echo -e "${BLUE}Note: Restart your AI coding assistant to load the skills.${NC}" +echo -e "${BLUE} Claude/Gemini auto-discover skills from SKILL.md descriptions.${NC}" +echo -e "${BLUE} Codex/Copilot use AGENTS.md instructions to reference skills.${NC}" diff --git a/skills/skill-creator/SKILL.md b/skills/skill-creator/SKILL.md new file mode 100644 index 0000000000..d84aa5101a --- /dev/null +++ b/skills/skill-creator/SKILL.md @@ -0,0 +1,169 @@ +--- +name: skill-creator +description: > + Creates new AI agent skills following the Agent Skills spec. 
+ Trigger: When user asks to create a new skill, add agent instructions, or document patterns for AI. +license: Apache-2.0 +metadata: + author: prowler-cloud + version: "1.0" +allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, WebSearch, Task +--- + +## When to Create a Skill + +Create a skill when: +- A pattern is used repeatedly and AI needs guidance +- Project-specific conventions differ from generic best practices +- Complex workflows need step-by-step instructions +- Decision trees help AI choose the right approach + +**Don't create a skill when:** +- Documentation already exists (create a reference instead) +- Pattern is trivial or self-explanatory +- It's a one-off task + +--- + +## Skill Structure + +``` +skills/{skill-name}/ +├── SKILL.md # Required - main skill file +├── assets/ # Optional - templates, schemas, examples +│ ├── template.py +│ └── schema.json +└── references/ # Optional - links to local docs + └── docs.md # Points to docs/developer-guide/*.mdx +``` + +--- + +## SKILL.md Template + +```markdown +--- +name: {skill-name} +description: > + {One-line description of what this skill does}. + Trigger: {When the AI should load this skill}. 
+license: Apache-2.0 +metadata: + author: prowler-cloud + version: "1.0" +--- + +## When to Use + +{Bullet points of when to use this skill} + +## Critical Patterns + +{The most important rules - what AI MUST know} + +## Code Examples + +{Minimal, focused examples} + +## Commands + +```bash +{Common commands} +``` + +## Resources + +- **Templates**: See [assets/](assets/) for {description} +- **Documentation**: See [references/](references/) for local docs +``` + +--- + +## Naming Conventions + +| Type | Pattern | Examples | +|------|---------|----------| +| Generic skill | `{technology}` | `pytest`, `playwright`, `typescript` | +| Prowler-specific | `prowler-{component}` | `prowler-api`, `prowler-ui`, `prowler-sdk-check` | +| Testing skill | `prowler-test-{component}` | `prowler-test-sdk`, `prowler-test-api` | +| Workflow skill | `{action}-{target}` | `skill-creator`, `jira-task` | + +--- + +## Decision: assets/ vs references/ + +``` +Need code templates? → assets/ +Need JSON schemas? → assets/ +Need example configs? → assets/ +Link to existing docs? → references/ +Link to external guides? → references/ (with local path) +``` + +**Key Rule**: `references/` should point to LOCAL files (`docs/developer-guide/*.mdx`), not web URLs. + +--- + +## Decision: Prowler-Specific vs Generic + +``` +Patterns apply to ANY project? → Generic skill (e.g., pytest, typescript) +Patterns are Prowler-specific? → prowler-{name} skill +Generic skill needs Prowler info? 
→ Add references/ pointing to Prowler docs +``` + +--- + +## Frontmatter Fields + +| Field | Required | Description | +|-------|----------|-------------| +| `name` | Yes | Skill identifier (lowercase, hyphens) | +| `description` | Yes | What + Trigger in one block | +| `license` | Yes | Always `Apache-2.0` for Prowler | +| `metadata.author` | Yes | `prowler-cloud` | +| `metadata.version` | Yes | Semantic version as string | + +--- + +## Content Guidelines + +### DO +- Start with the most critical patterns +- Use tables for decision trees +- Keep code examples minimal and focused +- Include Commands section with copy-paste commands + +### DON'T +- Add Keywords section (agent searches frontmatter, not body) +- Duplicate content from existing docs (reference instead) +- Include lengthy explanations (link to docs) +- Add troubleshooting sections (keep focused) +- Use web URLs in references (use local paths) + +--- + +## Registering the Skill + +After creating the skill, add it to `AGENTS.md`: + +```markdown +| `{skill-name}` | {Description} | [SKILL.md](skills/{skill-name}/SKILL.md) | +``` + +--- + +## Checklist Before Creating + +- [ ] Skill doesn't already exist (check `skills/`) +- [ ] Pattern is reusable (not one-off) +- [ ] Name follows conventions +- [ ] Frontmatter is complete (description includes trigger keywords) +- [ ] Critical patterns are clear +- [ ] Code examples are minimal +- [ ] Commands section exists +- [ ] Added to AGENTS.md + +## Resources + +- **Templates**: See [assets/](assets/) for SKILL.md template diff --git a/skills/skill-creator/assets/SKILL-TEMPLATE.md b/skills/skill-creator/assets/SKILL-TEMPLATE.md new file mode 100644 index 0000000000..7639240245 --- /dev/null +++ b/skills/skill-creator/assets/SKILL-TEMPLATE.md @@ -0,0 +1,78 @@ +--- +name: {skill-name} +description: > + {Brief description of what this skill enables}. + Trigger: {When the AI should load this skill - be specific}. 
+license: Apache-2.0 +metadata: + author: prowler-cloud + version: "1.0" +--- + +## When to Use + +Use this skill when: +- {Condition 1} +- {Condition 2} +- {Condition 3} + +--- + +## Critical Patterns + +{The MOST important rules - what AI MUST follow} + +### Pattern 1: {Name} + +```{language} +{code example} +``` + +### Pattern 2: {Name} + +```{language} +{code example} +``` + +--- + +## Decision Tree + +``` +{Question 1}? → {Action A} +{Question 2}? → {Action B} +Otherwise → {Default action} +``` + +--- + +## Code Examples + +### Example 1: {Description} + +```{language} +{minimal, focused example} +``` + +### Example 2: {Description} + +```{language} +{minimal, focused example} +``` + +--- + +## Commands + +```bash +{command 1} # {description} +{command 2} # {description} +{command 3} # {description} +``` + +--- + +## Resources + +- **Templates**: See [assets/](assets/) for {description of templates} +- **Documentation**: See [references/](references/) for local developer guide links diff --git a/skills/tailwind-4/SKILL.md b/skills/tailwind-4/SKILL.md new file mode 100644 index 0000000000..84b50261ce --- /dev/null +++ b/skills/tailwind-4/SKILL.md @@ -0,0 +1,197 @@ +--- +name: tailwind-4 +description: > + Tailwind CSS 4 patterns and best practices. + Trigger: When styling with Tailwind - cn(), theme variables, no var() in className. +license: Apache-2.0 +metadata: + author: prowler-cloud + version: "1.0" +allowed-tools: Read, Edit, Write, Glob, Grep, Bash, WebFetch, WebSearch, Task +--- + +## Styling Decision Tree + +``` +Tailwind class exists? → className="..." +Dynamic value? → style={{ width: `${x}%` }} +Conditional styles? → cn("base", condition && "variant") +Static only? → className="..." (no cn() needed) +Library can't use class?→ style prop with var() constants +``` + +## Critical Rules + +### Never Use var() in className + +```typescript +// ❌ NEVER: var() in className +
+
+ +// ✅ ALWAYS: Use Tailwind semantic classes +
+
+``` + +### Never Use Hex Colors + +```typescript +// ❌ NEVER: Hex colors in className +

+

+ +// ✅ ALWAYS: Use Tailwind color classes +

+

+``` + +## The cn() Utility + +```typescript +import { clsx } from "clsx"; +import { twMerge } from "tailwind-merge"; + +export function cn(...inputs: ClassValue[]) { + return twMerge(clsx(inputs)); +} +``` + +### When to Use cn() + +```typescript +// ✅ Conditional classes +
+ +// ✅ Merging with potential conflicts + + +
+ ); +} +``` + +## Persist Middleware + +```typescript +import { create } from "zustand"; +import { persist } from "zustand/middleware"; + +interface SettingsStore { + theme: "light" | "dark"; + language: string; + setTheme: (theme: "light" | "dark") => void; + setLanguage: (language: string) => void; +} + +const useSettingsStore = create()( + persist( + (set) => ({ + theme: "light", + language: "en", + setTheme: (theme) => set({ theme }), + setLanguage: (language) => set({ language }), + }), + { + name: "settings-storage", // localStorage key + } + ) +); +``` + +## Selectors (Zustand 5) + +```typescript +// ✅ Select specific fields to prevent unnecessary re-renders +function UserName() { + const name = useUserStore((state) => state.name); + return {name}; +} + +// ✅ For multiple fields, use useShallow +import { useShallow } from "zustand/react/shallow"; + +function UserInfo() { + const { name, email } = useUserStore( + useShallow((state) => ({ name: state.name, email: state.email })) + ); + return
{name} - {email}
; +} + +// ❌ AVOID: Selecting entire store (causes re-render on any change) +const store = useUserStore(); // Re-renders on ANY state change +``` + +## Async Actions + +```typescript +interface UserStore { + user: User | null; + loading: boolean; + error: string | null; + fetchUser: (id: string) => Promise; +} + +const useUserStore = create((set) => ({ + user: null, + loading: false, + error: null, + + fetchUser: async (id) => { + set({ loading: true, error: null }); + try { + const response = await fetch(`/api/users/${id}`); + const user = await response.json(); + set({ user, loading: false }); + } catch (error) { + set({ error: "Failed to fetch user", loading: false }); + } + }, +})); +``` + +## Slices Pattern + +```typescript +// userSlice.ts +interface UserSlice { + user: User | null; + setUser: (user: User) => void; + clearUser: () => void; +} + +const createUserSlice = (set): UserSlice => ({ + user: null, + setUser: (user) => set({ user }), + clearUser: () => set({ user: null }), +}); + +// cartSlice.ts +interface CartSlice { + items: CartItem[]; + addItem: (item: CartItem) => void; + removeItem: (id: string) => void; +} + +const createCartSlice = (set): CartSlice => ({ + items: [], + addItem: (item) => set((state) => ({ items: [...state.items, item] })), + removeItem: (id) => set((state) => ({ + items: state.items.filter(i => i.id !== id) + })), +}); + +// store.ts +type Store = UserSlice & CartSlice; + +const useStore = create()((...args) => ({ + ...createUserSlice(...args), + ...createCartSlice(...args), +})); +``` + +## Immer Middleware + +```typescript +import { create } from "zustand"; +import { immer } from "zustand/middleware/immer"; + +interface TodoStore { + todos: Todo[]; + addTodo: (text: string) => void; + toggleTodo: (id: string) => void; +} + +const useTodoStore = create()( + immer((set) => ({ + todos: [], + + addTodo: (text) => set((state) => { + // Mutate directly with Immer! 
+ state.todos.push({ id: crypto.randomUUID(), text, done: false }); + }), + + toggleTodo: (id) => set((state) => { + const todo = state.todos.find(t => t.id === id); + if (todo) todo.done = !todo.done; + }), + })) +); +``` + +## DevTools + +```typescript +import { create } from "zustand"; +import { devtools } from "zustand/middleware"; + +const useStore = create()( + devtools( + (set) => ({ + // store definition + }), + { name: "MyStore" } // Name in Redux DevTools + ) +); +``` + +## Outside React + +```typescript +// Access store outside components +const { count, increment } = useCounterStore.getState(); +increment(); + +// Subscribe to changes +const unsubscribe = useCounterStore.subscribe( + (state) => console.log("Count changed:", state.count) +); +``` diff --git a/ui/AGENTS.md b/ui/AGENTS.md index 5eada68033..0e26f60f6f 100644 --- a/ui/AGENTS.md +++ b/ui/AGENTS.md @@ -1,5 +1,17 @@ # Prowler UI - AI Agent Ruleset +> **Skills Reference**: For detailed patterns, use these skills: +> - [`prowler-ui`](../skills/prowler-ui/SKILL.md) - Prowler-specific UI patterns +> - [`prowler-test-ui`](../skills/prowler-test-ui/SKILL.md) - Playwright E2E testing (comprehensive) +> - [`typescript`](../skills/typescript/SKILL.md) - Const types, flat interfaces +> - [`react-19`](../skills/react-19/SKILL.md) - No useMemo/useCallback, compiler +> - [`nextjs-15`](../skills/nextjs-15/SKILL.md) - App Router, Server Actions +> - [`tailwind-4`](../skills/tailwind-4/SKILL.md) - cn() utility, no var() in className +> - [`zod-4`](../skills/zod-4/SKILL.md) - New API (z.email(), z.uuid()) +> - [`zustand-5`](../skills/zustand-5/SKILL.md) - Selectors, persist middleware +> - [`ai-sdk-5`](../skills/ai-sdk-5/SKILL.md) - UIMessage, sendMessage +> - [`playwright`](../skills/playwright/SKILL.md) - Page Object Model, selectors + ## CRITICAL RULES - NON-NEGOTIABLE ### React @@ -19,48 +31,19 @@ - ALWAYS: Reuse via `extends` - NEVER: Inline nested objects -```typescript -// ✅ CORRECT -interface 
UserAddress { - street: string; - city: string; -} -interface User { - id: string; - address: UserAddress; -} -interface Admin extends User { - permissions: string[]; -} - -// ❌ WRONG -interface User { - address: { street: string; city: string }; -} -``` - ### Styling - Single class: `className="bg-slate-800 text-white"` -- Merge multiple classes: `className={cn(BUTTON_STYLES.base, BUTTON_STYLES.active, isLoading && "opacity-50")}` (cn() handles Tailwind conflicts with twMerge) -- Conditional classes: `className={cn("base", condition && "variant")}` -- Recharts props: `fill={CHART_COLORS.text}` (use constants with var()) -- Dynamic values: `style={{ width: "50%", opacity: 0.5 }}` -- CSS custom properties: `style={{ "--color": "var(--css-var)" }}` (for dynamic theming) -- NEVER: `var()` in className strings (use Tailwind semantic classes instead) -- NEVER: hex colors (use `text-white` not `text-[#fff]`) +- Merge multiple classes: `className={cn(BASE_STYLES, variant && "variant-class")}` +- Dynamic values: `style={{ width: "50%" }}` +- NEVER: `var()` in className, hex colors ### Scope Rule (ABSOLUTE) -- Used 2+ places → `components/shared/` or `lib/` or `types/` or `hooks/` +- Used 2+ places → `lib/` or `types/` or `hooks/` (components go in `components/{domain}/`) - Used 1 place → keep local in feature directory - This determines ALL folder structure decisions -### Memoization - -- NEVER: `useMemo`, `useCallback` -- React 19 Compiler handles automatic optimization - --- ## DECISION TREES @@ -68,8 +51,8 @@ interface User { ### Component Placement ``` -New feature UI? → shadcn/ui + Tailwind | Existing feature? → HeroUI -Used 1 feature? → features/{feature}/components | Used 2+? → components/shared +New/Existing UI? → shadcn/ui + Tailwind (NEVER HeroUI for new code) +Used 1 feature? → features/{feature}/components | Used 2+? → components/{domain}/ Needs state/hooks? → "use client" | Server component? 
→ No directive ``` @@ -81,15 +64,7 @@ Data transform → actions/{feature}/{feature}.adapter.ts Types (shared 2+) → types/{domain}.ts | Types (local 1) → {feature}/types.ts Utils (shared 2+) → lib/ | Utils (local 1) → {feature}/utils/ Hooks (shared 2+) → hooks/ | Hooks (local 1) → {feature}/hooks.ts -shadcn components → components/shadcn/ | HeroUI → components/ui/ -``` - -### Styling Decision - -``` -Tailwind class exists? → className | Dynamic value? → style prop -Conditional styles? → cn() | Static? → className only -Recharts? → CHART_COLORS constant + var() | Other? → Tailwind classes +shadcn components → components/shadcn/ ``` --- @@ -105,14 +80,6 @@ export default async function Page() { } ``` -### Form + Validation - -```typescript -import { useForm } from "react-hook-form"; -import { zodResolver } from "@hookform/resolvers/zod"; -const form = useForm({ resolver: zodResolver(schema) }); -``` - ### Server Action ```typescript @@ -124,15 +91,22 @@ export async function updateProvider(formData: FormData) { } ``` -### Zod v4 +### Form + Validation (Zod 4) -- `z.email()` not `z.string().email()` -- `z.uuid()` not `z.string().uuid()` -- `z.url()` not `z.string().url()` -- `z.string().min(1)` not `z.string().nonempty()` -- `error` param not `message` param +```typescript +import { useForm } from "react-hook-form"; +import { zodResolver } from "@hookform/resolvers/zod"; +import { z } from "zod"; -### Zustand v5 +const schema = z.object({ + email: z.email(), // Zod 4: z.email() not z.string().email() + id: z.uuid(), // Zod 4: z.uuid() not z.string().uuid() +}); + +const form = useForm({ resolver: zodResolver(schema) }); +``` + +### Zustand 5 ```typescript const useStore = create( @@ -146,54 +120,31 @@ const useStore = create( ); ``` -### AI SDK v5 - -```typescript -import { useChat } from "@ai-sdk/react"; -const { messages, sendMessage } = useChat({ - transport: new DefaultChatTransport({ api: "/api/chat" }), -}); -const [input, setInput] = useState(""); -const 
handleSubmit = (e) => { - e.preventDefault(); - sendMessage({ text: input }); - setInput(""); -}; -``` - -### Testing (Playwright) +### Playwright Test ```typescript export class FeaturePage extends BasePage { readonly submitBtn = this.page.getByRole("button", { name: "Submit" }); - async goto() { - await super.goto("/path"); - } - async submit() { - await this.submitBtn.click(); - } + async goto() { await super.goto("/path"); } + async submit() { await this.submitBtn.click(); } } -test( - "action works", - { tag: ["@critical", "@feature", "@TEST-001"] }, - async ({ page }) => { - const p = new FeaturePage(page); - await p.goto(); - await p.submit(); - await expect(page).toHaveURL("/expected"); - }, -); +test("action works", { tag: ["@critical", "@feature"] }, async ({ page }) => { + const p = new FeaturePage(page); + await p.goto(); + await p.submit(); + await expect(page).toHaveURL("/expected"); +}); ``` -Selector priority: `getByRole()` → `getByLabel()` → `getByText()` → other - --- ## TECH STACK -Next.js 15.5.3 | React 19.1.1 | Tailwind 4.1.13 | shadcn/ui (new) | HeroUI 2.8.4 (legacy) -Zod 4.1.11 | React Hook Form 7.62.0 | Zustand 5.0.8 | NextAuth 5.0.0-beta.29 | Recharts 2.15.4 +Next.js 15.5.9 | React 19.2.2 | Tailwind 4.1.13 | shadcn/ui +Zod 4.1.11 | React Hook Form 7.62.0 | Zustand 5.0.8 | NextAuth 5.0.0-beta.30 | Recharts 2.15.4 + +> **Note**: HeroUI exists in `components/ui/` as legacy code. Do NOT add new components there. 
--- @@ -201,36 +152,30 @@ Zod 4.1.11 | React Hook Form 7.62.0 | Zustand 5.0.8 | NextAuth 5.0.0-beta.29 | R ``` ui/ -├── app/ (Next.js App Router) -│ ├── (auth)/ (Auth pages) -│ └── (prowler)/ (Main app: compliance, findings, providers, scans, services, integrations) -├── components/ -│ ├── shadcn/ (New shadcn/ui components) -│ ├── ui/ (HeroUI base) -│ └── {domain}/ (Domain components) -├── actions/ (Server actions) -├── types/ (Shared types) -├── hooks/ (Shared hooks) -├── lib/ (Utilities) -├── store/ (Zustand state) -├── tests/ (Playwright E2E) -└── styles/ (Global CSS) +├── app/(auth)/ # Auth pages +├── app/(prowler)/ # Main app: compliance, findings, providers, scans +├── components/shadcn/ # shadcn/ui components (USE THIS) +├── components/ui/ # HeroUI (LEGACY - do not add here) +├── actions/ # Server actions +├── types/ # Shared types +├── hooks/ # Shared hooks +├── lib/ # Utilities +├── store/ # Zustand state +├── tests/ # Playwright E2E +└── styles/ # Global CSS ``` --- ## COMMANDS -``` -pnpm install && pnpm run dev (Setup & start) -pnpm run typecheck (Type check) -pnpm run lint:fix (Fix linting) -pnpm run format:write (Format) -pnpm run healthcheck (typecheck + lint) -pnpm run test:e2e (E2E tests) -pnpm run test:e2e:ui (E2E with UI) -pnpm run test:e2e:debug (Debug E2E) -pnpm run build && pnpm start (Build & start) +```bash +pnpm install && pnpm run dev +pnpm run typecheck +pnpm run lint:fix +pnpm run healthcheck +pnpm run test:e2e +pnpm run test:e2e:ui ``` --- @@ -245,13 +190,3 @@ pnpm run build && pnpm start (Build & start) - [ ] No secrets in code (use `.env.local`) - [ ] Error messages sanitized - [ ] Server-side validation present - ---- - -## MIGRATIONS (As of Jan 2025) - -React 18 → 19.1.1 (async components, compiler) -Next.js 14 → 15.5.3 -NextUI → HeroUI 2.8.4 -Zod 3 → 4 (see patterns section) -AI SDK 4 → 5 (see patterns section)