Compare commits


58 Commits

Author SHA1 Message Date
alejandrobailo e0e4288715 chore(docker): add Next.js cache directory configuration in Dockerfile 2025-09-09 14:51:19 +02:00
Daniel Barranquero 252033d113 fix(compliance): replace old check id with new one (#8682) 2025-09-09 14:25:56 +02:00
Prowler Bot 0bc00dbca4 chore(release): Bump version to v5.13.0 (#8679)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2025-09-09 16:36:22 +05:45
Adrián Jesús Peña Rodríguez 3f5178bffb chore: update api changelog (#8677) 2025-09-09 10:23:55 +02:00
Josema Camacho e08b272a1d fix(login): add DRF throttle option for dj-rest-auth lib (#8672) 2025-09-09 09:34:02 +02:00
Pedro Martín 64c43a288d feat(jira): add force accept language for requests (#8674) 2025-09-09 13:17:25 +05:45
Daniel Barranquero 74bf0e6b47 fix(aws): nonetype errors in opensearch, firehose and cognito (#8670) 2025-09-09 13:12:57 +05:45
Andoni Alonso 02b7c5328f docs: update providers table (#8676)
Co-authored-by: Daniel Barranquero <danielbo2001@gmail.com>
2025-09-09 09:25:20 +02:00
Alejandro Bailo bb02004e7c fix: social auth buttons showed for sign-up (#8673) 2025-09-09 09:23:56 +02:00
Andoni Alonso 82cf216a74 feat(mongodbatlas): add MongoDB Atlas provider PoC (#8312)
Co-authored-by: Daniel Barranquero <danielbo2001@gmail.com>
2025-09-09 09:18:37 +02:00
Daniel Barranquero 7916425ed4 fix(memorydb): handle clusters with no security groups (#8666) 2025-09-08 15:05:13 -04:00
Andoni Alonso d98063ed47 docs: add interface column to providers (#8675) 2025-09-08 15:03:17 -04:00
Andoni Alonso 27bf78a3a1 docs: update providers list (#8671) 2025-09-08 17:12:16 +02:00
Andoni Alonso f50bd50d60 docs: add SSO with SAML Entra ID video link (#8668) 2025-09-08 14:57:38 +02:00
Alejandro Bailo 80665e0396 feat(ui): send a finding to Jira (#8649) 2025-09-08 14:15:23 +02:00
Pedro Martín 4b259fa8dd chore(changelog): update with latest changes (#8665) 2025-09-08 17:24:31 +05:45
Hugo Pereira Brito 10db2ed6d8 chore(docs): add notes regarding gov accounts support (#8656) 2025-09-08 11:07:00 +02:00
Chandrapal Badshah 422a8a0f62 fix: change title in lighthouse settings (#8615)
Co-authored-by: Chandrapal Badshah <12944530+Chan9390@users.noreply.github.com>
2025-09-08 10:34:09 +02:00
Daniel Barranquero 906a2cc651 fix(entra): add metadata description for check entra_admin_users_phishing_resistant_mfa_enabled (#8654) 2025-09-08 08:11:46 +02:00
Víctor Fernández Poyatos 43fe9c6860 feat(integrations): allow sending findings to Jira from the API (#8645) 2025-09-05 14:28:34 +02:00
Andoni Alonso f87b2089fb docs: remove llms.txt (#8653) 2025-09-05 17:08:42 +05:45
Samuele Pasini 1884874ab6 fix: typo ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_* CheckID (#8294)
Co-authored-by: Daniel Barranquero <danielbo2001@gmail.com>
2025-09-05 13:16:12 +02:00
Andoni Alonso cd6d29e176 docs: reorg tutorials (#8652) 2025-09-05 16:49:14 +05:45
Pedro Martín 0b7055e983 feat(jira): add send_finding method with specific finding fields (#8648) 2025-09-05 12:25:53 +02:00
Josema Camacho ae53b76d78 feat(login): add DJANGO_THROTTLE_TOKEN_OBTAIN to main .env file (#8650) 2025-09-05 16:01:48 +05:45
Josema Camacho 406e473b5c feat(login): add throttling option for the /api/v1/tokens endpoint (#8647) 2025-09-05 14:37:31 +05:45
Pedro Martín 1a2bf461f0 feat(jira): support labels in jira tickets (#8603) 2025-09-05 09:53:24 +02:00
Samuele Pasini 1b49c0b27f feat: add --excluded-checks-file flag (#8301)
Co-authored-by: pedrooot <pedromarting3@gmail.com>
2025-09-05 09:33:21 +02:00
Pablo Lara 12ada66978 feat: add status filter to /overviews endpoint (#8186)
Co-authored-by: Adrián Jesús Peña Rodríguez <adrianjpr@gmail.com>
2025-09-04 18:46:14 +02:00
Alejandro Bailo daa2536005 feat: Jira UI integration - pages and server actions (#8640) 2025-09-04 15:59:37 +02:00
Chandrapal Badshah 69a62db19a chore: rename to lighthouse ai (#8614)
Co-authored-by: Chandrapal Badshah <12944530+Chan9390@users.noreply.github.com>
2025-09-04 15:30:07 +05:45
Pedro Martín 79450d6977 fix(securityhub): resolve TypeError from Python3.9 (#8619)
Co-authored-by: Hugo Pereira Brito <101209179+HugoPBrito@users.noreply.github.com>
2025-09-03 17:52:09 +02:00
Víctor Fernández Poyatos 0463fd0830 refactor(integrations-jira): Move domain to credentials and retrieve metadata during connection test (#8637) 2025-09-03 17:24:42 +02:00
Alejandro Bailo b15e3d339c fix(saml): remove validation call on email domain change (#8638) 2025-09-03 17:04:51 +02:00
Pedro Martín 1fc12952ba feat(jira): add color for manual status (#8642) 2025-09-03 16:53:31 +02:00
sumit-tft 088a6bcbda feat(ui): handle no-permissions on scan page (#8624)
Co-authored-by: alejandrobailo <alejandrobailo94@gmail.com>
2025-09-03 15:51:14 +02:00
Hugo Pereira Brito a3b0bb6d4b refactor(models): rename AdditionalUrls to AdditionalURLs (#8639) 2025-09-03 19:34:06 +05:45
Pedro Martín 3c819f8875 chore(changelog): update with latest changes (#8636) 2025-09-03 12:54:50 +02:00
Pedro Martín cdf0292bbc feat(jira): add get_metadata (#8630) 2025-09-03 10:59:07 +02:00
César Arroba 987121051b chore(sdk): comment push readme to dockerhub steps (#8628) 2025-09-02 21:48:42 +05:45
Hugo Pereira Brito c9ed7773d2 feat(models): add AdditionalUrls field to check metadata (#8590) 2025-09-02 21:27:21 +05:45
Pepe Fagoaga fdf45aac51 fix(img): prowler architecture (#8635) 2025-09-02 21:15:40 +05:45
Alejandro Bailo 3ded224a4b fix: new errors detected through the app (#8629) 2025-09-02 12:35:06 +02:00
sumit-tft 230a085c76 fix(ui): display NoProvidersAdded when no cloud providers are configured (#8626) 2025-09-02 12:33:58 +02:00
Chandrapal Badshah 8cd90e07dc chore(ui): eslint nextjs files (#8627)
Co-authored-by: Chandrapal Badshah <12944530+Chan9390@users.noreply.github.com>
2025-09-02 12:15:48 +02:00
Pedro Martín 06ded98d05 feat(jira): add data to table and error handling (#8601) 2025-09-02 11:48:52 +02:00
Pedro Martín a5066326bd chore(changelog): update with latests changes (#8620) 2025-09-02 11:27:13 +02:00
Alejandro Bailo 83a9ac2109 chore(ui): update CHANGELOG (#8625) 2025-09-02 10:45:34 +02:00
Alejandro Bailo 136eb4facd feat: 50X errors handler (#8621) 2025-09-02 10:12:03 +02:00
Víctor Fernández Poyatos d4eb4bdca7 feat(integrations): Support JIRA integration in the API (#8622) 2025-09-02 09:53:36 +02:00
Alejandro Bailo 665c9d878a chore(ui): update Next.js and ESLint dependencies to version 14.2.32 (#8623) 2025-09-01 18:38:39 +02:00
Hugo Pereira Brito a064e43302 chore(ui): render attributes as markdown (#8604)
Co-authored-by: alejandrobailo <alejandrobailo94@gmail.com>
2025-09-01 16:43:36 +02:00
Daniel Barranquero fdb76e7820 feat(docs): update mfa enforcement date for m365 (#8610) 2025-09-01 09:48:21 +02:00
Pepe Fagoaga 1259bb85e3 fix: remove dot (#8613) 2025-08-29 14:46:19 +05:45
Pepe Fagoaga 0db9ab91b2 chore(docs): review stats, imgs and update copy (#8612) 2025-08-29 14:44:01 +05:45
César Arroba f6ea314ec0 chore(sdk): push readme file to docker hub (#8611) 2025-08-29 14:43:53 +05:45
Alejandro Bailo 9e02da342b docs: Security Hub API and UI documentation (#8576)
Co-authored-by: Adrián Jesús Peña Rodríguez <adrianjpr@gmail.com>
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2025-08-28 20:43:42 +05:45
Prowler Bot 358d4239c7 chore(release): Bump version to v5.12.0 (#8605)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2025-08-28 16:56:24 +02:00
228 changed files with 13500 additions and 2257 deletions
+1
View File
@@ -127,6 +127,7 @@ jQIDAQAB
DJANGO_SECRETS_ENCRYPTION_KEY="oE/ltOhp/n1TdbHjVmzcjDPLcLA41CVI/4Rk+UB5ESc="
DJANGO_BROKER_VISIBILITY_TIMEOUT=86400
DJANGO_SENTRY_DSN=
DJANGO_THROTTLE_TOKEN_OBTAIN=50/minute
# Sentry settings
SENTRY_ENVIRONMENT=local
+7
View File
@@ -37,6 +37,11 @@ provider/iac:
- any-glob-to-any-file: "prowler/providers/iac/**"
- any-glob-to-any-file: "tests/providers/iac/**"
provider/mongodbatlas:
- changed-files:
- any-glob-to-any-file: "prowler/providers/mongodbatlas/**"
- any-glob-to-any-file: "tests/providers/mongodbatlas/**"
github_actions:
- changed-files:
- any-glob-to-any-file: ".github/workflows/*"
@@ -52,11 +57,13 @@ mutelist:
- any-glob-to-any-file: "prowler/providers/azure/lib/mutelist/**"
- any-glob-to-any-file: "prowler/providers/gcp/lib/mutelist/**"
- any-glob-to-any-file: "prowler/providers/kubernetes/lib/mutelist/**"
- any-glob-to-any-file: "prowler/providers/mongodbatlas/lib/mutelist/**"
- any-glob-to-any-file: "tests/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/aws/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/azure/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/gcp/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/kubernetes/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/mongodbatlas/lib/mutelist/**"
integration/s3:
- changed-files:
@@ -157,6 +157,22 @@ jobs:
cache-from: type=gha
cache-to: type=gha,mode=max
# - name: Push README to Docker Hub (toniblyx)
# uses: peter-evans/dockerhub-description@432a30c9e07499fd01da9f8a49f0faf9e0ca5b77 # v4.0.2
# with:
# username: ${{ secrets.DOCKERHUB_USERNAME }}
# password: ${{ secrets.DOCKERHUB_TOKEN }}
# repository: ${{ env.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}
# readme-filepath: ./README.md
#
# - name: Push README to Docker Hub (prowlercloud)
# uses: peter-evans/dockerhub-description@432a30c9e07499fd01da9f8a49f0faf9e0ca5b77 # v4.0.2
# with:
# username: ${{ secrets.DOCKERHUB_USERNAME }}
# password: ${{ secrets.DOCKERHUB_TOKEN }}
# repository: ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}
# readme-filepath: ./README.md
dispatch-action:
needs: container-build-push
runs-on: ubuntu-latest
+15
View File
@@ -234,6 +234,21 @@ jobs:
run: |
poetry run pytest -n auto --cov=./prowler/providers/iac --cov-report=xml:iac_coverage.xml tests/providers/iac
# Test MongoDB Atlas
- name: MongoDB Atlas - Check if any file has changed
id: mongodb-atlas-changed-files
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: |
./prowler/providers/mongodbatlas/**
./tests/providers/mongodbatlas/**
./poetry.lock
- name: MongoDB Atlas - Test
if: steps.mongodb-atlas-changed-files.outputs.any_changed == 'true'
run: |
poetry run pytest -n auto --cov=./prowler/providers/mongodbatlas --cov-report=xml:mongodb_atlas_coverage.xml tests/providers/mongodbatlas
# Common Tests
- name: Lib - Test
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
+27 -26
View File
@@ -19,19 +19,16 @@
<a href="https://goto.prowler.com/slack"><img alt="Slack Shield" src="https://img.shields.io/badge/slack-prowler-brightgreen.svg?logo=slack"></a>
<a href="https://pypi.org/project/prowler/"><img alt="Python Version" src="https://img.shields.io/pypi/v/prowler.svg"></a>
<a href="https://pypi.python.org/pypi/prowler/"><img alt="Python Version" src="https://img.shields.io/pypi/pyversions/prowler.svg"></a>
<a href="https://pypistats.org/packages/prowler"><img alt="PyPI Prowler Downloads" src="https://img.shields.io/pypi/dw/prowler.svg?label=prowler%20downloads"></a>
<a href="https://pypistats.org/packages/prowler"><img alt="PyPI Downloads" src="https://img.shields.io/pypi/dw/prowler.svg?label=downloads"></a>
<a href="https://hub.docker.com/r/toniblyx/prowler"><img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/toniblyx/prowler"></a>
<a href="https://hub.docker.com/r/toniblyx/prowler"><img alt="Docker" src="https://img.shields.io/docker/cloud/build/toniblyx/prowler"></a>
<a href="https://hub.docker.com/r/toniblyx/prowler"><img alt="Docker" src="https://img.shields.io/docker/image-size/toniblyx/prowler"></a>
<a href="https://gallery.ecr.aws/prowler-cloud/prowler"><img width="120" height="19" alt="AWS ECR Gallery" src="https://user-images.githubusercontent.com/3985464/151531396-b6535a68-c907-44eb-95a1-a09508178616.png"></a>
<a href="https://codecov.io/gh/prowler-cloud/prowler"><img src="https://codecov.io/gh/prowler-cloud/prowler/graph/badge.svg?token=OflBGsdpDl"/></a>
</p>
<p align="center">
<a href="https://github.com/prowler-cloud/prowler"><img alt="Repo size" src="https://img.shields.io/github/repo-size/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler/issues"><img alt="Issues" src="https://img.shields.io/github/issues/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler/releases"><img alt="Version" src="https://img.shields.io/github/v/release/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler/releases"><img alt="Version" src="https://img.shields.io/github/v/release/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler/releases"><img alt="Version" src="https://img.shields.io/github/release-date/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler"><img alt="Contributors" src="https://img.shields.io/github/contributors-anon/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler/issues"><img alt="Issues" src="https://img.shields.io/github/issues/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler"><img alt="License" src="https://img.shields.io/github/license/prowler-cloud/prowler"></a>
<a href="https://twitter.com/ToniBlyx"><img alt="Twitter" src="https://img.shields.io/twitter/follow/toniblyx?style=social"></a>
<a href="https://twitter.com/prowlercloud"><img alt="Twitter" src="https://img.shields.io/twitter/follow/prowlercloud?style=social"></a>
@@ -55,15 +52,11 @@ Prowler includes hundreds of built-in controls to ensure compliance with standar
- **National Security Standards:** ENS (Spanish National Security Scheme)
- **Custom Security Frameworks:** Tailored to your needs
## Prowler CLI and Prowler Cloud
Prowler offers a Command Line Interface (CLI), known as Prowler Open Source, and an additional service built on top of it, called <a href="https://prowler.com">Prowler Cloud</a>.
## Prowler App
Prowler App is a web-based application that simplifies running Prowler across your cloud provider accounts. It provides a user-friendly interface to visualize the results and streamline your security assessments.
![Prowler App](docs/img/overview.png)
![Prowler App](docs/products/img/overview.png)
>For more details, refer to the [Prowler App Documentation](https://docs.prowler.com/projects/prowler-open-source/en/latest/#prowler-app-installation)
@@ -80,28 +73,36 @@ prowler <provider>
```console
prowler dashboard
```
![Prowler Dashboard](docs/img/dashboard.png)
![Prowler Dashboard](docs/products/img/dashboard.png)
# Prowler at a Glance
> [!Tip]
> For the most accurate and up-to-date information about checks, services, frameworks, and categories, visit [**Prowler Hub**](https://hub.prowler.com).
| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) |
|---|---|---|---|---|
| AWS | 571 | 82 | 36 | 10 |
| GCP | 79 | 13 | 10 | 3 |
| Azure | 162 | 19 | 11 | 4 |
| Kubernetes | 83 | 7 | 5 | 7 |
| GitHub | 17 | 2 | 1 | 0 |
| M365 | 70 | 7 | 3 | 2 |
| NHN (Unofficial) | 6 | 2 | 1 | 0 |
| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) | Support | Stage | Interface |
|---|---|---|---|---|---|---|---|
| AWS | 576 | 82 | 36 | 10 | Official | Stable | UI, API, CLI |
| GCP | 79 | 13 | 10 | 3 | Official | Stable | UI, API, CLI |
| Azure | 162 | 19 | 11 | 4 | Official | Stable | UI, API, CLI |
| Kubernetes | 83 | 7 | 5 | 7 | Official | Stable | UI, API, CLI |
| GitHub | 17 | 2 | 1 | 0 | Official | Stable | UI, API, CLI |
| M365 | 70 | 7 | 3 | 2 | Official | Stable | UI, API, CLI |
| IaC | [See `trivy` docs.](https://trivy.dev/latest/docs/coverage/iac/) | N/A | N/A | N/A | Official | Beta | CLI |
| MongoDB Atlas | 10 | 3 | 0 | 0 | Official | Beta | CLI |
| NHN | 6 | 2 | 1 | 0 | Unofficial | Beta | CLI |
> [!Note]
> The numbers in the table are updated periodically.
> [!Tip]
> For the most accurate and up-to-date information about checks, services, frameworks, and categories, visit [**Prowler Hub**](https://hub.prowler.com).
> [!Note]
> Use the following commands to list Prowler's available checks, services, compliance frameworks, and categories: `prowler <provider> --list-checks`, `prowler <provider> --list-services`, `prowler <provider> --list-compliance` and `prowler <provider> --list-categories`.
> Use the following commands to list Prowler's available checks, services, compliance frameworks, and categories:
> - `prowler <provider> --list-checks`
> - `prowler <provider> --list-services`
> - `prowler <provider> --list-compliance`
> - `prowler <provider> --list-categories`
# 💻 Installation
@@ -239,7 +240,7 @@ The following versions of Prowler CLI are available, depending on your requireme
The container images are available here:
- Prowler CLI:
- [DockerHub](https://hub.docker.com/r/toniblyx/prowler/tags)
- [DockerHub](https://hub.docker.com/r/prowlercloud/prowler/tags)
- [AWS Public ECR](https://gallery.ecr.aws/prowler-cloud/prowler)
- Prowler App:
- [DockerHub - Prowler UI](https://hub.docker.com/r/prowlercloud/prowler-ui/tags)
@@ -274,7 +275,7 @@ python prowler-cli.py -v
- **Prowler API**: A backend service, developed with Django REST Framework, responsible for running Prowler scans and storing the generated results.
- **Prowler SDK**: A Python SDK designed to extend the functionality of the Prowler CLI for advanced capabilities.
![Prowler App Architecture](docs/img/prowler-app-architecture.png)
![Prowler App Architecture](docs/products/img/prowler-app-architecture.png)
## Prowler CLI
+2
View File
@@ -19,6 +19,8 @@ DJANGO_REFRESH_TOKEN_LIFETIME=1440
DJANGO_CACHE_MAX_AGE=3600
DJANGO_STALE_WHILE_REVALIDATE=60
DJANGO_SECRETS_ENCRYPTION_KEY=""
# Throttle, two options: Empty means no throttle; or if desired use one in DRF format: https://www.django-rest-framework.org/api-guide/throttling/#setting-the-throttling-policy
DJANGO_THROTTLE_TOKEN_OBTAIN=50/minute
# Decide whether to allow Django manage database table partitions
DJANGO_MANAGE_DB_PARTITIONS=[True|False]
DJANGO_CELERY_DEADLOCK_ATTEMPTS=5
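The comment above notes that `DJANGO_THROTTLE_TOKEN_OBTAIN` is either empty (no throttling) or a rate in DRF's `<number>/<period>` format. As a rough illustration only, the sketch below shows one way such a variable could be wired into a Django REST Framework scoped throttle; the scope name `token_obtain` and the settings layout are assumptions, not the actual Prowler API wiring.

```python
# Hypothetical settings sketch (not the actual Prowler API code): map an env
# var in DRF rate format, e.g. "50/minute", onto a scoped throttle rate.
import os

# Empty string means no throttling; otherwise use DRF's "<number>/<period>" format.
DJANGO_THROTTLE_TOKEN_OBTAIN = os.environ.get("DJANGO_THROTTLE_TOKEN_OBTAIN", "")

REST_FRAMEWORK = {
    "DEFAULT_THROTTLE_CLASSES": ["rest_framework.throttling.ScopedRateThrottle"],
    # Only register a rate when the variable is set; an unset rate leaves the
    # endpoint unthrottled.
    "DEFAULT_THROTTLE_RATES": (
        {"token_obtain": DJANGO_THROTTLE_TOKEN_OBTAIN}
        if DJANGO_THROTTLE_TOKEN_OBTAIN
        else {}
    ),
}
```

A view serving the token endpoint would then opt in by declaring `throttle_scope = "token_obtain"` (again, the scope name here is illustrative).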
+9
View File
@@ -2,6 +2,15 @@
All notable changes to the **Prowler API** are documented in this file.
## [1.13.0] (Prowler 5.12.0)
### Added
- Integration with JIRA, enabling sending findings to a JIRA project [(#8622)](https://github.com/prowler-cloud/prowler/pull/8622), [(#8637)](https://github.com/prowler-cloud/prowler/pull/8637)
- `GET /overviews/findings_severity` now supports `filter[status]` and `filter[status__in]` to aggregate by specific statuses (`FAIL`, `PASS`)[(#8186)](https://github.com/prowler-cloud/prowler/pull/8186)
- Throttling options for `/api/v1/tokens` using the `DJANGO_THROTTLE_TOKEN_OBTAIN` environment variable [(#8647)](https://github.com/prowler-cloud/prowler/pull/8647)
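To illustrate the status filters described in the entries above, a client might call the overview endpoint as sketched below with the `requests` library; the base URL, path prefix, and token are placeholders.

```python
# Hypothetical client-side sketch of the new status filters on the
# findings_severity overview endpoint. URL and token are placeholders.
import requests

API_BASE = "https://api.example.com/api/v1"            # placeholder base URL
headers = {"Authorization": "Bearer <access-token>"}    # placeholder JWT

# Aggregate severities counting only failed findings.
resp = requests.get(
    f"{API_BASE}/overviews/findings_severity",
    params={"filter[status]": "FAIL"},
    headers=headers,
)
resp.raise_for_status()
print(resp.json())

# Aggregate over several statuses at once with filter[status__in].
resp = requests.get(
    f"{API_BASE}/overviews/findings_severity",
    params={"filter[status__in]": "FAIL,PASS"},
    headers=headers,
)
```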
---
## [1.12.0] (Prowler 5.11.0)
### Added
+4 -65
View File
@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand.
# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand.
[[package]]
name = "about-time"
@@ -383,24 +383,6 @@ cryptography = ">=2.1.4"
isodate = ">=0.6.1"
typing-extensions = ">=4.0.1"
[[package]]
name = "azure-mgmt-apimanagement"
version = "5.0.0"
description = "Microsoft Azure API Management Client Library for Python"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "azure_mgmt_apimanagement-5.0.0-py3-none-any.whl", hash = "sha256:b88c42a392333b60722fb86f15d092dfc19a8d67510dccd15c217381dff4e6ec"},
{file = "azure_mgmt_apimanagement-5.0.0.tar.gz", hash = "sha256:0ab7fe17e70fe3154cd840ff47d19d7a4610217003eaa7c21acf3511a6e57999"},
]
[package.dependencies]
azure-common = ">=1.1"
azure-mgmt-core = ">=1.3.2"
isodate = ">=0.6.1"
typing-extensions = ">=4.6.0"
[[package]]
name = "azure-mgmt-applicationinsights"
version = "4.1.0"
@@ -558,23 +540,6 @@ azure-mgmt-core = ">=1.3.2"
isodate = ">=0.6.1"
typing-extensions = ">=4.6.0"
[[package]]
name = "azure-mgmt-loganalytics"
version = "12.0.0"
description = "Microsoft Azure Log Analytics Management Client Library for Python"
optional = false
python-versions = "*"
groups = ["main"]
files = [
{file = "azure-mgmt-loganalytics-12.0.0.zip", hash = "sha256:da128a7e0291be7fa2063848df92a9180cf5c16d42adc09d2bc2efd711536bfb"},
{file = "azure_mgmt_loganalytics-12.0.0-py2.py3-none-any.whl", hash = "sha256:75ac1d47dd81179905c40765be8834643d8994acff31056ddc1863017f3faa02"},
]
[package.dependencies]
azure-common = ">=1.1,<2.0"
azure-mgmt-core = ">=1.2.0,<2.0.0"
msrest = ">=0.6.21"
[[package]]
name = "azure-mgmt-monitor"
version = "6.0.2"
@@ -785,23 +750,6 @@ azure-mgmt-core = ">=1.3.2"
isodate = ">=0.6.1"
typing-extensions = ">=4.6.0"
[[package]]
name = "azure-monitor-query"
version = "2.0.0"
description = "Microsoft Corporation Azure Monitor Query Client Library for Python"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "azure_monitor_query-2.0.0-py3-none-any.whl", hash = "sha256:8f52d581271d785e12f49cd5aaa144b8910fb843db2373855a7ef94c7fc462ea"},
{file = "azure_monitor_query-2.0.0.tar.gz", hash = "sha256:7b05f2fcac4fb67fc9f77a7d4c5d98a0f3099fb73b57c69ec1b080773994671b"},
]
[package.dependencies]
azure-core = ">=1.30.0"
isodate = ">=0.6.1"
typing-extensions = ">=4.6.0"
[[package]]
name = "azure-storage-blob"
version = "12.24.1"
@@ -4052,7 +4000,6 @@ alive-progress = "3.3.0"
awsipranges = "0.3.3"
azure-identity = "1.21.0"
azure-keyvault-keys = "4.10.0"
azure-mgmt-apimanagement = "5.0.0"
azure-mgmt-applicationinsights = "4.1.0"
azure-mgmt-authorization = "4.0.0"
azure-mgmt-compute = "34.0.0"
@@ -4061,7 +4008,6 @@ azure-mgmt-containerservice = "34.1.0"
azure-mgmt-cosmosdb = "9.7.0"
azure-mgmt-databricks = "2.0.0"
azure-mgmt-keyvault = "10.3.1"
azure-mgmt-loganalytics = "12.0.0"
azure-mgmt-monitor = "6.0.2"
azure-mgmt-network = "28.1.0"
azure-mgmt-rdbms = "10.1.0"
@@ -4074,7 +4020,6 @@ azure-mgmt-sql = "3.0.1"
azure-mgmt-storage = "22.1.1"
azure-mgmt-subscription = "3.1.1"
azure-mgmt-web = "8.0.0"
azure-monitor-query = "2.0.0"
azure-storage-blob = "12.24.1"
boto3 = "1.39.15"
botocore = "1.39.15"
@@ -4086,7 +4031,6 @@ detect-secrets = "1.5.0"
dulwich = "0.23.0"
google-api-python-client = "2.163.0"
google-auth-httplib2 = ">=0.1,<0.3"
h2 = "4.3.0"
jsonschema = "4.23.0"
kubernetes = "32.0.1"
microsoft-kiota-abstractions = "1.9.2"
@@ -4108,8 +4052,8 @@ tzlocal = "5.3.1"
[package.source]
type = "git"
url = "https://github.com/prowler-cloud/prowler.git"
reference = "v5.11"
resolved_reference = "b003fca37712bbdd90fa8313848cf57a1e447f4f"
reference = "master"
resolved_reference = "525f152e51f82de2110ed158c8dc489e42c289cf"
[[package]]
name = "psutil"
@@ -5279,7 +5223,6 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a52d48f4e7bf9005e8f0a89209bf9a73f7190ddf0489eee5eb51377385f59f2a"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl", hash = "sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6"},
@@ -5288,7 +5231,6 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1492a6051dab8d912fc2adeef0e8c72216b24d57bd896ea607cb90bb0c4981d3"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632"},
@@ -5297,7 +5239,6 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a"},
@@ -5306,7 +5247,6 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4f6f3eac23941b32afccc23081e1f50612bdbe4e982012ef4f5797986828cd01"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl", hash = "sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl", hash = "sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987"},
@@ -5315,7 +5255,6 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2c59aa6170b990d8d2719323e628aaf36f3bfbc1c26279c0eeeb24d05d2d11c7"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl", hash = "sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl", hash = "sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b"},
{file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"},
@@ -6221,4 +6160,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.1"
python-versions = ">=3.11,<3.13"
content-hash = "9f810d14b9705a1dc15b85f8dc4507c2637247cea7fd183ea0f544571e56984c"
content-hash = "b954196aba7e108cacb94fd15732be7130b27379add09140fabbb55f7335bb7b"
+2 -2
View File
@@ -24,7 +24,7 @@ dependencies = [
"drf-spectacular-jsonapi==0.5.1",
"gunicorn==23.0.0",
"lxml==5.3.2",
"prowler @ git+https://github.com/prowler-cloud/prowler.git@v5.11",
"prowler @ git+https://github.com/prowler-cloud/prowler.git@master",
"psycopg2-binary==2.9.9",
"pytest-celery[redis] (>=1.0.1,<2.0.0)",
"sentry-sdk[django] (>=2.20.0,<3.0.0)",
@@ -39,7 +39,7 @@ name = "prowler-api"
package-mode = false
# Needed for the SDK compatibility
requires-python = ">=3.11,<3.13"
version = "1.12.0"
version = "1.13.0"
[project.scripts]
celery = "src.backend.config.settings.celery"
+88 -1
View File
@@ -2,7 +2,7 @@ from datetime import date, datetime, timedelta, timezone
from dateutil.parser import parse
from django.conf import settings
from django.db.models import Q
from django.db.models import F, Q
from django_filters.rest_framework import (
BaseInFilter,
BooleanFilter,
@@ -28,6 +28,7 @@ from api.models import (
Integration,
Invitation,
Membership,
OverviewStatusChoices,
PermissionChoices,
Processor,
Provider,
@@ -750,6 +751,72 @@ class ScanSummaryFilter(FilterSet):
}
class ScanSummarySeverityFilter(ScanSummaryFilter):
"""Filter for findings_severity ScanSummary endpoint - includes status filters"""
# Custom status filters - only for severity grouping endpoint
status = ChoiceFilter(method="filter_status", choices=OverviewStatusChoices.choices)
status__in = CharInFilter(method="filter_status_in", lookup_expr="in")
def filter_status(self, queryset, name, value):
# Validate the status value
if value not in [choice[0] for choice in OverviewStatusChoices.choices]:
raise ValidationError(f"Invalid status value: {value}")
# Apply the filter by annotating the queryset with the status field
if value == OverviewStatusChoices.FAIL:
return queryset.annotate(status_count=F("fail"))
elif value == OverviewStatusChoices.PASS:
return queryset.annotate(status_count=F("_pass"))
else:
return queryset.annotate(status_count=F("total"))
def filter_status_in(self, queryset, name, value):
# Validate the status values
valid_statuses = [choice[0] for choice in OverviewStatusChoices.choices]
for status_val in value:
if status_val not in valid_statuses:
raise ValidationError(f"Invalid status value: {status_val}")
# If all statuses or no valid statuses, use total
if (
set(value)
>= {
OverviewStatusChoices.FAIL,
OverviewStatusChoices.PASS,
}
or not value
):
return queryset.annotate(status_count=F("total"))
# Build the sum expression based on status values
sum_expression = None
for status in value:
if status == OverviewStatusChoices.FAIL:
field_expr = F("fail")
elif status == OverviewStatusChoices.PASS:
field_expr = F("_pass")
else:
continue
if sum_expression is None:
sum_expression = field_expr
else:
sum_expression = sum_expression + field_expr
if sum_expression is None:
return queryset.annotate(status_count=F("total"))
return queryset.annotate(status_count=sum_expression)
class Meta:
model = ScanSummary
fields = {
"inserted_at": ["date", "gte", "lte"],
"region": ["exact", "icontains", "in"],
}
class ServiceOverviewFilter(ScanSummaryFilter):
def is_valid(self):
# Check if at least one of the inserted_at filters is present
@@ -793,3 +860,23 @@ class ProcessorFilter(FilterSet):
field_name="processor_type",
lookup_expr="in",
)
class IntegrationJiraFindingsFilter(FilterSet):
# To be expanded as needed
finding_id = UUIDFilter(field_name="id", lookup_expr="exact")
finding_id__in = UUIDInFilter(field_name="id", lookup_expr="in")
class Meta:
model = Finding
fields = {}
def filter_queryset(self, queryset):
# Validate that there is at least one filter provided
if not self.data:
raise ValidationError(
{
"findings": "No finding filters provided. At least one filter is required."
}
)
return super().filter_queryset(queryset)
File diff suppressed because one or more lines are too long
+9
View File
@@ -74,6 +74,15 @@ class StatusChoices(models.TextChoices):
MANUAL = "MANUAL", _("Manual")
class OverviewStatusChoices(models.TextChoices):
"""
Status filters allowed in overview/severity endpoints.
"""
FAIL = "FAIL", _("Fail")
PASS = "PASS", _("Pass")
class StateChoices(models.TextChoices):
AVAILABLE = "available", _("Available")
SCHEDULED = "scheduled", _("Scheduled")
+230 -17
View File
@@ -1,7 +1,7 @@
openapi: 3.0.3
info:
title: Prowler API
version: 1.12.0
version: 1.13.0
description: |-
Prowler API specification.
@@ -2846,6 +2846,71 @@ paths:
schema:
$ref: '#/components/schemas/IntegrationCreateResponse'
description: ''
/api/v1/integrations/{integration_pk}/jira/dispatches:
post:
operationId: integrations_jira_dispatches_create
description: Send a set of filtered findings to the given integration. At least
one finding filter must be provided.
summary: Send findings to a Jira integration
parameters:
- in: query
name: filter[finding_id]
schema:
type: string
format: uuid
- in: query
name: filter[finding_id__in]
schema:
type: array
items:
type: string
format: uuid
description: Multiple values may be separated by commas.
explode: false
style: form
- in: path
name: integration_pk
schema:
type: string
required: true
tags:
- Integration
requestBody:
content:
application/vnd.api+json:
schema:
$ref: '#/components/schemas/IntegrationJiraDispatchRequest'
application/x-www-form-urlencoded:
schema:
$ref: '#/components/schemas/IntegrationJiraDispatchRequest'
multipart/form-data:
schema:
$ref: '#/components/schemas/IntegrationJiraDispatchRequest'
required: true
security:
- jwtAuth: []
responses:
'202':
content:
application/vnd.api+json:
schema:
$ref: '#/components/schemas/TaskResponse'
examples:
Task queued:
summary: Task queued
value:
data:
type: tasks
id: 497f6eca-6276-4993-bfeb-53cbbbba6f08
attributes:
inserted_at: '2019-08-24T14:15:22Z'
completed_at: '2019-08-24T14:15:22Z'
name: string
state: available
result: null
task_args: null
metadata: null
description: ''
/api/v1/integrations/{id}:
get:
operationId: integrations_retrieve
@@ -3027,8 +3092,8 @@ paths:
/api/v1/lighthouse-configurations:
get:
operationId: lighthouse_configurations_list
description: Retrieve a list of all Lighthouse configurations.
summary: List all Lighthouse configurations
description: Retrieve a list of all Lighthouse AI configurations.
summary: List all Lighthouse AI configurations
parameters:
- in: query
name: fields[lighthouse-configurations]
@@ -3087,7 +3152,7 @@ paths:
- -is_active
explode: false
tags:
- Lighthouse
- Lighthouse AI
security:
- jwtAuth: []
responses:
@@ -3099,10 +3164,10 @@ paths:
description: ''
post:
operationId: lighthouse_configurations_create
description: Create a new Lighthouse configuration with the specified details.
summary: Create a new Lighthouse configuration
description: Create a new Lighthouse AI configuration with the specified details.
summary: Create a new Lighthouse AI configuration
tags:
- Lighthouse
- Lighthouse AI
requestBody:
content:
application/vnd.api+json:
@@ -3127,8 +3192,8 @@ paths:
/api/v1/lighthouse-configurations/{id}:
patch:
operationId: lighthouse_configurations_partial_update
description: Update certain fields of an existing Lighthouse configuration.
summary: Partially update a Lighthouse configuration
description: Update certain fields of an existing Lighthouse AI configuration.
summary: Partially update a Lighthouse AI configuration
parameters:
- in: path
name: id
@@ -3136,7 +3201,7 @@ paths:
type: string
required: true
tags:
- Lighthouse
- Lighthouse AI
requestBody:
content:
application/vnd.api+json:
@@ -3160,8 +3225,8 @@ paths:
description: ''
delete:
operationId: lighthouse_configurations_destroy
description: Remove a Lighthouse configuration by its ID.
summary: Delete a Lighthouse configuration
description: Remove a Lighthouse AI configuration by its ID.
summary: Delete a Lighthouse AI configuration
parameters:
- in: path
name: id
@@ -3169,7 +3234,7 @@ paths:
type: string
required: true
tags:
- Lighthouse
- Lighthouse AI
security:
- jwtAuth: []
responses:
@@ -3179,7 +3244,7 @@ paths:
post:
operationId: lighthouse_configurations_connection_create
description: Verify the connection to the OpenAI API for a specific Lighthouse
configuration.
AI configuration.
summary: Check the connection to the OpenAI API
parameters:
- in: path
@@ -3188,7 +3253,7 @@ paths:
type: string
required: true
tags:
- Lighthouse
- Lighthouse AI
security:
- jwtAuth: []
responses:
@@ -3506,6 +3571,25 @@ paths:
description: A search term.
schema:
type: string
- in: query
name: filter[status]
schema:
type: string
enum:
- FAIL
- PASS
description: |-
* `FAIL` - Fail
* `PASS` - Pass
- in: query
name: filter[status__in]
schema:
type: array
items:
type: string
description: Multiple values may be separated by commas.
explode: false
style: form
- name: sort
required: false
in: query
@@ -9132,6 +9216,12 @@ components:
default: false
description: If true, archives findings that are not present in
the current execution.
- type: object
title: JIRA
description: JIRA integration does not accept any configuration in
the payload. Leave it as an empty JSON object (`{}`).
properties: {}
additionalProperties: false
credentials:
oneOf:
- type: object
@@ -9171,6 +9261,24 @@ components:
- User_Session-1
- Test.Session@2
pattern: ^[a-zA-Z0-9=,.@_-]+$
- type: object
title: JIRA Credentials
properties:
user_mail:
type: string
format: email
description: The email address of the JIRA user account.
api_token:
type: string
description: The API token for authentication with JIRA. This
can be generated from your Atlassian account settings.
domain:
type: string
description: The JIRA domain/instance URL (e.g., 'your-domain.atlassian.net').
required:
- user_mail
- api_token
- domain
writeOnly: true
required:
- integration_type
@@ -9292,6 +9400,12 @@ components:
default: false
description: If true, archives findings that are not present
in the current execution.
- type: object
title: JIRA
description: JIRA integration does not accept any configuration
in the payload. Leave it as an empty JSON object (`{}`).
properties: {}
additionalProperties: false
credentials:
oneOf:
- type: object
@@ -9332,6 +9446,24 @@ components:
- User_Session-1
- Test.Session@2
pattern: ^[a-zA-Z0-9=,.@_-]+$
- type: object
title: JIRA Credentials
properties:
user_mail:
type: string
format: email
description: The email address of the JIRA user account.
api_token:
type: string
description: The API token for authentication with JIRA. This
can be generated from your Atlassian account settings.
domain:
type: string
description: The JIRA domain/instance URL (e.g., 'your-domain.atlassian.net').
required:
- user_mail
- api_token
- domain
writeOnly: true
required:
- integration_type
@@ -9377,6 +9509,39 @@ components:
$ref: '#/components/schemas/IntegrationCreate'
required:
- data
IntegrationJiraDispatchRequest:
type: object
properties:
data:
type: object
required:
- type
additionalProperties: false
properties:
type:
type: string
description: The [type](https://jsonapi.org/format/#document-resource-object-identification)
member is used to describe resource objects that share common attributes
and relationships.
enum:
- integrations-jira-dispatches
attributes:
type: object
properties:
project_key:
type: string
minLength: 1
issue_type:
enum:
- Task
type: string
description: '* `Task` - Task'
x-spec-enum-id: b527b0cec62087c1
required:
- project_key
- issue_type
required:
- data
IntegrationResponse:
type: object
properties:
@@ -9468,6 +9633,12 @@ components:
default: false
description: If true, archives findings that are not present in
the current execution.
- type: object
title: JIRA
description: JIRA integration does not accept any configuration in
the payload. Leave it as an empty JSON object (`{}`).
properties: {}
additionalProperties: false
credentials:
oneOf:
- type: object
@@ -9507,6 +9678,24 @@ components:
- User_Session-1
- Test.Session@2
pattern: ^[a-zA-Z0-9=,.@_-]+$
- type: object
title: JIRA Credentials
properties:
user_mail:
type: string
format: email
description: The email address of the JIRA user account.
api_token:
type: string
description: The API token for authentication with JIRA. This
can be generated from your Atlassian account settings.
domain:
type: string
description: The JIRA domain/instance URL (e.g., 'your-domain.atlassian.net').
required:
- user_mail
- api_token
- domain
writeOnly: true
relationships:
type: object
@@ -10873,6 +11062,12 @@ components:
default: false
description: If true, archives findings that are not present
in the current execution.
- type: object
title: JIRA
description: JIRA integration does not accept any configuration
in the payload. Leave it as an empty JSON object (`{}`).
properties: {}
additionalProperties: false
credentials:
oneOf:
- type: object
@@ -10913,6 +11108,24 @@ components:
- User_Session-1
- Test.Session@2
pattern: ^[a-zA-Z0-9=,.@_-]+$
- type: object
title: JIRA Credentials
properties:
user_mail:
type: string
format: email
description: The email address of the JIRA user account.
api_token:
type: string
description: The API token for authentication with JIRA. This
can be generated from your Atlassian account settings.
domain:
type: string
description: The JIRA domain/instance URL (e.g., 'your-domain.atlassian.net').
required:
- user_mail
- api_token
- domain
writeOnly: true
relationships:
type: object
@@ -15607,8 +15820,8 @@ tags:
description: Endpoints for managing third-party integrations, including registration,
configuration, retrieval, and deletion of integrations such as S3, JIRA, or other
services.
- name: Lighthouse
description: Endpoints for managing Lighthouse configurations, including creation,
- name: Lighthouse AI
description: Endpoints for managing Lighthouse AI configurations, including creation,
retrieval, updating, and deletion of configurations such as OpenAI keys, models,
and business context.
- name: SAML
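The OpenAPI additions above define the new `/api/v1/integrations/{integration_pk}/jira/dispatches` endpoint and its `IntegrationJiraDispatchRequest` payload. The following is a hedged client sketch of how findings might be queued for delivery to Jira; the base URL, IDs, token, and project key are placeholders, not values from this changeset.

```python
# Hypothetical client sketch for the Jira dispatches endpoint described above.
# All identifiers, tokens, and URLs are placeholders.
import json

import requests

API_BASE = "https://api.example.com/api/v1"                        # placeholder
INTEGRATION_ID = "00000000-0000-0000-0000-000000000000"            # placeholder Jira integration id
FINDING_ID = "11111111-1111-1111-1111-111111111111"                # placeholder finding id

payload = {
    "data": {
        "type": "integrations-jira-dispatches",
        "attributes": {
            "project_key": "PROJ",  # assumed key from the integration's configured projects
            "issue_type": "Task",   # "Task" is the only issue type listed in the schema above
        },
    }
}

response = requests.post(
    f"{API_BASE}/integrations/{INTEGRATION_ID}/jira/dispatches",
    params={"filter[finding_id]": FINDING_ID},  # at least one finding filter is required
    data=json.dumps(payload),
    headers={
        "Authorization": "Bearer <access-token>",       # placeholder JWT
        "Content-Type": "application/vnd.api+json",
    },
)
# Per the spec above, a 202 response wraps a task resource that can be polled.
print(response.status_code, response.json())
```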
+149
View File
@@ -502,3 +502,152 @@ class TestProwlerIntegrationConnectionTest:
assert integration.configuration["regions"]["eu-west-1"] is True
assert integration.configuration["regions"]["ap-south-1"] is False
integration.save.assert_called_once()
@patch("api.utils.rls_transaction")
@patch("api.utils.Jira")
def test_jira_connection_success_basic_auth(
self, mock_jira_class, mock_rls_transaction
):
integration = MagicMock()
integration.integration_type = Integration.IntegrationChoices.JIRA
integration.tenant_id = "test-tenant-id"
integration.credentials = {
"user_mail": "test@example.com",
"api_token": "test_api_token",
"domain": "example.atlassian.net",
}
integration.configuration = {}
# Mock successful JIRA connection with projects
mock_connection = MagicMock()
mock_connection.is_connected = True
mock_connection.error = None
mock_connection.projects = {"PROJ1": "Project 1", "PROJ2": "Project 2"}
mock_jira_class.test_connection.return_value = mock_connection
# Mock rls_transaction context manager
mock_rls_transaction.return_value.__enter__ = MagicMock()
mock_rls_transaction.return_value.__exit__ = MagicMock()
result = prowler_integration_connection_test(integration)
assert result.is_connected is True
assert result.error is None
# Verify JIRA connection was called with correct parameters including domain from credentials
mock_jira_class.test_connection.assert_called_once_with(
user_mail="test@example.com",
api_token="test_api_token",
domain="example.atlassian.net",
raise_on_exception=False,
)
# Verify rls_transaction was called with correct tenant_id
mock_rls_transaction.assert_called_once_with("test-tenant-id")
# Verify projects were saved to integration configuration
assert integration.configuration["projects"] == {
"PROJ1": "Project 1",
"PROJ2": "Project 2",
}
# Verify integration.save() was called
integration.save.assert_called_once()
@patch("api.utils.rls_transaction")
@patch("api.utils.Jira")
def test_jira_connection_failure_invalid_credentials(
self, mock_jira_class, mock_rls_transaction
):
integration = MagicMock()
integration.integration_type = Integration.IntegrationChoices.JIRA
integration.tenant_id = "test-tenant-id"
integration.credentials = {
"user_mail": "invalid@example.com",
"api_token": "invalid_token",
"domain": "invalid.atlassian.net",
}
integration.configuration = {}
# Mock failed JIRA connection
mock_connection = MagicMock()
mock_connection.is_connected = False
mock_connection.error = Exception("Authentication failed: Invalid credentials")
mock_connection.projects = {} # Empty projects when connection fails
mock_jira_class.test_connection.return_value = mock_connection
# Mock rls_transaction context manager
mock_rls_transaction.return_value.__enter__ = MagicMock()
mock_rls_transaction.return_value.__exit__ = MagicMock()
result = prowler_integration_connection_test(integration)
assert result.is_connected is False
assert "Authentication failed: Invalid credentials" in str(result.error)
# Verify JIRA connection was called with correct parameters
mock_jira_class.test_connection.assert_called_once_with(
user_mail="invalid@example.com",
api_token="invalid_token",
domain="invalid.atlassian.net",
raise_on_exception=False,
)
# Verify rls_transaction was called even on failure
mock_rls_transaction.assert_called_once_with("test-tenant-id")
# Verify empty projects dict was saved to integration configuration
assert integration.configuration["projects"] == {}
# Verify integration.save() was called even on connection failure
integration.save.assert_called_once()
@patch("api.utils.rls_transaction")
@patch("api.utils.Jira")
def test_jira_connection_projects_update_with_existing_configuration(
self, mock_jira_class, mock_rls_transaction
):
"""Test that projects are properly updated when integration already has configuration data"""
integration = MagicMock()
integration.integration_type = Integration.IntegrationChoices.JIRA
integration.tenant_id = "test-tenant-id"
integration.credentials = {
"user_mail": "test@example.com",
"api_token": "test_api_token",
"domain": "example.atlassian.net",
}
integration.configuration = {
"issue_types": ["Task"], # Existing configuration
"projects": {"OLD_PROJ": "Old Project"}, # Will be overwritten
}
# Mock successful JIRA connection with new projects
mock_connection = MagicMock()
mock_connection.is_connected = True
mock_connection.error = None
mock_connection.projects = {
"NEW_PROJ1": "New Project 1",
"NEW_PROJ2": "New Project 2",
}
mock_jira_class.test_connection.return_value = mock_connection
# Mock rls_transaction context manager
mock_rls_transaction.return_value.__enter__ = MagicMock()
mock_rls_transaction.return_value.__exit__ = MagicMock()
result = prowler_integration_connection_test(integration)
assert result.is_connected is True
assert result.error is None
# Verify projects were updated (old projects replaced with new ones)
assert integration.configuration["projects"] == {
"NEW_PROJ1": "New Project 1",
"NEW_PROJ2": "New Project 2",
}
# Verify other configuration fields were preserved
assert integration.configuration["issue_types"] == ["Task"]
# Verify integration.save() was called
integration.save.assert_called_once()
+292
View File
@@ -5708,6 +5708,47 @@ class TestIntegrationViewSet:
== data["data"]["relationships"]["providers"]["data"][0]["id"]
)
def test_integrations_create_valid_jira(
self,
authenticated_client,
):
"""Jira integrations are special"""
data = {
"data": {
"type": "integrations",
"attributes": {
"integration_type": Integration.IntegrationChoices.JIRA,
"configuration": {},
"credentials": {
"domain": "prowlerdomain",
"api_token": "this-is-an-api-token-for-jira-that-works-for-sure",
"user_mail": "testing@prowler.com",
},
"enabled": True,
},
}
}
response = authenticated_client.post(
reverse("integration-list"),
data=json.dumps(data),
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_201_CREATED
assert Integration.objects.count() == 1
integration = Integration.objects.first()
integration_configuration = response.json()["data"]["attributes"][
"configuration"
]
assert "projects" in integration_configuration
assert "issue_types" in integration_configuration
assert "domain" in integration_configuration
assert integration.enabled == data["data"]["attributes"]["enabled"]
assert (
integration.integration_type
== data["data"]["attributes"]["integration_type"]
)
assert "credentials" not in response.json()["data"]["attributes"]
def test_integrations_create_valid_relationships(
self,
authenticated_client,
@@ -5806,6 +5847,46 @@ class TestIntegrationViewSet:
"invalid",
None,
),
(
{
"integration_type": "jira",
"configuration": {
"projects": ["JIRA"],
},
"credentials": {"domain": "prowlerdomain"},
},
"invalid",
"configuration",
),
(
{
"integration_type": "jira",
"credentialss": {
"domain": "prowlerdomain",
"api_token": "api-token",
"user_mail": "test@prowler.com",
},
},
"required",
"configuration",
),
(
{
"integration_type": "jira",
"configuration": {},
},
"required",
"credentials",
),
(
{
"integration_type": "jira",
"configuration": {},
"credentials": {"api_token": "api-token"},
},
"invalid",
"credentials",
),
]
),
)
@@ -5995,6 +6076,217 @@ class TestIntegrationViewSet:
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
def test_integrations_create_duplicate_amazon_s3(
self, authenticated_client, providers_fixture
):
provider = providers_fixture[0]
# Create first S3 integration
data = {
"data": {
"type": "integrations",
"attributes": {
"integration_type": Integration.IntegrationChoices.AMAZON_S3,
"configuration": {
"bucket_name": "test-bucket",
"output_directory": "test-output",
},
"credentials": {
"role_arn": "arn:aws:iam::123456789012:role/test-role",
"external_id": "test-external-id",
},
"enabled": True,
},
"relationships": {
"providers": {
"data": [{"type": "providers", "id": str(provider.id)}]
}
},
}
}
# First creation should succeed
response = authenticated_client.post(
reverse("integration-list"),
data=json.dumps(data),
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_201_CREATED
# Attempt to create duplicate should return 409
response = authenticated_client.post(
reverse("integration-list"),
data=json.dumps(data),
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_409_CONFLICT
assert (
"This integration already exists" in response.json()["errors"][0]["detail"]
)
assert (
response.json()["errors"][0]["source"]["pointer"]
== "/data/attributes/configuration"
)
def test_integrations_create_duplicate_jira(self, authenticated_client):
# Create first JIRA integration
data = {
"data": {
"type": "integrations",
"attributes": {
"integration_type": Integration.IntegrationChoices.JIRA,
"configuration": {},
"credentials": {
"user_mail": "test@example.com",
"api_token": "test-api-token",
"domain": "prowlerdomain",
},
"enabled": True,
},
}
}
# First creation should succeed
response = authenticated_client.post(
reverse("integration-list"),
data=json.dumps(data),
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_201_CREATED
# Attempt to create duplicate should return 409
response = authenticated_client.post(
reverse("integration-list"),
data=json.dumps(data),
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_409_CONFLICT
assert (
"This integration already exists" in response.json()["errors"][0]["detail"]
)
assert (
response.json()["errors"][0]["source"]["pointer"]
== "/data/attributes/configuration"
)
def test_integrations_update_jira_configuration_readonly(
self, authenticated_client
):
# Create JIRA integration first
create_data = {
"data": {
"type": "integrations",
"attributes": {
"integration_type": Integration.IntegrationChoices.JIRA,
"configuration": {},
"credentials": {
"user_mail": "test@example.com",
"api_token": "test-api-token",
"domain": "initial-domain",
},
"enabled": True,
},
}
}
# Create the integration
response = authenticated_client.post(
reverse("integration-list"),
data=json.dumps(create_data),
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_201_CREATED
integration_id = response.json()["data"]["id"]
# Attempt to update configuration - should be ignored/not allowed
update_data = {
"data": {
"type": "integrations",
"id": integration_id,
"attributes": {
"configuration": {
"projects": {"NEW_PROJECT": "New Project"},
"issue_types": ["Epic", "Story"],
"domain": "malicious-domain",
}
},
}
}
response = authenticated_client.patch(
reverse("integration-detail", kwargs={"pk": integration_id}),
data=json.dumps(update_data),
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
def test_integrations_update_jira_credentials_domain_reflects_in_configuration(
self, authenticated_client
):
# Create JIRA integration first
create_data = {
"data": {
"type": "integrations",
"attributes": {
"integration_type": Integration.IntegrationChoices.JIRA,
"configuration": {},
"credentials": {
"user_mail": "test@example.com",
"api_token": "test-api-token",
"domain": "original-domain",
},
"enabled": True,
},
}
}
# Create the integration
response = authenticated_client.post(
reverse("integration-list"),
data=json.dumps(create_data),
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_201_CREATED
integration_id = response.json()["data"]["id"]
# Verify initial domain in configuration
initial_integration = response.json()["data"]
assert (
initial_integration["attributes"]["configuration"]["domain"]
== "original-domain"
)
# Update credentials with new domain
update_data = {
"data": {
"type": "integrations",
"id": integration_id,
"attributes": {
"credentials": {
"user_mail": "updated@example.com",
"api_token": "updated-api-token",
"domain": "updated-domain",
}
},
}
}
response = authenticated_client.patch(
reverse("integration-detail", kwargs={"pk": integration_id}),
data=json.dumps(update_data),
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_200_OK
# Verify the new domain is reflected in configuration
updated_integration = response.json()["data"]
configuration = updated_integration["attributes"]["configuration"]
assert configuration["domain"] == "updated-domain"
# Verify other configuration fields are preserved
assert "projects" in configuration
assert "issue_types" in configuration
@pytest.mark.django_db
class TestSAMLTokenValidation:
+27 -2
View File
@@ -6,9 +6,11 @@ from django.db.models import Subquery
from rest_framework.exceptions import NotFound, ValidationError
from api.db_router import MainRouter
from api.db_utils import rls_transaction
from api.exceptions import InvitationTokenExpiredException
from api.models import Integration, Invitation, Processor, Provider, Resource
from api.v1.serializers import FindingMetadataSerializer
from prowler.lib.outputs.jira.jira import Jira, JiraBasicAuthError
from prowler.providers.aws.aws_provider import AwsProvider
from prowler.providers.aws.lib.s3.s3 import S3
from prowler.providers.aws.lib.security_hub.security_hub import SecurityHub
@@ -199,7 +201,8 @@ def prowler_integration_connection_test(integration: Integration) -> Connection:
raise_on_exception=False,
)
# TODO: It is possible that we can unify the connection test for all integrations, but need refactoring
# to avoid code duplication. Actually the AWS integrations are similar, so SecurityHub and S3 can be unified making some changes in the SDK.
# to avoid code duplication. Actually the AWS integrations are similar, so SecurityHub and S3 can be unified
# making some changes in the SDK.
elif (
integration.integration_type == Integration.IntegrationChoices.AWS_SECURITY_HUB
):
@@ -236,7 +239,15 @@ def prowler_integration_connection_test(integration: Integration) -> Connection:
return connection
elif integration.integration_type == Integration.IntegrationChoices.JIRA:
pass
jira_connection = Jira.test_connection(
**integration.credentials,
raise_on_exception=False,
)
project_keys = jira_connection.projects if jira_connection.is_connected else {}
with rls_transaction(str(integration.tenant_id)):
integration.configuration["projects"] = project_keys
integration.save()
return jira_connection
elif integration.integration_type == Integration.IntegrationChoices.SLACK:
pass
else:
@@ -336,3 +347,17 @@ def get_findings_metadata_no_aggregations(tenant_id: str, filtered_queryset):
serializer.is_valid(raise_exception=True)
return serializer.data
def initialize_prowler_integration(integration: Integration) -> Jira:
# TODO Refactor other integrations to use this function
if integration.integration_type == Integration.IntegrationChoices.JIRA:
try:
return Jira(**integration.credentials)
except JiraBasicAuthError as jira_auth_error:
with rls_transaction(str(integration.tenant_id)):
integration.configuration["projects"] = {}
integration.connected = False
integration.connection_last_checked_at = datetime.now(tz=timezone.utc)
integration.save()
raise jira_auth_error
@@ -67,6 +67,17 @@ class SecurityHubConfigSerializer(BaseValidateSerializer):
resource_name = "integrations"
class JiraConfigSerializer(BaseValidateSerializer):
domain = serializers.CharField(read_only=True)
issue_types = serializers.ListField(
read_only=True, child=serializers.CharField(), default=["Task"]
)
projects = serializers.DictField(read_only=True)
class Meta:
resource_name = "integrations"
class AWSCredentialSerializer(BaseValidateSerializer):
role_arn = serializers.CharField(required=False)
external_id = serializers.CharField(required=False)
@@ -82,6 +93,15 @@ class AWSCredentialSerializer(BaseValidateSerializer):
resource_name = "integrations"
class JiraCredentialSerializer(BaseValidateSerializer):
user_mail = serializers.EmailField(required=True)
api_token = serializers.CharField(required=True)
domain = serializers.CharField(required=True)
class Meta:
resource_name = "integrations"
@extend_schema_field(
{
"oneOf": [
@@ -133,6 +153,27 @@ class AWSCredentialSerializer(BaseValidateSerializer):
},
},
},
{
"type": "object",
"title": "JIRA Credentials",
"properties": {
"user_mail": {
"type": "string",
"format": "email",
"description": "The email address of the JIRA user account.",
},
"api_token": {
"type": "string",
"description": "The API token for authentication with JIRA. This can be generated from your "
"Atlassian account settings.",
},
"domain": {
"type": "string",
"description": "The JIRA domain/instance URL (e.g., 'your-domain.atlassian.net').",
},
},
"required": ["user_mail", "api_token", "domain"],
},
]
}
)
@@ -153,7 +194,10 @@ class IntegrationCredentialField(serializers.JSONField):
},
"output_directory": {
"type": "string",
"description": 'The directory path within the bucket where files will be saved. Optional - defaults to "output" if not provided. Path will be normalized to remove excessive slashes and invalid characters are not allowed (< > : " | ? *). Maximum length is 900 characters.',
"description": "The directory path within the bucket where files will be saved. Optional - "
'defaults to "output" if not provided. Path will be normalized to remove '
'excessive slashes and invalid characters are not allowed (< > : " | ? *). '
"Maximum length is 900 characters.",
"maxLength": 900,
"pattern": '^[^<>:"|?*]+$',
"default": "output",
@@ -177,6 +221,14 @@ class IntegrationCredentialField(serializers.JSONField):
},
},
},
{
"type": "object",
"title": "JIRA",
"description": "JIRA integration does not accept any configuration in the payload. Leave it as an "
"empty JSON object (`{}`).",
"properties": {},
"additionalProperties": False,
},
]
}
)
+106 -12
@@ -15,6 +15,7 @@ from rest_framework_simplejwt.exceptions import TokenError
from rest_framework_simplejwt.serializers import TokenObtainPairSerializer
from rest_framework_simplejwt.tokens import RefreshToken
from api.exceptions import ConflictException
from api.models import (
Finding,
Integration,
@@ -45,6 +46,8 @@ from api.v1.serializer_utils.integrations import (
AWSCredentialSerializer,
IntegrationConfigField,
IntegrationCredentialField,
JiraConfigSerializer,
JiraCredentialSerializer,
S3ConfigSerializer,
SecurityHubConfigSerializer,
)
@@ -1952,18 +1955,33 @@ class ScheduleDailyCreateSerializer(serializers.Serializer):
class BaseWriteIntegrationSerializer(BaseWriteSerializer):
def validate(self, attrs):
integration_type = attrs.get("integration_type")
if (
attrs.get("integration_type") == Integration.IntegrationChoices.AMAZON_S3
integration_type == Integration.IntegrationChoices.AMAZON_S3
and Integration.objects.filter(
configuration=attrs.get("configuration")
).exists()
):
raise serializers.ValidationError(
{"configuration": "This integration already exists."}
raise ConflictException(
detail="This integration already exists.",
pointer="/data/attributes/configuration",
)
if (
integration_type == Integration.IntegrationChoices.JIRA
and Integration.objects.filter(
configuration__contains={
"domain": attrs.get("configuration").get("domain")
}
).exists()
):
raise ConflictException(
detail="This integration already exists.",
pointer="/data/attributes/configuration",
)
# Check if any provider already has a SecurityHub integration
integration_type = attrs.get("integration_type")
if hasattr(self, "instance") and self.instance and not integration_type:
integration_type = self.instance.integration_type
@@ -1984,10 +2002,10 @@ class BaseWriteIntegrationSerializer(BaseWriteSerializer):
query = query.exclude(integration=self.instance)
if query.exists():
raise serializers.ValidationError(
{
"providers": f"Provider {provider.id} already has a Security Hub integration. Only one Security Hub integration is allowed per provider."
}
raise ConflictException(
detail=f"Provider {provider.id} already has a Security Hub integration. Only one "
"Security Hub integration is allowed per provider.",
pointer="/data/relationships/providers",
)
return super().validate(attrs)
@@ -2018,6 +2036,30 @@ class BaseWriteIntegrationSerializer(BaseWriteSerializer):
)
config_serializer = SecurityHubConfigSerializer
credentials_serializers = [AWSCredentialSerializer]
elif integration_type == Integration.IntegrationChoices.JIRA:
if providers:
raise serializers.ValidationError(
{
"providers": "Relationship field is not accepted. This integration applies to all providers."
}
)
if configuration:
raise serializers.ValidationError(
{
"configuration": "This integration does not support custom configuration."
}
)
config_serializer = JiraConfigSerializer
# Create non-editable configuration for JIRA integration
default_jira_issue_types = ["Task"]
configuration.update(
{
"projects": {},
"issue_types": default_jira_issue_types,
"domain": credentials.get("domain"),
}
)
credentials_serializers = [JiraCredentialSerializer]
else:
raise serializers.ValidationError(
{
@@ -2081,6 +2123,10 @@ class IntegrationSerializer(RLSSerializer):
for provider in representation["providers"]
if provider["id"] in allowed_provider_ids
]
if instance.integration_type == Integration.IntegrationChoices.JIRA:
representation["configuration"].update(
{"domain": instance.credentials.get("domain")}
)
return representation
@@ -2122,9 +2168,7 @@ class IntegrationCreateSerializer(BaseWriteIntegrationSerializer):
and integration_type == Integration.IntegrationChoices.AWS_SECURITY_HUB
):
raise serializers.ValidationError(
{
"providers": "At least one provider is required for the Security Hub integration."
}
{"providers": "At least one provider is required for this integration."}
)
self.validate_integration_data(
@@ -2183,7 +2227,10 @@ class IntegrationUpdateSerializer(BaseWriteIntegrationSerializer):
def validate(self, attrs):
integration_type = self.instance.integration_type
providers = attrs.get("providers")
configuration = attrs.get("configuration") or self.instance.configuration
if integration_type != Integration.IntegrationChoices.JIRA:
configuration = attrs.get("configuration") or self.instance.configuration
else:
configuration = attrs.get("configuration", {})
credentials = attrs.get("credentials") or self.instance.credentials
self.validate_integration_data(
@@ -2213,6 +2260,53 @@ class IntegrationUpdateSerializer(BaseWriteIntegrationSerializer):
return super().update(instance, validated_data)
def to_representation(self, instance):
representation = super().to_representation(instance)
# Ensure JIRA integrations show updated domain in configuration from credentials
if instance.integration_type == Integration.IntegrationChoices.JIRA:
representation["configuration"].update(
{"domain": instance.credentials.get("domain")}
)
return representation
class IntegrationJiraDispatchSerializer(serializers.Serializer):
"""
Serializer for dispatching findings to JIRA integration.
"""
project_key = serializers.CharField(required=True)
issue_type = serializers.ChoiceField(required=True, choices=["Task"])
class JSONAPIMeta:
resource_name = "integrations-jira-dispatches"
def validate(self, attrs):
validated_attrs = super().validate(attrs)
integration_instance = Integration.objects.get(
id=self.context.get("integration_id")
)
if integration_instance.integration_type != Integration.IntegrationChoices.JIRA:
raise ValidationError(
{"integration_type": "The given integration is not a JIRA integration"}
)
if not integration_instance.enabled:
raise ValidationError(
{"integration": "The given integration is not enabled"}
)
project_key = attrs.get("project_key")
if project_key not in integration_instance.configuration.get("projects", {}):
raise ValidationError(
{
"project_key": "The given project key is not available for this JIRA integration. Refresh the "
"connection if this is an error."
}
)
return validated_attrs
# Processors
+9
@@ -12,6 +12,7 @@ from api.v1.views import (
FindingViewSet,
GithubSocialLoginView,
GoogleSocialLoginView,
IntegrationJiraViewSet,
IntegrationViewSet,
InvitationAcceptViewSet,
InvitationViewSet,
@@ -73,6 +74,13 @@ tenants_router.register(
users_router = routers.NestedSimpleRouter(router, r"users", lookup="user")
users_router.register(r"memberships", MembershipViewSet, basename="user-membership")
integrations_router = routers.NestedSimpleRouter(
router, r"integrations", lookup="integration"
)
integrations_router.register(
r"jira", IntegrationJiraViewSet, basename="integration-jira"
)
urlpatterns = [
path("tokens", CustomTokenObtainView.as_view(), name="token-obtain"),
path("tokens/refresh", CustomTokenRefreshView.as_view(), name="token-refresh"),
@@ -162,6 +170,7 @@ urlpatterns = [
path("", include(router.urls)),
path("", include(tenants_router.urls)),
path("", include(users_router.urls)),
path("", include(integrations_router.urls)),
path("schema", SchemaView.as_view(), name="schema"),
path("docs", SpectacularRedocView.as_view(url_name="schema"), name="docs"),
]
+123 -23
@@ -62,6 +62,7 @@ from tasks.tasks import (
check_provider_connection_task,
delete_provider_task,
delete_tenant_task,
jira_integration_task,
perform_scan_task,
)
@@ -75,8 +76,10 @@ from api.db_utils import rls_transaction
from api.exceptions import TaskFailedException
from api.filters import (
ComplianceOverviewFilter,
CustomDjangoFilterBackend,
FindingFilter,
IntegrationFilter,
IntegrationJiraFindingsFilter,
InvitationFilter,
LatestFindingFilter,
LatestResourceFilter,
@@ -89,6 +92,7 @@ from api.filters import (
RoleFilter,
ScanFilter,
ScanSummaryFilter,
ScanSummarySeverityFilter,
ServiceOverviewFilter,
TaskFilter,
TenantFilter,
@@ -142,6 +146,7 @@ from api.v1.serializers import (
FindingMetadataSerializer,
FindingSerializer,
IntegrationCreateSerializer,
IntegrationJiraDispatchSerializer,
IntegrationSerializer,
IntegrationUpdateSerializer,
InvitationAcceptSerializer,
@@ -214,6 +219,8 @@ class RelationshipViewSchema(JsonApiAutoSchema):
description="Obtain a token by providing valid credentials and an optional tenant ID.",
)
class CustomTokenObtainView(GenericAPIView):
throttle_scope = "token-obtain"
resource_name = "tokens"
serializer_class = TokenSerializer
http_method_names = ["post"]
@@ -293,7 +300,7 @@ class SchemaView(SpectacularAPIView):
def get(self, request, *args, **kwargs):
spectacular_settings.TITLE = "Prowler API"
spectacular_settings.VERSION = "1.12.0"
spectacular_settings.VERSION = "1.13.0"
spectacular_settings.DESCRIPTION = (
"Prowler API specification.\n\nThis file is auto-generated."
)
@@ -370,8 +377,8 @@ class SchemaView(SpectacularAPIView):
" retrieval, and deletion of integrations such as S3, JIRA, or other services.",
},
{
"name": "Lighthouse",
"description": "Endpoints for managing Lighthouse configurations, including creation, retrieval, "
"name": "Lighthouse AI",
"description": "Endpoints for managing Lighthouse AI configurations, including creation, retrieval, "
"updating, and deletion of configurations such as OpenAI keys, models, and business "
"context.",
},
@@ -3546,8 +3553,10 @@ class OverviewViewSet(BaseRLSViewSet):
def get_filterset_class(self):
if self.action == "providers":
return None
elif self.action in ["findings", "findings_severity"]:
elif self.action == "findings":
return ScanSummaryFilter
elif self.action == "findings_severity":
return ScanSummarySeverityFilter
elif self.action == "services":
return ServiceOverviewFilter
return None
@@ -3669,7 +3678,12 @@ class OverviewViewSet(BaseRLSViewSet):
@action(detail=False, methods=["get"], url_name="findings_severity")
def findings_severity(self, request):
tenant_id = self.request.tenant_id
queryset = self.get_queryset()
# Load only required fields
queryset = self.get_queryset().only(
"tenant_id", "scan_id", "severity", "fail", "_pass", "total"
)
filtered_queryset = self.filter_queryset(queryset)
provider_filter = (
{"provider__in": self.allowed_providers}
@@ -3689,16 +3703,22 @@ class OverviewViewSet(BaseRLSViewSet):
tenant_id=tenant_id, scan_id__in=latest_scan_ids
)
# The filter will have added a status_count annotation if any status filter was used
if "status_count" in filtered_queryset.query.annotations:
sum_expression = Sum("status_count")
else:
sum_expression = Sum("total")
severity_counts = (
filtered_queryset.values("severity")
.annotate(count=Sum("total"))
.annotate(count=sum_expression)
.order_by("severity")
)
severity_data = {sev[0]: 0 for sev in SeverityChoices}
for item in severity_counts:
severity_data[item["severity"]] = item["count"]
severity_data.update(
{item["severity"]: item["count"] for item in severity_counts}
)
serializer = self.get_serializer(severity_data)
return Response(serializer.data, status=status.HTTP_200_OK)
@@ -3886,31 +3906,111 @@ class IntegrationViewSet(BaseRLSViewSet):
)
@extend_schema_view(
dispatches=extend_schema(
tags=["Integration"],
summary="Send findings to a Jira integration",
description="Send a set of filtered findings to the given integration. At least one finding filter must be "
"provided.",
responses={202: OpenApiResponse(response=TaskSerializer)},
filters=True,
)
)
class IntegrationJiraViewSet(BaseRLSViewSet):
queryset = Finding.all_objects.all()
serializer_class = IntegrationJiraDispatchSerializer
http_method_names = ["post"]
filter_backends = [CustomDjangoFilterBackend]
filterset_class = IntegrationJiraFindingsFilter
# RBAC required permissions
required_permissions = [Permissions.MANAGE_INTEGRATIONS]
@extend_schema(exclude=True)
def create(self, request, *args, **kwargs):
raise MethodNotAllowed(method="POST")
def get_queryset(self):
tenant_id = self.request.tenant_id
user_roles = get_role(self.request.user)
if user_roles.unlimited_visibility:
# User has unlimited visibility, return all findings
queryset = Finding.all_objects.filter(tenant_id=tenant_id)
else:
# User lacks permission, filter findings based on provider groups associated with the role
queryset = Finding.all_objects.filter(
scan__provider__in=get_providers(user_roles)
)
return queryset
@action(detail=False, methods=["post"], url_name="dispatches")
def dispatches(self, request, integration_pk=None):
get_object_or_404(Integration, pk=integration_pk)
serializer = self.get_serializer(
data=request.data, context={"integration_id": integration_pk}
)
serializer.is_valid(raise_exception=True)
if self.filter_queryset(self.get_queryset()).count() == 0:
raise ValidationError(
{"findings": "No findings match the provided filters"}
)
finding_ids = [
str(finding_id)
for finding_id in self.filter_queryset(self.get_queryset()).values_list(
"id", flat=True
)
]
project_key = serializer.validated_data["project_key"]
issue_type = serializer.validated_data["issue_type"]
with transaction.atomic():
task = jira_integration_task.delay(
tenant_id=self.request.tenant_id,
integration_id=integration_pk,
project_key=project_key,
issue_type=issue_type,
finding_ids=finding_ids,
)
prowler_task = Task.objects.get(id=task.id)
serializer = TaskSerializer(prowler_task)
return Response(
data=serializer.data,
status=status.HTTP_202_ACCEPTED,
headers={
"Content-Location": reverse(
"task-detail", kwargs={"pk": prowler_task.id}
)
},
)
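# Illustrative only: a hedged sketch of how a client could invoke this new
# dispatches action. The nested path /api/v1/integrations/<id>/jira/dispatches,
# the "filter[severity]" query parameter and the host name are assumptions for
# illustration and are not confirmed by this changeset; the JSON:API resource
# type and the 202 Task response come from the serializer and view above.
#
#     import json
#     import requests
#
#     API = "https://prowler-api.example.com/api/v1"  # hypothetical base URL
#     integration_id = "<jira-integration-uuid>"
#     payload = {
#         "data": {
#             "type": "integrations-jira-dispatches",
#             "attributes": {"project_key": "PROJ", "issue_type": "Task"},
#         }
#     }
#     response = requests.post(
#         f"{API}/integrations/{integration_id}/jira/dispatches",
#         params={"filter[severity]": "critical"},  # at least one finding filter
#         data=json.dumps(payload),
#         headers={
#             "Authorization": "Bearer <access_token>",
#             "Content-Type": "application/vnd.api+json",
#         },
#         timeout=30,
#     )
#     assert response.status_code == 202  # a Task resource for the async dispatch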
@extend_schema_view(
list=extend_schema(
tags=["Lighthouse"],
summary="List all Lighthouse configurations",
description="Retrieve a list of all Lighthouse configurations.",
tags=["Lighthouse AI"],
summary="List all Lighthouse AI configurations",
description="Retrieve a list of all Lighthouse AI configurations.",
),
create=extend_schema(
tags=["Lighthouse"],
summary="Create a new Lighthouse configuration",
description="Create a new Lighthouse configuration with the specified details.",
tags=["Lighthouse AI"],
summary="Create a new Lighthouse AI configuration",
description="Create a new Lighthouse AI configuration with the specified details.",
),
partial_update=extend_schema(
tags=["Lighthouse"],
summary="Partially update a Lighthouse configuration",
description="Update certain fields of an existing Lighthouse configuration.",
tags=["Lighthouse AI"],
summary="Partially update a Lighthouse AI configuration",
description="Update certain fields of an existing Lighthouse AI configuration.",
),
destroy=extend_schema(
tags=["Lighthouse"],
summary="Delete a Lighthouse configuration",
description="Remove a Lighthouse configuration by its ID.",
tags=["Lighthouse AI"],
summary="Delete a Lighthouse AI configuration",
description="Remove a Lighthouse AI configuration by its ID.",
),
connection=extend_schema(
tags=["Lighthouse"],
tags=["Lighthouse AI"],
summary="Check the connection to the OpenAI API",
description="Verify the connection to the OpenAI API for a specific Lighthouse configuration.",
description="Verify the connection to the OpenAI API for a specific Lighthouse AI configuration.",
request=None,
responses={202: OpenApiResponse(response=TaskSerializer)},
),
+7
@@ -108,6 +108,13 @@ REST_FRAMEWORK = {
),
"TEST_REQUEST_DEFAULT_FORMAT": "vnd.api+json",
"JSON_API_UNIFORM_EXCEPTIONS": True,
"DEFAULT_THROTTLE_CLASSES": [
"rest_framework.throttling.ScopedRateThrottle",
],
"DEFAULT_THROTTLE_RATES": {
"token-obtain": env("DJANGO_THROTTLE_TOKEN_OBTAIN", default=None),
"dj_rest_auth": None,
},
}
SPECTACULAR_SETTINGS = {
+80 -3
@@ -7,7 +7,7 @@ from tasks.utils import batched
from api.db_utils import rls_transaction
from api.models import Finding, Integration, Provider
from api.utils import initialize_prowler_provider
from api.utils import initialize_prowler_integration, initialize_prowler_provider
from prowler.lib.outputs.asff.asff import ASFF
from prowler.lib.outputs.compliance.generic.generic import GenericCompliance
from prowler.lib.outputs.csv.csv import CSV
@@ -330,7 +330,8 @@ def upload_security_hub_integration(
if not connected:
logger.error(
f"Security Hub connection failed for integration {integration.id}: {security_hub.error}"
f"Security Hub connection failed for integration {integration.id}: "
f"{security_hub.error}"
)
integration.connected = False
integration.save()
@@ -338,7 +339,8 @@ def upload_security_hub_integration(
security_hub_client = security_hub
logger.info(
f"Sending {'fail' if send_only_fails else 'all'} findings to Security Hub via integration {integration.id}"
f"Sending {'fail' if send_only_fails else 'all'} findings to Security Hub via "
f"integration {integration.id}"
)
else:
# Update findings in existing client for this batch
@@ -427,3 +429,78 @@ def upload_security_hub_integration(
f"Security Hub integrations failed for provider {provider_id}: {str(e)}"
)
return False
def send_findings_to_jira(
tenant_id: str,
integration_id: str,
project_key: str,
issue_type: str,
finding_ids: list[str],
):
with rls_transaction(tenant_id):
integration = Integration.objects.get(id=integration_id)
jira_integration = initialize_prowler_integration(integration)
num_tickets_created = 0
for finding_id in finding_ids:
with rls_transaction(tenant_id):
finding_instance = (
Finding.all_objects.select_related("scan__provider")
.prefetch_related("resources")
.get(id=finding_id)
)
# Extract resource information
resource = (
finding_instance.resources.first()
if finding_instance.resources.exists()
else None
)
resource_uid = resource.uid if resource else ""
resource_name = resource.name if resource else ""
resource_tags = {}
if resource and hasattr(resource, "tags"):
resource_tags = resource.get_tags(tenant_id)
# Get region
region = resource.region if resource and resource.region else ""
# Extract remediation information from check_metadata
check_metadata = finding_instance.check_metadata
remediation = check_metadata.get("remediation", {})
recommendation = remediation.get("recommendation", {})
remediation_code = remediation.get("code", {})
# Send the individual finding to Jira
result = jira_integration.send_finding(
check_id=finding_instance.check_id,
check_title=check_metadata.get("checktitle", ""),
severity=finding_instance.severity,
status=finding_instance.status,
status_extended=finding_instance.status_extended or "",
provider=finding_instance.scan.provider.provider,
region=region,
resource_uid=resource_uid,
resource_name=resource_name,
risk=check_metadata.get("risk", ""),
recommendation_text=recommendation.get("text", ""),
recommendation_url=recommendation.get("url", ""),
remediation_code_native_iac=remediation_code.get("nativeiac", ""),
remediation_code_terraform=remediation_code.get("terraform", ""),
remediation_code_cli=remediation_code.get("cli", ""),
remediation_code_other=remediation_code.get("other", ""),
resource_tags=resource_tags,
compliance=finding_instance.compliance or {},
project_key=project_key,
issue_type=issue_type,
)
if result:
num_tickets_created += 1
else:
logger.error(f"Failed to send finding {finding_id} to Jira")
return {
"created_count": num_tickets_created,
"failed_count": len(finding_ids) - num_tickets_created,
}
+18
@@ -22,6 +22,7 @@ from tasks.jobs.export import (
_upload_to_s3,
)
from tasks.jobs.integrations import (
send_findings_to_jira,
upload_s3_integration,
upload_security_hub_integration,
)
@@ -595,3 +596,20 @@ def security_hub_integration_task(
scan_id (str): The scan identifier
"""
return upload_security_hub_integration(tenant_id, provider_id, scan_id)
@shared_task(
base=RLSTask,
name="integration-jira",
queue="integrations",
)
def jira_integration_task(
tenant_id: str,
integration_id: str,
project_key: str,
issue_type: str,
finding_ids: list[str],
):
return send_findings_to_jira(
tenant_id, integration_id, project_key, issue_type, finding_ids
)
@@ -4,6 +4,7 @@ import pytest
from tasks.jobs.integrations import (
get_s3_client_from_integration,
get_security_hub_client_from_integration,
send_findings_to_jira,
upload_s3_integration,
upload_security_hub_integration,
)
@@ -1557,3 +1558,354 @@ class TestSecurityHubIntegrationUploads:
mock_security_hub.batch_send_to_security_hub.assert_called_once()
mock_security_hub.archive_previous_findings.assert_called_once()
@pytest.mark.django_db
class TestJiraIntegration:
@patch("tasks.jobs.integrations.rls_transaction")
@patch("tasks.jobs.integrations.Finding")
@patch("tasks.jobs.integrations.Integration")
@patch("tasks.jobs.integrations.initialize_prowler_integration")
def test_send_findings_to_jira_success(
self,
mock_initialize_integration,
mock_integration_model,
mock_finding_model,
mock_rls_transaction,
):
"""Test successful sending of findings to Jira using send_finding method"""
tenant_id = "tenant-123"
integration_id = "integration-456"
project_key = "PROJ"
issue_type = "Task"
finding_ids = ["finding-1", "finding-2"]
# Mock RLS transaction
mock_rls_transaction.return_value.__enter__ = MagicMock()
mock_rls_transaction.return_value.__exit__ = MagicMock()
# Mock integration
integration = MagicMock()
mock_integration_model.objects.get.return_value = integration
# Mock Jira integration
mock_jira_integration = MagicMock()
mock_jira_integration.send_finding.side_effect = [True, True] # Both succeed
mock_initialize_integration.return_value = mock_jira_integration
# Mock findings with resources
resource1 = MagicMock()
resource1.uid = "resource-uid-1"
resource1.name = "resource-name-1"
resource1.region = "us-east-1"
resource1.get_tags.return_value = {"env": "prod", "team": "security"}
resource2 = MagicMock()
resource2.uid = "resource-uid-2"
resource2.name = "resource-name-2"
resource2.region = "eu-west-1"
resource2.get_tags.return_value = {"env": "dev"}
finding1 = MagicMock()
finding1.id = "finding-1"
finding1.check_id = "check_001"
finding1.severity = "high"
finding1.status = "FAIL"
finding1.status_extended = "Resource is not compliant"
finding1.resource_regions = ["us-east-1"]
finding1.compliance = {"cis": ["1.1", "1.2"]}
finding1.resources.exists.return_value = True
finding1.resources.first.return_value = resource1
finding1.scan.provider.provider = "aws"
finding1.check_metadata = {
"checktitle": "Check Title 1",
"risk": "High risk finding",
"remediation": {
"recommendation": {
"text": "Fix this issue",
"url": "https://docs.example.com/fix",
},
"code": {
"nativeiac": "native code",
"terraform": "terraform code",
"cli": "aws cli command",
"other": "",
},
},
}
finding2 = MagicMock()
finding2.id = "finding-2"
finding2.check_id = "check_002"
finding2.severity = "medium"
finding2.status = "PASS"
finding2.status_extended = None
finding2.resource_regions = []
finding2.compliance = {}
finding2.resources.exists.return_value = True
finding2.resources.first.return_value = resource2
finding2.scan.provider.provider = "azure"
finding2.check_metadata = {
"checktitle": "Check Title 2",
"risk": "Medium risk",
"remediation": {
"recommendation": {"text": "Consider fixing", "url": ""},
"code": {},
},
}
mock_finding_model.all_objects.select_related.return_value.prefetch_related.return_value.get.side_effect = [
finding1,
finding2,
]
# Call the function
result = send_findings_to_jira(
tenant_id, integration_id, project_key, issue_type, finding_ids
)
# Assertions
assert result == {"created_count": 2, "failed_count": 0}
# Verify Jira integration was initialized
mock_initialize_integration.assert_called_once_with(integration)
# Verify send_finding was called twice with correct parameters
assert mock_jira_integration.send_finding.call_count == 2
# Verify first call
first_call = mock_jira_integration.send_finding.call_args_list[0]
assert first_call.kwargs["check_id"] == "check_001"
assert first_call.kwargs["check_title"] == "Check Title 1"
assert first_call.kwargs["severity"] == "high"
assert first_call.kwargs["status"] == "FAIL"
assert first_call.kwargs["resource_uid"] == "resource-uid-1"
assert first_call.kwargs["resource_name"] == "resource-name-1"
assert first_call.kwargs["region"] == "us-east-1"
assert first_call.kwargs["provider"] == "aws"
assert first_call.kwargs["project_key"] == project_key
assert first_call.kwargs["issue_type"] == issue_type
# Verify second call
second_call = mock_jira_integration.send_finding.call_args_list[1]
assert second_call.kwargs["check_id"] == "check_002"
assert second_call.kwargs["severity"] == "medium"
assert second_call.kwargs["status"] == "PASS"
@patch("tasks.jobs.integrations.rls_transaction")
@patch("tasks.jobs.integrations.Finding")
@patch("tasks.jobs.integrations.Integration")
@patch("tasks.jobs.integrations.initialize_prowler_integration")
@patch("tasks.jobs.integrations.logger")
def test_send_findings_to_jira_partial_failure(
self,
mock_logger,
mock_initialize_integration,
mock_integration_model,
mock_finding_model,
mock_rls_transaction,
):
"""Test partial failure when sending findings to Jira"""
tenant_id = "tenant-123"
integration_id = "integration-456"
project_key = "PROJ"
issue_type = "Task"
finding_ids = ["finding-1", "finding-2", "finding-3"]
# Mock RLS transaction
mock_rls_transaction.return_value.__enter__ = MagicMock()
mock_rls_transaction.return_value.__exit__ = MagicMock()
# Mock integration
integration = MagicMock()
mock_integration_model.objects.get.return_value = integration
# Mock Jira integration with mixed results
mock_jira_integration = MagicMock()
mock_jira_integration.send_finding.side_effect = [
True,
False,
True,
] # Second fails
mock_initialize_integration.return_value = mock_jira_integration
# Mock findings (simplified for this test)
findings = []
for i in range(3):
finding = MagicMock()
finding.id = f"finding-{i + 1}"
finding.check_id = f"check_{i + 1:03d}"
finding.severity = "low"
finding.status = "FAIL"
finding.status_extended = ""
finding.resource_regions = []
finding.compliance = {}
finding.resources.exists.return_value = False
finding.resources.first.return_value = None
finding.scan.provider.provider = "aws"
finding.check_metadata = {
"checktitle": f"Check {i + 1}",
"risk": "Low risk",
"remediation": {"recommendation": {}, "code": {}},
}
findings.append(finding)
mock_finding_model.all_objects.select_related.return_value.prefetch_related.return_value.get.side_effect = findings
# Call the function
result = send_findings_to_jira(
tenant_id, integration_id, project_key, issue_type, finding_ids
)
# Assertions
assert result == {"created_count": 2, "failed_count": 1}
# Verify error was logged for the failed finding
mock_logger.error.assert_called_with("Failed to send finding finding-2 to Jira")
@patch("tasks.jobs.integrations.rls_transaction")
@patch("tasks.jobs.integrations.Finding")
@patch("tasks.jobs.integrations.Integration")
@patch("tasks.jobs.integrations.initialize_prowler_integration")
def test_send_findings_to_jira_no_resources(
self,
mock_initialize_integration,
mock_integration_model,
mock_finding_model,
mock_rls_transaction,
):
"""Test sending findings to Jira when finding has no resources"""
tenant_id = "tenant-123"
integration_id = "integration-456"
project_key = "PROJ"
issue_type = "Task"
finding_ids = ["finding-1"]
# Mock RLS transaction
mock_rls_transaction.return_value.__enter__ = MagicMock()
mock_rls_transaction.return_value.__exit__ = MagicMock()
# Mock integration
integration = MagicMock()
mock_integration_model.objects.get.return_value = integration
# Mock Jira integration
mock_jira_integration = MagicMock()
mock_jira_integration.send_finding.return_value = True
mock_initialize_integration.return_value = mock_jira_integration
# Mock finding without resources
finding = MagicMock()
finding.id = "finding-1"
finding.check_id = "check_001"
finding.severity = "critical"
finding.status = "FAIL"
finding.status_extended = "Critical issue found"
finding.resource_regions = None
finding.compliance = {"pci": ["3.1"]}
finding.resources.exists.return_value = False
finding.resources.first.return_value = None
finding.scan.provider.provider = "gcp"
finding.check_metadata = {
"checktitle": "Critical Check",
"risk": "Very high risk",
"remediation": {
"recommendation": {
"text": "Immediate action required",
"url": "https://example.com/critical",
},
"code": {
"nativeiac": "",
"terraform": "terraform fix",
"cli": "",
"other": "manual fix",
},
},
}
mock_finding_model.all_objects.select_related.return_value.prefetch_related.return_value.get.return_value = finding
# Call the function
result = send_findings_to_jira(
tenant_id, integration_id, project_key, issue_type, finding_ids
)
# Assertions
assert result == {"created_count": 1, "failed_count": 0}
# Verify send_finding was called with empty resource fields
call_kwargs = mock_jira_integration.send_finding.call_args.kwargs
assert call_kwargs["resource_uid"] == ""
assert call_kwargs["resource_name"] == ""
assert call_kwargs["resource_tags"] == {}
assert call_kwargs["region"] == ""
@patch("tasks.jobs.integrations.rls_transaction")
@patch("tasks.jobs.integrations.Finding")
@patch("tasks.jobs.integrations.Integration")
@patch("tasks.jobs.integrations.initialize_prowler_integration")
def test_send_findings_to_jira_with_empty_check_metadata(
self,
mock_initialize_integration,
mock_integration_model,
mock_finding_model,
mock_rls_transaction,
):
"""Test sending findings to Jira when check_metadata is empty or missing fields"""
tenant_id = "tenant-123"
integration_id = "integration-456"
project_key = "PROJ"
issue_type = "Task"
finding_ids = ["finding-1"]
# Mock RLS transaction
mock_rls_transaction.return_value.__enter__ = MagicMock()
mock_rls_transaction.return_value.__exit__ = MagicMock()
# Mock integration
integration = MagicMock()
mock_integration_model.objects.get.return_value = integration
# Mock Jira integration
mock_jira_integration = MagicMock()
mock_jira_integration.send_finding.return_value = True
mock_initialize_integration.return_value = mock_jira_integration
# Mock finding with minimal/empty check_metadata
finding = MagicMock()
finding.id = "finding-1"
finding.check_id = "check_001"
finding.severity = "low"
finding.status = "PASS"
finding.status_extended = None
finding.resource_regions = []
finding.compliance = None
finding.resources.exists.return_value = False
finding.resources.first.return_value = None
finding.scan.provider.provider = "kubernetes"
finding.check_metadata = {} # Empty metadata
mock_finding_model.all_objects.select_related.return_value.prefetch_related.return_value.get.return_value = finding
# Call the function
result = send_findings_to_jira(
tenant_id, integration_id, project_key, issue_type, finding_ids
)
# Assertions
assert result == {"created_count": 1, "failed_count": 0}
# Verify send_finding was called with default/empty values
call_kwargs = mock_jira_integration.send_finding.call_args.kwargs
assert call_kwargs["check_title"] == ""
assert call_kwargs["risk"] == ""
assert call_kwargs["recommendation_text"] == ""
assert call_kwargs["recommendation_url"] == ""
assert call_kwargs["remediation_code_native_iac"] == ""
assert call_kwargs["remediation_code_terraform"] == ""
assert call_kwargs["remediation_code_cli"] == ""
assert call_kwargs["remediation_code_other"] == ""
assert call_kwargs["compliance"] == {}
+1 -1
@@ -50,7 +50,7 @@ Click `Go to Scans` to monitor progress.
Review findings during scan execution in the following sections:
- **Overview** Provides a high-level summary of your scans.
<img src="../../img/overview.png" alt="Overview" width="700"/>
<img src="../../products/img/overview.png" alt="Overview" width="700"/>
- **Compliance** Displays compliance insights based on security frameworks.
<img src="../../img/compliance.png" alt="Compliance" width="700"/>
+26 -1
@@ -1,6 +1,6 @@
## Running Prowler
Running Prowler requires specifying the provider (e.g `aws`, `gcp`, `azure`, `m365`, `github` or `kubernetes`):
Running Prowler requires specifying the provider (e.g. `aws`, `gcp`, `azure`, `kubernetes`, `m365`, `github`, `iac` or `mongodbatlas`):
???+ note
If no provider is specified, AWS is used by default for backward compatibility with Prowler v2.
@@ -255,3 +255,28 @@ prowler iac --scan-path ./my-iac-directory --exclude-path ./my-iac-directory/tes
- For more details on supported scanners, see the [Trivy documentation](https://trivy.dev/latest/docs/scanner/vulnerability/)
See more details about IaC scanning in the [IaC Tutorial](../tutorials/iac/getting-started-iac.md) section.
## MongoDB Atlas
Prowler allows you to scan your MongoDB Atlas cloud database deployments for security and compliance issues.
Authentication is done using MongoDB Atlas API key pairs:
```console
# Using command-line arguments
prowler mongodbatlas --atlas-public-key <public_key> --atlas-private-key <private_key>
# Using environment variables
export ATLAS_PUBLIC_KEY=<public_key>
export ATLAS_PRIVATE_KEY=<private_key>
prowler mongodbatlas
```
You can filter scans to specific organizations or projects:
```console
# Scan specific project
prowler mongodbatlas --atlas-project-id <project_id>
```
See more details about MongoDB Atlas authentication in the [Requirements](../getting-started/requirements.md#mongodb-atlas) section.
+2
@@ -10,6 +10,7 @@ A provider is any platform or service that offers resources, data, or functional
- Software as a Service (SaaS) Platforms (like Microsoft 365)
- Development Platforms (like GitHub)
- Container Orchestration Platforms (like Kubernetes)
- Database-as-a-Service Platforms (like MongoDB Atlas)
For providers supported by Prowler, refer to [Prowler Hub](https://hub.prowler.com/).
@@ -63,6 +64,7 @@ Given the complexity and variability of providers, use existing provider impleme
- [Kubernetes](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/kubernetes/kubernetes_provider.py)
- [Microsoft365](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/microsoft365/microsoft365_provider.py)
- [GitHub](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/github/github_provider.py)
- [MongoDB Atlas](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/mongodbatlas/mongodbatlas_provider.py)
### Basic Provider Implementation: Pseudocode Example
+11 -9
@@ -4,15 +4,17 @@
The official supported providers right now are:
- **AWS**
- **Azure**
- **Google Cloud**
- **Kubernetes**
- **M365**
- **Github**
- **IaC**
Unofficially, Prowler supports: NHN.
| Provider | Support | Stage | Interface |
|----------|--------|-------|----------|
| **AWS** | Official | Stable | UI, API, CLI |
| **Azure** | Official | Stable | UI, API, CLI |
| **Google Cloud** | Official | Stable | UI, API, CLI |
| **Kubernetes** | Official | Stable | UI, API, CLI |
| **M365** | Official | Stable | UI, API, CLI |
| **Github** | Official | Stable | UI, API, CLI |
| **IaC** | Official | Beta | CLI |
| **MongoDB Atlas** | Official | Beta | CLI |
| **NHN** | Unofficial | Beta | CLI |
Prowler supports **auditing, incident response, continuous monitoring, hardening, forensic readiness, and remediation**.
+1 -1
@@ -132,7 +132,7 @@ prowler --security-hub --role arn:aws:iam::123456789012:role/ProwlerExecutionRol
```
???+ note
The specified IAM role must have the necessary permissions to send findings to Security Hub. For details on the required permissions, refer to the IAM policy: [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-security-hub.json)
The specified IAM role must have the necessary permissions to send findings to Security Hub. For details on the required permissions, refer to the IAM policy: [prowler-additions-policy.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-additions-policy.json)
## Sending Only Failed Findings to AWS Security Hub
@@ -4,6 +4,9 @@
Set up your Azure subscription to enable security scanning using Prowler Cloud/App.
???+ note "Government Cloud Support"
Government cloud subscriptions (Azure Government) are not currently supported, but we expect to add support for them in the near future.
## Requirements
To configure your Azure subscription, you'll need:
+3 -84
@@ -23,97 +23,16 @@ Standard results will be shown and additionally the framework information as the
## List Available Compliance Frameworks
In order to see which compliance frameworks are covered by Prowler, you can use option `--list-compliance`:
To see which compliance frameworks are covered by Prowler, use the `--list-compliance` option:
```sh
prowler <provider> --list-compliance
```
### AWS (36 frameworks)
- `aws_account_security_onboarding_aws`
- `aws_audit_manager_control_tower_guardrails_aws`
- `aws_foundational_security_best_practices_aws`
- `aws_foundational_technical_review_aws`
- `aws_well_architected_framework_reliability_pillar_aws`
- `aws_well_architected_framework_security_pillar_aws`
- `cis_1.4_aws`
- `cis_1.5_aws`
- `cis_2.0_aws`
- `cis_3.0_aws`
- `cis_4.0_aws`
- `cis_5.0_aws`
- `cisa_aws`
- `ens_rd2022_aws`
- `fedramp_low_revision_4_aws`
- `fedramp_moderate_revision_4_aws`
- `ffiec_aws`
- `gdpr_aws`
- `gxp_21_cfr_part_11_aws`
- `gxp_eu_annex_11_aws`
- `hipaa_aws`
- `iso27001_2013_aws`
- `iso27001_2022_aws`
- `kisa_isms_p_2023_aws`
- `kisa_isms_p_2023_korean_aws`
- `mitre_attack_aws`
- `nis2_aws`
- `nist_800_171_revision_2_aws`
- `nist_800_53_revision_4_aws`
- `nist_800_53_revision_5_aws`
- `nist_csf_1.1_aws`
- `pci_3.2.1_aws`
- `pci_4.0_aws`
- `prowler_threatscore_aws`
- `rbi_cyber_security_framework_aws`
- `soc2_aws`
### Azure (10 frameworks)
- `cis_2.0_azure`
- `cis_2.1_azure`
- `cis_3.0_azure`
- `ens_rd2022_azure`
- `iso27001_2022_azure`
- `mitre_attack_azure`
- `nis2_azure`
- `pci_4.0_azure`
- `prowler_threatscore_azure`
- `soc2_azure`
### GCP (10 frameworks)
- `cis_2.0_gcp`
- `cis_3.0_gcp`
- `cis_4.0_gcp`
- `ens_rd2022_gcp`
- `iso27001_2022_gcp`
- `mitre_attack_gcp`
- `nis2_gcp`
- `pci_4.0_gcp`
- `prowler_threatscore_gcp`
- `soc2_gcp`
### Kubernetes (5 frameworks)
- `cis_1.10_kubernetes`
- `cis_1.11_kubernetes`
- `cis_1.8_kubernetes`
- `iso27001_2022_kubernetes`
- `pci_4.0_kubernetes`
### M365 (3 frameworks)
- `cis_4.0_m365`
- `iso27001_2022_m365`
- `prowler_threatscore_m365`
### GitHub (1 framework)
- `cis_1.0_github`
Or you can visit [Prowler Hub](https://hub.prowler.com/compliance).
## List Requirements of Compliance Frameworks
For each compliance framework, you can use the `--list-compliance-requirements` option to list its requirements:
To list requirements for a compliance framework, use the `--list-compliance-requirements` option:
```sh
prowler <provider> --list-compliance-requirements <compliance_framework(s)>
+1 -1
@@ -1,4 +1,4 @@
# Prowler Fixer (remediation)
# Prowler Fixers (Remediations)
Prowler allows you to fix some of the failed findings it identifies. You can use the `--fixer` flag to run the fixes that are available for the checks that failed.
@@ -8,7 +8,7 @@ Prowler for Microsoft 365 (M365) supports the following authentication methods:
- **Interactive browser authentication**
???+ warning
Prowler App supports the **Service Principal** authentication method and the **Service Principal with User Credentials** authentication method, but this last one will be deprecated in September once Microsoft will enforce MFA in all tenants not allowing User authentication without interactive method.
Prowler App supports the **Service Principal** authentication method and the **Service Principal with User Credentials** authentication method, but the latter will be deprecated in October, once Microsoft enforces MFA in all tenants and no longer allows user authentication without an interactive method.
### Service Principal Authentication (Recommended)
@@ -109,7 +109,7 @@ When using service principal authentication, add the following **Application Per
> If you do this you will need to add also the `Organization.Read.All` permission to the service principal application in order to authenticate.
???+ note
This is the **recommended authentication method** because it allows you to run the full M365 provider including PowerShell checks, providing complete coverage of all available security checks, same as the Service Principal Authentication + User Credentials Authentication but this last one will be deprecated in September once Microsoft will enforce MFA in all tenants not allowing User authentication without interactive method.
This is the **recommended authentication method** because it allows you to run the full M365 provider, including PowerShell checks, providing complete coverage of all available security checks. Service Principal Authentication + User Credentials Authentication offers the same coverage, but the latter will be deprecated in October, once Microsoft enforces MFA in all tenants and no longer allows user authentication without an interactive method.
#### Service Principal + User Credentials Authentication (`--env-auth`)
@@ -2,6 +2,9 @@
Set up your M365 account to enable security scanning using Prowler Cloud/App.
???+ note "Government Cloud Support"
Government cloud accounts or tenants (Microsoft 365 Government) are not currently supported, but we expect to add support for them in the near future.
## Requirements
To configure your M365 account, you'll need:
@@ -194,7 +197,7 @@ To grant the permissions for the PowerShell modules via application authenticati
#### If using user authentication
This method is not recommended because it requires a user with MFA enabled and Microsoft will not allow MFA capable users to authenticate programmatically after 1st September 2025. See [Microsoft documentation](https://learn.microsoft.com/en-us/entra/identity/authentication/concept-mandatory-multifactor-authentication?tabs=dotnet) for more information.
This method is not recommended because it requires a user with MFA enabled, and Microsoft will not allow MFA-capable users to authenticate programmatically after 1st October 2025. See [Microsoft documentation](https://learn.microsoft.com/en-us/entra/identity/authentication/concept-mandatory-multifactor-authentication?tabs=dotnet) for more information.
???+ warning
Remember that if the user is newly created, you need to sign in with that account first, as Microsoft will prompt you to change the password. If you don't complete this step, user authentication will fail because Microsoft marks the initial password as expired.
@@ -0,0 +1,45 @@
# MongoDB Atlas Authentication
The MongoDB Atlas provider uses [HTTP Digest Authentication with API key pairs consisting of a public key and a private key](https://www.mongodb.com/docs/atlas/configure-api-access/#grant-programmatic-access-to-service).
## Authentication Methods
### Command-Line Arguments
```bash
prowler mongodbatlas --atlas-public-key <public_key> --atlas-private-key <private_key>
```
### Environment Variables
```bash
export ATLAS_PUBLIC_KEY=<public_key>
export ATLAS_PRIVATE_KEY=<private_key>
prowler mongodbatlas
```
## Creating API Keys
### Step-by-Step Guide
1. **Log into MongoDB Atlas**
- Access the MongoDB Atlas console
2. **Navigate to Access Manager**
- Go to the organization or project access management section
3. **Select API Keys Tab**
- Click on the "API Keys" tab
4. **Create API Key**
- Click "Create API Key"
- Provide a description for the key
5. **Set Permissions**
- Grant minimum required permissions
6. **Save Credentials**
- Note the public key and private key
- Store credentials securely
For more details about MongoDB Atlas, see the [MongoDB Atlas Tutorial](../tutorials/mongodbatlas/getting-started-mongodbatlas.md).
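For reference, the sketch below shows what HTTP Digest Authentication with an Atlas API key pair looks like at the HTTP level. It is illustrative only and independent of Prowler's own client code; the endpoint, the versioned `Accept` header, and the response shape are assumptions based on the public Atlas Administration API.

```python
# Illustrative sketch: HTTP Digest Authentication against the Atlas
# Administration API using an API key pair (the public key acts as the
# username, the private key as the password). Endpoint and headers are
# assumptions, not Prowler code.
import os

import requests
from requests.auth import HTTPDigestAuth

response = requests.get(
    "https://cloud.mongodb.com/api/atlas/v2/groups",  # list projects (assumed endpoint)
    auth=HTTPDigestAuth(
        os.environ["ATLAS_PUBLIC_KEY"], os.environ["ATLAS_PRIVATE_KEY"]
    ),
    headers={"Accept": "application/vnd.atlas.2023-01-01+json"},
    timeout=30,
)
response.raise_for_status()
for project in response.json().get("results", []):
    print(project["id"], project["name"])
```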
@@ -0,0 +1,87 @@
# Getting Started with MongoDB Atlas
The MongoDB Atlas provider enables security assessments of MongoDB Atlas cloud database deployments.
## Features
- **Authentication**: Supports MongoDB Atlas API key authentication
- **Services**: Projects and clusters services
- **Checks**: Network access security and encryption at rest validation
## Creating API Keys
To create MongoDB Atlas API keys:
1. **Log into MongoDB Atlas**: Access the MongoDB Atlas console
2. **Navigate to Access Manager**: Go to the organization access management section:
- Click on Access Manager and Organization Access:
![Organization Access](./img/organization-access.png)
- After that click on the Applications tab inside the Access Manager:
![Project Access](./img/access-manager.png)
3. **Select API Keys Tab**: Click on the "API Keys" tab that appears in the image above
4. **Create API Key**: Click "Create API Key" and provide a description
![Create API Key](./img/create-api-key.png)
5. **Set Permissions**: Project permissions are recommended for security; you can modify them after creating the key
![Set Permissions](./img/modify-permission.png)
6. **Save Credentials**: Note the public key and private key and store them securely
![Save Credentials](./img/copy-key.png)
7. **Add IP Access List**: Add the IP address where you are running Prowler to the API key's IP Access List. If you want to skip this step and use your API key from any IP address, uncheck the `Require IP Access List for the Atlas Administration API` option in the [Organization Settings](#needed-permissions), but this is not recommended.
![Organization Settings](./img/add-ip.png)
## Basic Usage
### Scan All Projects and Clusters
After storing your API keys, you can run Prowler with the following command:
```bash
prowler mongodbatlas --atlas-public-key <key> --atlas-private-key <secret>
```
Also, you can set your API keys as environment variables:
```bash
export ATLAS_PUBLIC_KEY=<key>
export ATLAS_PRIVATE_KEY=<secret>
```
And then just run Prowler with the following command:
```bash
prowler mongodbatlas
```
### Scanning a Specific Project
To scan a specific project, add the following argument to the command above:
```bash
prowler mongodbatlas --atlas-project-id <project-id>
```
### Needed Permissions
MongoDB Atlas API keys require appropriate permissions to perform security checks:
- **Organization Read Only**: Provides read-only access to everything in the organization, including all projects in the organization.
- To [audit the Auditing configuration for the project](https://www.mongodb.com/docs/api/doc/atlas-admin-api-v2/group/endpoint-auditing), the **Organization Owner** role is required.
Also note that the IP address where you are running Prowler must be added to the IP Access List of the MongoDB Atlas organization API key. If you want to skip this step and use your API key from any IP address, uncheck the `Require IP Access List for the Atlas Administration API` option in the Organization Settings; that setting is [enabled by default](https://www.mongodb.com/docs/atlas/configure-api-access/#optional--require-an-ip-access-list-for-the-atlas-administration-api).
???+ warning
If you want the check `organizations_api_access_list_required` to pass, you will need to enable the API access list for the organization; to make sure your API key keeps working, add your IP to the organization's IP Access List. If you are running the check from Prowler Cloud, you will need to add our IP to the IP Access List.
![Organization Settings](./img/ip-access-list.png)
+1 -1
@@ -1,4 +1,4 @@
# Managing Users and Roles
# Managing Users and Role-Based Access Control (RBAC)
**Prowler App** supports multiple users within a single tenant, enabling seamless collaboration by allowing team members to easily share insights and manage security findings.
@@ -0,0 +1,128 @@
# AWS Security Hub Integration
Prowler App enables automatic export of security findings to AWS Security Hub, providing seamless integration with AWS's native security and compliance service. This comprehensive guide demonstrates how to configure and manage AWS Security Hub integrations to centralize security findings and enhance compliance tracking across AWS environments.
Integrating Prowler App with AWS Security Hub provides:
* **Centralized security visibility:** Consolidate findings from multiple AWS accounts and regions
* **Native AWS integration:** Leverage existing AWS security workflows and compliance frameworks
* **Automated finding management:** Archive resolved findings and filter results based on severity
* **Cost optimization:** Send only failed findings to reduce AWS Security Hub costs
* **Real-time updates:** Automatically export findings after each scan completion
## How It Works
When enabled and configured:
1. Scan results are automatically sent to AWS Security Hub after each scan completes
2. Findings are formatted in [AWS Security Finding Format](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-findings-format.html) (ASFF), as sketched below
3. The integration automatically detects new AWS regions to send findings if the Prowler partner integration is enabled
4. Previously resolved findings are archived to maintain clean Security Hub dashboards
???+ note
Refer to [AWS Security Hub pricing](https://aws.amazon.com/security-hub/pricing/) for cost information.
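For orientation, a finding exported in ASFF is a JSON document similar to the hedged sketch below; the field names come from the public ASFF specification, while the values are invented for illustration and are not Prowler's exact output.

```python
# Hedged, illustrative ASFF finding: field names follow the public ASFF
# specification; all values are invented for illustration only.
asff_finding = {
    "SchemaVersion": "2018-10-08",
    "Id": "prowler-iam_root_mfa_enabled-123456789012-us-east-1-example",
    "ProductArn": "arn:aws:securityhub:us-east-1::product/prowler/prowler",
    "GeneratorId": "prowler-iam_root_mfa_enabled",
    "AwsAccountId": "123456789012",
    "Types": ["Software and Configuration Checks"],
    "CreatedAt": "2025-09-09T00:00:00Z",
    "UpdatedAt": "2025-09-09T00:00:00Z",
    "Severity": {"Label": "HIGH"},
    "Title": "Ensure MFA is enabled for the root account",
    "Description": "MFA is not enabled for the root account.",
    "Resources": [{"Type": "AwsIamUser", "Id": "arn:aws:iam::123456789012:root"}],
    "Compliance": {"Status": "FAILED"},
    "RecordState": "ACTIVE",
}
```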
## Prerequisites
Before configuring AWS Security Hub Integration in Prowler App, complete these steps:
### AWS Security Hub Setup
Enable the Prowler partner integration in AWS Security Hub by following the [AWS Security Hub setup documentation](./aws/securityhub.md#enabling-aws-security-hub-for-prowler-integration).
### AWS Authentication
Configure AWS credentials by following the [AWS authentication setup guide](./aws/getting-started-aws.md#step-3-set-up-aws-authentication).
## Configuration
To configure AWS Security Hub integration in Prowler App:
1. Navigate to **Integrations** in the Prowler App interface
2. Locate the **AWS Security Hub** card and click **Manage**, then select **Add integration**
![Integrations tab](./img/security-hub/integrations-tab.png)
3. Complete the integration settings
* **AWS Provider:** Select the AWS provider whose findings should be exported to Security Hub
* **Send Only Failed Findings:** Filter out `PASS` findings to reduce AWS Security Hub costs (enabled by default)
* **Archive Previous Findings:** Automatically archive findings resolved since the last scan to maintain clean Security Hub dashboards
![Integration settings](./img/security-hub/integration-settings.png)
4. Configure authentication:
Choose the appropriate authentication method:
* **Use Provider Credentials** (recommended): Leverages the AWS provider's existing credentials
???+ tip "Simplified Credential Management"
Using provider credentials reduces administrative complexity by managing a single set of credentials instead of maintaining separate authentication mechanisms. This approach minimizes security risks and provides the most efficient integration path when the AWS account has sufficient permissions to export findings to Security Hub.
* **Custom Credentials:** Configure separate credentials specifically for Security Hub access
5. Click **Create integration** to enable the integration
![Create integration](./img/security-hub/create-integration.png)
Once configured successfully, findings from subsequent scans will automatically appear in AWS Security Hub.
### Integration Status
Once the integration is active, monitor its status and make adjustments as needed through the integrations management interface.
1. Review configured integrations in the management interface
2. Each integration displays:
- **Connection Status:** Connected or Disconnected indicator.
- **Provider Information:** Selected AWS provider name.
- **Finding Filters:** Status of failed-only and archive settings.
- **Last Checked:** Timestamp of the most recent connection test.
- **Regions:** List of regions where the integration is active.
#### Actions
Each Security Hub integration provides several management actions accessible through dedicated buttons:
| Button | Purpose | Available Actions | Notes |
|--------|---------|------------------|-------|
| **Test** | Verify integration connectivity | • Test AWS credential validity<br/>• Check Security Hub accessibility<br/>• Detect enabled regions automatically<br/>• Validate finding export capability | Results displayed in notification message |
| **Config** | Modify integration settings | • Update AWS provider selection<br/>• Change finding filter settings<br/>• Modify archive preferences | Click "Update Configuration" to save changes |
| **Credentials** | Update authentication settings | • Switch between provider/custom credentials<br/>• Update AWS access keys<br/>• Change IAM role configuration | Click "Update Credentials" to save changes |
| **Enable/Disable** | Toggle integration status | • Enable integration to start exporting findings<br/>• Disable integration to pause exports | Status change takes effect immediately |
| **Delete** | Remove integration permanently | • Permanently delete integration<br/>• Remove all configuration data | ⚠️ **Cannot be undone** - confirm before deleting |
???+ tip "Management Best Practices"
- Test the integration after any configuration changes
- Use the Enable/Disable toggle for temporary changes instead of deleting
- Monitor the Last Checked timestamp to ensure recent connectivity
## Viewing Findings in AWS Security Hub
After successful configuration and scan completion, Prowler findings automatically appear in AWS Security Hub. For detailed information about accessing and interpreting findings in the Security Hub console, refer to the [AWS Security Hub findings documentation](./aws/securityhub.md#viewing-prowler-findings-in-aws-security-hub).
## Troubleshooting
**Connection test fails:**
- Verify AWS Security Hub is enabled in target regions
- Confirm the Prowler integration is accepted in Security Hub (a quick boto3 verification is sketched below)
- Check IAM permissions include required Security Hub actions
- If using IAM Role, verify trust policy and External ID
**No findings in Security Hub:**
- Ensure integration shows "Connected" status
- Verify a scan has completed after enabling integration
- Check Security Hub console in the correct region
- Confirm finding filters match expectations
**Authentication errors:**
- For provider credentials, verify provider configuration
- For custom credentials, check access key validity
- For IAM roles, confirm role ARN and External ID match
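Outside the Prowler App, the first two points of the connection-test list can be verified with a short boto3 sketch like the one below. This is a minimal troubleshooting example only, not part of Prowler; the region is a placeholder, and the substring match on the product ARN assumes the standard Prowler product subscription naming.

```python
import boto3
from botocore.exceptions import ClientError

region = "eu-west-1"  # placeholder: the region you are troubleshooting

client = boto3.client("securityhub", region_name=region)

try:
    # DescribeHub fails if Security Hub is not enabled in this region.
    client.describe_hub()
    print(f"Security Hub is enabled in {region}")
except ClientError as error:
    print(f"Security Hub is not reachable in {region}: {error}")

# List accepted product integrations (first page only, for brevity) and look for Prowler.
subscriptions = client.list_enabled_products_for_import().get("ProductSubscriptions", [])
if any("prowler" in arn.lower() for arn in subscriptions):
    print("Prowler integration is accepted in Security Hub")
else:
    print("Prowler integration is not accepted; enable it from the Security Hub console")
```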
+2
View File
@@ -2,6 +2,8 @@
This page provides instructions for creating and configuring a Microsoft Entra ID (formerly Azure AD) application to use SAML SSO with Prowler App.
You can find a walkthrough video [here](https://youtu.be/UtcjDh5cAjI).
## Creating and Configuring the Enterprise Application
1. From the "Enterprise Applications" page in the Azure Portal, click "+ New application".
+3 -1
View File
@@ -39,7 +39,7 @@ Upon logging in, the Overview page will display. At this stage, no data is prese
## **Step 3: Add a Provider**
To perform security scans, link a cloud provider account. Prowler supports the following providers:
To perform security scans, link a cloud provider account. Prowler supports the following providers and more:
- **AWS**
@@ -51,6 +51,8 @@ To perform security scans, link a cloud provider account. Prowler supports the f
- **M365**
- **GitHub**
Steps to add a provider:
1. Navigate to `Settings > Cloud Providers`.
+64 -55
View File
@@ -59,75 +59,84 @@ nav:
- Basic Usage:
- Prowler App: basic-usage/prowler-app.md
- Prowler CLI: basic-usage/prowler-cli.md
- Tutorials:
- User Guide:
- Prowler App:
- Getting Started: tutorials/prowler-app.md
- Authentication:
- Social Login: tutorials/prowler-app-social-login.md
- SSO with SAML: tutorials/prowler-app-sso.md
- Role-Based Access Control: tutorials/prowler-app-rbac.md
- Social Login: tutorials/prowler-app-social-login.md
- SSO with SAML: tutorials/prowler-app-sso.md
- Mute findings: tutorials/prowler-app-mute-findings.md
- Amazon S3 Integration: tutorials/prowler-app-s3-integration.md
- Lighthouse: tutorials/prowler-app-lighthouse.md
- Bulk Provider Provisioning: tutorials/bulk-provider-provisioning.md
- Mutelist: tutorials/prowler-app-mute-findings.md
- Integrations:
- Amazon S3: tutorials/prowler-app-s3-integration.md
- AWS Security Hub: tutorials/prowler-app-security-hub-integration.md
- Lighthouse AI: tutorials/prowler-app-lighthouse.md
- Tutorials:
- SSO with Entra: tutorials/prowler-app-sso-entra.md
- Bulk Provider Provisioning: tutorials/bulk-provider-provisioning.md
- CLI:
- Miscellaneous: tutorials/misc.md
- Reporting: tutorials/reporting.md
- Compliance: tutorials/compliance.md
- Dashboard: tutorials/dashboard.md
- Fixer (remediations): tutorials/fixer.md
- Quick Inventory: tutorials/quick-inventory.md
- Slack Integration: tutorials/integrations.md
- Configuration File: tutorials/configuration_file.md
- Logging: tutorials/logging.md
- Mutelist: tutorials/mutelist.md
- Integrations:
- AWS Security Hub: tutorials/aws/securityhub.md
- Slack: tutorials/integrations.md
- Send reports to AWS S3: tutorials/aws/s3.md
- Fixers (Remediations): tutorials/fixer.md
- Check Aliases: tutorials/check-aliases.md
- Custom Metadata: tutorials/custom-checks-metadata.md
- Scan Unused Services: tutorials/scan-unused-services.md
- Pentesting: tutorials/pentesting.md
- Parallel Execution: tutorials/parallel-execution.md
- Developer Guide: developer-guide/introduction.md
- Prowler Check Kreator: tutorials/prowler-check-kreator.md
- AWS:
- Getting Started: tutorials/aws/getting-started-aws.md
- Authentication: tutorials/aws/authentication.md
- Assume Role: tutorials/aws/role-assumption.md
- AWS Security Hub: tutorials/aws/securityhub.md
- AWS Organizations: tutorials/aws/organizations.md
- AWS Regions and Partitions: tutorials/aws/regions-and-partitions.md
- Scan Multiple AWS Accounts: tutorials/aws/multiaccount.md
- Send reports to AWS S3: tutorials/aws/s3.md
- AWS CloudShell: tutorials/aws/cloudshell.md
- Checks v2 to v3 and v4 Mapping: tutorials/aws/v2_to_v3_checks_mapping.md
- Tag-based Scan: tutorials/aws/tag-based-scan.md
- Resource ARNs based Scan: tutorials/aws/resource-arn-based-scan.md
- Boto3 Configuration: tutorials/aws/boto3-configuration.md
- Threat Detection: tutorials/aws/threat-detection.md
- Azure:
- Getting Started: tutorials/azure/getting-started-azure.md
- Authentication: tutorials/azure/authentication.md
- Non default clouds: tutorials/azure/use-non-default-cloud.md
- Subscriptions: tutorials/azure/subscriptions.md
- Create Prowler Service Principal: tutorials/azure/create-prowler-service-principal.md
- Google Cloud:
- Getting Started: tutorials/gcp/getting-started-gcp.md
- Authentication: tutorials/gcp/authentication.md
- Projects: tutorials/gcp/projects.md
- Organization: tutorials/gcp/organization.md
- Retry Configuration: tutorials/gcp/retry-configuration.md
- Kubernetes:
- In-Cluster Execution: tutorials/kubernetes/in-cluster.md
- Non In-Cluster Execution: tutorials/kubernetes/outside-cluster.md
- Miscellaneous: tutorials/kubernetes/misc.md
- Microsoft 365:
- Getting Started: tutorials/microsoft365/getting-started-m365.md
- Authentication: tutorials/microsoft365/authentication.md
- Use of PowerShell: tutorials/microsoft365/use-of-powershell.md
- GitHub:
- Getting Started: tutorials/github/getting-started-github.md
- Authentication: tutorials/github/authentication.md
- IaC:
- Getting Started: tutorials/iac/getting-started-iac.md
- Authentication: tutorials/iac/authentication.md
- Scan Unused Services: tutorials/scan-unused-services.md
- Quick Inventory: tutorials/quick-inventory.md
- Tutorials:
- Parallel Execution: tutorials/parallel-execution.md
- Providers:
- AWS:
- Getting Started: tutorials/aws/getting-started-aws.md
- Authentication: tutorials/aws/authentication.md
- Assume Role: tutorials/aws/role-assumption.md
- AWS Organizations: tutorials/aws/organizations.md
- AWS Regions and Partitions: tutorials/aws/regions-and-partitions.md
- Tag-based Scan: tutorials/aws/tag-based-scan.md
- Resource ARNs based Scan: tutorials/aws/resource-arn-based-scan.md
- Boto3 Configuration: tutorials/aws/boto3-configuration.md
- Threat Detection: tutorials/aws/threat-detection.md
- Tutorial > AWS CloudShell: tutorials/aws/cloudshell.md
- Tutorial > Scan Multiple AWS Accounts: tutorials/aws/multiaccount.md
- Azure:
- Getting Started: tutorials/azure/getting-started-azure.md
- Authentication: tutorials/azure/authentication.md
- Non default clouds: tutorials/azure/use-non-default-cloud.md
- Subscriptions: tutorials/azure/subscriptions.md
- Create Prowler Service Principal: tutorials/azure/create-prowler-service-principal.md
- Google Cloud:
- Getting Started: tutorials/gcp/getting-started-gcp.md
- Authentication: tutorials/gcp/authentication.md
- Projects: tutorials/gcp/projects.md
- Organization: tutorials/gcp/organization.md
- Retry Configuration: tutorials/gcp/retry-configuration.md
- Kubernetes:
- In-Cluster Execution: tutorials/kubernetes/in-cluster.md
- Non In-Cluster Execution: tutorials/kubernetes/outside-cluster.md
- Miscellaneous: tutorials/kubernetes/misc.md
- Microsoft 365:
- Getting Started: tutorials/microsoft365/getting-started-m365.md
- Authentication: tutorials/microsoft365/authentication.md
- Use of PowerShell: tutorials/microsoft365/use-of-powershell.md
- GitHub:
- Getting Started: tutorials/github/getting-started-github.md
- Authentication: tutorials/github/authentication.md
- IaC:
- Getting Started: tutorials/iac/getting-started-iac.md
- Authentication: tutorials/iac/authentication.md
- MongoDB Atlas:
- Getting Started: tutorials/mongodbatlas/getting-started-mongodbatlas.md
- Authentication: tutorials/mongodbatlas/authentication.md
- Developer Guide:
- Concepts:
- Introduction: developer-guide/introduction.md
+49
View File
@@ -2,6 +2,55 @@
All notable changes to the **Prowler SDK** are documented in this file.
## [v5.13.0] (Prowler UNRELEASED)
### Added
### Changed
### Fixed
## [v5.12.1] (Prowler v5.12.1)
### Fixed
- Replaced old check IDs with new ones for compliance files [(#8682)](https://github.com/prowler-cloud/prowler/pull/8682)
## [v5.12.0] (Prowler v5.12.0)
### Added
- Add more fields for the Jira ticket and handle custom fields errors [(#8601)](https://github.com/prowler-cloud/prowler/pull/8601)
- Support labels on Jira tickets [(#8603)](https://github.com/prowler-cloud/prowler/pull/8603)
- Add finding url and tenant info inside Jira tickets [(#8607)](https://github.com/prowler-cloud/prowler/pull/8607)
- Get Jira Project's metadata [(#8630)](https://github.com/prowler-cloud/prowler/pull/8630)
- Get Jira projects from test_connection [(#8634)](https://github.com/prowler-cloud/prowler/pull/8634)
- `AdditionalUrls` field in CheckMetadata [(#8590)](https://github.com/prowler-cloud/prowler/pull/8590)
- Support color for MANUAL findings in Jira tickets [(#8642)](https://github.com/prowler-cloud/prowler/pull/8642)
- `--excluded-checks-file` flag [(#8301)](https://github.com/prowler-cloud/prowler/pull/8301)
- Send finding in Jira integration with the needed values [(#8648)](https://github.com/prowler-cloud/prowler/pull/8648)
- Add language enforcement for Jira requests [(#8674)](https://github.com/prowler-cloud/prowler/pull/8674)
- MongoDB Atlas provider with 10 security checks [(#8312)](https://github.com/prowler-cloud/prowler/pull/8312)
- `clusters_authentication_enabled` - Ensure clusters have authentication enabled
- `clusters_backup_enabled` - Ensure clusters have backup enabled
- `clusters_encryption_at_rest_enabled` - Ensure clusters have encryption at rest enabled
- `clusters_tls_enabled` - Ensure clusters have TLS authentication required
- `organizations_api_access_list_required` - Ensure organization requires API access list
- `organizations_mfa_required` - Ensure organization requires MFA
- `organizations_security_contact_defined` - Ensure organization has security contact defined
- `organizations_service_account_secrets_expiration` - Ensure organization has maximum period expiration for service account secrets
- `projects_auditing_enabled` - Ensure database auditing is enabled
- `projects_network_access_list_exposed_to_internet` - Ensure project network access list is not exposed to internet
### Changed
- Rename ftp and mongo checks to follow pattern `ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_*` [(#8293)](https://github.com/prowler-cloud/prowler/pull/8293)
### Fixed
- Renamed `AdditionalUrls` to `AdditionalURLs` field in CheckMetadata [(#8639)](https://github.com/prowler-cloud/prowler/pull/8639)
- TypeError from Python 3.9 in Security Hub module by updating type annotations [(#8619)](https://github.com/prowler-cloud/prowler/pull/8619)
- KeyError when SecurityGroups field is missing in MemoryDB check [(#8666)](https://github.com/prowler-cloud/prowler/pull/8666)
- NoneType error in Opensearch, Firehose and Cognito checks [(#8670)](https://github.com/prowler-cloud/prowler/pull/8670)
---
## [v5.11.0] (Prowler v5.11.0)
### Added
+16
View File
@@ -23,6 +23,7 @@ from prowler.lib.check.check import (
list_checks_json,
list_fixers,
list_services,
parse_checks_from_file,
parse_checks_from_folder,
print_categories,
print_checks,
@@ -102,6 +103,7 @@ from prowler.providers.github.models import GithubOutputOptions
from prowler.providers.iac.models import IACOutputOptions
from prowler.providers.kubernetes.models import KubernetesOutputOptions
from prowler.providers.m365.models import M365OutputOptions
from prowler.providers.mongodbatlas.models import MongoDBAtlasOutputOptions
from prowler.providers.nhn.models import NHNOutputOptions
@@ -121,6 +123,7 @@ def prowler():
checks = args.check
excluded_checks = args.excluded_check
excluded_checks_file = args.excluded_checks_file
excluded_services = args.excluded_service
services = args.service
categories = args.category
@@ -257,6 +260,15 @@ def prowler():
checks_to_execute, excluded_checks
)
# Exclude checks if --excluded-checks-file
if excluded_checks_file:
excluded_checks_from_file = parse_checks_from_file(
excluded_checks_file, provider
)
checks_to_execute = exclude_checks_to_run(
checks_to_execute, list(excluded_checks_from_file)
)
# Exclude services if --excluded-services
if excluded_services:
checks_to_execute = exclude_services_to_run(
@@ -300,6 +312,10 @@ def prowler():
output_options = M365OutputOptions(
args, bulk_checks_metadata, global_provider.identity
)
elif provider == "mongodbatlas":
output_options = MongoDBAtlasOutputOptions(
args, bulk_checks_metadata, global_provider.identity
)
elif provider == "nhn":
output_options = NHNOutputOptions(
args, bulk_checks_metadata, global_provider.identity
@@ -364,8 +364,8 @@
"ec2_ami_public",
"ec2_instance_public_ip",
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",
@@ -721,8 +721,8 @@
"ec2_networkacl_allow_ingress_tcp_port_22",
"ec2_networkacl_allow_ingress_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",
@@ -1510,8 +1510,8 @@
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",
@@ -1604,8 +1604,8 @@
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",
@@ -1698,8 +1698,8 @@
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",
@@ -1558,8 +1558,8 @@
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",
@@ -1682,7 +1682,7 @@
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_elasticsearch_kibana_9200_9300_5601",
@@ -1814,7 +1814,7 @@
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_memcached_11211",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mysql_3306",
@@ -1917,7 +1917,7 @@
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_telnet_23",
@@ -3024,8 +3024,8 @@
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",
@@ -4588,4 +4588,4 @@
]
}
]
}
}
@@ -1557,8 +1557,8 @@
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",
@@ -1682,7 +1682,7 @@
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_elasticsearch_kibana_9200_9300_5601",
@@ -1816,7 +1816,7 @@
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_memcached_11211",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mysql_3306",
@@ -1919,7 +1919,7 @@
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_telnet_23",
@@ -3028,8 +3028,8 @@
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",
@@ -4603,4 +4603,4 @@
]
}
]
}
}
+10 -10
View File
@@ -107,8 +107,8 @@
"ec2_networkacl_allow_ingress_tcp_port_22",
"ec2_networkacl_allow_ingress_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",
@@ -1024,8 +1024,8 @@
"ec2_networkacl_allow_ingress_tcp_port_22",
"ec2_networkacl_allow_ingress_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",
@@ -1470,8 +1470,8 @@
"ec2_networkacl_allow_ingress_tcp_port_22",
"ec2_networkacl_allow_ingress_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",
@@ -1650,8 +1650,8 @@
"ec2_networkacl_allow_ingress_tcp_port_22",
"ec2_networkacl_allow_ingress_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",
@@ -1902,8 +1902,8 @@
"ec2_networkacl_allow_ingress_tcp_port_22",
"ec2_networkacl_allow_ingress_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",
@@ -553,8 +553,8 @@
"Description": "Ensure that ec2 security groups do not allow ingress from internet to common ports",
"Checks": [
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",
@@ -66,7 +66,7 @@
"elbv2_ssl_listeners",
"ssm_documents_set_as_public",
"vpc_subnet_no_public_ip_by_default",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mysql_3306",
"s3_account_level_public_access_blocks"
+2 -2
View File
@@ -253,8 +253,8 @@
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",
+2 -1
View File
@@ -12,7 +12,7 @@ from prowler.lib.logger import logger
timestamp = datetime.today()
timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
prowler_version = "5.11.1"
prowler_version = "5.13.0"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
square_logo_img = "https://prowler.com/wp-content/uploads/logo-html.png"
aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"
@@ -32,6 +32,7 @@ class Provider(str, Enum):
GITHUB = "github"
IAC = "iac"
NHN = "nhn"
MONGODBATLAS = "mongodbatlas"
# Compliance
+5
View File
@@ -581,3 +581,8 @@ m365:
github:
# github.repository_inactive_not_archived --> CIS recommends 180 days (6 months)
inactive_not_archived_days_threshold: 180
# MongoDB Atlas Configuration
mongodbatlas:
# mongodbatlas.organizations_service_account_secrets_expiration --> Maximum hours for service account secrets validity
max_service_account_secret_validity_hours: 8
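A hypothetical sketch (not taken from the diff) of reading this configuration key from an audit-config dictionary; the helper name and calling context are assumptions for illustration only.

```python
def get_max_secret_validity_hours(audit_config: dict) -> int:
    """Hypothetical helper: read the threshold defined above, defaulting to 8 hours."""
    return audit_config.get("max_service_account_secret_validity_hours", 8)

# Example usage with an in-memory config mirroring the YAML snippet above.
print(get_max_secret_validity_hours({"max_service_account_secret_validity_hours": 8}))
```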
@@ -0,0 +1,32 @@
### Account, Check and/or Region can be * to apply for all the cases.
### Account == MongoDB Atlas Organization ID and Region == MongoDB Atlas Cluster Region
### Resources and tags are lists that can have either Regex or Keywords.
### Tags is an optional list that matches on tuples of 'key=value' and are "ANDed" together.
### Use an alternation Regex to match one of multiple tags with "ORed" logic.
### For each check you can except Accounts, Regions, Resources and/or Tags.
########################### MONGODB ATLAS MUTELIST EXAMPLE ###########################
Mutelist:
Accounts:
"your-organization-id-here":
Checks:
"clusters_authentication_enabled":
Regions:
- "US_EAST_1"
Resources:
- "test-cluster"
Description: "Mute clusters_authentication_enabled check for test-cluster in US_EAST_1 region"
"projects_auditing_enabled":
Regions:
- "*"
Resources:
- "*"
Description: "Mute projects_auditing_enabled check for all resources in all regions"
"*":
Checks:
"clusters_backup_enabled":
Regions:
- "WESTERN_EUROPE"
Resources:
- "*"
Description: "Mute clusters_backup_enabled check for all clusters in all regions"
+4
View File
@@ -637,6 +637,10 @@ def execute(
)
elif global_provider.type == "m365":
is_finding_muted_args["tenant_id"] = global_provider.identity.tenant_id
elif global_provider.type == "mongodbatlas":
is_finding_muted_args["organization_id"] = (
global_provider.identity.organization_id
)
for finding in check_findings:
if global_provider.type == "azure":
is_finding_muted_args["subscription_id"] = (
+44 -3
View File
@@ -7,7 +7,7 @@ from dataclasses import asdict, dataclass, is_dataclass
from enum import Enum
from typing import Any, Dict, Optional, Set
from pydantic.v1 import BaseModel, ValidationError, validator
from pydantic.v1 import BaseModel, Field, ValidationError, validator
from prowler.config.config import Provider
from prowler.lib.check.compliance_models import Compliance
@@ -85,6 +85,7 @@ class CheckMetadata(BaseModel):
Risk (str): The risk associated with the check.
RelatedUrl (str): The URL related to the check.
Remediation (Remediation): The remediation steps for the check.
AdditionalURLs (list[str]): Additional URLs related to the check. Defaults to an empty list.
Categories (list[str]): The categories of the check.
DependsOn (list[str]): The dependencies of the check.
RelatedTo (list[str]): The related checks.
@@ -97,13 +98,14 @@ class CheckMetadata(BaseModel):
valid_severity(severity): Validator function to validate the severity of the check.
valid_cli_command(remediation): Validator function to validate the CLI command is not an URL.
valid_resource_type(resource_type): Validator function to validate the resource type is not empty.
validate_additional_urls(additional_urls): Validator function to ensure AdditionalURLs contains no duplicates.
"""
Provider: str
CheckID: str
CheckTitle: str
CheckType: list[str]
CheckAliases: list[str] = []
CheckAliases: list[str] = Field(default_factory=list)
ServiceName: str
SubServiceName: str
ResourceIdTemplate: str
@@ -113,13 +115,14 @@ class CheckMetadata(BaseModel):
Risk: str
RelatedUrl: str
Remediation: Remediation
AdditionalURLs: list[str] = Field(default_factory=list)
Categories: list[str]
DependsOn: list[str]
RelatedTo: list[str]
Notes: str
# We set the compliance to None to
# store the compliance later if supplied
Compliance: Optional[list[Any]] = []
Compliance: Optional[list[Any]] = Field(default_factory=list)
@validator("Categories", each_item=True, pre=True, always=True)
def valid_category(value):
@@ -178,6 +181,19 @@ class CheckMetadata(BaseModel):
return check_id
@validator("AdditionalURLs", pre=True, always=True)
def validate_additional_urls(cls, additional_urls):
if not isinstance(additional_urls, list):
raise ValueError("AdditionalURLs must be a list")
if any(not url or not url.strip() for url in additional_urls):
raise ValueError("AdditionalURLs cannot contain empty items")
if len(additional_urls) != len(set(additional_urls)):
raise ValueError("AdditionalURLs cannot contain duplicate items")
return additional_urls
@staticmethod
def get_bulk(provider: str) -> dict[str, "CheckMetadata"]:
"""
@@ -701,6 +717,31 @@ class CheckReportNHN(Check_Report):
self.location = getattr(resource, "location", "kr1")
@dataclass
class CheckReportMongoDBAtlas(Check_Report):
"""Contains the MongoDB Atlas Check's finding information."""
resource_name: str
resource_id: str
project_id: str
location: str
def __init__(self, metadata: Dict, resource: Any) -> None:
"""Initialize the MongoDB Atlas Check's finding information.
Args:
metadata: The metadata of the check.
resource: Basic information about the resource. Defaults to None.
"""
super().__init__(metadata, resource)
self.resource_name = getattr(
resource, "name", getattr(resource, "resource_name", "")
)
self.resource_id = getattr(resource, "id", getattr(resource, "resource_id", ""))
self.project_id = getattr(resource, "project_id", "")
self.location = getattr(resource, "location", self.project_id)
# Testing Pending
def load_check_metadata(metadata_file: str) -> CheckMetadata:
"""
+8 -2
View File
@@ -26,10 +26,10 @@ class ProwlerArgumentParser:
self.parser = argparse.ArgumentParser(
prog="prowler",
formatter_class=RawTextHelpFormatter,
usage="prowler [-h] [--version] {aws,azure,gcp,kubernetes,m365,github,nhn,dashboard,iac} ...",
usage="prowler [-h] [--version] {aws,azure,gcp,kubernetes,m365,github,nhn,mongodbatlas,dashboard,iac} ...",
epilog="""
Available Cloud Providers:
{aws,azure,gcp,kubernetes,m365,github,iac,nhn}
{aws,azure,gcp,kubernetes,m365,github,iac,nhn,mongodbatlas}
aws AWS Provider
azure Azure Provider
gcp GCP Provider
@@ -38,6 +38,7 @@ Available Cloud Providers:
github GitHub Provider
iac IaC Provider (Preview)
nhn NHN Provider (Unofficial)
mongodbatlas MongoDB Atlas Provider
Available components:
dashboard Local dashboard
@@ -234,6 +235,11 @@ Detailed documentation at https://docs.prowler.com
nargs="+",
help="Checks to exclude",
)
exclude_checks_parser.add_argument(
"--excluded-checks-file",
nargs="?",
help="JSON file containing the checks to be excluded. See config/checklist_example.json",
)
exclude_checks_parser.add_argument(
"--excluded-service",
"--excluded-services",
+12
View File
@@ -278,6 +278,18 @@ class Finding(BaseModel):
output_data["resource_uid"] = check_output.resource_id
output_data["region"] = check_output.location
elif provider.type == "mongodbatlas":
output_data["auth_method"] = "api_key"
output_data["account_uid"] = get_nested_attribute(
provider, "identity.organization_id"
)
output_data["account_name"] = get_nested_attribute(
provider, "identity.organization_name"
)
output_data["resource_name"] = check_output.resource_name
output_data["resource_uid"] = check_output.resource_id
output_data["region"] = check_output.location
elif provider.type == "nhn":
output_data["auth_method"] = (
f"passwordCredentials: username={get_nested_attribute(provider, '_identity.username')}, "
+45
View File
@@ -745,6 +745,51 @@ class HTML(Output):
)
return ""
@staticmethod
def get_mongodbatlas_assessment_summary(provider: Provider) -> str:
"""
get_mongodbatlas_assessment_summary gets the HTML assessment summary for the provider
Args:
provider (Provider): the provider object
Returns:
str: the HTML assessment summary
"""
try:
return f"""
<div class="col-md-2">
<div class="card">
<div class="card-header">
MongoDB Atlas Assessment Summary
</div>
<ul class="list-group
list-group-flush">
<li class="list-group-item">
<b>MongoDB Atlas organization:</b> {provider.identity.organization_name}
</li>
</ul>
</div>
</div>
<div class="col-md-4">
<div class="card">
<div class="card-header">
MongoDB Atlas Credentials
</div>
<ul class="list-group
list-group-flush">
<li class="list-group-item">
<b>MongoDB Atlas authentication method:</b> API Key
</li>
</ul>
</div>
</div>"""
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
)
return ""
@staticmethod
def get_iac_assessment_summary(provider: Provider) -> str:
"""
@@ -90,6 +90,10 @@ class JiraBaseException(ProwlerException):
"message": "Missing parameters on Jira Init function.",
"remediation": "Please check the parameters and try again.",
},
(9021, "JiraRequiredCustomFieldsError"): {
"message": "Jira project requires custom fields that are not supported.",
"remediation": "Please configure the Jira project to not require custom fields, or use a different project.",
},
}
def __init__(self, code, file=None, original_exception=None, message=None):
@@ -251,3 +255,10 @@ class JiraInvalidParameterError(JiraBaseException):
super().__init__(
9020, file=file, original_exception=original_exception, message=message
)
class JiraRequiredCustomFieldsError(JiraBaseException):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
9021, file=file, original_exception=original_exception, message=message
)
File diff suppressed because it is too large.
+2
View File
@@ -20,6 +20,8 @@ def stdout_report(finding, color, verbose, status, fix):
details = finding.owner
if finding.check_metadata.Provider == "m365":
details = finding.location
if finding.check_metadata.Provider == "mongodbatlas":
details = finding.location
if finding.check_metadata.Provider == "nhn":
details = finding.location
+3
View File
@@ -51,6 +51,9 @@ def display_summary_table(
elif provider.type == "m365":
entity_type = "Tenant Domain"
audited_entities = provider.identity.tenant_domain
elif provider.type == "mongodbatlas":
entity_type = "Organization"
audited_entities = provider.identity.organization_name
elif provider.type == "nhn":
entity_type = "Tenant Domain"
audited_entities = provider.identity.tenant_domain
@@ -1,7 +1,7 @@
import os
from concurrent.futures import ThreadPoolExecutor, as_completed
from dataclasses import dataclass
from typing import Optional
from typing import Optional, Union
from boto3 import Session
from botocore.client import ClientError
@@ -219,7 +219,7 @@ class SecurityHub:
session: Session,
aws_account_id: str,
aws_partition: str,
) -> tuple[str, Session | None]:
) -> tuple[str, Union[Session, None]]:
"""
Check if Security Hub is enabled in a specific region and if Prowler integration is active.
@@ -14,7 +14,10 @@ class cognito_user_pool_self_registration_disabled(Check):
report.status_extended = (
f"User pool {user_pool.id} has self registration disabled."
)
if not user_pool.admin_create_user_config.allow_admin_create_user_only:
if (
user_pool.admin_create_user_config
and not user_pool.admin_create_user_config.allow_admin_create_user_only
):
report.status = "FAIL"
report.status_extended = (
f"User pool {user_pool.id} has self registration enabled."
@@ -1,7 +1,10 @@
{
"Provider": "aws",
"CheckID": "ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
"CheckID": "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"CheckTitle": "Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to FTP ports 20 or 21.",
"CheckAliases": [
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21"
],
"CheckType": [
"Infrastructure Security"
],
@@ -7,7 +7,7 @@ from prowler.providers.aws.services.ec2.lib.security_groups import check_securit
from prowler.providers.aws.services.vpc.vpc_client import vpc_client
class ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21(Check):
class ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21(Check):
def execute(self):
findings = []
check_ports = [20, 21]
@@ -1,7 +1,10 @@
{
"Provider": "aws",
"CheckID": "ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
"CheckID": "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
"CheckTitle": "Ensure no security groups allow ingress from 0.0.0.0/0 or ::/0 to MongoDB ports 27017 and 27018.",
"CheckAliases": [
"ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018"
],
"CheckType": [
"Infrastructure Security"
],
@@ -7,7 +7,9 @@ from prowler.providers.aws.services.ec2.lib.security_groups import check_securit
from prowler.providers.aws.services.vpc.vpc_client import vpc_client
class ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018(Check):
class ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018(
Check
):
def execute(self):
findings = []
check_ports = [27017, 27018]
@@ -32,9 +32,10 @@ class firehose_stream_encrypted_at_rest(Check):
source_stream = kinesis_client.streams.get(
stream.source.kinesis_stream.kinesis_stream_arn
)
if source_stream.encrypted_at_rest != EncryptionType.NONE:
report.status = "PASS"
report.status_extended = f"Firehose Stream {stream.name} does not have at rest encryption enabled but the source stream {source_stream.name} has at rest encryption enabled."
if source_stream:
if source_stream.encrypted_at_rest != EncryptionType.NONE:
report.status = "PASS"
report.status_extended = f"Firehose Stream {stream.name} does not have at rest encryption enabled but the source stream {source_stream.name} has at rest encryption enabled."
# Check if the stream has encryption enabled directly
elif stream.kms_encryption == EncryptionStatus.ENABLED:
@@ -36,7 +36,7 @@ class MemoryDB(AWSService):
region=regional_client.region,
security_groups=[
sg["SecurityGroupId"]
for sg in cluster["SecurityGroups"]
for sg in cluster.get("SecurityGroups", [])
if sg["Status"] == "active"
],
tls_enabled=cluster["TLSEnabled"],
@@ -79,7 +79,7 @@ class OpenSearchService(AWSService):
"AdvancedSecurityOptions"
].get("Enabled", False)
cluster_config = describe_domain["DomainStatus"].get("ClusterConfig", {})
domain.instance_count = cluster_config.get("InstanceCount", None)
domain.instance_count = cluster_config.get("InstanceCount", 0)
domain.zone_awareness_enabled = cluster_config.get(
"ZoneAwarenessEnabled", False
)
@@ -155,10 +155,10 @@ class OpenSearchDomain(BaseModel):
saml_enabled: bool = None
update_available: bool = None
version: str = None
instance_count: Optional[int]
instance_count: int = 0
zone_awareness_enabled: Optional[bool]
tags: Optional[list] = []
advanced_settings_enabled: bool = None
dedicated_master_enabled: Optional[bool]
dedicated_master_count: Optional[int]
dedicated_master_count: int = 0
tags: Optional[list] = []
+9
View File
@@ -261,6 +261,15 @@ class Provider(ABC):
personal_access_token=arguments.personal_access_token,
oauth_app_token=arguments.oauth_app_token,
)
elif "mongodbatlas" in provider_class_name.lower():
provider_class(
atlas_public_key=arguments.atlas_public_key,
atlas_private_key=arguments.atlas_private_key,
atlas_project_id=arguments.atlas_project_id,
config_path=arguments.config_file,
mutelist_path=arguments.mutelist_file,
fixer_config=fixer_config,
)
except TypeError as error:
logger.critical(
@@ -77,7 +77,7 @@ class M365PowerShell(PowerShellSession):
Initialize PowerShell credential object for Microsoft 365 authentication.
Supports three authentication methods:
1. User authentication (username/password) - Will be deprecated in September 2025
1. User authentication (username/password) - Will be deprecated in October 2025
2. Application authentication (client_id/client_secret)
3. Certificate authentication (certificate_content in base64/application_id)
@@ -115,7 +115,7 @@ class M365PowerShell(PowerShellSession):
self.execute(f'$tenantID = "{sanitized_tenant_id}"')
self.execute(f'$tenantDomain = "{credentials.tenant_domains[0]}"')
# User Auth (Will be deprecated in September 2025)
# User Auth (Will be deprecated in October 2025)
elif credentials.user and credentials.passwd:
credentials.encrypted_passwd = self.encrypt_password(credentials.passwd)
@@ -8,7 +8,7 @@
"ResourceIdTemplate": "",
"Severity": "high",
"ResourceType": "Conditional Access Policy",
"Description": "Ensure ",
"Description": "This check verifies that phishing-resistant MFA strength is required for all administrator accounts. Phishing-resistant MFA includes authentication methods that are resistant to phishing attacks and MFA fatigue attacks compared to weaker methods like SMS or push notifications.",
"Risk": "Administrators using weaker MFA methods, such as SMS or push notifications, are vulnerable to phishing attacks and MFA fatigue attacks. Attackers can intercept codes or trick users into approving fraudulent authentication requests, leading to unauthorized access to critical systems.",
"RelatedUrl": "https://learn.microsoft.com/en-us/entra/identity/conditional-access/policy-admin-phish-resistant-mfa",
"Remediation": {
+2
View File
@@ -0,0 +1,2 @@
# Supported encryption providers
ATLAS_ENCRYPTION_PROVIDERS = ["AWS", "AZURE", "GCP", "NONE"]
@@ -0,0 +1,118 @@
from prowler.exceptions.exceptions import ProwlerException
# Exceptions codes from 8000 to 8999 are reserved for MongoDB Atlas exceptions
class MongoDBAtlasBaseException(ProwlerException):
"""Base class for MongoDB Atlas Errors."""
MONGODBATLAS_ERROR_CODES = {
(8000, "MongoDBAtlasCredentialsError"): {
"message": "MongoDB Atlas credentials not found or invalid",
"remediation": "Check the MongoDB Atlas API credentials and ensure they are properly set.",
},
(8001, "MongoDBAtlasAuthenticationError"): {
"message": "MongoDB Atlas authentication failed",
"remediation": "Check the MongoDB Atlas API credentials and ensure they are valid.",
},
(8002, "MongoDBAtlasSessionError"): {
"message": "MongoDB Atlas session setup failed",
"remediation": "Check the session setup and ensure it is properly configured.",
},
(8003, "MongoDBAtlasIdentityError"): {
"message": "MongoDB Atlas identity setup failed",
"remediation": "Check credentials and ensure they are properly set up for MongoDB Atlas.",
},
(8004, "MongoDBAtlasAPIError"): {
"message": "MongoDB Atlas API call failed",
"remediation": "Check the API request and ensure it is properly formatted.",
},
(8005, "MongoDBAtlasRateLimitError"): {
"message": "MongoDB Atlas API rate limit exceeded",
"remediation": "Reduce the number of API requests or wait before making more requests.",
},
}
def __init__(self, code, file=None, original_exception=None, message=None):
provider = "MongoDB Atlas"
error_info = self.MONGODBATLAS_ERROR_CODES.get((code, self.__class__.__name__))
if message:
error_info["message"] = message
super().__init__(
code=code,
source=provider,
file=file,
original_exception=original_exception,
error_info=error_info,
)
class MongoDBAtlasCredentialsError(MongoDBAtlasBaseException):
"""Exception for MongoDB Atlas credentials errors"""
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
code=8000,
file=file,
original_exception=original_exception,
message=message,
)
class MongoDBAtlasAuthenticationError(MongoDBAtlasBaseException):
"""Exception for MongoDB Atlas authentication errors"""
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
code=8001,
file=file,
original_exception=original_exception,
message=message,
)
class MongoDBAtlasSessionError(MongoDBAtlasBaseException):
"""Exception for MongoDB Atlas session setup errors"""
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
code=8002,
file=file,
original_exception=original_exception,
message=message,
)
class MongoDBAtlasIdentityError(MongoDBAtlasBaseException):
"""Exception for MongoDB Atlas identity setup errors"""
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
code=8003,
file=file,
original_exception=original_exception,
message=message,
)
class MongoDBAtlasAPIError(MongoDBAtlasBaseException):
"""Exception for MongoDB Atlas API errors"""
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
code=8004,
file=file,
original_exception=original_exception,
message=message,
)
class MongoDBAtlasRateLimitError(MongoDBAtlasBaseException):
"""Exception for MongoDB Atlas rate limit errors"""
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
code=8005,
file=file,
original_exception=original_exception,
message=message,
)
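A brief illustrative sketch (not part of the diff) of raising and handling one of these exceptions; the error message is hypothetical, and the import path mirrors the one used by the service module below.

```python
from prowler.providers.mongodbatlas.exceptions.exceptions import (
    MongoDBAtlasBaseException,
    MongoDBAtlasCredentialsError,
)

try:
    # Hypothetical failure path: the API key pair was never provided.
    raise MongoDBAtlasCredentialsError(
        message="ATLAS_PUBLIC_KEY/ATLAS_PRIVATE_KEY are not set"
    )
except MongoDBAtlasBaseException as error:
    # The base ProwlerException stores the error code, message, and remediation text.
    print(error)
```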
@@ -0,0 +1,45 @@
def init_parser(self):
"""Initialize the MongoDB Atlas Provider CLI parser"""
mongodbatlas_parser = self.subparsers.add_parser(
"mongodbatlas",
parents=[self.common_providers_parser],
help="MongoDB Atlas Provider",
)
mongodbatlas_auth_subparser = mongodbatlas_parser.add_argument_group(
"Authentication Modes"
)
mongodbatlas_auth_subparser.add_argument(
"--atlas-public-key",
nargs="?",
help="MongoDB Atlas API public key",
default=None,
metavar="ATLAS_PUBLIC_KEY",
)
mongodbatlas_auth_subparser.add_argument(
"--atlas-private-key",
nargs="?",
help="MongoDB Atlas API private key",
default=None,
metavar="ATLAS_PRIVATE_KEY",
)
mongodbatlas_filters_subparser = mongodbatlas_parser.add_argument_group(
"Optional Filters"
)
mongodbatlas_filters_subparser.add_argument(
"--atlas-project-id",
nargs="?",
help="MongoDB Atlas Project ID to filter scans to a specific project",
default=None,
metavar="ATLAS_PROJECT_ID",
)
def validate_arguments(arguments):
"""Validate MongoDB Atlas provider arguments"""
# No specific validation needed for MongoDB Atlas arguments currently
return (True, "")
@@ -0,0 +1,30 @@
from prowler.lib.check.models import CheckReportMongoDBAtlas
from prowler.lib.mutelist.mutelist import Mutelist
from prowler.lib.outputs.utils import unroll_dict, unroll_tags
class MongoDBAtlasMutelist(Mutelist):
"""MongoDB Atlas Mutelist class"""
def is_finding_muted(
self,
finding: CheckReportMongoDBAtlas,
organization_id: str,
) -> bool:
"""
Check if a finding is muted in the MongoDB Atlas mutelist.
Args:
finding: The CheckReportMongoDBAtlas finding
organization_id: The organization/project id
Returns:
bool: True if the finding is muted, False otherwise
"""
return self.is_muted(
organization_id,
finding.check_metadata.CheckID,
finding.location, # TODO: Study regions in MongoDB Atlas
finding.resource_name,
unroll_dict(unroll_tags(finding.resource_tags)),
)
@@ -0,0 +1,172 @@
import time
from threading import current_thread
from typing import Any, Dict, List, Optional
import requests
from requests.auth import HTTPDigestAuth
from prowler.lib.logger import logger
from prowler.providers.mongodbatlas.exceptions.exceptions import (
MongoDBAtlasAPIError,
MongoDBAtlasRateLimitError,
)
class MongoDBAtlasService:
"""Base class for MongoDB Atlas services"""
def __init__(self, service_name: str, provider):
self.service_name = service_name
self.provider = provider
self.session = provider.session
self.base_url = provider.session.base_url
self.audit_config = provider.audit_config
self.auth = HTTPDigestAuth(
provider.session.public_key, provider.session.private_key
)
self.headers = {
"Accept": "application/vnd.atlas.2025-01-01+json",
"Content-Type": "application/json",
}
def _make_request(
self,
method: str,
endpoint: str,
params: Optional[Dict] = None,
data: Optional[Dict] = None,
max_retries: int = 3,
retry_delay: int = 1,
) -> Dict[str, Any]:
"""
Make HTTP request to MongoDB Atlas API with retry logic
Args:
method: HTTP method (GET, POST, PUT, DELETE)
endpoint: API endpoint (without base URL)
params: Query parameters
data: Request body data
max_retries: Maximum number of retries
retry_delay: Delay between retries in seconds
Returns:
dict: Response JSON data
Raises:
MongoDBAtlasAPIError: If the API request fails
MongoDBAtlasRateLimitError: If rate limit is exceeded
"""
url = f"{self.base_url}/{endpoint.lstrip('/')}"
for attempt in range(max_retries + 1):
try:
response = requests.request(
method=method,
url=url,
auth=self.auth,
headers=self.headers,
params=params,
json=data,
timeout=30,
)
if response.status_code == 429:
if attempt < max_retries:
logger.warning(
f"Rate limit exceeded for {url}, retrying in {retry_delay} seconds..."
)
time.sleep(retry_delay)
retry_delay *= 2
continue
else:
raise MongoDBAtlasRateLimitError(
message=f"Rate limit exceeded for {url} after {max_retries} retries"
)
response.raise_for_status()
return response.json()
except requests.exceptions.RequestException as e:
if attempt < max_retries:
logger.warning(
f"Request failed for {url}, retrying in {retry_delay} seconds: {str(e)}"
)
time.sleep(retry_delay)
retry_delay *= 2
continue
else:
logger.error(
f"Request failed for {url} after {max_retries} retries: {str(e)}"
)
raise MongoDBAtlasAPIError(
original_exception=e,
message=f"Failed to make request to {url}: {str(e)}",
)
def _paginate_request(
self,
endpoint: str,
params: Optional[Dict] = None,
page_size: int = 100,
max_pages: Optional[int] = None,
) -> List[Dict[str, Any]]:
"""
Make paginated requests to MongoDB Atlas API
Args:
endpoint: API endpoint
params: Query parameters
page_size: Number of items per page
max_pages: Maximum number of pages to fetch
Returns:
list: List of all items from all pages
"""
if params is None:
params = {}
params.update({"pageNum": 1, "itemsPerPage": page_size})
all_items = []
page_num = 1
while True:
params["pageNum"] = page_num
try:
response = self._make_request("GET", endpoint, params=params)
if "results" in response:
items = response["results"]
all_items.extend(items)
total_count = response.get("totalCount", 0)
if len(items) < page_size or len(all_items) >= total_count:
break
if max_pages and page_num >= max_pages:
logger.warning(
f"Reached maximum pages limit ({max_pages}) for {endpoint}"
)
break
page_num += 1
else:
break
except Exception as e:
logger.error(
f"Error during pagination for {endpoint} at page {page_num}: {str(e)}"
)
break
logger.info(
f"Retrieved {len(all_items)} items from {endpoint} across {page_num} pages"
)
return all_items
def _get_thread_info(self) -> str:
"""Get thread information for logging"""
return f"[{current_thread().name}]"

Some files were not shown because too many files have changed in this diff.