Compare commits

...

5 Commits

Author | SHA1 | Message | Date
Lydia Vilchez | f12cc5cd6a | feat(gcp): add cloudstorage_bucket_versioning_enabled check | 2025-10-24 09:29:15 +02:00
Lydia Vilchez | 77576983d1 | fix(gcp): update lifecycle check metadata and add test for invalid rules | 2025-10-22 12:50:01 +02:00
Daniel Barranquero | 77d61881a0 | feat: add changelog and fix flake8 | 2025-10-17 12:11:40 +02:00
Daniel Barranquero | a8d775d162 | Merge branch 'master' into feat/gcp-cloudstorage-lifecycle-check | 2025-10-17 12:08:09 +02:00
Lydia Vilchez | bb87c9826a | feat(gcp): add Cloud Storage lifecycle management check, metadata and tests | 2025-10-17 11:37:38 +02:00
61 changed files with 53968 additions and 105 deletions

.github/CODEOWNERS vendored
View File

@@ -3,4 +3,4 @@
prowler @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
tests @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
api @prowler-cloud/api
ui @prowler-cloud/ui
ui @prowler-cloud/ui

View File

@@ -21,4 +21,4 @@ jobs:
uses: agenthunt/conventional-commit-checker-action@9e552d650d0e205553ec7792d447929fc78e012b # v2.0.0
with:
pr-title-regex: '^(feat|fix|docs|style|refactor|perf|test|chore|build|ci|revert)(\([^)]+\))?!?: .+'
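For illustration, the title pattern enforced above can be exercised with Python's re module; the sample titles below are drawn from this branch's commit messages plus one invented counter-example, and are not part of the workflow itself.

import re

# Conventional-commit pattern copied from the workflow configuration above.
PR_TITLE_REGEX = r"^(feat|fix|docs|style|refactor|perf|test|chore|build|ci|revert)(\([^)]+\))?!?: .+"

samples = [
    "feat(gcp): add cloudstorage_bucket_versioning_enabled check",                # accepted
    "fix(gcp): update lifecycle check metadata and add test for invalid rules",   # accepted
    "Add new GCP check",                                                           # rejected: missing type prefix
]

for title in samples:
    print(title, "->", bool(re.match(PR_TITLE_REGEX, title)))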

View File

@@ -47,5 +47,5 @@ help: ## Show this help.
@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m<target>\033[0m\n"} /^[a-zA-Z_-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)
##@ Development Environment
run-api-dev: ## Start development environment with API, PostgreSQL, Valkey, and workers
run-api-dev: ## Start development environment with API, PostgreSQL, Valkey, and workers
docker compose -f docker-compose-dev.yml up api-dev postgres valkey worker-dev worker-beat --build

View File

@@ -62,4 +62,4 @@ We strive to resolve all problems as quickly as possible, and we would like to p
---
For more information about our security policies, please refer to our [Security](https://docs.prowler.com/projects/prowler-open-source/en/latest/security/) section in our documentation.
For more information about our security policies, please refer to our [Security](https://docs.prowler.com/projects/prowler-open-source/en/latest/security/) section in our documentation.

View File

@@ -11,7 +11,9 @@
"unique_resource_count": 1,
"duration": 5,
"scanner_args": {
"checks_to_execute": ["accessanalyzer_enabled"]
"checks_to_execute": [
"accessanalyzer_enabled"
]
},
"inserted_at": "2024-09-01T17:25:27.050Z",
"started_at": "2024-09-01T17:25:27.050Z",
@@ -31,7 +33,9 @@
"unique_resource_count": 1,
"duration": 20,
"scanner_args": {
"checks_to_execute": ["accessanalyzer_enabled"]
"checks_to_execute": [
"accessanalyzer_enabled"
]
},
"inserted_at": "2024-09-02T17:24:27.050Z",
"started_at": "2024-09-02T17:24:27.050Z",
@@ -51,7 +55,9 @@
"unique_resource_count": 10,
"duration": 10,
"scanner_args": {
"checks_to_execute": ["cloudsql_instance_automated_backups"]
"checks_to_execute": [
"cloudsql_instance_automated_backups"
]
},
"inserted_at": "2024-09-02T19:26:27.050Z",
"started_at": "2024-09-02T19:26:27.050Z",
@@ -71,7 +77,9 @@
"unique_resource_count": 1,
"duration": 35,
"scanner_args": {
"checks_to_execute": ["accessanalyzer_enabled"]
"checks_to_execute": [
"accessanalyzer_enabled"
]
},
"inserted_at": "2024-09-02T19:27:27.050Z",
"started_at": "2024-09-02T19:27:27.050Z",
@@ -89,7 +97,9 @@
"name": "test scheduled aws scan",
"state": "available",
"scanner_args": {
"checks_to_execute": ["cloudformation_stack_outputs_find_secrets"]
"checks_to_execute": [
"cloudformation_stack_outputs_find_secrets"
]
},
"scheduled_at": "2030-09-02T19:20:27.050Z",
"inserted_at": "2024-09-02T19:24:27.050Z",
@@ -168,7 +178,9 @@
"unique_resource_count": 19,
"progress": 100,
"scanner_args": {
"checks_to_execute": ["accessanalyzer_enabled"]
"checks_to_execute": [
"accessanalyzer_enabled"
]
},
"duration": 7,
"scheduled_at": null,

File diff suppressed because one or more lines are too long

View File

@@ -4,11 +4,11 @@ from typing import Generator, Optional
from dateutil.relativedelta import relativedelta
from django.conf import settings
from psqlextra.partitioning import (
PostgresPartitioningError,
PostgresPartitioningManager,
PostgresRangePartition,
PostgresRangePartitioningStrategy,
PostgresTimePartitionSize,
PostgresPartitioningError,
)
from psqlextra.partitioning.config import PostgresPartitioningConfig
from uuid6 import UUID

View File

@@ -132,7 +132,9 @@ class RowLevelSecurityConstraint(models.BaseConstraint):
path, _, kwargs = super().deconstruct()
return (path, (self.target_field,), kwargs)
def validate(self, model, instance, exclude=None, using=DEFAULT_DB_ALIAS): # noqa: F841
def validate(
self, model, instance, exclude=None, using=DEFAULT_DB_ALIAS
): # noqa: F841
if not hasattr(instance, "tenant_id"):
raise ValidationError(f"{model.__name__} does not have a tenant_id field.")

View File

@@ -1,10 +1,9 @@
from unittest.mock import patch
import pytest
from conftest import TEST_PASSWORD, TEST_USER, get_api_tokens, get_authorization_header
from django.urls import reverse
from conftest import TEST_USER, TEST_PASSWORD, get_api_tokens, get_authorization_header
@patch("api.v1.views.schedule_provider_scan")
@pytest.mark.django_db

View File

@@ -7,11 +7,11 @@ from django.conf import settings
import api.apps as api_apps_module
from api.apps import (
ApiConfig,
PRIVATE_KEY_FILE,
PUBLIC_KEY_FILE,
SIGNING_KEY_ENV,
VERIFYING_KEY_ENV,
ApiConfig,
)

View File

@@ -1,12 +1,13 @@
from unittest.mock import patch
import pytest
from config.django.base import DATABASE_ROUTERS as PROD_DATABASE_ROUTERS
from django.conf import settings
from django.db.migrations.recorder import MigrationRecorder
from django.db.utils import ConnectionRouter
from api.db_router import MainRouter
from api.rls import Tenant
from config.django.base import DATABASE_ROUTERS as PROD_DATABASE_ROUTERS
from unittest.mock import patch
@patch("api.db_router.MainRouter.admin_db", new="admin")

View File

@@ -7,12 +7,12 @@ from rest_framework_json_api.serializers import ValidationError
from uuid6 import UUID
from api.uuid_utils import (
transform_into_uuid7,
datetime_to_uuid7,
datetime_from_uuid7,
uuid7_start,
datetime_to_uuid7,
transform_into_uuid7,
uuid7_end,
uuid7_range,
uuid7_start,
)

View File

@@ -1,7 +1,6 @@
from drf_spectacular.utils import extend_schema_field
from api.v1.serializer_utils.base import YamlOrJsonField
from prowler.lib.mutelist.mutelist import mutelist_schema

View File

@@ -34,12 +34,12 @@ from api.v1.views import (
ScheduleViewSet,
SchemaView,
TaskViewSet,
TenantApiKeyViewSet,
TenantFinishACSView,
TenantMembersViewSet,
TenantViewSet,
UserRoleRelationshipView,
UserViewSet,
TenantApiKeyViewSet,
)
router = routers.DefaultRouter(trailing_slash=False)

View File

@@ -11,8 +11,9 @@ os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.django.production")
import django # noqa: E402
django.setup()
from config.django.production import LOGGING as DJANGO_LOGGERS, DEBUG # noqa: E402
from config.custom_logging import BackendLogger # noqa: E402
from config.django.production import DEBUG
from config.django.production import LOGGING as DJANGO_LOGGERS # noqa: E402
BIND_ADDRESS = env("DJANGO_BIND_ADDRESS", default="127.0.0.1")
PORT = env("DJANGO_PORT", default=8000)

View File

@@ -20,10 +20,10 @@ from prowler.lib.outputs.asff.asff import ASFF
from prowler.lib.outputs.compliance.aws_well_architected.aws_well_architected import (
AWSWellArchitected,
)
from prowler.lib.outputs.compliance.c5.c5_aws import AWSC5
from prowler.lib.outputs.compliance.ccc.ccc_aws import CCC_AWS
from prowler.lib.outputs.compliance.ccc.ccc_azure import CCC_Azure
from prowler.lib.outputs.compliance.ccc.ccc_gcp import CCC_GCP
from prowler.lib.outputs.compliance.c5.c5_aws import AWSC5
from prowler.lib.outputs.compliance.cis.cis_aws import AWSCIS
from prowler.lib.outputs.compliance.cis.cis_azure import AzureCIS
from prowler.lib.outputs.compliance.cis.cis_gcp import GCPCIS

View File

@@ -1751,7 +1751,9 @@ class TestJiraIntegration:
}
findings.append(finding)
mock_finding_model.all_objects.select_related.return_value.prefetch_related.return_value.get.side_effect = findings
mock_finding_model.all_objects.select_related.return_value.prefetch_related.return_value.get.side_effect = (
findings
)
# Call the function
result = send_findings_to_jira(
@@ -1825,7 +1827,9 @@ class TestJiraIntegration:
},
}
mock_finding_model.all_objects.select_related.return_value.prefetch_related.return_value.get.return_value = finding
mock_finding_model.all_objects.select_related.return_value.prefetch_related.return_value.get.return_value = (
finding
)
# Call the function
result = send_findings_to_jira(
@@ -1888,7 +1892,9 @@ class TestJiraIntegration:
finding.scan.provider.provider = "kubernetes"
finding.check_metadata = {} # Empty metadata
mock_finding_model.all_objects.select_related.return_value.prefetch_related.return_value.get.return_value = finding
mock_finding_model.all_objects.select_related.return_value.prefetch_related.return_value.get.return_value = (
finding
)
# Call the function
result = send_findings_to_jira(

View File

@@ -12,7 +12,7 @@
1. Prowler CLI -> Run a Prowler scan using the --compliance option
2. Prowler Cloud/App -> Navigate to the compliance section to download csv outputs
![Download Compliance Scan](https://github.com/user-attachments/assets/42c11a60-8ce8-4c60-a663-2371199c052b)
The template supports the following CIS Benchmarks only:
@@ -113,5 +113,3 @@ The requirement page has the following components:
## Walkthrough Video
[![image](https://github.com/user-attachments/assets/866642c6-43ac-4aac-83d3-bb625002da0b)](https://www.youtube.com/watch?v=lfKFkTqBxjU)

View File

@@ -64,7 +64,7 @@ The logs that are generated and sent to Cloudwatch are error logs, and assessmen
## Instructions
1. Create a Private ECR Repository in the account that will host the Prowler container. The Audit account is recommended, but any account can be used.
2. Configure the .awsvariables file. Note the ROLE name chosen as it will be the CrossAccountRole.
3. Follow the steps from "View Push Commands" to build and upload the container image. Substitute step 2 with the build command provided in the Dockerfile. You need to have Docker and AWS CLI installed, and use the cli to login to the account first. After upload note the Image URI, as it is required for the CF-Prowler-ECS template. Ensure that you pay attention to the architecture while performing the docker build command. A common mistake is not specifying the architecture and then building on Apple silicon. Your task will fail with *exec /home/prowler/.local/bin/prowler: exec format error*.
3. Follow the steps from "View Push Commands" to build and upload the container image. Substitute step 2 with the build command provided in the Dockerfile. You need to have Docker and AWS CLI installed, and use the cli to login to the account first. After upload note the Image URI, as it is required for the CF-Prowler-ECS template. Ensure that you pay attention to the architecture while performing the docker build command. A common mistake is not specifying the architecture and then building on Apple silicon. Your task will fail with *exec /home/prowler/.local/bin/prowler: exec format error*.
4. Make sure SecurityHub is enabled in every account in AWS Organizations, and that the SecurityHub integration is enabled as explained in [Prowler - Security Hub Integration](https://github.com/prowler-cloud/prowler#security-hub-integration)
5. Deploy **CF-Prowler-CrossAccountRole.yml** in the Master Account as a single stack. You will have to choose the CrossAccountRole name (ProwlerXA-Role by default) and the ProwlerTaskRoleName (ProwlerECSTask-Role by default)
6. Deploy **CF-Prowler-CrossAccountRole.yml** in every Member Account as a StackSet. Choose the same CrossAccountName and ProwlerTaskRoleName as the previous step.

View File

@@ -119,4 +119,4 @@
.overview-table .card .collapse {
@apply visible
}
}

View File

@@ -14,4 +14,4 @@ Prowler Hub also provides a fully documented public API that you can integrate i
📚 Explore the API docs at: https://hub.prowler.com/api/docs
Whether you're customizing policies, managing compliance, or enhancing visibility, Prowler Hub is built to support your security operations.
Whether you're customizing policies, managing compliance, or enhancing visibility, Prowler Hub is built to support your security operations.

View File

@@ -5,7 +5,7 @@ requires = ["setuptools>=61.0", "wheel"]
[project]
dependencies = [
"fastmcp>=2.11.3",
"httpx>=0.27.0",
"httpx>=0.27.0"
]
description = "MCP server for Prowler ecosystem"
name = "prowler-mcp"

poetry.lock generated
View File

@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 2.2.0 and should not be changed by hand.
# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand.
[[package]]
name = "about-time"
@@ -2416,6 +2416,8 @@ python-versions = "*"
groups = ["dev"]
files = [
{file = "jsonpath-ng-1.7.0.tar.gz", hash = "sha256:f6f5f7fd4e5ff79c785f1573b394043b39849fb2bb47bcead935d12b00beab3c"},
{file = "jsonpath_ng-1.7.0-py2-none-any.whl", hash = "sha256:898c93fc173f0c336784a3fa63d7434297544b7198124a68f9a3ef9597b0ae6e"},
{file = "jsonpath_ng-1.7.0-py3-none-any.whl", hash = "sha256:f3d7f9e848cba1b6da28c55b1c26ff915dc9e0b1ba7e752a53d6da8d5cbd00b6"},
]
[package.dependencies]
@@ -5085,6 +5087,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a52d48f4e7bf9005e8f0a89209bf9a73f7190ddf0489eee5eb51377385f59f2a"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl", hash = "sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6"},
@@ -5093,6 +5096,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1492a6051dab8d912fc2adeef0e8c72216b24d57bd896ea607cb90bb0c4981d3"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632"},
@@ -5101,6 +5105,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a"},
@@ -5109,6 +5114,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4f6f3eac23941b32afccc23081e1f50612bdbe4e982012ef4f5797986828cd01"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl", hash = "sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl", hash = "sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987"},
@@ -5117,6 +5123,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2c59aa6170b990d8d2719323e628aaf36f3bfbc1c26279c0eeeb24d05d2d11c7"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl", hash = "sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl", hash = "sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b"},
{file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"},

View File

@@ -17,6 +17,7 @@ All notable changes to the **Prowler SDK** are documented in this file.
- Oracle Cloud provider with CIS 3.0 benchmark [(#8893)](https://github.com/prowler-cloud/prowler/pull/8893)
- Support for Atlassian Document Format (ADF) in Jira integration [(#8878)](https://github.com/prowler-cloud/prowler/pull/8878)
- Add Common Cloud Controls for AWS, Azure and GCP [(#8000)](https://github.com/prowler-cloud/prowler/pull/8000)
- `cloudstorage_bucket_lifecycle_management_enabled` check for GCP provider [(#8936)](https://github.com/prowler-cloud/prowler/pull/8936)
### Changed

View File

@@ -49,10 +49,10 @@ from prowler.lib.outputs.asff.asff import ASFF
from prowler.lib.outputs.compliance.aws_well_architected.aws_well_architected import (
AWSWellArchitected,
)
from prowler.lib.outputs.compliance.c5.c5_aws import AWSC5
from prowler.lib.outputs.compliance.ccc.ccc_aws import CCC_AWS
from prowler.lib.outputs.compliance.ccc.ccc_azure import CCC_Azure
from prowler.lib.outputs.compliance.ccc.ccc_gcp import CCC_GCP
from prowler.lib.outputs.compliance.c5.c5_aws import AWSC5
from prowler.lib.outputs.compliance.cis.cis_aws import AWSCIS
from prowler.lib.outputs.compliance.cis.cis_azure import AzureCIS
from prowler.lib.outputs.compliance.cis.cis_gcp import GCPCIS

View File

@@ -12509,4 +12509,4 @@
}
}
}
}
}

View File

@@ -18,7 +18,7 @@
"RelatedUrl": "",
"AdditionalURLs": [
"https://noise.getoto.net/2022/11/08/how-to-evaluate-and-use-ecdsa-certificates-in-aws-certificate-manager/",
"https://docs.aws.amazon.com/acm/latest/userguide/data-protection.html"
"https://docs.aws.amazon.com/acm/latest/userguide/data-protection.html"
],
"Remediation": {
"Code": {

View File

@@ -23,7 +23,9 @@
"Url": "https://docs.aws.amazon.com/bedrock/latest/userguide/guardrails-create.html"
}
},
"Categories": ["gen-ai"],
"Categories": [
"gen-ai"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""

View File

@@ -23,7 +23,9 @@
"Url": "https://docs.aws.amazon.com/bedrock/latest/userguide/prompt-injection.html"
}
},
"Categories": ["gen-ai"],
"Categories": [
"gen-ai"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": "Ensure that prompt attack protection is set to the highest strength to minimize the risk of prompt injection attacks."

View File

@@ -23,7 +23,9 @@
"Url": "https://docs.aws.amazon.com/bedrock/latest/userguide/guardrails-sensitive-filters.html"
}
},
"Categories": ["gen-ai"],
"Categories": [
"gen-ai"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""

View File

@@ -2,7 +2,10 @@
"Provider": "aws",
"CheckID": "dms_endpoint_ssl_enabled",
"CheckTitle": "Ensure SSL mode is enabled in DMS endpoint",
"CheckType": ["Effects", "Data Exposure"],
"CheckType": [
"Effects",
"Data Exposure"
],
"ServiceName": "dms",
"SubServiceName": "endpoint",
"ResourceIdTemplate": "arn:partition:dms:region:account-id:endpoint:resource-id",
@@ -12,21 +15,21 @@
"Risk": "Without SSL enabled, data transferred through DMS endpoints is not encrypted, potentially exposing sensitive information to unauthorized access or interception during transit.",
"RelatedUrl": "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Security.SSL.html",
"Remediation": {
"Code": {
"CLI": "aws dms modify-endpoint --endpoint-arn <endpoint_arn> --ssl-mode require",
"NativeIaC": "",
"Other": "https://docs.aws.amazon.com/securityhub/latest/userguide/dms-controls.html#dms-9",
"Terraform": ""
},
"Recommendation": {
"Text": "Enable SSL mode for all DMS endpoints. Use 'require' as the minimum SSL mode, and consider using 'verify-ca' or 'verify-full' for higher security.",
"Url": "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Security.SSL.html"
}
"Code": {
"CLI": "aws dms modify-endpoint --endpoint-arn <endpoint_arn> --ssl-mode require",
"NativeIaC": "",
"Other": "https://docs.aws.amazon.com/securityhub/latest/userguide/dms-controls.html#dms-9",
"Terraform": ""
},
"Recommendation": {
"Text": "Enable SSL mode for all DMS endpoints. Use 'require' as the minimum SSL mode, and consider using 'verify-ca' or 'verify-full' for higher security.",
"Url": "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Security.SSL.html"
}
},
"Categories": [
"encryption"
"encryption"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
}
}

View File

@@ -23,7 +23,9 @@
"Url": "https://docs.aws.amazon.com/sagemaker/latest/dg/studio-notebooks-and-internet-access.html"
}
},
"Categories": ["gen-ai"],
"Categories": [
"gen-ai"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""

View File

@@ -23,7 +23,9 @@
"Url": "https://docs.aws.amazon.com/sagemaker/latest/dg/studio-notebooks-and-internet-access.html"
}
},
"Categories": ["gen-ai"],
"Categories": [
"gen-ai"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""

View File

@@ -23,7 +23,9 @@
"Url": "https://docs.aws.amazon.com/sagemaker/latest/dg/nbi-root-access.html"
}
},
"Categories": ["gen-ai"],
"Categories": [
"gen-ai"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""

View File

@@ -23,7 +23,9 @@
"Url": "https://docs.aws.amazon.com/sagemaker/latest/dg/interface-vpc-endpoint.html"
}
},
"Categories": ["gen-ai"],
"Categories": [
"gen-ai"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""

View File

@@ -23,7 +23,9 @@
"Url": "https://docs.aws.amazon.com/sagemaker/latest/dg/interface-vpc-endpoint.html"
}
},
"Categories": ["gen-ai"],
"Categories": [
"gen-ai"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""

View File

@@ -0,0 +1,34 @@
{
"Provider": "gcp",
"CheckID": "cloudstorage_bucket_lifecycle_management_enabled",
"CheckTitle": "Cloud Storage buckets have lifecycle management enabled",
"CheckType": [],
"ServiceName": "cloudstorage",
"SubServiceName": "",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "storage.googleapis.com/Bucket",
"Description": "**Google Cloud Storage buckets** are evaluated for the presence of **lifecycle management** with at least one valid rule (supported action and non-empty condition) to automatically transition or delete objects and optimize storage costs.",
"Risk": "Buckets without lifecycle rules can accumulate stale data, increase storage costs, and fail to meet data retention and internal compliance requirements.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://www.trendmicro.com/cloudoneconformity/knowledge-base/gcp/CloudStorage/enable-lifecycle-management.html",
"https://cloud.google.com/storage/docs/lifecycle"
],
"Remediation": {
"Code": {
"CLI": "gcloud storage buckets update gs://<BUCKET_NAME> --lifecycle-file=<PATH_TO_JSON>",
"NativeIaC": "",
"Other": "1) Open Google Cloud Console → Storage → Buckets → <BUCKET_NAME>\n2) Tab 'Lifecycle'\n3) Add rule(s) to delete or transition objects (e.g., delete after 365 days; transition STANDARD→NEARLINE after 90 days)\n4) Save",
"Terraform": "```hcl\n# Example: enable lifecycle to transition and delete objects\nresource \"google_storage_bucket\" \"example\" {\n name = var.bucket_name\n location = var.location\n\n # Transition STANDARD → NEARLINE after 90 days\n lifecycle_rule {\n action {\n type = \"SetStorageClass\"\n storage_class = \"NEARLINE\"\n }\n condition {\n age = 90\n matches_storage_class = [\"STANDARD\"]\n }\n }\n\n # Delete objects after 365 days\n lifecycle_rule {\n action {\n type = \"Delete\"\n }\n condition {\n age = 365\n }\n }\n}\n```"
},
"Recommendation": {
"Text": "Configure lifecycle rules to automatically delete stale objects or transition them to colder storage classes according to your organization's retention and cost-optimization policy.",
"Url": "https://hub.prowler.com/check/cloudstorage_bucket_lifecycle_management_enabled"
}
},
"Categories": [],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
}

View File

@@ -0,0 +1,48 @@
from prowler.lib.check.models import Check, Check_Report_GCP
from prowler.providers.gcp.services.cloudstorage.cloudstorage_client import (
cloudstorage_client,
)
class cloudstorage_bucket_lifecycle_management_enabled(Check):
"""Ensure Cloud Storage buckets have lifecycle management enabled with at least one valid rule.
Reports PASS if a bucket has at least one valid lifecycle rule
(with a supported action and condition), otherwise FAIL.
"""
def execute(self) -> list[Check_Report_GCP]:
"""Run the lifecycle management check for each Cloud Storage bucket.
Returns:
list[Check_Report_GCP]: Results for all evaluated buckets.
"""
findings = []
for bucket in cloudstorage_client.buckets:
report = Check_Report_GCP(metadata=self.metadata(), resource=bucket)
report.status = "FAIL"
report.status_extended = (
f"Bucket {bucket.name} does not have lifecycle management enabled."
)
rules = bucket.lifecycle_rules
if rules:
valid_rules = []
for rule in rules:
action_type = rule.get("action", {}).get("type")
condition = rule.get("condition")
if action_type and condition:
valid_rules.append(rule)
if valid_rules:
report.status = "PASS"
report.status_extended = f"Bucket {bucket.name} has lifecycle management enabled with {len(valid_rules)} valid rule(s)."
else:
report.status = "FAIL"
report.status_extended = f"Bucket {bucket.name} has lifecycle rules configured but none are valid."
findings.append(report)
return findings
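As a quick illustration of the validity rule implemented above (a rule needs an action type and a non-empty condition), the sketch below applies the same test to a few hypothetical rule dictionaries; the helper name is illustrative and not part of the check.

# Mirrors the per-rule test in cloudstorage_bucket_lifecycle_management_enabled:
# a rule is valid only if action.type is set and condition is non-empty.
def is_valid_lifecycle_rule(rule: dict) -> bool:
    return bool(rule.get("action", {}).get("type")) and bool(rule.get("condition"))

rules = [
    {"action": {"type": "Delete"}, "condition": {"age": 365}},   # valid
    {"action": {}, "condition": {"age": 30}},                    # invalid: no action type
    {"action": {"type": "Delete"}, "condition": {}},             # invalid: empty condition
]
print([is_valid_lifecycle_rule(r) for r in rules])  # [True, False, False]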

View File

@@ -0,0 +1,36 @@
{
"Provider": "gcp",
"CheckID": "cloudstorage_bucket_versioning_enabled",
"CheckTitle": "Cloud Storage buckets have Object Versioning enabled",
"CheckType": [],
"ServiceName": "cloudstorage",
"SubServiceName": "",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "storage.googleapis.com/Bucket",
"Description": "**Google Cloud Storage buckets** are evaluated to ensure that **Object Versioning** is enabled. Object Versioning preserves older versions of objects, allowing data recovery, maintaining audit trails, and protecting against accidental deletions or overwrites.",
"Risk": "Buckets without Object Versioning enabled cannot recover previous object versions, which increases the risk of permanent data loss from accidental deletion or modification.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://www.trendmicro.com/cloudoneconformity/knowledge-base/gcp/CloudStorage/enable-versioning.html",
"https://cloud.google.com/storage/docs/object-versioning"
],
"Remediation": {
"Code": {
"CLI": "gcloud storage buckets update gs://<BUCKET_NAME> --versioning",
"NativeIaC": "",
"Other": "1) Open Google Cloud Console → Storage → Buckets → <BUCKET_NAME>\n2) Tab 'Configuration'\n3) Under 'Object versioning', click 'Enable Object Versioning'\n4) Save changes",
"Terraform": "```hcl\n# Example: enable Object Versioning on a Cloud Storage bucket\nresource \"google_storage_bucket\" \"example\" {\n name = var.bucket_name\n location = var.location\n\n versioning {\n enabled = true\n }\n}\n```"
},
"Recommendation": {
"Text": "Enable Object Versioning on Cloud Storage buckets to preserve previous object versions and improve data recoverability and auditability.",
"Url": "https://hub.prowler.com/check/cloudstorage_bucket_versioning_enabled"
}
},
"Categories": [
"resilience"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": "Buckets missing the 'versioning' block are treated as having Object Versioning disabled."
}

View File

@@ -0,0 +1,30 @@
from prowler.lib.check.models import Check, Check_Report_GCP
from prowler.providers.gcp.services.cloudstorage.cloudstorage_client import (
cloudstorage_client,
)
class cloudstorage_bucket_versioning_enabled(Check):
"""
Ensure Cloud Storage buckets have Object Versioning enabled.
Reports PASS if a bucket has versioning enabled, otherwise FAIL.
"""
def execute(self) -> list[Check_Report_GCP]:
findings = []
for bucket in cloudstorage_client.buckets:
report = Check_Report_GCP(metadata=self.metadata(), resource=bucket)
report.status = "FAIL"
report.status_extended = (
f"Bucket {bucket.name} does not have Object Versioning enabled."
)
if bucket.versioning_enabled:
report.status = "PASS"
report.status_extended = (
f"Bucket {bucket.name} has Object Versioning enabled."
)
findings.append(report)
return findings
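The decision above is a plain truthiness test, so a bucket whose versioning_enabled is False, or left at the model default of None, is reported as FAIL. A minimal sketch with hypothetical bucket names:

# Hypothetical (name, versioning_enabled) pairs; any falsy value means FAIL.
examples = [("with-versioning", True), ("no-versioning", False), ("versioning-unset", None)]
for name, versioning_enabled in examples:
    print(name, "PASS" if versioning_enabled else "FAIL")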

View File

@@ -31,6 +31,18 @@ class CloudStorage(GCPService):
bucket_iam
) or "allUsers" in str(bucket_iam):
public = True
lifecycle_rules = None
lifecycle = bucket.get("lifecycle")
if isinstance(lifecycle, dict):
rules = lifecycle.get("rule")
if isinstance(rules, list):
lifecycle_rules = rules
versioning_enabled = (
bucket.get("versioning", {}).get("enabled", False)
)
self.buckets.append(
Bucket(
name=bucket["name"],
@@ -42,6 +54,8 @@ class CloudStorage(GCPService):
public=public,
retention_policy=bucket.get("retentionPolicy"),
project_id=project_id,
lifecycle_rules=lifecycle_rules,
versioning_enabled=versioning_enabled,
)
)
@@ -62,3 +76,5 @@ class Bucket(BaseModel):
public: bool
project_id: str
retention_policy: Optional[dict] = None
lifecycle_rules: Optional[list[dict]] = None
versioning_enabled: Optional[bool] = None
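To see how the new fields are populated, the sketch below runs the same parsing logic added above against two hypothetical Cloud Storage API bucket payloads (field names follow the lifecycle.rule and versioning.enabled structure used in the diff); a bucket with no versioning block defaults to disabled.

# Hypothetical bucket resources as returned by the Cloud Storage JSON API.
buckets = [
    {
        "name": "with-config",
        "lifecycle": {"rule": [{"action": {"type": "Delete"}, "condition": {"age": 365}}]},
        "versioning": {"enabled": True},
    },
    {"name": "bare-bucket"},  # neither lifecycle nor versioning configured
]

for bucket in buckets:
    # Same extraction logic as the service code above.
    lifecycle_rules = None
    lifecycle = bucket.get("lifecycle")
    if isinstance(lifecycle, dict):
        rules = lifecycle.get("rule")
        if isinstance(rules, list):
            lifecycle_rules = rules
    versioning_enabled = bucket.get("versioning", {}).get("enabled", False)
    print(bucket["name"], lifecycle_rules, versioning_enabled)
# with-config -> one lifecycle rule, versioning True; bare-bucket -> None, False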

View File

@@ -1,7 +1,6 @@
from dataclasses import dataclass
from kubernetes import client
from prowler.config.config import output_file_timestamp
from prowler.providers.common.models import ProviderOutputOptions

View File

@@ -5916,4 +5916,4 @@
}
}
}
}
}

View File

@@ -3,11 +3,11 @@ from json import dumps
from os import path
import botocore
import pytest
import yaml
from boto3 import client, resource
from mock import MagicMock, patch
from moto import mock_aws
import pytest
from prowler.config.config import encoding_format_utf_8
from prowler.providers.aws.lib.mutelist.mutelist import AWSMutelist

View File

@@ -0,0 +1,223 @@
from unittest import mock
from tests.providers.gcp.gcp_fixtures import (
GCP_PROJECT_ID,
GCP_US_CENTER1_LOCATION,
set_mocked_gcp_provider,
)
class TestCloudStorageBucketLifecycleManagementEnabled:
def test_bucket_without_lifecycle_rules(self):
cloudstorage_client = mock.MagicMock()
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_gcp_provider(),
),
mock.patch(
"prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_client",
new=cloudstorage_client,
),
):
from prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_bucket_lifecycle_management_enabled import (
cloudstorage_bucket_lifecycle_management_enabled,
)
from prowler.providers.gcp.services.cloudstorage.cloudstorage_service import (
Bucket,
)
cloudstorage_client.project_ids = [GCP_PROJECT_ID]
cloudstorage_client.region = GCP_US_CENTER1_LOCATION
cloudstorage_client.buckets = [
Bucket(
name="no-lifecycle",
id="no-lifecycle",
region=GCP_US_CENTER1_LOCATION,
uniform_bucket_level_access=True,
public=False,
retention_policy=None,
project_id=GCP_PROJECT_ID,
lifecycle_rules=[],
)
]
check = cloudstorage_bucket_lifecycle_management_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Bucket {cloudstorage_client.buckets[0].name} does not have lifecycle management enabled."
)
assert result[0].resource_id == "no-lifecycle"
assert result[0].resource_name == "no-lifecycle"
assert result[0].location == GCP_US_CENTER1_LOCATION
assert result[0].project_id == GCP_PROJECT_ID
def test_bucket_with_minimal_delete_rule(self):
cloudstorage_client = mock.MagicMock()
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_gcp_provider(),
),
mock.patch(
"prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_client",
new=cloudstorage_client,
),
):
from prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_bucket_lifecycle_management_enabled import (
cloudstorage_bucket_lifecycle_management_enabled,
)
from prowler.providers.gcp.services.cloudstorage.cloudstorage_service import (
Bucket,
)
cloudstorage_client.project_ids = [GCP_PROJECT_ID]
cloudstorage_client.region = GCP_US_CENTER1_LOCATION
cloudstorage_client.buckets = [
Bucket(
name="delete-rule",
id="delete-rule",
region=GCP_US_CENTER1_LOCATION,
uniform_bucket_level_access=True,
public=False,
retention_policy=None,
project_id=GCP_PROJECT_ID,
lifecycle_rules=[
{"action": {"type": "Delete"}, "condition": {"age": 30}}
],
)
]
check = cloudstorage_bucket_lifecycle_management_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"Bucket {cloudstorage_client.buckets[0].name} has lifecycle management enabled with 1 valid rule(s)."
)
assert result[0].resource_id == "delete-rule"
assert result[0].resource_name == "delete-rule"
assert result[0].location == GCP_US_CENTER1_LOCATION
assert result[0].project_id == GCP_PROJECT_ID
def test_bucket_with_transition_and_delete_rules(self):
cloudstorage_client = mock.MagicMock()
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_gcp_provider(),
),
mock.patch(
"prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_client",
new=cloudstorage_client,
),
):
from prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_bucket_lifecycle_management_enabled import (
cloudstorage_bucket_lifecycle_management_enabled,
)
from prowler.providers.gcp.services.cloudstorage.cloudstorage_service import (
Bucket,
)
cloudstorage_client.project_ids = [GCP_PROJECT_ID]
cloudstorage_client.region = GCP_US_CENTER1_LOCATION
cloudstorage_client.buckets = [
Bucket(
name="transition-delete",
id="transition-delete",
region=GCP_US_CENTER1_LOCATION,
uniform_bucket_level_access=True,
public=False,
retention_policy=None,
project_id=GCP_PROJECT_ID,
lifecycle_rules=[
{
"action": {
"type": "SetStorageClass",
"storageClass": "NEARLINE",
},
"condition": {"matchesStorageClass": ["STANDARD"]},
},
{"action": {"type": "Delete"}, "condition": {"age": 365}},
],
)
]
check = cloudstorage_bucket_lifecycle_management_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"Bucket {cloudstorage_client.buckets[0].name} has lifecycle management enabled with 2 valid rule(s)."
)
assert result[0].resource_id == "transition-delete"
assert result[0].resource_name == "transition-delete"
assert result[0].location == GCP_US_CENTER1_LOCATION
assert result[0].project_id == GCP_PROJECT_ID
def test_bucket_with_invalid_lifecycle_rules(self):
cloudstorage_client = mock.MagicMock()
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_gcp_provider(),
),
mock.patch(
"prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_client",
new=cloudstorage_client,
),
):
from prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_bucket_lifecycle_management_enabled import (
cloudstorage_bucket_lifecycle_management_enabled,
)
from prowler.providers.gcp.services.cloudstorage.cloudstorage_service import (
Bucket,
)
cloudstorage_client.project_ids = [GCP_PROJECT_ID]
cloudstorage_client.region = GCP_US_CENTER1_LOCATION
cloudstorage_client.buckets = [
Bucket(
name="invalid-rules",
id="invalid-rules",
region=GCP_US_CENTER1_LOCATION,
uniform_bucket_level_access=True,
public=False,
retention_policy=None,
project_id=GCP_PROJECT_ID,
lifecycle_rules=[
{"action": {}, "condition": {"age": 30}},
{"action": {"type": "Delete"}, "condition": {}},
],
)
]
check = cloudstorage_bucket_lifecycle_management_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Bucket {cloudstorage_client.buckets[0].name} has lifecycle rules configured but none are valid."
)
assert result[0].resource_id == "invalid-rules"
assert result[0].resource_name == "invalid-rules"
assert result[0].location == GCP_US_CENTER1_LOCATION
assert result[0].project_id == GCP_PROJECT_ID

View File

@@ -0,0 +1,163 @@
from unittest import mock
from tests.providers.gcp.gcp_fixtures import (
GCP_PROJECT_ID,
GCP_US_CENTER1_LOCATION,
set_mocked_gcp_provider,
)
class TestCloudStorageBucketVersioningEnabled:
def test_bucket_without_versioning(self):
cloudstorage_client = mock.MagicMock()
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_gcp_provider(),
),
mock.patch(
"prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_versioning_enabled.cloudstorage_bucket_versioning_enabled.cloudstorage_client",
new=cloudstorage_client,
),
):
from prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_versioning_enabled.cloudstorage_bucket_versioning_enabled import (
cloudstorage_bucket_versioning_enabled,
)
from prowler.providers.gcp.services.cloudstorage.cloudstorage_service import (
Bucket,
)
cloudstorage_client.project_ids = [GCP_PROJECT_ID]
cloudstorage_client.region = GCP_US_CENTER1_LOCATION
cloudstorage_client.buckets = [
Bucket(
name="no-versioning",
id="no-versioning",
region=GCP_US_CENTER1_LOCATION,
uniform_bucket_level_access=True,
public=False,
retention_policy=None,
project_id=GCP_PROJECT_ID,
lifecycle_rules=[],
versioning_enabled=False,
)
]
check = cloudstorage_bucket_versioning_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Bucket {cloudstorage_client.buckets[0].name} does not have Object Versioning enabled."
)
assert result[0].resource_id == "no-versioning"
assert result[0].resource_name == "no-versioning"
assert result[0].location == GCP_US_CENTER1_LOCATION
assert result[0].project_id == GCP_PROJECT_ID
def test_bucket_with_versioning_enabled(self):
cloudstorage_client = mock.MagicMock()
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_gcp_provider(),
),
mock.patch(
"prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_versioning_enabled.cloudstorage_bucket_versioning_enabled.cloudstorage_client",
new=cloudstorage_client,
),
):
from prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_versioning_enabled.cloudstorage_bucket_versioning_enabled import (
cloudstorage_bucket_versioning_enabled,
)
from prowler.providers.gcp.services.cloudstorage.cloudstorage_service import (
Bucket,
)
cloudstorage_client.project_ids = [GCP_PROJECT_ID]
cloudstorage_client.region = GCP_US_CENTER1_LOCATION
cloudstorage_client.buckets = [
Bucket(
name="with-versioning",
id="with-versioning",
region=GCP_US_CENTER1_LOCATION,
uniform_bucket_level_access=True,
public=False,
retention_policy=None,
project_id=GCP_PROJECT_ID,
lifecycle_rules=[],
versioning_enabled=True,
)
]
check = cloudstorage_bucket_versioning_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"Bucket {cloudstorage_client.buckets[0].name} has Object Versioning enabled."
)
assert result[0].resource_id == "with-versioning"
assert result[0].resource_name == "with-versioning"
assert result[0].location == GCP_US_CENTER1_LOCATION
assert result[0].project_id == GCP_PROJECT_ID
def test_bucket_without_versioning_configured_treated_as_disabled(self):
cloudstorage_client = mock.MagicMock()
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_gcp_provider(),
),
mock.patch(
"prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_versioning_enabled.cloudstorage_bucket_versioning_enabled.cloudstorage_client",
new=cloudstorage_client,
),
):
from prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_versioning_enabled.cloudstorage_bucket_versioning_enabled import (
cloudstorage_bucket_versioning_enabled,
)
from prowler.providers.gcp.services.cloudstorage.cloudstorage_service import (
Bucket,
)
cloudstorage_client.project_ids = [GCP_PROJECT_ID]
cloudstorage_client.region = GCP_US_CENTER1_LOCATION
cloudstorage_client.buckets = [
Bucket(
name="no-versioning-config",
id="no-versioning-config",
region=GCP_US_CENTER1_LOCATION,
uniform_bucket_level_access=True,
public=False,
retention_policy=None,
project_id=GCP_PROJECT_ID,
lifecycle_rules=[],
versioning_enabled=False,
)
]
check = cloudstorage_bucket_versioning_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Bucket {cloudstorage_client.buckets[0].name} does not have Object Versioning enabled."
)
assert result[0].resource_id == "no-versioning-config"
assert result[0].resource_name == "no-versioning-config"
assert result[0].location == GCP_US_CENTER1_LOCATION
assert result[0].project_id == GCP_PROJECT_ID

View File

@@ -1,7 +1,6 @@
# This file needs to be named with the provider at the beginning since there is a limitation in pytest and two tests files cannot have the same name
# https://github.com/pytest-dev/pytest/issues/774#issuecomment-112343498
from kubernetes import client
from prowler.providers.kubernetes.lib.service.service import KubernetesService
from tests.providers.kubernetes.kubernetes_fixtures import (
set_mocked_kubernetes_provider,

View File

@@ -13,4 +13,4 @@ Mutelist:
- "*"
Resources:
- "resource_1"
- "resource_2"
- "resource_2"

View File

@@ -18,4 +18,4 @@ next-env.d.ts
!jest.config.js
!plopfile.js
!react-shim.js
!tsup.config.ts
!tsup.config.ts

ui/.gitignore vendored
View File

@@ -33,4 +33,4 @@ yarn-error.log*
# typescript
*.tsbuildinfo
next-env.d.ts
next-env.d.ts

View File

@@ -1 +1 @@
node_modules/
node_modules/

View File

@@ -6,5 +6,7 @@
"useTabs": false,
"semi": true,
"printWidth": 80,
"plugins": ["prettier-plugin-tailwindcss"]
"plugins": [
"prettier-plugin-tailwindcss"
]
}

View File

@@ -436,4 +436,4 @@
<g transform="matrix(0.91 0 0 0.79 805.08 523.89)">
<path style="stroke: none; stroke-width: 1; stroke-dasharray: none; stroke-linecap: butt; stroke-dashoffset: 0; stroke-linejoin: miter; stroke-miterlimit: 4; fill: rgb(10,10,9); fill-rule: nonzero; opacity: 1;" vector-effect="non-scaling-stroke" transform=" translate(-0.87, -4)" d="M 0 0 C 0.66 0 1.32 0 2 0 C 4.125 4.625 4.125 4.625 3 8 C 2.67 6.68 2.34 5.36 2 4 C 0.68 4 -0.64 4 -2 4 C -1.33333333 2.66666667 -0.66666667 1.33333333 0 0 Z" stroke-linecap="round"/>
</g>
</svg>
</svg>

Image file: 126 KiB before and after.

View File

@@ -1 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="64" height="64"><path d="M12.506 8.407C5.603 8.407 0 14.01 0 20.912l.097 34.74h11.117l-.097-23.1v-11.63a1.4 1.4 0 0 1 1.25-1.39c.047-.005.09 0 .14 0 .383 0 .73.157.98.408L42.653 51.93c2.342 2.35 5.523 3.668 8.84 3.665C58.397 55.594 64 49.99 64 43.088V8.35H52.883V43.14a1.39 1.39 0 0 1-1.39 1.337c-.368 0-.72-.147-.98-.408L21.347 12.07c-2.342-2.35-5.524-3.668-8.84-3.664z"/></svg>
<svg xmlns="http://www.w3.org/2000/svg" width="64" height="64"><path d="M12.506 8.407C5.603 8.407 0 14.01 0 20.912l.097 34.74h11.117l-.097-23.1v-11.63a1.4 1.4 0 0 1 1.25-1.39c.047-.005.09 0 .14 0 .383 0 .73.157.98.408L42.653 51.93c2.342 2.35 5.523 3.668 8.84 3.665C58.397 55.594 64 49.99 64 43.088V8.35H52.883V43.14a1.39 1.39 0 0 1-1.39 1.337c-.368 0-.72-.147-.98-.408L21.347 12.07c-2.342-2.35-5.524-3.668-8.84-3.664z"/></svg>

Image file: 426 B before, 427 B after.

File diff suppressed because one or more lines are too long

Image file: 547 KiB before and after.

View File

@@ -15,7 +15,10 @@
"provider": "aws",
"provider_id": "001",
"alias": "Amazon EC2",
"regions": ["us-east-1", "us-east-2"],
"regions": [
"us-east-1",
"us-east-2"
],
"findings": {
"failed": 3,
"last_checked_at": null
@@ -32,7 +35,10 @@
"provider": "aws",
"provider_id": "002",
"alias": "Amazon EMR",
"regions": ["us-west-1", "us-west-2"],
"regions": [
"us-west-1",
"us-west-2"
],
"findings": {
"failed": 0,
"last_checked_at": null
@@ -49,7 +55,10 @@
"provider": "aws",
"provider_id": "003",
"alias": "Amazon GuardDuty",
"regions": ["eu-west-1", "eu-west-2"],
"regions": [
"eu-west-1",
"eu-west-2"
],
"findings": {
"failed": 5,
"last_checked_at": null
@@ -66,7 +75,10 @@
"provider": "aws",
"provider_id": "004",
"alias": "Amazon Inspector",
"regions": ["ap-southeast-1", "ap-southeast-2"],
"regions": [
"ap-southeast-1",
"ap-southeast-2"
],
"findings": {
"failed": 0,
"last_checked_at": null
@@ -83,7 +95,10 @@
"provider": "aws",
"provider_id": "005",
"alias": "Amazon Macie",
"regions": ["eu-north-1", "eu-north-2"],
"regions": [
"eu-north-1",
"eu-north-2"
],
"findings": {
"failed": 2,
"last_checked_at": null
@@ -100,7 +115,10 @@
"provider": "aws",
"provider_id": "006",
"alias": "Amazon RDS",
"regions": ["sa-east-1", "sa-east-2"],
"regions": [
"sa-east-1",
"sa-east-2"
],
"findings": {
"failed": 0,
"last_checked_at": null
@@ -117,7 +135,10 @@
"provider": "aws",
"provider_id": "007",
"alias": "Amazon Route 53",
"regions": ["af-south-1", "af-south-2"],
"regions": [
"af-south-1",
"af-south-2"
],
"findings": {
"failed": 1,
"last_checked_at": null
@@ -134,7 +155,10 @@
"provider": "aws",
"provider_id": "008",
"alias": "Amazon S3",
"regions": ["us-east-1", "us-west-1"],
"regions": [
"us-east-1",
"us-west-1"
],
"findings": {
"failed": 3,
"last_checked_at": null
@@ -151,7 +175,10 @@
"provider": "aws",
"provider_id": "009",
"alias": "Amazon SNS",
"regions": ["eu-central-1", "eu-central-2"],
"regions": [
"eu-central-1",
"eu-central-2"
],
"findings": {
"failed": 0,
"last_checked_at": null
@@ -168,7 +195,10 @@
"provider": "aws",
"provider_id": "010",
"alias": "Amazon VPC",
"regions": ["us-west-1", "us-west-2"],
"regions": [
"us-west-1",
"us-west-2"
],
"findings": {
"failed": 4,
"last_checked_at": null
@@ -185,7 +215,10 @@
"provider": "aws",
"provider_id": "011",
"alias": "AWS Account",
"regions": ["eu-west-1", "eu-west-3"],
"regions": [
"eu-west-1",
"eu-west-3"
],
"findings": {
"failed": 0,
"last_checked_at": null
@@ -202,7 +235,10 @@
"provider": "aws",
"provider_id": "012",
"alias": "AWS Athena",
"regions": ["us-east-1", "us-east-2"],
"regions": [
"us-east-1",
"us-east-2"
],
"findings": {
"failed": 5,
"last_checked_at": null
@@ -219,7 +255,10 @@
"provider": "aws",
"provider_id": "013",
"alias": "AWS Certificate Manager",
"regions": ["us-west-1", "us-west-2"],
"regions": [
"us-west-1",
"us-west-2"
],
"findings": {
"failed": 1,
"last_checked_at": null
@@ -236,7 +275,10 @@
"provider": "aws",
"provider_id": "014",
"alias": "AWS CloudFormation",
"regions": ["eu-central-1", "eu-central-2"],
"regions": [
"eu-central-1",
"eu-central-2"
],
"findings": {
"failed": 0,
"last_checked_at": null
@@ -253,7 +295,10 @@
"provider": "aws",
"provider_id": "015",
"alias": "AWS CloudTrail",
"regions": ["us-east-1", "us-east-2"],
"regions": [
"us-east-1",
"us-east-2"
],
"findings": {
"failed": 4,
"last_checked_at": null
@@ -270,7 +315,10 @@
"provider": "aws",
"provider_id": "016",
"alias": "AWS CloudWatch",
"regions": ["us-west-1", "us-west-2"],
"regions": [
"us-west-1",
"us-west-2"
],
"findings": {
"failed": 2,
"last_checked_at": null
@@ -287,7 +335,10 @@
"provider": "aws",
"provider_id": "017",
"alias": "AWS Config",
"regions": ["eu-west-1", "eu-west-3"],
"regions": [
"eu-west-1",
"eu-west-3"
],
"findings": {
"failed": 0,
"last_checked_at": null
@@ -304,7 +355,10 @@
"provider": "aws",
"provider_id": "018",
"alias": "AWS Database Migration",
"regions": ["us-east-1", "us-east-2"],
"regions": [
"us-east-1",
"us-east-2"
],
"findings": {
"failed": 5,
"last_checked_at": null
@@ -321,7 +375,10 @@
"provider": "aws",
"provider_id": "019",
"alias": "AWS Glue",
"regions": ["us-west-1", "us-west-2"],
"regions": [
"us-west-1",
"us-west-2"
],
"findings": {
"failed": 1,
"last_checked_at": null
@@ -338,7 +395,10 @@
"provider": "aws",
"provider_id": "020",
"alias": "AWS IAM",
"regions": ["eu-central-1", "eu-central-2"],
"regions": [
"eu-central-1",
"eu-central-2"
],
"findings": {
"failed": 0,
"last_checked_at": null
@@ -355,7 +415,10 @@
"provider": "aws",
"provider_id": "021",
"alias": "AWS Lambda",
"regions": ["us-east-1", "us-east-2"],
"regions": [
"us-east-1",
"us-east-2"
],
"findings": {
"failed": 4,
"last_checked_at": null
@@ -372,7 +435,10 @@
"provider": "aws",
"provider_id": "022",
"alias": "AWS Network Firewall",
"regions": ["us-west-1", "us-west-2"],
"regions": [
"us-west-1",
"us-west-2"
],
"findings": {
"failed": 2,
"last_checked_at": null
@@ -389,7 +455,10 @@
"provider": "aws",
"provider_id": "023",
"alias": "AWS Organizations",
"regions": ["eu-west-1", "eu-west-3"],
"regions": [
"eu-west-1",
"eu-west-3"
],
"findings": {
"failed": 0,
"last_checked_at": null
@@ -406,7 +475,10 @@
"provider": "aws",
"provider_id": "024",
"alias": "AWS Resource Explorer",
"regions": ["us-east-1", "us-east-2"],
"regions": [
"us-east-1",
"us-east-2"
],
"findings": {
"failed": 3,
"last_checked_at": null
@@ -423,7 +495,10 @@
"provider": "aws",
"provider_id": "025",
"alias": "AWS Security Hub",
"regions": ["us-west-1", "us-west-2"],
"regions": [
"us-west-1",
"us-west-2"
],
"findings": {
"failed": 5,
"last_checked_at": null
@@ -440,7 +515,10 @@
"provider": "aws",
"provider_id": "026",
"alias": "AWS Systems Manager",
"regions": ["eu-west-1", "eu-west-3"],
"regions": [
"eu-west-1",
"eu-west-3"
],
"findings": {
"failed": 0,
"last_checked_at": null
@@ -457,7 +535,10 @@
"provider": "aws",
"provider_id": "027",
"alias": "AWS Trusted Advisor",
"regions": ["us-east-1", "us-east-2"],
"regions": [
"us-east-1",
"us-east-2"
],
"findings": {
"failed": 2,
"last_checked_at": null
@@ -474,7 +555,10 @@
"provider": "aws",
"provider_id": "028",
"alias": "IAM Access Analyzer",
"regions": ["eu-west-1", "eu-west-3"],
"regions": [
"eu-west-1",
"eu-west-3"
],
"findings": {
"failed": 0,
"last_checked_at": null

View File

@@ -1 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 394 80"><path fill="#000" d="M262 0h68.5v12.7h-27.2v66.6h-13.6V12.7H262V0ZM149 0v12.7H94v20.4h44.3v12.6H94v21h55v12.6H80.5V0h68.7zm34.3 0h-17.8l63.8 79.4h17.9l-32-39.7 32-39.6h-17.9l-23 28.6-23-28.6zm18.3 56.7-9-11-27.1 33.7h17.8l18.3-22.7z"/><path fill="#000" d="M81 79.3 17 0H0v79.3h13.6V17l50.2 62.3H81Zm252.6-.4c-1 0-1.8-.4-2.5-1s-1.1-1.6-1.1-2.6.3-1.8 1-2.5 1.6-1 2.6-1 1.8.3 2.5 1a3.4 3.4 0 0 1 .6 4.3 3.7 3.7 0 0 1-3 1.8zm23.2-33.5h6v23.3c0 2.1-.4 4-1.3 5.5a9.1 9.1 0 0 1-3.8 3.5c-1.6.8-3.5 1.3-5.7 1.3-2 0-3.7-.4-5.3-1s-2.8-1.8-3.7-3.2c-.9-1.3-1.4-3-1.4-5h6c.1.8.3 1.6.7 2.2s1 1.2 1.6 1.5c.7.4 1.5.5 2.4.5 1 0 1.8-.2 2.4-.6a4 4 0 0 0 1.6-1.8c.3-.8.5-1.8.5-3V45.5zm30.9 9.1a4.4 4.4 0 0 0-2-3.3 7.5 7.5 0 0 0-4.3-1.1c-1.3 0-2.4.2-3.3.5-.9.4-1.6 1-2 1.6a3.5 3.5 0 0 0-.3 4c.3.5.7.9 1.3 1.2l1.8 1 2 .5 3.2.8c1.3.3 2.5.7 3.7 1.2a13 13 0 0 1 3.2 1.8 8.1 8.1 0 0 1 3 6.5c0 2-.5 3.7-1.5 5.1a10 10 0 0 1-4.4 3.5c-1.8.8-4.1 1.2-6.8 1.2-2.6 0-4.9-.4-6.8-1.2-2-.8-3.4-2-4.5-3.5a10 10 0 0 1-1.7-5.6h6a5 5 0 0 0 3.5 4.6c1 .4 2.2.6 3.4.6 1.3 0 2.5-.2 3.5-.6 1-.4 1.8-1 2.4-1.7a4 4 0 0 0 .8-2.4c0-.9-.2-1.6-.7-2.2a11 11 0 0 0-2.1-1.4l-3.2-1-3.8-1c-2.8-.7-5-1.7-6.6-3.2a7.2 7.2 0 0 1-2.4-5.7 8 8 0 0 1 1.7-5 10 10 0 0 1 4.3-3.5c2-.8 4-1.2 6.4-1.2 2.3 0 4.4.4 6.2 1.2 1.8.8 3.2 2 4.3 3.4 1 1.4 1.5 3 1.5 5h-5.8z"/></svg>
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 394 80"><path fill="#000" d="M262 0h68.5v12.7h-27.2v66.6h-13.6V12.7H262V0ZM149 0v12.7H94v20.4h44.3v12.6H94v21h55v12.6H80.5V0h68.7zm34.3 0h-17.8l63.8 79.4h17.9l-32-39.7 32-39.6h-17.9l-23 28.6-23-28.6zm18.3 56.7-9-11-27.1 33.7h17.8l18.3-22.7z"/><path fill="#000" d="M81 79.3 17 0H0v79.3h13.6V17l50.2 62.3H81Zm252.6-.4c-1 0-1.8-.4-2.5-1s-1.1-1.6-1.1-2.6.3-1.8 1-2.5 1.6-1 2.6-1 1.8.3 2.5 1a3.4 3.4 0 0 1 .6 4.3 3.7 3.7 0 0 1-3 1.8zm23.2-33.5h6v23.3c0 2.1-.4 4-1.3 5.5a9.1 9.1 0 0 1-3.8 3.5c-1.6.8-3.5 1.3-5.7 1.3-2 0-3.7-.4-5.3-1s-2.8-1.8-3.7-3.2c-.9-1.3-1.4-3-1.4-5h6c.1.8.3 1.6.7 2.2s1 1.2 1.6 1.5c.7.4 1.5.5 2.4.5 1 0 1.8-.2 2.4-.6a4 4 0 0 0 1.6-1.8c.3-.8.5-1.8.5-3V45.5zm30.9 9.1a4.4 4.4 0 0 0-2-3.3 7.5 7.5 0 0 0-4.3-1.1c-1.3 0-2.4.2-3.3.5-.9.4-1.6 1-2 1.6a3.5 3.5 0 0 0-.3 4c.3.5.7.9 1.3 1.2l1.8 1 2 .5 3.2.8c1.3.3 2.5.7 3.7 1.2a13 13 0 0 1 3.2 1.8 8.1 8.1 0 0 1 3 6.5c0 2-.5 3.7-1.5 5.1a10 10 0 0 1-4.4 3.5c-1.8.8-4.1 1.2-6.8 1.2-2.6 0-4.9-.4-6.8-1.2-2-.8-3.4-2-4.5-3.5a10 10 0 0 1-1.7-5.6h6a5 5 0 0 0 3.5 4.6c1 .4 2.2.6 3.4.6 1.3 0 2.5-.2 3.5-.6 1-.4 1.8-1 2.4-1.7a4 4 0 0 0 .8-2.4c0-.9-.2-1.6-.7-2.2a11 11 0 0 0-2.1-1.4l-3.2-1-3.8-1c-2.8-.7-5-1.7-6.6-3.2a7.2 7.2 0 0 1-2.4-5.7 8 8 0 0 1 1.7-5 10 10 0 0 1 4.3-3.5c2-.8 4-1.2 6.4-1.2 2.3 0 4.4.4 6.2 1.2 1.8.8 3.2 2 4.3 3.4 1 1.4 1.5 3 1.5 5h-5.8z"/></svg>

Image file: 1.3 KiB before and after.

View File

@@ -1 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 283 64"><path fill="black" d="M141 16c-11 0-19 7-19 18s9 18 20 18c7 0 13-3 16-7l-7-5c-2 3-6 4-9 4-5 0-9-3-10-7h28v-3c0-11-8-18-19-18zm-9 15c1-4 4-7 9-7s8 3 9 7h-18zm117-15c-11 0-19 7-19 18s9 18 20 18c6 0 12-3 16-7l-8-5c-2 3-5 4-8 4-5 0-9-3-11-7h28l1-3c0-11-8-18-19-18zm-10 15c2-4 5-7 10-7s8 3 9 7h-19zm-39 3c0 6 4 10 10 10 4 0 7-2 9-5l8 5c-3 5-9 8-17 8-11 0-19-7-19-18s8-18 19-18c8 0 14 3 17 8l-8 5c-2-3-5-5-9-5-6 0-10 4-10 10zm83-29v46h-9V5h9zM37 0l37 64H0L37 0zm92 5-27 48L74 5h10l18 30 17-30h10zm59 12v10l-3-1c-6 0-10 4-10 10v15h-9V17h9v9c0-5 6-9 13-9z"/></svg>
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 283 64"><path fill="black" d="M141 16c-11 0-19 7-19 18s9 18 20 18c7 0 13-3 16-7l-7-5c-2 3-6 4-9 4-5 0-9-3-10-7h28v-3c0-11-8-18-19-18zm-9 15c1-4 4-7 9-7s8 3 9 7h-18zm117-15c-11 0-19 7-19 18s9 18 20 18c6 0 12-3 16-7l-8-5c-2 3-5 4-8 4-5 0-9-3-11-7h28l1-3c0-11-8-18-19-18zm-10 15c2-4 5-7 10-7s8 3 9 7h-19zm-39 3c0 6 4 10 10 10 4 0 7-2 9-5l8 5c-3 5-9 8-17 8-11 0-19-7-19-18s8-18 19-18c8 0 14 3 17 8l-8 5c-2-3-5-5-9-5-6 0-10 4-10 10zm83-29v46h-9V5h9zM37 0l37 64H0L37 0zm92 5-27 48L74 5h10l18 30 17-30h10zm59 12v10l-3-1c-6 0-10 4-10 10v15h-9V17h9v9c0-5 6-9 13-9z"/></svg>

Image file: 629 B before, 630 B after.

View File

@@ -6,13 +6,19 @@
"incremental": true,
"isolatedModules": true,
"jsx": "preserve",
"lib": ["dom", "dom.iterable", "esnext"],
"lib": [
"dom",
"dom.iterable",
"esnext"
],
"module": "esnext",
"moduleResolution": "node",
"noEmit": true,
"baseUrl": ".",
"paths": {
"@/*": ["./*"]
"@/*": [
"./*"
]
},
"plugins": [
{
@@ -24,7 +30,9 @@
"strict": true,
"target": "es5"
},
"exclude": ["node_modules"],
"exclude": [
"node_modules"
],
"include": [
"next-env.d.ts",
"**/*.ts",