feat(integrations): Add new endpoints to configure integrations (#7167)

Author: Víctor Fernández Poyatos
Date: 2025-03-12 15:12:55 +01:00 (committed by GitHub)
Parent: bdb877009f
Commit: 6f027e3c57
15 changed files with 2236 additions and 11 deletions

View File

@@ -6,6 +6,10 @@ All notable changes to the **Prowler API** are documented in this file.
## [v1.6.0] (Prowler UNRELEASED)
### Added
- Support for developing new integrations [(#7167)](https://github.com/prowler-cloud/prowler/pull/7167).
---
## [v1.5.0] (Prowler v5.4.0)
@@ -26,6 +30,6 @@ All notable changes to the **Prowler API** are documented in this file.
- Daily scheduled scan instances are now created beforehand with `SCHEDULED` state [(#6700)](https://github.com/prowler-cloud/prowler/pull/6700).
- Findings endpoints now require at least one date filter [(#6800)](https://github.com/prowler-cloud/prowler/pull/6800).
- Findings metadata endpoint received a performance improvement [(#6863)](https://github.com/prowler-cloud/prowler/pull/6863).
-- Increase the allowed length of the provider UID for Kubernetes providers [(#6869)](https://github.com/prowler-cloud/prowler/pull/6869).
+- Increased the allowed length of the provider UID for Kubernetes providers [(#6869)](https://github.com/prowler-cloud/prowler/pull/6869).
---

View File

@@ -318,3 +318,15 @@ class InvitationStateEnum(EnumType):
class InvitationStateEnumField(PostgresEnumField):
def __init__(self, *args, **kwargs):
super().__init__("invitation_state", *args, **kwargs)
# Postgres enum definition for Integration type
class IntegrationTypeEnum(EnumType):
enum_type_name = "integration_type"
class IntegrationTypeEnumField(PostgresEnumField):
def __init__(self, *args, **kwargs):
super().__init__("integration_type", *args, **kwargs)
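For context on the pattern, a PostgresEnumField-style field presumably works by overriding db_type so the column uses the named Postgres enum instead of varchar. A minimal sketch under that assumption (the real base classes live in api/db_utils.py, and their exact signatures are not shown in this diff):

    from django.db import models

    class SketchEnumField(models.CharField):
        # Hypothetical stand-in for PostgresEnumField: the first positional
        # argument names the Postgres enum type backing the column.
        def __init__(self, enum_type_name, *args, **kwargs):
            self.enum_type_name = enum_type_name
            kwargs.setdefault("max_length", 50)
            super().__init__(*args, **kwargs)

        def db_type(self, connection):
            # Emit the enum type name as the column type in generated DDL.
            return self.enum_type_name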

View File

@@ -24,6 +24,7 @@ from api.db_utils import (
from api.models import (
ComplianceOverview,
Finding,
Integration,
Invitation,
Membership,
PermissionChoices,
@@ -648,3 +649,19 @@ class ServiceOverviewFilter(ScanSummaryFilter):
}
)
return super().is_valid()
class IntegrationFilter(FilterSet):
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
integration_type = ChoiceFilter(choices=Integration.IntegrationChoices.choices)
integration_type__in = ChoiceInFilter(
choices=Integration.IntegrationChoices.choices,
field_name="integration_type",
lookup_expr="in",
)
class Meta:
model = Integration
fields = {
"inserted_at": ["date", "gte", "lte"],
}
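For reference, the filter set above maps JSON:API query parameters onto ORM lookups, roughly as follows (illustrative sketch):

    from api.models import Integration

    # filter[integration_type]=amazon_s3
    Integration.objects.filter(integration_type="amazon_s3")

    # filter[integration_type__in]=amazon_s3,slack
    Integration.objects.filter(integration_type__in=["amazon_s3", "slack"])

    # filter[inserted_at.gte]=2024-01-01 (from the Meta.fields declaration)
    Integration.objects.filter(inserted_at__gte="2024-01-01")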

View File

@@ -0,0 +1,35 @@
# Generated by Django 5.1.5 on 2025-03-03 15:46
from functools import partial
from django.db import migrations
from api.db_utils import IntegrationTypeEnum, PostgresEnumMigration, register_enum
from api.models import Integration
IntegrationTypeEnumMigration = PostgresEnumMigration(
enum_name="integration_type",
enum_values=tuple(
integration_type[0]
for integration_type in Integration.IntegrationChoices.choices
),
)
class Migration(migrations.Migration):
atomic = False
dependencies = [
("api", "0012_scan_report_output"),
]
operations = [
migrations.RunPython(
IntegrationTypeEnumMigration.create_enum_type,
reverse_code=IntegrationTypeEnumMigration.drop_enum_type,
),
migrations.RunPython(
partial(register_enum, enum_class=IntegrationTypeEnum),
reverse_code=migrations.RunPython.noop,
),
]
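PostgresEnumMigration presumably wraps a CREATE TYPE / DROP TYPE pair; a roughly equivalent hand-written migration using plain RunSQL would be (sketch, not the PR's code):

    from django.db import migrations

    CREATE_ENUM_SQL = (
        "CREATE TYPE integration_type AS ENUM "
        "('amazon_s3', 'saml', 'aws_security_hub', 'jira', 'slack');"
    )

    class Migration(migrations.Migration):
        dependencies = [("api", "0012_scan_report_output")]
        operations = [
            migrations.RunSQL(CREATE_ENUM_SQL, reverse_sql="DROP TYPE integration_type;"),
        ]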

View File

@@ -0,0 +1,127 @@
# Generated by Django 5.1.5 on 2025-03-03 15:46
import uuid
import django.db.models.deletion
from django.db import migrations, models
import api.db_utils
import api.rls
from api.rls import RowLevelSecurityConstraint
class Migration(migrations.Migration):
dependencies = [
("api", "0013_integrations_enum"),
]
operations = [
migrations.CreateModel(
name="Integration",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("inserted_at", models.DateTimeField(auto_now_add=True)),
("updated_at", models.DateTimeField(auto_now=True)),
("enabled", models.BooleanField(default=False)),
("connected", models.BooleanField(blank=True, null=True)),
(
"connection_last_checked_at",
models.DateTimeField(blank=True, null=True),
),
(
"integration_type",
api.db_utils.IntegrationTypeEnumField(
choices=[
("amazon_s3", "Amazon S3"),
("saml", "SAML"),
("aws_security_hub", "AWS Security Hub"),
("jira", "JIRA"),
("slack", "Slack"),
]
),
),
("configuration", models.JSONField(default=dict)),
("_credentials", models.BinaryField(db_column="credentials")),
(
"providers",
models.ManyToManyField(
related_name="integrations", to="api.provider", blank=True
),
),
(
"tenant",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
),
),
],
options={"db_table": "integrations", "abstract": False},
),
migrations.AddConstraint(
model_name="integration",
constraint=RowLevelSecurityConstraint(
"tenant_id",
name="rls_on_integration",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
),
migrations.CreateModel(
name="IntegrationProviderRelationship",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("inserted_at", models.DateTimeField(auto_now_add=True)),
(
"integration",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="api.integration",
),
),
(
"provider",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.provider"
),
),
(
"tenant",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
),
),
],
options={
"db_table": "integration_provider_mappings",
"constraints": [
models.UniqueConstraint(
fields=("integration_id", "provider_id"),
name="unique_integration_provider_rel",
),
],
},
),
migrations.AddConstraint(
model_name="IntegrationProviderRelationship",
constraint=RowLevelSecurityConstraint(
"tenant_id",
name="rls_on_integrationproviderrelationship",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
),
]
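Net effect: an integrations table plus an explicit through table for the provider relationship, both protected by row-level security. Once applied, the many-to-many resolves through integration_provider_mappings; a usage sketch, assuming existing tenant and provider rows and an active tenant context:

    from api.models import Integration, IntegrationProviderRelationship

    integration = Integration.objects.create(
        tenant_id=tenant.id,  # `tenant` and `provider` are assumed to exist
        integration_type="amazon_s3",
        configuration={"bucket_name": "b", "output_directory": "o"},
        credentials={"role_arn": "arn:aws"},
    )
    IntegrationProviderRelationship.objects.create(
        tenant_id=tenant.id, integration=integration, provider=provider
    )
    assert integration.providers.count() == 1  # joins via integration_provider_mappings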

View File

@@ -21,6 +21,7 @@ from uuid6 import uuid7
from api.db_utils import (
CustomUserManager,
FindingDeltaEnumField,
IntegrationTypeEnumField,
InvitationStateEnumField,
MemberRoleEnumField,
ProviderEnumField,
@@ -1138,3 +1139,80 @@ class ScanSummary(RowLevelSecurityProtectedModel):
class JSONAPIMeta:
resource_name = "scan-summaries"
class Integration(RowLevelSecurityProtectedModel):
class IntegrationChoices(models.TextChoices):
S3 = "amazon_s3", _("Amazon S3")
SAML = "saml", _("SAML")
AWS_SECURITY_HUB = "aws_security_hub", _("AWS Security Hub")
JIRA = "jira", _("JIRA")
SLACK = "slack", _("Slack")
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
updated_at = models.DateTimeField(auto_now=True, editable=False)
enabled = models.BooleanField(default=False)
connected = models.BooleanField(null=True, blank=True)
connection_last_checked_at = models.DateTimeField(null=True, blank=True)
integration_type = IntegrationTypeEnumField(choices=IntegrationChoices.choices)
configuration = models.JSONField(default=dict)
_credentials = models.BinaryField(db_column="credentials")
providers = models.ManyToManyField(
Provider,
related_name="integrations",
through="IntegrationProviderRelationship",
blank=True,
)
class Meta(RowLevelSecurityProtectedModel.Meta):
db_table = "integrations"
constraints = [
RowLevelSecurityConstraint(
field="tenant_id",
name="rls_on_%(class)s",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
]
class JSONAPIMeta:
resource_name = "integrations"
@property
def credentials(self):
if isinstance(self._credentials, memoryview):
encrypted_bytes = self._credentials.tobytes()
elif isinstance(self._credentials, str):
encrypted_bytes = self._credentials.encode()
else:
encrypted_bytes = self._credentials
decrypted_data = fernet.decrypt(encrypted_bytes)
return json.loads(decrypted_data.decode())
@credentials.setter
def credentials(self, value):
encrypted_data = fernet.encrypt(json.dumps(value).encode())
self._credentials = encrypted_data
class IntegrationProviderRelationship(RowLevelSecurityProtectedModel):
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
integration = models.ForeignKey(Integration, on_delete=models.CASCADE)
provider = models.ForeignKey(Provider, on_delete=models.CASCADE)
inserted_at = models.DateTimeField(auto_now_add=True)
class Meta:
db_table = "integration_provider_mappings"
constraints = [
models.UniqueConstraint(
fields=["integration_id", "provider_id"],
name="unique_integration_provider_rel",
),
RowLevelSecurityConstraint(
field="tenant_id",
name="rls_on_%(class)s",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
]
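The credentials property keeps only Fernet-encrypted bytes in the credentials column. A standalone sketch of that round-trip with the cryptography package (the API's actual key management is elided):

    import json

    from cryptography.fernet import Fernet

    fernet = Fernet(Fernet.generate_key())  # the API loads its key from settings

    # Setter path: JSON-encode the dict, then encrypt to bytes for storage.
    stored = fernet.encrypt(json.dumps({"role_arn": "arn:aws"}).encode())

    # Getter path: decrypt the stored bytes, then parse the JSON back.
    assert json.loads(fernet.decrypt(stored).decode()) == {"role_arn": "arn:aws"}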

View File

@@ -58,11 +58,11 @@ class RowLevelSecurityConstraint(models.BaseConstraint):
drop_sql_query = """
ALTER TABLE %(table_name)s NO FORCE ROW LEVEL SECURITY;
ALTER TABLE %(table_name)s DISABLE ROW LEVEL SECURITY;
-REVOKE ALL ON TABLE %(table_name) TO %(db_user)s;
+REVOKE ALL ON TABLE %(table_name)s FROM %(db_user)s;
"""
drop_policy_sql_query = """
-DROP POLICY IF EXISTS %(db_user)s_%(table_name)s_{statement} on %(table_name)s;
+DROP POLICY IF EXISTS %(db_user)s_%(raw_table_name)s_{statement} ON %(table_name)s;
"""
def __init__(
@@ -104,16 +104,20 @@ class RowLevelSecurityConstraint(models.BaseConstraint):
def remove_sql(self, model: Any, schema_editor: Any) -> Any:
field_column = schema_editor.quote_name(self.target_field)
+raw_table_name = model._meta.db_table
+table_name = raw_table_name
+if self.partition_name:
+raw_table_name = f"{raw_table_name}_{self.partition_name}"
+table_name = raw_table_name
full_drop_sql_query = (
f"{self.drop_sql_query}"
-f"{''.join([self.drop_policy_sql_query.format(statement) for statement in self.statements])}"
+f"{''.join([self.drop_policy_sql_query.format(statement=statement) for statement in self.statements])}"
)
-table_name = model._meta.db_table
-if self.partition_name:
-table_name = f"{table_name}_{self.partition_name}"
return Statement(
full_drop_sql_query,
table_name=Table(table_name, schema_editor.quote_name),
+raw_table_name=raw_table_name,
field_column=field_column,
db_user=DB_USER,
partition_name=self.partition_name,
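With the corrected templates, the generated statement for the integrations table expands to roughly the following SQL (the prowler_user role name is an assumption):

    ALTER TABLE "integrations" NO FORCE ROW LEVEL SECURITY;
    ALTER TABLE "integrations" DISABLE ROW LEVEL SECURITY;
    REVOKE ALL ON TABLE "integrations" FROM prowler_user;
    DROP POLICY IF EXISTS prowler_user_integrations_SELECT ON "integrations";
    DROP POLICY IF EXISTS prowler_user_integrations_INSERT ON "integrations";
    DROP POLICY IF EXISTS prowler_user_integrations_UPDATE ON "integrations";
    DROP POLICY IF EXISTS prowler_user_integrations_DELETE ON "integrations";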

File diff suppressed because it is too large.

View File

@@ -1,7 +1,19 @@
+from unittest.mock import ANY, Mock, patch
import pytest
from django.urls import reverse
from rest_framework import status
-from unittest.mock import patch, ANY, Mock
from api.models import (
Membership,
ProviderGroup,
ProviderGroupMembership,
Role,
RoleProviderGroupRelationship,
User,
UserRoleRelationship,
)
from api.v1.serializers import TokenSerializer
@pytest.mark.django_db
@@ -304,3 +316,96 @@ class TestProviderViewSet:
reverse("provider-connection", kwargs={"pk": provider.id})
)
assert response.status_code == status.HTTP_403_FORBIDDEN
@pytest.mark.django_db
class TestLimitedVisibility:
TEST_EMAIL = "rbac@rbac.com"
TEST_PASSWORD = "thisisapassword123"
@pytest.fixture
def limited_admin_user(
self, django_db_setup, django_db_blocker, tenants_fixture, providers_fixture
):
with django_db_blocker.unblock():
tenant = tenants_fixture[0]
provider = providers_fixture[0]
user = User.objects.create_user(
name="testing",
email=self.TEST_EMAIL,
password=self.TEST_PASSWORD,
)
Membership.objects.create(
user=user,
tenant=tenant,
role=Membership.RoleChoices.OWNER,
)
role = Role.objects.create(
name="limited_visibility",
tenant=tenant,
manage_users=True,
manage_account=True,
manage_billing=True,
manage_providers=True,
manage_integrations=True,
manage_scans=True,
unlimited_visibility=False,
)
UserRoleRelationship.objects.create(
user=user,
role=role,
tenant=tenant,
)
provider_group = ProviderGroup.objects.create(
name="limited_visibility_group",
tenant=tenant,
)
ProviderGroupMembership.objects.create(
tenant=tenant,
provider=provider,
provider_group=provider_group,
)
RoleProviderGroupRelationship.objects.create(
tenant=tenant, role=role, provider_group=provider_group
)
return user
@pytest.fixture
def authenticated_client_rbac_limited(
self, limited_admin_user, tenants_fixture, client
):
client.user = limited_admin_user
tenant_id = tenants_fixture[0].id
serializer = TokenSerializer(
data={
"type": "tokens",
"email": self.TEST_EMAIL,
"password": self.TEST_PASSWORD,
"tenant_id": tenant_id,
}
)
serializer.is_valid(raise_exception=True)
access_token = serializer.validated_data["access"]
client.defaults["HTTP_AUTHORIZATION"] = f"Bearer {access_token}"
return client
def test_integrations(
self, authenticated_client_rbac_limited, integrations_fixture, providers_fixture
):
# Integration 2 is related to provider 1 and provider 2,
# but this user cannot see provider 2
integration = integrations_fixture[1]
response = authenticated_client_rbac_limited.get(
reverse("integration-detail", kwargs={"pk": integration.id})
)
assert response.status_code == status.HTTP_200_OK
assert integration.providers.count() == 2
assert (
response.json()["data"]["relationships"]["providers"]["meta"]["count"] == 1
)
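The assertion above is exercising a JSON:API payload shaped roughly like this, with the second provider filtered out for the limited-visibility user (illustrative ids):

    expected_shape = {
        "data": {
            "type": "integrations",
            "id": "<integration-2-id>",
            "relationships": {
                "providers": {
                    # The integration has two providers, but only one is visible
                    # to this role, so the relationship meta reports count == 1.
                    "meta": {"count": 1},
                    "data": [{"type": "providers", "id": "<provider-1-id>"}],
                }
            },
        }
    }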

View File

@@ -14,6 +14,7 @@ from django.urls import reverse
from rest_framework import status
from api.models import (
Integration,
Invitation,
Membership,
Provider,
@@ -4571,3 +4572,415 @@ class TestScheduleViewSet:
reverse("schedule-daily"), data=json_payload, format="json"
)
assert response.status_code == status.HTTP_404_NOT_FOUND
@pytest.mark.django_db
class TestIntegrationViewSet:
def test_integrations_list(self, authenticated_client, integrations_fixture):
response = authenticated_client.get(reverse("integration-list"))
assert response.status_code == status.HTTP_200_OK
assert len(response.json()["data"]) == len(integrations_fixture)
def test_integrations_retrieve(self, authenticated_client, integrations_fixture):
integration1, *_ = integrations_fixture
response = authenticated_client.get(
reverse("integration-detail", kwargs={"pk": integration1.id}),
)
assert response.status_code == status.HTTP_200_OK
assert response.json()["data"]["id"] == str(integration1.id)
assert (
response.json()["data"]["attributes"]["configuration"]
== integration1.configuration
)
def test_integrations_invalid_retrieve(self, authenticated_client):
response = authenticated_client.get(
reverse(
"integration-detail",
kwargs={"pk": "f498b103-c760-4785-9a3e-e23fafbb7b02"},
)
)
assert response.status_code == status.HTTP_404_NOT_FOUND
@pytest.mark.parametrize(
"include_values, expected_resources",
[
("providers", ["providers"]),
],
)
def test_integrations_list_include(
self,
include_values,
expected_resources,
authenticated_client,
integrations_fixture,
):
response = authenticated_client.get(
reverse("integration-list"), {"include": include_values}
)
assert response.status_code == status.HTTP_200_OK
assert len(response.json()["data"]) == len(integrations_fixture)
assert "included" in response.json()
included_data = response.json()["included"]
for expected_type in expected_resources:
assert any(
d.get("type") == expected_type for d in included_data
), f"Expected type '{expected_type}' not found in included data"
@pytest.mark.parametrize(
"integration_type, configuration, credentials",
[
# Amazon S3 - AWS credentials
(
Integration.IntegrationChoices.S3,
{
"bucket_name": "bucket-name",
"output_directory": "output-directory",
},
{
"role_arn": "arn:aws",
"external_id": "external-id",
},
),
# Amazon S3 - No credentials (AWS self-hosted)
(
Integration.IntegrationChoices.S3,
{
"bucket_name": "bucket-name",
"output_directory": "output-directory",
},
{},
),
],
)
def test_integrations_create_valid(
self,
authenticated_client,
providers_fixture,
integration_type,
configuration,
credentials,
):
provider = Provider.objects.first()
data = {
"data": {
"type": "integrations",
"attributes": {
"integration_type": integration_type,
"configuration": configuration,
"credentials": credentials,
},
"relationships": {
"providers": {
"data": [{"type": "providers", "id": str(provider.id)}]
}
},
}
}
response = authenticated_client.post(
reverse("integration-list"),
data=json.dumps(data),
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_201_CREATED
assert Integration.objects.count() == 1
integration = Integration.objects.first()
assert integration.configuration == data["data"]["attributes"]["configuration"]
assert (
integration.integration_type
== data["data"]["attributes"]["integration_type"]
)
assert "credentials" not in response.json()["data"]["attributes"]
assert (
str(provider.id)
== data["data"]["relationships"]["providers"]["data"][0]["id"]
)
def test_integrations_create_valid_relationships(
self,
authenticated_client,
providers_fixture,
):
provider1, provider2, *_ = providers_fixture
data = {
"data": {
"type": "integrations",
"attributes": {
"integration_type": Integration.IntegrationChoices.S3,
"configuration": {
"bucket_name": "bucket-name",
"output_directory": "output-directory",
},
"credentials": {
"role_arn": "arn:aws",
"external_id": "external-id",
},
},
"relationships": {
"providers": {
"data": [
{"type": "providers", "id": str(provider1.id)},
{"type": "providers", "id": str(provider2.id)},
]
}
},
}
}
response = authenticated_client.post(
reverse("integration-list"),
data=json.dumps(data),
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_201_CREATED
assert Integration.objects.first().providers.count() == 2
@pytest.mark.parametrize(
"attributes, error_code, error_pointer",
(
[
(
{
"integration_type": "whatever",
"configuration": {
"bucket_name": "bucket-name",
"output_directory": "output-directory",
},
"credentials": {
"role_arn": "arn:aws",
"external_id": "external-id",
},
},
"invalid_choice",
"integration_type",
),
(
{
"integration_type": "amazon_s3",
"configuration": {},
"credentials": {
"role_arn": "arn:aws",
"external_id": "external-id",
},
},
"required",
"bucket_name",
),
(
{
"integration_type": "amazon_s3",
"configuration": {
"bucket_name": "bucket_name",
"output_directory": "output_directory",
"invalid_key": "invalid_value",
},
"credentials": {
"role_arn": "arn:aws",
"external_id": "external-id",
},
},
"invalid",
None,
),
(
{
"integration_type": "amazon_s3",
"configuration": {
"bucket_name": "bucket_name",
"output_directory": "output_directory",
},
"credentials": {"invalid_key": "invalid_key"},
},
"invalid",
None,
),
]
),
)
def test_integrations_invalid_create(
self,
authenticated_client,
attributes,
error_code,
error_pointer,
):
data = {
"data": {
"type": "integrations",
"attributes": attributes,
}
}
response = authenticated_client.post(
reverse("integration-list"),
data=json.dumps(data),
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
assert response.json()["errors"][0]["code"] == error_code
assert (
response.json()["errors"][0]["source"]["pointer"]
== f"/data/attributes/{error_pointer}"
if error_pointer
else "/data"
)
def test_integrations_partial_update(
self, authenticated_client, integrations_fixture
):
integration, *_ = integrations_fixture
data = {
"data": {
"type": "integrations",
"id": str(integration.id),
"attributes": {
"credentials": {
"aws_access_key_id": "new_value",
},
# integration_type is `amazon_s3`
"configuration": {
"bucket_name": "new_bucket_name",
"output_directory": "new_output_directory",
},
},
}
}
response = authenticated_client.patch(
reverse("integration-detail", kwargs={"pk": integration.id}),
data=json.dumps(data),
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_200_OK
integration.refresh_from_db()
assert integration.credentials["aws_access_key_id"] == "new_value"
assert integration.configuration["bucket_name"] == "new_bucket_name"
assert integration.configuration["output_directory"] == "new_output_directory"
def test_integrations_partial_update_relationships(
self, authenticated_client, integrations_fixture
):
integration, *_ = integrations_fixture
data = {
"data": {
"type": "integrations",
"id": str(integration.id),
"attributes": {
"credentials": {
"aws_access_key_id": "new_value",
},
# integration_type is `amazon_s3`
"configuration": {
"bucket_name": "new_bucket_name",
"output_directory": "new_output_directory",
},
},
"relationships": {"providers": {"data": []}},
}
}
assert integration.providers.count() > 0
response = authenticated_client.patch(
reverse("integration-detail", kwargs={"pk": integration.id}),
data=json.dumps(data),
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_200_OK
integration.refresh_from_db()
assert integration.providers.count() == 0
def test_integrations_partial_update_invalid_content_type(
self, authenticated_client, integrations_fixture
):
integration, *_ = integrations_fixture
response = authenticated_client.patch(
reverse("integration-detail", kwargs={"pk": integration.id}),
data={},
)
assert response.status_code == status.HTTP_415_UNSUPPORTED_MEDIA_TYPE
def test_integrations_partial_update_invalid_content(
self, authenticated_client, integrations_fixture
):
integration, *_ = integrations_fixture
data = {
"data": {
"type": "integrations",
"id": str(integration.id),
"attributes": {"invalid_config": "value"},
}
}
response = authenticated_client.patch(
reverse("integration-detail", kwargs={"pk": integration.id}),
data=json.dumps(data),
content_type="application/vnd.api+json",
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
def test_integrations_delete(
self,
authenticated_client,
integrations_fixture,
):
integration, *_ = integrations_fixture
response = authenticated_client.delete(
reverse("integration-detail", kwargs={"pk": integration.id})
)
assert response.status_code == status.HTTP_204_NO_CONTENT
def test_integrations_delete_invalid(self, authenticated_client):
response = authenticated_client.delete(
reverse(
"integration-detail",
kwargs={"pk": "e67d0283-440f-48d1-b5f8-38d0763474f4"},
)
)
assert response.status_code == status.HTTP_404_NOT_FOUND
@pytest.mark.parametrize(
"filter_name, filter_value, expected_count",
(
[
("inserted_at", TODAY, 2),
("inserted_at.gte", "2024-01-01", 2),
("inserted_at.lte", "2024-01-01", 0),
("integration_type", Integration.IntegrationChoices.S3, 2),
("integration_type", Integration.IntegrationChoices.SLACK, 0),
(
"integration_type__in",
f"{Integration.IntegrationChoices.S3},{Integration.IntegrationChoices.SLACK}",
2,
),
]
),
)
def test_integrations_filters(
self,
authenticated_client,
integrations_fixture,
filter_name,
filter_value,
expected_count,
):
response = authenticated_client.get(
reverse("integration-list"),
{f"filter[{filter_name}]": filter_value},
)
assert response.status_code == status.HTTP_200_OK
assert len(response.json()["data"]) == expected_count
@pytest.mark.parametrize(
"filter_name",
(
[
"invalid",
]
),
)
def test_integrations_filters_invalid(self, authenticated_client, filter_name):
response = authenticated_client.get(
reverse("integration-list"),
{f"filter[{filter_name}]": "whatever"},
)
assert response.status_code == status.HTTP_400_BAD_REQUEST

View File

@@ -0,0 +1,122 @@
from drf_spectacular.utils import extend_schema_field
from rest_framework_json_api import serializers
from rest_framework_json_api.serializers import ValidationError
class BaseValidateSerializer(serializers.Serializer):
def validate(self, data):
if hasattr(self, "initial_data"):
initial_data = set(self.initial_data.keys()) - {"id", "type"}
unknown_keys = initial_data - set(self.fields.keys())
if unknown_keys:
raise ValidationError(f"Invalid fields: {unknown_keys}")
return data
# Integrations
class S3ConfigSerializer(BaseValidateSerializer):
bucket_name = serializers.CharField()
output_directory = serializers.CharField()
class Meta:
resource_name = "integrations"
class AWSCredentialSerializer(BaseValidateSerializer):
role_arn = serializers.CharField(required=False)
external_id = serializers.CharField(required=False)
role_session_name = serializers.CharField(required=False)
session_duration = serializers.IntegerField(
required=False, min_value=900, max_value=43200
)
aws_access_key_id = serializers.CharField(required=False)
aws_secret_access_key = serializers.CharField(required=False)
aws_session_token = serializers.CharField(required=False)
class Meta:
resource_name = "integrations"
@extend_schema_field(
{
"oneOf": [
{
"type": "object",
"title": "AWS Credentials",
"properties": {
"role_arn": {
"type": "string",
"description": "The Amazon Resource Name (ARN) of the role to assume. Required for AWS role "
"assumption.",
},
"external_id": {
"type": "string",
"description": "An identifier to enhance security for role assumption.",
},
"aws_access_key_id": {
"type": "string",
"description": "The AWS access key ID. Only required if the environment lacks pre-configured "
"AWS credentials.",
},
"aws_secret_access_key": {
"type": "string",
"description": "The AWS secret access key. Required if 'aws_access_key_id' is provided or if "
"no AWS credentials are pre-configured.",
},
"aws_session_token": {
"type": "string",
"description": "The session token for temporary credentials, if applicable.",
},
"session_duration": {
"type": "integer",
"minimum": 900,
"maximum": 43200,
"default": 3600,
"description": "The duration (in seconds) for the role session.",
},
"role_session_name": {
"type": "string",
"description": "An identifier for the role session, useful for tracking sessions in AWS logs. "
"The regex used to validate this parameter is a string of characters consisting of "
"upper- and lower-case alphanumeric characters with no spaces. You can also include "
"underscores or any of the following characters: =,.@-\n\n"
"Examples:\n"
"- MySession123\n"
"- User_Session-1\n"
"- Test.Session@2",
"pattern": "^[a-zA-Z0-9=,.@_-]+$",
},
},
},
]
}
)
class IntegrationCredentialField(serializers.JSONField):
pass
@extend_schema_field(
{
"oneOf": [
{
"type": "object",
"title": "Amazon S3",
"properties": {
"bucket_name": {
"type": "string",
"description": "The name of the S3 bucket where files will be stored.",
},
"output_directory": {
"type": "string",
"description": "The directory path within the bucket where files will be saved.",
},
},
"required": ["bucket_name", "output_directory"],
},
]
}
)
class IntegrationConfigField(serializers.JSONField):
pass
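A quick sketch of the unknown-key guard inherited from BaseValidateSerializer (behavior inferred from the validate() above):

    serializer = S3ConfigSerializer(
        data={"bucket_name": "b", "output_directory": "o", "unexpected": 1}
    )
    assert not serializer.is_valid()  # collects "Invalid fields: {'unexpected'}"

    serializer = S3ConfigSerializer(data={"bucket_name": "b", "output_directory": "o"})
    assert serializer.is_valid()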

View File

@@ -16,6 +16,8 @@ from rest_framework_simplejwt.tokens import RefreshToken
from api.models import (
ComplianceOverview,
Finding,
Integration,
IntegrationProviderRelationship,
Invitation,
InvitationRoleRelationship,
Membership,
@@ -34,6 +36,12 @@ from api.models import (
UserRoleRelationship,
)
from api.rls import Tenant
from api.v1.serializer_utils.integrations import (
AWSCredentialSerializer,
IntegrationConfigField,
IntegrationCredentialField,
S3ConfigSerializer,
)
# Tokens
@@ -1606,8 +1614,8 @@ class RoleSerializer(RLSSerializer, BaseWriteSerializer):
"manage_account",
# Disable for the first release
# "manage_billing",
# "manage_integrations",
# /Disable for the first release
"manage_integrations",
"manage_providers",
"manage_scans",
"permission_state",
@@ -2013,3 +2021,201 @@ class ScheduleDailyCreateSerializer(serializers.Serializer):
if unknown_keys:
raise ValidationError(f"Invalid fields: {unknown_keys}")
return data
# Integrations
class BaseWriteIntegrationSerializer(BaseWriteSerializer):
@staticmethod
def validate_integration_data(
integration_type: str,
providers: list[Provider], # noqa
configuration: dict,
credentials: dict,
):
if integration_type == Integration.IntegrationChoices.S3:
config_serializer = S3ConfigSerializer
credentials_serializers = [AWSCredentialSerializer]
# TODO: This will be required for AWS Security Hub
# if providers and not all(
# provider.provider == Provider.ProviderChoices.AWS
# for provider in providers
# ):
# raise serializers.ValidationError(
# {"providers": "All providers must be AWS for the S3 integration."}
# )
else:
raise serializers.ValidationError(
{
"integration_type": f"Integration type not supported yet: {integration_type}"
}
)
config_serializer(data=configuration).is_valid(raise_exception=True)
for cred_serializer in credentials_serializers:
try:
cred_serializer(data=credentials).is_valid(raise_exception=True)
break
except ValidationError:
continue
else:
raise ValidationError(
{"credentials": "Invalid credentials for the integration type."}
)
class IntegrationSerializer(RLSSerializer):
"""
Serializer for the Integration model.
"""
providers = serializers.ResourceRelatedField(
queryset=Provider.objects.all(), many=True
)
class Meta:
model = Integration
fields = [
"id",
"inserted_at",
"updated_at",
"enabled",
"connected",
"connection_last_checked_at",
"integration_type",
"configuration",
"providers",
"url",
]
included_serializers = {
"providers": "api.v1.serializers.ProviderIncludeSerializer",
}
def to_representation(self, instance):
representation = super().to_representation(instance)
allowed_providers = self.context.get("allowed_providers")
if allowed_providers:
allowed_provider_ids = {str(provider.id) for provider in allowed_providers}
representation["providers"] = [
provider
for provider in representation["providers"]
if provider["id"] in allowed_provider_ids
]
return representation
class IntegrationCreateSerializer(BaseWriteIntegrationSerializer):
credentials = IntegrationCredentialField(write_only=True)
configuration = IntegrationConfigField()
providers = serializers.ResourceRelatedField(
queryset=Provider.objects.all(), many=True, required=False
)
class Meta:
model = Integration
fields = [
"inserted_at",
"updated_at",
"enabled",
"connected",
"connection_last_checked_at",
"integration_type",
"configuration",
"credentials",
"providers",
]
extra_kwargs = {
"inserted_at": {"read_only": True},
"updated_at": {"read_only": True},
"connected": {"read_only": True},
"enabled": {"read_only": True},
"connection_last_checked_at": {"read_only": True},
}
def validate(self, attrs):
integration_type = attrs.get("integration_type")
providers = attrs.get("providers")
configuration = attrs.get("configuration")
credentials = attrs.get("credentials")
validated_attrs = super().validate(attrs)
self.validate_integration_data(
integration_type, providers, configuration, credentials
)
return validated_attrs
def create(self, validated_data):
tenant_id = self.context.get("tenant_id")
providers = validated_data.pop("providers", [])
integration = Integration.objects.create(tenant_id=tenant_id, **validated_data)
through_model_instances = [
IntegrationProviderRelationship(
integration=integration,
provider=provider,
tenant_id=tenant_id,
)
for provider in providers
]
IntegrationProviderRelationship.objects.bulk_create(through_model_instances)
return integration
class IntegrationUpdateSerializer(BaseWriteIntegrationSerializer):
credentials = IntegrationCredentialField(write_only=True, required=False)
configuration = IntegrationConfigField(required=False)
providers = serializers.ResourceRelatedField(
queryset=Provider.objects.all(), many=True, required=False
)
class Meta:
model = Integration
fields = [
"inserted_at",
"updated_at",
"enabled",
"connected",
"connection_last_checked_at",
"integration_type",
"configuration",
"credentials",
"providers",
]
extra_kwargs = {
"inserted_at": {"read_only": True},
"updated_at": {"read_only": True},
"connected": {"read_only": True},
"connection_last_checked_at": {"read_only": True},
"integration_type": {"read_only": True},
}
def validate(self, attrs):
integration_type = self.instance.integration_type
providers = attrs.get("providers")
configuration = attrs.get("configuration") or self.instance.configuration
credentials = attrs.get("credentials") or self.instance.credentials
validated_attrs = super().validate(attrs)
self.validate_integration_data(
integration_type, providers, configuration, credentials
)
return validated_attrs
def update(self, instance, validated_data):
tenant_id = self.context.get("tenant_id")
if validated_data.get("providers") is not None:
instance.providers.clear()
new_relationships = [
IntegrationProviderRelationship(
integration=instance, provider=provider, tenant_id=tenant_id
)
for provider in validated_data["providers"]
]
IntegrationProviderRelationship.objects.bulk_create(new_relationships)
return super().update(instance, validated_data)
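In short, validate_integration_data dispatches on integration_type, validates the configuration with the matching serializer, then tries each candidate credentials serializer until one accepts. A happy-path sketch for S3:

    from api.models import Integration
    from api.v1.serializers import BaseWriteIntegrationSerializer

    BaseWriteIntegrationSerializer.validate_integration_data(
        integration_type=Integration.IntegrationChoices.S3,
        providers=[],
        configuration={"bucket_name": "bucket-name", "output_directory": "out"},
        credentials={"role_arn": "arn:aws", "external_id": "external-id"},
    )
    # Returns None on success; an unsupported type or rejected credentials
    # raises ValidationError instead.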

View File

@@ -10,6 +10,7 @@ from api.v1.views import (
FindingViewSet,
GithubSocialLoginView,
GoogleSocialLoginView,
IntegrationViewSet,
InvitationAcceptViewSet,
InvitationViewSet,
MembershipViewSet,
@@ -47,6 +48,7 @@ router.register(
)
router.register(r"overviews", OverviewViewSet, basename="overview")
router.register(r"schedules", ScheduleViewSet, basename="schedule")
router.register(r"integrations", IntegrationViewSet, basename="integration")
tenants_router = routers.NestedSimpleRouter(router, r"tenants", lookup="tenant")
tenants_router.register(
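The registration above produces the two route names the tests reverse(); assuming the project mounts the router under /api/v1, the resulting endpoints look like this:

    from django.urls import reverse

    reverse("integration-list")                             # e.g. /api/v1/integrations
    reverse("integration-detail", kwargs={"pk": "<uuid>"})  # e.g. /api/v1/integrations/<uuid>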

View File

@@ -57,6 +57,7 @@ from api.db_router import MainRouter
from api.filters import (
ComplianceOverviewFilter,
FindingFilter,
IntegrationFilter,
InvitationFilter,
MembershipFilter,
ProviderFilter,
@@ -74,6 +75,7 @@ from api.filters import (
from api.models import (
ComplianceOverview,
Finding,
Integration,
Invitation,
Membership,
Provider,
@@ -102,6 +104,9 @@ from api.v1.serializers import (
FindingDynamicFilterSerializer,
FindingMetadataSerializer,
FindingSerializer,
IntegrationCreateSerializer,
IntegrationSerializer,
IntegrationUpdateSerializer,
InvitationAcceptSerializer,
InvitationCreateSerializer,
InvitationSerializer,
@@ -239,7 +244,7 @@ class SchemaView(SpectacularAPIView):
def get(self, request, *args, **kwargs):
spectacular_settings.TITLE = "Prowler API"
-spectacular_settings.VERSION = "1.5.0"
+spectacular_settings.VERSION = "1.6.0"
spectacular_settings.DESCRIPTION = (
"Prowler API specification.\n\nThis file is auto-generated."
)
@@ -295,6 +300,11 @@ class SchemaView(SpectacularAPIView):
"description": "Endpoints for task management, allowing retrieval of task status and "
"revoking tasks that have not started.",
},
{
"name": "Integration",
"description": "Endpoints for managing third-party integrations, including registration, configuration,"
" retrieval, and deletion of integrations such as S3, JIRA, or other services.",
},
]
return super().get(request, *args, **kwargs)
@@ -2433,3 +2443,67 @@ class ScheduleViewSet(BaseRLSViewSet):
)
},
)
@extend_schema_view(
list=extend_schema(
tags=["Integration"],
summary="List all integrations",
description="Retrieve a list of all configured integrations with options for filtering by various criteria.",
),
retrieve=extend_schema(
tags=["Integration"],
summary="Retrieve integration details",
description="Fetch detailed information about a specific integration by its ID.",
),
create=extend_schema(
tags=["Integration"],
summary="Create a new integration",
description="Register a new integration with the system, providing necessary configuration details.",
),
partial_update=extend_schema(
tags=["Integration"],
summary="Partially update an integration",
description="Modify certain fields of an existing integration without affecting other settings.",
),
destroy=extend_schema(
tags=["Integration"],
summary="Delete an integration",
description="Remove an integration from the system by its ID.",
),
)
@method_decorator(CACHE_DECORATOR, name="list")
@method_decorator(CACHE_DECORATOR, name="retrieve")
class IntegrationViewSet(BaseRLSViewSet):
queryset = Integration.objects.all()
serializer_class = IntegrationSerializer
http_method_names = ["get", "post", "patch", "delete"]
filterset_class = IntegrationFilter
ordering = ["integration_type", "-inserted_at"]
# RBAC required permissions
required_permissions = [Permissions.MANAGE_INTEGRATIONS]
allowed_providers = None
def get_queryset(self):
user_roles = get_role(self.request.user)
if user_roles.unlimited_visibility:
# User has unlimited visibility, return all integrations
queryset = Integration.objects.filter(tenant_id=self.request.tenant_id)
else:
# User has limited visibility; restrict to integrations attached to providers visible through the role's provider groups
allowed_providers = get_providers(user_roles)
queryset = Integration.objects.filter(providers__in=allowed_providers)
self.allowed_providers = allowed_providers
return queryset
def get_serializer_class(self):
if self.action == "create":
return IntegrationCreateSerializer
elif self.action == "partial_update":
return IntegrationUpdateSerializer
return super().get_serializer_class()
def get_serializer_context(self):
context = super().get_serializer_context()
context["allowed_providers"] = self.allowed_providers
return context
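End to end, the viewset speaks JSON:API with RBAC applied in get_queryset. A minimal smoke call with Django's test client, assuming a valid bearer token and the /api/v1 prefix:

    from django.test import Client

    client = Client()
    response = client.get(
        "/api/v1/integrations",
        {"filter[integration_type]": "amazon_s3"},
        HTTP_AUTHORIZATION="Bearer <access-token>",
    )
    assert response.status_code == 200
    assert response["Content-Type"].startswith("application/vnd.api+json")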

View File

@@ -15,6 +15,8 @@ from api.db_utils import rls_transaction
from api.models import (
ComplianceOverview,
Finding,
Integration,
IntegrationProviderRelationship,
Invitation,
Membership,
Provider,
@@ -877,6 +879,46 @@ def scan_summaries_fixture(tenants_fixture, providers_fixture):
)
@pytest.fixture
def integrations_fixture(providers_fixture):
provider1, provider2, *_ = providers_fixture
tenant_id = provider1.tenant_id
integration1 = Integration.objects.create(
tenant_id=tenant_id,
enabled=True,
connected=True,
integration_type="amazon_s3",
configuration={"key": "value"},
credentials={"psswd": "1234"},
)
IntegrationProviderRelationship.objects.create(
tenant_id=tenant_id,
integration=integration1,
provider=provider1,
)
integration2 = Integration.objects.create(
tenant_id=tenant_id,
enabled=True,
connected=True,
integration_type="amazon_s3",
configuration={"key": "value"},
credentials={"psswd": "1234"},
)
IntegrationProviderRelationship.objects.create(
tenant_id=tenant_id,
integration=integration2,
provider=provider1,
)
IntegrationProviderRelationship.objects.create(
tenant_id=tenant_id,
integration=integration2,
provider=provider2,
)
return integration1, integration2
def get_authorization_header(access_token: str) -> dict:
return {"Authorization": f"Bearer {access_token}"}