mirror of
https://github.com/prowler-cloud/prowler.git
synced 2026-04-01 05:37:14 +00:00
Compare commits
4 Commits
chore/fix-PROWLER-69
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
68e11086e9 | ||
|
|
19e6daeac3 | ||
|
|
598035b381 | ||
|
|
e4640a0497 |
@@ -7703,6 +7703,265 @@ paths:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ProviderCreateResponse'
|
||||
description: ''
|
||||
/api/v1/providers/batch:
|
||||
post:
|
||||
operationId: providers_batch_create
|
||||
description: |
|
||||
Create multiple providers in a single atomic operation.
|
||||
JSON:API compliant with all-or-nothing semantics.
|
||||
Secrets must be added separately via the provider secrets endpoint.
|
||||
summary: Batch create providers
|
||||
tags:
|
||||
- Provider
|
||||
requestBody:
|
||||
content:
|
||||
application/vnd.api+json:
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
data:
|
||||
type: array
|
||||
items:
|
||||
type: object
|
||||
properties:
|
||||
type:
|
||||
type: string
|
||||
example: providers
|
||||
description: Resource type identifier (should be "providers")
|
||||
attributes:
|
||||
type: object
|
||||
properties:
|
||||
provider:
|
||||
type: string
|
||||
enum:
|
||||
- alibabacloud
|
||||
- aws
|
||||
- azure
|
||||
- gcp
|
||||
- github
|
||||
- iac
|
||||
- kubernetes
|
||||
- m365
|
||||
- mongodbatlas
|
||||
- oraclecloud
|
||||
description: Type of cloud provider
|
||||
uid:
|
||||
type: string
|
||||
description: Unique identifier for the provider (e.g., AWS account ID, Azure subscription ID)
|
||||
alias:
|
||||
type: string
|
||||
description: Human-readable name for the provider
|
||||
required:
|
||||
- provider
|
||||
- uid
|
||||
required:
|
||||
- type
|
||||
- attributes
|
||||
maxItems: 100
|
||||
minItems: 1
|
||||
required:
|
||||
- data
|
||||
required: true
|
||||
security:
|
||||
- JWT or API Key: []
|
||||
responses:
|
||||
'201':
|
||||
content:
|
||||
application/vnd.api+json:
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
data:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/Provider'
|
||||
meta:
|
||||
type: object
|
||||
properties:
|
||||
version:
|
||||
type: string
|
||||
example: "v1"
|
||||
examples:
|
||||
success:
|
||||
summary: All providers created successfully
|
||||
value:
|
||||
data:
|
||||
- type: providers
|
||||
id: "550e8400-e29b-41d4-a716-446655440001"
|
||||
attributes:
|
||||
provider: aws
|
||||
uid: "123456789012"
|
||||
alias: "AWS Production"
|
||||
connection:
|
||||
connected: null
|
||||
last_checked_at: null
|
||||
inserted_at: "2025-01-20T10:30:00Z"
|
||||
updated_at: "2025-01-20T10:30:00Z"
|
||||
relationships:
|
||||
secret:
|
||||
data: null
|
||||
provider_groups:
|
||||
meta:
|
||||
count: 0
|
||||
data: []
|
||||
links:
|
||||
self: "https://api.prowler.com/api/v1/providers/550e8400-e29b-41d4-a716-446655440001"
|
||||
meta:
|
||||
version: "v1"
|
||||
description: All providers created successfully
|
||||
'400':
|
||||
content:
|
||||
application/vnd.api+json:
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
errors:
|
||||
type: array
|
||||
items:
|
||||
type: object
|
||||
properties:
|
||||
detail:
|
||||
type: string
|
||||
source:
|
||||
type: object
|
||||
properties:
|
||||
pointer:
|
||||
type: string
|
||||
examples:
|
||||
validation_errors:
|
||||
summary: Validation errors - entire batch rejected (all-or-nothing)
|
||||
value:
|
||||
errors:
|
||||
- detail: "Invalid AWS account ID format. Must be 12 digits."
|
||||
source:
|
||||
pointer: "/data/0/attributes/uid"
|
||||
- detail: "Provider with uid '111111111111' already exists"
|
||||
source:
|
||||
pointer: "/data/1/attributes/uid"
|
||||
description: Validation errors - entire batch rejected (all-or-nothing)
|
||||
patch:
|
||||
operationId: providers_batch_update
|
||||
description: |
|
||||
Update multiple providers in a single atomic operation.
|
||||
JSON:API compliant with all-or-nothing semantics.
|
||||
Only alias can be updated.
|
||||
summary: Batch update providers
|
||||
tags:
|
||||
- Provider
|
||||
requestBody:
|
||||
content:
|
||||
application/vnd.api+json:
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
data:
|
||||
type: array
|
||||
description: Array of provider objects to update (max 100)
|
||||
items:
|
||||
type: object
|
||||
properties:
|
||||
type:
|
||||
type: string
|
||||
example: providers
|
||||
description: Resource type identifier (should be "providers")
|
||||
id:
|
||||
type: string
|
||||
format: uuid
|
||||
description: Provider UUID to update
|
||||
attributes:
|
||||
type: object
|
||||
properties:
|
||||
alias:
|
||||
type: string
|
||||
description: New display name for the provider
|
||||
required:
|
||||
- type
|
||||
- id
|
||||
- attributes
|
||||
maxItems: 100
|
||||
minItems: 1
|
||||
required:
|
||||
- data
|
||||
required: true
|
||||
security:
|
||||
- JWT or API Key: []
|
||||
responses:
|
||||
'200':
|
||||
content:
|
||||
application/vnd.api+json:
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
data:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/Provider'
|
||||
meta:
|
||||
type: object
|
||||
properties:
|
||||
version:
|
||||
type: string
|
||||
example: "v1"
|
||||
examples:
|
||||
success:
|
||||
summary: All providers updated successfully
|
||||
value:
|
||||
data:
|
||||
- type: providers
|
||||
id: "550e8400-e29b-41d4-a716-446655440001"
|
||||
attributes:
|
||||
provider: aws
|
||||
uid: "123456789012"
|
||||
alias: "AWS Production (Updated)"
|
||||
connection:
|
||||
connected: null
|
||||
last_checked_at: null
|
||||
inserted_at: "2025-01-20T10:30:00Z"
|
||||
updated_at: "2025-01-20T11:00:00Z"
|
||||
relationships:
|
||||
secret:
|
||||
data: null
|
||||
provider_groups:
|
||||
meta:
|
||||
count: 0
|
||||
data: []
|
||||
links:
|
||||
self: "https://api.prowler.com/api/v1/providers/550e8400-e29b-41d4-a716-446655440001"
|
||||
meta:
|
||||
version: "v1"
|
||||
description: All providers updated successfully
|
||||
'400':
|
||||
content:
|
||||
application/vnd.api+json:
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
errors:
|
||||
type: array
|
||||
items:
|
||||
type: object
|
||||
properties:
|
||||
detail:
|
||||
type: string
|
||||
description: Error message
|
||||
source:
|
||||
type: object
|
||||
properties:
|
||||
pointer:
|
||||
type: string
|
||||
description: JSON pointer to the error location (e.g., /data/0/id)
|
||||
examples:
|
||||
validation_errors:
|
||||
summary: All items failed validation
|
||||
value:
|
||||
errors:
|
||||
- detail: "Provider '550e8400-e29b-41d4-a716-446655440099' not found"
|
||||
source:
|
||||
pointer: "/data/0/id"
|
||||
- detail: "This field is required."
|
||||
source:
|
||||
pointer: "/data/1/id"
|
||||
description: Validation errors - entire batch rejected (all-or-nothing)
|
||||
/api/v1/providers/{id}:
|
||||
get:
|
||||
operationId: providers_retrieve
|
||||
@@ -7992,6 +8251,324 @@ paths:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ProviderSecretCreateResponse'
|
||||
description: ''
|
||||
/api/v1/providers/secrets/batch:
|
||||
post:
|
||||
operationId: providers_secrets_batch_create
|
||||
description: |
|
||||
Create multiple provider secrets in a single atomic operation.
|
||||
Supports to-many relationships where one secret definition can be associated with multiple providers.
|
||||
JSON:API compliant with all-or-nothing semantics for hard errors.
|
||||
Providers that already have secrets are skipped and reported in meta.skipped.
|
||||
summary: Batch create provider secrets
|
||||
tags:
|
||||
- Provider
|
||||
requestBody:
|
||||
content:
|
||||
application/vnd.api+json:
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
data:
|
||||
type: array
|
||||
description: Array of provider-secret objects (max 100)
|
||||
items:
|
||||
type: object
|
||||
properties:
|
||||
type:
|
||||
type: string
|
||||
example: provider-secrets
|
||||
description: Resource type identifier (should be "provider-secrets")
|
||||
attributes:
|
||||
type: object
|
||||
properties:
|
||||
name:
|
||||
type: string
|
||||
description: Human-readable name for the secret
|
||||
secret_type:
|
||||
type: string
|
||||
enum:
|
||||
- static
|
||||
- role
|
||||
- service_account
|
||||
description: Type of secret/authentication method
|
||||
secret:
|
||||
type: object
|
||||
description: Credentials object (varies by provider/secret_type)
|
||||
required:
|
||||
- secret_type
|
||||
- secret
|
||||
relationships:
|
||||
type: object
|
||||
properties:
|
||||
providers:
|
||||
type: object
|
||||
description: To-many relationship - one secret definition can create secrets for multiple providers
|
||||
properties:
|
||||
data:
|
||||
type: array
|
||||
items:
|
||||
type: object
|
||||
properties:
|
||||
type:
|
||||
type: string
|
||||
enum:
|
||||
- providers
|
||||
id:
|
||||
type: string
|
||||
format: uuid
|
||||
required:
|
||||
- type
|
||||
- id
|
||||
required:
|
||||
- data
|
||||
required:
|
||||
- providers
|
||||
required:
|
||||
- type
|
||||
- attributes
|
||||
- relationships
|
||||
maxItems: 100
|
||||
minItems: 1
|
||||
required:
|
||||
- data
|
||||
required: true
|
||||
security:
|
||||
- JWT or API Key: []
|
||||
responses:
|
||||
'201':
|
||||
content:
|
||||
application/vnd.api+json:
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
data:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/ProviderSecret'
|
||||
meta:
|
||||
type: object
|
||||
description: Contains version and optionally skipped providers info
|
||||
properties:
|
||||
version:
|
||||
type: string
|
||||
example: "v1"
|
||||
skipped:
|
||||
type: array
|
||||
items:
|
||||
type: object
|
||||
properties:
|
||||
provider_id:
|
||||
type: string
|
||||
format: uuid
|
||||
source_index:
|
||||
type: integer
|
||||
description: Index of the item in the request that contained this provider
|
||||
reason:
|
||||
type: string
|
||||
examples:
|
||||
all_created:
|
||||
summary: All secrets created successfully
|
||||
value:
|
||||
data:
|
||||
- type: provider-secrets
|
||||
id: "660e8400-e29b-41d4-a716-446655440001"
|
||||
attributes:
|
||||
name: "AWS Production Credentials"
|
||||
secret_type: role
|
||||
inserted_at: "2025-01-20T10:30:00Z"
|
||||
updated_at: "2025-01-20T10:30:00Z"
|
||||
relationships:
|
||||
provider:
|
||||
data:
|
||||
type: providers
|
||||
id: "550e8400-e29b-41d4-a716-446655440001"
|
||||
links:
|
||||
self: "https://api.prowler.com/api/v1/providers/secrets/660e8400-e29b-41d4-a716-446655440001"
|
||||
meta:
|
||||
version: "v1"
|
||||
some_skipped:
|
||||
summary: Some providers skipped (already have secrets)
|
||||
value:
|
||||
data:
|
||||
- type: provider-secrets
|
||||
id: "660e8400-e29b-41d4-a716-446655440001"
|
||||
attributes:
|
||||
name: "Shared AWS Secret"
|
||||
secret_type: static
|
||||
inserted_at: "2025-01-20T10:30:00Z"
|
||||
updated_at: "2025-01-20T10:30:00Z"
|
||||
relationships:
|
||||
provider:
|
||||
data:
|
||||
type: providers
|
||||
id: "550e8400-e29b-41d4-a716-446655440002"
|
||||
links:
|
||||
self: "https://api.prowler.com/api/v1/providers/secrets/660e8400-e29b-41d4-a716-446655440001"
|
||||
meta:
|
||||
version: "v1"
|
||||
skipped:
|
||||
- provider_id: "550e8400-e29b-41d4-a716-446655440001"
|
||||
source_index: 0
|
||||
reason: "Provider already has a secret."
|
||||
description: Provider secrets created successfully. May include meta.skipped if some providers were skipped.
|
||||
'400':
|
||||
content:
|
||||
application/vnd.api+json:
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
errors:
|
||||
type: array
|
||||
items:
|
||||
type: object
|
||||
properties:
|
||||
detail:
|
||||
type: string
|
||||
description: Error message
|
||||
source:
|
||||
type: object
|
||||
properties:
|
||||
pointer:
|
||||
type: string
|
||||
description: JSON pointer to the error location (e.g., /data/0/attributes/secret_type)
|
||||
examples:
|
||||
validation_errors:
|
||||
summary: Validation errors - entire batch rejected (all-or-nothing)
|
||||
value:
|
||||
errors:
|
||||
- detail: "Role secret type is not supported for GCP providers"
|
||||
source:
|
||||
pointer: "/data/0/attributes/secret_type"
|
||||
- detail: "Provider '550e8400-e29b-41d4-a716-446655440099' not found."
|
||||
source:
|
||||
pointer: "/data/1/relationships/providers/data/0"
|
||||
description: Validation errors - entire batch rejected (all-or-nothing)
|
||||
patch:
|
||||
operationId: providers_secrets_batch_update
|
||||
description: |
|
||||
Update multiple provider secrets in a single atomic operation.
|
||||
JSON:API compliant with all-or-nothing semantics.
|
||||
summary: Batch update provider secrets
|
||||
tags:
|
||||
- Provider
|
||||
requestBody:
|
||||
content:
|
||||
application/vnd.api+json:
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
data:
|
||||
type: array
|
||||
description: Array of provider-secret objects to update (max 100)
|
||||
items:
|
||||
type: object
|
||||
properties:
|
||||
type:
|
||||
type: string
|
||||
example: provider-secrets
|
||||
description: Resource type identifier (should be "provider-secrets")
|
||||
id:
|
||||
type: string
|
||||
format: uuid
|
||||
description: Provider secret UUID to update
|
||||
attributes:
|
||||
type: object
|
||||
properties:
|
||||
name:
|
||||
type: string
|
||||
description: Human-readable name for the secret
|
||||
secret_type:
|
||||
type: string
|
||||
enum:
|
||||
- static
|
||||
- role
|
||||
- service_account
|
||||
description: Type of secret/authentication method
|
||||
secret:
|
||||
type: object
|
||||
description: Credentials object (varies by provider/secret_type)
|
||||
required:
|
||||
- type
|
||||
- id
|
||||
- attributes
|
||||
maxItems: 100
|
||||
minItems: 1
|
||||
required:
|
||||
- data
|
||||
required: true
|
||||
security:
|
||||
- JWT or API Key: []
|
||||
responses:
|
||||
'200':
|
||||
content:
|
||||
application/vnd.api+json:
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
data:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/ProviderSecret'
|
||||
meta:
|
||||
type: object
|
||||
properties:
|
||||
version:
|
||||
type: string
|
||||
example: "v1"
|
||||
examples:
|
||||
all_updated:
|
||||
summary: All secrets updated successfully
|
||||
value:
|
||||
data:
|
||||
- type: provider-secrets
|
||||
id: "660e8400-e29b-41d4-a716-446655440001"
|
||||
attributes:
|
||||
name: "AWS Production Credentials (Updated)"
|
||||
secret_type: role
|
||||
inserted_at: "2025-01-20T10:30:00Z"
|
||||
updated_at: "2025-01-20T11:00:00Z"
|
||||
relationships:
|
||||
provider:
|
||||
data:
|
||||
type: providers
|
||||
id: "550e8400-e29b-41d4-a716-446655440001"
|
||||
links:
|
||||
self: "https://api.prowler.com/api/v1/providers/secrets/660e8400-e29b-41d4-a716-446655440001"
|
||||
meta:
|
||||
version: "v1"
|
||||
description: All provider secrets updated successfully
|
||||
'400':
|
||||
content:
|
||||
application/vnd.api+json:
|
||||
schema:
|
||||
type: object
|
||||
properties:
|
||||
errors:
|
||||
type: array
|
||||
items:
|
||||
type: object
|
||||
properties:
|
||||
detail:
|
||||
type: string
|
||||
description: Error message
|
||||
source:
|
||||
type: object
|
||||
properties:
|
||||
pointer:
|
||||
type: string
|
||||
description: JSON pointer to the error location (e.g., /data/0/id)
|
||||
examples:
|
||||
validation_errors:
|
||||
summary: Validation errors - entire batch rejected (all-or-nothing)
|
||||
value:
|
||||
errors:
|
||||
- detail: "Provider secret '660e8400-e29b-41d4-a716-446655440099' not found."
|
||||
source:
|
||||
pointer: "/data/0/id"
|
||||
- detail: "This field is required."
|
||||
source:
|
||||
pointer: "/data/1/id"
|
||||
description: Validation errors - entire batch rejected (all-or-nothing)
|
||||
/api/v1/providers/secrets/{id}:
|
||||
get:
|
||||
operationId: providers_secrets_retrieve
|
||||
|
||||
1389
api/src/backend/api/tests/integration/test_provider_secrets_batch.py
Normal file
1389
api/src/backend/api/tests/integration/test_provider_secrets_batch.py
Normal file
File diff suppressed because it is too large
Load Diff
921
api/src/backend/api/tests/integration/test_providers_batch.py
Normal file
921
api/src/backend/api/tests/integration/test_providers_batch.py
Normal file
@@ -0,0 +1,921 @@
|
||||
import uuid
|
||||
|
||||
import pytest
|
||||
from django.conf import settings
|
||||
from django.test import override_settings
|
||||
from django.urls import reverse
|
||||
from rest_framework import status
|
||||
|
||||
from api.models import Provider
|
||||
|
||||
|
||||
@pytest.mark.django_db
class TestProviderBatchCreate:
    """Integration tests for POST /api/v1/providers/batch.

    The endpoint is all-or-nothing: a single invalid item rejects the entire
    batch and leaves the database untouched.
    """

    # The tests submit plain JSON bodies to the batch endpoint.
    content_type = "application/json"

    def _post(self, client, body):
        """Submit *body* to the batch-create endpoint and return the response."""
        return client.post(
            reverse("provider-batch"),
            data=body,
            content_type=self.content_type,
        )

    @staticmethod
    def _item(provider, uid, alias=None):
        """Build one JSON:API resource object for the request payload."""
        attributes = {"provider": provider, "uid": uid}
        if alias is not None:
            attributes["alias"] = alias
        return {"type": "providers", "attributes": attributes}

    def test_batch_create_single_provider_success(
        self, authenticated_client, tenants_fixture
    ):
        """A one-item batch creates the provider and echoes its attributes."""
        body = {"data": [self._item("aws", "111111111111", "Test AWS Account")]}

        response = self._post(authenticated_client, body)

        assert response.status_code == status.HTTP_201_CREATED
        created = response.json()["data"]
        assert len(created) == 1
        attributes = created[0]["attributes"]
        assert attributes["provider"] == "aws"
        assert attributes["uid"] == "111111111111"
        assert attributes["alias"] == "Test AWS Account"

    def test_batch_create_multiple_providers_mixed_types(
        self, authenticated_client, tenants_fixture
    ):
        """A single batch may mix aws, azure and gcp provider types."""
        body = {
            "data": [
                self._item("aws", "222222222222", "AWS Account 1"),
                self._item(
                    "azure",
                    "a1b2c3d4-e5f6-4890-abcd-ef1234567890",
                    "Azure Subscription",
                ),
                self._item("gcp", "my-gcp-project-id", "GCP Project"),
            ]
        }

        response = self._post(authenticated_client, body)

        assert response.status_code == status.HTTP_201_CREATED
        created = response.json()["data"]
        assert len(created) == 3
        returned_types = {item["attributes"]["provider"] for item in created}
        assert "aws" in returned_types
        assert "azure" in returned_types
        assert "gcp" in returned_types

    def test_batch_create_duplicate_uid_in_batch_error(
        self, authenticated_client, tenants_fixture
    ):
        """Two items sharing a UID reject the whole batch (all-or-nothing)."""
        initial_count = Provider.objects.count()

        body = {
            "data": [
                self._item("aws", "444444444444", "AWS Account 1"),
                self._item("aws", "444444444444", "AWS Account 2 (duplicate)"),
            ]
        }

        response = self._post(authenticated_client, body)

        # All-or-nothing: the batch as a whole must fail.
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        errors = response.json()["errors"]

        # The duplicate must be reported explicitly.
        assert any("Duplicate UID" in str(error.get("detail", "")) for error in errors)

        # Nothing may have been persisted.
        assert Provider.objects.count() == initial_count

    def test_batch_create_existing_uid_error(
        self, authenticated_client, providers_fixture
    ):
        """A UID already present in the tenant rejects the batch."""
        existing = providers_fixture[0]

        body = {
            "data": [
                self._item(existing.provider, existing.uid, "Duplicate of existing")
            ]
        }

        response = self._post(authenticated_client, body)

        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert any(
            "already exists" in str(error.get("detail", ""))
            for error in response.json()["errors"]
        )

    def test_batch_create_invalid_uid_format_error(
        self, authenticated_client, tenants_fixture
    ):
        """A malformed UID is rejected with a pointer into the offending item."""
        body = {"data": [self._item("aws", "invalid-aws-uid", "Invalid AWS")]}

        response = self._post(authenticated_client, body)

        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert any(
            "/data/0/attributes" in str(error.get("source", {}))
            for error in response.json()["errors"]
        )

    def test_batch_create_permission_denied(
        self, authenticated_client_no_permissions_rbac, tenants_fixture
    ):
        """Without the MANAGE_PROVIDERS permission the endpoint returns 403."""
        body = {"data": [self._item("aws", "555555555555", "Test")]}

        response = self._post(authenticated_client_no_permissions_rbac, body)

        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_batch_create_exceeds_limit_error(
        self, authenticated_client, tenants_fixture
    ):
        """One item over the configured limit rejects the batch."""
        limit = settings.API_BATCH_MAX_SIZE
        body = {
            "data": [
                self._item("aws", f"{i:012d}", f"Provider {i}")
                for i in range(limit + 1)
            ]
        }

        response = self._post(authenticated_client, body)

        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert any(
            f"Maximum {limit}" in str(error.get("detail", ""))
            for error in response.json()["errors"]
        )

    def test_batch_create_empty_array_error(
        self, authenticated_client, tenants_fixture
    ):
        """An empty data array is rejected."""
        response = self._post(authenticated_client, {"data": []})

        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert any(
            "At least one provider" in str(error.get("detail", ""))
            for error in response.json()["errors"]
        )

    def test_batch_create_invalid_data_format_error(
        self, authenticated_client, tenants_fixture
    ):
        """A non-array ``data`` member is rejected."""
        body = {"data": self._item("aws", "666666666666")}

        response = self._post(authenticated_client, body)

        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert any(
            "Must be an array" in str(error.get("detail", ""))
            for error in response.json()["errors"]
        )

    def test_batch_create_sets_correct_tenant(
        self, authenticated_client, tenants_fixture
    ):
        """Batch-created providers belong to the requesting user's tenant."""
        body = {"data": [self._item("aws", "777777777777", "Tenant 1 Provider")]}

        response = self._post(authenticated_client, body)
        assert response.status_code == status.HTTP_201_CREATED
        created_id = response.json()["data"][0]["id"]

        created = Provider.objects.get(id=created_id)
        assert created.tenant_id == tenants_fixture[0].id

    def test_batch_create_mixed_valid_invalid_error(
        self, authenticated_client, tenants_fixture
    ):
        """One invalid item drags down an otherwise valid batch (all-or-nothing)."""
        initial_count = Provider.objects.count()

        body = {
            "data": [
                self._item("aws", "888888888888", "Valid AWS"),
                self._item("aws", "invalid-uid", "Invalid AWS"),
            ]
        }

        response = self._post(authenticated_client, body)

        # All-or-nothing: the batch as a whole must fail.
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        errors = response.json()["errors"]

        # The invalid item must be pointed at.
        assert len(errors) >= 1
        assert any(
            "/data/1" in str(error.get("source", {}).get("pointer", ""))
            for error in errors
        )

        # The valid item must not have been persisted either.
        assert Provider.objects.count() == initial_count

    def test_batch_create_multiple_errors_reported(
        self, authenticated_client, tenants_fixture
    ):
        """Every invalid item is reported, not just the first one."""
        body = {
            "data": [
                self._item("aws", "invalid1", "Invalid 1"),
                self._item("azure", "not-a-uuid", "Invalid 2"),
            ]
        }

        response = self._post(authenticated_client, body)

        assert response.status_code == status.HTTP_400_BAD_REQUEST
        pointers = [
            error.get("source", {}).get("pointer", "")
            for error in response.json()["errors"]
        ]
        assert any("/data/0" in pointer for pointer in pointers)
        assert any("/data/1" in pointer for pointer in pointers)

    def test_batch_create_at_exact_limit_success(
        self, authenticated_client, tenants_fixture
    ):
        """A batch of exactly the configured limit is accepted."""
        limit = settings.API_BATCH_MAX_SIZE
        body = {
            "data": [
                self._item("aws", f"{i:012d}", f"Provider {i}") for i in range(limit)
            ]
        }

        response = self._post(authenticated_client, body)

        assert response.status_code == status.HTTP_201_CREATED
        assert len(response.json()["data"]) == limit

    @override_settings(API_BATCH_MAX_SIZE=5)
    def test_batch_create_respects_custom_limit_setting(
        self, authenticated_client, tenants_fixture
    ):
        """The endpoint honours a custom API_BATCH_MAX_SIZE setting."""
        body = {
            "data": [
                self._item("aws", f"{900000000000 + i}", f"Provider {i}")
                for i in range(6)
            ]
        }

        response = self._post(authenticated_client, body)

        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert any(
            "Maximum 5" in str(error.get("detail", ""))
            for error in response.json()["errors"]
        )

    @override_settings(API_BATCH_MAX_SIZE=3)
    def test_batch_create_at_custom_limit_success(
        self, authenticated_client, tenants_fixture
    ):
        """A batch at exactly a custom limit is accepted."""
        body = {
            "data": [
                self._item("aws", f"{800000000000 + i}", f"Provider {i}")
                for i in range(3)
            ]
        }

        response = self._post(authenticated_client, body)

        assert response.status_code == status.HTTP_201_CREATED
        assert len(response.json()["data"]) == 3
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestProviderBatchUpdate:
|
||||
"""Tests for the batch provider update endpoint."""
|
||||
|
||||
content_type = "application/json"
|
||||
|
||||
def test_batch_update_single_provider_success(
|
||||
self, authenticated_client, providers_fixture
|
||||
):
|
||||
"""Test updating a single provider via batch endpoint."""
|
||||
provider = providers_fixture[0]
|
||||
new_alias = "Updated AWS Account"
|
||||
|
||||
payload = {
|
||||
"data": [
|
||||
{
|
||||
"type": "providers",
|
||||
"id": str(provider.id),
|
||||
"attributes": {
|
||||
"alias": new_alias,
|
||||
},
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
response = authenticated_client.patch(
|
||||
reverse("provider-batch"),
|
||||
data=payload,
|
||||
content_type=self.content_type,
|
||||
)
|
||||
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
data = response.json()["data"]
|
||||
assert len(data) == 1
|
||||
assert data[0]["attributes"]["alias"] == new_alias
|
||||
|
||||
# Verify in database
|
||||
provider.refresh_from_db()
|
||||
assert provider.alias == new_alias
|
||||
|
||||
def test_batch_update_multiple_providers_success(
|
||||
self, authenticated_client, providers_fixture
|
||||
):
|
||||
"""Test updating multiple providers in one batch."""
|
||||
provider1 = providers_fixture[0]
|
||||
provider2 = providers_fixture[1]
|
||||
|
||||
payload = {
|
||||
"data": [
|
||||
{
|
||||
"type": "providers",
|
||||
"id": str(provider1.id),
|
||||
"attributes": {"alias": "Updated Provider 1"},
|
||||
},
|
||||
{
|
||||
"type": "providers",
|
||||
"id": str(provider2.id),
|
||||
"attributes": {"alias": "Updated Provider 2"},
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
response = authenticated_client.patch(
|
||||
reverse("provider-batch"),
|
||||
data=payload,
|
||||
content_type=self.content_type,
|
||||
)
|
||||
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
data = response.json()["data"]
|
||||
assert len(data) == 2
|
||||
|
||||
# Verify in database
|
||||
provider1.refresh_from_db()
|
||||
provider2.refresh_from_db()
|
||||
assert provider1.alias == "Updated Provider 1"
|
||||
assert provider2.alias == "Updated Provider 2"
|
||||
|
||||
def test_batch_update_provider_not_found_error(
    self, authenticated_client, tenants_fixture
):
    """A batch referencing a non-existent provider ID is rejected."""
    missing_id = str(uuid.uuid4())

    request_body = {
        "data": [
            {
                "type": "providers",
                "id": missing_id,
                "attributes": {"alias": "New Alias"},
            }
        ]
    }

    response = authenticated_client.patch(
        reverse("provider-batch"),
        data=request_body,
        content_type=self.content_type,
    )

    assert response.status_code == status.HTTP_400_BAD_REQUEST
    # The error detail must mention the lookup failure.
    details = [str(err.get("detail", "")) for err in response.json()["errors"]]
    assert any("not found" in detail for detail in details)
|
||||
|
||||
def test_batch_update_duplicate_id_in_batch_error(
    self, authenticated_client, providers_fixture
):
    """Duplicate IDs within the same batch fail the entire batch (all-or-nothing)."""
    target = providers_fixture[0]
    alias_before = target.alias
    duplicated_id = str(target.id)

    request_body = {
        "data": [
            {
                "type": "providers",
                "id": duplicated_id,
                "attributes": {"alias": "First Update"},
            },
            {
                "type": "providers",
                "id": duplicated_id,
                "attributes": {"alias": "Second Update (duplicate)"},
            },
        ]
    }

    response = authenticated_client.patch(
        reverse("provider-batch"),
        data=request_body,
        content_type=self.content_type,
    )

    # All-or-nothing: entire batch fails
    assert response.status_code == status.HTTP_400_BAD_REQUEST
    details = [str(err.get("detail", "")) for err in response.json()["errors"]]
    assert any("Duplicate provider ID" in detail for detail in details)

    # Verify provider was not updated
    target.refresh_from_db()
    assert target.alias == alias_before
|
||||
|
||||
def test_batch_update_permission_denied(
    self, authenticated_client_no_permissions_rbac, providers_fixture
):
    """Users without MANAGE_PROVIDERS permission cannot batch update."""
    request_body = {
        "data": [
            {
                "type": "providers",
                "id": str(providers_fixture[0].id),
                "attributes": {"alias": "New Alias"},
            }
        ]
    }

    response = authenticated_client_no_permissions_rbac.patch(
        reverse("provider-batch"),
        data=request_body,
        content_type=self.content_type,
    )

    assert response.status_code == status.HTTP_403_FORBIDDEN
|
||||
|
||||
def test_batch_update_exceeds_limit_error(
    self, authenticated_client, providers_fixture
):
    """Batch requests exceeding API_BATCH_MAX_SIZE are rejected."""
    max_items = settings.API_BATCH_MAX_SIZE
    provider_id = str(providers_fixture[0].id)

    # One item more than the configured maximum.
    request_body = {
        "data": [
            {
                "type": "providers",
                "id": provider_id,
                "attributes": {"alias": f"Provider {i}"},
            }
            for i in range(max_items + 1)
        ]
    }

    response = authenticated_client.patch(
        reverse("provider-batch"),
        data=request_body,
        content_type=self.content_type,
    )

    assert response.status_code == status.HTTP_400_BAD_REQUEST
    details = [str(err.get("detail", "")) for err in response.json()["errors"]]
    assert any(f"Maximum {max_items}" in detail for detail in details)
|
||||
|
||||
def test_batch_update_empty_array_error(
    self, authenticated_client, tenants_fixture
):
    """An empty data array is rejected by the batch endpoint."""
    response = authenticated_client.patch(
        reverse("provider-batch"),
        data={"data": []},
        content_type=self.content_type,
    )

    assert response.status_code == status.HTTP_400_BAD_REQUEST
    details = [str(err.get("detail", "")) for err in response.json()["errors"]]
    assert any("At least one provider" in detail for detail in details)
|
||||
|
||||
def test_batch_update_invalid_data_format_error(
    self, authenticated_client, tenants_fixture
):
    """A single object in "data" (instead of an array) is rejected."""
    # The batch endpoint requires "data" to be a list of resource objects.
    single_object = {
        "type": "providers",
        "id": str(uuid.uuid4()),
        "attributes": {"alias": "Test"},
    }

    response = authenticated_client.patch(
        reverse("provider-batch"),
        data={"data": single_object},
        content_type=self.content_type,
    )

    assert response.status_code == status.HTTP_400_BAD_REQUEST
    details = [str(err.get("detail", "")) for err in response.json()["errors"]]
    assert any("Must be an array" in detail for detail in details)
|
||||
|
||||
def test_batch_update_missing_id_error(self, authenticated_client, tenants_fixture):
    """An update item without an "id" member is rejected."""
    request_body = {
        "data": [
            {
                # "id" is intentionally omitted: updates require it.
                "type": "providers",
                "attributes": {"alias": "New Alias"},
            }
        ]
    }

    response = authenticated_client.patch(
        reverse("provider-batch"),
        data=request_body,
        content_type=self.content_type,
    )

    assert response.status_code == status.HTTP_400_BAD_REQUEST
    details = [
        str(err.get("detail", "")).lower() for err in response.json()["errors"]
    ]
    assert any("required" in detail for detail in details)
|
||||
|
||||
def test_batch_update_preserves_other_fields(
    self, authenticated_client, providers_fixture
):
    """Updating the alias leaves uid and provider type untouched."""
    target = providers_fixture[0]
    uid_before = target.uid
    type_before = target.provider

    response = authenticated_client.patch(
        reverse("provider-batch"),
        data={
            "data": [
                {
                    "type": "providers",
                    "id": str(target.id),
                    "attributes": {"alias": "Updated Alias Only"},
                }
            ]
        },
        content_type=self.content_type,
    )

    assert response.status_code == status.HTTP_200_OK

    # Only the alias changed; other columns keep their prior values.
    target.refresh_from_db()
    assert target.alias == "Updated Alias Only"
    assert target.uid == uid_before
    assert target.provider == type_before
|
||||
|
||||
def test_batch_update_multiple_errors_reported(
    self, authenticated_client, tenants_fixture
):
    """All validation errors are reported, not just the first one."""
    # Two distinct non-existent provider IDs: both must be flagged.
    request_body = {
        "data": [
            {
                "type": "providers",
                "id": str(uuid.uuid4()),
                "attributes": {"alias": f"Provider {n}"},
            }
            for n in (1, 2)
        ]
    }

    response = authenticated_client.patch(
        reverse("provider-batch"),
        data=request_body,
        content_type=self.content_type,
    )

    assert response.status_code == status.HTTP_400_BAD_REQUEST
    # Each failing item carries its own JSON:API source pointer.
    pointers = [
        err.get("source", {}).get("pointer", "")
        for err in response.json()["errors"]
    ]
    assert any("/data/0" in pointer for pointer in pointers)
    assert any("/data/1" in pointer for pointer in pointers)
|
||||
|
||||
def test_batch_update_at_exact_limit_success(
    self, authenticated_client, tenants_fixture
):
    """A batch of exactly API_BATCH_MAX_SIZE items is accepted."""
    max_items = settings.API_BATCH_MAX_SIZE
    tenant = tenants_fixture[0]

    created = [
        Provider.objects.create(
            provider="aws",
            uid=str(700000000000 + i),
            alias=f"Provider {i}",
            tenant_id=tenant.id,
        )
        for i in range(max_items)
    ]

    request_body = {
        "data": [
            {
                "type": "providers",
                "id": str(prov.id),
                "attributes": {"alias": f"Updated Provider {i}"},
            }
            for i, prov in enumerate(created)
        ]
    }

    response = authenticated_client.patch(
        reverse("provider-batch"),
        data=request_body,
        content_type=self.content_type,
    )

    assert response.status_code == status.HTTP_200_OK
    assert len(response.json()["data"]) == max_items
|
||||
|
||||
@override_settings(API_BATCH_MAX_SIZE=5)
def test_batch_update_respects_custom_limit_setting(
    self, authenticated_client, tenants_fixture
):
    """The batch update endpoint honors an overridden API_BATCH_MAX_SIZE."""
    tenant = tenants_fixture[0]
    over_limit = 6  # one more than the overridden maximum of 5

    created = [
        Provider.objects.create(
            provider="aws",
            uid=str(600000000000 + i),
            alias=f"Provider {i}",
            tenant_id=tenant.id,
        )
        for i in range(over_limit)
    ]

    request_body = {
        "data": [
            {
                "type": "providers",
                "id": str(prov.id),
                "attributes": {"alias": f"Updated Provider {i}"},
            }
            for i, prov in enumerate(created)
        ]
    }

    response = authenticated_client.patch(
        reverse("provider-batch"),
        data=request_body,
        content_type=self.content_type,
    )

    assert response.status_code == status.HTTP_400_BAD_REQUEST
    details = [str(err.get("detail", "")) for err in response.json()["errors"]]
    assert any("Maximum 5" in detail for detail in details)
|
||||
|
||||
@override_settings(API_BATCH_MAX_SIZE=3)
def test_batch_update_at_custom_limit_success(
    self, authenticated_client, tenants_fixture
):
    """A batch at exactly the overridden custom limit is accepted."""
    tenant = tenants_fixture[0]
    batch_size = 3  # matches the overridden API_BATCH_MAX_SIZE

    created = [
        Provider.objects.create(
            provider="aws",
            uid=str(500000000000 + i),
            alias=f"Provider {i}",
            tenant_id=tenant.id,
        )
        for i in range(batch_size)
    ]

    request_body = {
        "data": [
            {
                "type": "providers",
                "id": str(prov.id),
                "attributes": {"alias": f"Updated Provider {i}"},
            }
            for i, prov in enumerate(created)
        ]
    }

    response = authenticated_client.patch(
        reverse("provider-batch"),
        data=request_body,
        content_type=self.content_type,
    )

    assert response.status_code == status.HTTP_200_OK
    assert len(response.json()["data"]) == 3
|
||||
@@ -979,6 +979,283 @@ class ProviderUpdateSerializer(BaseWriteSerializer):
|
||||
}
|
||||
|
||||
|
||||
class ProviderBatchItemSerializer(RLSSerializer, BaseWriteSerializer):
    """Validates a single provider entry within a batch-create request."""

    class Meta:
        model = Provider
        fields = ["alias", "provider", "uid"]

    def validate(self, attrs):
        """Run the provider-specific UID validator when both fields are present."""
        provider_type = attrs.get("provider")
        uid = attrs.get("uid")
        if not (provider_type and uid):
            return attrs
        # Provider exposes optional per-type hooks named validate_<type>_uid.
        uid_validator = getattr(Provider, f"validate_{provider_type}_uid", None)
        if uid_validator is not None:
            uid_validator(uid)
        return attrs
|
||||
|
||||
|
||||
class ProviderBatchCreateSerializer(BaseSerializerV1):
    """Serializer for batch creation of providers with all-or-nothing semantics (JSON:API compliant).

    Validates the raw ``data`` array item by item, collecting every error so
    the client gets the full list of problems in a single response. On
    success, ``attrs["_validated_items"]`` holds each item's validated payload
    for the view to persist.
    """

    class Meta:
        resource_name = "providers"

    def validate(self, attrs):
        """Validate the whole batch.

        Raises:
            ValidationError: if the payload shape is wrong (not a list, empty,
                or over the configured size limit) or if ANY item fails
                validation — all-or-nothing semantics.
        """
        data = self.initial_data.get("data", [])

        if not isinstance(data, list):
            raise ValidationError({"data": "Must be an array of provider objects"})

        if len(data) > settings.API_BATCH_MAX_SIZE:
            raise ValidationError(
                {"data": f"Maximum {settings.API_BATCH_MAX_SIZE} providers per batch"}
            )

        if len(data) == 0:
            raise ValidationError({"data": "At least one provider required"})

        seen_uids = {}  # (provider_type, uid) -> first index, for duplicate detection
        all_errors = []
        validated_items = []
        tenant_id = self.context.get("tenant_id")

        # Hoisted out of the loop: the set of valid provider types is constant.
        valid_provider_types = [choice.value for choice in Provider.ProviderChoices]

        for idx, item in enumerate(data):
            current_errors = []
            item_type = item.get("type")

            if not item_type:
                current_errors.append(
                    {
                        "detail": "This field is required.",
                        "source": {"pointer": f"/data/{idx}/type"},
                    }
                )
            elif item_type != "providers":
                current_errors.append(
                    {
                        "detail": f"Invalid type '{item_type}'. Expected 'providers'.",
                        "source": {"pointer": f"/data/{idx}/type"},
                    }
                )

            item_attrs = item.get("attributes", {})
            if not item_attrs:
                # Without attributes there is nothing more to validate for
                # this item; record what we have and move on.
                current_errors.append(
                    {
                        "detail": "This field is required.",
                        "source": {"pointer": f"/data/{idx}/attributes"},
                    }
                )
                all_errors.extend(current_errors)
                continue

            provider_type = item_attrs.get("provider")
            uid = item_attrs.get("uid")
            key = (provider_type, uid)

            # Validate provider type before any DB queries
            provider_type_valid = provider_type in valid_provider_types

            if not provider_type:
                current_errors.append(
                    {
                        "detail": "This field is required.",
                        "source": {"pointer": f"/data/{idx}/attributes/provider"},
                    }
                )
            elif not provider_type_valid:
                current_errors.append(
                    {
                        "detail": f"Invalid provider type '{provider_type}'. Must be one of: {', '.join(valid_provider_types)}.",
                        "source": {"pointer": f"/data/{idx}/attributes/provider"},
                    }
                )

            if key in seen_uids:
                current_errors.append(
                    {
                        "detail": f"Duplicate UID '{uid}' at index {idx} (first at {seen_uids[key]})",
                        "source": {"pointer": f"/data/{idx}/attributes/uid"},
                    }
                )
            else:
                seen_uids[key] = idx

            # Only check DB if provider type is valid (to avoid enum errors)
            if (
                provider_type_valid
                and uid
                and Provider.objects.filter(
                    tenant_id=tenant_id,
                    provider=provider_type,
                    uid=uid,
                    is_deleted=False,
                ).exists()
            ):
                current_errors.append(
                    {
                        "detail": f"Provider with uid '{uid}' already exists",
                        "source": {"pointer": f"/data/{idx}/attributes/uid"},
                    }
                )

            # Delegate field-level validation to the per-item serializer.
            item_serializer = ProviderBatchItemSerializer(
                data=item_attrs, context=self.context
            )
            if not item_serializer.is_valid():
                for field, field_errors in item_serializer.errors.items():
                    current_errors.append(
                        {
                            "detail": str(field_errors[0]),
                            "source": {"pointer": f"/data/{idx}/attributes/{field}"},
                        }
                    )

            if current_errors:
                all_errors.extend(current_errors)
            else:
                validated_items.append(
                    {"index": idx, "data": item_serializer.validated_data}
                )

        # All-or-nothing: if any errors, fail the entire batch
        if all_errors:
            raise ValidationError(all_errors)

        attrs["_validated_items"] = validated_items
        return attrs
|
||||
|
||||
|
||||
class ProviderBatchUpdateItemSerializer(BaseWriteSerializer):
    """Serializer for validating individual provider update items in batch."""

    class Meta:
        # Only the alias may be changed through the batch update endpoint;
        # uid and provider type are immutable once a provider exists.
        model = Provider
        fields = ["alias"]
|
||||
|
||||
|
||||
class ProviderBatchUpdateSerializer(BaseSerializerV1):
    """Serializer for batch update of providers with all-or-nothing semantics (JSON:API compliant)."""

    class Meta:
        resource_name = "providers"

    def validate(self, attrs):
        """Validate the whole update batch, collecting every item error.

        On success ``attrs["_validated_items"]`` carries, per item, its index,
        the resolved ``Provider`` instance, and the validated attribute values.

        Raises:
            ValidationError: if the payload shape is wrong (not a list, empty,
                or over the configured size limit) or if ANY item fails —
                all-or-nothing semantics.
        """
        data = self.initial_data.get("data", [])

        if not isinstance(data, list):
            raise ValidationError({"data": "Must be an array of provider objects"})

        if len(data) > settings.API_BATCH_MAX_SIZE:
            raise ValidationError(
                {"data": f"Maximum {settings.API_BATCH_MAX_SIZE} providers per batch"}
            )

        if len(data) == 0:
            raise ValidationError({"data": "At least one provider required"})

        all_errors = []
        validated_items = []
        tenant_id = self.context.get("tenant_id")
        seen_ids = {}  # id -> first index, for duplicate detection

        for idx, item in enumerate(data):
            current_errors = []
            item_type = item.get("type")
            item_id = item.get("id")

            if not item_type:
                current_errors.append(
                    {
                        "detail": "This field is required.",
                        "source": {"pointer": f"/data/{idx}/type"},
                    }
                )
            elif item_type != "providers":
                current_errors.append(
                    {
                        "detail": f"Invalid type '{item_type}'. Expected 'providers'.",
                        "source": {"pointer": f"/data/{idx}/type"},
                    }
                )

            if not item_id:
                # No id means no lookup is possible; record and skip the item.
                current_errors.append(
                    {
                        "detail": "This field is required.",
                        "source": {"pointer": f"/data/{idx}/id"},
                    }
                )
                all_errors.extend(current_errors)
                continue

            if item_id in seen_ids:
                current_errors.append(
                    {
                        "detail": f"Duplicate provider ID '{item_id}' at index {idx} (first at {seen_ids[item_id]})",
                        "source": {"pointer": f"/data/{idx}/id"},
                    }
                )
            else:
                seen_ids[item_id] = idx

            # Lookup is tenant-scoped and excludes soft-deleted providers.
            try:
                provider = Provider.objects.get(
                    id=item_id, tenant_id=tenant_id, is_deleted=False
                )
            except Provider.DoesNotExist:
                current_errors.append(
                    {
                        "detail": f"Provider '{item_id}' not found.",
                        "source": {"pointer": f"/data/{idx}/id"},
                    }
                )
                all_errors.extend(current_errors)
                continue

            item_attrs = item.get("attributes", {})
            if not item_attrs:
                current_errors.append(
                    {
                        "detail": "This field is required.",
                        "source": {"pointer": f"/data/{idx}/attributes"},
                    }
                )
                all_errors.extend(current_errors)
                continue

            # Delegate field-level validation to the per-item serializer.
            item_serializer = ProviderBatchUpdateItemSerializer(
                data=item_attrs, context=self.context
            )
            if not item_serializer.is_valid():
                for field, field_errors in item_serializer.errors.items():
                    current_errors.append(
                        {
                            "detail": str(field_errors[0]),
                            "source": {"pointer": f"/data/{idx}/attributes/{field}"},
                        }
                    )

            if current_errors:
                all_errors.extend(current_errors)
            else:
                validated_items.append(
                    {
                        "index": idx,
                        "provider": provider,
                        **item_serializer.validated_data,
                    }
                )

        # All-or-nothing: if any errors, fail the entire batch
        if all_errors:
            raise ValidationError(all_errors)

        attrs["_validated_items"] = validated_items
        return attrs
|
||||
|
||||
|
||||
# Scans
|
||||
|
||||
|
||||
@@ -1714,6 +1991,13 @@ class ProviderSecretSerializer(RLSSerializer):
|
||||
"url",
|
||||
]
|
||||
|
||||
def get_root_meta(self, _resource, _many):
    """Expose skipped-provider entries (if any) in the top-level JSON:API meta."""
    root_meta = super().get_root_meta(_resource, _many)
    skipped_entries = self.context.get("_skipped_providers")
    if not skipped_entries:
        return root_meta
    root_meta["skipped"] = skipped_entries
    return root_meta
|
||||
|
||||
|
||||
class ProviderSecretCreateSerializer(RLSSerializer, BaseWriteProviderSecretSerializer):
|
||||
secret = ProviderSecretField(write_only=True)
|
||||
@@ -1775,6 +2059,446 @@ class ProviderSecretUpdateSerializer(BaseWriteProviderSecretSerializer):
|
||||
return validated_attrs
|
||||
|
||||
|
||||
class ProviderSecretBatchItemSerializer(BaseWriteProviderSecretSerializer):
    """Serializer for an individual item in the batch of secrets."""

    secret = ProviderSecretField(write_only=True)

    class Meta:
        model = ProviderSecret
        fields = ["name", "secret_type", "secret"]

    def validate(self, attrs):
        """Check the secret payload against the provider supplied via context."""
        # The target provider is injected through the serializer context
        # because it is resolved and validated by the batch serializer.
        provider = self.context.get("provider")
        secret = attrs.get("secret")
        secret_type = attrs.get("secret_type")

        if provider and secret_type and secret:
            self.validate_secret_based_on_provider(
                provider.provider, secret_type, secret
            )
        return attrs
|
||||
|
||||
|
||||
class ProviderSecretBatchCreateSerializer(BaseSerializerV1):
    """
    Serializer for batch creation of provider secrets.

    Supports to-many relationship format where one secret definition can be
    associated with multiple providers. Each provider creates a separate secret.

    JSON:API compliant: all-or-nothing for hard errors, soft skips for providers
    that already have secrets (reported in meta.skipped).
    """

    class Meta:
        resource_name = "provider-secrets"

    def _extract_providers_data(self, relationships, idx):
        """
        Extract providers data from relationships, supporting both formats:
        - to-one: relationships.provider.data (single object) - backwards compatible
        - to-many: relationships.providers.data (array) - new format

        Returns (providers_list, errors) where providers_list is normalized to array.
        ``idx`` is the item's position in the batch, used only to build
        JSON:API error pointers.
        """
        errors = []

        # Try to-many format first (providers plural)
        providers_rel = relationships.get("providers", {})
        providers_data = providers_rel.get("data")

        if providers_data is not None:
            # to-many format
            if isinstance(providers_data, dict):
                # Single object in to-many field - normalize to array
                providers_data = [providers_data]
            elif not isinstance(providers_data, list):
                errors.append(
                    {
                        "detail": "Must be an array of provider resource identifiers.",
                        "source": {
                            "pointer": f"/data/{idx}/relationships/providers/data"
                        },
                    }
                )
                return None, errors
            return providers_data, errors

        # Fall back to to-one format (provider singular) for backwards compatibility
        provider_rel = relationships.get("provider", {})
        provider_data = provider_rel.get("data")

        if provider_data is not None:
            if isinstance(provider_data, dict):
                return [provider_data], errors
            else:
                errors.append(
                    {
                        "detail": "Must be a provider resource identifier object.",
                        "source": {
                            "pointer": f"/data/{idx}/relationships/provider/data"
                        },
                    }
                )
                return None, errors

        # No providers relationship found
        errors.append(
            {
                "detail": "Providers relationship is required.",
                "source": {"pointer": f"/data/{idx}/relationships/providers"},
            }
        )
        return None, errors

    def validate(self, attrs):
        """Validate the batch, collecting hard errors and soft skips.

        Hard errors (bad type/attributes, unknown provider, duplicate provider,
        invalid secret payload) fail the whole batch. Providers that already
        have a secret are soft-skipped and surfaced via
        ``attrs["_skipped_providers"]`` instead of failing.

        Raises:
            ValidationError: on malformed payloads or any hard item error.
        """
        data = self.initial_data.get("data", [])

        if not isinstance(data, list):
            raise ValidationError(
                {"data": "Must be an array of provider-secret objects"}
            )

        if len(data) > settings.API_BATCH_MAX_SIZE:
            raise ValidationError(
                {"data": f"Maximum {settings.API_BATCH_MAX_SIZE} secrets per batch"}
            )

        if len(data) == 0:
            raise ValidationError({"data": "At least one secret required"})

        hard_errors = []  # Will cause full batch failure
        skipped_providers = []  # Already have secrets, reported in meta
        validated_items = []
        tenant_id = self.context.get("tenant_id")
        seen_providers = {}  # Track duplicates across the entire batch

        for idx, item in enumerate(data):
            item_type = item.get("type")

            # Validate type
            if not item_type:
                hard_errors.append(
                    {
                        "detail": "This field is required.",
                        "source": {"pointer": f"/data/{idx}/type"},
                    }
                )
                continue
            elif item_type != "provider-secrets":
                hard_errors.append(
                    {
                        "detail": f"Invalid type '{item_type}'. Expected 'provider-secrets'.",
                        "source": {"pointer": f"/data/{idx}/type"},
                    }
                )
                continue

            # Validate attributes
            item_attrs = item.get("attributes", {})
            if not item_attrs:
                hard_errors.append(
                    {
                        "detail": "This field is required.",
                        "source": {"pointer": f"/data/{idx}/attributes"},
                    }
                )
                continue

            # Extract providers (supports both to-one and to-many formats)
            relationships = item.get("relationships", {})
            providers_data, extract_errors = self._extract_providers_data(
                relationships, idx
            )
            if extract_errors:
                hard_errors.extend(extract_errors)
                continue

            if not providers_data:
                hard_errors.append(
                    {
                        "detail": "At least one provider is required.",
                        "source": {
                            "pointer": f"/data/{idx}/relationships/providers/data"
                        },
                    }
                )
                continue

            # Process each provider in the relationship
            for prov_idx, prov_data in enumerate(providers_data):
                provider_id = (
                    prov_data.get("id") if isinstance(prov_data, dict) else None
                )
                provider_type = (
                    prov_data.get("type") if isinstance(prov_data, dict) else None
                )

                # Validate provider resource identifier
                if not provider_id:
                    hard_errors.append(
                        {
                            "detail": "Provider id is required.",
                            "source": {
                                "pointer": f"/data/{idx}/relationships/providers/data/{prov_idx}/id"
                            },
                        }
                    )
                    continue

                if provider_type and provider_type != "providers":
                    hard_errors.append(
                        {
                            "detail": f"Invalid type '{provider_type}'. Expected 'providers'.",
                            "source": {
                                "pointer": f"/data/{idx}/relationships/providers/data/{prov_idx}/type"
                            },
                        }
                    )
                    continue

                # Check for duplicate provider in entire batch
                if provider_id in seen_providers:
                    prev_idx, prev_prov_idx = seen_providers[provider_id]
                    hard_errors.append(
                        {
                            "detail": f"Duplicate provider '{provider_id}' (first at data/{prev_idx}/providers/{prev_prov_idx}).",
                            "source": {
                                "pointer": f"/data/{idx}/relationships/providers/data/{prov_idx}"
                            },
                        }
                    )
                    continue

                seen_providers[provider_id] = (idx, prov_idx)

                # Validate provider exists and belongs to tenant
                try:
                    provider = Provider.objects.get(
                        id=provider_id, tenant_id=tenant_id, is_deleted=False
                    )
                except Provider.DoesNotExist:
                    hard_errors.append(
                        {
                            "detail": f"Provider '{provider_id}' not found.",
                            "source": {
                                "pointer": f"/data/{idx}/relationships/providers/data/{prov_idx}"
                            },
                        }
                    )
                    continue

                # Soft skip: provider already has a secret
                if ProviderSecret.objects.filter(
                    provider_id=provider_id, tenant_id=tenant_id
                ).exists():
                    skipped_providers.append(
                        {
                            "provider_id": str(provider_id),
                            "source_index": idx,
                            "reason": "Provider already has a secret.",
                        }
                    )
                    continue

                # Validate secret attributes for this specific provider
                item_context = {**self.context, "provider": provider}
                item_serializer = ProviderSecretBatchItemSerializer(
                    data=item_attrs, context=item_context
                )

                if not item_serializer.is_valid():
                    for field, field_errors in item_serializer.errors.items():
                        pointer = f"/data/{idx}/attributes/{field}"
                        hard_errors.append(
                            {
                                "detail": str(field_errors[0]),
                                "source": {"pointer": pointer},
                            }
                        )
                    continue

                validated_items.append(
                    {
                        "source_index": idx,
                        **item_serializer.validated_data,
                        "provider": provider,
                    }
                )

        # All-or-nothing: if any hard errors, fail the entire batch
        if hard_errors:
            raise ValidationError(hard_errors)

        attrs["_validated_items"] = validated_items
        attrs["_skipped_providers"] = skipped_providers
        return attrs
|
||||
|
||||
|
||||
class ProviderSecretBatchUpdateItemSerializer(BaseWriteProviderSecretSerializer):
    """Serializer for validating individual provider secret update items in batch."""

    secret = ProviderSecretField(write_only=True, required=False)

    class Meta:
        model = ProviderSecret
        fields = ["name", "secret_type", "secret"]
        extra_kwargs = {
            "name": {"required": False},
            "secret_type": {"required": False},
        }

    def validate(self, attrs):
        """Check an updated secret value against the provider's expected schema."""
        provider = self.context.get("provider")
        secret = attrs.get("secret")
        secret_type = attrs.get("secret_type")

        if not (provider and secret):
            return attrs

        if secret_type:
            # New secret value with an explicit secret_type in the payload.
            self.validate_secret_based_on_provider(
                provider.provider, secret_type, secret
            )
            return attrs

        # No secret_type supplied: validate against the type already stored.
        stored = ProviderSecret.objects.filter(provider=provider).first()
        if stored:
            self.validate_secret_based_on_provider(
                provider.provider, stored.secret_type, secret
            )
        return attrs
|
||||
|
||||
|
||||
class ProviderSecretBatchUpdateSerializer(BaseSerializerV1):
    """
    Serializer for batch update of provider secrets.

    JSON:API compliant with all-or-nothing semantics for validation errors.
    """

    class Meta:
        resource_name = "provider-secrets"

    def validate(self, attrs):
        """Validate the whole update batch, collecting every item error.

        On success ``attrs["_validated_items"]`` carries, per item, its source
        index, the resolved ``ProviderSecret`` instance, and the validated
        attribute values.

        Raises:
            ValidationError: if the payload shape is wrong (not a list, empty,
                or over the configured size limit) or if ANY item fails —
                all-or-nothing semantics.
        """
        data = self.initial_data.get("data", [])

        if not isinstance(data, list):
            raise ValidationError(
                {"data": "Must be an array of provider-secret objects"}
            )

        if len(data) > settings.API_BATCH_MAX_SIZE:
            raise ValidationError(
                {"data": f"Maximum {settings.API_BATCH_MAX_SIZE} secrets per batch"}
            )

        if len(data) == 0:
            raise ValidationError({"data": "At least one secret required"})

        hard_errors = []  # Will cause full batch failure
        validated_items = []
        tenant_id = self.context.get("tenant_id")
        seen_ids = {}  # id -> first index, for duplicate detection

        for idx, item in enumerate(data):
            item_type = item.get("type")
            item_id = item.get("id")

            # Validate type
            if not item_type:
                hard_errors.append(
                    {
                        "detail": "This field is required.",
                        "source": {"pointer": f"/data/{idx}/type"},
                    }
                )
                continue
            elif item_type != "provider-secrets":
                hard_errors.append(
                    {
                        "detail": f"Invalid type '{item_type}'. Expected 'provider-secrets'.",
                        "source": {"pointer": f"/data/{idx}/type"},
                    }
                )
                continue

            # Validate id
            if not item_id:
                hard_errors.append(
                    {
                        "detail": "This field is required.",
                        "source": {"pointer": f"/data/{idx}/id"},
                    }
                )
                continue

            # Check for duplicate id in batch
            if item_id in seen_ids:
                hard_errors.append(
                    {
                        "detail": f"Duplicate secret ID '{item_id}' (first at data/{seen_ids[item_id]}).",
                        "source": {"pointer": f"/data/{idx}/id"},
                    }
                )
                continue

            seen_ids[item_id] = idx

            # Validate secret exists and belongs to tenant
            # (select_related avoids a second query for the provider below).
            try:
                provider_secret = ProviderSecret.objects.select_related("provider").get(
                    id=item_id, tenant_id=tenant_id
                )
            except ProviderSecret.DoesNotExist:
                hard_errors.append(
                    {
                        "detail": f"Provider secret '{item_id}' not found.",
                        "source": {"pointer": f"/data/{idx}/id"},
                    }
                )
                continue

            # Validate attributes
            item_attrs = item.get("attributes", {})
            if not item_attrs:
                hard_errors.append(
                    {
                        "detail": "This field is required.",
                        "source": {"pointer": f"/data/{idx}/attributes"},
                    }
                )
                continue

            # The item serializer needs the owning provider to validate the
            # secret payload against the provider's expected schema.
            item_context = {**self.context, "provider": provider_secret.provider}

            item_serializer = ProviderSecretBatchUpdateItemSerializer(
                data=item_attrs, context=item_context
            )
            if not item_serializer.is_valid():
                for field, field_errors in item_serializer.errors.items():
                    hard_errors.append(
                        {
                            "detail": str(field_errors[0]),
                            "source": {"pointer": f"/data/{idx}/attributes/{field}"},
                        }
                    )
                continue

            validated_items.append(
                {
                    "source_index": idx,
                    "provider_secret": provider_secret,
                    **item_serializer.validated_data,
                }
            )

        # All-or-nothing: if any hard errors, fail the entire batch
        if hard_errors:
            raise ValidationError(hard_errors)

        attrs["_validated_items"] = validated_items
        return attrs
|
||||
|
||||
|
||||
# Invitations
|
||||
|
||||
|
||||
|
||||
@@ -111,6 +111,18 @@ urlpatterns = [
|
||||
ProviderSecretViewSet.as_view({"get": "list", "post": "create"}),
|
||||
name="providersecret-list",
|
||||
),
|
||||
path(
|
||||
"providers/secrets/batch",
|
||||
ProviderSecretViewSet.as_view(
|
||||
{"post": "batch_create", "patch": "batch_update"}
|
||||
),
|
||||
name="providersecret-batch",
|
||||
),
|
||||
path(
|
||||
"providers/batch",
|
||||
ProviderViewSet.as_view({"post": "batch_create", "patch": "batch_update"}),
|
||||
name="provider-batch",
|
||||
),
|
||||
path(
|
||||
"providers/secrets/<uuid:pk>",
|
||||
ProviderSecretViewSet.as_view(
|
||||
|
||||
@@ -70,6 +70,7 @@ from rest_framework.exceptions import (
|
||||
ValidationError,
|
||||
)
|
||||
from rest_framework.generics import GenericAPIView, get_object_or_404
|
||||
from rest_framework.parsers import JSONParser
|
||||
from rest_framework.permissions import SAFE_METHODS
|
||||
from rest_framework_json_api.views import RelationshipView, Response
|
||||
from rest_framework_simplejwt.exceptions import InvalidToken, TokenError
|
||||
@@ -243,11 +244,15 @@ from api.v1.serializers import (
|
||||
ProcessorCreateSerializer,
|
||||
ProcessorSerializer,
|
||||
ProcessorUpdateSerializer,
|
||||
ProviderBatchCreateSerializer,
|
||||
ProviderBatchUpdateSerializer,
|
||||
ProviderCreateSerializer,
|
||||
ProviderGroupCreateSerializer,
|
||||
ProviderGroupMembershipSerializer,
|
||||
ProviderGroupSerializer,
|
||||
ProviderGroupUpdateSerializer,
|
||||
ProviderSecretBatchCreateSerializer,
|
||||
ProviderSecretBatchUpdateSerializer,
|
||||
ProviderSecretCreateSerializer,
|
||||
ProviderSecretSerializer,
|
||||
ProviderSecretUpdateSerializer,
|
||||
@@ -289,6 +294,17 @@ from prowler.providers.aws.lib.cloudtrail_timeline.cloudtrail_timeline import (
|
||||
CloudTrailTimeline,
|
||||
)
|
||||
|
||||
|
||||
class JSONAPIRawParser(JSONParser):
|
||||
"""Parser that accepts application/vnd.api+json but passes through raw JSON.
|
||||
|
||||
Unlike rest_framework_json_api's JSONParser which transforms
|
||||
JSON:API payloads, this parser just parses JSON and leaves the
|
||||
data structure intact for batch serializers to handle.
|
||||
"""
|
||||
|
||||
media_type = "application/vnd.api+json"
|
||||
|
||||
logger = logging.getLogger(BackendLogger.API)
|
||||
|
||||
CACHE_DECORATOR = cache_control(
|
||||
@@ -1551,12 +1567,26 @@ class ProviderViewSet(DisablePaginationMixin, BaseRLSViewSet):
|
||||
def get_serializer_class(self):
|
||||
if self.action == "create":
|
||||
return ProviderCreateSerializer
|
||||
elif self.action == "batch_create":
|
||||
return ProviderBatchCreateSerializer
|
||||
elif self.action == "batch_update":
|
||||
return ProviderBatchUpdateSerializer
|
||||
elif self.action == "partial_update":
|
||||
return ProviderUpdateSerializer
|
||||
elif self.action in ["connection", "destroy"]:
|
||||
return TaskSerializer
|
||||
return super().get_serializer_class()
|
||||
|
||||
def get_parsers(self):
|
||||
# Check action_map since self.action is not set yet during parser initialization
|
||||
action_map = getattr(self, "action_map", {})
|
||||
if (
|
||||
"batch_create" in action_map.values()
|
||||
or "batch_update" in action_map.values()
|
||||
):
|
||||
return [JSONAPIRawParser(), JSONParser()]
|
||||
return super().get_parsers()
|
||||
|
||||
def partial_update(self, request, *args, **kwargs):
|
||||
instance = self.get_object()
|
||||
serializer = self.get_serializer(
|
||||
@@ -1621,6 +1651,197 @@ class ProviderViewSet(DisablePaginationMixin, BaseRLSViewSet):
|
||||
},
|
||||
)
|
||||
|
||||
@extend_schema(
|
||||
tags=["Provider"],
|
||||
summary="Batch create providers",
|
||||
description="Create multiple providers in a single atomic operation. JSON:API compliant with all-or-nothing semantics. Secrets must be added separately via the provider secrets endpoint.",
|
||||
request={
|
||||
"application/vnd.api+json": {
|
||||
"type": "object",
|
||||
"required": ["data"],
|
||||
"properties": {
|
||||
"data": {
|
||||
"type": "array",
|
||||
"description": "Array of provider objects to create (max 100)",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"required": ["type", "attributes"],
|
||||
"properties": {
|
||||
"type": {
|
||||
"type": "string",
|
||||
"enum": ["providers"],
|
||||
"description": "Must be 'providers'",
|
||||
},
|
||||
"attributes": {
|
||||
"type": "object",
|
||||
"required": ["provider", "uid"],
|
||||
"properties": {
|
||||
"provider": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"aws",
|
||||
"azure",
|
||||
"gcp",
|
||||
"kubernetes",
|
||||
"github",
|
||||
"m365",
|
||||
],
|
||||
"description": "Cloud provider type",
|
||||
},
|
||||
"uid": {
|
||||
"type": "string",
|
||||
"description": "Provider unique identifier (e.g., AWS 12-digit account ID)",
|
||||
},
|
||||
"alias": {
|
||||
"type": "string",
|
||||
"description": "Optional display name",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
responses={
|
||||
201: OpenApiResponse(
|
||||
response=ProviderSerializer(many=True),
|
||||
description="All providers created successfully",
|
||||
),
|
||||
400: OpenApiResponse(
|
||||
description="Validation errors - entire batch rejected (all-or-nothing)"
|
||||
),
|
||||
},
|
||||
)
|
||||
@action(
|
||||
detail=False,
|
||||
methods=["post"],
|
||||
url_path="batch",
|
||||
url_name="batch",
|
||||
parser_classes=[JSONAPIRawParser, JSONParser],
|
||||
)
|
||||
def batch_create(self, request):
|
||||
serializer = ProviderBatchCreateSerializer(
|
||||
data=request.data, context=self.get_serializer_context()
|
||||
)
|
||||
# All-or-nothing: raise ValidationError if any item fails validation
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
validated_items = serializer.validated_data.get("_validated_items", [])
|
||||
|
||||
# Create all providers atomically
|
||||
created_provider_ids = []
|
||||
with transaction.atomic():
|
||||
for item in validated_items:
|
||||
item_data = item["data"]
|
||||
provider = Provider.objects.create(
|
||||
tenant_id=request.tenant_id, **item_data
|
||||
)
|
||||
created_provider_ids.append(provider.id)
|
||||
|
||||
created_providers = (
|
||||
Provider.objects.filter(id__in=created_provider_ids)
|
||||
.select_related("secret")
|
||||
.prefetch_related("provider_groups")
|
||||
)
|
||||
|
||||
response_serializer = ProviderSerializer(
|
||||
created_providers, many=True, context=self.get_serializer_context()
|
||||
)
|
||||
|
||||
return Response(data=response_serializer.data, status=status.HTTP_201_CREATED)
|
||||
|
||||
@extend_schema(
|
||||
tags=["Provider"],
|
||||
summary="Batch update providers",
|
||||
description="Update multiple providers in a single atomic operation. JSON:API compliant with all-or-nothing semantics. Only alias can be updated.",
|
||||
request={
|
||||
"application/vnd.api+json": {
|
||||
"type": "object",
|
||||
"required": ["data"],
|
||||
"properties": {
|
||||
"data": {
|
||||
"type": "array",
|
||||
"description": "Array of provider objects to update (max 100)",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"required": ["type", "id", "attributes"],
|
||||
"properties": {
|
||||
"type": {
|
||||
"type": "string",
|
||||
"enum": ["providers"],
|
||||
"description": "Must be 'providers'",
|
||||
},
|
||||
"id": {
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"description": "Provider UUID to update",
|
||||
},
|
||||
"attributes": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"alias": {
|
||||
"type": "string",
|
||||
"description": "New display name for the provider",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
responses={
|
||||
200: OpenApiResponse(
|
||||
response=ProviderSerializer(many=True),
|
||||
description="All providers updated successfully",
|
||||
),
|
||||
400: OpenApiResponse(
|
||||
description="Validation errors - entire batch rejected (all-or-nothing)"
|
||||
),
|
||||
},
|
||||
)
|
||||
@action(
|
||||
detail=False,
|
||||
methods=["patch"],
|
||||
url_path="batch",
|
||||
url_name="batch-update",
|
||||
parser_classes=[JSONAPIRawParser, JSONParser],
|
||||
)
|
||||
def batch_update(self, request):
|
||||
serializer = ProviderBatchUpdateSerializer(
|
||||
data=request.data, context=self.get_serializer_context()
|
||||
)
|
||||
# All-or-nothing: raise ValidationError if any item fails validation
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
validated_items = serializer.validated_data.get("_validated_items", [])
|
||||
|
||||
# Update all providers atomically
|
||||
updated_provider_ids = []
|
||||
with transaction.atomic():
|
||||
for item_data in validated_items:
|
||||
item_data.pop("index")
|
||||
provider = item_data.pop("provider")
|
||||
for key, value in item_data.items():
|
||||
setattr(provider, key, value)
|
||||
provider.save()
|
||||
updated_provider_ids.append(provider.id)
|
||||
|
||||
updated_providers = (
|
||||
Provider.objects.filter(id__in=updated_provider_ids)
|
||||
.select_related("secret")
|
||||
.prefetch_related("provider_groups")
|
||||
)
|
||||
|
||||
response_serializer = ProviderSerializer(
|
||||
updated_providers, many=True, context=self.get_serializer_context()
|
||||
)
|
||||
|
||||
return Response(data=response_serializer.data, status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
@extend_schema_view(
|
||||
list=extend_schema(
|
||||
@@ -3558,10 +3779,259 @@ class ProviderSecretViewSet(BaseRLSViewSet):
|
||||
def get_serializer_class(self):
|
||||
if self.action == "create":
|
||||
return ProviderSecretCreateSerializer
|
||||
elif self.action == "batch_create":
|
||||
return ProviderSecretBatchCreateSerializer
|
||||
elif self.action == "batch_update":
|
||||
return ProviderSecretBatchUpdateSerializer
|
||||
elif self.action == "partial_update":
|
||||
return ProviderSecretUpdateSerializer
|
||||
return super().get_serializer_class()
|
||||
|
||||
def get_parsers(self):
|
||||
# Check action_map since self.action is not set yet during parser initialization
|
||||
action_map = getattr(self, "action_map", {})
|
||||
if (
|
||||
"batch_create" in action_map.values()
|
||||
or "batch_update" in action_map.values()
|
||||
):
|
||||
return [JSONAPIRawParser(), JSONParser()]
|
||||
return super().get_parsers()
|
||||
|
||||
@extend_schema(
|
||||
tags=["Provider"],
|
||||
summary="Batch create provider secrets",
|
||||
description=(
|
||||
"Create multiple provider secrets in a single atomic operation. "
|
||||
"Supports to-many relationships where one secret definition can be "
|
||||
"associated with multiple providers. JSON:API compliant with all-or-nothing "
|
||||
"semantics for hard errors. Providers that already have secrets are skipped "
|
||||
"and reported in meta.skipped."
|
||||
),
|
||||
request={
|
||||
"application/vnd.api+json": {
|
||||
"type": "object",
|
||||
"required": ["data"],
|
||||
"properties": {
|
||||
"data": {
|
||||
"type": "array",
|
||||
"description": "Array of provider-secret objects (max 100)",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"required": ["type", "attributes", "relationships"],
|
||||
"properties": {
|
||||
"type": {
|
||||
"type": "string",
|
||||
"enum": ["provider-secrets"],
|
||||
},
|
||||
"attributes": {
|
||||
"type": "object",
|
||||
"required": ["secret_type", "secret"],
|
||||
"properties": {
|
||||
"name": {"type": "string"},
|
||||
"secret_type": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"static",
|
||||
"role",
|
||||
"service_account",
|
||||
],
|
||||
},
|
||||
"secret": {
|
||||
"type": "object",
|
||||
"description": "Credentials (varies by provider/secret_type)",
|
||||
},
|
||||
},
|
||||
},
|
||||
"relationships": {
|
||||
"type": "object",
|
||||
"required": ["providers"],
|
||||
"properties": {
|
||||
"providers": {
|
||||
"type": "object",
|
||||
"description": "To-many relationship: one secret can be created for multiple providers",
|
||||
"properties": {
|
||||
"data": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"required": ["type", "id"],
|
||||
"properties": {
|
||||
"type": {
|
||||
"type": "string",
|
||||
"enum": ["providers"],
|
||||
},
|
||||
"id": {
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
responses={
|
||||
201: OpenApiResponse(
|
||||
description=(
|
||||
"All secrets created successfully. Response includes meta.skipped "
|
||||
"if any providers were skipped because they already have secrets."
|
||||
),
|
||||
),
|
||||
400: OpenApiResponse(
|
||||
description="Validation errors - entire batch rejected (all-or-nothing)"
|
||||
),
|
||||
},
|
||||
)
|
||||
@action(
|
||||
detail=False,
|
||||
methods=["post"],
|
||||
url_path="batch",
|
||||
url_name="batch",
|
||||
parser_classes=[JSONAPIRawParser, JSONParser],
|
||||
)
|
||||
def batch_create(self, request):
|
||||
serializer = ProviderSecretBatchCreateSerializer(
|
||||
data=request.data, context=self.get_serializer_context()
|
||||
)
|
||||
# All-or-nothing: raises ValidationError on any hard error
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
validated_items = serializer.validated_data.get("_validated_items", [])
|
||||
skipped_providers = serializer.validated_data.get("_skipped_providers", [])
|
||||
|
||||
# Create all validated secrets atomically
|
||||
created_secret_ids = []
|
||||
with transaction.atomic():
|
||||
for item_data in validated_items:
|
||||
item_data.pop("source_index", None)
|
||||
provider = item_data.pop("provider")
|
||||
secret = ProviderSecret.objects.create(
|
||||
tenant_id=request.tenant_id, provider=provider, **item_data
|
||||
)
|
||||
created_secret_ids.append(secret.id)
|
||||
|
||||
created_secrets = ProviderSecret.objects.filter(
|
||||
id__in=created_secret_ids
|
||||
).select_related("provider")
|
||||
|
||||
# Pass skipped providers through context so the serializer's
|
||||
# get_root_meta() can include them in the response meta.
|
||||
context = self.get_serializer_context()
|
||||
if skipped_providers:
|
||||
context["_skipped_providers"] = skipped_providers
|
||||
|
||||
response_serializer = ProviderSecretSerializer(
|
||||
created_secrets, many=True, context=context
|
||||
)
|
||||
|
||||
return Response(data=response_serializer.data, status=status.HTTP_201_CREATED)
|
||||
|
||||
@extend_schema(
|
||||
tags=["Provider"],
|
||||
summary="Batch update provider secrets",
|
||||
description=(
|
||||
"Update multiple provider secrets in a single atomic operation. "
|
||||
"JSON:API compliant with all-or-nothing semantics."
|
||||
),
|
||||
request={
|
||||
"application/vnd.api+json": {
|
||||
"type": "object",
|
||||
"required": ["data"],
|
||||
"properties": {
|
||||
"data": {
|
||||
"type": "array",
|
||||
"description": "Array of provider-secret objects (max 100)",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"required": ["type", "id", "attributes"],
|
||||
"properties": {
|
||||
"type": {
|
||||
"type": "string",
|
||||
"enum": ["provider-secrets"],
|
||||
},
|
||||
"id": {
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"description": "Provider secret UUID to update",
|
||||
},
|
||||
"attributes": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {"type": "string"},
|
||||
"secret_type": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"static",
|
||||
"role",
|
||||
"service_account",
|
||||
],
|
||||
},
|
||||
"secret": {
|
||||
"type": "object",
|
||||
"description": "Credentials (varies by provider/secret_type)",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
responses={
|
||||
200: OpenApiResponse(
|
||||
response=ProviderSecretSerializer(many=True),
|
||||
description="All secrets updated successfully",
|
||||
),
|
||||
400: OpenApiResponse(
|
||||
description="Validation errors - entire batch rejected (all-or-nothing)"
|
||||
),
|
||||
},
|
||||
)
|
||||
@action(
|
||||
detail=False,
|
||||
methods=["patch"],
|
||||
url_path="batch",
|
||||
url_name="batch-update",
|
||||
parser_classes=[JSONAPIRawParser, JSONParser],
|
||||
)
|
||||
def batch_update(self, request):
|
||||
serializer = ProviderSecretBatchUpdateSerializer(
|
||||
data=request.data, context=self.get_serializer_context()
|
||||
)
|
||||
# All-or-nothing: raises ValidationError on any hard error
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
validated_items = serializer.validated_data.get("_validated_items", [])
|
||||
|
||||
# Update all validated secrets atomically
|
||||
updated_secret_ids = []
|
||||
with transaction.atomic():
|
||||
for item_data in validated_items:
|
||||
item_data.pop("source_index", None)
|
||||
provider_secret = item_data.pop("provider_secret")
|
||||
for key, value in item_data.items():
|
||||
setattr(provider_secret, key, value)
|
||||
provider_secret.save()
|
||||
updated_secret_ids.append(provider_secret.id)
|
||||
|
||||
updated_secrets = ProviderSecret.objects.filter(
|
||||
id__in=updated_secret_ids
|
||||
).select_related("provider")
|
||||
|
||||
response_serializer = ProviderSecretSerializer(
|
||||
updated_secrets, many=True, context=self.get_serializer_context()
|
||||
)
|
||||
|
||||
return Response(data=response_serializer.data, status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
@extend_schema_view(
|
||||
list=extend_schema(
|
||||
|
||||
@@ -293,6 +293,8 @@ SECURE_REFERRER_POLICY = "strict-origin-when-cross-origin"
|
||||
|
||||
DJANGO_DELETION_BATCH_SIZE = env.int("DJANGO_DELETION_BATCH_SIZE", 5000)
|
||||
|
||||
API_BATCH_MAX_SIZE = env.int("DJANGO_API_BATCH_MAX_SIZE", 100)
|
||||
|
||||
# SAML requirement
|
||||
CSRF_COOKIE_SECURE = True
|
||||
SESSION_COOKIE_SECURE = True
|
||||
|
||||
Reference in New Issue
Block a user