Mirror of https://github.com/prowler-cloud/prowler.git (synced 2026-02-09 02:30:43 +00:00)
Compare commits
57 commits
| SHA1 |
|---|
| f70cf8d81e |
| 83b6c79203 |
| 1192c038b2 |
| 4ebbf6553e |
| c501d63382 |
| 72d6d3f535 |
| ddd34dc9cc |
| 03b1c10d13 |
| 4cd5b8fd04 |
| f0ce17182b |
| 2a8a7d844b |
| ff33f426e5 |
| f691046c1f |
| 9fad8735b8 |
| c632055517 |
| fd850790d5 |
| 912d5d7f8c |
| d88a136ac3 |
| 172484cf08 |
| 821083639a |
| e4f0f3ec87 |
| cc6302f7b8 |
| c89fd82856 |
| 0e29a92d42 |
| 835d8ffe5d |
| 21ee2068a6 |
| 0ad149942b |
| 66305768c0 |
| 05f98fe993 |
| 89416f37af |
| 7285ddcb4e |
| 8993a4f707 |
| 633d7bd8a8 |
| 3944ea2055 |
| d85d0f5877 |
| d32a7986a5 |
| 71813425bd |
| da000b54ca |
| 74a9b42d9f |
| f9322ab3aa |
| 5becaca2c4 |
| 50a670fbc4 |
| 48f405a696 |
| bc56c4242e |
| 1b63256b9c |
| 7930b449b3 |
| e5cd42da55 |
| 2a54bbf901 |
| 2e134ed947 |
| ba727391db |
| d4346149fa |
| 2637fc5132 |
| ac5135470b |
| 613966aecf |
| 83ddcb9c39 |
| 957c2433cf |
| c10b367070 |
@@ -1,14 +0,0 @@
{
  "repoOwner": "prowler-cloud",
  "repoName": "prowler",
  "targetPRLabels": [
    "backport"
  ],
  "sourcePRLabels": [
    "was-backported"
  ],
  "copySourcePRLabels": false,
  "copySourcePRReviewers": true,
  "prTitle": "{{sourcePullRequest.title}}",
  "commitConflicts": true
}
17 .dockerignore Normal file
@@ -0,0 +1,17 @@
# Ignore git files
.git/
.github/

# Ignore Dockerfile
Dockerfile

# Ignore hidden files
.pre-commit-config.yaml
.dockerignore
.gitignore
.pytest*
.DS_Store

# Ignore output directories
output/
junit-reports/
126 .env
@@ -1,126 +0,0 @@
#### Important Note ####
# This file is used to store environment variables for the Prowler App.
# For production, it is recommended to use a secure method to store these variables and change the default secret keys.

#### Prowler UI Configuration ####
PROWLER_UI_VERSION="stable"
AUTH_URL=http://localhost:3000
API_BASE_URL=http://prowler-api:8080/api/v1
NEXT_PUBLIC_API_BASE_URL=${API_BASE_URL}
NEXT_PUBLIC_API_DOCS_URL=http://prowler-api:8080/api/v1/docs
AUTH_TRUST_HOST=true
UI_PORT=3000
# Temp URL for feeds need to use actual
RSS_FEED_URL=https://prowler.com/blog/rss
# openssl rand -base64 32
AUTH_SECRET="N/c6mnaS5+SWq81+819OrzQZlmx1Vxtp/orjttJSmw8="
# Google Tag Manager ID
NEXT_PUBLIC_GOOGLE_TAG_MANAGER_ID=""


#### Prowler API Configuration ####
PROWLER_API_VERSION="stable"
# PostgreSQL settings
# If running Django and celery on host, use 'localhost', else use 'postgres-db'
POSTGRES_HOST=postgres-db
POSTGRES_PORT=5432
POSTGRES_ADMIN_USER=prowler_admin
POSTGRES_ADMIN_PASSWORD=postgres
POSTGRES_USER=prowler
POSTGRES_PASSWORD=postgres
POSTGRES_DB=prowler_db
# Read replica settings (optional)
# POSTGRES_REPLICA_HOST=postgres-db
# POSTGRES_REPLICA_PORT=5432
# POSTGRES_REPLICA_USER=prowler
# POSTGRES_REPLICA_PASSWORD=postgres
# POSTGRES_REPLICA_DB=prowler_db

# Celery-Prowler task settings
TASK_RETRY_DELAY_SECONDS=0.1
TASK_RETRY_ATTEMPTS=5

# Valkey settings
# If running Valkey and celery on host, use localhost, else use 'valkey'
VALKEY_HOST=valkey
VALKEY_PORT=6379
VALKEY_DB=0

# API scan settings

# The path to the directory where scan output should be stored
DJANGO_TMP_OUTPUT_DIRECTORY="/tmp/prowler_api_output"

# The maximum number of findings to process in a single batch
DJANGO_FINDINGS_BATCH_SIZE=1000

# The AWS access key to be used when uploading scan output to an S3 bucket
# If left empty, default AWS credentials resolution behavior will be used
DJANGO_OUTPUT_S3_AWS_ACCESS_KEY_ID=""

# The AWS secret key to be used when uploading scan output to an S3 bucket
DJANGO_OUTPUT_S3_AWS_SECRET_ACCESS_KEY=""

# An optional AWS session token
DJANGO_OUTPUT_S3_AWS_SESSION_TOKEN=""

# The AWS region where your S3 bucket is located (e.g., "us-east-1")
DJANGO_OUTPUT_S3_AWS_DEFAULT_REGION=""

# The name of the S3 bucket where scan output should be stored
DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET=""

# Django settings
DJANGO_ALLOWED_HOSTS=localhost,127.0.0.1,prowler-api
DJANGO_BIND_ADDRESS=0.0.0.0
DJANGO_PORT=8080
DJANGO_DEBUG=False
DJANGO_SETTINGS_MODULE=config.django.production
# Select one of [ndjson|human_readable]
DJANGO_LOGGING_FORMATTER=human_readable
# Select one of [DEBUG|INFO|WARNING|ERROR|CRITICAL]
# Applies to both Django and Celery Workers
DJANGO_LOGGING_LEVEL=INFO
# Defaults to the maximum available based on CPU cores if not set.
DJANGO_WORKERS=4
# Token lifetime is in minutes
DJANGO_ACCESS_TOKEN_LIFETIME=30
# Token lifetime is in minutes
DJANGO_REFRESH_TOKEN_LIFETIME=1440
DJANGO_CACHE_MAX_AGE=3600
DJANGO_STALE_WHILE_REVALIDATE=60
DJANGO_MANAGE_DB_PARTITIONS=True
# openssl genrsa -out private.pem 2048
DJANGO_TOKEN_SIGNING_KEY=""
# openssl rsa -in private.pem -pubout -out public.pem
DJANGO_TOKEN_VERIFYING_KEY=""
# openssl rand -base64 32
DJANGO_SECRETS_ENCRYPTION_KEY="oE/ltOhp/n1TdbHjVmzcjDPLcLA41CVI/4Rk+UB5ESc="
DJANGO_BROKER_VISIBILITY_TIMEOUT=86400
DJANGO_SENTRY_DSN=
DJANGO_THROTTLE_TOKEN_OBTAIN=50/minute

# Sentry settings
SENTRY_ENVIRONMENT=local
SENTRY_RELEASE=local

#### Prowler release version ####
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.12.2

# Social login credentials
SOCIAL_GOOGLE_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/google"
SOCIAL_GOOGLE_OAUTH_CLIENT_ID=""
SOCIAL_GOOGLE_OAUTH_CLIENT_SECRET=""

SOCIAL_GITHUB_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/github"
SOCIAL_GITHUB_OAUTH_CLIENT_ID=""
SOCIAL_GITHUB_OAUTH_CLIENT_SECRET=""

# Single Sign-On (SSO)
SAML_SSO_CALLBACK_URL="${AUTH_URL}/api/auth/callback/saml"

# Lighthouse tracing
LANGSMITH_TRACING=false
LANGSMITH_ENDPOINT="https://api.smith.langchain.com"
LANGSMITH_API_KEY=""
LANGCHAIN_PROJECT=""
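The default secret values in this file are placeholders, and its own comments give the commands for generating replacements. A minimal sketch of generating fresh values locally, assuming `openssl` is available (variable names come from the file above; writing them back into `.env` is left to you):

```bash
# Session secret for the UI (see the "openssl rand -base64 32" comment above)
AUTH_SECRET=$(openssl rand -base64 32)

# RSA key pair for DJANGO_TOKEN_SIGNING_KEY / DJANGO_TOKEN_VERIFYING_KEY
openssl genrsa -out private.pem 2048
openssl rsa -in private.pem -pubout -out public.pem

# Symmetric key for encrypting stored secrets
DJANGO_SECRETS_ENCRYPTION_KEY=$(openssl rand -base64 32)
```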
29 .github/CODEOWNERS vendored
@@ -1,28 +1 @@
# SDK
/* @prowler-cloud/sdk
/prowler/ @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
/tests/ @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
/dashboard/ @prowler-cloud/sdk
/docs/ @prowler-cloud/sdk
/examples/ @prowler-cloud/sdk
/util/ @prowler-cloud/sdk
/contrib/ @prowler-cloud/sdk
/permissions/ @prowler-cloud/sdk
/codecov.yml @prowler-cloud/sdk @prowler-cloud/api

# API
/api/ @prowler-cloud/api

# UI
/ui/ @prowler-cloud/ui

# AI
/mcp_server/ @prowler-cloud/ai

# Platform
/.github/ @prowler-cloud/platform
/Makefile @prowler-cloud/platform
/kubernetes/ @prowler-cloud/platform
**/Dockerfile* @prowler-cloud/platform
**/docker-compose*.yml @prowler-cloud/platform
**/docker-compose*.yaml @prowler-cloud/platform
* @prowler-cloud/prowler-team
50 .github/ISSUE_TEMPLATE/bug_report.md vendored Normal file
@@ -0,0 +1,50 @@
---
name: Bug report
about: Create a report to help us improve
title: "[Bug]: "
labels: bug, status/needs-triage
assignees: ''

---

<!--
Please use this template to create your bug report. By providing as much info as possible you help us understand the issue, reproduce it and resolve it for you quicker. Therefore, take a couple of extra minutes to make sure you have provided all info needed.

PROTIP: record your screen and attach it as a gif to showcase the issue.

- How to record and attach gif: https://bit.ly/2Mi8T6K
-->

**What happened?**
A clear and concise description of what the bug is or what is not working as expected.


**How to reproduce it**
Steps to reproduce the behavior:
1. What command are you running?
2. Environment you have, like single account, multi-account, organizations, etc.
3. See error


**Expected behavior**
A clear and concise description of what you expected to happen.


**Screenshots or Logs**
If applicable, add screenshots to help explain your problem.
You can also add logs (anonymize them first!). Here is a command that may help to share a log:
`bash -x ./prowler -options > debug.log 2>&1`, then attach `debug.log` here.


**From where are you running Prowler?**
Please complete the following information:
- Resource: [e.g. EC2 instance, Fargate task, Docker container run manually, EKS, Cloud9, CodeBuild, workstation, etc.]
- OS: [e.g. Amazon Linux 2, Mac, Alpine, Windows, etc.]
- AWS-CLI Version [`aws --version`]:
- Prowler Version [`./prowler -V`]:
- Shell and version:
- Others:


**Additional context**
Add any other context about the problem here.
140 .github/ISSUE_TEMPLATE/bug_report.yml vendored
@@ -1,140 +0,0 @@
|
||||
name: 🐞 Bug Report
|
||||
description: Create a report to help us improve
|
||||
labels: ["bug", "status/needs-triage"]
|
||||
|
||||
body:
|
||||
- type: checkboxes
|
||||
id: search
|
||||
attributes:
|
||||
label: Issue search
|
||||
options:
|
||||
- label: I have searched the existing issues and this bug has not been reported yet
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: component
|
||||
attributes:
|
||||
label: Which component is affected?
|
||||
multiple: true
|
||||
options:
|
||||
- Prowler CLI/SDK
|
||||
- Prowler API
|
||||
- Prowler UI
|
||||
- Prowler Dashboard
|
||||
- Prowler MCP Server
|
||||
- Documentation
|
||||
- Other
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: provider
|
||||
attributes:
|
||||
label: Cloud Provider (if applicable)
|
||||
multiple: true
|
||||
options:
|
||||
- AWS
|
||||
- Azure
|
||||
- GCP
|
||||
- Kubernetes
|
||||
- GitHub
|
||||
- Microsoft 365
|
||||
- Not applicable
|
||||
- type: textarea
|
||||
id: reproduce
|
||||
attributes:
|
||||
label: Steps to Reproduce
|
||||
description: Steps to reproduce the behavior
|
||||
placeholder: |-
|
||||
1. What command are you running?
|
||||
2. Cloud provider you are launching
|
||||
3. Environment you have, like single account, multi-account, organizations, multi or single subscription, etc.
|
||||
4. See error
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: expected
|
||||
attributes:
|
||||
label: Expected behavior
|
||||
description: A clear and concise description of what you expected to happen.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: actual
|
||||
attributes:
|
||||
label: Actual Result with Screenshots or Logs
|
||||
description: If applicable, add screenshots to help explain your problem. Also, you can add logs (anonymize them first!). Here a command that may help to share a log `prowler <your arguments> --log-level ERROR --log-file $(date +%F)_error.log` then attach here the log file.
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: type
|
||||
attributes:
|
||||
label: How did you install Prowler?
|
||||
options:
|
||||
- Cloning the repository from github.com (git clone)
|
||||
- From pip package (pip install prowler)
|
||||
- From brew (brew install prowler)
|
||||
- Docker (docker pull toniblyx/prowler)
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: environment
|
||||
attributes:
|
||||
label: Environment Resource
|
||||
description: From where are you running Prowler?
|
||||
placeholder: |-
|
||||
1. EC2 instance
|
||||
2. Fargate task
|
||||
3. Docker container locally
|
||||
4. EKS
|
||||
5. Cloud9
|
||||
6. CodeBuild
|
||||
7. Workstation
|
||||
8. Other(please specify)
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: os
|
||||
attributes:
|
||||
label: OS used
|
||||
description: Which OS are you using?
|
||||
placeholder: |-
|
||||
1. Amazon Linux 2
|
||||
2. MacOS
|
||||
3. Alpine Linux
|
||||
4. Windows
|
||||
5. Other(please specify)
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: prowler-version
|
||||
attributes:
|
||||
label: Prowler version
|
||||
description: Which Prowler version are you using?
|
||||
placeholder: |-
|
||||
prowler --version
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: python-version
|
||||
attributes:
|
||||
label: Python version
|
||||
description: Which Python version are you using?
|
||||
placeholder: |-
|
||||
python --version
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: pip-version
|
||||
attributes:
|
||||
label: Pip version
|
||||
description: Which pip version are you using?
|
||||
placeholder: |-
|
||||
pip --version
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: additional
|
||||
attributes:
|
||||
description: Additional context
|
||||
label: Context
|
||||
validations:
|
||||
required: false
|
||||
11 .github/ISSUE_TEMPLATE/config.yml vendored
@@ -1,11 +0,0 @@
blank_issues_enabled: false
contact_links:
  - name: 📖 Documentation
    url: https://docs.prowler.com
    about: Check our comprehensive documentation for guides and tutorials
  - name: 💬 GitHub Discussions
    url: https://github.com/prowler-cloud/prowler/discussions
    about: Ask questions and discuss with the community
  - name: 🌟 Prowler Community
    url: https://goto.prowler.com/slack
    about: Join our community for support and updates
79 .github/ISSUE_TEMPLATE/feature-request.yml vendored
@@ -1,79 +0,0 @@
name: 💡 Feature Request
description: Suggest an idea for this project
labels: ["feature-request", "status/needs-triage"]

body:
  - type: checkboxes
    id: search
    attributes:
      label: Feature search
      options:
        - label: I have searched the existing issues and this feature has not been requested yet
          required: true
  - type: dropdown
    id: component
    attributes:
      label: Which component would this feature affect?
      multiple: true
      options:
        - Prowler CLI/SDK
        - Prowler API
        - Prowler UI
        - Prowler Dashboard
        - Prowler MCP Server
        - Documentation
        - New component/Integration
    validations:
      required: true
  - type: dropdown
    id: provider
    attributes:
      label: Related to specific cloud provider?
      multiple: true
      options:
        - AWS
        - Azure
        - GCP
        - Kubernetes
        - GitHub
        - Microsoft 365
        - All providers
        - Not provider-specific
  - type: textarea
    id: Problem
    attributes:
      label: New feature motivation
      description: Is your feature request related to a problem? Please describe
      placeholder: |-
        1. A clear and concise description of what the problem is. Ex. I'm always frustrated when
    validations:
      required: true
  - type: textarea
    id: Solution
    attributes:
      label: Solution Proposed
      description: A clear and concise description of what you want to happen.
    validations:
      required: true
  - type: textarea
    id: use-case
    attributes:
      label: Use case and benefits
      description: Who would benefit from this feature and how?
      placeholder: This would help security teams by...
    validations:
      required: true
  - type: textarea
    id: Alternatives
    attributes:
      label: Describe alternatives you've considered
      description: A clear and concise description of any alternative solutions or features you've considered.
    validations:
      required: true
  - type: textarea
    id: Context
    attributes:
      label: Additional context
      description: Add any other context or screenshots about the feature request here.
    validations:
      required: false
20 .github/ISSUE_TEMPLATE/feature_request.md vendored Normal file
@@ -0,0 +1,20 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: enhancement, status/needs-triage
assignees: ''

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.
71 .github/actions/setup-python-poetry/action.yml vendored
@@ -1,71 +0,0 @@
name: 'Setup Python with Poetry'
description: 'Setup Python environment with Poetry and install dependencies'
author: 'Prowler'

inputs:
  python-version:
    description: 'Python version to use'
    required: true
  working-directory:
    description: 'Working directory for Poetry'
    required: false
    default: '.'
  poetry-version:
    description: 'Poetry version to install'
    required: false
    default: '2.1.1'
  install-dependencies:
    description: 'Install Python dependencies with Poetry'
    required: false
    default: 'true'

runs:
  using: 'composite'
  steps:
    - name: Replace @master with current branch in pyproject.toml
      if: github.event_name == 'pull_request' && github.base_ref == 'master'
      shell: bash
      working-directory: ${{ inputs.working-directory }}
      run: |
        BRANCH_NAME="${GITHUB_HEAD_REF:-${GITHUB_REF_NAME}}"
        echo "Using branch: $BRANCH_NAME"
        sed -i "s|@master|@$BRANCH_NAME|g" pyproject.toml

    - name: Install poetry
      shell: bash
      run: |
        python -m pip install --upgrade pip
        pipx install poetry==${{ inputs.poetry-version }}

    - name: Update SDK resolved_reference to latest commit
      if: github.event_name == 'push' && github.ref == 'refs/heads/master'
      shell: bash
      working-directory: ${{ inputs.working-directory }}
      run: |
        LATEST_COMMIT=$(curl -s "https://api.github.com/repos/prowler-cloud/prowler/commits/master" | jq -r '.sha')
        echo "Latest commit hash: $LATEST_COMMIT"
        sed -i '/url = "https:\/\/github\.com\/prowler-cloud\/prowler\.git"/,/resolved_reference = / {
          s/resolved_reference = "[a-f0-9]\{40\}"/resolved_reference = "'"$LATEST_COMMIT"'"/
        }' poetry.lock
        echo "Updated resolved_reference:"
        grep -A2 -B2 "resolved_reference" poetry.lock

    - name: Update poetry.lock
      shell: bash
      working-directory: ${{ inputs.working-directory }}
      run: poetry lock

    - name: Set up Python ${{ inputs.python-version }}
      uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
      with:
        python-version: ${{ inputs.python-version }}
        cache: 'poetry'
        cache-dependency-path: ${{ inputs.working-directory }}/poetry.lock

    - name: Install Python dependencies
      if: inputs.install-dependencies == 'true'
      shell: bash
      working-directory: ${{ inputs.working-directory }}
      run: |
        poetry install --no-root
        poetry run pip list
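For a rough local equivalent of what this composite action does on CI (a sketch only; the pinned Poetry version and the `--no-root` install come from the action above, while the working directory is an assumption — use whichever component you are building):

```bash
# Install the same Poetry version the action pins
python -m pip install --upgrade pip
pipx install poetry==2.1.1

# Refresh the lock file, then install dependencies without packaging the project itself
cd api   # assumption: the API component; substitute the relevant directory
poetry lock
poetry install --no-root
poetry run pip list
```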
152 .github/actions/trivy-scan/action.yml vendored
@@ -1,152 +0,0 @@
|
||||
name: 'Container Security Scan with Trivy'
|
||||
description: 'Scans container images for vulnerabilities using Trivy and reports results'
|
||||
author: 'Prowler'
|
||||
|
||||
inputs:
|
||||
image-name:
|
||||
description: 'Container image name to scan'
|
||||
required: true
|
||||
image-tag:
|
||||
description: 'Container image tag to scan'
|
||||
required: true
|
||||
default: ${{ github.sha }}
|
||||
severity:
|
||||
description: 'Severities to scan for (comma-separated)'
|
||||
required: false
|
||||
default: 'CRITICAL,HIGH,MEDIUM,LOW'
|
||||
fail-on-critical:
|
||||
description: 'Fail the build if critical vulnerabilities are found'
|
||||
required: false
|
||||
default: 'false'
|
||||
upload-sarif:
|
||||
description: 'Upload results to GitHub Security tab'
|
||||
required: false
|
||||
default: 'true'
|
||||
create-pr-comment:
|
||||
description: 'Create a comment on the PR with scan results'
|
||||
required: false
|
||||
default: 'true'
|
||||
artifact-retention-days:
|
||||
description: 'Days to retain the Trivy report artifact'
|
||||
required: false
|
||||
default: '2'
|
||||
|
||||
outputs:
|
||||
critical-count:
|
||||
description: 'Number of critical vulnerabilities found'
|
||||
value: ${{ steps.security-check.outputs.critical }}
|
||||
high-count:
|
||||
description: 'Number of high vulnerabilities found'
|
||||
value: ${{ steps.security-check.outputs.high }}
|
||||
total-count:
|
||||
description: 'Total number of vulnerabilities found'
|
||||
value: ${{ steps.security-check.outputs.total }}
|
||||
|
||||
runs:
|
||||
using: 'composite'
|
||||
steps:
|
||||
- name: Run Trivy vulnerability scan (SARIF)
|
||||
if: inputs.upload-sarif == 'true'
|
||||
uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 # v0.33.1
|
||||
with:
|
||||
image-ref: ${{ inputs.image-name }}:${{ inputs.image-tag }}
|
||||
format: 'sarif'
|
||||
output: 'trivy-results.sarif'
|
||||
severity: 'CRITICAL,HIGH'
|
||||
exit-code: '0'
|
||||
|
||||
- name: Upload Trivy results to GitHub Security tab
|
||||
if: inputs.upload-sarif == 'true'
|
||||
uses: github/codeql-action/upload-sarif@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
|
||||
with:
|
||||
sarif_file: 'trivy-results.sarif'
|
||||
category: 'trivy-container'
|
||||
|
||||
- name: Run Trivy vulnerability scan (JSON)
|
||||
uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 # v0.33.1
|
||||
with:
|
||||
image-ref: ${{ inputs.image-name }}:${{ inputs.image-tag }}
|
||||
format: 'json'
|
||||
output: 'trivy-report.json'
|
||||
severity: ${{ inputs.severity }}
|
||||
exit-code: '0'
|
||||
|
||||
- name: Upload Trivy report artifact
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
if: always()
|
||||
with:
|
||||
name: trivy-scan-report-${{ inputs.image-name }}
|
||||
path: trivy-report.json
|
||||
retention-days: ${{ inputs.artifact-retention-days }}
|
||||
|
||||
- name: Generate security summary
|
||||
id: security-check
|
||||
shell: bash
|
||||
run: |
|
||||
CRITICAL=$(jq '[.Results[]?.Vulnerabilities[]? | select(.Severity=="CRITICAL")] | length' trivy-report.json)
|
||||
HIGH=$(jq '[.Results[]?.Vulnerabilities[]? | select(.Severity=="HIGH")] | length' trivy-report.json)
|
||||
TOTAL=$(jq '[.Results[]?.Vulnerabilities[]?] | length' trivy-report.json)
|
||||
|
||||
echo "critical=$CRITICAL" >> $GITHUB_OUTPUT
|
||||
echo "high=$HIGH" >> $GITHUB_OUTPUT
|
||||
echo "total=$TOTAL" >> $GITHUB_OUTPUT
|
||||
|
||||
echo "### 🔒 Container Security Scan" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "**Image:** \`${{ inputs.image-name }}:${{ inputs.image-tag }}\`" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- 🔴 Critical: $CRITICAL" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- 🟠 High: $HIGH" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- **Total**: $TOTAL" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
- name: Comment scan results on PR
|
||||
if: inputs.create-pr-comment == 'true' && github.event_name == 'pull_request'
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
env:
|
||||
IMAGE_NAME: ${{ inputs.image-name }}
|
||||
GITHUB_SHA: ${{ inputs.image-tag }}
|
||||
SEVERITY: ${{ inputs.severity }}
|
||||
with:
|
||||
script: |
|
||||
const comment = require('./.github/scripts/trivy-pr-comment.js');
|
||||
|
||||
// Unique identifier to find our comment
|
||||
const marker = '<!-- trivy-scan-comment:${{ inputs.image-name }} -->';
|
||||
const body = marker + '\n' + comment;
|
||||
|
||||
// Find existing comment
|
||||
const { data: comments } = await github.rest.issues.listComments({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
});
|
||||
|
||||
const existingComment = comments.find(c => c.body?.includes(marker));
|
||||
|
||||
if (existingComment) {
|
||||
// Update existing comment
|
||||
await github.rest.issues.updateComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
comment_id: existingComment.id,
|
||||
body: body
|
||||
});
|
||||
console.log('✅ Updated existing Trivy scan comment');
|
||||
} else {
|
||||
// Create new comment
|
||||
await github.rest.issues.createComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
body: body
|
||||
});
|
||||
console.log('✅ Created new Trivy scan comment');
|
||||
}
|
||||
|
||||
- name: Check for critical vulnerabilities
|
||||
if: inputs.fail-on-critical == 'true' && steps.security-check.outputs.critical != '0'
|
||||
shell: bash
|
||||
run: |
|
||||
echo "::error::Found ${{ steps.security-check.outputs.critical }} critical vulnerabilities"
|
||||
echo "::warning::Please update packages or use a different base image"
|
||||
exit 1
|
||||
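The Trivy scan action above wraps `aquasecurity/trivy-action` and summarizes results with `jq`. A rough local approximation, assuming the Trivy CLI and `jq` are installed (the image name/tag is a placeholder; the jq filters are taken from the action's summary step):

```bash
# Scan an already-built image and write a JSON report
trivy image --format json --output trivy-report.json \
  --severity CRITICAL,HIGH,MEDIUM,LOW --exit-code 0 prowler-api:local

# Count findings the same way the action's summary step does
CRITICAL=$(jq '[.Results[]?.Vulnerabilities[]? | select(.Severity=="CRITICAL")] | length' trivy-report.json)
HIGH=$(jq '[.Results[]?.Vulnerabilities[]? | select(.Severity=="HIGH")] | length' trivy-report.json)
TOTAL=$(jq '[.Results[]?.Vulnerabilities[]?] | length' trivy-report.json)
echo "Critical: $CRITICAL  High: $HIGH  Total: $TOTAL"
```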
12 .github/codeql/api-codeql-config.yml vendored
@@ -1,12 +0,0 @@
name: 'API: CodeQL Config'
paths:
  - 'api/'

paths-ignore:
  - 'api/tests/**'
  - 'api/**/__pycache__/**'
  - 'api/**/migrations/**'
  - 'api/**/*.md'

queries:
  - uses: security-and-quality
4 .github/codeql/sdk-codeql-config.yml vendored
@@ -1,4 +0,0 @@
name: "SDK - CodeQL Config"
paths-ignore:
  - "api/"
  - "ui/"
3 .github/codeql/ui-codeql-config.yml vendored
@@ -1,3 +0,0 @@
name: "UI - CodeQL Config"
paths:
  - "ui/"
120 .github/dependabot.yml vendored
@@ -1,120 +0,0 @@
|
||||
# To get started with Dependabot version updates, you'll need to specify which
|
||||
# package ecosystems to update and where the package manifests are located.
|
||||
# Please see the documentation for all configuration options:
|
||||
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
|
||||
|
||||
version: 2
|
||||
updates:
|
||||
# v5
|
||||
- package-ecosystem: "pip"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "monthly"
|
||||
open-pull-requests-limit: 25
|
||||
target-branch: master
|
||||
labels:
|
||||
- "dependencies"
|
||||
- "pip"
|
||||
|
||||
# Dependabot Updates are temporary disabled - 2025/03/19
|
||||
# - package-ecosystem: "pip"
|
||||
# directory: "/api"
|
||||
# schedule:
|
||||
# interval: "daily"
|
||||
# open-pull-requests-limit: 10
|
||||
# target-branch: master
|
||||
# labels:
|
||||
# - "dependencies"
|
||||
# - "pip"
|
||||
# - "component/api"
|
||||
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "monthly"
|
||||
open-pull-requests-limit: 25
|
||||
target-branch: master
|
||||
labels:
|
||||
- "dependencies"
|
||||
- "github_actions"
|
||||
|
||||
# Dependabot Updates are temporary disabled - 2025/03/19
|
||||
# - package-ecosystem: "npm"
|
||||
# directory: "/ui"
|
||||
# schedule:
|
||||
# interval: "daily"
|
||||
# open-pull-requests-limit: 10
|
||||
# target-branch: master
|
||||
# labels:
|
||||
# - "dependencies"
|
||||
# - "npm"
|
||||
# - "component/ui"
|
||||
|
||||
- package-ecosystem: "docker"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "monthly"
|
||||
open-pull-requests-limit: 25
|
||||
target-branch: master
|
||||
labels:
|
||||
- "dependencies"
|
||||
- "docker"
|
||||
|
||||
# Dependabot Updates are temporary disabled - 2025/04/15
|
||||
# v4.6
|
||||
# - package-ecosystem: "pip"
|
||||
# directory: "/"
|
||||
# schedule:
|
||||
# interval: "weekly"
|
||||
# open-pull-requests-limit: 10
|
||||
# target-branch: v4.6
|
||||
# labels:
|
||||
# - "dependencies"
|
||||
# - "pip"
|
||||
# - "v4"
|
||||
|
||||
# - package-ecosystem: "github-actions"
|
||||
# directory: "/"
|
||||
# schedule:
|
||||
# interval: "weekly"
|
||||
# open-pull-requests-limit: 10
|
||||
# target-branch: v4.6
|
||||
# labels:
|
||||
# - "dependencies"
|
||||
# - "github_actions"
|
||||
# - "v4"
|
||||
|
||||
# - package-ecosystem: "docker"
|
||||
# directory: "/"
|
||||
# schedule:
|
||||
# interval: "weekly"
|
||||
# open-pull-requests-limit: 10
|
||||
# target-branch: v4.6
|
||||
# labels:
|
||||
# - "dependencies"
|
||||
# - "docker"
|
||||
# - "v4"
|
||||
|
||||
# Dependabot Updates are temporary disabled - 2025/03/19
|
||||
# v3
|
||||
# - package-ecosystem: "pip"
|
||||
# directory: "/"
|
||||
# schedule:
|
||||
# interval: "monthly"
|
||||
# open-pull-requests-limit: 10
|
||||
# target-branch: v3
|
||||
# labels:
|
||||
# - "dependencies"
|
||||
# - "pip"
|
||||
# - "v3"
|
||||
|
||||
# - package-ecosystem: "github-actions"
|
||||
# directory: "/"
|
||||
# schedule:
|
||||
# interval: "monthly"
|
||||
# open-pull-requests-limit: 10
|
||||
# target-branch: v3
|
||||
# labels:
|
||||
# - "dependencies"
|
||||
# - "github_actions"
|
||||
# - "v3"
|
||||
134 .github/labeler.yml vendored
@@ -1,134 +0,0 @@
|
||||
documentation:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "docs/**"
|
||||
|
||||
provider/aws:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "prowler/providers/aws/**"
|
||||
- any-glob-to-any-file: "tests/providers/aws/**"
|
||||
|
||||
provider/azure:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "prowler/providers/azure/**"
|
||||
- any-glob-to-any-file: "tests/providers/azure/**"
|
||||
|
||||
provider/gcp:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "prowler/providers/gcp/**"
|
||||
- any-glob-to-any-file: "tests/providers/gcp/**"
|
||||
|
||||
provider/kubernetes:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "prowler/providers/kubernetes/**"
|
||||
- any-glob-to-any-file: "tests/providers/kubernetes/**"
|
||||
|
||||
provider/m365:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "prowler/providers/m365/**"
|
||||
- any-glob-to-any-file: "tests/providers/m365/**"
|
||||
|
||||
provider/github:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "prowler/providers/github/**"
|
||||
- any-glob-to-any-file: "tests/providers/github/**"
|
||||
|
||||
provider/iac:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "prowler/providers/iac/**"
|
||||
- any-glob-to-any-file: "tests/providers/iac/**"
|
||||
|
||||
provider/mongodbatlas:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "prowler/providers/mongodbatlas/**"
|
||||
- any-glob-to-any-file: "tests/providers/mongodbatlas/**"
|
||||
|
||||
provider/oci:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "prowler/providers/oraclecloud/**"
|
||||
- any-glob-to-any-file: "tests/providers/oraclecloud/**"
|
||||
|
||||
github_actions:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: ".github/workflows/*"
|
||||
|
||||
cli:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "cli/**"
|
||||
|
||||
mutelist:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "prowler/lib/mutelist/**"
|
||||
- any-glob-to-any-file: "prowler/providers/aws/lib/mutelist/**"
|
||||
- any-glob-to-any-file: "prowler/providers/azure/lib/mutelist/**"
|
||||
- any-glob-to-any-file: "prowler/providers/gcp/lib/mutelist/**"
|
||||
- any-glob-to-any-file: "prowler/providers/kubernetes/lib/mutelist/**"
|
||||
- any-glob-to-any-file: "prowler/providers/mongodbatlas/lib/mutelist/**"
|
||||
- any-glob-to-any-file: "tests/lib/mutelist/**"
|
||||
- any-glob-to-any-file: "tests/providers/aws/lib/mutelist/**"
|
||||
- any-glob-to-any-file: "tests/providers/azure/lib/mutelist/**"
|
||||
- any-glob-to-any-file: "tests/providers/gcp/lib/mutelist/**"
|
||||
- any-glob-to-any-file: "tests/providers/kubernetes/lib/mutelist/**"
|
||||
- any-glob-to-any-file: "tests/providers/mongodbatlas/lib/mutelist/**"
|
||||
|
||||
integration/s3:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "prowler/providers/aws/lib/s3/**"
|
||||
- any-glob-to-any-file: "tests/providers/aws/lib/s3/**"
|
||||
|
||||
integration/slack:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "prowler/lib/outputs/slack/**"
|
||||
- any-glob-to-any-file: "tests/lib/outputs/slack/**"
|
||||
|
||||
integration/security-hub:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "prowler/providers/aws/lib/security_hub/**"
|
||||
- any-glob-to-any-file: "tests/providers/aws/lib/security_hub/**"
|
||||
- any-glob-to-any-file: "prowler/lib/outputs/asff/**"
|
||||
- any-glob-to-any-file: "tests/lib/outputs/asff/**"
|
||||
|
||||
output/html:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "prowler/lib/outputs/html/**"
|
||||
- any-glob-to-any-file: "tests/lib/outputs/html/**"
|
||||
|
||||
output/asff:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "prowler/lib/outputs/asff/**"
|
||||
- any-glob-to-any-file: "tests/lib/outputs/asff/**"
|
||||
|
||||
output/ocsf:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "prowler/lib/outputs/ocsf/**"
|
||||
- any-glob-to-any-file: "tests/lib/outputs/ocsf/**"
|
||||
|
||||
output/csv:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "prowler/lib/outputs/csv/**"
|
||||
- any-glob-to-any-file: "tests/lib/outputs/csv/**"
|
||||
|
||||
component/api:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "api/**"
|
||||
|
||||
component/ui:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "ui/**"
|
||||
|
||||
component/mcp-server:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "mcp_server/**"
|
||||
|
||||
compliance:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "prowler/compliance/**"
|
||||
- any-glob-to-any-file: "prowler/lib/outputs/compliance/**"
|
||||
- any-glob-to-any-file: "tests/lib/outputs/compliance/**"
|
||||
|
||||
review-django-migrations:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "api/src/backend/api/migrations/**"
|
||||
|
||||
metadata-review:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "**/*.metadata.json"
|
||||
21 .github/pull_request_template.md vendored
@@ -1,31 +1,12 @@
### Context

Please include relevant motivation and context for this PR.

If it fixes an issue, please reference it with `Fix #XXXX`.

### Description

Please include a summary of the change and which issue is fixed. List any dependencies that are required for this change.

### Steps to review

Please add a detailed description of how to review this PR.

### Checklist

- Are there new checks included in this PR? Yes / No
- If so, do we need to update permissions for the provider? Please review this carefully.
- [ ] Review if the code is covered by tests.
- [ ] Review if the code is documented following this specification: https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings
- [ ] Review if a backport is needed.
- [ ] Review if the [Readme.md](https://github.com/prowler-cloud/prowler/blob/master/README.md) needs to be updated.
- [ ] Ensure new entries are added to [CHANGELOG.md](https://github.com/prowler-cloud/prowler/blob/master/prowler/CHANGELOG.md), if applicable.

#### API
- [ ] Verify if API specs need to be regenerated.
- [ ] Check if version updates are required (e.g., specs, Poetry, etc.).
- [ ] Ensure new entries are added to [CHANGELOG.md](https://github.com/prowler-cloud/prowler/blob/master/api/CHANGELOG.md), if applicable.

### License
102 .github/scripts/trivy-pr-comment.js vendored
@@ -1,102 +0,0 @@
|
||||
const fs = require('fs');
|
||||
|
||||
// Configuration from environment variables
|
||||
const REPORT_FILE = process.env.TRIVY_REPORT_FILE || 'trivy-report.json';
|
||||
const IMAGE_NAME = process.env.IMAGE_NAME || 'container-image';
|
||||
const GITHUB_SHA = process.env.GITHUB_SHA || 'unknown';
|
||||
const GITHUB_REPOSITORY = process.env.GITHUB_REPOSITORY || '';
|
||||
const GITHUB_RUN_ID = process.env.GITHUB_RUN_ID || '';
|
||||
const SEVERITY = process.env.SEVERITY || 'CRITICAL,HIGH,MEDIUM,LOW';
|
||||
|
||||
// Parse severities to scan
|
||||
const scannedSeverities = SEVERITY.split(',').map(s => s.trim());
|
||||
|
||||
// Read and parse the Trivy report
|
||||
const report = JSON.parse(fs.readFileSync(REPORT_FILE, 'utf-8'));
|
||||
|
||||
let vulnCount = 0;
|
||||
let vulnsByType = { CRITICAL: 0, HIGH: 0, MEDIUM: 0, LOW: 0 };
|
||||
let affectedPackages = new Set();
|
||||
|
||||
if (report.Results && Array.isArray(report.Results)) {
|
||||
for (const result of report.Results) {
|
||||
if (result.Vulnerabilities && Array.isArray(result.Vulnerabilities)) {
|
||||
for (const vuln of result.Vulnerabilities) {
|
||||
vulnCount++;
|
||||
if (vulnsByType[vuln.Severity] !== undefined) {
|
||||
vulnsByType[vuln.Severity]++;
|
||||
}
|
||||
if (vuln.PkgName) {
|
||||
affectedPackages.add(vuln.PkgName);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const shortSha = GITHUB_SHA.substring(0, 7);
|
||||
const timestamp = new Date().toISOString().replace('T', ' ').substring(0, 19) + ' UTC';
|
||||
|
||||
// Severity icons and labels
|
||||
const severityConfig = {
|
||||
CRITICAL: { icon: '🔴', label: 'Critical' },
|
||||
HIGH: { icon: '🟠', label: 'High' },
|
||||
MEDIUM: { icon: '🟡', label: 'Medium' },
|
||||
LOW: { icon: '🔵', label: 'Low' }
|
||||
};
|
||||
|
||||
let comment = '## 🔒 Container Security Scan\n\n';
|
||||
comment += `**Image:** \`${IMAGE_NAME}:${shortSha}\`\n`;
|
||||
comment += `**Last scan:** ${timestamp}\n\n`;
|
||||
|
||||
if (vulnCount === 0) {
|
||||
comment += '### ✅ No Vulnerabilities Detected\n\n';
|
||||
comment += 'The container image passed all security checks. No known CVEs were found.\n';
|
||||
} else {
|
||||
comment += '### 📊 Vulnerability Summary\n\n';
|
||||
comment += '| Severity | Count |\n';
|
||||
comment += '|----------|-------|\n';
|
||||
|
||||
// Only show severities that were scanned
|
||||
for (const severity of scannedSeverities) {
|
||||
const config = severityConfig[severity];
|
||||
const count = vulnsByType[severity] || 0;
|
||||
const isBold = (severity === 'CRITICAL' || severity === 'HIGH') && count > 0;
|
||||
const countDisplay = isBold ? `**${count}**` : count;
|
||||
comment += `| ${config.icon} ${config.label} | ${countDisplay} |\n`;
|
||||
}
|
||||
|
||||
comment += `| **Total** | **${vulnCount}** |\n\n`;
|
||||
|
||||
if (affectedPackages.size > 0) {
|
||||
comment += `**${affectedPackages.size}** package(s) affected\n\n`;
|
||||
}
|
||||
|
||||
if (vulnsByType.CRITICAL > 0) {
|
||||
comment += '### ⚠️ Action Required\n\n';
|
||||
comment += '**Critical severity vulnerabilities detected.** These should be addressed before merging:\n';
|
||||
comment += '- Review the detailed scan results\n';
|
||||
comment += '- Update affected packages to patched versions\n';
|
||||
comment += '- Consider using a different base image if updates are unavailable\n\n';
|
||||
} else if (vulnsByType.HIGH > 0) {
|
||||
comment += '### ⚠️ Attention Needed\n\n';
|
||||
comment += '**High severity vulnerabilities found.** Please review and plan remediation:\n';
|
||||
comment += '- Assess the risk and exploitability\n';
|
||||
comment += '- Prioritize updates in the next maintenance cycle\n\n';
|
||||
} else {
|
||||
comment += '### ℹ️ Review Recommended\n\n';
|
||||
comment += 'Medium/Low severity vulnerabilities found. Consider addressing during regular maintenance.\n\n';
|
||||
}
|
||||
}
|
||||
|
||||
comment += '---\n';
|
||||
comment += '📋 **Resources:**\n';
|
||||
|
||||
if (GITHUB_REPOSITORY && GITHUB_RUN_ID) {
|
||||
comment += `- [Download full report](https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}) (see artifacts)\n`;
|
||||
}
|
||||
|
||||
comment += '- [View in Security tab](https://github.com/' + (GITHUB_REPOSITORY || 'repository') + '/security/code-scanning)\n';
|
||||
comment += '- Scanned with [Trivy](https://github.com/aquasecurity/trivy)\n';
|
||||
|
||||
module.exports = comment;
|
||||
115 .github/workflows/api-build-lint-push-containers.yml vendored
@@ -1,115 +0,0 @@
|
||||
name: API - Build and Push containers
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- "master"
|
||||
paths:
|
||||
- "api/**"
|
||||
- "prowler/**"
|
||||
- ".github/workflows/api-build-lint-push-containers.yml"
|
||||
|
||||
# Uncomment the code below to test this action on PRs
|
||||
# pull_request:
|
||||
# branches:
|
||||
# - "master"
|
||||
# paths:
|
||||
# - "api/**"
|
||||
# - ".github/workflows/api-build-lint-push-containers.yml"
|
||||
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
env:
|
||||
# Tags
|
||||
LATEST_TAG: latest
|
||||
RELEASE_TAG: ${{ github.event.release.tag_name }}
|
||||
STABLE_TAG: stable
|
||||
|
||||
WORKING_DIRECTORY: ./api
|
||||
|
||||
# Container Registries
|
||||
PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
|
||||
PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-api
|
||||
|
||||
jobs:
|
||||
repository-check:
|
||||
name: Repository check
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
is_repo: ${{ steps.repository_check.outputs.is_repo }}
|
||||
steps:
|
||||
- name: Repository check
|
||||
id: repository_check
|
||||
working-directory: /tmp
|
||||
run: |
|
||||
if [[ ${{ github.repository }} == "prowler-cloud/prowler" ]]
|
||||
then
|
||||
echo "is_repo=true" >> "${GITHUB_OUTPUT}"
|
||||
else
|
||||
echo "This action only runs for prowler-cloud/prowler"
|
||||
echo "is_repo=false" >> "${GITHUB_OUTPUT}"
|
||||
fi
|
||||
|
||||
# Build Prowler OSS container
|
||||
container-build-push:
|
||||
needs: repository-check
|
||||
if: needs.repository-check.outputs.is_repo == 'true'
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ${{ env.WORKING_DIRECTORY }}
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Set short git commit SHA
|
||||
id: vars
|
||||
run: |
|
||||
shortSha=$(git rev-parse --short ${{ github.sha }})
|
||||
echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Build and push container image (latest)
|
||||
# Comment the following line for testing
|
||||
if: github.event_name == 'push'
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: ${{ env.WORKING_DIRECTORY }}
|
||||
# Set push: false for testing
|
||||
push: true
|
||||
tags: |
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.SHORT_SHA }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
- name: Build and push container image (release)
|
||||
if: github.event_name == 'release'
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: ${{ env.WORKING_DIRECTORY }}
|
||||
push: true
|
||||
tags: |
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }}
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.STABLE_TAG }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
- name: Trigger deployment
|
||||
if: github.event_name == 'push'
|
||||
uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0
|
||||
with:
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
repository: ${{ secrets.CLOUD_DISPATCH }}
|
||||
event-type: prowler-api-deploy
|
||||
client-payload: '{"sha": "${{ github.sha }}", "short_sha": "${{ env.SHORT_SHA }}"}'
|
||||
56 .github/workflows/api-codeql.yml vendored
@@ -1,56 +0,0 @@
name: 'API: CodeQL'

on:
  push:
    branches:
      - 'master'
      - 'v5.*'
    paths:
      - 'api/**'
      - '.github/workflows/api-codeql.yml'
      - '.github/codeql/api-codeql-config.yml'
  pull_request:
    branches:
      - 'master'
      - 'v5.*'
    paths:
      - 'api/**'
      - '.github/workflows/api-codeql.yml'
      - '.github/codeql/api-codeql-config.yml'
  schedule:
    - cron: '00 12 * * *'

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  analyze:
    name: CodeQL Security Analysis
    runs-on: ubuntu-latest
    timeout-minutes: 30
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language:
          - 'python'

    steps:
      - name: Checkout repository
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Initialize CodeQL
        uses: github/codeql-action/init@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
        with:
          languages: ${{ matrix.language }}
          config-file: ./.github/codeql/api-codeql-config.yml

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
        with:
          category: '/language:${{ matrix.language }}'
228 .github/workflows/api-pull-request.yml vendored
@@ -1,228 +0,0 @@
|
||||
name: 'API: Pull Request'
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'master'
|
||||
- 'v5.*'
|
||||
paths:
|
||||
- '.github/workflows/api-pull-request.yml'
|
||||
- 'api/**'
|
||||
- '!api/docs/**'
|
||||
- '!api/README.md'
|
||||
- '!api/CHANGELOG.md'
|
||||
pull_request:
|
||||
branches:
|
||||
- 'master'
|
||||
- 'v5.*'
|
||||
paths:
|
||||
- '.github/workflows/api-pull-request.yml'
|
||||
- 'api/**'
|
||||
- '!api/docs/**'
|
||||
- '!api/README.md'
|
||||
- '!api/CHANGELOG.md'
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
POSTGRES_HOST: localhost
|
||||
POSTGRES_PORT: 5432
|
||||
POSTGRES_ADMIN_USER: prowler
|
||||
POSTGRES_ADMIN_PASSWORD: S3cret
|
||||
POSTGRES_USER: prowler_user
|
||||
POSTGRES_PASSWORD: prowler
|
||||
POSTGRES_DB: postgres-db
|
||||
VALKEY_HOST: localhost
|
||||
VALKEY_PORT: 6379
|
||||
VALKEY_DB: 0
|
||||
API_WORKING_DIR: ./api
|
||||
IMAGE_NAME: prowler-api
|
||||
|
||||
jobs:
|
||||
code-quality:
|
||||
if: github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
permissions:
|
||||
contents: read
|
||||
strategy:
|
||||
matrix:
|
||||
python-version:
|
||||
- '3.12'
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./api
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Setup Python with Poetry
|
||||
uses: ./.github/actions/setup-python-poetry
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
working-directory: ./api
|
||||
|
||||
- name: Poetry check
|
||||
run: poetry check --lock
|
||||
|
||||
- name: Ruff lint
|
||||
run: poetry run ruff check . --exclude contrib
|
||||
|
||||
- name: Ruff format
|
||||
run: poetry run ruff format --check . --exclude contrib
|
||||
|
||||
- name: Pylint
|
||||
run: poetry run pylint --disable=W,C,R,E -j 0 -rn -sn src/
|
||||
|
||||
security-scans:
|
||||
if: github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
permissions:
|
||||
contents: read
|
||||
strategy:
|
||||
matrix:
|
||||
python-version:
|
||||
- '3.12'
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./api
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Setup Python with Poetry
|
||||
uses: ./.github/actions/setup-python-poetry
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
working-directory: ./api
|
||||
|
||||
- name: Bandit
|
||||
run: poetry run bandit -q -lll -x '*_test.py,./contrib/' -r .
|
||||
|
||||
- name: Safety
|
||||
# 76352, 76353, 77323 come from SDK, but they cannot upgrade it yet. It does not affect API
|
||||
# TODO: Botocore needs urllib3 1.X so we need to ignore these vulnerabilities 77744,77745. Remove this once we upgrade to urllib3 2.X
|
||||
run: poetry run safety check --ignore 70612,66963,74429,76352,76353,77323,77744,77745
|
||||
|
||||
- name: Vulture
|
||||
run: poetry run vulture --exclude "contrib,tests,conftest.py" --min-confidence 100 .
|
||||
|
||||
tests:
|
||||
if: github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
permissions:
|
||||
contents: read
|
||||
strategy:
|
||||
matrix:
|
||||
python-version:
|
||||
- '3.12'
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./api
|
||||
|
||||
services:
|
||||
postgres:
|
||||
image: postgres
|
||||
env:
|
||||
POSTGRES_HOST: ${{ env.POSTGRES_HOST }}
|
||||
POSTGRES_PORT: ${{ env.POSTGRES_PORT }}
|
||||
POSTGRES_USER: ${{ env.POSTGRES_USER }}
|
||||
POSTGRES_PASSWORD: ${{ env.POSTGRES_PASSWORD }}
|
||||
POSTGRES_DB: ${{ env.POSTGRES_DB }}
|
||||
ports:
|
||||
- 5432:5432
|
||||
options: >-
|
||||
--health-cmd pg_isready
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
valkey:
|
||||
image: valkey/valkey:7-alpine3.19
|
||||
env:
|
||||
VALKEY_HOST: ${{ env.VALKEY_HOST }}
|
||||
VALKEY_PORT: ${{ env.VALKEY_PORT }}
|
||||
VALKEY_DB: ${{ env.VALKEY_DB }}
|
||||
ports:
|
||||
- 6379:6379
|
||||
options: >-
|
||||
--health-cmd "valkey-cli ping"
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Setup Python with Poetry
|
||||
uses: ./.github/actions/setup-python-poetry
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
working-directory: ./api
|
||||
|
||||
- name: Run tests with pytest
|
||||
run: poetry run pytest --cov=./src/backend --cov-report=xml src/backend
|
||||
|
||||
- name: Upload coverage reports to Codecov
|
||||
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
flags: api
|
||||
|
||||
dockerfile-lint:
|
||||
if: github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Lint Dockerfile with Hadolint
|
||||
uses: hadolint/hadolint-action@2332a7b74a6de0dda2e2221d575162eba76ba5e5 # v3.3.0
|
||||
with:
|
||||
dockerfile: api/Dockerfile
|
||||
ignore: DL3013
|
||||
|
||||
container-build-and-scan:
|
||||
if: github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Build container
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: ${{ env.API_WORKING_DIR }}
|
||||
push: false
|
||||
load: true
|
||||
tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
- name: Scan container with Trivy
|
||||
uses: ./.github/actions/trivy-scan
|
||||
with:
|
||||
image-name: ${{ env.IMAGE_NAME }}
|
||||
image-tag: ${{ github.sha }}
|
||||
fail-on-critical: 'false'
|
||||
severity: 'CRITICAL'
|
||||
54 .github/workflows/backport.yml vendored
@@ -1,54 +0,0 @@
name: 'Tools: Backport'

on:
  pull_request_target:
    branches:
      - 'master'
    types:
      - 'labeled'
      - 'closed'
    paths:
      - '.github/workflows/backport.yml'

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
  cancel-in-progress: false

env:
  BACKPORT_LABEL_PREFIX: backport-to-
  BACKPORT_LABEL_IGNORE: was-backported

jobs:
  backport:
    if: github.event.pull_request.merged == true && !(contains(github.event.pull_request.labels.*.name, 'backport')) && !(contains(github.event.pull_request.labels.*.name, 'was-backported'))
    runs-on: ubuntu-latest
    timeout-minutes: 15
    permissions:
      contents: write
      pull-requests: write

    steps:
      - name: Check labels
        id: label_check
        uses: agilepathway/label-checker@c3d16ad512e7cea5961df85ff2486bb774caf3c5 # v1.6.65
        with:
          allow_failure: true
          prefix_mode: true
          any_of: ${{ env.BACKPORT_LABEL_PREFIX }}
          none_of: ${{ env.BACKPORT_LABEL_IGNORE }}
          repo_token: ${{ secrets.GITHUB_TOKEN }}

      - name: Backport PR
        if: steps.label_check.outputs.label_check == 'success'
        uses: sorenlouv/backport-github-action@ad888e978060bc1b2798690dd9d03c4036560947 # v9.5.1
        with:
          github_token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
          auto_backport_label_prefix: ${{ env.BACKPORT_LABEL_PREFIX }}

      - name: Display backport info log
        if: success() && steps.label_check.outputs.label_check == 'success'
        run: cat ~/.backport/backport.info.log

      - name: Display backport debug log
        if: failure() && steps.label_check.outputs.label_check == 'success'
        run: cat ~/.backport/backport.debug.log
.github/workflows/build-lint-push-containers.yml (vendored, new file, 198 changes) @@ -0,0 +1,198 @@
|
||||
name: build-lint-push-containers
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'master'
|
||||
paths-ignore:
|
||||
- '.github/**'
|
||||
- 'README.md'
|
||||
|
||||
release:
|
||||
types: [published, edited]
|
||||
|
||||
env:
|
||||
AWS_REGION_STG: eu-west-1
|
||||
AWS_REGION_PRO: us-east-1
|
||||
IMAGE_NAME: prowler
|
||||
LATEST_TAG: latest
|
||||
TEMPORARY_TAG: temporary
|
||||
DOCKERFILE_PATH: ./Dockerfile
|
||||
|
||||
jobs:
|
||||
# Lint Dockerfile using Hadolint
|
||||
# dockerfile-linter:
|
||||
# runs-on: ubuntu-latest
|
||||
# steps:
|
||||
# -
|
||||
# name: Checkout
|
||||
# uses: actions/checkout@v3
|
||||
# -
|
||||
# name: Install Hadolint
|
||||
# run: |
|
||||
# VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
|
||||
# grep '"tag_name":' | \
|
||||
# sed -E 's/.*"v([^"]+)".*/\1/' \
|
||||
# ) && curl -L -o /tmp/hadolint https://github.com/hadolint/hadolint/releases/download/v${VERSION}/hadolint-Linux-x86_64 \
|
||||
# && chmod +x /tmp/hadolint
|
||||
# -
|
||||
# name: Run Hadolint
|
||||
# run: |
|
||||
# /tmp/hadolint util/Dockerfile
|
||||
|
||||
# Build Prowler OSS container
|
||||
container-build:
|
||||
# needs: dockerfile-linter
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
-
|
||||
name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
-
|
||||
name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
-
|
||||
name: Build
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
# Without pushing to registries
|
||||
push: false
|
||||
tags: ${{ env.IMAGE_NAME }}:${{ env.TEMPORARY_TAG }}
|
||||
file: ${{ env.DOCKERFILE_PATH }}
|
||||
outputs: type=docker,dest=/tmp/${{ env.IMAGE_NAME }}.tar
|
||||
-
|
||||
name: Share image between jobs
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: ${{ env.IMAGE_NAME }}.tar
|
||||
path: /tmp/${{ env.IMAGE_NAME }}.tar
|
||||
|
||||
# Lint Prowler OSS container using Dockle
|
||||
# container-linter:
|
||||
# needs: container-build
|
||||
# runs-on: ubuntu-latest
|
||||
# steps:
|
||||
# -
|
||||
# name: Get container image from shared
|
||||
# uses: actions/download-artifact@v2
|
||||
# with:
|
||||
# name: ${{ env.IMAGE_NAME }}.tar
|
||||
# path: /tmp
|
||||
# -
|
||||
# name: Load Docker image
|
||||
# run: |
|
||||
# docker load --input /tmp/${{ env.IMAGE_NAME }}.tar
|
||||
# docker image ls -a
|
||||
# -
|
||||
# name: Install Dockle
|
||||
# run: |
|
||||
# VERSION=$(curl --silent "https://api.github.com/repos/goodwithtech/dockle/releases/latest" | \
|
||||
# grep '"tag_name":' | \
|
||||
# sed -E 's/.*"v([^"]+)".*/\1/' \
|
||||
# ) && curl -L -o dockle.deb https://github.com/goodwithtech/dockle/releases/download/v${VERSION}/dockle_${VERSION}_Linux-64bit.deb \
|
||||
# && sudo dpkg -i dockle.deb && rm dockle.deb
|
||||
# -
|
||||
# name: Run Dockle
|
||||
# run: dockle ${{ env.IMAGE_NAME }}:${{ env.TEMPORARY_TAG }}
|
||||
|
||||
# Push Prowler OSS container to registries
|
||||
container-push:
|
||||
# needs: container-linter
|
||||
needs: container-build
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: read # This is required for actions/checkout
|
||||
steps:
|
||||
-
|
||||
name: Get container image from shared
|
||||
uses: actions/download-artifact@v2
|
||||
with:
|
||||
name: ${{ env.IMAGE_NAME }}.tar
|
||||
path: /tmp
|
||||
-
|
||||
name: Load Docker image
|
||||
run: |
|
||||
docker load --input /tmp/${{ env.IMAGE_NAME }}.tar
|
||||
docker image ls -a
|
||||
-
|
||||
name: Login to DockerHub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
-
|
||||
name: Login to Public ECR
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: public.ecr.aws
|
||||
username: ${{ secrets.PUBLIC_ECR_AWS_ACCESS_KEY_ID }}
|
||||
password: ${{ secrets.PUBLIC_ECR_AWS_SECRET_ACCESS_KEY }}
|
||||
env:
|
||||
AWS_REGION: ${{ env.AWS_REGION_PRO }}
|
||||
-
|
||||
name: Configure AWS Credentials -- STG
|
||||
if: github.event_name == 'push'
|
||||
uses: aws-actions/configure-aws-credentials@v1
|
||||
with:
|
||||
aws-region: ${{ env.AWS_REGION_STG }}
|
||||
role-to-assume: ${{ secrets.STG_IAM_ROLE_ARN }}
|
||||
role-session-name: build-lint-containers-stg
|
||||
-
|
||||
name: Login to ECR -- STG
|
||||
if: github.event_name == 'push'
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ${{ secrets.STG_ECR }}
|
||||
-
|
||||
name: Configure AWS Credentials -- PRO
|
||||
if: github.event_name == 'release'
|
||||
uses: aws-actions/configure-aws-credentials@v1
|
||||
with:
|
||||
aws-region: ${{ env.AWS_REGION_PRO }}
|
||||
role-to-assume: ${{ secrets.PRO_IAM_ROLE_ARN }}
|
||||
role-session-name: build-lint-containers-pro
|
||||
-
|
||||
name: Login to ECR -- PRO
|
||||
if: github.event_name == 'release'
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ${{ secrets.PRO_ECR }}
|
||||
-
|
||||
# Push to master branch - push "latest" tag
|
||||
name: Tag (latest)
|
||||
if: github.event_name == 'push'
|
||||
run: |
|
||||
docker tag ${{ env.IMAGE_NAME }}:${{ env.TEMPORARY_TAG }} ${{ secrets.STG_ECR }}/${{ secrets.STG_ECR_REPOSITORY }}:${{ env.LATEST_TAG }}
|
||||
docker tag ${{ env.IMAGE_NAME }}:${{ env.TEMPORARY_TAG }} ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}
|
||||
docker tag ${{ env.IMAGE_NAME }}:${{ env.TEMPORARY_TAG }} ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}
|
||||
-
|
||||
# Push to master branch - push "latest" tag
|
||||
name: Push (latest)
|
||||
if: github.event_name == 'push'
|
||||
run: |
|
||||
docker push ${{ secrets.STG_ECR }}/${{ secrets.STG_ECR_REPOSITORY }}:${{ env.LATEST_TAG }}
|
||||
docker push ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}
|
||||
docker push ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}
|
||||
-
|
||||
# Push the new release
|
||||
name: Tag (release)
|
||||
if: github.event_name == 'release'
|
||||
run: |
|
||||
docker tag ${{ env.IMAGE_NAME }}:${{ env.TEMPORARY_TAG }} ${{ secrets.PRO_ECR }}/${{ secrets.PRO_ECR_REPOSITORY }}:${{ github.event.release.tag_name }}
|
||||
docker tag ${{ env.IMAGE_NAME }}:${{ env.TEMPORARY_TAG }} ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ github.event.release.tag_name }}
|
||||
docker tag ${{ env.IMAGE_NAME }}:${{ env.TEMPORARY_TAG }} ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ github.event.release.tag_name }}
|
||||
-
|
||||
# Push the new release
|
||||
name: Push (release)
|
||||
if: github.event_name == 'release'
|
||||
run: |
|
||||
docker push ${{ secrets.PRO_ECR }}/${{ secrets.PRO_ECR_REPOSITORY }}:${{ github.event.release.tag_name }}
|
||||
docker push ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ github.event.release.tag_name }}
|
||||
docker push ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ github.event.release.tag_name }}
|
||||
-
|
||||
name: Delete artifacts
|
||||
if: always()
|
||||
uses: geekyeggo/delete-artifact@v1
|
||||
with:
|
||||
name: ${{ env.IMAGE_NAME }}.tar
|
||||
.github/workflows/conventional-commit.yml (vendored, 31 changes) @@ -1,31 +0,0 @@
name: 'Tools: Conventional Commit'

on:
  pull_request:
    branches:
      - 'master'
      - 'v3'
      - 'v4.*'
      - 'v5.*'
    types:
      - 'opened'
      - 'edited'
      - 'synchronize'

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
  cancel-in-progress: true

jobs:
  conventional-commit-check:
    runs-on: ubuntu-latest
    timeout-minutes: 15
    permissions:
      contents: read
      pull-requests: read

    steps:
      - name: Check PR title format
        uses: agenthunt/conventional-commit-checker-action@9e552d650d0e205553ec7792d447929fc78e012b # v2.0.0
        with:
          pr-title-regex: '^(feat|fix|docs|style|refactor|perf|test|chore|build|ci|revert)(\([^)]+\))?!?: .+'
.github/workflows/create-backport-label.yml (vendored, 70 changes) @@ -1,70 +0,0 @@
name: 'Tools: Backport Label'

on:
  release:
    types:
      - 'published'

concurrency:
  group: ${{ github.workflow }}-${{ github.event.release.tag_name }}
  cancel-in-progress: false

env:
  BACKPORT_LABEL_PREFIX: backport-to-
  BACKPORT_LABEL_COLOR: B60205

jobs:
  create-label:
    runs-on: ubuntu-latest
    timeout-minutes: 15
    permissions:
      contents: read
      issues: write

    steps:
      - name: Create backport label for minor releases
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          RELEASE_TAG="${{ github.event.release.tag_name }}"

          if [ -z "$RELEASE_TAG" ]; then
            echo "Error: No release tag provided"
            exit 1
          fi

          echo "Processing release tag: $RELEASE_TAG"

          # Remove 'v' prefix if present (e.g., v3.2.0 -> 3.2.0)
          VERSION_ONLY="${RELEASE_TAG#v}"

          # Check if it's a minor version (X.Y.0)
          if [[ "$VERSION_ONLY" =~ ^([0-9]+)\.([0-9]+)\.0$ ]]; then
            echo "Release $RELEASE_TAG (version $VERSION_ONLY) is a minor version. Proceeding to create backport label."

            # Extract X.Y from X.Y.0 (e.g., 5.6 from 5.6.0)
            MAJOR="${BASH_REMATCH[1]}"
            MINOR="${BASH_REMATCH[2]}"
            TWO_DIGIT_VERSION="${MAJOR}.${MINOR}"

            LABEL_NAME="${BACKPORT_LABEL_PREFIX}v${TWO_DIGIT_VERSION}"
            LABEL_DESC="Backport PR to the v${TWO_DIGIT_VERSION} branch"
            LABEL_COLOR="$BACKPORT_LABEL_COLOR"

            echo "Label name: $LABEL_NAME"
            echo "Label description: $LABEL_DESC"

            # Check if label already exists
            if gh label list --repo ${{ github.repository }} --limit 1000 | grep -q "^${LABEL_NAME}[[:space:]]"; then
              echo "Label '$LABEL_NAME' already exists."
            else
              echo "Label '$LABEL_NAME' does not exist. Creating it..."
              gh label create "$LABEL_NAME" \
                --description "$LABEL_DESC" \
                --color "$LABEL_COLOR" \
                --repo ${{ github.repository }}
              echo "Label '$LABEL_NAME' created successfully."
            fi
          else
            echo "Release $RELEASE_TAG (version $VERSION_ONLY) is not a minor version. Skipping backport label creation."
          fi
.github/workflows/find-secrets.yml (vendored, 35 changes) @@ -1,33 +1,18 @@
-name: 'Tools: TruffleHog'
+name: find-secrets
 
-on:
-  push:
-    branches:
-      - 'master'
-      - 'v5.*'
-  pull_request:
-    branches:
-      - 'master'
-      - 'v5.*'
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}
-  cancel-in-progress: true
+on: pull_request
 
 jobs:
-  scan-secrets:
+  trufflehog:
     runs-on: ubuntu-latest
-    timeout-minutes: 15
-    permissions:
-      contents: read
-
     steps:
-      - name: Checkout repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - name: Checkout
+        uses: actions/checkout@v3
         with:
           fetch-depth: 0
-
-      - name: Scan for secrets with TruffleHog
-        uses: trufflesecurity/trufflehog@ad6fc8fb446b8fafbf7ea8193d2d6bfd42f45690 # v3.90.11
+      - name: TruffleHog OSS
+        uses: trufflesecurity/trufflehog@v3.4.4
         with:
-          extra_args: '--results=verified,unknown'
+          path: ./
+          base: ${{ github.event.repository.default_branch }}
+          head: HEAD
.github/workflows/labeler.yml (vendored, 29 changes) @@ -1,29 +0,0 @@
name: 'Tools: PR Labeler'

on:
  pull_request_target:
    branches:
      - 'master'
      - 'v5.*'
    types:
      - 'opened'
      - 'reopened'
      - 'synchronize'

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
  cancel-in-progress: true

jobs:
  labeler:
    runs-on: ubuntu-latest
    timeout-minutes: 15
    permissions:
      contents: read
      pull-requests: write

    steps:
      - name: Apply labels to PR
        uses: actions/labeler@634933edcd8ababfe52f92936142cc22ac488b1b # v6.0.1
        with:
          sync-labels: true
.github/workflows/mcp-container-build-push.yml (vendored, 110 changes) @@ -1,110 +0,0 @@
|
||||
name: 'MCP: Container Build and Push'
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'master'
|
||||
paths:
|
||||
- 'mcp_server/**'
|
||||
- '.github/workflows/mcp-container-build-push.yml'
|
||||
release:
|
||||
types:
|
||||
- 'published'
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
# Tags
|
||||
LATEST_TAG: latest
|
||||
RELEASE_TAG: ${{ github.event.release.tag_name }}
|
||||
STABLE_TAG: stable
|
||||
WORKING_DIRECTORY: ./mcp_server
|
||||
|
||||
# Container registries
|
||||
PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
|
||||
PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-mcp
|
||||
|
||||
jobs:
|
||||
setup:
|
||||
if: github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
outputs:
|
||||
short-sha: ${{ steps.set-short-sha.outputs.short-sha }}
|
||||
steps:
|
||||
- name: Calculate short SHA
|
||||
id: set-short-sha
|
||||
run: echo "short-sha=${GITHUB_SHA::7}" >> $GITHUB_OUTPUT
|
||||
|
||||
container-build-push:
|
||||
needs: setup
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Build and push MCP container (latest)
|
||||
if: github.event_name == 'push'
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: ${{ env.WORKING_DIRECTORY }}
|
||||
push: true
|
||||
tags: |
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.short-sha }}
|
||||
labels: |
|
||||
org.opencontainers.image.title=Prowler MCP Server
|
||||
org.opencontainers.image.description=Model Context Protocol server for Prowler
|
||||
org.opencontainers.image.vendor=ProwlerPro, Inc.
|
||||
org.opencontainers.image.source=https://github.com/${{ github.repository }}
|
||||
org.opencontainers.image.revision=${{ github.sha }}
|
||||
org.opencontainers.image.created=${{ github.event.head_commit.timestamp }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
- name: Build and push MCP container (release)
|
||||
if: github.event_name == 'release'
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: ${{ env.WORKING_DIRECTORY }}
|
||||
push: true
|
||||
tags: |
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }}
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.STABLE_TAG }}
|
||||
labels: |
|
||||
org.opencontainers.image.title=Prowler MCP Server
|
||||
org.opencontainers.image.description=Model Context Protocol server for Prowler
|
||||
org.opencontainers.image.vendor=ProwlerPro, Inc.
|
||||
org.opencontainers.image.version=${{ env.RELEASE_TAG }}
|
||||
org.opencontainers.image.source=https://github.com/${{ github.repository }}
|
||||
org.opencontainers.image.revision=${{ github.sha }}
|
||||
org.opencontainers.image.created=${{ github.event.release.published_at }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
- name: Trigger MCP deployment
|
||||
if: github.event_name == 'push'
|
||||
uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0
|
||||
with:
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
repository: ${{ secrets.CLOUD_DISPATCH }}
|
||||
event-type: mcp-prowler-deployment
|
||||
client-payload: '{"sha": "${{ github.sha }}", "short_sha": "${{ needs.setup.outputs.short-sha }}"}'
|
||||
.github/workflows/pr-check-changelog.yml (vendored, 103 changes) @@ -1,103 +0,0 @@
|
||||
name: 'Tools: Check Changelog'
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types:
|
||||
- 'opened'
|
||||
- 'synchronize'
|
||||
- 'reopened'
|
||||
- 'labeled'
|
||||
- 'unlabeled'
|
||||
branches:
|
||||
- 'master'
|
||||
- 'v5.*'
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
check-changelog:
|
||||
if: contains(github.event.pull_request.labels.*.name, 'no-changelog') == false
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
env:
|
||||
MONITORED_FOLDERS: 'api ui prowler mcp_server'
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Get changed files
|
||||
id: changed-files
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
api/**
|
||||
ui/**
|
||||
prowler/**
|
||||
mcp_server/**
|
||||
|
||||
- name: Check for folder changes and changelog presence
|
||||
id: check-folders
|
||||
run: |
|
||||
missing_changelogs=""
|
||||
|
||||
# Check api folder
|
||||
if [[ "${{ steps.changed-files.outputs.any_changed }}" == "true" ]]; then
|
||||
for folder in $MONITORED_FOLDERS; do
|
||||
# Get files changed in this folder
|
||||
changed_in_folder=$(echo "${{ steps.changed-files.outputs.all_changed_files }}" | tr ' ' '\n' | grep "^${folder}/" || true)
|
||||
|
||||
if [ -n "$changed_in_folder" ]; then
|
||||
echo "Detected changes in ${folder}/"
|
||||
|
||||
# Check if CHANGELOG.md was updated
|
||||
if ! echo "$changed_in_folder" | grep -q "^${folder}/CHANGELOG.md$"; then
|
||||
echo "No changelog update found for ${folder}/"
|
||||
missing_changelogs="${missing_changelogs}- \`${folder}\`"$'\n'
|
||||
fi
|
||||
fi
|
||||
done
|
||||
fi
|
||||
|
||||
{
|
||||
echo "missing_changelogs<<EOF"
|
||||
echo -e "${missing_changelogs}"
|
||||
echo "EOF"
|
||||
} >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Find existing changelog comment
|
||||
if: github.event.pull_request.head.repo.full_name == github.repository
|
||||
id: find-comment
|
||||
uses: peter-evans/find-comment@b30e6a3c0ed37e7c023ccd3f1db5c6c0b0c23aad # v4.0.0
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-author: 'github-actions[bot]'
|
||||
body-includes: '<!-- changelog-check -->'
|
||||
|
||||
- name: Update PR comment with changelog status
|
||||
if: github.event.pull_request.head.repo.full_name == github.repository
|
||||
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-id: ${{ steps.find-comment.outputs.comment-id }}
|
||||
edit-mode: replace
|
||||
body: |
|
||||
<!-- changelog-check -->
|
||||
${{ steps.check-folders.outputs.missing_changelogs != '' && format('⚠️ **Changes detected in the following folders without a corresponding update to the `CHANGELOG.md`:**
|
||||
|
||||
{0}
|
||||
|
||||
Please add an entry to the corresponding `CHANGELOG.md` file to maintain a clear history of changes.', steps.check-folders.outputs.missing_changelogs) || '✅ All necessary `CHANGELOG.md` files have been updated.' }}
|
||||
|
||||
- name: Fail if changelog is missing
|
||||
if: steps.check-folders.outputs.missing_changelogs != ''
|
||||
run: |
|
||||
echo "::error::Missing changelog updates in some folders"
|
||||
exit 1
|
||||
.github/workflows/pr-conflict-checker.yml (vendored, 123 changes) @@ -1,123 +0,0 @@
|
||||
name: 'Tools: PR Conflict Checker'
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types:
|
||||
- 'opened'
|
||||
- 'synchronize'
|
||||
- 'reopened'
|
||||
branches:
|
||||
- 'master'
|
||||
- 'v5.*'
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
check-conflicts:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
issues: write
|
||||
|
||||
steps:
|
||||
- name: Checkout PR head
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Get changed files
|
||||
id: changed-files
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: '**'
|
||||
|
||||
- name: Check for conflict markers
|
||||
id: conflict-check
|
||||
run: |
|
||||
echo "Checking for conflict markers in changed files..."
|
||||
|
||||
CONFLICT_FILES=""
|
||||
HAS_CONFLICTS=false
|
||||
|
||||
# Check each changed file for conflict markers
|
||||
for file in ${{ steps.changed-files.outputs.all_changed_files }}; do
|
||||
if [ -f "$file" ]; then
|
||||
echo "Checking file: $file"
|
||||
|
||||
# Look for conflict markers (more precise regex)
|
||||
if grep -qE '^(<<<<<<<|=======|>>>>>>>)' "$file" 2>/dev/null; then
|
||||
echo "Conflict markers found in: $file"
|
||||
CONFLICT_FILES="${CONFLICT_FILES}- \`${file}\`"$'\n'
|
||||
HAS_CONFLICTS=true
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
if [ "$HAS_CONFLICTS" = true ]; then
|
||||
echo "has_conflicts=true" >> $GITHUB_OUTPUT
|
||||
{
|
||||
echo "conflict_files<<EOF"
|
||||
echo "$CONFLICT_FILES"
|
||||
echo "EOF"
|
||||
} >> $GITHUB_OUTPUT
|
||||
echo "Conflict markers detected"
|
||||
else
|
||||
echo "has_conflicts=false" >> $GITHUB_OUTPUT
|
||||
echo "No conflict markers found in changed files"
|
||||
fi
|
||||
|
||||
- name: Manage conflict label
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
PR_NUMBER: ${{ github.event.pull_request.number }}
|
||||
HAS_CONFLICTS: ${{ steps.conflict-check.outputs.has_conflicts }}
|
||||
run: |
|
||||
LABEL_NAME="has-conflicts"
|
||||
|
||||
# Add or remove label based on conflict status
|
||||
if [ "$HAS_CONFLICTS" = "true" ]; then
|
||||
echo "Adding conflict label to PR #${PR_NUMBER}..."
|
||||
gh pr edit "$PR_NUMBER" --add-label "$LABEL_NAME" --repo ${{ github.repository }} || true
|
||||
else
|
||||
echo "Removing conflict label from PR #${PR_NUMBER}..."
|
||||
gh pr edit "$PR_NUMBER" --remove-label "$LABEL_NAME" --repo ${{ github.repository }} || true
|
||||
fi
|
||||
|
||||
- name: Find existing comment
|
||||
uses: peter-evans/find-comment@b30e6a3c0ed37e7c023ccd3f1db5c6c0b0c23aad # v4.0.0
|
||||
id: find-comment
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-author: 'github-actions[bot]'
|
||||
body-includes: '<!-- conflict-checker-comment -->'
|
||||
|
||||
- name: Create or update comment
|
||||
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
|
||||
with:
|
||||
comment-id: ${{ steps.find-comment.outputs.comment-id }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
edit-mode: replace
|
||||
body: |
|
||||
<!-- conflict-checker-comment -->
|
||||
${{ steps.conflict-check.outputs.has_conflicts == 'true' && '⚠️ **Conflict Markers Detected**' || '✅ **Conflict Markers Resolved**' }}
|
||||
|
||||
${{ steps.conflict-check.outputs.has_conflicts == 'true' && format('This pull request contains unresolved conflict markers in the following files:
|
||||
|
||||
{0}
|
||||
|
||||
Please resolve these conflicts by:
|
||||
1. Locating the conflict markers: `<<<<<<<`, `=======`, and `>>>>>>>`
|
||||
2. Manually editing the files to resolve the conflicts
|
||||
3. Removing all conflict markers
|
||||
4. Committing and pushing the changes', steps.conflict-check.outputs.conflict_files) || 'All conflict markers have been successfully resolved in this pull request.' }}
|
||||
|
||||
- name: Fail workflow if conflicts detected
|
||||
if: steps.conflict-check.outputs.has_conflicts == 'true'
|
||||
run: |
|
||||
echo "::error::Workflow failed due to conflict markers detected in the PR"
|
||||
exit 1
|
||||
.github/workflows/pr-merged.yml (vendored, 46 changes) @@ -1,46 +0,0 @@
name: 'Tools: PR Merged'

on:
  pull_request_target:
    branches:
      - 'master'
    types:
      - 'closed'

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
  cancel-in-progress: false

jobs:
  trigger-cloud-pull-request:
    if: github.event.pull_request.merged == true && github.repository == 'prowler-cloud/prowler'
    runs-on: ubuntu-latest
    timeout-minutes: 10
    permissions:
      contents: read
    steps:
      - name: Calculate short commit SHA
        id: vars
        run: |
          SHORT_SHA="${{ github.event.pull_request.merge_commit_sha }}"
          echo "SHORT_SHA=${SHORT_SHA::7}" >> $GITHUB_ENV

      - name: Trigger Cloud repository pull request
        uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0
        with:
          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
          repository: ${{ secrets.CLOUD_DISPATCH }}
          event-type: prowler-pull-request-merged
          client-payload: |
            {
              "PROWLER_COMMIT_SHA": "${{ github.event.pull_request.merge_commit_sha }}",
              "PROWLER_COMMIT_SHORT_SHA": "${{ env.SHORT_SHA }}",
              "PROWLER_PR_NUMBER": "${{ github.event.pull_request.number }}",
              "PROWLER_PR_TITLE": ${{ toJson(github.event.pull_request.title) }},
              "PROWLER_PR_LABELS": ${{ toJson(github.event.pull_request.labels.*.name) }},
              "PROWLER_PR_BODY": ${{ toJson(github.event.pull_request.body) }},
              "PROWLER_PR_URL": ${{ toJson(github.event.pull_request.html_url) }},
              "PROWLER_PR_MERGED_BY": "${{ github.event.pull_request.merged_by.login }}",
              "PROWLER_PR_BASE_BRANCH": "${{ github.event.pull_request.base.ref }}",
              "PROWLER_PR_HEAD_BRANCH": "${{ github.event.pull_request.head.ref }}"
            }
.github/workflows/prepare-release.yml (vendored, 410 changes) @@ -1,410 +0,0 @@
|
||||
name: 'Tools: Prepare Release'
|
||||
|
||||
run-name: 'Prepare Release for Prowler ${{ inputs.prowler_version }}'
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
prowler_version:
|
||||
description: 'Prowler version to release (e.g., 5.9.0)'
|
||||
required: true
|
||||
type: string
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ inputs.prowler_version }}
|
||||
cancel-in-progress: false
|
||||
|
||||
env:
|
||||
PROWLER_VERSION: ${{ inputs.prowler_version }}
|
||||
|
||||
jobs:
|
||||
prepare-release:
|
||||
if: github.event_name == 'workflow_dispatch' && github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
fetch-depth: 0
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: '3.12'
|
||||
|
||||
- name: Install Poetry
|
||||
run: |
|
||||
python3 -m pip install --user poetry==2.1.1
|
||||
echo "$HOME/.local/bin" >> $GITHUB_PATH
|
||||
|
||||
- name: Configure Git
|
||||
run: |
|
||||
git config --global user.name 'prowler-bot'
|
||||
git config --global user.email '179230569+prowler-bot@users.noreply.github.com'
|
||||
|
||||
- name: Parse version and read changelogs
|
||||
run: |
|
||||
# Validate version format (reusing pattern from sdk-bump-version.yml)
|
||||
if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
|
||||
MAJOR_VERSION=${BASH_REMATCH[1]}
|
||||
MINOR_VERSION=${BASH_REMATCH[2]}
|
||||
PATCH_VERSION=${BASH_REMATCH[3]}
|
||||
|
||||
# Export version components to environment
|
||||
echo "MAJOR_VERSION=${MAJOR_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "MINOR_VERSION=${MINOR_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "PATCH_VERSION=${PATCH_VERSION}" >> "${GITHUB_ENV}"
|
||||
|
||||
# Determine branch name (format: v5.9)
|
||||
BRANCH_NAME="v${MAJOR_VERSION}.${MINOR_VERSION}"
|
||||
echo "BRANCH_NAME=${BRANCH_NAME}" >> "${GITHUB_ENV}"
|
||||
|
||||
# Function to extract the latest version from changelog
|
||||
extract_latest_version() {
|
||||
local changelog_file="$1"
|
||||
if [ -f "$changelog_file" ]; then
|
||||
# Extract the first version entry (most recent) from changelog
|
||||
# Format: ## [version] (1.2.3) or ## [vversion] (v1.2.3)
|
||||
local version=$(grep -m 1 '^## \[' "$changelog_file" | sed 's/^## \[\(.*\)\].*/\1/' | sed 's/^v//' | tr -d '[:space:]')
|
||||
echo "$version"
|
||||
else
|
||||
echo ""
|
||||
fi
|
||||
}
|
||||
|
||||
# Read actual versions from changelogs (source of truth)
|
||||
UI_VERSION=$(extract_latest_version "ui/CHANGELOG.md")
|
||||
API_VERSION=$(extract_latest_version "api/CHANGELOG.md")
|
||||
SDK_VERSION=$(extract_latest_version "prowler/CHANGELOG.md")
|
||||
MCP_VERSION=$(extract_latest_version "mcp_server/CHANGELOG.md")
|
||||
|
||||
echo "UI_VERSION=${UI_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "API_VERSION=${API_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "SDK_VERSION=${SDK_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "MCP_VERSION=${MCP_VERSION}" >> "${GITHUB_ENV}"
|
||||
|
||||
if [ -n "$UI_VERSION" ]; then
|
||||
echo "Read UI version from changelog: $UI_VERSION"
|
||||
else
|
||||
echo "Warning: No UI version found in ui/CHANGELOG.md"
|
||||
fi
|
||||
|
||||
if [ -n "$API_VERSION" ]; then
|
||||
echo "Read API version from changelog: $API_VERSION"
|
||||
else
|
||||
echo "Warning: No API version found in api/CHANGELOG.md"
|
||||
fi
|
||||
|
||||
if [ -n "$SDK_VERSION" ]; then
|
||||
echo "Read SDK version from changelog: $SDK_VERSION"
|
||||
else
|
||||
echo "Warning: No SDK version found in prowler/CHANGELOG.md"
|
||||
fi
|
||||
|
||||
if [ -n "$MCP_VERSION" ]; then
|
||||
echo "Read MCP version from changelog: $MCP_VERSION"
|
||||
else
|
||||
echo "Warning: No MCP version found in mcp_server/CHANGELOG.md"
|
||||
fi
|
||||
|
||||
echo "Prowler version: $PROWLER_VERSION"
|
||||
echo "Branch name: $BRANCH_NAME"
|
||||
echo "UI version: $UI_VERSION"
|
||||
echo "API version: $API_VERSION"
|
||||
echo "SDK version: $SDK_VERSION"
|
||||
echo "MCP version: $MCP_VERSION"
|
||||
echo "Is minor release: $([ $PATCH_VERSION -eq 0 ] && echo 'true' || echo 'false')"
|
||||
else
|
||||
echo "Invalid version syntax: '$PROWLER_VERSION' (must be N.N.N)" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Extract and combine changelog entries
|
||||
run: |
|
||||
set -e
|
||||
|
||||
# Function to extract changelog for a specific version
|
||||
extract_changelog() {
|
||||
local file="$1"
|
||||
local version="$2"
|
||||
local output_file="$3"
|
||||
|
||||
if [ ! -f "$file" ]; then
|
||||
echo "Warning: $file not found, skipping..."
|
||||
touch "$output_file"
|
||||
return
|
||||
fi
|
||||
|
||||
# Extract changelog section for this version
|
||||
awk -v version="$version" '
|
||||
/^## \[v?'"$version"'\]/ { found=1; next }
|
||||
found && /^## \[v?[0-9]+\.[0-9]+\.[0-9]+\]/ { found=0 }
|
||||
found && !/^## \[v?'"$version"'\]/ { print }
|
||||
' "$file" > "$output_file"
|
||||
|
||||
# Remove --- separators
|
||||
sed -i '/^---$/d' "$output_file"
|
||||
|
||||
# Remove trailing empty lines
|
||||
sed -i '/^$/d' "$output_file"
|
||||
}
|
||||
|
||||
# Calculate expected versions for this release
|
||||
if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
|
||||
EXPECTED_UI_VERSION="1.${BASH_REMATCH[2]}.${BASH_REMATCH[3]}"
|
||||
EXPECTED_API_VERSION="1.$((${BASH_REMATCH[2]} + 1)).${BASH_REMATCH[3]}"
|
||||
|
||||
echo "Expected UI version for this release: $EXPECTED_UI_VERSION"
|
||||
echo "Expected API version for this release: $EXPECTED_API_VERSION"
|
||||
fi
|
||||
|
||||
# Determine if components have changes for this specific release
|
||||
# UI has changes if its current version matches what we expect for this release
|
||||
if [ -n "$UI_VERSION" ] && [ "$UI_VERSION" = "$EXPECTED_UI_VERSION" ]; then
|
||||
echo "HAS_UI_CHANGES=true" >> $GITHUB_ENV
|
||||
echo "✓ UI changes detected - version matches expected: $UI_VERSION"
|
||||
extract_changelog "ui/CHANGELOG.md" "$UI_VERSION" "ui_changelog.md"
|
||||
else
|
||||
echo "HAS_UI_CHANGES=false" >> $GITHUB_ENV
|
||||
echo "ℹ No UI changes for this release (current: $UI_VERSION, expected: $EXPECTED_UI_VERSION)"
|
||||
touch "ui_changelog.md"
|
||||
fi
|
||||
|
||||
# API has changes if its current version matches what we expect for this release
|
||||
if [ -n "$API_VERSION" ] && [ "$API_VERSION" = "$EXPECTED_API_VERSION" ]; then
|
||||
echo "HAS_API_CHANGES=true" >> $GITHUB_ENV
|
||||
echo "✓ API changes detected - version matches expected: $API_VERSION"
|
||||
extract_changelog "api/CHANGELOG.md" "$API_VERSION" "api_changelog.md"
|
||||
else
|
||||
echo "HAS_API_CHANGES=false" >> $GITHUB_ENV
|
||||
echo "ℹ No API changes for this release (current: $API_VERSION, expected: $EXPECTED_API_VERSION)"
|
||||
touch "api_changelog.md"
|
||||
fi
|
||||
|
||||
# SDK has changes if its current version matches the input version
|
||||
if [ -n "$SDK_VERSION" ] && [ "$SDK_VERSION" = "$PROWLER_VERSION" ]; then
|
||||
echo "HAS_SDK_CHANGES=true" >> $GITHUB_ENV
|
||||
echo "✓ SDK changes detected - version matches input: $SDK_VERSION"
|
||||
extract_changelog "prowler/CHANGELOG.md" "$PROWLER_VERSION" "prowler_changelog.md"
|
||||
else
|
||||
echo "HAS_SDK_CHANGES=false" >> $GITHUB_ENV
|
||||
echo "ℹ No SDK changes for this release (current: $SDK_VERSION, input: $PROWLER_VERSION)"
|
||||
touch "prowler_changelog.md"
|
||||
fi
|
||||
|
||||
# MCP has changes if the changelog references this Prowler version
|
||||
# Check if the changelog contains "(Prowler X.Y.Z)" or "(Prowler UNRELEASED)"
|
||||
if [ -f "mcp_server/CHANGELOG.md" ]; then
|
||||
MCP_PROWLER_REF=$(grep -m 1 "^## \[.*\] (Prowler" mcp_server/CHANGELOG.md | sed -E 's/.*\(Prowler ([^)]+)\).*/\1/' | tr -d '[:space:]')
|
||||
if [ "$MCP_PROWLER_REF" = "$PROWLER_VERSION" ] || [ "$MCP_PROWLER_REF" = "UNRELEASED" ]; then
|
||||
echo "HAS_MCP_CHANGES=true" >> $GITHUB_ENV
|
||||
echo "✓ MCP changes detected - Prowler reference: $MCP_PROWLER_REF (version: $MCP_VERSION)"
|
||||
extract_changelog "mcp_server/CHANGELOG.md" "$MCP_VERSION" "mcp_changelog.md"
|
||||
else
|
||||
echo "HAS_MCP_CHANGES=false" >> $GITHUB_ENV
|
||||
echo "ℹ No MCP changes for this release (Prowler reference: $MCP_PROWLER_REF, input: $PROWLER_VERSION)"
|
||||
touch "mcp_changelog.md"
|
||||
fi
|
||||
else
|
||||
echo "HAS_MCP_CHANGES=false" >> $GITHUB_ENV
|
||||
echo "ℹ No MCP changelog found"
|
||||
touch "mcp_changelog.md"
|
||||
fi
|
||||
|
||||
# Combine changelogs in order: UI, API, SDK, MCP
|
||||
> combined_changelog.md
|
||||
|
||||
if [ "$HAS_UI_CHANGES" = "true" ] && [ -s "ui_changelog.md" ]; then
|
||||
echo "## UI" >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
cat ui_changelog.md >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
fi
|
||||
|
||||
if [ "$HAS_API_CHANGES" = "true" ] && [ -s "api_changelog.md" ]; then
|
||||
echo "## API" >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
cat api_changelog.md >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
fi
|
||||
|
||||
if [ "$HAS_SDK_CHANGES" = "true" ] && [ -s "prowler_changelog.md" ]; then
|
||||
echo "## SDK" >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
cat prowler_changelog.md >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
fi
|
||||
|
||||
if [ "$HAS_MCP_CHANGES" = "true" ] && [ -s "mcp_changelog.md" ]; then
|
||||
echo "## MCP" >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
cat mcp_changelog.md >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
fi
|
||||
|
||||
echo "Combined changelog preview:"
|
||||
cat combined_changelog.md
|
||||
|
||||
- name: Checkout release branch for patch release
|
||||
if: ${{ env.PATCH_VERSION != '0' }}
|
||||
run: |
|
||||
echo "Patch release detected, checking out existing branch $BRANCH_NAME..."
|
||||
if git show-ref --verify --quiet "refs/heads/$BRANCH_NAME"; then
|
||||
echo "Branch $BRANCH_NAME exists locally, checking out..."
|
||||
git checkout "$BRANCH_NAME"
|
||||
elif git show-ref --verify --quiet "refs/remotes/origin/$BRANCH_NAME"; then
|
||||
echo "Branch $BRANCH_NAME exists remotely, checking out..."
|
||||
git checkout -b "$BRANCH_NAME" "origin/$BRANCH_NAME"
|
||||
else
|
||||
echo "ERROR: Branch $BRANCH_NAME should exist for patch release $PROWLER_VERSION"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Verify SDK version in pyproject.toml
|
||||
run: |
|
||||
CURRENT_VERSION=$(grep '^version = ' pyproject.toml | sed -E 's/version = "([^"]+)"/\1/' | tr -d '[:space:]')
|
||||
PROWLER_VERSION_TRIMMED=$(echo "$PROWLER_VERSION" | tr -d '[:space:]')
|
||||
if [ "$CURRENT_VERSION" != "$PROWLER_VERSION_TRIMMED" ]; then
|
||||
echo "ERROR: Version mismatch in pyproject.toml (expected: '$PROWLER_VERSION_TRIMMED', found: '$CURRENT_VERSION')"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ pyproject.toml version: $CURRENT_VERSION"
|
||||
|
||||
- name: Verify SDK version in prowler/config/config.py
|
||||
run: |
|
||||
CURRENT_VERSION=$(grep '^prowler_version = ' prowler/config/config.py | sed -E 's/prowler_version = "([^"]+)"/\1/' | tr -d '[:space:]')
|
||||
PROWLER_VERSION_TRIMMED=$(echo "$PROWLER_VERSION" | tr -d '[:space:]')
|
||||
if [ "$CURRENT_VERSION" != "$PROWLER_VERSION_TRIMMED" ]; then
|
||||
echo "ERROR: Version mismatch in prowler/config/config.py (expected: '$PROWLER_VERSION_TRIMMED', found: '$CURRENT_VERSION')"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ prowler/config/config.py version: $CURRENT_VERSION"
|
||||
|
||||
- name: Verify API version in api/pyproject.toml
|
||||
if: ${{ env.HAS_API_CHANGES == 'true' }}
|
||||
run: |
|
||||
CURRENT_API_VERSION=$(grep '^version = ' api/pyproject.toml | sed -E 's/version = "([^"]+)"/\1/' | tr -d '[:space:]')
|
||||
API_VERSION_TRIMMED=$(echo "$API_VERSION" | tr -d '[:space:]')
|
||||
if [ "$CURRENT_API_VERSION" != "$API_VERSION_TRIMMED" ]; then
|
||||
echo "ERROR: API version mismatch in api/pyproject.toml (expected: '$API_VERSION_TRIMMED', found: '$CURRENT_API_VERSION')"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ api/pyproject.toml version: $CURRENT_API_VERSION"
|
||||
|
||||
- name: Verify API prowler dependency in api/pyproject.toml
|
||||
if: ${{ env.PATCH_VERSION != '0' && env.HAS_API_CHANGES == 'true' }}
|
||||
run: |
|
||||
CURRENT_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
|
||||
BRANCH_NAME_TRIMMED=$(echo "$BRANCH_NAME" | tr -d '[:space:]')
|
||||
if [ "$CURRENT_PROWLER_REF" != "$BRANCH_NAME_TRIMMED" ]; then
|
||||
echo "ERROR: Prowler dependency mismatch in api/pyproject.toml (expected: '$BRANCH_NAME_TRIMMED', found: '$CURRENT_PROWLER_REF')"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ api/pyproject.toml prowler dependency: $CURRENT_PROWLER_REF"
|
||||
|
||||
- name: Verify API version in api/src/backend/api/v1/views.py
|
||||
if: ${{ env.HAS_API_CHANGES == 'true' }}
|
||||
run: |
|
||||
CURRENT_API_VERSION=$(grep 'spectacular_settings.VERSION = ' api/src/backend/api/v1/views.py | sed -E 's/.*spectacular_settings.VERSION = "([^"]+)".*/\1/' | tr -d '[:space:]')
|
||||
API_VERSION_TRIMMED=$(echo "$API_VERSION" | tr -d '[:space:]')
|
||||
if [ "$CURRENT_API_VERSION" != "$API_VERSION_TRIMMED" ]; then
|
||||
echo "ERROR: API version mismatch in views.py (expected: '$API_VERSION_TRIMMED', found: '$CURRENT_API_VERSION')"
|
||||
exit 1
|
||||
fi
|
||||
echo "✓ api/src/backend/api/v1/views.py version: $CURRENT_API_VERSION"
|
||||
|
||||
- name: Checkout release branch for minor release
|
||||
if: ${{ env.PATCH_VERSION == '0' }}
|
||||
run: |
|
||||
echo "Minor release detected (patch = 0), checking out existing branch $BRANCH_NAME..."
|
||||
if git show-ref --verify --quiet "refs/remotes/origin/$BRANCH_NAME"; then
|
||||
echo "Branch $BRANCH_NAME exists remotely, checking out..."
|
||||
git checkout -b "$BRANCH_NAME" "origin/$BRANCH_NAME"
|
||||
else
|
||||
echo "ERROR: Branch $BRANCH_NAME should exist for minor release $PROWLER_VERSION. Please create it manually first."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Update API prowler dependency for minor release
|
||||
if: ${{ env.PATCH_VERSION == '0' }}
|
||||
run: |
|
||||
CURRENT_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
|
||||
BRANCH_NAME_TRIMMED=$(echo "$BRANCH_NAME" | tr -d '[:space:]')
|
||||
|
||||
# Create a temporary branch for the PR from the minor version branch
|
||||
TEMP_BRANCH="update-api-dependency-$BRANCH_NAME_TRIMMED-$(date +%s)"
|
||||
echo "TEMP_BRANCH=$TEMP_BRANCH" >> $GITHUB_ENV
|
||||
|
||||
# Create temp branch from the current minor version branch
|
||||
git checkout -b "$TEMP_BRANCH"
|
||||
|
||||
# Minor release: update the dependency to use the release branch
|
||||
echo "Updating prowler dependency from '$CURRENT_PROWLER_REF' to '$BRANCH_NAME_TRIMMED'"
|
||||
sed -i "s|prowler @ git+https://github.com/prowler-cloud/prowler.git@[^\"]*\"|prowler @ git+https://github.com/prowler-cloud/prowler.git@$BRANCH_NAME_TRIMMED\"|" api/pyproject.toml
|
||||
|
||||
# Verify the change was made
|
||||
UPDATED_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
|
||||
if [ "$UPDATED_PROWLER_REF" != "$BRANCH_NAME_TRIMMED" ]; then
|
||||
echo "ERROR: Failed to update prowler dependency in api/pyproject.toml"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Update poetry lock file
|
||||
echo "Updating poetry.lock file..."
|
||||
cd api
|
||||
poetry lock
|
||||
cd ..
|
||||
|
||||
# Commit and push the temporary branch
|
||||
git add api/pyproject.toml api/poetry.lock
|
||||
git commit -m "chore(api): update prowler dependency to $BRANCH_NAME_TRIMMED for release $PROWLER_VERSION"
|
||||
git push origin "$TEMP_BRANCH"
|
||||
|
||||
echo "✓ Prepared prowler dependency update to: $UPDATED_PROWLER_REF"
|
||||
|
||||
- name: Create PR for API dependency update
|
||||
if: ${{ env.PATCH_VERSION == '0' }}
|
||||
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
with:
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
branch: ${{ env.TEMP_BRANCH }}
|
||||
base: ${{ env.BRANCH_NAME }}
|
||||
title: "chore(api): Update prowler dependency to ${{ env.BRANCH_NAME }} for release ${{ env.PROWLER_VERSION }}"
|
||||
body: |
|
||||
### Description
|
||||
|
||||
Updates the API prowler dependency for release ${{ env.PROWLER_VERSION }}.
|
||||
|
||||
**Changes:**
|
||||
- Updates `api/pyproject.toml` prowler dependency from `@master` to `@${{ env.BRANCH_NAME }}`
|
||||
- Updates `api/poetry.lock` file with resolved dependencies
|
||||
|
||||
This PR should be merged into the `${{ env.BRANCH_NAME }}` release branch.
|
||||
|
||||
### License
|
||||
|
||||
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
|
||||
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
|
||||
labels: |
|
||||
component/api
|
||||
no-changelog
|
||||
|
||||
- name: Create draft release
|
||||
uses: softprops/action-gh-release@6cbd405e2c4e67a21c47fa9e383d020e4e28b836 # v2.3.3
|
||||
with:
|
||||
tag_name: ${{ env.PROWLER_VERSION }}
|
||||
name: Prowler ${{ env.PROWLER_VERSION }}
|
||||
body_path: combined_changelog.md
|
||||
draft: true
|
||||
target_commitish: ${{ env.BRANCH_NAME }}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Clean up temporary files
|
||||
run: |
|
||||
rm -f prowler_changelog.md api_changelog.md ui_changelog.md mcp_changelog.md combined_changelog.md
|
||||
.github/workflows/refresh_aws_services_regions.yml (vendored, new file, 50 changes) @@ -0,0 +1,50 @@
# This is a basic workflow to help you get started with Actions

name: Refresh regions of AWS services

on:
  schedule:
    - cron: "0 9 * * *" #runs at 09:00 UTC everyday

env:
  GITHUB_BRANCH: "prowler-3.0-dev"

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
  # This workflow contains a single job called "build"
  build:
    # The type of runner that the job will run on
    runs-on: ubuntu-latest
    # Steps represent a sequence of tasks that will be executed as part of the job
    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
      - uses: actions/checkout@v3
        with:
          ref: ${{ env.GITHUB_BRANCH }}

      - name: setup python
        uses: actions/setup-python@v2
        with:
          python-version: 3.9 #install the python needed

      # Runs a single command using the runners shell
      - name: Run a one-line script
        run: python3 util/update_aws_services_regions.py

      # Create pull request
      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          commit-message: "feat(regions_update): Update regions for AWS services."
          branch: "aws-services-regions-updated"
          labels: "status/waiting-for-revision, severity/low"
          title: "feat(regions_update): Changes in regions for AWS services."
          body: |
            ### Description

            This PR updates the regions for AWS services.

            ### License

            By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
.github/workflows/sdk-build-lint-push-containers.yml (vendored, 202 changes) @@ -1,202 +0,0 @@
|
||||
name: SDK - Build and Push containers
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
# For `v3-latest`
|
||||
- "v3"
|
||||
# For `v4-latest`
|
||||
- "v4.6"
|
||||
# For `latest`
|
||||
- "master"
|
||||
paths-ignore:
|
||||
- ".github/**"
|
||||
- "README.md"
|
||||
- "docs/**"
|
||||
- "ui/**"
|
||||
- "api/**"
|
||||
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
env:
|
||||
# AWS Configuration
|
||||
AWS_REGION_STG: eu-west-1
|
||||
AWS_REGION_PLATFORM: eu-west-1
|
||||
AWS_REGION: us-east-1
|
||||
|
||||
# Container's configuration
|
||||
IMAGE_NAME: prowler
|
||||
DOCKERFILE_PATH: ./Dockerfile
|
||||
|
||||
# Tags
|
||||
LATEST_TAG: latest
|
||||
STABLE_TAG: stable
|
||||
# The RELEASE_TAG is set during runtime in releases
|
||||
RELEASE_TAG: ""
|
||||
# The PROWLER_VERSION and PROWLER_VERSION_MAJOR are set during runtime in releases
|
||||
PROWLER_VERSION: ""
|
||||
PROWLER_VERSION_MAJOR: ""
|
||||
# TEMPORARY_TAG: temporary
|
||||
|
||||
# Python configuration
|
||||
PYTHON_VERSION: 3.12
|
||||
|
||||
# Container Registries
|
||||
PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
|
||||
PROWLERCLOUD_DOCKERHUB_IMAGE: prowler
|
||||
|
||||
jobs:
|
||||
# Build Prowler OSS container
|
||||
container-build-push:
|
||||
# needs: dockerfile-linter
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
prowler_version_major: ${{ steps.get-prowler-version.outputs.PROWLER_VERSION_MAJOR }}
|
||||
prowler_version: ${{ steps.get-prowler-version.outputs.PROWLER_VERSION }}
|
||||
env:
|
||||
POETRY_VIRTUALENVS_CREATE: "false"
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
|
||||
- name: Install Poetry
|
||||
run: |
|
||||
pipx install poetry==2.*
|
||||
pipx inject poetry poetry-bumpversion
|
||||
|
||||
- name: Get Prowler version
|
||||
id: get-prowler-version
|
||||
run: |
|
||||
PROWLER_VERSION="$(poetry version -s 2>/dev/null)"
|
||||
echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_OUTPUT}"
|
||||
|
||||
# Store prowler version major just for the release
|
||||
PROWLER_VERSION_MAJOR="${PROWLER_VERSION%%.*}"
|
||||
echo "PROWLER_VERSION_MAJOR=${PROWLER_VERSION_MAJOR}" >> "${GITHUB_ENV}"
|
||||
echo "PROWLER_VERSION_MAJOR=${PROWLER_VERSION_MAJOR}" >> "${GITHUB_OUTPUT}"
|
||||
|
||||
case ${PROWLER_VERSION_MAJOR} in
|
||||
3)
|
||||
echo "LATEST_TAG=v3-latest" >> "${GITHUB_ENV}"
|
||||
echo "STABLE_TAG=v3-stable" >> "${GITHUB_ENV}"
|
||||
;;
|
||||
|
||||
|
||||
4)
|
||||
echo "LATEST_TAG=v4-latest" >> "${GITHUB_ENV}"
|
||||
echo "STABLE_TAG=v4-stable" >> "${GITHUB_ENV}"
|
||||
;;
|
||||
|
||||
5)
|
||||
echo "LATEST_TAG=latest" >> "${GITHUB_ENV}"
|
||||
echo "STABLE_TAG=stable" >> "${GITHUB_ENV}"
|
||||
;;
|
||||
|
||||
*)
|
||||
# Fallback if any other version is present
|
||||
echo "Releasing another Prowler major version, aborting..."
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Login to Public ECR
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
with:
|
||||
registry: public.ecr.aws
|
||||
username: ${{ secrets.PUBLIC_ECR_AWS_ACCESS_KEY_ID }}
|
||||
password: ${{ secrets.PUBLIC_ECR_AWS_SECRET_ACCESS_KEY }}
|
||||
env:
|
||||
AWS_REGION: ${{ env.AWS_REGION }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Build and push container image (latest)
|
||||
if: github.event_name == 'push'
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
push: true
|
||||
tags: |
|
||||
${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}
|
||||
${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}
|
||||
file: ${{ env.DOCKERFILE_PATH }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
- name: Build and push container image (release)
|
||||
if: github.event_name == 'release'
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
# Use local context to get changes
|
||||
# https://github.com/docker/build-push-action#path-context
|
||||
context: .
|
||||
push: true
|
||||
tags: |
|
||||
${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.PROWLER_VERSION }}
|
||||
${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.STABLE_TAG }}
|
||||
${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.PROWLER_VERSION }}
|
||||
${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.STABLE_TAG }}
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.PROWLER_VERSION }}
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.STABLE_TAG }}
|
||||
file: ${{ env.DOCKERFILE_PATH }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
# - name: Push README to Docker Hub (toniblyx)
|
||||
# uses: peter-evans/dockerhub-description@432a30c9e07499fd01da9f8a49f0faf9e0ca5b77 # v4.0.2
|
||||
# with:
|
||||
# username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
# password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
# repository: ${{ env.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}
|
||||
# readme-filepath: ./README.md
|
||||
#
|
||||
# - name: Push README to Docker Hub (prowlercloud)
|
||||
# uses: peter-evans/dockerhub-description@432a30c9e07499fd01da9f8a49f0faf9e0ca5b77 # v4.0.2
|
||||
# with:
|
||||
# username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
# password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
# repository: ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}
|
||||
# readme-filepath: ./README.md
|
||||
|
||||
dispatch-action:
|
||||
needs: container-build-push
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Get latest commit info (latest)
|
||||
if: github.event_name == 'push'
|
||||
run: |
|
||||
LATEST_COMMIT_HASH=$(echo ${{ github.event.after }} | cut -b -7)
|
||||
echo "LATEST_COMMIT_HASH=${LATEST_COMMIT_HASH}" >> $GITHUB_ENV
|
||||
|
||||
- name: Dispatch event (latest)
|
||||
if: github.event_name == 'push' && needs.container-build-push.outputs.prowler_version_major == '3'
|
||||
run: |
|
||||
curl https://api.github.com/repos/${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}/dispatches \
|
||||
-H "Accept: application/vnd.github+json" \
|
||||
-H "Authorization: Bearer ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}" \
|
||||
-H "X-GitHub-Api-Version: 2022-11-28" \
|
||||
--data '{"event_type":"dispatch","client_payload":{"version":"v3-latest", "tag": "${{ env.LATEST_COMMIT_HASH }}"}}'
|
||||
|
||||
- name: Dispatch event (release)
|
||||
if: github.event_name == 'release' && needs.container-build-push.outputs.prowler_version_major == '3'
|
||||
run: |
|
||||
curl https://api.github.com/repos/${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}/dispatches \
|
||||
-H "Accept: application/vnd.github+json" \
|
||||
-H "Authorization: Bearer ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}" \
|
||||
-H "X-GitHub-Api-Version: 2022-11-28" \
|
||||
--data '{"event_type":"dispatch","client_payload":{"version":"release", "tag":"${{ needs.container-build-push.outputs.prowler_version }}"}}'
|
||||
.github/workflows/sdk-bump-version.yml (vendored, 146 changes) @@ -1,146 +0,0 @@
|
||||
name: SDK - Bump Version
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
|
||||
env:
|
||||
PROWLER_VERSION: ${{ github.event.release.tag_name }}
|
||||
BASE_BRANCH: master
|
||||
|
||||
jobs:
|
||||
bump-version:
|
||||
name: Bump Version
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Get Prowler version
|
||||
shell: bash
|
||||
run: |
|
||||
if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
|
||||
MAJOR_VERSION=${BASH_REMATCH[1]}
|
||||
MINOR_VERSION=${BASH_REMATCH[2]}
|
||||
FIX_VERSION=${BASH_REMATCH[3]}
|
||||
|
||||
# Export version components to GitHub environment
|
||||
echo "MAJOR_VERSION=${MAJOR_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "MINOR_VERSION=${MINOR_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "FIX_VERSION=${FIX_VERSION}" >> "${GITHUB_ENV}"
|
||||
|
||||
if (( MAJOR_VERSION == 5 )); then
|
||||
if (( FIX_VERSION == 0 )); then
|
||||
echo "Minor Release: $PROWLER_VERSION"
|
||||
|
||||
# Set up next minor version for master
|
||||
BUMP_VERSION_TO=${MAJOR_VERSION}.$((MINOR_VERSION + 1)).${FIX_VERSION}
|
||||
echo "BUMP_VERSION_TO=${BUMP_VERSION_TO}" >> "${GITHUB_ENV}"
|
||||
|
||||
TARGET_BRANCH=${BASE_BRANCH}
|
||||
echo "TARGET_BRANCH=${TARGET_BRANCH}" >> "${GITHUB_ENV}"
|
||||
|
||||
# Set up patch version for version branch
|
||||
PATCH_VERSION_TO=${MAJOR_VERSION}.${MINOR_VERSION}.1
|
||||
echo "PATCH_VERSION_TO=${PATCH_VERSION_TO}" >> "${GITHUB_ENV}"
|
||||
|
||||
VERSION_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}
|
||||
echo "VERSION_BRANCH=${VERSION_BRANCH}" >> "${GITHUB_ENV}"
|
||||
|
||||
echo "Bumping to next minor version: ${BUMP_VERSION_TO} in branch ${TARGET_BRANCH}"
|
||||
echo "Bumping to next patch version: ${PATCH_VERSION_TO} in branch ${VERSION_BRANCH}"
|
||||
else
|
||||
echo "Patch Release: $PROWLER_VERSION"
|
||||
|
||||
BUMP_VERSION_TO=${MAJOR_VERSION}.${MINOR_VERSION}.$((FIX_VERSION + 1))
|
||||
echo "BUMP_VERSION_TO=${BUMP_VERSION_TO}" >> "${GITHUB_ENV}"
|
||||
|
||||
TARGET_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}
|
||||
echo "TARGET_BRANCH=${TARGET_BRANCH}" >> "${GITHUB_ENV}"
|
||||
|
||||
echo "Bumping to next patch version: ${BUMP_VERSION_TO} in branch ${TARGET_BRANCH}"
|
||||
fi
|
||||
else
|
||||
echo "Releasing another Prowler major version, aborting..."
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
echo "Invalid version syntax: '$PROWLER_VERSION' (must be N.N.N)" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Bump versions in files
|
||||
run: |
|
||||
echo "Using PROWLER_VERSION=$PROWLER_VERSION"
|
||||
echo "Using BUMP_VERSION_TO=$BUMP_VERSION_TO"
|
||||
|
||||
set -e
|
||||
|
||||
echo "Bumping version in pyproject.toml ..."
|
||||
sed -i "s|version = \"${PROWLER_VERSION}\"|version = \"${BUMP_VERSION_TO}\"|" pyproject.toml
|
||||
|
||||
echo "Bumping version in prowler/config/config.py ..."
|
||||
sed -i "s|prowler_version = \"${PROWLER_VERSION}\"|prowler_version = \"${BUMP_VERSION_TO}\"|" prowler/config/config.py
|
||||
|
||||
echo "Bumping version in .env ..."
|
||||
sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${BUMP_VERSION_TO}|" .env
|
||||
|
||||
git --no-pager diff
|
||||
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
with:
|
||||
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
base: ${{ env.TARGET_BRANCH }}
|
||||
commit-message: "chore(release): Bump version to v${{ env.BUMP_VERSION_TO }}"
|
||||
branch: "version-bump-to-v${{ env.BUMP_VERSION_TO }}"
|
||||
title: "chore(release): Bump version to v${{ env.BUMP_VERSION_TO }}"
|
||||
labels: no-changelog
|
||||
body: |
|
||||
### Description
|
||||
|
||||
Bump Prowler version to v${{ env.BUMP_VERSION_TO }}
|
||||
|
||||
### License
|
||||
|
||||
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
|
||||
|
||||
- name: Handle patch version for minor release
|
||||
if: env.FIX_VERSION == '0'
|
||||
run: |
|
||||
echo "Using PROWLER_VERSION=$PROWLER_VERSION"
|
||||
echo "Using PATCH_VERSION_TO=$PATCH_VERSION_TO"
|
||||
|
||||
set -e
|
||||
|
||||
echo "Bumping version in pyproject.toml ..."
|
||||
sed -i "s|version = \"${PROWLER_VERSION}\"|version = \"${PATCH_VERSION_TO}\"|" pyproject.toml
|
||||
|
||||
echo "Bumping version in prowler/config/config.py ..."
|
||||
sed -i "s|prowler_version = \"${PROWLER_VERSION}\"|prowler_version = \"${PATCH_VERSION_TO}\"|" prowler/config/config.py
|
||||
|
||||
echo "Bumping version in .env ..."
|
||||
sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PATCH_VERSION_TO}|" .env
|
||||
|
||||
git --no-pager diff
|
||||
|
||||
- name: Create Pull Request for patch version
|
||||
if: env.FIX_VERSION == '0'
|
||||
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
with:
|
||||
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
base: ${{ env.VERSION_BRANCH }}
|
||||
commit-message: "chore(release): Bump version to v${{ env.PATCH_VERSION_TO }}"
|
||||
branch: "version-bump-to-v${{ env.PATCH_VERSION_TO }}"
|
||||
title: "chore(release): Bump version to v${{ env.PATCH_VERSION_TO }}"
|
||||
labels: no-changelog
|
||||
body: |
|
||||
### Description
|
||||
|
||||
Bump Prowler version to v${{ env.PATCH_VERSION_TO }}
|
||||
|
||||
### License
|
||||
|
||||
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
|
||||
67
.github/workflows/sdk-codeql.yml
vendored
@@ -1,67 +0,0 @@
|
||||
# For most projects, this workflow file will not need changing; you simply need
|
||||
# to commit it to your repository.
|
||||
#
|
||||
# You may wish to alter this file to override the set of languages analyzed,
|
||||
# or to provide custom queries or build logic.
|
||||
#
|
||||
# ******** NOTE ********
|
||||
# We have attempted to detect the languages in your repository. Please check
|
||||
# the `language` matrix defined below to confirm you have the correct set of
|
||||
# supported CodeQL languages.
|
||||
#
|
||||
name: SDK - CodeQL
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- "master"
|
||||
- "v3"
|
||||
- "v4.*"
|
||||
- "v5.*"
|
||||
paths-ignore:
|
||||
- 'ui/**'
|
||||
- 'api/**'
|
||||
- '.github/**'
|
||||
pull_request:
|
||||
branches:
|
||||
- "master"
|
||||
- "v3"
|
||||
- "v4.*"
|
||||
- "v5.*"
|
||||
paths-ignore:
|
||||
- 'ui/**'
|
||||
- 'api/**'
|
||||
- '.github/**'
|
||||
schedule:
|
||||
- cron: '00 12 * * *'
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
actions: read
|
||||
contents: read
|
||||
security-events: write
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
language: [ 'python' ]
|
||||
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
config-file: ./.github/codeql/sdk-codeql-config.yml
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
||||
286
.github/workflows/sdk-pull-request.yml
vendored
@@ -1,286 +0,0 @@
|
||||
name: SDK - Pull Request
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- "master"
|
||||
- "v3"
|
||||
- "v4.*"
|
||||
- "v5.*"
|
||||
pull_request:
|
||||
branches:
|
||||
- "master"
|
||||
- "v3"
|
||||
- "v4.*"
|
||||
- "v5.*"
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ["3.9", "3.10", "3.11", "3.12"]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Test if changes are in not ignored paths
|
||||
id: are-non-ignored-files-changed
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: ./**
|
||||
files_ignore: |
|
||||
.github/**
|
||||
docs/**
|
||||
permissions/**
|
||||
api/**
|
||||
ui/**
|
||||
prowler/CHANGELOG.md
|
||||
README.md
|
||||
mkdocs.yml
|
||||
.backportrc.json
|
||||
.env
|
||||
docker-compose*
|
||||
examples/**
|
||||
.gitignore
|
||||
|
||||
- name: Install poetry
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pipx install poetry==2.1.1
|
||||
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
cache: "poetry"
|
||||
|
||||
- name: Install dependencies
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry install --no-root
|
||||
poetry run pip list
|
||||
VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
|
||||
grep '"tag_name":' | \
|
||||
sed -E 's/.*"v([^"]+)".*/\1/' \
|
||||
) && curl -L -o /tmp/hadolint "https://github.com/hadolint/hadolint/releases/download/v${VERSION}/hadolint-Linux-x86_64" \
|
||||
&& chmod +x /tmp/hadolint
|
||||
|
||||
- name: Poetry check
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry check --lock
|
||||
|
||||
- name: Lint with flake8
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib,ui,api
|
||||
|
||||
- name: Checking format with black
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run black --exclude api ui --check .
|
||||
|
||||
- name: Lint with pylint
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pylint --disable=W,C,R,E -j 0 -rn -sn prowler/
|
||||
|
||||
- name: Bandit
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run bandit -q -lll -x '*_test.py,./contrib/,./api/,./ui' -r .
|
||||
|
||||
- name: Safety
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run safety check --ignore 70612 -r pyproject.toml
|
||||
|
||||
- name: Vulture
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run vulture --exclude "contrib,api,ui" --min-confidence 100 .
|
||||
|
||||
- name: Dockerfile - Check if Dockerfile has changed
|
||||
id: dockerfile-changed-files
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
Dockerfile
|
||||
|
||||
- name: Hadolint
|
||||
if: steps.dockerfile-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
/tmp/hadolint Dockerfile --ignore=DL3013
|
||||
|
||||
# Test AWS
|
||||
- name: AWS - Check if any file has changed
|
||||
id: aws-changed-files
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/aws/**
|
||||
./tests/providers/aws/**
|
||||
./poetry.lock
|
||||
|
||||
- name: AWS - Test
|
||||
if: steps.aws-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/aws --cov-report=xml:aws_coverage.xml tests/providers/aws
|
||||
|
||||
# Test Azure
|
||||
- name: Azure - Check if any file has changed
|
||||
id: azure-changed-files
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/azure/**
|
||||
./tests/providers/azure/**
|
||||
./poetry.lock
|
||||
|
||||
- name: Azure - Test
|
||||
if: steps.azure-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/azure --cov-report=xml:azure_coverage.xml tests/providers/azure
|
||||
|
||||
# Test GCP
|
||||
- name: GCP - Check if any file has changed
|
||||
id: gcp-changed-files
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/gcp/**
|
||||
./tests/providers/gcp/**
|
||||
./poetry.lock
|
||||
|
||||
- name: GCP - Test
|
||||
if: steps.gcp-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/gcp --cov-report=xml:gcp_coverage.xml tests/providers/gcp
|
||||
|
||||
# Test Kubernetes
|
||||
- name: Kubernetes - Check if any file has changed
|
||||
id: kubernetes-changed-files
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/kubernetes/**
|
||||
./tests/providers/kubernetes/**
|
||||
./poetry.lock
|
||||
|
||||
- name: Kubernetes - Test
|
||||
if: steps.kubernetes-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/kubernetes --cov-report=xml:kubernetes_coverage.xml tests/providers/kubernetes
|
||||
|
||||
# Test GitHub
|
||||
- name: GitHub - Check if any file has changed
|
||||
id: github-changed-files
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/github/**
|
||||
./tests/providers/github/**
|
||||
./poetry.lock
|
||||
|
||||
- name: GitHub - Test
|
||||
if: steps.github-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/github --cov-report=xml:github_coverage.xml tests/providers/github
|
||||
|
||||
# Test NHN
|
||||
- name: NHN - Check if any file has changed
|
||||
id: nhn-changed-files
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/nhn/**
|
||||
./tests/providers/nhn/**
|
||||
./poetry.lock
|
||||
|
||||
- name: NHN - Test
|
||||
if: steps.nhn-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/nhn --cov-report=xml:nhn_coverage.xml tests/providers/nhn
|
||||
|
||||
# Test M365
|
||||
- name: M365 - Check if any file has changed
|
||||
id: m365-changed-files
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/m365/**
|
||||
./tests/providers/m365/**
|
||||
./poetry.lock
|
||||
|
||||
- name: M365 - Test
|
||||
if: steps.m365-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/m365 --cov-report=xml:m365_coverage.xml tests/providers/m365
|
||||
|
||||
# Test IaC
|
||||
- name: IaC - Check if any file has changed
|
||||
id: iac-changed-files
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/iac/**
|
||||
./tests/providers/iac/**
|
||||
./poetry.lock
|
||||
|
||||
- name: IaC - Test
|
||||
if: steps.iac-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/iac --cov-report=xml:iac_coverage.xml tests/providers/iac
|
||||
|
||||
# Test MongoDB Atlas
|
||||
- name: MongoDB Atlas - Check if any file has changed
|
||||
id: mongodb-atlas-changed-files
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/mongodbatlas/**
|
||||
./tests/providers/mongodbatlas/**
|
||||
./poetry.lock
|
||||
|
||||
- name: MongoDB Atlas - Test
|
||||
if: steps.mongodb-atlas-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/mongodbatlas --cov-report=xml:mongodb_atlas_coverage.xml tests/providers/mongodbatlas
|
||||
|
||||
# Test OCI
|
||||
- name: OCI - Check if any file has changed
|
||||
id: oci-changed-files
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/oraclecloud/**
|
||||
./tests/providers/oraclecloud/**
|
||||
./poetry.lock
|
||||
|
||||
- name: OCI - Test
|
||||
if: steps.oci-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/oraclecloud --cov-report=xml:oci_coverage.xml tests/providers/oraclecloud
|
||||
|
||||
# Common Tests
|
||||
- name: Lib - Test
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/lib --cov-report=xml:lib_coverage.xml tests/lib
|
||||
|
||||
- name: Config - Test
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/config --cov-report=xml:config_coverage.xml tests/config
|
||||
|
||||
# Codecov
|
||||
- name: Upload coverage reports to Codecov
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
flags: prowler
|
||||
files: ./aws_coverage.xml,./azure_coverage.xml,./gcp_coverage.xml,./kubernetes_coverage.xml,./github_coverage.xml,./nhn_coverage.xml,./m365_coverage.xml,./oci_coverage.xml,./lib_coverage.xml,./config_coverage.xml
|
||||
98
.github/workflows/sdk-pypi-release.yml
vendored
@@ -1,98 +0,0 @@
|
||||
name: SDK - PyPI release
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
env:
|
||||
RELEASE_TAG: ${{ github.event.release.tag_name }}
|
||||
PYTHON_VERSION: 3.11
|
||||
# CACHE: "poetry"
|
||||
|
||||
jobs:
|
||||
repository-check:
|
||||
name: Repository check
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
is_repo: ${{ steps.repository_check.outputs.is_repo }}
|
||||
steps:
|
||||
- name: Repository check
|
||||
id: repository_check
|
||||
working-directory: /tmp
|
||||
run: |
|
||||
if [[ ${{ github.repository }} == "prowler-cloud/prowler" ]]
|
||||
then
|
||||
echo "is_repo=true" >> "${GITHUB_OUTPUT}"
|
||||
else
|
||||
echo "This action only runs for prowler-cloud/prowler"
|
||||
echo "is_repo=false" >> "${GITHUB_OUTPUT}"
|
||||
fi
|
||||
|
||||
release-prowler-job:
|
||||
runs-on: ubuntu-latest
|
||||
needs: repository-check
|
||||
if: needs.repository-check.outputs.is_repo == 'true'
|
||||
env:
|
||||
POETRY_VIRTUALENVS_CREATE: "false"
|
||||
name: Release Prowler to PyPI
|
||||
steps:
|
||||
- name: Repository check
|
||||
working-directory: /tmp
|
||||
run: |
|
||||
if [[ "${{ github.repository }}" != "prowler-cloud/prowler" ]]; then
|
||||
echo "This action only runs for prowler-cloud/prowler"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Get Prowler version
|
||||
run: |
|
||||
PROWLER_VERSION="${{ env.RELEASE_TAG }}"
|
||||
|
||||
case ${PROWLER_VERSION%%.*} in
|
||||
3)
|
||||
echo "Releasing Prowler v3 with tag ${PROWLER_VERSION}"
|
||||
;;
|
||||
4)
|
||||
echo "Releasing Prowler v4 with tag ${PROWLER_VERSION}"
|
||||
;;
|
||||
5)
|
||||
echo "Releasing Prowler v5 with tag ${PROWLER_VERSION}"
|
||||
;;
|
||||
*)
|
||||
echo "Releasing another Prowler major version, aborting..."
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
pipx install poetry==2.1.1
|
||||
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
# cache: ${{ env.CACHE }}
|
||||
|
||||
- name: Build Prowler package
|
||||
run: |
|
||||
poetry build
|
||||
|
||||
- name: Publish Prowler package to PyPI
|
||||
run: |
|
||||
poetry config pypi-token.pypi ${{ secrets.PYPI_API_TOKEN }}
|
||||
poetry publish
|
||||
|
||||
- name: Replicate PyPI package
|
||||
run: |
|
||||
rm -rf ./dist && rm -rf ./build && rm -rf prowler.egg-info
|
||||
pip install toml
|
||||
python util/replicate_pypi_package.py
|
||||
poetry build
|
||||
|
||||
- name: Publish prowler-cloud package to PyPI
|
||||
run: |
|
||||
poetry config pypi-token.pypi ${{ secrets.PYPI_API_TOKEN }}
|
||||
poetry publish
|
||||
@@ -1,68 +0,0 @@
|
||||
# This is a basic workflow to help you get started with Actions
|
||||
|
||||
name: SDK - Refresh AWS services' regions
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 9 * * 1" # runs at 09:00 UTC every Monday
|
||||
|
||||
env:
|
||||
GITHUB_BRANCH: "master"
|
||||
AWS_REGION_DEV: us-east-1
|
||||
|
||||
# A workflow run is made up of one or more jobs that can run sequentially or in parallel
|
||||
jobs:
|
||||
# This workflow contains a single job called "build"
|
||||
build:
|
||||
# The type of runner that the job will run on
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
id-token: write
|
||||
pull-requests: write
|
||||
contents: write
|
||||
# Steps represent a sequence of tasks that will be executed as part of the job
|
||||
steps:
|
||||
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
ref: ${{ env.GITHUB_BRANCH }}
|
||||
|
||||
- name: setup python
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: 3.9 #install the python needed
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install boto3
|
||||
|
||||
- name: Configure AWS Credentials -- DEV
|
||||
uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 # v5.0.0
|
||||
with:
|
||||
aws-region: ${{ env.AWS_REGION_DEV }}
|
||||
role-to-assume: ${{ secrets.DEV_IAM_ROLE_ARN }}
|
||||
role-session-name: refresh-AWS-regions-dev
|
||||
|
||||
# Runs a single command using the runners shell
|
||||
- name: Run a one-line script
|
||||
run: python3 util/update_aws_services_regions.py
|
||||
|
||||
# Create pull request
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
with:
|
||||
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
commit-message: "feat(regions_update): Update regions for AWS services"
|
||||
branch: "aws-services-regions-updated-${{ github.sha }}"
|
||||
labels: "status/waiting-for-revision, severity/low, provider/aws, no-changelog"
|
||||
title: "chore(regions_update): Changes in regions for AWS services"
|
||||
body: |
|
||||
### Description
|
||||
|
||||
This PR updates the regions for AWS services.
|
||||
|
||||
### License
|
||||
|
||||
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
|
||||
121
.github/workflows/ui-build-lint-push-containers.yml
vendored
@@ -1,121 +0,0 @@
|
||||
name: UI - Build and Push containers
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- "master"
|
||||
paths:
|
||||
- "ui/**"
|
||||
- ".github/workflows/ui-build-lint-push-containers.yml"
|
||||
|
||||
# Uncomment the below code to test this action on PRs
|
||||
# pull_request:
|
||||
# branches:
|
||||
# - "master"
|
||||
# paths:
|
||||
# - "ui/**"
|
||||
# - ".github/workflows/ui-build-lint-push-containers.yml"
|
||||
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
env:
|
||||
# Tags
|
||||
LATEST_TAG: latest
|
||||
RELEASE_TAG: ${{ github.event.release.tag_name }}
|
||||
STABLE_TAG: stable
|
||||
|
||||
WORKING_DIRECTORY: ./ui
|
||||
|
||||
# Container Registries
|
||||
PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
|
||||
PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-ui
|
||||
NEXT_PUBLIC_API_BASE_URL: http://prowler-api:8080/api/v1
|
||||
|
||||
jobs:
|
||||
repository-check:
|
||||
name: Repository check
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
is_repo: ${{ steps.repository_check.outputs.is_repo }}
|
||||
steps:
|
||||
- name: Repository check
|
||||
id: repository_check
|
||||
working-directory: /tmp
|
||||
run: |
|
||||
if [[ ${{ github.repository }} == "prowler-cloud/prowler" ]]
|
||||
then
|
||||
echo "is_repo=true" >> "${GITHUB_OUTPUT}"
|
||||
else
|
||||
echo "This action only runs for prowler-cloud/prowler"
|
||||
echo "is_repo=false" >> "${GITHUB_OUTPUT}"
|
||||
fi
|
||||
|
||||
# Build Prowler OSS container
|
||||
container-build-push:
|
||||
needs: repository-check
|
||||
if: needs.repository-check.outputs.is_repo == 'true'
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ${{ env.WORKING_DIRECTORY }}
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Set short git commit SHA
|
||||
id: vars
|
||||
run: |
|
||||
shortSha=$(git rev-parse --short ${{ github.sha }})
|
||||
echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Build and push container image (latest)
|
||||
# Comment the following line for testing
|
||||
if: github.event_name == 'push'
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: ${{ env.WORKING_DIRECTORY }}
|
||||
build-args: |
|
||||
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=${{ env.SHORT_SHA }}
|
||||
NEXT_PUBLIC_API_BASE_URL=${{ env.NEXT_PUBLIC_API_BASE_URL }}
|
||||
# Set push: false for testing
|
||||
push: true
|
||||
tags: |
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.SHORT_SHA }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
- name: Build and push container image (release)
|
||||
if: github.event_name == 'release'
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: ${{ env.WORKING_DIRECTORY }}
|
||||
build-args: |
|
||||
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${{ env.RELEASE_TAG }}
|
||||
NEXT_PUBLIC_API_BASE_URL=${{ env.NEXT_PUBLIC_API_BASE_URL }}
|
||||
push: true
|
||||
tags: |
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }}
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.STABLE_TAG }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
- name: Trigger deployment
|
||||
if: github.event_name == 'push'
|
||||
uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0
|
||||
with:
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
repository: ${{ secrets.CLOUD_DISPATCH }}
|
||||
event-type: prowler-ui-deploy
|
||||
client-payload: '{"sha": "${{ github.sha }}", "short_sha": "${{ env.SHORT_SHA }}"}'
|
||||
59
.github/workflows/ui-codeql.yml
vendored
@@ -1,59 +0,0 @@
|
||||
# For most projects, this workflow file will not need changing; you simply need
|
||||
# to commit it to your repository.
|
||||
#
|
||||
# You may wish to alter this file to override the set of languages analyzed,
|
||||
# or to provide custom queries or build logic.
|
||||
#
|
||||
# ******** NOTE ********
|
||||
# We have attempted to detect the languages in your repository. Please check
|
||||
# the `language` matrix defined below to confirm you have the correct set of
|
||||
# supported CodeQL languages.
|
||||
#
|
||||
name: UI - CodeQL
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- "master"
|
||||
- "v5.*"
|
||||
paths:
|
||||
- "ui/**"
|
||||
pull_request:
|
||||
branches:
|
||||
- "master"
|
||||
- "v5.*"
|
||||
paths:
|
||||
- "ui/**"
|
||||
schedule:
|
||||
- cron: "00 12 * * *"
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
actions: read
|
||||
contents: read
|
||||
security-events: write
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
language: ["javascript"]
|
||||
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
config-file: ./.github/codeql/ui-codeql-config.yml
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
||||
100
.github/workflows/ui-e2e-tests.yml
vendored
@@ -1,100 +0,0 @@
|
||||
name: UI - E2E Tests
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
- "v5.*"
|
||||
paths:
|
||||
- '.github/workflows/ui-e2e-tests.yml'
|
||||
- 'ui/**'
|
||||
|
||||
jobs:
|
||||
e2e-tests:
|
||||
if: github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
AUTH_SECRET: 'fallback-ci-secret-for-testing'
|
||||
AUTH_TRUST_HOST: true
|
||||
NEXTAUTH_URL: 'http://localhost:3000'
|
||||
NEXT_PUBLIC_API_BASE_URL: 'http://localhost:8080/api/v1'
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- name: Fix API data directory permissions
|
||||
run: docker run --rm -v $(pwd)/_data/api:/data alpine chown -R 1000:1000 /data
|
||||
- name: Start API services
|
||||
run: |
|
||||
# Override docker-compose image tag to use latest instead of stable
|
||||
# This overrides any PROWLER_API_VERSION set in .env file
|
||||
export PROWLER_API_VERSION=latest
|
||||
echo "Using PROWLER_API_VERSION=${PROWLER_API_VERSION}"
|
||||
docker compose up -d api worker worker-beat
|
||||
- name: Wait for API to be ready
|
||||
run: |
|
||||
echo "Waiting for prowler-api..."
|
||||
timeout=150 # 2.5 minutes max
|
||||
elapsed=0
|
||||
while [ $elapsed -lt $timeout ]; do
|
||||
if curl -s ${NEXT_PUBLIC_API_BASE_URL}/docs >/dev/null 2>&1; then
|
||||
echo "Prowler API is ready!"
|
||||
exit 0
|
||||
fi
|
||||
echo "Waiting for prowler-api... (${elapsed}s elapsed)"
|
||||
sleep 5
|
||||
elapsed=$((elapsed + 5))
|
||||
done
|
||||
echo "Timeout waiting for prowler-api to start"
|
||||
exit 1
|
||||
- name: Load database fixtures for E2E tests
|
||||
run: |
|
||||
docker compose exec -T api sh -c '
|
||||
echo "Loading all fixtures from api/fixtures/dev/..."
|
||||
for fixture in api/fixtures/dev/*.json; do
|
||||
if [ -f "$fixture" ]; then
|
||||
echo "Loading $fixture"
|
||||
poetry run python manage.py loaddata "$fixture" --database admin
|
||||
fi
|
||||
done
|
||||
echo "All database fixtures loaded successfully!"
|
||||
'
|
||||
- name: Setup Node.js environment
|
||||
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
|
||||
with:
|
||||
node-version: '20.x'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: './ui/package-lock.json'
|
||||
- name: Install UI dependencies
|
||||
working-directory: ./ui
|
||||
run: npm ci
|
||||
- name: Build UI application
|
||||
working-directory: ./ui
|
||||
run: npm run build
|
||||
- name: Cache Playwright browsers
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
id: playwright-cache
|
||||
with:
|
||||
path: ~/.cache/ms-playwright
|
||||
key: ${{ runner.os }}-playwright-${{ hashFiles('ui/package-lock.json') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-playwright-
|
||||
- name: Install Playwright browsers
|
||||
working-directory: ./ui
|
||||
if: steps.playwright-cache.outputs.cache-hit != 'true'
|
||||
run: npm run test:e2e:install
|
||||
- name: Run E2E tests
|
||||
working-directory: ./ui
|
||||
run: npm run test:e2e
|
||||
- name: Upload test reports
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
if: failure()
|
||||
with:
|
||||
name: playwright-report
|
||||
path: ui/playwright-report/
|
||||
retention-days: 30
|
||||
- name: Cleanup services
|
||||
if: always()
|
||||
run: |
|
||||
echo "Shutting down services..."
|
||||
docker compose down -v || true
|
||||
echo "Cleanup completed"
|
||||
65
.github/workflows/ui-pull-request.yml
vendored
@@ -1,65 +0,0 @@
|
||||
name: UI - Pull Request
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- "master"
|
||||
- "v5.*"
|
||||
paths:
|
||||
- ".github/workflows/ui-pull-request.yml"
|
||||
- "ui/**"
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
- "v5.*"
|
||||
paths:
|
||||
- 'ui/**'
|
||||
env:
|
||||
UI_WORKING_DIR: ./ui
|
||||
IMAGE_NAME: prowler-ui
|
||||
|
||||
jobs:
|
||||
test-and-coverage:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-latest]
|
||||
node-version: [20.x]
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Setup Node.js ${{ matrix.node-version }}
|
||||
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
cache: 'npm'
|
||||
cache-dependency-path: './ui/package-lock.json'
|
||||
- name: Install dependencies
|
||||
working-directory: ./ui
|
||||
run: npm ci
|
||||
- name: Run Healthcheck
|
||||
working-directory: ./ui
|
||||
run: npm run healthcheck
|
||||
- name: Build the application
|
||||
working-directory: ./ui
|
||||
run: npm run build
|
||||
|
||||
test-container-build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
- name: Build Container
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: ${{ env.UI_WORKING_DIR }}
|
||||
# Always build using `prod` target
|
||||
target: prod
|
||||
push: false
|
||||
tags: ${{ env.IMAGE_NAME }}:latest
|
||||
outputs: type=docker
|
||||
build-args: |
|
||||
NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY=pk_test_51LwpXXXX
|
||||
56
.gitignore
vendored
@@ -5,15 +5,6 @@
|
||||
[._]ss[a-gi-z]
|
||||
[._]sw[a-p]
|
||||
|
||||
# Python code
|
||||
__pycache__
|
||||
venv/
|
||||
build/
|
||||
/dist/
|
||||
*.egg-info/
|
||||
*/__pycache__/*.pyc
|
||||
.idea/
|
||||
|
||||
# Session
|
||||
Session.vim
|
||||
Sessionx.vim
|
||||
@@ -31,7 +22,7 @@ tags
|
||||
*.DS_Store
|
||||
|
||||
# Prowler output
|
||||
/output
|
||||
output/
|
||||
|
||||
# Prowler found secrets
|
||||
secrets-*/
|
||||
@@ -42,47 +33,8 @@ junit-reports/
|
||||
# VSCode files
|
||||
.vscode/
|
||||
|
||||
# Cursor files
|
||||
.cursorignore
|
||||
.cursor/
|
||||
terraform-kickstarter/.terraform.lock.hcl
|
||||
|
||||
# RooCode files
|
||||
.roo/
|
||||
.rooignore
|
||||
.roomodes
|
||||
terraform-kickstarter/.terraform/providers/registry.terraform.io/hashicorp/aws/3.56.0/darwin_amd64/terraform-provider-aws_v3.56.0_x5
|
||||
|
||||
# Cline files
|
||||
.cline/
|
||||
.clineignore
|
||||
|
||||
# Terraform
|
||||
.terraform*
|
||||
*.tfstate
|
||||
*.tfstate.*
|
||||
|
||||
# .env
|
||||
ui/.env*
|
||||
api/.env*
|
||||
mcp_server/.env*
|
||||
.env.local
|
||||
|
||||
# Coverage
|
||||
.coverage*
|
||||
.coverage
|
||||
coverage*
|
||||
|
||||
# Node
|
||||
node_modules
|
||||
|
||||
# Persistent data
|
||||
_data/
|
||||
|
||||
# Claude
|
||||
CLAUDE.md
|
||||
|
||||
# MCP Server
|
||||
mcp_server/prowler_mcp_server/prowler_app/server.py
|
||||
mcp_server/prowler_mcp_server/prowler_app/utils/schema.yaml
|
||||
|
||||
# Compliance report
|
||||
*.pdf
|
||||
terraform-kickstarter/terraform.tfstate
|
||||
|
||||
@@ -1,128 +1,29 @@
|
||||
repos:
|
||||
## GENERAL
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.6.0
|
||||
rev: v3.3.0
|
||||
hooks:
|
||||
- id: check-merge-conflict
|
||||
- id: check-yaml
|
||||
args: ["--unsafe"]
|
||||
exclude: prowler/config/llm_config.yaml
|
||||
args: ['--unsafe']
|
||||
- id: check-json
|
||||
- id: end-of-file-fixer
|
||||
- id: trailing-whitespace
|
||||
exclude: 'README.md'
|
||||
- id: no-commit-to-branch
|
||||
- id: pretty-format-json
|
||||
args: ["--autofix", --no-sort-keys, --no-ensure-ascii]
|
||||
args: ['--autofix']
|
||||
|
||||
## TOML
|
||||
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
|
||||
rev: v2.13.0
|
||||
hooks:
|
||||
- id: pretty-format-toml
|
||||
args: [--autofix]
|
||||
files: pyproject.toml
|
||||
|
||||
## BASH
|
||||
- repo: https://github.com/koalaman/shellcheck-precommit
|
||||
rev: v0.10.0
|
||||
rev: v0.8.0
|
||||
hooks:
|
||||
- id: shellcheck
|
||||
exclude: contrib
|
||||
|
||||
## PYTHON
|
||||
- repo: https://github.com/myint/autoflake
|
||||
rev: v2.3.1
|
||||
hooks:
|
||||
- id: autoflake
|
||||
args:
|
||||
[
|
||||
"--in-place",
|
||||
"--remove-all-unused-imports",
|
||||
"--remove-unused-variable",
|
||||
]
|
||||
|
||||
- repo: https://github.com/timothycrosley/isort
|
||||
rev: 5.13.2
|
||||
hooks:
|
||||
- id: isort
|
||||
args: ["--profile", "black"]
|
||||
|
||||
- repo: https://github.com/psf/black
|
||||
rev: 24.4.2
|
||||
hooks:
|
||||
- id: black
|
||||
|
||||
- repo: https://github.com/pycqa/flake8
|
||||
rev: 7.0.0
|
||||
hooks:
|
||||
- id: flake8
|
||||
exclude: contrib
|
||||
args: ["--ignore=E266,W503,E203,E501,W605"]
|
||||
|
||||
- repo: https://github.com/python-poetry/poetry
|
||||
rev: 2.1.1
|
||||
hooks:
|
||||
- id: poetry-check
|
||||
name: API - poetry-check
|
||||
args: ["--directory=./api"]
|
||||
pass_filenames: false
|
||||
|
||||
- id: poetry-lock
|
||||
name: API - poetry-lock
|
||||
args: ["--directory=./api"]
|
||||
pass_filenames: false
|
||||
|
||||
- id: poetry-check
|
||||
name: SDK - poetry-check
|
||||
args: ["--directory=./"]
|
||||
pass_filenames: false
|
||||
|
||||
- id: poetry-lock
|
||||
name: SDK - poetry-lock
|
||||
args: ["--directory=./"]
|
||||
pass_filenames: false
|
||||
|
||||
- id: shellcheck
|
||||
|
||||
- repo: https://github.com/hadolint/hadolint
|
||||
rev: v2.13.0-beta
|
||||
rev: v2.10.0
|
||||
hooks:
|
||||
- id: hadolint
|
||||
args: ["--ignore=DL3013"]
|
||||
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: pylint
|
||||
name: pylint
|
||||
entry: bash -c 'pylint --disable=W,C,R,E -j 0 -rn -sn prowler/'
|
||||
name: Lint Dockerfiles
|
||||
description: Runs hadolint to lint Dockerfiles
|
||||
language: system
|
||||
files: '.*\.py'
|
||||
|
||||
- id: trufflehog
|
||||
name: TruffleHog
|
||||
description: Detect secrets in your data.
|
||||
entry: bash -c 'trufflehog --no-update git file://. --only-verified --fail'
|
||||
# For running trufflehog in docker, use the following entry instead:
|
||||
# entry: bash -c 'docker run -v "$(pwd):/workdir" -i --rm trufflesecurity/trufflehog:latest git file:///workdir --only-verified --fail'
|
||||
language: system
|
||||
stages: ["pre-commit", "pre-push"]
|
||||
|
||||
- id: bandit
|
||||
name: bandit
|
||||
description: "Bandit is a tool for finding common security issues in Python code"
|
||||
entry: bash -c 'bandit -q -lll -x '*_test.py,./contrib/,./.venv/' -r .'
|
||||
language: system
|
||||
files: '.*\.py'
|
||||
|
||||
- id: safety
|
||||
name: safety
|
||||
description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
|
||||
# TODO: Botocore needs urllib3 1.X so we need to ignore these vulnerabilities 77744,77745. Remove this once we upgrade to urllib3 2.X
|
||||
entry: bash -c 'safety check --ignore 70612,66963,74429,76352,76353,77744,77745'
|
||||
language: system
|
||||
|
||||
- id: vulture
|
||||
name: vulture
|
||||
description: "Vulture finds unused code in Python programs."
|
||||
entry: bash -c 'vulture --exclude "contrib,.venv,api/src/backend/api/tests/,api/src/backend/conftest.py,api/src/backend/tasks/tests/" --min-confidence 100 .'
|
||||
language: system
|
||||
files: '.*\.py'
|
||||
types: ["dockerfile"]
|
||||
entry: hadolint
|
||||
|
||||
@@ -1,25 +0,0 @@
|
||||
# .readthedocs.yaml
|
||||
# Read the Docs configuration file
|
||||
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
|
||||
|
||||
# Required
|
||||
version: 2
|
||||
|
||||
build:
|
||||
os: "ubuntu-22.04"
|
||||
tools:
|
||||
python: "3.11"
|
||||
jobs:
|
||||
post_create_environment:
|
||||
# Install poetry
|
||||
# https://python-poetry.org/docs/#installing-manually
|
||||
- python -m pip install poetry
|
||||
post_install:
|
||||
# Install dependencies with 'docs' dependency group
|
||||
# https://python-poetry.org/docs/managing-dependencies/#dependency-groups
|
||||
# VIRTUAL_ENV needs to be set manually for now.
|
||||
# See https://github.com/readthedocs/readthedocs.org/pull/11152/
|
||||
- VIRTUAL_ENV=${READTHEDOCS_VIRTUALENV_PATH} python -m poetry install --only=docs
|
||||
|
||||
mkdocs:
|
||||
configuration: mkdocs.yml
|
||||
110
AGENTS.md
@@ -1,110 +0,0 @@
|
||||
# Repository Guidelines
|
||||
|
||||
## How to Use This Guide
|
||||
|
||||
- Start here for cross-project norms. Prowler is a monorepo with several components, and every component should have an `AGENTS.md` file that contains the guidelines for agents working in that component, located beside the code you are touching (e.g. `api/AGENTS.md`, `ui/AGENTS.md`, `prowler/AGENTS.md`).
|
||||
- Follow the stricter rule when guidance conflicts; component docs override this file for their scope.
|
||||
- Keep instructions synchronized. When you add new workflows or scripts, update both the relevant component `AGENTS.md` and this file if they apply broadly.
|
||||
|
||||
## Project Overview
|
||||
|
||||
Prowler is an open-source cloud security assessment tool that supports multiple cloud providers (AWS, Azure, GCP, Kubernetes, GitHub, M365, etc.). The project is a monorepo with the following main components:
|
||||
|
||||
- **Prowler SDK**: Python SDK, includes the Prowler CLI, providers, services, checks, compliances, config, etc. (`prowler/`)
|
||||
- **Prowler API**: Django-based REST API backend (`api/`)
|
||||
- **Prowler UI**: Next.js frontend application (`ui/`)
|
||||
- **Prowler MCP Server**: Model Context Protocol server that gives access to the entire Prowler ecosystem for LLMs (`mcp_server/`)
|
||||
- **Prowler Dashboard**: Prowler CLI feature that lets you visualize scan results in a simple dashboard (`dashboard/`)
|
||||
|
||||
### Project Structure (Key Folders & Files)
|
||||
|
||||
- `prowler/`: Main source code for Prowler SDK (CLI, providers, services, checks, compliances, config, etc.)
|
||||
- `api/`: Django-based REST API backend components
|
||||
- `ui/`: Next.js frontend application
|
||||
- `mcp_server/`: Model Context Protocol server that gives access to the entire Prowler ecosystem for LLMs
|
||||
- `dashboard/`: Prowler CLI feature that lets you visualize scan results in a simple dashboard
|
||||
- `docs/`: Documentation
|
||||
- `examples/`: Example output formats for providers and scripts
|
||||
- `permissions/`: Permission-related files and policies
|
||||
- `contrib/`: Community-contributed scripts or modules
|
||||
- `tests/`: Prowler SDK test suite
|
||||
- `docker-compose.yml`: Docker compose file to run the Prowler App (API + UI) production environment (a usage sketch follows this list)
|
||||
- `docker-compose-dev.yml`: Docker compose file to run the Prowler App (API + UI) development environment
|
||||
- `pyproject.toml`: Poetry Prowler SDK project file
|
||||
- `.pre-commit-config.yaml`: Pre-commit hooks configuration
|
||||
- `Makefile`: Makefile to run the project
|
||||
- `LICENSE`: License file
|
||||
- `README.md`: README file
|
||||
- `CONTRIBUTING.md`: Contributing guide
|
||||
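As noted in the `docker-compose.yml` entries above, here is a minimal sketch of bringing the Prowler App up with Docker Compose. The development command mirrors the `run-api-dev` target defined in the repository `Makefile`; the plain `docker compose up -d` line is an assumption about the default production setup, shown only for illustration:

```bash
# Production-style environment (API + UI), assuming docker-compose.yml defaults
docker compose up -d

# Development environment, as wired in docker-compose-dev.yml (from the Makefile)
docker compose -f docker-compose-dev.yml up api-dev postgres valkey worker-dev worker-beat
```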
|
||||
## Python Development
|
||||
|
||||
Most of the code is written in Python, so the main files in the root are focused on Python code.
|
||||
|
||||
### Poetry Dev Environment
|
||||
|
||||
For Python development we recommend using `poetry` (version `2.1.1` or later) to manage dependencies, and running all commands through `poetry run ...`.
|
||||
|
||||
To install the core development dependencies, run `poetry install --with dev`.
|
||||
|
||||
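For reference, a minimal sketch of that setup. It assumes `pipx` is available on the machine; the pinned Poetry version mirrors the one installed in the CI workflows, and the `make` targets are the ones defined in the repository `Makefile`:

```bash
# Install the pinned Poetry version (same pin as the CI workflows)
pipx install poetry==2.1.1

# Install the SDK plus the development dependency group
poetry install --with dev

# Run tooling through the Poetry environment, e.g. lint and format
poetry run make lint
poetry run make format
```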
### Pre-commit hooks
|
||||
|
||||
The project has pre-commit hooks to lint and format the code. They are installed by running `poetry run pre-commit install`.
|
||||
|
||||
When committing a change, the hooks run automatically (a short usage sketch follows the list below). Some of them are:
|
||||
|
||||
- Code formatting (black, isort)
|
||||
- Linting (flake8, pylint)
|
||||
- Security checks (bandit, safety, trufflehog)
|
||||
- YAML/JSON validation
|
||||
- Poetry lock file validation
|
||||
|
||||
|
||||
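A short sketch of the pre-commit workflow referenced above; the `--all-files` run is optional and shown only for illustration:

```bash
# Install the hooks into the local git repository
poetry run pre-commit install

# Optionally, run every configured hook against the whole tree before pushing
poetry run pre-commit run --all-files
```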
### Linting and Formatting
|
||||
|
||||
We use the following tools to lint and format the code:
|
||||
|
||||
- `flake8`: for linting the code
|
||||
- `black`: for formatting the code
|
||||
- `pylint`: for linting the code
|
||||
|
||||
You can run all using the `make` command:
|
||||
```bash
|
||||
poetry run make lint
|
||||
poetry run make format
|
||||
```
|
||||
|
||||
Or they will be run automatically when you commit your changes using pre-commit hooks.
|
||||
|
||||
## Commit & Pull Request Guidelines
|
||||
|
||||
For commit messages and pull request titles, follow the Conventional Commits style.
|
||||
|
||||
Before creating a pull request, complete the checklist in `.github/pull_request_template.md`. Summaries should explain deployment impact, highlight review steps, and note changelog or permission updates. Run all relevant tests and linters before requesting review, and link screenshots for UI or dashboard changes.
|
||||
|
||||
### Conventional Commit Style
|
||||
|
||||
The Conventional Commits specification is a lightweight convention on top of commit messages. It provides an easy set of rules for creating an explicit commit history, which makes it easier to build automated tooling on top of it.
|
||||
|
||||
The commit message should be structured as follows:
|
||||
|
||||
```
|
||||
<type>[optional scope]: <description>
|
||||
<BLANK LINE>
|
||||
[optional body]
|
||||
<BLANK LINE>
|
||||
[optional footer(s)]
|
||||
```
|
||||
|
||||
No line of the commit message may be longer than 100 characters. This keeps messages easy to read on GitHub as well as in various git tools. An example commit is sketched after the type list below.
|
||||
|
||||
#### Commit Types
|
||||
|
||||
- **feat**: a code change that introduces new functionality to the application
- **fix**: a code change that solves a bug in the codebase
- **docs**: documentation-only changes
- **chore**: changes related to the build process or auxiliary tools and libraries that do not affect the application's functionality
- **perf**: a code change that improves performance
- **refactor**: a code change that neither fixes a bug nor adds a feature
- **style**: changes that do not affect the meaning of the code (white-space, formatting, missing semi-colons, etc.)
- **test**: adding missing tests or correcting existing tests
|
||||
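To make the format concrete, a hedged example of a commit created with this convention; the scope, description, and body are hypothetical and only illustrate the shape of the message:

```bash
# Hypothetical example: type "feat", scope "aws", a short description, then a body paragraph
git commit \
  -m "feat(aws): add check for S3 buckets without default encryption" \
  -m "Flag S3 buckets that do not have default server-side encryption enabled."
```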
@@ -55,7 +55,7 @@ further defined and clarified by project maintainers.
|
||||
## Enforcement
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||
reported by contacting the project team at [support.prowler.com](https://customer.support.prowler.com/servicedesk/customer/portals). All
|
||||
reported by contacting the project team at community@prowler.cloud. All
|
||||
complaints will be reviewed and investigated and will result in a response that
|
||||
is deemed necessary and appropriate to the circumstances. The project team is
|
||||
obligated to maintain confidentiality with regard to the reporter of an incident.
|
||||
|
||||
@@ -1,13 +0,0 @@
|
||||
# Do you want to learn how to...
|
||||
|
||||
- Contribute with your code or fixes to Prowler
|
||||
- Create a new check for a provider
|
||||
- Create a new security compliance framework
|
||||
- Add a custom output format
|
||||
- Add a new integration
|
||||
- Contribute with documentation
|
||||
|
||||
Want some swag as appreciation for your contribution?
|
||||
|
||||
# Prowler Developer Guide
|
||||
https://docs.prowler.com/projects/prowler-open-source/en/latest/developer-guide/introduction/
|
||||
97
Dockerfile
@@ -1,61 +1,64 @@
|
||||
FROM python:3.12.11-slim-bookworm AS build
|
||||
# Build command
|
||||
# docker build --platform=linux/amd64 --no-cache -t prowler:latest -f util/Dockerfile .
|
||||
|
||||
# hadolint ignore=DL3007
|
||||
FROM public.ecr.aws/amazonlinux/amazonlinux:latest
|
||||
|
||||
LABEL maintainer="https://github.com/prowler-cloud/prowler"
|
||||
LABEL org.opencontainers.image.source="https://github.com/prowler-cloud/prowler"
|
||||
|
||||
ARG POWERSHELL_VERSION=7.5.0
|
||||
ARG USERNAME=prowler
|
||||
ARG USERID=34000
|
||||
|
||||
# hadolint ignore=DL3008
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
wget libicu72 libunwind8 libssl3 libcurl4 ca-certificates apt-transport-https gnupg \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
# Prepare image as root
|
||||
USER 0
|
||||
# System dependencies
|
||||
# hadolint ignore=DL3006,DL3013,DL3033
|
||||
RUN yum upgrade -y && \
|
||||
yum install -y python3 bash curl jq coreutils py3-pip which unzip shadow-utils && \
|
||||
yum clean all && \
|
||||
rm -rf /var/cache/yum
|
||||
|
||||
# Install PowerShell
|
||||
RUN ARCH=$(uname -m) && \
|
||||
if [ "$ARCH" = "x86_64" ]; then \
|
||||
wget --progress=dot:giga https://github.com/PowerShell/PowerShell/releases/download/v${POWERSHELL_VERSION}/powershell-${POWERSHELL_VERSION}-linux-x64.tar.gz -O /tmp/powershell.tar.gz ; \
|
||||
elif [ "$ARCH" = "aarch64" ]; then \
|
||||
wget --progress=dot:giga https://github.com/PowerShell/PowerShell/releases/download/v${POWERSHELL_VERSION}/powershell-${POWERSHELL_VERSION}-linux-arm64.tar.gz -O /tmp/powershell.tar.gz ; \
|
||||
else \
|
||||
echo "Unsupported architecture: $ARCH" && exit 1 ; \
|
||||
fi && \
|
||||
mkdir -p /opt/microsoft/powershell/7 && \
|
||||
tar zxf /tmp/powershell.tar.gz -C /opt/microsoft/powershell/7 && \
|
||||
chmod +x /opt/microsoft/powershell/7/pwsh && \
|
||||
ln -s /opt/microsoft/powershell/7/pwsh /usr/bin/pwsh && \
|
||||
rm /tmp/powershell.tar.gz
|
||||
RUN amazon-linux-extras install -y epel postgresql14 && \
|
||||
yum clean all && \
|
||||
rm -rf /var/cache/yum
|
||||
|
||||
# Add prowler user
|
||||
RUN addgroup --gid 1000 prowler && \
|
||||
adduser --uid 1000 --gid 1000 --disabled-password --gecos "" prowler
|
||||
# Create non-root user
|
||||
RUN useradd -l -s /bin/bash -U -u ${USERID} ${USERNAME}
|
||||
|
||||
USER prowler
|
||||
USER ${USERNAME}
|
||||
|
||||
WORKDIR /home/prowler
|
||||
# Python dependencies
|
||||
# hadolint ignore=DL3006,DL3013,DL3042
|
||||
RUN pip3 install --upgrade pip && \
|
||||
pip3 install --no-cache-dir boto3 detect-secrets==1.0.3 && \
|
||||
pip3 cache purge
|
||||
# Set Python PATH
|
||||
ENV PATH="/home/${USERNAME}/.local/bin:${PATH}"
|
||||
|
||||
# Copy necessary files
|
||||
COPY prowler/ /home/prowler/prowler/
|
||||
COPY dashboard/ /home/prowler/dashboard/
|
||||
COPY pyproject.toml /home/prowler
|
||||
COPY README.md /home/prowler/
|
||||
COPY prowler/providers/m365/lib/powershell/m365_powershell.py /home/prowler/prowler/providers/m365/lib/powershell/m365_powershell.py
|
||||
USER 0
|
||||
|
||||
# Install Python dependencies
|
||||
ENV HOME='/home/prowler'
|
||||
ENV PATH="${HOME}/.local/bin:${PATH}"
|
||||
#hadolint ignore=DL3013
|
||||
RUN pip install --no-cache-dir --upgrade pip && \
|
||||
pip install --no-cache-dir poetry
|
||||
# Install AWS CLI
|
||||
RUN curl https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip -o awscliv2.zip && \
|
||||
unzip -q awscliv2.zip && \
|
||||
aws/install && \
|
||||
rm -rf aws awscliv2.zip
|
||||
|
||||
RUN poetry install --compile && \
|
||||
rm -rf ~/.cache/pip
|
||||
# Keep Python2 for yum
|
||||
RUN sed -i '1 s/python/python2.7/' /usr/bin/yum
|
||||
|
||||
# Install PowerShell modules
|
||||
RUN poetry run python prowler/providers/m365/lib/powershell/m365_powershell.py
|
||||
# Set Python3
|
||||
RUN rm /usr/bin/python && \
|
||||
ln -s /usr/bin/python3 /usr/bin/python
|
||||
|
||||
# Remove deprecated dash dependencies
|
||||
RUN pip uninstall dash-html-components -y && \
|
||||
pip uninstall dash-core-components -y
|
||||
# Set working directory
|
||||
WORKDIR /prowler
|
||||
|
||||
USER prowler
|
||||
ENTRYPOINT ["poetry", "run", "prowler"]
|
||||
# Copy all files
|
||||
COPY . ./
|
||||
|
||||
# Set files ownership
|
||||
RUN chown -R prowler .
|
||||
|
||||
USER ${USERNAME}
|
||||
|
||||
ENTRYPOINT ["./prowler"]
|
||||
|
||||
4
LICENSE
@@ -186,7 +186,7 @@
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright @ 2024 Toni de la Fuente
|
||||
Copyright 2018 Netflix, Inc.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
@@ -198,4 +198,4 @@
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
limitations under the License.
|
||||
5
LIST_OF_CHECKS_AND_GROUPS.md
Normal file
@@ -0,0 +1,5 @@
|
||||
```
|
||||
./prowler -l # to see all available checks and their groups.
|
||||
./prowler -L # to see all available groups only.
|
||||
./prowler -l -g groupname # to see checks in a particular group
|
||||
```
|
||||
59
Makefile
@@ -1,59 +0,0 @@
|
||||
.DEFAULT_GOAL:=help
|
||||
|
||||
##@ Testing
|
||||
test: ## Test with pytest
|
||||
rm -rf .coverage && \
|
||||
pytest -n auto -vvv -s --cov=./prowler --cov-report=xml tests
|
||||
|
||||
coverage: ## Show Test Coverage
|
||||
coverage run --skip-covered -m pytest -v && \
|
||||
coverage report -m && \
|
||||
rm -rf .coverage && \
|
||||
coverage report -m
|
||||
|
||||
coverage-html: ## Show Test Coverage
|
||||
rm -rf ./htmlcov && \
|
||||
coverage html && \
|
||||
open htmlcov/index.html
|
||||
|
||||
##@ Linting
|
||||
format: ## Format Code
|
||||
@echo "Running black..."
|
||||
black .
|
||||
|
||||
lint: ## Lint Code
|
||||
@echo "Running flake8..."
|
||||
flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib
|
||||
@echo "Running black... "
|
||||
black --check .
|
||||
@echo "Running pylint..."
|
||||
pylint --disable=W,C,R,E -j 0 prowler util
|
||||
|
||||
##@ PyPI
|
||||
pypi-clean: ## Delete the distribution files
|
||||
rm -rf ./dist && rm -rf ./build && rm -rf prowler.egg-info
|
||||
|
||||
pypi-build: ## Build package
|
||||
$(MAKE) pypi-clean && \
|
||||
poetry build
|
||||
|
||||
pypi-upload: ## Upload package
|
||||
python3 -m twine upload --repository pypi dist/*
|
||||
|
||||
|
||||
##@ Help
|
||||
help: ## Show this help.
|
||||
@echo "Prowler Makefile"
|
||||
@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m<target>\033[0m\n"} /^[a-zA-Z_-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)
|
||||
|
||||
##@ Build no cache
|
||||
build-no-cache-dev:
|
||||
docker compose -f docker-compose-dev.yml build --no-cache api-dev worker-dev worker-beat
|
||||
|
||||
##@ Development Environment
|
||||
run-api-dev: ## Start development environment with API, PostgreSQL, Valkey, and workers
|
||||
docker compose -f docker-compose-dev.yml up api-dev postgres valkey worker-dev worker-beat
|
||||
|
||||
##@ Development Environment
|
||||
build-and-run-api-dev: build-no-cache-dev run-api-dev
|
||||
|
||||
13	Pipfile	Normal file
@@ -0,0 +1,13 @@
[[source]]
name = "pypi"
url = "https://pypi.org/simple"
verify_ssl = true

[dev-packages]

[packages]
boto3 = ">=1.9.188"
detect-secrets = "==1.0.3"

[requires]
python_version = "3.7"
65	SECURITY.md
@@ -1,65 +0,0 @@
# Security

## Reporting Vulnerabilities

At Prowler, we consider the security of our open source software and systems a top priority. But no matter how much effort we put into system security, there can still be vulnerabilities present.

If you discover a vulnerability, we would like to know about it so we can take steps to address it as quickly as possible. We would like to ask you to help us better protect our users, our clients and our systems.

When reporting vulnerabilities, please consider (1) the attack scenario / exploitability, and (2) the security impact of the bug. The following issues are considered out of scope:

- Social engineering support or attacks requiring social engineering.
- Clickjacking on pages with no sensitive actions.
- Cross-Site Request Forgery (CSRF) on unauthenticated forms or forms with no sensitive actions.
- Attacks requiring Man-In-The-Middle (MITM) or physical access to a user's device.
- Previously known vulnerable libraries without a working Proof of Concept (PoC).
- Comma Separated Values (CSV) injection without demonstrating a vulnerability.
- Missing best practices in SSL/TLS configuration.
- Any activity that could lead to the disruption of service (DoS).
- Rate limiting or brute force issues on non-authentication endpoints.
- Missing best practices in Content Security Policy (CSP).
- Missing HttpOnly or Secure flags on cookies.
- Configuration of or missing security headers.
- Missing email best practices, such as invalid, incomplete, or missing SPF/DKIM/DMARC records.
- Vulnerabilities only affecting users of outdated or unpatched browsers (less than two stable versions behind).
- Software version disclosure, banner identification issues, or descriptive error messages.
- Tabnabbing.
- Issues that require unlikely user interaction.
- Improper logout functionality and improper session timeout.
- CORS misconfiguration without an exploitation scenario.
- Broken link hijacking.
- Automated scanning results (e.g., sqlmap, Burp active scanner) that have not been manually verified.
- Content spoofing and text injection issues without a clear attack vector.
- Email spoofing without exploiting security flaws.
- Dead links or broken links.
- User enumeration.

Testing guidelines:
- Do not run automated scanners on other customers' projects. Running automated scanners can run up costs for our users. Aggressively configured scanners might inadvertently disrupt services, exploit vulnerabilities, lead to system instability or breaches, and violate the Terms of Service of our upstream providers. Our own security systems cannot distinguish hostile reconnaissance from whitehat research. If you wish to run an automated scanner, notify us at support@prowler.com and only run it on your own Prowler App project. Do NOT attack Prowler projects belonging to other customers.
- Do not take advantage of the vulnerability or problem you have discovered, for example by downloading more data than necessary to demonstrate the vulnerability or by deleting or modifying other people's data.

Reporting guidelines:
- File a report through our Support Desk at https://support.prowler.com
- If it concerns a missing security feature, please file a feature request instead at https://github.com/prowler-cloud/prowler/issues
- Do provide sufficient information to reproduce the problem, so we will be able to resolve it as quickly as possible.
- If you have further questions and want direct interaction with the Prowler team, please contact us via our Community Slack at goto.prowler.com/slack.

Disclosure guidelines:
- In order to protect our users and customers, do not reveal the problem to others until we have researched, addressed and informed our affected customers.
- If you want to publicly share your research about Prowler at a conference, in a blog or any other public forum, you should share a draft with us for review and approval at least 30 days prior to the publication date. Please note that the following should not be included:
  - Data regarding any Prowler user or customer projects.
  - Prowler customers' data.
  - Information about Prowler employees, contractors or partners.

What we promise:
- We will respond to your report within 5 business days with our evaluation of the report and an expected resolution date.
- If you have followed the instructions above, we will not take any legal action against you in regard to the report.
- We will handle your report with strict confidentiality, and not pass on your personal details to third parties without your permission.
- We will keep you informed of the progress towards resolving the problem.
- In the public information concerning the problem reported, we will give your name as the discoverer of the problem (unless you desire otherwise).

We strive to resolve all problems as quickly as possible, and we would like to play an active role in the ultimate publication on the problem after it is resolved.

---

For more information about our security policies, please refer to the [Security](https://docs.prowler.com/projects/prowler-open-source/en/latest/security/) section in our documentation.
29	allowlist_example.txt	Normal file
@@ -0,0 +1,29 @@
# Each line is a (checkid:item) tuple

# Example: failures on myignoredbucket will not be counted as full failures (they are still printed as warnings)
check26:myignoredbucket

# Note that by default, this searches for the string appearing *anywhere* in the resource name.
# For example:
# extra718:ci-logs   # Will block bucket "ci-logs" AND ALSO bucket "ci-logs-replica"
# extra718:logs      # Will block EVERY BUCKET containing the string "logs"

# Lines starting with # are ignored as comments
# Add a line per resource, as here:
#<checkid1>:<resource to ignore 1>
#<checkid1>:<resource to ignore 2>
# checkid2
#<checkid2>:<resource to ignore 1>

# REGEXES
# This allowlist works with regexes (ERE, the same style of regex that grep -E and bash's =~ use),
# therefore:
# extra718:[[:alnum:]]+-logs   # will ignore all buckets containing terms such as ci-logs, qa-logs, etc.

# EXAMPLE: CONTROL TOWER
# When using Control Tower, guardrails prevent access to certain protected resources. The allowlist
# below ensures that warnings instead of errors are reported for the affected resources.
#extra734:aws-controltower-logs-[[:digit:]]+-[[:alpha:]\-]+
#extra734:aws-controltower-s3-access-logs-[[:digit:]]+-[[:alpha:]\-]+
#extra764:aws-controltower-logs-[[:digit:]]+-[[:alpha:]\-]+
#extra764:aws-controltower-s3-access-logs-[[:digit:]]+-[[:alpha:]\-]+
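To make the matching rules of the regex-based allowlist above concrete, here is a minimal Python sketch of how such a file could be parsed and applied. It is illustrative only (the allowlist names and patterns are the examples from the file, not Prowler's own implementation), and note that Python's `re` module does not understand POSIX classes such as `[[:alnum:]]`, so equivalent `[0-9]`/`[a-z]` classes are used instead:

```python
import re

# Hypothetical allowlist entries, translated from ERE/POSIX classes to Python regex syntax.
ALLOWLIST = [
    "check26:myignoredbucket",
    "extra734:aws-controltower-logs-[0-9]+-[a-z\\-]+",
]


def is_allowlisted(check_id: str, resource: str, allowlist=ALLOWLIST) -> bool:
    """Return True if the (check, resource) pair matches an allowlist entry."""
    for line in allowlist:
        line = line.strip()
        if not line or line.startswith("#"):
            continue  # comments and blank lines are ignored
        entry_check, _, pattern = line.partition(":")
        # The pattern may appear anywhere in the resource name, hence re.search.
        if entry_check == check_id and re.search(pattern, resource):
            return True
    return False


print(is_allowlisted("check26", "myignoredbucket-replica"))  # True: substring match
print(is_allowlisted("extra734", "aws-controltower-logs-123456789012-eu-west-1"))  # True: regex match
```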
@@ -1,60 +0,0 @@
# Django settings
DJANGO_ALLOWED_HOSTS=localhost,127.0.0.1
DJANGO_BIND_ADDRESS=0.0.0.0
DJANGO_PORT=8000
DJANGO_DEBUG=False
# Select one of [production|devel]
DJANGO_SETTINGS_MODULE=config.django.[production|devel]
# Select one of [ndjson|human_readable]
DJANGO_LOGGING_FORMATTER=[ndjson|human_readable]
# Select one of [DEBUG|INFO|WARNING|ERROR|CRITICAL]
# Applies to both Django and Celery Workers
DJANGO_LOGGING_LEVEL=INFO
DJANGO_WORKERS=4 # Defaults to the maximum available based on CPU cores if not set.
DJANGO_TOKEN_SIGNING_KEY=""
DJANGO_TOKEN_VERIFYING_KEY=""
# Token lifetimes are in minutes
DJANGO_ACCESS_TOKEN_LIFETIME=30
DJANGO_REFRESH_TOKEN_LIFETIME=1440
DJANGO_CACHE_MAX_AGE=3600
DJANGO_STALE_WHILE_REVALIDATE=60
DJANGO_SECRETS_ENCRYPTION_KEY=""
# Throttling, two options: empty means no throttling; otherwise use a rate in DRF format: https://www.django-rest-framework.org/api-guide/throttling/#setting-the-throttling-policy
DJANGO_THROTTLE_TOKEN_OBTAIN=50/minute
# Decide whether to allow Django to manage database table partitions
DJANGO_MANAGE_DB_PARTITIONS=[True|False]
DJANGO_CELERY_DEADLOCK_ATTEMPTS=5
DJANGO_BROKER_VISIBILITY_TIMEOUT=86400
DJANGO_SENTRY_DSN=

# PostgreSQL settings
# If running Django and Celery on the host, use 'localhost', else use 'postgres-db'
POSTGRES_HOST=[localhost|postgres-db]
POSTGRES_PORT=5432
POSTGRES_ADMIN_USER=prowler
POSTGRES_ADMIN_PASSWORD=S3cret
POSTGRES_USER=prowler_user
POSTGRES_PASSWORD=S3cret
POSTGRES_DB=prowler_db

# Valkey settings
# If running Django and Celery on the host, use 'localhost', else use 'valkey'
VALKEY_HOST=[localhost|valkey]
VALKEY_PORT=6379
VALKEY_DB=0

# Sentry settings
SENTRY_ENVIRONMENT=local
SENTRY_RELEASE=local

# Social login credentials
DJANGO_GOOGLE_OAUTH_CLIENT_ID=""
DJANGO_GOOGLE_OAUTH_CLIENT_SECRET=""
DJANGO_GOOGLE_OAUTH_CALLBACK_URL=""

DJANGO_GITHUB_OAUTH_CLIENT_ID=""
DJANGO_GITHUB_OAUTH_CLIENT_SECRET=""
DJANGO_GITHUB_OAUTH_CALLBACK_URL=""

# Deletion task batch size
DJANGO_DELETION_BATCH_SIZE=5000
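As a rough illustration of how the database variables above are consumed, the sketch below maps them onto a Django `DATABASES`-style dictionary. This is not the project's actual settings module (the real configuration lives under `src/backend/config` and uses django-environ); it only shows the shape of the mapping:

```python
import os


def database_settings() -> dict:
    # Illustrative only: defaults mirror the sample values in the .env file above.
    return {
        "default": {
            "ENGINE": "django.db.backends.postgresql",
            "HOST": os.environ.get("POSTGRES_HOST", "localhost"),
            "PORT": int(os.environ.get("POSTGRES_PORT", "5432")),
            "NAME": os.environ.get("POSTGRES_DB", "prowler_db"),
            "USER": os.environ.get("POSTGRES_USER", "prowler_user"),
            "PASSWORD": os.environ.get("POSTGRES_PASSWORD", ""),
        }
    }
```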
280	api/CHANGELOG.md
@@ -1,280 +0,0 @@
|
||||
# Prowler API Changelog
|
||||
|
||||
All notable changes to the **Prowler API** are documented in this file.
|
||||
|
||||
## [1.14.0] (Prowler 5.13.0)
|
||||
|
||||
### Added
|
||||
- Default JWT keys are generated and stored if they are missing from configuration [(#8655)](https://github.com/prowler-cloud/prowler/pull/8655)
|
||||
- `compliance_name` for each compliance [(#7920)](https://github.com/prowler-cloud/prowler/pull/7920)
|
||||
- Support C5 compliance framework for the AWS provider [(#8830)](https://github.com/prowler-cloud/prowler/pull/8830)
|
||||
- Support for M365 Certificate authentication [(#8538)](https://github.com/prowler-cloud/prowler/pull/8538)
|
||||
- API Key support [(#8805)](https://github.com/prowler-cloud/prowler/pull/8805)
|
||||
- SAML role mapping protection for single-admin tenants to prevent accidental lockout [(#8882)](https://github.com/prowler-cloud/prowler/pull/8882)
|
||||
- Support for `passed_findings` and `total_findings` fields in compliance requirement overview for accurate Prowler ThreatScore calculation [(#8582)](https://github.com/prowler-cloud/prowler/pull/8582)
|
||||
- PDF reporting for Prowler ThreatScore [(#8867)](https://github.com/prowler-cloud/prowler/pull/8867)
|
||||
- Database read replica support [(#8869)](https://github.com/prowler-cloud/prowler/pull/8869)
|
||||
- Support Common Cloud Controls for AWS, Azure and GCP [(#8000)](https://github.com/prowler-cloud/prowler/pull/8000)
|
||||
- Add `provider_id__in` filter support to findings and findings severity overview endpoints [(#8951)](https://github.com/prowler-cloud/prowler/pull/8951)
|
||||
|
||||
### Changed
|
||||
- Now the MANAGE_ACCOUNT permission is required to modify or read user permissions instead of MANAGE_USERS [(#8281)](https://github.com/prowler-cloud/prowler/pull/8281)
|
||||
- Now at least one user with MANAGE_ACCOUNT permission is required in the tenant [(#8729)](https://github.com/prowler-cloud/prowler/pull/8729)
|
||||
|
||||
### Security
|
||||
- Django updated to the latest 5.1 security release, 5.1.13, due to problems with potential [SQL injection](https://github.com/prowler-cloud/prowler/security/dependabot/104) and [directory traversals](https://github.com/prowler-cloud/prowler/security/dependabot/103) [(#8842)](https://github.com/prowler-cloud/prowler/pull/8842)
|
||||
|
||||
---
|
||||
|
||||
## [1.13.2] (Prowler 5.12.3)
|
||||
|
||||
### Fixed
|
||||
- 500 error when deleting user [(#8731)](https://github.com/prowler-cloud/prowler/pull/8731)
|
||||
|
||||
---
|
||||
|
||||
## [1.13.1] (Prowler 5.12.2)
|
||||
|
||||
### Changed
|
||||
- Renamed compliance overview task queue to `compliance` [(#8755)](https://github.com/prowler-cloud/prowler/pull/8755)
|
||||
|
||||
### Security
|
||||
- Django updated to the latest 5.1 security release, 5.1.12, due to [problems](https://www.djangoproject.com/weblog/2025/sep/03/security-releases/) with potential SQL injection in FilteredRelation column aliases [(#8693)](https://github.com/prowler-cloud/prowler/pull/8693)
|
||||
|
||||
---
|
||||
|
||||
## [1.13.0] (Prowler 5.12.0)
|
||||
|
||||
### Added
|
||||
- Integration with JIRA, enabling sending findings to a JIRA project [(#8622)](https://github.com/prowler-cloud/prowler/pull/8622), [(#8637)](https://github.com/prowler-cloud/prowler/pull/8637)
|
||||
- `GET /overviews/findings_severity` now supports `filter[status]` and `filter[status__in]` to aggregate by specific statuses (`FAIL`, `PASS`)[(#8186)](https://github.com/prowler-cloud/prowler/pull/8186)
|
||||
- Throttling options for `/api/v1/tokens` using the `DJANGO_THROTTLE_TOKEN_OBTAIN` environment variable [(#8647)](https://github.com/prowler-cloud/prowler/pull/8647)
|
||||
|
||||
---
|
||||
|
||||
## [1.12.0] (Prowler 5.11.0)
|
||||
|
||||
### Added
|
||||
- Lighthouse support for OpenAI GPT-5 [(#8527)](https://github.com/prowler-cloud/prowler/pull/8527)
|
||||
- Integration with Amazon Security Hub, enabling sending findings to Security Hub [(#8365)](https://github.com/prowler-cloud/prowler/pull/8365)
|
||||
- Generate ASFF output for AWS providers with SecurityHub integration enabled [(#8569)](https://github.com/prowler-cloud/prowler/pull/8569)
|
||||
|
||||
### Fixed
|
||||
- GitHub provider always scans user instead of organization when using provider UID [(#8587)](https://github.com/prowler-cloud/prowler/pull/8587)
|
||||
|
||||
---
|
||||
|
||||
## [1.11.0] (Prowler 5.10.0)
|
||||
|
||||
### Added
|
||||
- Github provider support [(#8271)](https://github.com/prowler-cloud/prowler/pull/8271)
|
||||
- Integration with Amazon S3, enabling storage and retrieval of scan data via S3 buckets [(#8056)](https://github.com/prowler-cloud/prowler/pull/8056)
|
||||
|
||||
### Fixed
|
||||
- Avoid sending errors to Sentry in M365 provider when user authentication fails [(#8420)](https://github.com/prowler-cloud/prowler/pull/8420)
|
||||
|
||||
---
|
||||
|
||||
## [1.10.2] (Prowler v5.9.2)
|
||||
|
||||
### Changed
|
||||
- Optimized queries for resources views [(#8336)](https://github.com/prowler-cloud/prowler/pull/8336)
|
||||
|
||||
---
|
||||
|
||||
## [v1.10.1] (Prowler v5.9.1)
|
||||
|
||||
### Fixed
|
||||
- Calculate failed findings during scans to prevent heavy database queries [(#8322)](https://github.com/prowler-cloud/prowler/pull/8322)
|
||||
|
||||
---
|
||||
|
||||
## [v1.10.0] (Prowler v5.9.0)
|
||||
|
||||
### Added
|
||||
- SSO with SAML support [(#8175)](https://github.com/prowler-cloud/prowler/pull/8175)
|
||||
- `GET /resources/metadata`, `GET /resources/metadata/latest` and `GET /resources/latest` to expose resource metadata and latest scan results [(#8112)](https://github.com/prowler-cloud/prowler/pull/8112)
|
||||
|
||||
### Changed
|
||||
- `/processors` endpoints to post-process findings. Currently, only the Mutelist processor is supported to allow to mute findings.
|
||||
- Optimized the underlying queries for resources endpoints [(#8112)](https://github.com/prowler-cloud/prowler/pull/8112)
|
||||
- Optimized include parameters for resources view [(#8229)](https://github.com/prowler-cloud/prowler/pull/8229)
|
||||
- Optimized overview background tasks [(#8300)](https://github.com/prowler-cloud/prowler/pull/8300)
|
||||
|
||||
### Fixed
|
||||
- Search filter for findings and resources [(#8112)](https://github.com/prowler-cloud/prowler/pull/8112)
|
||||
- RBAC is now applied to `GET /overviews/providers` [(#8277)](https://github.com/prowler-cloud/prowler/pull/8277)
|
||||
|
||||
### Changed
|
||||
- `POST /schedules/daily` returns a `409 CONFLICT` if already created [(#8258)](https://github.com/prowler-cloud/prowler/pull/8258)
|
||||
|
||||
### Security
|
||||
- Enhanced password validation to enforce 12+ character passwords with special characters, uppercase, lowercase, and numbers [(#8225)](https://github.com/prowler-cloud/prowler/pull/8225)
|
||||
|
||||
---
|
||||
|
||||
## [v1.9.1] (Prowler v5.8.1)
|
||||
|
||||
### Added
|
||||
- Custom exception for provider connection errors during scans [(#8234)](https://github.com/prowler-cloud/prowler/pull/8234)
|
||||
|
||||
### Changed
|
||||
- Summary and overview tasks now use a dedicated queue and no longer propagate errors to compliance tasks [(#8214)](https://github.com/prowler-cloud/prowler/pull/8214)
|
||||
|
||||
### Fixed
|
||||
- Scan with no resources will not trigger legacy code for findings metadata [(#8183)](https://github.com/prowler-cloud/prowler/pull/8183)
|
||||
- Invitation email comparison case-insensitive [(#8206)](https://github.com/prowler-cloud/prowler/pull/8206)
|
||||
|
||||
### Removed
|
||||
- Validation of the provider's secret type during updates [(#8197)](https://github.com/prowler-cloud/prowler/pull/8197)
|
||||
|
||||
---
|
||||
|
||||
## [v1.9.0] (Prowler v5.8.0)
|
||||
|
||||
### Added
|
||||
- Support GCP Service Account key [(#7824)](https://github.com/prowler-cloud/prowler/pull/7824)
|
||||
- `GET /compliance-overviews` endpoints to retrieve compliance metadata and specific requirements statuses [(#7877)](https://github.com/prowler-cloud/prowler/pull/7877)
|
||||
- Lighthouse configuration support [(#7848)](https://github.com/prowler-cloud/prowler/pull/7848)
|
||||
|
||||
### Changed
|
||||
- Reworked `GET /compliance-overviews` to return proper requirement metrics [(#7877)](https://github.com/prowler-cloud/prowler/pull/7877)
|
||||
- Optional `user` and `password` for M365 provider [(#7992)](https://github.com/prowler-cloud/prowler/pull/7992)
|
||||
|
||||
### Fixed
|
||||
- Scheduled scans are no longer deleted when their daily schedule run is disabled [(#8082)](https://github.com/prowler-cloud/prowler/pull/8082)
|
||||
|
||||
---
|
||||
|
||||
## [v1.8.5] (Prowler v5.7.5)
|
||||
|
||||
### Fixed
|
||||
- Normalize provider UID to ensure safe and unique export directory paths [(#8007)](https://github.com/prowler-cloud/prowler/pull/8007).
|
||||
- Blank resource types in `/metadata` endpoints [(#8027)](https://github.com/prowler-cloud/prowler/pull/8027)
|
||||
|
||||
---
|
||||
|
||||
## [v1.8.4] (Prowler v5.7.4)
|
||||
|
||||
### Removed
|
||||
- Reverted RLS transaction handling and DB custom backend [(#7994)](https://github.com/prowler-cloud/prowler/pull/7994)
|
||||
|
||||
---
|
||||
|
||||
## [v1.8.3] (Prowler v5.7.3)
|
||||
|
||||
### Added
|
||||
- Database backend to handle already closed connections [(#7935)](https://github.com/prowler-cloud/prowler/pull/7935)
|
||||
|
||||
### Changed
|
||||
- Renamed field encrypted_password to password for M365 provider [(#7784)](https://github.com/prowler-cloud/prowler/pull/7784)
|
||||
|
||||
### Fixed
|
||||
- Transaction persistence with RLS operations [(#7916)](https://github.com/prowler-cloud/prowler/pull/7916)
|
||||
- Reverted the change `get_with_retry` to use the original `get` method for retrieving tasks [(#7932)](https://github.com/prowler-cloud/prowler/pull/7932)
|
||||
|
||||
---
|
||||
|
||||
## [v1.8.2] (Prowler v5.7.2)
|
||||
|
||||
### Fixed
|
||||
- Task lookup to use task_kwargs instead of task_args for scan report resolution [(#7830)](https://github.com/prowler-cloud/prowler/pull/7830)
|
||||
- Kubernetes UID validation to allow valid context names [(#7871)](https://github.com/prowler-cloud/prowler/pull/7871)
|
||||
- Connection status verification before launching a scan [(#7831)](https://github.com/prowler-cloud/prowler/pull/7831)
|
||||
- Race condition when creating background tasks [(#7876)](https://github.com/prowler-cloud/prowler/pull/7876)
|
||||
- Error when modifying or retrieving tenants due to missing user UUID in transaction context [(#7890)](https://github.com/prowler-cloud/prowler/pull/7890)
|
||||
|
||||
---
|
||||
|
||||
## [v1.8.1] (Prowler v5.7.1)
|
||||
|
||||
### Fixed
|
||||
- Added database index to improve performance on finding lookup [(#7800)](https://github.com/prowler-cloud/prowler/pull/7800)
|
||||
|
||||
---
|
||||
|
||||
## [v1.8.0] (Prowler v5.7.0)
|
||||
|
||||
### Added
|
||||
- Huge improvements to `/findings/metadata` and resource related filters for findings [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690)
|
||||
- Improvements to `/overviews` endpoints [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690)
|
||||
- Queue to perform backfill background tasks [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690)
|
||||
- New endpoints to retrieve latest findings and metadata [(#7743)](https://github.com/prowler-cloud/prowler/pull/7743)
|
||||
- Export support for Prowler ThreatScore in M365 [(7783)](https://github.com/prowler-cloud/prowler/pull/7783)
|
||||
|
||||
---
|
||||
|
||||
## [v1.7.0] (Prowler v5.6.0)
|
||||
|
||||
### Added
|
||||
|
||||
- M365 as a new provider [(#7563)](https://github.com/prowler-cloud/prowler/pull/7563)
|
||||
- `compliance/` folder and ZIP‐export functionality for all compliance reports [(#7653)](https://github.com/prowler-cloud/prowler/pull/7653)
|
||||
- API endpoint to fetch and download any specific compliance file by name [(#7653)](https://github.com/prowler-cloud/prowler/pull/7653)
|
||||
|
||||
---
|
||||
|
||||
## [v1.6.0] (Prowler v5.5.0)
|
||||
|
||||
### Added
|
||||
|
||||
- Support for developing new integrations [(#7167)](https://github.com/prowler-cloud/prowler/pull/7167)
|
||||
- HTTP Security Headers [(#7289)](https://github.com/prowler-cloud/prowler/pull/7289)
|
||||
- New endpoint to get the compliance overviews metadata [(#7333)](https://github.com/prowler-cloud/prowler/pull/7333)
|
||||
- Support for muted findings [(#7378)](https://github.com/prowler-cloud/prowler/pull/7378)
|
||||
- Missing fields to API findings and resources [(#7318)](https://github.com/prowler-cloud/prowler/pull/7318)
|
||||
|
||||
---
|
||||
|
||||
## [v1.5.4] (Prowler v5.4.4)
|
||||
|
||||
### Fixed
|
||||
- Bug with periodic tasks when trying to delete a provider [(#7466)](https://github.com/prowler-cloud/prowler/pull/7466)
|
||||
|
||||
---
|
||||
|
||||
## [v1.5.3] (Prowler v5.4.3)
|
||||
|
||||
### Fixed
|
||||
- Duplicated scheduled scans handling [(#7401)](https://github.com/prowler-cloud/prowler/pull/7401)
|
||||
- Environment variable to configure the deletion task batch size [(#7423)](https://github.com/prowler-cloud/prowler/pull/7423)
|
||||
|
||||
---
|
||||
|
||||
## [v1.5.2] (Prowler v5.4.2)
|
||||
|
||||
### Changed
|
||||
- Refactored deletion logic and implemented retry mechanism for deletion tasks [(#7349)](https://github.com/prowler-cloud/prowler/pull/7349)
|
||||
|
||||
---
|
||||
|
||||
## [v1.5.1] (Prowler v5.4.1)
|
||||
|
||||
### Fixed
|
||||
- Handle response in case local files are missing [(#7183)](https://github.com/prowler-cloud/prowler/pull/7183)
|
||||
- Race condition when deleting export files after the S3 upload [(#7172)](https://github.com/prowler-cloud/prowler/pull/7172)
|
||||
- Handle exception when a provider has no secret in test connection [(#7283)](https://github.com/prowler-cloud/prowler/pull/7283)
|
||||
|
||||
---
|
||||
|
||||
## [v1.5.0] (Prowler v5.4.0)
|
||||
|
||||
### Added
|
||||
- Social login integration with Google and GitHub [(#6906)](https://github.com/prowler-cloud/prowler/pull/6906)
|
||||
- API scan report system, now all scans launched from the API will generate a compressed file with the report in OCSF, CSV and HTML formats [(#6878)](https://github.com/prowler-cloud/prowler/pull/6878)
|
||||
- Configurable Sentry integration [(#6874)](https://github.com/prowler-cloud/prowler/pull/6874)
|
||||
|
||||
### Changed
|
||||
- Optimized `GET /findings` endpoint to improve response time and size [(#7019)](https://github.com/prowler-cloud/prowler/pull/7019)
|
||||
|
||||
---
|
||||
|
||||
## [v1.4.0] (Prowler v5.3.0)
|
||||
|
||||
### Changed
|
||||
- Daily scheduled scan instances are now created beforehand with `SCHEDULED` state [(#6700)](https://github.com/prowler-cloud/prowler/pull/6700)
|
||||
- Findings endpoints now require at least one date filter [(#6800)](https://github.com/prowler-cloud/prowler/pull/6800)
|
||||
- Findings metadata endpoint received a performance improvement [(#6863)](https://github.com/prowler-cloud/prowler/pull/6863)
|
||||
- Increased the allowed length of the provider UID for Kubernetes providers [(#6869)](https://github.com/prowler-cloud/prowler/pull/6869)
|
||||
|
||||
---
|
||||
@@ -1,76 +0,0 @@
|
||||
FROM python:3.12.10-slim-bookworm AS build
|
||||
|
||||
LABEL maintainer="https://github.com/prowler-cloud/api"
|
||||
|
||||
ARG POWERSHELL_VERSION=7.5.0
|
||||
ENV POWERSHELL_VERSION=${POWERSHELL_VERSION}
|
||||
|
||||
# hadolint ignore=DL3008
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
wget \
|
||||
libicu72 \
|
||||
gcc \
|
||||
g++ \
|
||||
make \
|
||||
libxml2-dev \
|
||||
libxmlsec1-dev \
|
||||
libxmlsec1-openssl \
|
||||
pkg-config \
|
||||
libtool \
|
||||
libxslt1-dev \
|
||||
python3-dev \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Install PowerShell
|
||||
RUN ARCH=$(uname -m) && \
|
||||
if [ "$ARCH" = "x86_64" ]; then \
|
||||
wget --progress=dot:giga https://github.com/PowerShell/PowerShell/releases/download/v${POWERSHELL_VERSION}/powershell-${POWERSHELL_VERSION}-linux-x64.tar.gz -O /tmp/powershell.tar.gz ; \
|
||||
elif [ "$ARCH" = "aarch64" ]; then \
|
||||
wget --progress=dot:giga https://github.com/PowerShell/PowerShell/releases/download/v${POWERSHELL_VERSION}/powershell-${POWERSHELL_VERSION}-linux-arm64.tar.gz -O /tmp/powershell.tar.gz ; \
|
||||
else \
|
||||
echo "Unsupported architecture: $ARCH" && exit 1 ; \
|
||||
fi && \
|
||||
mkdir -p /opt/microsoft/powershell/7 && \
|
||||
tar zxf /tmp/powershell.tar.gz -C /opt/microsoft/powershell/7 && \
|
||||
chmod +x /opt/microsoft/powershell/7/pwsh && \
|
||||
ln -s /opt/microsoft/powershell/7/pwsh /usr/bin/pwsh && \
|
||||
rm /tmp/powershell.tar.gz
|
||||
|
||||
# Add prowler user
|
||||
RUN addgroup --gid 1000 prowler && \
|
||||
adduser --uid 1000 --gid 1000 --disabled-password --gecos "" prowler
|
||||
|
||||
USER prowler
|
||||
|
||||
WORKDIR /home/prowler
|
||||
|
||||
# Ensure output directory exists
|
||||
RUN mkdir -p /tmp/prowler_api_output
|
||||
|
||||
COPY pyproject.toml ./
|
||||
|
||||
RUN pip install --no-cache-dir --upgrade pip && \
|
||||
pip install --no-cache-dir poetry
|
||||
|
||||
ENV PATH="/home/prowler/.local/bin:$PATH"
|
||||
|
||||
# Add `--no-root` to avoid installing the current project as a package
|
||||
RUN poetry install --no-root && \
|
||||
rm -rf ~/.cache/pip
|
||||
|
||||
RUN poetry run python "$(poetry env info --path)/src/prowler/prowler/providers/m365/lib/powershell/m365_powershell.py"
|
||||
|
||||
COPY src/backend/ ./backend/
|
||||
COPY docker-entrypoint.sh ./docker-entrypoint.sh
|
||||
|
||||
WORKDIR /home/prowler/backend
|
||||
|
||||
# Development image
|
||||
FROM build AS dev
|
||||
|
||||
ENTRYPOINT ["../docker-entrypoint.sh", "dev"]
|
||||
|
||||
# Production image
|
||||
FROM build
|
||||
|
||||
ENTRYPOINT ["../docker-entrypoint.sh", "prod"]
|
||||
339	api/README.md
@@ -1,339 +0,0 @@
|
||||
# Description
|
||||
|
||||
This repository contains the JSON API and Task Runner components for Prowler, which facilitate a complete backend that interacts with the Prowler SDK and is used by the Prowler UI.
|
||||
|
||||
# Components
|
||||
The Prowler API is composed of the following components:
|
||||
|
||||
- The JSON API, which is an API built with Django Rest Framework.
|
||||
- The Celery worker, which is responsible for executing the background tasks that are defined in the JSON API.
|
||||
- The PostgreSQL database, which is used to store the data.
|
||||
- The Valkey database, which is an in-memory database which is used as a message broker for the Celery workers.
|
||||
|
||||
## Note about Valkey
|
||||
|
||||
[Valkey](https://valkey.io/) is an open source (BSD) high performance key/value datastore.
|
||||
|
||||
Valkey exposes a Redis 7.2 compliant API. Any service that exposes the Redis API can be used with Prowler API.
|
||||
|
||||
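Because Valkey speaks the Redis protocol, any Redis-compatible client can be used to verify that the broker is reachable. A quick connectivity check with the redis-py package (shown here purely as a sketch; it is not a dependency this README mandates) could look like:

```python
import os

import redis  # any Redis-protocol client works against Valkey

client = redis.Redis(
    host=os.environ.get("VALKEY_HOST", "localhost"),
    port=int(os.environ.get("VALKEY_PORT", "6379")),
    db=int(os.environ.get("VALKEY_DB", "0")),
)
print(client.ping())  # True if the broker is reachable
```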
# Modify environment variables
|
||||
|
||||
Under the root path of the project, you can find a file called `.env`. This file shows all the environment variables that the project uses. You should review it and set the values for the variables you want to change.
|
||||
|
||||
If you don’t set `DJANGO_TOKEN_SIGNING_KEY` or `DJANGO_TOKEN_VERIFYING_KEY`, the API will generate them at `~/.config/prowler-api/` with `0600` and `0644` permissions; back up these files to persist identity across redeploys.
|
||||
|
||||
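If you prefer to provision the keys yourself (for example, to inject them through a secrets manager instead of relying on the files generated under `~/.config/prowler-api/`), a sketch along these lines produces a PEM pair suitable for the two variables. It simply mirrors what the API does on first boot, using the `cryptography` package:

```python
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa

# Generate an RSA key pair; export the results as DJANGO_TOKEN_SIGNING_KEY
# and DJANGO_TOKEN_VERIFYING_KEY in your deployment environment.
private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048)

signing_pem = private_key.private_bytes(
    encoding=serialization.Encoding.PEM,
    format=serialization.PrivateFormat.PKCS8,
    encryption_algorithm=serialization.NoEncryption(),
).decode()

verifying_pem = private_key.public_key().public_bytes(
    encoding=serialization.Encoding.PEM,
    format=serialization.PublicFormat.SubjectPublicKeyInfo,
).decode()

print(signing_pem)
print(verifying_pem)
```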
**Important note**: Every Prowler version (or repository branches and tags) could have different variables set in its `.env` file. Please use the `.env` file that corresponds with each version.
|
||||
|
||||
## Local deployment
|
||||
Keep in mind if you export the `.env` file to use it with local deployment that you will have to do it within the context of the Poetry interpreter, not before. Otherwise, variables will not be loaded properly.
|
||||
|
||||
To do this, you can run:
|
||||
|
||||
```console
|
||||
poetry shell
|
||||
set -a
|
||||
source .env
|
||||
```
|
||||
|
||||
# 🚀 Production deployment
|
||||
## Docker deployment
|
||||
|
||||
This method requires `docker` and `docker compose`.
|
||||
|
||||
### Clone the repository
|
||||
|
||||
```console
|
||||
# HTTPS
|
||||
git clone https://github.com/prowler-cloud/api.git
|
||||
|
||||
# SSH
|
||||
git clone git@github.com:prowler-cloud/api.git
|
||||
|
||||
```
|
||||
|
||||
### Build the base image
|
||||
|
||||
```console
|
||||
docker compose --profile prod build
|
||||
```
|
||||
|
||||
### Run the production service
|
||||
|
||||
This command will start the Django production server and the Celery worker and also the Valkey and PostgreSQL databases.
|
||||
|
||||
```console
|
||||
docker compose --profile prod up -d
|
||||
```
|
||||
|
||||
You can access the server in `http://localhost:8080`.
|
||||
|
||||
> **NOTE:** notice how the port is different. When developing using docker, the port will be `8080` to prevent conflicts.
|
||||
|
||||
### View the Production Server Logs
|
||||
|
||||
To view the logs for any component (e.g., Django, Celery worker), you can use the following command with a wildcard. This command will follow logs for any container that matches the specified pattern:
|
||||
|
||||
```console
|
||||
docker logs -f $(docker ps --format "{{.Names}}" | grep 'api-')
```
|
||||
|
||||
## Local deployment
|
||||
|
||||
To use this method, you'll need to set up a Python virtual environment (version ">=3.11,<3.13") and keep dependencies updated. Additionally, ensure that `poetry` and `docker compose` are installed.
|
||||
|
||||
### Clone the repository
|
||||
|
||||
```console
|
||||
# HTTPS
|
||||
git clone https://github.com/prowler-cloud/api.git
|
||||
|
||||
# SSH
|
||||
git clone git@github.com:prowler-cloud/api.git
|
||||
|
||||
```
|
||||
### Install all dependencies with Poetry
|
||||
|
||||
```console
|
||||
poetry install
|
||||
poetry shell
|
||||
```
|
||||
|
||||
## Start the PostgreSQL Database and Valkey
|
||||
|
||||
The PostgreSQL database (version 16.3) and Valkey (version 7) are required for the development environment. To make development easier, we have provided a `docker-compose` file that will start these components for you.
|
||||
|
||||
**Note:** Make sure to use the specified versions, as there are features in our setup that may not be compatible with older versions of PostgreSQL and Valkey.
|
||||
|
||||
|
||||
```console
|
||||
docker compose up postgres valkey -d
|
||||
```
|
||||
|
||||
## Deploy Django and the Celery worker
|
||||
|
||||
### Run migrations
|
||||
|
||||
For migrations, you need to force the `admin` database router. Assuming you have the correct environment variables and Python virtual environment, run:
|
||||
|
||||
```console
|
||||
cd src/backend
|
||||
python manage.py migrate --database admin
|
||||
```
|
||||
|
||||
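The `--database admin` flag works because the API routes privileged operations (such as migrations) through a dedicated database alias. As a rough illustration only (the real router lives in `api/db_router.py` and is more involved), a Django database router that confines migrations to an `admin` alias looks like this:

```python
class AdminRouter:
    """Illustrative router: apply migrations only through the 'admin' alias."""

    admin_db = "admin"

    def db_for_read(self, model, **hints):
        return None  # fall back to the default database for reads

    def db_for_write(self, model, **hints):
        return None  # fall back to the default database for writes

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        # Migrations run only when explicitly targeting the admin alias,
        # which is why the commands above pass `--database admin`.
        return db == self.admin_db
```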
### Run the Celery worker
|
||||
|
||||
```console
|
||||
cd src/backend
|
||||
python -m celery -A config.celery worker -l info -E
|
||||
```
|
||||
|
||||
### Run the Django server with Gunicorn
|
||||
|
||||
```console
|
||||
cd src/backend
|
||||
gunicorn -c config/guniconf.py config.wsgi:application
|
||||
```
|
||||
|
||||
> By default, the Gunicorn server will try to use as many workers as your machine can handle. You can manually change that in the `src/backend/config/guniconf.py` file.
|
||||
|
||||
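The worker count is just a Gunicorn configuration value; a common pattern, shown here as an illustration rather than the exact contents of `config/guniconf.py`, caps the default at the number of available CPU cores and honours the `DJANGO_WORKERS` variable from the `.env` file:

```python
# Illustrative Gunicorn config file; the shipped config/guniconf.py may differ.
import multiprocessing
import os

bind = f"0.0.0.0:{os.environ.get('DJANGO_PORT', '8080')}"
# Default to one worker per CPU core plus one, unless DJANGO_WORKERS is set.
workers = int(os.environ.get("DJANGO_WORKERS", multiprocessing.cpu_count() + 1))
wsgi_app = "config.wsgi:application"
```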
# 🧪 Development guide
|
||||
|
||||
## Local deployment
|
||||
|
||||
To use this method, you'll need to set up a Python virtual environment (version ">=3.11,<3.13") and keep dependencies updated. Additionally, ensure that `poetry` and `docker compose` are installed.
|
||||
|
||||
### Clone the repository
|
||||
|
||||
```console
|
||||
# HTTPS
|
||||
git clone https://github.com/prowler-cloud/api.git
|
||||
|
||||
# SSH
|
||||
git clone git@github.com:prowler-cloud/api.git
|
||||
|
||||
```
|
||||
|
||||
### Start the PostgreSQL Database and Valkey
|
||||
|
||||
The PostgreSQL database (version 16.3) and Valkey (version 7) are required for the development environment. To make development easier, we have provided a `docker-compose` file that will start these components for you.
|
||||
|
||||
**Note:** Make sure to use the specified versions, as there are features in our setup that may not be compatible with older versions of PostgreSQL and Valkey.
|
||||
|
||||
|
||||
```console
|
||||
docker compose up postgres valkey -d
|
||||
```
|
||||
|
||||
### Install the Python dependencies
|
||||
|
||||
> You must have Poetry installed
|
||||
|
||||
```console
|
||||
poetry install
|
||||
poetry shell
|
||||
```
|
||||
|
||||
### Apply migrations
|
||||
|
||||
For migrations, you need to force the `admin` database router. Assuming you have the correct environment variables and Python virtual environment, run:
|
||||
|
||||
```console
|
||||
cd src/backend
|
||||
python manage.py migrate --database admin
|
||||
```
|
||||
|
||||
### Run the Django development server
|
||||
|
||||
```console
|
||||
cd src/backend
|
||||
python manage.py runserver
|
||||
```
|
||||
|
||||
You can access the server in `http://localhost:8000`.
|
||||
All changes in the code will be automatically reloaded in the server.
|
||||
|
||||
### Run the Celery worker
|
||||
|
||||
```console
|
||||
python -m celery -A config.celery worker -l info -E
|
||||
```
|
||||
|
||||
The Celery worker does not detect and reload changes in the code, so you need to restart it manually when you make changes.
|
||||
|
||||
## Docker deployment
|
||||
|
||||
This method requires `docker` and `docker compose`.
|
||||
|
||||
### Clone the repository
|
||||
|
||||
```console
|
||||
# HTTPS
|
||||
git clone https://github.com/prowler-cloud/api.git
|
||||
|
||||
# SSH
|
||||
git clone git@github.com:prowler-cloud/api.git
|
||||
|
||||
```
|
||||
|
||||
### Build the base image
|
||||
|
||||
```console
|
||||
docker compose --profile dev build
|
||||
```
|
||||
|
||||
### Run the development service
|
||||
|
||||
This command will start the Django development server and the Celery worker and also the Valkey and PostgreSQL databases.
|
||||
|
||||
```console
|
||||
docker compose --profile dev up -d
|
||||
```
|
||||
|
||||
You can access the server in `http://localhost:8080`.
|
||||
All changes in the code will be automatically reloaded in the server.
|
||||
|
||||
> **NOTE:** notice how the port is different. When developing using docker, the port will be `8080` to prevent conflicts.
|
||||
|
||||
### View the development server logs
|
||||
|
||||
To view the logs for any component (e.g., Django, Celery worker), you can use the following command with a wildcard. This command will follow logs for any container that matches the specified pattern:
|
||||
|
||||
```console
|
||||
docker logs -f $(docker ps --format "{{.Names}}" | grep 'api-')
|
||||
```
|
||||
|
||||
## Applying migrations
|
||||
|
||||
For migrations, you need to force the `admin` database router. Assuming you have the correct environment variables and Python virtual environment, run:
|
||||
|
||||
```console
|
||||
poetry shell
|
||||
cd src/backend
|
||||
python manage.py migrate --database admin
|
||||
```
|
||||
|
||||
## Apply fixtures
|
||||
|
||||
Fixtures are used to populate the database with initial development data.
|
||||
|
||||
```console
|
||||
poetry shell
|
||||
cd src/backend
|
||||
python manage.py loaddata api/fixtures/0_dev_users.json --database admin
|
||||
```
|
||||
|
||||
> The default credentials are `dev@prowler.com:Thisisapassword123@` or `dev2@prowler.com:Thisisapassword123@`
|
||||
|
||||
## Run tests
|
||||
|
||||
Note that the tests will fail if you use the same `.env` file as the development environment.
|
||||
|
||||
For best results, run in a new shell with no environment variables set.
|
||||
|
||||
```console
|
||||
poetry shell
|
||||
cd src/backend
|
||||
pytest
|
||||
```
|
||||
|
||||
# Custom commands
|
||||
|
||||
Django provides a way to create custom commands that can be run from the command line.
|
||||
|
||||
> These commands can be found in: ```prowler/api/src/backend/api/management/commands```
|
||||
|
||||
To run a custom command, you need to be in the `prowler/api/src/backend` directory and run:
|
||||
|
||||
```console
|
||||
poetry shell
|
||||
python manage.py <command_name>
|
||||
```
|
||||
|
||||
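A custom command is simply a Python module exposing a `Command` class built on Django's `BaseCommand`. The following is a minimal, hypothetical example (the file name, argument and output are illustrative, not one of the commands shipped in `api/management/commands`); it would live at `api/management/commands/hello.py` and run as `python manage.py hello --name prowler`:

```python
from django.core.management.base import BaseCommand


class Command(BaseCommand):
    help = "Hypothetical example command; real commands live in api/management/commands"

    def add_arguments(self, parser):
        # Optional flag with a default, parsed by Django's argparse integration.
        parser.add_argument("--name", default="world")

    def handle(self, *args, **options):
        self.stdout.write(self.style.SUCCESS(f"Hello, {options['name']}!"))
```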
## Generate dummy data
|
||||
|
||||
```console
|
||||
python manage.py findings --tenant <TENANT_ID> --findings <NUM_FINDINGS> --resources <NUM_RESOURCES> --batch <TRANSACTION_BATCH_SIZE> --alias <ALIAS>
|
||||
```
|
||||
|
||||
This command creates, for a given tenant, a provider, scan and a set of findings and resources related altogether.
|
||||
|
||||
> Scan progress and state are updated in real time.
|
||||
> - 0-33%: Create resources.
|
||||
> - 33-66%: Create findings.
|
||||
> - 66%: Create resource-finding mapping.
|
||||
>
|
||||
> The last step is required to access the findings details, since the UI needs that to print all the information.
|
||||
|
||||
### Example
|
||||
|
||||
```console
|
||||
~/backend $ poetry run python manage.py findings --tenant fffb1893-3fc7-4623-a5d9-fae47da1c528 --findings 25000 --resources 1000 --batch 5000 --alias test-script
|
||||
|
||||
Starting data population
|
||||
Tenant: fffb1893-3fc7-4623-a5d9-fae47da1c528
|
||||
Alias: test-script
|
||||
Resources: 1000
|
||||
Findings: 25000
|
||||
Batch size: 5000
|
||||
|
||||
|
||||
Creating resources...
|
||||
100%|███████████████████████| 1/1 [00:00<00:00, 7.72it/s]
|
||||
Resources created successfully.
|
||||
|
||||
|
||||
Creating findings...
|
||||
100%|███████████████████████| 5/5 [00:05<00:00, 1.09s/it]
|
||||
Findings created successfully.
|
||||
|
||||
|
||||
Creating resource-finding mappings...
|
||||
100%|███████████████████████| 5/5 [00:02<00:00, 1.81it/s]
|
||||
Resource-finding mappings created successfully.
|
||||
|
||||
|
||||
Successfully populated test data.
|
||||
```
|
||||
@@ -1,75 +0,0 @@
#!/bin/sh


apply_migrations() {
    echo "Applying database migrations..."

    # Fix Inconsistent migration history after adding sites app
    poetry run python manage.py check_and_fix_socialaccount_sites_migration --database admin

    poetry run python manage.py migrate --database admin
}

apply_fixtures() {
    echo "Applying Django fixtures..."
    for fixture in api/fixtures/dev/*.json; do
        if [ -f "$fixture" ]; then
            echo "Loading $fixture"
            poetry run python manage.py loaddata "$fixture" --database admin
        fi
    done
}

start_dev_server() {
    echo "Starting the development server..."
    poetry run python manage.py runserver 0.0.0.0:"${DJANGO_PORT:-8080}"
}

start_prod_server() {
    echo "Starting the Gunicorn server..."
    poetry run gunicorn -c config/guniconf.py config.wsgi:application
}

start_worker() {
    echo "Starting the worker..."
    poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans,scan-reports,deletion,backfill,overview,integrations,compliance -E --max-tasks-per-child 1
}

start_worker_beat() {
    echo "Starting the worker-beat..."
    sleep 15
    poetry run python -m celery -A config.celery beat -l "${DJANGO_LOGGING_LEVEL:-info}" --scheduler django_celery_beat.schedulers:DatabaseScheduler
}

manage_db_partitions() {
    if [ "${DJANGO_MANAGE_DB_PARTITIONS}" = "True" ]; then
        echo "Managing DB partitions..."
        # For now we skip the deletion of partitions until we define the data retention policy
        # --yes auto approves the operation without the need of an interactive terminal
        poetry run python manage.py pgpartition --using admin --skip-delete --yes
    fi
}

case "$1" in
    dev)
        apply_migrations
        apply_fixtures
        manage_db_partitions
        start_dev_server
        ;;
    prod)
        apply_migrations
        manage_db_partitions
        start_prod_server
        ;;
    worker)
        start_worker
        ;;
    beat)
        start_worker_beat
        ;;
    *)
        echo "Usage: $0 {dev|prod|worker|beat}"
        exit 1
        ;;
esac
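The `-Q` flag above subscribes the worker to every task queue the API uses (scans, deletion, overview, compliance, and so on). When application code dispatches work, it targets one of those queues explicitly. The following is only a hedged sketch of that pattern with Celery; the task name, module and broker URL are hypothetical, not the project's actual task definitions:

```python
from celery import Celery

# Broker URL would normally be assembled from the VALKEY_* variables.
app = Celery("config", broker="redis://localhost:6379/0")


@app.task(name="example.long_running_scan")  # hypothetical task name
def long_running_scan(provider_id: str) -> str:
    return f"scanned {provider_id}"


# Publish the call onto the dedicated "scans" queue that the worker listens on.
result = long_running_scan.apply_async(args=["prov-123"], queue="scans")
```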
@@ -1,65 +0,0 @@
# Partitions

## Overview

Partitions are used to split the data in a table into smaller chunks, allowing for more efficient querying and storage.

The Prowler API uses partitions to store findings. The partitions are created based on the UUIDv7 `id` field.

You can use the Prowler API without ever creating additional partitions. This documentation is only relevant if you want to manage partitions to gain additional query performance.

### Required Postgres Configuration

There are 3 configuration options that need to be set in the `postgres.conf` file to get the most performance out of the partitioning:

- `enable_partition_pruning = on` (default is on)
- `enable_partitionwise_join = on` (default is off)
- `enable_partitionwise_aggregate = on` (default is off)

For more information on these options, see the [Postgres documentation](https://www.postgresql.org/docs/current/runtime-config-query.html).

## Partitioning Strategy

The partitioning strategy is defined in the `api.partitions` module. The strategy is responsible for creating and deleting partitions based on the provided configuration.

## Managing Partitions

The application will run without any extra work on your part. If you want to add or delete partitions, you can use the following commands:

To manage the partitions, run `python manage.py pgpartition --using admin`

This command will generate a list of partitions to create and delete based on the provided configuration.

By default, the command will prompt you to accept the changes before applying them.

```shell
Finding:
  + 2024_nov
       name: 2024_nov
       from_values: 0192e505-9000-72c8-a47c-cce719d8fb93
       to_values: 01937f84-5418-7eb8-b2a6-e3be749e839d
       size_unit: months
       size_value: 1
  + 2024_dec
       name: 2024_dec
       from_values: 01937f84-5800-7b55-879c-9cdb46f023f6
       to_values: 01941f29-7818-7f9f-b4be-20b05bb2f574
       size_unit: months
       size_value: 1

0 partitions will be deleted
2 partitions will be created
```

If you choose to apply the partitions, tables will be generated with the following format: `<table_name>_<year>_<month>`.

For more info on the partitioning manager, see https://github.com/SectorLabs/django-postgres-extra

### Changing the Partitioning Parameters

There are 4 environment variables that can be used to change the partitioning parameters:

- `DJANGO_MANAGE_DB_PARTITIONS`: Allow Django to manage database partitions. Defaults to `False`.
- `FINDINGS_TABLE_PARTITION_MONTHS`: Set the size of each partition in months. Setting this to 1 will create partitions spanning one natural month each.
- `FINDINGS_TABLE_PARTITION_COUNT`: Set the number of partitions to create.
- `FINDINGS_TABLE_PARTITION_MAX_AGE_MONTHS`: Set the number of months to keep partitions before deleting them. Setting this to `None` will keep partitions indefinitely.
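Because UUIDv7 values begin with a 48-bit millisecond Unix timestamp, a partition covering a calendar month can be described by two boundary UUIDs, which is what the `from_values`/`to_values` shown above encode. Below is a small sketch of deriving such a lower bound with the standard library; it is illustrative only and not the code django-postgres-extra actually runs (variant bits are omitted for brevity):

```python
import uuid
from datetime import datetime, timezone


def month_lower_bound(year: int, month: int) -> uuid.UUID:
    """Smallest UUIDv7-style value whose embedded timestamp falls in the given month."""
    start = datetime(year, month, 1, tzinfo=timezone.utc)
    millis = int(start.timestamp() * 1000)
    # UUIDv7 layout: 48-bit timestamp, then the version nibble, then randomness.
    value = (millis << 80) | (0x7 << 76)  # randomness zeroed, version nibble set to 7
    return uuid.UUID(int=value)


print(month_lower_bound(2024, 11))  # comparable to the from_values shown above
```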
6844	api/poetry.lock	generated
File diff suppressed because it is too large
@@ -1,69 +0,0 @@
|
||||
[build-system]
|
||||
build-backend = "poetry.core.masonry.api"
|
||||
requires = ["poetry-core"]
|
||||
|
||||
[project]
|
||||
authors = [{name = "Prowler Engineering", email = "engineering@prowler.com"}]
|
||||
dependencies = [
|
||||
"celery[pytest] (>=5.4.0,<6.0.0)",
|
||||
"dj-rest-auth[with_social,jwt] (==7.0.1)",
|
||||
"django (==5.1.13)",
|
||||
"django-allauth[saml] (>=65.8.0,<66.0.0)",
|
||||
"django-celery-beat (>=2.7.0,<3.0.0)",
|
||||
"django-celery-results (>=2.5.1,<3.0.0)",
|
||||
"django-cors-headers==4.4.0",
|
||||
"django-environ==0.11.2",
|
||||
"django-filter==24.3",
|
||||
"django-guid==3.5.0",
|
||||
"django-postgres-extra (>=2.0.8,<3.0.0)",
|
||||
"djangorestframework==3.15.2",
|
||||
"djangorestframework-jsonapi==7.0.2",
|
||||
"djangorestframework-simplejwt (>=5.3.1,<6.0.0)",
|
||||
"drf-nested-routers (>=0.94.1,<1.0.0)",
|
||||
"drf-spectacular==0.27.2",
|
||||
"drf-spectacular-jsonapi==0.5.1",
|
||||
"gunicorn==23.0.0",
|
||||
"lxml==5.3.2",
|
||||
"prowler @ git+https://github.com/prowler-cloud/prowler.git@v5.13",
|
||||
"psycopg2-binary==2.9.9",
|
||||
"pytest-celery[redis] (>=1.0.1,<2.0.0)",
|
||||
"sentry-sdk[django] (>=2.20.0,<3.0.0)",
|
||||
"uuid6==2024.7.10",
|
||||
"openai (>=1.82.0,<2.0.0)",
|
||||
"xmlsec==1.3.14",
|
||||
"h2 (==4.3.0)",
|
||||
"markdown (>=3.9,<4.0)",
|
||||
"drf-simple-apikey (==2.2.1)",
|
||||
"matplotlib (>=3.10.6,<4.0.0)",
|
||||
"reportlab (>=4.4.4,<5.0.0)"
|
||||
]
|
||||
description = "Prowler's API (Django/DRF)"
|
||||
license = "Apache-2.0"
|
||||
name = "prowler-api"
|
||||
package-mode = false
|
||||
# Needed for the SDK compatibility
|
||||
requires-python = ">=3.11,<3.13"
|
||||
version = "1.14.0"
|
||||
|
||||
[project.scripts]
|
||||
celery = "src.backend.config.settings.celery"
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
bandit = "1.7.9"
|
||||
coverage = "7.5.4"
|
||||
django-silk = "5.3.2"
|
||||
docker = "7.1.0"
|
||||
freezegun = "1.5.1"
|
||||
marshmallow = ">=3.15.0,<4.0.0"
|
||||
mypy = "1.10.1"
|
||||
pylint = "3.2.5"
|
||||
pytest = "8.2.2"
|
||||
pytest-cov = "5.0.0"
|
||||
pytest-django = "4.8.0"
|
||||
pytest-env = "1.1.3"
|
||||
pytest-randomly = "3.15.0"
|
||||
pytest-xdist = "3.6.1"
|
||||
ruff = "0.5.0"
|
||||
safety = "3.2.9"
|
||||
tqdm = "4.67.1"
|
||||
vulture = "2.14"
|
||||
@@ -1,71 +0,0 @@
|
||||
from allauth.socialaccount.adapter import DefaultSocialAccountAdapter
|
||||
from django.db import transaction
|
||||
|
||||
from api.db_router import MainRouter
|
||||
from api.db_utils import rls_transaction
|
||||
from api.models import Membership, Role, Tenant, User, UserRoleRelationship
|
||||
|
||||
|
||||
class ProwlerSocialAccountAdapter(DefaultSocialAccountAdapter):
|
||||
@staticmethod
|
||||
def get_user_by_email(email: str):
|
||||
try:
|
||||
return User.objects.get(email=email)
|
||||
except User.DoesNotExist:
|
||||
return None
|
||||
|
||||
def pre_social_login(self, request, sociallogin):
|
||||
# Link existing accounts with the same email address
|
||||
email = sociallogin.account.extra_data.get("email")
|
||||
if sociallogin.provider.id == "saml":
|
||||
email = sociallogin.user.email
|
||||
if email:
|
||||
existing_user = self.get_user_by_email(email)
|
||||
if existing_user:
|
||||
sociallogin.connect(request, existing_user)
|
||||
|
||||
def save_user(self, request, sociallogin, form=None):
|
||||
"""
|
||||
Called after the user data is fully populated from the provider
|
||||
and is about to be saved to the DB for the first time.
|
||||
"""
|
||||
with transaction.atomic(using=MainRouter.admin_db):
|
||||
user = super().save_user(request, sociallogin, form)
|
||||
provider = sociallogin.provider.id
|
||||
extra = sociallogin.account.extra_data
|
||||
|
||||
if provider != "saml":
|
||||
# Handle other providers (e.g., GitHub, Google)
|
||||
user.save(using=MainRouter.admin_db)
|
||||
social_account_name = extra.get("name")
|
||||
if social_account_name:
|
||||
user.name = social_account_name
|
||||
user.save(using=MainRouter.admin_db)
|
||||
|
||||
tenant = Tenant.objects.using(MainRouter.admin_db).create(
|
||||
name=f"{user.email.split('@')[0]} default tenant"
|
||||
)
|
||||
with rls_transaction(str(tenant.id)):
|
||||
Membership.objects.using(MainRouter.admin_db).create(
|
||||
user=user, tenant=tenant, role=Membership.RoleChoices.OWNER
|
||||
)
|
||||
role = Role.objects.using(MainRouter.admin_db).create(
|
||||
name="admin",
|
||||
tenant_id=tenant.id,
|
||||
manage_users=True,
|
||||
manage_account=True,
|
||||
manage_billing=True,
|
||||
manage_providers=True,
|
||||
manage_integrations=True,
|
||||
manage_scans=True,
|
||||
unlimited_visibility=True,
|
||||
)
|
||||
UserRoleRelationship.objects.using(MainRouter.admin_db).create(
|
||||
user=user,
|
||||
role=role,
|
||||
tenant_id=tenant.id,
|
||||
)
|
||||
else:
|
||||
request.session["saml_user_created"] = str(user.id)
|
||||
|
||||
return user
|
||||
@@ -1,3 +0,0 @@
|
||||
# from django.contrib import admin
|
||||
|
||||
# Register your models here.
|
||||
@@ -1,169 +0,0 @@
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
from config.custom_logging import BackendLogger
|
||||
from config.env import env
|
||||
from django.apps import AppConfig
|
||||
from django.conf import settings
|
||||
|
||||
logger = logging.getLogger(BackendLogger.API)
|
||||
|
||||
SIGNING_KEY_ENV = "DJANGO_TOKEN_SIGNING_KEY"
|
||||
VERIFYING_KEY_ENV = "DJANGO_TOKEN_VERIFYING_KEY"
|
||||
|
||||
PRIVATE_KEY_FILE = "jwt_private.pem"
|
||||
PUBLIC_KEY_FILE = "jwt_public.pem"
|
||||
|
||||
KEYS_DIRECTORY = (
|
||||
Path.home() / ".config" / "prowler-api"
|
||||
) # `/home/prowler/.config/prowler-api` inside the container
|
||||
|
||||
_keys_initialized = False # Flag to prevent multiple executions within the same process
|
||||
|
||||
|
||||
class ApiConfig(AppConfig):
|
||||
default_auto_field = "django.db.models.BigAutoField"
|
||||
name = "api"
|
||||
|
||||
def ready(self):
|
||||
from api import schema_extensions # noqa: F401
|
||||
from api import signals # noqa: F401
|
||||
from api.compliance import load_prowler_compliance
|
||||
|
||||
# Generate required cryptographic keys if not present, but only if:
|
||||
# `"manage.py" not in sys.argv`: If an external server (e.g., Gunicorn) is running the app
|
||||
# `os.environ.get("RUN_MAIN")`: If it's not a Django command or using `runserver`,
|
||||
# only the main process will do it
|
||||
if "manage.py" not in sys.argv or os.environ.get("RUN_MAIN"):
|
||||
self._ensure_crypto_keys()
|
||||
|
||||
load_prowler_compliance()
|
||||
|
||||
def _ensure_crypto_keys(self):
|
||||
"""
|
||||
Orchestrator method that ensures all required cryptographic keys are present.
|
||||
This method coordinates the generation of:
|
||||
- RSA key pairs for JWT token signing and verification
|
||||
Note: During development, Django spawns multiple processes (migrations, fixtures, etc.)
|
||||
which will each generate their own keys. This is expected behavior and each process
|
||||
will have consistent keys for its lifetime. In production, set the keys as environment
|
||||
variables to avoid regeneration.
|
||||
"""
|
||||
global _keys_initialized
|
||||
|
||||
# Skip key generation if running tests
|
||||
if hasattr(settings, "TESTING") and settings.TESTING:
|
||||
return
|
||||
|
||||
# Skip if already initialized in this process
|
||||
if _keys_initialized:
|
||||
return
|
||||
|
||||
# Check if both JWT keys are set; if not, generate them
|
||||
signing_key = env.str(SIGNING_KEY_ENV, default="").strip()
|
||||
verifying_key = env.str(VERIFYING_KEY_ENV, default="").strip()
|
||||
|
||||
if not signing_key or not verifying_key:
|
||||
logger.info(
|
||||
f"Generating JWT RSA key pair. In production, set '{SIGNING_KEY_ENV}' and '{VERIFYING_KEY_ENV}' "
|
||||
"environment variables."
|
||||
)
|
||||
self._ensure_jwt_keys()
|
||||
|
||||
# Mark as initialized to prevent future executions in this process
|
||||
_keys_initialized = True
|
||||
|
||||
def _read_key_file(self, file_name):
|
||||
"""
|
||||
Utility method to read the contents of a file.
|
||||
"""
|
||||
file_path = KEYS_DIRECTORY / file_name
|
||||
return file_path.read_text().strip() if file_path.is_file() else None
|
||||
|
||||
def _write_key_file(self, file_name, content, private=True):
|
||||
"""
|
||||
Utility method to write content to a file.
|
||||
"""
|
||||
try:
|
||||
file_path = KEYS_DIRECTORY / file_name
|
||||
file_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
file_path.write_text(content)
|
||||
file_path.chmod(0o600 if private else 0o644)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Error writing key file '{file_name}': {e}. "
|
||||
f"Please set '{SIGNING_KEY_ENV}' and '{VERIFYING_KEY_ENV}' manually."
|
||||
)
|
||||
raise e
|
||||
|
||||
def _ensure_jwt_keys(self):
|
||||
"""
|
||||
Generate RSA key pairs for JWT token signing and verification
|
||||
if they are not already set in environment variables.
|
||||
"""
|
||||
# Read existing keys from files if they exist
|
||||
signing_key = self._read_key_file(PRIVATE_KEY_FILE)
|
||||
verifying_key = self._read_key_file(PUBLIC_KEY_FILE)
|
||||
|
||||
if not signing_key or not verifying_key:
|
||||
# Generate and store the RSA key pair
|
||||
signing_key, verifying_key = self._generate_jwt_keys()
|
||||
self._write_key_file(PRIVATE_KEY_FILE, signing_key, private=True)
|
||||
self._write_key_file(PUBLIC_KEY_FILE, verifying_key, private=False)
|
||||
logger.info("JWT keys generated and stored successfully")
|
||||
|
||||
else:
|
||||
logger.info("JWT keys already generated")
|
||||
|
||||
# Set environment variables and Django settings
|
||||
os.environ[SIGNING_KEY_ENV] = signing_key
|
||||
settings.SIMPLE_JWT["SIGNING_KEY"] = signing_key
|
||||
|
||||
os.environ[VERIFYING_KEY_ENV] = verifying_key
|
||||
settings.SIMPLE_JWT["VERIFYING_KEY"] = verifying_key
|
||||
|
||||
def _generate_jwt_keys(self):
|
||||
"""
|
||||
Generate and set RSA key pairs for JWT token operations.
|
||||
"""
|
||||
try:
|
||||
from cryptography.hazmat.primitives import serialization
|
||||
from cryptography.hazmat.primitives.asymmetric import rsa
|
||||
|
||||
# Generate RSA key pair
|
||||
private_key = rsa.generate_private_key( # Future improvement: we could read the next values from env vars
|
||||
public_exponent=65537,
|
||||
key_size=2048,
|
||||
)
|
||||
|
||||
# Serialize private key (for signing)
|
||||
private_pem = private_key.private_bytes(
|
||||
encoding=serialization.Encoding.PEM,
|
||||
format=serialization.PrivateFormat.PKCS8,
|
||||
encryption_algorithm=serialization.NoEncryption(),
|
||||
).decode("utf-8")
|
||||
|
||||
# Serialize public key (for verification)
|
||||
public_key = private_key.public_key()
|
||||
public_pem = public_key.public_bytes(
|
||||
encoding=serialization.Encoding.PEM,
|
||||
format=serialization.PublicFormat.SubjectPublicKeyInfo,
|
||||
).decode("utf-8")
|
||||
|
||||
logger.debug("JWT RSA key pair generated successfully.")
|
||||
return private_pem, public_pem
|
||||
|
||||
except ImportError as e:
|
||||
logger.warning(
|
||||
"The 'cryptography' package is required for automatic JWT key generation."
|
||||
)
|
||||
raise e
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Error generating JWT keys: {e}. Please set '{SIGNING_KEY_ENV}' and '{VERIFYING_KEY_ENV}' manually."
|
||||
)
|
||||
raise e
|
||||
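For orientation, a minimal sketch (not part of the original file) of how an RS256 pair like the one generated above is typically consumed by djangorestframework-simplejwt; the environment variable names are placeholders for whatever SIGNING_KEY_ENV and VERIFYING_KEY_ENV hold.

# Illustrative sketch only. The env var names below are placeholders for the
# values of SIGNING_KEY_ENV / VERIFYING_KEY_ENV referenced above.
import os

SIMPLE_JWT = {
    "ALGORITHM": "RS256",  # asymmetric signing, matching the generated RSA pair
    "SIGNING_KEY": os.environ.get("DJANGO_TOKEN_SIGNING_KEY", ""),
    "VERIFYING_KEY": os.environ.get("DJANGO_TOKEN_VERIFYING_KEY", ""),
}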
@@ -1,95 +0,0 @@
|
||||
from typing import Optional, Tuple
|
||||
from uuid import UUID
|
||||
|
||||
from cryptography.fernet import InvalidToken
|
||||
from django.utils import timezone
|
||||
from drf_simple_apikey.backends import APIKeyAuthentication as BaseAPIKeyAuth
|
||||
from drf_simple_apikey.crypto import get_crypto
|
||||
from rest_framework.authentication import BaseAuthentication
|
||||
from rest_framework.exceptions import AuthenticationFailed
|
||||
from rest_framework.request import Request
|
||||
from rest_framework_simplejwt.authentication import JWTAuthentication
|
||||
|
||||
from api.db_router import MainRouter
|
||||
from api.models import TenantAPIKey, TenantAPIKeyManager
|
||||
|
||||
|
||||
class TenantAPIKeyAuthentication(BaseAPIKeyAuth):
|
||||
model = TenantAPIKey
|
||||
|
||||
def __init__(self):
|
||||
self.key_crypto = get_crypto()
|
||||
|
||||
def _authenticate_credentials(self, request, key):
|
||||
"""
|
||||
Override to use admin connection, bypassing RLS during authentication.
|
||||
Delegates to parent after temporarily routing model queries to admin DB.
|
||||
"""
|
||||
# Temporarily point the model's manager to admin database
|
||||
original_objects = self.model.objects
|
||||
self.model.objects = self.model.objects.using(MainRouter.admin_db)
|
||||
|
||||
try:
|
||||
# Call parent method which will now use admin database
|
||||
return super()._authenticate_credentials(request, key)
|
||||
finally:
|
||||
# Restore original manager
|
||||
self.model.objects = original_objects
|
||||
|
||||
def authenticate(self, request: Request):
|
||||
prefixed_key = self.get_key(request)
|
||||
|
||||
# Split prefix from key (format: pk_xxxxxxxx.encrypted_key)
|
||||
try:
|
||||
prefix, key = prefixed_key.split(TenantAPIKeyManager.separator, 1)
|
||||
except ValueError:
|
||||
raise AuthenticationFailed("Invalid API Key.")
|
||||
|
||||
try:
|
||||
entity, _ = self._authenticate_credentials(request, key)
|
||||
except InvalidToken:
|
||||
raise AuthenticationFailed("Invalid API Key.")
|
||||
|
||||
# Get the API key instance to update last_used_at and retrieve tenant info
|
||||
# We need to decrypt again to get the pk (already validated by _authenticate_credentials)
|
||||
payload = self.key_crypto.decrypt(key)
|
||||
api_key_pk = payload["_pk"]
|
||||
|
||||
# Convert string UUID back to UUID object for lookup
|
||||
if isinstance(api_key_pk, str):
|
||||
api_key_pk = UUID(api_key_pk)
|
||||
|
||||
try:
|
||||
api_key_instance = TenantAPIKey.objects.using(MainRouter.admin_db).get(
|
||||
id=api_key_pk, prefix=prefix
|
||||
)
|
||||
except TenantAPIKey.DoesNotExist:
|
||||
raise AuthenticationFailed("Invalid API Key.")
|
||||
|
||||
# Update last_used_at
|
||||
api_key_instance.last_used_at = timezone.now()
|
||||
api_key_instance.save(update_fields=["last_used_at"], using=MainRouter.admin_db)
|
||||
|
||||
return entity, {
|
||||
"tenant_id": str(api_key_instance.tenant_id),
|
||||
"sub": str(api_key_instance.entity.id),
|
||||
"api_key_prefix": prefix,
|
||||
}
|
||||
|
||||
|
||||
class CombinedJWTOrAPIKeyAuthentication(BaseAuthentication):
|
||||
jwt_auth = JWTAuthentication()
|
||||
api_key_auth = TenantAPIKeyAuthentication()
|
||||
|
||||
def authenticate(self, request: Request) -> Optional[Tuple[object, dict]]:
|
||||
auth_header = request.headers.get("Authorization", "")
|
||||
|
||||
# Prioritize JWT authentication if both are present
|
||||
if auth_header.startswith("Bearer "):
|
||||
return self.jwt_auth.authenticate(request)
|
||||
|
||||
if auth_header.startswith("Api-Key "):
|
||||
return self.api_key_auth.authenticate(request)
|
||||
|
||||
# Default fallback
|
||||
return self.jwt_auth.authenticate(request)
|
||||
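A hedged usage sketch (not from the original file): a hypothetical DRF view wired to the combined backend, showing the two Authorization header shapes it dispatches on.

# Illustrative only; "WhoAmIView" is a hypothetical view, not part of the API.
from rest_framework.response import Response
from rest_framework.views import APIView


class WhoAmIView(APIView):
    authentication_classes = [CombinedJWTOrAPIKeyAuthentication]

    def get(self, request):
        # For API keys, request.auth is the dict built above:
        # {"tenant_id": ..., "sub": ..., "api_key_prefix": ...}.
        # For Bearer tokens, it is the validated JWT payload.
        return Response({"tenant_id": request.auth.get("tenant_id")})


# Clients send either of:
#   Authorization: Bearer <access_token>
#   Authorization: Api-Key pk_xxxxxxxx.<encrypted_key>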
@@ -1,207 +0,0 @@
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.db import transaction
|
||||
from rest_framework import permissions
|
||||
from rest_framework.exceptions import NotAuthenticated
|
||||
from rest_framework.filters import SearchFilter
|
||||
from rest_framework.permissions import SAFE_METHODS
|
||||
from rest_framework_json_api import filters
|
||||
from rest_framework_json_api.views import ModelViewSet
|
||||
|
||||
from api.authentication import CombinedJWTOrAPIKeyAuthentication
|
||||
from api.db_router import MainRouter, reset_read_db_alias, set_read_db_alias
|
||||
from api.db_utils import POSTGRES_USER_VAR, rls_transaction
|
||||
from api.filters import CustomDjangoFilterBackend
|
||||
from api.models import Role, Tenant
|
||||
from api.rbac.permissions import HasPermissions
|
||||
|
||||
|
||||
class BaseViewSet(ModelViewSet):
|
||||
authentication_classes = [CombinedJWTOrAPIKeyAuthentication]
|
||||
required_permissions = []
|
||||
permission_classes = [permissions.IsAuthenticated, HasPermissions]
|
||||
filter_backends = [
|
||||
filters.QueryParameterValidationFilter,
|
||||
filters.OrderingFilter,
|
||||
CustomDjangoFilterBackend,
|
||||
SearchFilter,
|
||||
]
|
||||
|
||||
filterset_fields = []
|
||||
search_fields = []
|
||||
|
||||
ordering_fields = "__all__"
|
||||
ordering = ["id"]
|
||||
|
||||
def _get_request_db_alias(self, request):
|
||||
if request is None:
|
||||
return MainRouter.default_db
|
||||
|
||||
read_alias = (
|
||||
MainRouter.replica_db
|
||||
if request.method in SAFE_METHODS
|
||||
and MainRouter.replica_db in settings.DATABASES
|
||||
else None
|
||||
)
|
||||
if read_alias:
|
||||
return read_alias
|
||||
return MainRouter.default_db
|
||||
|
||||
def initial(self, request, *args, **kwargs):
|
||||
"""
|
||||
Sets required_permissions before permissions are checked.
|
||||
"""
|
||||
self.set_required_permissions()
|
||||
super().initial(request, *args, **kwargs)
|
||||
|
||||
def set_required_permissions(self):
|
||||
"""This is an abstract method that must be implemented by subclasses."""
|
||||
NotImplemented
|
||||
|
||||
def get_queryset(self):
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class BaseRLSViewSet(BaseViewSet):
|
||||
def dispatch(self, request, *args, **kwargs):
|
||||
self.db_alias = self._get_request_db_alias(request)
|
||||
alias_token = None
|
||||
try:
|
||||
if self.db_alias != MainRouter.default_db:
|
||||
alias_token = set_read_db_alias(self.db_alias)
|
||||
|
||||
if request is not None:
|
||||
request.db_alias = self.db_alias
|
||||
|
||||
with transaction.atomic(using=self.db_alias):
|
||||
return super().dispatch(request, *args, **kwargs)
|
||||
finally:
|
||||
if alias_token is not None:
|
||||
reset_read_db_alias(alias_token)
|
||||
self.db_alias = MainRouter.default_db
|
||||
|
||||
def initial(self, request, *args, **kwargs):
|
||||
# Ideally, this logic would be in the `.setup()` method but DRF view sets don't call it
|
||||
# https://docs.djangoproject.com/en/5.1/ref/class-based-views/base/#django.views.generic.base.View.setup
|
||||
if request.auth is None:
|
||||
raise NotAuthenticated
|
||||
|
||||
tenant_id = request.auth.get("tenant_id")
|
||||
if tenant_id is None:
|
||||
raise NotAuthenticated("Tenant ID is not present in token")
|
||||
|
||||
with rls_transaction(
|
||||
tenant_id, using=getattr(self, "db_alias", MainRouter.default_db)
|
||||
):
|
||||
self.request.tenant_id = tenant_id
|
||||
return super().initial(request, *args, **kwargs)
|
||||
|
||||
def get_serializer_context(self):
|
||||
context = super().get_serializer_context()
|
||||
context["tenant_id"] = self.request.tenant_id
|
||||
return context
|
||||
|
||||
|
||||
class BaseTenantViewset(BaseViewSet):
|
||||
def dispatch(self, request, *args, **kwargs):
|
||||
self.db_alias = self._get_request_db_alias(request)
|
||||
alias_token = None
|
||||
try:
|
||||
if self.db_alias != MainRouter.default_db:
|
||||
alias_token = set_read_db_alias(self.db_alias)
|
||||
|
||||
if request is not None:
|
||||
request.db_alias = self.db_alias
|
||||
|
||||
with transaction.atomic(using=self.db_alias):
|
||||
tenant = super().dispatch(request, *args, **kwargs)
|
||||
|
||||
try:
|
||||
# If the request is a POST, create the admin role
|
||||
if request.method == "POST":
|
||||
isinstance(tenant, dict) and self._create_admin_role(
|
||||
tenant.data["id"]
|
||||
)
|
||||
except Exception as e:
|
||||
self._handle_creation_error(e, tenant)
|
||||
raise
|
||||
|
||||
return tenant
|
||||
finally:
|
||||
if alias_token is not None:
|
||||
reset_read_db_alias(alias_token)
|
||||
self.db_alias = MainRouter.default_db
|
||||
|
||||
def _create_admin_role(self, tenant_id):
|
||||
Role.objects.using(MainRouter.admin_db).create(
|
||||
name="admin",
|
||||
tenant_id=tenant_id,
|
||||
manage_users=True,
|
||||
manage_account=True,
|
||||
manage_billing=True,
|
||||
manage_providers=True,
|
||||
manage_integrations=True,
|
||||
manage_scans=True,
|
||||
unlimited_visibility=True,
|
||||
)
|
||||
|
||||
def _handle_creation_error(self, error, tenant):
|
||||
if tenant.data.get("id"):
|
||||
try:
|
||||
Tenant.objects.using(MainRouter.admin_db).filter(
|
||||
id=tenant.data["id"]
|
||||
).delete()
|
||||
except ObjectDoesNotExist:
|
||||
pass # Tenant might not exist, handle gracefully
|
||||
|
||||
def initial(self, request, *args, **kwargs):
|
||||
if request.auth is None:
|
||||
raise NotAuthenticated
|
||||
|
||||
tenant_id = request.auth.get("tenant_id")
|
||||
if tenant_id is None:
|
||||
raise NotAuthenticated("Tenant ID is not present in token")
|
||||
|
||||
user_id = str(request.user.id)
|
||||
with rls_transaction(
|
||||
value=user_id,
|
||||
parameter=POSTGRES_USER_VAR,
|
||||
using=getattr(self, "db_alias", MainRouter.default_db),
|
||||
):
|
||||
return super().initial(request, *args, **kwargs)
|
||||
|
||||
|
||||
class BaseUserViewset(BaseViewSet):
|
||||
def dispatch(self, request, *args, **kwargs):
|
||||
self.db_alias = self._get_request_db_alias(request)
|
||||
alias_token = None
|
||||
try:
|
||||
if self.db_alias != MainRouter.default_db:
|
||||
alias_token = set_read_db_alias(self.db_alias)
|
||||
|
||||
if request is not None:
|
||||
request.db_alias = self.db_alias
|
||||
|
||||
with transaction.atomic(using=self.db_alias):
|
||||
return super().dispatch(request, *args, **kwargs)
|
||||
finally:
|
||||
if alias_token is not None:
|
||||
reset_read_db_alias(alias_token)
|
||||
self.db_alias = MainRouter.default_db
|
||||
|
||||
def initial(self, request, *args, **kwargs):
|
||||
# TODO refactor after improving RLS on users
|
||||
if request.stream is not None and request.stream.method == "POST":
|
||||
return super().initial(request, *args, **kwargs)
|
||||
if request.auth is None:
|
||||
raise NotAuthenticated
|
||||
|
||||
tenant_id = request.auth.get("tenant_id")
|
||||
if tenant_id is None:
|
||||
raise NotAuthenticated("Tenant ID is not present in token")
|
||||
|
||||
with rls_transaction(
|
||||
tenant_id, using=getattr(self, "db_alias", MainRouter.default_db)
|
||||
):
|
||||
self.request.tenant_id = tenant_id
|
||||
return super().initial(request, *args, **kwargs)
|
||||
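A hedged sketch (not in the original module) of the contract a concrete viewset is expected to fulfil on top of BaseRLSViewSet; the model, serializer and permission value are placeholders.

# Illustrative only; "Widget", "WidgetSerializer" and the permission name are placeholders.
class WidgetViewSet(BaseRLSViewSet):
    serializer_class = WidgetSerializer
    filterset_fields = ["name"]
    search_fields = ["name"]

    def set_required_permissions(self):
        # Checked by HasPermissions before the request is dispatched
        self.required_permissions = ["manage_scans"]  # placeholder permission name

    def get_queryset(self):
        # RLS (api.tenant_id) is already set in initial(), so no explicit tenant filter
        return Widget.objects.all()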
@@ -1,244 +0,0 @@
|
||||
from types import MappingProxyType
|
||||
|
||||
from api.models import Provider
|
||||
from prowler.config.config import get_available_compliance_frameworks
|
||||
from prowler.lib.check.compliance_models import Compliance
|
||||
from prowler.lib.check.models import CheckMetadata
|
||||
|
||||
PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE = {}
|
||||
PROWLER_CHECKS = {}
|
||||
AVAILABLE_COMPLIANCE_FRAMEWORKS = {}
|
||||
|
||||
|
||||
def get_compliance_frameworks(provider_type: Provider.ProviderChoices) -> list[str]:
|
||||
"""
|
||||
Retrieve and cache the list of available compliance frameworks for a specific cloud provider.
|
||||
|
||||
This function lazily loads and caches the available compliance frameworks (e.g., CIS, MITRE, ISO)
|
||||
for each provider type (AWS, Azure, GCP, etc.) on first access. Subsequent calls for the same
|
||||
provider will return the cached result.
|
||||
|
||||
Args:
|
||||
provider_type (Provider.ProviderChoices): The cloud provider type for which to retrieve
|
||||
available compliance frameworks (e.g., "aws", "azure", "gcp", "m365").
|
||||
|
||||
Returns:
|
||||
list[str]: A list of framework identifiers (e.g., "cis_1.4_aws", "mitre_attack_azure") available
|
||||
for the given provider.
|
||||
"""
|
||||
global AVAILABLE_COMPLIANCE_FRAMEWORKS
|
||||
if provider_type not in AVAILABLE_COMPLIANCE_FRAMEWORKS:
|
||||
AVAILABLE_COMPLIANCE_FRAMEWORKS[provider_type] = (
|
||||
get_available_compliance_frameworks(provider_type)
|
||||
)
|
||||
|
||||
return AVAILABLE_COMPLIANCE_FRAMEWORKS[provider_type]
|
||||
|
||||
|
||||
def get_prowler_provider_checks(provider_type: Provider.ProviderChoices):
|
||||
"""
|
||||
Retrieve all check IDs for the specified provider type.
|
||||
|
||||
This function fetches the check metadata for the given cloud provider
|
||||
and returns an iterable of check IDs.
|
||||
|
||||
Args:
|
||||
provider_type (Provider.ProviderChoices): The provider type
|
||||
(e.g., 'aws', 'azure') for which to retrieve check IDs.
|
||||
|
||||
Returns:
|
||||
Iterable[str]: An iterable of check IDs associated with the specified provider type.
|
||||
"""
|
||||
return CheckMetadata.get_bulk(provider_type).keys()
|
||||
|
||||
|
||||
def get_prowler_provider_compliance(provider_type: Provider.ProviderChoices) -> dict:
|
||||
"""
|
||||
Retrieve the Prowler compliance data for a specified provider type.
|
||||
|
||||
This function fetches the compliance frameworks and their associated
|
||||
requirements for the given cloud provider.
|
||||
|
||||
Args:
|
||||
provider_type (Provider.ProviderChoices): The provider type
|
||||
(e.g., 'aws', 'azure') for which to retrieve compliance data.
|
||||
|
||||
Returns:
|
||||
dict: A dictionary mapping compliance framework names to their respective
|
||||
Compliance objects for the specified provider.
|
||||
"""
|
||||
return Compliance.get_bulk(provider_type)
|
||||
|
||||
|
||||
def load_prowler_compliance():
|
||||
"""
|
||||
Load and initialize the Prowler compliance data and checks for all provider types.
|
||||
|
||||
This function retrieves compliance data for all supported provider types,
|
||||
generates a compliance overview template, and populates the global variables
|
||||
`PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE` and `PROWLER_CHECKS` with read-only mappings
|
||||
of the compliance templates and checks, respectively.
|
||||
"""
|
||||
global PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE
|
||||
global PROWLER_CHECKS
|
||||
|
||||
prowler_compliance = {
|
||||
provider_type: get_prowler_provider_compliance(provider_type)
|
||||
for provider_type in Provider.ProviderChoices.values
|
||||
}
|
||||
template = generate_compliance_overview_template(prowler_compliance)
|
||||
PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE = MappingProxyType(template)
|
||||
PROWLER_CHECKS = MappingProxyType(load_prowler_checks(prowler_compliance))
|
||||
|
||||
|
||||
def load_prowler_checks(prowler_compliance):
|
||||
"""
|
||||
Generate a mapping of checks to the compliance frameworks that include them.
|
||||
|
||||
This function processes the provided compliance data and creates a dictionary
|
||||
mapping each provider type to a dictionary where each check ID maps to a set
|
||||
of compliance names that include that check.
|
||||
|
||||
Args:
|
||||
prowler_compliance (dict): The compliance data for all provider types,
|
||||
as returned by `get_prowler_provider_compliance`.
|
||||
|
||||
Returns:
|
||||
dict: A nested dictionary where the first-level keys are provider types,
|
||||
and the values are dictionaries mapping check IDs to sets of compliance names.
|
||||
"""
|
||||
checks = {}
|
||||
for provider_type in Provider.ProviderChoices.values:
|
||||
checks[provider_type] = {
|
||||
check_id: set() for check_id in get_prowler_provider_checks(provider_type)
|
||||
}
|
||||
for compliance_name, compliance_data in prowler_compliance[
|
||||
provider_type
|
||||
].items():
|
||||
for requirement in compliance_data.Requirements:
|
||||
for check in requirement.Checks:
|
||||
try:
|
||||
checks[provider_type][check].add(compliance_name)
|
||||
except KeyError:
|
||||
continue
|
||||
return checks
|
||||
|
||||
|
||||
def generate_scan_compliance(
|
||||
compliance_overview, provider_type: str, check_id: str, status: str
|
||||
):
|
||||
"""
|
||||
Update the compliance overview with the status of a specific check.
|
||||
|
||||
This function updates the compliance overview by setting the status of the given check
|
||||
within all compliance frameworks and requirements that include it. It then updates the
|
||||
requirement status to 'FAIL' if any of its checks have failed, and adjusts the counts
|
||||
of passed and failed requirements in the compliance overview.
|
||||
|
||||
Args:
|
||||
compliance_overview (dict): The compliance overview data structure to update.
|
||||
provider_type (str): The provider type (e.g., 'aws', 'azure') associated with the check.
|
||||
check_id (str): The identifier of the check whose status is being updated.
|
||||
status (str): The status of the check (e.g., 'PASS', 'FAIL', 'MUTED').
|
||||
|
||||
Returns:
|
||||
None: This function modifies the compliance_overview in place.
|
||||
"""
|
||||
for compliance_id in PROWLER_CHECKS[provider_type][check_id]:
|
||||
for requirement in compliance_overview[compliance_id]["requirements"].values():
|
||||
if check_id in requirement["checks"]:
|
||||
requirement["checks"][check_id] = status
|
||||
requirement["checks_status"][status.lower()] += 1
|
||||
|
||||
if requirement["status"] != "FAIL" and any(
|
||||
value == "FAIL" for value in requirement["checks"].values()
|
||||
):
|
||||
requirement["status"] = "FAIL"
|
||||
compliance_overview[compliance_id]["requirements_status"][
|
||||
"passed"
|
||||
] -= 1
|
||||
compliance_overview[compliance_id]["requirements_status"][
|
||||
"failed"
|
||||
] += 1
|
||||
|
||||
|
||||
def generate_compliance_overview_template(prowler_compliance: dict):
|
||||
"""
|
||||
Generate a compliance overview template for all provider types.
|
||||
|
||||
This function creates a nested dictionary structure representing the compliance
|
||||
overview template for each provider type, compliance framework, and requirement.
|
||||
It initializes the status of all checks and requirements, and calculates initial
|
||||
counts for requirements status.
|
||||
|
||||
Args:
|
||||
prowler_compliance (dict): The compliance data for all provider types,
|
||||
as returned by `get_prowler_provider_compliance`.
|
||||
|
||||
Returns:
|
||||
dict: A nested dictionary representing the compliance overview template,
|
||||
structured by provider type and compliance framework.
|
||||
"""
|
||||
template = {}
|
||||
for provider_type in Provider.ProviderChoices.values:
|
||||
provider_compliance = template.setdefault(provider_type, {})
|
||||
compliance_data_dict = prowler_compliance[provider_type]
|
||||
|
||||
for compliance_name, compliance_data in compliance_data_dict.items():
|
||||
compliance_requirements = {}
|
||||
requirements_status = {"passed": 0, "failed": 0, "manual": 0}
|
||||
total_requirements = 0
|
||||
|
||||
for requirement in compliance_data.Requirements:
|
||||
total_requirements += 1
|
||||
total_checks = len(requirement.Checks)
|
||||
checks_dict = {check: None for check in requirement.Checks}
|
||||
|
||||
req_status_val = "MANUAL" if total_checks == 0 else "PASS"
|
||||
|
||||
# Build requirement dictionary
|
||||
requirement_dict = {
|
||||
"name": requirement.Name or requirement.Id,
|
||||
"description": requirement.Description,
|
||||
"tactics": getattr(requirement, "Tactics", []),
|
||||
"subtechniques": getattr(requirement, "SubTechniques", []),
|
||||
"platforms": getattr(requirement, "Platforms", []),
|
||||
"technique_url": getattr(requirement, "TechniqueURL", ""),
|
||||
"attributes": [
|
||||
dict(attribute) for attribute in requirement.Attributes
|
||||
],
|
||||
"checks": checks_dict,
|
||||
"checks_status": {
|
||||
"pass": 0,
|
||||
"fail": 0,
|
||||
"manual": 0,
|
||||
"total": total_checks,
|
||||
},
|
||||
"status": req_status_val,
|
||||
}
|
||||
|
||||
# Update requirements status counts for the framework
|
||||
if req_status_val == "MANUAL":
|
||||
requirements_status["manual"] += 1
|
||||
elif req_status_val == "PASS":
|
||||
requirements_status["passed"] += 1
|
||||
|
||||
# Add requirement to compliance requirements
|
||||
compliance_requirements[requirement.Id] = requirement_dict
|
||||
|
||||
# Build compliance dictionary
|
||||
compliance_dict = {
|
||||
"framework": compliance_data.Framework,
|
||||
"name": compliance_data.Name,
|
||||
"version": compliance_data.Version,
|
||||
"provider": provider_type,
|
||||
"description": compliance_data.Description,
|
||||
"requirements": compliance_requirements,
|
||||
"requirements_status": requirements_status,
|
||||
"total_requirements": total_requirements,
|
||||
}
|
||||
|
||||
# Add compliance to provider compliance
|
||||
provider_compliance[compliance_name] = compliance_dict
|
||||
|
||||
return template
|
||||
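A hedged sketch of the intended call flow (the provider and check id are placeholders and must exist in the loaded catalog): the template is loaded once, deep-copied per scan, and mutated in place as finding statuses arrive.

# Illustrative only; "some_check_id" is a placeholder check identifier.
import copy

load_prowler_compliance()
scan_overview = copy.deepcopy(PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE["aws"])
generate_scan_compliance(scan_overview, "aws", "some_check_id", "FAIL")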
@@ -1,59 +0,0 @@
|
||||
from contextvars import ContextVar
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
ALLOWED_APPS = ("django", "socialaccount", "account", "authtoken", "silk")
|
||||
|
||||
_read_db_alias = ContextVar("read_db_alias", default=None)
|
||||
|
||||
|
||||
def set_read_db_alias(alias: str | None):
|
||||
if not alias:
|
||||
return None
|
||||
return _read_db_alias.set(alias)
|
||||
|
||||
|
||||
def get_read_db_alias() -> str | None:
|
||||
return _read_db_alias.get()
|
||||
|
||||
|
||||
def reset_read_db_alias(token) -> None:
|
||||
if token is not None:
|
||||
_read_db_alias.reset(token)
|
||||
|
||||
|
||||
class MainRouter:
|
||||
default_db = "default"
|
||||
admin_db = "admin"
|
||||
replica_db = "replica"
|
||||
|
||||
def db_for_read(self, model, **hints): # noqa: F841
|
||||
model_table_name = model._meta.db_table
|
||||
if model_table_name.startswith("django_") or any(
|
||||
model_table_name.startswith(f"{app}_") for app in ALLOWED_APPS
|
||||
):
|
||||
return self.admin_db
|
||||
read_alias = get_read_db_alias()
|
||||
if read_alias:
|
||||
return read_alias
|
||||
return None
|
||||
|
||||
def db_for_write(self, model, **hints): # noqa: F841
|
||||
model_table_name = model._meta.db_table
|
||||
if any(model_table_name.startswith(f"{app}_") for app in ALLOWED_APPS):
|
||||
return self.admin_db
|
||||
return None
|
||||
|
||||
def allow_migrate(self, db, app_label, model_name=None, **hints): # noqa: F841
|
||||
return db == self.admin_db
|
||||
|
||||
def allow_relation(self, obj1, obj2, **hints): # noqa: F841
|
||||
# Allow relations if both objects are in either "default" or "admin" db connectors
|
||||
if {obj1._state.db, obj2._state.db} <= {self.default_db, self.admin_db}:
|
||||
return True
|
||||
return None
|
||||
|
||||
|
||||
READ_REPLICA_ALIAS = (
|
||||
MainRouter.replica_db if MainRouter.replica_db in settings.DATABASES else None
|
||||
)
|
||||
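A hedged usage sketch (not in the original file) of the context-variable helpers above: scoping a block of read-only ORM work to the replica, then restoring the previous routing.

# Illustrative only. set_read_db_alias returns None when no replica is configured,
# and reset_read_db_alias safely ignores a None token.
token = set_read_db_alias(READ_REPLICA_ALIAS)
try:
    # Reads issued here are sent to the replica alias by MainRouter.db_for_read,
    # except for tables owned by ALLOWED_APPS, which always use the admin db.
    ...
finally:
    reset_read_db_alias(token)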
@@ -1,594 +0,0 @@
|
||||
import re
|
||||
import secrets
|
||||
import uuid
|
||||
from contextlib import contextmanager
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import BaseUserManager
|
||||
from django.db import DEFAULT_DB_ALIAS, connection, connections, models, transaction
|
||||
from django_celery_beat.models import PeriodicTask
|
||||
from psycopg2 import connect as psycopg2_connect
|
||||
from psycopg2.extensions import AsIs, new_type, register_adapter, register_type
|
||||
from rest_framework_json_api.serializers import ValidationError
|
||||
|
||||
from api.db_router import get_read_db_alias, reset_read_db_alias, set_read_db_alias
|
||||
|
||||
DB_USER = settings.DATABASES["default"]["USER"] if not settings.TESTING else "test"
|
||||
DB_PASSWORD = (
|
||||
settings.DATABASES["default"]["PASSWORD"] if not settings.TESTING else "test"
|
||||
)
|
||||
DB_PROWLER_USER = (
|
||||
settings.DATABASES["prowler_user"]["USER"] if not settings.TESTING else "test"
|
||||
)
|
||||
DB_PROWLER_PASSWORD = (
|
||||
settings.DATABASES["prowler_user"]["PASSWORD"] if not settings.TESTING else "test"
|
||||
)
|
||||
TASK_RUNNER_DB_TABLE = "django_celery_results_taskresult"
|
||||
POSTGRES_TENANT_VAR = "api.tenant_id"
|
||||
POSTGRES_USER_VAR = "api.user_id"
|
||||
|
||||
SET_CONFIG_QUERY = "SELECT set_config(%s, %s::text, TRUE);"
|
||||
|
||||
|
||||
@contextmanager
|
||||
def psycopg_connection(database_alias: str):
|
||||
psycopg2_connection = None
|
||||
try:
|
||||
admin_db = settings.DATABASES[database_alias]
|
||||
|
||||
psycopg2_connection = psycopg2_connect(
|
||||
dbname=admin_db["NAME"],
|
||||
user=admin_db["USER"],
|
||||
password=admin_db["PASSWORD"],
|
||||
host=admin_db["HOST"],
|
||||
port=admin_db["PORT"],
|
||||
)
|
||||
yield psycopg2_connection
|
||||
finally:
|
||||
if psycopg2_connection is not None:
|
||||
psycopg2_connection.close()
|
||||
|
||||
|
||||
@contextmanager
|
||||
def rls_transaction(
|
||||
value: str,
|
||||
parameter: str = POSTGRES_TENANT_VAR,
|
||||
using: str | None = None,
|
||||
):
|
||||
"""
|
||||
Creates a new database transaction setting the given configuration value for Postgres RLS. It validates
|
||||
that the value is a valid UUID.
|
||||
|
||||
Args:
|
||||
value (str): Database configuration parameter value.
|
||||
parameter (str): Database configuration parameter name, by default is 'api.tenant_id'.
|
||||
using (str | None): Optional database alias to run the transaction against. Defaults to the
|
||||
active read alias (if any) or Django's default connection.
|
||||
"""
|
||||
requested_alias = using or get_read_db_alias()
|
||||
db_alias = requested_alias or DEFAULT_DB_ALIAS
|
||||
if db_alias not in connections:
|
||||
db_alias = DEFAULT_DB_ALIAS
|
||||
|
||||
router_token = None
|
||||
try:
|
||||
if db_alias != DEFAULT_DB_ALIAS:
|
||||
router_token = set_read_db_alias(db_alias)
|
||||
|
||||
with transaction.atomic(using=db_alias):
|
||||
conn = connections[db_alias]
|
||||
with conn.cursor() as cursor:
|
||||
try:
|
||||
# just in case the value is a UUID object
|
||||
uuid.UUID(str(value))
|
||||
except ValueError:
|
||||
raise ValidationError("Must be a valid UUID")
|
||||
cursor.execute(SET_CONFIG_QUERY, [parameter, value])
|
||||
yield cursor
|
||||
finally:
|
||||
if router_token is not None:
|
||||
reset_read_db_alias(router_token)
|
||||
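A hedged usage sketch (not in the original file); the tenant id below is a placeholder UUID.

# Illustrative only: every statement inside the block runs in one transaction with
# api.tenant_id set, so Postgres row-level security scopes queries to that tenant.
with rls_transaction("3fa85f64-5717-4562-b3fc-2c963f66afa6"):
    ...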
|
||||
|
||||
class CustomUserManager(BaseUserManager):
|
||||
def create_user(self, email, password=None, **extra_fields):
|
||||
if not email:
|
||||
raise ValueError("The email field must be set")
|
||||
email = self.normalize_email(email)
|
||||
user = self.model(email=email, **extra_fields)
|
||||
user.set_password(password)
|
||||
user.save(using=self._db)
|
||||
return user
|
||||
|
||||
def get_by_natural_key(self, email):
|
||||
return self.get(email__iexact=email)
|
||||
|
||||
|
||||
def enum_to_choices(enum_class):
|
||||
"""
|
||||
This function converts a Python Enum to a list of tuples, where the first element is the value and the second element is the name.
|
||||
|
||||
It's for use with Django's `choices` attribute, which expects a list of tuples.
|
||||
"""
|
||||
return [(item.value, item.name.replace("_", " ").title()) for item in enum_class]
|
||||
|
||||
|
||||
def one_week_from_now():
|
||||
"""
|
||||
Return a datetime object with a date one week from now.
|
||||
"""
|
||||
return datetime.now(timezone.utc) + timedelta(days=7)
|
||||
|
||||
|
||||
def generate_random_token(length: int = 14, symbols: str | None = None) -> str:
|
||||
"""
|
||||
Generate a random token with the specified length.
|
||||
"""
|
||||
_symbols = "23456789ABCDEFGHJKMNPQRSTVWXYZ"
|
||||
return "".join(secrets.choice(symbols or _symbols) for _ in range(length))
|
||||
|
||||
|
||||
def batch_delete(tenant_id, queryset, batch_size=settings.DJANGO_DELETION_BATCH_SIZE):
|
||||
"""
|
||||
Deletes objects in batches and returns the total number of deletions and a summary.
|
||||
|
||||
Args:
|
||||
tenant_id (str): Tenant ID the queryset belongs to.
|
||||
queryset (QuerySet): The queryset of objects to delete.
|
||||
batch_size (int): The number of objects to delete in each batch.
|
||||
|
||||
Returns:
|
||||
tuple: (total_deleted, deletion_summary)
|
||||
"""
|
||||
total_deleted = 0
|
||||
deletion_summary = {}
|
||||
|
||||
while True:
|
||||
with rls_transaction(tenant_id, POSTGRES_TENANT_VAR):
|
||||
# Get a batch of IDs to delete
|
||||
batch_ids = set(
|
||||
queryset.values_list("id", flat=True).order_by("id")[:batch_size]
|
||||
)
|
||||
if not batch_ids:
|
||||
# No more objects to delete
|
||||
break
|
||||
|
||||
deleted_count, deleted_info = queryset.filter(id__in=batch_ids).delete()
|
||||
|
||||
total_deleted += deleted_count
|
||||
for model_label, count in deleted_info.items():
|
||||
deletion_summary[model_label] = deletion_summary.get(model_label, 0) + count
|
||||
|
||||
return total_deleted, deletion_summary
|
||||
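A hedged usage sketch (not in the original file); the model, queryset and ids are placeholders supplied by the caller.

# Illustrative only; "Finding" and "scan_id" are placeholders for the caller's queryset.
total_deleted, summary = batch_delete(
    tenant_id="3fa85f64-5717-4562-b3fc-2c963f66afa6",
    queryset=Finding.objects.filter(scan_id=scan_id),
)
# summary is keyed by model label, e.g. {"api.Finding": 1200}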
|
||||
|
||||
def delete_related_daily_task(provider_id: str):
|
||||
"""
|
||||
Deletes the periodic task associated with a specific provider.
|
||||
|
||||
Args:
|
||||
provider_id (str): The unique identifier for the provider
|
||||
whose related periodic task should be deleted.
|
||||
"""
|
||||
task_name = f"scan-perform-scheduled-{provider_id}"
|
||||
PeriodicTask.objects.filter(name=task_name).delete()
|
||||
|
||||
|
||||
def create_objects_in_batches(
|
||||
tenant_id: str, model, objects: list, batch_size: int = 500
|
||||
):
|
||||
"""
|
||||
Bulk-create model instances in repeated, per-tenant RLS transactions.
|
||||
|
||||
Each chunk executes in its own transaction, so no single transaction
|
||||
grows too large.
|
||||
|
||||
Args:
|
||||
tenant_id (str): UUID string of the tenant under which to set RLS.
|
||||
model: Django model class whose `.objects.bulk_create()` will be called.
|
||||
objects (list): List of model instances (unsaved) to bulk-create.
|
||||
batch_size (int): Maximum number of objects per bulk_create call.
|
||||
"""
|
||||
total = len(objects)
|
||||
for i in range(0, total, batch_size):
|
||||
chunk = objects[i : i + batch_size]
|
||||
with rls_transaction(value=tenant_id, parameter=POSTGRES_TENANT_VAR):
|
||||
model.objects.bulk_create(chunk, batch_size)
|
||||
|
||||
|
||||
def update_objects_in_batches(
|
||||
tenant_id: str, model, objects: list, fields: list, batch_size: int = 500
|
||||
):
|
||||
"""
|
||||
Bulk-update model instances in repeated, per-tenant RLS transactions.
|
||||
|
||||
Each chunk executes in its own transaction, so no single transaction
|
||||
grows too large.
|
||||
|
||||
Args:
|
||||
tenant_id (str): UUID string of the tenant under which to set RLS.
|
||||
model: Django model class whose `.objects.bulk_update()` will be called.
|
||||
objects (list): List of model instances (saved) to bulk-update.
|
||||
fields (list): List of field names to update.
|
||||
batch_size (int): Maximum number of objects per bulk_update call.
|
||||
"""
|
||||
total = len(objects)
|
||||
for start in range(0, total, batch_size):
|
||||
chunk = objects[start : start + batch_size]
|
||||
with rls_transaction(value=tenant_id, parameter=POSTGRES_TENANT_VAR):
|
||||
model.objects.bulk_update(chunk, fields, batch_size)
|
||||
|
||||
|
||||
# Postgres Enums
|
||||
|
||||
|
||||
class PostgresEnumMigration:
|
||||
def __init__(self, enum_name: str, enum_values: tuple):
|
||||
self.enum_name = enum_name
|
||||
self.enum_values = enum_values
|
||||
|
||||
def create_enum_type(self, apps, schema_editor): # noqa: F841
|
||||
string_enum_values = ", ".join([f"'{value}'" for value in self.enum_values])
|
||||
with schema_editor.connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
f"CREATE TYPE {self.enum_name} AS ENUM ({string_enum_values});"
|
||||
)
|
||||
|
||||
def drop_enum_type(self, apps, schema_editor): # noqa: F841
|
||||
with schema_editor.connection.cursor() as cursor:
|
||||
cursor.execute(f"DROP TYPE {self.enum_name};")
|
||||
|
||||
|
||||
class PostgresEnumField(models.Field):
|
||||
def __init__(self, enum_type_name, *args, **kwargs):
|
||||
self.enum_type_name = enum_type_name
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
def db_type(self, connection):
|
||||
return self.enum_type_name
|
||||
|
||||
def from_db_value(self, value, expression, connection): # noqa: F841
|
||||
return value
|
||||
|
||||
def to_python(self, value):
|
||||
if isinstance(value, EnumType):
|
||||
return value.value
|
||||
return value
|
||||
|
||||
def get_prep_value(self, value):
|
||||
if isinstance(value, EnumType):
|
||||
return value.value
|
||||
return value
|
||||
|
||||
|
||||
class EnumType:
|
||||
def __init__(self, value):
|
||||
self.value = value
|
||||
|
||||
def __str__(self):
|
||||
return self.value
|
||||
|
||||
|
||||
def enum_adapter(enum_obj):
|
||||
return AsIs(f"'{enum_obj.value}'::{enum_obj.__class__.enum_type_name}")
|
||||
|
||||
|
||||
def get_enum_oid(connection, enum_type_name: str):
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute("SELECT oid FROM pg_type WHERE typname = %s;", (enum_type_name,))
|
||||
result = cursor.fetchone()
|
||||
if result is None:
|
||||
raise ValueError(f"Enum type '{enum_type_name}' not found")
|
||||
return result[0]
|
||||
|
||||
|
||||
def register_enum(apps, schema_editor, enum_class): # noqa: F841
|
||||
with psycopg_connection(schema_editor.connection.alias) as connection:
|
||||
enum_oid = get_enum_oid(connection, enum_class.enum_type_name)
|
||||
enum_instance = new_type(
|
||||
(enum_oid,),
|
||||
enum_class.enum_type_name,
|
||||
lambda value, cur: value, # noqa: F841
|
||||
)
|
||||
register_type(enum_instance, connection)
|
||||
register_adapter(enum_class, enum_adapter)
|
||||
|
||||
|
||||
def _should_create_index_on_partition(
|
||||
partition_name: str, all_partitions: bool = False
|
||||
) -> bool:
|
||||
"""
|
||||
Determine if we should create an index on this partition.
|
||||
|
||||
Args:
|
||||
partition_name: The name of the partition (e.g., "findings_2025_aug", "findings_default")
|
||||
all_partitions: If True, create on all partitions. If False, only current/future partitions.
|
||||
|
||||
Returns:
|
||||
bool: True if index should be created on this partition, False otherwise.
|
||||
"""
|
||||
if all_partitions:
|
||||
return True
|
||||
|
||||
# Extract date from partition name if it follows the pattern
|
||||
# Partition names look like: findings_2025_aug, findings_2025_jul, etc.
|
||||
date_pattern = r"(\d{4})_([a-z]{3})$"
|
||||
match = re.search(date_pattern, partition_name)
|
||||
|
||||
if not match:
|
||||
# If we can't parse the date, include it to be safe (e.g., default partition)
|
||||
return True
|
||||
|
||||
try:
|
||||
year_str, month_abbr = match.groups()
|
||||
year = int(year_str)
|
||||
|
||||
# Map month abbreviations to numbers
|
||||
month_map = {
|
||||
"jan": 1,
|
||||
"feb": 2,
|
||||
"mar": 3,
|
||||
"apr": 4,
|
||||
"may": 5,
|
||||
"jun": 6,
|
||||
"jul": 7,
|
||||
"aug": 8,
|
||||
"sep": 9,
|
||||
"oct": 10,
|
||||
"nov": 11,
|
||||
"dec": 12,
|
||||
}
|
||||
|
||||
month = month_map.get(month_abbr.lower())
|
||||
if month is None:
|
||||
# Unknown month abbreviation, include it to be safe
|
||||
return True
|
||||
|
||||
partition_date = datetime(year, month, 1, tzinfo=timezone.utc)
|
||||
|
||||
# Get current month start
|
||||
now = datetime.now(timezone.utc)
|
||||
current_month_start = now.replace(
|
||||
day=1, hour=0, minute=0, second=0, microsecond=0
|
||||
)
|
||||
|
||||
# Include current month and future partitions
|
||||
return partition_date >= current_month_start
|
||||
|
||||
except (ValueError, TypeError):
|
||||
# If date parsing fails, include it to be safe
|
||||
return True
|
||||
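A hedged illustration (not in the original file) of how the helper above classifies partition names, assuming "now" falls in August 2025.

# Illustrative expectations, assuming the current month is August 2025:
#   _should_create_index_on_partition("findings_2025_aug")        -> True   (current month)
#   _should_create_index_on_partition("findings_2025_jul")        -> False  (past month)
#   _should_create_index_on_partition("findings_default")         -> True   (no date suffix)
#   _should_create_index_on_partition("findings_2025_jul", True)  -> True   (all_partitions)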
|
||||
|
||||
def create_index_on_partitions(
|
||||
apps, # noqa: F841
|
||||
schema_editor,
|
||||
parent_table: str,
|
||||
index_name: str,
|
||||
columns: str,
|
||||
method: str = "BTREE",
|
||||
where: str = "",
|
||||
all_partitions: bool = True,
|
||||
):
|
||||
"""
|
||||
Create an index on existing partitions of `parent_table`.
|
||||
|
||||
Args:
|
||||
parent_table: The name of the root table (e.g. "findings").
|
||||
index_name: A short name for the index (will be prefixed per-partition).
|
||||
columns: The parenthesized column list, e.g. "tenant_id, scan_id, status".
|
||||
method: The index method—BTREE, GIN, etc. Defaults to BTREE.
|
||||
where: Optional WHERE clause (without the leading "WHERE"), e.g. "status = 'FAIL'".
|
||||
all_partitions: Whether to create indexes on all partitions or just current/future ones.
|
||||
Defaults to True (all partitions); pass False to limit the index to current and future partitions and avoid maintenance overhead
|
||||
on old partitions where the index may not be needed.
|
||||
|
||||
Examples:
|
||||
# Create index only on current and future partitions (recommended for new indexes)
|
||||
create_index_on_partitions(
|
||||
apps, schema_editor,
|
||||
parent_table="findings",
|
||||
index_name="new_performance_idx",
|
||||
columns="tenant_id, status, severity",
|
||||
all_partitions=False  # Restrict to current and future partitions
|
||||
)
|
||||
|
||||
# Create index on all partitions (use when migrating existing critical indexes)
|
||||
create_index_on_partitions(
|
||||
apps, schema_editor,
|
||||
parent_table="findings",
|
||||
index_name="critical_existing_idx",
|
||||
columns="tenant_id, scan_id",
|
||||
all_partitions=True
|
||||
)
|
||||
"""
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT inhrelid::regclass::text
|
||||
FROM pg_inherits
|
||||
WHERE inhparent = %s::regclass
|
||||
""",
|
||||
[parent_table],
|
||||
)
|
||||
partitions = [row[0] for row in cursor.fetchall()]
|
||||
|
||||
where_sql = f" WHERE {where}" if where else ""
|
||||
for partition in partitions:
|
||||
if _should_create_index_on_partition(partition, all_partitions):
|
||||
idx_name = f"{partition.replace('.', '_')}_{index_name}"
|
||||
sql = (
|
||||
f"CREATE INDEX CONCURRENTLY IF NOT EXISTS {idx_name} "
|
||||
f"ON {partition} USING {method} ({columns})"
|
||||
f"{where_sql};"
|
||||
)
|
||||
schema_editor.execute(sql)
|
||||
|
||||
|
||||
def drop_index_on_partitions(
|
||||
apps, # noqa: F841
|
||||
schema_editor,
|
||||
parent_table: str,
|
||||
index_name: str,
|
||||
):
|
||||
"""
|
||||
Drop the per-partition indexes that were created by create_index_on_partitions.
|
||||
|
||||
Args:
|
||||
parent_table: The name of the root table (e.g. "findings").
|
||||
index_name: The same short name used when creating them.
|
||||
"""
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT inhrelid::regclass::text
|
||||
FROM pg_inherits
|
||||
WHERE inhparent = %s::regclass
|
||||
""",
|
||||
[parent_table],
|
||||
)
|
||||
partitions = [row[0] for row in cursor.fetchall()]
|
||||
|
||||
for partition in partitions:
|
||||
idx_name = f"{partition.replace('.', '_')}_{index_name}"
|
||||
sql = f"DROP INDEX CONCURRENTLY IF EXISTS {idx_name};"
|
||||
schema_editor.execute(sql)
|
||||
|
||||
|
||||
def generate_api_key_prefix():
|
||||
"""Generate a random 8-character prefix for API keys (e.g., 'pk_abc123de')."""
|
||||
random_chars = generate_random_token(length=8)
|
||||
return f"pk_{random_chars}"
|
||||
|
||||
|
||||
# Postgres enum definition for member role
|
||||
|
||||
|
||||
class MemberRoleEnum(EnumType):
|
||||
enum_type_name = "member_role"
|
||||
|
||||
|
||||
class MemberRoleEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("member_role", *args, **kwargs)
|
||||
|
||||
|
||||
# Postgres enum definition for Provider.provider
|
||||
|
||||
|
||||
class ProviderEnum(EnumType):
|
||||
enum_type_name = "provider"
|
||||
|
||||
|
||||
class ProviderEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("provider", *args, **kwargs)
|
||||
|
||||
|
||||
# Postgres enum definition for Scan.type
|
||||
|
||||
|
||||
class ScanTriggerEnum(EnumType):
|
||||
enum_type_name = "scan_trigger"
|
||||
|
||||
|
||||
class ScanTriggerEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("scan_trigger", *args, **kwargs)
|
||||
|
||||
|
||||
# Postgres enum definition for state
|
||||
|
||||
|
||||
class StateEnum(EnumType):
|
||||
enum_type_name = "state"
|
||||
|
||||
|
||||
class StateEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("state", *args, **kwargs)
|
||||
|
||||
|
||||
# Postgres enum definition for Finding.Delta
|
||||
|
||||
|
||||
class FindingDeltaEnum(EnumType):
|
||||
enum_type_name = "finding_delta"
|
||||
|
||||
|
||||
class FindingDeltaEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("finding_delta", *args, **kwargs)
|
||||
|
||||
|
||||
# Postgres enum definition for Severity
|
||||
|
||||
|
||||
class SeverityEnum(EnumType):
|
||||
enum_type_name = "severity"
|
||||
|
||||
|
||||
class SeverityEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("severity", *args, **kwargs)
|
||||
|
||||
|
||||
# Postgres enum definition for Status
|
||||
|
||||
|
||||
class StatusEnum(EnumType):
|
||||
enum_type_name = "status"
|
||||
|
||||
|
||||
class StatusEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("status", *args, **kwargs)
|
||||
|
||||
|
||||
# Postgres enum definition for Provider secrets type
|
||||
|
||||
|
||||
class ProviderSecretTypeEnum(EnumType):
|
||||
enum_type_name = "provider_secret_type"
|
||||
|
||||
|
||||
class ProviderSecretTypeEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("provider_secret_type", *args, **kwargs)
|
||||
|
||||
|
||||
# Postgres enum definition for Invitation state
|
||||
|
||||
|
||||
class InvitationStateEnum(EnumType):
|
||||
enum_type_name = "invitation_state"
|
||||
|
||||
|
||||
class InvitationStateEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("invitation_state", *args, **kwargs)
|
||||
|
||||
|
||||
# Postgres enum definition for Integration type
|
||||
|
||||
|
||||
class IntegrationTypeEnum(EnumType):
|
||||
enum_type_name = "integration_type"
|
||||
|
||||
|
||||
class IntegrationTypeEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("integration_type", *args, **kwargs)
|
||||
|
||||
|
||||
# Postgres enum definition for Processor type
|
||||
|
||||
|
||||
class ProcessorTypeEnum(EnumType):
|
||||
enum_type_name = "processor_type"
|
||||
|
||||
|
||||
class ProcessorTypeEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("processor_type", *args, **kwargs)
|
||||
@@ -1,68 +0,0 @@
|
||||
import uuid
|
||||
from functools import wraps
|
||||
|
||||
from django.db import connection, transaction
|
||||
from rest_framework_json_api.serializers import ValidationError
|
||||
|
||||
from api.db_utils import POSTGRES_TENANT_VAR, SET_CONFIG_QUERY
|
||||
|
||||
|
||||
def set_tenant(func=None, *, keep_tenant=False):
|
||||
"""
|
||||
Decorator to set the tenant context for a Celery task based on the provided tenant_id.
|
||||
|
||||
This decorator extracts the `tenant_id` from the task's keyword arguments,
|
||||
and uses it to set the tenant context for the current database session.
|
||||
The `tenant_id` is then removed from the kwargs before the task function
|
||||
is executed. If `tenant_id` is not provided, a KeyError is raised.
|
||||
|
||||
Args:
|
||||
func (function): The Celery task function to be decorated.
|
||||
|
||||
Raises:
|
||||
KeyError: If `tenant_id` is not found in the task's keyword arguments.
|
||||
|
||||
Returns:
|
||||
function: The wrapped function with tenant context set.
|
||||
|
||||
Example:
|
||||
# This decorator MUST be listed last in the decorator chain (closest to the function)
|
||||
|
||||
@shared_task
|
||||
@set_tenant
|
||||
def some_task(arg1, **kwargs):
|
||||
# Task logic here
|
||||
pass
|
||||
|
||||
# When calling the task
|
||||
some_task.delay(arg1, tenant_id="8db7ca86-03cc-4d42-99f6-5e480baf6ab5")
|
||||
|
||||
# The tenant context will be set before the task logic executes.
|
||||
"""
|
||||
|
||||
def decorator(func):
|
||||
@wraps(func)
|
||||
@transaction.atomic
|
||||
def wrapper(*args, **kwargs):
|
||||
try:
|
||||
if not keep_tenant:
|
||||
tenant_id = kwargs.pop("tenant_id")
|
||||
else:
|
||||
tenant_id = kwargs["tenant_id"]
|
||||
except KeyError:
|
||||
raise KeyError("This task requires the tenant_id")
|
||||
try:
|
||||
uuid.UUID(tenant_id)
|
||||
except ValueError:
|
||||
raise ValidationError("Tenant ID must be a valid UUID")
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(SET_CONFIG_QUERY, [POSTGRES_TENANT_VAR, tenant_id])
|
||||
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
if func is None:
|
||||
return decorator
|
||||
else:
|
||||
return decorator(func)
|
||||
@@ -1,105 +0,0 @@
|
||||
from django.core.exceptions import ValidationError as django_validation_error
|
||||
from rest_framework import status
|
||||
from rest_framework.exceptions import (
|
||||
APIException,
|
||||
AuthenticationFailed,
|
||||
NotAuthenticated,
|
||||
)
|
||||
from rest_framework_json_api.exceptions import exception_handler
|
||||
from rest_framework_json_api.serializers import ValidationError
|
||||
from rest_framework_simplejwt.exceptions import InvalidToken, TokenError
|
||||
|
||||
|
||||
class ModelValidationError(ValidationError):
|
||||
def __init__(
|
||||
self,
|
||||
detail: str | None = None,
|
||||
code: str | None = None,
|
||||
pointer: str | None = None,
|
||||
status_code: int = 400,
|
||||
):
|
||||
super().__init__(
|
||||
detail=[
|
||||
{
|
||||
"detail": detail,
|
||||
"status": str(status_code),
|
||||
"source": {"pointer": pointer},
|
||||
"code": code,
|
||||
}
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
class InvitationTokenExpiredException(APIException):
|
||||
status_code = status.HTTP_410_GONE
|
||||
default_detail = "The invitation token has expired and is no longer valid."
|
||||
default_code = "token_expired"
|
||||
|
||||
|
||||
# Task Management Exceptions (non-HTTP)
|
||||
class TaskManagementError(Exception):
|
||||
"""Base exception for task management errors."""
|
||||
|
||||
def __init__(self, task=None):
|
||||
self.task = task
|
||||
super().__init__()
|
||||
|
||||
|
||||
class TaskFailedException(TaskManagementError):
|
||||
"""Raised when a task has failed."""
|
||||
|
||||
|
||||
class TaskNotFoundException(TaskManagementError):
|
||||
"""Raised when a task is not found."""
|
||||
|
||||
|
||||
class TaskInProgressException(TaskManagementError):
|
||||
"""Raised when a task is running but there's no related Task object to return."""
|
||||
|
||||
def __init__(self, task_result=None):
|
||||
self.task_result = task_result
|
||||
super().__init__()
|
||||
|
||||
|
||||
# Provider connection errors
|
||||
class ProviderConnectionError(Exception):
|
||||
"""Base exception for provider connection errors."""
|
||||
|
||||
|
||||
def custom_exception_handler(exc, context):
|
||||
if isinstance(exc, django_validation_error):
|
||||
if hasattr(exc, "error_dict"):
|
||||
exc = ValidationError(exc.message_dict)
|
||||
else:
|
||||
exc = ValidationError(detail=exc.messages[0], code=exc.code)
|
||||
# Force 401 status for AuthenticationFailed exceptions regardless of the authentication backend
|
||||
elif isinstance(exc, (AuthenticationFailed, NotAuthenticated, TokenError)):
|
||||
exc.status_code = status.HTTP_401_UNAUTHORIZED
|
||||
if isinstance(exc, (TokenError, InvalidToken)):
|
||||
if (
|
||||
hasattr(exc, "detail")
|
||||
and isinstance(exc.detail, dict)
|
||||
and "messages" in exc.detail
|
||||
):
|
||||
exc.detail["messages"] = [
|
||||
message_item["message"] for message_item in exc.detail["messages"]
|
||||
]
|
||||
return exception_handler(exc, context)
|
||||
|
||||
|
||||
class ConflictException(APIException):
|
||||
status_code = status.HTTP_409_CONFLICT
|
||||
default_detail = "A conflict occurred. The resource already exists."
|
||||
default_code = "conflict"
|
||||
|
||||
def __init__(self, detail=None, code=None, pointer=None):
|
||||
error_detail = {
|
||||
"detail": detail or self.default_detail,
|
||||
"status": self.status_code,
|
||||
"code": self.default_code,
|
||||
}
|
||||
|
||||
if pointer:
|
||||
error_detail["source"] = {"pointer": pointer}
|
||||
|
||||
super().__init__(detail=[error_detail])
|
||||
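A hedged usage sketch (not in the original file) of the JSON:API-style error payloads these exceptions produce; the attribute name "alias" is a placeholder.

# Illustrative only; raising ConflictException from a view yields a 409 with a
# JSON:API error object pointing at the offending attribute.
raise ConflictException(
    detail="A provider with this alias already exists.",
    pointer="/data/attributes/alias",
)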
@@ -1,930 +0,0 @@
|
||||
from datetime import date, datetime, timedelta, timezone
|
||||
|
||||
from dateutil.parser import parse
|
||||
from django.conf import settings
|
||||
from django.db.models import F, Q
|
||||
from django_filters.rest_framework import (
|
||||
BaseInFilter,
|
||||
BooleanFilter,
|
||||
CharFilter,
|
||||
ChoiceFilter,
|
||||
DateFilter,
|
||||
FilterSet,
|
||||
UUIDFilter,
|
||||
)
|
||||
from rest_framework_json_api.django_filters.backends import DjangoFilterBackend
|
||||
from rest_framework_json_api.serializers import ValidationError
|
||||
|
||||
from api.db_utils import (
|
||||
FindingDeltaEnumField,
|
||||
InvitationStateEnumField,
|
||||
ProviderEnumField,
|
||||
SeverityEnumField,
|
||||
StatusEnumField,
|
||||
)
|
||||
from api.models import (
|
||||
ComplianceRequirementOverview,
|
||||
Finding,
|
||||
Integration,
|
||||
Invitation,
|
||||
Membership,
|
||||
OverviewStatusChoices,
|
||||
PermissionChoices,
|
||||
Processor,
|
||||
Provider,
|
||||
ProviderGroup,
|
||||
ProviderSecret,
|
||||
Resource,
|
||||
ResourceTag,
|
||||
Role,
|
||||
Scan,
|
||||
ScanSummary,
|
||||
SeverityChoices,
|
||||
StateChoices,
|
||||
StatusChoices,
|
||||
Task,
|
||||
TenantAPIKey,
|
||||
User,
|
||||
)
|
||||
from api.rls import Tenant
|
||||
from api.uuid_utils import (
|
||||
datetime_to_uuid7,
|
||||
transform_into_uuid7,
|
||||
uuid7_end,
|
||||
uuid7_range,
|
||||
uuid7_start,
|
||||
)
|
||||
from api.v1.serializers import TaskBase
|
||||
|
||||
|
||||
class CustomDjangoFilterBackend(DjangoFilterBackend):
|
||||
def to_html(self, _request, _queryset, _view):
|
||||
"""Override this method to use the Browsable API in dev environments.
|
||||
|
||||
This disables the HTML render for the default filter.
|
||||
"""
|
||||
return None
|
||||
|
||||
def get_filterset_class(self, view, queryset=None):
|
||||
# Check if the view has 'get_filterset_class' method
|
||||
if hasattr(view, "get_filterset_class"):
|
||||
return view.get_filterset_class()
|
||||
# Fallback to the default implementation
|
||||
return super().get_filterset_class(view, queryset)
|
||||
|
||||
|
||||
class UUIDInFilter(BaseInFilter, UUIDFilter):
|
||||
pass
|
||||
|
||||
|
||||
class CharInFilter(BaseInFilter, CharFilter):
|
||||
pass
|
||||
|
||||
|
||||
class ChoiceInFilter(BaseInFilter, ChoiceFilter):
|
||||
pass
|
||||
|
||||
|
||||
class CommonFindingFilters(FilterSet):
|
||||
# We filter providers from the scan in findings
|
||||
provider = UUIDFilter(field_name="scan__provider__id", lookup_expr="exact")
|
||||
provider__in = UUIDInFilter(field_name="scan__provider__id", lookup_expr="in")
|
||||
provider_type = ChoiceFilter(
|
||||
choices=Provider.ProviderChoices.choices, field_name="scan__provider__provider"
|
||||
)
|
||||
provider_type__in = ChoiceInFilter(
|
||||
choices=Provider.ProviderChoices.choices, field_name="scan__provider__provider"
|
||||
)
|
||||
provider_uid = CharFilter(field_name="scan__provider__uid", lookup_expr="exact")
|
||||
provider_uid__in = CharInFilter(field_name="scan__provider__uid", lookup_expr="in")
|
||||
provider_uid__icontains = CharFilter(
|
||||
field_name="scan__provider__uid", lookup_expr="icontains"
|
||||
)
|
||||
provider_alias = CharFilter(field_name="scan__provider__alias", lookup_expr="exact")
|
||||
provider_alias__in = CharInFilter(
|
||||
field_name="scan__provider__alias", lookup_expr="in"
|
||||
)
|
||||
provider_alias__icontains = CharFilter(
|
||||
field_name="scan__provider__alias", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
|
||||
|
||||
uid = CharFilter(field_name="uid")
|
||||
delta = ChoiceFilter(choices=Finding.DeltaChoices.choices)
|
||||
status = ChoiceFilter(choices=StatusChoices.choices)
|
||||
severity = ChoiceFilter(choices=SeverityChoices)
|
||||
impact = ChoiceFilter(choices=SeverityChoices)
|
||||
muted = BooleanFilter(
|
||||
help_text="If this filter is not provided, muted and non-muted findings will be returned."
|
||||
)
|
||||
|
||||
resources = UUIDInFilter(field_name="resource__id", lookup_expr="in")
|
||||
|
||||
region = CharFilter(method="filter_resource_region")
|
||||
region__in = CharInFilter(field_name="resource_regions", lookup_expr="overlap")
|
||||
region__icontains = CharFilter(
|
||||
field_name="resource_regions", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
service = CharFilter(method="filter_resource_service")
|
||||
service__in = CharInFilter(field_name="resource_services", lookup_expr="overlap")
|
||||
service__icontains = CharFilter(
|
||||
field_name="resource_services", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
resource_uid = CharFilter(field_name="resources__uid")
|
||||
resource_uid__in = CharInFilter(field_name="resources__uid", lookup_expr="in")
|
||||
resource_uid__icontains = CharFilter(
|
||||
field_name="resources__uid", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
resource_name = CharFilter(field_name="resources__name")
|
||||
resource_name__in = CharInFilter(field_name="resources__name", lookup_expr="in")
|
||||
resource_name__icontains = CharFilter(
|
||||
field_name="resources__name", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
resource_type = CharFilter(method="filter_resource_type")
|
||||
resource_type__in = CharInFilter(field_name="resource_types", lookup_expr="overlap")
|
||||
resource_type__icontains = CharFilter(
|
||||
field_name="resources__type", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
# Temporarily disabled until we implement tag filtering in the UI
|
||||
# resource_tag_key = CharFilter(field_name="resources__tags__key")
|
||||
# resource_tag_key__in = CharInFilter(
|
||||
# field_name="resources__tags__key", lookup_expr="in"
|
||||
# )
|
||||
# resource_tag_key__icontains = CharFilter(
|
||||
# field_name="resources__tags__key", lookup_expr="icontains"
|
||||
# )
|
||||
# resource_tag_value = CharFilter(field_name="resources__tags__value")
|
||||
# resource_tag_value__in = CharInFilter(
|
||||
# field_name="resources__tags__value", lookup_expr="in"
|
||||
# )
|
||||
# resource_tag_value__icontains = CharFilter(
|
||||
# field_name="resources__tags__value", lookup_expr="icontains"
|
||||
# )
|
||||
# resource_tags = CharInFilter(
|
||||
# method="filter_resource_tag",
|
||||
# lookup_expr="in",
|
||||
# help_text="Filter by resource tags `key:value` pairs.\nMultiple values may be "
|
||||
# "separated by commas.",
|
||||
# )
|
||||
|
||||
def filter_resource_service(self, queryset, name, value):
|
||||
return queryset.filter(resource_services__contains=[value])
|
||||
|
||||
def filter_resource_region(self, queryset, name, value):
|
||||
return queryset.filter(resource_regions__contains=[value])
|
||||
|
||||
def filter_resource_type(self, queryset, name, value):
|
||||
return queryset.filter(resource_types__contains=[value])
|
||||
|
||||
def filter_resource_tag(self, queryset, name, value):
|
||||
overall_query = Q()
|
||||
for key_value_pair in value:
|
||||
tag_key, tag_value = key_value_pair.split(":", 1)
|
||||
overall_query |= Q(
|
||||
resources__tags__key__icontains=tag_key,
|
||||
resources__tags__value__icontains=tag_value,
|
||||
)
|
||||
return queryset.filter(overall_query).distinct()
|
||||
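A hedged illustration of the query-string shapes these filters serve; endpoint paths and values are placeholders, and the JSON:API filter backend maps the filter[...] parameters onto the fields defined above.

# Illustrative request shapes only (paths and values are placeholders):
#   GET /api/v1/findings?filter[provider_type__in]=aws,gcp
#   GET /api/v1/findings?filter[status]=FAIL&filter[muted]=false
#   GET /api/v1/findings?filter[region__in]=us-east-1,eu-west-1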
|
||||
|
||||
class TenantFilter(FilterSet):
|
||||
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
|
||||
updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
|
||||
|
||||
class Meta:
|
||||
model = Tenant
|
||||
fields = {
|
||||
"name": ["exact", "icontains"],
|
||||
"inserted_at": ["date", "gte", "lte"],
|
||||
"updated_at": ["gte", "lte"],
|
||||
}
|
||||
|
||||
|
||||
class MembershipFilter(FilterSet):
|
||||
date_joined = DateFilter(field_name="date_joined", lookup_expr="date")
|
||||
role = ChoiceFilter(choices=Membership.RoleChoices.choices)
|
||||
|
||||
class Meta:
|
||||
model = Membership
|
||||
fields = {
|
||||
"tenant": ["exact"],
|
||||
"role": ["exact"],
|
||||
"date_joined": ["date", "gte", "lte"],
|
||||
}
|
||||
|
||||
|
||||
class ProviderFilter(FilterSet):
|
||||
inserted_at = DateFilter(
|
||||
field_name="inserted_at",
|
||||
lookup_expr="date",
|
||||
help_text="""Filter by date when the provider was added
|
||||
(format: YYYY-MM-DD)""",
|
||||
)
|
||||
updated_at = DateFilter(
|
||||
field_name="updated_at",
|
||||
lookup_expr="date",
|
||||
help_text="""Filter by date when the provider was updated
|
||||
(format: YYYY-MM-DD)""",
|
||||
)
|
||||
connected = BooleanFilter(
|
||||
help_text="""Filter by connection status. Set to True to return only
|
||||
connected providers, or False to return only providers with failed
|
||||
connections. If not specified, both connected and failed providers are
|
||||
included. Providers with no connection attempt (status is null) are
|
||||
excluded from this filter."""
|
||||
)
|
||||
provider = ChoiceFilter(choices=Provider.ProviderChoices.choices)
|
||||
provider__in = ChoiceInFilter(
|
||||
field_name="provider",
|
||||
choices=Provider.ProviderChoices.choices,
|
||||
lookup_expr="in",
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Provider
|
||||
fields = {
|
||||
"provider": ["exact", "in"],
|
||||
"id": ["exact", "in"],
|
||||
"uid": ["exact", "icontains", "in"],
|
||||
"alias": ["exact", "icontains", "in"],
|
||||
"inserted_at": ["gte", "lte"],
|
||||
"updated_at": ["gte", "lte"],
|
||||
}
|
||||
filter_overrides = {
|
||||
ProviderEnumField: {
|
||||
"filter_class": CharFilter,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
class ProviderRelationshipFilterSet(FilterSet):
|
||||
provider_type = ChoiceFilter(
|
||||
choices=Provider.ProviderChoices.choices, field_name="provider__provider"
|
||||
)
|
||||
provider_type__in = ChoiceInFilter(
|
||||
choices=Provider.ProviderChoices.choices, field_name="provider__provider"
|
||||
)
|
||||
provider_uid = CharFilter(field_name="provider__uid", lookup_expr="exact")
|
||||
provider_uid__in = CharInFilter(field_name="provider__uid", lookup_expr="in")
|
||||
provider_uid__icontains = CharFilter(
|
||||
field_name="provider__uid", lookup_expr="icontains"
|
||||
)
|
||||
provider_alias = CharFilter(field_name="provider__alias", lookup_expr="exact")
|
||||
provider_alias__in = CharInFilter(field_name="provider__alias", lookup_expr="in")
|
||||
provider_alias__icontains = CharFilter(
|
||||
field_name="provider__alias", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
|
||||
class ProviderGroupFilter(FilterSet):
|
||||
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
|
||||
updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
|
||||
|
||||
class Meta:
|
||||
model = ProviderGroup
|
||||
fields = {
|
||||
"id": ["exact", "in"],
|
||||
"name": ["exact", "in"],
|
||||
"inserted_at": ["gte", "lte"],
|
||||
"updated_at": ["gte", "lte"],
|
||||
}
|
||||
|
||||
|
||||
class ScanFilter(ProviderRelationshipFilterSet):
|
||||
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
|
||||
completed_at = DateFilter(field_name="completed_at", lookup_expr="date")
|
||||
started_at = DateFilter(field_name="started_at", lookup_expr="date")
|
||||
next_scan_at = DateFilter(field_name="next_scan_at", lookup_expr="date")
|
||||
trigger = ChoiceFilter(choices=Scan.TriggerChoices.choices)
|
||||
state = ChoiceFilter(choices=StateChoices.choices)
|
||||
state__in = ChoiceInFilter(
|
||||
field_name="state", choices=StateChoices.choices, lookup_expr="in"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Scan
|
||||
fields = {
|
||||
"provider": ["exact", "in"],
|
||||
"name": ["exact", "icontains"],
|
||||
"started_at": ["gte", "lte"],
|
||||
"next_scan_at": ["gte", "lte"],
|
||||
"trigger": ["exact"],
|
||||
}
|
||||
|
||||
|
||||
class TaskFilter(FilterSet):
    name = CharFilter(field_name="task_runner_task__task_name", lookup_expr="exact")
    name__icontains = CharFilter(
        field_name="task_runner_task__task_name", lookup_expr="icontains"
    )
    state = ChoiceFilter(
        choices=StateChoices.choices, method="filter_state", lookup_expr="exact"
    )
    task_state_inverse_mapping_values = {
        v: k for k, v in TaskBase.state_mapping.items()
    }

    def filter_state(self, queryset, name, value):
        if value not in StateChoices:
            raise ValidationError(
                f"Invalid state value: '{value}'. Valid values are: "
                f"{', '.join(StateChoices)}"
            )

        return queryset.filter(
            task_runner_task__status=self.task_state_inverse_mapping_values[value]
        )

    class Meta:
        model = Task
        fields = []
|
||||
|
||||
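TaskFilter above translates the API-level state value back into the raw django-celery-results status column by inverting TaskBase.state_mapping. Below is a small sketch of that idiom; the concrete mapping values are illustrative assumptions, not the project's actual mapping.

# Illustrative stand-in values, not the real TaskBase.state_mapping.
state_mapping = {
    "PENDING": "scheduled",
    "STARTED": "executing",
    "SUCCESS": "completed",
    "FAILURE": "failed",
}
task_state_inverse_mapping_values = {v: k for k, v in state_mapping.items()}

# Filtering on the API value "executing" then becomes a lookup on the
# underlying task status column:
assert task_state_inverse_mapping_values["executing"] == "STARTED"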
class ResourceTagFilter(FilterSet):
|
||||
class Meta:
|
||||
model = ResourceTag
|
||||
fields = {
|
||||
"key": ["exact", "icontains"],
|
||||
"value": ["exact", "icontains"],
|
||||
}
|
||||
search = ["text_search"]
|
||||
|
||||
|
||||
class ResourceFilter(ProviderRelationshipFilterSet):
|
||||
tag_key = CharFilter(method="filter_tag_key")
|
||||
tag_value = CharFilter(method="filter_tag_value")
|
||||
tag = CharFilter(method="filter_tag")
|
||||
tags = CharFilter(method="filter_tag")
|
||||
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
|
||||
updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
|
||||
scan = UUIDFilter(field_name="provider__scan", lookup_expr="exact")
|
||||
scan__in = UUIDInFilter(field_name="provider__scan", lookup_expr="in")
|
||||
|
||||
class Meta:
|
||||
model = Resource
|
||||
fields = {
|
||||
"provider": ["exact", "in"],
|
||||
"uid": ["exact", "icontains"],
|
||||
"name": ["exact", "icontains"],
|
||||
"region": ["exact", "icontains", "in"],
|
||||
"service": ["exact", "icontains", "in"],
|
||||
"type": ["exact", "icontains", "in"],
|
||||
"inserted_at": ["gte", "lte"],
|
||||
"updated_at": ["gte", "lte"],
|
||||
}
|
||||
|
||||
def filter_queryset(self, queryset):
|
||||
if not (self.data.get("scan") or self.data.get("scan__in")) and not (
|
||||
self.data.get("updated_at")
|
||||
or self.data.get("updated_at__date")
|
||||
or self.data.get("updated_at__gte")
|
||||
or self.data.get("updated_at__lte")
|
||||
):
|
||||
raise ValidationError(
|
||||
[
|
||||
{
|
||||
"detail": "At least one date filter is required: filter[updated_at], filter[updated_at.gte], "
|
||||
"or filter[updated_at.lte].",
|
||||
"status": 400,
|
||||
"source": {"pointer": "/data/attributes/updated_at"},
|
||||
"code": "required",
|
||||
}
|
||||
]
|
||||
)
|
||||
|
||||
gte_date = (
|
||||
parse(self.data.get("updated_at__gte")).date()
|
||||
if self.data.get("updated_at__gte")
|
||||
else datetime.now(timezone.utc).date()
|
||||
)
|
||||
lte_date = (
|
||||
parse(self.data.get("updated_at__lte")).date()
|
||||
if self.data.get("updated_at__lte")
|
||||
else datetime.now(timezone.utc).date()
|
||||
)
|
||||
|
||||
if abs(lte_date - gte_date) > timedelta(
|
||||
days=settings.FINDINGS_MAX_DAYS_IN_RANGE
|
||||
):
|
||||
raise ValidationError(
|
||||
[
|
||||
{
|
||||
"detail": f"The date range cannot exceed {settings.FINDINGS_MAX_DAYS_IN_RANGE} days.",
|
||||
"status": 400,
|
||||
"source": {"pointer": "/data/attributes/updated_at"},
|
||||
"code": "invalid",
|
||||
}
|
||||
]
|
||||
)
|
||||
|
||||
return super().filter_queryset(queryset)
|
||||
|
||||
def filter_tag_key(self, queryset, name, value):
|
||||
return queryset.filter(Q(tags__key=value) | Q(tags__key__icontains=value))
|
||||
|
||||
def filter_tag_value(self, queryset, name, value):
|
||||
return queryset.filter(Q(tags__value=value) | Q(tags__value__icontains=value))
|
||||
|
||||
def filter_tag(self, queryset, name, value):
|
||||
# We won't know what the user wants to filter on just based on the value,
|
||||
# and we don't want to build special filtering logic for every possible
|
||||
# provider tag spec, so we'll just do a full text search
|
||||
return queryset.filter(tags__text_search=value)
|
||||
|
||||
|
||||
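ResourceFilter.filter_queryset above refuses unbounded listings: a request must carry either a scan filter or an updated_at window, a missing bound defaults to today, and the window may not exceed settings.FINDINGS_MAX_DAYS_IN_RANGE days. The standalone sketch below restates that guard; MAX_DAYS is an illustrative stand-in for the setting.

from datetime import date, datetime, timedelta, timezone

MAX_DAYS = 7  # stand-in for settings.FINDINGS_MAX_DAYS_IN_RANGE

def validate_window(gte=None, lte=None):
    # Missing bounds default to "today", mirroring filter_queryset above.
    today = datetime.now(timezone.utc).date()
    gte = gte or today
    lte = lte or today
    if abs(lte - gte) > timedelta(days=MAX_DAYS):
        raise ValueError(f"The date range cannot exceed {MAX_DAYS} days.")

validate_window(date(2024, 10, 1), date(2024, 10, 5))   # within the window
# validate_window(date(2024, 1, 1), date(2024, 10, 5))  # would raise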
class LatestResourceFilter(ProviderRelationshipFilterSet):
|
||||
tag_key = CharFilter(method="filter_tag_key")
|
||||
tag_value = CharFilter(method="filter_tag_value")
|
||||
tag = CharFilter(method="filter_tag")
|
||||
tags = CharFilter(method="filter_tag")
|
||||
|
||||
class Meta:
|
||||
model = Resource
|
||||
fields = {
|
||||
"provider": ["exact", "in"],
|
||||
"uid": ["exact", "icontains"],
|
||||
"name": ["exact", "icontains"],
|
||||
"region": ["exact", "icontains", "in"],
|
||||
"service": ["exact", "icontains", "in"],
|
||||
"type": ["exact", "icontains", "in"],
|
||||
}
|
||||
|
||||
def filter_tag_key(self, queryset, name, value):
|
||||
return queryset.filter(Q(tags__key=value) | Q(tags__key__icontains=value))
|
||||
|
||||
def filter_tag_value(self, queryset, name, value):
|
||||
return queryset.filter(Q(tags__value=value) | Q(tags__value__icontains=value))
|
||||
|
||||
def filter_tag(self, queryset, name, value):
|
||||
# We won't know what the user wants to filter on just based on the value,
|
||||
# and we don't want to build special filtering logic for every possible
|
||||
# provider tag spec, so we'll just do a full text search
|
||||
return queryset.filter(tags__text_search=value)
|
||||
|
||||
|
||||
class FindingFilter(CommonFindingFilters):
|
||||
scan = UUIDFilter(method="filter_scan_id")
|
||||
scan__in = UUIDInFilter(method="filter_scan_id_in")
|
||||
|
||||
inserted_at = DateFilter(method="filter_inserted_at", lookup_expr="date")
|
||||
inserted_at__date = DateFilter(method="filter_inserted_at", lookup_expr="date")
|
||||
inserted_at__gte = DateFilter(
|
||||
method="filter_inserted_at_gte",
|
||||
help_text=f"Maximum date range is {settings.FINDINGS_MAX_DAYS_IN_RANGE} days.",
|
||||
)
|
||||
inserted_at__lte = DateFilter(
|
||||
method="filter_inserted_at_lte",
|
||||
help_text=f"Maximum date range is {settings.FINDINGS_MAX_DAYS_IN_RANGE} days.",
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Finding
|
||||
fields = {
|
||||
"id": ["exact", "in"],
|
||||
"uid": ["exact", "in"],
|
||||
"scan": ["exact", "in"],
|
||||
"delta": ["exact", "in"],
|
||||
"status": ["exact", "in"],
|
||||
"severity": ["exact", "in"],
|
||||
"impact": ["exact", "in"],
|
||||
"check_id": ["exact", "in", "icontains"],
|
||||
"inserted_at": ["date", "gte", "lte"],
|
||||
"updated_at": ["gte", "lte"],
|
||||
}
|
||||
filter_overrides = {
|
||||
FindingDeltaEnumField: {
|
||||
"filter_class": CharFilter,
|
||||
},
|
||||
StatusEnumField: {
|
||||
"filter_class": CharFilter,
|
||||
},
|
||||
SeverityEnumField: {
|
||||
"filter_class": CharFilter,
|
||||
},
|
||||
}
|
||||
|
||||
def filter_resource_type(self, queryset, name, value):
|
||||
return queryset.filter(resource_types__contains=[value])
|
||||
|
||||
def filter_resource_region(self, queryset, name, value):
|
||||
return queryset.filter(resource_regions__contains=[value])
|
||||
|
||||
def filter_resource_service(self, queryset, name, value):
|
||||
return queryset.filter(resource_services__contains=[value])
|
||||
|
||||
def filter_queryset(self, queryset):
|
||||
if not (self.data.get("scan") or self.data.get("scan__in")) and not (
|
||||
self.data.get("inserted_at")
|
||||
or self.data.get("inserted_at__date")
|
||||
or self.data.get("inserted_at__gte")
|
||||
or self.data.get("inserted_at__lte")
|
||||
):
|
||||
raise ValidationError(
|
||||
[
|
||||
{
|
||||
"detail": "At least one date filter is required: filter[inserted_at], filter[inserted_at.gte], "
|
||||
"or filter[inserted_at.lte].",
|
||||
"status": 400,
|
||||
"source": {"pointer": "/data/attributes/inserted_at"},
|
||||
"code": "required",
|
||||
}
|
||||
]
|
||||
)
|
||||
|
||||
gte_date = (
|
||||
datetime.strptime(self.data.get("inserted_at__gte"), "%Y-%m-%d").date()
|
||||
if self.data.get("inserted_at__gte")
|
||||
else datetime.now(timezone.utc).date()
|
||||
)
|
||||
lte_date = (
|
||||
datetime.strptime(self.data.get("inserted_at__lte"), "%Y-%m-%d").date()
|
||||
if self.data.get("inserted_at__lte")
|
||||
else datetime.now(timezone.utc).date()
|
||||
)
|
||||
|
||||
if abs(lte_date - gte_date) > timedelta(
|
||||
days=settings.FINDINGS_MAX_DAYS_IN_RANGE
|
||||
):
|
||||
raise ValidationError(
|
||||
[
|
||||
{
|
||||
"detail": f"The date range cannot exceed {settings.FINDINGS_MAX_DAYS_IN_RANGE} days.",
|
||||
"status": 400,
|
||||
"source": {"pointer": "/data/attributes/inserted_at"},
|
||||
"code": "invalid",
|
||||
}
|
||||
]
|
||||
)
|
||||
|
||||
return super().filter_queryset(queryset)
|
||||
|
||||
    # Convert filter values to UUIDv7 values for use with partitioning
    def filter_scan_id(self, queryset, name, value):
        try:
            value_uuid = transform_into_uuid7(value)
            start = uuid7_start(value_uuid)
            end = uuid7_end(value_uuid, settings.FINDINGS_TABLE_PARTITION_MONTHS)
        except ValidationError as validation_error:
            detail = str(validation_error.detail[0])
            raise ValidationError(
                [
                    {
                        "detail": detail,
                        "status": 400,
                        "source": {"pointer": "/data/relationships/scan"},
                        "code": "invalid",
                    }
                ]
            )

        return (
            queryset.filter(id__gte=start).filter(id__lt=end).filter(scan_id=value_uuid)
        )
|
||||
def filter_scan_id_in(self, queryset, name, value):
|
||||
try:
|
||||
uuid_list = [
|
||||
transform_into_uuid7(value_uuid)
|
||||
for value_uuid in value
|
||||
if value_uuid is not None
|
||||
]
|
||||
|
||||
start, end = uuid7_range(uuid_list)
|
||||
except ValidationError as validation_error:
|
||||
detail = str(validation_error.detail[0])
|
||||
raise ValidationError(
|
||||
[
|
||||
{
|
||||
"detail": detail,
|
||||
"status": 400,
|
||||
"source": {"pointer": "/data/relationships/scan"},
|
||||
"code": "invalid",
|
||||
}
|
||||
]
|
||||
)
|
||||
if start == end:
|
||||
return queryset.filter(id__gte=start).filter(scan_id__in=uuid_list)
|
||||
else:
|
||||
return (
|
||||
queryset.filter(id__gte=start)
|
||||
.filter(id__lt=end)
|
||||
.filter(scan_id__in=uuid_list)
|
||||
)
|
||||
|
||||
def filter_inserted_at(self, queryset, name, value):
|
||||
datetime_value = self.maybe_date_to_datetime(value)
|
||||
start = uuid7_start(datetime_to_uuid7(datetime_value))
|
||||
end = uuid7_start(datetime_to_uuid7(datetime_value + timedelta(days=1)))
|
||||
|
||||
return queryset.filter(id__gte=start, id__lt=end)
|
||||
|
||||
def filter_inserted_at_gte(self, queryset, name, value):
|
||||
datetime_value = self.maybe_date_to_datetime(value)
|
||||
start = uuid7_start(datetime_to_uuid7(datetime_value))
|
||||
|
||||
return queryset.filter(id__gte=start)
|
||||
|
||||
def filter_inserted_at_lte(self, queryset, name, value):
|
||||
datetime_value = self.maybe_date_to_datetime(value)
|
||||
end = uuid7_start(datetime_to_uuid7(datetime_value + timedelta(days=1)))
|
||||
|
||||
return queryset.filter(id__lt=end)
|
||||
|
||||
@staticmethod
|
||||
def maybe_date_to_datetime(value):
|
||||
dt = value
|
||||
if isinstance(value, date):
|
||||
dt = datetime.combine(value, datetime.min.time(), tzinfo=timezone.utc)
|
||||
return dt
|
||||
|
||||
|
||||
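The filter_scan_id and filter_inserted_at* methods above bound the findings query by UUIDv7 primary-key values derived from timestamps, so PostgreSQL can prune partitions instead of scanning every one. The helper below is an independent sketch of that idea; it does not reproduce the project's own transform_into_uuid7 / uuid7_start helpers.

import uuid
from datetime import datetime, timezone

def uuid7_lower_bound(dt: datetime) -> uuid.UUID:
    # A UUIDv7 stores the Unix timestamp in milliseconds in its top 48 bits,
    # so zeroing the random bits gives the smallest valid UUIDv7 for that
    # instant, usable as an inclusive lower bound on a UUIDv7 primary key.
    ms = int(dt.timestamp() * 1000)
    value = (ms & 0xFFFFFFFFFFFF) << 80  # timestamp in bits 127..80
    value |= 0x7 << 76                   # version 7 nibble
    value |= 0x2 << 62                   # RFC 4122 variant bits
    return uuid.UUID(int=value)

# Every finding inserted on or after 2024-10-18 then satisfies
# id >= uuid7_lower_bound(datetime(2024, 10, 18, tzinfo=timezone.utc)).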
class LatestFindingFilter(CommonFindingFilters):
|
||||
class Meta:
|
||||
model = Finding
|
||||
fields = {
|
||||
"id": ["exact", "in"],
|
||||
"uid": ["exact", "in"],
|
||||
"delta": ["exact", "in"],
|
||||
"status": ["exact", "in"],
|
||||
"severity": ["exact", "in"],
|
||||
"impact": ["exact", "in"],
|
||||
"check_id": ["exact", "in", "icontains"],
|
||||
}
|
||||
filter_overrides = {
|
||||
FindingDeltaEnumField: {
|
||||
"filter_class": CharFilter,
|
||||
},
|
||||
StatusEnumField: {
|
||||
"filter_class": CharFilter,
|
||||
},
|
||||
SeverityEnumField: {
|
||||
"filter_class": CharFilter,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
class ProviderSecretFilter(FilterSet):
|
||||
inserted_at = DateFilter(
|
||||
field_name="inserted_at",
|
||||
lookup_expr="date",
|
||||
help_text="Filter by date when the secret was added (format: YYYY-MM-DD)",
|
||||
)
|
||||
updated_at = DateFilter(
|
||||
field_name="updated_at",
|
||||
lookup_expr="date",
|
||||
help_text="Filter by date when the secret was updated (format: YYYY-MM-DD)",
|
||||
)
|
||||
provider = UUIDFilter(field_name="provider__id", lookup_expr="exact")
|
||||
|
||||
class Meta:
|
||||
model = ProviderSecret
|
||||
fields = {
|
||||
"name": ["exact", "icontains"],
|
||||
}
|
||||
|
||||
|
||||
class InvitationFilter(FilterSet):
|
||||
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
|
||||
updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
|
||||
expires_at = DateFilter(field_name="expires_at", lookup_expr="date")
|
||||
state = ChoiceFilter(choices=Invitation.State.choices)
|
||||
state__in = ChoiceInFilter(choices=Invitation.State.choices, lookup_expr="in")
|
||||
|
||||
class Meta:
|
||||
model = Invitation
|
||||
fields = {
|
||||
"email": ["exact", "icontains"],
|
||||
"inserted_at": ["date", "gte", "lte"],
|
||||
"updated_at": ["date", "gte", "lte"],
|
||||
"expires_at": ["date", "gte", "lte"],
|
||||
"inviter": ["exact"],
|
||||
}
|
||||
filter_overrides = {
|
||||
InvitationStateEnumField: {
|
||||
"filter_class": CharFilter,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class UserFilter(FilterSet):
|
||||
date_joined = DateFilter(field_name="date_joined", lookup_expr="date")
|
||||
|
||||
class Meta:
|
||||
model = User
|
||||
fields = {
|
||||
"name": ["exact", "icontains"],
|
||||
"email": ["exact", "icontains"],
|
||||
"company_name": ["exact", "icontains"],
|
||||
"date_joined": ["date", "gte", "lte"],
|
||||
"is_active": ["exact"],
|
||||
}
|
||||
|
||||
|
||||
class RoleFilter(FilterSet):
|
||||
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
|
||||
updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
|
||||
permission_state = ChoiceFilter(
|
||||
choices=PermissionChoices.choices, method="filter_permission_state"
|
||||
)
|
||||
|
||||
def filter_permission_state(self, queryset, name, value):
|
||||
return Role.filter_by_permission_state(queryset, value)
|
||||
|
||||
class Meta:
|
||||
model = Role
|
||||
fields = {
|
||||
"id": ["exact", "in"],
|
||||
"name": ["exact", "in"],
|
||||
"inserted_at": ["gte", "lte"],
|
||||
"updated_at": ["gte", "lte"],
|
||||
}
|
||||
|
||||
|
||||
class ComplianceOverviewFilter(FilterSet):
|
||||
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
|
||||
scan_id = UUIDFilter(field_name="scan_id")
|
||||
region = CharFilter(field_name="region")
|
||||
|
||||
class Meta:
|
||||
model = ComplianceRequirementOverview
|
||||
fields = {
|
||||
"inserted_at": ["date", "gte", "lte"],
|
||||
"compliance_id": ["exact", "icontains"],
|
||||
"framework": ["exact", "iexact", "icontains"],
|
||||
"version": ["exact", "icontains"],
|
||||
"region": ["exact", "icontains", "in"],
|
||||
}
|
||||
|
||||
|
||||
class ScanSummaryFilter(FilterSet):
|
||||
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
|
||||
provider_id = UUIDFilter(field_name="scan__provider__id", lookup_expr="exact")
|
||||
provider_id__in = UUIDInFilter(field_name="scan__provider__id", lookup_expr="in")
|
||||
provider_type = ChoiceFilter(
|
||||
field_name="scan__provider__provider", choices=Provider.ProviderChoices.choices
|
||||
)
|
||||
provider_type__in = ChoiceInFilter(
|
||||
field_name="scan__provider__provider", choices=Provider.ProviderChoices.choices
|
||||
)
|
||||
region = CharFilter(field_name="region")
|
||||
|
||||
class Meta:
|
||||
model = ScanSummary
|
||||
fields = {
|
||||
"inserted_at": ["date", "gte", "lte"],
|
||||
"region": ["exact", "icontains", "in"],
|
||||
}
|
||||
|
||||
|
||||
class ScanSummarySeverityFilter(ScanSummaryFilter):
|
||||
"""Filter for findings_severity ScanSummary endpoint - includes status filters"""
|
||||
|
||||
# Custom status filters - only for severity grouping endpoint
|
||||
status = ChoiceFilter(method="filter_status", choices=OverviewStatusChoices.choices)
|
||||
status__in = CharInFilter(method="filter_status_in", lookup_expr="in")
|
||||
|
||||
def filter_status(self, queryset, name, value):
|
||||
# Validate the status value
|
||||
if value not in [choice[0] for choice in OverviewStatusChoices.choices]:
|
||||
raise ValidationError(f"Invalid status value: {value}")
|
||||
|
||||
# Apply the filter by annotating the queryset with the status field
|
||||
if value == OverviewStatusChoices.FAIL:
|
||||
return queryset.annotate(status_count=F("fail"))
|
||||
elif value == OverviewStatusChoices.PASS:
|
||||
return queryset.annotate(status_count=F("_pass"))
|
||||
else:
|
||||
return queryset.annotate(status_count=F("total"))
|
||||
|
||||
def filter_status_in(self, queryset, name, value):
|
||||
# Validate the status values
|
||||
valid_statuses = [choice[0] for choice in OverviewStatusChoices.choices]
|
||||
for status_val in value:
|
||||
if status_val not in valid_statuses:
|
||||
raise ValidationError(f"Invalid status value: {status_val}")
|
||||
|
||||
# If all statuses or no valid statuses, use total
|
||||
if (
|
||||
set(value)
|
||||
>= {
|
||||
OverviewStatusChoices.FAIL,
|
||||
OverviewStatusChoices.PASS,
|
||||
}
|
||||
or not value
|
||||
):
|
||||
return queryset.annotate(status_count=F("total"))
|
||||
|
||||
# Build the sum expression based on status values
|
||||
sum_expression = None
|
||||
for status in value:
|
||||
if status == OverviewStatusChoices.FAIL:
|
||||
field_expr = F("fail")
|
||||
elif status == OverviewStatusChoices.PASS:
|
||||
field_expr = F("_pass")
|
||||
else:
|
||||
continue
|
||||
|
||||
if sum_expression is None:
|
||||
sum_expression = field_expr
|
||||
else:
|
||||
sum_expression = sum_expression + field_expr
|
||||
|
||||
if sum_expression is None:
|
||||
return queryset.annotate(status_count=F("total"))
|
||||
|
||||
return queryset.annotate(status_count=sum_expression)
|
||||
|
||||
class Meta:
|
||||
model = ScanSummary
|
||||
fields = {
|
||||
"inserted_at": ["date", "gte", "lte"],
|
||||
"region": ["exact", "icontains", "in"],
|
||||
}
|
||||
|
||||
|
||||
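The status_count annotation produced by ScanSummarySeverityFilter above is only a per-row alias for fail, _pass, or their sum; a consumer still has to aggregate it. The sketch below shows one hypothetical consumer. The severity grouping and the literal "FAIL"/"PASS" strings are assumptions for illustration, not taken from this module.

from django.db.models import F, Sum

def severity_breakdown(queryset, statuses):
    # statuses is e.g. ["FAIL"]; requesting both statuses (or none) falls back
    # to the precomputed "total" column, mirroring filter_status_in above.
    if not statuses or set(statuses) >= {"FAIL", "PASS"}:
        expr = F("total")
    else:
        expr = None
        for status in statuses:
            field = F("fail") if status == "FAIL" else F("_pass")
            expr = field if expr is None else expr + field
    return (
        queryset.annotate(status_count=expr)
        .values("severity")
        .annotate(count=Sum("status_count"))
    )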
class ServiceOverviewFilter(ScanSummaryFilter):
|
||||
def is_valid(self):
|
||||
# Check if at least one of the inserted_at filters is present
|
||||
inserted_at_filters = [
|
||||
self.data.get("inserted_at"),
|
||||
self.data.get("inserted_at__gte"),
|
||||
self.data.get("inserted_at__lte"),
|
||||
]
|
||||
if not any(inserted_at_filters):
|
||||
raise ValidationError(
|
||||
{
|
||||
"inserted_at": [
|
||||
"At least one of filter[inserted_at], filter[inserted_at__gte], or "
|
||||
"filter[inserted_at__lte] is required."
|
||||
]
|
||||
}
|
||||
)
|
||||
return super().is_valid()
|
||||
|
||||
|
||||
class IntegrationFilter(FilterSet):
|
||||
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
|
||||
integration_type = ChoiceFilter(choices=Integration.IntegrationChoices.choices)
|
||||
integration_type__in = ChoiceInFilter(
|
||||
choices=Integration.IntegrationChoices.choices,
|
||||
field_name="integration_type",
|
||||
lookup_expr="in",
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Integration
|
||||
fields = {
|
||||
"inserted_at": ["date", "gte", "lte"],
|
||||
}
|
||||
|
||||
|
||||
class ProcessorFilter(FilterSet):
|
||||
processor_type = ChoiceFilter(choices=Processor.ProcessorChoices.choices)
|
||||
processor_type__in = ChoiceInFilter(
|
||||
choices=Processor.ProcessorChoices.choices,
|
||||
field_name="processor_type",
|
||||
lookup_expr="in",
|
||||
)
|
||||
|
||||
|
||||
class IntegrationJiraFindingsFilter(FilterSet):
|
||||
# To be expanded as needed
|
||||
finding_id = UUIDFilter(field_name="id", lookup_expr="exact")
|
||||
finding_id__in = UUIDInFilter(field_name="id", lookup_expr="in")
|
||||
|
||||
class Meta:
|
||||
model = Finding
|
||||
fields = {}
|
||||
|
||||
def filter_queryset(self, queryset):
|
||||
# Validate that there is at least one filter provided
|
||||
if not self.data:
|
||||
raise ValidationError(
|
||||
{
|
||||
"findings": "No finding filters provided. At least one filter is required."
|
||||
}
|
||||
)
|
||||
return super().filter_queryset(queryset)
|
||||
|
||||
|
||||
class TenantApiKeyFilter(FilterSet):
|
||||
inserted_at = DateFilter(field_name="created", lookup_expr="date")
|
||||
inserted_at__gte = DateFilter(field_name="created", lookup_expr="gte")
|
||||
inserted_at__lte = DateFilter(field_name="created", lookup_expr="lte")
|
||||
expires_at = DateFilter(field_name="expiry_date", lookup_expr="date")
|
||||
expires_at__gte = DateFilter(field_name="expiry_date", lookup_expr="gte")
|
||||
expires_at__lte = DateFilter(field_name="expiry_date", lookup_expr="lte")
|
||||
|
||||
class Meta:
|
||||
model = TenantAPIKey
|
||||
fields = {
|
||||
"prefix": ["exact", "icontains"],
|
||||
"revoked": ["exact"],
|
||||
"name": ["exact", "icontains"],
|
||||
}
|
||||
@@ -1,41 +0,0 @@
|
||||
[
|
||||
{
|
||||
"model": "api.user",
|
||||
"pk": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
|
||||
"fields": {
|
||||
"password": "pbkdf2_sha256$870000$Z63pGJ7nre48hfcGbk5S0O$rQpKczAmijs96xa+gPVJifpT3Fetb8DOusl5Eq6gxac=",
|
||||
"last_login": null,
|
||||
"name": "Devie Prowlerson",
|
||||
"email": "dev@prowler.com",
|
||||
"company_name": "Prowler Developers",
|
||||
"is_active": true,
|
||||
"date_joined": "2024-09-17T09:04:20.850Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.user",
|
||||
"pk": "b6493a3a-c997-489b-8b99-278bf74de9f6",
|
||||
"fields": {
|
||||
"password": "pbkdf2_sha256$870000$Z63pGJ7nre48hfcGbk5S0O$rQpKczAmijs96xa+gPVJifpT3Fetb8DOusl5Eq6gxac=",
|
||||
"last_login": null,
|
||||
"name": "Devietoo Prowlerson",
|
||||
"email": "dev2@prowler.com",
|
||||
"company_name": "Prowler Developers",
|
||||
"is_active": true,
|
||||
"date_joined": "2024-09-18T09:04:20.850Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.user",
|
||||
"pk": "6d4f8a91-3c2e-4b5a-8f7d-1e9c5b2a4d6f",
|
||||
"fields": {
|
||||
"password": "pbkdf2_sha256$870000$Z63pGJ7nre48hfcGbk5S0O$rQpKczAmijs96xa+gPVJifpT3Fetb8DOusl5Eq6gxac=",
|
||||
"last_login": null,
|
||||
"name": "E2E Test User",
|
||||
"email": "e2e@prowler.com",
|
||||
"company_name": "Prowler E2E Tests",
|
||||
"is_active": true,
|
||||
"date_joined": "2024-01-01T00:00:00.850Z"
|
||||
}
|
||||
}
|
||||
]
|
||||
@@ -1,69 +0,0 @@
|
||||
[
|
||||
{
|
||||
"model": "api.tenant",
|
||||
"pk": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"fields": {
|
||||
"inserted_at": "2024-03-21T23:00:00Z",
|
||||
"updated_at": "2024-03-21T23:00:00Z",
|
||||
"name": "Tenant1"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.tenant",
|
||||
"pk": "0412980b-06e3-436a-ab98-3c9b1d0333d3",
|
||||
"fields": {
|
||||
"inserted_at": "2024-03-21T23:00:00Z",
|
||||
"updated_at": "2024-03-21T23:00:00Z",
|
||||
"name": "Tenant2"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.membership",
|
||||
"pk": "2b0db93a-7e0b-4edf-a851-ea448676b7eb",
|
||||
"fields": {
|
||||
"user": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
|
||||
"tenant": "0412980b-06e3-436a-ab98-3c9b1d0333d3",
|
||||
"role": "owner",
|
||||
"date_joined": "2024-09-19T11:03:59.712Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.membership",
|
||||
"pk": "797d7cee-abc9-4598-98bb-4bf4bfb97f27",
|
||||
"fields": {
|
||||
"user": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"role": "owner",
|
||||
"date_joined": "2024-09-19T11:02:59.712Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.membership",
|
||||
"pk": "dea37563-7009-4dcf-9f18-25efb41462a7",
|
||||
"fields": {
|
||||
"user": "b6493a3a-c997-489b-8b99-278bf74de9f6",
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"role": "member",
|
||||
"date_joined": "2024-09-19T11:03:59.712Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.tenant",
|
||||
"pk": "7c8f94a3-e2d1-4b3a-9f87-2c4d5e6f1a2b",
|
||||
"fields": {
|
||||
"inserted_at": "2024-01-01T00:00:00Z",
|
||||
"updated_at": "2024-01-01T00:00:00Z",
|
||||
"name": "E2E Test Tenant"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.membership",
|
||||
"pk": "9b1a2c3d-4e5f-6789-abc1-23456789def0",
|
||||
"fields": {
|
||||
"user": "6d4f8a91-3c2e-4b5a-8f7d-1e9c5b2a4d6f",
|
||||
"tenant": "7c8f94a3-e2d1-4b3a-9f87-2c4d5e6f1a2b",
|
||||
"role": "owner",
|
||||
"date_joined": "2024-01-01T00:00:00.000Z"
|
||||
}
|
||||
}
|
||||
]
|
||||
@@ -1,193 +0,0 @@
|
||||
[
|
||||
{
|
||||
"model": "api.provider",
|
||||
"pk": "37b065f8-26b0-4218-a665-0b23d07b27d9",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-08-01T17:20:27.050Z",
|
||||
"updated_at": "2024-08-01T17:20:27.050Z",
|
||||
"provider": "gcp",
|
||||
"uid": "a12322-test321",
|
||||
"alias": "gcp_testing_2",
|
||||
"connected": null,
|
||||
"connection_last_checked_at": null,
|
||||
"metadata": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.provider",
|
||||
"pk": "8851db6b-42e5-4533-aa9e-30a32d67e875",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-08-01T17:19:42.453Z",
|
||||
"updated_at": "2024-08-01T17:19:42.453Z",
|
||||
"provider": "gcp",
|
||||
"uid": "a12345-test123",
|
||||
"alias": "gcp_testing_1",
|
||||
"connected": null,
|
||||
"connection_last_checked_at": null,
|
||||
"metadata": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.provider",
|
||||
"pk": "b85601a8-4b45-4194-8135-03fb980ef428",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-08-01T17:19:09.556Z",
|
||||
"updated_at": "2024-08-01T17:19:09.556Z",
|
||||
"provider": "aws",
|
||||
"uid": "123456789020",
|
||||
"alias": "aws_testing_2",
|
||||
"connected": null,
|
||||
"connection_last_checked_at": null,
|
||||
"metadata": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.provider",
|
||||
"pk": "baa7b895-8bac-4f47-b010-4226d132856e",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-08-01T17:20:16.962Z",
|
||||
"updated_at": "2024-08-01T17:20:16.962Z",
|
||||
"provider": "gcp",
|
||||
"uid": "a12322-test123",
|
||||
"alias": "gcp_testing_3",
|
||||
"connected": null,
|
||||
"connection_last_checked_at": null,
|
||||
"metadata": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.provider",
|
||||
"pk": "d7c7ea89-d9af-423b-a364-1290dcad5a01",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-08-01T17:18:58.132Z",
|
||||
"updated_at": "2024-08-01T17:18:58.132Z",
|
||||
"provider": "aws",
|
||||
"uid": "123456789015",
|
||||
"alias": "aws_testing_1",
|
||||
"connected": null,
|
||||
"connection_last_checked_at": null,
|
||||
"metadata": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.provider",
|
||||
"pk": "1b59e032-3eb6-4694-93a5-df84cd9b3ce2",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-08-06T16:03:26.176Z",
|
||||
"updated_at": "2024-08-06T16:03:26.176Z",
|
||||
"provider": "azure",
|
||||
"uid": "8851db6b-42e5-4533-aa9e-30a32d67e875",
|
||||
"alias": "azure_testing",
|
||||
"connected": null,
|
||||
"connection_last_checked_at": null,
|
||||
"metadata": {},
|
||||
"scanner_args": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.provider",
|
||||
"pk": "26e55a24-cb2c-4cef-ac87-6f91fddb2c97",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-08-06T16:03:07.037Z",
|
||||
"updated_at": "2024-08-06T16:03:07.037Z",
|
||||
"provider": "kubernetes",
|
||||
"uid": "kubernetes-test-12345",
|
||||
"alias": "k8s_testing",
|
||||
"connected": null,
|
||||
"connection_last_checked_at": null,
|
||||
"metadata": {},
|
||||
"scanner_args": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.provider",
|
||||
"pk": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:45:26.352Z",
|
||||
"updated_at": "2024-10-18T11:16:23.533Z",
|
||||
"provider": "aws",
|
||||
"uid": "106908755759",
|
||||
"alias": "real testing aws provider",
|
||||
"connected": true,
|
||||
"connection_last_checked_at": "2024-10-18T11:16:23.503Z",
|
||||
"metadata": {},
|
||||
"scanner_args": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.provider",
|
||||
"pk": "7791914f-d646-4fe2-b2ed-73f2c6499a36",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:45:26.352Z",
|
||||
"updated_at": "2024-10-18T11:16:23.533Z",
|
||||
"provider": "kubernetes",
|
||||
"uid": "gke_lucky-coast-419309_us-central1_autopilot-cluster-2",
|
||||
"alias": "k8s_testing_2",
|
||||
"connected": true,
|
||||
"connection_last_checked_at": "2024-10-18T11:16:23.503Z",
|
||||
"metadata": {},
|
||||
"scanner_args": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.providersecret",
|
||||
"pk": "11491b47-75ae-4f71-ad8d-3e630a72182e",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-11T08:03:05.026Z",
|
||||
"updated_at": "2024-10-11T08:04:47.033Z",
|
||||
"name": "GCP static secrets",
|
||||
"secret_type": "static",
|
||||
"_secret": "Z0FBQUFBQm5DTndmZW9KakRZUHM2UHhQN2V3RzN0QmM1cERham8yMHp5cnVTT0lzdGFyS1FuVmJXUlpYSGsyU0cxR3RMMEdQYXlYMUVsaWtqLU1OZWlaVUp6OFREYlotZTVBY3BuTlZYbm9YcUJydzAxV2p5dkpLamI1Y2tUYzA0MmJUNWxsNTBRM0E1SDRCa0pPQWVlb05YU3dfeUhkLTRmOEh3dGczOGh1ZGhQcVdZdVAtYmtoSWlwNXM4VGFoVmF3dno2X1hrbk5GZjZTWjVuWEdEZUFXeHJSQjEzbTlVakhNdzYyWTdiVEpvUEc2MTNpRzUtczhEank1eGI0b3MyMlAyaGN6dlByZmtUWHByaDNUYWFqYS1tYnNBUkRKTzBacFNSRjFuVmd5bUtFUEJhd1ZVS1ZDd2xSUV9PaEtLTnc0XzVkY2lhM01WTjQwaWdJSk9wNUJSXzQ4RUNQLXFPNy1VdzdPYkZyWkVkU3RyQjVLTS1MVHN0R3k4THNKZ2NBNExaZnl3Q1EwN2dwNGRsUXptMjB0LXUzTUpzTDE2Q1hmS0ZSN2g1ZjBPeV8taFoxNUwxc2FEcktXX0dCM1IzeUZTTHNiTmNxVXBvNWViZTJScUVWV2VYTFQ4UHlid21PY1A0UjdNMGtERkZCd0lLMlJENDMzMVZUM09DQ0twd1N3VHlZd09XLUctOWhYcFJIR1p5aUlZeEUzejc2dWRYdGNsd0xOODNqRUFEczhSTWNtWU0tdFZ1ZTExaHNHUVYtd0Zxdld1LTdKVUNINzlZTGdHODhKeVVpQmRZMHRUNTJRRWhwS1F1Y3I2X2Iwc0c1NHlXSVRLZWxreEt0dVRnOTZFMkptU2VMS1dWXzdVOVRzMUNUWXM2aFlxVDJXdGo3d2cxSVZGWlI2ZWhIZzZBcEl4bEJ6UnVHc0RYWVNHcjFZUHI5ZUYyWG9rSlo0QUVSUkFCX3h2UmtJUTFzVXJUZ25vTmk2VzdoTTNta05ucmNfTi0yR1ZxN1E2MnZJOVVKOGxmMXMzdHMxVndmSVhQbUItUHgtMVpVcHJwMU5JVHJLb0Y1aHV5OEEwS0kzQkEtcFJkdkRnWGxmZnprNFhndWg1TmQyd09yTFdTRmZ3d2ZvZFUtWXp4a2VYb3JjckFIcE13MDUzX0RHSnlzM0N2ZE5IRzJzMXFMc0k4MDRyTHdLZFlWOG9SaFF0LU43Ynd6VFlEcVNvdFZ0emJEVk10aEp4dDZFTFNFNzk0UUo2WTlVLWRGYm1fanZHaFZreHBIMmtzVjhyS0xPTk9fWHhiVTJHQXZwVlVuY3JtSjFUYUdHQzhEaHFNZXhwUHBmY0kxaUVrOHo4a0FYOTdpZVJDbFRvdFlQeWo3eFZHX1ZMZ1Myc3prU3o2c3o2eXNja1U4N0Y1T0d1REVjZFRGNTByUkgyemVCSjlQYkY2bmJ4YTZodHB0cUNzd2xZcENycUdsczBIaEZPbG1jVUlqNlM2cEE3aGpVaWswTzBDLVFGUHM5UHhvM09saWNtaDhaNVlsc3FZdktKeWlheDF5OGhTODE2N3JWamdTZG5Fa3JSQ2ZUSEVfRjZOZXdreXRZLTBZRFhleVFFeC1YUzc0cWhYeEhobGxvdnZ3Rm15WFlBWXp0dm1DeTA5eExLeEFRRXVRSXBXdTNEaWdZZ3JDenItdDhoZlFiTzI0SGZ1c01FR1FNaFVweVBKR1YxWGRUMW1Mc2JVdW9raWR6UHk2ZTBnS05pV3oyZVBjREdkY3k4ZHZPUWE5S281MkJRSHF3NnpTclZ5bl90bk1wUEh6Tkp5dXlDcE5paWRqcVhxRFVObWIzRldWOGJ2aC1CRHZpbFZrb0hjNGpCMm5POGRiS2lETUpMLUVfQlhCdTZPLW9USW1LTFlTSF9zRUJYZ1NKeFFEQjNOR215ZXJDbkFndmcxWl9rWlk9",
|
||||
"provider": "8851db6b-42e5-4533-aa9e-30a32d67e875"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.providersecret",
|
||||
"pk": "40191ad5-d8c2-40a9-826d-241397626b68",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-10T11:11:44.515Z",
|
||||
"updated_at": "2024-10-11T07:59:56.102Z",
|
||||
"name": "AWS static secrets",
|
||||
"secret_type": "static",
|
||||
"_secret": "Z0FBQUFBQm5DTnI4Y1RyV19UWEJzc3kzQUExcU5tdlQzbFVLeDdZMWd1MzkwWkl2UF9oZGhiVEJHVWpSMXV4MjYyN3g2OVpvNVpkQUQ3S0VGaGdQLTFhQWE3MkpWZUt2cnVhODc4d3FpY3FVZkpwdHJzNUJPeFRwZ3N4bGpPZTlkNWRNdFlwTHU3aTNWR3JjSzJwLWRITHdfQWpXb1F0c1l3bVFxbnFrTEpPTGgxcnF1VUprSzZ5dGRQU2VGYmZhTTlwbVpsNFBNWlFhVW9RbjJyYnZ5N0oweE5kV0ZEaUdpUUpNVExOa3oyQ2dNREVSenJ0TEFZc0RrRWpXNUhyMmtybGNLWDVOR0FabEl4QVR1bkZyb2hBLWc1MFNIekVyeXI0SmVreHBjRnJ1YUlVdXpVbW9JZkk0aEgxYlM1VGhSRlhtcS14YzdTYUhXR2xodElmWjZuNUVwaHozX1RVTG1QWHdPZWd4clNHYnAyOTBsWEl5UU83RGxZb0RKWjdadjlsTmJtSHQ0Yl9uaDJoODB0QV9sWmFYbFAxcjA1bmhNVlNqc2xEeHlvcUJFbVZvY250ZENnMnZLT1psb1JDclB3WVR6NGdZb2pzb3U4Ny04QlB0UTZub0dMOXZEUTZEcVJhZldCWEZZSDdLTy02UVZqck5zVTZwS3pObGlOejNJeHUzbFRabFM2V2xaekZVRjZtX3VzZlplendnOWQzT01WMFd3ejNadHVlTFlqRGR2dk5Da29zOFYwOUdOaEc4OHhHRnJFMmJFMk12VDNPNlBBTGlsXy13cUM1QkVYb0o1Z2U4ZXJnWXpZdm1sWjA5bzQzb2NFWC1xbmIycGZRbGtCaGNaOWlkX094UUNNampwbkZoREctNWI4QnZRaE8zM3BEQ1BwNzA1a3BzOGczZXdIM2s1NHFGN1ZTbmJhZkc4RVdfM0ZIZU5udTBYajd1RGxpWXZpRWdSMmhHa2RKOEIzbmM0X2F1OGxrN2p6LW9UVldDOFVpREoxZ1UzcTBZX19OQ0xJb0syWlhNSlQ4MzQwdzRtVG94Y01GS3FMLV95UVlxOTFORk8zdjE5VGxVaXdhbGlzeHdoYWNzazZWai1GUGtUM2gzR0ZWTTY4SThWeVFnZldIaklOTTJqTTg1VkhEYW5wNmdEVllXMmJCV2tpVmVYeUV2c0E1T00xbHJRNzgzVG9wb0Q1cV81UEhqYUFsQ2p1a0VpRDVINl9SVkpyZVRNVnVXQUxwY3NWZnJrNmRVREpiLWNHYUpXWmxkQlhNbWhuR1NmQ1BaVDlidUxCWHJMaHhZbk1FclVBaEVZeWg1ZlFoenZzRHlKbV8wa3lmMGZrd3NmTDZjQkE0UXNSUFhpTWtUUHBrX29BVzc4QzEtWEJIQW1GMGFuZVlXQWZIOXJEamloeGFCeHpYMHNjMFVfNXpQdlJfSkk2bzFROU5NU0c1SHREWW1nbkFNZFZ0UjdPRGdjaF96RGplY1hjdFFzLVR6MTVXYlRjbHIxQ2JRejRpVko5NWhBU0ZHR3ZvczU5elljRGpHRTdIc0FsSm5fUHEwT1gtTS1lN3M3X3ZZRnlkYUZoZXRQeEJsZlhLdFdTUzU1NUl4a29aOWZIdTlPM0Fnak1xYWVkYTNiMmZXUHlXS2lwUVBZLXQyaUxuRmtQNFFieE9SVmdZVW9WTHlzbnBPZlNIdGVHOE1LNVNESjN3cGtVSHVpT1NJWHE1ZzNmUTVTOC0xX3NGSmJqU19IbjZfQWtMRG1YNUQtRy13TUJIZFlyOXJkQzFQbkdZVXVzM2czbS1HWHFBT1pXdVd3N09tcG82SVhnY1ZtUWxqTEg2UzJCUmllb2pweVN2aGwwS1FVRUhjNEN2amRMc3MwVU4zN3dVMWM5Slg4SERtenFaQk1yMWx0LWtxVWtLZVVtbU4yejVEM2h6TEt0RGdfWE09",
|
||||
"provider": "b85601a8-4b45-4194-8135-03fb980ef428"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.providersecret",
|
||||
"pk": "ed89d1ea-366a-4d12-a602-f2ab77019742",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-10T11:11:44.515Z",
|
||||
"updated_at": "2024-10-11T07:59:56.102Z",
|
||||
"name": "Azure static secrets",
|
||||
"secret_type": "static",
|
||||
"_secret": "Z0FBQUFBQm5DTnI4Y1RyV19UWEJzc3kzQUExcU5tdlQzbFVLeDdZMWd1MzkwWkl2UF9oZGhiVEJHVWpSMXV4MjYyN3g2OVpvNVpkQUQ3S0VGaGdQLTFhQWE3MkpWZUt2cnVhODc4d3FpY3FVZkpwdHJzNUJPeFRwZ3N4bGpPZTlkNWRNdFlwTHU3aTNWR3JjSzJwLWRITHdfQWpXb1F0c1l3bVFxbnFrTEpPTGgxcnF1VUprSzZ5dGRQU2VGYmZhTTlwbVpsNFBNWlFhVW9RbjJyYnZ5N0oweE5kV0ZEaUdpUUpNVExOa3oyQ2dNREVSenJ0TEFZc0RrRWpXNUhyMmtybGNLWDVOR0FabEl4QVR1bkZyb2hBLWc1MFNIekVyeXI0SmVreHBjRnJ1YUlVdXpVbW9JZkk0aEgxYlM1VGhSRlhtcS14YzdTYUhXR2xodElmWjZuNUVwaHozX1RVTG1QWHdPZWd4clNHYnAyOTBsWEl5UU83RGxZb0RKWjdadjlsTmJtSHQ0Yl9uaDJoODB0QV9sWmFYbFAxcjA1bmhNVlNqc2xEeHlvcUJFbVZvY250ZENnMnZLT1psb1JDclB3WVR6NGdZb2pzb3U4Ny04QlB0UTZub0dMOXZEUTZEcVJhZldCWEZZSDdLTy02UVZqck5zVTZwS3pObGlOejNJeHUzbFRabFM2V2xaekZVRjZtX3VzZlplendnOWQzT01WMFd3ejNadHVlTFlqRGR2dk5Da29zOFYwOUdOaEc4OHhHRnJFMmJFMk12VDNPNlBBTGlsXy13cUM1QkVYb0o1Z2U4ZXJnWXpZdm1sWjA5bzQzb2NFWC1xbmIycGZRbGtCaGNaOWlkX094UUNNampwbkZoREctNWI4QnZRaE8zM3BEQ1BwNzA1a3BzOGczZXdIM2s1NHFGN1ZTbmJhZkc4RVdfM0ZIZU5udTBYajd1RGxpWXZpRWdSMmhHa2RKOEIzbmM0X2F1OGxrN2p6LW9UVldDOFVpREoxZ1UzcTBZX19OQ0xJb0syWlhNSlQ4MzQwdzRtVG94Y01GS3FMLV95UVlxOTFORk8zdjE5VGxVaXdhbGlzeHdoYWNzazZWai1GUGtUM2gzR0ZWTTY4SThWeVFnZldIaklOTTJqTTg1VkhEYW5wNmdEVllXMmJCV2tpVmVYeUV2c0E1T00xbHJRNzgzVG9wb0Q1cV81UEhqYUFsQ2p1a0VpRDVINl9SVkpyZVRNVnVXQUxwY3NWZnJrNmRVREpiLWNHYUpXWmxkQlhNbWhuR1NmQ1BaVDlidUxCWHJMaHhZbk1FclVBaEVZeWg1ZlFoenZzRHlKbV8wa3lmMGZrd3NmTDZjQkE0UXNSUFhpTWtUUHBrX29BVzc4QzEtWEJIQW1GMGFuZVlXQWZIOXJEamloeGFCeHpYMHNjMFVfNXpQdlJfSkk2bzFROU5NU0c1SHREWW1nbkFNZFZ0UjdPRGdjaF96RGplY1hjdFFzLVR6MTVXYlRjbHIxQ2JRejRpVko5NWhBU0ZHR3ZvczU5elljRGpHRTdIc0FsSm5fUHEwT1gtTS1lN3M3X3ZZRnlkYUZoZXRQeEJsZlhLdFdTUzU1NUl4a29aOWZIdTlPM0Fnak1xYWVkYTNiMmZXUHlXS2lwUVBZLXQyaUxuRmtQNFFieE9SVmdZVW9WTHlzbnBPZlNIdGVHOE1LNVNESjN3cGtVSHVpT1NJWHE1ZzNmUTVTOC0xX3NGSmJqU19IbjZfQWtMRG1YNUQtRy13TUJIZFlyOXJkQzFQbkdZVXVzM2czbS1HWHFBT1pXdVd3N09tcG82SVhnY1ZtUWxqTEg2UzJCUmllb2pweVN2aGwwS1FVRUhjNEN2amRMc3MwVU4zN3dVMWM5Slg4SERtenFaQk1yMWx0LWtxVWtLZVVtbU4yejVEM2h6TEt0RGdfWE09",
|
||||
"provider": "1b59e032-3eb6-4694-93a5-df84cd9b3ce2"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.providersecret",
|
||||
"pk": "ae48ecde-75cd-4814-92ab-18f48719e5d9",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:45:26.412Z",
|
||||
"updated_at": "2024-10-18T10:45:26.412Z",
|
||||
"name": "Valid AWS Credentials",
|
||||
"secret_type": "static",
|
||||
"_secret": "Z0FBQUFBQm5FanhHa3dXS0I3M2NmWm56SktiaGNqdDZUN0xQU1QwUi15QkhLZldFUmRENk1BXzlscG9JSUxVSTF5ekxuMkdEanlJNjhPUS1VSV9wVTBvU2l4ZnNGOVJhYW93RC1LTEhmc2pyOTJvUWwyWnpFY19WN1pRQk5IdDYwYnBDQnF1eU9nUzdwTGU3QU5qMGFyX1E4SXdpSk9paGVLcVpOVUhwb3duaXgxZ0ZxME5Pcm40QzBGWEZKY2lmRVlCMGFuVFVzemxuVjVNalZVQ2JsY2ZqNWt3Z01IYUZ0dk92YkdtSUZ5SlBvQWZoVU5DWlRFWmExNnJGVEY4Q1Bnd2VJUW9TSWdRcG9rSDNfREQwRld3Q1RYVnVYWVJLWWIxZmpsWGpwd0xQM0dtLTlYUjdHOVhhNklLWXFGTHpFQUVyVmNhYW9CU0tocGVyX3VjMkVEcVdjdFBfaVpsLTBzaUxrWTlta3dpelNtTG9xYVhBUHUzNUE4RnI1WXdJdHcxcFVfaG1XRHhDVFBKamxJb1FaQ2lsQ3FzRmxZbEJVemVkT1E2aHZfbDJqWDJPT3ViOWJGYzQ3eTNWNlFQSHBWRDFiV2tneDM4SmVqMU9Bd01TaXhPY2dmWG5RdENURkM2b2s5V3luVUZQcnFKNldnWEdYaWE2MnVNQkEwMHd6cUY5cVJkcGw4bHBtNzhPeHhkREdwSXNEc1JqQkxUR1FYRTV0UFNwbVlVSWF5LWgtbVhJZXlPZ0Q4cG9HX2E0Qld0LTF1TTFEVy1XNGdnQTRpLWpQQmFJUEdaOFJGNDVoUVJnQ25YVU5DTENMaTY4YmxtYWJFRERXTjAydVN2YnBDb3RkUE0zSDRlN1A3TXc4d2h1Wmd0LWUzZEcwMUstNUw2YnFyS2Z0NEVYMXllQW5GLVBpeU55SkNhczFIeFhrWXZpVXdwSFVrTDdiQjQtWHZJdERXVThzSnJsT2FNZzJDaUt6Y2NXYUZhUlo3VkY0R1BrSHNHNHprTmxjYmp1TXVKakRha0VtNmRFZWRmZHJWdnRCOVNjVGFVWjVQM3RwWWl4SkNmOU1pb2xqMFdOblhNY3Y3aERpOHFlWjJRc2dtRDkzZm1Qc29wdk5OQmJPbGk5ZUpGM1I2YzRJN2gxR3FEMllXR1pma1k0emVqSjZyMUliMGZsc3NfSlVDbGt4QzJTc3hHOU9FRHlZb09zVnlvcDR6WC1uclRSenI0Yy13WlFWNzJWRkwydjhmSjFZdnZ5X3NmZVF6UWRNMXo5STVyV3B0d09UUlFtOURITGhXSDVIUl9zYURJc05KWUNxekVyYkxJclNFNV9leEk4R2xsMGJod3lYeFIwaXR2dllwLTZyNWlXdDRpRkxVYkxWZFdvYUhKck5aeElBZUtKejNKS2tYVW1rTnVrRjJBQmdlZmV6ckozNjNwRmxLS1FaZzRVTTBZYzFFYi1idjBpZkQ3bWVvbEdRZXJrWFNleWZmSmFNdG1wQlp0YmxjWDV5T0tEbHRsYnNHbjRPRjl5MkttOUhRWlJtd1pmTnY4Z1lPRlZoTzFGVDdTZ0RDY1ByV0RndTd5LUNhcHNXUnNIeXdLMEw3WS1tektRTWFLQy1zakpMLWFiM3FOakE1UWU4LXlOX2VPbmd4MTZCRk9OY3Z4UGVDSWxhRlg4eHI4X1VUTDZZM0pjV0JDVi1UUjlTUl85cm1LWlZ0T1dzU0lpdWUwbXgtZ0l6eHNSNExRTV9MczJ6UkRkVElnRV9Rc0RoTDFnVHRZSEFPb2paX200TzZiRzVmRE5hOW5CTjh5Qi1WaEtueEpqRzJDY1luVWZtX1pseUpQSE5lQ0RrZ05EbWo5cU9MZ0ZkcXlqUll4UUkyejRfY2p4RXdEeC1PS1JIQVNUcmNIdkRJbzRiUktMWEQxUFM3aGNzeVFWUDdtcm5xNHlOYUU9",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555"
|
||||
}
|
||||
}
|
||||
]
|
||||
@@ -1,256 +0,0 @@
|
||||
[
|
||||
{
|
||||
"model": "api.scan",
|
||||
"pk": "0191e280-9d2f-71c8-9b18-487a23ba185e",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"provider": "37b065f8-26b0-4218-a665-0b23d07b27d9",
|
||||
"trigger": "manual",
|
||||
"name": "test scan 1",
|
||||
"state": "completed",
|
||||
"unique_resource_count": 1,
|
||||
"duration": 5,
|
||||
"scanner_args": {
|
||||
"checks_to_execute": ["accessanalyzer_enabled"]
|
||||
},
|
||||
"inserted_at": "2024-09-01T17:25:27.050Z",
|
||||
"started_at": "2024-09-01T17:25:27.050Z",
|
||||
"updated_at": "2024-09-01T17:25:27.050Z",
|
||||
"completed_at": "2024-09-01T17:25:32.050Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.scan",
|
||||
"pk": "01920573-aa9c-73c9-bcda-f2e35c9b19d2",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"provider": "b85601a8-4b45-4194-8135-03fb980ef428",
|
||||
"trigger": "manual",
|
||||
"name": "test aws scan 2",
|
||||
"state": "completed",
|
||||
"unique_resource_count": 1,
|
||||
"duration": 20,
|
||||
"scanner_args": {
|
||||
"checks_to_execute": ["accessanalyzer_enabled"]
|
||||
},
|
||||
"inserted_at": "2024-09-02T17:24:27.050Z",
|
||||
"started_at": "2024-09-02T17:24:27.050Z",
|
||||
"updated_at": "2024-09-02T17:24:27.050Z",
|
||||
"completed_at": "2024-09-01T17:24:37.050Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.scan",
|
||||
"pk": "01920573-ea5b-77fd-a93f-1ed2ae12f728",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"provider": "baa7b895-8bac-4f47-b010-4226d132856e",
|
||||
"trigger": "manual",
|
||||
"name": "test gcp scan",
|
||||
"state": "completed",
|
||||
"unique_resource_count": 10,
|
||||
"duration": 10,
|
||||
"scanner_args": {
|
||||
"checks_to_execute": ["cloudsql_instance_automated_backups"]
|
||||
},
|
||||
"inserted_at": "2024-09-02T19:26:27.050Z",
|
||||
"started_at": "2024-09-02T19:26:27.050Z",
|
||||
"updated_at": "2024-09-02T19:26:27.050Z",
|
||||
"completed_at": "2024-09-01T17:26:37.050Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.scan",
|
||||
"pk": "01920573-ea5b-77fd-a93f-1ed2ae12f728",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"provider": "b85601a8-4b45-4194-8135-03fb980ef428",
|
||||
"trigger": "manual",
|
||||
"name": "test aws scan",
|
||||
"state": "completed",
|
||||
"unique_resource_count": 1,
|
||||
"duration": 35,
|
||||
"scanner_args": {
|
||||
"checks_to_execute": ["accessanalyzer_enabled"]
|
||||
},
|
||||
"inserted_at": "2024-09-02T19:27:27.050Z",
|
||||
"started_at": "2024-09-02T19:27:27.050Z",
|
||||
"updated_at": "2024-09-02T19:27:27.050Z",
|
||||
"completed_at": "2024-09-01T17:27:37.050Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.scan",
|
||||
"pk": "c281c924-23f3-4fcc-ac63-73a22154b7de",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"provider": "b85601a8-4b45-4194-8135-03fb980ef428",
|
||||
"trigger": "scheduled",
|
||||
"name": "test scheduled aws scan",
|
||||
"state": "available",
|
||||
"scanner_args": {
|
||||
"checks_to_execute": ["cloudformation_stack_outputs_find_secrets"]
|
||||
},
|
||||
"scheduled_at": "2030-09-02T19:20:27.050Z",
|
||||
"inserted_at": "2024-09-02T19:24:27.050Z",
|
||||
"updated_at": "2024-09-02T19:24:27.050Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.scan",
|
||||
"pk": "25c8907c-b26e-4ec0-966b-a1f53a39d8e6",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"provider": "b85601a8-4b45-4194-8135-03fb980ef428",
|
||||
"trigger": "scheduled",
|
||||
"name": "test scheduled aws scan 2",
|
||||
"state": "available",
|
||||
"scanner_args": {
|
||||
"checks_to_execute": [
|
||||
"accessanalyzer_enabled",
|
||||
"cloudformation_stack_outputs_find_secrets"
|
||||
]
|
||||
},
|
||||
"scheduled_at": "2030-08-02T19:31:27.050Z",
|
||||
"inserted_at": "2024-09-02T19:38:27.050Z",
|
||||
"updated_at": "2024-09-02T19:38:27.050Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.scan",
|
||||
"pk": "25c8907c-b26e-4ec0-966b-a1f53a39d8e6",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"provider": "baa7b895-8bac-4f47-b010-4226d132856e",
|
||||
"trigger": "scheduled",
|
||||
"name": "test scheduled gcp scan",
|
||||
"state": "available",
|
||||
"scanner_args": {
|
||||
"checks_to_execute": [
|
||||
"cloudsql_instance_automated_backups",
|
||||
"iam_audit_logs_enabled"
|
||||
]
|
||||
},
|
||||
"scheduled_at": "2030-07-02T19:30:27.050Z",
|
||||
"inserted_at": "2024-09-02T19:29:27.050Z",
|
||||
"updated_at": "2024-09-02T19:29:27.050Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.scan",
|
||||
"pk": "25c8907c-b26e-4ec0-966b-a1f53a39d8e6",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"provider": "1b59e032-3eb6-4694-93a5-df84cd9b3ce2",
|
||||
"trigger": "scheduled",
|
||||
"name": "test scheduled azure scan",
|
||||
"state": "available",
|
||||
"scanner_args": {
|
||||
"checks_to_execute": [
|
||||
"aks_cluster_rbac_enabled",
|
||||
"defender_additional_email_configured_with_a_security_contact"
|
||||
]
|
||||
},
|
||||
"scheduled_at": "2030-08-05T19:32:27.050Z",
|
||||
"inserted_at": "2024-09-02T19:29:27.050Z",
|
||||
"updated_at": "2024-09-02T19:29:27.050Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.scan",
|
||||
"pk": "01929f3b-ed2e-7623-ad63-7c37cd37828f",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"name": "real scan 1",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"trigger": "manual",
|
||||
"state": "completed",
|
||||
"unique_resource_count": 19,
|
||||
"progress": 100,
|
||||
"scanner_args": {
|
||||
"checks_to_execute": ["accessanalyzer_enabled"]
|
||||
},
|
||||
"duration": 7,
|
||||
"scheduled_at": null,
|
||||
"inserted_at": "2024-10-18T10:45:57.678Z",
|
||||
"updated_at": "2024-10-18T10:46:05.127Z",
|
||||
"started_at": "2024-10-18T10:45:57.909Z",
|
||||
"completed_at": "2024-10-18T10:46:05.127Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.scan",
|
||||
"pk": "6dd8925f-a52d-48de-a546-d2d90db30ab1",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"name": "real scan azure",
|
||||
"provider": "1b59e032-3eb6-4694-93a5-df84cd9b3ce2",
|
||||
"trigger": "manual",
|
||||
"state": "completed",
|
||||
"unique_resource_count": 20,
|
||||
"progress": 100,
|
||||
"scanner_args": {
|
||||
"checks_to_execute": [
|
||||
"accessanalyzer_enabled",
|
||||
"account_security_contact_information_is_registered"
|
||||
]
|
||||
},
|
||||
"duration": 4,
|
||||
"scheduled_at": null,
|
||||
"inserted_at": "2024-10-18T11:16:21.358Z",
|
||||
"updated_at": "2024-10-18T11:16:26.060Z",
|
||||
"started_at": "2024-10-18T11:16:21.593Z",
|
||||
"completed_at": "2024-10-18T11:16:26.060Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.scan",
|
||||
"pk": "4ca7ce89-3236-41a8-a369-8937bc152af5",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"name": "real scan k8s",
|
||||
"provider": "7791914f-d646-4fe2-b2ed-73f2c6499a36",
|
||||
"trigger": "manual",
|
||||
"state": "completed",
|
||||
"unique_resource_count": 20,
|
||||
"progress": 100,
|
||||
"scanner_args": {
|
||||
"checks_to_execute": [
|
||||
"accessanalyzer_enabled",
|
||||
"account_security_contact_information_is_registered"
|
||||
]
|
||||
},
|
||||
"duration": 4,
|
||||
"scheduled_at": null,
|
||||
"inserted_at": "2024-10-18T11:16:21.358Z",
|
||||
"updated_at": "2024-10-18T11:16:26.060Z",
|
||||
"started_at": "2024-10-18T11:16:21.593Z",
|
||||
"completed_at": "2024-10-18T11:16:26.060Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.scan",
|
||||
"pk": "01929f57-c0ee-7553-be0b-cbde006fb6f7",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"name": "real scan 2",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"trigger": "manual",
|
||||
"state": "completed",
|
||||
"unique_resource_count": 20,
|
||||
"progress": 100,
|
||||
"scanner_args": {
|
||||
"checks_to_execute": [
|
||||
"accessanalyzer_enabled",
|
||||
"account_security_contact_information_is_registered"
|
||||
]
|
||||
},
|
||||
"duration": 4,
|
||||
"scheduled_at": null,
|
||||
"inserted_at": "2024-10-18T11:16:21.358Z",
|
||||
"updated_at": "2024-10-18T11:16:26.060Z",
|
||||
"started_at": "2024-10-18T11:16:21.593Z",
|
||||
"completed_at": "2024-10-18T11:16:26.060Z"
|
||||
}
|
||||
}
|
||||
]
|
||||
@@ -1,322 +0,0 @@
|
||||
[
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "0234477d-0b8e-439f-87d3-ce38dff3a434",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.772Z",
|
||||
"updated_at": "2024-10-18T11:16:24.466Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root",
|
||||
"name": "",
|
||||
"region": "eu-south-2",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'eu':7C 'eu-south':6C 'iam':3A 'other':11 'root':5A 'south':8C"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "17ce30a3-6e77-42a5-bb08-29dfcad7396a",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.882Z",
|
||||
"updated_at": "2024-10-18T11:16:24.533Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root2",
|
||||
"name": "",
|
||||
"region": "eu-west-1",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'eu':7C 'eu-west':6C 'iam':3A 'other':11 'root':5A 'west':8C"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "1f9de587-ba5b-415a-b9b0-ceed4c6c9f32",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:05.091Z",
|
||||
"updated_at": "2024-10-18T11:16:24.637Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root3",
|
||||
"name": "",
|
||||
"region": "ap-northeast-2",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-northeast':6C 'arn':1A 'aws':2A 'iam':3A 'northeast':8C 'other':11 'root':5A"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "29b35668-6dad-411d-bfec-492311889892",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:05.008Z",
|
||||
"updated_at": "2024-10-18T11:16:24.600Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root4",
|
||||
"name": "",
|
||||
"region": "us-west-2",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'iam':3A 'other':11 'root':5A 'us':7C 'us-west':6C 'west':8C"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "30505514-01d4-42bb-8b0c-471bbab27460",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T11:16:26.014Z",
|
||||
"updated_at": "2024-10-18T11:16:26.023Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root5",
|
||||
"name": "",
|
||||
"region": "us-east-1",
|
||||
"service": "account",
|
||||
"type": "Other",
|
||||
"text_search": "'1':9C '112233445566':4A 'account':10 'arn':1A 'aws':2A 'east':8C 'iam':3A 'other':11 'root':5A 'us':7C 'us-east':6C"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "372932f0-e4df-4968-9721-bb4f6236fae4",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.848Z",
|
||||
"updated_at": "2024-10-18T11:16:24.516Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root6",
|
||||
"name": "",
|
||||
"region": "eu-west-3",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'3':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'eu':7C 'eu-west':6C 'iam':3A 'other':11 'root':5A 'west':8C"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "3a37d124-7637-43f6-9df7-e9aa7ef98c53",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.979Z",
|
||||
"updated_at": "2024-10-18T11:16:24.585Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root7",
|
||||
"name": "",
|
||||
"region": "sa-east-1",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'east':8C 'iam':3A 'other':11 'root':5A 'sa':7C 'sa-east':6C"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "3c49318e-03c6-4f12-876f-40451ce7de3d",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:05.072Z",
|
||||
"updated_at": "2024-10-18T11:16:24.630Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root8",
|
||||
"name": "",
|
||||
"region": "ap-southeast-2",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-southeast':6C 'arn':1A 'aws':2A 'iam':3A 'other':11 'root':5A 'southeast':8C"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "430bf313-8733-4bc5-ac70-5402adfce880",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.994Z",
|
||||
"updated_at": "2024-10-18T11:16:24.593Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root9",
|
||||
"name": "",
|
||||
"region": "eu-north-1",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'eu':7C 'eu-north':6C 'iam':3A 'north':8C 'other':11 'root':5A"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "78bd2a52-82f9-45df-90a9-4ad78254fdc4",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:05.055Z",
|
||||
"updated_at": "2024-10-18T11:16:24.622Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root10",
|
||||
"name": "",
|
||||
"region": "ap-northeast-1",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-northeast':6C 'arn':1A 'aws':2A 'iam':3A 'northeast':8C 'other':11 'root':5A"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "7973e332-795e-4a74-b4d4-a53a21c98c80",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.896Z",
|
||||
"updated_at": "2024-10-18T11:16:24.542Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root11",
|
||||
"name": "",
|
||||
"region": "us-east-2",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'east':8C 'iam':3A 'other':11 'root':5A 'us':7C 'us-east':6C"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "8ca0a188-5699-436e-80fd-e566edaeb259",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.938Z",
|
||||
"updated_at": "2024-10-18T11:16:24.565Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root12",
|
||||
"name": "",
|
||||
"region": "ca-central-1",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'ca':7C 'ca-central':6C 'central':8C 'iam':3A 'other':11 'root':5A"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "8fe4514f-71d7-46ab-b0dc-70cef23b4d13",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.965Z",
|
||||
"updated_at": "2024-10-18T11:16:24.578Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root13",
|
||||
"name": "",
|
||||
"region": "eu-west-2",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'eu':7C 'eu-west':6C 'iam':3A 'other':11 'root':5A 'west':8C"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "9ab35225-dc7c-4ebd-bbc0-d81fb5d9de77",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.909Z",
|
||||
"updated_at": "2024-10-18T11:16:24.549Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root14",
|
||||
"name": "",
|
||||
"region": "ap-south-1",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-south':6C 'arn':1A 'aws':2A 'iam':3A 'other':11 'root':5A 'south':8C"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "9be26c1d-adf0-4ba8-9ca9-c740f4a0dc4e",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.863Z",
|
||||
"updated_at": "2024-10-18T11:16:24.524Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root15",
|
||||
"name": "",
|
||||
"region": "eu-central-2",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'central':8C 'eu':7C 'eu-central':6C 'iam':3A 'other':11 'root':5A"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "ba108c01-bcad-44f1-b211-c1d8985da89d",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:05.110Z",
|
||||
"updated_at": "2024-10-18T11:16:24.644Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root16",
|
||||
"name": "",
|
||||
"region": "ap-northeast-3",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'3':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-northeast':6C 'arn':1A 'aws':2A 'iam':3A 'northeast':8C 'other':11 'root':5A"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "dc6cfb5d-6835-4c7b-9152-c18c734a6eaa",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:05.038Z",
|
||||
"updated_at": "2024-10-18T11:16:24.615Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root17",
|
||||
"name": "",
|
||||
"region": "eu-central-1",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'central':8C 'eu':7C 'eu-central':6C 'iam':3A 'other':11 'root':5A"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "e0664164-cfda-44a4-b743-acee1c69386c",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.924Z",
|
||||
"updated_at": "2024-10-18T11:16:24.557Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root18",
|
||||
"name": "",
|
||||
"region": "us-west-1",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'iam':3A 'other':11 'root':5A 'us':7C 'us-west':6C 'west':8C"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "e1929daa-a984-4116-8131-492a48321dba",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:05.023Z",
|
||||
"updated_at": "2024-10-18T11:16:24.607Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root19",
|
||||
"name": "",
|
||||
"region": "ap-southeast-1",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-southeast':6C 'arn':1A 'aws':2A 'iam':3A 'other':11 'root':5A 'southeast':8C"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "e37bb1f1-1669-4bb3-be86-e3378ddfbcba",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.952Z",
|
||||
"updated_at": "2024-10-18T11:16:24.571Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:access-analyzer:us-east-1:112233445566:analyzer/ConsoleAnalyzer-83b66ad7-d024-454e-b851-52d11cc1cf7c",
|
||||
"name": "",
|
||||
"region": "us-east-1",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'1':9A,15C '112233445566':10A 'access':4A 'access-analyzer':3A 'accessanalyzer':16 'analyzer':5A 'analyzer/consoleanalyzer-83b66ad7-d024-454e-b851-52d11cc1cf7c':11A 'arn':1A 'aws':2A 'east':8A,14C 'other':17 'us':7A,13C 'us-east':6A,12C"
|
||||
}
|
||||
}
|
||||
]
|
||||
File diff suppressed because it is too large
@@ -1,180 +0,0 @@
|
||||
[
|
||||
{
|
||||
"model": "api.providergroup",
|
||||
"pk": "3fe28fb8-e545-424c-9b8f-69aff638f430",
|
||||
"fields": {
|
||||
"name": "first_group",
|
||||
"inserted_at": "2024-11-13T11:36:19.503Z",
|
||||
"updated_at": "2024-11-13T11:36:19.503Z",
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.providergroup",
|
||||
"pk": "525e91e7-f3f3-4254-bbc3-27ce1ade86b1",
|
||||
"fields": {
|
||||
"name": "second_group",
|
||||
"inserted_at": "2024-11-13T11:36:25.421Z",
|
||||
"updated_at": "2024-11-13T11:36:25.421Z",
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.providergroup",
|
||||
"pk": "481769f5-db2b-447b-8b00-1dee18db90ec",
|
||||
"fields": {
|
||||
"name": "third_group",
|
||||
"inserted_at": "2024-11-13T11:36:37.603Z",
|
||||
"updated_at": "2024-11-13T11:36:37.603Z",
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.providergroupmembership",
|
||||
"pk": "13625bd3-f428-4021-ac1b-b0bd41b6e02f",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"provider": "1b59e032-3eb6-4694-93a5-df84cd9b3ce2",
|
||||
"provider_group": "3fe28fb8-e545-424c-9b8f-69aff638f430",
|
||||
"inserted_at": "2024-11-13T11:55:17.138Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.providergroupmembership",
|
||||
"pk": "54784ebe-42d2-4937-aa6a-e21c62879567",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"provider_group": "3fe28fb8-e545-424c-9b8f-69aff638f430",
|
||||
"inserted_at": "2024-11-13T11:55:17.138Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.providergroupmembership",
|
||||
"pk": "c8bd52d5-42a5-48fe-8e0a-3eef154b8ebe",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"provider_group": "525e91e7-f3f3-4254-bbc3-27ce1ade86b1",
|
||||
"inserted_at": "2024-11-13T11:55:41.237Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.role",
|
||||
"pk": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"name": "admin_test",
|
||||
"manage_users": true,
|
||||
"manage_account": true,
|
||||
"manage_billing": true,
|
||||
"manage_providers": true,
|
||||
"manage_integrations": true,
|
||||
"manage_scans": true,
|
||||
"unlimited_visibility": true,
|
||||
"inserted_at": "2024-11-20T15:32:42.402Z",
|
||||
"updated_at": "2024-11-20T15:32:42.402Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.role",
|
||||
"pk": "845ff03a-87ef-42ba-9786-6577c70c4df0",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"name": "first_role",
|
||||
"manage_users": true,
|
||||
"manage_account": true,
|
||||
"manage_billing": true,
|
||||
"manage_providers": true,
|
||||
"manage_integrations": false,
|
||||
"manage_scans": false,
|
||||
"unlimited_visibility": true,
|
||||
"inserted_at": "2024-11-20T15:31:53.239Z",
|
||||
"updated_at": "2024-11-20T15:31:53.239Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.role",
|
||||
"pk": "902d726c-4bd5-413a-a2a4-f7b4754b6b20",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"name": "third_role",
|
||||
"manage_users": false,
|
||||
"manage_account": false,
|
||||
"manage_billing": false,
|
||||
"manage_providers": false,
|
||||
"manage_integrations": false,
|
||||
"manage_scans": true,
|
||||
"unlimited_visibility": false,
|
||||
"inserted_at": "2024-11-20T15:34:05.440Z",
|
||||
"updated_at": "2024-11-20T15:34:05.440Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.roleprovidergrouprelationship",
|
||||
"pk": "57fd024a-0a7f-49b4-a092-fa0979a07aaf",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
|
||||
"provider_group": "3fe28fb8-e545-424c-9b8f-69aff638f430",
|
||||
"inserted_at": "2024-11-20T15:32:42.402Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.roleprovidergrouprelationship",
|
||||
"pk": "a3cd0099-1c13-4df1-a5e5-ecdfec561b35",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
|
||||
"provider_group": "481769f5-db2b-447b-8b00-1dee18db90ec",
|
||||
"inserted_at": "2024-11-20T15:32:42.402Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.roleprovidergrouprelationship",
|
||||
"pk": "cfd84182-a058-40c2-af3c-0189b174940f",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
|
||||
"provider_group": "525e91e7-f3f3-4254-bbc3-27ce1ade86b1",
|
||||
"inserted_at": "2024-11-20T15:32:42.402Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.userrolerelationship",
|
||||
"pk": "92339663-e954-4fd8-98fb-8bfe15949975",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
|
||||
"user": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
|
||||
"inserted_at": "2024-11-20T15:36:14.302Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.role",
|
||||
"pk": "a5b6c7d8-9e0f-1234-5678-90abcdef1234",
|
||||
"fields": {
|
||||
"tenant": "7c8f94a3-e2d1-4b3a-9f87-2c4d5e6f1a2b",
|
||||
"name": "e2e_admin",
|
||||
"manage_users": true,
|
||||
"manage_account": true,
|
||||
"manage_billing": true,
|
||||
"manage_providers": true,
|
||||
"manage_integrations": true,
|
||||
"manage_scans": true,
|
||||
"unlimited_visibility": true,
|
||||
"inserted_at": "2024-01-01T00:00:00.000Z",
|
||||
"updated_at": "2024-01-01T00:00:00.000Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.userrolerelationship",
|
||||
"pk": "f1e2d3c4-b5a6-9876-5432-10fedcba9876",
|
||||
"fields": {
|
||||
"tenant": "7c8f94a3-e2d1-4b3a-9f87-2c4d5e6f1a2b",
|
||||
"role": "a5b6c7d8-9e0f-1234-5678-90abcdef1234",
|
||||
"user": "6d4f8a91-3c2e-4b5a-8f7d-1e9c5b2a4d6f",
|
||||
"inserted_at": "2024-01-01T00:00:00.000Z"
|
||||
}
|
||||
}
|
||||
]
|
||||
File diff suppressed because one or more lines are too long
@@ -1,80 +0,0 @@
|
||||
from django.contrib.sites.models import Site
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db import DEFAULT_DB_ALIAS, connection, connections, transaction
|
||||
from django.db.migrations.recorder import MigrationRecorder
|
||||
|
||||
|
||||
def table_exists(table_name):
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT EXISTS (
|
||||
SELECT 1 FROM information_schema.tables
|
||||
WHERE table_name = %s
|
||||
)
|
||||
""",
|
||||
[table_name],
|
||||
)
|
||||
return cursor.fetchone()[0]
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Fix migration inconsistency between socialaccount and sites"
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument(
|
||||
"--database",
|
||||
default=DEFAULT_DB_ALIAS,
|
||||
help="Specifies the database to operate on.",
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
db = options["database"]
|
||||
connection = connections[db]
|
||||
recorder = MigrationRecorder(connection)
|
||||
|
||||
applied = set(recorder.applied_migrations())
|
||||
|
||||
has_social = ("socialaccount", "0001_initial") in applied
|
||||
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT EXISTS (
|
||||
SELECT FROM information_schema.tables
|
||||
WHERE table_name = 'django_site'
|
||||
);
|
||||
"""
|
||||
)
|
||||
site_table_exists = cursor.fetchone()[0]
|
||||
|
||||
if has_social and not site_table_exists:
|
||||
self.stdout.write(
|
||||
f"Detected inconsistency in '{db}'. Creating 'django_site' table manually..."
|
||||
)
|
||||
|
||||
with transaction.atomic(using=db):
|
||||
with connection.schema_editor() as schema_editor:
|
||||
schema_editor.create_model(Site)
|
||||
|
||||
recorder.record_applied("sites", "0001_initial")
|
||||
recorder.record_applied("sites", "0002_alter_domain_unique")
|
||||
|
||||
self.stdout.write(
|
||||
"Fixed: 'django_site' table created and migrations registered."
|
||||
)
|
||||
|
||||
# Ensure the relationship table also exists
|
||||
if not table_exists("socialaccount_socialapp_sites"):
|
||||
self.stdout.write(
|
||||
"Detected missing 'socialaccount_socialapp_sites' table. Creating manually..."
|
||||
)
|
||||
with connection.schema_editor() as schema_editor:
|
||||
from allauth.socialaccount.models import SocialApp
|
||||
|
||||
schema_editor.create_model(
|
||||
SocialApp._meta.get_field("sites").remote_field.through
|
||||
)
|
||||
self.stdout.write(
|
||||
"Fixed: 'socialaccount_socialapp_sites' table created."
|
||||
)
|
||||
@@ -1,285 +0,0 @@
|
||||
import random
|
||||
from datetime import datetime, timezone
|
||||
from math import ceil
|
||||
from uuid import uuid4
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from tqdm import tqdm
|
||||
|
||||
from api.db_utils import rls_transaction
|
||||
from api.models import (
|
||||
Finding,
|
||||
Provider,
|
||||
Resource,
|
||||
ResourceFindingMapping,
|
||||
ResourceScanSummary,
|
||||
Scan,
|
||||
StatusChoices,
|
||||
)
|
||||
from prowler.lib.check.models import CheckMetadata
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Populates the database with test data for performance testing."
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument(
|
||||
"--tenant",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Tenant id for which the data will be populated.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--resources",
|
||||
type=int,
|
||||
required=True,
|
||||
help="The number of resources to create.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--findings",
|
||||
type=int,
|
||||
required=True,
|
||||
help="The number of findings to create.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--batch", type=int, required=True, help="The batch size for bulk creation."
|
||||
)
|
||||
parser.add_argument(
|
||||
"--alias",
|
||||
type=str,
|
||||
required=False,
|
||||
help="Optional alias for the provider and scan",
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
tenant_id = options["tenant"]
|
||||
num_resources = options["resources"]
|
||||
num_findings = options["findings"]
|
||||
batch_size = options["batch"]
|
||||
alias = options["alias"] or "Testing"
|
||||
uid_token = str(uuid4())
|
||||
|
||||
self.stdout.write(self.style.NOTICE("Starting data population"))
|
||||
self.stdout.write(self.style.NOTICE(f"\tTenant: {tenant_id}"))
|
||||
self.stdout.write(self.style.NOTICE(f"\tAlias: {alias}"))
|
||||
self.stdout.write(self.style.NOTICE(f"\tResources: {num_resources}"))
|
||||
self.stdout.write(self.style.NOTICE(f"\tFindings: {num_findings}"))
|
||||
self.stdout.write(self.style.NOTICE(f"\tBatch size: {batch_size}\n\n"))
|
||||
|
||||
# Resource metadata
|
||||
possible_regions = [
|
||||
"us-east-1",
|
||||
"us-east-2",
|
||||
"us-west-1",
|
||||
"us-west-2",
|
||||
"ca-central-1",
|
||||
"eu-central-1",
|
||||
"eu-west-1",
|
||||
"eu-west-2",
|
||||
"eu-west-3",
|
||||
"ap-southeast-1",
|
||||
"ap-southeast-2",
|
||||
"ap-northeast-1",
|
||||
"ap-northeast-2",
|
||||
"ap-south-1",
|
||||
"sa-east-1",
|
||||
]
|
||||
possible_services = []
|
||||
possible_types = []
|
||||
|
||||
bulk_check_metadata = CheckMetadata.get_bulk(provider="aws")
|
||||
for check_metadata in bulk_check_metadata.values():
|
||||
if check_metadata.ServiceName not in possible_services:
|
||||
possible_services.append(check_metadata.ServiceName)
|
||||
if (
|
||||
check_metadata.ResourceType
|
||||
and check_metadata.ResourceType not in possible_types
|
||||
):
|
||||
possible_types.append(check_metadata.ResourceType)
|
||||
|
||||
with rls_transaction(tenant_id):
|
||||
provider, _ = Provider.all_objects.get_or_create(
|
||||
tenant_id=tenant_id,
|
||||
provider="aws",
|
||||
connected=True,
|
||||
uid=str(random.randint(100000000000, 999999999999)),
|
||||
defaults={
|
||||
"alias": alias,
|
||||
},
|
||||
)
|
||||
|
||||
with rls_transaction(tenant_id):
|
||||
scan = Scan.all_objects.create(
|
||||
tenant_id=tenant_id,
|
||||
provider=provider,
|
||||
name=alias,
|
||||
trigger="manual",
|
||||
state="executing",
|
||||
progress=0,
|
||||
started_at=datetime.now(timezone.utc),
|
||||
)
|
||||
scan_state = "completed"
|
||||
|
||||
try:
|
||||
# Create resources
|
||||
resources = []
|
||||
|
||||
for i in range(num_resources):
|
||||
resources.append(
|
||||
Resource(
|
||||
tenant_id=tenant_id,
|
||||
provider_id=provider.id,
|
||||
uid=f"testing-{uid_token}-{i}",
|
||||
name=f"Testing {uid_token}-{i}",
|
||||
region=random.choice(possible_regions),
|
||||
service=random.choice(possible_services),
|
||||
type=random.choice(possible_types),
|
||||
inserted_at="2024-10-01T00:00:00Z",
|
||||
)
|
||||
)
|
||||
|
||||
num_batches = ceil(len(resources) / batch_size)
|
||||
self.stdout.write(self.style.WARNING("Creating resources..."))
|
||||
for i in tqdm(range(0, len(resources), batch_size), total=num_batches):
|
||||
with rls_transaction(tenant_id):
|
||||
Resource.all_objects.bulk_create(resources[i : i + batch_size])
|
||||
self.stdout.write(self.style.SUCCESS("Resources created successfully.\n\n"))
|
||||
|
||||
with rls_transaction(tenant_id):
|
||||
scan.progress = 33
|
||||
scan.save()
|
||||
|
||||
# Create Findings
|
||||
findings = []
|
||||
possible_deltas = ["new", "changed", None]
|
||||
possible_severities = ["critical", "high", "medium", "low"]
|
||||
findings_resources_mapping = []
|
||||
|
||||
for i in range(num_findings):
|
||||
severity = random.choice(possible_severities)
|
||||
check_id = random.randint(1, 1000)
|
||||
assigned_resource_num = random.randint(0, len(resources) - 1)
|
||||
assigned_resource = resources[assigned_resource_num]
|
||||
findings_resources_mapping.append(assigned_resource_num)
|
||||
|
||||
findings.append(
|
||||
Finding(
|
||||
tenant_id=tenant_id,
|
||||
scan=scan,
|
||||
uid=f"testing-{uid_token}-{i}",
|
||||
delta=random.choice(possible_deltas),
|
||||
check_id=f"check-{check_id}",
|
||||
status=random.choice(list(StatusChoices)),
|
||||
severity=severity,
|
||||
impact=severity,
|
||||
raw_result={},
|
||||
check_metadata={
|
||||
"checktitle": f"Test title for check {check_id}",
|
||||
"risk": f"Testing risk {uid_token}-{i}",
|
||||
"provider": "aws",
|
||||
"severity": severity,
|
||||
"categories": ["category1", "category2", "category3"],
|
||||
"description": "This is a random description that should not matter for testing purposes.",
|
||||
"servicename": assigned_resource.service,
|
||||
"resourcetype": assigned_resource.type,
|
||||
},
|
||||
resource_types=[assigned_resource.type],
|
||||
resource_regions=[assigned_resource.region],
|
||||
resource_services=[assigned_resource.service],
|
||||
inserted_at="2024-10-01T00:00:00Z",
|
||||
)
|
||||
)
|
||||
|
||||
num_batches = ceil(len(findings) / batch_size)
|
||||
self.stdout.write(self.style.WARNING("Creating findings..."))
|
||||
for i in tqdm(range(0, len(findings), batch_size), total=num_batches):
|
||||
with rls_transaction(tenant_id):
|
||||
Finding.all_objects.bulk_create(findings[i : i + batch_size])
|
||||
self.stdout.write(self.style.SUCCESS("Findings created successfully.\n\n"))
|
||||
|
||||
with rls_transaction(tenant_id):
|
||||
scan.progress = 66
|
||||
scan.save()
|
||||
|
||||
# Create ResourceFindingMapping
|
||||
mappings = []
|
||||
scan_resource_cache: set[tuple] = set()
|
||||
for index, finding_instance in enumerate(findings):
|
||||
resource_instance = resources[findings_resources_mapping[index]]
|
||||
mappings.append(
|
||||
ResourceFindingMapping(
|
||||
tenant_id=tenant_id,
|
||||
resource=resource_instance,
|
||||
finding=finding_instance,
|
||||
)
|
||||
)
|
||||
scan_resource_cache.add(
|
||||
(
|
||||
str(resource_instance.id),
|
||||
resource_instance.service,
|
||||
resource_instance.region,
|
||||
resource_instance.type,
|
||||
)
|
||||
)
|
||||
|
||||
num_batches = ceil(len(mappings) / batch_size)
|
||||
self.stdout.write(
|
||||
self.style.WARNING("Creating resource-finding mappings...")
|
||||
)
|
||||
for i in tqdm(range(0, len(mappings), batch_size), total=num_batches):
|
||||
with rls_transaction(tenant_id):
|
||||
ResourceFindingMapping.objects.bulk_create(
|
||||
mappings[i : i + batch_size]
|
||||
)
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(
|
||||
"Resource-finding mappings created successfully.\n\n"
|
||||
)
|
||||
)
|
||||
|
||||
with rls_transaction(tenant_id):
|
||||
scan.progress = 99
|
||||
scan.save()
|
||||
|
||||
self.stdout.write(self.style.WARNING("Creating finding filter values..."))
|
||||
resource_scan_summaries = [
|
||||
ResourceScanSummary(
|
||||
tenant_id=tenant_id,
|
||||
scan_id=str(scan.id),
|
||||
resource_id=resource_id,
|
||||
service=service,
|
||||
region=region,
|
||||
resource_type=resource_type,
|
||||
)
|
||||
for resource_id, service, region, resource_type in scan_resource_cache
|
||||
]
|
||||
num_batches = ceil(len(resource_scan_summaries) / batch_size)
|
||||
with rls_transaction(tenant_id):
|
||||
for i in tqdm(
|
||||
range(0, len(resource_scan_summaries), batch_size),
|
||||
total=num_batches,
|
||||
):
|
||||
with rls_transaction(tenant_id):
|
||||
ResourceScanSummary.objects.bulk_create(
|
||||
resource_scan_summaries[i : i + batch_size],
|
||||
ignore_conflicts=True,
|
||||
)
|
||||
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS("Finding filter values created successfully.\n\n")
|
||||
)
|
||||
except Exception as e:
|
||||
self.stdout.write(self.style.ERROR(f"Failed to populate test data: {e}"))
|
||||
scan_state = "failed"
|
||||
finally:
|
||||
scan.completed_at = datetime.now(timezone.utc)
|
||||
scan.duration = int(
|
||||
(datetime.now(timezone.utc) - scan.started_at).total_seconds()
|
||||
)
|
||||
scan.progress = 100
|
||||
scan.state = scan_state
|
||||
scan.unique_resource_count = num_resources
|
||||
with rls_transaction(tenant_id):
|
||||
scan.save()
|
||||
|
||||
self.stdout.write(self.style.NOTICE("Successfully populated test data."))
|
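For reference, a management command like the one removed above is usually driven through Django's `call_command`. A minimal sketch follows; the command name `populate_test_data` is an assumption (the file name is not visible in this diff), and the argument values are illustrative, reusing the tenant UUID that appears in the fixtures above.
from django.core.management import call_command

# Hypothetical invocation of the removed command (command name assumed,
# since the file name is not shown in this diff).
call_command(
    "populate_test_data",
    tenant="12646005-9067-4d2a-a098-8bb378604362",  # tenant UUID from the fixtures above
    resources=1000,
    findings=5000,
    batch=500,
    alias="perf-testing",
)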
||||
@@ -1,55 +0,0 @@
|
||||
import logging
|
||||
import time
|
||||
|
||||
from config.custom_logging import BackendLogger
|
||||
|
||||
|
||||
def extract_auth_info(request) -> dict:
|
||||
if getattr(request, "auth", None) is not None:
|
||||
tenant_id = request.auth.get("tenant_id", "N/A")
|
||||
user_id = request.auth.get("sub", "N/A")
|
||||
api_key_prefix = request.auth.get("api_key_prefix", "N/A")
|
||||
else:
|
||||
tenant_id, user_id, api_key_prefix = "N/A", "N/A", "N/A"
|
||||
return {
|
||||
"tenant_id": tenant_id,
|
||||
"user_id": user_id,
|
||||
"api_key_prefix": api_key_prefix,
|
||||
}
|
||||
|
||||
|
||||
class APILoggingMiddleware:
|
||||
"""
|
||||
Middleware for logging API requests.
|
||||
|
||||
This middleware logs details of API requests, including the typical request metadata and other useful information.
|
||||
|
||||
Args:
|
||||
get_response (Callable): A callable to get the response, typically the next middleware or view.
|
||||
"""
|
||||
|
||||
def __init__(self, get_response):
|
||||
self.get_response = get_response
|
||||
self.logger = logging.getLogger(BackendLogger.API)
|
||||
|
||||
def __call__(self, request):
|
||||
request_start_time = time.time()
|
||||
|
||||
response = self.get_response(request)
|
||||
duration = time.time() - request_start_time
|
||||
auth_info = extract_auth_info(request)
|
||||
self.logger.info(
|
||||
"",
|
||||
extra={
|
||||
"user_id": auth_info["user_id"],
|
||||
"tenant_id": auth_info["tenant_id"],
|
||||
"api_key_prefix": auth_info["api_key_prefix"],
|
||||
"method": request.method,
|
||||
"path": request.path,
|
||||
"query_params": request.GET.dict(),
|
||||
"status_code": response.status_code,
|
||||
"duration": duration,
|
||||
},
|
||||
)
|
||||
|
||||
return response
|
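A middleware class like this is enabled by adding its dotted path to `MIDDLEWARE` in the Django settings. A minimal sketch, assuming the module lives at `api.middleware` (the actual path is not visible in this diff):
# settings.py (sketch; the dotted path below is an assumption)
MIDDLEWARE = [
    "django.middleware.security.SecurityMiddleware",
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.middleware.common.CommonMiddleware",
    # ... other middleware ...
    "api.middleware.APILoggingMiddleware",  # logs method, path, status code, duration and auth info
]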
||||
File diff suppressed because it is too large
@@ -1,23 +0,0 @@
|
||||
from django.conf import settings
|
||||
from django.db import migrations
|
||||
|
||||
from api.db_utils import DB_PROWLER_USER
|
||||
|
||||
DB_NAME = settings.DATABASES["default"]["NAME"]
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0001_initial"),
|
||||
("token_blacklist", "0012_alter_outstandingtoken_user"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunSQL(
|
||||
f"""
|
||||
GRANT SELECT, INSERT, UPDATE, DELETE ON token_blacklist_blacklistedtoken TO {DB_PROWLER_USER};
|
||||
GRANT SELECT, INSERT, UPDATE, DELETE ON token_blacklist_outstandingtoken TO {DB_PROWLER_USER};
|
||||
GRANT SELECT, DELETE ON django_admin_log TO {DB_PROWLER_USER};
|
||||
"""
|
||||
),
|
||||
]
|
||||
@@ -1,23 +0,0 @@
|
||||
# Generated by Django 5.1.1 on 2024-12-20 13:16
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0002_token_migrations"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveConstraint(
|
||||
model_name="provider",
|
||||
name="unique_provider_uids",
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="provider",
|
||||
constraint=models.UniqueConstraint(
|
||||
fields=("tenant_id", "provider", "uid", "is_deleted"),
|
||||
name="unique_provider_uids",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,248 +0,0 @@
|
||||
# Generated by Django 5.1.1 on 2024-12-05 12:29
|
||||
|
||||
import uuid
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
import api.rls
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0003_update_provider_unique_constraint_with_is_deleted"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="Role",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
("name", models.CharField(max_length=255)),
|
||||
("manage_users", models.BooleanField(default=False)),
|
||||
("manage_account", models.BooleanField(default=False)),
|
||||
("manage_billing", models.BooleanField(default=False)),
|
||||
("manage_providers", models.BooleanField(default=False)),
|
||||
("manage_integrations", models.BooleanField(default=False)),
|
||||
("manage_scans", models.BooleanField(default=False)),
|
||||
("unlimited_visibility", models.BooleanField(default=False)),
|
||||
("inserted_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
(
|
||||
"tenant",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "roles",
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="RoleProviderGroupRelationship",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
("inserted_at", models.DateTimeField(auto_now_add=True)),
|
||||
(
|
||||
"tenant",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "role_provider_group_relationship",
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="UserRoleRelationship",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
("inserted_at", models.DateTimeField(auto_now_add=True)),
|
||||
(
|
||||
"tenant",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "role_user_relationship",
|
||||
},
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="roleprovidergrouprelationship",
|
||||
name="provider_group",
|
||||
field=models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.providergroup"
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="roleprovidergrouprelationship",
|
||||
name="role",
|
||||
field=models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.role"
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="role",
|
||||
name="provider_groups",
|
||||
field=models.ManyToManyField(
|
||||
related_name="roles",
|
||||
through="api.RoleProviderGroupRelationship",
|
||||
to="api.providergroup",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="userrolerelationship",
|
||||
name="role",
|
||||
field=models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.role"
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="userrolerelationship",
|
||||
name="user",
|
||||
field=models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="role",
|
||||
name="users",
|
||||
field=models.ManyToManyField(
|
||||
related_name="roles",
|
||||
through="api.UserRoleRelationship",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="roleprovidergrouprelationship",
|
||||
constraint=models.UniqueConstraint(
|
||||
fields=("role_id", "provider_group_id"),
|
||||
name="unique_role_provider_group_relationship",
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="roleprovidergrouprelationship",
|
||||
constraint=api.rls.RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name="rls_on_roleprovidergrouprelationship",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="userrolerelationship",
|
||||
constraint=models.UniqueConstraint(
|
||||
fields=("role_id", "user_id"), name="unique_role_user_relationship"
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="userrolerelationship",
|
||||
constraint=api.rls.RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name="rls_on_userrolerelationship",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="role",
|
||||
constraint=models.UniqueConstraint(
|
||||
fields=("tenant_id", "name"), name="unique_role_per_tenant"
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="role",
|
||||
constraint=api.rls.RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name="rls_on_role",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="InvitationRoleRelationship",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
("inserted_at", models.DateTimeField(auto_now_add=True)),
|
||||
(
|
||||
"invitation",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.invitation"
|
||||
),
|
||||
),
|
||||
(
|
||||
"role",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.role"
|
||||
),
|
||||
),
|
||||
(
|
||||
"tenant",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "role_invitation_relationship",
|
||||
},
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="invitationrolerelationship",
|
||||
constraint=models.UniqueConstraint(
|
||||
fields=("role_id", "invitation_id"),
|
||||
name="unique_role_invitation_relationship",
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="invitationrolerelationship",
|
||||
constraint=api.rls.RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name="rls_on_invitationrolerelationship",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="role",
|
||||
name="invitations",
|
||||
field=models.ManyToManyField(
|
||||
related_name="roles",
|
||||
through="api.InvitationRoleRelationship",
|
||||
to="api.invitation",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,44 +0,0 @@
|
||||
from django.db import migrations
|
||||
|
||||
from api.db_router import MainRouter
|
||||
|
||||
|
||||
def create_admin_role(apps, schema_editor):
|
||||
Tenant = apps.get_model("api", "Tenant")
|
||||
Role = apps.get_model("api", "Role")
|
||||
User = apps.get_model("api", "User")
|
||||
UserRoleRelationship = apps.get_model("api", "UserRoleRelationship")
|
||||
|
||||
for tenant in Tenant.objects.using(MainRouter.admin_db).all():
|
||||
admin_role, _ = Role.objects.using(MainRouter.admin_db).get_or_create(
|
||||
name="admin",
|
||||
tenant=tenant,
|
||||
defaults={
|
||||
"manage_users": True,
|
||||
"manage_account": True,
|
||||
"manage_billing": True,
|
||||
"manage_providers": True,
|
||||
"manage_integrations": True,
|
||||
"manage_scans": True,
|
||||
"unlimited_visibility": True,
|
||||
},
|
||||
)
|
||||
users = User.objects.using(MainRouter.admin_db).filter(
|
||||
membership__tenant=tenant
|
||||
)
|
||||
for user in users:
|
||||
UserRoleRelationship.objects.using(MainRouter.admin_db).get_or_create(
|
||||
user=user,
|
||||
role=admin_role,
|
||||
tenant=tenant,
|
||||
)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0004_rbac"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(create_admin_role),
|
||||
]
|
||||
@@ -1,15 +0,0 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0005_rbac_missing_admin_roles"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="finding",
|
||||
name="first_seen_at",
|
||||
field=models.DateTimeField(editable=False, null=True),
|
||||
),
|
||||
]
|
||||
@@ -1,25 +0,0 @@
|
||||
# Generated by Django 5.1.5 on 2025-01-28 15:03
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0006_findings_first_seen"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddIndex(
|
||||
model_name="scan",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "provider_id", "state", "inserted_at"],
|
||||
name="scans_prov_state_insert_idx",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="scansummary",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "scan_id"], name="scan_summaries_tenant_scan_idx"
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,64 +0,0 @@
|
||||
import json
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
from django_celery_beat.models import PeriodicTask
|
||||
|
||||
from api.db_utils import rls_transaction
|
||||
from api.models import Scan, StateChoices
|
||||
|
||||
|
||||
def migrate_daily_scheduled_scan_tasks(apps, schema_editor):
|
||||
for daily_scheduled_scan_task in PeriodicTask.objects.filter(
|
||||
task="scan-perform-scheduled"
|
||||
):
|
||||
task_kwargs = json.loads(daily_scheduled_scan_task.kwargs)
|
||||
tenant_id = task_kwargs["tenant_id"]
|
||||
provider_id = task_kwargs["provider_id"]
|
||||
|
||||
current_time = datetime.now(timezone.utc)
|
||||
scheduled_time_today = datetime.combine(
|
||||
current_time.date(),
|
||||
daily_scheduled_scan_task.start_time.time(),
|
||||
tzinfo=timezone.utc,
|
||||
)
|
||||
|
||||
if current_time < scheduled_time_today:
|
||||
next_scan_date = scheduled_time_today
|
||||
else:
|
||||
next_scan_date = scheduled_time_today + timedelta(days=1)
|
||||
|
||||
with rls_transaction(tenant_id):
|
||||
Scan.objects.create(
|
||||
tenant_id=tenant_id,
|
||||
name="Daily scheduled scan",
|
||||
provider_id=provider_id,
|
||||
trigger=Scan.TriggerChoices.SCHEDULED,
|
||||
state=StateChoices.SCHEDULED,
|
||||
scheduled_at=next_scan_date,
|
||||
scheduler_task_id=daily_scheduled_scan_task.id,
|
||||
)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("api", "0007_scan_and_scan_summaries_indexes"),
|
||||
("django_celery_beat", "0019_alter_periodictasks_options"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="scan",
|
||||
name="scheduler_task",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="django_celery_beat.periodictask",
|
||||
),
|
||||
),
|
||||
migrations.RunPython(migrate_daily_scheduled_scan_tasks),
|
||||
]
|
||||
@@ -1,22 +0,0 @@
|
||||
# Generated by Django 5.1.5 on 2025-02-07 09:42
|
||||
|
||||
import django.core.validators
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0008_daily_scheduled_tasks_update"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="provider",
|
||||
name="uid",
|
||||
field=models.CharField(
|
||||
max_length=250,
|
||||
validators=[django.core.validators.MinLengthValidator(3)],
|
||||
verbose_name="Unique identifier for the provider, set by the provider",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,109 +0,0 @@
|
||||
from functools import partial
|
||||
|
||||
from django.db import connection, migrations
|
||||
|
||||
|
||||
def create_index_on_partitions(
|
||||
apps, schema_editor, parent_table: str, index_name: str, index_details: str
|
||||
):
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT inhrelid::regclass::text
|
||||
FROM pg_inherits
|
||||
WHERE inhparent = %s::regclass;
|
||||
""",
|
||||
[parent_table],
|
||||
)
|
||||
partitions = [row[0] for row in cursor.fetchall()]
|
||||
# Iterate over partitions and create index concurrently.
|
||||
# Note: PostgreSQL does not allow CONCURRENTLY inside a transaction,
|
||||
# so we need atomic = False for this migration.
|
||||
for partition in partitions:
|
||||
sql = (
|
||||
f"CREATE INDEX CONCURRENTLY IF NOT EXISTS {partition.replace('.', '_')}_{index_name} ON {partition} "
|
||||
f"{index_details};"
|
||||
)
|
||||
schema_editor.execute(sql)
|
||||
|
||||
|
||||
def drop_index_on_partitions(apps, schema_editor, parent_table: str, index_name: str):
|
||||
with schema_editor.connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT inhrelid::regclass::text
|
||||
FROM pg_inherits
|
||||
WHERE inhparent = %s::regclass;
|
||||
""",
|
||||
[parent_table],
|
||||
)
|
||||
partitions = [row[0] for row in cursor.fetchall()]
|
||||
|
||||
# Iterate over partitions and drop index concurrently.
|
||||
for partition in partitions:
|
||||
partition_index = f"{partition.replace('.', '_')}_{index_name}"
|
||||
sql = f"DROP INDEX CONCURRENTLY IF EXISTS {partition_index};"
|
||||
schema_editor.execute(sql)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("api", "0009_increase_provider_uid_maximum_length"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
partial(
|
||||
create_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="findings_tenant_and_id_idx",
|
||||
index_details="(tenant_id, id)",
|
||||
),
|
||||
reverse_code=partial(
|
||||
drop_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="findings_tenant_and_id_idx",
|
||||
),
|
||||
),
|
||||
migrations.RunPython(
|
||||
partial(
|
||||
create_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="find_tenant_scan_idx",
|
||||
index_details="(tenant_id, scan_id)",
|
||||
),
|
||||
reverse_code=partial(
|
||||
drop_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="find_tenant_scan_idx",
|
||||
),
|
||||
),
|
||||
migrations.RunPython(
|
||||
partial(
|
||||
create_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="find_tenant_scan_id_idx",
|
||||
index_details="(tenant_id, scan_id, id)",
|
||||
),
|
||||
reverse_code=partial(
|
||||
drop_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="find_tenant_scan_id_idx",
|
||||
),
|
||||
),
|
||||
migrations.RunPython(
|
||||
partial(
|
||||
create_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="find_delta_new_idx",
|
||||
index_details="(tenant_id, id) where delta = 'new'",
|
||||
),
|
||||
reverse_code=partial(
|
||||
drop_index_on_partitions,
|
||||
parent_table="findings",
|
||||
index_name="find_delta_new_idx",
|
||||
),
|
||||
),
|
||||
]
|
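To make the partition-index helper above concrete, this is the kind of statement it composes for a single partition; the partition name below is hypothetical, and only the `findings_tenant_and_id_idx` case from the first RunPython operation is shown.
# Illustration of the SQL built by create_index_on_partitions for one
# hypothetical partition; values mirror the first RunPython operation above.
partition = "findings_2024_oct"          # assumed partition of the "findings" table
index_name = "findings_tenant_and_id_idx"
index_details = "(tenant_id, id)"

sql = (
    f"CREATE INDEX CONCURRENTLY IF NOT EXISTS {partition.replace('.', '_')}_{index_name} "
    f"ON {partition} {index_details};"
)
print(sql)
# CREATE INDEX CONCURRENTLY IF NOT EXISTS findings_2024_oct_findings_tenant_and_id_idx ON findings_2024_oct (tenant_id, id);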
||||
Some files were not shown because too many files have changed in this diff