Mirror of https://github.com/prowler-cloud/prowler.git, synced 2026-01-25 02:08:11 +00:00

Compare commits (20 commits)
| SHA1 |
|---|
| 75cdf67277 |
| 103d176a38 |
| 2fedfab814 |
| 5c6bc6bfe4 |
| cc858d886d |
| 44b599ac37 |
| 21a27592b6 |
| a51436fbda |
| 810ac7eb72 |
| 8829db5c4f |
| 6cd6620dca |
| 6d8f9bd2ec |
| b9d3491732 |
| 9ac66865bf |
| 3760f0757d |
| ee9e4500ea |
| 0155f44b43 |
| 21e26e3a56 |
| 37e10f60f6 |
| f0e59bcb13 |
.env (2 changes)

```diff
@@ -119,7 +119,7 @@ NEXT_PUBLIC_SENTRY_ENVIRONMENT=${SENTRY_ENVIRONMENT}

 #### Prowler release version ####
-NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.12.2
+NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.16.2

 # Social login credentials
 SOCIAL_GOOGLE_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/google"
```
.github/workflows/api-bump-version.yml (new file, 254 lines, vendored)

```yaml
name: 'API: Bump Version'

on:
  release:
    types:
      - 'published'

concurrency:
  group: ${{ github.workflow }}-${{ github.event.release.tag_name }}
  cancel-in-progress: false

env:
  PROWLER_VERSION: ${{ github.event.release.tag_name }}
  BASE_BRANCH: master

jobs:
  detect-release-type:
    runs-on: ubuntu-latest
    timeout-minutes: 5
    permissions:
      contents: read
    outputs:
      is_minor: ${{ steps.detect.outputs.is_minor }}
      is_patch: ${{ steps.detect.outputs.is_patch }}
      major_version: ${{ steps.detect.outputs.major_version }}
      minor_version: ${{ steps.detect.outputs.minor_version }}
      patch_version: ${{ steps.detect.outputs.patch_version }}
      current_api_version: ${{ steps.get_api_version.outputs.current_api_version }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Get current API version
        id: get_api_version
        run: |
          CURRENT_API_VERSION=$(grep -oP '^version = "\K[^"]+' api/pyproject.toml)
          echo "current_api_version=${CURRENT_API_VERSION}" >> "${GITHUB_OUTPUT}"
          echo "Current API version: $CURRENT_API_VERSION"

      - name: Detect release type and parse version
        id: detect
        run: |
          if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
            MAJOR_VERSION=${BASH_REMATCH[1]}
            MINOR_VERSION=${BASH_REMATCH[2]}
            PATCH_VERSION=${BASH_REMATCH[3]}

            echo "major_version=${MAJOR_VERSION}" >> "${GITHUB_OUTPUT}"
            echo "minor_version=${MINOR_VERSION}" >> "${GITHUB_OUTPUT}"
            echo "patch_version=${PATCH_VERSION}" >> "${GITHUB_OUTPUT}"

            if (( MAJOR_VERSION != 5 )); then
              echo "::error::Releasing another Prowler major version, aborting..."
              exit 1
            fi

            if (( PATCH_VERSION == 0 )); then
              echo "is_minor=true" >> "${GITHUB_OUTPUT}"
              echo "is_patch=false" >> "${GITHUB_OUTPUT}"
              echo "✓ Minor release detected: $PROWLER_VERSION"
            else
              echo "is_minor=false" >> "${GITHUB_OUTPUT}"
              echo "is_patch=true" >> "${GITHUB_OUTPUT}"
              echo "✓ Patch release detected: $PROWLER_VERSION"
            fi
          else
            echo "::error::Invalid version syntax: '$PROWLER_VERSION' (must be X.Y.Z)"
            exit 1
          fi

  bump-minor-version:
    needs: detect-release-type
    if: needs.detect-release-type.outputs.is_minor == 'true'
    runs-on: ubuntu-latest
    timeout-minutes: 15
    permissions:
      contents: read
      pull-requests: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Calculate next API minor version
        run: |
          MAJOR_VERSION=${{ needs.detect-release-type.outputs.major_version }}
          MINOR_VERSION=${{ needs.detect-release-type.outputs.minor_version }}
          CURRENT_API_VERSION="${{ needs.detect-release-type.outputs.current_api_version }}"

          # API version follows Prowler minor + 1
          # For Prowler 5.17.0 -> API 1.18.0
          # For next master (Prowler 5.18.0) -> API 1.19.0
          NEXT_API_VERSION=1.$((MINOR_VERSION + 2)).0

          echo "CURRENT_API_VERSION=${CURRENT_API_VERSION}" >> "${GITHUB_ENV}"
          echo "NEXT_API_VERSION=${NEXT_API_VERSION}" >> "${GITHUB_ENV}"

          echo "Prowler release version: ${MAJOR_VERSION}.${MINOR_VERSION}.0"
          echo "Current API version: $CURRENT_API_VERSION"
          echo "Next API minor version (for master): $NEXT_API_VERSION"

      - name: Bump API versions in files for master
        run: |
          set -e

          sed -i "s|version = \"${CURRENT_API_VERSION}\"|version = \"${NEXT_API_VERSION}\"|" api/pyproject.toml
          sed -i "s|spectacular_settings.VERSION = \"${CURRENT_API_VERSION}\"|spectacular_settings.VERSION = \"${NEXT_API_VERSION}\"|" api/src/backend/api/v1/views.py
          sed -i "s| version: ${CURRENT_API_VERSION}| version: ${NEXT_API_VERSION}|" api/src/backend/api/specs/v1.yaml

          echo "Files modified:"
          git --no-pager diff

      - name: Create PR for next API minor version to master
        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
        with:
          author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
          base: master
          commit-message: 'chore(api): Bump version to v${{ env.NEXT_API_VERSION }}'
          branch: api-version-bump-to-v${{ env.NEXT_API_VERSION }}
          title: 'chore(api): Bump version to v${{ env.NEXT_API_VERSION }}'
          labels: no-changelog,skip-sync
          body: |
            ### Description

            Bump Prowler API version to v${{ env.NEXT_API_VERSION }} after releasing Prowler v${{ env.PROWLER_VERSION }}.

            ### License

            By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.

      - name: Checkout version branch
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          ref: v${{ needs.detect-release-type.outputs.major_version }}.${{ needs.detect-release-type.outputs.minor_version }}

      - name: Calculate first API patch version
        run: |
          MAJOR_VERSION=${{ needs.detect-release-type.outputs.major_version }}
          MINOR_VERSION=${{ needs.detect-release-type.outputs.minor_version }}
          CURRENT_API_VERSION="${{ needs.detect-release-type.outputs.current_api_version }}"
          VERSION_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}

          # API version follows Prowler minor + 1
          # For Prowler 5.17.0 release -> version branch v5.17 should have API 1.18.1
          FIRST_API_PATCH_VERSION=1.$((MINOR_VERSION + 1)).1

          echo "CURRENT_API_VERSION=${CURRENT_API_VERSION}" >> "${GITHUB_ENV}"
          echo "FIRST_API_PATCH_VERSION=${FIRST_API_PATCH_VERSION}" >> "${GITHUB_ENV}"
          echo "VERSION_BRANCH=${VERSION_BRANCH}" >> "${GITHUB_ENV}"

          echo "Prowler release version: ${MAJOR_VERSION}.${MINOR_VERSION}.0"
          echo "First API patch version (for ${VERSION_BRANCH}): $FIRST_API_PATCH_VERSION"
          echo "Version branch: $VERSION_BRANCH"

      - name: Bump API versions in files for version branch
        run: |
          set -e

          sed -i "s|version = \"${CURRENT_API_VERSION}\"|version = \"${FIRST_API_PATCH_VERSION}\"|" api/pyproject.toml
          sed -i "s|spectacular_settings.VERSION = \"${CURRENT_API_VERSION}\"|spectacular_settings.VERSION = \"${FIRST_API_PATCH_VERSION}\"|" api/src/backend/api/v1/views.py
          sed -i "s| version: ${CURRENT_API_VERSION}| version: ${FIRST_API_PATCH_VERSION}|" api/src/backend/api/specs/v1.yaml

          echo "Files modified:"
          git --no-pager diff

      - name: Create PR for first API patch version to version branch
        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
        with:
          author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
          base: ${{ env.VERSION_BRANCH }}
          commit-message: 'chore(api): Bump version to v${{ env.FIRST_API_PATCH_VERSION }}'
          branch: api-version-bump-to-v${{ env.FIRST_API_PATCH_VERSION }}
          title: 'chore(api): Bump version to v${{ env.FIRST_API_PATCH_VERSION }}'
          labels: no-changelog,skip-sync
          body: |
            ### Description

            Bump Prowler API version to v${{ env.FIRST_API_PATCH_VERSION }} in version branch after releasing Prowler v${{ env.PROWLER_VERSION }}.

            ### License

            By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.

  bump-patch-version:
    needs: detect-release-type
    if: needs.detect-release-type.outputs.is_patch == 'true'
    runs-on: ubuntu-latest
    timeout-minutes: 15
    permissions:
      contents: read
      pull-requests: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Calculate next API patch version
        run: |
          MAJOR_VERSION=${{ needs.detect-release-type.outputs.major_version }}
          MINOR_VERSION=${{ needs.detect-release-type.outputs.minor_version }}
          PATCH_VERSION=${{ needs.detect-release-type.outputs.patch_version }}
          CURRENT_API_VERSION="${{ needs.detect-release-type.outputs.current_api_version }}"
          VERSION_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}

          # Extract current API patch to increment it
          if [[ $CURRENT_API_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
            API_PATCH=${BASH_REMATCH[3]}

            # API version follows Prowler minor + 1
            # Keep same API minor (based on Prowler minor), increment patch
            NEXT_API_PATCH_VERSION=1.$((MINOR_VERSION + 1)).$((API_PATCH + 1))

            echo "CURRENT_API_VERSION=${CURRENT_API_VERSION}" >> "${GITHUB_ENV}"
            echo "NEXT_API_PATCH_VERSION=${NEXT_API_PATCH_VERSION}" >> "${GITHUB_ENV}"
            echo "VERSION_BRANCH=${VERSION_BRANCH}" >> "${GITHUB_ENV}"

            echo "Prowler release version: ${MAJOR_VERSION}.${MINOR_VERSION}.${PATCH_VERSION}"
            echo "Current API version: $CURRENT_API_VERSION"
            echo "Next API patch version: $NEXT_API_PATCH_VERSION"
            echo "Target branch: $VERSION_BRANCH"
          else
            echo "::error::Invalid API version format: $CURRENT_API_VERSION"
            exit 1
          fi

      - name: Bump API versions in files for version branch
        run: |
          set -e

          sed -i "s|version = \"${CURRENT_API_VERSION}\"|version = \"${NEXT_API_PATCH_VERSION}\"|" api/pyproject.toml
          sed -i "s|spectacular_settings.VERSION = \"${CURRENT_API_VERSION}\"|spectacular_settings.VERSION = \"${NEXT_API_PATCH_VERSION}\"|" api/src/backend/api/v1/views.py
          sed -i "s| version: ${CURRENT_API_VERSION}| version: ${NEXT_API_PATCH_VERSION}|" api/src/backend/api/specs/v1.yaml

          echo "Files modified:"
          git --no-pager diff

      - name: Create PR for next API patch version to version branch
        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
        with:
          author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
          base: ${{ env.VERSION_BRANCH }}
          commit-message: 'chore(api): Bump version to v${{ env.NEXT_API_PATCH_VERSION }}'
          branch: api-version-bump-to-v${{ env.NEXT_API_PATCH_VERSION }}
          title: 'chore(api): Bump version to v${{ env.NEXT_API_PATCH_VERSION }}'
          labels: no-changelog,skip-sync
          body: |
            ### Description

            Bump Prowler API version to v${{ env.NEXT_API_PATCH_VERSION }} after releasing Prowler v${{ env.PROWLER_VERSION }}.

            ### License

            By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
```
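To make the arithmetic in the two bump jobs concrete, here is a minimal sketch in plain bash; the release number 5.17.0 is hypothetical, and the mapping follows the workflow's own comments (API minor = Prowler minor + 1):

```bash
#!/usr/bin/env bash
# Sketch of the API version mapping in api-bump-version.yml (values hypothetical).
PROWLER_VERSION="5.17.0"

if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
  MINOR_VERSION=${BASH_REMATCH[2]}
  # Master must track the NEXT Prowler minor (5.18.0), so it skips ahead by 2.
  echo "master          -> API 1.$((MINOR_VERSION + 2)).0"   # 1.19.0
  # The new version branch carries the released API minor and starts its patch series.
  echo "v5.${MINOR_VERSION} branch   -> API 1.$((MINOR_VERSION + 1)).1"   # 1.18.1
fi
```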
.github/workflows/docs-bump-version.yml (new file, 247 lines, vendored)

```yaml
name: 'Docs: Bump Version'

on:
  release:
    types:
      - 'published'

concurrency:
  group: ${{ github.workflow }}-${{ github.event.release.tag_name }}
  cancel-in-progress: false

env:
  PROWLER_VERSION: ${{ github.event.release.tag_name }}
  BASE_BRANCH: master

jobs:
  detect-release-type:
    runs-on: ubuntu-latest
    timeout-minutes: 5
    permissions:
      contents: read
    outputs:
      is_minor: ${{ steps.detect.outputs.is_minor }}
      is_patch: ${{ steps.detect.outputs.is_patch }}
      major_version: ${{ steps.detect.outputs.major_version }}
      minor_version: ${{ steps.detect.outputs.minor_version }}
      patch_version: ${{ steps.detect.outputs.patch_version }}
      current_docs_version: ${{ steps.get_docs_version.outputs.current_docs_version }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Get current documentation version
        id: get_docs_version
        run: |
          CURRENT_DOCS_VERSION=$(grep -oP 'PROWLER_UI_VERSION="\K[^"]+' docs/getting-started/installation/prowler-app.mdx)
          echo "current_docs_version=${CURRENT_DOCS_VERSION}" >> "${GITHUB_OUTPUT}"
          echo "Current documentation version: $CURRENT_DOCS_VERSION"

      - name: Detect release type and parse version
        id: detect
        run: |
          if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
            MAJOR_VERSION=${BASH_REMATCH[1]}
            MINOR_VERSION=${BASH_REMATCH[2]}
            PATCH_VERSION=${BASH_REMATCH[3]}

            echo "major_version=${MAJOR_VERSION}" >> "${GITHUB_OUTPUT}"
            echo "minor_version=${MINOR_VERSION}" >> "${GITHUB_OUTPUT}"
            echo "patch_version=${PATCH_VERSION}" >> "${GITHUB_OUTPUT}"

            if (( MAJOR_VERSION != 5 )); then
              echo "::error::Releasing another Prowler major version, aborting..."
              exit 1
            fi

            if (( PATCH_VERSION == 0 )); then
              echo "is_minor=true" >> "${GITHUB_OUTPUT}"
              echo "is_patch=false" >> "${GITHUB_OUTPUT}"
              echo "✓ Minor release detected: $PROWLER_VERSION"
            else
              echo "is_minor=false" >> "${GITHUB_OUTPUT}"
              echo "is_patch=true" >> "${GITHUB_OUTPUT}"
              echo "✓ Patch release detected: $PROWLER_VERSION"
            fi
          else
            echo "::error::Invalid version syntax: '$PROWLER_VERSION' (must be X.Y.Z)"
            exit 1
          fi

  bump-minor-version:
    needs: detect-release-type
    if: needs.detect-release-type.outputs.is_minor == 'true'
    runs-on: ubuntu-latest
    timeout-minutes: 15
    permissions:
      contents: read
      pull-requests: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Calculate next minor version
        run: |
          MAJOR_VERSION=${{ needs.detect-release-type.outputs.major_version }}
          MINOR_VERSION=${{ needs.detect-release-type.outputs.minor_version }}
          CURRENT_DOCS_VERSION="${{ needs.detect-release-type.outputs.current_docs_version }}"

          NEXT_MINOR_VERSION=${MAJOR_VERSION}.$((MINOR_VERSION + 1)).0
          echo "CURRENT_DOCS_VERSION=${CURRENT_DOCS_VERSION}" >> "${GITHUB_ENV}"
          echo "NEXT_MINOR_VERSION=${NEXT_MINOR_VERSION}" >> "${GITHUB_ENV}"

          echo "Current documentation version: $CURRENT_DOCS_VERSION"
          echo "Current release version: $PROWLER_VERSION"
          echo "Next minor version: $NEXT_MINOR_VERSION"

      - name: Bump versions in documentation for master
        run: |
          set -e

          # Update prowler-app.mdx with current release version
          sed -i "s|PROWLER_UI_VERSION=\"${CURRENT_DOCS_VERSION}\"|PROWLER_UI_VERSION=\"${PROWLER_VERSION}\"|" docs/getting-started/installation/prowler-app.mdx
          sed -i "s|PROWLER_API_VERSION=\"${CURRENT_DOCS_VERSION}\"|PROWLER_API_VERSION=\"${PROWLER_VERSION}\"|" docs/getting-started/installation/prowler-app.mdx

          echo "Files modified:"
          git --no-pager diff

      - name: Create PR for documentation update to master
        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
        with:
          author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
          base: master
          commit-message: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
          branch: docs-version-update-to-v${{ env.PROWLER_VERSION }}
          title: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
          labels: no-changelog,skip-sync
          body: |
            ### Description

            Update Prowler documentation version references to v${{ env.PROWLER_VERSION }} after releasing Prowler v${{ env.PROWLER_VERSION }}.

            ### Files Updated
            - `docs/getting-started/installation/prowler-app.mdx`: `PROWLER_UI_VERSION` and `PROWLER_API_VERSION`
            - All `*.mdx` files with `<VersionBadge>` components

            ### License

            By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.

      - name: Checkout version branch
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          ref: v${{ needs.detect-release-type.outputs.major_version }}.${{ needs.detect-release-type.outputs.minor_version }}

      - name: Calculate first patch version
        run: |
          MAJOR_VERSION=${{ needs.detect-release-type.outputs.major_version }}
          MINOR_VERSION=${{ needs.detect-release-type.outputs.minor_version }}
          CURRENT_DOCS_VERSION="${{ needs.detect-release-type.outputs.current_docs_version }}"

          FIRST_PATCH_VERSION=${MAJOR_VERSION}.${MINOR_VERSION}.1
          VERSION_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}

          echo "CURRENT_DOCS_VERSION=${CURRENT_DOCS_VERSION}" >> "${GITHUB_ENV}"
          echo "FIRST_PATCH_VERSION=${FIRST_PATCH_VERSION}" >> "${GITHUB_ENV}"
          echo "VERSION_BRANCH=${VERSION_BRANCH}" >> "${GITHUB_ENV}"

          echo "First patch version: $FIRST_PATCH_VERSION"
          echo "Version branch: $VERSION_BRANCH"

      - name: Bump versions in documentation for version branch
        run: |
          set -e

          # Update prowler-app.mdx with current release version
          sed -i "s|PROWLER_UI_VERSION=\"${CURRENT_DOCS_VERSION}\"|PROWLER_UI_VERSION=\"${PROWLER_VERSION}\"|" docs/getting-started/installation/prowler-app.mdx
          sed -i "s|PROWLER_API_VERSION=\"${CURRENT_DOCS_VERSION}\"|PROWLER_API_VERSION=\"${PROWLER_VERSION}\"|" docs/getting-started/installation/prowler-app.mdx

          echo "Files modified:"
          git --no-pager diff

      - name: Create PR for documentation update to version branch
        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
        with:
          author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
          base: ${{ env.VERSION_BRANCH }}
          commit-message: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
          branch: docs-version-update-to-v${{ env.PROWLER_VERSION }}-branch
          title: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
          labels: no-changelog,skip-sync
          body: |
            ### Description

            Update Prowler documentation version references to v${{ env.PROWLER_VERSION }} in version branch after releasing Prowler v${{ env.PROWLER_VERSION }}.

            ### Files Updated
            - `docs/getting-started/installation/prowler-app.mdx`: `PROWLER_UI_VERSION` and `PROWLER_API_VERSION`

            ### License

            By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.

  bump-patch-version:
    needs: detect-release-type
    if: needs.detect-release-type.outputs.is_patch == 'true'
    runs-on: ubuntu-latest
    timeout-minutes: 15
    permissions:
      contents: read
      pull-requests: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Calculate next patch version
        run: |
          MAJOR_VERSION=${{ needs.detect-release-type.outputs.major_version }}
          MINOR_VERSION=${{ needs.detect-release-type.outputs.minor_version }}
          PATCH_VERSION=${{ needs.detect-release-type.outputs.patch_version }}
          CURRENT_DOCS_VERSION="${{ needs.detect-release-type.outputs.current_docs_version }}"

          NEXT_PATCH_VERSION=${MAJOR_VERSION}.${MINOR_VERSION}.$((PATCH_VERSION + 1))
          VERSION_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}

          echo "CURRENT_DOCS_VERSION=${CURRENT_DOCS_VERSION}" >> "${GITHUB_ENV}"
          echo "NEXT_PATCH_VERSION=${NEXT_PATCH_VERSION}" >> "${GITHUB_ENV}"
          echo "VERSION_BRANCH=${VERSION_BRANCH}" >> "${GITHUB_ENV}"

          echo "Current documentation version: $CURRENT_DOCS_VERSION"
          echo "Current release version: $PROWLER_VERSION"
          echo "Next patch version: $NEXT_PATCH_VERSION"
          echo "Target branch: $VERSION_BRANCH"

      - name: Bump versions in documentation for patch version
        run: |
          set -e

          # Update prowler-app.mdx with current release version
          sed -i "s|PROWLER_UI_VERSION=\"${CURRENT_DOCS_VERSION}\"|PROWLER_UI_VERSION=\"${PROWLER_VERSION}\"|" docs/getting-started/installation/prowler-app.mdx
          sed -i "s|PROWLER_API_VERSION=\"${CURRENT_DOCS_VERSION}\"|PROWLER_API_VERSION=\"${PROWLER_VERSION}\"|" docs/getting-started/installation/prowler-app.mdx

          echo "Files modified:"
          git --no-pager diff

      - name: Create PR for documentation update to version branch
        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
        with:
          author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
          base: ${{ env.VERSION_BRANCH }}
          commit-message: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
          branch: docs-version-update-to-v${{ env.PROWLER_VERSION }}
          title: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
          labels: no-changelog,skip-sync
          body: |
            ### Description

            Update Prowler documentation version references to v${{ env.PROWLER_VERSION }} after releasing Prowler v${{ env.PROWLER_VERSION }}.

            ### Files Updated
            - `docs/getting-started/installation/prowler-app.mdx`: `PROWLER_UI_VERSION` and `PROWLER_API_VERSION`

            ### License

            By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
```
.github/workflows/pr-merged.yml (5 changes, vendored)

```diff
@@ -13,7 +13,10 @@ concurrency:

 jobs:
   trigger-cloud-pull-request:
-    if: github.event.pull_request.merged == true && github.repository == 'prowler-cloud/prowler'
+    if: |
+      github.event.pull_request.merged == true &&
+      github.repository == 'prowler-cloud/prowler' &&
+      !contains(github.event.pull_request.labels.*.name, 'skip-sync')
     runs-on: ubuntu-latest
     timeout-minutes: 10
     permissions:
```
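The new guard relies on GitHub Actions' object filter syntax: `labels.*.name` maps the PR's label objects to their `name` fields, and `contains()` then performs an array membership test rather than substring matching. A minimal sketch of the same pattern on a hypothetical job (names are illustrative, not from this repository):

```yaml
# Hypothetical workflow fragment demonstrating the skip-sync label guard.
# labels.*.name turns [{name: "bug"}, {name: "skip-sync"}] into ["bug", "skip-sync"],
# so contains() checks array membership.
jobs:
  sync-example:
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'skip-sync') }}
    runs-on: ubuntu-latest
    steps:
      - run: echo "No skip-sync label; proceeding with the sync."
```

Since the version-bump PRs created by the new workflows all carry the `skip-sync` label, they are excluded from the cloud sync triggered on merge.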
.github/workflows/sdk-bump-version.yml (9 changes, vendored)

```diff
@@ -86,7 +86,6 @@ jobs:

           sed -i "s|version = \"${PROWLER_VERSION}\"|version = \"${NEXT_MINOR_VERSION}\"|" pyproject.toml
           sed -i "s|prowler_version = \"${PROWLER_VERSION}\"|prowler_version = \"${NEXT_MINOR_VERSION}\"|" prowler/config/config.py
-          sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${NEXT_MINOR_VERSION}|" .env

           echo "Files modified:"
           git --no-pager diff
@@ -100,7 +99,7 @@ jobs:
           commit-message: 'chore(release): Bump version to v${{ env.NEXT_MINOR_VERSION }}'
           branch: version-bump-to-v${{ env.NEXT_MINOR_VERSION }}
           title: 'chore(release): Bump version to v${{ env.NEXT_MINOR_VERSION }}'
-          labels: no-changelog
+          labels: no-changelog,skip-sync
           body: |
             ### Description

@@ -135,7 +134,6 @@ jobs:

           sed -i "s|version = \"${PROWLER_VERSION}\"|version = \"${FIRST_PATCH_VERSION}\"|" pyproject.toml
           sed -i "s|prowler_version = \"${PROWLER_VERSION}\"|prowler_version = \"${FIRST_PATCH_VERSION}\"|" prowler/config/config.py
-          sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${FIRST_PATCH_VERSION}|" .env

           echo "Files modified:"
           git --no-pager diff
@@ -149,7 +147,7 @@ jobs:
           commit-message: 'chore(release): Bump version to v${{ env.FIRST_PATCH_VERSION }}'
           branch: version-bump-to-v${{ env.FIRST_PATCH_VERSION }}
           title: 'chore(release): Bump version to v${{ env.FIRST_PATCH_VERSION }}'
-          labels: no-changelog
+          labels: no-changelog,skip-sync
           body: |
             ### Description

@@ -193,7 +191,6 @@ jobs:

           sed -i "s|version = \"${PROWLER_VERSION}\"|version = \"${NEXT_PATCH_VERSION}\"|" pyproject.toml
           sed -i "s|prowler_version = \"${PROWLER_VERSION}\"|prowler_version = \"${NEXT_PATCH_VERSION}\"|" prowler/config/config.py
-          sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${NEXT_PATCH_VERSION}|" .env

           echo "Files modified:"
           git --no-pager diff
@@ -207,7 +204,7 @@ jobs:
           commit-message: 'chore(release): Bump version to v${{ env.NEXT_PATCH_VERSION }}'
           branch: version-bump-to-v${{ env.NEXT_PATCH_VERSION }}
           title: 'chore(release): Bump version to v${{ env.NEXT_PATCH_VERSION }}'
-          labels: no-changelog
+          labels: no-changelog,skip-sync
           body: |
             ### Description

```
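All of these bump steps are exact-string sed substitutions, using `|` as the delimiter so the quotes and dots in the pattern need no escaping. A quick way to sanity-check one locally with GNU sed (file path and version values hypothetical):

```bash
# Hypothetical local check of the substitution pattern used by the bump workflows.
printf 'version = "5.16.2"\n' > /tmp/pyproject-snippet.toml
PROWLER_VERSION="5.16.2"
NEXT_MINOR_VERSION="5.17.0"
sed -i "s|version = \"${PROWLER_VERSION}\"|version = \"${NEXT_MINOR_VERSION}\"|" /tmp/pyproject-snippet.toml
cat /tmp/pyproject-snippet.toml   # -> version = "5.17.0"
```

If the file does not contain the exact current version string, the substitution is a silent no-op, which is likely why each workflow reads the current version first and prints `git --no-pager diff` afterwards.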
.github/workflows/ui-bump-version.yml (new file, 221 lines, vendored)

```yaml
name: 'UI: Bump Version'

on:
  release:
    types:
      - 'published'

concurrency:
  group: ${{ github.workflow }}-${{ github.event.release.tag_name }}
  cancel-in-progress: false

env:
  PROWLER_VERSION: ${{ github.event.release.tag_name }}
  BASE_BRANCH: master

jobs:
  detect-release-type:
    runs-on: ubuntu-latest
    timeout-minutes: 5
    permissions:
      contents: read
    outputs:
      is_minor: ${{ steps.detect.outputs.is_minor }}
      is_patch: ${{ steps.detect.outputs.is_patch }}
      major_version: ${{ steps.detect.outputs.major_version }}
      minor_version: ${{ steps.detect.outputs.minor_version }}
      patch_version: ${{ steps.detect.outputs.patch_version }}
    steps:
      - name: Detect release type and parse version
        id: detect
        run: |
          if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
            MAJOR_VERSION=${BASH_REMATCH[1]}
            MINOR_VERSION=${BASH_REMATCH[2]}
            PATCH_VERSION=${BASH_REMATCH[3]}

            echo "major_version=${MAJOR_VERSION}" >> "${GITHUB_OUTPUT}"
            echo "minor_version=${MINOR_VERSION}" >> "${GITHUB_OUTPUT}"
            echo "patch_version=${PATCH_VERSION}" >> "${GITHUB_OUTPUT}"

            if (( MAJOR_VERSION != 5 )); then
              echo "::error::Releasing another Prowler major version, aborting..."
              exit 1
            fi

            if (( PATCH_VERSION == 0 )); then
              echo "is_minor=true" >> "${GITHUB_OUTPUT}"
              echo "is_patch=false" >> "${GITHUB_OUTPUT}"
              echo "✓ Minor release detected: $PROWLER_VERSION"
            else
              echo "is_minor=false" >> "${GITHUB_OUTPUT}"
              echo "is_patch=true" >> "${GITHUB_OUTPUT}"
              echo "✓ Patch release detected: $PROWLER_VERSION"
            fi
          else
            echo "::error::Invalid version syntax: '$PROWLER_VERSION' (must be X.Y.Z)"
            exit 1
          fi

  bump-minor-version:
    needs: detect-release-type
    if: needs.detect-release-type.outputs.is_minor == 'true'
    runs-on: ubuntu-latest
    timeout-minutes: 15
    permissions:
      contents: read
      pull-requests: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Calculate next minor version
        run: |
          MAJOR_VERSION=${{ needs.detect-release-type.outputs.major_version }}
          MINOR_VERSION=${{ needs.detect-release-type.outputs.minor_version }}

          NEXT_MINOR_VERSION=${MAJOR_VERSION}.$((MINOR_VERSION + 1)).0
          echo "NEXT_MINOR_VERSION=${NEXT_MINOR_VERSION}" >> "${GITHUB_ENV}"

          echo "Current version: $PROWLER_VERSION"
          echo "Next minor version: $NEXT_MINOR_VERSION"

      - name: Bump UI version in .env for master
        run: |
          set -e

          sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${NEXT_MINOR_VERSION}|" .env

          echo "Files modified:"
          git --no-pager diff

      - name: Create PR for next minor version to master
        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
        with:
          author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
          base: master
          commit-message: 'chore(ui): Bump version to v${{ env.NEXT_MINOR_VERSION }}'
          branch: ui-version-bump-to-v${{ env.NEXT_MINOR_VERSION }}
          title: 'chore(ui): Bump version to v${{ env.NEXT_MINOR_VERSION }}'
          labels: no-changelog,skip-sync
          body: |
            ### Description

            Bump Prowler UI version to v${{ env.NEXT_MINOR_VERSION }} after releasing Prowler v${{ env.PROWLER_VERSION }}.

            ### Files Updated
            - `.env`: `NEXT_PUBLIC_PROWLER_RELEASE_VERSION`

            ### License

            By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.

      - name: Checkout version branch
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          ref: v${{ needs.detect-release-type.outputs.major_version }}.${{ needs.detect-release-type.outputs.minor_version }}

      - name: Calculate first patch version
        run: |
          MAJOR_VERSION=${{ needs.detect-release-type.outputs.major_version }}
          MINOR_VERSION=${{ needs.detect-release-type.outputs.minor_version }}

          FIRST_PATCH_VERSION=${MAJOR_VERSION}.${MINOR_VERSION}.1
          VERSION_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}

          echo "FIRST_PATCH_VERSION=${FIRST_PATCH_VERSION}" >> "${GITHUB_ENV}"
          echo "VERSION_BRANCH=${VERSION_BRANCH}" >> "${GITHUB_ENV}"

          echo "First patch version: $FIRST_PATCH_VERSION"
          echo "Version branch: $VERSION_BRANCH"

      - name: Bump UI version in .env for version branch
        run: |
          set -e

          sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${FIRST_PATCH_VERSION}|" .env

          echo "Files modified:"
          git --no-pager diff

      - name: Create PR for first patch version to version branch
        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
        with:
          author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
          base: ${{ env.VERSION_BRANCH }}
          commit-message: 'chore(ui): Bump version to v${{ env.FIRST_PATCH_VERSION }}'
          branch: ui-version-bump-to-v${{ env.FIRST_PATCH_VERSION }}
          title: 'chore(ui): Bump version to v${{ env.FIRST_PATCH_VERSION }}'
          labels: no-changelog,skip-sync
          body: |
            ### Description

            Bump Prowler UI version to v${{ env.FIRST_PATCH_VERSION }} in version branch after releasing Prowler v${{ env.PROWLER_VERSION }}.

            ### Files Updated
            - `.env`: `NEXT_PUBLIC_PROWLER_RELEASE_VERSION`

            ### License

            By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.

  bump-patch-version:
    needs: detect-release-type
    if: needs.detect-release-type.outputs.is_patch == 'true'
    runs-on: ubuntu-latest
    timeout-minutes: 15
    permissions:
      contents: read
      pull-requests: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Calculate next patch version
        run: |
          MAJOR_VERSION=${{ needs.detect-release-type.outputs.major_version }}
          MINOR_VERSION=${{ needs.detect-release-type.outputs.minor_version }}
          PATCH_VERSION=${{ needs.detect-release-type.outputs.patch_version }}

          NEXT_PATCH_VERSION=${MAJOR_VERSION}.${MINOR_VERSION}.$((PATCH_VERSION + 1))
          VERSION_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}

          echo "NEXT_PATCH_VERSION=${NEXT_PATCH_VERSION}" >> "${GITHUB_ENV}"
          echo "VERSION_BRANCH=${VERSION_BRANCH}" >> "${GITHUB_ENV}"

          echo "Current version: $PROWLER_VERSION"
          echo "Next patch version: $NEXT_PATCH_VERSION"
          echo "Target branch: $VERSION_BRANCH"

      - name: Bump UI version in .env for version branch
        run: |
          set -e

          sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${NEXT_PATCH_VERSION}|" .env

          echo "Files modified:"
          git --no-pager diff

      - name: Create PR for next patch version to version branch
        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
        with:
          author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
          base: ${{ env.VERSION_BRANCH }}
          commit-message: 'chore(ui): Bump version to v${{ env.NEXT_PATCH_VERSION }}'
          branch: ui-version-bump-to-v${{ env.NEXT_PATCH_VERSION }}
          title: 'chore(ui): Bump version to v${{ env.NEXT_PATCH_VERSION }}'
          labels: no-changelog,skip-sync
          body: |
            ### Description

            Bump Prowler UI version to v${{ env.NEXT_PATCH_VERSION }} after releasing Prowler v${{ env.PROWLER_VERSION }}.

            ### Files Updated
            - `.env`: `NEXT_PUBLIC_PROWLER_RELEASE_VERSION`

            ### License

            By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
```
```diff
@@ -2,6 +2,23 @@

 All notable changes to the **Prowler API** are documented in this file.

+## [1.17.2] (Prowler v5.16.2)
+
+### Security
+- Updated dependencies to patch security vulnerabilities: Django 5.1.15 (CVE-2025-64460, CVE-2025-13372), Werkzeug 3.1.4 (CVE-2025-66221), sqlparse 0.5.5 (PVE-2025-82038), fonttools 4.60.2 (CVE-2025-66034) [(#9730)](https://github.com/prowler-cloud/prowler/pull/9730)
+
+---
+
+## [1.17.1] (Prowler v5.16.1)
+
+### Changed
+- Security Hub integration error when no regions [(#9635)](https://github.com/prowler-cloud/prowler/pull/9635)
+
+### Fixed
+- Orphan scheduled scans caused by transaction isolation during provider creation [(#9633)](https://github.com/prowler-cloud/prowler/pull/9633)
+
+---
+
 ## [1.17.0] (Prowler v5.16.0)

 ### Added
```
api/poetry.lock (930 changes, generated): file diff suppressed because it is too large.
api/pyproject.toml

```diff
@@ -7,7 +7,7 @@ authors = [{name = "Prowler Engineering", email = "engineering@prowler.com"}]
 dependencies = [
     "celery[pytest] (>=5.4.0,<6.0.0)",
     "dj-rest-auth[with_social,jwt] (==7.0.1)",
-    "django (==5.1.14)",
+    "django (==5.1.15)",
     "django-allauth[saml] (>=65.8.0,<66.0.0)",
     "django-celery-beat (>=2.7.0,<3.0.0)",
     "django-celery-results (>=2.5.1,<3.0.0)",
@@ -24,7 +24,7 @@ dependencies = [
     "drf-spectacular-jsonapi==0.5.1",
     "gunicorn==23.0.0",
     "lxml==5.3.2",
-    "prowler @ git+https://github.com/prowler-cloud/prowler.git@master",
+    "prowler @ git+https://github.com/prowler-cloud/prowler.git@v5.16",
     "psycopg2-binary==2.9.9",
     "pytest-celery[redis] (>=1.0.1,<2.0.0)",
     "sentry-sdk[django] (>=2.20.0,<3.0.0)",
@@ -36,7 +36,10 @@ dependencies = [
     "drf-simple-apikey (==2.2.1)",
     "matplotlib (>=3.10.6,<4.0.0)",
     "reportlab (>=4.4.4,<5.0.0)",
-    "gevent (>=25.9.1,<26.0.0)"
+    "gevent (>=25.9.1,<26.0.0)",
+    "werkzeug (>=3.1.4)",
+    "sqlparse (>=0.5.4)",
+    "fonttools (>=4.60.2)"
 ]
 description = "Prowler's API (Django/DRF)"
 license = "Apache-2.0"
@@ -44,7 +47,7 @@ name = "prowler-api"
 package-mode = false
 # Needed for the SDK compatibility
 requires-python = ">=3.11,<3.13"
-version = "1.16.0"
+version = "1.17.2"

 [project.scripts]
 celery = "src.backend.config.settings.celery"
```
api/src/backend/api/specs/v1.yaml

```diff
@@ -1,7 +1,7 @@
 openapi: 3.0.3
 info:
   title: Prowler API
-  version: 1.17.0
+  version: 1.17.2
   description: |-
     Prowler API specification.

```
api/src/backend/api/v1/views.py

```diff
@@ -359,7 +359,7 @@ class SchemaView(SpectacularAPIView):

     def get(self, request, *args, **kwargs):
         spectacular_settings.TITLE = "Prowler API"
-        spectacular_settings.VERSION = "1.17.0"
+        spectacular_settings.VERSION = "1.17.2"
         spectacular_settings.DESCRIPTION = (
             "Prowler API specification.\n\nThis file is auto-generated."
         )
```
tasks/jobs/integrations.py

```diff
@@ -19,6 +19,9 @@ from prowler.providers.aws.aws_provider import AwsProvider
 from prowler.providers.aws.lib.s3.s3 import S3
 from prowler.providers.aws.lib.security_hub.security_hub import SecurityHub
 from prowler.providers.common.models import Connection
+from prowler.providers.aws.lib.security_hub.exceptions.exceptions import (
+    SecurityHubNoEnabledRegionsError,
+)

 logger = get_task_logger(__name__)

@@ -222,8 +225,9 @@ def get_security_hub_client_from_integration(
         )
         return True, security_hub
     else:
-        # Reset regions information if connection fails
+        # Reset regions information if connection fails and integration is not connected
        with rls_transaction(tenant_id, using=MainRouter.default_db):
+            integration.connected = False
             integration.configuration["regions"] = {}
             integration.save()

@@ -330,15 +334,18 @@ def upload_security_hub_integration(
                 )

                 if not connected:
-                    logger.error(
-                        f"Security Hub connection failed for integration {integration.id}: "
-                        f"{security_hub.error}"
-                    )
-                    with rls_transaction(
-                        tenant_id, using=MainRouter.default_db
+                    if isinstance(
+                        security_hub.error,
+                        SecurityHubNoEnabledRegionsError,
                     ):
                         integration.connected = False
                         integration.save()
+                        logger.warning(
+                            f"Security Hub integration {integration.id} has no enabled regions"
+                        )
+                    else:
+                        logger.error(
+                            f"Security Hub connection failed for integration {integration.id}: "
+                            f"{security_hub.error}"
+                        )
                     break  # Skip this integration

                 security_hub_client = security_hub

@@ -409,22 +416,16 @@
                     logger.warning(
                         f"Failed to archive previous findings: {str(archive_error)}"
                     )

         except Exception as e:
             logger.error(
                 f"Security Hub integration {integration.id} failed: {str(e)}"
             )
             continue

     result = integration_executions == len(integrations)
     if result:
         logger.info(
             f"All Security Hub integrations completed successfully for provider {provider_id}"
         )
     else:
         logger.error(
             f"Some Security Hub integrations failed for provider {provider_id}"
         )

     return result
```
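In isolation, the behavioral change above: a failed connection caused by Security Hub having no enabled regions is now treated as an expected state (the integration is marked disconnected and a warning is logged), while any other failure is still an error. A standalone sketch of that control flow, with stand-in types since the real models and exceptions live in the Prowler codebase:

```python
import logging

logger = logging.getLogger(__name__)


class SecurityHubNoEnabledRegionsError(Exception):
    """Stand-in for Prowler's SecurityHubNoEnabledRegionsError."""


def handle_failed_connection(integration, error):
    """Sketch of the branching added above; `integration` is a duck-typed stand-in."""
    if isinstance(error, SecurityHubNoEnabledRegionsError):
        # Expected condition: quietly mark the integration as disconnected.
        integration.connected = False
        integration.save()
        logger.warning(
            f"Security Hub integration {integration.id} has no enabled regions"
        )
    else:
        # Any other failure is a genuine connection error.
        logger.error(
            f"Security Hub connection failed for integration {integration.id}: {error}"
        )
```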
tasks/tasks.py

```diff
@@ -61,6 +61,58 @@ from prowler.lib.outputs.finding import Finding as FindingOutput
 logger = get_task_logger(__name__)


+def _cleanup_orphan_scheduled_scans(
+    tenant_id: str,
+    provider_id: str,
+    scheduler_task_id: int,
+) -> int:
+    """
+    TEMPORARY WORKAROUND: Clean up orphan AVAILABLE scans.
+
+    Detects and removes AVAILABLE scans that were never used due to an
+    issue during the first scheduled scan setup.
+
+    An AVAILABLE scan is considered orphan if there's also a SCHEDULED scan for
+    the same provider with the same scheduler_task_id. This situation indicates
+    that the first scan execution didn't find the AVAILABLE scan (because it
+    wasn't committed yet, probably) and created a new one, leaving the AVAILABLE orphaned.
+
+    Args:
+        tenant_id: The tenant ID.
+        provider_id: The provider ID.
+        scheduler_task_id: The PeriodicTask ID that triggers these scans.
+
+    Returns:
+        Number of orphan scans deleted (0 if none found).
+    """
+    orphan_available_scans = Scan.objects.filter(
+        tenant_id=tenant_id,
+        provider_id=provider_id,
+        trigger=Scan.TriggerChoices.SCHEDULED,
+        state=StateChoices.AVAILABLE,
+        scheduler_task_id=scheduler_task_id,
+    )
+
+    scheduled_scan_exists = Scan.objects.filter(
+        tenant_id=tenant_id,
+        provider_id=provider_id,
+        trigger=Scan.TriggerChoices.SCHEDULED,
+        state=StateChoices.SCHEDULED,
+        scheduler_task_id=scheduler_task_id,
+    ).exists()
+
+    if scheduled_scan_exists and orphan_available_scans.exists():
+        orphan_count = orphan_available_scans.count()
+        logger.warning(
+            f"[WORKAROUND] Found {orphan_count} orphan AVAILABLE scan(s) for "
+            f"provider {provider_id} alongside a SCHEDULED scan. Cleaning up orphans..."
+        )
+        orphan_available_scans.delete()
+        return orphan_count
+
+    return 0
+
+
 def _perform_scan_complete_tasks(tenant_id: str, scan_id: str, provider_id: str):
     """
     Helper function to perform tasks after a scan is completed.
@@ -247,6 +299,14 @@ def perform_scheduled_scan_task(self, tenant_id: str, provider_id: str):
             return serializer.data

         next_scan_datetime = get_next_execution_datetime(task_id, provider_id)
+
+        # TEMPORARY WORKAROUND: Clean up orphan scans from transaction isolation issue
+        _cleanup_orphan_scheduled_scans(
+            tenant_id=tenant_id,
+            provider_id=provider_id,
+            scheduler_task_id=periodic_task_instance.id,
+        )
+
         scan_instance, _ = Scan.objects.get_or_create(
             tenant_id=tenant_id,
             provider_id=provider_id,
```
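The docstring above describes a transaction-isolation race: the AVAILABLE scan created during provider setup is not yet committed when the first scheduled execution runs, so the task creates its own scan and the original row is stranded. The orphan predicate reduces to a plain membership check, sketched here without Django (ScanRow is a stand-in for the real Scan model):

```python
from dataclasses import dataclass


@dataclass
class ScanRow:
    """Plain stand-in for a Scan row; the real model lives in api.models."""

    provider_id: str
    trigger: str  # "SCHEDULED" or "MANUAL"
    state: str  # "AVAILABLE", "SCHEDULED", ...
    scheduler_task_id: int


def is_orphan(scan: ScanRow, all_scans: list[ScanRow]) -> bool:
    # An AVAILABLE scheduled scan counts as orphaned only when a SCHEDULED scan
    # for the same provider and the same PeriodicTask exists alongside it.
    return (
        scan.trigger == "SCHEDULED"
        and scan.state == "AVAILABLE"
        and any(
            other.trigger == "SCHEDULED"
            and other.state == "SCHEDULED"
            and other.provider_id == scan.provider_id
            and other.scheduler_task_id == scan.scheduler_task_id
            for other in all_scans
        )
    )


rows = [
    ScanRow("p1", "SCHEDULED", "AVAILABLE", 42),  # stranded at provider creation
    ScanRow("p1", "SCHEDULED", "SCHEDULED", 42),  # created by the first execution
]
assert is_orphan(rows[0], rows) and not is_orphan(rows[1], rows)
```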
```diff
@@ -1199,9 +1199,6 @@ class TestSecurityHubIntegrationUploads:
         )

         assert result is False
-        # Integration should be marked as disconnected
-        integration.save.assert_called_once()
-        assert integration.connected is False

     @patch("tasks.jobs.integrations.ASFF")
     @patch("tasks.jobs.integrations.FindingOutput")
```
@@ -4,11 +4,13 @@ from unittest.mock import MagicMock, patch
|
||||
import openai
|
||||
import pytest
|
||||
from botocore.exceptions import ClientError
|
||||
from django_celery_beat.models import IntervalSchedule, PeriodicTask
|
||||
from tasks.jobs.lighthouse_providers import (
|
||||
_create_bedrock_client,
|
||||
_extract_bedrock_credentials,
|
||||
)
|
||||
from tasks.tasks import (
|
||||
_cleanup_orphan_scheduled_scans,
|
||||
_perform_scan_complete_tasks,
|
||||
check_integrations_task,
|
||||
check_lighthouse_provider_connection_task,
|
||||
@@ -22,6 +24,8 @@ from api.models import (
|
||||
Integration,
|
||||
LighthouseProviderConfiguration,
|
||||
LighthouseProviderModels,
|
||||
Scan,
|
||||
StateChoices,
|
||||
)
|
||||
|
||||
|
||||
@@ -1715,3 +1719,343 @@ class TestRefreshLighthouseProviderModelsTask:
|
||||
assert result["deleted"] == 0
|
||||
assert "error" in result
|
||||
assert result["error"] is not None
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestCleanupOrphanScheduledScans:
|
||||
"""Unit tests for _cleanup_orphan_scheduled_scans helper function."""
|
||||
|
||||
def _create_periodic_task(self, provider_id, tenant_id):
|
||||
"""Helper to create a PeriodicTask for testing."""
|
||||
interval, _ = IntervalSchedule.objects.get_or_create(every=24, period="hours")
|
||||
return PeriodicTask.objects.create(
|
||||
name=f"scan-perform-scheduled-{provider_id}",
|
||||
task="scan-perform-scheduled",
|
||||
interval=interval,
|
||||
kwargs=f'{{"tenant_id": "{tenant_id}", "provider_id": "{provider_id}"}}',
|
||||
enabled=True,
|
||||
)
|
||||
|
||||
def test_cleanup_deletes_orphan_when_both_available_and_scheduled_exist(
|
||||
self, tenants_fixture, providers_fixture
|
||||
):
|
||||
"""Test that AVAILABLE scan is deleted when SCHEDULED also exists."""
|
||||
tenant = tenants_fixture[0]
|
||||
provider = providers_fixture[0]
|
||||
periodic_task = self._create_periodic_task(provider.id, tenant.id)
|
||||
|
||||
# Create orphan AVAILABLE scan
|
||||
orphan_scan = Scan.objects.create(
|
||||
tenant_id=tenant.id,
|
||||
provider=provider,
|
||||
name="Daily scheduled scan",
|
||||
trigger=Scan.TriggerChoices.SCHEDULED,
|
||||
state=StateChoices.AVAILABLE,
|
||||
scheduler_task_id=periodic_task.id,
|
||||
)
|
||||
|
||||
# Create SCHEDULED scan (next execution)
|
||||
scheduled_scan = Scan.objects.create(
|
||||
tenant_id=tenant.id,
|
||||
provider=provider,
|
||||
name="Daily scheduled scan",
|
||||
trigger=Scan.TriggerChoices.SCHEDULED,
|
||||
state=StateChoices.SCHEDULED,
|
||||
scheduler_task_id=periodic_task.id,
|
||||
)
|
||||
|
||||
# Execute cleanup
|
||||
deleted_count = _cleanup_orphan_scheduled_scans(
|
||||
tenant_id=str(tenant.id),
|
||||
provider_id=str(provider.id),
|
||||
scheduler_task_id=periodic_task.id,
|
||||
)
|
||||
|
||||
# Verify orphan was deleted
|
||||
assert deleted_count == 1
|
||||
assert not Scan.objects.filter(id=orphan_scan.id).exists()
|
||||
assert Scan.objects.filter(id=scheduled_scan.id).exists()
|
||||
|
||||
def test_cleanup_does_not_delete_when_only_available_exists(
|
||||
self, tenants_fixture, providers_fixture
|
||||
):
|
||||
"""Test that AVAILABLE scan is NOT deleted when no SCHEDULED exists."""
|
||||
tenant = tenants_fixture[0]
|
||||
provider = providers_fixture[0]
|
||||
periodic_task = self._create_periodic_task(provider.id, tenant.id)
|
||||
|
||||
# Create only AVAILABLE scan (normal first scan scenario)
|
||||
available_scan = Scan.objects.create(
|
||||
tenant_id=tenant.id,
|
||||
provider=provider,
|
||||
name="Daily scheduled scan",
|
||||
trigger=Scan.TriggerChoices.SCHEDULED,
|
||||
state=StateChoices.AVAILABLE,
|
||||
scheduler_task_id=periodic_task.id,
|
||||
)
|
||||
|
||||
# Execute cleanup
|
||||
deleted_count = _cleanup_orphan_scheduled_scans(
|
||||
tenant_id=str(tenant.id),
|
||||
provider_id=str(provider.id),
|
||||
scheduler_task_id=periodic_task.id,
|
||||
)
|
||||
|
||||
# Verify nothing was deleted
|
||||
assert deleted_count == 0
|
||||
assert Scan.objects.filter(id=available_scan.id).exists()
|
||||
|
||||
def test_cleanup_does_not_delete_when_only_scheduled_exists(
|
||||
self, tenants_fixture, providers_fixture
|
||||
):
|
||||
"""Test that nothing is deleted when only SCHEDULED exists."""
|
||||
tenant = tenants_fixture[0]
|
||||
provider = providers_fixture[0]
|
||||
periodic_task = self._create_periodic_task(provider.id, tenant.id)
|
||||
|
||||
# Create only SCHEDULED scan (normal subsequent scan scenario)
|
||||
scheduled_scan = Scan.objects.create(
|
||||
tenant_id=tenant.id,
|
||||
provider=provider,
|
||||
name="Daily scheduled scan",
|
||||
trigger=Scan.TriggerChoices.SCHEDULED,
|
||||
state=StateChoices.SCHEDULED,
|
||||
scheduler_task_id=periodic_task.id,
|
||||
)
|
||||
|
||||
# Execute cleanup
|
||||
deleted_count = _cleanup_orphan_scheduled_scans(
|
||||
tenant_id=str(tenant.id),
|
||||
provider_id=str(provider.id),
|
||||
scheduler_task_id=periodic_task.id,
|
||||
)
|
||||
|
||||
# Verify nothing was deleted
|
||||
assert deleted_count == 0
|
||||
assert Scan.objects.filter(id=scheduled_scan.id).exists()
|
||||
|
||||
def test_cleanup_returns_zero_when_no_scans_exist(
|
||||
self, tenants_fixture, providers_fixture
|
||||
):
|
||||
"""Test that cleanup returns 0 when no scans exist."""
|
||||
tenant = tenants_fixture[0]
|
||||
provider = providers_fixture[0]
|
||||
periodic_task = self._create_periodic_task(provider.id, tenant.id)
|
||||
|
||||
# Execute cleanup with no scans
|
||||
deleted_count = _cleanup_orphan_scheduled_scans(
|
||||
tenant_id=str(tenant.id),
|
||||
provider_id=str(provider.id),
|
||||
scheduler_task_id=periodic_task.id,
|
||||
)
|
||||
|
||||
assert deleted_count == 0
|
||||
|
||||
def test_cleanup_deletes_multiple_orphan_available_scans(
|
||||
self, tenants_fixture, providers_fixture
|
||||
):
|
||||
"""Test that multiple AVAILABLE orphan scans are all deleted."""
|
||||
tenant = tenants_fixture[0]
|
||||
provider = providers_fixture[0]
|
||||
periodic_task = self._create_periodic_task(provider.id, tenant.id)
|
||||
|
||||
# Create multiple orphan AVAILABLE scans
|
||||
orphan_scan_1 = Scan.objects.create(
|
||||
tenant_id=tenant.id,
|
||||
provider=provider,
|
||||
name="Daily scheduled scan",
|
||||
trigger=Scan.TriggerChoices.SCHEDULED,
|
||||
state=StateChoices.AVAILABLE,
|
||||
scheduler_task_id=periodic_task.id,
|
||||
)
|
||||
orphan_scan_2 = Scan.objects.create(
|
||||
tenant_id=tenant.id,
|
||||
provider=provider,
|
||||
name="Daily scheduled scan",
|
||||
trigger=Scan.TriggerChoices.SCHEDULED,
|
||||
state=StateChoices.AVAILABLE,
|
||||
scheduler_task_id=periodic_task.id,
|
||||
)
|
||||
|
||||
# Create SCHEDULED scan
|
||||
scheduled_scan = Scan.objects.create(
|
||||
tenant_id=tenant.id,
|
||||
provider=provider,
|
||||
name="Daily scheduled scan",
|
||||
trigger=Scan.TriggerChoices.SCHEDULED,
|
||||
state=StateChoices.SCHEDULED,
|
||||
scheduler_task_id=periodic_task.id,
|
||||
)
|
||||
|
||||
# Execute cleanup
|
||||
deleted_count = _cleanup_orphan_scheduled_scans(
|
||||
tenant_id=str(tenant.id),
|
||||
provider_id=str(provider.id),
|
||||
scheduler_task_id=periodic_task.id,
|
||||
)
|
||||
|
||||
# Verify all orphans were deleted
|
||||
assert deleted_count == 2
|
||||
assert not Scan.objects.filter(id=orphan_scan_1.id).exists()
|
||||
assert not Scan.objects.filter(id=orphan_scan_2.id).exists()
|
||||
assert Scan.objects.filter(id=scheduled_scan.id).exists()
|
||||
|
||||
def test_cleanup_does_not_affect_different_provider(
|
||||
self, tenants_fixture, providers_fixture
|
||||
):
|
||||
"""Test that cleanup only affects scans for the specified provider."""
|
||||
tenant = tenants_fixture[0]
|
||||
provider1 = providers_fixture[0]
|
||||
provider2 = providers_fixture[1]
|
||||
periodic_task1 = self._create_periodic_task(provider1.id, tenant.id)
|
||||
periodic_task2 = self._create_periodic_task(provider2.id, tenant.id)
|
||||
|
||||
# Create orphan scenario for provider1
|
||||
orphan_scan_p1 = Scan.objects.create(
|
||||
tenant_id=tenant.id,
|
||||
provider=provider1,
|
||||
name="Daily scheduled scan",
|
||||
trigger=Scan.TriggerChoices.SCHEDULED,
|
||||
state=StateChoices.AVAILABLE,
|
||||
scheduler_task_id=periodic_task1.id,
|
||||
)
|
||||
scheduled_scan_p1 = Scan.objects.create(
|
||||
tenant_id=tenant.id,
|
||||
provider=provider1,
|
||||
name="Daily scheduled scan",
|
||||
trigger=Scan.TriggerChoices.SCHEDULED,
|
||||
state=StateChoices.SCHEDULED,
|
||||
scheduler_task_id=periodic_task1.id,
|
||||
)
|
||||
|
||||
# Create AVAILABLE scan for provider2 (should not be affected)
|
||||
available_scan_p2 = Scan.objects.create(
|
||||
tenant_id=tenant.id,
|
||||
provider=provider2,
|
||||
name="Daily scheduled scan",
|
||||
trigger=Scan.TriggerChoices.SCHEDULED,
|
||||
state=StateChoices.AVAILABLE,
|
||||
scheduler_task_id=periodic_task2.id,
|
||||
)
|
||||
|
||||
        # Execute cleanup for provider1 only
        deleted_count = _cleanup_orphan_scheduled_scans(
            tenant_id=str(tenant.id),
            provider_id=str(provider1.id),
            scheduler_task_id=periodic_task1.id,
        )

        # Verify only provider1's orphan was deleted
        assert deleted_count == 1
        assert not Scan.objects.filter(id=orphan_scan_p1.id).exists()
        assert Scan.objects.filter(id=scheduled_scan_p1.id).exists()
        assert Scan.objects.filter(id=available_scan_p2.id).exists()

    def test_cleanup_does_not_affect_manual_scans(
        self, tenants_fixture, providers_fixture
    ):
        """Test that cleanup only affects SCHEDULED trigger scans, not MANUAL."""
        tenant = tenants_fixture[0]
        provider = providers_fixture[0]
        periodic_task = self._create_periodic_task(provider.id, tenant.id)

        # Create orphan AVAILABLE scheduled scan
        orphan_scan = Scan.objects.create(
            tenant_id=tenant.id,
            provider=provider,
            name="Daily scheduled scan",
            trigger=Scan.TriggerChoices.SCHEDULED,
            state=StateChoices.AVAILABLE,
            scheduler_task_id=periodic_task.id,
        )

        # Create SCHEDULED scan
        scheduled_scan = Scan.objects.create(
            tenant_id=tenant.id,
            provider=provider,
            name="Daily scheduled scan",
            trigger=Scan.TriggerChoices.SCHEDULED,
            state=StateChoices.SCHEDULED,
            scheduler_task_id=periodic_task.id,
        )

        # Create AVAILABLE manual scan (should not be affected)
        manual_scan = Scan.objects.create(
            tenant_id=tenant.id,
            provider=provider,
            name="Manual scan",
            trigger=Scan.TriggerChoices.MANUAL,
            state=StateChoices.AVAILABLE,
        )

        # Execute cleanup
        deleted_count = _cleanup_orphan_scheduled_scans(
            tenant_id=str(tenant.id),
            provider_id=str(provider.id),
            scheduler_task_id=periodic_task.id,
        )

        # Verify only scheduled orphan was deleted
        assert deleted_count == 1
        assert not Scan.objects.filter(id=orphan_scan.id).exists()
        assert Scan.objects.filter(id=scheduled_scan.id).exists()
        assert Scan.objects.filter(id=manual_scan.id).exists()

    def test_cleanup_does_not_affect_different_scheduler_task(
        self, tenants_fixture, providers_fixture
    ):
        """Test that cleanup only affects scans with the specified scheduler_task_id."""
        tenant = tenants_fixture[0]
        provider = providers_fixture[0]
        periodic_task1 = self._create_periodic_task(provider.id, tenant.id)

        # Create another periodic task
        interval, _ = IntervalSchedule.objects.get_or_create(every=24, period="hours")
        periodic_task2 = PeriodicTask.objects.create(
            name=f"scan-perform-scheduled-other-{provider.id}",
            task="scan-perform-scheduled",
            interval=interval,
            kwargs=f'{{"tenant_id": "{tenant.id}", "provider_id": "{provider.id}"}}',
            enabled=True,
        )

        # Create orphan scenario for periodic_task1
        orphan_scan = Scan.objects.create(
            tenant_id=tenant.id,
            provider=provider,
            name="Daily scheduled scan",
            trigger=Scan.TriggerChoices.SCHEDULED,
            state=StateChoices.AVAILABLE,
            scheduler_task_id=periodic_task1.id,
        )
        scheduled_scan = Scan.objects.create(
            tenant_id=tenant.id,
            provider=provider,
            name="Daily scheduled scan",
            trigger=Scan.TriggerChoices.SCHEDULED,
            state=StateChoices.SCHEDULED,
            scheduler_task_id=periodic_task1.id,
        )

        # Create AVAILABLE scan for periodic_task2 (should not be affected)
        available_scan_other_task = Scan.objects.create(
            tenant_id=tenant.id,
            provider=provider,
            name="Daily scheduled scan",
            trigger=Scan.TriggerChoices.SCHEDULED,
            state=StateChoices.AVAILABLE,
            scheduler_task_id=periodic_task2.id,
        )

        # Execute cleanup for periodic_task1 only
        deleted_count = _cleanup_orphan_scheduled_scans(
            tenant_id=str(tenant.id),
            provider_id=str(provider.id),
            scheduler_task_id=periodic_task1.id,
        )

        # Verify only periodic_task1's orphan was deleted
        assert deleted_count == 1
        assert not Scan.objects.filter(id=orphan_scan.id).exists()
        assert Scan.objects.filter(id=scheduled_scan.id).exists()
        assert Scan.objects.filter(id=available_scan_other_task.id).exists()
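Taken together, these tests pin down the deletion filter precisely. As a reading aid, here is a minimal sketch of what `_cleanup_orphan_scheduled_scans` must be doing under Django ORM semantics — an illustration inferred from the assertions above, not code from this PR:

```python
def _cleanup_orphan_scheduled_scans(
    tenant_id: str, provider_id: str, scheduler_task_id: int
) -> int:
    # Sketch only: delete AVAILABLE scans left behind by this scheduler task.
    # MANUAL scans, scans already in SCHEDULED state, other providers, and
    # other periodic tasks are all left untouched.
    deleted_count, _ = Scan.objects.filter(
        tenant_id=tenant_id,
        provider_id=provider_id,
        scheduler_task_id=scheduler_task_id,
        trigger=Scan.TriggerChoices.SCHEDULED,
        state=StateChoices.AVAILABLE,
    ).delete()
    return deleted_count
```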
@@ -115,8 +115,8 @@ To update the environment file:
Edit the `.env` file and change version values:

```env
PROWLER_UI_VERSION="5.15.0"
PROWLER_API_VERSION="5.15.0"
PROWLER_UI_VERSION="5.16.1"
PROWLER_API_VERSION="5.16.1"
```

<Note>
@@ -164,7 +164,7 @@ prowler oci --profile PRODUCTION
Use a config file from a custom location:

```bash
prowler oci --config-file /path/to/custom/config
prowler oci --oci-config-file /path/to/custom/config
```

### Setting Up API Keys

@@ -377,7 +377,7 @@ ls -la ~/.oci/config
mkdir -p ~/.oci

# Specify custom location
prowler oci --config-file /path/to/config
prowler oci --oci-config-file /path/to/config
```

#### Error: "InvalidKeyOrSignature"

@@ -122,7 +122,7 @@ prowler oci --profile production
##### Using a Custom Config File

```bash
prowler oci --config-file /path/to/custom/config
prowler oci --oci-config-file /path/to/custom/config
```

#### Instance Principal Authentication
@@ -2,15 +2,28 @@

All notable changes to the **Prowler SDK** are documented in this file.

## [5.16.2] (Prowler v5.16.2) (UNRELEASED)

### Fixed
- Fix OCI authentication error handling and validation [(#9738)](https://github.com/prowler-cloud/prowler/pull/9738)
- Fix the AWS EC2 security group helper library [(#9216)](https://github.com/prowler-cloud/prowler/pull/9216)

---

## [5.16.1] (Prowler v5.16.1)

### Fixed
- ZeroDivisionError in Prowler ThreatScore [(#9653)](https://github.com/prowler-cloud/prowler/pull/9653)

---

## [5.16.0] (Prowler v5.16.0)

### Added

- `privilege-escalation` and `ec2-imdsv1` categories for AWS checks [(#9537)](https://github.com/prowler-cloud/prowler/pull/9537)
- Supported IaC formats and scanner documentation for the IaC provider [(#9553)](https://github.com/prowler-cloud/prowler/pull/9553)

### Changed

- Update AWS Glue service metadata to the new format [(#9258)](https://github.com/prowler-cloud/prowler/pull/9258)
- Update AWS Kafka service metadata to the new format [(#9261)](https://github.com/prowler-cloud/prowler/pull/9261)
- Update AWS KMS service metadata to the new format [(#9263)](https://github.com/prowler-cloud/prowler/pull/9263)
@@ -38,7 +38,7 @@ class _MutableTimestamp:

timestamp = _MutableTimestamp(datetime.today())
timestamp_utc = _MutableTimestamp(datetime.now(timezone.utc))
prowler_version = "5.16.0"
prowler_version = "5.16.2"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
square_logo_img = "https://raw.githubusercontent.com/prowler-cloud/prowler/dc7d2d5aeb92fdf12e8604f42ef6472cd3e8e889/docs/img/prowler-logo-black.png"
aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"
@@ -103,8 +103,16 @@ def get_prowler_threatscore_table(
    for pillar in pillars:
        pillar_table["Provider"].append(compliance.Provider)
        pillar_table["Pillar"].append(pillar)
        if max_score_per_pillar[pillar] == 0:
            pillar_score = 100.0
            score_color = Fore.GREEN
        else:
            pillar_score = (
                score_per_pillar[pillar] / max_score_per_pillar[pillar]
            ) * 100
            score_color = Fore.RED
        pillar_table["Score"].append(
            f"{Style.BRIGHT}{Fore.RED}{(score_per_pillar[pillar] / max_score_per_pillar[pillar]) * 100:.2f}%{Style.RESET_ALL}"
            f"{Style.BRIGHT}{score_color}{pillar_score:.2f}%{Style.RESET_ALL}"
        )
        if pillars[pillar]["FAIL"] > 0:
            pillar_table["Status"].append(
@@ -148,9 +156,12 @@ def get_prowler_threatscore_table(
    print(
        f"\nFramework {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Results:"
    )
    print(
        f"\nGeneric Threat Score: {generic_score / max_generic_score * 100:.2f}%"
    )
    # Handle division by zero when all findings are muted
    if max_generic_score == 0:
        generic_threat_score = 100.0
    else:
        generic_threat_score = generic_score / max_generic_score * 100
    print(f"\nGeneric Threat Score: {generic_threat_score:.2f}%")
    print(
        tabulate(
            pillar_table,
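The same guard appears twice in this hunk (once per pillar and once for the generic score). For readers skimming the diff, the pattern extracted into a standalone helper would look roughly like this — a sketch for illustration, not code from the PR:

```python
from colorama import Fore


def safe_percentage(score: float, max_score: float) -> tuple[float, str]:
    # When every finding is muted, the maximum attainable score is 0 and the
    # old expression raised ZeroDivisionError; treat that case as 100% (green).
    if max_score == 0:
        return 100.0, Fore.GREEN
    return score / max_score * 100, Fore.RED
```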
@@ -25,8 +25,8 @@ class dms_instance_no_public_access(Check):
                    if check_security_group(
                        ingress_rule,
                        "-1",
                        ports=None,
                        any_address=True,
                        all_ports=True,
                    ):
                        report.status = "FAIL"
                        report.status_extended = f"DMS Replication Instance {instance.id} is set as publicly accessible and security group {security_group.name} ({security_group.id}) is open to the Internet."
@@ -31,7 +31,7 @@ class ec2_securitygroup_allow_ingress_from_internet_to_any_port(Check):
            report.status_extended = f"Security group {security_group.name} ({security_group.id}) does not have any port open to the Internet."
            for ingress_rule in security_group.ingress_rules:
                if check_security_group(
                    ingress_rule, "-1", ports=None, any_address=True
                    ingress_rule, "-1", any_address=True, all_ports=True
                ):
                    self.check_enis(
                        report=report,
@@ -3,10 +3,14 @@ from typing import Any


def check_security_group(
    ingress_rule: Any, protocol: str, ports: list = [], any_address: bool = False
    ingress_rule: Any,
    protocol: str,
    ports: list | None = None,
    any_address: bool = False,
    all_ports: bool = False,
) -> bool:
    """
    Check if the security group ingress rule has public access to the check_ports using the protocol
    Check if the security group ingress rule has public access to the check_ports using the protocol.

    @param ingress_rule: AWS Security Group IpPermissions Ingress Rule
    {
@@ -29,13 +33,17 @@ def check_security_group(

    @param protocol: Protocol to check. If -1, all protocols will be checked.

    @param ports: List of ports to check. If empty, any port will be checked. If None, any port will be checked. (Default: [])
    @param ports: List of ports to check. If not provided, any publicly open port matches when all_ports is True; otherwise the rule must open every port. (Default: None)

    @param any_address: If True, only 0.0.0.0/0 or "::/0" are treated as public; other globally routable addresses are not checked. (Default: False)

    @param all_ports: If True, an empty ports list matches any publicly open port; if False, an empty ports list only matches rules that open all ports. (Default: False)

    @return: True if the security group has public access to the check_ports using the protocol
    """
    if ports is None:
        ports = []

    # Check for all traffic ingress rules regardless of the protocol
    if ingress_rule["IpProtocol"] == "-1":
        for ip_ingress_rule in ingress_rule["IpRanges"]:
@@ -54,54 +62,42 @@ def check_security_group(

    # Check for specific ports in ingress rules
    if "FromPort" in ingress_rule:
        # If there is a port range

        # If the ports are not the same, create a covering range.
        # Note range is exclusive of the end value, so we add 1 to the ToPort.
        if ingress_rule["FromPort"] != ingress_rule["ToPort"]:
            # Calculate port range, adding 1
            diff = (ingress_rule["ToPort"] - ingress_rule["FromPort"]) + 1
            ingress_port_range = []
            for x in range(diff):
                ingress_port_range.append(int(ingress_rule["FromPort"]) + x)
            # If FromPort and ToPort are the same
            ingress_port_range = set(
                range(ingress_rule["FromPort"], ingress_rule["ToPort"] + 1)
            )
        else:
            ingress_port_range = []
            ingress_port_range.append(int(ingress_rule["FromPort"]))
            ingress_port_range = {int(ingress_rule["FromPort"])}

        # Test Security Group
        # IPv4
        for ip_ingress_rule in ingress_rule["IpRanges"]:
            if _is_cidr_public(ip_ingress_rule["CidrIp"], any_address):
                # If there are input ports to check
                if ports:
                    for port in ports:
                        if (
                            port in ingress_port_range
                            and ingress_rule["IpProtocol"] == protocol
                        ):
                            return True
                # If empty input ports check if all ports are open
                if len(set(ingress_port_range)) == 65536:
                    return True
                # If None input ports check if any port is open
                if ports is None:
                    return True
        # Combine IPv4 and IPv6 ranges to facilitate a single check loop.
        all_ingress_rules = []
        all_ingress_rules.extend(ingress_rule["IpRanges"])
        all_ingress_rules.extend(ingress_rule["Ipv6Ranges"])

        # IPv6
        for ip_ingress_rule in ingress_rule["Ipv6Ranges"]:
            if _is_cidr_public(ip_ingress_rule["CidrIpv6"], any_address):
                # If there are input ports to check
                if ports:
                    for port in ports:
                        if (
                            port in ingress_port_range
                            and ingress_rule["IpProtocol"] == protocol
                        ):
                            return True
                # If empty input ports check if all ports are open
                if len(set(ingress_port_range)) == 65536:
                    return True
                # If None input ports check if any port is open
                if ports is None:
                    return True
        for ip_ingress_rule in all_ingress_rules:
            # We only check public CIDRs
            if _is_cidr_public(
                ip_ingress_rule.get("CidrIp", ip_ingress_rule.get("CidrIpv6")),
                any_address,
            ):
                for port in ports:
                    if port in ingress_port_range and (
                        ingress_rule["IpProtocol"] == protocol or protocol == "-1"
                    ):
                        # Direct match for a port in the specified port range
                        return True

                # We did not find a specific port for the given protocol for
                # a public CIDR, so let's see if all the ports are open
                all_ports_open = len(ingress_port_range) == 65536

                # Use the all_ports flag to determine if empty ports should be treated as all ports.
                empty_ports_same_as_all_ports_open = all_ports and not ports

                return all_ports_open or empty_ports_same_as_all_ports_open

    return False
@@ -120,3 +116,4 @@ def _is_cidr_public(cidr: str, any_address: bool = False) -> bool:
        return True
    if not any_address:
        return ipaddress.ip_network(cidr).is_global
    return False
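With the new signature, the three calling modes are explicit. A few illustrative calls against a hand-built rule (values invented for the example):

```python
ssh_rule = {
    "IpProtocol": "tcp",
    "FromPort": 22,
    "ToPort": 22,
    "IpRanges": [{"CidrIp": "0.0.0.0/0", "Description": ""}],
    "Ipv6Ranges": [],
}

# Specific port, public CIDR, matching protocol -> True
check_security_group(ssh_rule, "tcp", ports=[22], any_address=True)

# No ports given with all_ports=True: any publicly open port matches -> True
check_security_group(ssh_rule, "-1", any_address=True, all_ports=True)

# No ports given with the default all_ports=False: the rule would need to
# open all 65536 ports -> False here, since only port 22 is open
check_security_group(ssh_rule, "tcp", any_address=True)
```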
@@ -22,7 +22,7 @@
  },
  "Recommendation": {
    "Text": "Configure versioning using the Amazon console or API for buckets with sensitive information that is changing frequently, and backup may not be enough to capture all the changes.",
    "Url": "https://docs.aws.amazon.com/AmazonS3/latest/dev-retired/Versioning.html"
    "Url": "https://docs.aws.amazon.com/AmazonS3/latest/userguide/Versioning.html"
  }
},
"Categories": [],
@@ -266,7 +266,6 @@ class OraclecloudProvider(Provider):
        # If API key credentials are provided directly, create config from them
        if user and fingerprint and tenancy and region:
            import base64
            import tempfile

            logger.info("Using API key credentials from direct parameters")
@@ -280,21 +279,19 @@ class OraclecloudProvider(Provider):

            # Handle private key
            if key_content:
                # Decode base64 key content and write to temp file
                # Decode base64 key content
                try:
                    key_data = base64.b64decode(key_content)
                    temp_key_file = tempfile.NamedTemporaryFile(
                        mode="wb", delete=False, suffix=".pem"
                    )
                    temp_key_file.write(key_data)
                    temp_key_file.close()
                    config["key_file"] = temp_key_file.name
                    decoded_key = key_data.decode("utf-8")
                except Exception as decode_error:
                    logger.error(f"Failed to decode key_content: {decode_error}")
                    raise OCIInvalidConfigError(
                        file=pathlib.Path(__file__).name,
                        message="Failed to decode key_content. Ensure it is base64 encoded.",
                    )

                # Use OCI SDK's native key_content support
                config["key_content"] = decoded_key
            elif key_file:
                config["key_file"] = os.path.expanduser(key_file)
            else:
@@ -428,78 +425,85 @@ class OraclecloudProvider(Provider):
        Raises:
            OCIAuthenticationError: If authentication fails.
        """
        try:
            # Get tenancy from config
            tenancy_id = session.config.get("tenancy")
        # Get tenancy from config
        tenancy_id = session.config.get("tenancy")

            if not tenancy_id:
                raise OCINoCredentialsError(
                    file=pathlib.Path(__file__).name,
                    message="Tenancy ID not found in configuration",
                )

            # Validate tenancy OCID format
            if not OraclecloudProvider.validate_ocid(tenancy_id, "tenancy"):
                raise OCIInvalidTenancyError(
                    file=pathlib.Path(__file__).name,
                    message=f"Invalid tenancy OCID format: {tenancy_id}",
                )

            # Get user from config (not available in instance principal)
            user_id = session.config.get("user", "instance-principal")

            # Get region from config or use provided region
            if not region:
                region = session.config.get("region", "us-ashburn-1")

            # Validate region
            if region not in OCI_REGIONS:
                raise OCIInvalidRegionError(
                    file=pathlib.Path(__file__).name,
                    message=f"Invalid region: {region}",
                )

            # Get tenancy name using Identity service
            tenancy_name = "unknown"
            try:
                # Create identity client with proper authentication handling
                if session.signer:
                    identity_client = oci.identity.IdentityClient(
                        config=session.config, signer=session.signer
                    )
                else:
                    identity_client = oci.identity.IdentityClient(config=session.config)

                tenancy = identity_client.get_tenancy(tenancy_id).data
                tenancy_name = tenancy.name
                logger.info(f"Tenancy Name: {tenancy_name}")
            except Exception as error:
                logger.warning(
                    f"Could not retrieve tenancy name: {error}. Using 'unknown'"
                )

            logger.info(f"OCI Tenancy ID: {tenancy_id}")
            logger.info(f"OCI User ID: {user_id}")
            logger.info(f"OCI Region: {region}")

            return OCIIdentityInfo(
                tenancy_id=tenancy_id,
                tenancy_name=tenancy_name,
                user_id=user_id,
                region=region,
                profile=session.profile,
                audited_regions=set([region]) if region else set(),
                audited_compartments=compartment_ids if compartment_ids else [],
        if not tenancy_id:
            raise OCINoCredentialsError(
                file=pathlib.Path(__file__).name,
                message="Tenancy ID not found in configuration",
            )

        except Exception as error:
        # Validate tenancy OCID format
        if not OraclecloudProvider.validate_ocid(tenancy_id, "tenancy"):
            raise OCIInvalidTenancyError(
                file=pathlib.Path(__file__).name,
                message=f"Invalid tenancy OCID format: {tenancy_id}",
            )

        # Get user from config (not available in instance principal)
        user_id = session.config.get("user", "instance-principal")

        # Get region from config or use provided region
        if not region:
            region = session.config.get("region", "us-ashburn-1")

        # Validate region
        if region not in OCI_REGIONS:
            raise OCIInvalidRegionError(
                file=pathlib.Path(__file__).name,
                message=f"Invalid region: {region}",
            )

        # Validate credentials by calling OCI Identity service
        try:
            if session.signer:
                identity_client = oci.identity.IdentityClient(
                    config=session.config, signer=session.signer
                )
            else:
                identity_client = oci.identity.IdentityClient(config=session.config)

            tenancy = identity_client.get_tenancy(tenancy_id).data
            tenancy_name = tenancy.name
            logger.info(f"Tenancy Name: {tenancy_name}")
        except oci.exceptions.ServiceError as error:
            logger.critical(
                f"OCIAuthenticationError[{error.__traceback__.tb_lineno}]: {error}"
                f"OCI credential validation failed (HTTP {error.status}): {error.message}"
            )
            raise OCIAuthenticationError(
                original_exception=error,
                file=pathlib.Path(__file__).name,
                message=f"OCI credential validation failed: {error.message}. Please verify your credentials and try again.",
                original_exception=error,
            )
        except oci.exceptions.InvalidPrivateKey as error:
            logger.critical(f"Invalid OCI private key: {error}")
            raise OCIAuthenticationError(
                file=pathlib.Path(__file__).name,
                message="Invalid OCI private key format. Ensure the key is a valid PEM-encoded private key.",
                original_exception=error,
            )
        except Exception as error:
            logger.critical(f"OCI authentication error: {error}")
            raise OCIAuthenticationError(
                file=pathlib.Path(__file__).name,
                message=f"Failed to authenticate with OCI: {error}",
                original_exception=error,
            )

        logger.info(f"OCI Tenancy ID: {tenancy_id}")
        logger.info(f"OCI User ID: {user_id}")
        logger.info(f"OCI Region: {region}")

        return OCIIdentityInfo(
            tenancy_id=tenancy_id,
            tenancy_name=tenancy_name,
            user_id=user_id,
            region=region,
            profile=session.profile,
            audited_regions=set([region]) if region else set(),
            audited_compartments=compartment_ids if compartment_ids else [],
        )

    @staticmethod
    def validate_ocid(ocid: str, resource_type: str = None) -> bool:
@@ -838,7 +842,6 @@ class OraclecloudProvider(Provider):
        # If API key credentials are provided directly, create config from them
        if user and fingerprint and tenancy and region:
            import base64
            import tempfile

            logger.info("Using API key credentials from direct parameters")
@@ -852,21 +855,19 @@ class OraclecloudProvider(Provider):

            # Handle private key
            if key_content:
                # Decode base64 key content and write to temp file
                # Decode base64 key content
                try:
                    key_data = base64.b64decode(key_content)
                    temp_key_file = tempfile.NamedTemporaryFile(
                        mode="wb", delete=False, suffix=".pem"
                    )
                    temp_key_file.write(key_data)
                    temp_key_file.close()
                    config["key_file"] = temp_key_file.name
                    decoded_key = key_data.decode("utf-8")
                except Exception as decode_error:
                    logger.error(f"Failed to decode key_content: {decode_error}")
                    raise OCIInvalidConfigError(
                        file=pathlib.Path(__file__).name,
                        message="Failed to decode key_content. Ensure it is base64 encoded.",
                    )

                # Use OCI SDK's native key_content support
                config["key_content"] = decoded_key
            elif key_file:
                config["key_file"] = os.path.expanduser(key_file)
            else:
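Since `key_content` must arrive base64-encoded, producing it from an existing PEM key is straightforward. A small illustration (the key path is a placeholder):

```python
import base64

# Placeholder path: read a PEM private key and base64-encode it
# so it can be passed as the key_content parameter.
with open("/path/to/oci_api_key.pem", "rb") as key_file:
    key_content = base64.b64encode(key_file.read()).decode("utf-8")
```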
@@ -90,7 +90,7 @@ maintainers = [{name = "Prowler Engineering", email = "engineering@prowler.com"}
name = "prowler"
readme = "README.md"
requires-python = ">3.9.1,<3.13"
version = "5.16.0"
version = "5.16.2"

[project.scripts]
prowler = "prowler.__main__:prowler"
@@ -48,7 +48,7 @@ class Test_is_cidr_public:
        with pytest.raises(ValueError) as ex:
            _is_cidr_public(cidr)

        assert ex.type == ValueError
        assert ex.type is ValueError
        assert ex.match(f"{cidr} has host bits set")

    def test__is_cidr_public_Public_IPv6_all_IPs_any_address_false(self):
@@ -77,7 +77,7 @@ class Test_is_cidr_public:


class Test_check_security_group:
    def generate_ip_ranges_list(self, input_ip_ranges: [str], v4=True):
    def generate_ip_ranges_list(self, input_ip_ranges: list[str], v4=True):
        cidr_ranges = "CidrIp" if v4 else "CidrIpv6"
        return [{cidr_ranges: ip, "Description": ""} for ip in input_ip_ranges]

@@ -86,8 +86,8 @@ class Test_check_security_group:
        from_port: int,
        to_port: int,
        ip_protocol: str,
        input_ipv4_ranges: [str],
        input_ipv6_ranges: [str],
        input_ipv4_ranges: list[str],
        input_ipv6_ranges: list[str],
    ):
        """
        ingress_rule_generator returns the following AWS Security Group IpPermissions Ingress Rule based on the input arguments
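Two small hygiene points behind these hunks: `[str]` is a one-element list containing the `str` type, not a valid annotation, and `pytest` exposes the raised exception class via `ex.type`, which is idiomatically compared by identity. A minimal illustration:

```python
# `[str]` is just a list literal, not a type annotation
not_an_annotation = [str]  # -> [<class 'str'>]

# PEP 585 generic, valid as an annotation on Python 3.9+
names: list[str] = ["a", "b"]

# Classes are singletons, so identity (`is`) is the right comparison
assert type(ValueError("boom")) is ValueError
```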
201
tests/providers/oraclecloud/oraclecloud_provider_test.py
Normal file
@@ -0,0 +1,201 @@
from unittest.mock import MagicMock, patch

import pytest

from prowler.providers.oraclecloud.exceptions.exceptions import (
    OCIAuthenticationError,
    OCIInvalidConfigError,
)
from prowler.providers.oraclecloud.models import OCISession
from prowler.providers.oraclecloud.oraclecloud_provider import OraclecloudProvider


class TestSetIdentityAuthenticationErrors:
    """Tests for authentication error handling in set_identity()"""

    @pytest.fixture
    def mock_session(self):
        """Create a mock OCI session."""
        session = OCISession(
            config={
                "tenancy": "ocid1.tenancy.oc1..aaaaaaaexample",
                "user": "ocid1.user.oc1..aaaaaaaexample",
                "region": "us-ashburn-1",
                "fingerprint": "aa:bb:cc:dd:ee:ff:00:11:22:33:44:55:66:77:88:99",
            },
            signer=None,
            profile="DEFAULT",
        )
        return session

    def test_authentication_error_401_raises_exception(self, mock_session):
        """Test 401 error raises OCIAuthenticationError."""
        with patch("oci.identity.IdentityClient") as mock_identity_client:
            mock_client_instance = MagicMock()
            mock_client_instance.get_tenancy.side_effect = self._create_service_error(
                401, "Authentication failed"
            )
            mock_identity_client.return_value = mock_client_instance

            with pytest.raises(OCIAuthenticationError) as exc_info:
                OraclecloudProvider.set_identity(mock_session)

            assert "OCI credential validation failed" in str(exc_info.value)

    def test_authentication_error_403_raises_exception(self, mock_session):
        """Test 403 error raises OCIAuthenticationError."""
        with patch("oci.identity.IdentityClient") as mock_identity_client:
            mock_client_instance = MagicMock()
            mock_client_instance.get_tenancy.side_effect = self._create_service_error(
                403, "Forbidden access"
            )
            mock_identity_client.return_value = mock_client_instance

            with pytest.raises(OCIAuthenticationError) as exc_info:
                OraclecloudProvider.set_identity(mock_session)

            assert "OCI credential validation failed" in str(exc_info.value)

    def test_authentication_error_404_raises_exception(self, mock_session):
        """Test 404 error raises OCIAuthenticationError."""
        with patch("oci.identity.IdentityClient") as mock_identity_client:
            mock_client_instance = MagicMock()
            mock_client_instance.get_tenancy.side_effect = self._create_service_error(
                404, "Resource not found"
            )
            mock_identity_client.return_value = mock_client_instance

            with pytest.raises(OCIAuthenticationError) as exc_info:
                OraclecloudProvider.set_identity(mock_session)

            assert "OCI credential validation failed" in str(exc_info.value)

    def test_service_error_500_raises_exception(self, mock_session):
        """Test 500 error raises OCIAuthenticationError (can't validate credentials)."""
        with patch("oci.identity.IdentityClient") as mock_identity_client:
            mock_client_instance = MagicMock()
            mock_client_instance.get_tenancy.side_effect = self._create_service_error(
                500, "Internal server error"
            )
            mock_identity_client.return_value = mock_client_instance

            with pytest.raises(OCIAuthenticationError) as exc_info:
                OraclecloudProvider.set_identity(mock_session)

            assert "OCI credential validation failed" in str(exc_info.value)

    def test_invalid_private_key_raises_exception(self, mock_session):
        """Test InvalidPrivateKey exception raises OCIAuthenticationError."""
        with patch("oci.identity.IdentityClient") as mock_identity_client:
            import oci

            mock_client_instance = MagicMock()
            mock_client_instance.get_tenancy.side_effect = (
                oci.exceptions.InvalidPrivateKey("Invalid private key")
            )
            mock_identity_client.return_value = mock_client_instance

            with pytest.raises(OCIAuthenticationError) as exc_info:
                OraclecloudProvider.set_identity(mock_session)

            assert "Invalid OCI private key format" in str(exc_info.value)

    def test_generic_exception_raises_authentication_error(self, mock_session):
        """Test generic exception raises OCIAuthenticationError."""
        with patch("oci.identity.IdentityClient") as mock_identity_client:
            mock_client_instance = MagicMock()
            mock_client_instance.get_tenancy.side_effect = Exception("Unexpected error")
            mock_identity_client.return_value = mock_client_instance

            with pytest.raises(OCIAuthenticationError) as exc_info:
                OraclecloudProvider.set_identity(mock_session)

            assert "Failed to authenticate with OCI" in str(exc_info.value)

    def test_successful_authentication(self, mock_session):
        """Test successful authentication returns identity info."""
        with patch("oci.identity.IdentityClient") as mock_identity_client:
            mock_tenancy = MagicMock()
            mock_tenancy.name = "test-tenancy"
            mock_response = MagicMock()
            mock_response.data = mock_tenancy

            mock_client_instance = MagicMock()
            mock_client_instance.get_tenancy.return_value = mock_response
            mock_identity_client.return_value = mock_client_instance

            identity = OraclecloudProvider.set_identity(mock_session)

            assert identity.tenancy_name == "test-tenancy"
            assert identity.tenancy_id == "ocid1.tenancy.oc1..aaaaaaaexample"
            assert identity.user_id == "ocid1.user.oc1..aaaaaaaexample"
            assert identity.region == "us-ashburn-1"

    @staticmethod
    def _create_service_error(status, message):
        """Helper to create an OCI ServiceError."""
        import oci

        error = oci.exceptions.ServiceError(
            status=status,
            code="TestError",
            headers={},
            message=message,
        )
        return error


class TestTestConnectionKeyValidation:
    """Tests for key_content validation in test_connection()"""

    def test_test_connection_invalid_base64_key_raises_error(self):
        """Test invalid base64 key content raises OCIInvalidConfigError."""
        with pytest.raises(OCIInvalidConfigError) as exc_info:
            OraclecloudProvider.test_connection(
                oci_config_file=None,
                profile=None,
                key_content="not-valid-base64!!!",
                user="ocid1.user.oc1..aaaaaaaexample",
                fingerprint="aa:bb:cc:dd:ee:ff:00:11:22:33:44:55:66:77:88:99",
                tenancy="ocid1.tenancy.oc1..aaaaaaaexample",
                region="us-ashburn-1",
            )

        assert "Failed to decode key_content" in str(exc_info.value)

    def test_test_connection_valid_key_content_proceeds(self):
        """Test valid base64 key content proceeds to authentication."""
        import base64

        # The SDK will validate the actual key format during authentication
        valid_key = """-----BEGIN RSA PRIVATE KEY-----
MIIEpQIBAAKCAQEA0Z3VS5JJcds3xfn/ygWyF8n0sMcD/QHWCJ7yGSEtLN2T
...key content...
-----END RSA PRIVATE KEY-----"""
        encoded_key = base64.b64encode(valid_key.encode("utf-8")).decode("utf-8")

        with (
            patch("oci.config.validate_config"),
            patch("oci.identity.IdentityClient") as mock_identity_client,
        ):
            mock_tenancy = MagicMock()
            mock_tenancy.name = "test-tenancy"
            mock_response = MagicMock()
            mock_response.data = mock_tenancy

            mock_client_instance = MagicMock()
            mock_client_instance.get_tenancy.return_value = mock_response
            mock_identity_client.return_value = mock_client_instance

            connection = OraclecloudProvider.test_connection(
                oci_config_file=None,
                profile=None,
                key_content=encoded_key,
                user="ocid1.user.oc1..aaaaaaaexample",
                fingerprint="aa:bb:cc:dd:ee:ff:00:11:22:33:44:55:66:77:88:99",
                tenancy="ocid1.tenancy.oc1..aaaaaaaexample",
                region="us-ashburn-1",
                raise_on_exception=False,
            )

            assert connection.is_connected is True
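These tests mock all OCI SDK calls, so presumably they can be run in isolation without real credentials, e.g.:

```bash
pytest tests/providers/oraclecloud/oraclecloud_provider_test.py -v
```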
@@ -2,6 +2,22 @@

All notable changes to the **Prowler UI** are documented in this file.

## [1.16.2] (Prowler v5.16.2) (UNRELEASED)

### 🐞 Fixed

- OCI update credentials form failing silently due to missing provider UID [(#9746)](https://github.com/prowler-cloud/prowler/pull/9746)

---

## [1.16.1] (Prowler v5.16.1)

### 🔄 Changed

- Lighthouse AI meta tools descriptions updated for clarity with more representative examples [(#9632)](https://github.com/prowler-cloud/prowler/pull/9632)

---

## [1.16.0] (Prowler v5.16.0)

### 🚀 Added
@@ -42,6 +58,7 @@ All notable changes to the **Prowler UI** are documented in this file.
- Navigation progress bar for page transitions using Next.js `onRouterTransitionStart` [(#9465)](https://github.com/prowler-cloud/prowler/pull/9465)
- Findings Severity Over Time chart component to Overview page [(#9405)](https://github.com/prowler-cloud/prowler/pull/9405)
- Attack Surface component to Overview page [(#9412)](https://github.com/prowler-cloud/prowler/pull/9412)
- Add Alibaba Cloud provider [(#9501)](https://github.com/prowler-cloud/prowler/pull/9501)

### 🔄 Changed
@@ -1,5 +1,7 @@
import { redirect } from "next/navigation";
import React from "react";

import { getProvider } from "@/actions/providers/providers";
import { CredentialsUpdateInfo } from "@/components/providers";
import {
  UpdateViaCredentialsForm,
@@ -20,9 +22,24 @@ interface Props {

export default async function UpdateCredentialsPage({ searchParams }: Props) {
  const resolvedSearchParams = await searchParams;
  const { type: providerType, via } = resolvedSearchParams;
  const { type: providerType, via, id: providerId } = resolvedSearchParams;

  if (!providerId) {
    redirect("/providers");
  }

  const formType = getProviderFormType(providerType, via);

  const formData = new FormData();
  formData.append("id", providerId);
  const providerResponse = await getProvider(formData);

  if (providerResponse?.errors) {
    redirect("/providers");
  }

  const providerUid = providerResponse?.data?.attributes?.uid;

  switch (formType) {
    case "selector":
      return (
@@ -30,14 +47,27 @@ export default async function UpdateCredentialsPage({ searchParams }: Props) {
      );

    case "credentials":
      return <UpdateViaCredentialsForm searchParams={resolvedSearchParams} />;
      return (
        <UpdateViaCredentialsForm
          searchParams={resolvedSearchParams}
          providerUid={providerUid}
        />
      );

    case "role":
      return <UpdateViaRoleForm searchParams={resolvedSearchParams} />;
      return (
        <UpdateViaRoleForm
          searchParams={resolvedSearchParams}
          providerUid={providerUid}
        />
      );

    case "service-account":
      return (
        <UpdateViaServiceAccountForm searchParams={resolvedSearchParams} />
        <UpdateViaServiceAccountForm
          searchParams={resolvedSearchParams}
          providerUid={providerUid}
        />
      );

    default:
@@ -7,8 +7,10 @@ import { BaseCredentialsForm } from "./base-credentials-form";

export const UpdateViaCredentialsForm = ({
  searchParams,
  providerUid,
}: {
  searchParams: { type: string; id: string; secretId?: string };
  providerUid?: string;
}) => {
  const providerType = searchParams.type as ProviderType;
  const providerId = searchParams.id;
@@ -24,6 +26,7 @@ export const UpdateViaCredentialsForm = ({
    <BaseCredentialsForm
      providerType={providerType}
      providerId={providerId}
      providerUid={providerUid}
      onSubmit={handleUpdateCredentials}
      successNavigationUrl={successNavigationUrl}
      submitButtonText="Next"

@@ -7,8 +7,10 @@ import { BaseCredentialsForm } from "./base-credentials-form";

export const UpdateViaRoleForm = ({
  searchParams,
  providerUid,
}: {
  searchParams: { type: string; id: string; secretId?: string };
  providerUid?: string;
}) => {
  const providerType = searchParams.type as ProviderType;
  const providerId = searchParams.id;
@@ -24,6 +26,7 @@ export const UpdateViaRoleForm = ({
    <BaseCredentialsForm
      providerType={providerType}
      providerId={providerId}
      providerUid={providerUid}
      onSubmit={handleUpdateCredentials}
      successNavigationUrl={successNavigationUrl}
      submitButtonText="Next"

@@ -7,8 +7,10 @@ import { BaseCredentialsForm } from "./base-credentials-form";

export const UpdateViaServiceAccountForm = ({
  searchParams,
  providerUid,
}: {
  searchParams: { type: string; id: string; secretId?: string };
  providerUid?: string;
}) => {
  const providerType = searchParams.type as ProviderType;
  const providerId = searchParams.id;
@@ -24,6 +26,7 @@ export const UpdateViaServiceAccountForm = ({
    <BaseCredentialsForm
      providerType={providerType}
      providerId={providerId}
      providerUid={providerUid}
      onSubmit={handleUpdateCredentials}
      successNavigationUrl={successNavigationUrl}
      submitButtonText="Next"
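`BaseCredentialsForm` itself is not part of this diff. Based on the E2E notes further below (the tenancy field is auto-populated from the provider UID in the update flow), its use of the new prop presumably looks something like this — a hypothetical sketch, not the real component:

```tsx
// Hypothetical sketch of the relevant part of BaseCredentialsForm.
// Assumes the OCI tenancy OCID is carried in a hidden field during updates
// so credential validation no longer fails silently when it is absent.
export const OciTenancyField = ({ providerUid }: { providerUid?: string }) => (
  <input type="hidden" name="oci_tenancy_id" value={providerUid ?? ""} />
);
```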
@@ -89,23 +89,23 @@ export const describeTool = tool(
  },
  {
    name: "describe_tool",
    description: `Get the full schema and parameter details for a specific Prowler Hub tool.
    description: `Get the full schema and parameter details for a specific Prowler tool.

Use this to understand what parameters a tool requires before executing it.
Tool names are listed in your system prompt - use the exact name.

You must always provide the toolName key in the JSON object.
Example: describe_tool({ "toolName": "prowler_hub_list_providers" })
Example: describe_tool({ "toolName": "prowler_app_search_security_findings" })

Returns:
- Full parameter schema with types and descriptions
- Tool description
- Required vs optional parameters`,
- Required and optional parameters`,
    schema: z.object({
      toolName: z
        .string()
        .describe(
          "Exact name of the tool to describe (e.g., 'prowler_hub_list_providers'). You must always provide the toolName key in the JSON object.",
          "Exact name of the tool to describe (e.g., 'prowler_hub_list_compliances'). You must always provide the toolName key in the JSON object.",
        ),
    }),
  },
@@ -198,20 +198,20 @@ export const executeTool = tool(
  },
  {
    name: "execute_tool",
    description: `Execute a Prowler Hub MCP tool with the specified parameters.
    description: `Execute a Prowler MCP tool with the specified parameters.

Provide the exact tool name and its input parameters as specified in the tool's schema.

You must always provide the toolName and toolInput keys in the JSON object.
Example: execute_tool({ "toolName": "prowler_hub_list_providers", "toolInput": {} })
Example: execute_tool({ "toolName": "prowler_app_search_security_findings", "toolInput": {} })

All input to the tool must be provided in the toolInput key as a JSON object.
Example: execute_tool({ "toolName": "prowler_hub_list_providers", "toolInput": { "query": "value1", "page": 1, "pageSize": 10 } })
Example: execute_tool({ "toolName": "prowler_hub_list_compliances", "toolInput": { "provider": ["aws"] } })

Always describe the tool first to understand:
1. What parameters it requires
2. The expected input format
3. Required vs optional parameters`,
3. Which parameters are mandatory and which are optional`,
    schema: z.object({
      toolName: z
        .string()
@@ -222,7 +222,7 @@ Always describe the tool first to understand:
      .record(z.string(), z.unknown())
      .default({})
      .describe(
        "Input parameters for the tool as a JSON object. Use empty object {} if tool requires no parameters.",
        "Input parameters for the tool as a JSON object. Use empty object {} if tool requires no parameters or it has defined defaults or only optional parameters.",
      ),
  }),
},
@@ -607,18 +607,22 @@ export class ProvidersPage extends BasePage {
    }

    // Fallback logic: try finding any common primary action buttons in expected order
    const candidates: Array<{ name: string | RegExp }> = [
      { name: "Next" }, // Try the "Next" button
      { name: "Save" }, // Try the "Save" button
    const candidates: Array<{ name: string | RegExp; exact?: boolean }> = [
      { name: "Next", exact: true }, // Try the "Next" button (exact match to avoid Next.js dev tools)
      { name: "Save", exact: true }, // Try the "Save" button
      { name: "Launch scan" }, // Try the "Launch scan" button
      { name: /Continue|Proceed/i }, // Try "Continue" or "Proceed" (case-insensitive)
    ];

    // Try each candidate name and click it if found
    for (const candidate of candidates) {
      const btn = this.page.getByRole("button", {
        name: candidate.name,
      });
      // Exclude Next.js dev tools button by filtering out buttons with aria-haspopup attribute
      const btn = this.page
        .getByRole("button", {
          name: candidate.name,
          exact: candidate.exact,
        })
        .and(this.page.locator(":not([aria-haspopup])"));

      if (await btn.count()) {
        await btn.click();
@@ -847,7 +851,7 @@ export class ProvidersPage extends BasePage {
  }

  async verifyOCICredentialsPageLoaded(): Promise<void> {
    // Verify the OCI credentials page is loaded
    // Verify the OCI credentials page is loaded (add flow - all fields visible)

    await this.verifyPageHasProwlerTitle();
    await expect(this.ociTenancyIdInput).toBeVisible();
@@ -857,6 +861,17 @@ export class ProvidersPage extends BasePage {
    await expect(this.ociRegionInput).toBeVisible();
  }

  async verifyOCIUpdateCredentialsPageLoaded(): Promise<void> {
    // Verify the OCI update credentials page is loaded
    // Note: Tenancy OCID is hidden in update flow (auto-populated from provider UID)

    await this.verifyPageHasProwlerTitle();
    await expect(this.ociUserIdInput).toBeVisible();
    await expect(this.ociFingerprintInput).toBeVisible();
    await expect(this.ociKeyContentInput).toBeVisible();
    await expect(this.ociRegionInput).toBeVisible();
  }

  async verifyPageLoaded(): Promise<void> {
    // Verify the providers page is loaded

@@ -995,4 +1010,42 @@ export class ProvidersPage extends BasePage {
      throw new Error(`Invalid authentication method: ${method}`);
    }
  }

  async clickProviderRowActions(providerUid: string): Promise<void> {
    // Click the actions dropdown for a specific provider row
    const row = this.providersTable.locator("tbody tr", {
      hasText: providerUid,
    });
    await expect(row).toBeVisible();

    // Click the dropdown trigger - it's the last button in the row (after the copy button)
    const actionsButton = row.locator("button").last();
    await actionsButton.click();
  }

  async clickUpdateCredentials(providerUid: string): Promise<void> {
    // Click update credentials for a specific provider
    await this.clickProviderRowActions(providerUid);

    // Wait for dropdown menu to stabilize and click Update Credentials
    const updateCredentialsOption = this.page.getByRole("menuitem", {
      name: /Update Credentials/i,
    });
    await expect(updateCredentialsOption).toBeVisible();
    // Wait a bit for the menu to stabilize before clicking
    await this.page.waitForTimeout(100);
    await updateCredentialsOption.click({ force: true });
  }

  async verifyUpdateCredentialsPageLoaded(): Promise<void> {
    // Verify the update credentials page is loaded
    await this.verifyPageHasProwlerTitle();
    await expect(this.page).toHaveURL(/\/providers\/update-credentials/);
  }

  async verifyTestConnectionPageLoaded(): Promise<void> {
    // Verify the test connection page is loaded
    await this.verifyPageHasProwlerTitle();
    await expect(this.page).toHaveURL(/\/providers\/test-connection/);
  }
}
@@ -708,3 +708,61 @@
- Provider cleanup performed before each test to ensure clean state
- Requires valid OCI account with API Key set up
- API Key credential type is automatically used for OCI providers

---

## Test Case: `PROVIDER-E2E-013` - Update OCI Provider Credentials

**Priority:** `normal`

**Tags:**

- type → @e2e, @serial
- feature → @providers
- provider → @oci

**Description/Objective:** Validates the complete flow of updating credentials for an existing OCI provider. This test verifies that the provider UID is correctly passed to the update credentials form, which is required for OCI credential validation.

**Preconditions:**

- Admin user authentication required (admin.auth.setup)
- Environment variables configured: E2E_OCI_TENANCY_ID, E2E_OCI_USER_ID, E2E_OCI_FINGERPRINT, E2E_OCI_KEY_CONTENT, E2E_OCI_REGION (see the export sketch at the end of this test case)
- An OCI provider with the specified Tenancy ID must already exist (run PROVIDER-E2E-012 first)
- This test must be run serially and never in parallel with other tests

### Flow Steps:

1. Navigate to providers page
2. Verify OCI provider exists in the table
3. Click row actions menu for the OCI provider
4. Click "Update Credentials" option
5. Verify update credentials page is loaded
6. Verify OCI credentials form fields are visible (confirms providerUid is loaded)
7. Fill OCI credentials (user ID, fingerprint, key content, region)
8. Click Next to submit
9. Verify successful navigation to test connection page

### Expected Result:

- Update credentials page loads successfully
- OCI credentials form is displayed with all required fields
- Provider UID is correctly passed to the form (hidden field populated)
- Credentials can be updated and submitted
- User is redirected to test connection page after successful update

### Key verification points:

- Providers page loads correctly
- OCI provider row is visible in providers table
- Row actions dropdown opens and displays "Update Credentials" option
- Update credentials page URL contains correct parameters
- OCI credentials form displays the visible fields (user ID, fingerprint, key content, region); the tenancy OCID is hidden in the update flow
- Form submission succeeds (no silent failures due to missing provider UID)
- Successful redirect to test connection page

### Notes:

- Test uses the same environment variables as PROVIDER-E2E-012 (add OCI provider)
- Requires PROVIDER-E2E-012 to be run first to create the OCI provider
- This test validates the fix for the OCI update credentials form failing silently due to a missing provider UID
- The provider UID is required for OCI credential validation (tenancy field auto-populated from UID)
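The environment variables listed under Preconditions can be exported like this before running the suite (all values are placeholders; `-w 0` is GNU base64, omit it on macOS):

```bash
export E2E_OCI_TENANCY_ID="ocid1.tenancy.oc1..example"
export E2E_OCI_USER_ID="ocid1.user.oc1..example"
export E2E_OCI_FINGERPRINT="aa:bb:cc:dd:ee:ff:00:11:22:33:44:55:66:77:88:99"
export E2E_OCI_KEY_CONTENT="$(base64 -w 0 /path/to/oci_api_key.pem)"
export E2E_OCI_REGION="us-ashburn-1"
```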
@@ -1139,3 +1139,87 @@ test.describe("Add Provider", () => {
    );
  });
});

test.describe("Update Provider Credentials", () => {
  test.describe.serial("Update OCI Provider Credentials", () => {
    let providersPage: ProvidersPage;

    // Test data from environment variables (same as add OCI provider test)
    const tenancyId = process.env.E2E_OCI_TENANCY_ID;
    const userId = process.env.E2E_OCI_USER_ID;
    const fingerprint = process.env.E2E_OCI_FINGERPRINT;
    const keyContent = process.env.E2E_OCI_KEY_CONTENT;
    const region = process.env.E2E_OCI_REGION;

    // Validate required environment variables
    if (!tenancyId || !userId || !fingerprint || !keyContent || !region) {
      throw new Error(
        "E2E_OCI_TENANCY_ID, E2E_OCI_USER_ID, E2E_OCI_FINGERPRINT, E2E_OCI_KEY_CONTENT, and E2E_OCI_REGION environment variables are not set",
      );
    }

    // Setup before each test
    test.beforeEach(async ({ page }) => {
      providersPage = new ProvidersPage(page);
    });

    // Use admin authentication for provider management
    test.use({ storageState: "playwright/.auth/admin_user.json" });

    test(
      "should update OCI provider credentials successfully",
      {
        tag: [
          "@e2e",
          "@providers",
          "@oci",
          "@serial",
          "@PROVIDER-E2E-013",
        ],
      },
      async () => {
        // Prepare updated credentials
        const ociCredentials: OCIProviderCredential = {
          type: OCI_CREDENTIAL_OPTIONS.OCI_API_KEY,
          tenancyId: tenancyId,
          userId: userId,
          fingerprint: fingerprint,
          keyContent: keyContent,
          region: region,
        };

        // Navigate to providers page
        await providersPage.goto();
        await providersPage.verifyPageLoaded();

        // Verify OCI provider exists in the table
        const providerExists =
          await providersPage.verifySingleRowForProviderUID(tenancyId);
        if (!providerExists) {
          throw new Error(
            `OCI provider with tenancy ID ${tenancyId} not found. Run the add OCI provider test first.`,
          );
        }

        // Click update credentials for the OCI provider
        await providersPage.clickUpdateCredentials(tenancyId);

        // Verify update credentials page is loaded
        await providersPage.verifyUpdateCredentialsPageLoaded();

        // Verify OCI credentials form fields are visible (confirms providerUid is loaded)
        // Note: Tenancy OCID is hidden in update flow (auto-populated from provider UID)
        await providersPage.verifyOCIUpdateCredentialsPageLoaded();

        // Fill updated credentials
        await providersPage.fillOCICredentials(ociCredentials);

        // Click Next to submit
        await providersPage.clickNext();

        // Verify successful navigation to test connection page
        await providersPage.verifyTestConnectionPageLoaded();
      },
    );
  });
});