Compare commits


1 Commit

| Author | SHA1 | Message | Date |
|---|---|---|---|
| Pedro De Castro | 000b5ec3d5 | wip: PoC memory management | 2024-08-22 16:24:12 +02:00 |
1707 changed files with 15945 additions and 92845 deletions
-14
@@ -1,14 +0,0 @@
{
"repoOwner": "prowler-cloud",
"repoName": "prowler",
"targetPRLabels": [
"backport"
],
"sourcePRLabels": [
"was-backported"
],
"copySourcePRLabels": false,
"copySourcePRReviewers": true,
"prTitle": "{{sourcePullRequest.title}}",
"commitConflicts": true
}
-4
@@ -20,9 +20,6 @@ updates:
interval: "daily"
open-pull-requests-limit: 10
target-branch: master
labels:
- "dependencies"
- "github_actions"
- package-ecosystem: "pip"
directory: "/"
@@ -41,6 +38,5 @@ updates:
open-pull-requests-limit: 10
target-branch: v3
labels:
- "dependencies"
- "github_actions"
- "v3"
-8
@@ -2,19 +2,11 @@
Please include relevant motivation and context for this PR.
If it fixes an issue, please add it with `Fix #XXXX`
### Description
Please include a summary of the change and which issue is fixed. List any dependencies that are required for this change.
### Checklist
- Are there new checks included in this PR? Yes / No
- If so, do we need to update permissions for the provider? Please review this carefully.
- [ ] Review if the code is being covered by tests.
- [ ] Review if code is being documented following this specification https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings
- [ ] Review if backport is needed.
### License
-42
@@ -1,42 +0,0 @@
name: Automatic Backport
on:
pull_request_target:
branches: ['master']
types: ['labeled', 'closed']
jobs:
backport:
name: Backport PR
if: github.event.pull_request.merged == true && !(contains(github.event.pull_request.labels.*.name, 'backport'))
runs-on: ubuntu-latest
permissions:
id-token: write
pull-requests: write
contents: write
steps:
# Workaround not to fail the workflow if the PR does not need a backport
# https://github.com/sorenlouv/backport-github-action/issues/127#issuecomment-2258561266
- name: Check for backport labels
id: check_labels
run: |-
labels='${{ toJSON(github.event.pull_request.labels.*.name) }}'
echo "$labels"
matched=$(echo "${labels}" | jq '. | map(select(startswith("backport-to-"))) | length')
echo "matched=$matched"
echo "matched=$matched" >> $GITHUB_OUTPUT
- name: Backport Action
if: fromJSON(steps.check_labels.outputs.matched) > 0
uses: sorenlouv/backport-github-action@v9.5.1
with:
github_token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
auto_backport_label_prefix: backport-to-
- name: Info log
if: ${{ success() && fromJSON(steps.check_labels.outputs.matched) > 0 }}
run: cat ~/.backport/backport.info.log
- name: Debug log
if: ${{ failure() && fromJSON(steps.check_labels.outputs.matched) > 0 }}
run: cat ~/.backport/backport.debug.log
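The `jq` filter in the "Check for backport labels" step above counts labels whose names start with `backport-to-`. As a rough Python sketch of the same selection logic (the label list below is a made-up example, not from the workflow):

```python
import json

# Hypothetical payload mirroring github.event.pull_request.labels.*.name
labels = json.loads('["backport-to-v3", "severity/low"]')

# Same selection the jq filter performs: count labels with the backport prefix
matched = len([name for name in labels if name.startswith("backport-to-")])
print(matched)  # 1 -> the Backport Action step would run
```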
@@ -16,9 +16,9 @@ jobs:
name: Documentation Link
runs-on: ubuntu-latest
steps:
- name: Leave PR comment with the Prowler Documentation URI
- name: Leave PR comment with the SaaS Documentation URI
uses: peter-evans/create-or-update-comment@v4
with:
issue-number: ${{ env.PR_NUMBER }}
body: |
You can check the documentation for this PR here -> [Prowler Documentation](https://prowler-prowler-docs--${{ env.PR_NUMBER }}.com.readthedocs.build/projects/prowler-open-source/en/${{ env.PR_NUMBER }}/)
You can check the documentation for this PR here -> [SaaS Documentation](https://prowler-prowler-docs--${{ env.PR_NUMBER }}.com.readthedocs.build/projects/prowler-open-source/en/${{ env.PR_NUMBER }}/)
@@ -43,7 +43,7 @@ jobs:
runs-on: ubuntu-latest
outputs:
prowler_version_major: ${{ steps.get-prowler-version.outputs.PROWLER_VERSION_MAJOR }}
prowler_version: ${{ steps.get-prowler-version.outputs.PROWLER_VERSION }}
prowler_version: ${{ steps.update-prowler-version.outputs.PROWLER_VERSION }}
env:
POETRY_VIRTUALENVS_CREATE: "false"
@@ -65,8 +65,6 @@ jobs:
id: get-prowler-version
run: |
PROWLER_VERSION="$(poetry version -s 2>/dev/null)"
echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_ENV}"
echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_OUTPUT}"
# Store prowler version major just for the release
PROWLER_VERSION_MAJOR="${PROWLER_VERSION%%.*}"
@@ -91,6 +89,15 @@ jobs:
;;
esac
- name: Update Prowler version (release)
id: update-prowler-version
if: github.event_name == 'release'
run: |
PROWLER_VERSION="${{ github.event.release.tag_name }}"
poetry version "${PROWLER_VERSION}"
echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_ENV}"
echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_OUTPUT}"
- name: Login to DockerHub
uses: docker/login-action@v3
with:
@@ -153,7 +160,7 @@ jobs:
run: |
curl https://api.github.com/repos/${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}/dispatches \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}" \
-H "Authorization: Bearer ${{ secrets.ACCESS_TOKEN }}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
--data '{"event_type":"dispatch","client_payload":{"version":"v3-latest", "tag": "${{ env.LATEST_COMMIT_HASH }}"}}'
@@ -162,6 +169,6 @@ jobs:
run: |
curl https://api.github.com/repos/${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}/dispatches \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}" \
-H "Authorization: Bearer ${{ secrets.ACCESS_TOKEN }}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
--data '{"event_type":"dispatch","client_payload":{"version":"release", "tag":"${{ needs.container-build-push.outputs.prowler_version }}"}}'
+2 -2
@@ -13,10 +13,10 @@ name: "CodeQL"
on:
push:
branches: [ "master", "v3", "v4.*" ]
branches: [ "master", "v3" ]
pull_request:
# The branches below must be a subset of the branches above
branches: [ "master", "v3", "v4.*" ]
branches: [ "master", "v3" ]
schedule:
- cron: '00 12 * * *'
+1 -1
@@ -11,7 +11,7 @@ jobs:
with:
fetch-depth: 0
- name: TruffleHog OSS
uses: trufflesecurity/trufflehog@v3.83.2
uses: trufflesecurity/trufflehog@v3.80.2
with:
path: ./
base: ${{ github.event.repository.default_branch }}
-1
@@ -5,7 +5,6 @@ on:
branches:
- "master"
- "v3"
- "v4.*"
jobs:
labeler:
+1 -4
@@ -5,12 +5,10 @@ on:
branches:
- "master"
- "v3"
- "v4.*"
pull_request:
branches:
- "master"
- "v3"
- "v4.*"
jobs:
build:
runs-on: ubuntu-latest
@@ -22,7 +20,7 @@ jobs:
- uses: actions/checkout@v4
- name: Test if changes are in not ignored paths
id: are-non-ignored-files-changed
uses: tj-actions/changed-files@v45
uses: tj-actions/changed-files@v44
with:
files: ./**
files_ignore: |
@@ -31,7 +29,6 @@ jobs:
docs/**
permissions/**
mkdocs.yml
.backportrc.json
- name: Install poetry
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
+31
@@ -8,6 +8,8 @@ env:
RELEASE_TAG: ${{ github.event.release.tag_name }}
PYTHON_VERSION: 3.11
CACHE: "poetry"
# TODO: create a bot user for this kind of task, like prowler-bot
GIT_COMMITTER_EMAIL: "sergio@prowler.com"
jobs:
release-prowler-job:
@@ -38,6 +40,7 @@ jobs:
- name: Install dependencies
run: |
pipx install poetry
pipx inject poetry poetry-bumpversion
- name: Setup Python
uses: actions/setup-python@v5
@@ -45,6 +48,34 @@ jobs:
python-version: ${{ env.PYTHON_VERSION }}
cache: ${{ env.CACHE }}
- name: Update Poetry and config version
run: |
poetry version ${{ env.RELEASE_TAG }}
- name: Import GPG key
uses: crazy-max/ghaction-import-gpg@v6
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
git_user_signingkey: true
git_commit_gpgsign: true
- name: Push updated version to the release tag
run: |
# Configure Git
git config user.name "github-actions"
git config user.email "${{ env.GIT_COMMITTER_EMAIL }}"
# Add the files with the version changed
git add prowler/config/config.py pyproject.toml
git commit -m "chore(release): ${{ env.RELEASE_TAG }}" --no-verify -S
# Replace the tag with the version updated
git tag -fa ${{ env.RELEASE_TAG }} -m "chore(release): ${{ env.RELEASE_TAG }}" --sign
# Push the tag
git push -f origin ${{ env.RELEASE_TAG }}
- name: Build Prowler package
run: |
poetry build
@@ -50,13 +50,13 @@ jobs:
# Create pull request
- name: Create Pull Request
uses: peter-evans/create-pull-request@v7
uses: peter-evans/create-pull-request@v6
with:
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
commit-message: "feat(regions_update): Update regions for AWS services"
token: ${{ secrets.PROWLER_ACCESS_TOKEN }}
commit-message: "feat(regions_update): Update regions for AWS services."
branch: "aws-services-regions-updated-${{ github.sha }}"
labels: "status/waiting-for-revision, severity/low, provider/aws, backport-to-v3"
title: "chore(regions_update): Changes in regions for AWS services"
labels: "status/waiting-for-revision, severity/low, provider/aws, backport-v3"
title: "chore(regions_update): Changes in regions for AWS services."
body: |
### Description
+1 -1
@@ -85,7 +85,7 @@ repos:
# For running trufflehog in docker, use the following entry instead:
# entry: bash -c 'docker run -v "$(pwd):/workdir" -i --rm trufflesecurity/trufflehog:latest git file:///workdir --only-verified --fail'
language: system
stages: ["pre-commit", "pre-push"]
stages: ["commit", "push"]
- id: bandit
name: bandit
+7 -6
@@ -2,28 +2,29 @@ FROM python:3.12-alpine
LABEL maintainer="https://github.com/prowler-cloud/prowler"
# Update system dependencies and install essential tools
# Update system dependencies
#hadolint ignore=DL3018
RUN apk --no-cache upgrade && apk --no-cache add curl git g++
RUN apk --no-cache upgrade && apk --no-cache add curl
# Create non-root user
# Create nonroot user
RUN mkdir -p /home/prowler && \
echo 'prowler:x:1000:1000:prowler:/home/prowler:' > /etc/passwd && \
echo 'prowler:x:1000:' > /etc/group && \
chown -R prowler:prowler /home/prowler
USER prowler
# Copy necessary files
WORKDIR /home/prowler
COPY prowler/ /home/prowler/prowler/
COPY dashboard/ /home/prowler/dashboard/
COPY pyproject.toml /home/prowler
COPY README.md /home/prowler
# Install Python dependencies
# Install dependencies
ENV HOME='/home/prowler'
ENV PATH="$HOME/.local/bin:$PATH"
RUN pip install --no-cache-dir --upgrade pip setuptools wheel && \
#hadolint ignore=DL3013
RUN pip install --no-cache-dir --upgrade pip && \
pip install --no-cache-dir .
# Remove deprecated dash dependencies
+4 -4
@@ -12,7 +12,7 @@
<p align="center">
<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog"><img width="30" height="30" alt="Prowler community on Slack" src="https://github.com/prowler-cloud/prowler/assets/38561120/3c8b4ec5-6849-41a5-b5e1-52bbb94af73a"></a>
<br>
<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-2oinmgmw6-cl7gOrljSEqo_aoripVPFA">Join our Prowler community!</a>
<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog">Join our Prowler community!</a>
</p>
<hr>
<p align="center">
@@ -63,9 +63,9 @@ It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, Fe
| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) |
|---|---|---|---|---|
| AWS | 553 | 77 -> `prowler aws --list-services` | 30 -> `prowler aws --list-compliance` | 9 -> `prowler aws --list-categories` |
| GCP | 77 | 13 -> `prowler gcp --list-services` | 2 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories`|
| Azure | 138 | 17 -> `prowler azure --list-services` | 3 -> `prowler azure --list-compliance` | 2 -> `prowler azure --list-categories` |
| AWS | 385 | 67 -> `prowler aws --list-services` | 28 -> `prowler aws --list-compliance` | 7 -> `prowler aws --list-categories` |
| GCP | 77 | 13 -> `prowler gcp --list-services` | 1 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories`|
| Azure | 135 | 16 -> `prowler azure --list-services` | 2 -> `prowler azure --list-compliance` | 2 -> `prowler azure --list-categories` |
| Kubernetes | 83 | 7 -> `prowler kubernetes --list-services` | 1 -> `prowler kubernetes --list-compliance` | 7 -> `prowler kubernetes --list-categories` |
# 💻 Installation
@@ -12,11 +12,7 @@ Originally based on [org-multi-account](https://github.com/prowler-cloud/prowler
## Architecture Explanation
The solution is designed to be very simple. Prowler is run via an ECS Task definition that launches a single Fargate container. This Task Definition is executed on a schedule using an EventBridge Rule.
## Prerequisites
This solution assumes that you have a VPC architecture with two redundant subnets that can reach the AWS API endpoints (e.g. PrivateLink, NAT Gateway, etc.).
The solution is designed to be very simple. Prowler is run via an ECS Task definition that launches a single Fargate container. This Task Definition is executed on a schedule using an EventBridge Rule.
## CloudFormation Templates
@@ -63,9 +59,9 @@ The logs that are generated and sent to Cloudwatch are error logs, and assessmen
## Instructions
1. Create a Private ECR Repository in the account that will host the Prowler container. The Audit account is recommended, but any account can be used.
2. Configure the .awsvariables file. Note the ROLE name chosen as it will be the CrossAccountRole.
3. Follow the steps from "View Push Commands" to build and upload the container image. Substitute step 2 with the build command provided in the Dockerfile. You need to have Docker and AWS CLI installed, and use the cli to login to the account first. After upload note the Image URI, as it is required for the CF-Prowler-ECS template. Ensure that you pay attention to the architecture while performing the docker build command. A common mistake is not specifying the architecture and then building on Apple silicon. Your task will fail with *exec /home/prowler/.local/bin/prowler: exec format error*.
4. Make sure SecurityHub is enabled in every account in AWS Organizations, and that the SecurityHub integration is enabled as explained in [Prowler - Security Hub Integration](https://github.com/prowler-cloud/prowler#security-hub-integration)
2. Configure the .awsvariables file. Note the ROLE name chosen as it will be the CrossAccountRole.
3. Follow the steps from "View Push Commands" to build and upload the container image. You need to have Docker and AWS CLI installed, and use the cli to login to the account first. After upload note the Image URI, as it is required for the CF-Prowler-ECS template.
4. Make sure SecurityHub is enabled in every account in AWS Organizations, and that the SecurityHub integration is enabled as explained in [Prowler - Security Hub Integration](https://github.com/prowler-cloud/prowler#security-hub-integration)
5. Deploy **CF-Prowler-CrossAccountRole.yml** in the Master Account as a single stack. You will have to choose the CrossAccountRole name (ProwlerXA-Role by default) and the ProwlerTaskRoleName (ProwlerECSTask-Role by default)
6. Deploy **CF-Prowler-CrossAccountRole.yml** in every Member Account as a StackSet. Choose the same CrossAccountName and ProwlerTaskRoleName as the previous step.
7. Deploy **CF-Prowler-IAM.yml** in the account that will host the Prowler container (the same from step 1). The following template parameters must be provided:
@@ -95,4 +91,4 @@ If you find permission errors in the CloudWatch logs, the culprit might be a [Se
## Upgrading Prowler
Prowler version is controlled by the PROWLERVER argument in the Dockerfile, change it to the desired version and follow the ECR Push Commands to update the container image.
Old images can be deleted from the ECR Repository after the new image is confirmed to work. They will show as "untagged" as only one image can hold the "latest" tag.
@@ -68,7 +68,7 @@ for accountId in ${ACCOUNTS_IN_ORGS}; do
# Run Prowler
echo -e "Assessing AWS Account: ${accountId}, using Role: ${ROLE} on $(date)"
# Pipe stdout to /dev/null to reduce unnecessary Cloudwatch logs
prowler aws -R arn:"${PARTITION}":iam::"${accountId}":role/"${ROLE}" --security-hub --send-sh-only-fails -f "${REGION}" > /dev/null
prowler aws -R arn:"${PARTITION}":iam::"${accountId}":role/"${ROLE}" -q -S -f "${REGION}" > /dev/null
TOTAL_SEC=$((SECONDS - START_TIME))
printf "Completed AWS Account: ${accountId} in %02dh:%02dm:%02ds" $((TOTAL_SEC / 3600)) $((TOTAL_SEC % 3600 / 60)) $((TOTAL_SEC % 60))
echo ""
@@ -60,42 +60,24 @@ Resources:
Effect: Allow
Resource: "*"
Action:
- account:Get*
- appstream:Describe*
- appstream:List*
- backup:List*
- cloudtrail:GetInsightSelectors
- codeartifact:List*
- codebuild:BatchGet*
- cognito-idp:GetUserPoolMfaConfig
- dlm:Get*
- drs:Describe*
- ds:Describe*
- ds:Get*
- ds:List*
- dynamodb:GetResourcePolicy
- ds:ListAuthorizedApplications
- ec2:GetEbsEncryptionByDefault
- ec2:GetSnapshotBlockPublicAccessState
- ec2:GetInstanceMetadataDefaults
- ecr:Describe*
- ecr:GetRegistryScanningConfiguration
- elasticfilesystem:DescribeBackupPolicy
- glue:GetConnections
- glue:GetSecurityConfiguration*
- glue:GetSecurityConfiguration
- glue:SearchTables
- lambda:GetFunction*
- logs:FilterLogEvents
- lightsail:GetRelationalDatabases
- macie2:GetMacieSession
- lambda:GetFunction
- s3:GetAccountPublicAccessBlock
- shield:DescribeProtection
- shield:GetSubscriptionState
- ssm:GetDocument
- ssm-incidents:List*
- support:Describe*
- tag:GetTagKeys
- wellarchitected:List*
- PolicyName: Prowler-Security-Hub
PolicyDocument:
Version: 2012-10-17
Statement:
- Sid: AllowProwlerSecurityHub
Effect: Allow
Resource: "*"
@@ -62,7 +62,7 @@ Resources:
awslogs-stream-prefix: ecs
Cpu: 1024
ExecutionRoleArn: !Ref ECSExecutionRole
Memory: 8192
Memory: 2048
NetworkMode: awsvpc
TaskRoleArn: !Ref ProwlerTaskRole
Family: SecurityHubProwlerTask
@@ -97,15 +97,9 @@ Outputs:
ECSExecutionRoleARN:
Description: ARN of the ECS Task Execution Role
Value: !GetAtt ECSExecutionRole.Arn
Export:
Name: ECSExecutionRoleArn
ProwlerTaskRoleARN:
Description: ARN of the ECS Prowler Task Role
Value: !GetAtt ProwlerTaskRole.Arn
Export:
Name: ProwlerTaskRoleArn
ECSEventRoleARN:
Description: ARN of the Eventbridge Task Role
Value: !GetAtt ECSEventRole.Arn
Export:
Name: ECSEventRoleARN
-229
@@ -2223,232 +2223,3 @@ def get_section_containers_ens(data, section_1, section_2, section_3, section_4)
section_containers.append(section_container)
return html.Div(section_containers, className="compliance-data-layout")
# This function extracts and compares up to two numeric values, ensuring correct sorting for version-like strings.
def extract_numeric_values(value):
numbers = re.findall(r"\d+", str(value))
if len(numbers) >= 2:
return int(numbers[0]), int(numbers[1])
elif len(numbers) == 1:
return int(numbers[0]), 0
return 0, 0
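# Illustrative aside (not part of the diff): with the helper above,
# version-like strings sort numerically instead of lexicographically, e.g.
#   sorted(["10.2", "2.10", "2.3"], key=extract_numeric_values)
#   returns ["2.3", "2.10", "10.2"]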
def get_section_containers_kisa_ismsp(data, section_1, section_2):
data["STATUS"] = data["STATUS"].apply(map_status_to_icon)
data[section_1] = data[section_1].astype(str)
data[section_2] = data[section_2].astype(str)
data.sort_values(
by=section_1,
key=lambda x: x.map(extract_numeric_values),
ascending=True,
inplace=True,
)
findings_counts_section = (
data.groupby([section_2, "STATUS"]).size().unstack(fill_value=0)
)
findings_counts_name = (
data.groupby([section_1, "STATUS"]).size().unstack(fill_value=0)
)
section_containers = []
for name in data[section_1].unique():
success_name = (
findings_counts_name.loc[name, pass_emoji]
if pass_emoji in findings_counts_name.columns
else 0
)
failed_name = (
findings_counts_name.loc[name, fail_emoji]
if fail_emoji in findings_counts_name.columns
else 0
)
fig_name = go.Figure(
data=[
go.Bar(
name="Failed",
x=[failed_name],
y=[""],
orientation="h",
marker=dict(color="#e77676"),
width=[0.8],
),
go.Bar(
name="Success",
x=[success_name],
y=[""],
orientation="h",
marker=dict(color="#45cc6e"),
width=[0.8],
),
]
)
fig_name.update_layout(
barmode="stack",
margin=dict(l=10, r=10, t=10, b=10),
paper_bgcolor="rgba(0,0,0,0)",
plot_bgcolor="rgba(0,0,0,0)",
showlegend=False,
width=350,
height=30,
xaxis=dict(showticklabels=False, showgrid=False, zeroline=False),
yaxis=dict(showticklabels=False, showgrid=False, zeroline=False),
annotations=[
dict(
x=success_name + failed_name,
y=0,
xref="x",
yref="y",
text=str(success_name),
showarrow=False,
font=dict(color="#45cc6e", size=14),
xanchor="left",
yanchor="middle",
),
dict(
x=0,
y=0,
xref="x",
yref="y",
text=str(failed_name),
showarrow=False,
font=dict(color="#e77676", size=14),
xanchor="right",
yanchor="middle",
),
],
)
graph_name = dcc.Graph(
figure=fig_name, config={"staticPlot": True}, className="info-bar"
)
graph_div = html.Div(graph_name, className="graph-section")
direct_internal_items = []
for section in data[data[section_1] == name][section_2].unique():
specific_data = data[
(data[section_1] == name) & (data[section_2] == section)
]
success_section = (
findings_counts_section.loc[section, pass_emoji]
if pass_emoji in findings_counts_section.columns
else 0
)
failed_section = (
findings_counts_section.loc[section, fail_emoji]
if fail_emoji in findings_counts_section.columns
else 0
)
data_table = dash_table.DataTable(
data=specific_data.to_dict("records"),
columns=[
{"name": i, "id": i}
for i in ["CHECKID", "STATUS", "REGION", "ACCOUNTID", "RESOURCEID"]
],
style_table={"overflowX": "auto"},
style_as_list_view=True,
style_cell={"textAlign": "left", "padding": "5px"},
)
fig_section = go.Figure(
data=[
go.Bar(
name="Failed",
x=[failed_section],
y=[""],
orientation="h",
marker=dict(color="#e77676"),
),
go.Bar(
name="Success",
x=[success_section],
y=[""],
orientation="h",
marker=dict(color="#45cc6e"),
),
]
)
fig_section.update_layout(
barmode="stack",
margin=dict(l=10, r=10, t=10, b=10),
paper_bgcolor="rgba(0,0,0,0)",
plot_bgcolor="rgba(0,0,0,0)",
showlegend=False,
width=350,
height=30,
xaxis=dict(showticklabels=False, showgrid=False, zeroline=False),
yaxis=dict(showticklabels=False, showgrid=False, zeroline=False),
annotations=[
dict(
x=success_section + failed_section,
y=0,
xref="x",
yref="y",
text=str(success_section),
showarrow=False,
font=dict(color="#45cc6e", size=14),
xanchor="left",
yanchor="middle",
),
dict(
x=0,
y=0,
xref="x",
yref="y",
text=str(failed_section),
showarrow=False,
font=dict(color="#e77676", size=14),
xanchor="right",
yanchor="middle",
),
],
)
graph_section = dcc.Graph(
figure=fig_section,
config={"staticPlot": True},
className="info-bar-child",
)
graph_div_section = html.Div(graph_section, className="graph-section-req")
internal_accordion_item = dbc.AccordionItem(
title=section,
children=[html.Div([data_table], className="inner-accordion-content")],
)
internal_section_container = html.Div(
[
graph_div_section,
dbc.Accordion(
[internal_accordion_item], start_collapsed=True, flush=True
),
],
className="accordion-inner--child",
)
direct_internal_items.append(internal_section_container)
accordion_item = dbc.AccordionItem(
title=f"{name}", children=direct_internal_items
)
section_container = html.Div(
[
graph_div,
dbc.Accordion([accordion_item], start_collapsed=True, flush=True),
],
className="accordion-inner",
)
section_containers.append(section_container)
return html.Div(section_containers, className="compliance-data-layout")
@@ -1,25 +0,0 @@
import warnings
from dashboard.common_methods import get_section_containers_kisa_ismsp
warnings.filterwarnings("ignore")
def get_table(data):
aux = data[
[
"REQUIREMENTS_ID",
"REQUIREMENTS_ATTRIBUTES_SUBDOMAIN",
"REQUIREMENTS_ATTRIBUTES_SECTION",
# "REQUIREMENTS_DESCRIPTION",
"CHECKID",
"STATUS",
"REGION",
"ACCOUNTID",
"RESOURCEID",
]
].copy()
return get_section_containers_kisa_ismsp(
aux, "REQUIREMENTS_ATTRIBUTES_SUBDOMAIN", "REQUIREMENTS_ATTRIBUTES_SECTION"
)
@@ -1,25 +0,0 @@
import warnings
from dashboard.common_methods import get_section_containers_kisa_ismsp
warnings.filterwarnings("ignore")
def get_table(data):
aux = data[
[
"REQUIREMENTS_ID",
"REQUIREMENTS_ATTRIBUTES_SUBDOMAIN",
"REQUIREMENTS_ATTRIBUTES_SECTION",
# "REQUIREMENTS_DESCRIPTION",
"CHECKID",
"STATUS",
"REGION",
"ACCOUNTID",
"RESOURCEID",
]
].copy()
return get_section_containers_kisa_ismsp(
aux, "REQUIREMENTS_ATTRIBUTES_SUBDOMAIN", "REQUIREMENTS_ATTRIBUTES_SECTION"
)
+2 -2
@@ -222,7 +222,7 @@ class ec2_securitygroup_with_many_ingress_egress_rules(Check):
max_security_group_rules = ec2_client.audit_config.get(
"max_security_group_rules", 50
)
for security_group_arn, security_group in ec2_client.security_groups.items():
for security_group in ec2_client.security_groups:
```
```yaml title="config.yaml"
@@ -272,7 +272,7 @@ Each Prowler check has metadata associated which is stored at the same level of
# Severity holds the check's severity, always in lowercase (critical, high, medium, low or informational)
"Severity": "critical",
# ResourceType only for AWS, holds the type from here
# https://docs.aws.amazon.com/securityhub/latest/userguide/asff-resources.html
# https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-template-resource-type-ref.html
"ResourceType": "Other",
# Description holds the title of the check, for now is the same as CheckTitle
"Description": "Ensure there are no EC2 AMIs set as Public.",
+4 -4
@@ -14,8 +14,10 @@ Once that is satisfied go ahead and clone your forked repo:
git clone https://github.com/<your-github-user>/prowler
cd prowler
```
For isolation and to avoid conflicts with other environments, we recommend using `poetry`, a Python dependency management tool. You can install it by following the instructions [here](https://python-poetry.org/docs/#installation).
For isolation and avoid conflicts with other environments, we recommend usage of `poetry`:
```
pip install poetry
```
Then install all dependencies including the ones for developers:
```
poetry install --with dev
@@ -48,8 +50,6 @@ You can see all dependencies in file `pyproject.toml`.
Moreover, you would need to install [`TruffleHog`](https://github.com/trufflesecurity/trufflehog) on the latest version to check for secrets in the code. You can install it using the official installation guide [here](https://github.com/trufflesecurity/trufflehog?tab=readme-ov-file#floppy_disk-installation).
Additionally, please ensure to follow the code documentation practices outlined in this guide: [Google Python Style Guide - Comments and Docstrings](https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings).
???+ note
If you have any trouble when committing to the Prowler repository, add the `--no-verify` flag to the `git commit` command.
+10
@@ -44,6 +44,7 @@ class Provider(ABC):
Methods:
print_credentials(): Displays the provider's credentials used for auditing in the command-line interface.
setup_session(): Sets up the session for the provider.
get_output_mapping(): Returns the output mapping between the provider and the generic model.
validate_arguments(): Validates the arguments for the provider.
get_checks_to_execute_by_audit_resources(): Returns a set of checks based on the input resources to scan.
@@ -130,6 +131,15 @@ class Provider(ABC):
"""
raise NotImplementedError()
@abstractmethod
def get_output_mapping(self) -> dict:
"""
get_output_mapping returns the output mapping between the provider and the generic model.
This method needs to be created in each provider.
"""
raise NotImplementedError()
def validate_arguments(self) -> None:
"""
validate_arguments validates the arguments for the provider.
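The new abstract method above obliges every concrete provider to implement `get_output_mapping()`. A minimal, self-contained sketch of that contract (the stand-in classes and mapping keys below are hypothetical, not taken from this diff):

```python
from abc import ABC, abstractmethod

class Provider(ABC):
    """Trimmed-down stand-in for Prowler's Provider ABC."""

    @abstractmethod
    def get_output_mapping(self) -> dict:
        """Returns the output mapping between the provider and the generic model."""
        raise NotImplementedError()

class DummyProvider(Provider):
    """Hypothetical concrete provider, used only for illustration."""

    def get_output_mapping(self) -> dict:
        # Maps provider-specific finding fields onto the generic output model
        return {
            "resource_id": "finding.resource_id",
            "region": "finding.location",
        }

print(DummyProvider().get_output_mapping()["region"])  # -> finding.location
```

Because the base method is declared with `@abstractmethod`, a provider that forgets to implement it cannot even be instantiated, which surfaces the error early.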
+6 -6
@@ -592,7 +592,7 @@ is following the actual format, add one function where the client is passed to b
`mock_api_<endpoint>_calls` (*endpoint* refers to the first attribute pointed after *client*).
In the example of BigQuery the function is called `mock_api_dataset_calls`. And inside of this function we found an assignation to
be used in the `_get_datasets` method in BigQuery class:
be used in the `__get_datasets__` method in BigQuery class:
```python
# Mocking datasets
@@ -765,7 +765,7 @@ from tests.providers.azure.azure_fixtures import (
set_mocked_azure_provider,
)
# Function to mock the service function _get_components; its task is to return a possible value that the real function could return
# Function to mock the service function __get_components__; its task is to return a possible value that the real function could return
def mock_appinsights_get_components(_):
return {
AZURE_SUBSCRIPTION_ID: {
@@ -779,12 +779,12 @@ def mock_appinsights_get_components(_):
# Patch decorator to use the mocked function instead of the function with the real API call
@patch(
"prowler.providers.azure.services.appinsights.appinsights_service.AppInsights._get_components",
"prowler.providers.azure.services.appinsights.appinsights_service.AppInsights.__get_components__",
new=mock_appinsights_get_components,
)
class Test_AppInsights_Service:
# Mandatory test for every service; this method tests that the client instance is correct
def test_get_client(self):
def test__get_client__(self):
app_insights = AppInsights(set_mocked_azure_provider())
assert (
app_insights.clients[AZURE_SUBSCRIPTION_ID].__class__.__name__
@@ -794,8 +794,8 @@ class Test_AppInsights_Service:
def test__get_subscriptions__(self):
app_insights = AppInsights(set_mocked_azure_provider())
assert app_insights.subscriptions.__class__.__name__ == "dict"
# Test for the function _get_components; inside it the client uses the mocked function
def test_get_components(self):
# Test for the function __get_components__; inside it the client uses the mocked function
def test__get_components__(self):
appinsights = AppInsights(set_mocked_azure_provider())
assert len(appinsights.components) == 1
assert (
+37 -72
@@ -19,40 +19,14 @@ It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, Fe
## Quick Start
### Installation
Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/), thus can be installed as Python package with `Python >= 3.9`:
Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/), thus can be installed using pip with `Python >= 3.9`:
=== "pipx"
[pipx](https://pipx.pypa.io/stable/) is a tool to install Python applications in isolated environments. It is recommended to use `pipx` for a global installation.
=== "Generic"
_Requirements_:
* `Python >= 3.9`
* `pipx` installed: [pipx installation](https://pipx.pypa.io/stable/installation/).
* AWS, GCP, Azure and/or Kubernetes credentials
_Commands_:
``` bash
pipx install prowler
prowler -v
```
To upgrade Prowler to the latest version, run:
``` bash
pipx upgrade prowler
```
=== "pip"
???+ warning
This method is not recommended because it will modify the environment which you choose to install. Consider using [pipx](https://docs.prowler.com/projects/prowler-open-source/en/latest/#__tabbed_1_1) for a global installation.
_Requirements_:
* `Python >= 3.9`
* `Python pip >= 21.0.0`
* `Python pip >= 3.9`
* AWS, GCP, Azure and/or Kubernetes credentials
_Commands_:
@@ -62,19 +36,13 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
prowler -v
```
To upgrade Prowler to the latest version, run:
``` bash
pip install --upgrade prowler
```
=== "Docker"
_Requirements_:
* Have `docker` installed: https://docs.docker.com/get-docker/.
* In the command below, change `-v` to your local directory path in order to access the reports.
* AWS, GCP, Azure and/or Kubernetes credentials
* In the command below, change `-v` to your local directory path in order to access the reports.
_Commands_:
@@ -86,21 +54,41 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
--env AWS_SESSION_TOKEN toniblyx/prowler:latest
```
=== "Ubuntu"
_Requirements for Ubuntu 20.04.3 LTS_:
* AWS, GCP, Azure and/or Kubernetes credentials
* Install python 3.9 with: `sudo apt-get install python3.9`
* Remove python 3.8 to avoid conflicts if you can: `sudo apt-get remove python3.8`
* Make sure you have the python3 distutils package installed: `sudo apt-get install python3-distutils`
* To make sure you use pip for 3.9 get the get-pip script with: `curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py`
* Execute it with the proper python version: `sudo python3.9 get-pip.py`
* Now you should have pip for 3.9 ready: `pip3.9 --version`
_Commands_:
```
pip3.9 install prowler
export PATH=$PATH:/home/$HOME/.local/bin/
prowler -v
```
=== "GitHub"
_Requirements for Developers_:
* `git`
* `poetry` installed: [poetry installation](https://python-poetry.org/docs/#installation).
* AWS, GCP, Azure and/or Kubernetes credentials
* `git`, `Python >= 3.9`, `pip` and `poetry` installed (`pip install poetry`)
_Commands_:
```
git clone https://github.com/prowler-cloud/prowler
cd prowler
poetry shell
poetry install
poetry run python prowler.py -v
python prowler.py -v
```
???+ note
If you want to clone Prowler from Windows, use `git config core.longpaths true` to allow long file paths.
@@ -109,33 +97,15 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
_Requirements_:
* `Python >= 3.9`
* AWS, GCP, Azure and/or Kubernetes credentials
* Latest Amazon Linux 2 should come with Python 3.9 already installed however it may need pip. Install Python pip 3.9 with: `sudo yum install -y python3-pip`.
* Make sure setuptools for python is already installed with: `pip3 install setuptools`
_Commands_:
```
python3 -m pip install --user pipx
python3 -m pipx ensurepath
pipx install prowler
prowler -v
```
=== "Ubuntu"
_Requirements_:
* `Ubuntu 23.04` or above, if you are using an older version of Ubuntu check [pipx installation](https://docs.prowler.com/projects/prowler-open-source/en/latest/#__tabbed_1_1) and ensure you have `Python >= 3.9`.
* `Python >= 3.9`
* AWS, GCP, Azure and/or Kubernetes credentials
_Commands_:
``` bash
sudo apt update
sudo apt install pipx
pipx ensurepath
pipx install prowler
pip3.9 install prowler
export PATH=$PATH:/home/$HOME/.local/bin/
prowler -v
```
@@ -155,7 +125,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
=== "AWS CloudShell"
After the migration of AWS CloudShell from Amazon Linux 2 to Amazon Linux 2023 [[1]](https://aws.amazon.com/about-aws/whats-new/2023/12/aws-cloudshell-migrated-al2023/) [[2]](https://docs.aws.amazon.com/cloudshell/latest/userguide/cloudshell-AL2023-migration.html), there is no longer a need to manually compile Python 3.9 as it's already included in AL2023. Prowler can thus be easily installed following the Generic method of installation via pip. Follow the steps below to successfully execute Prowler v4 in AWS CloudShell:
After the migration of AWS CloudShell from Amazon Linux 2 to Amazon Linux 2023 [[1]](https://aws.amazon.com/about-aws/whats-new/2023/12/aws-cloudshell-migrated-al2023/) [2](https://docs.aws.amazon.com/cloudshell/latest/userguide/cloudshell-AL2023-migration.html), there is no longer a need to manually compile Python 3.9 as it's already included in AL2023. Prowler can thus be easily installed following the Generic method of installation via pip. Follow the steps below to successfully execute Prowler v4 in AWS CloudShell:
_Requirements_:
@@ -163,13 +133,11 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
_Commands_:
```bash
```
sudo bash
adduser prowler
su prowler
python3 -m pip install --user pipx
python3 -m pipx ensurepath
pipx install prowler
pip install prowler
cd /tmp
prowler aws
```
@@ -185,12 +153,9 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
_Commands_:
```bash
python3 -m pip install --user pipx
python3 -m pipx ensurepath
pipx install prowler
cd /tmp
prowler azure --az-cli-auth
```
pip install prowler
prowler -v
```
## Prowler container versions
+7 -11
@@ -4,25 +4,21 @@ Prowler allows you to do threat detection in AWS based on the CloudTrail log rec
```
prowler aws --category threat-detection
```
This command will run these checks:
This comand will run these checks:
* `cloudtrail_threat_detection_privilege_escalation` -> Detects privilege escalation attacks.
* `cloudtrail_threat_detection_enumeration` -> Detects enumeration attacks.
* `cloudtrail_threat_detection_llm_jacking` -> Detects LLM Jacking attacks.
* `cloudtrail_threat_detection_privilege_escalation`
* `cloudtrail_threat_detection_enumeration`
???+ note
Threat Detection checks will be only executed using `--category threat-detection` flag due to performance.
Threat Detection checks will be only executed using `--category threat-detection` flag due to preformance.
## Config File
If you want to manage the behavior of the Threat Detection checks you can edit the `config.yaml` file in `/prowler/config`. In this file you can edit the following attributes related with Threat Detection:
* `threat_detection_privilege_escalation_threshold`: determines the percentage of actions found to decide if it is a privilege_escalation attack event, by default is 0.2 (20%)
* `threat_detection_privilege_escalation_threshold`: determines the percentage of actions found to decide if it is a privilege_escalation attack event, by default is 0.1 (10%)
* `threat_detection_privilege_escalation_minutes`: it is the past minutes to search from now for privilege_escalation attacks, by default is 1440 minutes (24 hours)
* `threat_detection_privilege_escalation_actions`: these are the default actions related with privilege escalation.
* `threat_detection_enumeration_threshold`: determines the percentage of actions found to decide if it is an enumeration attack event, by default is 0.3 (30%)
* `threat_detection_privilege_escalation_actions`: these are the default actions related with priviledge scalation.
* `threat_detection_enumeration_threshold`: determines the percentage of actions found to decide if it is an enumeration attack event, by default is 0.1 (10%)
* `threat_detection_enumeration_minutes`: it is the past minutes to search from now for enumeration attacks, by default is 1440 minutes (24 hours)
* `threat_detection_enumeration_actions`: these are the default actions related with enumeration attacks.
* `threat_detection_llm_jacking_threshold`: determines the percentage of actions found to decide if it is an LLM Jacking attack event, by default is 0.4 (40%)
* `threat_detection_llm_jacking_minutes`: it is the past minutes to search from now for LLM Jacking attacks, by default is 1440 minutes (24 hours)
* `threat_detection_llm_jacking_actions`: these are the default actions related with LLM Jacking attacks.
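As a rough worked example of how one of these thresholds applies (the event counts below are hypothetical; the exact matching logic lives in the check itself):

```python
# Hypothetical numbers for a 0.2 (20%) threshold: if 6 of the 25 configured
# privilege-escalation actions appear in the CloudTrail window,
# 6 / 25 = 0.24 >= 0.2, so the check reports an attack.
threshold = 0.2
actions_found, actions_total = 6, 25
print(actions_found / actions_total >= threshold)  # True
```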
@@ -7,6 +7,7 @@ At the time of writing this documentation the available Azure Clouds from differ
- AzureCloud
- AzureChinaCloud
- AzureUSGovernment
- AzureGermanCloud
If you want to change the default one you must include the flag `--azure-region`, i.e.:
+29 -95
@@ -13,57 +13,37 @@ The following list includes all the AWS checks with configurable variables that
| Check Name | Value | Type |
|---------------------------------------------------------------|--------------------------------------------------|-----------------|
| `acm_certificates_expiration_check` | `days_to_expire_threshold` | Integer |
| `appstream_fleet_maximum_session_duration` | `max_session_duration_seconds` | Integer |
| `appstream_fleet_session_disconnect_timeout` | `max_disconnect_timeout_in_seconds` | Integer |
| `appstream_fleet_session_idle_disconnect_timeout` | `max_idle_disconnect_timeout_in_seconds` | Integer |
| `autoscaling_find_secrets_ec2_launch_configuration` | `secrets_ignore_patterns` | List of Strings |
| `awslambda_function_no_secrets_in_code` | `secrets_ignore_patterns` | List of Strings |
| `awslambda_function_no_secrets_in_variables` | `secrets_ignore_patterns` | List of Strings |
| `awslambda_function_using_supported_runtimes` | `obsolete_lambda_runtimes` | Integer |
| `awslambda_function_vpc_is_in_multi_azs` | `lambda_min_azs` | Integer |
| `cloudformation_stack_outputs_find_secrets` | `secrets_ignore_patterns` | List of Strings |
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_actions` | List of Strings |
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_entropy` | Integer |
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_minutes` | Integer |
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_actions` | List of Strings |
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_entropy` | Integer |
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_minutes` | Integer |
| `cloudwatch_log_group_no_secrets_in_logs` | `secrets_ignore_patterns` | List of Strings |
| `cloudwatch_log_group_no_critical_pii_in_logs` | `critical_pii_entities` | List of Strings |
| `cloudwatch_log_group_no_critical_pii_in_logs` | `pii_language` | String |
| `cloudwatch_log_group_retention_policy_specific_days_enabled` | `log_group_retention_days` | Integer |
| `codebuild_project_no_secrets_in_variables` | `excluded_sensitive_environment_variables` | List of Strings |
| `codebuild_project_no_secrets_in_variables` | `secrets_ignore_patterns` | List of Strings |
| `config_recorder_all_regions_enabled` | `mute_non_default_regions` | Boolean |
| `drs_job_exist` | `mute_non_default_regions` | Boolean |
| `ec2_elastic_ip_shodan` | `shodan_api_key` | String |
| `ec2_instance_older_than_specific_days` | `max_ec2_instance_age_in_days` | Integer |
| `ec2_instance_secrets_user_data` | `secrets_ignore_patterns` | List of Strings |
| `ec2_launch_template_no_secrets` | `secrets_ignore_patterns` | List of Strings |
| `ec2_securitygroup_allow_ingress_from_internet_to_any_port` | `ec2_allowed_instance_owners` | List of Strings |
| `ec2_securitygroup_allow_ingress_from_internet_to_any_port` | `ec2_allowed_interface_types` | List of Strings |
| `ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports`| `ec2_sg_high_risk_ports` | List of Integer |
| `ec2_securitygroup_with_many_ingress_egress_rules` | `max_security_group_rules` | Integer |
| `ecs_task_definitions_no_environment_secrets` | `secrets_ignore_patterns` | List of Strings |
| `ecr_repositories_scan_vulnerabilities_in_latest_image` | `ecr_repository_vulnerability_minimum_severity` | String |
| `eks_cluster_uses_a_supported_version` | `eks_cluster_oldest_version_supported` | String |
| `eks_control_plane_logging_all_types_enabled` | `eks_required_log_types` | List of Strings |
| `elb_is_in_multiple_az` | `elb_min_azs` | Integer |
| `elbv2_is_in_multiple_az` | `elbv2_min_azs` | Integer |
| `guardduty_is_enabled` | `mute_non_default_regions` | Boolean |
| `iam_user_accesskey_unused` | `max_unused_access_keys_days` | Integer |
| `iam_user_console_access_unused` | `max_console_access_days` | Integer |
| `organizations_delegated_administrators` | `organizations_trusted_delegated_administrators` | List of Strings |
| `organizations_scp_check_deny_regions` | `organizations_enabled_regions` | List of Strings |
| `rds_instance_backup_enabled` | `check_rds_instance_replicas` | Boolean |
| `securityhub_enabled` | `mute_non_default_regions` | Boolean |
| `secretsmanager_secret_unused` | `max_days_secret_unused` | Integer |
| `secretsmanager_secret_rotated_periodically` | `max_days_secret_unrotated` | Integer |
| `ssm_document_secrets` | `secrets_ignore_patterns` | List of Strings |
| `trustedadvisor_premium_support_plan_subscribed` | `verify_premium_support_plans` | Boolean |
| `ec2_elastic_ip_shodan` | `shodan_api_key` | String |
| `ec2_securitygroup_with_many_ingress_egress_rules` | `max_security_group_rules` | Integer |
| `ec2_instance_older_than_specific_days` | `max_ec2_instance_age_in_days` | Integer |
| `vpc_endpoint_connections_trust_boundaries` | `trusted_account_ids` | List of Strings |
| `vpc_endpoint_services_allowed_principals_trust_boundaries` | `trusted_account_ids` | List of Strings |
| `cloudwatch_log_group_retention_policy_specific_days_enabled` | `log_group_retention_days` | Integer |
| `appstream_fleet_session_idle_disconnect_timeout` | `max_idle_disconnect_timeout_in_seconds` | Integer |
| `appstream_fleet_session_disconnect_timeout` | `max_disconnect_timeout_in_seconds` | Integer |
| `appstream_fleet_maximum_session_duration` | `max_session_duration_seconds` | Integer |
| `awslambda_function_using_supported_runtimes` | `obsolete_lambda_runtimes` | Integer |
| `organizations_scp_check_deny_regions` | `organizations_enabled_regions` | List of Strings |
| `organizations_delegated_administrators` | `organizations_trusted_delegated_administrators` | List of Strings |
| `ecr_repositories_scan_vulnerabilities_in_latest_image` | `ecr_repository_vulnerability_minimum_severity` | String |
| `trustedadvisor_premium_support_plan_subscribed` | `verify_premium_support_plans` | Boolean |
| `config_recorder_all_regions_enabled` | `mute_non_default_regions` | Boolean |
| `drs_job_exist` | `mute_non_default_regions` | Boolean |
| `guardduty_is_enabled` | `mute_non_default_regions` | Boolean |
| `securityhub_enabled` | `mute_non_default_regions` | Boolean |
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_entropy` | Integer |
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_minutes` | Integer |
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_actions` | List of Strings |
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_entropy` | Integer |
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_minutes` | Integer |
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_actions` | List of Strings |
| `rds_instance_backup_enabled` | `check_rds_instance_replicas` | Boolean |
| `ec2_securitygroup_allow_ingress_from_internet_to_any_port` | `ec2_allowed_interface_types` | List of Strings |
| `ec2_securitygroup_allow_ingress_from_internet_to_any_port` | `ec2_allowed_instance_owners` | List of Strings |
| `acm_certificates_expiration_check` | `days_to_expire_threshold` | Integer |
| `eks_control_plane_logging_all_types_enabled` | `eks_required_log_types` | List of Strings |
## Azure
@@ -145,24 +125,8 @@ aws:
[
"amazon-elb"
]
# aws.ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports
ec2_sg_high_risk_ports:
[
25,
110,
135,
143,
445,
3000,
4333,
5000,
5500,
8080,
8088,
]
# AWS VPC Configuration (vpc_endpoint_connections_trust_boundaries, vpc_endpoint_services_allowed_principals_trust_boundaries)
# AWS SSM Configuration (aws.ssm_documents_set_as_public)
# Single account environment: No action required. The AWS account number will be automatically added by the checks.
# Multi account environment: Any additional trusted account number should be added as a comma-separated list, e.g.
# trusted_account_ids : ["123456789012", "098765432109", "678901234567"]
@@ -230,7 +194,7 @@ aws:
# AWS CloudTrail Configuration
# aws.cloudtrail_threat_detection_privilege_escalation
threat_detection_privilege_escalation_threshold: 0.2 # Percentage of actions found to decide if it is an privilege_escalation attack event, by default is 0.2 (20%)
threat_detection_privilege_escalation_threshold: 0.1 # Percentage of actions found to decide if it is an privilege_escalation attack event, by default is 0.1 (10%)
threat_detection_privilege_escalation_minutes: 1440 # Past minutes to search from now for privilege_escalation attacks, by default is 1440 minutes (24 hours)
threat_detection_privilege_escalation_actions:
[
@@ -287,7 +251,7 @@ aws:
"UpdateLoginProfile",
]
# aws.cloudtrail_threat_detection_enumeration
threat_detection_enumeration_threshold: 0.3 # Percentage of actions found to decide if it is an enumeration attack event, by default is 0.3 (30%)
threat_detection_enumeration_threshold: 0.1 # Percentage of actions found to decide if it is an enumeration attack event, by default is 0.1 (10%)
threat_detection_enumeration_minutes: 1440 # Past minutes to search from now for enumeration attacks, by default is 1440 minutes (24 hours)
threat_detection_enumeration_actions:
[
@@ -382,24 +346,6 @@ aws:
"LookupEvents",
"Search",
]
# aws.cloudtrail_threat_detection_llm_jacking
threat_detection_llm_jacking_threshold: 0.4 # Percentage of actions found to decide if it is an LLM Jacking attack event, by default is 0.4 (40%)
threat_detection_llm_jacking_minutes: 1440 # Past minutes to search from now for LLM Jacking attacks, by default is 1440 minutes (24 hours)
threat_detection_llm_jacking_actions:
[
"PutUseCaseForModelAccess", # Submits a use case for model access, providing justification (Write).
"PutFoundationModelEntitlement", # Grants entitlement for accessing a foundation model (Write).
"PutModelInvocationLoggingConfiguration", # Configures logging for model invocations (Write).
"CreateFoundationModelAgreement", # Creates a new agreement to use a foundation model (Write).
"InvokeModel", # Invokes a specified Bedrock model for inference using provided prompt and parameters (Read).
"InvokeModelWithResponseStream", # Invokes a Bedrock model for inference with real-time token streaming (Read).
"GetUseCaseForModelAccess", # Retrieves an existing use case for model access (Read).
"GetModelInvocationLoggingConfiguration", # Fetches the logging configuration for model invocations (Read).
"GetFoundationModelAvailability", # Checks the availability of a foundation model for use (Read).
"ListFoundationModelAgreementOffers", # Lists available agreement offers for accessing foundation models (List).
"ListFoundationModels", # Lists the available foundation models in Bedrock (List).
"ListProvisionedModelThroughputs", # Lists the provisioned throughput for previously created models (List).
]
# AWS RDS Configuration
# aws.rds_instance_backup_enabled
@@ -422,18 +368,6 @@ aws:
"scheduler",
]
# aws.eks_cluster_uses_a_supported_version
# EKS clusters must be version 1.28 or higher
eks_cluster_oldest_version_supported: "1.28"
# AWS CodeBuild Configuration
# aws.codebuild_project_no_secrets_in_variables
# CodeBuild sensitive variables that are excluded from the check
excluded_sensitive_environment_variables:
[
]
# Azure Configuration
azure:
# Azure Network Configuration
+1 -3
@@ -10,11 +10,9 @@ prowler dashboard
To run Prowler local dashboard with Docker, use:
```sh
docker run -v /your/local/dir/prowler-output:/home/prowler/output --env HOST=0.0.0.0 --publish 127.0.0.1:11666:11666 toniblyx/prowler:latest dashboard
docker run --env HOST=0.0.0.0 --publish 127.0.0.1:11666:11666 toniblyx/prowler:latest dashboard
```
Make sure you update the `/your/local/dir/prowler-output` to match the path that contains your prowler output.
???+ note
**Remember that the `dashboard` server is not authenticated, if you expose it to the internet, you are running it at your own risk.**
+1 -1
@@ -13,7 +13,7 @@ prowler <provider> -c <check_to_fix_1> <check_to_fix_2> ... --fixer
```sh
prowler <provider> --list-fixers
```
It's important to note that using the fixers for `Access Analyzer`, `GuardDuty`, and `SecurityHub` may incur additional costs. These AWS services might trigger actions or deploy resources that can lead to charges on your AWS account.
## Writing a Fixer
To write a fixer, you need to create a file called `<check_id>_fixer.py` inside the check folder, with a function called `fixer` that receives either the region or the resource to be fixed as a parameter, and returns a boolean value indicating if the fix was successful or not.
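A minimal sketch of what such a file might look like, assuming a region-based fixer (the remediation call is hypothetical, not from this diff):

```python
# <check_id>_fixer.py -- hypothetical sketch of a region-based fixer
def fixer(region: str) -> bool:
    """Attempt to remediate the finding in the given region.

    Returns True if the fix was applied, False otherwise.
    """
    try:
        # The real remediation API call for the check would go here,
        # e.g. enabling the missing security setting in `region`.
        print(f"Applying fix in {region}")
        return True
    except Exception:
        return False
```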
-22
@@ -1,22 +0,0 @@
# GCP Organization
By default, Prowler scans all Google Cloud projects accessible to the authenticated user.
To limit the scan to projects within a specific Google Cloud organization, use the `--organization-id` option with the GCP organization ID:
```console
prowler gcp --organization-id organization-id
```
???+ warning
Make sure that the used credentials have the role Cloud Asset Viewer (`roles/cloudasset.viewer`) or Cloud Asset Owner (`roles/cloudasset.owner`) on the organization level.
???+ note
With this option, Prowler retrieves all projects within the specified organization, including those organized in folders and nested subfolders. This ensures that every project under the organization's hierarchy is scanned, providing full visibility across the entire organization.
???+ note
To find the organization ID, use the following command:
```console
gcloud organizations list
```
+1 -6
@@ -10,7 +10,7 @@ Execute Prowler in verbose mode (like in Version 2):
prowler <provider> --verbose
```
## Filter findings by status
Prowler can filter the findings by their status, so you can see only in the CLI and in the reports the findings with a specific status:
Prowler can filter the findings by their status:
```console
prowler <provider> --status [PASS, FAIL, MANUAL]
```
@@ -24,11 +24,6 @@ Prowler can run without showing its banner:
```console
prowler <provider> -b/--no-banner
```
## Disable Colors
Prowler can run without showing colors:
```console
prowler <provider> --no-color
```
## Checks
Prowler has checks per provider, there are options related with them:
+84 -142
@@ -7,155 +7,97 @@ Mutelist option works along with other options and will modify the output in the
- CSV: `muted` is `True`. The field `status` will keep the original status, `MANUAL`, `PASS` or `FAIL`, of the finding.
## How the Mutelist Works
The **Mutelist** uses both "AND" and "OR" logic to determine which resources, checks, regions, and tags should be muted. For each check, the Mutelist evaluates whether the account, region, and resource match the specified criteria using "AND" logic. If tags are specified, the Mutelist can apply either "AND" or "OR" logic.
If any of the criteria do not match, the check is not muted.
???+ note
Remember that mutelist can be used with regular expressions.
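For instance, the "ORed" tag matching relies on regex alternation; a tiny sketch of how such a pattern behaves (the tag strings below are made up for illustration):

```python
import re

# Alternation pattern: matches resources tagged project=test OR project=dev
pattern = r"project=(test|dev)"

for tags in ("project=test", "project=dev", "project=prod"):
    print(tags, bool(re.search(pattern, tags)))
# project=test True / project=dev True / project=prod False
```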
## Mutelist Specification
???+ note
- For Azure provider, the Account ID is the Subscription Name and the Region is the Location.
- For GCP provider, the Account ID is the Project ID and the Region is the Zone.
- For Kubernetes provider, the Account ID is the Cluster Name and the Region is the Namespace.
The Mutelist file uses the [YAML](https://en.wikipedia.org/wiki/YAML) format with the following syntax:
```yaml
### Account, Check and/or Region can be * to apply for all the cases.
### Resources and tags are lists that can have either Regex or Keywords.
### Tags is an optional list that matches on tuples of 'key=value' and are "ANDed" together.
### Use an alternation Regex to match one of multiple tags with "ORed" logic.
### For each check you can except Accounts, Regions, Resources and/or Tags.
########################### MUTELIST EXAMPLE ###########################
Mutelist:
Accounts:
"123456789012":
Checks:
"iam_user_hardware_mfa_enabled":
Regions:
- "us-east-1"
Resources:
- "user-1" # Will mute user-1 in check iam_user_hardware_mfa_enabled
- "user-2" # Will mute user-2 in check iam_user_hardware_mfa_enabled
"ec2_*":
Regions:
- "*"
Resources:
- "*" # Will mute every EC2 check in every account and region
"*":
Regions:
- "*"
Resources:
- "test"
Tags:
- "test=test" # Will mute every resource containing the string "test" and the tags 'test=test' and
- "project=test|project=stage" # either of ('project=test' OR project=stage) in account 123456789012 and every region
"*":
Regions:
- "*"
Resources:
- "test"
Tags:
- "test=test"
- "project=test" # This will mute every resource containing the string "test" and BOTH tags at the same time.
"*":
Regions:
- "*"
Resources:
- "test"
Tags: # This will mute every resource containing the string "test" and the ones that contain EITHER the `test=test` OR `project=test` OR `project=dev`
- "test=test|project=(test|dev)"
"*":
Regions:
- "*"
Resources:
- "test"
Tags:
- "test=test" # This will mute every resource containing the string "test" and the tags `test=test` and either `project=test` OR `project=stage` in every account and region.
- "project=test|project=stage"
"*":
Checks:
"s3_bucket_object_versioning":
Regions:
- "eu-west-1"
- "us-east-1"
Resources:
- "ci-logs" # Will mute bucket "ci-logs" AND ALSO bucket "ci-logs-replica" in specified check and regions
- "logs" # Will mute EVERY BUCKET containing the string "logs" in specified check and regions
- ".+-logs" # Will mute all buckets containing the terms ci-logs, qa-logs, etc. in specified check and regions
"ecs_task_definitions_no_environment_secrets":
Regions:
- "*"
Resources:
- "*"
Exceptions:
Accounts:
- "0123456789012"
Regions:
- "eu-west-1"
- "eu-south-2" # Will mute every resource in check ecs_task_definitions_no_environment_secrets except the ones in account 0123456789012 located in eu-south-2 or eu-west-1
"*":
Regions:
- "*"
Resources:
- "*"
Tags:
- "environment=dev" # Will mute every resource containing the tag 'environment=dev' in every account and region
"123456789012":
Checks:
"*":
Regions:
- "*"
Resources:
- "*"
Exceptions:
Resources:
- "test"
Tags:
- "environment=prod" # Will mute every resource except in account 123456789012 except the ones containing the string "test" and tag environment=prod
"*":
Checks:
"ec2_*":
Regions:
- "*"
Resources:
- "test-resource" # Will mute the resource "test-resource" in all accounts and regions for whatever check from the EC2 service
```
### Account, Check, Region, Resource, and Tag
| Field | Description | Logic |
|----------|----------|----------|
| `account_id` | Use `*` to apply the mutelist to all accounts. | `ANDed` |
| `check_name` | The name of the Prowler check. Use `*` to apply the mutelist to all checks, or `service_*` to apply it to all of a service's checks. | `ANDed` |
| `region` | The region identifier. Use `*` to apply the mutelist to all regions. | `ANDed` |
| `resource` | The resource identifier. Use `*` to apply the mutelist to all resources. | `ANDed` |
| `tag` | The tag value. Separate list entries are `ANDed`; use a regex alternation within a single entry for `ORed` logic. | `ORed` |
## How to Use the Mutelist
To use the Mutelist, specify the path to your Mutelist YAML file with the `-w`/`--mutelist-file` option when running Prowler:
```
prowler <provider> -w mutelist.yaml
```
Replace `<provider>` with the appropriate provider name.
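For instance, for an AWS scan (the file name is illustrative):

```
prowler aws -w mutelist.yaml
```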
## Mutelist YAML File Syntax
## Considerations
???+ note
For Azure provider, the Account ID is the Subscription Name and the Region is the Location.
- The Mutelist can be used in combination with other Prowler options, such as the `--service` or `--checks` option, to further customize the scanning process.
- Make sure to review and update the Mutelist regularly to ensure it reflects the desired exclusions and remains up to date with your infrastructure.
???+ note
For GCP provider, the Account ID is the Project ID and the Region is the Zone.
???+ note
For Kubernetes provider, the Account ID is the Cluster Name and the Region is the Namespace.
The Mutelist file is a YAML file with the following syntax:
```yaml
### Account, Check and/or Region can be * to apply to all cases.
### Resources and Tags are lists whose entries can be either regular expressions or keywords.
### Tags is an optional list that matches on tuples of 'key=value'; multiple entries are "ANDed" together.
### Use an alternation regex to match one of multiple tags with "ORed" logic.
### For each check you can add exceptions for Accounts, Regions, Resources and/or Tags.
########################### MUTELIST EXAMPLE ###########################
Mutelist:
Accounts:
"123456789012":
Checks:
"iam_user_hardware_mfa_enabled":
Regions:
- "us-east-1"
Resources:
- "user-1" # Will ignore user-1 in check iam_user_hardware_mfa_enabled
- "user-2" # Will ignore user-2 in check iam_user_hardware_mfa_enabled
"ec2_*":
Regions:
- "*"
Resources:
- "*" # Will ignore every EC2 check in every account and region
"*":
Regions:
- "*"
Resources:
- "test"
Tags:
- "test=test" # Will ignore every resource containing the string "test" and the tags 'test=test' and
- "project=test|project=stage" # either of ('project=test' OR project=stage) in account 123456789012 and every region
"*":
Checks:
"s3_bucket_object_versioning":
Regions:
- "eu-west-1"
- "us-east-1"
Resources:
- "ci-logs" # Will ignore bucket "ci-logs" AND ALSO bucket "ci-logs-replica" in specified check and regions
- "logs" # Will ignore EVERY BUCKET containing the string "logs" in specified check and regions
- ".+-logs" # Will ignore all buckets containing the terms ci-logs, qa-logs, etc. in specified check and regions
"ecs_task_definitions_no_environment_secrets":
Regions:
- "*"
Resources:
- "*"
Exceptions:
Accounts:
- "0123456789012"
Regions:
- "eu-west-1"
- "eu-south-2" # Will ignore every resource in check ecs_task_definitions_no_environment_secrets except the ones in account 0123456789012 located in eu-south-2 or eu-west-1
"*":
Regions:
- "*"
Resources:
- "*"
Tags:
- "environment=dev" # Will ignore every resource containing the tag 'environment=dev' in every account and region
"123456789012":
Checks:
"*":
Regions:
- "*"
Resources:
- "*"
Exceptions:
Resources:
- "test"
Tags:
- "environment=prod" # Will ignore every resource except in account 123456789012 except the ones containing the string "test" and tag environment=prod
```
## AWS Mutelist
### Mute specific AWS regions
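Based on the specification above, a minimal sketch of a mutelist that mutes every check in selected regions (the regions are illustrative) could look like:

```yaml
Mutelist:
  Accounts:
    "*":
      Checks:
        "*":
          Regions:
            - "eu-west-1"
            - "us-east-1"
          Resources:
            - "*" # Will mute everything in the listed regions, in every account
```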
+6 -6
View File
@@ -142,8 +142,7 @@ The JSON-OCSF output format implements the [Detection Finding](https://schema.oc
"desc": "Ensure CloudTrail is enabled in all regions",
"product_uid": "prowler",
"title": "Ensure CloudTrail is enabled in all regions",
"uid": "prowler-aws-cloudtrail_multi_region_enabled-123456789012-ap-northeast-1-123456789012",
"types": ["Software and Configuration Checks","Industry and Regulatory Standards","CIS AWS Foundations Benchmark"],
"uid": "prowler-aws-cloudtrail_multi_region_enabled-123456789012-ap-northeast-1-123456789012"
},
"resources": [
{
@@ -190,10 +189,11 @@ The JSON-OCSF output format implements the [Detection Finding](https://schema.oc
"type_uid": 200401,
"type_name": "Create",
"unmapped": {
"check_type": "Software and Configuration Checks,Industry and Regulatory Standards,CIS AWS Foundations Benchmark",
"related_url": "",
"categories": ["forensics-ready"],
"depends_on": [],
"related_to": [],
"categories": "forensics-ready",
"depends_on": "",
"related_to": "",
"notes": "",
"compliance": {
"CISA": [
@@ -336,7 +336,7 @@ The following is the mapping between the native JSON and the Detection Finding f
| Provider | cloud.provider |
| CheckID | metadata.event_code |
| CheckTitle | finding_info.title |
| CheckType | finding_info.types |
| CheckType | unmapped.check_type |
| ServiceName | resources.group.name |
| SubServiceName | _Not mapped yet_ |
| Status | status_code |
+1 -2
View File
@@ -36,11 +36,10 @@ If EBS default encryption is not enabled, sensitive information at rest is not protected.
- `ec2_ebs_default_encryption`
If your security groups are not properly configured, the attack surface is increased. Nonetheless, Prowler detects which security groups are in use (i.e., attached) so that it only reports on those. This logic applies to the 15 checks related to open ports in security groups, the check for the default security group, and the checks for security groups that allow ingress and egress traffic.
If your security groups are not properly configured, the attack surface is increased. Nonetheless, Prowler detects which security groups are in use (i.e., attached) so that it only reports on those. This logic applies to the 15 checks related to open ports in security groups and the check for the default security group.
- `ec2_securitygroup_allow_ingress_from_internet_to_port_X` (15 checks)
- `ec2_securitygroup_default_restrict_traffic`
- `ec2_securitygroup_allow_wide_open_public_ipv4`
Prowler also checks which Network ACLs are in use, so that it alerts only on those with open ports that are actually being used.
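A toy sketch of that "only report what is in use" filtering idea (data and field names are illustrative, not Prowler's models):

```python
# Only attached (in-use) security groups with open ports are reported.
security_groups = [
    {"id": "sg-1", "attached": True, "open_ports": [22]},
    {"id": "sg-2", "attached": False, "open_ports": [3389]},  # unused, skipped
]

reported = [sg["id"] for sg in security_groups if sg["attached"] and sg["open_ports"]]
print(reported)  # ['sg-1']
```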
-1
View File
@@ -87,7 +87,6 @@ nav:
- Google Cloud:
- Authentication: tutorials/gcp/authentication.md
- Projects: tutorials/gcp/projects.md
- Organization: tutorials/gcp/organization.md
- Kubernetes:
- In-Cluster Execution: tutorials/kubernetes/in-cluster.md
- Non In-Cluster Execution: tutorials/kubernetes/outside-cluster.md
@@ -58,44 +58,32 @@ Resources:
- 'account:Get*'
- 'appstream:Describe*'
- 'appstream:List*'
- 'backup:List*'
- 'bedrock:List*'
- 'bedrock:Get*'
- 'cloudtrail:GetInsightSelectors'
- 'codeartifact:List*'
- 'codebuild:BatchGet*'
- 'codebuild:ListReportGroups'
- 'cognito-idp:GetUserPoolMfaConfig'
- 'dlm:Get*'
- 'drs:Describe*'
- 'ds:Get*'
- 'ds:Describe*'
- 'ds:List*'
- 'dynamodb:GetResourcePolicy'
- 'ec2:GetEbsEncryptionByDefault'
- 'ec2:GetSnapshotBlockPublicAccessState'
- 'ec2:GetInstanceMetadataDefaults'
- 'ecr:Describe*'
- 'ecr:GetRegistryScanningConfiguration'
- 'elasticfilesystem:DescribeBackupPolicy'
- 'glue:GetConnections'
- 'glue:GetSecurityConfiguration*'
- 'glue:SearchTables'
- 'lambda:GetFunction*'
- 'logs:FilterLogEvents'
- 'lightsail:GetRelationalDatabases'
- 'macie2:GetMacieSession'
- 'macie2:GetAutomatedDiscoveryConfiguration'
- 's3:GetAccountPublicAccessBlock'
- 'shield:DescribeProtection'
- 'shield:GetSubscriptionState'
- 'securityhub:BatchImportFindings'
- 'securityhub:GetFindings'
- 'ssm:GetDocument'
- 'ssm-incidents:List*'
- 'support:Describe*'
- 'tag:GetTagKeys'
- 'wellarchitected:List*'
Resource: '*'
- PolicyName: ProwlerScanRoleAdditionalViewPrivilegesApiGateway
PolicyDocument:
@@ -7,12 +7,9 @@
"appstream:Describe*",
"appstream:List*",
"backup:List*",
"bedrock:List*",
"bedrock:Get*",
"cloudtrail:GetInsightSelectors",
"codeartifact:List*",
"codebuild:BatchGet*",
"codebuild:ListReportGroups",
"cognito-idp:GetUserPoolMfaConfig",
"dlm:Get*",
"drs:Describe*",
@@ -21,7 +18,6 @@
"ds:List*",
"dynamodb:GetResourcePolicy",
"ec2:GetEbsEncryptionByDefault",
"ec2:GetSnapshotBlockPublicAccessState",
"ec2:GetInstanceMetadataDefaults",
"ecr:Describe*",
"ecr:GetRegistryScanningConfiguration",
@@ -33,7 +29,6 @@
"logs:FilterLogEvents",
"lightsail:GetRelationalDatabases",
"macie2:GetMacieSession",
"macie2:GetAutomatedDiscoveryConfiguration",
"s3:GetAccountPublicAccessBlock",
"shield:DescribeProtection",
"shield:GetSubscriptionState",
Generated
+1298 -2147
View File
File diff suppressed because it is too large
+2
View File
@@ -3,5 +3,7 @@ import sys
from prowler.__main__ import prowler
if __name__ == "__main__":
sys.exit(prowler())
+115 -107
View File
@@ -5,7 +5,15 @@ import sys
from os import environ
from colorama import Fore, Style
from colorama import init as colorama_init
from prowler.providers.aws.services.ec2.ec2_service import PaginatedDict, PaginatedList
import pdb
import psutil
import os
def check_memory_usage():
process = psutil.Process(os.getpid())
memory_info = process.memory_info()
return memory_info.rss # Resident Set Size: memory in bytes
from prowler.config.config import (
csv_file_suffix,
@@ -16,6 +24,8 @@ from prowler.config.config import (
)
from prowler.lib.banner import print_banner
from prowler.lib.check.check import (
bulk_load_checks_metadata,
bulk_load_compliance_frameworks,
exclude_checks_to_run,
exclude_services_to_run,
execute_checks,
@@ -35,12 +45,10 @@ from prowler.lib.check.check import (
)
from prowler.lib.check.checks_loader import load_checks_to_execute
from prowler.lib.check.compliance import update_checks_metadata_with_compliance
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.custom_checks_metadata import (
parse_custom_checks_metadata_file,
update_checks_metadata,
)
from prowler.lib.check.models import CheckMetadata
from prowler.lib.cli.parser import ProwlerArgumentParser
from prowler.lib.logger import logger, set_logging_config
from prowler.lib.outputs.asff.asff import ASFF
@@ -55,7 +63,6 @@ from prowler.lib.outputs.compliance.compliance import display_compliance_table
from prowler.lib.outputs.compliance.ens.ens_aws import AWSENS
from prowler.lib.outputs.compliance.generic.generic import GenericCompliance
from prowler.lib.outputs.compliance.iso27001.iso27001_aws import AWSISO27001
from prowler.lib.outputs.compliance.kisa_ismsp.kisa_ismsp_aws import AWSKISAISMSP
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_aws import AWSMitreAttack
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_azure import (
AzureMitreAttack,
@@ -70,15 +77,20 @@ from prowler.lib.outputs.slack.slack import Slack
from prowler.lib.outputs.summary_table import display_summary_table
from prowler.providers.aws.lib.s3.s3 import S3
from prowler.providers.aws.lib.security_hub.security_hub import SecurityHub
from prowler.providers.aws.models import AWSOutputOptions
from prowler.providers.azure.models import AzureOutputOptions
from prowler.providers.common.provider import Provider
from prowler.providers.common.quick_inventory import run_provider_quick_inventory
from prowler.providers.gcp.models import GCPOutputOptions
from prowler.providers.kubernetes.models import KubernetesOutputOptions
from memory_profiler import profile
from pympler import asizeof
from pympler import tracker
from pympler import muppy
from pympler import summary
import objgraph
from memory_profiler import profile
def prowler():
#tr = tracker.SummaryTracker()
# Parse Arguments
# Refactor(CLI)
parser = ProwlerArgumentParser()
@@ -113,9 +125,6 @@ def prowler():
and not checks_folder
)
if args.no_color:
colorama_init(strip=True)
if not args.no_banner:
legend = args.verbose or getattr(args, "fixer", None)
print_banner(legend)
@@ -140,7 +149,7 @@ def prowler():
# Load checks metadata
logger.debug("Loading checks metadata from .metadata.json files")
bulk_checks_metadata = CheckMetadata.get_bulk(provider)
bulk_checks_metadata = bulk_load_checks_metadata(provider)
if args.list_categories:
print_categories(list_categories(bulk_checks_metadata))
@@ -150,7 +159,7 @@ def prowler():
# Load compliance frameworks
logger.debug("Loading compliance frameworks from .json files")
bulk_compliance_frameworks = Compliance.get_bulk(provider)
bulk_compliance_frameworks = bulk_load_compliance_frameworks(provider)
# Complete checks metadata with the compliance framework specification
bulk_checks_metadata = update_checks_metadata_with_compliance(
bulk_compliance_frameworks, bulk_checks_metadata
@@ -177,17 +186,19 @@ def prowler():
# Load checks to execute
checks_to_execute = load_checks_to_execute(
bulk_checks_metadata=bulk_checks_metadata,
bulk_compliance_frameworks=bulk_compliance_frameworks,
checks_file=checks_file,
check_list=checks,
service_list=services,
severities=severities,
compliance_frameworks=compliance_framework,
categories=categories,
provider=provider,
bulk_checks_metadata,
bulk_compliance_frameworks,
checks_file,
checks,
services,
severities,
compliance_framework,
categories,
provider,
)
#pdb.set_trace() # Break
memory_usage = check_memory_usage()
print(f"Memory usage at checks_to_execute: {memory_usage / (1024 * 1024)} MB")
# if --list-checks-json, dump a json file and exit
if args.list_checks_json:
print(list_checks_json(provider, sorted(checks_to_execute)))
@@ -199,9 +210,13 @@ def prowler():
sys.exit()
# Provider to scan
Provider.init_global_provider(args)
Provider.set_global_provider(args)
global_provider = Provider.get_global_provider()
memory_usage = check_memory_usage()
print(f"Memory usage at global_provider = Provider. __main__.py:217 : {memory_usage / (1024 * 1024)} MB")
#pdb.set_trace() # Break
# Print Provider Credentials
if not args.only_logs:
global_provider.print_credentials()
@@ -233,30 +248,17 @@ def prowler():
# Once the provider is set and we have the eventual checks based on the resource identifier,
# it is time to check what Prowler's checks are going to be executed
checks_from_resources = global_provider.get_checks_to_execute_by_audit_resources()
# Intersect checks from resources with checks to execute so we only run the checks that apply to the resources with the specified ARNs or tags
if getattr(args, "resource_arn", None) or getattr(args, "resource_tag", None):
if checks_from_resources:
checks_to_execute = checks_to_execute.intersection(checks_from_resources)
# Sort final check list
checks_to_execute = sorted(checks_to_execute)
# Setup Mutelist
global_provider.mutelist = args.mutelist_file
# Setup Output Options
if provider == "aws":
output_options = AWSOutputOptions(
args, bulk_checks_metadata, global_provider.identity
)
elif provider == "azure":
output_options = AzureOutputOptions(
args, bulk_checks_metadata, global_provider.identity
)
elif provider == "gcp":
output_options = GCPOutputOptions(
args, bulk_checks_metadata, global_provider.identity
)
elif provider == "kubernetes":
output_options = KubernetesOutputOptions(
args, bulk_checks_metadata, global_provider.identity
)
global_provider.output_options = (args, bulk_checks_metadata)
# Run the quick inventory for the provider if available
if hasattr(args, "quick_inventory") and args.quick_inventory:
@@ -264,7 +266,11 @@ def prowler():
sys.exit()
# Execute checks
findings = []
paginated = 0
if paginated:
findings = PaginatedList()
else:
findings = []
if len(checks_to_execute):
findings = execute_checks(
@@ -272,15 +278,16 @@ def prowler():
global_provider,
custom_checks_metadata,
args.config_file,
output_options,
)
else:
logger.error(
"There are no checks to execute. Please, check your input arguments"
)
memory_usage = check_memory_usage()
print(f"Memory usage at execute_checks __main__.py:284 {memory_usage / (1024 * 1024)} MB")
#pdb.set_trace() # Break
# Prowler Fixer
if output_options.fixer:
if global_provider.output_options.fixer:
print(f"{Style.BRIGHT}\nRunning Prowler Fixer, please wait...{Style.RESET_ALL}")
# Check if there are any FAIL findings
if any("FAIL" in finding.status for finding in findings):
@@ -326,17 +333,21 @@ def prowler():
# TODO: this part is needed since the checks generates a Check_Report_XXX and the output uses Finding
# This will be refactored for the outputs generate directly the Finding
finding_outputs = [
Finding.generate_output(global_provider, finding, output_options)
for finding in findings
Finding.generate_output(global_provider, finding) for finding in findings
]
generated_outputs = {"regular": [], "compliance": []}
logger.debug("Output generated")
#pdb.set_trace() # Break
memory_usage = check_memory_usage()
print(f"Memory usage at findings_output: {memory_usage / (1024 * 1024)} MB")
if args.output_formats:
for mode in args.output_formats:
filename = (
f"{output_options.output_directory}/"
f"{output_options.output_filename}"
f"{global_provider.output_options.output_directory}/"
f"{global_provider.output_options.output_filename}"
)
if mode == "csv":
csv_output = CSV(
@@ -347,7 +358,9 @@ def prowler():
generated_outputs["regular"].append(csv_output)
# Write CSV Finding Object to file
csv_output.batch_write_data_to_file()
#pdb.set_trace() # Break
memory_usage = check_memory_usage()
print(f"Memory usage at csv_output_batch_write_data: {memory_usage / (1024 * 1024)} MB")
if mode == "json-asff":
asff_output = ASFF(
findings=finding_outputs,
@@ -376,18 +389,20 @@ def prowler():
html_output.batch_write_data_to_file(
provider=global_provider, stats=stats
)
#pdb.set_trace() # Break
memory_usage = check_memory_usage()
print(f"Memory usage at html_output_batch_write: {memory_usage / (1024 * 1024)} MB")
# Compliance Frameworks
input_compliance_frameworks = set(output_options.output_modes).intersection(
get_available_compliance_frameworks(provider)
)
input_compliance_frameworks = set(
global_provider.output_options.output_modes
).intersection(get_available_compliance_frameworks(provider))
if provider == "aws":
for compliance_name in input_compliance_frameworks:
if compliance_name.startswith("cis_"):
# Generate CIS Finding Object
filename = (
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
)
cis = AWSCIS(
findings=finding_outputs,
@@ -400,8 +415,8 @@ def prowler():
elif compliance_name == "mitre_attack_aws":
# Generate MITRE ATT&CK Finding Object
filename = (
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
)
mitre_attack = AWSMitreAttack(
findings=finding_outputs,
@@ -414,8 +429,8 @@ def prowler():
elif compliance_name.startswith("ens_"):
# Generate ENS Finding Object
filename = (
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
)
ens = AWSENS(
findings=finding_outputs,
@@ -428,8 +443,8 @@ def prowler():
elif compliance_name.startswith("aws_well_architected_framework"):
# Generate AWS Well-Architected Finding Object
filename = (
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
)
aws_well_architected = AWSWellArchitected(
findings=finding_outputs,
@@ -442,8 +457,8 @@ def prowler():
elif compliance_name.startswith("iso27001_"):
# Generate ISO27001 Finding Object
filename = (
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
)
iso27001 = AWSISO27001(
findings=finding_outputs,
@@ -453,24 +468,10 @@ def prowler():
)
generated_outputs["compliance"].append(iso27001)
iso27001.batch_write_data_to_file()
elif compliance_name.startswith("kisa"):
# Generate KISA-ISMS-P Finding Object
filename = (
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
)
kisa_ismsp = AWSKISAISMSP(
findings=finding_outputs,
compliance=bulk_compliance_frameworks[compliance_name],
create_file_descriptor=True,
file_path=filename,
)
generated_outputs["compliance"].append(kisa_ismsp)
kisa_ismsp.batch_write_data_to_file()
else:
filename = (
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
)
generic_compliance = GenericCompliance(
findings=finding_outputs,
@@ -486,8 +487,8 @@ def prowler():
if compliance_name.startswith("cis_"):
# Generate CIS Finding Object
filename = (
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
)
cis = AzureCIS(
findings=finding_outputs,
@@ -500,8 +501,8 @@ def prowler():
elif compliance_name == "mitre_attack_azure":
# Generate MITRE ATT&CK Finding Object
filename = (
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
)
mitre_attack = AzureMitreAttack(
findings=finding_outputs,
@@ -513,8 +514,8 @@ def prowler():
mitre_attack.batch_write_data_to_file()
else:
filename = (
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
)
generic_compliance = GenericCompliance(
findings=finding_outputs,
@@ -530,8 +531,8 @@ def prowler():
if compliance_name.startswith("cis_"):
# Generate CIS Finding Object
filename = (
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
)
cis = GCPCIS(
findings=finding_outputs,
@@ -544,8 +545,8 @@ def prowler():
elif compliance_name == "mitre_attack_gcp":
# Generate MITRE ATT&CK Finding Object
filename = (
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
)
mitre_attack = GCPMitreAttack(
findings=finding_outputs,
@@ -557,8 +558,8 @@ def prowler():
mitre_attack.batch_write_data_to_file()
else:
filename = (
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
)
generic_compliance = GenericCompliance(
findings=finding_outputs,
@@ -574,8 +575,8 @@ def prowler():
if compliance_name.startswith("cis_"):
# Generate CIS Finding Object
filename = (
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
)
cis = KubernetesCIS(
findings=finding_outputs,
@@ -587,8 +588,8 @@ def prowler():
cis.batch_write_data_to_file()
else:
filename = (
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
)
generic_compliance = GenericCompliance(
findings=finding_outputs,
@@ -621,11 +622,7 @@ def prowler():
)
security_hub_regions = (
global_provider.get_available_aws_service_regions(
"securityhub",
global_provider.identity.partition,
global_provider.identity.audited_regions,
)
global_provider.get_available_aws_service_regions("securityhub")
if not global_provider.identity.audited_regions
else global_provider.identity.audited_regions
)
@@ -635,7 +632,8 @@ def prowler():
aws_partition=global_provider.identity.partition,
aws_session=global_provider.session.current_session,
findings=asff_output.data,
send_only_fails=output_options.send_sh_only_fails,
status=global_provider.output_options.status,
send_only_fails=global_provider.output_options.send_sh_only_fails,
aws_security_hub_available_regions=security_hub_regions,
)
# Send the findings to Security Hub
@@ -661,7 +659,7 @@ def prowler():
display_summary_table(
findings,
global_provider,
output_options,
global_provider.output_options,
)
# Only display compliance table if there are findings (not all MANUAL) and it is a default execution
if (
@@ -680,20 +678,30 @@ def prowler():
findings,
bulk_checks_metadata,
compliance,
output_options.output_filename,
output_options.output_directory,
global_provider.output_options.output_filename,
global_provider.output_options.output_directory,
compliance_overview,
)
if compliance_overview:
print(
f"\nDetailed compliance results are in {Fore.YELLOW}{output_options.output_directory}/compliance/{Style.RESET_ALL}\n"
f"\nDetailed compliance results are in {Fore.YELLOW}{global_provider.output_options.output_directory}/compliance/{Style.RESET_ALL}\n"
)
# Print the memory usage of the largest objects
#all_objects = muppy.get_objects()
#sum1 = summary.summarize(all_objects)
#summary.print_(sum1)
#objgraph.show_most_common_types(limit=20)
#objgraph.show_growth()
# If custom checks were passed, remove the modules
if checks_folder:
remove_custom_checks_module(checks_folder, provider)
# If there are failed findings exit code 3, except if -z is input
#pdb.set_trace() # Break
memory_usage = check_memory_usage()
print(f"Memory usage at ending: {memory_usage / (1024 * 1024)} MB")
if (
not args.ignore_exit_code_3
and stats["total_fail"] > 0
@@ -557,7 +557,7 @@
}
],
"Checks": [
"inspector2_is_enabled"
"inspector2_findings_exist"
]
},
{
@@ -587,8 +587,7 @@
}
],
"Checks": [
"inspector2_active_findings_exist",
"inspector2_is_enabled",
"inspector2_findings_exist",
"ecr_registry_scan_images_on_push_enabled",
"ecr_repositories_scan_vulnerabilities_in_latest_image",
"ecr_repositories_scan_images_on_push_enabled"
@@ -485,7 +485,7 @@
"codeartifact_packages_external_public_publishing_disabled",
"ecr_repositories_not_publicly_accessible",
"efs_not_publicly_accessible",
"eks_cluster_not_publicly_accessible",
"eks_endpoints_not_publicly_accessible",
"elb_internet_facing",
"elbv2_internet_facing",
"s3_account_level_public_access_blocks",
@@ -664,7 +664,7 @@
"awslambda_function_not_publicly_accessible",
"apigateway_restapi_waf_acl_attached",
"cloudfront_distributions_using_waf",
"eks_cluster_not_publicly_accessible",
"eks_control_plane_endpoint_access_restricted",
"sagemaker_models_network_isolation_enabled",
"sagemaker_models_vpc_settings_configured",
"sagemaker_notebook_instance_vpc_settings_configured",
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -19,7 +19,7 @@
"ec2_ebs_public_snapshot",
"ec2_instance_profile_attached",
"ec2_instance_public_ip",
"eks_cluster_not_publicly_accessible",
"eks_endpoints_not_publicly_accessible",
"emr_cluster_master_nodes_no_public_ip",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
@@ -61,7 +61,7 @@
"ec2_ebs_public_snapshot",
"ec2_instance_profile_attached",
"ec2_instance_public_ip",
"eks_cluster_not_publicly_accessible",
"eks_endpoints_not_publicly_accessible",
"emr_cluster_master_nodes_no_public_ip",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
@@ -102,7 +102,7 @@
"Checks": [
"ec2_ebs_public_snapshot",
"ec2_instance_public_ip",
"eks_cluster_not_publicly_accessible",
"eks_endpoints_not_publicly_accessible",
"emr_cluster_master_nodes_no_public_ip",
"awslambda_function_not_publicly_accessible",
"awslambda_function_url_public",
+1 -1
View File
@@ -971,7 +971,7 @@
"Checks": [
"ec2_ebs_public_snapshot",
"ec2_instance_public_ip",
"eks_cluster_not_publicly_accessible",
"eks_endpoints_not_publicly_accessible",
"emr_cluster_master_nodes_no_public_ip",
"awslambda_function_url_public",
"rds_instance_no_public_access",
+71 -7
View File
@@ -3043,7 +3043,9 @@
{
"Id": "9.4",
"Description": "Ensure that Register with Entra ID is enabled on App Service",
"Checks": [],
"Checks": [
"app_client_certificates_on"
],
"Attributes": [
{
"Section": "9. AppService",
@@ -3064,7 +3066,7 @@
"Id": "9.5",
"Description": "Ensure That 'PHP version' is the Latest, If Used to Run the Web App",
"Checks": [
"app_ensure_php_version_is_latest"
"app_register_with_identity"
],
"Attributes": [
{
@@ -3086,7 +3088,7 @@
"Id": "9.6",
"Description": "Ensure that 'Python version' is the Latest Stable Version, if Used to Run the Web App",
"Checks": [
"app_ensure_python_version_is_latest"
"app_ensure_php_version_is_latest"
],
"Attributes": [
{
@@ -3108,7 +3110,7 @@
"Id": "9.7",
"Description": "Ensure that 'Java version' is the latest, if used to run the Web App",
"Checks": [
"app_ensure_java_version_is_latest"
"app_ensure_python_version_is_latest"
],
"Attributes": [
{
@@ -3130,7 +3132,7 @@
"Id": "9.8",
"Description": "Ensure that 'HTTP Version' is the Latest, if Used to Run the Web App",
"Checks": [
"app_ensure_using_http20"
"app_ensure_java_version_is_latest"
],
"Attributes": [
{
@@ -3152,7 +3154,7 @@
"Id": "9.9",
"Description": "Ensure FTP deployments are Disabled",
"Checks": [
"app_ftp_deployment_disabled"
"app_ensure_using_http20"
],
"Attributes": [
{
@@ -3173,7 +3175,9 @@
{
"Id": "9.10",
"Description": "Ensure Azure Key Vaults are Used to Store Secrets",
"Checks": [],
"Checks": [
"app_ftp_deployment_disabled"
],
"Attributes": [
{
"Section": "9. AppService",
@@ -3209,6 +3213,66 @@
"References": "https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-group-lock-resources:https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-subscription-governance#azure-resource-locks:https://docs.microsoft.com/en-us/azure/governance/blueprints/concepts/resource-locking:https://learn.microsoft.com/en-us/security/benchmark/azure/mcsb-asset-management#am-4-limit-access-to-asset-management"
}
]
},
{
"Id": "9.10",
"Description": "Ensure FTP deployments are Disabled",
"Checks": [],
"Attributes": [
{
"Section": "9. AppService",
"Profile": "Level 1",
"AssessmentStatus": "Automated",
"Description": "By default, Azure Functions, Web, and API Services can be deployed over FTP. If FTP is required for an essential deployment workflow, FTPS should be required for FTP login for all App Service Apps and Functions.",
"RationaleStatement": "Azure FTP deployment endpoints are public. An attacker listening to traffic on a wifi network used by a remote employee or a corporate network could see login traffic in clear-text which would then grant them full control of the code base of the app or service. This finding is more severe if User Credentials for deployment are set at the subscription level rather than using the default Application Credentials which are unique per App.",
"ImpactStatement": "Any deployment workflows that rely on FTP or FTPs rather than the WebDeploy or HTTPs endpoints may be affected.",
"RemediationProcedure": "**From Azure Portal** 1. Go to the Azure Portal 2. Select `App Services` 3. Click on an app 4. Select `Settings` and then `Configuration` 5. Under `General Settings`, for the `Platform Settings`, the `FTP state` should be set to `Disabled` or `FTPS Only` **From Azure CLI** For each out of compliance application, run the following choosing either 'disabled' or 'FtpsOnly' as appropriate: ``` az webapp config set --resource-group <resource group name> --name <app name> --ftps-state [disabled|FtpsOnly] ``` **From PowerShell** For each out of compliance application, run the following: ``` Set-AzWebApp -ResourceGroupName <resource group name> -Name <app name> -FtpsState <Disabled or FtpsOnly> ```",
"AuditProcedure": "**From Azure Portal** 1. Go to the Azure Portal 2. Select `App Services` 3. Click on an app 4. Select `Settings` and then `Configuration` 5. Under `General Settings`, for the `Platform Settings`, the `FTP state` should not be set to `All allowed` **From Azure CLI** List webapps to obtain the ids. ``` az webapp list ``` List the publish profiles to obtain the username, password and ftp server url. ``` az webapp deployment list-publishing-profiles --ids <ids> { publishUrl: <URL_FOR_WEB_APP>, userName: <USER_NAME>, userPWD: <USER_PASSWORD>, } ``` **From PowerShell** List all Web Apps: ``` Get-AzWebApp ``` For each app: ``` Get-AzWebApp -ResourceGroupName <resource group name> -Name <app name> | Select-Object -ExpandProperty SiteConfig ``` In the output, look for the value of **FtpsState**. If its value is **AllAllowed** the setting is out of compliance. Any other value is considered in compliance with this check.",
"AdditionalInformation": "",
"DefaultValue": "[Azure Web Service Deploy via FTP](https://docs.microsoft.com/en-us/azure/app-service/deploy-ftp):[Azure Web Service Deployment](https://docs.microsoft.com/en-us/azure/app-service/overview-security):https://docs.microsoft.com/en-us/security/benchmark/azure/security-controls-v3-data-protection#dp-4-encrypt-sensitive-information-in-transit:https://docs.microsoft.com/en-us/security/benchmark/azure/security-controls-v3-posture-vulnerability-management#pv-7-rapidly-and-automatically-remediate-software-vulnerabilities",
"References": "TA0008, T1570, M1031"
}
]
},
{
"Id": "9.11",
"Description": "Ensure Azure Key Vaults are Used to Store Secrets",
"Checks": [],
"Attributes": [
{
"Section": "9. AppService",
"Profile": "Level 2",
"AssessmentStatus": "Manual",
"Description": "Azure Key Vault will store multiple types of sensitive information such as encryption keys, certificate thumbprints, and Managed Identity Credentials. Access to these 'Secrets' can be controlled through granular permissions.",
"RationaleStatement": "The credentials given to an application have permissions to create, delete, or modify data stored within the systems they access. If these credentials are stored within the application itself, anyone with access to the application or a copy of the code has access to them. Storing within Azure Key Vault as secrets increases security by controlling access. This also allows for updates of the credentials without redeploying the entire application.",
"ImpactStatement": "Integrating references to secrets within the key vault are required to be specifically integrated within the application code. This will require additional configuration to be made during the writing of an application, or refactoring of an already written one. There are also additional costs that are charged per 10000 requests to the Key Vault.",
"RemediationProcedure": "Remediation has 2 steps 1. Setup the Key Vault 2. Setup the App Service to use the Key Vault **Step 1: Set up the Key Vault** **From Azure CLI** ``` az keyvault create --name <name> --resource-group <myResourceGroup> --location myLocation ``` **From Powershell** ``` New-AzKeyvault -name <name> -ResourceGroupName <myResourceGroup> -Location <myLocation> ``` **Step 2: Set up the App Service to use the Key Vault** Sample JSON Template for App Service Configuration: ``` { //... resources: [ { type: Microsoft.Storage/storageAccounts, name: [variables('storageAccountName')], //... }, { type: Microsoft.Insights/components, name: [variables('appInsightsName')], //... }, { type: Microsoft.Web/sites, name: [variables('functionAppName')], identity: { type: SystemAssigned }, //... resources: [ { type: config, name: appsettings, //... dependsOn: [ [resourceId('Microsoft.Web/sites', variables('functionAppName'))], [resourceId('Microsoft.KeyVault/vaults/', variables('keyVaultName'))], [resourceId('Microsoft.KeyVault/vaults/secrets', variables('keyVaultName'), variables('storageConnectionStringName'))], [resourceId('Microsoft.KeyVault/vaults/secrets', variables('keyVaultName'), variables('appInsightsKeyName'))] ], properties: { AzureWebJobsStorage: [concat('@Microsoft.KeyVault(SecretUri=', reference(variables('storageConnectionStringResourceId')).secretUriWithVersion, ')')], WEBSITE_CONTENTAZUREFILECONNECTIONSTRING: [concat('@Microsoft.KeyVault(SecretUri=', reference(variables('storageConnectionStringResourceId')).secretUriWithVersion, ')')], APPINSIGHTS_INSTRUMENTATIONKEY: [concat('@Microsoft.KeyVault(SecretUri=', reference(variables('appInsightsKeyResourceId')).secretUriWithVersion, ')')], WEBSITE_ENABLE_SYNC_UPDATE_SITE: true //... } }, { type: sourcecontrols, name: web, //... dependsOn: [ [resourceId('Microsoft.Web/sites', variables('functionAppName'))], [resourceId('Microsoft.Web/sites/config', variables('functionAppName'), 'appsettings')] ], } ] }, { type: Microsoft.KeyVault/vaults, name: [variables('keyVaultName')], //... dependsOn: [ [resourceId('Microsoft.Web/sites', variables('functionAppName'))] ], properties: { //... accessPolicies: [ { tenantId: [reference(concat('Microsoft.Web/sites/', variables('functionAppName'), '/providers/Microsoft.ManagedIdentity/Identities/default'), '2015-08-31-PREVIEW').tenantId], objectId: [reference(concat('Microsoft.Web/sites/', variables('functionAppName'), '/providers/Microsoft.ManagedIdentity/Identities/default'), '2015-08-31-PREVIEW').principalId], permissions: { secrets: [ get ] } } ] }, resources: [ { type: secrets, name: [variables('storageConnectionStringName')], //... dependsOn: [ [resourceId('Microsoft.KeyVault/vaults/', variables('keyVaultName'))], [resourceId('Microsoft.Storage/storageAccounts', variables('storageAccountName'))] ], properties: { value: [concat('DefaultEndpointsProtocol=https;AccountName=', variables('storageAccountName'), ';AccountKey=', listKeys(variables('storageAccountResourceId'),'2015-05-01-preview').key1)] } }, { type: secrets, name: [variables('appInsightsKeyName')], //... dependsOn: [ [resourceId('Microsoft.KeyVault/vaults/', variables('keyVaultName'))], [resourceId('Microsoft.Insights/components', variables('appInsightsName'))] ], properties: { value: [reference(resourceId('microsoft.insights/components/', variables('appInsightsName')), '2015-05-01').InstrumentationKey] } } ] } ] } ```",
"AuditProcedure": "**From Azure Portal** 1. Login to Azure Portal 2. In the expandable menu on the left go to `Key Vaults` 3. View the Key Vaults listed. **From Azure CLI** To list key vaults within a subscription run the following command: ``` Get-AzKeyVault ``` To list the secrets within these key vaults run the following command: ``` Get-AzKeyVaultSecret [-VaultName] <vault name> ``` **From Powershell** To list key vaults within a subscription run the following command: ``` Get-AzKeyVault ``` To list all secrets in a key vault run the following command: ``` Get-AzKeyVaultSecret -VaultName '<vaultName' ```",
"AdditionalInformation": "",
"DefaultValue": "https://docs.microsoft.com/en-us/azure/app-service/app-service-key-vault-references:https://docs.microsoft.com/en-us/security/benchmark/azure/security-controls-v3-identity-management#im-2-manage-application-identities-securely-and-automatically:https://docs.microsoft.com/en-us/cli/azure/keyvault?view=azure-cli-latest:https://docs.microsoft.com/en-us/cli/azure/keyvault?view=azure-cli-latest",
"References": "TA0006, T1552, M1041"
}
]
},
{
"Id": "10.1",
"Description": "Ensure that Resource Locks are set for Mission-Critical Azure Resources",
"Checks": [],
"Attributes": [
{
"Section": "10. Miscellaneous",
"Profile": "Level 2",
"AssessmentStatus": "Manual",
"Description": "Resource Manager Locks provide a way for administrators to lock down Azure resources to prevent deletion of, or modifications to, a resource. These locks sit outside of the Role Based Access Controls (RBAC) hierarchy and, when applied, will place restrictions on the resource for all users. These locks are very useful when there is an important resource in a subscription that users should not be able to delete or change. Locks can help prevent accidental and malicious changes or deletion.",
"RationaleStatement": "As an administrator, it may be necessary to lock a subscription, resource group, or resource to prevent other users in the organization from accidentally deleting or modifying critical resources. The lock level can be set to to `CanNotDelete` or `ReadOnly` to achieve this purpose. - `CanNotDelete` means authorized users can still read and modify a resource, but they cannot delete the resource. - `ReadOnly` means authorized users can read a resource, but they cannot delete or update the resource. Applying this lock is similar to restricting all authorized users to the permissions granted by the Reader role.",
"ImpactStatement": "There can be unintended outcomes of locking a resource. Applying a lock to a parent service will cause it to be inherited by all resources within. Conversely, applying a lock to a resource may not apply to connected storage, leaving it unlocked. Please see the documentation for further information.",
"RemediationProcedure": "**From Azure Portal** 1. Navigate to the specific Azure Resource or Resource Group 2. For each mission critical resource, click on `Locks` 3. Click `Add` 4. Give the lock a name and a description, then select the type, `Read-only` or `Delete` as appropriate 5. Click OK **From Azure CLI** To lock a resource, provide the name of the resource, its resource type, and its resource group name. ``` az lock create --name <LockName> --lock-type <CanNotDelete/Read-only> --resource-group <resourceGroupName> --resource-name <resourceName> --resource-type <resourceType> ``` **From Powershell** ``` Get-AzResourceLock -ResourceName <Resource Name> -ResourceType <Resource Type> -ResourceGroupName <Resource Group Name> -Locktype <CanNotDelete/Read-only> ```",
"AuditProcedure": "**From Azure Portal** 1. Navigate to the specific Azure Resource or Resource Group 2. Click on `Locks` 3. Ensure the lock is defined with name and description, with type `Read-only` or `Delete` as appropriate. **From Azure CLI** Review the list of all locks set currently: ``` az lock list --resource-group <resourcegroupname> --resource-name <resourcename> --namespace <Namespace> --resource-type <type> --parent ``` **From Powershell** Run the following command to list all resources. ``` Get-AzResource ``` For each resource, run the following command to check for Resource Locks. ``` Get-AzResourceLock -ResourceName <Resource Name> -ResourceType <Resource Type> -ResourceGroupName <Resource Group Name> ``` Review the output of the `Properties` setting. Compliant settings will have the `CanNotDelete` or `ReadOnly` value.",
"AdditionalInformation": "",
"DefaultValue": "https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-group-lock-resources:https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-subscription-governance#azure-resource-locks:https://docs.microsoft.com/en-us/azure/governance/blueprints/concepts/resource-locking:https://docs.microsoft.com/en-us/security/benchmark/azure/security-controls-v3-asset-management#am-4-limit-access-to-asset-management",
"References": ""
}
]
}
]
}
+2 -5
View File
@@ -19,11 +19,8 @@ Mutelist:
- "StackSet-AWSControlTowerSecurityResources-*"
- "StackSet-AWSControlTowerLoggingResources-*"
- "StackSet-AWSControlTowerExecutionRole-*"
- "AWSControlTowerBP-BASELINE-CLOUDTRAIL-MASTER*"
- "AWSControlTowerBP-BASELINE-CONFIG-MASTER*"
- "StackSet-AWSControlTower*"
- "CLOUDTRAIL-ENABLED-ON-SHARED-ACCOUNTS-*"
- "AFT-Backend*"
- "AWSControlTowerBP-BASELINE-CLOUDTRAIL-MASTER"
- "AWSControlTowerBP-BASELINE-CONFIG-MASTER"
"cloudtrail_*":
Regions:
- "*"
+5 -12
View File
@@ -1,18 +1,16 @@
import os
import pathlib
from datetime import datetime, timezone
from enum import Enum
from os import getcwd
import requests
import yaml
from packaging import version
from prowler.lib.logger import logger
timestamp = datetime.today()
timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
prowler_version = "4.5.1"
prowler_version = "4.3.0"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
square_logo_img = "https://prowler.com/wp-content/uploads/logo-html.png"
aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"
@@ -22,13 +20,8 @@ gcp_logo = "https://user-images.githubusercontent.com/38561120/235928332-eb4accd
orange_color = "\033[38;5;208m"
banner_color = "\033[1;92m"
class Provider(str, Enum):
AWS = "aws"
GCP = "gcp"
AZURE = "azure"
KUBERNETES = "kubernetes"
finding_statuses = ["PASS", "FAIL", "MANUAL"]
valid_severities = ["critical", "high", "medium", "low", "informational"]
# Compliance
actual_directory = pathlib.Path(os.path.dirname(os.path.realpath(__file__)))
@@ -36,7 +29,7 @@ actual_directory = pathlib.Path(os.path.dirname(os.path.realpath(__file__)))
def get_available_compliance_frameworks(provider=None):
available_compliance_frameworks = []
providers = [p.value for p in Provider]
providers = ["aws", "gcp", "azure", "kubernetes"]
if provider:
providers = [provider]
for provider in providers:
@@ -93,7 +86,7 @@ def check_current_version():
"https://api.github.com/repos/prowler-cloud/prowler/tags", timeout=1
)
latest_version = release_response.json()[0]["name"]
if version.parse(latest_version) > version.parse(prowler_version):
if latest_version != prowler_version:
return f"{prowler_version_string} (latest is {latest_version}, upgrade for the latest features)"
else:
return (
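The notable behavioral change in this hunk is the version check: it swaps the semantic `packaging.version` comparison for plain string inequality. For reference, only the former orders multi-digit version components correctly:

```python
from packaging import version  # already imported by this module

# Semantic comparison orders versions correctly...
print(version.parse("4.10.0") > version.parse("4.9.1"))  # True
# ...while lexicographic string comparison does not:
print("4.10.0" > "4.9.1")  # False
```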
+2 -111
View File
@@ -41,29 +41,8 @@ aws:
[
"amazon-elb"
]
# aws.ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports
ec2_sg_high_risk_ports:
[
25,
110,
135,
143,
445,
3000,
4333,
5000,
5500,
8080,
8088,
]
# AWS ECS Configuration
# aws.ecs_service_fargate_latest_platform_version
fargate_linux_latest_version: "1.4.0"
fargate_windows_latest_version: "1.0.0"
# AWS VPC Configuration (vpc_endpoint_connections_trust_boundaries, vpc_endpoint_services_allowed_principals_trust_boundaries)
# AWS SSM Configuration (aws.ssm_documents_set_as_public)
# Single account environment: No action required. The AWS account number will be automatically added by the checks.
# Multi account environment: Any additional trusted account number should be added as a space separated list, e.g.
# trusted_account_ids : ["123456789012", "098765432109", "678901234567"]
@@ -72,31 +51,6 @@ aws:
# AWS Cloudwatch Configuration
# aws.cloudwatch_log_group_retention_policy_specific_days_enabled --> by default is 365 days
log_group_retention_days: 365
# aws.cloudwatch_log_group_no_critical_pii_in_logs --> see all available entities in https://microsoft.github.io/presidio/supported_entities/
critical_pii_entities : [
"CREDIT_CARD", # Credit card numbers are highly sensitive financial information.
"CRYPTO", # Crypto wallet numbers (e.g., Bitcoin addresses) can give access to cryptocurrency.
"IBAN_CODE", # International Bank Account Numbers are critical financial information.
"US_BANK_NUMBER", # US bank account numbers are sensitive and should be protected.
"US_SSN", # US Social Security Numbers are critical PII used for identity verification.
"US_PASSPORT", # US passport numbers can be used for identity theft.
"US_ITIN", # US Individual Taxpayer Identification Numbers are sensitive personal identifiers.
#"UK_NHS", # UK NHS numbers can be used to access medical records and other private information.
#"ES_NIF", # Spanish NIF (Personal tax ID) is critical for identification and tax purposes.
#"ES_NIE", # Spanish NIE (Foreigners ID card) is a critical identifier for foreign residents.
#"IT_FISCAL_CODE", # Italian personal identification code is sensitive PII for tax and legal purposes.
#"IT_PASSPORT", # Italian passport numbers are critical PII.
#"IT_IDENTITY_CARD", # Italian identity card numbers are critical for personal identification.
#"PL_PESEL", # Polish PESEL numbers are sensitive personal identifiers.
#"SG_NRIC_FIN", # Singapore National Registration Identification Card is critical PII.
#"AU_ABN", # Australian Business Numbers are critical for business identification.
#"AU_TFN", # Australian Tax File Numbers are sensitive and used for taxation purposes.
#"AU_MEDICARE", # Australian Medicare numbers are sensitive medical identifiers.
#"IN_PAN", # Indian Permanent Account Numbers are critical for tax purposes and identity.
#"IN_AADHAAR", # Indian Aadhaar numbers are highly sensitive and serve as a universal identity number.
#"FI_PERSONAL_IDENTITY_CODE" # Finnish Personal Identity Code is sensitive PII for personal identification.
]
pii_language: "en" # Language for recognizing PII entities
# AWS AppStream Session Configuration
# aws.appstream_fleet_session_idle_disconnect_timeout
@@ -124,9 +78,7 @@ aws:
"nodejs10.x",
"nodejs12.x",
"nodejs14.x",
"nodejs16.x",
"dotnet5.0",
"dotnet7",
"dotnetcore1.0",
"dotnetcore2.0",
"dotnetcore2.1",
@@ -134,8 +86,6 @@ aws:
"ruby2.5",
"ruby2.7",
]
# aws.awslambda_function_vpc_is_in_multi_azs
lambda_min_azs: 2
# AWS Organizations
# aws.organizations_scp_check_deny_regions
@@ -160,7 +110,7 @@ aws:
# AWS CloudTrail Configuration
# aws.cloudtrail_threat_detection_privilege_escalation
threat_detection_privilege_escalation_threshold: 0.2 # Percentage of actions found to decide if it is an privilege_escalation attack event, by default is 0.2 (20%)
threat_detection_privilege_escalation_threshold: 0.1 # Percentage of actions found to decide if it is an privilege_escalation attack event, by default is 0.1 (10%)
threat_detection_privilege_escalation_minutes: 1440 # Past minutes to search from now for privilege_escalation attacks, by default is 1440 minutes (24 hours)
threat_detection_privilege_escalation_actions:
[
@@ -217,7 +167,7 @@ aws:
"UpdateLoginProfile",
]
# aws.cloudtrail_threat_detection_enumeration
threat_detection_enumeration_threshold: 0.3 # Percentage of actions found to decide if it is an enumeration attack event, by default is 0.3 (30%)
threat_detection_enumeration_threshold: 0.1 # Percentage of actions found to decide if it is an enumeration attack event, by default is 0.1 (10%)
threat_detection_enumeration_minutes: 1440 # Past minutes to search from now for enumeration attacks, by default is 1440 minutes (24 hours)
threat_detection_enumeration_actions:
[
@@ -312,24 +262,6 @@ aws:
"LookupEvents",
"Search",
]
# aws.cloudtrail_threat_detection_llm_jacking
threat_detection_llm_jacking_threshold: 0.4 # Percentage of actions found to decide if it is an LLM Jacking attack event, by default is 0.4 (40%)
threat_detection_llm_jacking_minutes: 1440 # Past minutes to search from now for LLM Jacking attacks, by default is 1440 minutes (24 hours)
threat_detection_llm_jacking_actions:
[
"PutUseCaseForModelAccess", # Submits a use case for model access, providing justification (Write).
"PutFoundationModelEntitlement", # Grants entitlement for accessing a foundation model (Write).
"PutModelInvocationLoggingConfiguration", # Configures logging for model invocations (Write).
"CreateFoundationModelAgreement", # Creates a new agreement to use a foundation model (Write).
"InvokeModel", # Invokes a specified Bedrock model for inference using provided prompt and parameters (Read).
"InvokeModelWithResponseStream", # Invokes a Bedrock model for inference with real-time token streaming (Read).
"GetUseCaseForModelAccess", # Retrieves an existing use case for model access (Read).
"GetModelInvocationLoggingConfiguration", # Fetches the logging configuration for model invocations (Read).
"GetFoundationModelAvailability", # Checks the availability of a foundation model for use (Read).
"ListFoundationModelAgreementOffers", # Lists available agreement offers for accessing foundation models (List).
"ListFoundationModels", # Lists the available foundation models in Bedrock (List).
"ListProvisionedModelThroughputs", # Lists the provisioned throughput for previously created models (List).
]
# AWS RDS Configuration
# aws.rds_instance_backup_enabled
@@ -339,11 +271,6 @@ aws:
# AWS ACM Configuration
# aws.acm_certificates_expiration_check
days_to_expire_threshold: 7
# aws.acm_certificates_rsa_key_length
insecure_key_algorithms:
[
"RSA-1024",
]
# AWS EKS Configuration
# aws.eks_control_plane_logging_all_types_enabled
@@ -357,42 +284,6 @@ aws:
"scheduler",
]
# aws.eks_cluster_uses_a_supported_version
# EKS clusters must be version 1.28 or higher
eks_cluster_oldest_version_supported: "1.28"
# AWS CodeBuild Configuration
# aws.codebuild_project_no_secrets_in_variables
# CodeBuild sensitive variables that are excluded from the check
excluded_sensitive_environment_variables:
[
]
# AWS ELB Configuration
# aws.elb_is_in_multiple_az
# Minimum number of Availability Zones that an CLB must be in
elb_min_azs: 2
# AWS ELBv2 Configuration
# aws.elbv2_is_in_multiple_az
# Minimum number of Availability Zones that an ELBv2 must be in
elbv2_min_azs: 2
# AWS Secrets Configuration
# Patterns to ignore in the secrets checks
secrets_ignore_patterns: []
# AWS Secrets Manager Configuration
# aws.secretsmanager_secret_unused
# Maximum number of days a secret can be unused
max_days_secret_unused: 90
# aws.secretsmanager_secret_rotated_periodically
# Maximum number of days a secret should be rotated
max_days_secret_unrotated: 90
# Azure Configuration
azure:
# Azure Network Configuration
-56
View File
@@ -1,56 +0,0 @@
class ProwlerException(Exception):
"""Base exception for all Prowler SDK errors."""
ERROR_CODES = {
(1901, "UnexpectedError"): {
"message": "Unexpected error occurred.",
"remediation": "Please review the error message and try again.",
}
}
def __init__(
self, code, source=None, file=None, original_exception=None, error_info=None
):
"""
Initialize the ProwlerException class.
Args:
code (int): The error code.
source (str): The source name. This can be the provider name, module name, service name, etc.
file (str): The file name.
original_exception (Exception): The original exception.
error_info (dict): The error information.
Example:
A ProwlerException is raised with the following parameters and format:
>>> original_exception = Exception("Error occurred.")
ProwlerException(1901, "AWS", "file.txt", original_exception)
>>> [1901] Unexpected error occurred. - Exception: Error occurred.
"""
self.code = code
self.source = source
self.file = file
if error_info is None:
error_info = self.ERROR_CODES.get((code, self.__class__.__name__))
self.message = error_info.get("message")
self.remediation = error_info.get("remediation")
self.original_exception = original_exception
# Format -> [code] message - original_exception
if original_exception is None:
super().__init__(f"[{self.code}] {self.message}")
else:
super().__init__(
f"[{self.code}] {self.message} - {self.original_exception}"
)
def __str__(self):
"""Overriding the __str__ method"""
default_str = f"{self.__class__.__name__}[{self.code}]: {self.message}"
if self.original_exception:
default_str += f" - {self.original_exception}"
return default_str
class UnexpectedError(ProwlerException):
def __init__(self, source, file, original_exception=None):
super().__init__(1901, source, file, original_exception)
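For reference, the removed exception hierarchy formats errors as `[code] message - original_exception`; a short usage sketch, assuming the classes above are in scope:

```python
try:
    raise Exception("Error occurred.")
except Exception as error:
    exc = UnexpectedError(source="AWS", file="file.txt", original_exception=error)
    print(exc)
    # -> UnexpectedError[1901]: Unexpected error occurred. - Error occurred.
```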
+273 -124
View File
@@ -1,3 +1,4 @@
import functools
import importlib
import json
import os
@@ -5,22 +6,85 @@ import re
import shutil
import sys
import traceback
from pkgutil import walk_packages
from types import ModuleType
from typing import Any
from memory_profiler import profile
from alive_progress import alive_bar
from colorama import Fore, Style
import prowler
from prowler.config.config import orange_color
from prowler.lib.check.compliance_models import load_compliance_framework
from prowler.lib.check.custom_checks_metadata import update_check_metadata
from prowler.lib.check.models import Check
from prowler.lib.check.utils import recover_checks_from_provider
from prowler.lib.check.models import Check, load_check_metadata
from prowler.lib.logger import logger
from prowler.lib.outputs.outputs import report
from prowler.lib.utils.utils import open_file, parse_json_file, print_boxes
from prowler.providers.common.models import Audit_Metadata
import psutil
def check_memory_usage():
process = psutil.Process(os.getpid())
memory_info = process.memory_info()
return memory_info.rss # Resident Set Size: memory in bytes
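As a standalone illustration of this instrumentation, here is a minimal sketch (assuming only that psutil is installed) that samples RSS before and after an allocation, using the same bytes-to-MB conversion as import_check further down:
import os
import psutil

process = psutil.Process(os.getpid())
rss_before = process.memory_info().rss
payload = [0] * 1_000_000  # allocate something large enough to register
rss_after = process.memory_info().rss
print(f"RSS grew by {(rss_after - rss_before) / (1024 * 1024):.2f} MB")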
# Load all checks metadata
def bulk_load_checks_metadata(provider: str) -> dict:
bulk_check_metadata = {}
checks = recover_checks_from_provider(provider)
# Build list of check's metadata files
for check_info in checks:
# Build check path name
check_name = check_info[0]
check_path = check_info[1]
# Ignore fixer files
if check_name.endswith("_fixer"):
continue
# Append metadata file extension
metadata_file = f"{check_path}/{check_name}.metadata.json"
# Load metadata
check_metadata = load_check_metadata(metadata_file)
bulk_check_metadata[check_metadata.CheckID] = check_metadata
return bulk_check_metadata
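A hedged sketch of how the returned mapping could be consumed; the check ID below is hypothetical and only illustrates the CheckID-to-metadata shape:
bulk = bulk_load_checks_metadata("aws")
# Hypothetical check ID, used purely for illustration
metadata = bulk.get("s3_bucket_versioning_enabled")
if metadata:
    print(metadata.CheckID, metadata.Severity)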
# Bulk load all compliance frameworks specification
def bulk_load_compliance_frameworks(provider: str) -> dict:
"""Bulk load all compliance frameworks specification into a dict"""
try:
bulk_compliance_frameworks = {}
available_compliance_framework_modules = list_compliance_modules()
for compliance_framework in available_compliance_framework_modules:
if provider in compliance_framework.name:
compliance_specification_dir_path = (
f"{compliance_framework.module_finder.path}/{provider}"
)
# for compliance_framework in available_compliance_framework_modules:
for filename in os.listdir(compliance_specification_dir_path):
file_path = os.path.join(
compliance_specification_dir_path, filename
)
# Check if it is a file and its size is greater than 0
if os.path.isfile(file_path) and os.stat(file_path).st_size > 0:
# Open Compliance file in JSON
# cis_v1.4_aws.json --> cis_v1.4_aws
compliance_framework_name = filename.split(".json")[0]
# Store the compliance info
bulk_compliance_frameworks[compliance_framework_name] = (
load_compliance_framework(file_path)
)
except Exception as e:
logger.error(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")
return bulk_compliance_frameworks
# Exclude checks to run
def exclude_checks_to_run(checks_to_execute: set, excluded_checks: list) -> set:
@@ -273,7 +337,7 @@ def print_checks(
for check in check_list:
try:
print(
f"[{bulk_checks_metadata[check].CheckID}] {bulk_checks_metadata[check].CheckTitle} - {Fore.MAGENTA}{bulk_checks_metadata[check].ServiceName} {Fore.YELLOW}[{bulk_checks_metadata[check].Severity.value}]{Style.RESET_ALL}"
f"[{bulk_checks_metadata[check].CheckID}] {bulk_checks_metadata[check].CheckTitle} - {Fore.MAGENTA}{bulk_checks_metadata[check].ServiceName} {Fore.YELLOW}[{bulk_checks_metadata[check].Severity}]{Style.RESET_ALL}"
)
except KeyError as error:
logger.error(
@@ -292,11 +356,128 @@ def print_checks(
print(message)
# Parse checks from compliance frameworks specification
def parse_checks_from_compliance_framework(
compliance_frameworks: list, bulk_compliance_frameworks: dict
) -> list:
"""parse_checks_from_compliance_framework returns a set of checks from the given compliance_frameworks"""
checks_to_execute = set()
try:
for framework in compliance_frameworks:
# compliance_framework_json["Requirements"][*]["Checks"]
compliance_framework_checks_list = [
requirement.Checks
for requirement in bulk_compliance_frameworks[framework].Requirements
]
# Reduce nested list into a list
# Pythonic functional magic
compliance_framework_checks = functools.reduce(
lambda x, y: x + y, compliance_framework_checks_list
)
# Then union this list of checks with the initial one
checks_to_execute = checks_to_execute.union(compliance_framework_checks)
except Exception as e:
logger.error(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")
return checks_to_execute
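The reduce-then-union step can be seen in isolation below; the requirement and check names are made up for the example:
import functools

requirements_checks = [
    ["iam_root_mfa_enabled", "iam_password_policy"],  # requirement 1 (hypothetical)
    ["iam_password_policy", "cloudtrail_enabled"],    # requirement 2 (hypothetical)
]
# Flatten the nested per-requirement lists into one list
flattened = functools.reduce(lambda x, y: x + y, requirements_checks)
# Union deduplicates checks shared by several requirements
checks_to_execute = set().union(flattened)
assert checks_to_execute == {"iam_root_mfa_enabled", "iam_password_policy", "cloudtrail_enabled"}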
def recover_checks_from_provider(
provider: str, service: str = None, include_fixers: bool = False
) -> list[tuple]:
"""
Recover all checks from the selected provider and service
Returns a list of tuples with the following format (check_name, check_path)
"""
try:
checks = []
modules = list_modules(provider, service)
for module_name in modules:
# Format: "prowler.providers.{provider}.services.{service}.{check_name}.{check_name}"
check_module_name = module_name.name
# We need to exclude common shared libraries in services
if (
check_module_name.count(".") == 6
and "lib" not in check_module_name
and (not check_module_name.endswith("_fixer") or include_fixers)
):
check_path = module_name.module_finder.path
# Check name is the last part of the check_module_name
check_name = check_module_name.split(".")[-1]
check_info = (check_name, check_path)
checks.append(check_info)
except ModuleNotFoundError:
logger.critical(f"Service {service} was not found for the {provider} provider.")
sys.exit(1)
except Exception as e:
logger.critical(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}]: {e}")
sys.exit(1)
else:
return checks
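The dotted-path filter above hinges on the module name having exactly six dots, not containing "lib", and not ending in "_fixer" unless fixers are requested. A sketch with hypothetical module names:
candidates = [
    "prowler.providers.aws.services.s3.s3_bucket_versioning.s3_bucket_versioning",
    "prowler.providers.aws.services.s3.lib.helpers",  # shared library, skipped
    "prowler.providers.aws.services.s3.s3_bucket_versioning.s3_bucket_versioning_fixer",
]
for name in candidates:
    is_check = (
        name.count(".") == 6 and "lib" not in name and not name.endswith("_fixer")
    )
    print(f"{name} -> {'check' if is_check else 'skipped'}")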
def list_compliance_modules():
"""
list_compliance_modules returns the available compliance frameworks and their paths
"""
# This module path requires the full path including "prowler."
module_path = "prowler.compliance"
return walk_packages(
importlib.import_module(module_path).__path__,
importlib.import_module(module_path).__name__ + ".",
)
# List all available modules in the selected provider and service
def list_modules(provider: str, service: str):
# This module path requires the full path including "prowler."
module_path = f"prowler.providers.{provider}.services"
if service:
module_path += f".{service}"
return walk_packages(
importlib.import_module(module_path).__path__,
importlib.import_module(module_path).__name__ + ".",
)
# Import an input check using its path
def import_check(check_path: str) -> ModuleType:
print(f"{check_memory_usage() / (1024 * 1024)} MB : Memory usage before import {check_path}")
lib = importlib.import_module(f"{check_path}")
print(f"{check_memory_usage() / (1024 * 1024)} MB : Memory usage after import {check_path}")
return lib
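A self-contained version of the same before/after measurement, importing a standard-library module instead of a check (RSS deltas are noisy, so treat the number as indicative only):
import importlib
import os
import psutil

def rss_mb() -> float:
    return psutil.Process(os.getpid()).memory_info().rss / (1024 * 1024)

before = rss_mb()
module = importlib.import_module("json")  # any importable module works here
after = rss_mb()
print(f"importing {module.__name__} cost roughly {after - before:.2f} MB of RSS")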
def run_check(check: Check, verbose: bool = False, only_logs: bool = False) -> list:
"""
Run the check and return the findings
Args:
check (Check): check instance to execute
verbose (bool): print the check ID and severity while running
only_logs (bool): suppress console error messages if the check fails
Returns:
list: list of findings
"""
findings = []
if verbose:
print(
f"\nCheck ID: {check.CheckID} - {Fore.MAGENTA}{check.ServiceName}{Fore.YELLOW} [{check.Severity}]{Style.RESET_ALL}"
)
logger.debug(f"Executing check: {check.CheckID}")
try:
findings = check.execute()
except Exception as error:
if not only_logs:
print(
f"Something went wrong in {check.CheckID}, please use --log-level ERROR"
)
logger.error(
f"{check.CheckID} -- {error.__class__.__name__}[{traceback.extract_tb(error.__traceback__)[-1].lineno}]: {error}"
)
finally:
return findings
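One subtlety worth noting: the return inside finally guarantees run_check always returns a list, and it would also suppress any exception escaping the except block. A minimal sketch of that control flow, independent of Prowler:
def swallow_and_return() -> list:
    findings = []
    try:
        raise RuntimeError("boom")
    except Exception as error:
        print(f"logged: {error}")
        raise  # re-raised here...
    finally:
        return findings  # ...but returning from finally suppresses it

assert swallow_and_return() == []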
def run_fixer(check_findings: list) -> int:
"""
@@ -379,7 +560,6 @@ def execute_checks(
global_provider: Any,
custom_checks_metadata: Any,
config_file: str,
output_options: Any,
) -> list:
# List to store all the check's findings
all_findings = []
@@ -415,51 +595,22 @@ def execute_checks(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
# Set verbose flag
verbose = False
if hasattr(output_options, "verbose"):
verbose = output_options.verbose
elif hasattr(output_options, "fixer"):
verbose = output_options.fixer
# Execution with the --only-logs flag
if output_options.only_logs:
if global_provider.output_options.only_logs:
for check_name in checks_to_execute:
# Recover service from check name
service = check_name.split("_")[0]
try:
try:
# Import check module
check_module_path = f"prowler.providers.{global_provider.type}.services.{service}.{check_name}.{check_name}"
lib = import_check(check_module_path)
# Recover functions from check
check_to_execute = getattr(lib, check_name)
check = check_to_execute()
except ModuleNotFoundError:
logger.error(
f"Check '{check_name}' was not found for the {global_provider.type.upper()} provider"
)
continue
if verbose:
print(
f"\nCheck ID: {check.CheckID} - {Fore.MAGENTA}{check.ServiceName}{Fore.YELLOW} [{check.Severity.value}]{Style.RESET_ALL}"
)
check_findings = execute(
check,
service,
check_name,
global_provider,
services_executed,
checks_executed,
custom_checks_metadata,
output_options,
)
report(check_findings, global_provider, output_options)
all_findings.extend(check_findings)
# Update Audit Status
services_executed.add(service)
checks_executed.add(check_name)
global_provider.audit_metadata = update_audit_metadata(
global_provider.audit_metadata, services_executed, checks_executed
)
# If the check does not exist in the provider or belongs to another provider
except ModuleNotFoundError:
logger.error(
@@ -508,39 +659,15 @@ def execute_checks(
f"-> Scanning {orange_color}{service}{Style.RESET_ALL} service"
)
try:
try:
# Import check module
check_module_path = f"prowler.providers.{global_provider.type}.services.{service}.{check_name}.{check_name}"
lib = import_check(check_module_path)
# Recover functions from check
check_to_execute = getattr(lib, check_name)
check = check_to_execute()
except ModuleNotFoundError:
logger.error(
f"Check '{check_name}' was not found for the {global_provider.type.upper()} provider"
)
continue
if verbose:
print(
f"\nCheck ID: {check.CheckID} - {Fore.MAGENTA}{check.ServiceName}{Fore.YELLOW} [{check.Severity.value}]{Style.RESET_ALL}"
)
check_findings = execute(
check,
service,
check_name,
global_provider,
custom_checks_metadata,
output_options,
)
report(check_findings, global_provider, output_options)
all_findings.extend(check_findings)
services_executed.add(service)
checks_executed.add(check_name)
global_provider.audit_metadata = update_audit_metadata(
global_provider.audit_metadata,
services_executed,
checks_executed,
custom_checks_metadata,
)
all_findings.extend(check_findings)
# If the check does not exist in the provider or belongs to another provider
except ModuleNotFoundError:
@@ -555,79 +682,52 @@ def execute_checks(
)
bar()
bar.title = f"-> {Fore.GREEN}Scan completed!{Style.RESET_ALL}"
# Custom report interface
if os.environ.get("PROWLER_REPORT_LIB_PATH"):
try:
logger.info("Using custom report interface ...")
lib = os.environ["PROWLER_REPORT_LIB_PATH"]
outputs_module = importlib.import_module(lib)
custom_report_interface = getattr(outputs_module, "report")
# TODO: review this call and see if we can remove the global_provider.output_options since it is contained in the global_provider
custom_report_interface(check_findings, output_options, global_provider)
except Exception:
sys.exit(1)
return all_findings
def execute(
check: Check,
service: str,
check_name: str,
global_provider: Any,
services_executed: set,
checks_executed: set,
custom_checks_metadata: Any,
output_options: Any = None,
):
"""
Execute the check and report the findings
Args:
check (Check): check instance to execute
service (str): service name
check_name (str): check name
global_provider (Any): provider object
services_executed (set): services already executed, updated in place
checks_executed (set): checks already executed, updated in place
custom_checks_metadata (Any): custom checks metadata
output_options (Any): output options, depending on the provider
Returns:
list: list of findings
"""
try:
# Import check module
check_module_path = f"prowler.providers.{global_provider.type}.services.{service}.{check_name}.{check_name}"
lib = import_check(check_module_path)
# Recover functions from check
check_to_execute = getattr(lib, check_name)
check_class = check_to_execute()
# Update check metadata to reflect that in the outputs
if custom_checks_metadata and custom_checks_metadata["Checks"].get(
check.CheckID
check_class.CheckID
):
check = update_check_metadata(
check, custom_checks_metadata["Checks"][check.CheckID]
check_class = update_check_metadata(
check_class, custom_checks_metadata["Checks"][check_class.CheckID]
)
only_logs = False
if hasattr(output_options, "only_logs"):
only_logs = output_options.only_logs
# Run check
verbose = (
global_provider.output_options.verbose
or global_provider.output_options.fixer
)
check_findings = run_check(
check_class, verbose, global_provider.output_options.only_logs
)
# Execute the check
check_findings = []
logger.debug(f"Executing check: {check.CheckID}")
try:
check_findings = check.execute()
except Exception as error:
if not only_logs:
print(
f"Something went wrong in {check.CheckID}, please use --log-level ERROR"
)
logger.error(
f"{check.CheckID} -- {error.__class__.__name__}[{traceback.extract_tb(error.__traceback__)[-1].lineno}]: {error}"
)
# Update Audit Status
services_executed.add(service)
checks_executed.add(check_name)
global_provider.audit_metadata = update_audit_metadata(
global_provider.audit_metadata, services_executed, checks_executed
)
# Exclude findings per status
if hasattr(output_options, "status") and output_options.status:
check_findings = [
finding
for finding in check_findings
if finding.status in output_options.status
]
# Before returning the findings, we need to apply the mute list logic
# Mutelist findings
if hasattr(global_provider, "mutelist") and global_provider.mutelist.mutelist:
# TODO: make this prettier
is_finding_muted_args = {}
if global_provider.type == "aws":
is_finding_muted_args["aws_account_id"] = (
@@ -642,9 +742,27 @@ def execute(
**is_finding_muted_args
)
# Refactor(Outputs)
# Report the check's findings
report(check_findings, global_provider)
# Refactor(Outputs)
if os.environ.get("PROWLER_REPORT_LIB_PATH"):
try:
logger.info("Using custom report interface ...")
lib = os.environ["PROWLER_REPORT_LIB_PATH"]
outputs_module = importlib.import_module(lib)
custom_report_interface = getattr(outputs_module, "report")
# TODO: review this call and see if we can remove the global_provider.output_options since it is contained in the global_provider
custom_report_interface(
check_findings, global_provider.output_options, global_provider
)
except Exception:
sys.exit(1)
except ModuleNotFoundError:
logger.error(
f"Check '{check.CheckID}' was not found for the {global_provider.type.upper()} provider"
f"Check '{check_name}' was not found for the {global_provider.type.upper()} provider"
)
check_findings = []
except Exception as error:
@@ -674,3 +792,34 @@ def update_audit_metadata(
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def recover_checks_from_service(service_list: list, provider: str) -> set:
"""
Recover all checks from the selected provider and service
Returns a set of checks from the given services
"""
try:
checks = set()
service_list = [
"awslambda" if service == "lambda" else service for service in service_list
]
for service in service_list:
service_checks = recover_checks_from_provider(provider, service)
if not service_checks:
logger.error(f"Service '{service}' does not have checks.")
else:
for check in service_checks:
# Recover check name and module name from import path
# Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
check_name = check[0].split(".")[-1]
# If the service is present in the group list passed as parameters
# if service_name in group_list: checks_from_arn.add(check_name)
checks.add(check_name)
return checks
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
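The only input normalization here is the lambda/awslambda rename, shown in isolation:
service_list = ["lambda", "ec2"]
normalized = ["awslambda" if service == "lambda" else service for service in service_list]
assert normalized == ["awslambda", "ec2"]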
+34 -45
View File
@@ -1,33 +1,35 @@
from colorama import Fore, Style
from prowler.lib.check.check import parse_checks_from_file
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.models import CheckMetadata, Severity
from prowler.config.config import valid_severities
from prowler.lib.check.check import (
parse_checks_from_compliance_framework,
parse_checks_from_file,
recover_checks_from_provider,
recover_checks_from_service,
)
from prowler.lib.logger import logger
# Generate the list of checks to execute
def load_checks_to_execute(
bulk_checks_metadata: dict,
bulk_compliance_frameworks: dict,
checks_file: str,
check_list: list,
service_list: list,
severities: list,
compliance_frameworks: list,
categories: set,
provider: str,
bulk_checks_metadata: dict = None,
bulk_compliance_frameworks: dict = None,
checks_file: str = None,
check_list: list = None,
service_list: list = None,
severities: list = None,
compliance_frameworks: list = None,
categories: set = None,
) -> set:
"""Generate the list of checks to execute based on the cloud provider and the input arguments given"""
try:
# Local subsets
checks_to_execute = set()
check_aliases = {}
check_severities = {key: [] for key in valid_severities}
check_categories = {}
check_severities = {severity.value: [] for severity in Severity}
if not bulk_checks_metadata:
bulk_checks_metadata = CheckMetadata.get_bulk(provider=provider)
# First, loop over the bulk_checks_metadata to extract the needed subsets
for check, metadata in bulk_checks_metadata.items():
try:
@@ -62,41 +64,24 @@ def load_checks_to_execute(
checks_to_execute.update(check_severities[severity])
if service_list:
for service in service_list:
checks_to_execute = (
set(
CheckMetadata.list(
bulk_checks_metadata=bulk_checks_metadata,
service=service,
)
)
& checks_to_execute
)
checks_to_execute = (
recover_checks_from_service(service_list, provider)
& checks_to_execute
)
# Handle if there are checks passed using -C/--checks-file
elif checks_file:
checks_to_execute = parse_checks_from_file(checks_file, provider)
# Handle if there are services passed using -s/--services
elif service_list:
for service in service_list:
checks_to_execute.update(
CheckMetadata.list(
bulk_checks_metadata=bulk_checks_metadata,
service=service,
)
)
checks_to_execute = recover_checks_from_service(service_list, provider)
# Handle if there are compliance frameworks passed using --compliance
elif compliance_frameworks:
if not bulk_compliance_frameworks:
bulk_compliance_frameworks = Compliance.get_bulk(provider=provider)
for compliance_framework in compliance_frameworks:
checks_to_execute.update(
CheckMetadata.list(
bulk_compliance_frameworks=bulk_compliance_frameworks,
compliance_framework=compliance_framework,
)
)
checks_to_execute = parse_checks_from_compliance_framework(
compliance_frameworks, bulk_compliance_frameworks
)
# Handle if there are categories passed using --categories
elif categories:
@@ -105,13 +90,17 @@ def load_checks_to_execute(
# If there are no checks passed as argument
else:
# get all checks
for check_name in CheckMetadata.list(
bulk_checks_metadata=bulk_checks_metadata
):
# Get all check modules to run with the specific provider
checks = recover_checks_from_provider(provider)
for check_info in checks:
# Recover check name from import path (last part)
# Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
check_name = check_info[0]
checks_to_execute.add(check_name)
# Only execute threat detection checks if threat-detection category is set
if categories and categories != [] and "threat-detection" not in categories:
if "threat-detection" not in categories:
for threat_detection_check in check_categories.get("threat-detection", []):
checks_to_execute.discard(threat_detection_check)
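With every argument but provider now optional, the function can be called minimally; a hedged sketch, where the import path is assumed rather than confirmed by this diff:
# Assumed import path, for illustration only
from prowler.lib.check.checks_loader import load_checks_to_execute

all_aws_checks = load_checks_to_execute(provider="aws")
critical_checks = load_checks_to_execute(provider="aws", severities=["critical"])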
+3 -3
View File
@@ -1,6 +1,6 @@
import sys
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.logger import logger
@@ -26,8 +26,8 @@ def update_checks_metadata_with_compliance(
if check in requirement.Checks:
# Include the requirement into the check's framework requirements
compliance_requirements.append(requirement)
# Create the Compliance
compliance = Compliance(
# Create the Compliance_Model
compliance = ComplianceBaseModel(
Framework=framework.Framework,
Provider=framework.Provider,
Version=framework.Version,
+6 -143
View File
@@ -1,11 +1,9 @@
import os
import sys
from enum import Enum
from typing import Optional, Union
from pydantic import BaseModel, ValidationError, root_validator
from prowler.lib.check.utils import list_compliance_modules
from prowler.lib.logger import logger
@@ -169,19 +167,6 @@ class Mitre_Requirement(BaseModel):
Checks: list[str]
# KISA-ISMS-P Requirement Attribute
class KISA_ISMSP_Requirement_Attribute(BaseModel):
"""KISA ISMS-P Requirement Attribute"""
Domain: str
Subdomain: str
Section: str
AuditChecklist: Optional[list[str]]
RelatedRegulations: Optional[list[str]]
AuditEvidence: Optional[list[str]]
NonComplianceCases: Optional[list[str]]
# Base Compliance Model
# TODO: move this to compliance folder
class Compliance_Requirement(BaseModel):
@@ -196,7 +181,6 @@ class Compliance_Requirement(BaseModel):
ENS_Requirement_Attribute,
ISO27001_2013_Requirement_Attribute,
AWS_Well_Architected_Requirement_Attribute,
KISA_ISMSP_Requirement_Attribute,
# Generic_Compliance_Requirement_Attribute must be the last one since it is the fallback for generic compliance framework
Generic_Compliance_Requirement_Attribute,
]
@@ -204,8 +188,8 @@ class Compliance_Requirement(BaseModel):
Checks: list[str]
class Compliance(BaseModel):
"""Compliance holds the base model for every compliance framework"""
class ComplianceBaseModel(BaseModel):
"""ComplianceBaseModel holds the base model for every compliance framework"""
Framework: str
Provider: str
@@ -229,137 +213,16 @@ class Compliance(BaseModel):
raise ValueError("Framework or Provider must not be empty")
return values
@staticmethod
def list(bulk_compliance_frameworks: dict, provider: str = None) -> list[str]:
"""
Returns a list of compliance frameworks from bulk compliance frameworks
Args:
bulk_compliance_frameworks (dict): The bulk compliance frameworks
provider (str): The provider name
Returns:
list: The list of compliance frameworks
"""
if provider:
compliance_frameworks = [
compliance_framework
for compliance_framework in bulk_compliance_frameworks.keys()
if provider in compliance_framework
]
else:
compliance_frameworks = [
compliance_framework
for compliance_framework in bulk_compliance_frameworks.keys()
]
return compliance_frameworks
@staticmethod
def get(
bulk_compliance_frameworks: dict, compliance_framework_name: str
) -> "Compliance":
"""
Returns a compliance framework from bulk compliance frameworks
Args:
bulk_compliance_frameworks (dict): The bulk compliance frameworks
compliance_framework_name (str): The compliance framework name
Returns:
Compliance: The compliance framework
"""
return bulk_compliance_frameworks.get(compliance_framework_name, None)
@staticmethod
def list_requirements(
bulk_compliance_frameworks: dict, compliance_framework: str = None
) -> list:
"""
Returns a list of compliance requirements from a compliance framework
Args:
bulk_compliance_frameworks (dict): The bulk compliance frameworks
compliance_framework (str): The compliance framework name
Returns:
list: The list of compliance requirements for the provided compliance framework
"""
compliance_requirements = []
if bulk_compliance_frameworks and compliance_framework:
compliance_requirements = [
compliance_requirement.Id
for compliance_requirement in bulk_compliance_frameworks.get(
compliance_framework
).Requirements
]
return compliance_requirements
@staticmethod
def get_requirement(
bulk_compliance_frameworks: dict, compliance_framework: str, requirement_id: str
) -> Union[Mitre_Requirement, Compliance_Requirement]:
"""
Returns a compliance requirement from a compliance framework
Args:
bulk_compliance_frameworks (dict): The bulk compliance frameworks
compliance_framework (str): The compliance framework name
requirement_id (str): The compliance requirement ID
Returns:
Mitre_Requirement | Compliance_Requirement: The compliance requirement
"""
requirement = None
for compliance_requirement in bulk_compliance_frameworks.get(
compliance_framework
).Requirements:
if compliance_requirement.Id == requirement_id:
requirement = compliance_requirement
break
return requirement
@staticmethod
def get_bulk(provider: str) -> dict:
"""Bulk load all compliance frameworks specification into a dict"""
try:
bulk_compliance_frameworks = {}
available_compliance_framework_modules = list_compliance_modules()
for compliance_framework in available_compliance_framework_modules:
if provider in compliance_framework.name:
compliance_specification_dir_path = (
f"{compliance_framework.module_finder.path}/{provider}"
)
# for compliance_framework in available_compliance_framework_modules:
for filename in os.listdir(compliance_specification_dir_path):
file_path = os.path.join(
compliance_specification_dir_path, filename
)
# Check if it is a file and its size is greater than 0
if os.path.isfile(file_path) and os.stat(file_path).st_size > 0:
# Open Compliance file in JSON
# cis_v1.4_aws.json --> cis_v1.4_aws
compliance_framework_name = filename.split(".json")[0]
# Store the compliance info
bulk_compliance_frameworks[compliance_framework_name] = (
load_compliance_framework(file_path)
)
except Exception as e:
logger.error(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")
return bulk_compliance_frameworks
# Testing Pending
def load_compliance_framework(
compliance_specification_file: str,
) -> Compliance:
) -> ComplianceBaseModel:
"""load_compliance_framework loads and parse a Compliance Framework Specification"""
try:
compliance_framework = Compliance.parse_file(compliance_specification_file)
compliance_framework = ComplianceBaseModel.parse_file(
compliance_specification_file
)
except ValidationError as error:
logger.critical(
f"Compliance Framework Specification from {compliance_specification_file} is not valid: {error}"
+2 -2
View File
@@ -3,7 +3,7 @@ import sys
import yaml
from jsonschema import validate
from prowler.lib.check.models import Severity
from prowler.config.config import valid_severities
from prowler.lib.logger import logger
custom_checks_metadata_schema = {
@@ -17,7 +17,7 @@ custom_checks_metadata_schema = {
"properties": {
"Severity": {
"type": "string",
"enum": [severity.value for severity in Severity],
"enum": valid_severities,
},
"CheckTitle": {
"type": "string",
+21 -319
View File
@@ -1,30 +1,17 @@
import functools
import os
import re
import sys
from abc import ABC, abstractmethod
from dataclasses import dataclass
from enum import Enum
from typing import Set
from pydantic import BaseModel, ValidationError, validator
from prowler.config.config import Provider
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.utils import recover_checks_from_provider
from prowler.config.config import valid_severities
from prowler.lib.logger import logger
class Code(BaseModel):
"""
Represents the remediation code using IaC like CloudFormation, Terraform or the native CLI.
Attributes:
NativeIaC (str): The NativeIaC code.
Terraform (str): The Terraform code.
CLI (str): The CLI code.
Other (str): Other code.
"""
"""Check's remediation information using IaC like CloudFormation, Terraform or the native CLI"""
NativeIaC: str
Terraform: str
@@ -33,69 +20,21 @@ class Code(BaseModel):
class Recommendation(BaseModel):
"""
Represents a recommendation.
Attributes:
Text (str): The text of the recommendation.
Url (str): The URL associated with the recommendation.
"""
"""Check's recommendation information"""
Text: str
Url: str
class Remediation(BaseModel):
"""
Represents a remediation action for a specific check.
Attributes:
Code (Code): The code associated with the remediation action.
Recommendation (Recommendation): The recommendation for the remediation action.
"""
"""Check's remediation: Code and Recommendation"""
Code: Code
Recommendation: Recommendation
class Severity(str, Enum):
critical = "critical"
high = "high"
medium = "medium"
low = "low"
informational = "informational"
class CheckMetadata(BaseModel):
"""
Model representing the metadata of a check.
Attributes:
Provider (str): The provider of the check.
CheckID (str): The ID of the check.
CheckTitle (str): The title of the check.
CheckType (list[str]): The type of the check.
CheckAliases (list[str], optional): The aliases of the check. Defaults to an empty list.
ServiceName (str): The name of the service.
SubServiceName (str): The name of the sub-service.
ResourceIdTemplate (str): The template for the resource ID.
Severity (str): The severity of the check.
ResourceType (str): The type of the resource.
Description (str): The description of the check.
Risk (str): The risk associated with the check.
RelatedUrl (str): The URL related to the check.
Remediation (Remediation): The remediation steps for the check.
Categories (list[str]): The categories of the check.
DependsOn (list[str]): The dependencies of the check.
RelatedTo (list[str]): The related checks.
Notes (str): Additional notes for the check.
Compliance (list, optional): The compliance information for the check. Defaults to None.
Validators:
valid_category(value): Validator function to validate the categories of the check.
severity_to_lower(severity): Validator function to convert the severity to lowercase.
valid_severity(severity): Validator function to validate the severity of the check.
"""
class Check_Metadata_Model(BaseModel):
"""Check Metadata Model"""
Provider: str
CheckID: str
@@ -105,7 +44,7 @@ class CheckMetadata(BaseModel):
ServiceName: str
SubServiceName: str
ResourceIdTemplate: str
Severity: Severity
Severity: str
ResourceType: str
Description: str
Risk: str
@@ -134,244 +73,16 @@ class CheckMetadata(BaseModel):
def severity_to_lower(severity):
return severity.lower()
@staticmethod
def get_bulk(provider: str) -> dict[str, "CheckMetadata"]:
"""
Load the metadata of all checks for a given provider reading the check's metadata files.
Args:
provider (str): The name of the provider.
Returns:
dict[str, CheckMetadata]: A dictionary containing the metadata of all checks, with the CheckID as the key.
"""
bulk_check_metadata = {}
checks = recover_checks_from_provider(provider)
# Build list of check's metadata files
for check_info in checks:
# Build check path name
check_name = check_info[0]
check_path = check_info[1]
# Ignore fixer files
if check_name.endswith("_fixer"):
continue
# Append metadata file extension
metadata_file = f"{check_path}/{check_name}.metadata.json"
# Load metadata
check_metadata = load_check_metadata(metadata_file)
bulk_check_metadata[check_metadata.CheckID] = check_metadata
return bulk_check_metadata
@staticmethod
def list(
bulk_checks_metadata: dict = None,
bulk_compliance_frameworks: dict = None,
provider: str = None,
severity: str = None,
category: str = None,
service: str = None,
compliance_framework: str = None,
) -> Set["CheckMetadata"]:
"""
Returns a set of checks from the bulk checks metadata.
Args:
provider (str): The provider of the checks.
bulk_checks_metadata (dict): The bulk checks metadata.
bulk_compliance_frameworks (dict): The bulk compliance frameworks.
severity (str): The severity of the checks.
category (str): The category of the checks.
service (str): The service of the checks.
compliance_framework (str): The compliance framework of the checks.
Returns:
set: A set of checks.
"""
checks_from_provider = set()
checks_from_severity = set()
checks_from_category = set()
checks_from_service = set()
checks_from_compliance_framework = set()
# If the bulk checks metadata is not provided, get it
if not bulk_checks_metadata:
bulk_checks_metadata = {}
available_providers = [p.value for p in Provider]
for provider_name in available_providers:
bulk_checks_metadata.update(CheckMetadata.get_bulk(provider_name))
if provider:
checks_from_provider = {
check_name
for check_name, check_metadata in bulk_checks_metadata.items()
if check_metadata.Provider == provider
}
if severity:
checks_from_severity = CheckMetadata.list_by_severity(
bulk_checks_metadata=bulk_checks_metadata, severity=severity
@validator("Severity")
def valid_severity(severity):
if severity not in valid_severities:
raise ValueError(
f"Invalid severity: {severity}. Severity must be one of {', '.join(valid_severities)}"
)
if category:
checks_from_category = CheckMetadata.list_by_category(
bulk_checks_metadata=bulk_checks_metadata, category=category
)
if service:
checks_from_service = CheckMetadata.list_by_service(
bulk_checks_metadata=bulk_checks_metadata, service=service
)
if compliance_framework:
# Loaded here, as it is not always needed
if not bulk_compliance_frameworks:
bulk_compliance_frameworks = {}
available_providers = [p.value for p in Provider]
for provider in available_providers:
bulk_compliance_frameworks = Compliance.get_bulk(provider=provider)
checks_from_compliance_framework = (
CheckMetadata.list_by_compliance_framework(
bulk_compliance_frameworks=bulk_compliance_frameworks,
compliance_framework=compliance_framework,
)
)
# Get all the checks:
checks = set(bulk_checks_metadata.keys())
# Get the intersection of the checks
if len(checks_from_provider) > 0 or provider:
checks = checks & checks_from_provider
if len(checks_from_severity) > 0 or severity:
checks = checks & checks_from_severity
if len(checks_from_category) > 0 or category:
checks = checks & checks_from_category
if len(checks_from_service) > 0 or service:
checks = checks & checks_from_service
if len(checks_from_compliance_framework) > 0 or compliance_framework:
checks = checks & checks_from_compliance_framework
return checks
@staticmethod
def get(bulk_checks_metadata: dict, check_id: str) -> "CheckMetadata":
"""
Returns the check metadata from the bulk checks metadata.
Args:
bulk_checks_metadata (dict): The bulk checks metadata.
check_id (str): The check ID.
Returns:
CheckMetadata: The check metadata.
"""
return bulk_checks_metadata.get(check_id, None)
@staticmethod
def list_by_severity(bulk_checks_metadata: dict, severity: str = None) -> set:
"""
Returns a set of checks by severity from the bulk checks metadata.
Args:
bulk_checks_metadata (dict): The bulk checks metadata.
severity (str): The severity.
Returns:
set: A set of checks by severity.
"""
checks = set()
if severity:
checks = {
check_name
for check_name, check_metadata in bulk_checks_metadata.items()
if check_metadata.Severity == severity
}
return checks
@staticmethod
def list_by_category(bulk_checks_metadata: dict, category: str = None) -> set:
"""
Returns a set of checks by category from the bulk checks metadata.
Args:
bulk_checks_metadata (dict): The bulk checks metadata.
category (str): The category.
Returns:
set: A set of checks by category.
"""
checks = set()
if category:
checks = {
check_name
for check_name, check_metadata in bulk_checks_metadata.items()
if category in check_metadata.Categories
}
return checks
@staticmethod
def list_by_service(bulk_checks_metadata: dict, service: str = None) -> set:
"""
Returns a set of checks by service from the bulk checks metadata.
Args:
bulk_checks_metadata (dict): The bulk checks metadata.
service (str): The service.
Returns:
set: A set of checks by service.
"""
checks = set()
if service:
if service == "lambda":
service = "awslambda"
checks = {
check_name
for check_name, check_metadata in bulk_checks_metadata.items()
if check_metadata.ServiceName == service
}
return checks
@staticmethod
def list_by_compliance_framework(
bulk_compliance_frameworks: dict, compliance_framework: str = None
) -> set:
"""
Returns a set of checks by compliance framework from the bulk compliance frameworks.
Args:
bulk_compliance_frameworks (dict): The bulk compliance frameworks.
compliance_framework (str): The compliance framework.
Returns:
set: A set of checks by compliance framework.
"""
checks = set()
if compliance_framework:
try:
checks_from_framework_list = [
requirement.Checks
for requirement in bulk_compliance_frameworks[
compliance_framework
].Requirements
]
# Reduce nested list into a list
# Pythonic functional magic
checks_from_framework = functools.reduce(
lambda x, y: x + y, checks_from_framework_list
)
# Then union this list of checks with the initial one
checks = checks.union(checks_from_framework)
except Exception as e:
logger.error(
f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}"
)
return checks
return severity
class Check(ABC, CheckMetadata):
class Check(ABC, Check_Metadata_Model):
"""Prowler Check"""
def __init__(self, **data):
@@ -382,7 +93,8 @@ class Check(ABC, CheckMetadata):
+ ".metadata.json"
)
# Store it to validate them with Pydantic
data = CheckMetadata.parse_file(metadata_file).dict()
data = Check_Metadata_Model.parse_file(metadata_file).dict()
# data = {}
# Calls parents init function
super().__init__(**data)
# TODO: verify that the CheckID is the same as the filename and classname
@@ -403,14 +115,14 @@ class Check_Report:
status: str
status_extended: str
check_metadata: CheckMetadata
check_metadata: Check_Metadata_Model
resource_details: str
resource_tags: list
muted: bool
def __init__(self, metadata):
self.status = ""
self.check_metadata = CheckMetadata.parse_raw(metadata)
self.check_metadata = Check_Metadata_Model.parse_raw(metadata)
self.status_extended = ""
self.resource_details = ""
self.resource_tags = []
@@ -483,22 +195,12 @@ class Check_Report_Kubernetes(Check_Report):
# Testing Pending
def load_check_metadata(metadata_file: str) -> CheckMetadata:
"""
Load check metadata from a file.
Args:
metadata_file (str): The path to the metadata file.
Returns:
CheckMetadata: The loaded check metadata.
Raises:
ValidationError: If the metadata file is not valid.
"""
def load_check_metadata(metadata_file: str) -> Check_Metadata_Model:
"""load_check_metadata loads and parse a Check's metadata file"""
try:
check_metadata = CheckMetadata.parse_file(metadata_file)
check_metadata = Check_Metadata_Model.parse_file(metadata_file)
except ValidationError as error:
logger.critical(f"Metadata from {metadata_file} is not valid: {error}")
# TODO: remove this exit and raise an exception
sys.exit(1)
else:
return check_metadata
-95
View File
@@ -1,95 +0,0 @@
import importlib
import sys
from pkgutil import walk_packages
from prowler.lib.logger import logger
def recover_checks_from_provider(
provider: str, service: str = None, include_fixers: bool = False
) -> list[tuple]:
"""
Recover all checks from the selected provider and service
Returns a list of tuples with the following format (check_name, check_path)
"""
try:
checks = []
modules = list_modules(provider, service)
for module_name in modules:
# Format: "prowler.providers.{provider}.services.{service}.{check_name}.{check_name}"
check_module_name = module_name.name
# We need to exclude common shared libraries in services
if (
check_module_name.count(".") == 6
and "lib" not in check_module_name
and (not check_module_name.endswith("_fixer") or include_fixers)
):
check_path = module_name.module_finder.path
# Check name is the last part of the check_module_name
check_name = check_module_name.split(".")[-1]
check_info = (check_name, check_path)
checks.append(check_info)
except ModuleNotFoundError:
logger.critical(f"Service {service} was not found for the {provider} provider.")
sys.exit(1)
except Exception as e:
logger.critical(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}]: {e}")
sys.exit(1)
else:
return checks
# List all available modules in the selected provider and service
def list_modules(provider: str, service: str):
# This module path requires the full path including "prowler."
module_path = f"prowler.providers.{provider}.services"
if service:
module_path += f".{service}"
return walk_packages(
importlib.import_module(module_path).__path__,
importlib.import_module(module_path).__name__ + ".",
)
def recover_checks_from_service(service_list: list, provider: str) -> set:
"""
Recover all checks from the selected provider and service
Returns a set of checks from the given services
"""
try:
checks = set()
service_list = [
"awslambda" if service == "lambda" else service for service in service_list
]
for service in service_list:
service_checks = recover_checks_from_provider(provider, service)
if not service_checks:
logger.error(f"Service '{service}' does not have checks.")
else:
for check in service_checks:
# Recover check name and module name from import path
# Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
check_name = check[0].split(".")[-1]
# If the service is present in the group list passed as parameters
# if service_name in group_list: checks_from_arn.add(check_name)
checks.add(check_name)
return checks
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def list_compliance_modules():
"""
list_compliance_modules returns the available compliance frameworks and their paths
"""
# This module path requires the full path including "prowler."
module_path = "prowler.compliance"
return walk_packages(
importlib.import_module(module_path).__path__,
importlib.import_module(module_path).__name__ + ".",
)
+6 -12
View File
@@ -10,9 +10,9 @@ from prowler.config.config import (
default_config_file_path,
default_fixer_config_file_path,
default_output_directory,
finding_statuses,
valid_severities,
)
from prowler.lib.check.models import Severity
from prowler.lib.outputs.common import Status
from prowler.providers.common.arguments import (
init_providers_parser,
validate_provider_arguments,
@@ -138,8 +138,8 @@ Detailed documentation at https://docs.prowler.com
common_outputs_parser.add_argument(
"--status",
nargs="+",
help=f"Filter by the status of the findings {[status.value for status in Status]}",
choices=[status.value for status in Status],
help=f"Filter by the status of the findings {finding_statuses}",
choices=finding_statuses,
)
common_outputs_parser.add_argument(
"--output-formats",
@@ -177,12 +177,6 @@ Detailed documentation at https://docs.prowler.com
common_outputs_parser.add_argument(
"--no-banner", "-b", action="store_true", help="Hide Prowler banner"
)
common_outputs_parser.add_argument(
"--no-color",
action="store_true",
help="Disable color codes in output",
)
common_outputs_parser.add_argument(
"--unix-timestamp",
action="store_true",
@@ -263,8 +257,8 @@ Detailed documentation at https://docs.prowler.com
"--severity",
"--severities",
nargs="+",
help=f"Severities to be executed {[severity.value for severity in Severity]}",
choices=[severity.value for severity in Severity],
help=f"Severities to be executed {valid_severities}",
choices=valid_severities,
)
group.add_argument(
"--compliance",
+6 -90
View File
@@ -5,31 +5,9 @@ import yaml
from prowler.lib.logger import logger
from prowler.lib.mutelist.models import mutelist_schema
from prowler.lib.outputs.common import Status
from prowler.lib.outputs.utils import unroll_dict, unroll_tags
class Mutelist(ABC):
"""
Abstract base class for managing a mutelist.
Attributes:
_mutelist (dict): Dictionary containing information about muted checks for different accounts.
_mutelist_file_path (str): Path to the mutelist file.
MUTELIST_KEY (str): Key used to access the mutelist in the mutelist file.
Methods:
__init__: Initializes a Mutelist object.
mutelist: Property that returns the mutelist dictionary.
mutelist_file_path: Property that returns the mutelist file path.
is_finding_muted: Abstract method to check if a finding is muted.
get_mutelist_file_from_local_file: Retrieves the mutelist file from a local file.
validate_mutelist: Validates the mutelist against a schema.
is_muted: Checks if a finding is muted for the audited account, check, region, resource, and tags.
is_muted_in_check: Checks if a check is muted.
is_excepted: Checks if the account, region, resource, and tags are excepted based on the exceptions.
"""
_mutelist: dict = {}
_mutelist_file_path: str = None
@@ -90,25 +68,6 @@ class Mutelist(ABC):
"""
Check if the provided finding is muted for the audited account, check, region, resource and tags.
The Mutelist ANDs its fields, so a check is muted only when the account, region, resource and tags all match.
Exceptions are ORed, so a finding is excepted (and therefore not muted) if the account, region, resource or tags match.
The one particularity is the tags themselves, which are ORed among each other.
So, for the following Mutelist:
```
Mutelist:
Accounts:
'*':
Checks:
ec2_instance_detailed_monitoring_enabled:
Regions: ['*']
Resources:
- 'i-123456789'
Tags:
- 'Name=AdminInstance | Environment=Prod'
```
The check `ec2_instance_detailed_monitoring_enabled` will be muted for all accounts and regions and for the resource_id 'i-123456789' with at least one of the tags 'Name=AdminInstance' or 'Environment=Prod'.
Args:
mutelist (dict): Dictionary containing information about muted checks for different accounts.
audited_account (str): The account being audited.
@@ -213,9 +172,7 @@ class Mutelist(ABC):
muted_in_resource = self.is_item_matched(
muted_resources, finding_resource
)
muted_in_tags = self.is_item_matched(
muted_tags, finding_tags, tag=True
)
muted_in_tags = self.is_item_matched(muted_tags, finding_tags)
# For a finding to be muted requires the following set to True:
# - muted_in_check -> True
@@ -239,35 +196,6 @@ class Mutelist(ABC):
)
return False
def mute_finding(self, finding):
"""
Check if the provided finding is muted
Args:
finding (Finding): The finding to be evaluated for muting.
Returns:
Finding: The finding with the status updated if it is muted, otherwise the finding is returned
"""
try:
if self.is_muted(
finding.account_uid,
finding.metadata.CheckID,
finding.region,
finding.resource_uid,
unroll_dict(unroll_tags(finding.resource_tags)),
):
finding.raw["status"] = finding.status
finding.status = Status.MUTED
finding.muted = True
return finding
except Exception as error:
logger.error(
f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
)
return finding
def is_excepted(
self,
exceptions,
@@ -312,9 +240,7 @@ class Mutelist(ABC):
)
excepted_tags = exceptions.get("Tags", [])
is_tag_excepted = self.is_item_matched(
excepted_tags, finding_tags, tag=True
)
is_tag_excepted = self.is_item_matched(excepted_tags, finding_tags)
if (
not is_account_excepted
@@ -338,16 +264,13 @@ class Mutelist(ABC):
return False
@staticmethod
def is_item_matched(matched_items, finding_items, tag=False) -> bool:
def is_item_matched(matched_items, finding_items):
"""
Check if any of the items in matched_items are present in finding_items.
Args:
matched_items (list): List of items to be matched.
finding_items (str): String to search for matched items.
tag (bool): If True, the search uses different logic because tags can be ANDed or ORed:
- AND logic -> True only if all the tags are present in the finding.
- OR logic -> True if any of the tags is present in the finding.
Returns:
bool: True if any of the matched_items are present in finding_items, otherwise False.
@@ -355,19 +278,12 @@ class Mutelist(ABC):
try:
is_item_matched = False
if matched_items and (finding_items or finding_items == ""):
if tag:
is_item_matched = True
for item in matched_items:
if item.startswith("*"):
item = ".*" + item[1:]
if tag:
if not re.search(item, finding_items):
is_item_matched = False
break
else:
if re.search(item, finding_items):
is_item_matched = True
break
if re.match(item, finding_items):
is_item_matched = True
break
return is_item_matched
except Exception as error:
logger.error(
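Two things changed in this simplification: the tag-specific AND branch is gone, and re.search became re.match, which anchors the pattern at the start of the string. A standalone copy of the new logic:
import re

def is_item_matched(matched_items, finding_items):
    # Simplified copy of the method above, without the logging wrapper
    is_matched = False
    if matched_items and (finding_items or finding_items == ""):
        for item in matched_items:
            if item.startswith("*"):
                item = ".*" + item[1:]  # translate leading glob '*' to regex
            if re.match(item, finding_items):
                is_matched = True
                break
    return is_matched

assert is_item_matched(["*AdminInstance"], "Name=AdminInstance")
assert not is_item_matched(["prod"], "my-prod")  # re.match anchors at the start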
+50 -21
View File
@@ -25,6 +25,7 @@ class ASFF(Output):
- transform(findings: list[Finding]) -> None: Transforms a list of findings into ASFF format.
- batch_write_data_to_file() -> None: Writes the findings data to a file in JSON ASFF format.
- generate_status(status: str, muted: bool = False) -> str: Generates the ASFF status based on the provided status and muted flag.
- format_resource_tags(tags: str) -> dict: Transforms a string of tags into a dictionary format.
References:
- AWS Security Hub API Reference: https://docs.aws.amazon.com/securityhub/1.0/APIReference/API_Compliance.html
@@ -61,6 +62,7 @@ class ASFF(Output):
if finding.status == "MANUAL":
continue
timestamp = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
resource_tags = ASFF.format_resource_tags(finding.resource_tags)
associated_standards, compliance_summary = ASFF.format_compliance(
finding.compliance
@@ -68,39 +70,36 @@ class ASFF(Output):
# Ensures finding_status matches allowed values in ASFF
finding_status = ASFF.generate_status(finding.status, finding.muted)
self._data.append(
AWSSecurityFindingFormat(
# The following line cannot be changed because it is the format we use to generate unique findings for AWS Security Hub
# If changed some findings could be lost because the unique identifier will be different
Id=f"prowler-{finding.metadata.CheckID}-{finding.account_uid}-{finding.region}-{hash_sha512(finding.resource_uid)}",
Id=f"prowler-{finding.check_id}-{finding.account_uid}-{finding.region}-{hash_sha512(finding.resource_uid)}",
ProductArn=f"arn:{finding.partition}:securityhub:{finding.region}::product/prowler/prowler",
ProductFields=ProductFields(
ProwlerResourceName=finding.resource_uid,
),
GeneratorId="prowler-" + finding.metadata.CheckID,
GeneratorId="prowler-" + finding.check_id,
AwsAccountId=finding.account_uid,
Types=(
finding.metadata.CheckType
if finding.metadata.CheckType
finding.check_type.split(",")
if finding.check_type
else ["Software and Configuration Checks"]
),
FirstObservedAt=timestamp,
UpdatedAt=timestamp,
CreatedAt=timestamp,
Severity=Severity(Label=finding.metadata.Severity.value),
Title=finding.metadata.CheckTitle,
Description=(
(finding.status_extended[:1000] + "...")
if len(finding.status_extended) > 1000
else finding.status_extended
),
Severity=Severity(Label=finding.severity.value),
Title=finding.check_title,
Description=finding.description,
Resources=[
Resource(
Id=finding.resource_uid,
Type=finding.metadata.ResourceType,
Type=finding.resource_type,
Partition=finding.partition,
Region=finding.region,
Tags=finding.resource_tags,
Tags=resource_tags,
)
],
Compliance=Compliance(
@@ -110,8 +109,8 @@ class ASFF(Output):
),
Remediation=Remediation(
Recommendation=Recommendation(
Text=finding.metadata.Remediation.Recommendation.Text,
Url=finding.metadata.Remediation.Recommendation.Url,
Text=finding.remediation_recommendation_text,
Url=finding.remediation_recommendation_url,
)
),
)
@@ -196,6 +195,42 @@ class ASFF(Output):
return json_asff_status
@staticmethod
def format_resource_tags(tags: str) -> dict:
"""
Transforms a string of tags into a dictionary format.
Parameters:
- tags (str): A string containing tags separated by ' | ' and key-value pairs separated by '='.
Returns:
- dict: A dictionary where keys are tag names and values are tag values.
Notes:
- If the input string is empty or None, it returns None.
- Each tag in the input string should be in the format 'key=value'.
- If the input string is not formatted correctly, it logs an error and returns None.
"""
try:
tags_dict = None
if tags:
tags = tags.split(" | ")
tags_dict = {}
for tag in tags:
value = tag.split("=")
tags_dict[value[0]] = value[1]
return tags_dict
except IndexError as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
return None
except AttributeError as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
return None
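The parsing is plain string splitting; a standalone rendition (using maxsplit=1 so values containing "=" survive, a defensive tweak over the bare indexing above):
raw = "Name=AdminInstance | Environment=Prod"
tags_dict = {}
for tag in raw.split(" | "):
    key, value = tag.split("=", 1)  # maxsplit keeps '=' inside values intact
    tags_dict[key] = value
assert tags_dict == {"Name": "AdminInstance", "Environment": "Prod"}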
@staticmethod
def format_compliance(compliance: dict) -> tuple[list[dict], list[str]]:
"""
@@ -281,12 +316,6 @@ class Resource(BaseModel):
Region: str
Tags: Optional[dict]
@validator("Tags", pre=True, always=True)
def tags_cannot_be_empty_dict(tags):
if not tags:
return None
return tags
class Compliance(BaseModel):
"""
+46 -10
View File
@@ -1,26 +1,62 @@
from enum import Enum
from operator import attrgetter
from prowler.config.config import timestamp
from prowler.lib.outputs.utils import unroll_tags
from prowler.lib.logger import logger
from prowler.lib.outputs.utils import unroll_list, unroll_tags
from prowler.lib.utils.utils import outputs_unix_timestamp
def get_provider_data_mapping(provider) -> dict:
data = {}
for generic_field, provider_field in provider.get_output_mapping.items():
try:
provider_value = attrgetter(provider_field)(provider)
data[generic_field] = provider_value
except AttributeError:
data[generic_field] = ""
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
return data
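attrgetter resolves dotted paths, which is what lets the mapping reach nested provider attributes; a minimal sketch with hypothetical classes and mapping keys:
from operator import attrgetter

class Identity:
    account = "123456789012"  # hypothetical account UID

class DummyProvider:
    identity = Identity()
    get_output_mapping = {"account_uid": "identity.account", "missing": "no.such.attr"}

data = {}
for generic_field, provider_field in DummyProvider.get_output_mapping.items():
    try:
        data[generic_field] = attrgetter(provider_field)(DummyProvider)
    except AttributeError:
        data[generic_field] = ""  # same fallback as above
assert data == {"account_uid": "123456789012", "missing": ""}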
# TODO: add test for outputs_unix_timestamp
def fill_common_finding_data(finding: dict, unix_timestamp: bool) -> dict:
finding_data = {
"metadata": finding.check_metadata,
"timestamp": outputs_unix_timestamp(unix_timestamp, timestamp),
"check_id": finding.check_metadata.CheckID,
"check_title": finding.check_metadata.CheckTitle,
"check_type": ",".join(finding.check_metadata.CheckType),
"status": finding.status,
"status_extended": finding.status_extended,
"muted": finding.muted,
"service_name": finding.check_metadata.ServiceName,
"subservice_name": finding.check_metadata.SubServiceName,
"severity": finding.check_metadata.Severity,
"resource_type": finding.check_metadata.ResourceType,
"resource_details": finding.resource_details,
"resource_tags": unroll_tags(finding.resource_tags),
"description": finding.check_metadata.Description,
"risk": finding.check_metadata.Risk,
"related_url": finding.check_metadata.RelatedUrl,
"remediation_recommendation_text": (
finding.check_metadata.Remediation.Recommendation.Text
),
"remediation_recommendation_url": (
finding.check_metadata.Remediation.Recommendation.Url
),
"remediation_code_nativeiac": (
finding.check_metadata.Remediation.Code.NativeIaC
),
"remediation_code_terraform": (
finding.check_metadata.Remediation.Code.Terraform
),
"remediation_code_cli": (finding.check_metadata.Remediation.Code.CLI),
"remediation_code_other": (finding.check_metadata.Remediation.Code.Other),
"categories": unroll_list(finding.check_metadata.Categories),
"depends_on": unroll_list(finding.check_metadata.DependsOn),
"related_to": unroll_list(finding.check_metadata.RelatedTo),
"notes": finding.check_metadata.Notes,
}
return finding_data
class Status(str, Enum):
PASS = "PASS"
FAIL = "FAIL"
MANUAL = "MANUAL"
MUTED = "MUTED"
@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.outputs.compliance.aws_well_architected.models import (
AWSWellArchitectedModel,
)
@@ -21,7 +21,7 @@ class AWSWellArchitected(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: Compliance,
compliance: ComplianceBaseModel,
compliance_name: str,
) -> None:
"""
@@ -29,7 +29,7 @@ class AWSWellArchitected(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (Compliance): A compliance model.
- compliance (ComplianceBaseModel): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:
@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.outputs.compliance.cis.models import AWSCISModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.finding import Finding
@@ -19,7 +19,7 @@ class AWSCIS(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: Compliance,
compliance: ComplianceBaseModel,
compliance_name: str,
) -> None:
"""
@@ -27,7 +27,7 @@ class AWSCIS(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (Compliance): A compliance model.
- compliance (ComplianceBaseModel): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:
@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.outputs.compliance.cis.models import AzureCISModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.finding import Finding
@@ -19,7 +19,7 @@ class AzureCIS(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: Compliance,
compliance: ComplianceBaseModel,
compliance_name: str,
) -> None:
"""
@@ -27,7 +27,7 @@ class AzureCIS(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (Compliance): A compliance model.
- compliance (ComplianceBaseModel): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:
@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.outputs.compliance.cis.models import GCPCISModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.finding import Finding
@@ -19,7 +19,7 @@ class GCPCIS(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: Compliance,
compliance: ComplianceBaseModel,
compliance_name: str,
) -> None:
"""
@@ -27,7 +27,7 @@ class GCPCIS(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (Compliance): A compliance model.
- compliance (ComplianceBaseModel): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:
@@ -1,6 +1,6 @@
from datetime import datetime
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.outputs.compliance.cis.models import KubernetesCISModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.finding import Finding
@@ -21,7 +21,7 @@ class KubernetesCIS(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: Compliance,
compliance: ComplianceBaseModel,
compliance_name: str,
) -> None:
"""
@@ -29,7 +29,7 @@ class KubernetesCIS(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (Compliance): A compliance model.
- compliance (ComplianceBaseModel): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:
@@ -7,7 +7,6 @@ from prowler.lib.outputs.compliance.ens.ens import get_ens_table
from prowler.lib.outputs.compliance.generic.generic_table import (
get_generic_compliance_table,
)
from prowler.lib.outputs.compliance.kisa_ismsp.kisa_ismsp import get_kisa_ismsp_table
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack import (
get_mitre_attack_table,
)
@@ -63,15 +62,6 @@ def display_compliance_table(
output_directory,
compliance_overview,
)
elif "kisa_isms_" in compliance_framework:
get_kisa_ismsp_table(
findings,
bulk_checks_metadata,
compliance_framework,
output_filename,
output_directory,
compliance_overview,
)
else:
get_generic_compliance_table(
findings,
@@ -2,7 +2,7 @@ from csv import DictWriter
from pathlib import Path
from typing import List
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.logger import logger
from prowler.lib.outputs.finding import Finding
from prowler.lib.outputs.output import Output
@@ -28,7 +28,7 @@ class ComplianceOutput(Output):
def __init__(
self,
findings: List[Finding],
compliance: Compliance,
compliance: ComplianceBaseModel,
create_file_descriptor: bool = False,
file_path: str = None,
file_extension: str = "",
@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.ens.models import AWSENSModel
from prowler.lib.outputs.finding import Finding
@@ -19,7 +19,7 @@ class AWSENS(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: Compliance,
compliance: ComplianceBaseModel,
compliance_name: str,
) -> None:
"""
@@ -27,7 +27,7 @@ class AWSENS(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (Compliance): A compliance model.
- compliance (ComplianceBaseModel): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:
@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.generic.models import GenericComplianceModel
from prowler.lib.outputs.finding import Finding
@@ -19,7 +19,7 @@ class GenericCompliance(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: Compliance,
compliance: ComplianceBaseModel,
compliance_name: str,
) -> None:
"""
@@ -27,7 +27,7 @@ class GenericCompliance(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (Compliance): A compliance model.
- compliance (ComplianceBaseModel): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:
@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.iso27001.models import AWSISO27001Model
from prowler.lib.outputs.finding import Finding
@@ -19,7 +19,7 @@ class AWSISO27001(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: Compliance,
compliance: ComplianceBaseModel,
compliance_name: str,
) -> None:
"""
@@ -27,7 +27,7 @@ class AWSISO27001(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (Compliance): A compliance model.
- compliance (ComplianceBaseModel): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:
@@ -1,89 +0,0 @@
from colorama import Fore, Style
from tabulate import tabulate
from prowler.config.config import orange_color
def get_kisa_ismsp_table(
findings: list,
bulk_checks_metadata: dict,
compliance_framework: str,
output_filename: str,
output_directory: str,
compliance_overview: bool,
):
sections = {}
kisa_ismsp_compliance_table = {
"Provider": [],
"Section": [],
"Status": [],
"Muted": [],
}
pass_count = []
fail_count = []
muted_count = []
for index, finding in enumerate(findings):
check = bulk_checks_metadata[finding.check_metadata.CheckID]
check_compliances = check.Compliance
for compliance in check_compliances:
if (
compliance.Framework.startswith("KISA")
and compliance.Version in compliance_framework
):
for requirement in compliance.Requirements:
for attribute in requirement.Attributes:
section = attribute.Section
# Check if Section exists
if section not in sections:
sections[section] = {
"Status": f"{Fore.GREEN}PASS{Style.RESET_ALL}",
"Muted": 0,
}
if finding.muted:
if index not in muted_count:
muted_count.append(index)
sections[section]["Muted"] += 1
else:
if finding.status == "FAIL" and index not in fail_count:
fail_count.append(index)
elif finding.status == "PASS" and index not in pass_count:
pass_count.append(index)
# Add results to table
sections = dict(sorted(sections.items()))
for section in sections:
kisa_ismsp_compliance_table["Provider"].append(compliance.Provider)
kisa_ismsp_compliance_table["Section"].append(section)
kisa_ismsp_compliance_table["Muted"].append(
f"{orange_color}{sections[section]['Muted']}{Style.RESET_ALL}"
)
if len(fail_count) + len(pass_count) + len(muted_count) > 1:
print(
f"\nCompliance Status of {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Framework:"
)
overview_table = [
[
f"{Fore.RED}{round(len(fail_count) / len(findings) * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / len(findings) * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / len(findings) * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
]
]
print(tabulate(overview_table, tablefmt="rounded_grid"))
if not compliance_overview:
print(
f"\nFramework {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Results:"
)
print(
tabulate(
kisa_ismsp_compliance_table,
headers="keys",
tablefmt="rounded_grid",
)
)
print(
f"{Style.BRIGHT}* Only sections containing results appear.{Style.RESET_ALL}"
)
print(f"\nDetailed results of {compliance_framework.upper()} are in:")
print(
f" - CSV: {output_directory}/compliance/{output_filename}_{compliance_framework}.csv\n"
)
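The per-section rollup built by the loop above reduces to this self-contained pattern (section names and muted flags hypothetical):

sections = {}
for section, muted in [("2.5", False), ("2.5", True), ("3.1", False)]:
    # Each section starts as PASS with a muted counter, as in the table code above.
    entry = sections.setdefault(section, {"Status": "PASS", "Muted": 0})
    if muted:
        entry["Muted"] += 1
print(dict(sorted(sections.items())))
# {'2.5': {'Status': 'PASS', 'Muted': 1}, '3.1': {'Status': 'PASS', 'Muted': 0}}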
@@ -1,93 +0,0 @@
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.kisa_ismsp.models import AWSKISAISMSPModel
from prowler.lib.outputs.finding import Finding
class AWSKISAISMSP(ComplianceOutput):
"""
This class represents the AWS KISA-ISMS-P compliance output.
Attributes:
- _data (list): A list to store transformed data from findings.
- _file_descriptor (TextIOWrapper): A file descriptor to write data to a file.
Methods:
- transform: Transforms findings into AWS KISA-ISMS-P compliance format.
"""
def transform(
self,
findings: list[Finding],
compliance: Compliance,
compliance_name: str,
) -> None:
"""
Transforms a list of findings into AWS KISA-ISMS-P compliance format.
Parameters:
- findings (list): A list of findings.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:
- None
"""
for finding in findings:
# Get the compliance requirements for the finding
finding_requirements = finding.compliance.get(compliance_name, [])
for requirement in compliance.Requirements:
if requirement.Id in finding_requirements:
for attribute in requirement.Attributes:
compliance_row = AWSKISAISMSPModel(
Provider=finding.provider,
Description=compliance.Description,
AccountId=finding.account_uid,
Region=finding.region,
AssessmentDate=str(finding.timestamp),
Requirements_Id=requirement.Id,
Requirements_Name=requirement.Name,
Requirements_Description=requirement.Description,
Requirements_Attributes_Domain=attribute.Domain,
Requirements_Attributes_Subdomain=attribute.Subdomain,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_AuditChecklist=attribute.AuditChecklist,
Requirements_Attributes_RelatedRegulations=attribute.RelatedRegulations,
Requirements_Attributes_AuditEvidence=attribute.AuditEvidence,
Requirements_Attributes_NonComplianceCases=attribute.NonComplianceCases,
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_uid,
ResourceName=finding.resource_name,
CheckId=finding.check_id,
Muted=finding.muted,
)
self._data.append(compliance_row)
# Add manual requirements to the compliance output
for requirement in compliance.Requirements:
if not requirement.Checks:
for attribute in requirement.Attributes:
compliance_row = AWSKISAISMSPModel(
Provider=compliance.Provider.lower(),
Description=compliance.Description,
AccountId="",
Region="",
AssessmentDate=str(finding.timestamp),
Requirements_Id=requirement.Id,
Requirements_Name=requirement.Name,
Requirements_Description=requirement.Description,
Requirements_Attributes_Domain=attribute.Domain,
Requirements_Attributes_Subdomain=attribute.Subdomain,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_AuditChecklist=attribute.AuditChecklist,
Requirements_Attributes_RelatedRegulations=attribute.RelatedRegulations,
Requirements_Attributes_AuditEvidence=attribute.AuditEvidence,
Requirements_Attributes_NonComplianceCases=attribute.NonComplianceCases,
Status="MANUAL",
StatusExtended="Manual check",
ResourceId="manual_check",
ResourceName="Manual check",
CheckId="manual",
Muted=False,
)
self._data.append(compliance_row)
@@ -1,31 +0,0 @@
from typing import Optional
from pydantic import BaseModel
class AWSKISAISMSPModel(BaseModel):
"""
The AWS KISA-ISMS-P Model outputs findings in a format compliant with the AWS KISA-ISMS-P standard
"""
Provider: str
Description: str
AccountId: str
Region: str
AssessmentDate: str
Requirements_Id: str
Requirements_Name: str
Requirements_Description: str
Requirements_Attributes_Domain: str
Requirements_Attributes_Subdomain: str
Requirements_Attributes_Section: str
Requirements_Attributes_AuditChecklist: Optional[list[str]]
Requirements_Attributes_RelatedRegulations: Optional[list[str]]
Requirements_Attributes_AuditEvidence: Optional[list[str]]
Requirements_Attributes_NonComplianceCases: Optional[list[str]]
Status: str
StatusExtended: str
ResourceId: str
ResourceName: str
CheckId: str
Muted: bool
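A trimmed pydantic sketch showing how the model's Optional list attributes behave; only two of the fields above are kept and the values are hypothetical:

from typing import Optional

from pydantic import BaseModel

class KISARowSketch(BaseModel):
    Provider: str
    Requirements_Attributes_AuditChecklist: Optional[list[str]]

row = KISARowSketch(Provider="aws", Requirements_Attributes_AuditChecklist=None)
print(row.dict())  # {'Provider': 'aws', 'Requirements_Attributes_AuditChecklist': None}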
@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.mitre_attack.models import AWSMitreAttackModel
from prowler.lib.outputs.finding import Finding
@@ -20,7 +20,7 @@ class AWSMitreAttack(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: Compliance,
compliance: ComplianceBaseModel,
compliance_name: str,
) -> None:
"""
@@ -28,7 +28,7 @@ class AWSMitreAttack(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (Compliance): A compliance model.
- compliance (ComplianceBaseModel): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:
@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.mitre_attack.models import AzureMitreAttackModel
from prowler.lib.outputs.finding import Finding
@@ -20,7 +20,7 @@ class AzureMitreAttack(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: Compliance,
compliance: ComplianceBaseModel,
compliance_name: str,
) -> None:
"""
@@ -28,7 +28,7 @@ class AzureMitreAttack(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (Compliance): A compliance model.
- compliance (ComplianceBaseModel): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:
@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.mitre_attack.models import GCPMitreAttackModel
from prowler.lib.outputs.finding import Finding
@@ -20,7 +20,7 @@ class GCPMitreAttack(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: Compliance,
compliance: ComplianceBaseModel,
compliance_name: str,
) -> None:
"""
@@ -28,7 +28,7 @@ class GCPMitreAttack(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (Compliance): A compliance model.
- compliance (ComplianceBaseModel): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:
+5 -63
@@ -1,5 +1,4 @@
from csv import DictWriter
from typing import List
from prowler.lib.logger import logger
from prowler.lib.outputs.finding import Finding
@@ -8,7 +7,7 @@ from prowler.lib.outputs.utils import unroll_dict, unroll_list
class CSV(Output):
def transform(self, findings: List[Finding]) -> None:
def transform(self, findings: list[Finding]) -> None:
"""Transforms the findings into the CSV format.
Args:
@@ -17,68 +16,11 @@ class CSV(Output):
"""
try:
for finding in findings:
finding_dict = {}
finding_dict["AUTH_METHOD"] = finding.auth_method
finding_dict["TIMESTAMP"] = finding.timestamp
finding_dict["ACCOUNT_UID"] = finding.account_uid
finding_dict["ACCOUNT_NAME"] = finding.account_name
finding_dict["ACCOUNT_EMAIL"] = finding.account_email
finding_dict["ACCOUNT_ORGANIZATION_UID"] = (
finding.account_organization_uid
)
finding_dict["ACCOUNT_ORGANIZATION_NAME"] = (
finding.account_organization_name
)
finding_dict["ACCOUNT_TAGS"] = unroll_dict(
finding.account_tags, separator=":"
)
finding_dict["FINDING_UID"] = finding.uid
finding_dict["PROVIDER"] = finding.metadata.Provider
finding_dict["CHECK_ID"] = finding.metadata.CheckID
finding_dict["CHECK_TITLE"] = finding.metadata.CheckTitle
finding_dict["CHECK_TYPE"] = unroll_list(finding.metadata.CheckType)
finding_dict = {k.upper(): v for k, v in finding.dict().items()}
finding_dict["COMPLIANCE"] = unroll_dict(finding.compliance)
finding_dict["ACCOUNT_TAGS"] = unroll_list(finding.account_tags)
finding_dict["STATUS"] = finding.status.value
finding_dict["STATUS_EXTENDED"] = finding.status_extended
finding_dict["MUTED"] = finding.muted
finding_dict["SERVICE_NAME"] = finding.metadata.ServiceName
finding_dict["SUBSERVICE_NAME"] = finding.metadata.SubServiceName
finding_dict["SEVERITY"] = finding.metadata.Severity.value
finding_dict["RESOURCE_TYPE"] = finding.metadata.ResourceType
finding_dict["RESOURCE_UID"] = finding.resource_uid
finding_dict["RESOURCE_NAME"] = finding.resource_name
finding_dict["RESOURCE_DETAILS"] = finding.resource_details
finding_dict["RESOURCE_TAGS"] = unroll_dict(finding.resource_tags)
finding_dict["PARTITION"] = finding.partition
finding_dict["REGION"] = finding.region
finding_dict["DESCRIPTION"] = finding.metadata.Description
finding_dict["RISK"] = finding.metadata.Risk
finding_dict["RELATED_URL"] = finding.metadata.RelatedUrl
finding_dict["REMEDIATION_RECOMMENDATION_TEXT"] = (
finding.metadata.Remediation.Recommendation.Text
)
finding_dict["REMEDIATION_RECOMMENDATION_URL"] = (
finding.metadata.Remediation.Recommendation.Url
)
finding_dict["REMEDIATION_CODE_NATIVEIAC"] = (
finding.metadata.Remediation.Code.NativeIaC
)
finding_dict["REMEDIATION_CODE_TERRAFORM"] = (
finding.metadata.Remediation.Code.Terraform
)
finding_dict["REMEDIATION_CODE_CLI"] = (
finding.metadata.Remediation.Code.CLI
)
finding_dict["REMEDIATION_CODE_OTHER"] = (
finding.metadata.Remediation.Code.Other
)
finding_dict["COMPLIANCE"] = unroll_dict(
finding.compliance, separator=": "
)
finding_dict["CATEGORIES"] = unroll_list(finding.metadata.Categories)
finding_dict["DEPENDS_ON"] = unroll_list(finding.metadata.DependsOn)
finding_dict["RELATED_TO"] = unroll_list(finding.metadata.RelatedTo)
finding_dict["NOTES"] = finding.metadata.Notes
finding_dict["PROWLER_VERSION"] = finding.prowler_version
finding_dict["SEVERITY"] = finding.severity.value
self._data.append(finding_dict)
except Exception as error:
logger.error(
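The rewritten transform replaces the long field-by-field mapping with a single dict comprehension; a self-contained illustration with hypothetical field values:

finding_fields = {"provider": "aws", "status": "FAIL", "region": "eu-west-1"}
# Uppercase the keys to produce the CSV header names, as in CSV.transform above.
row = {key.upper(): value for key, value in finding_fields.items()}
print(row)  # {'PROVIDER': 'aws', 'STATUS': 'FAIL', 'REGION': 'eu-west-1'}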
+73 -125
@@ -1,17 +1,34 @@
from datetime import datetime
from enum import Enum
from typing import Optional, Union
from pydantic import BaseModel, Field
from pydantic import BaseModel
from prowler.config.config import prowler_version
from prowler.lib.check.models import Check_Report, CheckMetadata
from prowler.lib.check.models import Check_Report
from prowler.lib.logger import logger
from prowler.lib.outputs.common import Status, fill_common_finding_data
from prowler.lib.outputs.common import (
fill_common_finding_data,
get_provider_data_mapping,
)
from prowler.lib.outputs.compliance.compliance import get_check_compliance
from prowler.lib.utils.utils import dict_to_lowercase, get_nested_attribute
from prowler.providers.common.provider import Provider
class Status(str, Enum):
PASS = "PASS"
FAIL = "FAIL"
MANUAL = "MANUAL"
class Severity(str, Enum):
critical = "critical"
high = "high"
medium = "medium"
low = "low"
informational = "informational"
class Finding(BaseModel):
"""
Represents the output model for a finding across different providers.
@@ -24,130 +41,82 @@ class Finding(BaseModel):
auth_method: str
timestamp: Union[int, datetime]
account_uid: str
account_name: Optional[str] = None
account_email: Optional[str] = None
account_organization_uid: Optional[str] = None
account_organization_name: Optional[str] = None
metadata: CheckMetadata
account_tags: dict = {}
uid: str
# Optional since it depends on permissions
account_name: Optional[str]
# Optional since it depends on permissions
account_email: Optional[str]
# Optional since it depends on permissions
account_organization_uid: Optional[str]
# Optional since it depends on permissions
account_organization_name: Optional[str]
# Optional since it depends on permissions
account_tags: Optional[list[str]]
finding_uid: str
provider: str
check_id: str
check_title: str
check_type: str
status: Status
status_extended: str
muted: bool = False
service_name: str
subservice_name: str
severity: Severity
resource_type: str
resource_uid: str
resource_name: str
resource_details: str
resource_tags: dict = Field(default_factory=dict)
partition: Optional[str] = None
resource_tags: str
# Only present for AWS and Azure
partition: Optional[str]
region: str
description: str
risk: str
related_url: str
remediation_recommendation_text: str
remediation_recommendation_url: str
remediation_code_nativeiac: str
remediation_code_terraform: str
remediation_code_cli: str
remediation_code_other: str
compliance: dict
categories: str
depends_on: str
related_to: str
notes: str
prowler_version: str = prowler_version
raw: dict = Field(default_factory=dict)
@property
def provider(self) -> str:
"""
Returns the provider from the finding check's metadata.
"""
return self.metadata.Provider
@property
def check_id(self) -> str:
"""
Returns the ID from the finding check's metadata.
"""
return self.metadata.CheckID
@property
def severity(self) -> str:
"""
Returns the severity from the finding check's metadata.
"""
return self.metadata.Severity
@property
def resource_type(self) -> str:
"""
Returns the resource type from the finding check's metadata.
"""
return self.metadata.ResourceType
@property
def service_name(self) -> str:
"""
Returns the service name from the finding check's metadata.
"""
return self.metadata.ServiceName
def get_metadata(self) -> dict:
"""
Retrieves the metadata of the object and returns it as a dictionary with all keys in lowercase.
Returns:
dict: A dictionary containing the metadata with keys converted to lowercase.
"""
return dict_to_lowercase(self.metadata.dict())
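get_metadata delegates to dict_to_lowercase; assuming that helper only lowercases top-level keys, its effect is equivalent to:

metadata = {"Provider": "aws", "CheckID": "s3_bucket_public_access"}
print({key.lower(): value for key, value in metadata.items()})
# {'provider': 'aws', 'checkid': 's3_bucket_public_access'}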
@classmethod
def generate_output(
cls, provider: Provider, check_output: Check_Report, output_options
cls, provider: Provider, check_output: Check_Report
) -> "Finding":
"""Generates the output for a finding based on the provider and output options
Args:
provider (Provider): the provider object
check_output (Check_Report): the check output object
output_options: the output options object, depending on the provider
Returns:
finding_output (Finding): the finding output object
"""
output_options = provider.output_options
# TODO: think about get_provider_data_mapping
provider_data_mapping = get_provider_data_mapping(provider)
# TODO: move fill_common_finding_data
unix_timestamp = False
if hasattr(output_options, "unix_timestamp"):
unix_timestamp = output_options.unix_timestamp
common_finding_data = fill_common_finding_data(check_output, unix_timestamp)
common_finding_data = fill_common_finding_data(
check_output, output_options.unix_timestamp
)
output_data = {}
output_data.update(provider_data_mapping)
output_data.update(common_finding_data)
bulk_checks_metadata = {}
if hasattr(output_options, "bulk_checks_metadata"):
bulk_checks_metadata = output_options.bulk_checks_metadata
output_data["compliance"] = get_check_compliance(
check_output, provider.type, bulk_checks_metadata
check_output, provider.type, output_options.bulk_checks_metadata
)
try:
output_data["provider"] = provider.type
if provider.type == "aws":
output_data["account_uid"] = get_nested_attribute(
provider, "identity.account"
)
output_data["account_name"] = get_nested_attribute(
provider, "organizations_metadata.account_name"
)
output_data["account_email"] = get_nested_attribute(
provider, "organizations_metadata.account_email"
)
output_data["account_organization_uid"] = get_nested_attribute(
provider, "organizations_metadata.organization_arn"
)
output_data["account_organization_name"] = get_nested_attribute(
provider, "organizations_metadata.organization_id"
)
output_data["account_tags"] = get_nested_attribute(
provider, "organizations_metadata.account_tags"
)
output_data["partition"] = get_nested_attribute(
provider, "identity.partition"
)
# TODO: the Organization UID probably should not include the account id
output_data["auth_method"] = (
f"profile: {get_nested_attribute(provider, 'identity.profile')}"
)
output_data["auth_method"] = f"profile: {output_data['auth_method']}"
output_data["resource_name"] = check_output.resource_id
output_data["resource_uid"] = check_output.resource_arn
output_data["region"] = check_output.region
@@ -158,9 +127,9 @@ class Finding(BaseModel):
f"{provider.identity.identity_type}: {provider.identity.identity_id}"
)
# Get the first tenant domain ID, just in case
output_data["account_organization_uid"] = get_nested_attribute(
provider, "identity.tenant_ids"
)[0]
output_data["account_organization_uid"] = output_data[
"account_organization_uid"
][0]
output_data["account_uid"] = (
output_data["account_organization_uid"]
if "Tenant:" in check_output.subscription
@@ -170,33 +139,15 @@ class Finding(BaseModel):
output_data["resource_name"] = check_output.resource_name
output_data["resource_uid"] = check_output.resource_id
output_data["region"] = check_output.location
# TODO: check the tenant_ids
# TODO: we have to get the account organization, the tenant is not that
output_data["account_organization_name"] = get_nested_attribute(
provider, "identity.tenant_domain"
)
output_data["partition"] = get_nested_attribute(
provider, "region_config.name"
)
# TODO: retrieving the subscription tags is still pending
# "account_tags": "organizations_metadata.account_details_tags",
# TODO: store subscription_name + id pairs
# "account_name": "organizations_metadata.account_details_name",
# "account_email": "organizations_metadata.account_details_email",
elif provider.type == "gcp":
output_data["auth_method"] = (
f"Principal: {get_nested_attribute(provider, 'identity.profile')}"
)
output_data["auth_method"] = f"Principal: {output_data['auth_method']}"
output_data["account_uid"] = provider.projects[
check_output.project_id
].id
output_data["account_name"] = provider.projects[
check_output.project_id
].name
# There is no concept of a project email in GCP
# "account_email": "organizations_metadata.account_details_email",
output_data["account_tags"] = provider.projects[
check_output.project_id
].labels
@@ -215,7 +166,7 @@ class Finding(BaseModel):
check_output.project_id
].organization.id
# TODO: for now is None since we don't retrieve that data
output_data["account_organization_name"] = provider.projects[
output_data["account_organization"] = provider.projects[
check_output.project_id
].organization.display_name
@@ -227,19 +178,16 @@ class Finding(BaseModel):
output_data["resource_name"] = check_output.resource_name
output_data["resource_uid"] = check_output.resource_id
output_data["account_name"] = f"context: {provider.identity.context}"
output_data["account_uid"] = get_nested_attribute(
provider, "identity.cluster"
)
output_data["region"] = f"namespace: {check_output.namespace}"
# check_output Unique ID
# TODO: move this to a function
# TODO: in Azure, GCP and K8s there are findings without resource_name
output_data["uid"] = (
output_data["finding_uid"] = (
f"prowler-{provider.type}-{check_output.check_metadata.CheckID}-{output_data['account_uid']}-"
f"{output_data['region']}-{output_data['resource_name']}"
)
logger.debug("Generating finding: = %s", output_data["finding_uid"])
return cls(**output_data)
except Exception as error:
logger.error(
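The finding_uid assembled above follows a fixed template; a worked example with hypothetical values:

provider_type = "aws"
check_id = "s3_bucket_public_access"
account_uid, region, resource_name = "123456789012", "eu-west-1", "my-bucket"
finding_uid = (
    f"prowler-{provider_type}-{check_id}-{account_uid}-"
    f"{region}-{resource_name}"
)
print(finding_uid)  # prowler-aws-s3_bucket_public_access-123456789012-eu-west-1-my-bucket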
+8 -14
@@ -39,17 +39,17 @@ class HTML(Output):
f"""
<tr class="{row_class}">
<td>{finding_status}</td>
<td>{finding.metadata.Severity.value}</td>
<td>{finding.metadata.ServiceName}</td>
<td>{finding.severity.value}</td>
<td>{finding.service_name}</td>
<td>{finding.region.lower()}</td>
<td>{finding.metadata.CheckID.replace("_", "<wbr />_")}</td>
<td>{finding.metadata.CheckTitle}</td>
<td>{finding.check_id.replace("_", "<wbr />_")}</td>
<td>{finding.check_title}</td>
<td>{finding.resource_uid.replace("<", "&lt;").replace(">", "&gt;").replace("_", "<wbr />_")}</td>
<td>{parse_html_string(unroll_dict(finding.resource_tags))}</td>
<td>{parse_html_string(finding.resource_tags)}</td>
<td>{finding.status_extended.replace("<", "&lt;").replace(">", "&gt;").replace("_", "<wbr />_")}</td>
<td><p class="show-read-more">{html.escape(finding.metadata.Risk)}</p></td>
<td><p class="show-read-more">{html.escape(finding.metadata.Remediation.Recommendation.Text)}</p> <a class="read-more" href="{finding.metadata.Remediation.Recommendation.Url}"><i class="fas fa-external-link-alt"></i></a></td>
<td><p class="show-read-more">{parse_html_string(unroll_dict(finding.compliance, separator=": "))}</p></td>
<td><p class="show-read-more">{html.escape(finding.risk)}</p></td>
<td><p class="show-read-more">{html.escape(finding.remediation_recommendation_text)}</p> <a class="read-more" href="{finding.remediation_recommendation_url}"><i class="fas fa-external-link-alt"></i></a></td>
<td><p class="show-read-more">{parse_html_string(unroll_dict(finding.compliance))}</p></td>
</tr>
"""
)
@@ -173,15 +173,9 @@ class HTML(Output):
<li class="list-group-item">
<b>Passed:</b> {str(stats.get("total_pass", 0))}
</li>
<li class="list-group-item">
<b>Passed (Muted):</b> {str(stats.get("total_muted_pass", 0))}
</li>
<li class="list-group-item">
<b>Failed:</b> {str(stats.get("total_fail", 0))}
</li>
<li class="list-group-item">
<b>Failed (Muted):</b> {str(stats.get("total_muted_fail", 0))}
</li>
<li class="list-group-item">
<b>Total Resources:</b> {str(stats.get("resources_count", 0))}
</li>
+63 -73
@@ -9,6 +9,7 @@ from py_ocsf_models.events.findings.detection_finding import (
from py_ocsf_models.events.findings.finding import ActivityID, FindingInformation
from py_ocsf_models.objects.account import Account, TypeID
from py_ocsf_models.objects.cloud import Cloud
from py_ocsf_models.objects.container import Container
from py_ocsf_models.objects.group import Group
from py_ocsf_models.objects.metadata import Metadata
from py_ocsf_models.objects.organization import Organization
@@ -19,7 +20,6 @@ from py_ocsf_models.objects.resource_details import ResourceDetails
from prowler.lib.logger import logger
from prowler.lib.outputs.finding import Finding
from prowler.lib.outputs.output import Output
from prowler.lib.outputs.utils import unroll_dict_to_list
class OCSF(Output):
@@ -36,7 +36,7 @@ class OCSF(Output):
- transform(findings: List[Finding]) -> None: Transforms the findings into the OCSF Detection Finding format.
- batch_write_data_to_file() -> None: Writes the findings to a file using the OCSF Detection Finding format using the `Output._file_descriptor`.
- get_account_type_id_by_provider(provider: str) -> TypeID: Returns the TypeID based on the provider.
- get_finding_status_id(muted: bool) -> StatusID: Returns the StatusID based on the muted value.
- get_finding_status_id(status: str, muted: bool) -> StatusID: Returns the StatusID based on the status and muted values.
References:
- OCSF: https://schema.ocsf.io/1.2.0/classes/detection_finding
@@ -53,42 +53,37 @@ class OCSF(Output):
for finding in findings:
finding_activity = ActivityID.Create
cloud_account_type = self.get_account_type_id_by_provider(
finding.metadata.Provider
finding.provider
)
finding_severity = getattr(
SeverityID,
finding.metadata.Severity.capitalize(),
SeverityID.Unknown,
SeverityID, finding.severity.capitalize(), SeverityID.Unknown
)
finding_status = self.get_finding_status_id(
finding.status, finding.muted
)
finding_status = self.get_finding_status_id(finding.muted)
detection_finding = DetectionFinding(
message=finding.status_extended,
activity_id=finding_activity.value,
activity_name=finding_activity.name,
finding_info=FindingInformation(
created_time_dt=finding.timestamp,
created_time=int(finding.timestamp.timestamp()),
desc=finding.metadata.Description,
title=finding.metadata.CheckTitle,
uid=finding.uid,
name=finding.resource_name,
created_time=finding.timestamp,
desc=finding.description,
title=finding.check_title,
uid=finding.finding_uid,
product_uid="prowler",
types=finding.metadata.CheckType,
),
time_dt=finding.timestamp,
time=int(finding.timestamp.timestamp()),
event_time=finding.timestamp,
remediation=Remediation(
desc=finding.metadata.Remediation.Recommendation.Text,
desc=finding.remediation_recommendation_text,
references=list(
filter(
None,
[
finding.metadata.Remediation.Code.NativeIaC,
finding.metadata.Remediation.Code.Terraform,
finding.metadata.Remediation.Code.CLI,
finding.metadata.Remediation.Code.Other,
finding.metadata.Remediation.Recommendation.Url,
finding.remediation_code_nativeiac,
finding.remediation_code_terraform,
finding.remediation_code_cli,
finding.remediation_code_other,
finding.remediation_recommendation_url,
],
)
),
@@ -99,73 +94,65 @@ class OCSF(Output):
status=finding_status.name,
status_code=finding.status,
status_detail=finding.status_extended,
risk_details=finding.metadata.Risk,
resources=(
[
ResourceDetails(
labels=unroll_dict_to_list(finding.resource_tags),
name=finding.resource_name,
uid=finding.resource_uid,
group=Group(name=finding.metadata.ServiceName),
type=finding.metadata.ResourceType,
# TODO: this should be included only if using the Cloud profile
cloud_partition=finding.partition,
region=finding.region,
data={"details": finding.resource_details},
)
]
if finding.metadata.Provider != "kubernetes"
else [
ResourceDetails(
labels=unroll_dict_to_list(finding.resource_tags),
name=finding.resource_name,
uid=finding.resource_uid,
group=Group(name=finding.metadata.ServiceName),
type=finding.metadata.ResourceType,
data={"details": finding.resource_details},
namespace=finding.region.replace("namespace: ", ""),
)
]
),
risk_details=finding.risk,
resources=[
ResourceDetails(
# TODO: Check labels for other providers
labels=(
finding.resource_tags.split(",")
if finding.resource_tags
else []
),
name=finding.resource_name,
uid=finding.resource_uid,
group=Group(name=finding.service_name),
type=finding.resource_type,
# TODO: this should be included only if using the Cloud profile
cloud_partition=finding.partition,
region=finding.region,
data={"details": finding.resource_details},
)
],
metadata=Metadata(
event_code=finding.metadata.CheckID,
event_code=finding.check_id,
product=Product(
uid="prowler",
name="Prowler",
vendor_name="Prowler",
version=finding.prowler_version,
),
profiles=(
["cloud", "datetime"]
if finding.metadata.Provider != "kubernetes"
else ["container", "datetime"]
),
tenant_uid=finding.account_organization_uid,
),
type_uid=DetectionFindingTypeID.Create,
type_name=f"Detection Finding: {DetectionFindingTypeID.Create.name}",
type_name=DetectionFindingTypeID.Create.name,
unmapped={
"related_url": finding.metadata.RelatedUrl,
"categories": finding.metadata.Categories,
"depends_on": finding.metadata.DependsOn,
"related_to": finding.metadata.RelatedTo,
"notes": finding.metadata.Notes,
"check_type": finding.check_type,
"related_url": finding.related_url,
"categories": finding.categories,
"depends_on": finding.depends_on,
"related_to": finding.related_to,
"notes": finding.notes,
"compliance": finding.compliance,
},
)
if finding.provider != "kubernetes":
if finding.provider == "kubernetes":
detection_finding.container = Container(
name=finding.resource_name,
uid=finding.resource_uid,
)
# TODO: Get the PID of the namespace (we only have the name of the namespace)
# detection_finding.namespace_pid=,
else:
detection_finding.cloud = Cloud(
account=Account(
name=finding.account_name,
type_id=cloud_account_type.value,
type=cloud_account_type.name.replace("_", " "),
type=cloud_account_type.name,
uid=finding.account_uid,
labels=unroll_dict_to_list(finding.account_tags),
labels=finding.account_tags,
),
org=Organization(
uid=finding.account_organization_uid,
name=finding.account_organization_name,
# TODO: add the org unit id and name
),
provider=finding.provider,
region=finding.region,
@@ -225,17 +212,20 @@ class OCSF(Output):
return type_id
@staticmethod
def get_finding_status_id(muted: bool) -> StatusID:
def get_finding_status_id(status: str, muted: bool) -> StatusID:
"""
Returns the StatusID based on the muted value.
Returns the StatusID based on the status and muted values.
Args:
status (str): The status value
muted (bool): The muted value
Returns:
StatusID: The StatusID based on the muted value
StatusID: The StatusID based on the status and muted values
"""
status_id = StatusID.New
status_id = StatusID.Other
if status == "FAIL":
status_id = StatusID.New
if muted:
status_id = StatusID.Suppressed
return status_id
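The precedence in get_finding_status_id (muted overrides FAIL) can be exercised with plain strings; the StatusID member names are mirrored here as a sketch:

def status_name_sketch(status: str, muted: bool) -> str:
    # Other by default, New for FAIL, Suppressed whenever the finding is muted.
    name = "Other"
    if status == "FAIL":
        name = "New"
    if muted:
        name = "Suppressed"
    return name

assert status_name_sketch("PASS", False) == "Other"
assert status_name_sketch("FAIL", False) == "New"
assert status_name_sketch("FAIL", True) == "Suppressed"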
+7 -63
@@ -25,12 +25,9 @@ def stdout_report(finding, color, verbose, status, fix):
)
# TODO: Only pass check_findings, output_options and provider.type
def report(check_findings, provider, output_options):
def report(check_findings, provider):
try:
verbose = False
if hasattr(output_options, "verbose"):
verbose = output_options.verbose
output_options = provider.output_options
if check_findings:
# TO-DO Generic Function
if provider.type == "aws":
@@ -41,27 +38,21 @@ def report(check_findings, provider, output_options):
for finding in check_findings:
# Print findings by stdout
status = []
if hasattr(output_options, "status"):
status = output_options.status
fixer = False
if hasattr(output_options, "fixer"):
fixer = output_options.fixer
color = set_report_color(finding.status, finding.muted)
stdout_report(
finding,
color,
verbose,
status,
fixer,
output_options.verbose,
output_options.status,
output_options.fixer,
)
else: # No service resources in the whole account
color = set_report_color("MANUAL")
if verbose:
if output_options.verbose:
print(f"\t{color}INFO{Style.RESET_ALL} There are no resources")
# Separator between findings and bar
if verbose:
if output_options.verbose:
print()
except Exception as error:
logger.error(
@@ -90,82 +81,35 @@ def extract_findings_statistics(findings: list) -> dict:
extract_findings_statistics takes a list of findings and returns the following dict with the aggregated statistics
{
"total_pass": 0,
"total_muted_pass": 0,
"total_fail": 0,
"total_muted_fail": 0,
"resources_count": 0,
"findings_count": 0,
"critical_failed_findings": [],
"critical_passed_findings": []
"all_fails_are_muted": False
}
"""
logger.info("Extracting audit statistics...")
stats = {}
total_pass = 0
total_fail = 0
muted_pass = 0
muted_fail = 0
resources = set()
findings_count = 0
all_fails_are_muted = True
critical_severity_pass = 0
critical_severity_fail = 0
high_severity_pass = 0
high_severity_fail = 0
medium_severity_pass = 0
medium_severity_fail = 0
low_severity_pass = 0
low_severity_fail = 0
for finding in findings:
# Save the resource_id
resources.add(finding.resource_id)
if finding.status == "PASS":
if finding.check_metadata.Severity == "critical":
critical_severity_pass += 1
if finding.check_metadata.Severity == "high":
high_severity_pass += 1
if finding.check_metadata.Severity == "medium":
medium_severity_pass += 1
if finding.check_metadata.Severity == "low":
low_severity_pass += 1
total_pass += 1
findings_count += 1
if finding.muted is True:
muted_pass += 1
if finding.status == "FAIL":
if finding.check_metadata.Severity == "critical":
critical_severity_fail += 1
if finding.check_metadata.Severity == "high":
high_severity_fail += 1
if finding.check_metadata.Severity == "medium":
medium_severity_fail += 1
if finding.check_metadata.Severity == "low":
low_severity_fail += 1
total_fail += 1
findings_count += 1
if finding.muted is True:
muted_fail += 1
if not finding.muted and all_fails_are_muted:
all_fails_are_muted = False
stats["total_pass"] = total_pass
stats["total_muted_pass"] = muted_pass
stats["total_fail"] = total_fail
stats["total_muted_fail"] = muted_fail
stats["resources_count"] = len(resources)
stats["findings_count"] = findings_count
stats["total_critical_severity_fail"] = critical_severity_fail
stats["total_critical_severity_pass"] = critical_severity_pass
stats["total_high_severity_fail"] = high_severity_fail
stats["total_high_severity_pass"] = high_severity_pass
stats["total_medium_severity_fail"] = medium_severity_fail
stats["total_medium_severity_pass"] = medium_severity_pass
stats["total_low_severity_fail"] = medium_severity_fail
stats["total_low_severity_pass"] = medium_severity_pass
stats["all_fails_are_muted"] = all_fails_are_muted
return stats
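A quick way to exercise the simplified statistics; the Finding stand-ins below are hypothetical and only carry the attributes the function reads:

from types import SimpleNamespace

findings = [
    SimpleNamespace(resource_id="r1", status="PASS", muted=False),
    SimpleNamespace(resource_id="r2", status="FAIL", muted=True),
]
# Passed to extract_findings_statistics above, this should yield total_pass=1,
# total_fail=1, resources_count=2, findings_count=2 and
# all_fails_are_muted=True, since the only FAIL is muted.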
@@ -1,61 +0,0 @@
from prowler.exceptions.exceptions import ProwlerException
# Exceptions codes from 8000 to 8999 are reserved for Slack exceptions
class SlackBaseException(ProwlerException):
"""Base class for Slack errors."""
SLACK_ERROR_CODES = {
(8000, "SlackClientError"): {
"message": "Slack ClientError occurred",
"remediation": "Check your Slack client configuration and permissions.",
},
(8001, "SlackNoCredentialsError"): {
"message": "Invalid Slack credentials found",
"remediation": "Some aspect of authentication cannot be validated. Either the provided token is invalid or the request originates from an IP address disallowed from making the request.",
},
(8002, "SlackChannelNotFound"): {
"message": "Slack channel not found",
"remediation": "Check the channel name and ensure it exists.",
},
}
def __init__(self, code, file=None, original_exception=None, message=None):
error_info = self.SLACK_ERROR_CODES.get((code, self.__class__.__name__))
if message:
error_info["message"] = message
super().__init__(
code,
source="Slack",
file=file,
original_exception=original_exception,
error_info=error_info,
)
class SlackCredentialsError(SlackBaseException):
"""Base class for Slack credentials errors."""
def __init__(self, code, file=None, original_exception=None, message=None):
super().__init__(code, file, original_exception, message)
class SlackClientError(SlackCredentialsError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
8000, file=file, original_exception=original_exception, message=message
)
class SlackNoCredentialsError(SlackCredentialsError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
8001, file=file, original_exception=original_exception, message=message
)
class SlackChannelNotFound(SlackCredentialsError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
8002, file=file, original_exception=original_exception, message=message
)
+1 -97
@@ -1,4 +1,3 @@
import os
from typing import Any
from slack_sdk import WebClient
@@ -6,12 +5,6 @@ from slack_sdk.web.base_client import SlackResponse
from prowler.config.config import aws_logo, azure_logo, gcp_logo, square_logo_img
from prowler.lib.logger import logger
from prowler.lib.outputs.slack.exceptions.exceptions import (
SlackChannelNotFound,
SlackClientError,
SlackNoCredentialsError,
)
from prowler.providers.common.models import Connection
class Slack:
@@ -50,7 +43,6 @@ class Slack:
username="Prowler",
icon_url=square_logo_img,
channel=f"#{self.channel}",
text="Prowler Scan Summary",
blocks=self.__create_message_blocks__(identity, logo, stats, args),
)
return response
@@ -58,6 +50,7 @@ class Slack:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
return error
def __create_message_identity__(self, provider: Any):
"""
@@ -128,19 +121,6 @@ class Slack:
"text": f"\n:white_check_mark: *{stats['total_pass']} Passed findings* ({round(stats['total_pass'] / stats['findings_count'] * 100 , 2)}%)\n",
},
},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": (
"*Severities:*\n"
f"• *Critical:* {stats['total_critical_severity_pass']} "
f"• *High:* {stats['total_high_severity_pass']} "
f"• *Medium:* {stats['total_medium_severity_pass']} "
f"• *Low:* {stats['total_low_severity_pass']}"
),
},
},
{
"type": "section",
"text": {
@@ -148,19 +128,6 @@ class Slack:
"text": f"\n:x: *{stats['total_fail']} Failed findings* ({round(stats['total_fail'] / stats['findings_count'] * 100 , 2)}%)\n ",
},
},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": (
"*Severities:*\n"
f"• *Critical:* {stats['total_critical_severity_fail']} "
f"• *High:* {stats['total_high_severity_fail']} "
f"• *Medium:* {stats['total_medium_severity_fail']} "
f"• *Low:* {stats['total_low_severity_fail']}"
),
},
},
{
"type": "section",
"text": {
@@ -237,66 +204,3 @@ class Slack:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
@staticmethod
def test_connection(
token: str,
channel: str,
raise_on_exception: bool = True,
) -> Connection:
"""
Test the Slack connection by validating the provided token and channel.
Args:
token (str): The Slack token to be tested.
channel (str): The Slack channel to be validated.
Returns:
Connection: A Connection object.
"""
try:
client = WebClient(token=token)
# Test if the token is valid
auth_response = client.auth_test()
if auth_response["ok"]:
# Test if the channel is accessible
channels_response = client.conversations_info(
token=token, channel=channel
)
if channels_response["ok"]:
return Connection(is_connected=True)
else:
exception = SlackChannelNotFound(
file=os.path.basename(__file__),
message=(
channels_response["error"]
if "error" in channels_response
else "Unknown error"
),
)
if raise_on_exception:
raise exception
return Connection(error=exception)
else:
exception = SlackNoCredentialsError(
file=os.path.basename(__file__),
message=(
auth_response["error"]
if "error" in auth_response
else "Unknown error"
),
)
if raise_on_exception:
raise exception
return Connection(error=exception)
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
if raise_on_exception:
raise SlackClientError(
file=os.path.basename(__file__),
original_exception=error,
) from error
return Connection(error=error)
+49 -153
@@ -1,24 +1,4 @@
def unroll_list(listed_items: list, separator: str = "|") -> str:
"""
Unrolls a list of items into a single string, separated by a specified separator.
Args:
listed_items (list): The list of items to be unrolled.
separator (str, optional): The separator to be used between the items. Defaults to "|".
Returns:
str: The unrolled string.
Examples:
>>> unroll_list(['apple', 'banana', 'orange'])
'apple | banana | orange'
>>> unroll_list(['apple', 'banana', 'orange'], separator=',')
'apple, banana, orange'
>>> unroll_list([])
''
"""
def unroll_list(listed_items: list, separator: str = "|"):
unrolled_items = ""
if listed_items:
for item in listed_items:
@@ -33,138 +13,70 @@ def unroll_list(listed_items: list, separator: str = "|") -> str:
return unrolled_items
def unroll_tags(tags: list) -> dict:
"""
Unrolls a list of tags into a dictionary.
Args:
tags (list): A list of tags.
Returns:
dict: A dictionary containing the unrolled tags.
Examples:
>>> tags = [{"key": "name", "value": "John"}, {"key": "age", "value": "30"}]
>>> unroll_tags(tags)
{'name': 'John', 'age': '30'}
>>> tags = [{"Key": "name", "Value": "John"}, {"Key": "age", "Value": "30"}]
>>> unroll_tags(tags)
{'name': 'John', 'age': '30'}
>>> tags = [{"key": "name"}]
>>> unroll_tags(tags)
{'name': ''}
>>> tags = [{"Key": "name"}]
>>> unroll_tags(tags)
{'name': ''}
>>> tags = [{"name": "John", "age": "30"}]
>>> unroll_tags(tags)
{'name': 'John', 'age': '30'}
>>> tags = []
>>> unroll_tags(tags)
{}
>>> tags = {"name": "John", "age": "30"}
>>> unroll_tags(tags)
{'name': 'John', 'age': '30'}
>>> tags = ["name", "age"]
>>> unroll_tags(tags)
{'name': '', 'age': ''}
"""
if tags and tags != [{}] and tags != [None] and tags != []:
if isinstance(tags, dict):
return tags
if isinstance(tags[0], str) and len(tags) > 0:
return {tag: "" for tag in tags}
if "key" in tags[0]:
return {item["key"]: item.get("value", "") for item in tags}
elif "Key" in tags[0]:
return {item["Key"]: item.get("Value", "") for item in tags}
else:
return {key: value for d in tags for key, value in d.items()}
return {}
def unroll_dict(dict: dict, separator: str = "=") -> str:
"""
Unrolls a dictionary into a string representation.
Args:
dict (dict): The dictionary to be unrolled.
Returns:
str: The unrolled string representation of the dictionary.
Examples:
>>> my_dict = {'name': 'John', 'age': 30, 'hobbies': ['reading', 'coding']}
>>> unroll_dict(my_dict)
'name: John | age: 30 | hobbies: reading, coding'
"""
def unroll_tags(tags: list):
unrolled_items = ""
for key, value in dict.items():
if isinstance(value, list):
value = ", ".join(value)
if not unrolled_items:
unrolled_items = f"{key}{separator}{value}"
else:
unrolled_items = f"{unrolled_items} | {key}{separator}{value}"
separator = "|"
if tags and tags != [{}] and tags != [None]:
for item in tags:
# Check if there are tags in list
if isinstance(item, dict):
for key, value in item.items():
if not unrolled_items:
# Check the pattern of tags (Key:Value or Key:key/Value:value)
if "Key" != key and "Value" != key:
unrolled_items = f"{key}={value}"
else:
if "Key" == key:
unrolled_items = f"{value}="
else:
unrolled_items = f"{value}"
else:
if "Key" != key and "Value" != key:
unrolled_items = (
f"{unrolled_items} {separator} {key}={value}"
)
else:
if "Key" == key:
unrolled_items = (
f"{unrolled_items} {separator} {value}="
)
else:
unrolled_items = f"{unrolled_items}{value}"
elif not unrolled_items:
unrolled_items = f"{item}"
else:
unrolled_items = f"{unrolled_items} {separator} {item}"
return unrolled_items
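A minimal sketch of the restored string-based unroll_tags for the common {"Key": ..., "Value": ...} tag shape; spacing and edge cases are approximated from the loop above:

def unroll_tags_sketch(tags: list) -> str:
    parts = []
    for item in tags:
        # Only the AWS-style Key/Value tag dicts are handled in this sketch.
        if isinstance(item, dict) and "Key" in item:
            parts.append(f"{item['Key']}={item.get('Value', '')}")
    return " | ".join(parts)

print(unroll_tags_sketch([{"Key": "env", "Value": "prod"}, {"Key": "team", "Value": "sec"}]))
# env=prod | team=sec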
def unroll_dict_to_list(dict: dict) -> list:
"""
Unrolls a dictionary into a list of key-value pairs.
def unroll_dict(dict: dict):
unrolled_items = ""
separator = "|"
for key, value in dict.items():
if isinstance(value, list):
value = ", ".join(value)
if not unrolled_items:
unrolled_items = f"{key}: {value}"
else:
unrolled_items = f"{unrolled_items} {separator} {key}: {value}"
Args:
dict (dict): The dictionary to be unrolled.
return unrolled_items
Returns:
list: A list of key-value pairs, where each pair is represented as a string.
Examples:
>>> my_dict = {'name': 'John', 'age': 30, 'hobbies': ['reading', 'coding']}
>>> unroll_dict_to_list(my_dict)
['name: John', 'age: 30', 'hobbies: reading, coding']
"""
def unroll_dict_to_list(dict: dict):
dict_list = []
for key, value in dict.items():
if isinstance(value, list):
value = ", ".join(value)
dict_list.append(f"{key}:{value}")
dict_list.append(f"{key}: {value}")
else:
dict_list.append(f"{key}:{value}")
dict_list.append(f"{key}: {value}")
return dict_list
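unroll_dict_to_list's contract, restated as a self-contained sketch (input values hypothetical):

def unroll_dict_to_list_sketch(d: dict) -> list:
    out = []
    for key, value in d.items():
        if isinstance(value, list):
            value = ", ".join(value)  # flatten nested lists, as above
        out.append(f"{key}: {value}")
    return out

print(unroll_dict_to_list_sketch({"name": "John", "hobbies": ["reading", "coding"]}))
# ['name: John', 'hobbies: reading, coding']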
def parse_json_tags(tags: list) -> dict[str, str]:
"""
Parses a list of JSON tags and returns a dictionary of key-value pairs.
Args:
tags (list): A list of JSON tags.
Returns:
dict: A dictionary containing the parsed key-value pairs from the tags.
Examples:
>>> tags = [
... {"Key": "Name", "Value": "John"},
... {"Key": "Age", "Value": "30"},
... {"Key": "City", "Value": "New York"}
... ]
>>> parse_json_tags(tags)
{'Name': 'John', 'Age': '30', 'City': 'New York'}
"""
def parse_json_tags(tags: list):
dict_tags = {}
if tags and tags != [{}] and tags != [None]:
for tag in tags:
@@ -176,23 +88,7 @@ def parse_json_tags(tags: list) -> dict[str, str]:
return dict_tags
def parse_html_string(str: str) -> str:
"""
Parses a string and returns a formatted HTML string.
This function takes an input string and splits it using the delimiter " | ".
It then formats each element of the split string as a bullet point in HTML format.
Args:
str (str): The input string to be parsed.
Returns:
str: The formatted HTML string.
Example:
>>> parse_html_string("item1 | item2 | item3")
'\n&#x2022;item1\n\n&#x2022;item2\n\n&#x2022;item3\n'
"""
def parse_html_string(str: str):
string = ""
for elem in str.split(" | "):
if elem:
-88
@@ -1,88 +0,0 @@
from prowler.exceptions.exceptions import ProwlerException
# Exceptions codes from 5000 to 5999 are reserved for Scan exceptions
class ScanBaseException(ProwlerException):
"""Base class for Scan errors."""
SCAN_ERROR_CODES = {
(5000, "ScanInvalidSeverityError"): {
"message": "Invalid severity level provided.",
"remediation": "Please provide a valid severity level. Valid severities are: critical, high, medium, low, informational.",
},
(5001, "ScanInvalidCheckError"): {
"message": "Invalid check provided.",
"remediation": "Please provide a valid check name.",
},
(5002, "ScanInvalidServiceError"): {
"message": "Invalid service provided.",
"remediation": "Please provide a valid service name.",
},
(5003, "ScanInvalidComplianceFrameworkError"): {
"message": "Invalid compliance framework provided.",
"remediation": "Please provide a valid compliance framework name for the chosen provider.",
},
(5004, "ScanInvalidCategoryError"): {
"message": "Invalid category provided.",
"remediation": "Please provide a valid category name.",
},
(5005, "ScanInvalidStatusError"): {
"message": "Invalid status provided.",
"remediation": "Please provide a valid status: FAIL, PASS, MANUAL.",
},
}
def __init__(self, code, file=None, original_exception=None, message=None):
module = "Scan"
error_info = self.SCAN_ERROR_CODES.get((code, self.__class__.__name__))
if message:
error_info["message"] = message
super().__init__(
code=code,
source=module,
file=file,
original_exception=original_exception,
error_info=error_info,
)
class ScanInvalidSeverityError(ScanBaseException):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
5000, file=file, original_exception=original_exception, message=message
)
class ScanInvalidCheckError(ScanBaseException):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
5001, file=file, original_exception=original_exception, message=message
)
class ScanInvalidServiceError(ScanBaseException):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
5002, file=file, original_exception=original_exception, message=message
)
class ScanInvalidComplianceFrameworkError(ScanBaseException):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
5003, file=file, original_exception=original_exception, message=message
)
class ScanInvalidCategoryError(ScanBaseException):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
5004, file=file, original_exception=original_exception, message=message
)
class ScanInvalidStatusError(ScanBaseException):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
5005, file=file, original_exception=original_exception, message=message
)
+48 -367
@@ -1,379 +1,60 @@
import datetime
from typing import Generator
from typing import Any
from prowler.lib.check.check import (
execute,
import_check,
list_services,
update_audit_metadata,
)
from prowler.lib.check.checks_loader import load_checks_to_execute
from prowler.lib.check.compliance import update_checks_metadata_with_compliance
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.models import CheckMetadata, Severity
from prowler.lib.check.check import execute
from prowler.lib.check.models import Check_Report
from prowler.lib.logger import logger
from prowler.lib.outputs.common import Status
from prowler.lib.outputs.finding import Finding
from prowler.lib.scan.exceptions.exceptions import (
ScanInvalidCategoryError,
ScanInvalidCheckError,
ScanInvalidComplianceFrameworkError,
ScanInvalidServiceError,
ScanInvalidSeverityError,
ScanInvalidStatusError,
)
from prowler.providers.common.models import Audit_Metadata
from prowler.providers.common.provider import Provider
class Scan:
_provider: Provider
# Refactor(Core): This should replace the Audit_Metadata
_number_of_checks_to_execute: int = 0
_number_of_checks_completed: int = 0
# TODO the str should be a set of Check objects
_checks_to_execute: list[str]
_service_checks_to_execute: dict[str, set[str]]
_service_checks_completed: dict[str, set[str]]
_progress: float = 0.0
_findings: list = []
_duration: int = 0
_status: list[str] = None
def scan(
checks_to_execute: list,
global_provider: Any,
custom_checks_metadata: Any,
) -> list[Check_Report]:
try:
# List to store all the check's findings
all_findings = []
# Services and checks executed for the Audit Status
services_executed = set()
checks_executed = set()
def __init__(
self,
provider: Provider,
checks: list[str] = None,
services: list[str] = None,
compliances: list[str] = None,
categories: list[str] = None,
severities: list[str] = None,
excluded_checks: list[str] = None,
excluded_services: list[str] = None,
status: list[str] = None,
):
"""
Scan is the class that executes the checks and yields the progress and the findings.
# Initialize the Audit Metadata
# TODO: this should be done in the provider class
# Refactor(Core): Audit manager?
global_provider.audit_metadata = Audit_Metadata(
services_scanned=0,
expected_checks=checks_to_execute,
completed_checks=0,
audit_progress=0,
)
Params:
provider: Provider -> The provider to scan
checks: list[str] -> The checks to execute
services: list[str] -> The services to scan
compliances: list[str] -> The compliances to check
categories: list[str] -> The categories of the checks
severities: list[str] -> The severities of the checks
excluded_checks: list[str] -> The checks to exclude
excluded_services: list[str] -> The services to exclude
status: list[str] -> The status of the checks
Raises:
ScanInvalidCheckError: If the check does not exist in the provider or is from another provider.
ScanInvalidServiceError: If the service does not exist in the provider.
ScanInvalidComplianceFrameworkError: If the compliance framework does not exist in the provider.
ScanInvalidCategoryError: If the category does not exist in the provider.
ScanInvalidSeverityError: If the severity does not exist in the provider.
ScanInvalidStatusError: If the status does not exist in the provider.
"""
self._provider = provider
# Validate the status
if status:
for check_name in checks_to_execute:
try:
for s in status:
Status(s)
if not self._status:
self._status = []
if s not in self._status:
self._status.append(s)
except ValueError:
raise ScanInvalidStatusError(f"Invalid status provided: {s}.")
# Recover service from check name
service = check_name.split("_")[0]
# Load bulk compliance frameworks
bulk_compliance_frameworks = Compliance.get_bulk(provider.type)
check_findings = execute(
service,
check_name,
global_provider,
services_executed,
checks_executed,
custom_checks_metadata,
)
all_findings.extend(check_findings)
# Get bulk checks metadata for the provider
bulk_checks_metadata = CheckMetadata.get_bulk(provider.type)
# Complete checks metadata with the compliance framework specification
bulk_checks_metadata = update_checks_metadata_with_compliance(
bulk_compliance_frameworks, bulk_checks_metadata
# If the check does not exist in the provider or is from another provider
except ModuleNotFoundError:
logger.error(
f"Check '{check_name}' was not found for the {global_provider.type.upper()} provider"
)
except Exception as error:
logger.error(
f"{check_name} - {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
f"{check_name} - {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
# Create a list of valid categories
valid_categories = set()
for check, metadata in bulk_checks_metadata.items():
for category in metadata.Categories:
if category not in valid_categories:
valid_categories.add(category)
# Validate checks
if checks:
for check in checks:
if check not in bulk_checks_metadata.keys():
raise ScanInvalidCheckError(f"Invalid check provided: {check}.")
# Validate services
if services:
for service in services:
if service not in list_services(provider.type):
raise ScanInvalidServiceError(
f"Invalid service provided: {service}."
)
# Validate compliances
if compliances:
for compliance in compliances:
if compliance not in bulk_compliance_frameworks.keys():
raise ScanInvalidComplianceFrameworkError(
f"Invalid compliance provided: {compliance}."
)
# Validate categories
if categories:
for category in categories:
if category not in valid_categories:
raise ScanInvalidCategoryError(
f"Invalid category provided: {category}."
)
# Validate severity
if severities:
for severity in severities:
try:
Severity(severity)
except ValueError:
raise ScanInvalidSeverityError(
f"Invalid severity provided: {severity}."
)
# Load checks to execute
self._checks_to_execute = sorted(
load_checks_to_execute(
bulk_checks_metadata=bulk_checks_metadata,
bulk_compliance_frameworks=bulk_compliance_frameworks,
check_list=checks,
service_list=services,
compliance_frameworks=compliances,
categories=categories,
severities=severities,
provider=provider.type,
checks_file=None,
)
)
# Exclude checks
if excluded_checks:
for check in excluded_checks:
if check in self._checks_to_execute:
self._checks_to_execute.remove(check)
else:
raise ScanInvalidCheckError(
f"Invalid check provided: {check}. Check does not exist in the provider."
)
# Exclude services: validate the excluded services first, then filter the
# checks list in one pass instead of removing items while iterating over it
if excluded_services:
for service in excluded_services:
if service not in list_services(provider.type):
raise ScanInvalidServiceError(
f"Invalid service provided: {service}. Service does not exist in the provider."
)
self._checks_to_execute = [
check
for check in self._checks_to_execute
if get_service_name_from_check_name(check) not in excluded_services
]
self._number_of_checks_to_execute = len(self._checks_to_execute)
service_checks_to_execute = get_service_checks_to_execute(
self._checks_to_execute
)
service_checks_completed = dict()
self._service_checks_to_execute = service_checks_to_execute
self._service_checks_completed = service_checks_completed
# Initialize the findings list per instance so the mutable class-level
# default is not shared between Scan objects
self._findings = []
@property
def checks_to_execute(self) -> list[str]:
return self._checks_to_execute
@property
def service_checks_to_execute(self) -> dict[str, set[str]]:
return self._service_checks_to_execute
@property
def service_checks_completed(self) -> dict[str, set[str]]:
return self._service_checks_completed
@property
def provider(self) -> Provider:
return self._provider
@property
def progress(self) -> float:
# Guard against division by zero when there are no checks to execute
if self._number_of_checks_to_execute == 0:
return 0.0
return (
self._number_of_checks_completed / self._number_of_checks_to_execute * 100
)
@property
def duration(self) -> int:
return self._duration
@property
def findings(self) -> list:
return self._findings
def scan(
self,
custom_checks_metadata: dict = {},
) -> Generator[tuple[float, list[Finding]], None, None]:
"""
Executes the scan by iterating over the selected checks and running each one.
Yields the progress and the findings for each executed check.
Args:
custom_checks_metadata (dict): Custom metadata for the checks (default: {}).
Yields:
Tuple[float, list[Finding]]: A tuple containing the progress and findings for each check.
Raises:
ModuleNotFoundError: If the check does not exist in the provider or is from another provider.
Exception: If any other error occurs during the execution of a check.
"""
try:
checks_to_execute = self.checks_to_execute
# Initialize the Audit Metadata
# TODO: this should be done in the provider class
# Refactor(Core): Audit manager?
self._provider.audit_metadata = Audit_Metadata(
services_scanned=0,
expected_checks=checks_to_execute,
completed_checks=0,
audit_progress=0,
)
start_time = datetime.datetime.now()
for check_name in checks_to_execute:
try:
# Recover service from check name
service = get_service_name_from_check_name(check_name)
try:
# Import check module
check_module_path = f"prowler.providers.{self._provider.type}.services.{service}.{check_name}.{check_name}"
lib = import_check(check_module_path)
# Recover functions from check
check_to_execute = getattr(lib, check_name)
check = check_to_execute()
except ModuleNotFoundError:
logger.error(
f"Check '{check_name}' was not found for the {self._provider.type.upper()} provider"
)
continue
# Execute the check
check_findings = execute(
check,
self._provider,
custom_checks_metadata,
output_options=None,
)
# Filter the findings by status; rebuild the list rather than
# removing items from it while iterating
if self._status:
check_findings = [
finding
for finding in check_findings
if finding.status in self._status
]
# Store findings
self._findings.extend(check_findings)
# Remove the executed check
self._service_checks_to_execute[service].remove(check_name)
if len(self._service_checks_to_execute[service]) == 0:
self._service_checks_to_execute.pop(service, None)
# Add the completed check
if service not in self._service_checks_completed:
self._service_checks_completed[service] = set()
self._service_checks_completed[service].add(check_name)
self._number_of_checks_completed += 1
# This should be done only once all of the service's checks are completed
# This metadata needs to reach the services, not only the provider,
# since it is kept in the Scan class
self._provider.audit_metadata = update_audit_metadata(
self._provider.audit_metadata,
self.get_completed_services(),
self.get_completed_checks(),
)
findings = [
Finding.generate_output(
self._provider, finding, output_options=None
)
for finding in check_findings
]
yield self.progress, findings
# If the check does not exist in the provider or is from another provider
except ModuleNotFoundError:
logger.error(
f"Check '{check_name}' was not found for the {self._provider.type.upper()} provider"
)
except Exception as error:
logger.error(
f"{check_name} - {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
# Update the scan duration when all checks are completed
self._duration = int((datetime.datetime.now() - start_time).total_seconds())
except Exception as error:
logger.error(
f"{check_name} - {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def get_completed_services(self) -> set[str]:
"""
get_completed_services returns the services that have been completed.
Example:
get_completed_services() -> {"ec2", "s3"}
"""
# Return a real set to match the annotated return type
return set(self._service_checks_completed.keys())
def get_completed_checks(self) -> set[str]:
"""
get_completed_checks returns the checks that have been completed.
Example:
get_completed_checks() -> {"ec2_instance_public_ip", "s3_bucket_public"}
"""
completed_checks = set()
for checks in self._service_checks_completed.values():
completed_checks.update(checks)
return completed_checks
def get_service_name_from_check_name(check_name: str) -> str:
"""
get_service_name_from_check_name returns the service name for a given check name.
Example:
get_service_name_from_check_name("ec2_instance_public") -> "ec2"
"""
return check_name.split("_")[0]
def get_service_checks_to_execute(checks_to_execute: set[str]) -> dict[str, set[str]]:
"""
get_service_checks_to_execute returns a dictionary with the services and the checks to execute.
Example:
get_service_checks_to_execute({"accessanalyzer_enabled", "ec2_instance_public_ip"})
-> {"accessanalyzer": {"accessanalyzer_enabled"}, "ec2": {"ec2_instance_public_ip"}}
"""
service_checks_to_execute = dict()
for check in checks_to_execute:
# check -> accessanalyzer_enabled
# service -> accessanalyzer
service = get_service_name_from_check_name(check)
if service not in service_checks_to_execute:
service_checks_to_execute[service] = set()
service_checks_to_execute[service].add(check)
return service_checks_to_execute
return all_findings
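To make the flow above concrete, here is a minimal, hedged usage sketch of the new Scan class. It assumes a provider object has already been initialized elsewhere (construction details vary per provider), and the argument values are purely illustrative:

scan = Scan(provider, severities=["critical", "high"], excluded_services=["ec2"])
for progress, findings in scan.scan():
    # Each iteration corresponds to one executed check
    print(f"{progress:.1f}% complete, {len(findings)} new findings")
print(f"Finished in {scan.duration}s with {len(scan.findings)} findings in total")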
+14 -132
View File
@@ -1,6 +1,5 @@
import json
import os
from operator import attrgetter
try:
import grp
@@ -17,11 +16,11 @@ from io import TextIOWrapper
from ipaddress import ip_address
from os.path import exists
from time import mktime
from typing import Any, Optional
from typing import Optional
from colorama import Style
from detect_secrets import SecretsCollection
from detect_secrets.settings import transient_settings
from detect_secrets.settings import default_settings
from prowler.config.config import encoding_format_utf_8
from prowler.lib.logger import logger
@@ -81,96 +80,20 @@ def hash_sha512(string: str) -> str:
return sha512(string.encode(encoding_format_utf_8)).hexdigest()[0:9]
def detect_secrets_scan(
data: str = None, file=None, excluded_secrets: list[str] = None
) -> list[dict[str, str]]:
"""detect_secrets_scan scans the data or file for secrets using the detect-secrets library.
Args:
data (str): The data to scan for secrets.
file (str): The file to scan for secrets.
excluded_secrets (list): A list of regex patterns to exclude from the scan.
Returns:
dict: The secrets found in the data or file, or None if no secrets were found.
Raises:
Exception: If an error occurs during the scan.
Examples:
>>> detect_secrets_scan(data="password=password")
[{'filename': 'data', 'hashed_secret': 'f7c3bc1d808e04732adf679965ccc34ca7ae3441', 'is_verified': False, 'line_number': 1, 'type': 'Secret Keyword'}]
>>> detect_secrets_scan(file="file.txt")
{'file.txt': [{'filename': 'file.txt', 'hashed_secret': 'f7c3bc1d808e04732adf679965ccc34ca7ae3441', 'is_verified': False, 'line_number': 1, 'type': 'Secret Keyword'}]}
"""
try:
if not file:
temp_data_file = tempfile.NamedTemporaryFile(delete=False)
temp_data_file.write(bytes(data, encoding="raw_unicode_escape"))
temp_data_file.close()
def detect_secrets_scan(data):
temp_data_file = tempfile.NamedTemporaryFile(delete=False)
temp_data_file.write(bytes(data, encoding="raw_unicode_escape"))
temp_data_file.close()
secrets = SecretsCollection()
secrets = SecretsCollection()
with default_settings():
secrets.scan_file(temp_data_file.name)
os.remove(temp_data_file.name)
settings = {
"plugins_used": [
{"name": "ArtifactoryDetector"},
{"name": "AWSKeyDetector"},
{"name": "AzureStorageKeyDetector"},
{"name": "BasicAuthDetector"},
{"name": "CloudantDetector"},
{"name": "DiscordBotTokenDetector"},
{"name": "GitHubTokenDetector"},
{"name": "GitLabTokenDetector"},
{"name": "Base64HighEntropyString", "limit": 6.0},
{"name": "HexHighEntropyString", "limit": 3.0},
{"name": "IbmCloudIamDetector"},
{"name": "IbmCosHmacDetector"},
# {"name": "IPPublicDetector"}, https://github.com/Yelp/detect-secrets/pull/885
{"name": "JwtTokenDetector"},
{"name": "KeywordDetector"},
{"name": "MailchimpDetector"},
{"name": "NpmDetector"},
{"name": "OpenAIDetector"},
{"name": "PrivateKeyDetector"},
{"name": "PypiTokenDetector"},
{"name": "SendGridDetector"},
{"name": "SlackDetector"},
{"name": "SoftlayerDetector"},
{"name": "SquareOAuthDetector"},
{"name": "StripeDetector"},
# {"name": "TelegramBotTokenDetector"}, https://github.com/Yelp/detect-secrets/pull/878
{"name": "TwilioKeyDetector"},
],
"filters_used": [
{"path": "detect_secrets.filters.common.is_invalid_file"},
{"path": "detect_secrets.filters.common.is_known_false_positive"},
{"path": "detect_secrets.filters.heuristic.is_likely_id_string"},
{"path": "detect_secrets.filters.heuristic.is_potential_secret"},
],
}
if excluded_secrets and len(excluded_secrets) > 0:
settings["filters_used"].append(
{
"path": "detect_secrets.filters.regex.should_exclude_line",
"pattern": excluded_secrets,
}
)
with transient_settings(settings):
if file:
secrets.scan_file(file)
else:
secrets.scan_file(temp_data_file.name)
if not file:
os.remove(temp_data_file.name)
detect_secrets_output = secrets.json()
if detect_secrets_output:
if file:
return detect_secrets_output[file]
else:
return detect_secrets_output[temp_data_file.name]
else:
return None
except Exception as e:
logger.error(f"Error scanning for secrets: {e}")
detect_secrets_output = secrets.json()
if detect_secrets_output:
return detect_secrets_output[temp_data_file.name]
else:
return None
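For reference, a hedged sketch of how the keyword-based API above is meant to be called; the expected outputs follow the docstring examples, and the excluded_secrets patterns feed detect_secrets' should_exclude_line regex filter:

secrets = detect_secrets_scan(data="password=supersecret")
if secrets:
    for secret in secrets:
        print(secret["type"], secret["line_number"], secret["hashed_secret"])

# Lines matching an excluded pattern are filtered out before detection,
# so this call is expected to return None
secrets = detect_secrets_scan(
    data="password=EXAMPLE_VALUE", excluded_secrets=[r".*EXAMPLE.*"]
)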
@@ -273,44 +196,3 @@ def print_boxes(messages: list, report_title: str):
f"{Style.BRIGHT}{Style.RESET_ALL} · {message}{Style.BRIGHT}{Style.RESET_ALL}"
)
print()
def dict_to_lowercase(d):
"""
Convert all keys in a dictionary to lowercase.
This function takes a dictionary and returns a new dictionary
with all the keys converted to lowercase. If a value in the
dictionary is another dictionary, the function will recursively
convert the keys of that dictionary to lowercase as well.
Args:
d (dict): The dictionary to convert.
Returns:
dict: A new dictionary with all keys in lowercase.
"""
new_dict = {}
for k, v in d.items():
if isinstance(v, dict):
v = dict_to_lowercase(v)
new_dict[k.lower()] = v
return new_dict
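An illustrative call: only keys are lowercased, recursively for nested dictionaries, while values are left untouched:

dict_to_lowercase({"Name": "Prowler", "Tags": {"Env": "PROD"}})
# -> {"name": "Prowler", "tags": {"env": "PROD"}}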
def get_nested_attribute(obj: Any, attr: str) -> Any:
"""
Get a nested attribute from an object.
Args:
obj (Any): The object to get the attribute from.
attr (str): The attribute to get.
Returns:
Any: The attribute value if present, otherwise "".
"""
try:
return attrgetter(attr)(obj)
except AttributeError:
return ""
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
return ""
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,222 +0,0 @@
from prowler.exceptions.exceptions import ProwlerException
# Exceptions codes from 1000 to 1999 are reserved for AWS exceptions
class AWSBaseException(ProwlerException):
"""Base class for AWS errors."""
AWS_ERROR_CODES = {
(1000, "AWSClientError"): {
"message": "AWS ClientError occurred",
"remediation": "Check your AWS client configuration and permissions.",
},
(1001, "AWSProfileNotFoundError"): {
"message": "AWS Profile not found",
"remediation": "Ensure the AWS profile is correctly configured, please visit https://docs.aws.amazon.com/cli/v1/userguide/cli-configure-files.html",
},
(1002, "AWSNoCredentialsError"): {
"message": "No AWS credentials found",
"remediation": "Verify that AWS credentials are properly set up, please visit https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/aws/authentication/ and https://docs.aws.amazon.com/cli/v1/userguide/cli-chap-configure.html",
},
(1003, "AWSArgumentTypeValidationError"): {
"message": "AWS argument type validation error",
"remediation": "Check the provided argument types specific to AWS and ensure they meet the required format. For session duration check: https://docs.aws.amazon.com/singlesignon/latest/userguide/howtosessionduration.html and for role session name check: https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_terms-and-concepts.html#iam-term-role-session-name",
},
(1004, "AWSSetUpSessionError"): {
"message": "AWS session setup error",
"remediation": "Check the AWS session setup and ensure it is properly configured, please visit https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-files.html and check if the provided profile has the necessary permissions.",
},
(1005, "AWSIAMRoleARNRegionNotEmtpyError"): {
"message": "AWS IAM Role ARN region is not empty",
"remediation": "Check the AWS IAM Role ARN region and ensure it is empty, visit https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-arns for more information.",
},
(1006, "AWSIAMRoleARNPartitionEmptyError"): {
"message": "AWS IAM Role ARN partition is empty",
"remediation": "Check the AWS IAM Role ARN partition and ensure it is not empty, visit https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-arns for more information.",
},
(1007, "AWSIAMRoleARNMissingFieldsError"): {
"message": "AWS IAM Role ARN missing fields",
"remediation": "Check the AWS IAM Role ARN and ensure all required fields are present, visit https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-arns for more information.",
},
(1008, "AWSIAMRoleARNServiceNotIAMnorSTSError"): {
"message": "AWS IAM Role ARN service is not IAM nor STS",
"remediation": "Check the AWS IAM Role ARN service and ensure it is either IAM or STS, visit https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-arns for more information.",
},
(1009, "AWSIAMRoleARNInvalidAccountIDError"): {
"message": "AWS IAM Role ARN account ID is invalid",
"remediation": "Check the AWS IAM Role ARN account ID and ensure it is a valid 12-digit number, visit https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-arns for more information.",
},
(1010, "AWSIAMRoleARNInvalidResourceTypeError"): {
"message": "AWS IAM Role ARN resource type is invalid",
"remediation": "Check the AWS IAM Role ARN resource type and ensure it is valid, resources types are: role, user, assumed-role, root, federated-user, visit https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-arns for more information.",
},
(1011, "AWSIAMRoleARNEmptyResourceError"): {
"message": "AWS IAM Role ARN resource is empty",
"remediation": "Check the AWS IAM Role ARN resource and ensure it is not empty, visit https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-arns for more information.",
},
(1012, "AWSAssumeRoleError"): {
"message": "AWS assume role error",
"remediation": "Check the AWS assume role configuration and ensure it is properly set up, please visit https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/aws/role-assumption/ and https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_terms-and-concepts.html#iam-term-role-session-name",
},
(1013, "AWSAccessKeyIDInvalidError"): {
"message": "AWS Access Key ID or Session Token is invalid",
"remediation": "Check your AWS Access Key ID or Session Token and ensure it is valid.",
},
(1014, "AWSSecretAccessKeyInvalidError"): {
"message": "AWS Secret Access Key is invalid",
"remediation": "Check your AWS Secret Access Key and signing method and ensure it is valid.",
},
(1015, "AWSInvalidProviderIdError"): {
"message": "The provided AWS credentials belong to a different account",
"remediation": "Check the provided AWS credentials and review if belong to the account you want to use.",
},
(1016, "AWSSessionTokenExpiredError"): {
"message": "The provided AWS Session Token is expired",
"remediation": "Get a new AWS Session Token and configure it for the provider.",
},
}
def __init__(self, code, file=None, original_exception=None, message=None):
error_info = self.AWS_ERROR_CODES.get((code, self.__class__.__name__))
if message:
error_info["message"] = message
super().__init__(
code,
source="AWS",
file=file,
original_exception=original_exception,
error_info=error_info,
)
class AWSCredentialsError(AWSBaseException):
"""Base class for AWS credentials errors."""
def __init__(self, code, file=None, original_exception=None, message=None):
super().__init__(code, file, original_exception, message)
class AWSClientError(AWSCredentialsError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1000, file=file, original_exception=original_exception, message=message
)
class AWSProfileNotFoundError(AWSCredentialsError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1001, file=file, original_exception=original_exception, message=message
)
class AWSNoCredentialsError(AWSCredentialsError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1002, file=file, original_exception=original_exception, message=message
)
class AWSArgumentTypeValidationError(AWSBaseException):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1003, file=file, original_exception=original_exception, message=message
)
class AWSSetUpSessionError(AWSBaseException):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1004, file=file, original_exception=original_exception, message=message
)
class AWSRoleArnError(AWSBaseException):
"""Base class for AWS role ARN errors."""
def __init__(self, code, file=None, original_exception=None, message=None):
super().__init__(code, file, original_exception, message)
class AWSIAMRoleARNRegionNotEmtpyError(AWSRoleArnError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1005, file=file, original_exception=original_exception, message=message
)
class AWSIAMRoleARNPartitionEmptyError(AWSRoleArnError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1006, file=file, original_exception=original_exception, message=message
)
class AWSIAMRoleARNMissingFieldsError(AWSRoleArnError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1007, file=file, original_exception=original_exception, message=message
)
class AWSIAMRoleARNServiceNotIAMnorSTSError(AWSRoleArnError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1008, file=file, original_exception=original_exception, message=message
)
class AWSIAMRoleARNInvalidAccountIDError(AWSRoleArnError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1009, file=file, original_exception=original_exception, message=message
)
class AWSIAMRoleARNInvalidResourceTypeError(AWSRoleArnError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1010, file=file, original_exception=original_exception, message=message
)
class AWSIAMRoleARNEmptyResourceError(AWSRoleArnError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1011, file=file, original_exception=original_exception, message=message
)
class AWSAssumeRoleError(AWSBaseException):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1012, file=file, original_exception=original_exception, message=message
)
class AWSAccessKeyIDInvalidError(AWSCredentialsError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1013, file=file, original_exception=original_exception, message=message
)
class AWSSecretAccessKeyInvalidError(AWSCredentialsError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1014, file=file, original_exception=original_exception, message=message
)
class AWSInvalidProviderIdError(AWSCredentialsError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1015, file=file, original_exception=original_exception, message=message
)
class AWSSessionTokenExpiredError(AWSCredentialsError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1016, file=file, original_exception=original_exception, message=message
)
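A hedged sketch of how these wrappers are intended to be raised and handled; it assumes the ProwlerException base renders the code and message from error_info when the exception is printed:

import os

try:
    raise AWSProfileNotFoundError(
        file=os.path.basename(__file__),
        message="Profile 'dev' not found",  # optional override of the default message
    )
except AWSCredentialsError as error:
    # AWSProfileNotFoundError (code 1001) subclasses AWSCredentialsError,
    # so all credential-related failures can be handled in one place
    print(error)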
@@ -168,40 +168,13 @@ def init_parser(self):
)
def validate_session_duration(session_duration: int) -> int:
"""validate_session_duration validates that the input session_duration is valid"""
duration = int(session_duration)
def validate_session_duration(duration):
"""validate_session_duration validates that the AWS STS Assume Role Session Duration is between 900 and 43200 seconds."""
duration = int(duration)
# Since range(i, j) goes from i to j-1, the upper bound must be 43201
if duration not in range(900, 43201):
raise ArgumentTypeError(
"Session duration must be between 900 and 43200 seconds"
)
else:
return duration
def validate_role_session_name(session_name) -> str:
"""
Validates that the role session name is valid.
Args:
session_name (str): The role session name to be validated.
Returns:
str: The validated role session name.
Raises:
ArgumentTypeError: If the role session name is invalid.
Documentation:
- AWS STS AssumeRole API: https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html
"""
if fullmatch(r"[\w+=,.@-]{2,64}", session_name):
return session_name
else:
raise ArgumentTypeError(
"Role session name must be between 2 and 64 characters long and may contain alphanumeric characters, hyphens, underscores, plus signs, equal signs, commas, periods, at signs, and tildes."
)
raise ArgumentTypeError("Session duration must be between 900 and 43200")
return duration
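Behavior sketch of the validator above; both bounds are inclusive, and int() also accepts numeric strings:

validate_session_duration(900)      # -> 900
validate_session_duration(43200)    # -> 43200
validate_session_duration("3600")   # -> 3600
validate_session_duration(899)      # raises ArgumentTypeError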
def validate_arguments(arguments: Namespace) -> tuple[bool, str]:
@@ -222,7 +195,7 @@ def validate_arguments(arguments: Namespace) -> tuple[bool, str]:
return (True, "")
def validate_bucket(bucket_name: str) -> str:
def validate_bucket(bucket_name):
"""validate_bucket validates that the input bucket_name is valid"""
if search("(?!(^xn--|.+-s3alias$))^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$", bucket_name):
return bucket_name
@@ -230,3 +203,16 @@ def validate_bucket(bucket_name: str) -> str:
raise ArgumentTypeError(
"Bucket name must be valid (https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html)"
)
def validate_role_session_name(session_name):
"""
validates that the role session name is valid
Documentation: https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html
"""
if fullmatch(r"[\w+=,.@-]{2,64}", session_name):
return session_name
else:
raise ArgumentTypeError(
"Role Session Name must be 2-64 characters long and consist only of upper- and lower-case alphanumeric characters with no spaces. You can also include underscores or any of the following characters: =,.@-"
)
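Quick behavior sketch of the fullmatch against [\w+=,.@-]{2,64}:

validate_role_session_name("prowler-scan_01")  # -> "prowler-scan_01"
validate_role_session_name("p")                # raises ArgumentTypeError (shorter than 2)
validate_role_session_name("bad name")         # raises ArgumentTypeError (space not allowed)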
+14 -17
View File
@@ -1,14 +1,13 @@
import os
import re
from argparse import ArgumentTypeError
from prowler.providers.aws.exceptions.exceptions import (
AWSIAMRoleARNEmptyResourceError,
AWSIAMRoleARNInvalidAccountIDError,
AWSIAMRoleARNInvalidResourceTypeError,
AWSIAMRoleARNPartitionEmptyError,
AWSIAMRoleARNRegionNotEmtpyError,
AWSIAMRoleARNServiceNotIAMnorSTSError,
from prowler.providers.aws.lib.arn.error import (
RoleArnParsingEmptyResource,
RoleArnParsingIAMRegionNotEmpty,
RoleArnParsingInvalidAccountID,
RoleArnParsingInvalidResourceType,
RoleArnParsingPartitionEmpty,
RoleArnParsingServiceNotIAMnorSTS,
)
from prowler.providers.aws.lib.arn.models import ARN
@@ -25,7 +24,7 @@ def parse_iam_credentials_arn(arn: str) -> ARN:
arn_parsed = ARN(arn)
# First check if region is empty (in IAM ARN's region is always empty)
if arn_parsed.region:
raise AWSIAMRoleARNRegionNotEmtpyError(file=os.path.basename(__file__))
raise RoleArnParsingIAMRegionNotEmpty
else:
# check if needed fields are filled:
# - partition
@@ -34,30 +33,28 @@ def parse_iam_credentials_arn(arn: str) -> ARN:
# - resource_type
# - resource
if arn_parsed.partition is None or arn_parsed.partition == "":
raise AWSIAMRoleARNPartitionEmptyError(file=os.path.basename(__file__))
raise RoleArnParsingPartitionEmpty
elif arn_parsed.service != "iam" and arn_parsed.service != "sts":
raise AWSIAMRoleARNServiceNotIAMnorSTSError(file=os.path.basename(__file__))
raise RoleArnParsingServiceNotIAMnorSTS
elif (
arn_parsed.account_id is None
or len(arn_parsed.account_id) != 12
or not arn_parsed.account_id.isnumeric()
):
raise AWSIAMRoleARNInvalidAccountIDError(file=os.path.basename(__file__))
raise RoleArnParsingInvalidAccountID
elif (
arn_parsed.resource_type != "role"
and arn_parsed.resource_type != "user"
and arn_parsed.resource_type != "assumed-role"
and arn_parsed.resource_type != "root"
and arn_parsed.resource_type != "federated-user"
):
raise AWSIAMRoleARNInvalidResourceTypeError(file=os.path.basename(__file__))
raise RoleArnParsingInvalidResourceType
elif arn_parsed.resource == "":
raise AWSIAMRoleARNEmptyResourceError(file=os.path.basename(__file__))
raise RoleArnParsingEmptyResource
else:
return arn_parsed
def is_valid_arn(arn: str) -> bool:
"""is_valid_arn returns True or False whether the given AWS ARN (Amazon Resource Name) is valid or not."""
regex = r"^arn:aws(-cn|-us-gov|-iso|-iso-b)?:[a-zA-Z0-9\-]+:([a-z]{2}-[a-z]+-\d{1})?:(\d{12})?:[a-zA-Z0-9\-_\/:\.\*]+(:\d+)?$"
regex = r"^arn:aws(-cn|-us-gov|-iso|-iso-b)?:[a-zA-Z0-9\-]+:([a-z]{2}-[a-z]+-\d{1})?:(\d{12})?:[a-zA-Z0-9\-_\/:\.]+(:\d+)?$"
return re.match(regex, arn) is not None
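Illustrative inputs against the pattern (both the old and the new variant accept these; the change only drops the literal * from the resource character class):

is_valid_arn("arn:aws:iam::123456789012:role/prowler")                   # True
is_valid_arn("arn:aws-cn:ec2:cn-north-1:123456789012:instance/i-0abc")   # True
is_valid_arn("arn:aws:iam:us-east-1:role")                               # False, missing fields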
+49
View File
@@ -0,0 +1,49 @@
class RoleArnParsingFailedMissingFields(Exception):
# The ARN contains a number of fields different from six, separated by ":"
def __init__(self):
self.message = "The assumed role ARN contains an invalid number of fields separated by : or it does not start by arn, please input a valid ARN"
super().__init__(self.message)
class RoleArnParsingIAMRegionNotEmpty(Exception):
# The ARN contains a non-empty value for region, which is not valid for an IAM ARN
def __init__(self):
self.message = "The assumed role ARN contains a non-empty value for region, which is not valid for an IAM ARN, please input a valid ARN"
super().__init__(self.message)
class RoleArnParsingPartitionEmpty(Exception):
# The ARN contains an empty value for partition
def __init__(self):
self.message = "The assumed role ARN does not contain a value for partition, please input a valid ARN"
super().__init__(self.message)
class RoleArnParsingServiceNotIAMnorSTS(Exception):
def __init__(self):
self.message = "The assumed role ARN contains a value for service distinct than IAM or STS, please input a valid ARN"
super().__init__(self.message)
class RoleArnParsingServiceNotSTS(Exception):
def __init__(self):
self.message = "The assumed role ARN contains a value for service distinct than STS, please input a valid ARN"
super().__init__(self.message)
class RoleArnParsingInvalidAccountID(Exception):
def __init__(self):
self.message = "The assumed role ARN contains a value for account id empty or invalid, a valid account id must be composed of 12 numbers, please input a valid ARN"
super().__init__(self.message)
class RoleArnParsingInvalidResourceType(Exception):
def __init__(self):
self.message = "The assumed role ARN contains a value for resource type different than role, please input a valid ARN"
super().__init__(self.message)
class RoleArnParsingEmptyResource(Exception):
def __init__(self):
self.message = "The assumed role ARN does not contain a value for resource, please input a valid ARN"
super().__init__(self.message)
+2 -3
View File
@@ -1,9 +1,8 @@
import os
from typing import Optional
from pydantic import BaseModel
from prowler.providers.aws.exceptions.exceptions import AWSIAMRoleARNMissingFieldsError
from prowler.providers.aws.lib.arn.error import RoleArnParsingFailedMissingFields
class ARN(BaseModel):
@@ -19,7 +18,7 @@ class ARN(BaseModel):
# Validate the ARN
## Check that arn starts with arn
if not arn.startswith("arn:"):
raise AWSIAMRoleARNMissingFieldsError(file=os.path.basename(__file__))
raise RoleArnParsingFailedMissingFields
## Retrieve fields
arn_elements = arn.split(":", 5)
data = {

Some files were not shown because too many files have changed in this diff