diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 568d48e556..9702a361e7 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -13,3 +13,8 @@ updates: labels: - "dependencies" - "pip" + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" + target-branch: master diff --git a/.github/labeler.yml b/.github/labeler.yml new file mode 100644 index 0000000000..d4df3619b3 --- /dev/null +++ b/.github/labeler.yml @@ -0,0 +1,27 @@ +documentation: + - changed-files: + - any-glob-to-any-file: "docs/**" + +provider/aws: + - changed-files: + - any-glob-to-any-file: "prowler/providers/aws/**" + - any-glob-to-any-file: "tests/providers/aws/**" + +provider/azure: + - changed-files: + - any-glob-to-any-file: "prowler/providers/azure/**" + - any-glob-to-any-file: "tests/providers/azure/**" + +provider/gcp: + - changed-files: + - any-glob-to-any-file: "prowler/providers/gcp/**" + - any-glob-to-any-file: "tests/providers/gcp/**" + +provider/kubernetes: + - changed-files: + - any-glob-to-any-file: "prowler/providers/kubernetes/**" + - any-glob-to-any-file: "tests/providers/kubernetes/**" + +github_actions: + - changed-files: + - any-glob-to-any-file: ".github/workflows/*" diff --git a/.github/workflows/build-documentation-on-pr.yml b/.github/workflows/build-documentation-on-pr.yml new file mode 100644 index 0000000000..015a3742d6 --- /dev/null +++ b/.github/workflows/build-documentation-on-pr.yml @@ -0,0 +1,24 @@ +name: Pull Request Documentation Link + +on: + pull_request: + branches: + - 'master' + - 'prowler-4.0-dev' + paths: + - 'docs/**' + +env: + PR_NUMBER: ${{ github.event.pull_request.number }} + +jobs: + documentation-link: + name: Documentation Link + runs-on: ubuntu-latest + steps: + - name: Leave PR comment with the SaaS Documentation URI + uses: peter-evans/create-or-update-comment@v4 + with: + issue-number: ${{ env.PR_NUMBER }} + body: | + You can check the documentation for this PR here -> [SaaS Documentation](https://prowler-prowler-docs--${{ env.PR_NUMBER }}.com.readthedocs.build/projects/prowler-open-source/en/${{ env.PR_NUMBER }}/) diff --git a/.github/workflows/build-lint-push-containers.yml b/.github/workflows/build-lint-push-containers.yml index 2e8ec43750..43fd281c13 100644 --- a/.github/workflows/build-lint-push-containers.yml +++ b/.github/workflows/build-lint-push-containers.yml @@ -32,11 +32,11 @@ jobs: POETRY_VIRTUALENVS_CREATE: "false" steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup python (release) if: github.event_name == 'release' - uses: actions/setup-python@v2 + uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} @@ -52,13 +52,13 @@ jobs: poetry version ${{ github.event.release.tag_name }} - name: Login to DockerHub - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Login to Public ECR - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: public.ecr.aws username: ${{ secrets.PUBLIC_ECR_AWS_ACCESS_KEY_ID }} @@ -67,11 +67,11 @@ jobs: AWS_REGION: ${{ env.AWS_REGION }} - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Build and push container image (latest) if: github.event_name == 'push' - uses: docker/build-push-action@v2 + uses: docker/build-push-action@v5 with: push: true tags: | @@ -83,7 +83,7 @@ jobs: - name: Build and push container image 
(release) if: github.event_name == 'release' - uses: docker/build-push-action@v2 + uses: docker/build-push-action@v5 with: # Use local context to get changes # https://github.com/docker/build-push-action#path-context diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index b02607fe38..a788c10cbb 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -37,11 +37,11 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v2 + uses: github/codeql-action/init@v3 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. @@ -52,6 +52,6 @@ jobs: # queries: security-extended,security-and-quality - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 + uses: github/codeql-action/analyze@v3 with: category: "/language:${{matrix.language}}" diff --git a/.github/workflows/find-secrets.yml b/.github/workflows/find-secrets.yml index f5166d3e8b..6428ee0a0a 100644 --- a/.github/workflows/find-secrets.yml +++ b/.github/workflows/find-secrets.yml @@ -7,11 +7,11 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 - name: TruffleHog OSS - uses: trufflesecurity/trufflehog@v3.4.4 + uses: trufflesecurity/trufflehog@v3.68.2 with: path: ./ base: ${{ github.event.repository.default_branch }} diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml new file mode 100644 index 0000000000..25d6135bc4 --- /dev/null +++ b/.github/workflows/labeler.yml @@ -0,0 +1,16 @@ +name: "Pull Request Labeler" + +on: + pull_request_target: + branches: + - "master" + - "prowler-4.0-dev" + +jobs: + labeler: + permissions: + contents: read + pull-requests: write + runs-on: ubuntu-latest + steps: + - uses: actions/labeler@v5 diff --git a/.github/workflows/pull-request.yml b/.github/workflows/pull-request.yml index 6353157b66..a884abc3a2 100644 --- a/.github/workflows/pull-request.yml +++ b/.github/workflows/pull-request.yml @@ -14,13 +14,13 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.9", "3.10", "3.11"] + python-version: ["3.9", "3.10", "3.11", "3.12"] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Test if changes are in not ignored paths id: are-non-ignored-files-changed - uses: tj-actions/changed-files@v39 + uses: tj-actions/changed-files@v42 with: files: ./** files_ignore: | @@ -28,6 +28,7 @@ jobs: README.md docs/** permissions/** + mkdocs.yml - name: Install poetry if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true' run: | @@ -35,7 +36,7 @@ jobs: pipx install poetry - name: Set up Python ${{ matrix.python-version }} if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true' - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} cache: "poetry" @@ -87,6 +88,6 @@ jobs: poetry run pytest -n auto --cov=./prowler --cov-report=xml tests - name: Upload coverage reports to Codecov if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true' - uses: codecov/codecov-action@v3 + uses: codecov/codecov-action@v4 env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/pypi-release.yml b/.github/workflows/pypi-release.yml index 59b8af68de..53289f1c2e 100644 --- a/.github/workflows/pypi-release.yml +++ 
b/.github/workflows/pypi-release.yml @@ -16,7 +16,7 @@ jobs: name: Release Prowler to PyPI steps: # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: ref: ${{ env.GITHUB_BRANCH }} - name: Install dependencies @@ -24,7 +24,7 @@ jobs: pipx install poetry pipx inject poetry poetry-bumpversion - name: setup python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.9 cache: 'poetry' @@ -44,7 +44,7 @@ jobs: poetry publish # Create pull request with new version - name: Create Pull Request - uses: peter-evans/create-pull-request@v4 + uses: peter-evans/create-pull-request@v6 with: token: ${{ secrets.PROWLER_ACCESS_TOKEN }} commit-message: "chore(release): update Prowler Version to ${{ env.RELEASE_TAG }}." diff --git a/.github/workflows/refresh_aws_services_regions.yml b/.github/workflows/refresh_aws_services_regions.yml index e22bedc333..fea14d3bce 100644 --- a/.github/workflows/refresh_aws_services_regions.yml +++ b/.github/workflows/refresh_aws_services_regions.yml @@ -23,12 +23,12 @@ jobs: # Steps represent a sequence of tasks that will be executed as part of the job steps: # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: ref: ${{ env.GITHUB_BRANCH }} - name: setup python - uses: actions/setup-python@v2 + uses: actions/setup-python@v5 with: python-version: 3.9 #install the python needed @@ -38,7 +38,7 @@ jobs: pip install boto3 - name: Configure AWS Credentials -- DEV - uses: aws-actions/configure-aws-credentials@v1 + uses: aws-actions/configure-aws-credentials@v4 with: aws-region: ${{ env.AWS_REGION_DEV }} role-to-assume: ${{ secrets.DEV_IAM_ROLE_ARN }} @@ -50,12 +50,12 @@ jobs: # Create pull request - name: Create Pull Request - uses: peter-evans/create-pull-request@v4 + uses: peter-evans/create-pull-request@v6 with: token: ${{ secrets.PROWLER_ACCESS_TOKEN }} commit-message: "feat(regions_update): Update regions for AWS services." branch: "aws-services-regions-updated-${{ github.sha }}" - labels: "status/waiting-for-revision, severity/low" + labels: "status/waiting-for-revision, severity/low, provider/aws" title: "chore(regions_update): Changes in regions for AWS services." 
body: | ### Description diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9c01e1d92e..d58fc12bb1 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,7 +1,7 @@ repos: ## GENERAL - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v4.5.0 hooks: - id: check-merge-conflict - id: check-yaml @@ -15,7 +15,7 @@ repos: ## TOML - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks - rev: v2.10.0 + rev: v2.12.0 hooks: - id: pretty-format-toml args: [--autofix] @@ -28,7 +28,7 @@ repos: - id: shellcheck ## PYTHON - repo: https://github.com/myint/autoflake - rev: v2.2.0 + rev: v2.2.1 hooks: - id: autoflake args: @@ -39,25 +39,25 @@ repos: ] - repo: https://github.com/timothycrosley/isort - rev: 5.12.0 + rev: 5.13.2 hooks: - id: isort args: ["--profile", "black"] - repo: https://github.com/psf/black - rev: 22.12.0 + rev: 24.1.1 hooks: - id: black - repo: https://github.com/pycqa/flake8 - rev: 6.1.0 + rev: 7.0.0 hooks: - id: flake8 exclude: contrib args: ["--ignore=E266,W503,E203,E501,W605"] - repo: https://github.com/python-poetry/poetry - rev: 1.6.0 # add version here + rev: 1.7.0 hooks: - id: poetry-check - id: poetry-lock @@ -80,18 +80,12 @@ repos: - id: trufflehog name: TruffleHog description: Detect secrets in your data. - # entry: bash -c 'trufflehog git file://. --only-verified --fail' + entry: bash -c 'trufflehog --no-update git file://. --only-verified --fail' # For running trufflehog in docker, use the following entry instead: - entry: bash -c 'docker run -v "$(pwd):/workdir" -i --rm trufflesecurity/trufflehog:latest git file:///workdir --only-verified --fail' + # entry: bash -c 'docker run -v "$(pwd):/workdir" -i --rm trufflesecurity/trufflehog:latest git file:///workdir --only-verified --fail' language: system stages: ["commit", "push"] - - id: pytest-check - name: pytest-check - entry: bash -c 'pytest tests -n auto' - language: system - files: '.*\.py' - - id: bandit name: bandit description: "Bandit is a tool for finding common security issues in Python code" diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 557613e35f..17d338d2e9 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -8,16 +8,18 @@ version: 2 build: os: "ubuntu-22.04" tools: - python: "3.9" + python: "3.11" jobs: post_create_environment: # Install poetry # https://python-poetry.org/docs/#installing-manually - - pip install poetry - # Tell poetry to not use a virtual environment - - poetry config virtualenvs.create false + - python -m pip install poetry post_install: - - poetry install -E docs + # Install dependencies with 'docs' dependency group + # https://python-poetry.org/docs/managing-dependencies/#dependency-groups + # VIRTUAL_ENV needs to be set manually for now. + # See https://github.com/readthedocs/readthedocs.org/pull/11152/ + - VIRTUAL_ENV=${READTHEDOCS_VIRTUALENV_PATH} python -m poetry install --only=docs mkdocs: configuration: mkdocs.yml diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index 419b6d76be..c4bba21640 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -55,7 +55,7 @@ further defined and clarified by project maintainers. ## Enforcement Instances of abusive, harassing, or otherwise unacceptable behavior may be -reported by contacting the project team at community@prowler.cloud. All +reported by contacting the project team at [support.prowler.com](https://customer.support.prowler.com/servicedesk/customer/portals). 
All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. diff --git a/README.md b/README.md index 82ab6fe4ee..43c6f53dae 100644 --- a/README.md +++ b/README.md @@ -1,24 +1,31 @@
-
-
+
+
- See all the things you and your team can do with ProwlerPro at prowler.pro
+ Prowler SaaS and Prowler Open Source are as dynamic and adaptable as the environment they’re meant to protect. Trusted by the leaders in security.
++Learn more at prowler.com +
+ +
+
+
+Join our Prowler community!
+
-
-
-
-
-**ProwlerPro** gives you the benefits of Prowler Open Source plus continuous monitoring, faster execution, personalized support, visualization of your data with dashboards, alerts and much more.
-Visit prowler.pro for more info.
+
+Prowler offers hundreds of controls covering more than 25 standards and compliance frameworks like CIS, PCI-DSS, ISO27001, GDPR, HIPAA, FFIEC, SOC2, AWS FTR, ENS and custom security frameworks.
## Quick Start
### Installation
-Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/), thus can be installed using pip with `Python >= 3.9`:
-
+Prowler is available as a [PyPI](https://pypi.org/project/prowler/) project, so it can be installed using pip with `Python >= 3.9`:
=== "Generic"
@@ -136,30 +111,21 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo
=== "AWS CloudShell"
- Prowler can be easely executed in AWS CloudShell but it has some prerequsites to be able to to so. AWS CloudShell is a container running with `Amazon Linux release 2 (Karoo)` that comes with Python 3.7, since Prowler requires Python >= 3.9 we need to first install a newer version of Python. Follow the steps below to successfully execute Prowler v3 in AWS CloudShell:
+    After the migration of AWS CloudShell from Amazon Linux 2 to Amazon Linux 2023 [[1]](https://aws.amazon.com/about-aws/whats-new/2023/12/aws-cloudshell-migrated-al2023/) [[2]](https://docs.aws.amazon.com/cloudshell/latest/userguide/cloudshell-AL2023-migration.html), there is no longer a need to manually compile Python 3.9, since it is already included in AL2023. Prowler can thus be easily installed following the Generic installation method via pip. Follow the steps below to successfully execute Prowler v3 in AWS CloudShell:
_Requirements_:
- * First install all dependences and then Python, in this case we need to compile it because there is not a package available at the time this document is written:
- ```
- sudo yum -y install gcc openssl-devel bzip2-devel libffi-devel
- wget https://www.python.org/ftp/python/3.9.16/Python-3.9.16.tgz
- tar zxf Python-3.9.16.tgz
- cd Python-3.9.16/
- ./configure --enable-optimizations
- sudo make altinstall
- python3.9 --version
- cd
- ```
+ * Open AWS CloudShell `bash`.
+
_Commands_:
- * Once Python 3.9 is available we can install Prowler from pip:
```
- pip3.9 install prowler
+ pip install prowler
prowler -v
```
- > To download the results from AWS CloudShell, select Actions -> Download File and add the full path of each file. For the CSV file it will be something like `/home/cloudshell-user/output/prowler-output-123456789012-20221220191331.csv`
+ ???+ note
+ To download the results from AWS CloudShell, select Actions -> Download File and add the full path of each file. For the CSV file it will be something like `/home/cloudshell-user/output/prowler-output-123456789012-20221220191331.csv`
=== "Azure CloudShell"
@@ -194,14 +160,18 @@ You can run Prowler from your workstation, an EC2 instance, Fargate or any other

## Basic Usage
-To run Prowler, you will need to specify the provider (e.g aws, gcp or azure):
-> If no provider specified, AWS will be used for backward compatibility with most of v2 options.
+To run Prowler, you will need to specify the provider (e.g. `aws`, `gcp` or `azure`):
+
+???+ note
+    If no provider is specified, AWS will be used for backward compatibility with most of the v2 options.
```console
prowler
-> Make sure that the used AWS credentials have `dynamodb:PartiQLSelect` permissions in the table.
+???+ note
+    Make sure that the AWS credentials in use have `dynamodb:PartiQLSelect` permissions on the table.
### AWS Lambda ARN
diff --git a/docs/tutorials/parallel-execution.md b/docs/tutorials/parallel-execution.md
new file mode 100644
index 0000000000..3e4cb2aeb1
--- /dev/null
+++ b/docs/tutorials/parallel-execution.md
@@ -0,0 +1,188 @@
+# Parallel Execution
+
+The strategy used here is to execute Prowler once per service. You can adapt this approach to your requirements.
+
+This can help with very large accounts, but be aware of AWS API rate limits:
+
+1. **Service-Specific Limits**: Each AWS service has its own rate limits. For instance, Amazon EC2 might have different rate limits for launching instances versus making API calls to describe instances.
+2. **API Rate Limits**: Most of the rate limits in AWS are applied at the API level. Each API call to an AWS service counts towards the rate limit for that service.
+3. **Throttling Responses**: When you exceed the rate limit for a service, AWS responds with a throttling error. In AWS SDKs, these are typically represented as `ThrottlingException` or `RateLimitExceeded` errors.
+
+For information on Prowler's retrier configuration, please refer to this [page](https://docs.prowler.cloud/en/latest/tutorials/aws/boto3-configuration/).
+
+???+ note
+ You might need to increase the `--aws-retries-max-attempts` parameter from the default value of 3. The retrier follows an exponential backoff strategy.
+
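+For example, to give the retrier more headroom before a large parallel scan, the maximum number of attempts can be raised (the value below is only an illustration; tune it to your account):
+
+```bash
+# Illustrative value only: raise the retry budget for a large account
+prowler aws --aws-retries-max-attempts 10
+```
+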
+## Linux
+
+Generate a list of the services that Prowler supports and write it to a file named `services`:
+
+```bash
+prowler aws --list-services | awk -F"- " '{print $2}' | sed '/^$/d' > services
+```
+
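+The resulting `services` file contains one service name per line, for example (an illustrative excerpt, not the full list):
+
+```
+accessanalyzer
+account
+acm
+```
+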
+To skip scanning particular services, remove them from this file.
+
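+For instance, a minimal sketch of removing two services from the list (the service names here are only examples):
+
+```bash
+# Illustrative only: drop services you do not want to scan
+grep -v -E '^(workspaces|glacier)$' services > services.tmp && mv services.tmp services
+```
+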
+Then create a new Bash script file `parallel-prowler.sh` and add the following contents. Update the `profile` variable to the AWS CLI profile you want to run Prowler with.
+
+```bash
+#!/bin/bash
+
+# Change these variables as needed
+profile="your_profile"
+account_id=$(aws sts get-caller-identity --profile "${profile}" --query 'Account' --output text)
+
+echo "Executing in account: ${account_id}"
+
+# Maximum number of concurrent processes
+MAX_PROCESSES=5
+
+# Loop through the services
+while read -r service; do
+ echo "$(date '+%Y-%m-%d %H:%M:%S'): Starting job for service: ${service}"
+
+ # Run the command in the background
+ (prowler -p "$profile" -s "$service" -F "${account_id}-${service}" --ignore-unused-services --only-logs; echo "$(date '+%Y-%m-%d %H:%M:%S') - ${service} has completed") &
+
+ # Check if we have reached the maximum number of processes
+ while [ $(jobs -r | wc -l) -ge ${MAX_PROCESSES} ]; do
+ # Wait for a second before checking again
+ sleep 1
+ done
+done < ./services
+
+# Wait for all background processes to finish
+wait
+echo "All jobs completed"
+```
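+
+To run it (assuming the script and the `services` file are in the current directory):
+
+```bash
+chmod +x parallel-prowler.sh
+./parallel-prowler.sh
+```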
+
+Output will be stored in the `output/` folder inside the directory from which you executed the script.
+
+## Windows
+
+Generate a list of the services that Prowler supports and write it to a file named `services`:
+
+```powershell
+prowler aws --list-services | ForEach-Object {
+ # Capture lines that are likely service names
+ if ($_ -match '^\- \w+$') {
+ $_.Trim().Substring(2)
+ }
+} | Where-Object {
+ # Filter out empty or null lines
+ $_ -ne $null -and $_ -ne ''
+} | Set-Content -Path "services"
+```
+
+To skip scanning particular services, remove them from this file.
+
+Then create a new PowerShell script file `parallel-prowler.ps1` and add the following contents. Update the `$profile` variable to the AWS CLI profile you want to run Prowler with.
+
+Change any parameters you would like when calling Prowler in the `Start-Job -ScriptBlock` section. Note that you need to keep the `--only-logs` parameter; otherwise an encoding issue occurs when rendering the progress bar and Prowler won't execute successfully.
+
+```powershell
+$profile = "your_profile"
+$account_id = Invoke-Expression -Command "aws sts get-caller-identity --profile $profile --query 'Account' --output text"
+
+Write-Host "Executing Prowler in $account_id"
+
+# Maximum number of concurrent jobs
+$MAX_PROCESSES = 5
+
+# Read services from a file
+$services = Get-Content -Path "services"
+
+# Array to keep track of started jobs
+$jobs = @()
+
+foreach ($service in $services) {
+ # Start the command as a job
+ $job = Start-Job -ScriptBlock {
+ prowler -p ${using:profile} -s ${using:service} -F "${using:account_id}-${using:service}" --ignore-unused-services --only-logs
+ $endTimestamp = Get-Date -Format "yyyy-MM-dd HH:mm:ss"
+ Write-Output "${endTimestamp} - $using:service has completed"
+ }
+ $jobs += $job
+ Write-Host "$(Get-Date -Format 'yyyy-MM-dd HH:mm:ss') - Starting job for service: $service"
+
+ # Check if we have reached the maximum number of jobs
+ while (($jobs | Where-Object { $_.State -eq 'Running' }).Count -ge $MAX_PROCESSES) {
+ Start-Sleep -Seconds 1
+ # Check for any completed jobs and receive their output
+ $completedJobs = $jobs | Where-Object { $_.State -eq 'Completed' }
+ foreach ($completedJob in $completedJobs) {
+ Receive-Job -Job $completedJob -Keep | ForEach-Object { Write-Host $_ }
+ $jobs = $jobs | Where-Object { $_.Id -ne $completedJob.Id }
+ Remove-Job -Job $completedJob
+ }
+ }
+}
+
+# Check for any remaining completed jobs
+$remainingCompletedJobs = $jobs | Where-Object { $_.State -eq 'Completed' }
+foreach ($remainingJob in $remainingCompletedJobs) {
+ Receive-Job -Job $remainingJob -Keep | ForEach-Object { Write-Host $_ }
+ Remove-Job -Job $remainingJob
+}
+
+Write-Host "$(Get-Date -Format 'yyyy-MM-dd HH:mm:ss') - All jobs completed"
+```
+
+Output will be stored in `C:\Users\YOUR-USER\Documents\output\`
+
+## Combining the output files
+
+Guidance is provided for the CSV file format. From the output directory, execute either the following Bash or PowerShell script. The script collects the output from the CSV files, includes the header only from the first file, and writes the result as `CombinedCSV.csv` in the current working directory.
+
+There is no logic implemented to select which CSV files are combined. If you have additional CSV files from other actions, such as running a quick inventory, you will need to move them out of the current (or any nested) directory, or move the output you want to combine into its own folder and run the script from there.
+
+```bash
+#!/bin/bash
+
+# Initialize a variable to indicate the first file
+firstFile=true
+
+# Find all CSV files and loop through them
+# Exclude the combined output file so it is not picked up as input
+find . -name "*.csv" ! -name "CombinedCSV.csv" -print0 | while IFS= read -r -d '' file; do
+ if [ "$firstFile" = true ]; then
+ # For the first file, keep the header
+ cat "$file" > CombinedCSV.csv
+ firstFile=false
+ else
+ # For subsequent files, skip the header
+ tail -n +2 "$file" >> CombinedCSV.csv
+ fi
+done
+```
+
+```powershell
+# Get all CSV files from current directory and its subdirectories
+# (a previously generated CombinedCSV.csv is excluded so it is not re-read)
+$csvFiles = Get-ChildItem -Recurse -Filter "*.csv" | Where-Object { $_.Name -ne "CombinedCSV.csv" }
+
+# Initialize a variable to track if it's the first file
+$firstFile = $true
+
+# Loop through each CSV file
+foreach ($file in $csvFiles) {
+ if ($firstFile) {
+ # For the first file, keep the header and change the flag
+ $combinedCsv = Import-Csv -Path $file.FullName
+ $firstFile = $false
+ } else {
+        # For subsequent files, Import-Csv already consumes the header row,
+        # so the parsed records can be appended directly
+        $tempCsv = Import-Csv -Path $file.FullName
+        $combinedCsv += $tempCsv
+ }
+}
+
+# Export the combined data to a new CSV file
+$combinedCsv | Export-Csv -Path "CombinedCSV.csv" -NoTypeInformation
+```
+
+## TODO: Additional Improvements
+
+Some services need to instantiate another service to perform a check. For instance, `cloudwatch` will instantiate Prowler's `iam` service to perform the `cloudwatch_cross_account_sharing_disabled` check. When the `iam` service is instantiated, it runs its `__init__` function and pulls all the information required for that service. This provides an opportunity to improve the above script: by grouping dependent services together, the `iam` service (or any other cross-service reference) isn't repeatedly instantiated (see the sketch after this list). A complete mapping between these services still needs further investigation, but these are the cross-references that have been noted:
+
+* inspector2 needs lambda and ec2
+* cloudwatch needs iam
+* dlm needs ec2
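+
+As a sketch of that grouping (based only on the dependencies noted above; adjust the service names to match your `services` file), each dependency cluster could be scanned by a single Prowler invocation so the shared service is instantiated only once:
+
+```bash
+# Illustrative grouping only: one Prowler process per dependency cluster,
+# reusing the "profile" variable from the script above
+prowler -p "$profile" -s inspector2 lambda ec2 dlm --ignore-unused-services --only-logs &
+prowler -p "$profile" -s cloudwatch iam --ignore-unused-services --only-logs &
+wait
+```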
diff --git a/docs/tutorials/pentesting.md b/docs/tutorials/pentesting.md
index 03d566070f..b31de750fb 100644
--- a/docs/tutorials/pentesting.md
+++ b/docs/tutorials/pentesting.md
@@ -50,6 +50,7 @@ Several checks analyse resources that are exposed to the Internet, these are:
- sagemaker_notebook_instance_without_direct_internet_access_configured
- sns_topics_not_publicly_accessible
- sqs_queues_not_publicly_accessible
+- network_public_ip_shodan
...
@@ -64,5 +65,9 @@ prowler
{ \"Sid\": \"Allow CloudTrail access\", \"Effect\": \"Allow\", \"Principal\": { \"Service\": \"cloudtrail.amazonaws.com\" }, \"Action\": \"kms:DescribeKey\", \"Resource\": \"*\" } ``` 2\\. Granting encrypt permissions ``` { \"Sid\": \"Allow CloudTrail to encrypt logs\", \"Effect\": \"Allow\", \"Principal\": { \"Service\": \"cloudtrail.amazonaws.com\" }, \"Action\": \"kms:GenerateDataKey*\", \"Resource\": \"*\", \"Condition\": { \"StringLike\": { \"kms:EncryptionContext:aws:cloudtrail:arn\": [ \"arn:aws:cloudtrail:*:aws-account-id:trail/*\" ] } } } ``` 3\\. Granting decrypt permissions ``` { \"Sid\": \"Enable CloudTrail log decrypt permissions\", \"Effect\": \"Allow\", \"Principal\": { \"AWS\": \"arn:aws:iam::aws-account-id:user/username\" }, \"Action\": \"kms:Decrypt\", \"Resource\": \"*\", \"Condition\": { \"Null\": { \"kms:EncryptionContext:aws:cloudtrail:arn\": \"false\" } } } ```",
+ "References": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/encrypting-cloudtrail-log-files-with-aws-kms.html:https://docs.aws.amazon.com/kms/latest/developerguide/create-keys.html"
+ }
+ ]
+ },
+ {
+ "Id": "3.6",
+ "Description": "Ensure rotation for customer created symmetric CMKs is enabled",
+ "Checks": [
+ "kms_cmk_rotation_enabled"
+ ],
+ "Attributes": [
+ {
+ "Section": "3. Logging",
+ "Profile": "Level 2",
+ "AssessmentStatus": "Automated",
+ "Description": "AWS Key Management Service (KMS) allows customers to rotate the backing key which is key material stored within the KMS which is tied to the key ID of the Customer Created customer master key (CMK). It is the backing key that is used to perform cryptographic operations such as encryption and decryption. Automated key rotation currently retains all prior backing keys so that decryption of encrypted data can take place transparently. It is recommended that CMK key rotation be enabled for symmetric keys. Key rotation can not be enabled for any asymmetric CMK.",
+ "RationaleStatement": "Rotating encryption keys helps reduce the potential impact of a compromised key as data encrypted with a new key cannot be accessed with a previous key that may have been exposed. Keys should be rotated every year, or upon event that would result in the compromise of that key.",
+ "ImpactStatement": "Creation, management, and storage of CMKs may require additional time from and administrator.",
+ "RemediationProcedure": "**From Console:** 1. Sign in to the AWS Management Console and open the KMS console at: https://console.aws.amazon.com/kms. 2. In the left navigation pane, click Customer-managed keys. 3. Select a key where Key spec = SYMMETRIC_DEFAULT that does not have automatic rotation enabled. 4. Select the Key rotation tab. 5. Check the Automatically rotate this KMS key every year checkbox. 6. Click Save. 7. Repeat steps 3–6 for all customer-managed CMKs that do not have automatic rotation enabled.",
+ "AuditProcedure": "**From Console:** 1. Sign in to the AWS Management Console and open the KMS console at: https://console.aws.amazon.com/kms. 2. In the left navigation pane, click Customer-managed keys. 3. Select a customer managed CMK where Key spec = SYMMETRIC_DEFAULT. 4. Select the Key rotation tab. 5. Ensure the Automatically rotate this KMS key every year checkbox is checked. 6. Repeat steps 3–5 for all customer-managed CMKs where 'Key spec = SYMMETRIC_DEFAULT'.",
+ "AdditionalInformation": "",
+ "References": "https://aws.amazon.com/kms/pricing/:https://csrc.nist.gov/publications/detail/sp/800-57-part-1/rev-5/final"
+ }
+ ]
+ },
+ {
+ "Id": "3.7",
+ "Description": "Ensure VPC flow logging is enabled in all VPCs",
+ "Checks": [
+ "vpc_flow_logs_enabled"
+ ],
+ "Attributes": [
+ {
+ "Section": "3. Logging",
+ "Profile": "Level 2",
+ "AssessmentStatus": "Automated",
+ "Description": "VPC Flow Logs is a feature that enables you to capture information about the IP traffic going to and from network interfaces in your VPC. After you've created a flow log, you can view and retrieve its data in Amazon CloudWatch Logs. It is recommended that VPC Flow Logs be enabled for packet \"Rejects\" for VPCs.",
+ "RationaleStatement": "VPC Flow Logs provide visibility into network traffic that traverses the VPC and can be used to detect anomalous traffic or insight during security workflows.",
+ "ImpactStatement": "By default, CloudWatch Logs will store Logs indefinitely unless a specific retention period is defined for the log group. When choosing the number of days to retain, keep in mind the average days it takes an organization to realize they have been breached is 210 days (at the time of this writing). Since additional time is required to research a breach, a minimum 365 day retention policy allows time for detection and research. You may also wish to archive the logs to a cheaper storage service rather than simply deleting them. See the following AWS resource to manage CloudWatch Logs retention periods: 1. https://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/SettingLogRetention.html",
+ "RemediationProcedure": "Perform the following to determine if VPC Flow logs is enabled: **From Console:** 1. Sign into the management console 2. Select `Services` then `VPC` 3. In the left navigation pane, select `Your VPCs` 4. Select a VPC 5. In the right pane, select the `Flow Logs` tab. 6. If no Flow Log exists, click `Create Flow Log` 7. For Filter, select `Reject` 8. Enter in a `Role` and `Destination Log Group` 9. Click `Create Log Flow` 10. Click on `CloudWatch Logs Group` **Note:** Setting the filter to \"Reject\" will dramatically reduce the logging data accumulation for this recommendation and provide sufficient information for the purposes of breach detection, research and remediation. However, during periods of least privilege security group engineering, setting this the filter to \"All\" can be very helpful in discovering existing traffic flows required for proper operation of an already running environment. **From Command Line:** 1. Create a policy document and name it as `role_policy_document.json` and paste the following content: ``` { \"Version\": \"2012-10-17\", \"Statement\": [ { \"Sid\": \"test\", \"Effect\": \"Allow\", \"Principal\": { \"Service\": \"ec2.amazonaws.com\" }, \"Action\": \"sts:AssumeRole\" } ] } ``` 2. Create another policy document and name it as `iam_policy.json` and paste the following content: ``` { \"Version\": \"2012-10-17\", \"Statement\": [ { \"Effect\": \"Allow\", \"Action\":[ \"logs:CreateLogGroup\", \"logs:CreateLogStream\", \"logs:DescribeLogGroups\", \"logs:DescribeLogStreams\", \"logs:PutLogEvents\", \"logs:GetLogEvents\", \"logs:FilterLogEvents\" ], \"Resource\": \"*\" } ] } ``` 3. Run the below command to create an IAM role: ``` aws iam create-role --role-name --assume-role-policy-document file://role_policy_document.json ``` 4. Run the below command to create an IAM policy: ``` aws iam create-policy --policy-name --policy-document file://iam-policy.json ``` 5. Run `attach-group-policy` command using the IAM policy ARN returned at the previous step to attach the policy to the IAM role (if the command succeeds, no output is returned): ``` aws iam attach-group-policy --policy-arn arn:aws:iam:::policy/ --group-name ``` 6. Run `describe-vpcs` to get the VpcId available in the selected region: ``` aws ec2 describe-vpcs --region ``` 7. The command output should return the VPC Id available in the selected region. 8. Run `create-flow-logs` to create a flow log for the vpc: ``` aws ec2 create-flow-logs --resource-type VPC --resource-ids