Mirror of https://github.com/prowler-cloud/prowler.git, synced 2026-01-25 02:08:11 +00:00

Compare commits: 43 commits
| SHA1 |
|---|
| 0f2dfd3908 |
| 6c4d6b6f18 |
| 2988268718 |
| 39bbc4f824 |
| 12ec09877b |
| d82d8e80bc |
| 5624625c60 |
| fab3330977 |
| bfea1187be |
| 7c05709f42 |
| 7954e61944 |
| 955846140f |
| c2af649259 |
| 3a3ac5d556 |
| cb66709e18 |
| 9b962d954a |
| 35ed90f589 |
| f861ee3ca6 |
| 9b165db923 |
| 8aa06cab6f |
| 0e5e863af2 |
| 37c40bfe95 |
| 24d5acdc33 |
| 15133f20ce |
| c052822ff5 |
| 37daf4f2bd |
| 890aa57841 |
| 149217732d |
| cf62bbbdea |
| d63919c2c5 |
| c76a1b01b7 |
| c422f8e447 |
| c5502786d0 |
| da6d74ec21 |
| 7638dbe208 |
| 7c0d6aef3a |
| 38aea1847c |
| ba983b49c3 |
| d153fdf0dd |
| 86e076cf2d |
| 396edad930 |
| fc49de47eb |
| 6bb7ce2fdd |
.github/CODEOWNERS (2 changes, vendored)

@@ -1 +1 @@
* @prowler-cloud/prowler-oss
* @prowler-cloud/prowler-oss @prowler-cloud/prowler-dev
.github/workflows/build-lint-push-containers.yml (11 changes, vendored)

@@ -43,6 +43,7 @@ jobs:
runs-on: ubuntu-latest
outputs:
  prowler_version_major: ${{ steps.get-prowler-version.outputs.PROWLER_VERSION_MAJOR }}
  prowler_version: ${{ steps.update-prowler-version.outputs.PROWLER_VERSION }}
env:
  POETRY_VIRTUALENVS_CREATE: "false"

@@ -89,12 +90,14 @@ jobs:
esac

- name: Update Prowler version (release)
  id: update-prowler-version
  if: github.event_name == 'release'
  run: |
    PROWLER_VERSION="${{ github.event.release.tag_name }}"
    poetry version "${PROWLER_VERSION}"
    echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_ENV}"
    echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_OUTPUT}"

- name: Login to DockerHub
  uses: docker/login-action@v3
  with:

@@ -153,7 +156,7 @@ jobs:
echo "LATEST_COMMIT_HASH=${LATEST_COMMIT_HASH}" >> $GITHUB_ENV

- name: Dispatch event (latest)
  if: github.event_name == 'push' && ${{ needs.container-build-push.outputs.prowler_version_major == '3' }}
  if: github.event_name == 'push' && needs.container-build-push.outputs.prowler_version_major == '3'
  run: |
    curl https://api.github.com/repos/${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}/dispatches \
      -H "Accept: application/vnd.github+json" \

@@ -162,10 +165,10 @@ jobs:
      --data '{"event_type":"dispatch","client_payload":{"version":"v3-latest", "tag": "${{ env.LATEST_COMMIT_HASH }}"}}'

- name: Dispatch event (release)
  if: github.event_name == 'release' && ${{ needs.container-build-push.outputs.prowler_version_major == '3' }}
  if: github.event_name == 'release' && needs.container-build-push.outputs.prowler_version_major == '3'
  run: |
    curl https://api.github.com/repos/${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}/dispatches \
      -H "Accept: application/vnd.github+json" \
      -H "Authorization: Bearer ${{ secrets.ACCESS_TOKEN }}" \
      -H "X-GitHub-Api-Version: 2022-11-28" \
      --data '{"event_type":"dispatch","client_payload":{"version":"release", "tag":"${{ env.PROWLER_VERSION }}"}}'
      --data '{"event_type":"dispatch","client_payload":{"version":"release", "tag":"${{ needs.container-build-push.outputs.prowler_version }}"}}'
.github/workflows/find-secrets.yml (3 changes, vendored)

@@ -11,8 +11,9 @@ jobs:
  with:
    fetch-depth: 0
- name: TruffleHog OSS
  uses: trufflesecurity/trufflehog@v3.72.0
  uses: trufflesecurity/trufflehog@v3.75.1
  with:
    path: ./
    base: ${{ github.event.repository.default_branch }}
    head: HEAD
    extra_args: --only-verified
@@ -10,4 +10,4 @@
Want some swag as appreciation for your contribution?

# Prowler Developer Guide
https://docs.prowler.cloud/en/latest/tutorials/developer-guide/
https://docs.prowler.com/projects/prowler-open-source/en/latest/developer-guide/introduction/
@@ -243,11 +243,11 @@ Each Prowler check has metadata associated which is stored at the same level of
# Code holds different methods to remediate the FAIL finding
"Code": {
    # CLI holds the command in the provider native CLI to remediate it
    "CLI": "https://docs.bridgecrew.io/docs/public_8#cli-command",
    "CLI": "https://docs.prowler.com/checks/public_8#cli-command",
    # NativeIaC holds the native IaC code to remediate it, use "https://docs.bridgecrew.io/docs"
    "NativeIaC": "",
    # Other holds the other commands, scripts or code to remediate it, use "https://www.trendmicro.com/cloudoneconformity"
    "Other": "https://docs.bridgecrew.io/docs/public_8#aws-console",
    "Other": "https://docs.prowler.com/checks/public_8#aws-console",
    # Terraform holds the Terraform code to remediate it, use "https://docs.bridgecrew.io/docs"
    "Terraform": ""
},
@@ -175,6 +175,8 @@ class <Service>(ServiceParentClass):
                f"{<item>.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
```
???+note
    To avoid fake findings, when Prowler cannot retrieve the items because of an Access Denied or a similar error, we set that item's value to `None`.

### Service Models
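For illustration, the sketch below shows the shape of that pattern: the API error is logged and the attribute is left as `None` rather than being reported as an empty value. It is not code from this diff; the `Bucket` model, the `get_bucket_versioning` helper and the use of the standard `logging` module are assumptions made for the example.

```python
# Illustrative sketch only (hypothetical names): log the error and keep the
# attribute as None so checks can tell "unknown" apart from a real value.
import logging

logger = logging.getLogger(__name__)


class Bucket:
    def __init__(self, name: str):
        self.name = name
        self.versioning = None  # None means "could not be retrieved"


def get_bucket_versioning(client, bucket: Bucket, region: str) -> None:
    try:
        response = client.get_bucket_versioning(Bucket=bucket.name)
        bucket.versioning = response.get("Status") == "Enabled"
    except Exception as error:
        # Same logging format as the service template above; versioning stays None
        logger.error(
            f"{region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )
```

A check reading `bucket.versioning` can then skip buckets where the value is `None` instead of producing a false FAIL.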
@@ -509,7 +509,113 @@ class Test_compute_firewall_rdp_access_from_the_internet_allowed:

### Services

Coming soon ...

To test Google Cloud services we follow the same logic as with the Google Cloud checks. We still mock all API calls, but in this case every API call that sets up an attribute is defined in the [fixtures file](https://github.com/prowler-cloud/prowler/blob/master/tests/providers/gcp/gcp_fixtures.py), in the `mock_api_client` function. Remember that EVERY method of a service must be tested.

The following code shows a real example of a test class, with more comments than usual for educational purposes.
```python title="BigQuery Service Test"
# Import unittest.mock.patch so we can patch some objects instead of
# sharing them between tests, keeping each test isolated
from unittest.mock import patch

# Import the class needed from the service file
from prowler.providers.gcp.services.bigquery.bigquery_service import BigQuery

# Necessary constants and functions from the fixtures file
from tests.providers.gcp.gcp_fixtures import (
    GCP_PROJECT_ID,
    mock_api_client,
    mock_is_api_active,
    set_mocked_gcp_audit_info,
)


class TestBigQueryService:
    # Only method needed to test the full service
    def test_service(self):
        # Mock __is_api_active__ to ensure our mocked project is used,
        # and __generate_client__ so the client uses our mocked API calls
        with patch(
            "prowler.providers.gcp.lib.service.service.GCPService.__is_api_active__",
            new=mock_is_api_active,
        ), patch(
            "prowler.providers.gcp.lib.service.service.GCPService.__generate_client__",
            new=mock_api_client,
        ):
            # Instantiate the service class with the mocked provider
            bigquery_client = BigQuery(
                set_mocked_gcp_audit_info(project_ids=[GCP_PROJECT_ID])
            )
            # Check that every attribute of the tested class is set up according
            # to the API calls mocked in the GCP fixtures file
            assert bigquery_client.service == "bigquery"
            assert bigquery_client.project_ids == [GCP_PROJECT_ID]

            assert len(bigquery_client.datasets) == 2

            assert bigquery_client.datasets[0].name == "unique_dataset1_name"
            assert bigquery_client.datasets[0].id.__class__.__name__ == "str"
            assert bigquery_client.datasets[0].region == "US"
            assert bigquery_client.datasets[0].cmk_encryption
            assert bigquery_client.datasets[0].public
            assert bigquery_client.datasets[0].project_id == GCP_PROJECT_ID

            assert bigquery_client.datasets[1].name == "unique_dataset2_name"
            assert bigquery_client.datasets[1].id.__class__.__name__ == "str"
            assert bigquery_client.datasets[1].region == "EU"
            assert not bigquery_client.datasets[1].cmk_encryption
            assert not bigquery_client.datasets[1].public
            assert bigquery_client.datasets[1].project_id == GCP_PROJECT_ID

            assert len(bigquery_client.tables) == 2

            assert bigquery_client.tables[0].name == "unique_table1_name"
            assert bigquery_client.tables[0].id.__class__.__name__ == "str"
            assert bigquery_client.tables[0].region == "US"
            assert bigquery_client.tables[0].cmk_encryption
            assert bigquery_client.tables[0].project_id == GCP_PROJECT_ID

            assert bigquery_client.tables[1].name == "unique_table2_name"
            assert bigquery_client.tables[1].id.__class__.__name__ == "str"
            assert bigquery_client.tables[1].region == "US"
            assert not bigquery_client.tables[1].cmk_encryption
            assert bigquery_client.tables[1].project_id == GCP_PROJECT_ID
```
Because it can be confusing where all these values come from, here is an example to make it clearer. First we need to check which API call is used to obtain the datasets. If we look at the service, the call is
`self.client.datasets().list(projectId=project_id)`.

Now, in the fixtures file, we have to mock this call in our `MagicMock` client inside the `mock_api_client` function. The best way to do it is to follow the existing format: add one function that receives the client to be modified, named
`mock_api_<endpoint>_calls` (*endpoint* refers to the first attribute accessed after *client*).

In the BigQuery example the function is called `mock_api_dataset_calls`, and inside it we find the assignment used by the `__get_datasets__` method of the BigQuery class:
```python
# Mocking datasets
dataset1_id = str(uuid4())
dataset2_id = str(uuid4())

client.datasets().list().execute.return_value = {
    "datasets": [
        {
            "datasetReference": {
                "datasetId": "unique_dataset1_name",
                "projectId": GCP_PROJECT_ID,
            },
            "id": dataset1_id,
            "location": "US",
        },
        {
            "datasetReference": {
                "datasetId": "unique_dataset2_name",
                "projectId": GCP_PROJECT_ID,
            },
            "id": dataset2_id,
            "location": "EU",
        },
    ]
}
```
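To make the link between this fixture and the attributes asserted in the test explicit, the sketch below shows how a method such as `__get_datasets__` could map the mocked response to dataset objects. It is illustrative only: `Dataset` and `parse_datasets` are hypothetical names, not the actual BigQuery service code.

```python
from dataclasses import dataclass


@dataclass
class Dataset:
    # Hypothetical model mirroring the attributes asserted in the test above
    name: str
    id: str
    region: str
    project_id: str


def parse_datasets(api_response: dict) -> list[Dataset]:
    """Map a datasets().list().execute() response to Dataset objects."""
    return [
        Dataset(
            name=d["datasetReference"]["datasetId"],
            id=d["id"],
            region=d["location"],
            project_id=d["datasetReference"]["projectId"],
        )
        for d in api_response.get("datasets", [])
    ]
```

With the mocked response above it would produce one dataset in `US` and one in `EU`, which is exactly what the test class asserts.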
## Azure

@@ -100,18 +100,27 @@ aws:
# aws.awslambda_function_using_supported_runtimes
obsolete_lambda_runtimes:
  [
    "java8",
    "go1.x",
    "provided",
    "python3.6",
    "python2.7",
    "python3.7",
    "nodejs4.3",
    "nodejs4.3-edge",
    "nodejs6.10",
    "nodejs",
    "nodejs8.10",
    "nodejs10.x",
    "nodejs12.x",
    "nodejs14.x",
    "dotnet5.0",
    "dotnetcore1.0",
    "dotnetcore2.0",
    "dotnetcore2.1",
    "dotnetcore3.1",
    "ruby2.5",
    "ruby2.7",
  ]

# AWS Organizations
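As a rough illustration of how a check consumes this setting, the sketch below loads the list from a YAML file and flags a function whose runtime appears in it. The file path and the loading logic are simplified assumptions, not Prowler's actual `audit_config` plumbing.

```python
# Minimal sketch: read obsolete_lambda_runtimes from an audit configuration
# shaped like the snippet above and decide a check status from it.
import yaml

with open("config.yaml") as config_file:  # placeholder path for the audit config
    audit_config = yaml.safe_load(config_file)["aws"]

obsolete_runtimes = audit_config.get("obsolete_lambda_runtimes", [])

# A function still running "python3.6" (listed above) would FAIL the check.
runtime = "python3.6"
status = "FAIL" if runtime in obsolete_runtimes else "PASS"
print(f"{runtime}: {status}")
```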
mkdocs.yml (19 changes)

@@ -16,8 +16,23 @@ theme:
- navigation.sections
- navigation.top
palette:
  primary: black
  accent: green
  # Palette toggle for light mode
  - media: "(prefers-color-scheme: light)"
    scheme: default
    primary: black
    accent: green
    toggle:
      icon: material/weather-night
      name: Switch to dark mode
  # Palette toggle for dark mode
  - media: "(prefers-color-scheme: dark)"
    scheme: slate
    primary: black
    accent: green
    toggle:
      icon: material/weather-sunny
      name: Switch to light mode

plugins:
- search
poetry.lock (333 changes, generated)
@@ -322,20 +322,20 @@ aio = ["aiohttp (>=3.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "azure-identity"
|
||||
version = "1.15.0"
|
||||
version = "1.16.0"
|
||||
description = "Microsoft Azure Identity Library for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "azure-identity-1.15.0.tar.gz", hash = "sha256:4c28fc246b7f9265610eb5261d65931183d019a23d4b0e99357facb2e6c227c8"},
|
||||
{file = "azure_identity-1.15.0-py3-none-any.whl", hash = "sha256:a14b1f01c7036f11f148f22cd8c16e05035293d714458d6b44ddf534d93eb912"},
|
||||
{file = "azure-identity-1.16.0.tar.gz", hash = "sha256:6ff1d667cdcd81da1ceab42f80a0be63ca846629f518a922f7317a7e3c844e1b"},
|
||||
{file = "azure_identity-1.16.0-py3-none-any.whl", hash = "sha256:722fdb60b8fdd55fa44dc378b8072f4b419b56a5e54c0de391f644949f3a826f"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
azure-core = ">=1.23.0,<2.0.0"
|
||||
azure-core = ">=1.23.0"
|
||||
cryptography = ">=2.5"
|
||||
msal = ">=1.24.0,<2.0.0"
|
||||
msal-extensions = ">=0.3.0,<2.0.0"
|
||||
msal = ">=1.24.0"
|
||||
msal-extensions = ">=0.3.0"
|
||||
|
||||
[[package]]
|
||||
name = "azure-keyvault-keys"
|
||||
@@ -388,13 +388,13 @@ isodate = ">=0.6.1,<1.0.0"
|
||||
|
||||
[[package]]
|
||||
name = "azure-mgmt-compute"
|
||||
version = "30.6.0"
|
||||
version = "31.0.0"
|
||||
description = "Microsoft Azure Compute Management Client Library for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "azure-mgmt-compute-30.6.0.tar.gz", hash = "sha256:4d80d723ec6d4cb9583617ebec0716e7d74b2732acbaed023ed2e3cc7053d00e"},
|
||||
{file = "azure_mgmt_compute-30.6.0-py3-none-any.whl", hash = "sha256:9f6d29864ebe080796d4020533e79e4c8508512d3c53ec5a7a8930e4bd2f0bd4"},
|
||||
{file = "azure-mgmt-compute-31.0.0.tar.gz", hash = "sha256:5a5b1c4fc1a19ecb022a12ded1be8b1b155f6979d03fb9efc04642f606644bbf"},
|
||||
{file = "azure_mgmt_compute-31.0.0-py3-none-any.whl", hash = "sha256:39cad123d814390cca6adbe02afe2269aa179b9051dc022c7f07134ebd416207"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -404,19 +404,19 @@ isodate = ">=0.6.1"
|
||||
|
||||
[[package]]
|
||||
name = "azure-mgmt-containerservice"
|
||||
version = "29.1.0"
|
||||
version = "30.0.0"
|
||||
description = "Microsoft Azure Container Service Management Client Library for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "azure-mgmt-containerservice-29.1.0.tar.gz", hash = "sha256:46887178bb1035933f06fa63121c1ac9d4c5871f202ae2b86bc4af6e1e3b354f"},
|
||||
{file = "azure_mgmt_containerservice-29.1.0-py3-none-any.whl", hash = "sha256:0941a26a9c61930e004001e7340812dadb8a726e2ccc5b4d30ce4e6403fe1f43"},
|
||||
{file = "azure-mgmt-containerservice-30.0.0.tar.gz", hash = "sha256:6c62e6ac590e34fedd739fe24b31b3750713a014616696ea8d44c7bcc81c06b7"},
|
||||
{file = "azure_mgmt_containerservice-30.0.0-py3-none-any.whl", hash = "sha256:795a6a50d6632344910216853167b9bd47b09d50cb2afa28b2a18e58f5088c3f"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
azure-common = ">=1.1,<2.0"
|
||||
azure-mgmt-core = ">=1.3.2,<2.0.0"
|
||||
isodate = ">=0.6.1,<1.0.0"
|
||||
azure-common = ">=1.1"
|
||||
azure-mgmt-core = ">=1.3.2"
|
||||
isodate = ">=0.6.1"
|
||||
|
||||
[[package]]
|
||||
name = "azure-mgmt-core"
|
||||
@@ -668,33 +668,33 @@ yaml = ["PyYAML"]
|
||||
|
||||
[[package]]
|
||||
name = "black"
|
||||
version = "24.3.0"
|
||||
version = "24.4.2"
|
||||
description = "The uncompromising code formatter."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "black-24.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7d5e026f8da0322b5662fa7a8e752b3fa2dac1c1cbc213c3d7ff9bdd0ab12395"},
|
||||
{file = "black-24.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f50ea1132e2189d8dff0115ab75b65590a3e97de1e143795adb4ce317934995"},
|
||||
{file = "black-24.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2af80566f43c85f5797365077fb64a393861a3730bd110971ab7a0c94e873e7"},
|
||||
{file = "black-24.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:4be5bb28e090456adfc1255e03967fb67ca846a03be7aadf6249096100ee32d0"},
|
||||
{file = "black-24.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f1373a7808a8f135b774039f61d59e4be7eb56b2513d3d2f02a8b9365b8a8a9"},
|
||||
{file = "black-24.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aadf7a02d947936ee418777e0247ea114f78aff0d0959461057cae8a04f20597"},
|
||||
{file = "black-24.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c02e4ea2ae09d16314d30912a58ada9a5c4fdfedf9512d23326128ac08ac3d"},
|
||||
{file = "black-24.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:bf21b7b230718a5f08bd32d5e4f1db7fc8788345c8aea1d155fc17852b3410f5"},
|
||||
{file = "black-24.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2818cf72dfd5d289e48f37ccfa08b460bf469e67fb7c4abb07edc2e9f16fb63f"},
|
||||
{file = "black-24.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4acf672def7eb1725f41f38bf6bf425c8237248bb0804faa3965c036f7672d11"},
|
||||
{file = "black-24.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7ed6668cbbfcd231fa0dc1b137d3e40c04c7f786e626b405c62bcd5db5857e4"},
|
||||
{file = "black-24.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:56f52cfbd3dabe2798d76dbdd299faa046a901041faf2cf33288bc4e6dae57b5"},
|
||||
{file = "black-24.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:79dcf34b33e38ed1b17434693763301d7ccbd1c5860674a8f871bd15139e7837"},
|
||||
{file = "black-24.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e19cb1c6365fd6dc38a6eae2dcb691d7d83935c10215aef8e6c38edee3f77abd"},
|
||||
{file = "black-24.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b76c275e4c1c5ce6e9870911384bff5ca31ab63d19c76811cb1fb162678213"},
|
||||
{file = "black-24.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:b5991d523eee14756f3c8d5df5231550ae8993e2286b8014e2fdea7156ed0959"},
|
||||
{file = "black-24.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c45f8dff244b3c431b36e3224b6be4a127c6aca780853574c00faf99258041eb"},
|
||||
{file = "black-24.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6905238a754ceb7788a73f02b45637d820b2f5478b20fec82ea865e4f5d4d9f7"},
|
||||
{file = "black-24.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7de8d330763c66663661a1ffd432274a2f92f07feeddd89ffd085b5744f85e7"},
|
||||
{file = "black-24.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:7bb041dca0d784697af4646d3b62ba4a6b028276ae878e53f6b4f74ddd6db99f"},
|
||||
{file = "black-24.3.0-py3-none-any.whl", hash = "sha256:41622020d7120e01d377f74249e677039d20e6344ff5851de8a10f11f513bf93"},
|
||||
{file = "black-24.3.0.tar.gz", hash = "sha256:a0c9c4a0771afc6919578cec71ce82a3e31e054904e7197deacbc9382671c41f"},
|
||||
{file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"},
|
||||
{file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"},
|
||||
{file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"},
|
||||
{file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"},
|
||||
{file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"},
|
||||
{file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"},
|
||||
{file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"},
|
||||
{file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"},
|
||||
{file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"},
|
||||
{file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"},
|
||||
{file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"},
|
||||
{file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"},
|
||||
{file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"},
|
||||
{file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"},
|
||||
{file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"},
|
||||
{file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"},
|
||||
{file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"},
|
||||
{file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"},
|
||||
{file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"},
|
||||
{file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"},
|
||||
{file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"},
|
||||
{file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -714,17 +714,17 @@ uvloop = ["uvloop (>=0.15.2)"]
|
||||
|
||||
[[package]]
|
||||
name = "boto3"
|
||||
version = "1.34.77"
|
||||
version = "1.34.94"
|
||||
description = "The AWS SDK for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "boto3-1.34.77-py3-none-any.whl", hash = "sha256:7abd327980258ec2ae980d2ff7fc32ede7448146b14d34c56bf0be074e2a149b"},
|
||||
{file = "boto3-1.34.77.tar.gz", hash = "sha256:8ebed4fa5a3b84dd4037f28226985af00e00fb860d739fc8b1ed6381caa4b330"},
|
||||
{file = "boto3-1.34.94-py3-none-any.whl", hash = "sha256:bbb87d641c73462e53b1777083b55c8f13921618ad08757478a8122985c56c13"},
|
||||
{file = "boto3-1.34.94.tar.gz", hash = "sha256:22f65b3c9b7a419f8f39c2dddc421e14fab8cbb3bd8a9d467e874237d39f59b1"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
botocore = ">=1.34.77,<1.35.0"
|
||||
botocore = ">=1.34.94,<1.35.0"
|
||||
jmespath = ">=0.7.1,<2.0.0"
|
||||
s3transfer = ">=0.10.0,<0.11.0"
|
||||
|
||||
@@ -733,13 +733,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
|
||||
|
||||
[[package]]
|
||||
name = "botocore"
|
||||
version = "1.34.77"
|
||||
version = "1.34.99"
|
||||
description = "Low-level, data-driven core of boto 3."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "botocore-1.34.77-py3-none-any.whl", hash = "sha256:6d6a402032ca0b89525212356a865397f8f2839683dd53d41b8cee1aa84b2b4b"},
|
||||
{file = "botocore-1.34.77.tar.gz", hash = "sha256:6dab60261cdbfb7d0059488ea39408d5522fad419c004ba5db3484e6df854ea8"},
|
||||
{file = "botocore-1.34.99-py3-none-any.whl", hash = "sha256:18c68bdeb0ffb73290912b0c96204fc36d3128f00a00b5cdc35ac34d66225f1c"},
|
||||
{file = "botocore-1.34.99.tar.gz", hash = "sha256:cafe569e2136cb33cb0e5dd32fb1c0e1503ddc1413d3be215df8ddf05e69137a"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -751,7 +751,7 @@ urllib3 = [
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
crt = ["awscrt (==0.19.19)"]
|
||||
crt = ["awscrt (==0.20.9)"]
|
||||
|
||||
[[package]]
|
||||
name = "cachetools"
|
||||
@@ -1003,76 +1003,65 @@ files = [
|
||||
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "contextlib2"
|
||||
version = "21.6.0"
|
||||
description = "Backports and enhancements for the contextlib module"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "contextlib2-21.6.0-py2.py3-none-any.whl", hash = "sha256:3fbdb64466afd23abaf6c977627b75b6139a5a3e8ce38405c5b413aed7a0471f"},
|
||||
{file = "contextlib2-21.6.0.tar.gz", hash = "sha256:ab1e2bfe1d01d968e1b7e8d9023bc51ef3509bba217bb730cee3827e1ee82869"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "coverage"
|
||||
version = "7.4.4"
|
||||
version = "7.5.1"
|
||||
description = "Code coverage measurement for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "coverage-7.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0be5efd5127542ef31f165de269f77560d6cdef525fffa446de6f7e9186cfb2"},
|
||||
{file = "coverage-7.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccd341521be3d1b3daeb41960ae94a5e87abe2f46f17224ba5d6f2b8398016cf"},
|
||||
{file = "coverage-7.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fa497a8ab37784fbb20ab699c246053ac294d13fc7eb40ec007a5043ec91f8"},
|
||||
{file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1a93009cb80730c9bca5d6d4665494b725b6e8e157c1cb7f2db5b4b122ea562"},
|
||||
{file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690db6517f09336559dc0b5f55342df62370a48f5469fabf502db2c6d1cffcd2"},
|
||||
{file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:09c3255458533cb76ef55da8cc49ffab9e33f083739c8bd4f58e79fecfe288f7"},
|
||||
{file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ce1415194b4a6bd0cdcc3a1dfbf58b63f910dcb7330fe15bdff542c56949f87"},
|
||||
{file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b91cbc4b195444e7e258ba27ac33769c41b94967919f10037e6355e998af255c"},
|
||||
{file = "coverage-7.4.4-cp310-cp310-win32.whl", hash = "sha256:598825b51b81c808cb6f078dcb972f96af96b078faa47af7dfcdf282835baa8d"},
|
||||
{file = "coverage-7.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:09ef9199ed6653989ebbcaacc9b62b514bb63ea2f90256e71fea3ed74bd8ff6f"},
|
||||
{file = "coverage-7.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f9f50e7ef2a71e2fae92774c99170eb8304e3fdf9c8c3c7ae9bab3e7229c5cf"},
|
||||
{file = "coverage-7.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:623512f8ba53c422fcfb2ce68362c97945095b864cda94a92edbaf5994201083"},
|
||||
{file = "coverage-7.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0513b9508b93da4e1716744ef6ebc507aff016ba115ffe8ecff744d1322a7b63"},
|
||||
{file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40209e141059b9370a2657c9b15607815359ab3ef9918f0196b6fccce8d3230f"},
|
||||
{file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2b2b78c78293782fd3767d53e6474582f62443d0504b1554370bde86cc8227"},
|
||||
{file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:73bfb9c09951125d06ee473bed216e2c3742f530fc5acc1383883125de76d9cd"},
|
||||
{file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f384c3cc76aeedce208643697fb3e8437604b512255de6d18dae3f27655a384"},
|
||||
{file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54eb8d1bf7cacfbf2a3186019bcf01d11c666bd495ed18717162f7eb1e9dd00b"},
|
||||
{file = "coverage-7.4.4-cp311-cp311-win32.whl", hash = "sha256:cac99918c7bba15302a2d81f0312c08054a3359eaa1929c7e4b26ebe41e9b286"},
|
||||
{file = "coverage-7.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:b14706df8b2de49869ae03a5ccbc211f4041750cd4a66f698df89d44f4bd30ec"},
|
||||
{file = "coverage-7.4.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:201bef2eea65e0e9c56343115ba3814e896afe6d36ffd37bab783261db430f76"},
|
||||
{file = "coverage-7.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41c9c5f3de16b903b610d09650e5e27adbfa7f500302718c9ffd1c12cf9d6818"},
|
||||
{file = "coverage-7.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d898fe162d26929b5960e4e138651f7427048e72c853607f2b200909794ed978"},
|
||||
{file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea79bb50e805cd6ac058dfa3b5c8f6c040cb87fe83de10845857f5535d1db70"},
|
||||
{file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b94265ca988c3f8e479e741693d143026632672e3ff924f25fab50518dd51"},
|
||||
{file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00838a35b882694afda09f85e469c96367daa3f3f2b097d846a7216993d37f4c"},
|
||||
{file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fdfafb32984684eb03c2d83e1e51f64f0906b11e64482df3c5db936ce3839d48"},
|
||||
{file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:69eb372f7e2ece89f14751fbcbe470295d73ed41ecd37ca36ed2eb47512a6ab9"},
|
||||
{file = "coverage-7.4.4-cp312-cp312-win32.whl", hash = "sha256:137eb07173141545e07403cca94ab625cc1cc6bc4c1e97b6e3846270e7e1fea0"},
|
||||
{file = "coverage-7.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d71eec7d83298f1af3326ce0ff1d0ea83c7cb98f72b577097f9083b20bdaf05e"},
|
||||
{file = "coverage-7.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ae728ff3b5401cc320d792866987e7e7e880e6ebd24433b70a33b643bb0384"},
|
||||
{file = "coverage-7.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc4f1358cb0c78edef3ed237ef2c86056206bb8d9140e73b6b89fbcfcbdd40e1"},
|
||||
{file = "coverage-7.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8130a2aa2acb8788e0b56938786c33c7c98562697bf9f4c7d6e8e5e3a0501e4a"},
|
||||
{file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf271892d13e43bc2b51e6908ec9a6a5094a4df1d8af0bfc360088ee6c684409"},
|
||||
{file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4cdc86d54b5da0df6d3d3a2f0b710949286094c3a6700c21e9015932b81447e"},
|
||||
{file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae71e7ddb7a413dd60052e90528f2f65270aad4b509563af6d03d53e979feafd"},
|
||||
{file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:38dd60d7bf242c4ed5b38e094baf6401faa114fc09e9e6632374388a404f98e7"},
|
||||
{file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa5b1c1bfc28384f1f53b69a023d789f72b2e0ab1b3787aae16992a7ca21056c"},
|
||||
{file = "coverage-7.4.4-cp38-cp38-win32.whl", hash = "sha256:dfa8fe35a0bb90382837b238fff375de15f0dcdb9ae68ff85f7a63649c98527e"},
|
||||
{file = "coverage-7.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:b2991665420a803495e0b90a79233c1433d6ed77ef282e8e152a324bbbc5e0c8"},
|
||||
{file = "coverage-7.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b799445b9f7ee8bf299cfaed6f5b226c0037b74886a4e11515e569b36fe310d"},
|
||||
{file = "coverage-7.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b4d33f418f46362995f1e9d4f3a35a1b6322cb959c31d88ae56b0298e1c22357"},
|
||||
{file = "coverage-7.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadacf9a2f407a4688d700e4ebab33a7e2e408f2ca04dbf4aef17585389eff3e"},
|
||||
{file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c95949560050d04d46b919301826525597f07b33beba6187d04fa64d47ac82e"},
|
||||
{file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff7687ca3d7028d8a5f0ebae95a6e4827c5616b31a4ee1192bdfde697db110d4"},
|
||||
{file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fc1de20b2d4a061b3df27ab9b7c7111e9a710f10dc2b84d33a4ab25065994ec"},
|
||||
{file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c74880fc64d4958159fbd537a091d2a585448a8f8508bf248d72112723974cbd"},
|
||||
{file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:742a76a12aa45b44d236815d282b03cfb1de3b4323f3e4ec933acfae08e54ade"},
|
||||
{file = "coverage-7.4.4-cp39-cp39-win32.whl", hash = "sha256:d89d7b2974cae412400e88f35d86af72208e1ede1a541954af5d944a8ba46c57"},
|
||||
{file = "coverage-7.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:9ca28a302acb19b6af89e90f33ee3e1906961f94b54ea37de6737b7ca9d8827c"},
|
||||
{file = "coverage-7.4.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b2c5edc4ac10a7ef6605a966c58929ec6c1bd0917fb8c15cb3363f65aa40e677"},
|
||||
{file = "coverage-7.4.4.tar.gz", hash = "sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"},
|
||||
{file = "coverage-7.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0884920835a033b78d1c73b6d3bbcda8161a900f38a488829a83982925f6c2e"},
|
||||
{file = "coverage-7.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:39afcd3d4339329c5f58de48a52f6e4e50f6578dd6099961cf22228feb25f38f"},
|
||||
{file = "coverage-7.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7b0ceee8147444347da6a66be737c9d78f3353b0681715b668b72e79203e4a"},
|
||||
{file = "coverage-7.5.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a9ca3f2fae0088c3c71d743d85404cec8df9be818a005ea065495bedc33da35"},
|
||||
{file = "coverage-7.5.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd215c0c7d7aab005221608a3c2b46f58c0285a819565887ee0b718c052aa4e"},
|
||||
{file = "coverage-7.5.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4bf0655ab60d754491004a5efd7f9cccefcc1081a74c9ef2da4735d6ee4a6223"},
|
||||
{file = "coverage-7.5.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:61c4bf1ba021817de12b813338c9be9f0ad5b1e781b9b340a6d29fc13e7c1b5e"},
|
||||
{file = "coverage-7.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:db66fc317a046556a96b453a58eced5024af4582a8dbdc0c23ca4dbc0d5b3146"},
|
||||
{file = "coverage-7.5.1-cp310-cp310-win32.whl", hash = "sha256:b016ea6b959d3b9556cb401c55a37547135a587db0115635a443b2ce8f1c7228"},
|
||||
{file = "coverage-7.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:df4e745a81c110e7446b1cc8131bf986157770fa405fe90e15e850aaf7619bc8"},
|
||||
{file = "coverage-7.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:796a79f63eca8814ca3317a1ea443645c9ff0d18b188de470ed7ccd45ae79428"},
|
||||
{file = "coverage-7.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4fc84a37bfd98db31beae3c2748811a3fa72bf2007ff7902f68746d9757f3746"},
|
||||
{file = "coverage-7.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6175d1a0559986c6ee3f7fccfc4a90ecd12ba0a383dcc2da30c2b9918d67d8a3"},
|
||||
{file = "coverage-7.5.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fc81d5878cd6274ce971e0a3a18a8803c3fe25457165314271cf78e3aae3aa2"},
|
||||
{file = "coverage-7.5.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:556cf1a7cbc8028cb60e1ff0be806be2eded2daf8129b8811c63e2b9a6c43bca"},
|
||||
{file = "coverage-7.5.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9981706d300c18d8b220995ad22627647be11a4276721c10911e0e9fa44c83e8"},
|
||||
{file = "coverage-7.5.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d7fed867ee50edf1a0b4a11e8e5d0895150e572af1cd6d315d557758bfa9c057"},
|
||||
{file = "coverage-7.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef48e2707fb320c8f139424a596f5b69955a85b178f15af261bab871873bb987"},
|
||||
{file = "coverage-7.5.1-cp311-cp311-win32.whl", hash = "sha256:9314d5678dcc665330df5b69c1e726a0e49b27df0461c08ca12674bcc19ef136"},
|
||||
{file = "coverage-7.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:5fa567e99765fe98f4e7d7394ce623e794d7cabb170f2ca2ac5a4174437e90dd"},
|
||||
{file = "coverage-7.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b6cf3764c030e5338e7f61f95bd21147963cf6aa16e09d2f74f1fa52013c1206"},
|
||||
{file = "coverage-7.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ec92012fefebee89a6b9c79bc39051a6cb3891d562b9270ab10ecfdadbc0c34"},
|
||||
{file = "coverage-7.5.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16db7f26000a07efcf6aea00316f6ac57e7d9a96501e990a36f40c965ec7a95d"},
|
||||
{file = "coverage-7.5.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:beccf7b8a10b09c4ae543582c1319c6df47d78fd732f854ac68d518ee1fb97fa"},
|
||||
{file = "coverage-7.5.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8748731ad392d736cc9ccac03c9845b13bb07d020a33423fa5b3a36521ac6e4e"},
|
||||
{file = "coverage-7.5.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7352b9161b33fd0b643ccd1f21f3a3908daaddf414f1c6cb9d3a2fd618bf2572"},
|
||||
{file = "coverage-7.5.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7a588d39e0925f6a2bff87154752481273cdb1736270642aeb3635cb9b4cad07"},
|
||||
{file = "coverage-7.5.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:68f962d9b72ce69ea8621f57551b2fa9c70509af757ee3b8105d4f51b92b41a7"},
|
||||
{file = "coverage-7.5.1-cp312-cp312-win32.whl", hash = "sha256:f152cbf5b88aaeb836127d920dd0f5e7edff5a66f10c079157306c4343d86c19"},
|
||||
{file = "coverage-7.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:5a5740d1fb60ddf268a3811bcd353de34eb56dc24e8f52a7f05ee513b2d4f596"},
|
||||
{file = "coverage-7.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e2213def81a50519d7cc56ed643c9e93e0247f5bbe0d1247d15fa520814a7cd7"},
|
||||
{file = "coverage-7.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5037f8fcc2a95b1f0e80585bd9d1ec31068a9bcb157d9750a172836e98bc7a90"},
|
||||
{file = "coverage-7.5.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3721c2c9e4c4953a41a26c14f4cef64330392a6d2d675c8b1db3b645e31f0e"},
|
||||
{file = "coverage-7.5.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca498687ca46a62ae590253fba634a1fe9836bc56f626852fb2720f334c9e4e5"},
|
||||
{file = "coverage-7.5.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cdcbc320b14c3e5877ee79e649677cb7d89ef588852e9583e6b24c2e5072661"},
|
||||
{file = "coverage-7.5.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:57e0204b5b745594e5bc14b9b50006da722827f0b8c776949f1135677e88d0b8"},
|
||||
{file = "coverage-7.5.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fe7502616b67b234482c3ce276ff26f39ffe88adca2acf0261df4b8454668b4"},
|
||||
{file = "coverage-7.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9e78295f4144f9dacfed4f92935fbe1780021247c2fabf73a819b17f0ccfff8d"},
|
||||
{file = "coverage-7.5.1-cp38-cp38-win32.whl", hash = "sha256:1434e088b41594baa71188a17533083eabf5609e8e72f16ce8c186001e6b8c41"},
|
||||
{file = "coverage-7.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:0646599e9b139988b63704d704af8e8df7fa4cbc4a1f33df69d97f36cb0a38de"},
|
||||
{file = "coverage-7.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4cc37def103a2725bc672f84bd939a6fe4522310503207aae4d56351644682f1"},
|
||||
{file = "coverage-7.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fc0b4d8bfeabd25ea75e94632f5b6e047eef8adaed0c2161ada1e922e7f7cece"},
|
||||
{file = "coverage-7.5.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d0a0f5e06881ecedfe6f3dd2f56dcb057b6dbeb3327fd32d4b12854df36bf26"},
|
||||
{file = "coverage-7.5.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9735317685ba6ec7e3754798c8871c2f49aa5e687cc794a0b1d284b2389d1bd5"},
|
||||
{file = "coverage-7.5.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d21918e9ef11edf36764b93101e2ae8cc82aa5efdc7c5a4e9c6c35a48496d601"},
|
||||
{file = "coverage-7.5.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c3e757949f268364b96ca894b4c342b41dc6f8f8b66c37878aacef5930db61be"},
|
||||
{file = "coverage-7.5.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:79afb6197e2f7f60c4824dd4b2d4c2ec5801ceb6ba9ce5d2c3080e5660d51a4f"},
|
||||
{file = "coverage-7.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d1d0d98d95dd18fe29dc66808e1accf59f037d5716f86a501fc0256455219668"},
|
||||
{file = "coverage-7.5.1-cp39-cp39-win32.whl", hash = "sha256:1cc0fe9b0b3a8364093c53b0b4c0c2dd4bb23acbec4c9240b5f284095ccf7981"},
|
||||
{file = "coverage-7.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:dde0070c40ea8bb3641e811c1cfbf18e265d024deff6de52c5950677a8fb1e0f"},
|
||||
{file = "coverage-7.5.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:6537e7c10cc47c595828b8a8be04c72144725c383c4702703ff4e42e44577312"},
|
||||
{file = "coverage-7.5.1.tar.gz", hash = "sha256:54de9ef3a9da981f7af93eafde4ede199e0846cd819eb27c88e2b712aae9708c"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1154,13 +1143,13 @@ dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"]
|
||||
|
||||
[[package]]
|
||||
name = "detect-secrets"
|
||||
version = "1.4.0"
|
||||
version = "1.5.0"
|
||||
description = "Tool for detecting secrets in the codebase"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "detect_secrets-1.4.0-py3-none-any.whl", hash = "sha256:d08ecabeee8b68c0acb0e8a354fb98d822a653f6ed05e520cead4c6fc1fc02cd"},
|
||||
{file = "detect_secrets-1.4.0.tar.gz", hash = "sha256:d56787e339758cef48c9ccd6692f7a094b9963c979c9813580b0169e41132833"},
|
||||
{file = "detect_secrets-1.5.0-py3-none-any.whl", hash = "sha256:e24e7b9b5a35048c313e983f76c4bd09dad89f045ff059e354f9943bf45aa060"},
|
||||
{file = "detect_secrets-1.5.0.tar.gz", hash = "sha256:6bb46dcc553c10df51475641bb30fd69d25645cc12339e46c824c1e0c388898a"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1290,13 +1279,13 @@ pyflakes = ">=3.2.0,<3.3.0"
|
||||
|
||||
[[package]]
|
||||
name = "freezegun"
|
||||
version = "1.4.0"
|
||||
version = "1.5.0"
|
||||
description = "Let your Python tests travel through time"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "freezegun-1.4.0-py3-none-any.whl", hash = "sha256:55e0fc3c84ebf0a96a5aa23ff8b53d70246479e9a68863f1fcac5a3e52f19dd6"},
|
||||
{file = "freezegun-1.4.0.tar.gz", hash = "sha256:10939b0ba0ff5adaecf3b06a5c2f73071d9678e507c5eaedb23c761d56ac774b"},
|
||||
{file = "freezegun-1.5.0-py3-none-any.whl", hash = "sha256:ec3f4ba030e34eb6cf7e1e257308aee2c60c3d038ff35996d7475760c9ff3719"},
|
||||
{file = "freezegun-1.5.0.tar.gz", hash = "sha256:200a64359b363aa3653d8aac289584078386c7c3da77339d257e46a01fb5c77c"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1462,13 +1451,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"]
|
||||
|
||||
[[package]]
|
||||
name = "google-api-python-client"
|
||||
version = "2.125.0"
|
||||
version = "2.127.0"
|
||||
description = "Google API Client Library for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "google-api-python-client-2.125.0.tar.gz", hash = "sha256:51a0385cff65ec135106e8be60ee7112557396dde5f44113ae23912baddda143"},
|
||||
{file = "google_api_python_client-2.125.0-py2.py3-none-any.whl", hash = "sha256:0a62b60fbd61b61a455f15d925264b3301099b67cafd2d33cf8bf151f1fca4f4"},
|
||||
{file = "google-api-python-client-2.127.0.tar.gz", hash = "sha256:bbb51b0fbccdf40e536c26341e372d7800f09afebb53103bbcc94e08f14b523b"},
|
||||
{file = "google_api_python_client-2.127.0-py2.py3-none-any.whl", hash = "sha256:d01c70c7840ec37888aa02b1aea5d9baba4c1701e268d1a0251640afd56e5e90"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1667,13 +1656,13 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "idna"
|
||||
version = "3.6"
|
||||
version = "3.7"
|
||||
description = "Internationalized Domain Names in Applications (IDNA)"
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
files = [
|
||||
{file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"},
|
||||
{file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"},
|
||||
{file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"},
|
||||
{file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1862,13 +1851,13 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "jsonschema"
|
||||
version = "4.21.1"
|
||||
version = "4.22.0"
|
||||
description = "An implementation of JSON Schema validation for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "jsonschema-4.21.1-py3-none-any.whl", hash = "sha256:7996507afae316306f9e2290407761157c6f78002dcf7419acb99822143d1c6f"},
|
||||
{file = "jsonschema-4.21.1.tar.gz", hash = "sha256:85727c00279f5fa6bedbe6238d2aa6403bedd8b4864ab11207d07df3cc1b2ee5"},
|
||||
{file = "jsonschema-4.22.0-py3-none-any.whl", hash = "sha256:ff4cfd6b1367a40e7bc6411caec72effadd3db0bbe5017de188f2d6108335802"},
|
||||
{file = "jsonschema-4.22.0.tar.gz", hash = "sha256:5b22d434a45935119af990552c862e5d6d564e8f6601206b305a61fdf661a2b7"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -2278,13 +2267,13 @@ min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-imp
|
||||
|
||||
[[package]]
|
||||
name = "mkdocs-git-revision-date-localized-plugin"
|
||||
version = "1.2.4"
|
||||
version = "1.2.5"
|
||||
description = "Mkdocs plugin that enables displaying the localized date of the last git modification of a markdown file."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "mkdocs-git-revision-date-localized-plugin-1.2.4.tar.gz", hash = "sha256:08fd0c6f33c8da9e00daf40f7865943113b3879a1c621b2bbf0fa794ffe997d3"},
|
||||
{file = "mkdocs_git_revision_date_localized_plugin-1.2.4-py3-none-any.whl", hash = "sha256:1f94eb510862ef94e982a2910404fa17a1657ecf29f45a07b0f438c00767fc85"},
|
||||
{file = "mkdocs_git_revision_date_localized_plugin-1.2.5-py3-none-any.whl", hash = "sha256:d796a18b07cfcdb154c133e3ec099d2bb5f38389e4fd54d3eb516a8a736815b8"},
|
||||
{file = "mkdocs_git_revision_date_localized_plugin-1.2.5.tar.gz", hash = "sha256:0c439816d9d0dba48e027d9d074b2b9f1d7cd179f74ba46b51e4da7bb3dc4b9b"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -2295,13 +2284,13 @@ pytz = "*"
|
||||
|
||||
[[package]]
|
||||
name = "mkdocs-material"
|
||||
version = "9.5.17"
|
||||
version = "9.5.18"
|
||||
description = "Documentation that simply works"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "mkdocs_material-9.5.17-py3-none-any.whl", hash = "sha256:14a2a60119a785e70e765dd033e6211367aca9fc70230e577c1cf6a326949571"},
|
||||
{file = "mkdocs_material-9.5.17.tar.gz", hash = "sha256:06ae1275a72db1989cf6209de9e9ecdfbcfdbc24c58353877b2bb927dbe413e4"},
|
||||
{file = "mkdocs_material-9.5.18-py3-none-any.whl", hash = "sha256:1e0e27fc9fe239f9064318acf548771a4629d5fd5dfd45444fd80a953fe21eb4"},
|
||||
{file = "mkdocs_material-9.5.18.tar.gz", hash = "sha256:a43f470947053fa2405c33995f282d24992c752a50114f23f30da9d8d0c57e62"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -2351,13 +2340,13 @@ test = ["pytest", "pytest-cov"]
|
||||
|
||||
[[package]]
|
||||
name = "moto"
|
||||
version = "5.0.5"
|
||||
version = "5.0.6"
|
||||
description = ""
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "moto-5.0.5-py2.py3-none-any.whl", hash = "sha256:4ecdd4084491a2f25f7a7925416dcf07eee0031ce724957439a32ef764b22874"},
|
||||
{file = "moto-5.0.5.tar.gz", hash = "sha256:2eaca2df7758f6868df420bf0725cd0b93d98709606f1fb8b2343b5bdc822d91"},
|
||||
{file = "moto-5.0.6-py2.py3-none-any.whl", hash = "sha256:ca1e22831a741733b581ff2ef4d6ae2e1c6db1eab97af1b78b86ca2c6e88c609"},
|
||||
{file = "moto-5.0.6.tar.gz", hash = "sha256:ad8b23f2b555ad694da8b2432a42b6d96beaaf67a4e7d932196a72193a2eee2c"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -2484,13 +2473,13 @@ dev = ["bumpver", "isort", "mypy", "pylint", "pytest", "yapf"]
|
||||
|
||||
[[package]]
|
||||
name = "msgraph-sdk"
|
||||
version = "1.2.0"
|
||||
version = "1.3.0"
|
||||
description = "The Microsoft Graph Python SDK"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "msgraph-sdk-1.2.0.tar.gz", hash = "sha256:689eec74fcb5cb29446947e4761fa57edeeb3ec1dccd7975c44d12d8d9db9c4f"},
|
||||
{file = "msgraph_sdk-1.2.0-py3-none-any.whl", hash = "sha256:4a9f706413c0a497cdfffd0b741122a5e73206333d566d115089cef9f4adadb7"},
|
||||
{file = "msgraph_sdk-1.3.0-py3-none-any.whl", hash = "sha256:b8636e4f3957bad35958c1578d1610ed4e1edf71c4fdb3423396868cab84e156"},
|
||||
{file = "msgraph_sdk-1.3.0.tar.gz", hash = "sha256:a0a0f529800e528a455e2ace31660d9d3697a8cc87b0789e30147ce4c4e9c268"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -2944,13 +2933,13 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-
|
||||
|
||||
[[package]]
|
||||
name = "pluggy"
|
||||
version = "1.4.0"
|
||||
version = "1.5.0"
|
||||
description = "plugin and hook calling mechanisms for python"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"},
|
||||
{file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"},
|
||||
{file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
|
||||
{file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
@@ -3247,13 +3236,13 @@ diagrams = ["jinja2", "railroad-diagrams"]
|
||||
|
||||
[[package]]
|
||||
name = "pytest"
|
||||
version = "8.1.1"
|
||||
version = "8.2.0"
|
||||
description = "pytest: simple powerful testing with Python"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"},
|
||||
{file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"},
|
||||
{file = "pytest-8.2.0-py3-none-any.whl", hash = "sha256:1733f0620f6cda4095bbf0d9ff8022486e91892245bb9e7d5542c018f612f233"},
|
||||
{file = "pytest-8.2.0.tar.gz", hash = "sha256:d507d4482197eac0ba2bae2e9babf0672eb333017bcedaa5fb1a3d42c1174b3f"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -3261,11 +3250,11 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""}
|
||||
exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
|
||||
iniconfig = "*"
|
||||
packaging = "*"
|
||||
pluggy = ">=1.4,<2.0"
|
||||
pluggy = ">=1.5,<2.0"
|
||||
tomli = {version = ">=1", markers = "python_version < \"3.11\""}
|
||||
|
||||
[package.extras]
|
||||
testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
|
||||
dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
|
||||
|
||||
[[package]]
|
||||
name = "pytest-cov"
|
||||
@@ -3320,18 +3309,18 @@ pytest = "*"
|
||||
|
||||
[[package]]
|
||||
name = "pytest-xdist"
|
||||
version = "3.5.0"
|
||||
version = "3.6.1"
|
||||
description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "pytest-xdist-3.5.0.tar.gz", hash = "sha256:cbb36f3d67e0c478baa57fa4edc8843887e0f6cfc42d677530a36d7472b32d8a"},
|
||||
{file = "pytest_xdist-3.5.0-py3-none-any.whl", hash = "sha256:d075629c7e00b611df89f490a5063944bee7a4362a5ff11c7cc7824a03dfce24"},
|
||||
{file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"},
|
||||
{file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
execnet = ">=1.1"
|
||||
pytest = ">=6.2.0"
|
||||
execnet = ">=2.1"
|
||||
pytest = ">=7.0.0"
|
||||
|
||||
[package.extras]
|
||||
psutil = ["psutil (>=3.0)"]
|
||||
@@ -3398,6 +3387,7 @@ files = [
|
||||
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
|
||||
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
|
||||
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
|
||||
{file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
|
||||
{file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
|
||||
{file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
|
||||
{file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
|
||||
@@ -3405,8 +3395,16 @@ files = [
|
||||
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
|
||||
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
|
||||
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
|
||||
{file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
|
||||
{file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
|
||||
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
|
||||
{file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
|
||||
{file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
|
||||
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
|
||||
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
|
||||
{file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
|
||||
{file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
|
||||
{file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
|
||||
{file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
|
||||
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
|
||||
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
|
||||
@@ -3423,6 +3421,7 @@ files = [
|
||||
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
|
||||
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
|
||||
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
|
||||
{file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
|
||||
{file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
|
||||
{file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
|
||||
{file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
|
||||
@@ -3430,6 +3429,7 @@ files = [
|
||||
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
|
||||
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
|
||||
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
|
||||
{file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
|
||||
{file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
|
||||
{file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
|
||||
{file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
|
||||
@@ -3819,24 +3819,24 @@ python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d92f81886165cb14d7b067ef37e142256f1c6a90a65cd156b063a43da1708cfd"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:b5edda50e5e9e15e54a6a8a0070302b00c518a9d32accc2346ad6c984aacd279"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:7048c338b6c86627afb27faecf418768acb6331fc24cfa56c93e8c9780f815fa"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"},
|
||||
@@ -3844,7 +3844,7 @@ files = [
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3fcc54cb0c8b811ff66082de1680b4b14cf8a81dce0d4fbf665c2265a81e07a1"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:77159f5d5b5c14f7c34073862a6b7d34944075d9f93e681638f6d753606c6ce6"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"},
|
||||
@@ -3852,7 +3852,7 @@ files = [
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:665f58bfd29b167039f714c6998178d27ccd83984084c286110ef26b230f259f"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"},
|
||||
@@ -3860,7 +3860,7 @@ files = [
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:9eb5dee2772b0f704ca2e45b1713e4e5198c18f515b52743576d196348f374d3"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"},
|
||||
{file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"},
|
||||
@@ -3888,13 +3888,13 @@ crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "safety"
|
||||
version = "3.1.0"
|
||||
version = "3.2.0"
|
||||
description = "Checks installed dependencies for known vulnerabilities and licenses."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "safety-3.1.0-py3-none-any.whl", hash = "sha256:f2ba2d36f15ac1e24751547a73b854509a7d6db31efd30b57f64ffdf9d021934"},
|
||||
{file = "safety-3.1.0.tar.gz", hash = "sha256:71f47b82ece153ec2f240e277f7cbfa70d5da2e0d143162c67f63b2f7459a1aa"},
|
||||
{file = "safety-3.2.0-py3-none-any.whl", hash = "sha256:a432fc9d17e79a4386c4f093656b617c56f839cde022649cfa796d72c7a544de"},
|
||||
{file = "safety-3.2.0.tar.gz", hash = "sha256:8bd5cab5f3d8a61ce0ea6e98f267c1006d056097c45c644fee7afeff7d5949c1"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -3954,18 +3954,15 @@ pbr = "*"
|
||||
|
||||
[[package]]
|
||||
name = "schema"
|
||||
version = "0.7.5"
|
||||
version = "0.7.7"
|
||||
description = "Simple data validation library"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "schema-0.7.5-py2.py3-none-any.whl", hash = "sha256:f3ffdeeada09ec34bf40d7d79996d9f7175db93b7a5065de0faa7f41083c1e6c"},
|
||||
{file = "schema-0.7.5.tar.gz", hash = "sha256:f06717112c61895cabc4707752b88716e8420a8819d71404501e114f91043197"},
|
||||
{file = "schema-0.7.7-py2.py3-none-any.whl", hash = "sha256:5d976a5b50f36e74e2157b47097b60002bd4d42e65425fcc9c9befadb4255dde"},
|
||||
{file = "schema-0.7.7.tar.gz", hash = "sha256:7da553abd2958a19dc2547c388cde53398b39196175a9be59ea1caf5ab0a1807"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
contextlib2 = ">=0.5.5"
|
||||
|
||||
[[package]]
|
||||
name = "setuptools"
|
||||
version = "69.2.0"
|
||||
@@ -4531,4 +4528,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p
|
||||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = ">=3.9,<3.13"
|
||||
content-hash = "847d50d1ea27be04adf7a9ff038c922ce56fd7f1ea500c3d0ec3eba40dd76822"
|
||||
content-hash = "9682a9bce7dd06121419fd782b485034b21864c3e507a5a91b00e207f565139c"
|
||||
|
||||
@@ -11,7 +11,7 @@ from prowler.lib.logger import logger
|
||||
|
||||
timestamp = datetime.today()
|
||||
timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
|
||||
prowler_version = "3.16.0"
|
||||
prowler_version = "3.16.4"
|
||||
html_logo_url = "https://github.com/prowler-cloud/prowler/"
|
||||
html_logo_img = "https://user-images.githubusercontent.com/3985464/113734260-7ba06900-96fb-11eb-82bc-d4f68a1e2710.png"
|
||||
square_logo_img = "https://user-images.githubusercontent.com/38561120/235905862-9ece5bd7-9aa3-4e48-807a-3a9035eb8bfb.png"
|
||||
|
||||
@@ -52,18 +52,27 @@ aws:
|
||||
# aws.awslambda_function_using_supported_runtimes
|
||||
obsolete_lambda_runtimes:
|
||||
[
|
||||
"java8",
|
||||
"go1.x",
|
||||
"provided",
|
||||
"python3.6",
|
||||
"python2.7",
|
||||
"python3.7",
|
||||
"nodejs4.3",
|
||||
"nodejs4.3-edge",
|
||||
"nodejs6.10",
|
||||
"nodejs",
|
||||
"nodejs8.10",
|
||||
"nodejs10.x",
|
||||
"nodejs12.x",
|
||||
"nodejs14.x",
|
||||
"dotnet5.0",
|
||||
"dotnetcore1.0",
|
||||
"dotnetcore2.0",
|
||||
"dotnetcore2.1",
|
||||
"dotnetcore3.1",
|
||||
"ruby2.5",
|
||||
"ruby2.7",
|
||||
]
|
||||
|
||||
# AWS Organizations
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import html
|
||||
import importlib
|
||||
import sys
|
||||
from os import path
|
||||
@@ -30,9 +31,9 @@ def add_html_header(file_descriptor, audit_info):
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
|
||||
<!-- Required meta tags -->
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no" />
|
||||
<style>
|
||||
.read-more {
|
||||
color: #00f;
|
||||
@@ -48,7 +49,7 @@ def add_html_header(file_descriptor, audit_info):
|
||||
</style>
|
||||
<!-- Bootstrap CSS -->
|
||||
<link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.5.0/css/bootstrap.min.css"
|
||||
integrity="sha384-9aIt2nRpC12Uk9gS9baDl411NQApFmC26EwAOH8WgZl5MYYxFfc+NcPb1dKGj7Sk" crossorigin="anonymous">
|
||||
integrity="sha384-9aIt2nRpC12Uk9gS9baDl411NQApFmC26EwAOH8WgZl5MYYxFfc+NcPb1dKGj7Sk" crossorigin="anonymous" />
|
||||
<!-- https://datatables.net/download/index with jQuery, DataTables, Buttons, SearchPanes, and Select //-->
|
||||
<link rel="stylesheet" type="text/css"
|
||||
href="https://cdn.datatables.net/v/dt/jqc-1.12.4/dt-1.10.25/b-1.7.1/sp-1.4.0/sl-1.3.3/datatables.min.css" />
|
||||
@@ -78,13 +79,13 @@ def add_html_header(file_descriptor, audit_info):
|
||||
<div class="container-fluid">
|
||||
<div class="row mt-3">
|
||||
<div class="col-md-4">
|
||||
<a href="""
|
||||
<a href=\""""
|
||||
+ html_logo_url
|
||||
+ """><img class="float-left card-img-left mt-4 mr-4 ml-4"
|
||||
src="""
|
||||
+ """\"><img class="float-left card-img-left mt-4 mr-4 ml-4"
|
||||
src=\""""
|
||||
+ html_logo_img
|
||||
+ """
|
||||
alt="prowler-logo"></a>
|
||||
+ """\"
|
||||
alt="prowler-logo" /></a>
|
||||
<div class="card">
|
||||
<div class="card-header">
|
||||
Report Information
|
||||
@@ -182,13 +183,13 @@ def fill_html(file_descriptor, finding, output_options):
|
||||
<td>{finding.check_metadata.Severity}</td>
|
||||
<td>{finding.check_metadata.ServiceName}</td>
|
||||
<td>{finding.location.lower() if isinstance(finding, Check_Report_GCP) else finding.region if isinstance(finding, Check_Report_AWS) else ""}</td>
|
||||
<td>{finding.check_metadata.CheckID.replace("_", "<wbr>_")}</td>
|
||||
<td>{finding.check_metadata.CheckID.replace("_", "<wbr />_")}</td>
|
||||
<td>{finding.check_metadata.CheckTitle}</td>
|
||||
<td>{finding.resource_id.replace("<", "<").replace(">", ">").replace("_", "<wbr>_")}</td>
|
||||
<td>{finding.resource_id.replace("<", "<").replace(">", ">").replace("_", "<wbr />_")}</td>
|
||||
<td>{parse_html_string(unroll_tags(finding.resource_tags))}</td>
|
||||
<td>{finding.status_extended.replace("<", "<").replace(">", ">").replace("_", "<wbr>_")}</td>
|
||||
<td><p class="show-read-more">{finding.check_metadata.Risk}</p></td>
|
||||
<td><p class="show-read-more">{finding.check_metadata.Remediation.Recommendation.Text}</p> <a class="read-more" href="{finding.check_metadata.Remediation.Recommendation.Url}"><i class="fas fa-external-link-alt"></i></a></td>
|
||||
<td>{finding.status_extended.replace("<", "<").replace(">", ">").replace("_", "<wbr />_")}</td>
|
||||
<td><p class="show-read-more">{html.escape(finding.check_metadata.Risk)}</p></td>
|
||||
<td><p class="show-read-more">{html.escape(finding.check_metadata.Remediation.Recommendation.Text)}</p> <a class="read-more" href="{finding.check_metadata.Remediation.Recommendation.Url}"><i class="fas fa-external-link-alt"></i></a></td>
|
||||
<td><p class="show-read-more">{parse_html_string(unroll_dict(get_check_compliance(finding, finding.check_metadata.Provider, output_options)))}</p></td>
|
||||
</tr>
|
||||
"""
|
||||
@@ -247,8 +248,6 @@ def add_html_footer(output_filename, output_directory):
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<!-- Table search and paginator -->
|
||||
<!-- Optional JavaScript -->
|
||||
<!-- jQuery first, then Popper.js, then Bootstrap JS -->
|
||||
|
||||
@@ -45,6 +45,7 @@ def display_summary_table(
|
||||
"Service": "",
|
||||
"Provider": "",
|
||||
"Total": 0,
|
||||
"Pass": 0,
|
||||
"Critical": 0,
|
||||
"High": 0,
|
||||
"Medium": 0,
|
||||
@@ -78,6 +79,7 @@ def display_summary_table(
|
||||
current["Total"] += 1
|
||||
if finding.status == "PASS":
|
||||
pass_count += 1
|
||||
current["Pass"] += 1
|
||||
elif finding.status == "FAIL":
|
||||
fail_count += 1
|
||||
if finding.check_metadata.Severity == "critical":
|
||||
@@ -157,7 +159,8 @@ def add_service_to_table(findings_table, current):
|
||||
)
|
||||
current["Status"] = f"{Fore.RED}FAIL ({total_fails}){Style.RESET_ALL}"
|
||||
else:
|
||||
current["Status"] = f"{Fore.GREEN}PASS ({current['Total']}){Style.RESET_ALL}"
|
||||
current["Status"] = f"{Fore.GREEN}PASS ({current['Pass']}){Style.RESET_ALL}"
|
||||
|
||||
findings_table["Provider"].append(current["Provider"])
|
||||
findings_table["Service"].append(current["Service"])
|
||||
findings_table["Status"].append(current["Status"])
|
||||
|
||||
@@ -1253,8 +1253,11 @@
|
||||
"regions": {
|
||||
"aws": [
|
||||
"ap-northeast-1",
|
||||
"ap-south-1",
|
||||
"ap-southeast-1",
|
||||
"ap-southeast-2",
|
||||
"eu-central-1",
|
||||
"eu-west-1",
|
||||
"eu-west-3",
|
||||
"us-east-1",
|
||||
"us-west-2"
|
||||
@@ -1277,6 +1280,7 @@
|
||||
"braket": {
|
||||
"regions": {
|
||||
"aws": [
|
||||
"eu-west-2",
|
||||
"us-east-1",
|
||||
"us-west-1",
|
||||
"us-west-2"
|
||||
@@ -2269,6 +2273,7 @@
|
||||
"ap-northeast-2",
|
||||
"ap-northeast-3",
|
||||
"ap-south-1",
|
||||
"ap-south-2",
|
||||
"ap-southeast-1",
|
||||
"ap-southeast-2",
|
||||
"ap-southeast-3",
|
||||
@@ -2278,6 +2283,7 @@
|
||||
"eu-central-2",
|
||||
"eu-north-1",
|
||||
"eu-south-1",
|
||||
"eu-south-2",
|
||||
"eu-west-1",
|
||||
"eu-west-2",
|
||||
"eu-west-3",
|
||||
@@ -2304,6 +2310,7 @@
|
||||
"ap-northeast-2",
|
||||
"ap-northeast-3",
|
||||
"ap-south-1",
|
||||
"ap-south-2",
|
||||
"ap-southeast-1",
|
||||
"ap-southeast-2",
|
||||
"ap-southeast-3",
|
||||
@@ -2313,6 +2320,7 @@
|
||||
"eu-central-2",
|
||||
"eu-north-1",
|
||||
"eu-south-1",
|
||||
"eu-south-2",
|
||||
"eu-west-1",
|
||||
"eu-west-2",
|
||||
"eu-west-3",
|
||||
@@ -2341,6 +2349,7 @@
|
||||
"ap-northeast-2",
|
||||
"ap-northeast-3",
|
||||
"ap-south-1",
|
||||
"ap-south-2",
|
||||
"ap-southeast-1",
|
||||
"ap-southeast-2",
|
||||
"ap-southeast-3",
|
||||
@@ -2350,6 +2359,7 @@
|
||||
"eu-central-2",
|
||||
"eu-north-1",
|
||||
"eu-south-1",
|
||||
"eu-south-2",
|
||||
"eu-west-1",
|
||||
"eu-west-2",
|
||||
"eu-west-3",
|
||||
@@ -2613,6 +2623,7 @@
|
||||
"ap-southeast-3",
|
||||
"ap-southeast-4",
|
||||
"ca-central-1",
|
||||
"ca-west-1",
|
||||
"eu-central-1",
|
||||
"eu-central-2",
|
||||
"eu-north-1",
|
||||
@@ -4214,6 +4225,7 @@
|
||||
"eu-central-1",
|
||||
"eu-north-1",
|
||||
"eu-south-1",
|
||||
"eu-south-2",
|
||||
"eu-west-1",
|
||||
"eu-west-2",
|
||||
"eu-west-3",
|
||||
@@ -4246,6 +4258,7 @@
|
||||
"ap-southeast-3",
|
||||
"ap-southeast-4",
|
||||
"ca-central-1",
|
||||
"ca-west-1",
|
||||
"eu-central-1",
|
||||
"eu-central-2",
|
||||
"eu-north-1",
|
||||
@@ -4367,6 +4380,7 @@
|
||||
"ap-southeast-3",
|
||||
"ap-southeast-4",
|
||||
"ca-central-1",
|
||||
"ca-west-1",
|
||||
"eu-central-1",
|
||||
"eu-central-2",
|
||||
"eu-north-1",
|
||||
@@ -4409,6 +4423,7 @@
|
||||
"ap-southeast-3",
|
||||
"ap-southeast-4",
|
||||
"ca-central-1",
|
||||
"ca-west-1",
|
||||
"eu-central-1",
|
||||
"eu-central-2",
|
||||
"eu-north-1",
|
||||
@@ -4451,6 +4466,7 @@
|
||||
"ap-southeast-3",
|
||||
"ap-southeast-4",
|
||||
"ca-central-1",
|
||||
"ca-west-1",
|
||||
"eu-central-1",
|
||||
"eu-central-2",
|
||||
"eu-north-1",
|
||||
@@ -4492,6 +4508,7 @@
|
||||
"ap-southeast-2",
|
||||
"ap-southeast-3",
|
||||
"ca-central-1",
|
||||
"ca-west-1",
|
||||
"eu-central-1",
|
||||
"eu-central-2",
|
||||
"eu-north-1",
|
||||
@@ -4534,6 +4551,7 @@
|
||||
"ap-southeast-3",
|
||||
"ap-southeast-4",
|
||||
"ca-central-1",
|
||||
"ca-west-1",
|
||||
"eu-central-1",
|
||||
"eu-central-2",
|
||||
"eu-north-1",
|
||||
@@ -4644,6 +4662,7 @@
|
||||
"ap-southeast-3",
|
||||
"ap-southeast-4",
|
||||
"ca-central-1",
|
||||
"ca-west-1",
|
||||
"eu-central-1",
|
||||
"eu-central-2",
|
||||
"eu-north-1",
|
||||
@@ -5219,16 +5238,6 @@
|
||||
]
|
||||
}
|
||||
},
|
||||
"iot-roborunner": {
|
||||
"regions": {
|
||||
"aws": [
|
||||
"eu-central-1",
|
||||
"us-east-1"
|
||||
],
|
||||
"aws-cn": [],
|
||||
"aws-us-gov": []
|
||||
}
|
||||
},
|
||||
"iot1click-devices": {
|
||||
"regions": {
|
||||
"aws": [
|
||||
@@ -5579,6 +5588,7 @@
|
||||
"ap-southeast-3",
|
||||
"ap-southeast-4",
|
||||
"ca-central-1",
|
||||
"ca-west-1",
|
||||
"eu-central-1",
|
||||
"eu-central-2",
|
||||
"eu-north-1",
|
||||
@@ -6169,7 +6179,10 @@
|
||||
"us-west-2"
|
||||
],
|
||||
"aws-cn": [],
|
||||
"aws-us-gov": []
|
||||
"aws-us-gov": [
|
||||
"us-gov-east-1",
|
||||
"us-gov-west-1"
|
||||
]
|
||||
}
|
||||
},
|
||||
"lightsail": {
|
||||
@@ -6521,7 +6534,9 @@
|
||||
"aws": [
|
||||
"us-east-1"
|
||||
],
|
||||
"aws-cn": [],
|
||||
"aws-cn": [
|
||||
"cn-northwest-1"
|
||||
],
|
||||
"aws-us-gov": []
|
||||
}
|
||||
},
|
||||
@@ -7194,6 +7209,7 @@
|
||||
"eu-west-1",
|
||||
"eu-west-2",
|
||||
"eu-west-3",
|
||||
"il-central-1",
|
||||
"me-south-1",
|
||||
"sa-east-1",
|
||||
"us-east-1",
|
||||
@@ -7945,6 +7961,16 @@
|
||||
"aws-us-gov": []
|
||||
}
|
||||
},
|
||||
"qbusiness": {
|
||||
"regions": {
|
||||
"aws": [
|
||||
"us-east-1",
|
||||
"us-west-2"
|
||||
],
|
||||
"aws-cn": [],
|
||||
"aws-us-gov": []
|
||||
}
|
||||
},
|
||||
"qldb": {
|
||||
"regions": {
|
||||
"aws": [
|
||||
@@ -8667,16 +8693,21 @@
|
||||
"ap-northeast-2",
|
||||
"ap-northeast-3",
|
||||
"ap-south-1",
|
||||
"ap-south-2",
|
||||
"ap-southeast-1",
|
||||
"ap-southeast-2",
|
||||
"ap-southeast-3",
|
||||
"ap-southeast-4",
|
||||
"ca-central-1",
|
||||
"eu-central-1",
|
||||
"eu-central-2",
|
||||
"eu-north-1",
|
||||
"eu-south-1",
|
||||
"eu-south-2",
|
||||
"eu-west-1",
|
||||
"eu-west-2",
|
||||
"eu-west-3",
|
||||
"me-central-1",
|
||||
"me-south-1",
|
||||
"sa-east-1",
|
||||
"us-east-1",
|
||||
@@ -8734,18 +8765,29 @@
|
||||
"s3control": {
|
||||
"regions": {
|
||||
"aws": [
|
||||
"af-south-1",
|
||||
"ap-east-1",
|
||||
"ap-northeast-1",
|
||||
"ap-northeast-2",
|
||||
"ap-northeast-3",
|
||||
"ap-south-1",
|
||||
"ap-south-2",
|
||||
"ap-southeast-1",
|
||||
"ap-southeast-2",
|
||||
"ap-southeast-3",
|
||||
"ap-southeast-4",
|
||||
"ca-central-1",
|
||||
"eu-central-1",
|
||||
"eu-central-2",
|
||||
"eu-north-1",
|
||||
"eu-south-1",
|
||||
"eu-south-2",
|
||||
"eu-west-1",
|
||||
"eu-west-2",
|
||||
"eu-west-3",
|
||||
"il-central-1",
|
||||
"me-central-1",
|
||||
"me-south-1",
|
||||
"sa-east-1",
|
||||
"us-east-1",
|
||||
"us-east-2",
|
||||
@@ -9437,6 +9479,7 @@
|
||||
],
|
||||
"aws-cn": [],
|
||||
"aws-us-gov": [
|
||||
"us-gov-east-1",
|
||||
"us-gov-west-1"
|
||||
]
|
||||
}
|
||||
@@ -9456,6 +9499,7 @@
|
||||
"ap-southeast-3",
|
||||
"ap-southeast-4",
|
||||
"ca-central-1",
|
||||
"ca-west-1",
|
||||
"eu-central-1",
|
||||
"eu-central-2",
|
||||
"eu-north-1",
|
||||
@@ -9601,6 +9645,7 @@
|
||||
"eu-west-1",
|
||||
"eu-west-2",
|
||||
"eu-west-3",
|
||||
"il-central-1",
|
||||
"me-central-1",
|
||||
"sa-east-1",
|
||||
"us-east-1",
|
||||
@@ -9641,21 +9686,6 @@
|
||||
"aws-us-gov": []
|
||||
}
|
||||
},
|
||||
"snowmobile": {
|
||||
"regions": {
|
||||
"aws": [
|
||||
"us-east-1",
|
||||
"us-east-2",
|
||||
"us-west-1",
|
||||
"us-west-2"
|
||||
],
|
||||
"aws-cn": [],
|
||||
"aws-us-gov": [
|
||||
"us-gov-east-1",
|
||||
"us-gov-west-1"
|
||||
]
|
||||
}
|
||||
},
|
||||
"sns": {
|
||||
"regions": {
|
||||
"aws": [
|
||||
@@ -9963,6 +9993,7 @@
|
||||
"ap-southeast-3",
|
||||
"ap-southeast-4",
|
||||
"ca-central-1",
|
||||
"ca-west-1",
|
||||
"eu-central-1",
|
||||
"eu-central-2",
|
||||
"eu-north-1",
|
||||
@@ -10228,6 +10259,24 @@
|
||||
]
|
||||
}
|
||||
},
|
||||
"timestream-influxdb": {
|
||||
"regions": {
|
||||
"aws": [
|
||||
"ap-northeast-1",
|
||||
"ap-south-1",
|
||||
"ap-southeast-1",
|
||||
"ap-southeast-2",
|
||||
"eu-central-1",
|
||||
"eu-north-1",
|
||||
"eu-west-1",
|
||||
"us-east-1",
|
||||
"us-east-2",
|
||||
"us-west-2"
|
||||
],
|
||||
"aws-cn": [],
|
||||
"aws-us-gov": []
|
||||
}
|
||||
},
|
||||
"timestream-write": {
|
||||
"regions": {
|
||||
"aws": [
|
||||
@@ -10311,6 +10360,7 @@
|
||||
"ap-southeast-3",
|
||||
"ap-southeast-4",
|
||||
"ca-central-1",
|
||||
"ca-west-1",
|
||||
"eu-central-1",
|
||||
"eu-central-2",
|
||||
"eu-north-1",
|
||||
@@ -10494,6 +10544,7 @@
|
||||
"ap-southeast-3",
|
||||
"ap-southeast-4",
|
||||
"ca-central-1",
|
||||
"ca-west-1",
|
||||
"eu-central-1",
|
||||
"eu-central-2",
|
||||
"eu-north-1",
|
||||
@@ -10502,6 +10553,7 @@
|
||||
"eu-west-1",
|
||||
"eu-west-2",
|
||||
"eu-west-3",
|
||||
"il-central-1",
|
||||
"me-central-1",
|
||||
"me-south-1",
|
||||
"sa-east-1",
|
||||
@@ -10511,7 +10563,10 @@
|
||||
"us-west-2"
|
||||
],
|
||||
"aws-cn": [],
|
||||
"aws-us-gov": []
|
||||
"aws-us-gov": [
|
||||
"us-gov-east-1",
|
||||
"us-gov-west-1"
|
||||
]
|
||||
}
|
||||
},
|
||||
"vmwarecloudonaws": {
|
||||
@@ -10683,6 +10738,7 @@
|
||||
"ap-southeast-3",
|
||||
"ap-southeast-4",
|
||||
"ca-central-1",
|
||||
"ca-west-1",
|
||||
"eu-central-1",
|
||||
"eu-central-2",
|
||||
"eu-north-1",
|
||||
@@ -10767,6 +10823,7 @@
|
||||
"ap-southeast-3",
|
||||
"ap-southeast-4",
|
||||
"ca-central-1",
|
||||
"ca-west-1",
|
||||
"eu-central-1",
|
||||
"eu-central-2",
|
||||
"eu-north-1",
|
||||
|
||||
@@ -11,13 +11,13 @@
|
||||
"Severity": "medium",
|
||||
"ResourceType": "Other",
|
||||
"Description": "Maintain current contact details.",
|
||||
"Risk": "Ensure contact email and telephone details for AWS accounts are current and map to more than one individual in your organization. An AWS account supports a number of contact details; and AWS will use these to contact the account owner if activity judged to be in breach of Acceptable Use Policy. If an AWS account is observed to be behaving in a prohibited or suspicious manner; AWS will attempt to contact the account owner by email and phone using the contact details listed. If this is unsuccessful and the account behavior needs urgent mitigation; proactive measures may be taken; including throttling of traffic between the account exhibiting suspicious behavior and the AWS API endpoints and the Internet. This will result in impaired service to and from the account in question.",
|
||||
"Risk": "Ensure contact email and telephone details for AWS accounts are current and map to more than one individual in your organization. An AWS account supports a number of contact details, and AWS will use these to contact the account owner if activity judged to be in breach of Acceptable Use Policy. If an AWS account is observed to be behaving in a prohibited or suspicious manner, AWS will attempt to contact the account owner by email and phone using the contact details listed. If this is unsuccessful and the account behavior needs urgent mitigation, proactive measures may be taken, including throttling of traffic between the account exhibiting suspicious behavior and the AWS API endpoints and the Internet. This will result in impaired service to and from the account in question.",
|
||||
"RelatedUrl": "",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "No command available.",
|
||||
"NativeIaC": "",
|
||||
"Other": "https://docs.bridgecrew.io/docs/iam_18-maintain-contact-details#aws-console",
|
||||
"Other": "https://docs.prowler.com/checks/aws/iam-policies/iam_18-maintain-contact-details#aws-console",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
|
||||
@@ -11,13 +11,13 @@
|
||||
"Severity": "medium",
|
||||
"ResourceType": "Other",
|
||||
"Description": "Maintain different contact details to security, billing and operations.",
|
||||
"Risk": "Ensure contact email and telephone details for AWS accounts are current and map to more than one individual in your organization. An AWS account supports a number of contact details; and AWS will use these to contact the account owner if activity judged to be in breach of Acceptable Use Policy. If an AWS account is observed to be behaving in a prohibited or suspicious manner; AWS will attempt to contact the account owner by email and phone using the contact details listed. If this is unsuccessful and the account behavior needs urgent mitigation; proactive measures may be taken; including throttling of traffic between the account exhibiting suspicious behavior and the AWS API endpoints and the Internet. This will result in impaired service to and from the account in question.",
|
||||
"Risk": "Ensure contact email and telephone details for AWS accounts are current and map to more than one individual in your organization. An AWS account supports a number of contact details, and AWS will use these to contact the account owner if activity judged to be in breach of Acceptable Use Policy. If an AWS account is observed to be behaving in a prohibited or suspicious manner, AWS will attempt to contact the account owner by email and phone using the contact details listed. If this is unsuccessful and the account behavior needs urgent mitigation, proactive measures may be taken, including throttling of traffic between the account exhibiting suspicious behavior and the AWS API endpoints and the Internet. This will result in impaired service to and from the account in question.",
|
||||
"RelatedUrl": "https://docs.aws.amazon.com/accounts/latest/reference/manage-acct-update-contact.html",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "https://docs.bridgecrew.io/docs/iam_18-maintain-contact-details#aws-console",
|
||||
"Other": "https://docs.prowler.com/checks/aws/iam-policies/iam_18-maintain-contact-details#aws-console",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
|
||||
@@ -6,22 +6,26 @@ class account_maintain_different_contact_details_to_security_billing_and_operati
|
||||
Check
|
||||
):
|
||||
def execute(self):
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = account_client.region
|
||||
report.resource_id = account_client.audited_account
|
||||
report.resource_arn = account_client.audited_account_arn
|
||||
findings = []
|
||||
if account_client.contact_base:
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = account_client.region
|
||||
report.resource_id = account_client.audited_account
|
||||
report.resource_arn = account_client.audited_account_arn
|
||||
|
||||
if (
|
||||
len(account_client.contact_phone_numbers)
|
||||
== account_client.number_of_contacts
|
||||
and len(account_client.contact_names) == account_client.number_of_contacts
|
||||
# This is because the primary contact has no email field
|
||||
and len(account_client.contact_emails)
|
||||
== account_client.number_of_contacts - 1
|
||||
):
|
||||
report.status = "PASS"
|
||||
report.status_extended = "SECURITY, BILLING and OPERATIONS contacts found and they are different between each other and between ROOT contact."
|
||||
else:
|
||||
report.status = "FAIL"
|
||||
report.status_extended = "SECURITY, BILLING and OPERATIONS contacts not found or they are not different between each other and between ROOT contact."
|
||||
return [report]
|
||||
if (
|
||||
len(account_client.contact_phone_numbers)
|
||||
== account_client.number_of_contacts
|
||||
and len(account_client.contact_names)
|
||||
== account_client.number_of_contacts
|
||||
# This is because the primary contact has no email field
|
||||
and len(account_client.contact_emails)
|
||||
== account_client.number_of_contacts - 1
|
||||
):
|
||||
report.status = "PASS"
|
||||
report.status_extended = "SECURITY, BILLING and OPERATIONS contacts found and they are different between each other and between ROOT contact."
|
||||
else:
|
||||
report.status = "FAIL"
|
||||
report.status_extended = "SECURITY, BILLING and OPERATIONS contacts not found or they are not different between each other and between ROOT contact."
|
||||
findings.append(report)
|
||||
return findings
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
"Code": {
|
||||
"CLI": "No command available.",
|
||||
"NativeIaC": "",
|
||||
"Other": "https://docs.bridgecrew.io/docs/iam_19#aws-console",
|
||||
"Other": "https://docs.prowler.com/checks/aws/iam-policies/iam_19#aws-console",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
"Code": {
|
||||
"CLI": "No command available.",
|
||||
"NativeIaC": "",
|
||||
"Other": "https://docs.bridgecrew.io/docs/iam_15",
|
||||
"Other": "https://docs.prowler.com/checks/aws/iam-policies/iam_15",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
|
||||
@@ -18,28 +18,29 @@ class Account(AWSService):
|
||||
self.contacts_security = self.__get_alternate_contact__("SECURITY")
|
||||
self.contacts_operations = self.__get_alternate_contact__("OPERATIONS")
|
||||
|
||||
# Set of contact phone numbers
|
||||
self.contact_phone_numbers = {
|
||||
self.contact_base.phone_number,
|
||||
self.contacts_billing.phone_number,
|
||||
self.contacts_security.phone_number,
|
||||
self.contacts_operations.phone_number,
|
||||
}
|
||||
if self.contact_base:
|
||||
# Set of contact phone numbers
|
||||
self.contact_phone_numbers = {
|
||||
self.contact_base.phone_number,
|
||||
self.contacts_billing.phone_number,
|
||||
self.contacts_security.phone_number,
|
||||
self.contacts_operations.phone_number,
|
||||
}
|
||||
|
||||
# Set of contact names
|
||||
self.contact_names = {
|
||||
self.contact_base.name,
|
||||
self.contacts_billing.name,
|
||||
self.contacts_security.name,
|
||||
self.contacts_operations.name,
|
||||
}
|
||||
# Set of contact names
|
||||
self.contact_names = {
|
||||
self.contact_base.name,
|
||||
self.contacts_billing.name,
|
||||
self.contacts_security.name,
|
||||
self.contacts_operations.name,
|
||||
}
|
||||
|
||||
# Set of contact emails
|
||||
self.contact_emails = {
|
||||
self.contacts_billing.email,
|
||||
self.contacts_security.email,
|
||||
self.contacts_operations.email,
|
||||
}
|
||||
# Set of contact emails
|
||||
self.contact_emails = {
|
||||
self.contacts_billing.email,
|
||||
self.contacts_security.email,
|
||||
self.contacts_operations.email,
|
||||
}
|
||||
|
||||
def __get_contact_information__(self):
|
||||
try:
|
||||
@@ -53,10 +54,16 @@ class Account(AWSService):
|
||||
phone_number=primary_account_contact.get("PhoneNumber"),
|
||||
)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
return Contact(type="PRIMARY")
|
||||
if error.response["Error"]["Code"] == "AccessDeniedException":
|
||||
logger.error(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
return None
|
||||
else:
|
||||
logger.error(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
return Contact(type="PRIMARY")
|
||||
|
||||
def __get_alternate_contact__(self, contact_type: str):
|
||||
try:
|
||||
|
||||
@@ -21,7 +21,7 @@
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Monitor certificate expiration and take automated action to renew; replace or remove. Having shorter TTL for any security artifact is a general recommendation; but requires additional automation in place. If not longer required delete certificate. Use AWS config using the managed rule: acm-certificate-expiration-check.",
|
||||
"Text": "Monitor certificate expiration and take automated action to renew, replace or remove. Having shorter TTL for any security artifact is a general recommendation, but requires additional automation in place. If not longer required delete certificate. Use AWS config using the managed rule: acm-certificate-expiration-check.",
|
||||
"Url": "https://docs.aws.amazon.com/config/latest/developerguide/acm-certificate-expiration-check.html"
|
||||
}
|
||||
},
|
||||
|
||||
@@ -19,9 +19,9 @@
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "https://docs.bridgecrew.io/docs/public_6-api-gateway-authorizer-set#cloudformation",
|
||||
"NativeIaC": "https://docs.prowler.com/checks/aws/public-policies/public_6-api-gateway-authorizer-set#cloudformation",
|
||||
"Other": "",
|
||||
"Terraform": "https://docs.bridgecrew.io/docs/public_6-api-gateway-authorizer-set#terraform"
|
||||
"Terraform": "https://docs.prowler.com/checks/aws/public-policies/public_6-api-gateway-authorizer-set#terraform"
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Implement Amazon Cognito or a Lambda function to control access to your API.",
|
||||
|
||||
@@ -21,7 +21,7 @@
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "",
|
||||
"Terraform": "https://docs.bridgecrew.io/docs/ensure-api-gateway-stage-have-logging-level-defined-as-appropiate#terraform"
|
||||
"Terraform": "https://docs.prowler.com/checks/aws/logging-policies/ensure-api-gateway-stage-have-logging-level-defined-as-appropiate#terraform"
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Monitoring is an important part of maintaining the reliability, availability and performance of API Gateway and your AWS solutions. You should collect monitoring data from all of the parts of your AWS solution. CloudTrail provides a record of actions taken by a user, role, or an AWS service in API Gateway. Using the information collected by CloudTrail, you can determine the request that was made to API Gateway, the IP address from which the request was made, who made the request, etc.",
|
||||
|
||||
@@ -20,8 +20,8 @@
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "https://docs.bridgecrew.io/docs/bc_aws_logging_30#aws-console",
|
||||
"Terraform": "https://docs.bridgecrew.io/docs/bc_aws_logging_30#cloudformation"
|
||||
"Other": "https://docs.prowler.com/checks/aws/logging-policies/bc_aws_logging_30#aws-console",
|
||||
"Terraform": "https://docs.prowler.com/checks/aws/logging-policies/bc_aws_logging_30#cloudformation"
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Monitoring is an important part of maintaining the reliability, availability and performance of API Gateway and your AWS solutions. You should collect monitoring data from all of the parts of your AWS solution. CloudTrail provides a record of actions taken by a user, role, or an AWS service in API Gateway. Using the information collected by CloudTrail, you can determine the request that was made to API Gateway, the IP address from which the request was made, who made the request, etc.",
|
||||
|
||||
@@ -18,7 +18,7 @@
|
||||
"CLI": "aws athena update-work-group --region <REGION> --work-group <workgroup_name> --configuration-updates ResultConfigurationUpdates={EncryptionConfiguration={EncryptionOption=SSE_S3|SSE_KMS|CSE_KMS}}",
|
||||
"NativeIaC": "",
|
||||
"Other": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/aws/Athena/encryption-enabled.html",
|
||||
"Terraform": "https://docs.bridgecrew.io/docs/ensure-that-athena-workgroup-is-encrypted#terraform"
|
||||
"Terraform": "https://docs.prowler.com/checks/aws/general-policies/ensure-that-athena-workgroup-is-encrypted#terraform"
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Enable Encryption. Use a CMK where possible. It will provide additional management and privacy benefits.",
|
||||
|
||||
@@ -16,9 +16,9 @@
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "aws athena update-work-group --region <REGION> --work-group <workgroup_name> --configuration-updates EnforceWorkGroupConfiguration=True",
|
||||
"NativeIaC": "https://docs.bridgecrew.io/docs/bc_aws_general_33#cloudformation",
|
||||
"NativeIaC": "https://docs.prowler.com/checks/aws/general-policies/bc_aws_general_33#cloudformation",
|
||||
"Other": "",
|
||||
"Terraform": "https://docs.bridgecrew.io/docs/bc_aws_general_33#terraform"
|
||||
"Terraform": "https://docs.prowler.com/checks/aws/general-policies/bc_aws_general_33#terraform"
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Ensure that workgroup configuration is enforced so it cannot be overriden by client-side settings.",
|
||||
@@ -29,4 +29,4 @@
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": ""
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
"Severity": "low",
|
||||
"ResourceType": "AwsLambdaFunction",
|
||||
"Description": "Check if Lambda functions invoke API operations are being recorded by CloudTrail.",
|
||||
"Risk": "If logs are not enabled; monitoring of service use and threat analysis is not possible.",
|
||||
"Risk": "If logs are not enabled, monitoring of service use and threat analysis is not possible.",
|
||||
"RelatedUrl": "https://docs.aws.amazon.com/lambda/latest/dg/logging-using-cloudtrail.html",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
"Severity": "critical",
|
||||
"ResourceType": "AwsLambdaFunction",
|
||||
"Description": "Find secrets in Lambda functions code.",
|
||||
"Risk": "The use of a hard-coded password increases the possibility of password guessing. If hard-coded passwords are used; it is possible that malicious users gain access through the account in question.",
|
||||
"Risk": "The use of a hard-coded password increases the possibility of password guessing. If hard-coded passwords are used, it is possible that malicious users gain access through the account in question.",
|
||||
"RelatedUrl": "https://docs.aws.amazon.com/secretsmanager/latest/userguide/lambda-functions.html",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
|
||||
@@ -9,14 +9,14 @@
|
||||
"Severity": "critical",
|
||||
"ResourceType": "AwsLambdaFunction",
|
||||
"Description": "Find secrets in Lambda functions variables.",
|
||||
"Risk": "The use of a hard-coded password increases the possibility of password guessing. If hard-coded passwords are used; it is possible that malicious users gain access through the account in question.",
|
||||
"Risk": "The use of a hard-coded password increases the possibility of password guessing. If hard-coded passwords are used, it is possible that malicious users gain access through the account in question.",
|
||||
"RelatedUrl": "https://docs.aws.amazon.com/secretsmanager/latest/userguide/lambda-functions.html",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "https://docs.bridgecrew.io/docs/bc_aws_secrets_3#cli-command",
|
||||
"NativeIaC": "https://docs.bridgecrew.io/docs/bc_aws_secrets_3#cloudformation",
|
||||
"CLI": "https://docs.prowler.com/checks/aws/secrets-policies/bc_aws_secrets_3#cli-command",
|
||||
"NativeIaC": "https://docs.prowler.com/checks/aws/secrets-policies/bc_aws_secrets_3#cloudformation",
|
||||
"Other": "",
|
||||
"Terraform": "https://docs.bridgecrew.io/docs/bc_aws_secrets_3#terraform"
|
||||
"Terraform": "https://docs.prowler.com/checks/aws/secrets-policies/bc_aws_secrets_3#terraform"
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Use Secrets Manager to securely provide database credentials to Lambda functions and secure the databases as well as use the credentials to connect and query them without hardcoding the secrets in code or passing them through environmental variables.",
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
"Severity": "medium",
|
||||
"ResourceType": "AwsLambdaFunction",
|
||||
"Description": "Find obsolete Lambda runtimes.",
|
||||
"Risk": "If you have functions running on a runtime that will be deprecated in the next 60 days; Lambda notifies you by email that you should prepare by migrating your function to a supported runtime. In some cases; such as security issues that require a backwards-incompatible update; or software that does not support a long-term support (LTS) schedule; advance notice might not be possible. After a runtime is deprecated; Lambda might retire it completely at any time by disabling invocation. Deprecated runtimes are not eligible for security updates or technical support.",
|
||||
"Risk": "If you have functions running on a runtime that will be deprecated in the next 60 days, Lambda notifies you by email that you should prepare by migrating your function to a supported runtime. In some cases, such as security issues that require a backwards-incompatible update, or software that does not support a long-term support (LTS) schedule, advance notice might not be possible. After a runtime is deprecated, Lambda might retire it completely at any time by disabling invocation. Deprecated runtimes are not eligible for security updates or technical support.",
|
||||
"RelatedUrl": "https://docs.aws.amazon.com/lambda/latest/dg/runtime-support-policy.html",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from botocore.client import ClientError
|
||||
from pydantic import BaseModel
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
@@ -37,6 +38,8 @@ class Backup(AWSService):
|
||||
self.audit_resources,
|
||||
)
|
||||
):
|
||||
if self.backup_vaults is None:
|
||||
self.backup_vaults = []
|
||||
self.backup_vaults.append(
|
||||
BackupVault(
|
||||
arn=configuration.get("BackupVaultArn"),
|
||||
@@ -55,7 +58,13 @@ class Backup(AWSService):
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
except ClientError as error:
|
||||
logger.error(
|
||||
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
if error.response["Error"]["Code"] == "AccessDeniedException":
|
||||
if not self.backup_vaults:
|
||||
self.backup_vaults = None
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
|
||||
@@ -5,24 +5,24 @@ from prowler.providers.aws.services.backup.backup_client import backup_client
|
||||
class backup_vaults_encrypted(Check):
|
||||
def execute(self):
|
||||
findings = []
|
||||
|
||||
for backup_vault in backup_client.backup_vaults:
|
||||
# By default we assume that the result is fail
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
f"Backup Vault {backup_vault.name} is not encrypted."
|
||||
)
|
||||
report.resource_arn = backup_vault.arn
|
||||
report.resource_id = backup_vault.name
|
||||
report.region = backup_vault.region
|
||||
# if it is encrypted we only change the status and the status extended
|
||||
if backup_vault.encryption:
|
||||
report.status = "PASS"
|
||||
if backup_client.backup_vaults:
|
||||
for backup_vault in backup_client.backup_vaults:
|
||||
# By default we assume that the result is fail
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
f"Backup Vault {backup_vault.name} is encrypted."
|
||||
f"Backup Vault {backup_vault.name} is not encrypted."
|
||||
)
|
||||
# then we store the finding
|
||||
findings.append(report)
|
||||
report.resource_arn = backup_vault.arn
|
||||
report.resource_id = backup_vault.name
|
||||
report.region = backup_vault.region
|
||||
# if it is encrypted we only change the status and the status extended
|
||||
if backup_vault.encryption:
|
||||
report.status = "PASS"
|
||||
report.status_extended = (
|
||||
f"Backup Vault {backup_vault.name} is encrypted."
|
||||
)
|
||||
# then we store the finding
|
||||
findings.append(report)
|
||||
|
||||
return findings
|
||||
|
||||
@@ -5,18 +5,19 @@ from prowler.providers.aws.services.backup.backup_client import backup_client
|
||||
class backup_vaults_exist(Check):
|
||||
def execute(self):
|
||||
findings = []
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = "No Backup Vault exist."
|
||||
report.resource_arn = backup_client.backup_vault_arn_template
|
||||
report.resource_id = backup_client.audited_account
|
||||
report.region = backup_client.region
|
||||
if backup_client.backup_vaults:
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"At least one backup vault exists: {backup_client.backup_vaults[0].name}."
|
||||
report.resource_arn = backup_client.backup_vaults[0].arn
|
||||
report.resource_id = backup_client.backup_vaults[0].name
|
||||
report.region = backup_client.backup_vaults[0].region
|
||||
if backup_client.backup_vaults is not None:
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = "No Backup Vault exist."
|
||||
report.resource_arn = backup_client.backup_vault_arn_template
|
||||
report.resource_id = backup_client.audited_account
|
||||
report.region = backup_client.region
|
||||
if backup_client.backup_vaults:
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"At least one backup vault exists: {backup_client.backup_vaults[0].name}."
|
||||
report.resource_arn = backup_client.backup_vaults[0].arn
|
||||
report.resource_id = backup_client.backup_vaults[0].name
|
||||
report.region = backup_client.backup_vaults[0].region
|
||||
|
||||
findings.append(report)
|
||||
findings.append(report)
|
||||
return findings
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
"RelatedUrl": "https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-secretsmanager-secret-generatesecretstring.html",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "https://docs.bridgecrew.io/docs/bc_aws_secrets_2#cli-command",
|
||||
"CLI": "https://docs.prowler.com/checks/aws/secrets-policies/bc_aws_secrets_2#cli-command",
|
||||
"NativeIaC": "",
|
||||
"Other": "",
|
||||
"Terraform": ""
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
"Severity": "medium",
|
||||
"ResourceType": "AwsCloudFormationStack",
|
||||
"Description": "Enable termination protection for Cloudformation Stacks",
|
||||
"Risk": "Without termination protection enabled; a critical cloudformation stack can be accidently deleted.",
|
||||
"Risk": "Without termination protection enabled, a critical cloudformation stack can be accidently deleted.",
|
||||
"RelatedUrl": "https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/using-cfn-protect-stacks.html",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
"Severity": "low",
|
||||
"ResourceType": "AwsCloudFrontDistribution",
|
||||
"Description": "Check if Geo restrictions are enabled in CloudFront distributions.",
|
||||
"Risk": "Consider countries where service should not be accessed; by legal or compliance requirements. Additionally if not restricted the attack vector is increased.",
|
||||
"Risk": "Consider countries where service should not be accessed, by legal or compliance requirements. Additionally if not restricted the attack vector is increased.",
|
||||
"RelatedUrl": "https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/georestrictions.html",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
@@ -19,7 +19,7 @@
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "If possible; define and enable Geo restrictions for this service.",
|
||||
"Text": "If possible, define and enable Geo restrictions for this service.",
|
||||
"Url": "https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/georestrictions.html"
|
||||
}
|
||||
},
|
||||
|
||||
@@ -14,9 +14,9 @@
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/aws/CloudFront/security-policy.html",
|
||||
"NativeIaC": "https://docs.bridgecrew.io/docs/networking_32#cloudformation",
|
||||
"NativeIaC": "https://docs.prowler.com/checks/aws/networking-policies/networking_32#cloudformation",
|
||||
"Other": "",
|
||||
"Terraform": "https://docs.bridgecrew.io/docs/networking_32#terraform"
|
||||
"Terraform": "https://docs.prowler.com/checks/aws/networking-policies/networking_32#terraform"
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Use HTTPS everywhere possible. It will enforce privacy and protect against account hijacking and other threats.",
|
||||
|
||||
@@ -13,10 +13,10 @@
|
||||
"RelatedUrl": "https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/AccessLogs.html",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "https://docs.bridgecrew.io/docs/logging_20#cli-command",
|
||||
"NativeIaC": "https://docs.bridgecrew.io/docs/logging_20#cloudformation",
|
||||
"CLI": "https://docs.prowler.com/checks/aws/logging-policies/logging_20#cli-command",
|
||||
"NativeIaC": "https://docs.prowler.com/checks/aws/logging-policies/logging_20#cloudformation",
|
||||
"Other": "",
|
||||
"Terraform": "https://docs.bridgecrew.io/docs/logging_20#terraform"
|
||||
"Terraform": "https://docs.prowler.com/checks/aws/logging-policies/logging_20#terraform"
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Real-time monitoring can be achieved by directing CloudTrail Logs to CloudWatch Logs and establishing corresponding metric filters and alarms. Enable logging for services with defined log rotation. These logs are useful for Incident Response and forensics investigation among other use cases.",
|
||||
|
||||
@@ -13,9 +13,9 @@
"RelatedUrl": "https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/secure-connections-supported-viewer-protocols-ciphers.html",
"Remediation": {
"Code": {
"CLI": "https://docs.bridgecrew.io/docs/networking_33#cli-command",
"CLI": "https://docs.prowler.com/checks/aws/networking-policies/networking_33#cli-command",
"NativeIaC": "",
"Other": "https://docs.bridgecrew.io/docs/networking_33#aws-cloudfront-console",
"Other": "https://docs.prowler.com/checks/aws/networking-policies/networking_33#aws-cloudfront-console",
"Terraform": ""
},
"Recommendation": {

@@ -11,17 +11,17 @@
"Severity": "medium",
"ResourceType": "AwsCloudFrontDistribution",
"Description": "Check if CloudFront distributions are using WAF.",
"Risk": "Potential attacks and / or abuse of service; more even for even for internet reachable services.",
"Risk": "Potential attacks and / or abuse of service, more even for even for internet reachable services.",
"RelatedUrl": "https://docs.aws.amazon.com/waf/latest/developerguide/cloudfront-features.html",
"Remediation": {
"Code": {
"CLI": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/aws/CloudFront/cloudfront-integrated-with-waf.html",
"NativeIaC": "https://docs.bridgecrew.io/docs/bc_aws_general_27#cloudformation",
"Other": "https://docs.bridgecrew.io/docs/bc_aws_general_27#cloudfront-console",
"Terraform": "https://docs.bridgecrew.io/docs/bc_aws_general_27#terraform"
"NativeIaC": "https://docs.prowler.com/checks/aws/general-policies/bc_aws_general_27#cloudformation",
"Other": "https://docs.prowler.com/checks/aws/general-policies/bc_aws_general_27#cloudfront-console",
"Terraform": "https://docs.prowler.com/checks/aws/general-policies/bc_aws_general_27#terraform"
},
"Recommendation": {
"Text": "Use AWS WAF to protect your service from common web exploits. These could affect availability and performance; compromise security; or consume excessive resources.",
"Text": "Use AWS WAF to protect your service from common web exploits. These could affect availability and performance, compromise security, or consume excessive resources.",
"Url": "https://docs.aws.amazon.com/waf/latest/developerguide/cloudfront-features.html"
}
},

@@ -8,28 +8,29 @@ from prowler.providers.aws.services.s3.s3_client import s3_client
class cloudtrail_bucket_requires_mfa_delete(Check):
def execute(self):
findings = []
for trail in cloudtrail_client.trails.values():
if trail.is_logging:
trail_bucket_is_in_account = False
trail_bucket = trail.s3_bucket
report = Check_Report_AWS(self.metadata())
report.region = trail.region
report.resource_id = trail.name
report.resource_arn = trail.arn
report.resource_tags = trail.tags
report.status = "FAIL"
report.status_extended = f"Trail {trail.name} bucket ({trail_bucket}) does not have MFA delete enabled."
for bucket in s3_client.buckets:
if trail_bucket == bucket.name:
trail_bucket_is_in_account = True
if bucket.mfa_delete:
report.status = "PASS"
report.status_extended = f"Trail {trail.name} bucket ({trail_bucket}) has MFA delete enabled."
# check if trail bucket is a cross account bucket
if not trail_bucket_is_in_account:
report.status = "INFO"
report.status_extended = f"Trail {trail.name} bucket ({trail_bucket}) is a cross-account bucket in another account out of Prowler's permissions scope, please check it manually."
if cloudtrail_client.trails is not None:
for trail in cloudtrail_client.trails.values():
if trail.is_logging:
trail_bucket_is_in_account = False
trail_bucket = trail.s3_bucket
report = Check_Report_AWS(self.metadata())
report.region = trail.region
report.resource_id = trail.name
report.resource_arn = trail.arn
report.resource_tags = trail.tags
report.status = "FAIL"
report.status_extended = f"Trail {trail.name} bucket ({trail_bucket}) does not have MFA delete enabled."
for bucket in s3_client.buckets:
if trail_bucket == bucket.name:
trail_bucket_is_in_account = True
if bucket.mfa_delete:
report.status = "PASS"
report.status_extended = f"Trail {trail.name} bucket ({trail_bucket}) has MFA delete enabled."
# check if trail bucket is a cross account bucket
if not trail_bucket_is_in_account:
report.status = "INFO"
report.status_extended = f"Trail {trail.name} bucket ({trail_bucket}) is a cross-account bucket in another account out of Prowler's permissions scope, please check it manually."

findings.append(report)
findings.append(report)

return findings

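The hunk above is the pattern repeated across the CloudTrail checks in this changeset: cloudtrail_client.trails can now be None (the service sets it to None when ListTrails is denied, see the cloudtrail_service.py hunk further down), so each check wraps its loop in a guard. A minimal sketch of that pattern, assuming the usual Prowler check imports; example_cloudtrail_check is a placeholder name, not a real check:

from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
    cloudtrail_client,
)


class example_cloudtrail_check(Check):
    def execute(self):
        findings = []
        # trails is None when ListTrails was denied, so skip instead of
        # raising AttributeError on None.values()
        if cloudtrail_client.trails is not None:
            for trail in cloudtrail_client.trails.values():
                # placeholder trails created for empty regions have no name
                if trail.name:
                    report = Check_Report_AWS(self.metadata())
                    report.region = trail.region
                    report.resource_id = trail.name
                    report.resource_arn = trail.arn
                    report.resource_tags = trail.tags
                    findings.append(report)
        return findings
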
@@ -13,13 +13,13 @@
"Severity": "low",
"ResourceType": "AwsCloudTrailTrail",
"Description": "Ensure CloudTrail trails are integrated with CloudWatch Logs",
"Risk": "Sending CloudTrail logs to CloudWatch Logs will facilitate real-time and historic activity logging based on user; API; resource; and IP address; and provides opportunity to establish alarms and notifications for anomalous or sensitivity account activity.",
"Risk": "Sending CloudTrail logs to CloudWatch Logs will facilitate real-time and historic activity logging based on user, API, resource, and IP address, and provides opportunity to establish alarms and notifications for anomalous or sensitivity account activity.",
"RelatedUrl": "",
"Remediation": {
"Code": {
"CLI": "aws cloudtrail update-trail --name <trail_name> --cloudwatch-logs-log-group- arn <cloudtrail_log_group_arn> --cloudwatch-logs-role-arn <cloudtrail_cloudwatchLogs_role_arn>",
"NativeIaC": "",
"Other": "https://docs.bridgecrew.io/docs/logging_4#aws-console",
"Other": "https://docs.prowler.com/checks/aws/logging-policies/logging_4#aws-console",
"Terraform": ""
},
"Recommendation": {

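For reference, a hedged boto3 sketch of the same remediation the CLI string above describes (delivering a trail's events to CloudWatch Logs); the trail name, log group ARN and role ARN are illustrative placeholders, not values from this repository:

import boto3

cloudtrail = boto3.client("cloudtrail")

# Placeholders: substitute your own trail name, log group ARN and IAM role ARN
cloudtrail.update_trail(
    Name="my-trail",
    CloudWatchLogsLogGroupArn="arn:aws:logs:eu-west-1:123456789012:log-group:cloudtrail-logs:*",
    CloudWatchLogsRoleArn="arn:aws:iam::123456789012:role/CloudTrail_CloudWatchLogs_Role",
)
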
@@ -11,37 +11,38 @@ maximum_time_without_logging = 1
|
||||
class cloudtrail_cloudwatch_logging_enabled(Check):
|
||||
def execute(self):
|
||||
findings = []
|
||||
for trail in cloudtrail_client.trails.values():
|
||||
if trail.name:
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = trail.region
|
||||
report.resource_id = trail.name
|
||||
report.resource_arn = trail.arn
|
||||
report.resource_tags = trail.tags
|
||||
report.status = "PASS"
|
||||
if trail.is_multiregion:
|
||||
report.status_extended = (
|
||||
f"Multiregion trail {trail.name} has been logging the last 24h."
|
||||
)
|
||||
else:
|
||||
report.status_extended = f"Single region trail {trail.name} has been logging the last 24h."
|
||||
if trail.latest_cloudwatch_delivery_time:
|
||||
last_log_delivery = (
|
||||
datetime.now().replace(tzinfo=timezone.utc)
|
||||
- trail.latest_cloudwatch_delivery_time
|
||||
)
|
||||
if last_log_delivery > timedelta(days=maximum_time_without_logging):
|
||||
if cloudtrail_client.trails is not None:
|
||||
for trail in cloudtrail_client.trails.values():
|
||||
if trail.name:
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = trail.region
|
||||
report.resource_id = trail.name
|
||||
report.resource_arn = trail.arn
|
||||
report.resource_tags = trail.tags
|
||||
report.status = "PASS"
|
||||
if trail.is_multiregion:
|
||||
report.status_extended = f"Multiregion trail {trail.name} has been logging the last 24h."
|
||||
else:
|
||||
report.status_extended = f"Single region trail {trail.name} has been logging the last 24h."
|
||||
if trail.latest_cloudwatch_delivery_time:
|
||||
last_log_delivery = (
|
||||
datetime.now().replace(tzinfo=timezone.utc)
|
||||
- trail.latest_cloudwatch_delivery_time
|
||||
)
|
||||
if last_log_delivery > timedelta(
|
||||
days=maximum_time_without_logging
|
||||
):
|
||||
report.status = "FAIL"
|
||||
if trail.is_multiregion:
|
||||
report.status_extended = f"Multiregion trail {trail.name} is not logging in the last 24h."
|
||||
else:
|
||||
report.status_extended = f"Single region trail {trail.name} is not logging in the last 24h."
|
||||
else:
|
||||
report.status = "FAIL"
|
||||
if trail.is_multiregion:
|
||||
report.status_extended = f"Multiregion trail {trail.name} is not logging in the last 24h."
|
||||
report.status_extended = f"Multiregion trail {trail.name} is not logging in the last 24h or not configured to deliver logs."
|
||||
else:
|
||||
report.status_extended = f"Single region trail {trail.name} is not logging in the last 24h."
|
||||
else:
|
||||
report.status = "FAIL"
|
||||
if trail.is_multiregion:
|
||||
report.status_extended = f"Multiregion trail {trail.name} is not logging in the last 24h or not configured to deliver logs."
|
||||
else:
|
||||
report.status_extended = f"Single region trail {trail.name} is not logging in the last 24h or not configured to deliver logs."
|
||||
findings.append(report)
|
||||
report.status_extended = f"Single region trail {trail.name} is not logging in the last 24h or not configured to deliver logs."
|
||||
findings.append(report)
|
||||
|
||||
return findings
|
||||
|
||||
@@ -7,19 +7,18 @@ from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
class cloudtrail_insights_exist(Check):
def execute(self):
findings = []
for trail in cloudtrail_client.trails.values():
if trail.is_logging:
report = Check_Report_AWS(self.metadata())
report.region = trail.region
report.resource_id = trail.name
report.resource_arn = trail.arn
report.resource_tags = trail.tags
report.status = "FAIL"
report.status_extended = f"Trail {trail.name} does not have insight selectors and it is logging."
if trail.has_insight_selectors:
report.status = "PASS"
report.status_extended = (
f"Trail {trail.name} has insight selectors and it is logging."
)
findings.append(report)
if cloudtrail_client.trails is not None:
for trail in cloudtrail_client.trails.values():
if trail.is_logging:
report = Check_Report_AWS(self.metadata())
report.region = trail.region
report.resource_id = trail.name
report.resource_arn = trail.arn
report.resource_tags = trail.tags
report.status = "FAIL"
report.status_extended = f"Trail {trail.name} does not have insight selectors and it is logging."
if trail.has_insight_selectors:
report.status = "PASS"
report.status_extended = f"Trail {trail.name} has insight selectors and it is logging."
findings.append(report)
return findings

@@ -13,12 +13,12 @@
"Severity": "medium",
"ResourceType": "AwsCloudTrailTrail",
"Description": "Ensure CloudTrail logs are encrypted at rest using KMS CMKs",
"Risk": "By default; the log files delivered by CloudTrail to your bucket are encrypted by Amazon server-side encryption with Amazon S3-managed encryption keys (SSE-S3). To provide a security layer that is directly manageable; you can instead use server-side encryption with AWS KMS–managed keys (SSE-KMS) for your CloudTrail log files.",
"Risk": "By default, the log files delivered by CloudTrail to your bucket are encrypted by Amazon server-side encryption with Amazon S3-managed encryption keys (SSE-S3). To provide a security layer that is directly manageable, you can instead use server-side encryption with AWS KMS–managed keys (SSE-KMS) for your CloudTrail log files.",
"RelatedUrl": "",
"Remediation": {
"Code": {
"CLI": "aws cloudtrail update-trail --name <trail_name> --kms-id <cloudtrail_kms_key> aws kms put-key-policy --key-id <cloudtrail_kms_key> --policy <cloudtrail_kms_key_policy>",
"NativeIaC": "https://docs.bridgecrew.io/docs/logging_7#fix---buildtime",
"NativeIaC": "https://docs.prowler.com/checks/aws/logging-policies/logging_7#fix---buildtime",
"Other": "",
"Terraform": ""
},

@@ -7,32 +7,29 @@ from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
|
||||
class cloudtrail_kms_encryption_enabled(Check):
|
||||
def execute(self):
|
||||
findings = []
|
||||
for trail in cloudtrail_client.trails.values():
|
||||
if trail.name:
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = trail.region
|
||||
report.resource_id = trail.name
|
||||
report.resource_arn = trail.arn
|
||||
report.resource_tags = trail.tags
|
||||
report.status = "FAIL"
|
||||
if trail.is_multiregion:
|
||||
report.status_extended = (
|
||||
f"Multiregion trail {trail.name} has encryption disabled."
|
||||
)
|
||||
else:
|
||||
report.status_extended = (
|
||||
f"Single region trail {trail.name} has encryption disabled."
|
||||
)
|
||||
if trail.kms_key:
|
||||
report.status = "PASS"
|
||||
if cloudtrail_client.trails is not None:
|
||||
for trail in cloudtrail_client.trails.values():
|
||||
if trail.name:
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = trail.region
|
||||
report.resource_id = trail.name
|
||||
report.resource_arn = trail.arn
|
||||
report.resource_tags = trail.tags
|
||||
report.status = "FAIL"
|
||||
if trail.is_multiregion:
|
||||
report.status_extended = (
|
||||
f"Multiregion trail {trail.name} has encryption enabled."
|
||||
f"Multiregion trail {trail.name} has encryption disabled."
|
||||
)
|
||||
else:
|
||||
report.status_extended = (
|
||||
f"Single region trail {trail.name} has encryption enabled."
|
||||
f"Single region trail {trail.name} has encryption disabled."
|
||||
)
|
||||
findings.append(report)
|
||||
if trail.kms_key:
|
||||
report.status = "PASS"
|
||||
if trail.is_multiregion:
|
||||
report.status_extended = f"Multiregion trail {trail.name} has encryption enabled."
|
||||
else:
|
||||
report.status_extended = f"Single region trail {trail.name} has encryption enabled."
|
||||
findings.append(report)
|
||||
|
||||
return findings
|
||||
|
||||
@@ -18,9 +18,9 @@
"Remediation": {
"Code": {
"CLI": "aws cloudtrail update-trail --name <trail_name> --enable-log-file-validation",
"NativeIaC": "https://docs.bridgecrew.io/docs/logging_2#cloudformation",
"NativeIaC": "https://docs.prowler.com/checks/aws/logging-policies/logging_2#cloudformation",
"Other": "",
"Terraform": "https://docs.bridgecrew.io/docs/logging_2#terraform"
"Terraform": "https://docs.prowler.com/checks/aws/logging-policies/logging_2#terraform"
},
"Recommendation": {
"Text": "Ensure LogFileValidationEnabled is set to true for each trail.",

@@ -7,26 +7,25 @@ from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
|
||||
class cloudtrail_log_file_validation_enabled(Check):
|
||||
def execute(self):
|
||||
findings = []
|
||||
for trail in cloudtrail_client.trails.values():
|
||||
if trail.name:
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = trail.region
|
||||
report.resource_id = trail.name
|
||||
report.resource_arn = trail.arn
|
||||
report.resource_tags = trail.tags
|
||||
report.status = "FAIL"
|
||||
if trail.is_multiregion:
|
||||
report.status_extended = (
|
||||
f"Multiregion trail {trail.name} log file validation disabled."
|
||||
)
|
||||
else:
|
||||
report.status_extended = f"Single region trail {trail.name} log file validation disabled."
|
||||
if trail.log_file_validation_enabled:
|
||||
report.status = "PASS"
|
||||
if cloudtrail_client.trails is not None:
|
||||
for trail in cloudtrail_client.trails.values():
|
||||
if trail.name:
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = trail.region
|
||||
report.resource_id = trail.name
|
||||
report.resource_arn = trail.arn
|
||||
report.resource_tags = trail.tags
|
||||
report.status = "FAIL"
|
||||
if trail.is_multiregion:
|
||||
report.status_extended = f"Multiregion trail {trail.name} log file validation enabled."
|
||||
report.status_extended = f"Multiregion trail {trail.name} log file validation disabled."
|
||||
else:
|
||||
report.status_extended = f"Single region trail {trail.name} log file validation enabled."
|
||||
findings.append(report)
|
||||
report.status_extended = f"Single region trail {trail.name} log file validation disabled."
|
||||
if trail.log_file_validation_enabled:
|
||||
report.status = "PASS"
|
||||
if trail.is_multiregion:
|
||||
report.status_extended = f"Multiregion trail {trail.name} log file validation enabled."
|
||||
else:
|
||||
report.status_extended = f"Single region trail {trail.name} log file validation enabled."
|
||||
findings.append(report)
|
||||
|
||||
return findings
|
||||
|
||||
@@ -13,17 +13,17 @@
"Severity": "medium",
"ResourceType": "AwsCloudTrailTrail",
"Description": "Ensure S3 bucket access logging is enabled on the CloudTrail S3 bucket",
"Risk": "Server access logs can assist you in security and access audits; help you learn about your customer base; and understand your Amazon S3 bill.",
"Risk": "Server access logs can assist you in security and access audits, help you learn about your customer base, and understand your Amazon S3 bill.",
"RelatedUrl": "",
"Remediation": {
"Code": {
"CLI": "",
"NativeIaC": "",
"Other": "https://docs.bridgecrew.io/docs/logging_6#aws-console",
"Other": "https://docs.prowler.com/checks/aws/logging-policies/logging_6#aws-console",
"Terraform": ""
},
"Recommendation": {
"Text": "Ensure that S3 buckets have Logging enabled. CloudTrail data events can be used in place of S3 bucket logging. If that is the case; this finding can be considered a false positive.",
"Text": "Ensure that S3 buckets have Logging enabled. CloudTrail data events can be used in place of S3 bucket logging. If that is the case, this finding can be considered a false positive.",
"Url": "https://docs.aws.amazon.com/AmazonS3/latest/dev/security-best-practices.html"
}
},

@@ -8,35 +8,36 @@ from prowler.providers.aws.services.s3.s3_client import s3_client
|
||||
class cloudtrail_logs_s3_bucket_access_logging_enabled(Check):
|
||||
def execute(self):
|
||||
findings = []
|
||||
for trail in cloudtrail_client.trails.values():
|
||||
if trail.name:
|
||||
trail_bucket_is_in_account = False
|
||||
trail_bucket = trail.s3_bucket
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = trail.region
|
||||
report.resource_id = trail.name
|
||||
report.resource_arn = trail.arn
|
||||
report.resource_tags = trail.tags
|
||||
report.status = "FAIL"
|
||||
if trail.is_multiregion:
|
||||
report.status_extended = f"Multiregion Trail {trail.name} S3 bucket access logging is not enabled for bucket {trail_bucket}."
|
||||
else:
|
||||
report.status_extended = f"Single region Trail {trail.name} S3 bucket access logging is not enabled for bucket {trail_bucket}."
|
||||
for bucket in s3_client.buckets:
|
||||
if trail_bucket == bucket.name:
|
||||
trail_bucket_is_in_account = True
|
||||
if bucket.logging:
|
||||
report.status = "PASS"
|
||||
if trail.is_multiregion:
|
||||
report.status_extended = f"Multiregion trail {trail.name} S3 bucket access logging is enabled for bucket {trail_bucket}."
|
||||
else:
|
||||
report.status_extended = f"Single region trail {trail.name} S3 bucket access logging is enabled for bucket {trail_bucket}."
|
||||
break
|
||||
if cloudtrail_client.trails is not None:
|
||||
for trail in cloudtrail_client.trails.values():
|
||||
if trail.name:
|
||||
trail_bucket_is_in_account = False
|
||||
trail_bucket = trail.s3_bucket
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = trail.region
|
||||
report.resource_id = trail.name
|
||||
report.resource_arn = trail.arn
|
||||
report.resource_tags = trail.tags
|
||||
report.status = "FAIL"
|
||||
if trail.is_multiregion:
|
||||
report.status_extended = f"Multiregion Trail {trail.name} S3 bucket access logging is not enabled for bucket {trail_bucket}."
|
||||
else:
|
||||
report.status_extended = f"Single region Trail {trail.name} S3 bucket access logging is not enabled for bucket {trail_bucket}."
|
||||
for bucket in s3_client.buckets:
|
||||
if trail_bucket == bucket.name:
|
||||
trail_bucket_is_in_account = True
|
||||
if bucket.logging:
|
||||
report.status = "PASS"
|
||||
if trail.is_multiregion:
|
||||
report.status_extended = f"Multiregion trail {trail.name} S3 bucket access logging is enabled for bucket {trail_bucket}."
|
||||
else:
|
||||
report.status_extended = f"Single region trail {trail.name} S3 bucket access logging is enabled for bucket {trail_bucket}."
|
||||
break
|
||||
|
||||
# check if trail is delivering logs in a cross account bucket
|
||||
if not trail_bucket_is_in_account:
|
||||
report.status = "INFO"
|
||||
report.status_extended = f"Trail {trail.name} is delivering logs in a cross-account bucket {trail_bucket} in another account out of Prowler's permissions scope, please check it manually."
|
||||
findings.append(report)
|
||||
# check if trail is delivering logs in a cross account bucket
|
||||
if not trail_bucket_is_in_account:
|
||||
report.status = "INFO"
|
||||
report.status_extended = f"Trail {trail.name} is delivering logs in a cross-account bucket {trail_bucket} in another account out of Prowler's permissions scope, please check it manually."
|
||||
findings.append(report)
|
||||
|
||||
return findings
|
||||
|
||||
@@ -19,7 +19,7 @@
"Code": {
"CLI": "",
"NativeIaC": "",
"Other": "https://docs.bridgecrew.io/docs/logging_3#aws-console",
"Other": "https://docs.prowler.com/checks/aws/logging-policies/logging_3#aws-console",
"Terraform": ""
},
"Recommendation": {

@@ -8,41 +8,42 @@ from prowler.providers.aws.services.s3.s3_client import s3_client
|
||||
class cloudtrail_logs_s3_bucket_is_not_publicly_accessible(Check):
|
||||
def execute(self):
|
||||
findings = []
|
||||
for trail in cloudtrail_client.trails.values():
|
||||
if trail.name:
|
||||
trail_bucket_is_in_account = False
|
||||
trail_bucket = trail.s3_bucket
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = trail.region
|
||||
report.resource_id = trail.name
|
||||
report.resource_arn = trail.arn
|
||||
report.resource_tags = trail.tags
|
||||
report.status = "PASS"
|
||||
if trail.is_multiregion:
|
||||
report.status_extended = f"S3 Bucket {trail_bucket} from multiregion trail {trail.name} is not publicly accessible."
|
||||
else:
|
||||
report.status_extended = f"S3 Bucket {trail_bucket} from single region trail {trail.name} is not publicly accessible."
|
||||
for bucket in s3_client.buckets:
|
||||
# Here we need to ensure that acl_grantee is filled since if we don't have permissions to query the api for a concrete region
|
||||
# (for example due to a SCP) we are going to try access an attribute from a None type
|
||||
if trail_bucket == bucket.name:
|
||||
trail_bucket_is_in_account = True
|
||||
if bucket.acl_grantees:
|
||||
for grant in bucket.acl_grantees:
|
||||
if (
|
||||
grant.URI
|
||||
== "http://acs.amazonaws.com/groups/global/AllUsers"
|
||||
):
|
||||
report.status = "FAIL"
|
||||
if trail.is_multiregion:
|
||||
report.status_extended = f"S3 Bucket {trail_bucket} from multiregion trail {trail.name} is publicly accessible."
|
||||
else:
|
||||
report.status_extended = f"S3 Bucket {trail_bucket} from single region trail {trail.name} is publicly accessible."
|
||||
break
|
||||
# check if trail bucket is a cross account bucket
|
||||
if not trail_bucket_is_in_account:
|
||||
report.status = "INFO"
|
||||
report.status_extended = f"Trail {trail.name} bucket ({trail_bucket}) is a cross-account bucket in another account out of Prowler's permissions scope, please check it manually."
|
||||
findings.append(report)
|
||||
if cloudtrail_client.trails is not None:
|
||||
for trail in cloudtrail_client.trails.values():
|
||||
if trail.name:
|
||||
trail_bucket_is_in_account = False
|
||||
trail_bucket = trail.s3_bucket
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = trail.region
|
||||
report.resource_id = trail.name
|
||||
report.resource_arn = trail.arn
|
||||
report.resource_tags = trail.tags
|
||||
report.status = "PASS"
|
||||
if trail.is_multiregion:
|
||||
report.status_extended = f"S3 Bucket {trail_bucket} from multiregion trail {trail.name} is not publicly accessible."
|
||||
else:
|
||||
report.status_extended = f"S3 Bucket {trail_bucket} from single region trail {trail.name} is not publicly accessible."
|
||||
for bucket in s3_client.buckets:
|
||||
# Here we need to ensure that acl_grantee is filled since if we don't have permissions to query the api for a concrete region
|
||||
# (for example due to a SCP) we are going to try access an attribute from a None type
|
||||
if trail_bucket == bucket.name:
|
||||
trail_bucket_is_in_account = True
|
||||
if bucket.acl_grantees:
|
||||
for grant in bucket.acl_grantees:
|
||||
if (
|
||||
grant.URI
|
||||
== "http://acs.amazonaws.com/groups/global/AllUsers"
|
||||
):
|
||||
report.status = "FAIL"
|
||||
if trail.is_multiregion:
|
||||
report.status_extended = f"S3 Bucket {trail_bucket} from multiregion trail {trail.name} is publicly accessible."
|
||||
else:
|
||||
report.status_extended = f"S3 Bucket {trail_bucket} from single region trail {trail.name} is publicly accessible."
|
||||
break
|
||||
# check if trail bucket is a cross account bucket
|
||||
if not trail_bucket_is_in_account:
|
||||
report.status = "INFO"
|
||||
report.status_extended = f"Trail {trail.name} bucket ({trail_bucket}) is a cross-account bucket in another account out of Prowler's permissions scope, please check it manually."
|
||||
findings.append(report)
|
||||
|
||||
return findings
|
||||
|
||||
@@ -13,14 +13,14 @@
"Severity": "high",
"ResourceType": "AwsCloudTrailTrail",
"Description": "Ensure CloudTrail is enabled in all regions",
"Risk": "AWS CloudTrail is a web service that records AWS API calls for your account and delivers log files to you. The recorded information includes the identity of the API caller; the time of the API call; the source IP address of the API caller; the request parameters; and the response elements returned by the AWS service.",
"Risk": "AWS CloudTrail is a web service that records AWS API calls for your account and delivers log files to you. The recorded information includes the identity of the API caller, the time of the API call, the source IP address of the API caller, the request parameters, and the response elements returned by the AWS service.",
"RelatedUrl": "",
"Remediation": {
"Code": {
"CLI": "aws cloudtrail create-trail --name <trail_name> --bucket-name <s3_bucket_for_cloudtrail> --is-multi-region-trail aws cloudtrail update-trail --name <trail_name> --is-multi-region-trail ",
"NativeIaC": "https://docs.bridgecrew.io/docs/logging_1#cloudformation",
"Other": "https://docs.bridgecrew.io/docs/logging_1#aws-console",
"Terraform": "https://docs.bridgecrew.io/docs/logging_1#terraform"
"NativeIaC": "https://docs.prowler.com/checks/aws/logging-policies/logging_1#cloudformation",
"Other": "https://docs.prowler.com/checks/aws/logging-policies/logging_1#aws-console",
"Terraform": "https://docs.prowler.com/checks/aws/logging-policies/logging_1#terraform"
},
"Recommendation": {
"Text": "Ensure Logging is set to ON on all regions (even if they are not being used at the moment.",

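A hedged boto3 equivalent of the remediation CLI above (create a multi-region trail and make sure it is logging); the trail and bucket names are illustrative, and the bucket must already exist with a CloudTrail bucket policy:

import boto3

cloudtrail = boto3.client("cloudtrail")

cloudtrail.create_trail(
    Name="management-events-trail",            # illustrative name
    S3BucketName="my-cloudtrail-logs-bucket",  # must already exist
    IsMultiRegionTrail=True,
)
# "Ensure Logging is set to ON on all regions": a multi-region trail only
# needs StartLogging once, from its home region
cloudtrail.start_logging(Name="management-events-trail")
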
@@ -7,36 +7,35 @@ from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
|
||||
class cloudtrail_multi_region_enabled(Check):
|
||||
def execute(self):
|
||||
findings = []
|
||||
for region in cloudtrail_client.regional_clients.keys():
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = region
|
||||
for trail in cloudtrail_client.trails.values():
|
||||
if trail.region == region or trail.is_multiregion:
|
||||
if trail.is_logging:
|
||||
report.status = "PASS"
|
||||
report.resource_id = trail.name
|
||||
report.resource_arn = trail.arn
|
||||
report.resource_tags = trail.tags
|
||||
if trail.is_multiregion:
|
||||
report.status_extended = (
|
||||
f"Trail {trail.name} is multiregion and it is logging."
|
||||
)
|
||||
if cloudtrail_client.trails is not None:
|
||||
for region in cloudtrail_client.regional_clients.keys():
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = region
|
||||
for trail in cloudtrail_client.trails.values():
|
||||
if trail.region == region or trail.is_multiregion:
|
||||
if trail.is_logging:
|
||||
report.status = "PASS"
|
||||
report.resource_id = trail.name
|
||||
report.resource_arn = trail.arn
|
||||
report.resource_tags = trail.tags
|
||||
if trail.is_multiregion:
|
||||
report.status_extended = f"Trail {trail.name} is multiregion and it is logging."
|
||||
else:
|
||||
report.status_extended = f"Trail {trail.name} is not multiregion and it is logging."
|
||||
# Since there exists a logging trail in that region there is no point in checking the remaining trails
|
||||
# Store the finding and exit the loop
|
||||
findings.append(report)
|
||||
break
|
||||
else:
|
||||
report.status_extended = f"Trail {trail.name} is not multiregion and it is logging."
|
||||
# Since there exists a logging trail in that region there is no point in checking the remaining trails
|
||||
# Store the finding and exit the loop
|
||||
findings.append(report)
|
||||
break
|
||||
else:
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
"No CloudTrail trails enabled and logging were found."
|
||||
)
|
||||
report.resource_arn = (
|
||||
cloudtrail_client.__get_trail_arn_template__(region)
|
||||
)
|
||||
report.resource_id = cloudtrail_client.audited_account
|
||||
# If there are no trails logging it is needed to store the FAIL once all the trails have been checked
|
||||
if report.status == "FAIL":
|
||||
findings.append(report)
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
"No CloudTrail trails enabled and logging were found."
|
||||
)
|
||||
report.resource_arn = (
|
||||
cloudtrail_client.__get_trail_arn_template__(region)
|
||||
)
|
||||
report.resource_id = cloudtrail_client.audited_account
|
||||
# If there are no trails logging it is needed to store the FAIL once all the trails have been checked
|
||||
if report.status == "FAIL":
|
||||
findings.append(report)
|
||||
return findings
|
||||
|
||||
@@ -12,17 +12,17 @@
"ResourceType": "AwsCloudTrailTrail",
"Description": "Ensure CloudTrail logging management events in All Regions",
"Risk": "AWS CloudTrail enables governance, compliance, operational auditing, and risk auditing of your AWS account. To meet FTR requirements, you must have management events enabled for all AWS accounts and in all regions and aggregate these logs into an Amazon Simple Storage Service (Amazon S3) bucket owned by a separate AWS account.",
"RelatedUrl": "https://docs.bridgecrew.io/docs/logging_14",
"RelatedUrl": "https://docs.prowler.com/checks/aws/logging-policies/logging_14",
"Remediation": {
"Code": {
"CLI": "aws cloudtrail update-trail --name <trail_name> --is-multi-region-trail",
"NativeIaC": "",
"Other": "https://docs.bridgecrew.io/docs/logging_14",
"Terraform": "https://docs.bridgecrew.io/docs/logging_14#terraform"
"Other": "https://docs.prowler.com/checks/aws/logging-policies/logging_14",
"Terraform": "https://docs.prowler.com/checks/aws/logging-policies/logging_14#terraform"
},
"Recommendation": {
"Text": "Enable CloudTrail logging management events in All Regions",
"Url": "https://docs.bridgecrew.io/docs/logging_14"
"Url": "https://docs.prowler.com/checks/aws/logging-policies/logging_14"
}
},
"Categories": [

@@ -7,48 +7,49 @@ from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
|
||||
class cloudtrail_multi_region_enabled_logging_management_events(Check):
|
||||
def execute(self):
|
||||
findings = []
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
"No trail found with multi-region enabled and logging management events."
|
||||
)
|
||||
report.region = cloudtrail_client.region
|
||||
report.resource_id = cloudtrail_client.audited_account
|
||||
report.resource_arn = cloudtrail_client.trail_arn_template
|
||||
if cloudtrail_client.trails is not None:
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = "No trail found with multi-region enabled and logging management events."
|
||||
report.region = cloudtrail_client.region
|
||||
report.resource_id = cloudtrail_client.audited_account
|
||||
report.resource_arn = cloudtrail_client.trail_arn_template
|
||||
|
||||
for trail in cloudtrail_client.trails.values():
|
||||
if trail.is_logging:
|
||||
if trail.is_multiregion:
|
||||
for event in trail.data_events:
|
||||
# Classic event selectors
|
||||
if not event.is_advanced:
|
||||
# Check if trail has IncludeManagementEvents and ReadWriteType is All
|
||||
if (
|
||||
event.event_selector["ReadWriteType"] == "All"
|
||||
and event.event_selector["IncludeManagementEvents"]
|
||||
):
|
||||
report.region = trail.region
|
||||
report.resource_id = trail.name
|
||||
report.resource_arn = trail.arn
|
||||
report.resource_tags = trail.tags
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"Trail {trail.name} from home region {trail.home_region} is multi-region, is logging and have management events enabled."
|
||||
for trail in cloudtrail_client.trails.values():
|
||||
if trail.is_logging:
|
||||
if trail.is_multiregion:
|
||||
for event in trail.data_events:
|
||||
# Classic event selectors
|
||||
if not event.is_advanced:
|
||||
# Check if trail has IncludeManagementEvents and ReadWriteType is All
|
||||
if (
|
||||
event.event_selector["ReadWriteType"] == "All"
|
||||
and event.event_selector["IncludeManagementEvents"]
|
||||
):
|
||||
report.region = trail.region
|
||||
report.resource_id = trail.name
|
||||
report.resource_arn = trail.arn
|
||||
report.resource_tags = trail.tags
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"Trail {trail.name} from home region {trail.home_region} is multi-region, is logging and have management events enabled."
|
||||
|
||||
# Advanced event selectors
|
||||
elif event.is_advanced:
|
||||
if event.event_selector.get(
|
||||
"Name"
|
||||
) == "Management events selector" and all(
|
||||
[
|
||||
field["Field"] != "readOnly"
|
||||
for field in event.event_selector["FieldSelectors"]
|
||||
]
|
||||
):
|
||||
report.region = trail.region
|
||||
report.resource_id = trail.name
|
||||
report.resource_arn = trail.arn
|
||||
report.resource_tags = trail.tags
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"Trail {trail.name} from home region {trail.home_region} is multi-region, is logging and have management events enabled."
|
||||
findings.append(report)
|
||||
# Advanced event selectors
|
||||
elif event.is_advanced:
|
||||
if event.event_selector.get(
|
||||
"Name"
|
||||
) == "Management events selector" and all(
|
||||
[
|
||||
field["Field"] != "readOnly"
|
||||
for field in event.event_selector[
|
||||
"FieldSelectors"
|
||||
]
|
||||
]
|
||||
):
|
||||
report.region = trail.region
|
||||
report.resource_id = trail.name
|
||||
report.resource_arn = trail.arn
|
||||
report.resource_tags = trail.tags
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"Trail {trail.name} from home region {trail.home_region} is multi-region, is logging and have management events enabled."
|
||||
findings.append(report)
|
||||
return findings
|
||||
|
||||
@@ -8,23 +8,41 @@ from prowler.providers.aws.services.s3.s3_client import s3_client
|
||||
class cloudtrail_s3_dataevents_read_enabled(Check):
|
||||
def execute(self):
|
||||
findings = []
|
||||
for trail in cloudtrail_client.trails.values():
|
||||
for data_event in trail.data_events:
|
||||
# classic event selectors
|
||||
if not data_event.is_advanced:
|
||||
# Check if trail has a data event for all S3 Buckets for read
|
||||
if (
|
||||
data_event.event_selector["ReadWriteType"] == "ReadOnly"
|
||||
or data_event.event_selector["ReadWriteType"] == "All"
|
||||
):
|
||||
for resource in data_event.event_selector["DataResources"]:
|
||||
if "AWS::S3::Object" == resource["Type"] and (
|
||||
f"arn:{cloudtrail_client.audited_partition}:s3"
|
||||
in resource["Values"]
|
||||
or f"arn:{cloudtrail_client.audited_partition}:s3:::"
|
||||
in resource["Values"]
|
||||
or f"arn:{cloudtrail_client.audited_partition}:s3:::*/*"
|
||||
in resource["Values"]
|
||||
if cloudtrail_client.trails is not None:
|
||||
for trail in cloudtrail_client.trails.values():
|
||||
for data_event in trail.data_events:
|
||||
# classic event selectors
|
||||
if not data_event.is_advanced:
|
||||
# Check if trail has a data event for all S3 Buckets for read
|
||||
if (
|
||||
data_event.event_selector["ReadWriteType"] == "ReadOnly"
|
||||
or data_event.event_selector["ReadWriteType"] == "All"
|
||||
):
|
||||
for resource in data_event.event_selector["DataResources"]:
|
||||
if "AWS::S3::Object" == resource["Type"] and (
|
||||
f"arn:{cloudtrail_client.audited_partition}:s3"
|
||||
in resource["Values"]
|
||||
or f"arn:{cloudtrail_client.audited_partition}:s3:::"
|
||||
in resource["Values"]
|
||||
or f"arn:{cloudtrail_client.audited_partition}:s3:::*/*"
|
||||
in resource["Values"]
|
||||
):
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = trail.region
|
||||
report.resource_id = trail.name
|
||||
report.resource_arn = trail.arn
|
||||
report.resource_tags = trail.tags
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"Trail {trail.name} from home region {trail.home_region} has a classic data event selector to record all S3 object-level API operations."
|
||||
findings.append(report)
|
||||
# advanced event selectors
|
||||
elif data_event.is_advanced:
|
||||
for field_selector in data_event.event_selector[
|
||||
"FieldSelectors"
|
||||
]:
|
||||
if (
|
||||
field_selector["Field"] == "resources.type"
|
||||
and field_selector["Equals"][0] == "AWS::S3::Object"
|
||||
):
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = trail.region
|
||||
@@ -32,31 +50,17 @@ class cloudtrail_s3_dataevents_read_enabled(Check):
|
||||
report.resource_arn = trail.arn
|
||||
report.resource_tags = trail.tags
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"Trail {trail.name} from home region {trail.home_region} has a classic data event selector to record all S3 object-level API operations."
|
||||
report.status_extended = f"Trail {trail.name} from home region {trail.home_region} has an advanced data event selector to record all S3 object-level API operations."
|
||||
findings.append(report)
|
||||
# advanced event selectors
|
||||
elif data_event.is_advanced:
|
||||
for field_selector in data_event.event_selector["FieldSelectors"]:
|
||||
if (
|
||||
field_selector["Field"] == "resources.type"
|
||||
and field_selector["Equals"][0] == "AWS::S3::Object"
|
||||
):
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = trail.region
|
||||
report.resource_id = trail.name
|
||||
report.resource_arn = trail.arn
|
||||
report.resource_tags = trail.tags
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"Trail {trail.name} from home region {trail.home_region} has an advanced data event selector to record all S3 object-level API operations."
|
||||
findings.append(report)
|
||||
if not findings and (
|
||||
s3_client.buckets or not cloudtrail_client.audit_info.ignore_unused_services
|
||||
):
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = cloudtrail_client.region
|
||||
report.resource_arn = cloudtrail_client.trail_arn_template
|
||||
report.resource_id = cloudtrail_client.audited_account
|
||||
report.status = "FAIL"
|
||||
report.status_extended = "No CloudTrail trails have a data event to record all S3 object-level API operations."
|
||||
findings.append(report)
|
||||
if not findings and (
|
||||
s3_client.buckets
|
||||
or not cloudtrail_client.audit_info.ignore_unused_services
|
||||
):
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = cloudtrail_client.region
|
||||
report.resource_arn = cloudtrail_client.trail_arn_template
|
||||
report.resource_id = cloudtrail_client.audited_account
|
||||
report.status = "FAIL"
|
||||
report.status_extended = "No CloudTrail trails have a data event to record all S3 object-level API operations."
|
||||
findings.append(report)
|
||||
return findings
|
||||
|
||||
@@ -8,23 +8,41 @@ from prowler.providers.aws.services.s3.s3_client import s3_client
|
||||
class cloudtrail_s3_dataevents_write_enabled(Check):
|
||||
def execute(self):
|
||||
findings = []
|
||||
for trail in cloudtrail_client.trails.values():
|
||||
for data_event in trail.data_events:
|
||||
# Classic event selectors
|
||||
if not data_event.is_advanced:
|
||||
# Check if trail has a data event for all S3 Buckets for write
|
||||
if (
|
||||
data_event.event_selector["ReadWriteType"] == "All"
|
||||
or data_event.event_selector["ReadWriteType"] == "WriteOnly"
|
||||
):
|
||||
for resource in data_event.event_selector["DataResources"]:
|
||||
if "AWS::S3::Object" == resource["Type"] and (
|
||||
f"arn:{cloudtrail_client.audited_partition}:s3"
|
||||
in resource["Values"]
|
||||
or f"arn:{cloudtrail_client.audited_partition}:s3:::"
|
||||
in resource["Values"]
|
||||
or f"arn:{cloudtrail_client.audited_partition}:s3:::*/*"
|
||||
in resource["Values"]
|
||||
if cloudtrail_client.trails is not None:
|
||||
for trail in cloudtrail_client.trails.values():
|
||||
for data_event in trail.data_events:
|
||||
# Classic event selectors
|
||||
if not data_event.is_advanced:
|
||||
# Check if trail has a data event for all S3 Buckets for write
|
||||
if (
|
||||
data_event.event_selector["ReadWriteType"] == "All"
|
||||
or data_event.event_selector["ReadWriteType"] == "WriteOnly"
|
||||
):
|
||||
for resource in data_event.event_selector["DataResources"]:
|
||||
if "AWS::S3::Object" == resource["Type"] and (
|
||||
f"arn:{cloudtrail_client.audited_partition}:s3"
|
||||
in resource["Values"]
|
||||
or f"arn:{cloudtrail_client.audited_partition}:s3:::"
|
||||
in resource["Values"]
|
||||
or f"arn:{cloudtrail_client.audited_partition}:s3:::*/*"
|
||||
in resource["Values"]
|
||||
):
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = trail.region
|
||||
report.resource_id = trail.name
|
||||
report.resource_arn = trail.arn
|
||||
report.resource_tags = trail.tags
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"Trail {trail.name} from home region {trail.home_region} has a classic data event selector to record all S3 object-level API operations."
|
||||
findings.append(report)
|
||||
# Advanced event selectors
|
||||
elif data_event.is_advanced:
|
||||
for field_selector in data_event.event_selector[
|
||||
"FieldSelectors"
|
||||
]:
|
||||
if (
|
||||
field_selector["Field"] == "resources.type"
|
||||
and field_selector["Equals"][0] == "AWS::S3::Object"
|
||||
):
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = trail.region
|
||||
@@ -32,31 +50,17 @@ class cloudtrail_s3_dataevents_write_enabled(Check):
|
||||
report.resource_arn = trail.arn
|
||||
report.resource_tags = trail.tags
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"Trail {trail.name} from home region {trail.home_region} has a classic data event selector to record all S3 object-level API operations."
|
||||
report.status_extended = f"Trail {trail.name} from home region {trail.home_region} has an advanced data event selector to record all S3 object-level API operations."
|
||||
findings.append(report)
|
||||
# Advanced event selectors
|
||||
elif data_event.is_advanced:
|
||||
for field_selector in data_event.event_selector["FieldSelectors"]:
|
||||
if (
|
||||
field_selector["Field"] == "resources.type"
|
||||
and field_selector["Equals"][0] == "AWS::S3::Object"
|
||||
):
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = trail.region
|
||||
report.resource_id = trail.name
|
||||
report.resource_arn = trail.arn
|
||||
report.resource_tags = trail.tags
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"Trail {trail.name} from home region {trail.home_region} has an advanced data event selector to record all S3 object-level API operations."
|
||||
findings.append(report)
|
||||
if not findings and (
|
||||
s3_client.buckets or not cloudtrail_client.audit_info.ignore_unused_services
|
||||
):
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = cloudtrail_client.region
|
||||
report.resource_arn = cloudtrail_client.trail_arn_template
|
||||
report.resource_id = cloudtrail_client.audited_account
|
||||
report.status = "FAIL"
|
||||
report.status_extended = "No CloudTrail trails have a data event to record all S3 object-level API operations."
|
||||
findings.append(report)
|
||||
if not findings and (
|
||||
s3_client.buckets
|
||||
or not cloudtrail_client.audit_info.ignore_unused_services
|
||||
):
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = cloudtrail_client.region
|
||||
report.resource_arn = cloudtrail_client.trail_arn_template
|
||||
report.resource_id = cloudtrail_client.audited_account
|
||||
report.status = "FAIL"
|
||||
report.status_extended = "No CloudTrail trails have a data event to record all S3 object-level API operations."
|
||||
findings.append(report)
|
||||
return findings
|
||||
|
||||
@@ -17,10 +17,11 @@ class Cloudtrail(AWSService):
self.trail_arn_template = f"arn:{self.audited_partition}:cloudtrail:{self.region}:{self.audited_account}:trail"
self.trails = {}
self.__threading_call__(self.__get_trails__)
self.__get_trail_status__()
self.__get_insight_selectors__()
self.__get_event_selectors__()
self.__list_tags_for_resource__()
if self.trails:
self.__get_trail_status__()
self.__get_insight_selectors__()
self.__get_event_selectors__()
self.__list_tags_for_resource__()

def __get_trail_arn_template__(self, region):
return (
@@ -45,6 +46,8 @@ class Cloudtrail(AWSService):
kms_key_id = trail["KmsKeyId"]
if "CloudWatchLogsLogGroupArn" in trail:
log_group_arn = trail["CloudWatchLogsLogGroupArn"]
if self.trails is None:
self.trails = {}
self.trails[trail["TrailARN"]] = Trail(
name=trail["Name"],
is_multiregion=trail["IsMultiRegionTrail"],
@@ -61,12 +64,24 @@ class Cloudtrail(AWSService):
has_insight_selectors=trail.get("HasInsightSelectors"),
)
if trails_count == 0:
if self.trails is None:
self.trails = {}
self.trails[self.__get_trail_arn_template__(regional_client.region)] = (
Trail(
region=regional_client.region,
)
)

except ClientError as error:
if error.response["Error"]["Code"] == "AccessDeniedException":
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
if not self.trails:
self.trails = None
else:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

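To summarize the contract the hunks above establish for consumers of the Cloudtrail service, a short sketch; the helper name describe_trails_state is illustrative only and not part of Prowler:

def describe_trails_state(trails):
    # Set to None by __get_trails__ when ListTrails raised AccessDeniedException
    # and nothing had been collected: checks must skip and emit no findings.
    if trails is None:
        return "access denied"
    # Regions without trails get a placeholder Trail(region=...) with no name,
    # which is why checks guard with `if trail.name:` or `if trail.is_logging:`.
    if not any(trail.name for trail in trails.values()):
        return "no trails configured"
    return f"{sum(1 for trail in trails.values() if trail.name)} trail(s) found"
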
@@ -15,10 +15,10 @@
"RelatedUrl": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudwatch-alarms-for-cloudtrail.html",
"Remediation": {
"Code": {
"CLI": "https://docs.bridgecrew.io/docs/monitoring_11#procedure",
"CLI": "https://docs.prowler.com/checks/aws/monitoring-policies/monitoring_11#procedure",
"NativeIaC": "",
"Other": "",
"Terraform": "https://docs.bridgecrew.io/docs/monitoring_11#fix---buildtime"
"Terraform": "https://docs.prowler.com/checks/aws/monitoring-policies/monitoring_11#fix---buildtime"
},
"Recommendation": {
"Text": "It is recommended that a metric filter and alarm be established for unauthorized requests.",

@@ -15,21 +15,24 @@ class cloudwatch_changes_to_network_acls_alarm_configured(Check):
def execute(self):
pattern = r"\$\.eventName\s*=\s*.?CreateNetworkAcl.+\$\.eventName\s*=\s*.?CreateNetworkAclEntry.+\$\.eventName\s*=\s*.?DeleteNetworkAcl.+\$\.eventName\s*=\s*.?DeleteNetworkAclEntry.+\$\.eventName\s*=\s*.?ReplaceNetworkAclEntry.+\$\.eventName\s*=\s*.?ReplaceNetworkAclAssociation.?"
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"
report.status_extended = (
"No CloudWatch log groups found with metric filters or alarms associated."
)
report.region = logs_client.region
report.resource_id = logs_client.audited_account
report.resource_arn = logs_client.log_group_arn_template
report = check_cloudwatch_log_metric_filter(
pattern,
cloudtrail_client.trails,
logs_client.metric_filters,
cloudwatch_client.metric_alarms,
report,
)
if (
cloudtrail_client.trails is not None
and logs_client.metric_filters is not None
and cloudwatch_client.metric_alarms is not None
):
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"
report.status_extended = "No CloudWatch log groups found with metric filters or alarms associated."
report.region = logs_client.region
report.resource_id = logs_client.audited_account
report.resource_arn = logs_client.log_group_arn_template
report = check_cloudwatch_log_metric_filter(
pattern,
cloudtrail_client.trails,
logs_client.metric_filters,
cloudwatch_client.metric_alarms,
report,
)

findings.append(report)
findings.append(report)
return findings

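The same reshaping is applied to the sibling cloudwatch_changes_to_* checks below. The new flow, sketched with the clients and helper those checks already import; the check name and the shortened filter pattern are placeholders:

class example_metric_filter_check(Check):
    def execute(self):
        pattern = r"\$\.eventName\s*=\s*.?CreateNetworkAcl.?"  # shortened for the sketch
        findings = []
        # Build and store a report only if trails, metric filters and alarms
        # could all be listed; otherwise emit nothing for this check
        if (
            cloudtrail_client.trails is not None
            and logs_client.metric_filters is not None
            and cloudwatch_client.metric_alarms is not None
        ):
            report = Check_Report_AWS(self.metadata())
            report.status = "FAIL"
            report.status_extended = "No CloudWatch log groups found with metric filters or alarms associated."
            report.region = logs_client.region
            report.resource_id = logs_client.audited_account
            report.resource_arn = logs_client.log_group_arn_template
            report = check_cloudwatch_log_metric_filter(
                pattern,
                cloudtrail_client.trails,
                logs_client.metric_filters,
                cloudwatch_client.metric_alarms,
                report,
            )
            findings.append(report)
        return findings
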
@@ -15,10 +15,10 @@
"RelatedUrl": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudwatch-alarms-for-cloudtrail.html",
"Remediation": {
"Code": {
"CLI": "https://docs.bridgecrew.io/docs/monitoring_12#procedure",
"CLI": "https://docs.prowler.com/checks/aws/monitoring-policies/monitoring_12#procedure",
"NativeIaC": "",
"Other": "",
"Terraform": "https://docs.bridgecrew.io/docs/monitoring_12#fix---buildtime"
"Terraform": "https://docs.prowler.com/checks/aws/monitoring-policies/monitoring_12#fix---buildtime"
},
"Recommendation": {
"Text": "It is recommended that a metric filter and alarm be established for unauthorized requests.",

@@ -15,21 +15,24 @@ class cloudwatch_changes_to_network_gateways_alarm_configured(Check):
|
||||
def execute(self):
|
||||
pattern = r"\$\.eventName\s*=\s*.?CreateCustomerGateway.+\$\.eventName\s*=\s*.?DeleteCustomerGateway.+\$\.eventName\s*=\s*.?AttachInternetGateway.+\$\.eventName\s*=\s*.?CreateInternetGateway.+\$\.eventName\s*=\s*.?DeleteInternetGateway.+\$\.eventName\s*=\s*.?DetachInternetGateway.?"
|
||||
findings = []
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
"No CloudWatch log groups found with metric filters or alarms associated."
|
||||
)
|
||||
report.region = logs_client.region
|
||||
report.resource_id = logs_client.audited_account
|
||||
report.resource_arn = logs_client.log_group_arn_template
|
||||
report = check_cloudwatch_log_metric_filter(
|
||||
pattern,
|
||||
cloudtrail_client.trails,
|
||||
logs_client.metric_filters,
|
||||
cloudwatch_client.metric_alarms,
|
||||
report,
|
||||
)
|
||||
if (
|
||||
cloudtrail_client.trails is not None
|
||||
and logs_client.metric_filters is not None
|
||||
and cloudwatch_client.metric_alarms is not None
|
||||
):
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = "No CloudWatch log groups found with metric filters or alarms associated."
|
||||
report.region = logs_client.region
|
||||
report.resource_id = logs_client.audited_account
|
||||
report.resource_arn = logs_client.log_group_arn_template
|
||||
report = check_cloudwatch_log_metric_filter(
|
||||
pattern,
|
||||
cloudtrail_client.trails,
|
||||
logs_client.metric_filters,
|
||||
cloudwatch_client.metric_alarms,
|
||||
report,
|
||||
)
|
||||
|
||||
findings.append(report)
|
||||
findings.append(report)
|
||||
return findings
|
||||
|
||||
@@ -15,10 +15,10 @@
|
||||
"RelatedUrl": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudwatch-alarms-for-cloudtrail.html",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "https://docs.bridgecrew.io/docs/monitoring_13#procedure",
|
||||
"CLI": "https://docs.prowler.com/checks/aws/monitoring-policies/monitoring_13#procedure",
|
||||
"NativeIaC": "",
|
||||
"Other": "",
|
||||
"Terraform": "https://docs.bridgecrew.io/docs/monitoring_13#fix---buildtime"
|
||||
"Terraform": "https://docs.prowler.com/checks/aws/monitoring-policies/monitoring_13#fix---buildtime"
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "If you are using CloudTrails and CloudWatch, perform the following to setup the metric filter, alarm, SNS topic, and subscription: 1. Create a metric filter based on filter pattern provided which checks for route table changes and the <cloudtrail_log_group_name> taken from audit step 1. aws logs put-metric-filter --log-group-name <cloudtrail_log_group_name> -- filter-name `<route_table_changes_metric>` --metric-transformations metricName= `<route_table_changes_metric>` ,metricNamespace='CISBenchmark',metricValue=1 --filter-pattern '{($.eventSource = ec2.amazonaws.com) && (($.eventName = CreateRoute) || ($.eventName = CreateRouteTable) || ($.eventName = ReplaceRoute) || ($.eventName = ReplaceRouteTableAssociation) || ($.eventName = DeleteRouteTable) || ($.eventName = DeleteRoute) || ($.eventName = DisassociateRouteTable)) }' Note: You can choose your own metricName and metricNamespace strings. Using the same metricNamespace for all Foundations Benchmark metrics will group them together. 2. Create an SNS topic that the alarm will notify aws sns create-topic --name <sns_topic_name> Note: you can execute this command once and then re-use the same topic for all monitoring alarms. 3. Create an SNS subscription to the topic created in step 2 aws sns subscribe --topic-arn <sns_topic_arn> --protocol <protocol_for_sns> - -notification-endpoint <sns_subscription_endpoints> Note: you can execute this command once and then re-use the SNS subscription for all monitoring alarms. 4. Create an alarm that is associated with the CloudWatch Logs Metric Filter created in step 1 and an SNS topic created in step 2 aws cloudwatch put-metric-alarm --alarm-name `<route_table_changes_alarm>` --metric-name `<route_table_changes_metric>` --statistic Sum --period 300 - -threshold 1 --comparison-operator GreaterThanOrEqualToThreshold -- evaluation-periods 1 --namespace 'CISBenchmark' --alarm-actions <sns_topic_arn>",
|
||||
|
||||
@@ -15,21 +15,24 @@ class cloudwatch_changes_to_network_route_tables_alarm_configured(Check):
|
||||
def execute(self):
|
||||
pattern = r"\$\.eventSource\s*=\s*.?ec2.amazonaws.com.+\$\.eventName\s*=\s*.?CreateRoute.+\$\.eventName\s*=\s*.?CreateRouteTable.+\$\.eventName\s*=\s*.?ReplaceRoute.+\$\.eventName\s*=\s*.?ReplaceRouteTableAssociation.+\$\.eventName\s*=\s*.?DeleteRouteTable.+\$\.eventName\s*=\s*.?DeleteRoute.+\$\.eventName\s*=\s*.?DisassociateRouteTable.?"
|
||||
findings = []
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
"No CloudWatch log groups found with metric filters or alarms associated."
|
||||
)
|
||||
report.region = logs_client.region
|
||||
report.resource_id = logs_client.audited_account
|
||||
report.resource_arn = logs_client.log_group_arn_template
|
||||
report = check_cloudwatch_log_metric_filter(
|
||||
pattern,
|
||||
cloudtrail_client.trails,
|
||||
logs_client.metric_filters,
|
||||
cloudwatch_client.metric_alarms,
|
||||
report,
|
||||
)
|
||||
if (
|
||||
cloudtrail_client.trails is not None
|
||||
and logs_client.metric_filters is not None
|
||||
and cloudwatch_client.metric_alarms is not None
|
||||
):
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = "No CloudWatch log groups found with metric filters or alarms associated."
|
||||
report.region = logs_client.region
|
||||
report.resource_id = logs_client.audited_account
|
||||
report.resource_arn = logs_client.log_group_arn_template
|
||||
report = check_cloudwatch_log_metric_filter(
|
||||
pattern,
|
||||
cloudtrail_client.trails,
|
||||
logs_client.metric_filters,
|
||||
cloudwatch_client.metric_alarms,
|
||||
report,
|
||||
)
|
||||
|
||||
findings.append(report)
|
||||
findings.append(report)
|
||||
return findings
|
||||
|
||||
@@ -15,10 +15,10 @@
   "RelatedUrl": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudwatch-alarms-for-cloudtrail.html",
   "Remediation": {
     "Code": {
-      "CLI": "https://docs.bridgecrew.io/docs/monitoring_14#procedure",
+      "CLI": "https://docs.prowler.com/checks/aws/monitoring-policies/monitoring_14#procedure",
       "NativeIaC": "",
       "Other": "",
-      "Terraform": "https://docs.bridgecrew.io/docs/monitoring_14#fix---buildtime"
+      "Terraform": "https://docs.prowler.com/checks/aws/monitoring-policies/monitoring_14#fix---buildtime"
     },
     "Recommendation": {
       "Text": "It is recommended that a metric filter and alarm be established for unauthorized requests.",

@@ -15,21 +15,24 @@ class cloudwatch_changes_to_vpcs_alarm_configured(Check):
|
||||
def execute(self):
|
||||
pattern = r"\$\.eventName\s*=\s*.?CreateVpc.+\$\.eventName\s*=\s*.?DeleteVpc.+\$\.eventName\s*=\s*.?ModifyVpcAttribute.+\$\.eventName\s*=\s*.?AcceptVpcPeeringConnection.+\$\.eventName\s*=\s*.?CreateVpcPeeringConnection.+\$\.eventName\s*=\s*.?DeleteVpcPeeringConnection.+\$\.eventName\s*=\s*.?RejectVpcPeeringConnection.+\$\.eventName\s*=\s*.?AttachClassicLinkVpc.+\$\.eventName\s*=\s*.?DetachClassicLinkVpc.+\$\.eventName\s*=\s*.?DisableVpcClassicLink.+\$\.eventName\s*=\s*.?EnableVpcClassicLink.?"
|
||||
findings = []
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
"No CloudWatch log groups found with metric filters or alarms associated."
|
||||
)
|
||||
report.region = logs_client.region
|
||||
report.resource_id = logs_client.audited_account
|
||||
report.resource_arn = logs_client.log_group_arn_template
|
||||
report = check_cloudwatch_log_metric_filter(
|
||||
pattern,
|
||||
cloudtrail_client.trails,
|
||||
logs_client.metric_filters,
|
||||
cloudwatch_client.metric_alarms,
|
||||
report,
|
||||
)
|
||||
if (
|
||||
cloudtrail_client.trails is not None
|
||||
and logs_client.metric_filters is not None
|
||||
and cloudwatch_client.metric_alarms is not None
|
||||
):
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = "No CloudWatch log groups found with metric filters or alarms associated."
|
||||
report.region = logs_client.region
|
||||
report.resource_id = logs_client.audited_account
|
||||
report.resource_arn = logs_client.log_group_arn_template
|
||||
report = check_cloudwatch_log_metric_filter(
|
||||
pattern,
|
||||
cloudtrail_client.trails,
|
||||
logs_client.metric_filters,
|
||||
cloudwatch_client.metric_alarms,
|
||||
report,
|
||||
)
|
||||
|
||||
findings.append(report)
|
||||
findings.append(report)
|
||||
return findings
|
||||
|
||||
@@ -5,17 +5,20 @@ from prowler.providers.aws.services.iam.iam_client import iam_client
 class cloudwatch_cross_account_sharing_disabled(Check):
     def execute(self):
         findings = []
-        report = Check_Report_AWS(self.metadata())
-        report.status = "PASS"
-        report.status_extended = "CloudWatch doesn't allow cross-account sharing."
-        report.resource_arn = iam_client.role_arn_template
-        report.resource_id = iam_client.audited_account
-        report.region = iam_client.region
-        for role in iam_client.roles:
-            if role.name == "CloudWatch-CrossAccountSharingRole":
-                report.resource_arn = role.arn
-                report.resource_id = role.name
-                report.status = "FAIL"
-                report.status_extended = "CloudWatch has allowed cross-account sharing."
-        findings.append(report)
+        if iam_client.roles is not None:
+            report = Check_Report_AWS(self.metadata())
+            report.status = "PASS"
+            report.status_extended = "CloudWatch doesn't allow cross-account sharing."
+            report.resource_arn = iam_client.role_arn_template
+            report.resource_id = iam_client.audited_account
+            report.region = iam_client.region
+            for role in iam_client.roles:
+                if role.name == "CloudWatch-CrossAccountSharingRole":
+                    report.resource_arn = role.arn
+                    report.resource_id = role.name
+                    report.status = "FAIL"
+                    report.status_extended = (
+                        "CloudWatch has allowed cross-account sharing."
+                    )
+            findings.append(report)
         return findings

@@ -17,7 +17,7 @@
     "Code": {
       "CLI": "associate-kms-key --log-group-name <value> --kms-key-id <value>",
       "NativeIaC": "",
-      "Other": "https://docs.bridgecrew.io/docs/logging_21#aws-console",
+      "Other": "https://docs.prowler.com/checks/aws/logging-policies/logging_21#aws-console",
       "Terraform": ""
     },
     "Recommendation": {

@@ -5,19 +5,18 @@ from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
 class cloudwatch_log_group_kms_encryption_enabled(Check):
     def execute(self):
         findings = []
-        for log_group in logs_client.log_groups:
-            report = Check_Report_AWS(self.metadata())
-            report.region = log_group.region
-            report.resource_id = log_group.name
-            report.resource_arn = log_group.arn
-            report.resource_tags = log_group.tags
-            if log_group.kms_id:
-                report.status = "PASS"
-                report.status_extended = f"Log Group {log_group.name} does have AWS KMS key {log_group.kms_id} associated."
-            else:
-                report.status = "FAIL"
-                report.status_extended = (
-                    f"Log Group {log_group.name} does not have AWS KMS keys associated."
-                )
-            findings.append(report)
+        if logs_client.log_groups:
+            for log_group in logs_client.log_groups:
+                report = Check_Report_AWS(self.metadata())
+                report.region = log_group.region
+                report.resource_id = log_group.name
+                report.resource_arn = log_group.arn
+                report.resource_tags = log_group.tags
+                if log_group.kms_id:
+                    report.status = "PASS"
+                    report.status_extended = f"Log Group {log_group.name} does have AWS KMS key {log_group.kms_id} associated."
+                else:
+                    report.status = "FAIL"
+                    report.status_extended = f"Log Group {log_group.name} does not have AWS KMS keys associated."
+                findings.append(report)
         return findings

@@ -11,78 +11,86 @@ from prowler.providers.aws.services.cloudwatch.logs_client import logs_client
|
||||
class cloudwatch_log_group_no_secrets_in_logs(Check):
|
||||
def execute(self):
|
||||
findings = []
|
||||
for log_group in logs_client.log_groups:
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"No secrets found in {log_group.name} log group."
|
||||
report.region = log_group.region
|
||||
report.resource_id = log_group.name
|
||||
report.resource_arn = log_group.arn
|
||||
log_group_secrets = []
|
||||
if log_group.log_streams:
|
||||
for log_stream_name in log_group.log_streams:
|
||||
log_stream_secrets = {}
|
||||
log_stream_data = "\n".join(
|
||||
[
|
||||
dumps(event["message"])
|
||||
for event in log_group.log_streams[log_stream_name]
|
||||
]
|
||||
)
|
||||
log_stream_secrets_output = detect_secrets_scan(log_stream_data)
|
||||
|
||||
if log_stream_secrets_output:
|
||||
for secret in log_stream_secrets_output:
|
||||
flagged_event = log_group.log_streams[log_stream_name][
|
||||
secret["line_number"] - 1
|
||||
]
|
||||
cloudwatch_timestamp = (
|
||||
convert_to_cloudwatch_timestamp_format(
|
||||
flagged_event["timestamp"]
|
||||
)
|
||||
)
|
||||
if cloudwatch_timestamp not in log_stream_secrets.keys():
|
||||
log_stream_secrets[cloudwatch_timestamp] = SecretsDict()
|
||||
|
||||
try:
|
||||
log_event_data = dumps(
|
||||
loads(flagged_event["message"]), indent=2
|
||||
)
|
||||
except Exception:
|
||||
log_event_data = dumps(
|
||||
flagged_event["message"], indent=2
|
||||
)
|
||||
if len(log_event_data.split("\n")) > 1:
|
||||
# Can get more informative output if there is more than 1 line.
|
||||
# Will rescan just this event to get the type of secret and the line number
|
||||
event_detect_secrets_output = detect_secrets_scan(
|
||||
log_event_data
|
||||
)
|
||||
if event_detect_secrets_output:
|
||||
for secret in event_detect_secrets_output:
|
||||
log_stream_secrets[
|
||||
cloudwatch_timestamp
|
||||
].add_secret(
|
||||
secret["line_number"], secret["type"]
|
||||
)
|
||||
else:
|
||||
log_stream_secrets[cloudwatch_timestamp].add_secret(
|
||||
1, secret["type"]
|
||||
)
|
||||
if log_stream_secrets:
|
||||
secrets_string = "; ".join(
|
||||
if logs_client.log_groups:
|
||||
for log_group in logs_client.log_groups:
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "PASS"
|
||||
report.status_extended = (
|
||||
f"No secrets found in {log_group.name} log group."
|
||||
)
|
||||
report.region = log_group.region
|
||||
report.resource_id = log_group.name
|
||||
report.resource_arn = log_group.arn
|
||||
log_group_secrets = []
|
||||
if log_group.log_streams:
|
||||
for log_stream_name in log_group.log_streams:
|
||||
log_stream_secrets = {}
|
||||
log_stream_data = "\n".join(
|
||||
[
|
||||
f"at {timestamp} - {log_stream_secrets[timestamp].to_string()}"
|
||||
for timestamp in log_stream_secrets
|
||||
dumps(event["message"])
|
||||
for event in log_group.log_streams[log_stream_name]
|
||||
]
|
||||
)
|
||||
log_group_secrets.append(
|
||||
f"in log stream {log_stream_name} {secrets_string}"
|
||||
)
|
||||
if log_group_secrets:
|
||||
secrets_string = "; ".join(log_group_secrets)
|
||||
report.status = "FAIL"
|
||||
report.status_extended = f"Potential secrets found in log group {log_group.name} {secrets_string}."
|
||||
findings.append(report)
|
||||
log_stream_secrets_output = detect_secrets_scan(log_stream_data)
|
||||
|
||||
if log_stream_secrets_output:
|
||||
for secret in log_stream_secrets_output:
|
||||
flagged_event = log_group.log_streams[log_stream_name][
|
||||
secret["line_number"] - 1
|
||||
]
|
||||
cloudwatch_timestamp = (
|
||||
convert_to_cloudwatch_timestamp_format(
|
||||
flagged_event["timestamp"]
|
||||
)
|
||||
)
|
||||
if (
|
||||
cloudwatch_timestamp
|
||||
not in log_stream_secrets.keys()
|
||||
):
|
||||
log_stream_secrets[cloudwatch_timestamp] = (
|
||||
SecretsDict()
|
||||
)
|
||||
|
||||
try:
|
||||
log_event_data = dumps(
|
||||
loads(flagged_event["message"]), indent=2
|
||||
)
|
||||
except Exception:
|
||||
log_event_data = dumps(
|
||||
flagged_event["message"], indent=2
|
||||
)
|
||||
if len(log_event_data.split("\n")) > 1:
|
||||
# Can get more informative output if there is more than 1 line.
|
||||
# Will rescan just this event to get the type of secret and the line number
|
||||
event_detect_secrets_output = detect_secrets_scan(
|
||||
log_event_data
|
||||
)
|
||||
if event_detect_secrets_output:
|
||||
for secret in event_detect_secrets_output:
|
||||
log_stream_secrets[
|
||||
cloudwatch_timestamp
|
||||
].add_secret(
|
||||
secret["line_number"], secret["type"]
|
||||
)
|
||||
else:
|
||||
log_stream_secrets[cloudwatch_timestamp].add_secret(
|
||||
1, secret["type"]
|
||||
)
|
||||
if log_stream_secrets:
|
||||
secrets_string = "; ".join(
|
||||
[
|
||||
f"at {timestamp} - {log_stream_secrets[timestamp].to_string()}"
|
||||
for timestamp in log_stream_secrets
|
||||
]
|
||||
)
|
||||
log_group_secrets.append(
|
||||
f"in log stream {log_stream_name} {secrets_string}"
|
||||
)
|
||||
if log_group_secrets:
|
||||
secrets_string = "; ".join(log_group_secrets)
|
||||
report.status = "FAIL"
|
||||
report.status_extended = f"Potential secrets found in log group {log_group.name} {secrets_string}."
|
||||
findings.append(report)
|
||||
return findings
|
||||
|
||||
|
||||
|
||||
@@ -15,10 +15,10 @@
   "RelatedUrl": "https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/AWS_Logs.html",
   "Remediation": {
     "Code": {
-      "CLI": "https://docs.bridgecrew.io/docs/logging_13#cli-command",
-      "NativeIaC": "https://docs.bridgecrew.io/docs/logging_13#cloudformation",
+      "CLI": "https://docs.prowler.com/checks/aws/logging-policies/logging_13#cli-command",
+      "NativeIaC": "https://docs.prowler.com/checks/aws/logging-policies/logging_13#cloudformation",
       "Other": "",
-      "Terraform": "https://docs.bridgecrew.io/docs/logging_13#terraform"
+      "Terraform": "https://docs.prowler.com/checks/aws/logging-policies/logging_13#terraform"
     },
     "Recommendation": {
       "Text": "Add Log Retention policy of specific days to log groups. This will persist logs and traces for a long time.",

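A minimal sketch of that recommendation using the AWS CLI; the log group name is a placeholder, and 365 days simply mirrors the default the check below reads from `log_group_retention_days`, so adjust it to your own policy:

# Hypothetical example: enforce a one-year retention period on a log group
aws logs put-retention-policy \
  --log-group-name <log_group_name> \
  --retention-in-days 365
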
@@ -10,23 +10,24 @@ class cloudwatch_log_group_retention_policy_specific_days_enabled(Check):
|
||||
specific_retention_days = logs_client.audit_config.get(
|
||||
"log_group_retention_days", 365
|
||||
)
|
||||
for log_group in logs_client.log_groups:
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = log_group.region
|
||||
report.resource_id = log_group.name
|
||||
report.resource_arn = log_group.arn
|
||||
report.resource_tags = log_group.tags
|
||||
if (
|
||||
log_group.never_expire is False
|
||||
and log_group.retention_days < specific_retention_days
|
||||
):
|
||||
report.status = "FAIL"
|
||||
report.status_extended = f"Log Group {log_group.name} has less than {specific_retention_days} days retention period ({log_group.retention_days} days)."
|
||||
else:
|
||||
report.status = "PASS"
|
||||
if log_group.never_expire is True:
|
||||
report.status_extended = f"Log Group {log_group.name} comply with {specific_retention_days} days retention period since it never expires."
|
||||
if logs_client.log_groups:
|
||||
for log_group in logs_client.log_groups:
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = log_group.region
|
||||
report.resource_id = log_group.name
|
||||
report.resource_arn = log_group.arn
|
||||
report.resource_tags = log_group.tags
|
||||
if (
|
||||
log_group.never_expire is False
|
||||
and log_group.retention_days < specific_retention_days
|
||||
):
|
||||
report.status = "FAIL"
|
||||
report.status_extended = f"Log Group {log_group.name} has less than {specific_retention_days} days retention period ({log_group.retention_days} days)."
|
||||
else:
|
||||
report.status_extended = f"Log Group {log_group.name} comply with {specific_retention_days} days retention period since it has {log_group.retention_days} days."
|
||||
findings.append(report)
|
||||
report.status = "PASS"
|
||||
if log_group.never_expire is True:
|
||||
report.status_extended = f"Log Group {log_group.name} comply with {specific_retention_days} days retention period since it never expires."
|
||||
else:
|
||||
report.status_extended = f"Log Group {log_group.name} comply with {specific_retention_days} days retention period since it has {log_group.retention_days} days."
|
||||
findings.append(report)
|
||||
return findings
|
||||
|
||||
@@ -15,10 +15,10 @@
   "RelatedUrl": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudwatch-alarms-for-cloudtrail.html",
   "Remediation": {
     "Code": {
-      "CLI": "https://docs.bridgecrew.io/docs/monitoring_9#procedure",
+      "CLI": "https://docs.prowler.com/checks/aws/monitoring-policies/monitoring_9#procedure",
       "NativeIaC": "",
       "Other": "",
-      "Terraform": "https://docs.bridgecrew.io/docs/monitoring_9#fix---buildtime"
+      "Terraform": "https://docs.prowler.com/checks/aws/monitoring-policies/monitoring_9#fix---buildtime"
     },
     "Recommendation": {
       "Text": "It is recommended that a metric filter and alarm be established for unauthorized requests.",

@@ -17,21 +17,24 @@ class cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_change
|
||||
def execute(self):
|
||||
pattern = r"\$\.eventSource\s*=\s*.?config.amazonaws.com.+\$\.eventName\s*=\s*.?StopConfigurationRecorder.+\$\.eventName\s*=\s*.?DeleteDeliveryChannel.+\$\.eventName\s*=\s*.?PutDeliveryChannel.+\$\.eventName\s*=\s*.?PutConfigurationRecorder.?"
|
||||
findings = []
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
"No CloudWatch log groups found with metric filters or alarms associated."
|
||||
)
|
||||
report.region = logs_client.region
|
||||
report.resource_id = logs_client.audited_account
|
||||
report.resource_arn = logs_client.log_group_arn_template
|
||||
report = check_cloudwatch_log_metric_filter(
|
||||
pattern,
|
||||
cloudtrail_client.trails,
|
||||
logs_client.metric_filters,
|
||||
cloudwatch_client.metric_alarms,
|
||||
report,
|
||||
)
|
||||
if (
|
||||
cloudtrail_client.trails is not None
|
||||
and logs_client.metric_filters is not None
|
||||
and cloudwatch_client.metric_alarms is not None
|
||||
):
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = "No CloudWatch log groups found with metric filters or alarms associated."
|
||||
report.region = logs_client.region
|
||||
report.resource_id = logs_client.audited_account
|
||||
report.resource_arn = logs_client.log_group_arn_template
|
||||
report = check_cloudwatch_log_metric_filter(
|
||||
pattern,
|
||||
cloudtrail_client.trails,
|
||||
logs_client.metric_filters,
|
||||
cloudwatch_client.metric_alarms,
|
||||
report,
|
||||
)
|
||||
|
||||
findings.append(report)
|
||||
findings.append(report)
|
||||
return findings
|
||||
|
||||
@@ -15,10 +15,10 @@
   "RelatedUrl": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudwatch-alarms-for-cloudtrail.html",
   "Remediation": {
     "Code": {
-      "CLI": "https://docs.bridgecrew.io/docs/monitoring_5#procedure",
+      "CLI": "https://docs.prowler.com/checks/aws/monitoring-policies/monitoring_5#procedure",
       "NativeIaC": "",
       "Other": "",
-      "Terraform": "https://docs.bridgecrew.io/docs/monitoring_5#fix---buildtime"
+      "Terraform": "https://docs.prowler.com/checks/aws/monitoring-policies/monitoring_5#fix---buildtime"
     },
     "Recommendation": {
       "Text": "It is recommended that a metric filter and alarm be established for unauthorized requests.",

@@ -17,21 +17,24 @@ class cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_change
|
||||
def execute(self):
|
||||
pattern = r"\$\.eventName\s*=\s*.?CreateTrail.+\$\.eventName\s*=\s*.?UpdateTrail.+\$\.eventName\s*=\s*.?DeleteTrail.+\$\.eventName\s*=\s*.?StartLogging.+\$\.eventName\s*=\s*.?StopLogging.?"
|
||||
findings = []
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
"No CloudWatch log groups found with metric filters or alarms associated."
|
||||
)
|
||||
report.region = logs_client.region
|
||||
report.resource_id = logs_client.audited_account
|
||||
report.resource_arn = logs_client.log_group_arn_template
|
||||
report = check_cloudwatch_log_metric_filter(
|
||||
pattern,
|
||||
cloudtrail_client.trails,
|
||||
logs_client.metric_filters,
|
||||
cloudwatch_client.metric_alarms,
|
||||
report,
|
||||
)
|
||||
if (
|
||||
cloudtrail_client.trails is not None
|
||||
and logs_client.metric_filters is not None
|
||||
and cloudwatch_client.metric_alarms is not None
|
||||
):
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = "No CloudWatch log groups found with metric filters or alarms associated."
|
||||
report.region = logs_client.region
|
||||
report.resource_id = logs_client.audited_account
|
||||
report.resource_arn = logs_client.log_group_arn_template
|
||||
report = check_cloudwatch_log_metric_filter(
|
||||
pattern,
|
||||
cloudtrail_client.trails,
|
||||
logs_client.metric_filters,
|
||||
cloudwatch_client.metric_alarms,
|
||||
report,
|
||||
)
|
||||
|
||||
findings.append(report)
|
||||
findings.append(report)
|
||||
return findings
|
||||
|
||||
@@ -15,10 +15,10 @@
   "RelatedUrl": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudwatch-alarms-for-cloudtrail.html",
   "Remediation": {
     "Code": {
-      "CLI": "https://docs.bridgecrew.io/docs/monitoring_6#procedure",
+      "CLI": "https://docs.prowler.com/checks/aws/monitoring-policies/monitoring_6#procedure",
       "NativeIaC": "",
       "Other": "",
-      "Terraform": "https://docs.bridgecrew.io/docs/monitoring_6#fix---buildtime"
+      "Terraform": "https://docs.prowler.com/checks/aws/monitoring-policies/monitoring_6#fix---buildtime"
     },
     "Recommendation": {
       "Text": "It is recommended that a metric filter and alarm be established for unauthorized requests.",

@@ -15,21 +15,24 @@ class cloudwatch_log_metric_filter_authentication_failures(Check):
|
||||
def execute(self):
|
||||
pattern = r"\$\.eventName\s*=\s*.?ConsoleLogin.+\$\.errorMessage\s*=\s*.?Failed authentication.?"
|
||||
findings = []
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
"No CloudWatch log groups found with metric filters or alarms associated."
|
||||
)
|
||||
report.region = logs_client.region
|
||||
report.resource_id = logs_client.audited_account
|
||||
report.resource_arn = logs_client.log_group_arn_template
|
||||
report = check_cloudwatch_log_metric_filter(
|
||||
pattern,
|
||||
cloudtrail_client.trails,
|
||||
logs_client.metric_filters,
|
||||
cloudwatch_client.metric_alarms,
|
||||
report,
|
||||
)
|
||||
if (
|
||||
cloudtrail_client.trails is not None
|
||||
and logs_client.metric_filters is not None
|
||||
and cloudwatch_client.metric_alarms is not None
|
||||
):
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = "No CloudWatch log groups found with metric filters or alarms associated."
|
||||
report.region = logs_client.region
|
||||
report.resource_id = logs_client.audited_account
|
||||
report.resource_arn = logs_client.log_group_arn_template
|
||||
report = check_cloudwatch_log_metric_filter(
|
||||
pattern,
|
||||
cloudtrail_client.trails,
|
||||
logs_client.metric_filters,
|
||||
cloudwatch_client.metric_alarms,
|
||||
report,
|
||||
)
|
||||
|
||||
findings.append(report)
|
||||
findings.append(report)
|
||||
return findings
|
||||
|
||||
@@ -15,21 +15,24 @@ class cloudwatch_log_metric_filter_aws_organizations_changes(Check):
|
||||
def execute(self):
|
||||
pattern = r"\$\.eventSource\s*=\s*.?organizations\.amazonaws\.com.+\$\.eventName\s*=\s*.?AcceptHandshake.+\$\.eventName\s*=\s*.?AttachPolicy.+\$\.eventName\s*=\s*.?CancelHandshake.+\$\.eventName\s*=\s*.?CreateAccount.+\$\.eventName\s*=\s*.?CreateOrganization.+\$\.eventName\s*=\s*.?CreateOrganizationalUnit.+\$\.eventName\s*=\s*.?CreatePolicy.+\$\.eventName\s*=\s*.?DeclineHandshake.+\$\.eventName\s*=\s*.?DeleteOrganization.+\$\.eventName\s*=\s*.?DeleteOrganizationalUnit.+\$\.eventName\s*=\s*.?DeletePolicy.+\$\.eventName\s*=\s*.?EnableAllFeatures.+\$\.eventName\s*=\s*.?EnablePolicyType.+\$\.eventName\s*=\s*.?InviteAccountToOrganization.+\$\.eventName\s*=\s*.?LeaveOrganization.+\$\.eventName\s*=\s*.?DetachPolicy.+\$\.eventName\s*=\s*.?DisablePolicyType.+\$\.eventName\s*=\s*.?MoveAccount.+\$\.eventName\s*=\s*.?RemoveAccountFromOrganization.+\$\.eventName\s*=\s*.?UpdateOrganizationalUnit.+\$\.eventName\s*=\s*.?UpdatePolicy.?"
|
||||
findings = []
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
"No CloudWatch log groups found with metric filters or alarms associated."
|
||||
)
|
||||
report.region = logs_client.region
|
||||
report.resource_id = logs_client.audited_account
|
||||
report.resource_arn = logs_client.log_group_arn_template
|
||||
report = check_cloudwatch_log_metric_filter(
|
||||
pattern,
|
||||
cloudtrail_client.trails,
|
||||
logs_client.metric_filters,
|
||||
cloudwatch_client.metric_alarms,
|
||||
report,
|
||||
)
|
||||
if (
|
||||
cloudtrail_client.trails is not None
|
||||
and logs_client.metric_filters is not None
|
||||
and cloudwatch_client.metric_alarms is not None
|
||||
):
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = "No CloudWatch log groups found with metric filters or alarms associated."
|
||||
report.region = logs_client.region
|
||||
report.resource_id = logs_client.audited_account
|
||||
report.resource_arn = logs_client.log_group_arn_template
|
||||
report = check_cloudwatch_log_metric_filter(
|
||||
pattern,
|
||||
cloudtrail_client.trails,
|
||||
logs_client.metric_filters,
|
||||
cloudwatch_client.metric_alarms,
|
||||
report,
|
||||
)
|
||||
|
||||
findings.append(report)
|
||||
findings.append(report)
|
||||
return findings
|
||||
|
||||
@@ -15,10 +15,10 @@
   "RelatedUrl": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudwatch-alarms-for-cloudtrail.html",
   "Remediation": {
     "Code": {
-      "CLI": "https://docs.bridgecrew.io/docs/monitoring_7#procedure",
+      "CLI": "https://docs.prowler.com/checks/aws/monitoring-policies/monitoring_7#procedure",
       "NativeIaC": "",
       "Other": "",
-      "Terraform": "https://docs.bridgecrew.io/docs/monitoring_7#fix---buildtime"
+      "Terraform": "https://docs.prowler.com/checks/aws/monitoring-policies/monitoring_7#fix---buildtime"
     },
     "Recommendation": {
       "Text": "It is recommended that a metric filter and alarm be established for unauthorized requests.",

@@ -15,21 +15,24 @@ class cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk(Chec
|
||||
def execute(self):
|
||||
pattern = r"\$\.eventSource\s*=\s*.?kms.amazonaws.com.+\$\.eventName\s*=\s*.?DisableKey.+\$\.eventName\s*=\s*.?ScheduleKeyDeletion.?"
|
||||
findings = []
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
"No CloudWatch log groups found with metric filters or alarms associated."
|
||||
)
|
||||
report.region = logs_client.region
|
||||
report.resource_id = logs_client.audited_account
|
||||
report.resource_arn = logs_client.log_group_arn_template
|
||||
report = check_cloudwatch_log_metric_filter(
|
||||
pattern,
|
||||
cloudtrail_client.trails,
|
||||
logs_client.metric_filters,
|
||||
cloudwatch_client.metric_alarms,
|
||||
report,
|
||||
)
|
||||
if (
|
||||
cloudtrail_client.trails is not None
|
||||
and logs_client.metric_filters is not None
|
||||
and cloudwatch_client.metric_alarms is not None
|
||||
):
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = "No CloudWatch log groups found with metric filters or alarms associated."
|
||||
report.region = logs_client.region
|
||||
report.resource_id = logs_client.audited_account
|
||||
report.resource_arn = logs_client.log_group_arn_template
|
||||
report = check_cloudwatch_log_metric_filter(
|
||||
pattern,
|
||||
cloudtrail_client.trails,
|
||||
logs_client.metric_filters,
|
||||
cloudwatch_client.metric_alarms,
|
||||
report,
|
||||
)
|
||||
|
||||
findings.append(report)
|
||||
findings.append(report)
|
||||
return findings
|
||||
|
||||
@@ -15,10 +15,10 @@
   "RelatedUrl": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudwatch-alarms-for-cloudtrail.html",
   "Remediation": {
     "Code": {
-      "CLI": "https://docs.bridgecrew.io/docs/monitoring_8#procedure",
+      "CLI": "https://docs.prowler.com/checks/aws/monitoring-policies/monitoring_8#procedure",
       "NativeIaC": "",
       "Other": "",
-      "Terraform": "https://docs.bridgecrew.io/docs/monitoring_8#fix---buildtime"
+      "Terraform": "https://docs.prowler.com/checks/aws/monitoring-policies/monitoring_8#fix---buildtime"
     },
     "Recommendation": {
       "Text": "It is recommended that a metric filter and alarm be established for unauthorized requests.",

@@ -15,22 +15,25 @@ class cloudwatch_log_metric_filter_for_s3_bucket_policy_changes(Check):
|
||||
def execute(self):
|
||||
pattern = r"\$\.eventSource\s*=\s*.?s3.amazonaws.com.+\$\.eventName\s*=\s*.?PutBucketAcl.+\$\.eventName\s*=\s*.?PutBucketPolicy.+\$\.eventName\s*=\s*.?PutBucketCors.+\$\.eventName\s*=\s*.?PutBucketLifecycle.+\$\.eventName\s*=\s*.?PutBucketReplication.+\$\.eventName\s*=\s*.?DeleteBucketPolicy.+\$\.eventName\s*=\s*.?DeleteBucketCors.+\$\.eventName\s*=\s*.?DeleteBucketLifecycle.+\$\.eventName\s*=\s*.?DeleteBucketReplication.?"
|
||||
findings = []
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
"No CloudWatch log groups found with metric filters or alarms associated."
|
||||
)
|
||||
report.region = logs_client.region
|
||||
report.resource_id = logs_client.audited_account
|
||||
report.resource_arn = logs_client.log_group_arn_template
|
||||
if (
|
||||
cloudtrail_client.trails is not None
|
||||
and logs_client.metric_filters is not None
|
||||
and cloudwatch_client.metric_alarms is not None
|
||||
):
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = "No CloudWatch log groups found with metric filters or alarms associated."
|
||||
report.region = logs_client.region
|
||||
report.resource_id = logs_client.audited_account
|
||||
report.resource_arn = logs_client.log_group_arn_template
|
||||
|
||||
report = check_cloudwatch_log_metric_filter(
|
||||
pattern,
|
||||
cloudtrail_client.trails,
|
||||
logs_client.metric_filters,
|
||||
cloudwatch_client.metric_alarms,
|
||||
report,
|
||||
)
|
||||
report = check_cloudwatch_log_metric_filter(
|
||||
pattern,
|
||||
cloudtrail_client.trails,
|
||||
logs_client.metric_filters,
|
||||
cloudwatch_client.metric_alarms,
|
||||
report,
|
||||
)
|
||||
|
||||
findings.append(report)
|
||||
findings.append(report)
|
||||
return findings
|
||||
|
||||
@@ -15,10 +15,10 @@
   "RelatedUrl": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudwatch-alarms-for-cloudtrail.html",
   "Remediation": {
     "Code": {
-      "CLI": "https://docs.bridgecrew.io/docs/monitoring_4#procedure",
+      "CLI": "https://docs.prowler.com/checks/aws/monitoring-policies/monitoring_4#procedure",
       "NativeIaC": "",
       "Other": "",
-      "Terraform": "https://docs.bridgecrew.io/docs/monitoring_4#fix---buildtime"
+      "Terraform": "https://docs.prowler.com/checks/aws/monitoring-policies/monitoring_4#fix---buildtime"
     },
     "Recommendation": {
       "Text": "It is recommended that a metric filter and alarm be established for unauthorized requests.",

@@ -15,21 +15,24 @@ class cloudwatch_log_metric_filter_policy_changes(Check):
|
||||
def execute(self):
|
||||
pattern = r"\$\.eventName\s*=\s*.?DeleteGroupPolicy.+\$\.eventName\s*=\s*.?DeleteRolePolicy.+\$\.eventName\s*=\s*.?DeleteUserPolicy.+\$\.eventName\s*=\s*.?PutGroupPolicy.+\$\.eventName\s*=\s*.?PutRolePolicy.+\$\.eventName\s*=\s*.?PutUserPolicy.+\$\.eventName\s*=\s*.?CreatePolicy.+\$\.eventName\s*=\s*.?DeletePolicy.+\$\.eventName\s*=\s*.?CreatePolicyVersion.+\$\.eventName\s*=\s*.?DeletePolicyVersion.+\$\.eventName\s*=\s*.?AttachRolePolicy.+\$\.eventName\s*=\s*.?DetachRolePolicy.+\$\.eventName\s*=\s*.?AttachUserPolicy.+\$\.eventName\s*=\s*.?DetachUserPolicy.+\$\.eventName\s*=\s*.?AttachGroupPolicy.+\$\.eventName\s*=\s*.?DetachGroupPolicy.?"
|
||||
findings = []
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
"No CloudWatch log groups found with metric filters or alarms associated."
|
||||
)
|
||||
report.region = logs_client.region
|
||||
report.resource_id = logs_client.audited_account
|
||||
report.resource_arn = logs_client.log_group_arn_template
|
||||
report = check_cloudwatch_log_metric_filter(
|
||||
pattern,
|
||||
cloudtrail_client.trails,
|
||||
logs_client.metric_filters,
|
||||
cloudwatch_client.metric_alarms,
|
||||
report,
|
||||
)
|
||||
if (
|
||||
cloudtrail_client.trails is not None
|
||||
and logs_client.metric_filters is not None
|
||||
and cloudwatch_client.metric_alarms is not None
|
||||
):
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = "No CloudWatch log groups found with metric filters or alarms associated."
|
||||
report.region = logs_client.region
|
||||
report.resource_id = logs_client.audited_account
|
||||
report.resource_arn = logs_client.log_group_arn_template
|
||||
report = check_cloudwatch_log_metric_filter(
|
||||
pattern,
|
||||
cloudtrail_client.trails,
|
||||
logs_client.metric_filters,
|
||||
cloudwatch_client.metric_alarms,
|
||||
report,
|
||||
)
|
||||
|
||||
findings.append(report)
|
||||
findings.append(report)
|
||||
return findings
|
||||
|
||||
@@ -15,10 +15,10 @@
   "RelatedUrl": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudwatch-alarms-for-cloudtrail.html",
   "Remediation": {
     "Code": {
-      "CLI": "https://docs.bridgecrew.io/docs/monitoring_3#procedure",
+      "CLI": "https://docs.prowler.com/checks/aws/monitoring-policies/monitoring_3#procedure",
       "NativeIaC": "",
       "Other": "",
-      "Terraform": "https://docs.bridgecrew.io/docs/monitoring_3#fix---buildtime"
+      "Terraform": "https://docs.prowler.com/checks/aws/monitoring-policies/monitoring_3#fix---buildtime"
     },
     "Recommendation": {
       "Text": "It is recommended that a metric filter and alarm be established for unauthorized requests.",

@@ -15,21 +15,24 @@ class cloudwatch_log_metric_filter_root_usage(Check):
|
||||
def execute(self):
|
||||
pattern = r"\$\.userIdentity\.type\s*=\s*.?Root.+\$\.userIdentity\.invokedBy NOT EXISTS.+\$\.eventType\s*!=\s*.?AwsServiceEvent.?"
|
||||
findings = []
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
"No CloudWatch log groups found with metric filters or alarms associated."
|
||||
)
|
||||
report.region = logs_client.region
|
||||
report.resource_id = logs_client.audited_account
|
||||
report.resource_arn = logs_client.log_group_arn_template
|
||||
report = check_cloudwatch_log_metric_filter(
|
||||
pattern,
|
||||
cloudtrail_client.trails,
|
||||
logs_client.metric_filters,
|
||||
cloudwatch_client.metric_alarms,
|
||||
report,
|
||||
)
|
||||
if (
|
||||
cloudtrail_client.trails is not None
|
||||
and logs_client.metric_filters is not None
|
||||
and cloudwatch_client.metric_alarms is not None
|
||||
):
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = "No CloudWatch log groups found with metric filters or alarms associated."
|
||||
report.region = logs_client.region
|
||||
report.resource_id = logs_client.audited_account
|
||||
report.resource_arn = logs_client.log_group_arn_template
|
||||
report = check_cloudwatch_log_metric_filter(
|
||||
pattern,
|
||||
cloudtrail_client.trails,
|
||||
logs_client.metric_filters,
|
||||
cloudwatch_client.metric_alarms,
|
||||
report,
|
||||
)
|
||||
|
||||
findings.append(report)
|
||||
findings.append(report)
|
||||
return findings
|
||||
|
||||
@@ -15,10 +15,10 @@
   "RelatedUrl": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudwatch-alarms-for-cloudtrail.html",
   "Remediation": {
     "Code": {
-      "CLI": "https://docs.bridgecrew.io/docs/monitoring_10#procedure",
+      "CLI": "https://docs.prowler.com/checks/aws/monitoring-policies/monitoring_10#procedure",
       "NativeIaC": "",
       "Other": "",
-      "Terraform": "https://docs.bridgecrew.io/docs/monitoring_10#fix---buildtime"
+      "Terraform": "https://docs.prowler.com/checks/aws/monitoring-policies/monitoring_10#fix---buildtime"
     },
     "Recommendation": {
       "Text": "It is recommended that a metric filter and alarm be established for unauthorized requests.",

@@ -15,21 +15,24 @@ class cloudwatch_log_metric_filter_security_group_changes(Check):
|
||||
def execute(self):
|
||||
pattern = r"\$\.eventName\s*=\s*.?AuthorizeSecurityGroupIngress.+\$\.eventName\s*=\s*.?AuthorizeSecurityGroupEgress.+\$\.eventName\s*=\s*.?RevokeSecurityGroupIngress.+\$\.eventName\s*=\s*.?RevokeSecurityGroupEgress.+\$\.eventName\s*=\s*.?CreateSecurityGroup.+\$\.eventName\s*=\s*.?DeleteSecurityGroup.?"
|
||||
findings = []
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
"No CloudWatch log groups found with metric filters or alarms associated."
|
||||
)
|
||||
report.region = logs_client.region
|
||||
report.resource_id = logs_client.audited_account
|
||||
report.resource_arn = logs_client.log_group_arn_template
|
||||
report = check_cloudwatch_log_metric_filter(
|
||||
pattern,
|
||||
cloudtrail_client.trails,
|
||||
logs_client.metric_filters,
|
||||
cloudwatch_client.metric_alarms,
|
||||
report,
|
||||
)
|
||||
if (
|
||||
cloudtrail_client.trails is not None
|
||||
and logs_client.metric_filters is not None
|
||||
and cloudwatch_client.metric_alarms is not None
|
||||
):
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = "No CloudWatch log groups found with metric filters or alarms associated."
|
||||
report.region = logs_client.region
|
||||
report.resource_id = logs_client.audited_account
|
||||
report.resource_arn = logs_client.log_group_arn_template
|
||||
report = check_cloudwatch_log_metric_filter(
|
||||
pattern,
|
||||
cloudtrail_client.trails,
|
||||
logs_client.metric_filters,
|
||||
cloudwatch_client.metric_alarms,
|
||||
report,
|
||||
)
|
||||
|
||||
findings.append(report)
|
||||
findings.append(report)
|
||||
return findings
|
||||
|
||||
@@ -15,10 +15,10 @@
   "RelatedUrl": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudwatch-alarms-for-cloudtrail.html",
   "Remediation": {
     "Code": {
-      "CLI": "https://docs.bridgecrew.io/docs/monitoring_2#procedure",
+      "CLI": "https://docs.prowler.com/checks/aws/monitoring-policies/monitoring_2#procedure",
       "NativeIaC": "",
       "Other": "",
-      "Terraform": "https://docs.bridgecrew.io/docs/monitoring_2#fix---buildtime"
+      "Terraform": "https://docs.prowler.com/checks/aws/monitoring-policies/monitoring_2#fix---buildtime"
     },
     "Recommendation": {
       "Text": "It is recommended that a metric filter and alarm be established for unauthorized requests.",

@@ -15,21 +15,24 @@ class cloudwatch_log_metric_filter_sign_in_without_mfa(Check):
|
||||
def execute(self):
|
||||
pattern = r"\$\.eventName\s*=\s*.?ConsoleLogin.+\$\.additionalEventData\.MFAUsed\s*!=\s*.?Yes.?"
|
||||
findings = []
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
"No CloudWatch log groups found with metric filters or alarms associated."
|
||||
)
|
||||
report.region = logs_client.region
|
||||
report.resource_id = logs_client.audited_account
|
||||
report.resource_arn = logs_client.log_group_arn_template
|
||||
report = check_cloudwatch_log_metric_filter(
|
||||
pattern,
|
||||
cloudtrail_client.trails,
|
||||
logs_client.metric_filters,
|
||||
cloudwatch_client.metric_alarms,
|
||||
report,
|
||||
)
|
||||
if (
|
||||
cloudtrail_client.trails is not None
|
||||
and logs_client.metric_filters is not None
|
||||
and cloudwatch_client.metric_alarms is not None
|
||||
):
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = "No CloudWatch log groups found with metric filters or alarms associated."
|
||||
report.region = logs_client.region
|
||||
report.resource_id = logs_client.audited_account
|
||||
report.resource_arn = logs_client.log_group_arn_template
|
||||
report = check_cloudwatch_log_metric_filter(
|
||||
pattern,
|
||||
cloudtrail_client.trails,
|
||||
logs_client.metric_filters,
|
||||
cloudwatch_client.metric_alarms,
|
||||
report,
|
||||
)
|
||||
|
||||
findings.append(report)
|
||||
findings.append(report)
|
||||
return findings
|
||||
|
||||
@@ -15,10 +15,10 @@
   "RelatedUrl": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudwatch-alarms-for-cloudtrail.html",
   "Remediation": {
     "Code": {
-      "CLI": "https://docs.bridgecrew.io/docs/monitoring_1#procedure",
+      "CLI": "https://docs.prowler.com/checks/aws/monitoring-policies/monitoring_1#procedure",
       "NativeIaC": "",
       "Other": "",
-      "Terraform": "https://docs.bridgecrew.io/docs/monitoring_1#fix---buildtime"
+      "Terraform": "https://docs.prowler.com/checks/aws/monitoring-policies/monitoring_1#fix---buildtime"
     },
     "Recommendation": {
       "Text": "It is recommended that a metric filter and alarm be established for unauthorized requests.",

@@ -15,21 +15,24 @@ class cloudwatch_log_metric_filter_unauthorized_api_calls(Check):
|
||||
def execute(self):
|
||||
pattern = r"\$\.errorCode\s*=\s*.?\*UnauthorizedOperation.+\$\.errorCode\s*=\s*.?AccessDenied\*.?"
|
||||
findings = []
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
"No CloudWatch log groups found with metric filters or alarms associated."
|
||||
)
|
||||
report.region = logs_client.region
|
||||
report.resource_id = logs_client.audited_account
|
||||
report.resource_arn = logs_client.log_group_arn_template
|
||||
report = check_cloudwatch_log_metric_filter(
|
||||
pattern,
|
||||
cloudtrail_client.trails,
|
||||
logs_client.metric_filters,
|
||||
cloudwatch_client.metric_alarms,
|
||||
report,
|
||||
)
|
||||
if (
|
||||
cloudtrail_client.trails is not None
|
||||
and logs_client.metric_filters is not None
|
||||
and cloudwatch_client.metric_alarms is not None
|
||||
):
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.status = "FAIL"
|
||||
report.status_extended = "No CloudWatch log groups found with metric filters or alarms associated."
|
||||
report.region = logs_client.region
|
||||
report.resource_id = logs_client.audited_account
|
||||
report.resource_arn = logs_client.log_group_arn_template
|
||||
report = check_cloudwatch_log_metric_filter(
|
||||
pattern,
|
||||
cloudtrail_client.trails,
|
||||
logs_client.metric_filters,
|
||||
cloudwatch_client.metric_alarms,
|
||||
report,
|
||||
)
|
||||
|
||||
findings.append(report)
|
||||
findings.append(report)
|
||||
return findings
|
||||
|
||||
@@ -16,7 +16,8 @@ class CloudWatch(AWSService):
|
||||
super().__init__(__class__.__name__, audit_info)
|
||||
self.metric_alarms = []
|
||||
self.__threading_call__(self.__describe_alarms__)
|
||||
self.__list_tags_for_resource__()
|
||||
if self.metric_alarms:
|
||||
self.__list_tags_for_resource__()
|
||||
|
||||
def __describe_alarms__(self, regional_client):
|
||||
logger.info("CloudWatch - Describing alarms...")
|
||||
@@ -33,6 +34,8 @@ class CloudWatch(AWSService):
|
||||
namespace = None
|
||||
if "Namespace" in alarm:
|
||||
namespace = alarm["Namespace"]
|
||||
if self.metric_alarms is None:
|
||||
self.metric_alarms = []
|
||||
self.metric_alarms.append(
|
||||
MetricAlarm(
|
||||
arn=alarm["AlarmArn"],
|
||||
@@ -42,6 +45,17 @@ class CloudWatch(AWSService):
|
||||
region=regional_client.region,
|
||||
)
|
||||
)
|
||||
except ClientError as error:
|
||||
if error.response["Error"]["Code"] == "AccessDenied":
|
||||
logger.error(
|
||||
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
if not self.metric_alarms:
|
||||
self.metric_alarms = None
|
||||
else:
|
||||
logger.error(
|
||||
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
@@ -72,15 +86,16 @@ class Logs(AWSService):
|
||||
self.log_groups = []
|
||||
self.__threading_call__(self.__describe_metric_filters__)
|
||||
self.__threading_call__(self.__describe_log_groups__)
|
||||
if (
|
||||
"cloudwatch_log_group_no_secrets_in_logs"
|
||||
in audit_info.audit_metadata.expected_checks
|
||||
):
|
||||
self.events_per_log_group_threshold = (
|
||||
1000 # The threshold for number of events to return per log group.
|
||||
)
|
||||
self.__threading_call__(self.__get_log_events__)
|
||||
self.__list_tags_for_resource__()
|
||||
if self.log_groups:
|
||||
if (
|
||||
"cloudwatch_log_group_no_secrets_in_logs"
|
||||
in audit_info.audit_metadata.expected_checks
|
||||
):
|
||||
self.events_per_log_group_threshold = (
|
||||
1000 # The threshold for number of events to return per log group.
|
||||
)
|
||||
self.__threading_call__(self.__get_log_events__)
|
||||
self.__list_tags_for_resource__()
|
||||
|
||||
def __describe_metric_filters__(self, regional_client):
|
||||
logger.info("CloudWatch Logs - Describing metric filters...")
|
||||
@@ -94,6 +109,8 @@ class Logs(AWSService):
|
||||
if not self.audit_resources or (
|
||||
is_resource_filtered(arn, self.audit_resources)
|
||||
):
|
||||
if self.metric_filters is None:
|
||||
self.metric_filters = []
|
||||
self.metric_filters.append(
|
||||
MetricFilter(
|
||||
arn=arn,
|
||||
@@ -104,6 +121,17 @@ class Logs(AWSService):
|
||||
region=regional_client.region,
|
||||
)
|
||||
)
|
||||
except ClientError as error:
|
||||
if error.response["Error"]["Code"] == "AccessDeniedException":
|
||||
logger.error(
|
||||
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
if not self.metric_filters:
|
||||
self.metric_filters = []
|
||||
else:
|
||||
logger.error(
|
||||
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
@@ -126,6 +154,8 @@ class Logs(AWSService):
|
||||
if not retention_days:
|
||||
never_expire = True
|
||||
retention_days = 9999
|
||||
if self.log_groups is None:
|
||||
self.log_groups = []
|
||||
self.log_groups.append(
|
||||
LogGroup(
|
||||
arn=log_group["arn"],
|
||||
@@ -136,6 +166,17 @@ class Logs(AWSService):
|
||||
region=regional_client.region,
|
||||
)
|
||||
)
|
||||
except ClientError as error:
|
||||
if error.response["Error"]["Code"] == "AccessDeniedException":
|
||||
logger.error(
|
||||
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
if not self.log_groups:
|
||||
self.log_groups = None
|
||||
else:
|
||||
logger.error(
|
||||
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
|
||||
@@ -12,9 +12,10 @@ def check_cloudwatch_log_metric_filter(
 ):
     # 1. Iterate for CloudWatch Log Group in CloudTrail trails
     log_groups = []
-    for trail in trails.values():
-        if trail.log_group_arn:
-            log_groups.append(trail.log_group_arn.split(":")[6])
+    if trails is not None:
+        for trail in trails.values():
+            if trail.log_group_arn:
+                log_groups.append(trail.log_group_arn.split(":")[6])
     # 2. Describe metric filters for previous log groups
     for metric_filter in metric_filters:
         if metric_filter.log_group in log_groups:

@@ -15,10 +15,10 @@
   "RelatedUrl": "https://aws.amazon.com/blogs/mt/aws-config-best-practices/",
   "Remediation": {
     "Code": {
-      "CLI": "https://docs.bridgecrew.io/docs/logging_5-enable-aws-config-regions#cli-command",
+      "CLI": "https://docs.prowler.com/checks/aws/logging-policies/logging_5-enable-aws-config-regions#cli-command",
       "NativeIaC": "",
-      "Other": "https://docs.bridgecrew.io/docs/logging_5-enable-aws-config-regions#aws-console",
-      "Terraform": "https://docs.bridgecrew.io/docs/logging_5-enable-aws-config-regions#terraform"
+      "Other": "https://docs.prowler.com/checks/aws/logging-policies/logging_5-enable-aws-config-regions#aws-console",
+      "Terraform": "https://docs.prowler.com/checks/aws/logging-policies/logging_5-enable-aws-config-regions#terraform"
     },
     "Recommendation": {
       "Text": "It is recommended to enable AWS Config in all regions.",

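A hedged sketch of what enabling AWS Config in one region looks like, to be repeated with --region for every region in use; the recorder name, role ARN, and bucket below are assumed placeholders rather than values taken from this diff:

# Hypothetical per-region AWS Config setup
aws configservice put-configuration-recorder \
  --configuration-recorder name=default,roleARN=<config_service_role_arn> \
  --recording-group allSupported=true,includeGlobalResourceTypes=true
aws configservice put-delivery-channel \
  --delivery-channel name=default,s3BucketName=<config_bucket_name>
aws configservice start-configuration-recorder \
  --configuration-recorder-name default
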
@@ -18,7 +18,7 @@
       "CLI": "aws docdb create-db-cluster --db-cluster-identifier <db_cluster_id> --port 27017 --engine docdb --master-username <yourMasterUsername> --master-user-password <yourMasterPassword> --storage-encrypted",
       "NativeIaC": "",
       "Other": "https://www.trendmicro.com/cloudoneconformity-staging/knowledge-base/aws/DocumentDB/encryption-enabled.html",
-      "Terraform": "https://docs.bridgecrew.io/docs/bc_aws_general_28#terraform"
+      "Terraform": "https://docs.prowler.com/checks/aws/general-policies/bc_aws_general_28#terraform"
     },
     "Recommendation": {
       "Text": "Enable Encryption. Use a CMK where possible. It will provide additional management and privacy benefits.",

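Building on the CLI line above, a hedged sketch of creating the cluster with a customer-managed key; the --kms-key-id flag and its placeholder are an assumption added for illustration, not part of this diff:

# Hypothetical: encrypted DocumentDB cluster using a CMK instead of the default key
aws docdb create-db-cluster \
  --db-cluster-identifier <db_cluster_id> \
  --engine docdb --port 27017 \
  --master-username <yourMasterUsername> \
  --master-user-password <yourMasterPassword> \
  --storage-encrypted \
  --kms-key-id <your_cmk_key_id>
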
@@ -16,9 +16,9 @@
   "Remediation": {
     "Code": {
       "CLI": "aws dax create-cluster --cluster-name <cluster_name> --node-type <node_type> --replication-factor <nodes_number> --iam-role-arn <role_arn> --sse-specification Enabled=true",
-      "NativeIaC": "https://docs.bridgecrew.io/docs/bc_aws_general_23#cloudformation",
+      "NativeIaC": "https://docs.prowler.com/checks/aws/general-policies/bc_aws_general_23#cloudformation",
       "Other": "",
-      "Terraform": "https://docs.bridgecrew.io/docs/bc_aws_general_23#terraform"
+      "Terraform": "https://docs.prowler.com/checks/aws/general-policies/bc_aws_general_23#terraform"
     },
     "Recommendation": {
       "Text": "Re-create the cluster to enable encryption at rest if it was not enabled at creation.",

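Because DAX server-side encryption can only be chosen when the cluster is created, the re-create path above amounts to roughly the following sketch; the cluster names are placeholders and the delete step is an assumption about the rollout, not something this diff prescribes:

# Hypothetical: stand up an encrypted replacement, migrate clients, then retire the old cluster
aws dax create-cluster \
  --cluster-name <new_cluster_name> \
  --node-type <node_type> \
  --replication-factor <nodes_number> \
  --iam-role-arn <role_arn> \
  --sse-specification Enabled=true
aws dax delete-cluster --cluster-name <old_cluster_name>
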
Some files were not shown because too many files have changed in this diff.