Compare commits

...

34 Commits

Author SHA1 Message Date
Prowler Bot
4fb010847f fix(compliance_tables): add correct values for findings (#6126)
Co-authored-by: Pedro Martín <pedromarting3@gmail.com>
2024-12-10 16:47:16 -04:00
Prowler Bot
8eac17d826 fix(aws): get firewall manager managed rule groups (#6123)
Co-authored-by: Hugo Pereira Brito <101209179+HugoPBrito@users.noreply.github.com>
2024-12-10 16:47:01 -04:00
dependabot[bot]
8a1abb60c9 chore(deps-dev): bump pytest from 8.3.3 to 8.3.4 (#6075)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-10 14:38:57 -04:00
Prowler Bot
74776bed06 fix(aws): check AWS Owned keys in firehose_stream_encrypted_at_rest (#6120)
Co-authored-by: Hugo Pereira Brito <101209179+HugoPBrito@users.noreply.github.com>
2024-12-10 14:30:45 -04:00
dependabot[bot]
3373a8e6c8 chore(deps): bump microsoft-kiota-abstractions from 1.6.2 to 1.6.6 (#6080)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-10 13:48:43 -04:00
Prowler Bot
7db74516b3 fix(aws): set IAM identity as resource in threat detection (#6117)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2024-12-10 13:37:27 -04:00
Prowler Bot
3ae2f9e0bf fix(gcp): make sure default project is active (#6112)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2024-12-10 11:53:28 -04:00
dependabot[bot]
91a8a13dc6 chore(deps): bump msgraph-sdk from 1.12.0 to 1.14.0 (#6074)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-10 11:42:18 -04:00
dependabot[bot]
700ed5ecec chore(deps-dev): bump pylint from 3.3.1 to 3.3.2 (#6099)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-10 10:36:15 -04:00
Prowler Bot
50aebcaf88 fix(aws): set same severity for EC2 IMDSv2 checks (#6103)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2024-12-10 09:30:34 -04:00
dependabot[bot]
ad96963706 chore(deps): bump boto3 from 1.35.66 to 1.35.77 (#6107)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-10 09:26:53 -04:00
dependabot[bot]
95832bdb12 chore(deps): bump botocore from 1.35.76 to 1.35.77 (#6100)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-10 10:55:31 +01:00
dependabot[bot]
d7bdc0aaef chore(deps-dev): bump bandit from 1.7.10 to 1.8.0 (#6072)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-09 17:39:53 -04:00
dependabot[bot]
1c0c836286 chore(deps-dev): bump coverage from 7.6.7 to 7.6.9 (#6076)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-09 16:51:09 -04:00
dependabot[bot]
822252d03b chore(deps): bump slack-sdk from 3.33.4 to 3.33.5 (#6077)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-09 15:53:24 -04:00
dependabot[bot]
a20fdb30f9 chore(deps-dev): bump mkdocs-material from 9.5.45 to 9.5.48 (#6078)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-09 14:44:08 -04:00
dependabot[bot]
2e339f964a chore(deps): bump botocore from 1.35.66 to 1.35.76 (#6071)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-09 13:38:26 -04:00
Sergio Garcia
bb4498a3d5 chore(container): upload v4 with correct tags (#6093) 2024-12-09 18:38:14 +01:00
Prowler Bot
dae4392efc fix(backup): modify list recovery points call (#6057)
Co-authored-by: Daniel Barranquero <74871504+danibarranqueroo@users.noreply.github.com>
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2024-12-09 12:55:12 -04:00
dependabot[bot]
8b3a1608c2 chore(deps-dev): bump vulture from 2.13 to 2.14 (#6069)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: MrCloudSec <hello@mistercloudsec.com>
2024-12-09 12:22:48 -04:00
Prowler Bot
148bdacebe chore(containers): support for v4.6 branch (#6084)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2024-12-09 11:23:06 -04:00
Prowler Bot
b60edc1f6b fix(tests): use datetime.datetime.now() in GCP kms_key_rotation_enabled (#6083)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2024-12-09 11:06:37 -04:00
Prowler Bot
d28462b642 chore(dependabot): Update for UI and v4 (#6087)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-12-09 10:45:24 -04:00
dependabot[bot]
ef9b37d5c0 chore(deps): bump trufflesecurity/trufflehog from 3.84.1 to 3.85.0 (#6067)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-09 09:28:22 -04:00
Sergio Garcia
764f260cea chore(version): update Prowler version (#5969) 2024-11-29 13:14:08 -04:00
Prowler Bot
e771218bba fix(k8s): handle Kubernetes kubeconfig content correctly (#5967)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2024-11-29 10:19:00 -04:00
Prowler Bot
c627a3e9aa fix(azure): containerregistry_not_publicly_accesible is not accurate (#5966)
Co-authored-by: StylusFrost <43682773+StylusFrost@users.noreply.github.com>
2024-11-29 09:53:06 -04:00
Prowler Bot
1c58644118 fix(rds): add default key value to RDS event (#5965)
Co-authored-by: Rubén De la Torre Vico <rubendltv22@gmail.com>
2024-11-29 09:52:55 -04:00
Prowler Bot
398fa37a23 fix(gcp): use session credentials to check if API is active (#5936)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2024-11-27 16:58:40 -04:00
Prowler Bot
412d948585 fix(aws): exclude threat detection checks if category not present (#5934)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2024-11-27 12:56:00 -04:00
Prowler Bot
6bc0432ed5 fix(list_by_service): execute lambda if requested (#5931)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-11-27 11:22:49 -04:00
Pepe Fagoaga
b2ebc8410a chore(version): update Prowler v4 version (#5901) 2024-11-26 08:11:16 -04:00
Prowler Bot
94b6fbda91 chore(gcp): update docstring of ADC credentials (#5879)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2024-11-25 13:54:40 -04:00
Prowler Bot
3ce785a7b4 chore(aws): update fixers docstring (#5878)
Co-authored-by: Daniel Barranquero <74871504+danibarranqueroo@users.noreply.github.com>
2024-11-25 13:01:45 -04:00
66 changed files with 1199 additions and 560 deletions

View File

@@ -5,6 +5,7 @@
version: 2
updates:
# v5
- package-ecosystem: "pip"
directory: "/"
schedule:
@@ -14,6 +15,7 @@ updates:
labels:
- "dependencies"
- "pip"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
@@ -24,20 +26,55 @@ updates:
- "dependencies"
- "github_actions"
- package-ecosystem: "pip"
- package-ecosystem: "npm"
directory: "/"
schedule:
interval: "daily"
open-pull-requests-limit: 10
target-branch: master
labels:
- "dependencies"
- "npm"
# v4.6
- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "weekly"
open-pull-requests-limit: 10
target-branch: v4.6
labels:
- "dependencies"
- "pip"
- "v4"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
open-pull-requests-limit: 10
target-branch: v4.6
labels:
- "dependencies"
- "github_actions"
- "v4"
# v3
- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "monthly"
open-pull-requests-limit: 10
target-branch: v3
labels:
- "dependencies"
- "pip"
- "v3"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"
interval: "monthly"
open-pull-requests-limit: 10
target-branch: v3
labels:

View File

@@ -3,7 +3,11 @@ name: build-lint-push-containers
on:
push:
branches:
# For `v3-latest`
- "v3"
# For `v4-latest`
- "v4.6"
# For `latest`
- "master"
paths-ignore:
- ".github/**"
@@ -80,8 +84,8 @@ jobs:
;;
4)
echo "LATEST_TAG=latest" >> "${GITHUB_ENV}"
echo "STABLE_TAG=stable" >> "${GITHUB_ENV}"
echo "LATEST_TAG=v4-latest" >> "${GITHUB_ENV}"
echo "STABLE_TAG=v4-stable" >> "${GITHUB_ENV}"
;;
*)

View File

@@ -11,7 +11,7 @@ jobs:
with:
fetch-depth: 0
- name: TruffleHog OSS
uses: trufflesecurity/trufflehog@v3.84.1
uses: trufflesecurity/trufflehog@v3.85.0
with:
path: ./
base: ${{ github.event.repository.default_branch }}

poetry.lock generated
View File

@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
[[package]]
name = "about-time"
@@ -694,13 +694,13 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"]
[[package]]
name = "bandit"
version = "1.7.10"
version = "1.8.0"
description = "Security oriented static analyser for python code."
optional = false
python-versions = ">=3.8"
python-versions = ">=3.9"
files = [
{file = "bandit-1.7.10-py3-none-any.whl", hash = "sha256:665721d7bebbb4485a339c55161ac0eedde27d51e638000d91c8c2d68343ad02"},
{file = "bandit-1.7.10.tar.gz", hash = "sha256:59ed5caf5d92b6ada4bf65bc6437feea4a9da1093384445fed4d472acc6cff7b"},
{file = "bandit-1.8.0-py3-none-any.whl", hash = "sha256:b1a61d829c0968aed625381e426aa378904b996529d048f8d908fa28f6b13e38"},
{file = "bandit-1.8.0.tar.gz", hash = "sha256:b5bfe55a095abd9fe20099178a7c6c060f844bfd4fe4c76d28e35e4c52b9d31e"},
]
[package.dependencies]
@@ -775,17 +775,17 @@ files = [
[[package]]
name = "boto3"
version = "1.35.66"
version = "1.35.77"
description = "The AWS SDK for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "boto3-1.35.66-py3-none-any.whl", hash = "sha256:09a610f8cf4d3c22d4ca69c1f89079e3a1c82805ce94fa0eb4ecdd4d2ba6c4bc"},
{file = "boto3-1.35.66.tar.gz", hash = "sha256:c392b9168b65e9c23483eaccb5b68d1f960232d7f967a1e00a045ba065ce050d"},
{file = "boto3-1.35.77-py3-none-any.whl", hash = "sha256:a09871805f8e462349a1c33c23eb413668df0bf68424e61d53518e1a7d883b2f"},
{file = "boto3-1.35.77.tar.gz", hash = "sha256:cc819cdbccbc2d0dc185f1dcfe74cf3809489c4cae63c2e5d6a557aa0c5ab928"},
]
[package.dependencies]
botocore = ">=1.35.66,<1.36.0"
botocore = ">=1.35.77,<1.36.0"
jmespath = ">=0.7.1,<2.0.0"
s3transfer = ">=0.10.0,<0.11.0"
@@ -794,13 +794,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
[[package]]
name = "botocore"
version = "1.35.66"
version = "1.35.77"
description = "Low-level, data-driven core of boto 3."
optional = false
python-versions = ">=3.8"
files = [
{file = "botocore-1.35.66-py3-none-any.whl", hash = "sha256:d0683e9c18bb6852f768da268086c3749d925332a664db0dd1459cfa7e96e475"},
{file = "botocore-1.35.66.tar.gz", hash = "sha256:51f43220315f384959f02ea3266740db4d421592dd87576c18824e424b349fdb"},
{file = "botocore-1.35.77-py3-none-any.whl", hash = "sha256:3faa27d65841499762228902d7e215fa99a4c2fdc76c9113e1c3f339bdf685b8"},
{file = "botocore-1.35.77.tar.gz", hash = "sha256:17b778016644e9342ca3ff2f430c1d1db0c6126e9b41a57cff52ac58e7a455e0"},
]
[package.dependencies]
@@ -1099,73 +1099,73 @@ files = [
[[package]]
name = "coverage"
version = "7.6.7"
version = "7.6.9"
description = "Code coverage measurement for Python"
optional = false
python-versions = ">=3.9"
files = [
{file = "coverage-7.6.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:108bb458827765d538abcbf8288599fee07d2743357bdd9b9dad456c287e121e"},
{file = "coverage-7.6.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c973b2fe4dc445cb865ab369df7521df9c27bf40715c837a113edaa2aa9faf45"},
{file = "coverage-7.6.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c6b24007c4bcd0b19fac25763a7cac5035c735ae017e9a349b927cfc88f31c1"},
{file = "coverage-7.6.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:acbb8af78f8f91b3b51f58f288c0994ba63c646bc1a8a22ad072e4e7e0a49f1c"},
{file = "coverage-7.6.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad32a981bcdedb8d2ace03b05e4fd8dace8901eec64a532b00b15217d3677dd2"},
{file = "coverage-7.6.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:34d23e28ccb26236718a3a78ba72744212aa383141961dd6825f6595005c8b06"},
{file = "coverage-7.6.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e25bacb53a8c7325e34d45dddd2f2fbae0dbc230d0e2642e264a64e17322a777"},
{file = "coverage-7.6.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:af05bbba896c4472a29408455fe31b3797b4d8648ed0a2ccac03e074a77e2314"},
{file = "coverage-7.6.7-cp310-cp310-win32.whl", hash = "sha256:796c9b107d11d2d69e1849b2dfe41730134b526a49d3acb98ca02f4985eeff7a"},
{file = "coverage-7.6.7-cp310-cp310-win_amd64.whl", hash = "sha256:987a8e3da7da4eed10a20491cf790589a8e5e07656b6dc22d3814c4d88faf163"},
{file = "coverage-7.6.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7e61b0e77ff4dddebb35a0e8bb5a68bf0f8b872407d8d9f0c726b65dfabe2469"},
{file = "coverage-7.6.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1a5407a75ca4abc20d6252efeb238377a71ce7bda849c26c7a9bece8680a5d99"},
{file = "coverage-7.6.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df002e59f2d29e889c37abd0b9ee0d0e6e38c24f5f55d71ff0e09e3412a340ec"},
{file = "coverage-7.6.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:673184b3156cba06154825f25af33baa2671ddae6343f23175764e65a8c4c30b"},
{file = "coverage-7.6.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e69ad502f1a2243f739f5bd60565d14a278be58be4c137d90799f2c263e7049a"},
{file = "coverage-7.6.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:60dcf7605c50ea72a14490d0756daffef77a5be15ed1b9fea468b1c7bda1bc3b"},
{file = "coverage-7.6.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9c2eb378bebb2c8f65befcb5147877fc1c9fbc640fc0aad3add759b5df79d55d"},
{file = "coverage-7.6.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3c0317288f032221d35fa4cbc35d9f4923ff0dfd176c79c9b356e8ef8ef2dff4"},
{file = "coverage-7.6.7-cp311-cp311-win32.whl", hash = "sha256:951aade8297358f3618a6e0660dc74f6b52233c42089d28525749fc8267dccd2"},
{file = "coverage-7.6.7-cp311-cp311-win_amd64.whl", hash = "sha256:5e444b8e88339a2a67ce07d41faabb1d60d1004820cee5a2c2b54e2d8e429a0f"},
{file = "coverage-7.6.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f07ff574986bc3edb80e2c36391678a271d555f91fd1d332a1e0f4b5ea4b6ea9"},
{file = "coverage-7.6.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:49ed5ee4109258973630c1f9d099c7e72c5c36605029f3a91fe9982c6076c82b"},
{file = "coverage-7.6.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3e8796434a8106b3ac025fd15417315d7a58ee3e600ad4dbcfddc3f4b14342c"},
{file = "coverage-7.6.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3b925300484a3294d1c70f6b2b810d6526f2929de954e5b6be2bf8caa1f12c1"},
{file = "coverage-7.6.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c42ec2c522e3ddd683dec5cdce8e62817afb648caedad9da725001fa530d354"},
{file = "coverage-7.6.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0266b62cbea568bd5e93a4da364d05de422110cbed5056d69339bd5af5685433"},
{file = "coverage-7.6.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e5f2a0f161d126ccc7038f1f3029184dbdf8f018230af17ef6fd6a707a5b881f"},
{file = "coverage-7.6.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c132b5a22821f9b143f87446805e13580b67c670a548b96da945a8f6b4f2efbb"},
{file = "coverage-7.6.7-cp312-cp312-win32.whl", hash = "sha256:7c07de0d2a110f02af30883cd7dddbe704887617d5c27cf373362667445a4c76"},
{file = "coverage-7.6.7-cp312-cp312-win_amd64.whl", hash = "sha256:fd49c01e5057a451c30c9b892948976f5d38f2cbd04dc556a82743ba8e27ed8c"},
{file = "coverage-7.6.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:46f21663e358beae6b368429ffadf14ed0a329996248a847a4322fb2e35d64d3"},
{file = "coverage-7.6.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:40cca284c7c310d622a1677f105e8507441d1bb7c226f41978ba7c86979609ab"},
{file = "coverage-7.6.7-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77256ad2345c29fe59ae861aa11cfc74579c88d4e8dbf121cbe46b8e32aec808"},
{file = "coverage-7.6.7-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87ea64b9fa52bf395272e54020537990a28078478167ade6c61da7ac04dc14bc"},
{file = "coverage-7.6.7-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d608a7808793e3615e54e9267519351c3ae204a6d85764d8337bd95993581a8"},
{file = "coverage-7.6.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdd94501d65adc5c24f8a1a0eda110452ba62b3f4aeaba01e021c1ed9cb8f34a"},
{file = "coverage-7.6.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:82c809a62e953867cf57e0548c2b8464207f5f3a6ff0e1e961683e79b89f2c55"},
{file = "coverage-7.6.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bb684694e99d0b791a43e9fc0fa58efc15ec357ac48d25b619f207c41f2fd384"},
{file = "coverage-7.6.7-cp313-cp313-win32.whl", hash = "sha256:963e4a08cbb0af6623e61492c0ec4c0ec5c5cf74db5f6564f98248d27ee57d30"},
{file = "coverage-7.6.7-cp313-cp313-win_amd64.whl", hash = "sha256:14045b8bfd5909196a90da145a37f9d335a5d988a83db34e80f41e965fb7cb42"},
{file = "coverage-7.6.7-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f2c7a045eef561e9544359a0bf5784b44e55cefc7261a20e730baa9220c83413"},
{file = "coverage-7.6.7-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5dd4e4a49d9c72a38d18d641135d2fb0bdf7b726ca60a103836b3d00a1182acd"},
{file = "coverage-7.6.7-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c95e0fa3d1547cb6f021ab72f5c23402da2358beec0a8e6d19a368bd7b0fb37"},
{file = "coverage-7.6.7-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f63e21ed474edd23f7501f89b53280014436e383a14b9bd77a648366c81dce7b"},
{file = "coverage-7.6.7-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ead9b9605c54d15be228687552916c89c9683c215370c4a44f1f217d2adcc34d"},
{file = "coverage-7.6.7-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:0573f5cbf39114270842d01872952d301027d2d6e2d84013f30966313cadb529"},
{file = "coverage-7.6.7-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:e2c8e3384c12dfa19fa9a52f23eb091a8fad93b5b81a41b14c17c78e23dd1d8b"},
{file = "coverage-7.6.7-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:70a56a2ec1869e6e9fa69ef6b76b1a8a7ef709972b9cc473f9ce9d26b5997ce3"},
{file = "coverage-7.6.7-cp313-cp313t-win32.whl", hash = "sha256:dbba8210f5067398b2c4d96b4e64d8fb943644d5eb70be0d989067c8ca40c0f8"},
{file = "coverage-7.6.7-cp313-cp313t-win_amd64.whl", hash = "sha256:dfd14bcae0c94004baba5184d1c935ae0d1231b8409eb6c103a5fd75e8ecdc56"},
{file = "coverage-7.6.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37a15573f988b67f7348916077c6d8ad43adb75e478d0910957394df397d2874"},
{file = "coverage-7.6.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b6cce5c76985f81da3769c52203ee94722cd5d5889731cd70d31fee939b74bf0"},
{file = "coverage-7.6.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ab9763d291a17b527ac6fd11d1a9a9c358280adb320e9c2672a97af346ac2c"},
{file = "coverage-7.6.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6cf96ceaa275f071f1bea3067f8fd43bec184a25a962c754024c973af871e1b7"},
{file = "coverage-7.6.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aee9cf6b0134d6f932d219ce253ef0e624f4fa588ee64830fcba193269e4daa3"},
{file = "coverage-7.6.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2bc3e45c16564cc72de09e37413262b9f99167803e5e48c6156bccdfb22c8327"},
{file = "coverage-7.6.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:623e6965dcf4e28a3debaa6fcf4b99ee06d27218f46d43befe4db1c70841551c"},
{file = "coverage-7.6.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:850cfd2d6fc26f8346f422920ac204e1d28814e32e3a58c19c91980fa74d8289"},
{file = "coverage-7.6.7-cp39-cp39-win32.whl", hash = "sha256:c296263093f099da4f51b3dff1eff5d4959b527d4f2f419e16508c5da9e15e8c"},
{file = "coverage-7.6.7-cp39-cp39-win_amd64.whl", hash = "sha256:90746521206c88bdb305a4bf3342b1b7316ab80f804d40c536fc7d329301ee13"},
{file = "coverage-7.6.7-pp39.pp310-none-any.whl", hash = "sha256:0ddcb70b3a3a57581b450571b31cb774f23eb9519c2aaa6176d3a84c9fc57671"},
{file = "coverage-7.6.7.tar.gz", hash = "sha256:d79d4826e41441c9a118ff045e4bccb9fdbdcb1d02413e7ea6eb5c87b5439d24"},
{file = "coverage-7.6.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85d9636f72e8991a1706b2b55b06c27545448baf9f6dbf51c4004609aacd7dcb"},
{file = "coverage-7.6.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:608a7fd78c67bee8936378299a6cb9f5149bb80238c7a566fc3e6717a4e68710"},
{file = "coverage-7.6.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96d636c77af18b5cb664ddf12dab9b15a0cfe9c0bde715da38698c8cea748bfa"},
{file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75cded8a3cff93da9edc31446872d2997e327921d8eed86641efafd350e1df1"},
{file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7b15f589593110ae767ce997775d645b47e5cbbf54fd322f8ebea6277466cec"},
{file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:44349150f6811b44b25574839b39ae35291f6496eb795b7366fef3bd3cf112d3"},
{file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d891c136b5b310d0e702e186d70cd16d1119ea8927347045124cb286b29297e5"},
{file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:db1dab894cc139f67822a92910466531de5ea6034ddfd2b11c0d4c6257168073"},
{file = "coverage-7.6.9-cp310-cp310-win32.whl", hash = "sha256:41ff7b0da5af71a51b53f501a3bac65fb0ec311ebed1632e58fc6107f03b9198"},
{file = "coverage-7.6.9-cp310-cp310-win_amd64.whl", hash = "sha256:35371f8438028fdccfaf3570b31d98e8d9eda8bb1d6ab9473f5a390969e98717"},
{file = "coverage-7.6.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:932fc826442132dde42ee52cf66d941f581c685a6313feebed358411238f60f9"},
{file = "coverage-7.6.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:085161be5f3b30fd9b3e7b9a8c301f935c8313dcf928a07b116324abea2c1c2c"},
{file = "coverage-7.6.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccc660a77e1c2bf24ddbce969af9447a9474790160cfb23de6be4fa88e3951c7"},
{file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c69e42c892c018cd3c8d90da61d845f50a8243062b19d228189b0224150018a9"},
{file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0824a28ec542a0be22f60c6ac36d679e0e262e5353203bea81d44ee81fe9c6d4"},
{file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4401ae5fc52ad8d26d2a5d8a7428b0f0c72431683f8e63e42e70606374c311a1"},
{file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98caba4476a6c8d59ec1eb00c7dd862ba9beca34085642d46ed503cc2d440d4b"},
{file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ee5defd1733fd6ec08b168bd4f5387d5b322f45ca9e0e6c817ea6c4cd36313e3"},
{file = "coverage-7.6.9-cp311-cp311-win32.whl", hash = "sha256:f2d1ec60d6d256bdf298cb86b78dd715980828f50c46701abc3b0a2b3f8a0dc0"},
{file = "coverage-7.6.9-cp311-cp311-win_amd64.whl", hash = "sha256:0d59fd927b1f04de57a2ba0137166d31c1a6dd9e764ad4af552912d70428c92b"},
{file = "coverage-7.6.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:99e266ae0b5d15f1ca8d278a668df6f51cc4b854513daab5cae695ed7b721cf8"},
{file = "coverage-7.6.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9901d36492009a0a9b94b20e52ebfc8453bf49bb2b27bca2c9706f8b4f5a554a"},
{file = "coverage-7.6.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abd3e72dd5b97e3af4246cdada7738ef0e608168de952b837b8dd7e90341f015"},
{file = "coverage-7.6.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff74026a461eb0660366fb01c650c1d00f833a086b336bdad7ab00cc952072b3"},
{file = "coverage-7.6.9-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65dad5a248823a4996724a88eb51d4b31587aa7aa428562dbe459c684e5787ae"},
{file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:22be16571504c9ccea919fcedb459d5ab20d41172056206eb2994e2ff06118a4"},
{file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f957943bc718b87144ecaee70762bc2bc3f1a7a53c7b861103546d3a403f0a6"},
{file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0ae1387db4aecb1f485fb70a6c0148c6cdaebb6038f1d40089b1fc84a5db556f"},
{file = "coverage-7.6.9-cp312-cp312-win32.whl", hash = "sha256:1a330812d9cc7ac2182586f6d41b4d0fadf9be9049f350e0efb275c8ee8eb692"},
{file = "coverage-7.6.9-cp312-cp312-win_amd64.whl", hash = "sha256:b12c6b18269ca471eedd41c1b6a1065b2f7827508edb9a7ed5555e9a56dcfc97"},
{file = "coverage-7.6.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:899b8cd4781c400454f2f64f7776a5d87bbd7b3e7f7bda0cb18f857bb1334664"},
{file = "coverage-7.6.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:61f70dc68bd36810972e55bbbe83674ea073dd1dcc121040a08cdf3416c5349c"},
{file = "coverage-7.6.9-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a289d23d4c46f1a82d5db4abeb40b9b5be91731ee19a379d15790e53031c014"},
{file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e216d8044a356fc0337c7a2a0536d6de07888d7bcda76febcb8adc50bdbbd00"},
{file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c026eb44f744acaa2bda7493dad903aa5bf5fc4f2554293a798d5606710055d"},
{file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e77363e8425325384f9d49272c54045bbed2f478e9dd698dbc65dbc37860eb0a"},
{file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:777abfab476cf83b5177b84d7486497e034eb9eaea0d746ce0c1268c71652077"},
{file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:447af20e25fdbe16f26e84eb714ba21d98868705cb138252d28bc400381f6ffb"},
{file = "coverage-7.6.9-cp313-cp313-win32.whl", hash = "sha256:d872ec5aeb086cbea771c573600d47944eea2dcba8be5f3ee649bfe3cb8dc9ba"},
{file = "coverage-7.6.9-cp313-cp313-win_amd64.whl", hash = "sha256:fd1213c86e48dfdc5a0cc676551db467495a95a662d2396ecd58e719191446e1"},
{file = "coverage-7.6.9-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:ba9e7484d286cd5a43744e5f47b0b3fb457865baf07bafc6bee91896364e1419"},
{file = "coverage-7.6.9-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e5ea1cf0872ee455c03e5674b5bca5e3e68e159379c1af0903e89f5eba9ccc3a"},
{file = "coverage-7.6.9-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d10e07aa2b91835d6abec555ec8b2733347956991901eea6ffac295f83a30e4"},
{file = "coverage-7.6.9-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13a9e2d3ee855db3dd6ea1ba5203316a1b1fd8eaeffc37c5b54987e61e4194ae"},
{file = "coverage-7.6.9-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c38bf15a40ccf5619fa2fe8f26106c7e8e080d7760aeccb3722664c8656b030"},
{file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d5275455b3e4627c8e7154feaf7ee0743c2e7af82f6e3b561967b1cca755a0be"},
{file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8f8770dfc6e2c6a2d4569f411015c8d751c980d17a14b0530da2d7f27ffdd88e"},
{file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8d2dfa71665a29b153a9681edb1c8d9c1ea50dfc2375fb4dac99ea7e21a0bcd9"},
{file = "coverage-7.6.9-cp313-cp313t-win32.whl", hash = "sha256:5e6b86b5847a016d0fbd31ffe1001b63355ed309651851295315031ea7eb5a9b"},
{file = "coverage-7.6.9-cp313-cp313t-win_amd64.whl", hash = "sha256:97ddc94d46088304772d21b060041c97fc16bdda13c6c7f9d8fcd8d5ae0d8611"},
{file = "coverage-7.6.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:adb697c0bd35100dc690de83154627fbab1f4f3c0386df266dded865fc50a902"},
{file = "coverage-7.6.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:be57b6d56e49c2739cdf776839a92330e933dd5e5d929966fbbd380c77f060be"},
{file = "coverage-7.6.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1592791f8204ae9166de22ba7e6705fa4ebd02936c09436a1bb85aabca3e599"},
{file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e12ae8cc979cf83d258acb5e1f1cf2f3f83524d1564a49d20b8bec14b637f08"},
{file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb5555cff66c4d3d6213a296b360f9e1a8e323e74e0426b6c10ed7f4d021e464"},
{file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b9389a429e0e5142e69d5bf4a435dd688c14478a19bb901735cdf75e57b13845"},
{file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:592ac539812e9b46046620341498caf09ca21023c41c893e1eb9dbda00a70cbf"},
{file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a27801adef24cc30871da98a105f77995e13a25a505a0161911f6aafbd66e678"},
{file = "coverage-7.6.9-cp39-cp39-win32.whl", hash = "sha256:8e3c3e38930cfb729cb8137d7f055e5a473ddaf1217966aa6238c88bd9fd50e6"},
{file = "coverage-7.6.9-cp39-cp39-win_amd64.whl", hash = "sha256:e28bf44afa2b187cc9f41749138a64435bf340adfcacb5b2290c070ce99839d4"},
{file = "coverage-7.6.9-pp39.pp310-none-any.whl", hash = "sha256:f3ca78518bc6bc92828cd11867b121891d75cae4ea9e908d72030609b996db1b"},
{file = "coverage-7.6.9.tar.gz", hash = "sha256:4a8d8977b0c6ef5aeadcb644da9e69ae0dcfe66ec7f368c89c72e058bd71164d"},
]
[package.dependencies]
@@ -2094,6 +2094,8 @@ optional = false
python-versions = "*"
files = [
{file = "jsonpath-ng-1.7.0.tar.gz", hash = "sha256:f6f5f7fd4e5ff79c785f1573b394043b39849fb2bb47bcead935d12b00beab3c"},
{file = "jsonpath_ng-1.7.0-py2-none-any.whl", hash = "sha256:898c93fc173f0c336784a3fa63d7434297544b7198124a68f9a3ef9597b0ae6e"},
{file = "jsonpath_ng-1.7.0-py3-none-any.whl", hash = "sha256:f3d7f9e848cba1b6da28c55b1c26ff915dc9e0b1ba7e752a53d6da8d5cbd00b6"},
]
[package.dependencies]
@@ -2401,13 +2403,13 @@ files = [
[[package]]
name = "microsoft-kiota-abstractions"
version = "1.6.2"
version = "1.6.6"
description = "Core abstractions for kiota generated libraries in Python"
optional = false
python-versions = "<4.0,>=3.8"
files = [
{file = "microsoft_kiota_abstractions-1.6.2-py3-none-any.whl", hash = "sha256:8c2c777748e80f17dba3809b5d149585d9918198f0f94125e87432f7165ba80e"},
{file = "microsoft_kiota_abstractions-1.6.2.tar.gz", hash = "sha256:dec30f0fb427a051003e94b5c6fcf266f4702ecbd9d6961e3966124b9cbe41bf"},
{file = "microsoft_kiota_abstractions-1.6.6-py3-none-any.whl", hash = "sha256:29071715baf0d604c381c5d17be47f35e6e63a441dcfb5e9141963406b469d50"},
{file = "microsoft_kiota_abstractions-1.6.6.tar.gz", hash = "sha256:2554495b00c9c25b43f6964a71b65c89a277bd6b50f4d0028a7febcec6c4fd67"},
]
[package.dependencies]
@@ -2581,13 +2583,13 @@ dev = ["click", "codecov", "mkdocs-gen-files", "mkdocs-git-authors-plugin", "mkd
[[package]]
name = "mkdocs-material"
version = "9.5.45"
version = "9.5.48"
description = "Documentation that simply works"
optional = false
python-versions = ">=3.8"
files = [
{file = "mkdocs_material-9.5.45-py3-none-any.whl", hash = "sha256:a9be237cfd0be14be75f40f1726d83aa3a81ce44808dc3594d47a7a592f44547"},
{file = "mkdocs_material-9.5.45.tar.gz", hash = "sha256:286489cf0beca4a129d91d59d6417419c63bceed1ce5cd0ec1fc7e1ebffb8189"},
{file = "mkdocs_material-9.5.48-py3-none-any.whl", hash = "sha256:b695c998f4b939ce748adbc0d3bff73fa886a670ece948cf27818fa115dc16f8"},
{file = "mkdocs_material-9.5.48.tar.gz", hash = "sha256:a582531e8b34f4c7ed38c29d5c44763053832cf2a32f7409567e0c74749a47db"},
]
[package.dependencies]
@@ -2767,13 +2769,13 @@ dev = ["bumpver", "isort", "mypy", "pylint", "pytest", "yapf"]
[[package]]
name = "msgraph-sdk"
version = "1.12.0"
version = "1.14.0"
description = "The Microsoft Graph Python SDK"
optional = false
python-versions = ">=3.8"
files = [
{file = "msgraph_sdk-1.12.0-py3-none-any.whl", hash = "sha256:ac298b546b240391b0e407379d039db32862a56d6fe15cf7c5f7e77631fc6771"},
{file = "msgraph_sdk-1.12.0.tar.gz", hash = "sha256:fbb5a8a9f6eed89b496f207eb35b6b4cfc7fefa75608aeef07477a3b2276d4fa"},
{file = "msgraph_sdk-1.14.0-py3-none-any.whl", hash = "sha256:1a2f327dc8fbe5a5e6d0d84cf71d605e7b118b3066b1e16f011ccd8fd927bb03"},
{file = "msgraph_sdk-1.14.0.tar.gz", hash = "sha256:5bbda80941c5d1794682753b8b291bd2ebed719a43d6de949fd0cd613b6dfbbd"},
]
[package.dependencies]
@@ -3794,17 +3796,17 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]
[[package]]
name = "pylint"
version = "3.3.1"
version = "3.3.2"
description = "python code static checker"
optional = false
python-versions = ">=3.9.0"
files = [
{file = "pylint-3.3.1-py3-none-any.whl", hash = "sha256:2f846a466dd023513240bc140ad2dd73bfc080a5d85a710afdb728c420a5a2b9"},
{file = "pylint-3.3.1.tar.gz", hash = "sha256:9f3dcc87b1203e612b78d91a896407787e708b3f189b5fa0b307712d49ff0c6e"},
{file = "pylint-3.3.2-py3-none-any.whl", hash = "sha256:77f068c287d49b8683cd7c6e624243c74f92890f767f106ffa1ddf3c0a54cb7a"},
{file = "pylint-3.3.2.tar.gz", hash = "sha256:9ec054ec992cd05ad30a6df1676229739a73f8feeabf3912c995d17601052b01"},
]
[package.dependencies]
astroid = ">=3.3.4,<=3.4.0-dev0"
astroid = ">=3.3.5,<=3.4.0-dev0"
colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
dill = [
{version = ">=0.2", markers = "python_version < \"3.11\""},
@@ -3856,13 +3858,13 @@ diagrams = ["jinja2", "railroad-diagrams"]
[[package]]
name = "pytest"
version = "8.3.3"
version = "8.3.4"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"},
{file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"},
{file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"},
{file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"},
]
[package.dependencies]
@@ -4636,17 +4638,17 @@ files = [
[[package]]
name = "slack-sdk"
version = "3.33.4"
version = "3.33.5"
description = "The Slack API Platform SDK for Python"
optional = false
python-versions = ">=3.6"
files = [
{file = "slack_sdk-3.33.4-py2.py3-none-any.whl", hash = "sha256:9f30cb3c9c07b441c49d53fc27f9f1837ad1592a7e9d4ca431f53cdad8826cc6"},
{file = "slack_sdk-3.33.4.tar.gz", hash = "sha256:5e109847f6b6a22d227609226ba4ed936109dc00675bddeb7e0bee502d3ee7e0"},
{file = "slack_sdk-3.33.5-py2.py3-none-any.whl", hash = "sha256:b8cccadfa3d4005a5e6529f52000d25c583f46173fda8e9136fdd2bc58923ff6"},
{file = "slack_sdk-3.33.5.tar.gz", hash = "sha256:a5e74c00c99dc844ad93e501ab764a20d86fa8184bbc9432af217496f632c4ee"},
]
[package.extras]
optional = ["SQLAlchemy (>=1.4,<3)", "aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "websocket-client (>=1,<2)", "websockets (>=9.1,<14)"]
optional = ["SQLAlchemy (>=1.4,<3)", "aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "websocket-client (>=1,<2)", "websockets (>=9.1,<15)"]
[[package]]
name = "smmap"
@@ -4886,13 +4888,13 @@ zstd = ["zstandard (>=0.18.0)"]
[[package]]
name = "vulture"
version = "2.13"
version = "2.14"
description = "Find dead code"
optional = false
python-versions = ">=3.8"
files = [
{file = "vulture-2.13-py2.py3-none-any.whl", hash = "sha256:34793ba60488e7cccbecdef3a7fe151656372ef94fdac9fe004c52a4000a6d44"},
{file = "vulture-2.13.tar.gz", hash = "sha256:78248bf58f5eaffcc2ade306141ead73f437339950f80045dce7f8b078e5a1aa"},
{file = "vulture-2.14-py2.py3-none-any.whl", hash = "sha256:d9a90dba89607489548a49d557f8bac8112bd25d3cbc8aeef23e860811bd5ed9"},
{file = "vulture-2.14.tar.gz", hash = "sha256:cb8277902a1138deeab796ec5bef7076a6e0248ca3607a3f3dee0b6d9e9b8415"},
]
[package.dependencies]
@@ -5192,4 +5194,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.9,<3.13"
content-hash = "0313d13861b3253896ca0b19d2f9317d576dfd37cc9003e89944306f1c2da666"
content-hash = "f94001d4038c14dc1e1a7b2e711ed24180ee0dfc3f3a55d1cc55cf8c945ffb09"

View File

@@ -12,7 +12,7 @@ from prowler.lib.logger import logger
timestamp = datetime.today()
timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
prowler_version = "4.6.0"
prowler_version = "4.6.2"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
square_logo_img = "https://prowler.com/wp-content/uploads/logo-html.png"
aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"

View File

@@ -111,7 +111,7 @@ def load_checks_to_execute(
):
checks_to_execute.add(check_name)
# Only execute threat detection checks if threat-detection category is set
if categories and categories != [] and "threat-detection" not in categories:
if not categories or "threat-detection" not in categories:
for threat_detection_check in check_categories.get("threat-detection", []):
checks_to_execute.discard(threat_detection_check)
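For context, a minimal sketch of the behavioral change in this hunk: with the old condition an empty categories list kept the threat-detection checks in the run, while the new condition discards them whenever the category is not explicitly requested. The check_categories mapping and check names below are simplified placeholders, not Prowler's real data structures.

# Minimal sketch with placeholder data (not the real load_checks_to_execute() internals).
check_categories = {"threat-detection": ["cloudtrail_threat_detection_enumeration"]}
checks_to_execute = {"cloudtrail_threat_detection_enumeration", "ec2_instance_imdsv2_enabled"}
categories = []  # nothing was requested with --category

old_condition = bool(categories and categories != [] and "threat-detection" not in categories)
new_condition = bool(not categories or "threat-detection" not in categories)
print(old_condition, new_condition)  # False True

if new_condition:
    for threat_detection_check in check_categories.get("threat-detection", []):
        checks_to_execute.discard(threat_detection_check)
print(checks_to_execute)  # {'ec2_instance_imdsv2_enabled'}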

View File

@@ -322,8 +322,9 @@ class CheckMetadata(BaseModel):
checks = set()
if service:
if service == "lambda":
service = "awslambda"
# This is a special case for the AWS provider since `lambda` is a reserved keyword in Python
if service == "awslambda":
service = "lambda"
checks = {
check_name
for check_name, check_metadata in bulk_checks_metadata.items()
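A hedged illustration of the alias handling this hunk reverses: the checks live under an awslambda module because lambda is a reserved keyword in Python, so a request for awslambda is now mapped back to the lambda service name before filtering. The metadata dictionary and filter below are invented for the example and are not the real CheckMetadata.list() implementation.

# Hypothetical metadata, assuming ServiceName is stored as "lambda".
bulk_checks_metadata = {
    "awslambda_function_url_public": {"ServiceName": "lambda"},
    "ec2_ebs_volume_encryption": {"ServiceName": "ec2"},
}

def checks_for_service(service: str) -> set:
    # Same normalization as the fixed hunk: map the module alias to the service name.
    if service == "awslambda":
        service = "lambda"
    return {
        check_name
        for check_name, check_metadata in bulk_checks_metadata.items()
        if check_metadata["ServiceName"] == service
    }

print(checks_for_service("awslambda"))  # {'awslambda_function_url_public'}
print(checks_for_service("lambda"))     # {'awslambda_function_url_public'}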

View File

@@ -94,11 +94,12 @@ def get_cis_table(
print(
f"\nCompliance Status of {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Framework:"
)
total_findings_count = len(fail_count) + len(pass_count) + len(muted_count)
overview_table = [
[
f"{Fore.RED}{round(len(fail_count) / len(findings) * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / len(findings) * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / len(findings) * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
f"{Fore.RED}{round(len(fail_count) / total_findings_count * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / total_findings_count * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / total_findings_count * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
]
]
print(tabulate(overview_table, tablefmt="rounded_grid"))
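A worked example of the denominator change applied in this and the following compliance-table hunks; the counts are made up for illustration only.

fail_count = ["f"] * 30    # 30 FAIL findings
pass_count = ["p"] * 60    # 60 PASS findings
muted_count = ["m"] * 10   # 10 MUTED findings
findings = ["x"] * 125     # all findings, including ones not shown in the table

# Old denominator: the percentages do not add up to 100%.
old = [round(len(c) / len(findings) * 100, 2) for c in (fail_count, pass_count, muted_count)]
print(old, sum(old))  # [24.0, 48.0, 8.0] 80.0

# New denominator: FAIL + PASS + MUTED always totals 100%.
total_findings_count = len(fail_count) + len(pass_count) + len(muted_count)
new = [round(len(c) / total_findings_count * 100, 2) for c in (fail_count, pass_count, muted_count)]
print(new, sum(new))  # [30.0, 60.0, 10.0] 100.0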

View File

@@ -95,11 +95,12 @@ def get_ens_table(
print(
f"\nEstado de Cumplimiento de {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL}:"
)
total_findings_count = len(fail_count) + len(pass_count) + len(muted_count)
overview_table = [
[
f"{Fore.RED}{round(len(fail_count) / len(findings) * 100, 2)}% ({len(fail_count)}) NO CUMPLE{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / len(findings) * 100, 2)}% ({len(pass_count)}) CUMPLE{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / len(findings) * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
f"{Fore.RED}{round(len(fail_count) / total_findings_count * 100, 2)}% ({len(fail_count)}) NO CUMPLE{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / total_findings_count * 100, 2)}% ({len(pass_count)}) CUMPLE{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / total_findings_count * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
]
]
print(tabulate(overview_table, tablefmt="rounded_grid"))

View File

@@ -39,11 +39,12 @@ def get_generic_compliance_table(
print(
f"\nCompliance Status of {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Framework:"
)
total_findings_count = len(fail_count) + len(pass_count) + len(muted_count)
overview_table = [
[
f"{Fore.RED}{round(len(fail_count) / len(findings) * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / len(findings) * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / len(findings) * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
f"{Fore.RED}{round(len(fail_count) / total_findings_count * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / total_findings_count * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / total_findings_count * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
]
]
print(tabulate(overview_table, tablefmt="rounded_grid"))

View File

@@ -61,11 +61,12 @@ def get_kisa_ismsp_table(
print(
f"\nCompliance Status of {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Framework:"
)
total_findings_count = len(fail_count) + len(pass_count) + len(muted_count)
overview_table = [
[
f"{Fore.RED}{round(len(fail_count) / len(findings) * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / len(findings) * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / len(findings) * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
f"{Fore.RED}{round(len(fail_count) / total_findings_count * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / total_findings_count * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / total_findings_count * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
]
]
print(tabulate(overview_table, tablefmt="rounded_grid"))

View File

@@ -69,11 +69,12 @@ def get_mitre_attack_table(
print(
f"\nCompliance Status of {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Framework:"
)
total_findings_count = len(fail_count) + len(pass_count) + len(muted_count)
overview_table = [
[
f"{Fore.RED}{round(len(fail_count) / len(findings) * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / len(findings) * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / len(findings) * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
f"{Fore.RED}{round(len(fail_count) / total_findings_count * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / total_findings_count * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / total_findings_count * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
]
]
print(tabulate(overview_table, tablefmt="rounded_grid"))

View File

@@ -6,7 +6,8 @@ from prowler.providers.aws.services.accessanalyzer.accessanalyzer_client import
def fixer(region):
"""
Enable Access Analyzer in a region. Requires the access-analyzer:CreateAnalyzer permission:
Enable Access Analyzer in a region. Requires the access-analyzer:CreateAnalyzer permission.
Permissions:
{
"Version": "2012-10-17",
"Statement": [

View File

@@ -183,21 +183,26 @@ class Backup(AWSService):
def _list_recovery_points(self, regional_client):
logger.info("Backup - Listing Recovery Points...")
try:
for backup_vault in self.backup_vaults:
paginator = regional_client.get_paginator(
"list_recovery_points_by_backup_vault"
)
for page in paginator.paginate(BackupVaultName=backup_vault.name):
for recovery_point in page.get("RecoveryPoints", []):
self.recovery_points.append(
RecoveryPoint(
arn=recovery_point.get("RecoveryPointArn"),
backup_vault_name=backup_vault.name,
encrypted=recovery_point.get("IsEncrypted", False),
backup_vault_region=backup_vault.region,
tags=[],
)
)
if self.backup_vaults:
for backup_vault in self.backup_vaults:
paginator = regional_client.get_paginator(
"list_recovery_points_by_backup_vault"
)
for page in paginator.paginate(BackupVaultName=backup_vault.name):
for recovery_point in page.get("RecoveryPoints", []):
arn = recovery_point.get("RecoveryPointArn")
if arn:
self.recovery_points.append(
RecoveryPoint(
arn=arn,
backup_vault_name=backup_vault.name,
encrypted=recovery_point.get(
"IsEncrypted", False
),
backup_vault_region=backup_vault.region,
tags=[],
)
)
except ClientError as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -7,7 +7,8 @@ from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
def fixer(region):
"""
NOTE: Define the S3 bucket name in the fixer_config.yaml file.
Enable CloudTrail in a region. Requires the cloudtrail:CreateTrail permission:
Enable CloudTrail in a region. Requires the cloudtrail:CreateTrail permission.
Permissions:
{
"Version": "2012-10-17",
"Statement": [

View File

@@ -67,10 +67,8 @@ class cloudtrail_threat_detection_enumeration(Check):
found_potential_enumeration = True
report = Check_Report_AWS(self.metadata())
report.region = cloudtrail_client.region
report.resource_id = cloudtrail_client.audited_account
report.resource_arn = cloudtrail_client._get_trail_arn_template(
cloudtrail_client.region
)
report.resource_id = aws_identity_arn.split("/")[-1]
report.resource_arn = aws_identity_arn
report.status = "FAIL"
report.status_extended = f"Potential enumeration attack detected from AWS {aws_identity_type} {aws_identity_arn.split('/')[-1]} with an threshold of {identity_threshold}."
findings.append(report)
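For reference, a small example of the resource values the fixed hunk now reports; the ARN below is made up, and the identity-type derivation is only illustrative.

aws_identity_arn = "arn:aws:iam::123456789012:user/alice"        # example identity
aws_identity_type = aws_identity_arn.split(":")[-1].split("/")[0]

resource_id = aws_identity_arn.split("/")[-1]   # "alice" (previously the audited account ID)
resource_arn = aws_identity_arn                 # the identity ARN (previously a trail ARN template)

print(aws_identity_type, resource_id)           # user alice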

View File

@@ -67,10 +67,8 @@ class cloudtrail_threat_detection_llm_jacking(Check):
found_potential_llm_jacking = True
report = Check_Report_AWS(self.metadata())
report.region = cloudtrail_client.region
report.resource_id = cloudtrail_client.audited_account
report.resource_arn = cloudtrail_client._get_trail_arn_template(
cloudtrail_client.region
)
report.resource_id = aws_identity_arn.split("/")[-1]
report.resource_arn = aws_identity_arn
report.status = "FAIL"
report.status_extended = f"Potential LLM Jacking attack detected from AWS {aws_identity_type} {aws_identity_arn.split('/')[-1]} with an threshold of {identity_threshold}."
findings.append(report)

View File

@@ -69,10 +69,8 @@ class cloudtrail_threat_detection_privilege_escalation(Check):
found_potential_privilege_escalation = True
report = Check_Report_AWS(self.metadata())
report.region = cloudtrail_client.region
report.resource_id = cloudtrail_client.audited_account
report.resource_arn = cloudtrail_client._get_trail_arn_template(
cloudtrail_client.region
)
report.resource_id = aws_identity_arn.split("/")[-1]
report.resource_arn = aws_identity_arn
report.status = "FAIL"
report.status_extended = f"Potential privilege escalation attack detected from AWS {aws_identity_type} {aws_identity_arn.split('/')[-1]} with an threshold of {identity_threshold}."
findings.append(report)

View File

@@ -8,9 +8,8 @@ def fixer(resource_id: str, region: str) -> bool:
"""
Modify the attributes of a DocumentDB cluster snapshot to remove public access.
Specifically, this fixer removes the 'all' value from the 'restore' attribute to
prevent the snapshot from being publicly accessible.
Requires the rds:ModifyDBClusterSnapshotAttribute permissions.
prevent the snapshot from being publicly accessible. Requires the rds:ModifyDBClusterSnapshotAttribute permissions.
Permissions:
{
"Version": "2012-10-17",
"Statement": [
@@ -21,7 +20,6 @@ def fixer(resource_id: str, region: str) -> bool:
}
]
}
Args:
resource_id (str): The DB cluster snapshot identifier.
region (str): AWS region where the snapshot exists.

View File

@@ -5,7 +5,8 @@ from prowler.providers.aws.services.ec2.ec2_client import ec2_client
def fixer(region):
"""
Enable EBS encryption by default in a region. NOTE: Custom KMS keys for EBS Default Encryption may be overwritten.
Requires the ec2:EnableEbsEncryptionByDefault permission:
Requires the ec2:EnableEbsEncryptionByDefault permission.
Permissions:
{
"Version": "2012-10-17",
"Statement": [

View File

@@ -6,8 +6,8 @@ def fixer(resource_id: str, region: str) -> bool:
"""
Modify the attributes of an EBS snapshot to remove public access.
Specifically, this fixer removes the 'all' value from the 'createVolumePermission' attribute to
prevent the snapshot from being publicly accessible.
Requires the ec2:ModifySnapshotAttribute permission.
prevent the snapshot from being publicly accessible. Requires the ec2:ModifySnapshotAttribute permission.
Permissions:
{
"Version": "2012-10-17",
"Statement": [

View File

@@ -5,7 +5,8 @@ from prowler.providers.aws.services.ec2.ec2_client import ec2_client
def fixer(region):
"""
Enable EBS snapshot block public access in a region.
Requires the ec2:EnableSnapshotBlockPublicAccess permission:
Requires the ec2:EnableSnapshotBlockPublicAccess permission.
Permissions:
{
"Version": "2012-10-17",
"Statement": [

View File

@@ -8,7 +8,7 @@
"ServiceName": "ec2",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:service:region:account-id",
"Severity": "medium",
"Severity": "high",
"ResourceType": "AwsEc2Instance",
"Description": "Ensure Instance Metadata Service Version 2 (IMDSv2) is enforced for EC2 instances at the account level to protect against SSRF vulnerabilities.",
"Risk": "EC2 instances that use IMDSv1 are vulnerable to SSRF attacks.",

View File

@@ -5,7 +5,8 @@ from prowler.providers.aws.services.ec2.ec2_client import ec2_client
def fixer(region):
"""
Enable IMDSv2 for EC2 instances in the specified region.
Requires the ec2:ModifyInstanceMetadataDefaults permission:
Requires the ec2:ModifyInstanceMetadataDefaults permission.
Permissions:
{
"Version": "2012-10-17",
"Statement": [

View File

@@ -8,7 +8,7 @@
"ServiceName": "ec2",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"Severity": "medium",
"Severity": "high",
"ResourceType": "AwsEc2Instance",
"Description": "Check if EC2 Instance Metadata Service Version 2 (IMDSv2) is Enabled and Required.",
"Risk": "Using IMDSv2 will protect from misconfiguration and SSRF vulnerabilities. IMDSv1 will not.",

View File

@@ -31,10 +31,7 @@ class firehose_stream_encrypted_at_rest(Check):
f"Firehose Stream {stream.name} does have at rest encryption enabled."
)
if (
stream.kms_encryption != EncryptionStatus.ENABLED
or not stream.kms_key_arn
):
if stream.kms_encryption != EncryptionStatus.ENABLED:
report.status = "FAIL"
report.status_extended = f"Firehose Stream {stream.name} does not have at rest encryption enabled."

View File

@@ -4,7 +4,8 @@ from prowler.providers.aws.services.guardduty.guardduty_client import guardduty_
def fixer(region):
"""
Enable GuardDuty in a region. Requires the guardduty:CreateDetector permission:
Enable GuardDuty in a region. Requires the guardduty:CreateDetector permission.
Permissions:
{
"Version": "2012-10-17",
"Statement": [

View File

@@ -5,7 +5,8 @@ from prowler.providers.aws.services.iam.iam_client import iam_client
def fixer(resource_id: str) -> bool:
"""
Enable IAM password policy to expire passwords within 90 days or less or the configurable value in prowler/config/fixer_config.yaml.
Requires the iam:UpdateAccountPasswordPolicy permission:
Requires the iam:UpdateAccountPasswordPolicy permission.
Permissions:
{
"Version": "2012-10-17",
"Statement": [
@@ -16,6 +17,8 @@ def fixer(resource_id: str) -> bool:
}
]
}
Args:
resource_id (str): AWS account ID
Returns:
bool: True if IAM password policy is updated, False otherwise
"""

View File

@@ -5,7 +5,8 @@ from prowler.providers.aws.services.iam.iam_client import iam_client
def fixer(resource_id: str) -> bool:
"""
Enable IAM password policy to require lowercase characters or the configurable value in prowler/config/fixer_config.yaml.
Requires the iam:UpdateAccountPasswordPolicy permission:
Requires the iam:UpdateAccountPasswordPolicy permission.
Permissions:
{
"Version": "2012-10-17",
"Statement": [
@@ -16,6 +17,8 @@ def fixer(resource_id: str) -> bool:
}
]
}
Args:
resource_id (str): AWS account ID
Returns:
bool: True if IAM password policy is updated, False otherwise
"""

View File

@@ -5,7 +5,8 @@ from prowler.providers.aws.services.iam.iam_client import iam_client
def fixer(resource_id: str) -> bool:
"""
Enable IAM password policy to require a minimum password length of 14 characters or the configurable value in prowler/config/fixer_config.yaml.
Requires the iam:UpdateAccountPasswordPolicy permission:
Requires the iam:UpdateAccountPasswordPolicy permission.
Permissions:
{
"Version": "2012-10-17",
"Statement": [
@@ -16,6 +17,8 @@ def fixer(resource_id: str) -> bool:
}
]
}
Args:
resource_id (str): AWS account ID
Returns:
bool: True if IAM password policy is updated, False otherwise
"""

View File

@@ -5,7 +5,8 @@ from prowler.providers.aws.services.iam.iam_client import iam_client
def fixer(resource_id: str) -> bool:
"""
Enable IAM password policy to require numbers or the configurable value in prowler/config/fixer_config.yaml.
Requires the iam:UpdateAccountPasswordPolicy permission:
Requires the iam:UpdateAccountPasswordPolicy permission.
Permissions:
{
"Version": "2012-10-17",
"Statement": [
@@ -16,6 +17,8 @@ def fixer(resource_id: str) -> bool:
}
]
}
Args:
resource_id (str): AWS account ID
Returns:
bool: True if IAM password policy is updated, False otherwise
"""

View File

@@ -5,7 +5,8 @@ from prowler.providers.aws.services.iam.iam_client import iam_client
def fixer(resource_id: str) -> bool:
"""
Enable IAM password policy to prevent reusing the 24 previous passwords or the configurable value in prowler/config/fixer_config.yaml.
Requires the iam:UpdateAccountPasswordPolicy permission:
Requires the iam:UpdateAccountPasswordPolicy permission.
Permissions:
{
"Version": "2012-10-17",
"Statement": [
@@ -16,6 +17,8 @@ def fixer(resource_id: str) -> bool:
}
]
}
Args:
resource_id (str): AWS account ID
Returns:
bool: True if IAM password policy is updated, False otherwise
"""

View File

@@ -5,7 +5,8 @@ from prowler.providers.aws.services.iam.iam_client import iam_client
def fixer(resource_id: str) -> bool:
"""
Enable IAM password policy to require symbols or the configurable value in prowler/config/fixer_config.yaml.
Requires the iam:UpdateAccountPasswordPolicy permission:
Requires the iam:UpdateAccountPasswordPolicy permission.
Permissions:
{
"Version": "2012-10-17",
"Statement": [
@@ -16,6 +17,8 @@ def fixer(resource_id: str) -> bool:
}
]
}
Args:
resource_id (str): AWS account ID
Returns:
bool: True if IAM password policy is updated, False otherwise
"""

View File

@@ -5,7 +5,8 @@ from prowler.providers.aws.services.iam.iam_client import iam_client
def fixer(resource_id: str) -> bool:
"""
Enable IAM password policy to require uppercase characters or the configurable value in prowler/config/fixer_config.yaml.
Requires the iam:UpdateAccountPasswordPolicy permission:
Requires the iam:UpdateAccountPasswordPolicy permission.
Permissions:
{
"Version": "2012-10-17",
"Statement": [
@@ -16,6 +17,8 @@ def fixer(resource_id: str) -> bool:
}
]
}
Args:
resource_id (str): AWS account ID
Returns:
bool: True if IAM password policy is updated, False otherwise
"""

View File

@@ -7,7 +7,6 @@ def fixer(resource_id: str, region: str) -> bool:
Cancel the scheduled deletion of a KMS key.
Specifically, this fixer calls the 'cancel_key_deletion' method to restore the KMS key's availability if it is marked for deletion.
Requires the kms:CancelKeyDeletion permission.
Permissions:
{
"Version": "2012-10-17",
@@ -19,11 +18,9 @@ def fixer(resource_id: str, region: str) -> bool:
}
]
}
Args:
resource_id (str): The ID of the KMS key to cancel the deletion for.
region (str): AWS region where the KMS key exists.
Returns:
bool: True if the operation is successful (deletion cancellation is completed), False otherwise.
"""

View File

@@ -4,7 +4,8 @@ from prowler.providers.aws.services.kms.kms_client import kms_client
def fixer(resource_id: str, region: str) -> bool:
"""
Enable CMK rotation. Requires the kms:EnableKeyRotation permission:
Enable CMK rotation. Requires the kms:EnableKeyRotation permission.
Permissions:
{
"Version": "2012-10-17",
"Statement": [

View File

@@ -6,9 +6,8 @@ def fixer(resource_id: str, region: str) -> bool:
"""
Modify the attributes of a Neptune DB cluster snapshot to remove public access.
Specifically, this fixer removes the 'all' value from the 'restore' attribute to
prevent the snapshot from being publicly accessible.
Requires the rds:ModifyDBClusterSnapshotAttribute permissions.
prevent the snapshot from being publicly accessible. Requires the rds:ModifyDBClusterSnapshotAttribute permissions.
Permissions:
{
"Version": "2012-10-17",
"Statement": [
@@ -19,11 +18,9 @@ def fixer(resource_id: str, region: str) -> bool:
}
]
}
Args:
resource_id (str): The DB cluster snapshot identifier.
region (str): AWS region where the snapshot exists.
Returns:
bool: True if the operation is successful (public access is removed), False otherwise.
"""

View File

@@ -6,9 +6,8 @@ def fixer(resource_id: str, region: str) -> bool:
"""
Modify the attributes of an RDS instance to disable public accessibility.
Specifically, this fixer sets the 'PubliclyAccessible' attribute to False
to prevent the RDS instance from being publicly accessible.
Requires the rds:ModifyDBInstance permission:
to prevent the RDS instance from being publicly accessible. Requires the rds:ModifyDBInstance permission.
Permissions:
{
"Version": "2012-10-17",
"Statement": [
@@ -19,11 +18,9 @@ def fixer(resource_id: str, region: str) -> bool:
}
]
}
Args:
resource_id (str): The DB instance identifier.
region (str): AWS region where the DB instance exists.
Returns:
bool: True if the operation is successful (public access is disabled), False otherwise.
"""

View File

@@ -446,7 +446,7 @@ class RDS(AWSService):
arn=arn,
sns_topic_arn=event["SnsTopicArn"],
status=event["Status"],
source_type=event["SourceType"],
source_type=event.get("SourceType", ""),
source_id=event.get("SourceIdsList", []),
event_list=event.get("EventCategoriesList", []),
enabled=event["Enabled"],

View File

@@ -5,10 +5,9 @@ from prowler.providers.aws.services.rds.rds_client import rds_client
def fixer(resource_id: str, region: str) -> bool:
"""
Modify the attributes of an RDS DB snapshot or DB cluster snapshot to remove public access.
Specifically, this fixer removes the 'all' value from the 'restore' attribute to
prevent the snapshot from being publicly accessible for both DB snapshots and DB cluster snapshots.
Requires the rds:ModifyDBSnapshotAttribute or rds:ModifyDBClusterSnapshotAttribute permissions.
Specifically, this fixer removes the 'all' value from the 'restore' attribute to prevent the snapshot from being publicly accessible
for both DB snapshots and DB cluster snapshots. Requires the rds:ModifyDBSnapshotAttribute or rds:ModifyDBClusterSnapshotAttribute permissions.
Permissions:
{
"Version": "2012-10-17",
"Statement": [
@@ -24,11 +23,9 @@ def fixer(resource_id: str, region: str) -> bool:
}
]
}
Args:
resource_id (str): The DB snapshot or DB cluster snapshot identifier.
region (str): AWS region where the snapshot exists.
Returns:
bool: True if the operation is successful (public access is removed), False otherwise.
"""

View File

@@ -5,7 +5,8 @@ from prowler.providers.aws.services.s3.s3control_client import s3control_client
def fixer(resource_id: str) -> bool:
"""
Enable S3 Block Public Access for the account. NOTE: By blocking all S3 public access you may break public S3 buckets.
Requires the s3:PutAccountPublicAccessBlock permission:
Requires the s3:PutAccountPublicAccessBlock permission.
Permissions:
{
"Version": "2012-10-17",
"Statement": [
@@ -16,6 +17,8 @@ def fixer(resource_id: str) -> bool:
}
]
}
Args:
resource_id (str): The AWS account ID.
Returns:
bool: True if S3 Block Public Access is enabled, False otherwise
"""

View File

@@ -6,7 +6,8 @@ from prowler.providers.aws.services.securityhub.securityhub_client import (
def fixer(region):
"""
Enable Security Hub in a region. Requires the securityhub:EnableSecurityHub permission:
Enable Security Hub in a region. Requires the securityhub:EnableSecurityHub permission.
Permissions:
{
"Version": "2012-10-17",
"Statement": [

View File

@@ -150,6 +150,22 @@ class WAFv2(AWSService):
else:
acl.rules.append(new_rule)
firewall_manager_managed_rg = get_web_acl.get("WebACL", {}).get(
"PreProcessFirewallManagerRuleGroups", []
) + get_web_acl.get("WebACL", {}).get(
"PostProcessFirewallManagerRuleGroups", []
)
for rule in firewall_manager_managed_rg:
acl.rule_groups.append(
Rule(
name=rule.get("Name", ""),
cloudwatch_metrics_enabled=rule.get(
"VisibilityConfig", {}
).get("CloudWatchMetricsEnabled", False),
)
)
except Exception as error:
logger.error(
f"{acl.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -193,13 +209,6 @@ class Rule(BaseModel):
cloudwatch_metrics_enabled: bool = False
class FirewallManagerRuleGroup(BaseModel):
"""Model representing a rule group for the Web ACL."""
name: str
cloudwatch_metrics_enabled: bool = False
class WebAclv2(BaseModel):
"""Model representing a Web ACL for WAFv2."""

View File

@@ -18,12 +18,7 @@ class containerregistry_not_publicly_accessible(Check):
report.status = "FAIL"
report.status_extended = f"Container Registry {container_registry_info.name} from subscription {subscription} allows unrestricted network access."
if (
getattr(
container_registry_info.network_rule_set, "default_action", ""
).lower()
== "deny"
):
if not container_registry_info.public_network_access:
report.status = "PASS"
report.status_extended = f"Container Registry {container_registry_info.name} from subscription {subscription} does not allow unrestricted network access."

View File

@@ -37,8 +37,13 @@ class ContainerRegistry(AzureService):
resource_group=resource_group,
sku=getattr(registry.sku, "name", ""),
login_server=getattr(registry, "login_server", ""),
public_network_access=getattr(
registry, "public_network_access", ""
public_network_access=(
False
if getattr(
registry, "public_network_access" "Enabled"
)
== "Disabled"
else True
),
admin_user_enabled=getattr(
registry, "admin_user_enabled", False
@@ -93,7 +98,7 @@ class ContainerRegistryInfo:
resource_group: str
sku: str
login_server: str
public_network_access: str
public_network_access: bool
admin_user_enabled: bool
network_rule_set: NetworkRuleSet
monitor_diagnostic_settings: list[DiagnosticSetting]

View File

@@ -181,8 +181,6 @@ class GcpProvider(Provider):
message="No Project IDs can be accessed via Google Credentials.",
)
if project_ids:
if self._default_project_id not in project_ids:
self._default_project_id = project_ids[0]
for input_project in project_ids:
for (
accessible_project_id,
@@ -203,6 +201,10 @@ class GcpProvider(Provider):
self._projects[project_id] = project
self._project_ids.append(project_id)
# Change default project if not in active projects
if self._project_ids and self._default_project_id not in self._project_ids:
self._default_project_id = self._project_ids[0]
# Remove excluded projects if any input
if excluded_project_ids:
for excluded_project in excluded_project_ids:
@@ -417,17 +419,17 @@ class GcpProvider(Provider):
GCPTestConnectionError if an error occurs during the test connection
Usage:
- Using static credentials:
- Using ADC credentials from `/Users/<user>/.config/gcloud/application_default_credentials.json`:
>>> GcpProvider.test_connection(
... client_id="client_id",
... client_secret="client_secret",
... refresh_token="refresh_token"
... )
- Using a credentials file:
- Using a Service Account credentials file path:
>>> GcpProvider.test_connection(
... credentials_file="credentials_file"
... )
- Using a service account to impersonate, authentication is required to impersonate a service account:
- Using ADC credentials with a Service Account to impersonate:
>>> GcpProvider.test_connection(
... client_id="client_id",
... client_secret="client_secret",
@@ -436,7 +438,7 @@ class GcpProvider(Provider):
... )
"""
try:
# Set the GCP credentials using the provided client_id, client_secret and refresh_token
# Set the GCP credentials using the provided client_id, client_secret and refresh_token from ADC
gcp_credentials = None
if any([client_id, client_secret, refresh_token]):
gcp_credentials = GcpProvider.validate_static_arguments(
@@ -738,7 +740,7 @@ class GcpProvider(Provider):
client_id: str = None, client_secret: str = None, refresh_token: str = None
) -> dict:
"""
Validate the static arguments client_id, client_secret and refresh_token
Validate the static arguments client_id, client_secret and refresh_token of ADC credentials
Args:
client_id: str
@@ -749,7 +751,7 @@ class GcpProvider(Provider):
dict
Raises:
GCPStaticCredentialsError if any of the static arguments is missing
GCPStaticCredentialsError if any of the static arguments is missing from the ADC credentials
Usage:
>>> GcpProvider.validate_static_arguments(client_id, client_secret, refresh_token)

View File

@@ -55,7 +55,9 @@ class GCPService:
project_ids = []
for project_id in audited_project_ids:
try:
client = discovery.build("serviceusage", "v1")
client = discovery.build(
"serviceusage", "v1", credentials=self.credentials
)
request = client.services().get(
name=f"projects/{project_id}/services/{self.service}.googleapis.com"
)
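The point of this change is that discovery.build falls back to Application Default Credentials when no credentials argument is passed, which may not be the identity Prowler was configured with. A minimal sketch of the explicit form, assuming a service-account key file path ("sa.json") and project/service names used purely for illustration:

from google.oauth2 import service_account
from googleapiclient import discovery

# Without credentials=..., googleapiclient silently resolves Application
# Default Credentials instead of the provider's configured identity.
credentials = service_account.Credentials.from_service_account_file("sa.json")
client = discovery.build("serviceusage", "v1", credentials=credentials)
request = client.services().get(
    name="projects/my-project/services/compute.googleapis.com"
)
response = request.execute()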

View File

@@ -1,4 +1,5 @@
import os
from typing import Union
from colorama import Fore, Style
from kubernetes.client.exceptions import ApiException
@@ -74,14 +75,14 @@ class KubernetesProvider(Provider):
fixer_config: dict = {},
mutelist_path: str = None,
mutelist_content: dict = {},
kubeconfig_content: dict = None,
kubeconfig_content: Union[dict, str] = None,
):
"""
Initializes the KubernetesProvider instance.
Args:
kubeconfig_file (str): Path to the kubeconfig file.
kubeconfig_content (dict): Content of the kubeconfig file.
kubeconfig_content (str or dict): Content of the kubeconfig file.
context (str): Context name.
namespace (list): List of namespaces.
config_content (dict): Audit configuration.
@@ -224,7 +225,7 @@ class KubernetesProvider(Provider):
@staticmethod
def setup_session(
kubeconfig_file: str = None,
kubeconfig_content: dict = None,
kubeconfig_content: Union[dict, str] = None,
context: str = None,
) -> KubernetesSession:
"""
@@ -232,7 +233,7 @@ class KubernetesProvider(Provider):
Args:
kubeconfig_file (str): Path to the kubeconfig file.
kubeconfig_content (dict): Content of the kubeconfig file.
kubeconfig_content (str or dict): Content of the kubeconfig file.
context (str): Context name.
Returns:
@@ -243,14 +244,20 @@ class KubernetesProvider(Provider):
KubernetesInvalidProviderIdError: If the provider ID is invalid.
KubernetesSetUpSessionError: If an error occurs while setting up the session.
"""
logger.info(f"Using kubeconfig file: {kubeconfig_file}")
try:
if kubeconfig_content:
config.load_kube_config_from_dict(
safe_load(kubeconfig_content), context=context
)
logger.info("Using kubeconfig content...")
config_data = safe_load(kubeconfig_content)
config.load_kube_config_from_dict(config_data, context=context)
if context:
contexts = config_data.get("contexts", [])
for context_item in contexts:
if context_item["name"] == context:
context = context_item
else:
context = config_data.get("contexts", [])[0]
else:
logger.info(f"Using kubeconfig file: {kubeconfig_file}...")
kubeconfig_file = (
kubeconfig_file if kubeconfig_file else "~/.kube/config"
)
@@ -273,17 +280,19 @@ class KubernetesProvider(Provider):
return KubernetesSession(
api_client=client.ApiClient(), context=context
)
if context:
contexts = config.list_kube_config_contexts(
config_file=kubeconfig_file
)[0]
for context_item in contexts:
if context_item["name"] == context:
context = context_item
else:
context = config.list_kube_config_contexts(config_file=kubeconfig_file)[
1
]
if context:
contexts = config.list_kube_config_contexts(
config_file=kubeconfig_file
)[0]
for context_item in contexts:
if context_item["name"] == context:
context = context_item
else:
# If no context is provided, use the active context in the kubeconfig file
# The first element is the list of contexts, the second is the active context
context = config.list_kube_config_contexts(
config_file=kubeconfig_file
)[1]
return KubernetesSession(api_client=client.ApiClient(), context=context)
except parser.ParserError as parser_error:
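For clarity on the tuple being indexed above, kubernetes.config.list_kube_config_contexts returns a pair of (all contexts, active context), as in this small sketch:

from kubernetes import config

# Returns a 2-tuple: the list of context dicts and the currently active context.
contexts, active_context = config.list_kube_config_contexts(config_file="~/.kube/config")
print(active_context["name"])           # name of the active context
print([c["name"] for c in contexts])    # names of every context in the kubeconfig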
@@ -318,7 +327,7 @@ class KubernetesProvider(Provider):
@staticmethod
def test_connection(
kubeconfig_file: str = "~/.kube/config",
kubeconfig_content: dict = None,
kubeconfig_content: Union[dict, str] = None,
namespace: str = None,
provider_id: str = None,
raise_on_exception: bool = True,
@@ -328,7 +337,7 @@ class KubernetesProvider(Provider):
Args:
kubeconfig_file (str): Path to the kubeconfig file.
kubeconfig_content (dict): Content of the kubeconfig file.
kubeconfig_content (str or dict): Content of the kubeconfig file.
namespace (str): Namespace name.
provider_id (str): Provider ID to use, in this case, the Kubernetes context.
raise_on_exception (bool): Whether to raise an exception on error.
@@ -352,7 +361,7 @@ class KubernetesProvider(Provider):
... )
- Using the kubeconfig content:
>>> connection = KubernetesProvider.test_connection(
... kubeconfig_content={"kubecofig": "content"},
... kubeconfig_content="kubeconfig content",
... namespace="default",
... provider_id="my-context",
... raise_on_exception=True,

View File

@@ -23,7 +23,7 @@ packages = [
{include = "dashboard"}
]
readme = "README.md"
version = "4.6.0"
version = "4.6.2"
[tool.poetry.dependencies]
alive-progress = "3.2.0"
@@ -48,8 +48,8 @@ azure-mgmt-storage = "21.2.1"
azure-mgmt-subscription = "3.1.1"
azure-mgmt-web = "7.3.1"
azure-storage-blob = "12.24.0"
boto3 = "1.35.66"
botocore = "1.35.66"
boto3 = "1.35.77"
botocore = "1.35.77"
colorama = "0.4.6"
cryptography = "43.0.1"
dash = "2.18.2"
@@ -59,8 +59,8 @@ google-api-python-client = "2.154.0"
google-auth-httplib2 = ">=0.1,<0.3"
jsonschema = "4.23.0"
kubernetes = "31.0.0"
microsoft-kiota-abstractions = "1.6.2"
msgraph-sdk = "1.12.0"
microsoft-kiota-abstractions = "1.6.6"
msgraph-sdk = "1.14.0"
numpy = "2.0.2"
pandas = "2.2.3"
py-ocsf-models = "0.2.0"
@@ -70,14 +70,14 @@ python-dateutil = "^2.9.0.post0"
pytz = "2024.2"
schema = "0.7.7"
shodan = "1.31.0"
slack-sdk = "3.33.4"
slack-sdk = "3.33.5"
tabulate = "0.9.0"
tzlocal = "5.2"
[tool.poetry.group.dev.dependencies]
bandit = "1.7.10"
bandit = "1.8.0"
black = "24.10.0"
coverage = "7.6.7"
coverage = "7.6.9"
docker = "7.1.0"
flake8 = "7.1.1"
freezegun = "1.5.1"
@@ -85,14 +85,14 @@ mock = "5.1.0"
moto = {extras = ["all"], version = "5.0.16"}
openapi-schema-validator = "0.6.2"
openapi-spec-validator = "0.7.1"
pylint = "3.3.1"
pytest = "8.3.3"
pylint = "3.3.2"
pytest = "8.3.4"
pytest-cov = "6.0.0"
pytest-env = "1.1.5"
pytest-randomly = "3.16.0"
pytest-xdist = "3.6.1"
safety = "3.2.9"
vulture = "2.13"
vulture = "2.14"
[tool.poetry.group.docs]
optional = true
@@ -100,7 +100,7 @@ optional = true
[tool.poetry.group.docs.dependencies]
mkdocs = "1.6.1"
mkdocs-git-revision-date-localized-plugin = "1.3.0"
mkdocs-material = "9.5.45"
mkdocs-material = "9.5.48"
mkdocs-material-extensions = "1.3.1"
[tool.poetry.scripts]

View File

@@ -14,11 +14,13 @@ S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME_CUSTOM_ALIAS = (
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_SEVERITY = "medium"
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME_SERVICE = "s3"
CLOUDTRAIL_THREAT_DETECTION_ENUMERATION_NAME = "cloudtrail_threat_detection_enumeration"
class TestCheckLoader:
provider = "aws"
def get_custom_check_metadata(self):
def get_custom_check_s3_metadata(self):
return CheckMetadata(
Provider="aws",
CheckID=S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME,
@@ -52,9 +54,37 @@ class TestCheckLoader:
Compliance=[],
)
def get_threat_detection_check_metadata(self):
return CheckMetadata(
Provider="aws",
CheckID=CLOUDTRAIL_THREAT_DETECTION_ENUMERATION_NAME,
CheckTitle="Ensure there are no potential enumeration threats in CloudTrail",
CheckType=[],
ServiceName="cloudtrail",
SubServiceName="",
ResourceIdTemplate="arn:partition:service:region:account-id:resource-id",
Severity="critical",
ResourceType="AwsCloudTrailTrail",
Description="This check ensures that there are no potential enumeration threats in CloudTrail.",
Risk="Potential enumeration threats in CloudTrail can lead to unauthorized access to resources.",
RelatedUrl="",
Remediation=Remediation(
Code=Code(CLI="", NativeIaC="", Other="", Terraform=""),
Recommendation=Recommendation(
Text="To remediate this issue, ensure that there are no potential enumeration threats in CloudTrail.",
Url="https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-concepts.html#cloudtrail-concepts-logging-data-events",
),
),
Categories=["threat-detection"],
DependsOn=[],
RelatedTo=[],
Notes="",
Compliance=[],
)
def test_load_checks_to_execute(self):
bulk_checks_metatada = {
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata()
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_s3_metadata()
}
assert {S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME} == load_checks_to_execute(
@@ -64,7 +94,7 @@ class TestCheckLoader:
def test_load_checks_to_execute_with_check_list(self):
bulk_checks_metatada = {
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata()
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_s3_metadata()
}
check_list = [S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME]
@@ -76,7 +106,7 @@ class TestCheckLoader:
def test_load_checks_to_execute_with_severities(self):
bulk_checks_metatada = {
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata()
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_s3_metadata()
}
severities = [S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_SEVERITY]
@@ -88,7 +118,7 @@ class TestCheckLoader:
def test_load_checks_to_execute_with_severities_and_services(self):
bulk_checks_metatada = {
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata()
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_s3_metadata()
}
service_list = [S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME_SERVICE]
severities = [S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_SEVERITY]
@@ -104,7 +134,7 @@ class TestCheckLoader:
self,
):
bulk_checks_metatada = {
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata()
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_s3_metadata()
}
service_list = ["ec2"]
severities = [S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_SEVERITY]
@@ -120,7 +150,7 @@ class TestCheckLoader:
self,
):
bulk_checks_metatada = {
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata()
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_s3_metadata()
}
checks_file = "path/to/test_file"
with patch(
@@ -137,7 +167,7 @@ class TestCheckLoader:
self,
):
bulk_checks_metatada = {
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata()
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_s3_metadata()
}
service_list = [S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME_SERVICE]
@@ -178,7 +208,7 @@ class TestCheckLoader:
self,
):
bulk_checks_metatada = {
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata()
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_s3_metadata()
}
categories = {"internet-exposed"}
@@ -190,7 +220,7 @@ class TestCheckLoader:
def test_load_checks_to_execute_no_bulk_checks_metadata(self):
bulk_checks_metatada = {
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata()
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_s3_metadata()
}
with patch(
"prowler.lib.check.checks_loader.CheckMetadata.get_bulk",
@@ -221,7 +251,7 @@ class TestCheckLoader:
compliance_frameworks = ["soc2_aws"]
bulk_checks_metatada = {
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata()
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_s3_metadata()
}
with patch(
"prowler.lib.check.checks_loader.CheckMetadata.get_bulk",
@@ -248,3 +278,27 @@ class TestCheckLoader:
assert {"check1_name", "check2_name"} == update_checks_to_execute_with_aliases(
checks_to_execute, check_aliases
)
def test_threat_detection_category(self):
bulk_checks_metatada = {
CLOUDTRAIL_THREAT_DETECTION_ENUMERATION_NAME: self.get_threat_detection_check_metadata()
}
categories = {"threat-detection"}
assert {CLOUDTRAIL_THREAT_DETECTION_ENUMERATION_NAME} == load_checks_to_execute(
bulk_checks_metadata=bulk_checks_metatada,
categories=categories,
provider=self.provider,
)
def test_discard_threat_detection_checks(self):
bulk_checks_metatada = {
CLOUDTRAIL_THREAT_DETECTION_ENUMERATION_NAME: self.get_threat_detection_check_metadata()
}
categories = {}
assert set() == load_checks_to_execute(
bulk_checks_metadata=bulk_checks_metatada,
categories=categories,
provider=self.provider,
)

View File

@@ -32,6 +32,35 @@ mock_metadata = CheckMetadata(
Compliance=[],
)
mock_metadata_lambda = CheckMetadata(
Provider="aws",
CheckID="awslambda_function_url_public",
CheckTitle="Check 1",
CheckType=["type1"],
ServiceName="lambda",
SubServiceName="subservice1",
ResourceIdTemplate="template1",
Severity="high",
ResourceType="resource1",
Description="Description 1",
Risk="risk1",
RelatedUrl="url1",
Remediation={
"Code": {
"CLI": "cli1",
"NativeIaC": "native1",
"Other": "other1",
"Terraform": "terraform1",
},
"Recommendation": {"Text": "text1", "Url": "url1"},
},
Categories=["categoryone"],
DependsOn=["dependency1"],
RelatedTo=["related1"],
Notes="notes1",
Compliance=[],
)
class TestCheckMetada:
@@ -188,6 +217,46 @@ class TestCheckMetada:
# Assertions
assert result == {"accessanalyzer_enabled"}
@mock.patch("prowler.lib.check.models.load_check_metadata")
@mock.patch("prowler.lib.check.models.recover_checks_from_provider")
def test_list_by_service_lambda(self, mock_recover_checks, mock_load_metadata):
# Mock the return value of recover_checks_from_provider
mock_recover_checks.return_value = [
("awslambda_function_url_public", "/path/to/awslambda_function_url_public")
]
# Mock the return value of load_check_metadata
mock_load_metadata.return_value = mock_metadata_lambda
bulk_metadata = CheckMetadata.get_bulk(provider="aws")
result = CheckMetadata.list(
bulk_checks_metadata=bulk_metadata, service="lambda"
)
# Assertions
assert result == {"awslambda_function_url_public"}
@mock.patch("prowler.lib.check.models.load_check_metadata")
@mock.patch("prowler.lib.check.models.recover_checks_from_provider")
def test_list_by_service_awslambda(self, mock_recover_checks, mock_load_metadata):
# Mock the return value of recover_checks_from_provider
mock_recover_checks.return_value = [
("awslambda_function_url_public", "/path/to/awslambda_function_url_public")
]
# Mock the return value of load_check_metadata
mock_load_metadata.return_value = mock_metadata_lambda
bulk_metadata = CheckMetadata.get_bulk(provider="aws")
result = CheckMetadata.list(
bulk_checks_metadata=bulk_metadata, service="awslambda"
)
# Assertions
assert result == {"awslambda_function_url_public"}
@mock.patch("prowler.lib.check.models.load_check_metadata")
@mock.patch("prowler.lib.check.models.recover_checks_from_provider")
def test_list_by_service_invalid(self, mock_recover_checks, mock_load_metadata):

View File

@@ -19,10 +19,10 @@ def mock_get_trail_arn_template(region=None, *_) -> str:
def mock__get_lookup_events__(trail=None, event_name=None, minutes=None, *_) -> list:
return [
{
"CloudTrailEvent": '{"eventName": "DescribeAccessEntry", "userIdentity": {"type": "IAMUser", "principalId": "EXAMPLE6E4XEGITWATV6R", "arn": "arn:aws:iam::123456789012:user/Mateo", "accountId": "123456789012", "accessKeyId": "AKIAIOSFODNN7EXAMPLE", "userName": "Mateo", "sessionContext": {"sessionIssuer": {}, "webIdFederationData": {}, "attributes": {"creationDate": "2023-07-19T21:11:57Z", "mfaAuthenticated": "false"}}}}'
"CloudTrailEvent": '{"eventName": "DescribeAccessEntry", "userIdentity": {"type": "IAMUser", "principalId": "EXAMPLE6E4XEGITWATV6R", "arn": "arn:aws:iam::123456789012:user/Attacker", "accountId": "123456789012", "accessKeyId": "AKIAIOSFODNN7EXAMPLE", "userName": "Attacker", "sessionContext": {"sessionIssuer": {}, "webIdFederationData": {}, "attributes": {"creationDate": "2023-07-19T21:11:57Z", "mfaAuthenticated": "false"}}}}'
},
{
"CloudTrailEvent": '{"eventName": "DescribeAccountAttributes", "userIdentity": {"type": "IAMUser", "principalId": "EXAMPLE6E4XEGITWATV6R", "arn": "arn:aws:iam::123456789012:user/Mateo", "accountId": "123456789012", "accessKeyId": "AKIAIOSFODNN7EXAMPLE", "userName": "Mateo", "sessionContext": {"sessionIssuer": {}, "webIdFederationData": {}, "attributes": {"creationDate": "2023-07-19T21:11:57Z", "mfaAuthenticated": "false"}}}}'
"CloudTrailEvent": '{"eventName": "DescribeAccountAttributes", "userIdentity": {"type": "IAMUser", "principalId": "EXAMPLE6E4XEGITWATV6R", "arn": "arn:aws:iam::123456789012:user/Attacker", "accountId": "123456789012", "accessKeyId": "AKIAIOSFODNN7EXAMPLE", "userName": "Attacker", "sessionContext": {"sessionIssuer": {}, "webIdFederationData": {}, "attributes": {"creationDate": "2023-07-19T21:11:57Z", "mfaAuthenticated": "false"}}}}'
},
]
@@ -50,12 +50,15 @@ class Test_cloudtrail_threat_detection_enumeration:
cloudtrail_client.audited_account = AWS_ACCOUNT_NUMBER
cloudtrail_client.region = AWS_REGION_US_EAST_1
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
), mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_enumeration.cloudtrail_threat_detection_enumeration.cloudtrail_client",
new=cloudtrail_client,
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
),
mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_enumeration.cloudtrail_threat_detection_enumeration.cloudtrail_client",
new=cloudtrail_client,
),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_enumeration.cloudtrail_threat_detection_enumeration import (
@@ -99,12 +102,15 @@ class Test_cloudtrail_threat_detection_enumeration:
cloudtrail_client._lookup_events = mock__get_lookup_events__
cloudtrail_client._get_trail_arn_template = mock_get_trail_arn_template
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
), mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_enumeration.cloudtrail_threat_detection_enumeration.cloudtrail_client",
new=cloudtrail_client,
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
),
mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_enumeration.cloudtrail_threat_detection_enumeration.cloudtrail_client",
new=cloudtrail_client,
),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_enumeration.cloudtrail_threat_detection_enumeration import (
@@ -148,12 +154,15 @@ class Test_cloudtrail_threat_detection_enumeration:
cloudtrail_client._lookup_events = mock__get_lookup_events__
cloudtrail_client._get_trail_arn_template = mock_get_trail_arn_template
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
), mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_enumeration.cloudtrail_threat_detection_enumeration.cloudtrail_client",
new=cloudtrail_client,
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
),
mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_enumeration.cloudtrail_threat_detection_enumeration.cloudtrail_client",
new=cloudtrail_client,
),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_enumeration.cloudtrail_threat_detection_enumeration import (
@@ -167,13 +176,13 @@ class Test_cloudtrail_threat_detection_enumeration:
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "Potential enumeration attack detected from AWS IAMUser Mateo with an threshold of 1.0."
== "Potential enumeration attack detected from AWS IAMUser Attacker with an threshold of 1.0."
)
assert result[0].resource_id == AWS_ACCOUNT_NUMBER
assert result[0].resource_id == "Attacker"
assert result[0].region == AWS_REGION_US_EAST_1
assert (
result[0].resource_arn
== f"arn:aws:cloudtrail:{AWS_REGION_US_EAST_1}:{AWS_ACCOUNT_NUMBER}:trail"
== f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:user/Attacker"
)
@mock_aws
@@ -198,12 +207,15 @@ class Test_cloudtrail_threat_detection_enumeration:
cloudtrail_client._lookup_events = mock__get_lookup_events__
cloudtrail_client._get_trail_arn_template = mock_get_trail_arn_template
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
), mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_enumeration.cloudtrail_threat_detection_enumeration.cloudtrail_client",
new=cloudtrail_client,
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
),
mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_enumeration.cloudtrail_threat_detection_enumeration.cloudtrail_client",
new=cloudtrail_client,
),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_enumeration.cloudtrail_threat_detection_enumeration import (
@@ -247,12 +259,15 @@ class Test_cloudtrail_threat_detection_enumeration:
cloudtrail_client._lookup_events = mock__get_lookup_events_aws_service__
cloudtrail_client._get_trail_arn_template = mock_get_trail_arn_template
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
), mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_enumeration.cloudtrail_threat_detection_enumeration.cloudtrail_client",
new=cloudtrail_client,
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
),
mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_enumeration.cloudtrail_threat_detection_enumeration.cloudtrail_client",
new=cloudtrail_client,
),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_enumeration.cloudtrail_threat_detection_enumeration import (

View File

@@ -19,10 +19,10 @@ def mock_get_trail_arn_template(region=None, *_) -> str:
def mock__get_lookup_events__(trail=None, event_name=None, minutes=None, *_) -> list:
return [
{
"CloudTrailEvent": '{"eventName": "InvokeModel", "userIdentity": {"type": "IAMUser", "principalId": "EXAMPLE6E4XEGITWATV6R", "arn": "arn:aws:iam::123456789012:user/Mateo", "accountId": "123456789012", "accessKeyId": "AKIAIOSFODNN7EXAMPLE", "userName": "Mateo", "sessionContext": {"sessionIssuer": {}, "webIdFederationData": {}, "attributes": {"creationDate": "2023-07-19T21:11:57Z", "mfaAuthenticated": "false"}}}}'
"CloudTrailEvent": '{"eventName": "InvokeModel", "userIdentity": {"type": "IAMUser", "principalId": "EXAMPLE6E4XEGITWATV6R", "arn": "arn:aws:iam::123456789012:user/Attacker", "accountId": "123456789012", "accessKeyId": "AKIAIOSFODNN7EXAMPLE", "userName": "Attacker", "sessionContext": {"sessionIssuer": {}, "webIdFederationData": {}, "attributes": {"creationDate": "2023-07-19T21:11:57Z", "mfaAuthenticated": "false"}}}}'
},
{
"CloudTrailEvent": '{"eventName": "InvokeModelWithResponseStream", "userIdentity": {"type": "IAMUser", "principalId": "EXAMPLE6E4XEGITWATV6R", "arn": "arn:aws:iam::123456789012:user/Mateo", "accountId": "123456789012", "accessKeyId": "AKIAIOSFODNN7EXAMPLE", "userName": "Mateo", "sessionContext": {"sessionIssuer": {}, "webIdFederationData": {}, "attributes": {"creationDate": "2023-07-19T21:11:57Z", "mfaAuthenticated": "false"}}}}'
"CloudTrailEvent": '{"eventName": "InvokeModelWithResponseStream", "userIdentity": {"type": "IAMUser", "principalId": "EXAMPLE6E4XEGITWATV6R", "arn": "arn:aws:iam::123456789012:user/Attacker", "accountId": "123456789012", "accessKeyId": "AKIAIOSFODNN7EXAMPLE", "userName": "Attacker", "sessionContext": {"sessionIssuer": {}, "webIdFederationData": {}, "attributes": {"creationDate": "2023-07-19T21:11:57Z", "mfaAuthenticated": "false"}}}}'
},
]
@@ -50,12 +50,15 @@ class Test_cloudtrail_threat_detection_llm_jacking:
cloudtrail_client.audited_account = AWS_ACCOUNT_NUMBER
cloudtrail_client.region = AWS_REGION_US_EAST_1
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
), mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_llm_jacking.cloudtrail_threat_detection_llm_jacking.cloudtrail_client",
new=cloudtrail_client,
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
),
mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_llm_jacking.cloudtrail_threat_detection_llm_jacking.cloudtrail_client",
new=cloudtrail_client,
),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_llm_jacking.cloudtrail_threat_detection_llm_jacking import (
@@ -96,12 +99,15 @@ class Test_cloudtrail_threat_detection_llm_jacking:
cloudtrail_client._lookup_events = mock__get_lookup_events__
cloudtrail_client._get_trail_arn_template = mock_get_trail_arn_template
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
), mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_llm_jacking.cloudtrail_threat_detection_llm_jacking.cloudtrail_client",
new=cloudtrail_client,
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
),
mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_llm_jacking.cloudtrail_threat_detection_llm_jacking.cloudtrail_client",
new=cloudtrail_client,
),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_llm_jacking.cloudtrail_threat_detection_llm_jacking import (
@@ -145,12 +151,15 @@ class Test_cloudtrail_threat_detection_llm_jacking:
cloudtrail_client._lookup_events = mock__get_lookup_events__
cloudtrail_client._get_trail_arn_template = mock_get_trail_arn_template
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
), mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_llm_jacking.cloudtrail_threat_detection_llm_jacking.cloudtrail_client",
new=cloudtrail_client,
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
),
mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_llm_jacking.cloudtrail_threat_detection_llm_jacking.cloudtrail_client",
new=cloudtrail_client,
),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_llm_jacking.cloudtrail_threat_detection_llm_jacking import (
@@ -164,13 +173,13 @@ class Test_cloudtrail_threat_detection_llm_jacking:
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "Potential LLM Jacking attack detected from AWS IAMUser Mateo with an threshold of 1.0."
== "Potential LLM Jacking attack detected from AWS IAMUser Attacker with an threshold of 1.0."
)
assert result[0].resource_id == AWS_ACCOUNT_NUMBER
assert result[0].resource_id == "Attacker"
assert result[0].region == AWS_REGION_US_EAST_1
assert (
result[0].resource_arn
== f"arn:aws:cloudtrail:{AWS_REGION_US_EAST_1}:{AWS_ACCOUNT_NUMBER}:trail"
== f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:user/Attacker"
)
@mock_aws
@@ -195,12 +204,15 @@ class Test_cloudtrail_threat_detection_llm_jacking:
cloudtrail_client._lookup_events = mock__get_lookup_events__
cloudtrail_client._get_trail_arn_template = mock_get_trail_arn_template
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
), mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_llm_jacking.cloudtrail_threat_detection_llm_jacking.cloudtrail_client",
new=cloudtrail_client,
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
),
mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_llm_jacking.cloudtrail_threat_detection_llm_jacking.cloudtrail_client",
new=cloudtrail_client,
),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_llm_jacking.cloudtrail_threat_detection_llm_jacking import (
@@ -244,12 +256,15 @@ class Test_cloudtrail_threat_detection_llm_jacking:
cloudtrail_client._lookup_events = mock__get_lookup_events_aws_service__
cloudtrail_client._get_trail_arn_template = mock_get_trail_arn_template
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
), mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_llm_jacking.cloudtrail_threat_detection_llm_jacking.cloudtrail_client",
new=cloudtrail_client,
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
),
mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_llm_jacking.cloudtrail_threat_detection_llm_jacking.cloudtrail_client",
new=cloudtrail_client,
),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_llm_jacking.cloudtrail_threat_detection_llm_jacking import (

View File

@@ -19,10 +19,10 @@ def mock_get_trail_arn_template(region=None, *_) -> str:
def mock__get_lookup_events__(trail=None, event_name=None, minutes=None, *_) -> list:
return [
{
"CloudTrailEvent": '{"eventName": "CreateLoginProfile", "userIdentity": {"type": "IAMUser", "principalId": "EXAMPLE6E4XEGITWATV6R", "arn": "arn:aws:iam::123456789012:user/Mateo", "accountId": "123456789012", "accessKeyId": "AKIAIOSFODNN7EXAMPLE", "userName": "Mateo", "sessionContext": {"sessionIssuer": {}, "webIdFederationData": {}, "attributes": {"creationDate": "2023-07-19T21:11:57Z", "mfaAuthenticated": "false"}}}}'
"CloudTrailEvent": '{"eventName": "CreateLoginProfile", "userIdentity": {"type": "IAMUser", "principalId": "EXAMPLE6E4XEGITWATV6R", "arn": "arn:aws:iam::123456789012:user/Attacker", "accountId": "123456789012", "accessKeyId": "AKIAIOSFODNN7EXAMPLE", "userName": "Attacker", "sessionContext": {"sessionIssuer": {}, "webIdFederationData": {}, "attributes": {"creationDate": "2023-07-19T21:11:57Z", "mfaAuthenticated": "false"}}}}'
},
{
"CloudTrailEvent": '{"eventName": "UpdateLoginProfile", "userIdentity": {"type": "IAMUser", "principalId": "EXAMPLE6E4XEGITWATV6R", "arn": "arn:aws:iam::123456789012:user/Mateo", "accountId": "123456789012", "accessKeyId": "AKIAIOSFODNN7EXAMPLE", "userName": "Mateo", "sessionContext": {"sessionIssuer": {}, "webIdFederationData": {}, "attributes": {"creationDate": "2023-07-19T21:11:57Z", "mfaAuthenticated": "false"}}}}'
"CloudTrailEvent": '{"eventName": "UpdateLoginProfile", "userIdentity": {"type": "IAMUser", "principalId": "EXAMPLE6E4XEGITWATV6R", "arn": "arn:aws:iam::123456789012:user/Attacker", "accountId": "123456789012", "accessKeyId": "AKIAIOSFODNN7EXAMPLE", "userName": "Attacker", "sessionContext": {"sessionIssuer": {}, "webIdFederationData": {}, "attributes": {"creationDate": "2023-07-19T21:11:57Z", "mfaAuthenticated": "false"}}}}'
},
]
@@ -50,12 +50,15 @@ class Test_cloudtrail_threat_detection_privilege_escalation:
cloudtrail_client.audited_account = AWS_ACCOUNT_NUMBER
cloudtrail_client.region = AWS_REGION_US_EAST_1
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
), mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_privilege_escalation.cloudtrail_threat_detection_privilege_escalation.cloudtrail_client",
new=cloudtrail_client,
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
),
mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_privilege_escalation.cloudtrail_threat_detection_privilege_escalation.cloudtrail_client",
new=cloudtrail_client,
),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_privilege_escalation.cloudtrail_threat_detection_privilege_escalation import (
@@ -97,12 +100,15 @@ class Test_cloudtrail_threat_detection_privilege_escalation:
cloudtrail_client._lookup_events = mock__get_lookup_events__
cloudtrail_client._get_trail_arn_template = mock_get_trail_arn_template
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
), mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_privilege_escalation.cloudtrail_threat_detection_privilege_escalation.cloudtrail_client",
new=cloudtrail_client,
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
),
mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_privilege_escalation.cloudtrail_threat_detection_privilege_escalation.cloudtrail_client",
new=cloudtrail_client,
),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_privilege_escalation.cloudtrail_threat_detection_privilege_escalation import (
@@ -147,12 +153,15 @@ class Test_cloudtrail_threat_detection_privilege_escalation:
cloudtrail_client._lookup_events = mock__get_lookup_events__
cloudtrail_client._get_trail_arn_template = mock_get_trail_arn_template
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
), mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_privilege_escalation.cloudtrail_threat_detection_privilege_escalation.cloudtrail_client",
new=cloudtrail_client,
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
),
mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_privilege_escalation.cloudtrail_threat_detection_privilege_escalation.cloudtrail_client",
new=cloudtrail_client,
),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_privilege_escalation.cloudtrail_threat_detection_privilege_escalation import (
@@ -166,13 +175,13 @@ class Test_cloudtrail_threat_detection_privilege_escalation:
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "Potential privilege escalation attack detected from AWS IAMUser Mateo with an threshold of 1.0."
== "Potential privilege escalation attack detected from AWS IAMUser Attacker with an threshold of 1.0."
)
assert result[0].resource_id == AWS_ACCOUNT_NUMBER
assert result[0].resource_id == "Attacker"
assert result[0].region == AWS_REGION_US_EAST_1
assert (
result[0].resource_arn
== f"arn:aws:cloudtrail:{AWS_REGION_US_EAST_1}:{AWS_ACCOUNT_NUMBER}:trail"
== f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:user/Attacker"
)
@mock_aws
@@ -197,12 +206,15 @@ class Test_cloudtrail_threat_detection_privilege_escalation:
cloudtrail_client._lookup_events = mock__get_lookup_events__
cloudtrail_client._get_trail_arn_template = mock_get_trail_arn_template
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
), mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_privilege_escalation.cloudtrail_threat_detection_privilege_escalation.cloudtrail_client",
new=cloudtrail_client,
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
),
mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_privilege_escalation.cloudtrail_threat_detection_privilege_escalation.cloudtrail_client",
new=cloudtrail_client,
),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_privilege_escalation.cloudtrail_threat_detection_privilege_escalation import (
@@ -247,12 +259,15 @@ class Test_cloudtrail_threat_detection_privilege_escalation:
cloudtrail_client._lookup_events = mock__get_lookup_events_aws_service__
cloudtrail_client._get_trail_arn_template = mock_get_trail_arn_template
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
), mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_privilege_escalation.cloudtrail_threat_detection_privilege_escalation.cloudtrail_client",
new=cloudtrail_client,
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider(),
),
mock.patch(
"prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_privilege_escalation.cloudtrail_threat_detection_privilege_escalation.cloudtrail_client",
new=cloudtrail_client,
),
):
# Test Check
from prowler.providers.aws.services.cloudtrail.cloudtrail_threat_detection_privilege_escalation.cloudtrail_threat_detection_privilege_escalation import (

View File

@@ -21,6 +21,9 @@ class Test_documentdb_cluster_backup_enabled:
with mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_service.DocumentDB",
new=documentdb_client,
), mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_client.documentdb_client",
new=documentdb_client,
):
from prowler.providers.aws.services.documentdb.documentdb_cluster_backup_enabled.documentdb_cluster_backup_enabled import (
documentdb_cluster_backup_enabled,
@@ -52,6 +55,9 @@ class Test_documentdb_cluster_backup_enabled:
with mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_service.DocumentDB",
new=documentdb_client,
), mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_client.documentdb_client",
new=documentdb_client,
):
from prowler.providers.aws.services.documentdb.documentdb_cluster_backup_enabled.documentdb_cluster_backup_enabled import (
documentdb_cluster_backup_enabled,
@@ -92,6 +98,9 @@ class Test_documentdb_cluster_backup_enabled:
with mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_service.DocumentDB",
new=documentdb_client,
), mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_client.documentdb_client",
new=documentdb_client,
):
from prowler.providers.aws.services.documentdb.documentdb_cluster_backup_enabled.documentdb_cluster_backup_enabled import (
documentdb_cluster_backup_enabled,
@@ -130,6 +139,9 @@ class Test_documentdb_cluster_backup_enabled:
with mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_service.DocumentDB",
new=documentdb_client,
), mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_client.documentdb_client",
new=documentdb_client,
):
from prowler.providers.aws.services.documentdb.documentdb_cluster_backup_enabled.documentdb_cluster_backup_enabled import (
documentdb_cluster_backup_enabled,
@@ -168,6 +180,9 @@ class Test_documentdb_cluster_backup_enabled:
with mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_service.DocumentDB",
new=documentdb_client,
), mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_client.documentdb_client",
new=documentdb_client,
):
from prowler.providers.aws.services.documentdb.documentdb_cluster_backup_enabled.documentdb_cluster_backup_enabled import (
documentdb_cluster_backup_enabled,
@@ -207,6 +222,9 @@ class Test_documentdb_cluster_backup_enabled:
with mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_service.DocumentDB",
new=documentdb_client,
), mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_client.documentdb_client",
new=documentdb_client,
):
from prowler.providers.aws.services.documentdb.documentdb_cluster_backup_enabled.documentdb_cluster_backup_enabled import (
documentdb_cluster_backup_enabled,

View File

@@ -20,6 +20,9 @@ class Test_documentdb_cluster_cloudwatch_log_export:
with mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_service.DocumentDB",
new=documentdb_client,
), mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_client.documentdb_client",
new=documentdb_client,
):
from prowler.providers.aws.services.documentdb.documentdb_cluster_cloudwatch_log_export.documentdb_cluster_cloudwatch_log_export import (
documentdb_cluster_cloudwatch_log_export,
@@ -51,6 +54,9 @@ class Test_documentdb_cluster_cloudwatch_log_export:
with mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_service.DocumentDB",
new=documentdb_client,
), mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_client.documentdb_client",
new=documentdb_client,
):
from prowler.providers.aws.services.documentdb.documentdb_cluster_cloudwatch_log_export.documentdb_cluster_cloudwatch_log_export import (
documentdb_cluster_cloudwatch_log_export,
@@ -90,6 +96,9 @@ class Test_documentdb_cluster_cloudwatch_log_export:
with mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_service.DocumentDB",
new=documentdb_client,
), mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_client.documentdb_client",
new=documentdb_client,
):
from prowler.providers.aws.services.documentdb.documentdb_cluster_cloudwatch_log_export.documentdb_cluster_cloudwatch_log_export import (
documentdb_cluster_cloudwatch_log_export,
@@ -127,6 +136,9 @@ class Test_documentdb_cluster_cloudwatch_log_export:
with mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_service.DocumentDB",
new=documentdb_client,
), mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_client.documentdb_client",
new=documentdb_client,
):
from prowler.providers.aws.services.documentdb.documentdb_cluster_cloudwatch_log_export.documentdb_cluster_cloudwatch_log_export import (
documentdb_cluster_cloudwatch_log_export,
@@ -164,6 +176,9 @@ class Test_documentdb_cluster_cloudwatch_log_export:
with mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_service.DocumentDB",
new=documentdb_client,
), mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_client.documentdb_client",
new=documentdb_client,
):
from prowler.providers.aws.services.documentdb.documentdb_cluster_cloudwatch_log_export.documentdb_cluster_cloudwatch_log_export import (
documentdb_cluster_cloudwatch_log_export,

View File

@@ -20,6 +20,9 @@ class Test_documentdb_cluster_deletion_protection:
with mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_service.DocumentDB",
new=documentdb_client,
), mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_client.documentdb_client",
new=documentdb_client,
):
from prowler.providers.aws.services.documentdb.documentdb_cluster_deletion_protection.documentdb_cluster_deletion_protection import (
documentdb_cluster_deletion_protection,
@@ -51,6 +54,9 @@ class Test_documentdb_cluster_deletion_protection:
with mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_service.DocumentDB",
new=documentdb_client,
), mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_client.documentdb_client",
new=documentdb_client,
):
from prowler.providers.aws.services.documentdb.documentdb_cluster_deletion_protection.documentdb_cluster_deletion_protection import (
documentdb_cluster_deletion_protection,
@@ -90,6 +96,9 @@ class Test_documentdb_cluster_deletion_protection:
with mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_service.DocumentDB",
new=documentdb_client,
), mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_client.documentdb_client",
new=documentdb_client,
):
from prowler.providers.aws.services.documentdb.documentdb_cluster_deletion_protection.documentdb_cluster_deletion_protection import (
documentdb_cluster_deletion_protection,

View File

@@ -20,6 +20,9 @@ class Test_documentdb_cluster_multi_az_enabled:
with mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_service.DocumentDB",
new=documentdb_client,
), mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_client.documentdb_client",
new=documentdb_client,
):
from prowler.providers.aws.services.documentdb.documentdb_cluster_multi_az_enabled.documentdb_cluster_multi_az_enabled import (
documentdb_cluster_multi_az_enabled,
@@ -51,6 +54,9 @@ class Test_documentdb_cluster_multi_az_enabled:
with mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_service.DocumentDB",
new=documentdb_client,
), mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_client.documentdb_client",
new=documentdb_client,
):
from prowler.providers.aws.services.documentdb.documentdb_cluster_multi_az_enabled.documentdb_cluster_multi_az_enabled import (
documentdb_cluster_multi_az_enabled,
@@ -89,6 +95,9 @@ class Test_documentdb_cluster_multi_az_enabled:
with mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_service.DocumentDB",
new=documentdb_client,
), mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_client.documentdb_client",
new=documentdb_client,
):
from prowler.providers.aws.services.documentdb.documentdb_cluster_multi_az_enabled.documentdb_cluster_multi_az_enabled import (
documentdb_cluster_multi_az_enabled,

View File

@@ -22,6 +22,9 @@ class Test_documentdb_cluster_public_snapshot:
with mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_service.DocumentDB",
new=documentdb_client,
), mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_client.documentdb_client",
new=documentdb_client,
):
from prowler.providers.aws.services.documentdb.documentdb_cluster_public_snapshot.documentdb_cluster_public_snapshot import (
documentdb_cluster_public_snapshot,
@@ -63,6 +66,9 @@ class Test_documentdb_cluster_public_snapshot:
with mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_service.DocumentDB",
new=documentdb_client,
), mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_client.documentdb_client",
new=documentdb_client,
):
from prowler.providers.aws.services.documentdb.documentdb_cluster_public_snapshot.documentdb_cluster_public_snapshot import (
documentdb_cluster_public_snapshot,
@@ -116,6 +122,9 @@ class Test_documentdb_cluster_public_snapshot:
with mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_service.DocumentDB",
new=documentdb_client,
), mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_client.documentdb_client",
new=documentdb_client,
):
from prowler.providers.aws.services.documentdb.documentdb_cluster_public_snapshot.documentdb_cluster_public_snapshot import (
documentdb_cluster_public_snapshot,

View File

@@ -20,6 +20,9 @@ class Test_documentdb_cluster_storage_encrypted:
with mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_service.DocumentDB",
new=documentdb_client,
), mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_client.documentdb_client",
new=documentdb_client,
):
from prowler.providers.aws.services.documentdb.documentdb_cluster_storage_encrypted.documentdb_cluster_storage_encrypted import (
documentdb_cluster_storage_encrypted,
@@ -51,6 +54,9 @@ class Test_documentdb_cluster_storage_encrypted:
with mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_service.DocumentDB",
new=documentdb_client,
), mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_client.documentdb_client",
new=documentdb_client,
):
from prowler.providers.aws.services.documentdb.documentdb_cluster_storage_encrypted.documentdb_cluster_storage_encrypted import (
documentdb_cluster_storage_encrypted,
@@ -89,6 +95,9 @@ class Test_documentdb_cluster_storage_encrypted:
with mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_service.DocumentDB",
new=documentdb_client,
), mock.patch(
"prowler.providers.aws.services.documentdb.documentdb_client.documentdb_client",
new=documentdb_client,
):
from prowler.providers.aws.services.documentdb.documentdb_cluster_storage_encrypted.documentdb_cluster_storage_encrypted import (
documentdb_cluster_storage_encrypted,

View File

@@ -17,12 +17,15 @@ class Test_firehose_stream_encrypted_at_rest:
aws_provider = set_mocked_aws_provider([AWS_REGION_EU_WEST_1])
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=aws_provider,
), mock.patch(
"prowler.providers.aws.services.firehose.firehose_stream_encrypted_at_rest.firehose_stream_encrypted_at_rest.firehose_client",
new=Firehose(aws_provider),
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=aws_provider,
),
mock.patch(
"prowler.providers.aws.services.firehose.firehose_stream_encrypted_at_rest.firehose_stream_encrypted_at_rest.firehose_client",
new=Firehose(aws_provider),
),
):
# Test Check
from prowler.providers.aws.services.firehose.firehose_stream_encrypted_at_rest.firehose_stream_encrypted_at_rest import (
@@ -94,6 +97,65 @@ class Test_firehose_stream_encrypted_at_rest:
== f"Firehose Stream {stream_name} does have at rest encryption enabled."
)
@mock_aws
def test_stream_kms_encryption_enabled_aws_managed_key(self):
# Generate S3 client
s3_client = client("s3", region_name=AWS_REGION_EU_WEST_1)
s3_client.create_bucket(
Bucket="test-bucket",
CreateBucketConfiguration={"LocationConstraint": AWS_REGION_EU_WEST_1},
)
# Generate Firehose client
firehose_client = client("firehose", region_name=AWS_REGION_EU_WEST_1)
delivery_stream = firehose_client.create_delivery_stream(
DeliveryStreamName="test-delivery-stream",
DeliveryStreamType="DirectPut",
S3DestinationConfiguration={
"RoleARN": "arn:aws:iam::012345678901:role/firehose-role",
"BucketARN": "arn:aws:s3:::test-bucket",
"Prefix": "",
"BufferingHints": {"IntervalInSeconds": 300, "SizeInMBs": 5},
"CompressionFormat": "UNCOMPRESSED",
},
Tags=[{"Key": "key", "Value": "value"}],
)
arn = delivery_stream["DeliveryStreamARN"]
stream_name = arn.split("/")[-1]
firehose_client.start_delivery_stream_encryption(
DeliveryStreamName=stream_name,
DeliveryStreamEncryptionConfigurationInput={
"KeyType": "AWS_OWNED_CMK",
},
)
from prowler.providers.aws.services.firehose.firehose_service import Firehose
aws_provider = set_mocked_aws_provider([AWS_REGION_EU_WEST_1])
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=aws_provider,
):
with mock.patch(
"prowler.providers.aws.services.firehose.firehose_stream_encrypted_at_rest.firehose_stream_encrypted_at_rest.firehose_client",
new=Firehose(aws_provider),
):
# Test Check
from prowler.providers.aws.services.firehose.firehose_stream_encrypted_at_rest.firehose_stream_encrypted_at_rest import (
firehose_stream_encrypted_at_rest,
)
check = firehose_stream_encrypted_at_rest()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"Firehose Stream {stream_name} does have at rest encryption enabled."
)
@mock_aws
def test_stream_kms_encryption_not_enabled(self):
# Generate Firehose client
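The Firehose hunk above, like the WAFv2 and GCP hunks further down, replaces comma-chained context managers with the parenthesized with-statement added in Python 3.10; the behaviour is identical, only the layout changes. A minimal, self-contained comparison using stand-in context managers:

    from contextlib import nullcontext

    # Old style: managers chained with commas on one logical line
    with nullcontext("a") as first, nullcontext("b") as second:
        print(first, second)

    # New style (Python 3.10+): the same managers wrapped in parentheses,
    # allowing one manager per line and a trailing comma
    with (
        nullcontext("a") as first,
        nullcontext("b") as second,
    ):
        print(first, second)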

View File

@@ -1,10 +1,61 @@
from unittest import mock
from unittest.mock import patch
import botocore
from boto3 import client
from moto import mock_aws
from tests.providers.aws.utils import AWS_REGION_US_EAST_1, set_mocked_aws_provider
# Original botocore _make_api_call function
orig = botocore.client.BaseClient._make_api_call
FM_RG_NAME = "test-firewall-managed-rule-group"
FM_RG_ARN = "arn:aws:wafv2:us-east-1:123456789012:regional/webacl/test-firewall-managed-rule-group"
# Mocked botocore _make_api_call function
def mock_make_api_call(self, operation_name, kwarg):
if operation_name == "ListWebACLs":
return {
"WebACLs": [
{
"Name": FM_RG_NAME,
"Id": FM_RG_NAME,
"ARN": FM_RG_ARN,
}
]
}
elif operation_name == "GetWebACL":
return {
"WebACL": {
"PostProcessFirewallManagerRuleGroups": [
{
"Name": FM_RG_NAME,
"VisibilityConfig": {
"SampledRequestsEnabled": True,
"CloudWatchMetricsEnabled": True,
"MetricName": "web-acl-test-metric",
},
}
]
}
}
elif operation_name == "ListResourcesForWebACL":
return {
"ResourceArns": [
FM_RG_ARN,
]
}
elif operation_name == "ListTagsForResource":
return {
"TagInfoForResource": {
"ResourceARN": FM_RG_ARN,
"TagList": [{"Key": "Name", "Value": FM_RG_NAME}],
}
}
return orig(self, operation_name, kwarg)
class Test_wafv2_webacl_with_rules:
@mock_aws
@@ -13,12 +64,15 @@ class Test_wafv2_webacl_with_rules:
aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=aws_provider,
), mock.patch(
"prowler.providers.aws.services.wafv2.wafv2_webacl_with_rules.wafv2_webacl_with_rules.wafv2_client",
new=WAFv2(aws_provider),
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=aws_provider,
),
mock.patch(
"prowler.providers.aws.services.wafv2.wafv2_webacl_with_rules.wafv2_webacl_with_rules.wafv2_client",
new=WAFv2(aws_provider),
),
):
from prowler.providers.aws.services.wafv2.wafv2_webacl_with_rules.wafv2_webacl_with_rules import (
wafv2_webacl_with_rules,
@@ -69,12 +123,15 @@ class Test_wafv2_webacl_with_rules:
aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=aws_provider,
), mock.patch(
"prowler.providers.aws.services.wafv2.wafv2_webacl_with_rules.wafv2_webacl_with_rules.wafv2_client",
new=WAFv2(aws_provider),
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=aws_provider,
),
mock.patch(
"prowler.providers.aws.services.wafv2.wafv2_webacl_with_rules.wafv2_webacl_with_rules.wafv2_client",
new=WAFv2(aws_provider),
),
):
from prowler.providers.aws.services.wafv2.wafv2_webacl_with_rules.wafv2_webacl_with_rules import (
wafv2_webacl_with_rules,
@@ -137,12 +194,15 @@ class Test_wafv2_webacl_with_rules:
aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=aws_provider,
), mock.patch(
"prowler.providers.aws.services.wafv2.wafv2_webacl_with_rules.wafv2_webacl_with_rules.wafv2_client",
new=WAFv2(aws_provider),
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=aws_provider,
),
mock.patch(
"prowler.providers.aws.services.wafv2.wafv2_webacl_with_rules.wafv2_webacl_with_rules.wafv2_client",
new=WAFv2(aws_provider),
),
):
from prowler.providers.aws.services.wafv2.wafv2_webacl_with_rules.wafv2_webacl_with_rules import (
wafv2_webacl_with_rules,
@@ -161,6 +221,43 @@ class Test_wafv2_webacl_with_rules:
assert result[0].region == AWS_REGION_US_EAST_1
assert result[0].resource_tags == [{"Key": "Name", "Value": waf_name}]
@patch(
"botocore.client.BaseClient._make_api_call",
new=mock_make_api_call,
)
@mock_aws
def test_wafv2_web_acl_with_firewall_manager_managed_rule_group(self):
from prowler.providers.aws.services.wafv2.wafv2_service import WAFv2
aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=aws_provider,
),
mock.patch(
"prowler.providers.aws.services.wafv2.wafv2_webacl_with_rules.wafv2_webacl_with_rules.wafv2_client",
new=WAFv2(aws_provider),
),
):
from prowler.providers.aws.services.wafv2.wafv2_webacl_with_rules.wafv2_webacl_with_rules import (
wafv2_webacl_with_rules,
)
check = wafv2_webacl_with_rules()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"AWS WAFv2 Web ACL {FM_RG_NAME} does have rules or rule groups attached."
)
assert result[0].resource_id == FM_RG_NAME
assert result[0].resource_arn == FM_RG_ARN
assert result[0].region == AWS_REGION_US_EAST_1
assert result[0].resource_tags == [{"Key": "Name", "Value": FM_RG_NAME}]
@mock_aws
def test_wafv2_web_acl_without_rule_or_rule_group(self):
wafv2_client = client("wafv2", region_name=AWS_REGION_US_EAST_1)
@@ -184,12 +281,15 @@ class Test_wafv2_webacl_with_rules:
aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=aws_provider,
), mock.patch(
"prowler.providers.aws.services.wafv2.wafv2_webacl_with_rules.wafv2_webacl_with_rules.wafv2_client",
new=WAFv2(aws_provider),
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=aws_provider,
),
mock.patch(
"prowler.providers.aws.services.wafv2.wafv2_webacl_with_rules.wafv2_webacl_with_rules.wafv2_client",
new=WAFv2(aws_provider),
),
):
from prowler.providers.aws.services.wafv2.wafv2_webacl_with_rules.wafv2_webacl_with_rules import (
wafv2_webacl_with_rules,
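The new Firewall Manager test works by intercepting botocore one level below boto3: moto does not return PostProcessFirewallManagerRuleGroups, so the test swaps BaseClient._make_api_call for a shim that serves canned responses for the WAFv2 operations it needs and defers every other call to the original function. Reduced to its core, and with a hypothetical test body, the pattern is:

    import boto3
    import botocore.client
    from unittest.mock import patch

    # Keep a reference to the real dispatcher so unmocked operations still work
    orig = botocore.client.BaseClient._make_api_call

    def mock_make_api_call(self, operation_name, kwarg):
        if operation_name == "ListWebACLs":
            # Canned payload shaped like the wafv2 ListWebACLs response
            return {
                "WebACLs": [
                    {
                        "Name": "demo",
                        "Id": "demo-id",
                        "ARN": "arn:aws:wafv2:us-east-1:123456789012:regional/webacl/demo/demo-id",
                    }
                ]
            }
        return orig(self, operation_name, kwarg)

    @patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call)
    def test_list_web_acls_is_stubbed():
        waf = boto3.client("wafv2", region_name="us-east-1")
        assert waf.list_web_acls(Scope="REGIONAL")["WebACLs"][0]["Name"] == "demo"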

View File

@@ -57,7 +57,7 @@ class Test_containerregistry_not_publicly_accessible:
resource_group="mock_resource_group",
sku="Basic",
login_server="mock_login_server.azurecr.io",
public_network_access="Enabled",
public_network_access=True,
admin_user_enabled=True,
network_rule_set=NetworkRuleSet(default_action="Allow"),
private_endpoint_connections=[],
@@ -131,7 +131,7 @@ class Test_containerregistry_not_publicly_accessible:
resource_group="mock_resource_group",
sku="Basic",
login_server="mock_login_server.azurecr.io",
public_network_access="Enabled",
public_network_access=False,
admin_user_enabled=False,
network_rule_set=NetworkRuleSet(default_action="Deny"),
private_endpoint_connections=[],

View File

@@ -32,7 +32,7 @@ class TestContainerRegistryService:
resource_group="mock_resource_group",
sku="Basic",
login_server="mock_login_server.azurecr.io",
public_network_access="Enabled",
public_network_access=False,
admin_user_enabled=True,
network_rule_set=None,
private_endpoint_connections=[],
@@ -71,7 +71,7 @@ class TestContainerRegistryService:
assert registry_info.resource_group == "mock_resource_group"
assert registry_info.sku == "Basic"
assert registry_info.login_server == "mock_login_server.azurecr.io"
assert registry_info.public_network_access == "Enabled"
assert not registry_info.public_network_access
assert registry_info.admin_user_enabled is True
assert isinstance(registry_info.monitor_diagnostic_settings, list)
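Both Azure hunks track the same model change: public_network_access is now stored on the registry model as a bool instead of the SDK's "Enabled"/"Disabled" string, so the fixtures pass True/False and the service test asserts truthiness. A plausible normalization on the service side would be a one-line mapping like the sketch below (hypothetical; the ContainerRegistry service code itself is not part of this diff):

    def is_public_network_access_enabled(raw_value) -> bool:
        # Azure returns "Enabled"/"Disabled" (or None); normalize to a bool
        return raw_value == "Enabled"

    assert is_public_network_access_enabled("Enabled") is True
    assert is_public_network_access_enabled("Disabled") is False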

View File

@@ -51,18 +51,23 @@ class TestGCPProvider:
execute=MagicMock(return_value={"projects": projects})
)
with patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.setup_session",
return_value=(None, "test-project"),
), patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.get_projects",
return_value=projects,
), patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.update_projects_with_organizations",
return_value=None,
), patch(
"prowler.providers.gcp.gcp_provider.discovery.build",
return_value=mocked_service,
with (
patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.setup_session",
return_value=(None, "test-project"),
),
patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.get_projects",
return_value=projects,
),
patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.update_projects_with_organizations",
return_value=None,
),
patch(
"prowler.providers.gcp.gcp_provider.discovery.build",
return_value=mocked_service,
),
):
gcp_provider = GcpProvider(
project_id,
@@ -119,18 +124,23 @@ class TestGCPProvider:
mocked_service.projects.list.return_value = MagicMock(
execute=MagicMock(return_value={"projects": projects})
)
with patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.setup_session",
return_value=(None, None),
), patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.get_projects",
return_value=projects,
), patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.update_projects_with_organizations",
return_value=None,
), patch(
"prowler.providers.gcp.gcp_provider.discovery.build",
return_value=mocked_service,
with (
patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.setup_session",
return_value=(None, None),
),
patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.get_projects",
return_value=projects,
),
patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.update_projects_with_organizations",
return_value=None,
),
patch(
"prowler.providers.gcp.gcp_provider.discovery.build",
return_value=mocked_service,
),
):
gcp_provider = GcpProvider(
arguments.organization_id,
@@ -193,21 +203,27 @@ class TestGCPProvider:
mocked_service.projects.list.return_value = MagicMock(
execute=MagicMock(return_value={"projects": projects})
)
with patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.get_projects",
return_value=projects,
), patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.update_projects_with_organizations",
return_value=None,
), patch(
"os.path.abspath",
return_value="test_credentials_file",
), patch(
"prowler.providers.gcp.gcp_provider.default",
return_value=(mocked_credentials, MagicMock()),
), patch(
"prowler.providers.gcp.gcp_provider.discovery.build",
return_value=mocked_service,
with (
patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.get_projects",
return_value=projects,
),
patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.update_projects_with_organizations",
return_value=None,
),
patch(
"os.path.abspath",
return_value="test_credentials_file",
),
patch(
"prowler.providers.gcp.gcp_provider.default",
return_value=(mocked_credentials, MagicMock()),
),
patch(
"prowler.providers.gcp.gcp_provider.discovery.build",
return_value=mocked_service,
),
):
gcp_provider = GcpProvider(
arguments.organization_id,
@@ -257,21 +273,27 @@ class TestGCPProvider:
mocked_service.projects.list.return_value = MagicMock(
execute=MagicMock(return_value={"projects": projects})
)
with patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.get_projects",
return_value=projects,
), patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.update_projects_with_organizations",
return_value=None,
), patch(
"os.path.abspath",
return_value="test_credentials_file",
), patch(
"prowler.providers.gcp.gcp_provider.default",
return_value=(mocked_credentials, MagicMock()),
), patch(
"prowler.providers.gcp.gcp_provider.discovery.build",
return_value=mocked_service,
with (
patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.get_projects",
return_value=projects,
),
patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.update_projects_with_organizations",
return_value=None,
),
patch(
"os.path.abspath",
return_value="test_credentials_file",
),
patch(
"prowler.providers.gcp.gcp_provider.default",
return_value=(mocked_credentials, MagicMock()),
),
patch(
"prowler.providers.gcp.gcp_provider.discovery.build",
return_value=mocked_service,
),
):
gcp_provider = GcpProvider(
arguments.organization_id,
@@ -334,21 +356,27 @@ class TestGCPProvider:
mocked_service.projects.list.return_value = MagicMock(
execute=MagicMock(return_value={"projects": projects})
)
with patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.get_projects",
return_value=projects,
), patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.update_projects_with_organizations",
return_value=None,
), patch(
"os.path.abspath",
return_value="test_credentials_file",
), patch(
"prowler.providers.gcp.gcp_provider.default",
return_value=(mocked_credentials, MagicMock()),
), patch(
"prowler.providers.gcp.gcp_provider.discovery.build",
return_value=mocked_service,
with (
patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.get_projects",
return_value=projects,
),
patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.update_projects_with_organizations",
return_value=None,
),
patch(
"os.path.abspath",
return_value="test_credentials_file",
),
patch(
"prowler.providers.gcp.gcp_provider.default",
return_value=(mocked_credentials, MagicMock()),
),
patch(
"prowler.providers.gcp.gcp_provider.discovery.build",
return_value=mocked_service,
),
):
gcp_provider = GcpProvider(
arguments.organization_id,
@@ -401,21 +429,27 @@ class TestGCPProvider:
mocked_service.projects.list.return_value = MagicMock(
execute=MagicMock(return_value={"projects": projects})
)
with patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.get_projects",
return_value=projects,
), patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.update_projects_with_organizations",
return_value=None,
), patch(
"os.path.abspath",
return_value="test_credentials_file",
), patch(
"prowler.providers.gcp.gcp_provider.default",
return_value=(mocked_credentials, MagicMock()),
), patch(
"prowler.providers.gcp.gcp_provider.discovery.build",
return_value=mocked_service,
with (
patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.get_projects",
return_value=projects,
),
patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.update_projects_with_organizations",
return_value=None,
),
patch(
"os.path.abspath",
return_value="test_credentials_file",
),
patch(
"prowler.providers.gcp.gcp_provider.default",
return_value=(mocked_credentials, MagicMock()),
),
patch(
"prowler.providers.gcp.gcp_provider.discovery.build",
return_value=mocked_service,
),
):
with pytest.raises(Exception) as e:
GcpProvider(
@@ -433,6 +467,81 @@ class TestGCPProvider:
)
assert e.type == GCPNoAccesibleProjectsError
def test_setup_session_with_inactive_default_project(self):
mocked_credentials = MagicMock()
mocked_credentials.refresh.return_value = None
mocked_credentials._service_account_email = "test-service-account-email"
arguments = Namespace()
arguments.project_id = ["default_project", "active_project"]
arguments.excluded_project_id = []
arguments.organization_id = None
arguments.list_project_id = False
arguments.credentials_file = "test_credentials_file"
arguments.impersonate_service_account = ""
arguments.config_file = default_config_file_path
arguments.fixer_config = default_fixer_config_file_path
projects = {
"default_project": GCPProject(
number="55555555",
id="default_project",
name="default_project",
labels={"test": "value"},
lifecycle_state="DELETE_REQUESTED",
),
"active_project": GCPProject(
number="12345678",
id="active_project",
name="active_project",
labels={"test": "value"},
lifecycle_state="ACTIVE",
),
}
mocked_service = MagicMock()
mocked_service.projects.list.return_value = MagicMock(
execute=MagicMock(return_value={"projects": projects})
)
with (
patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.get_projects",
return_value=projects,
),
patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.update_projects_with_organizations",
return_value=None,
),
patch(
"os.path.abspath",
return_value="test_credentials_file",
),
patch(
"prowler.providers.gcp.gcp_provider.default",
return_value=(mocked_credentials, "default_project"),
),
patch(
"prowler.providers.gcp.gcp_provider.discovery.build",
return_value=mocked_service,
),
):
gcp_provider = GcpProvider(
arguments.organization_id,
arguments.project_id,
arguments.excluded_project_id,
arguments.credentials_file,
arguments.impersonate_service_account,
arguments.list_project_id,
arguments.config_file,
arguments.fixer_config,
client_id=None,
client_secret=None,
refresh_token=None,
)
assert gcp_provider.default_project_id == "active_project"
def test_print_credentials_default_options(self, capsys):
mocked_credentials = MagicMock()
@@ -464,21 +573,27 @@ class TestGCPProvider:
mocked_service.projects.list.return_value = MagicMock(
execute=MagicMock(return_value={"projects": projects})
)
with patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.get_projects",
return_value=projects,
), patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.update_projects_with_organizations",
return_value=None,
), patch(
"os.path.abspath",
return_value="test_credentials_file",
), patch(
"prowler.providers.gcp.gcp_provider.default",
return_value=(mocked_credentials, MagicMock()),
), patch(
"prowler.providers.gcp.gcp_provider.discovery.build",
return_value=mocked_service,
with (
patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.get_projects",
return_value=projects,
),
patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.update_projects_with_organizations",
return_value=None,
),
patch(
"os.path.abspath",
return_value="test_credentials_file",
),
patch(
"prowler.providers.gcp.gcp_provider.default",
return_value=(mocked_credentials, MagicMock()),
),
patch(
"prowler.providers.gcp.gcp_provider.discovery.build",
return_value=mocked_service,
),
):
gcp_provider = GcpProvider(
arguments.organization_id,
@@ -535,21 +650,27 @@ class TestGCPProvider:
mocked_service.projects.list.return_value = MagicMock(
execute=MagicMock(return_value={"projects": projects})
)
with patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.get_projects",
return_value=projects,
), patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.update_projects_with_organizations",
return_value=None,
), patch(
"os.path.abspath",
return_value="test_credentials_file",
), patch(
"prowler.providers.gcp.gcp_provider.default",
return_value=(mocked_credentials, MagicMock()),
), patch(
"prowler.providers.gcp.gcp_provider.discovery.build",
return_value=mocked_service,
with (
patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.get_projects",
return_value=projects,
),
patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.update_projects_with_organizations",
return_value=None,
),
patch(
"os.path.abspath",
return_value="test_credentials_file",
),
patch(
"prowler.providers.gcp.gcp_provider.default",
return_value=(mocked_credentials, MagicMock()),
),
patch(
"prowler.providers.gcp.gcp_provider.discovery.build",
return_value=mocked_service,
),
):
gcp_provider = GcpProvider(
arguments.organization_id,
@@ -614,21 +735,27 @@ class TestGCPProvider:
execute=MagicMock(return_value={"projects": projects})
)
with patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.get_projects",
return_value=projects,
), patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.update_projects_with_organizations",
return_value=None,
), patch(
"os.path.abspath",
return_value="test_credentials_file",
), patch(
"prowler.providers.gcp.gcp_provider.default",
return_value=(mocked_credentials, MagicMock()),
), patch(
"prowler.providers.gcp.gcp_provider.discovery.build",
return_value=mocked_service,
with (
patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.get_projects",
return_value=projects,
),
patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.update_projects_with_organizations",
return_value=None,
),
patch(
"os.path.abspath",
return_value="test_credentials_file",
),
patch(
"prowler.providers.gcp.gcp_provider.default",
return_value=(mocked_credentials, MagicMock()),
),
patch(
"prowler.providers.gcp.gcp_provider.discovery.build",
return_value=mocked_service,
),
):
gcp_provider = GcpProvider(
arguments.organization_id,
@@ -698,12 +825,15 @@ class TestGCPProvider:
execute=MagicMock(return_value={"projectId": project_id})
)
with patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.setup_session",
return_value=(None, project_id),
), patch(
"prowler.providers.gcp.gcp_provider.discovery.build",
return_value=mocked_service,
with (
patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.setup_session",
return_value=(None, project_id),
),
patch(
"prowler.providers.gcp.gcp_provider.discovery.build",
return_value=mocked_service,
),
):
output = GcpProvider.test_connection(
client_id="test-client-id",
@@ -730,16 +860,19 @@ class TestGCPProvider:
execute=MagicMock(return_value={"projects": projects})
)
with patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.setup_session",
return_value=(None, "test-valid-project"),
), patch(
"prowler.providers.gcp.gcp_provider.discovery.build",
return_value=mocked_service,
), patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.validate_project_id"
) as mock_validate_project_id:
with (
patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.setup_session",
return_value=(None, "test-valid-project"),
),
patch(
"prowler.providers.gcp.gcp_provider.discovery.build",
return_value=mocked_service,
),
patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.validate_project_id"
) as mock_validate_project_id,
):
mock_validate_project_id.side_effect = GCPInvalidProviderIdError(
"Invalid project ID"
)
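The new test_setup_session_with_inactive_default_project pins down the behaviour behind the "make sure default project is active" fix: when the project reported by application-default credentials is in DELETE_REQUESTED state, the provider is expected to fall back to a project whose lifecycle_state is ACTIVE, as the final assertion on default_project_id shows. The selection logic presumably amounts to something like the following sketch (the function name and signature are illustrative, not the provider's actual API; the GCPProject fields match the fixtures above):

    def pick_default_project_id(adc_project_id, projects):
        # Keep the ADC-reported project if it is usable
        default_project = projects.get(adc_project_id)
        if default_project is not None and default_project.lifecycle_state == "ACTIVE":
            return adc_project_id
        # Otherwise fall back to the first ACTIVE project in scope
        for project_id, project in projects.items():
            if project.lifecycle_state == "ACTIVE":
                return project_id
        # Nothing active: keep whatever ADC reported
        return adc_project_id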

View File

@@ -1,3 +1,4 @@
import datetime
from unittest import mock
from tests.providers.gcp.gcp_fixtures import (
@@ -239,7 +240,10 @@ class Test_kms_key_rotation_enabled:
project_id=GCP_PROJECT_ID,
key_ring=keyring.name,
location=keylocation.name,
next_rotation_time="2025-09-01T00:00:00Z",
# Next rotation time of now + 100 days
next_rotation_time=(
datetime.datetime.now() + datetime.timedelta(days=100)
).strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
members=["user:jane@example.com"],
)
]
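The KMS hunks swap hard-coded next_rotation_time values for dates computed from the current time, so the "now + 30 days" and "now + 100 days" scenarios no longer go stale as the calendar moves on. The computation is plain datetime arithmetic plus the timestamp format the fixtures already use; as a standalone sketch:

    import datetime

    def next_rotation_time_in(days):
        # Timestamp `days` days from now, in the same format as the fixtures above
        return (datetime.datetime.now() + datetime.timedelta(days=days)).strftime(
            "%Y-%m-%dT%H:%M:%S.%fZ"
        )

    print(next_rotation_time_in(30))   # "now + 30 days", as in some fixtures
    print(next_rotation_time_in(100))  # "now + 100 days", as in the others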
@@ -296,7 +300,10 @@ class Test_kms_key_rotation_enabled:
project_id=GCP_PROJECT_ID,
key_ring=keyring.name,
location=keylocation.name,
next_rotation_time="2024-09-01T00:00:00Z",
# Next rotation time of now + 30 days
next_rotation_time=(
datetime.datetime.now() + datetime.timedelta(days=30)
).strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
members=["user:jane@example.com"],
)
]
@@ -352,7 +359,10 @@ class Test_kms_key_rotation_enabled:
id="projects/123/locations/us-central1/keyRings/keyring1/cryptoKeys/key1",
project_id=GCP_PROJECT_ID,
rotation_period="8776000s",
next_rotation_time="2025-09-01T00:00:00Z",
# Next rotation time of now + 100 days
next_rotation_time=(
datetime.datetime.now() + datetime.timedelta(days=100)
).strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
key_ring=keyring.name,
location=keylocation.name,
members=["user:jane@example.com"],
@@ -412,7 +422,10 @@ class Test_kms_key_rotation_enabled:
id="projects/123/locations/us-central1/keyRings/keyring1/cryptoKeys/key1",
project_id=GCP_PROJECT_ID,
rotation_period="8776000s",
next_rotation_time="2024-09-01T00:00:00Z",
# Next rotation time of now + 30 days
next_rotation_time=(
datetime.datetime.now() + datetime.timedelta(days=30)
).strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
key_ring=keyring.name,
location=keylocation.name,
members=["user:jane@example.com"],
@@ -470,7 +483,10 @@ class Test_kms_key_rotation_enabled:
id="projects/123/locations/us-central1/keyRings/keyring1/cryptoKeys/key1",
project_id=GCP_PROJECT_ID,
rotation_period="7776000s",
next_rotation_time="2025-09-01T00:00:00Z",
# Next rotation time of now + 100 days
next_rotation_time=(
datetime.datetime.now() + datetime.timedelta(days=100)
).strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
key_ring=keyring.name,
location=keylocation.name,
members=["user:jane@example.com"],
@@ -530,7 +546,10 @@ class Test_kms_key_rotation_enabled:
id="projects/123/locations/us-central1/keyRings/keyring1/cryptoKeys/key1",
project_id=GCP_PROJECT_ID,
rotation_period="7776000s",
next_rotation_time="2024-09-01T00:00:00Z",
# Next rotation time of now + 30 days
next_rotation_time=(
datetime.datetime.now() + datetime.timedelta(days=30)
).strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
key_ring=keyring.name,
location=keylocation.name,
members=["user:jane@example.com"],
@@ -588,7 +607,10 @@ class Test_kms_key_rotation_enabled:
id="projects/123/locations/us-central1/keyRings/keyring1/cryptoKeys/key1",
project_id=GCP_PROJECT_ID,
rotation_period="7776000s",
next_rotation_time="2025-07-06T22:00:00.561275Z",
# Next rotation time of now + 100 days
next_rotation_time=(
datetime.datetime.now() + datetime.timedelta(days=100)
).strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
key_ring=keyring.name,
location=keylocation.name,
members=["user:jane@example.com"],