Compare commits

..

163 Commits

Author SHA1 Message Date
Pepe Fagoaga
30a1cd83dc fix(ocsf): Improve mapping based on owner's recommendation 2024-09-05 08:38:24 +02:00
Pedro Martín
a975e96a45 feat(compliance): add method list_compliance_requirements (#4890)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-09-04 20:35:26 +02:00
Pedro Martín
3933440a08 feat(secrets): improve detect secrets checks and add config (#4915) 2024-09-04 16:54:55 +02:00
Prowler Bot
36e7bf0912 chore(regions_update): Changes in regions for AWS services (#4929)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-09-04 11:45:59 +02:00
dependabot[bot]
897e25dd3c chore(deps): bump cryptography from 43.0.0 to 43.0.1 (#4928)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-04 09:46:58 +02:00
dependabot[bot]
f4a8059f9b chore(deps): bump cryptography from 43.0.0 to 43.0.1 (#4923)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-04 08:54:56 +02:00
dependabot[bot]
71d844c101 chore(deps): bump peter-evans/create-pull-request from 6 to 7 (#4926)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-04 08:53:26 +02:00
Pedro Martín
c2b2754926 feat(gcp): add custom exceptions class (#4908) 2024-09-03 15:56:49 +02:00
Pedro Martín
cfd4019281 fix(aws): raise ArgumentTypeError for parser (#4921) 2024-09-03 13:47:43 +02:00
dependabot[bot]
989fce300d chore(deps-dev): bump pylint from 3.2.6 to 3.2.7 (#4920)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-03 07:21:52 +02:00
Amogh Bantwal
70fdc2693e feat(html): Add number of muted findings in HTML report #4703 (#4895) 2024-09-02 10:13:06 +02:00
Rubén De la Torre Vico
9797c11152 chore(prowler): change all methods from services from format double underscore to single underscore (#4910) 2024-09-02 10:07:21 +02:00
Pedro Martín
007c1febf7 fix(metadata): change description from documentdb_cluster_deletion_protection (#4909) 2024-09-02 09:59:29 +02:00
Pepe Fagoaga
163027a49d chore(aws): Remove token from log line (#4903) 2024-08-30 11:50:18 +02:00
Pepe Fagoaga
80c4802b36 chore(aws_mutelist): Add more Control Tower resources and tests (#4900) 2024-08-30 10:13:00 +02:00
dependabot[bot]
285eb45673 chore(deps): bump trufflesecurity/trufflehog from 3.81.9 to 3.81.10 (#4898)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-30 09:44:12 +02:00
dependabot[bot]
5c2f2ee3b3 chore(deps-dev): bump safety from 3.2.6 to 3.2.7 (#4899)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-30 09:43:58 +02:00
Pedro Martín
1f83e4fe7b chore(pull-request): add check for backport (#4901) 2024-08-30 09:42:52 +02:00
Pedro Martín
b29f99441a feat(aws): add custom exceptions class (#4847) 2024-08-29 19:08:47 +02:00
Pedro Martín
82c065bff4 feat(compliance): rename Compliance class and add list_compliance (#4883) 2024-08-29 16:55:22 +02:00
Pedro Martín
168d44d14b docs(fixers): improve docs about fixers (#4889) 2024-08-29 14:15:31 +02:00
dependabot[bot]
910a72140b chore(deps): bump google-api-python-client from 2.142.0 to 2.143.0 (#4884)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-29 07:56:38 +02:00
Prowler Bot
d988877173 chore(regions_update): Changes in regions for AWS services (#4880)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-08-28 11:45:12 +02:00
Toni de la Fuente
4fd673fd7c chore(readme): Update Slack invite link (#4875) 2024-08-27 21:44:12 +02:00
Pepe Fagoaga
1bff2451e5 chore(release): Remove unused step (#4874) 2024-08-27 16:40:15 +02:00
Pepe Fagoaga
0921daf18b chore: remove not used variable (#4873) 2024-08-27 16:31:13 +02:00
Pedro Martín
7ff80dbb8f fix(rds): get the db_instances values (#4866) 2024-08-27 13:22:54 +02:00
dependabot[bot]
f487bda1fe chore(deps): bump numpy from 2.0.1 to 2.0.2 (#4869)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-27 08:05:57 +02:00
Pepe Fagoaga
d61e999b8f chore(check_metadata): Rename to CheckMetadata (#4864) 2024-08-26 15:25:19 +02:00
Rubén De la Torre Vico
bcb63d0b2d feat(elb): add new check elb_is_in_multiple_az (#4829)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-08-26 13:27:08 +02:00
Pepe Fagoaga
71f50422ad chore(aws-region): Use Prowler Bot (#4863) 2024-08-26 11:04:02 +02:00
Rubén De la Torre Vico
2b49aa8e89 chore(readme): Update the number of AWS checks (#4860) 2024-08-26 10:09:54 +02:00
Pedro Martín
921b6b1e85 fix(aws): enhance check cloudformation_stack_outputs_find_secrets (#4859) 2024-08-26 10:08:19 +02:00
dependabot[bot]
fc155e8368 chore(deps): bump azure-mgmt-compute from 32.0.0 to 33.0.0 (#4856)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-26 08:01:31 +02:00
Rubén De la Torre Vico
79f1cf89cf feat(elb): add new check elb_cross_zone_load_balancing_enabled (#4818)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-08-23 10:09:32 -04:00
Pedro Martín
496d4daf01 refactor(azure): refactor azure provider (#4653)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-08-23 10:01:35 -04:00
Daniel Barranquero
559c0d4e0b chore(aws): Change RDS instance type from list to dict (#4851) 2024-08-23 09:26:53 -04:00
Pedro Martín
2fda2388bb refactor(aws): Refactor provider (#4808)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-08-23 09:19:05 -04:00
Pepe Fagoaga
0f79312c33 chore(backport): Use Prowler-Bot PAT (#4855) 2024-08-23 09:18:24 -04:00
Daniel Barranquero
472aea6a91 feat(aws): Add new check to ensure RDS db clusters copy tags to snapshots (#4846) 2024-08-23 09:09:52 -04:00
Pedro Martín
0d18406f80 refactor(kubernetes): refactor provider (#4805)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-08-23 14:22:03 +02:00
Pedro Martín
05da5d1796 refactor(gcp): refactor GCP provider (#4790)
Co-authored-by: Sergio <sergio@prowler.com>
2024-08-23 07:37:02 -04:00
Sergio Garcia
fb449cede8 fix(aws): handle AWS key-only tags (#4845) 2024-08-23 13:02:59 +02:00
Pepe Fagoaga
61df2ce0c2 chore(regions_update): Changes in regions for AWS services. (#4849)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-08-23 11:45:45 +02:00
Pedro Martín
b7e20344a8 docs(is_item_matched): update docstrings for method (#4836)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-08-23 10:15:15 +02:00
Sergio Garcia
c2552ee508 fix: handle empty input regions (#4841) 2024-08-22 13:54:18 -04:00
Hugo Pereira Brito
57f1fa5bfa feat(s3): add s3_bucket_lifecycle_enabled check (#4801)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-08-22 12:24:59 -04:00
Rubén De la Torre Vico
0b238243b1 feat(elbv2): add new check elbv2_is_in_multiple_az (#4800)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-08-22 11:08:49 -04:00
Sergio Garcia
df405254c6 fix(aws): enhance resource arn filtering (#4821) 2024-08-22 16:48:25 +02:00
Daniel Barranquero
460acf2860 feat(aws): Add new RDS check to verify that db instances copy tags to snapshots (#4806) 2024-08-22 10:44:26 -04:00
Rubén De la Torre Vico
dec3e652c5 feat(IAM): add new check iam_group_administrator_access_policy (#4831)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-08-22 10:14:45 -04:00
Mario Rodriguez Lopez
fc03188bfb feat(ec2): Client VPN Endpoints Should Have Client Connection Logging Enabled (#4804)
Co-authored-by: Sergio <sergio@prowler.com>
2024-08-22 09:57:33 -04:00
Mario Rodriguez Lopez
ff244138d9 feat(ec2): Ensure automatic acceptance of VPC attachment requests is disabled (#4765) 2024-08-22 08:26:01 -04:00
Sergio Garcia
903f9c576f chore(test): improve iam_root_hardware_mfa_enabled tests (#4833) 2024-08-22 08:08:25 -04:00
Daniel Barranquero
0005f86a5f feat(aws): Add new RDS check to ensure db clusters are configured for multiple availability zones (#4781) 2024-08-22 07:49:59 -04:00
Daniel Barranquero
a2144ad353 chore(rds): Revert changes on inherited instance checks (#4827) 2024-08-22 07:33:25 -04:00
Pepe Fagoaga
5f075b296d chore(regions_update): Changes in regions for AWS services. (#4826)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
Co-authored-by: Pedro Martín <pedromarting3@gmail.com>
2024-08-22 13:21:45 +02:00
dependabot[bot]
0c7b960e08 chore(deps-dev): bump safety from 3.2.5 to 3.2.6 (#4825)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-22 08:26:58 +02:00
dependabot[bot]
c65e91f834 chore(deps): bump tj-actions/changed-files from 44 to 45 (#4822)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-22 08:25:43 +02:00
Pedro Martín
5876fea163 fix(outputs): refactor unroll_tags to use str as tags (#4817) 2024-08-21 12:40:46 -04:00
Pepe Fagoaga
a557d62d84 chore(regions_update): Changes in regions for AWS services. (#4814)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-08-21 13:07:03 +02:00
dependabot[bot]
f25319f3f6 chore(deps): bump azure-mgmt-web from 7.3.0 to 7.3.1 (#4813)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-21 11:11:54 +02:00
dependabot[bot]
1e02b05d2d chore(deps): bump google-api-python-client from 2.141.0 to 2.142.0 (#4812)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-21 08:33:53 +02:00
Rubén De la Torre Vico
78042063cb feat(iam): add new check to ensure user does not have policies with admin access (#4802) 2024-08-20 11:08:51 -04:00
Mario Rodriguez Lopez
8129b174f1 feat(CodeBuild): Ensure source repository URLs do not contain sensitive credentials (#4731)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-08-20 09:44:55 -04:00
Daniel Barranquero
3f78fb4220 feat(aws): Add new RDS check for deletion protection enabled on clusters (#4738) 2024-08-20 09:07:11 -04:00
Pedro Martín
e11bb478d6 fix(mutelist): change logic for tags in aws mutelist (#4786) 2024-08-20 07:38:06 -04:00
dependabot[bot]
dec5fb6428 chore(deps-dev): bump mkdocs-git-revision-date-localized-plugin from 1.2.6 to 1.2.7 (#4796)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-20 09:34:40 +02:00
dependabot[bot]
256ccfea79 chore(deps-dev): bump moto from 5.0.12 to 5.0.13 (#4795)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-20 08:16:18 +02:00
Rubén De la Torre Vico
1a8bc14587 feat(awslambda): New check to ensure that a function is inside VPC (#4783)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-08-19 14:22:21 -04:00
Rubén De la Torre Vico
8483486095 chore(elbv2): Add SecurityHub link to elbv2_ssl_listeners metadata (#4787) 2024-08-19 13:06:34 -04:00
Rubén De la Torre Vico
7aaecbabab chore(elbv2): add SecurityHub link to elbv2_desync_mitigation_mode metadata (#4791) 2024-08-19 13:04:48 -04:00
Rubén De la Torre Vico
5cc9554c23 chore(awslambda): Enhance function public access check called from other resource (#4679)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-08-19 13:03:30 -04:00
Hugo Pereira Brito
5d42ae6e6f feat(s3): add s3_bucket_cross_region_replication check (#4761)
Co-authored-by: Sergio <sergio@prowler.com>
2024-08-19 12:42:42 -04:00
Sergio Garcia
38b73fb0c0 feat(kubernetes): add a test_connection method (#4684) 2024-08-19 12:12:00 -04:00
Sergio Garcia
84a76f4535 feat(gcp): add a test_connection method (#4616)
Co-authored-by: Rubén De la Torre Vico <rubendltv22@gmail.com>
2024-08-19 12:11:20 -04:00
Rubén De la Torre Vico
a126fd82b3 fix(ec2): Manage UnicodeDecodeError when reading user data (#4785) 2024-08-19 11:34:39 -04:00
Rubén De la Torre Vico
bf139138e0 chore(azure): Fix CIS 2.1 mapping (#4760) 2024-08-19 11:44:34 +02:00
dependabot[bot]
0fcf4243f5 chore(deps): bump boto3 from 1.34.160 to 1.34.162 (#4778)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-19 09:14:39 +02:00
dependabot[bot]
bbb0248bc1 chore(deps): bump google-api-python-client from 2.140.0 to 2.141.0 (#4751)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-16 12:11:28 -04:00
Sergio Garcia
e6581255c2 fix(iam): update logic of Root Hardware MFA check (#4726) 2024-08-16 11:49:30 -04:00
Sergio Garcia
717932ae26 fix(aws): run Prowler as IAM Root or Federated User (#4712) 2024-08-16 11:49:14 -04:00
Sergio Garcia
3f56731e6d fix(version): update version flag logic (#4688) 2024-08-16 11:48:57 -04:00
Pepe Fagoaga
0f837f658e chore(regions_update): Changes in regions for AWS services. (#4753)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-08-16 11:45:12 -04:00
Sergio Garcia
b70977163e fix(ecr): change log level of non-scanned images (#4747) 2024-08-16 11:43:04 -04:00
Sergio Garcia
98fc624010 fix(ecr): handle non-existing findingSeverityCounts key (#4746) 2024-08-16 11:42:53 -04:00
dependabot[bot]
ccb755340f chore(deps): bump botocore from 1.34.160 to 1.34.162 (#4758)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-16 11:28:04 -04:00
Mario Rodriguez Lopez
49ff901195 feat(EC2): Add new check for security group port restrictions (#4594) 2024-08-16 09:43:00 -04:00
dependabot[bot]
e7d0d49809 chore(deps): bump trufflesecurity/trufflehog from 3.81.8 to 3.81.9 (#4756)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-16 09:35:08 -04:00
Hugo Pereira Brito
47bb97961c chore(cloudtrail): add remediation link to check cloudtrail_s3_dataevents_read_enabled (#4764) 2024-08-16 09:33:09 -04:00
Hugo Pereira Brito
1178317567 chore(cloudtrail): add remediation link to check cloudtrail_s3_dataevents_write_enabled (#4762) 2024-08-16 09:32:35 -04:00
dependabot[bot]
edd0dd1080 chore(deps): bump boto3 from 1.34.159 to 1.34.160 (#4750)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-16 09:18:48 -04:00
Hugo Pereira Brito
ae1b114a13 refactor(s3): Changed buckets variable type from list to dict (#4742) 2024-08-14 10:28:06 -04:00
dependabot[bot]
3c9c28f351 chore(deps): bump botocore from 1.34.159 to 1.34.160 (#4735)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-14 10:20:15 -04:00
dependabot[bot]
93e6751e35 chore(deps): bump boto3 from 1.34.158 to 1.34.159 (#4734)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-14 09:30:11 -04:00
Daniel Barranquero
680781656b feat(aws): Add new RDS check to verify that cluster minor version upgrade is enabled (#4725) 2024-08-14 09:04:27 -04:00
Pepe Fagoaga
21382efd07 chore(regions_update): Changes in regions for AWS services. (#4739)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-08-14 08:31:50 -04:00
Hugo Pereira Brito
097e61ab9d feat(elasticache): Ensure Redis Cache Clusters Automatically Install Minor Updates (#4699) 2024-08-14 08:28:16 -04:00
Daniel Barranquero
52d83bd83b feat(aws): Split the checks that mix RDS Instances and Clusters (#4730) 2024-08-13 10:16:50 -04:00
dependabot[bot]
49cfe15abc chore(deps): bump botocore from 1.34.158 to 1.34.159 (#4728)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-13 09:03:15 -04:00
Mario Rodriguez Lopez
0ef30c655a fix(ACM): Change check logic to scan only in use certificates (#4732) 2024-08-13 08:39:27 -04:00
Daniel Barranquero
e2d211c188 feat(aws): Add new Neptune check for cluster snapshot visibility (#4709) 2024-08-13 08:27:35 -04:00
Daniel Barranquero
62a1d91869 feat(aws): Add new CodeBuild check to validate environment variables (#4632)
Co-authored-by: Sergio <sergio@prowler.com>
2024-08-13 08:15:45 -04:00
dependabot[bot]
8c1347323e chore(deps): bump boto3 from 1.34.157 to 1.34.158 (#4727)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-13 08:13:00 -04:00
Mario Rodriguez Lopez
cb807e4aed feat(DocumentDB): Add new DocumentDB check for cluster snapshot visibility (#4702) 2024-08-12 14:05:04 -04:00
dependabot[bot]
bcc8d5f1fe chore(deps-dev): bump safety from 3.2.4 to 3.2.5 (#4722)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Sergio <sergio@prowler.com>
2024-08-12 10:03:00 -04:00
dependabot[bot]
59acd303fb chore(deps): bump botocore from 1.34.157 to 1.34.158 (#4721)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-12 08:40:42 -04:00
dependabot[bot]
0675cc8fdb chore(deps): bump boto3 from 1.34.156 to 1.34.157 (#4719)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-12 08:02:17 -04:00
dependabot[bot]
ed27491118 chore(deps): bump trufflesecurity/trufflehog from 3.81.7 to 3.81.8 (#4720)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-12 07:59:29 -04:00
dependabot[bot]
abb28af68e chore(deps): bump aiohttp from 3.9.5 to 3.10.2 (#4713)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-09 14:52:42 -04:00
Rubén De la Torre Vico
18885d0cd7 chore(ec2): Change security groups to dict (#4700) 2024-08-09 14:40:34 -04:00
Pedro Martín
ca56ac4e77 feat(azure): add test_connection method (#4615)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-08-09 14:38:12 -04:00
Pedro Martín
8f2b39b3ce fix(iam): handle no arn serial numbers for MFA devices (#4697)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-08-09 12:57:34 -04:00
Pepe Fagoaga
761eebac1e feat(aws): Add a test_connection method (#4563)
Co-authored-by: pedrooot <pedromarting3@gmail.com>
2024-08-09 12:01:40 +02:00
Pepe Fagoaga
8bdff0d681 fix(backport): Workaround not to fail if no backport is needed (#4707) 2024-08-09 09:56:02 +02:00
dependabot[bot]
55e0656375 chore(deps): bump botocore from 1.34.156 to 1.34.157 (#4704)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-09 07:56:26 +02:00
dependabot[bot]
e666b66ec0 chore(deps): bump boto3 from 1.34.154 to 1.34.156 (#4698)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-08 11:54:40 +02:00
Pedro Martín
cdb4f73803 docs(developer-guide): add info about docstrings (#4701) 2024-08-08 11:41:32 +02:00
dependabot[bot]
b4c7345124 chore(deps): bump botocore from 1.34.155 to 1.34.156 (#4694)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-08 10:49:13 +02:00
dependabot[bot]
af8cc37eea chore(deps): bump trufflesecurity/trufflehog from 3.81.6 to 3.81.7 (#4693)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-08 10:48:41 +02:00
Sergio Garcia
28bed98ee4 chore(version): update version logic in Prowler (#4654) 2024-08-07 18:15:10 +02:00
Sergio Garcia
3d39eb7db6 chore(backport): update backport PR title (#4686)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-08-07 16:59:47 +02:00
Pepe Fagoaga
2c5f2e9f5c chore(labeler): Run also for v4.* (#4687) 2024-08-07 10:30:49 -04:00
Hugo Pereira Brito
5ce54e5605 feat(aws): Add new S3 check for public access block configuration in access points (#4608) 2024-08-07 10:23:12 -04:00
Daniel Barranquero
6c029a9d7d feat(aws): Add new KMS check to prevent unintentional key deletion (#4595)
Co-authored-by: Sergio <sergio@prowler.com>
2024-08-07 09:15:22 -04:00
Sergio Garcia
96f893c3ec chore(version): update master version (#4681) 2024-08-07 14:53:45 +02:00
Pepe Fagoaga
f0047cf5a7 chore(actions): Run for v4.* branch (#4682) 2024-08-07 14:11:38 +02:00
Mario Rodriguez Lopez
1b18aef0f0 feat(acm): Add new check for insecure algorithms in certificates (#4551)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-08-07 08:00:24 -04:00
dependabot[bot]
80e13bffa2 chore(deps): bump botocore from 1.34.154 to 1.34.155 (#4665)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-07 11:33:45 +02:00
dependabot[bot]
384d16749c chore(deps): bump azure-storage-blob from 12.21.0 to 12.22.0 (#4664)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-07 11:01:14 +02:00
Pepe Fagoaga
9c4ba1183b chore(regions): Update labels for backporting (#4678) 2024-08-07 11:00:41 +02:00
Pepe Fagoaga
40a88e07d1 chore(backport): Automate all the things! (#4669) 2024-08-07 10:40:14 +02:00
dependabot[bot]
692ed760e0 chore(deps): bump google-api-python-client from 2.139.0 to 2.140.0 (#4666)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-07 10:26:48 +02:00
dependabot[bot]
6c3e451f32 chore(deps): bump boto3 from 1.34.152 to 1.34.154 (#4663)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-07 09:01:28 +02:00
dependabot[bot]
24f511b567 chore(deps): bump trufflesecurity/trufflehog from 3.81.5 to 3.81.6 (#4662)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-07 09:00:56 +02:00
Sergio Garcia
89c6652bd6 fix(tags): handle AWS dictionary type tags (#4656) 2024-08-07 08:34:57 +02:00
dependabot[bot]
8aca456285 chore(deps-dev): bump moto from 5.0.11 to 5.0.12 (#4642)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: Sergio <sergio@prowler.com>
2024-08-06 14:59:29 -04:00
Rubén De la Torre Vico
824a465667 test(awslambda): Cover possible checks with moto instead MagicMock (#4609)
Co-authored-by: Sergio <sergio@prowler.com>
2024-08-06 13:40:51 -04:00
Amogh Bantwal
086c203e6b feat(aws) Add check to make sure EKS clusters have a supported version (#4604)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-08-06 13:40:05 -04:00
dependabot[bot]
f746a9e742 chore(deps-dev): bump flake8 from 7.1.0 to 7.1.1 (#4643)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-06 09:19:05 +02:00
Pepe Fagoaga
90810d9098 chore: change SaaS for Prowler (#4651) 2024-08-06 08:56:04 +02:00
Pepe Fagoaga
75b3f52309 docs(mutelist): Add service_* documentation (#4650) 2024-08-06 08:55:55 +02:00
dependabot[bot]
8ecb4696d4 chore(deps): bump botocore from 1.34.152 to 1.34.154 (#4641)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-06 08:44:43 +02:00
dependabot[bot]
7b22c9c97b chore(deps): bump trufflesecurity/trufflehog from 3.81.4 to 3.81.5 (#4645)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-06 08:24:27 +02:00
dependabot[bot]
84f0542b98 chore(deps-dev): bump coverage from 7.6.0 to 7.6.1 (#4640)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-06 08:07:24 +02:00
Rubén De la Torre Vico
8faa40dfb6 feat(opensearch): Add domain inside VPC case for public domain check (#4570) 2024-08-05 13:04:49 -04:00
Pepe Fagoaga
47f7555d05 refactor(mutelist): Remove re.match and improve docs (#4637)
Co-authored-by: Sergio <sergio@prowler.com>
2024-08-05 12:59:30 -04:00
Pedro Martín
96d9cbd8af fix(gcp): check cloudsql sslMode (#4635) 2024-08-05 12:12:00 -04:00
Pedro Martín
c8bc54aa48 fix(gcp): check next rotation time in KMS keys (#4633) 2024-08-05 11:31:38 -04:00
Rubén De la Torre Vico
fad0b8995a chore(aws): Convert ELB and ELBv2 attributes to dictionaries (#4575)
Co-authored-by: Sergio <sergio@prowler.com>
2024-08-05 11:14:19 -04:00
dependabot[bot]
d4b6fa27e2 chore(deps): bump msgraph-sdk from 1.5.3 to 1.5.4 (#4629)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-05 15:02:49 +02:00
dependabot[bot]
a37723fd32 chore(deps): bump boto3 from 1.34.151 to 1.34.152 (#4628)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-05 08:14:55 -04:00
Pedro Martín
fc5eefe532 fix(scan_test): change resource_tags to a dict (#4631) 2024-08-05 10:02:41 +02:00
Pedro Martín
ffd9b2a2f6 chore(scan-class): add new scan class (#4564)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-08-05 08:21:13 +02:00
dependabot[bot]
112f48ac08 chore(deps-dev): bump black from 24.4.2 to 24.8.0 (#4627)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-05 08:19:54 +02:00
Sergio Garcia
95ec3d91b4 refactor(tags): convert tags to a dictionary (#4598)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-08-05 08:17:43 +02:00
Sergio Garcia
b0709d08cd fix(gcp): use KMS key id in checks (#4610) 2024-08-05 08:16:56 +02:00
dependabot[bot]
a0e3cb87a4 chore(deps): bump trufflesecurity/trufflehog from 3.80.5 to 3.81.4 (#4625)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-05 08:15:49 +02:00
Pepe Fagoaga
1b9cc9e3db chore(regions_update): Changes in regions for AWS services. (#4630)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-08-05 08:14:49 +02:00
Jon Young
d9fb67bc43 docs(Tutorials): include volume option when running dashboard in docker (#4620) 2024-08-05 08:06:24 +02:00
dependabot[bot]
a79022dce8 chore(deps): bump botocore from 1.34.151 to 1.34.152 (#4611)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-02 08:03:55 +02:00
dependabot[bot]
0a2ce690f4 chore(deps): bump trufflesecurity/trufflehog from 3.80.4 to 3.80.5 (#4612)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-02 07:57:22 +02:00
Pedro Martín
bbc51114b0 fix(sns): add condition to sns topics (#4498)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-08-01 11:54:36 -04:00
555 changed files with 15147 additions and 3795 deletions

14
.backportrc.json Normal file
View File

@@ -0,0 +1,14 @@
{
"repoOwner": "prowler-cloud",
"repoName": "prowler",
"targetPRLabels": [
"backport"
],
"sourcePRLabels": [
"was-backported"
],
"copySourcePRLabels": false,
"copySourcePRReviewers": true,
"prTitle": "{{sourcePullRequest.title}}",
"commitConflicts": true
}

View File

@@ -14,6 +14,7 @@ Please include a summary of the change and which issue is fixed. List any depend
- If so, do we need to update permissions for the provider? Please review this carefully.
- [ ] Review if the code is being covered by tests.
- [ ] Review if code is being documented following this specification https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings
- [ ] Review if backport is needed.
### License

42
.github/workflows/backport.yml vendored Normal file
View File

@@ -0,0 +1,42 @@
name: Automatic Backport
on:
pull_request_target:
branches: ['master']
types: ['labeled', 'closed']
jobs:
backport:
name: Backport PR
if: github.event.pull_request.merged == true && !(contains(github.event.pull_request.labels.*.name, 'backport'))
runs-on: ubuntu-latest
permissions:
id-token: write
pull-requests: write
contents: write
steps:
# Workaround not to fail the workflow if the PR does not need a backport
# https://github.com/sorenlouv/backport-github-action/issues/127#issuecomment-2258561266
- name: Check for backport labels
id: check_labels
run: |-
labels='${{ toJSON(github.event.pull_request.labels.*.name) }}'
echo "$labels"
matched=$(echo "${labels}" | jq '. | map(select(startswith("backport-to-"))) | length')
echo "matched=$matched"
echo "matched=$matched" >> $GITHUB_OUTPUT
- name: Backport Action
if: fromJSON(steps.check_labels.outputs.matched) > 0
uses: sorenlouv/backport-github-action@v9.5.1
with:
github_token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
auto_backport_label_prefix: backport-to-
- name: Info log
if: ${{ success() && fromJSON(steps.check_labels.outputs.matched) > 0 }}
run: cat ~/.backport/backport.info.log
- name: Debug log
if: ${{ failure() && fromJSON(steps.check_labels.outputs.matched) > 0 }}
run: cat ~/.backport/backport.debug.log

View File

@@ -16,9 +16,9 @@ jobs:
name: Documentation Link
runs-on: ubuntu-latest
steps:
- name: Leave PR comment with the SaaS Documentation URI
- name: Leave PR comment with the Prowler Documentation URI
uses: peter-evans/create-or-update-comment@v4
with:
issue-number: ${{ env.PR_NUMBER }}
body: |
You can check the documentation for this PR here -> [SaaS Documentation](https://prowler-prowler-docs--${{ env.PR_NUMBER }}.com.readthedocs.build/projects/prowler-open-source/en/${{ env.PR_NUMBER }}/)
You can check the documentation for this PR here -> [Prowler Documentation](https://prowler-prowler-docs--${{ env.PR_NUMBER }}.com.readthedocs.build/projects/prowler-open-source/en/${{ env.PR_NUMBER }}/)

View File

@@ -11,7 +11,7 @@ jobs:
with:
fetch-depth: 0
- name: TruffleHog OSS
uses: trufflesecurity/trufflehog@3.80.4
uses: trufflesecurity/trufflehog@v3.81.10
with:
path: ./
base: ${{ github.event.repository.default_branch }}

View File

@@ -5,6 +5,7 @@ on:
branches:
- "master"
- "v3"
- "v4.*"
jobs:
labeler:

View File

@@ -22,7 +22,7 @@ jobs:
- uses: actions/checkout@v4
- name: Test if changes are in not ignored paths
id: are-non-ignored-files-changed
uses: tj-actions/changed-files@v44
uses: tj-actions/changed-files@v45
with:
files: ./**
files_ignore: |
@@ -31,6 +31,7 @@ jobs:
docs/**
permissions/**
mkdocs.yml
.backportrc.json
- name: Install poetry
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |

View File

@@ -8,8 +8,6 @@ env:
RELEASE_TAG: ${{ github.event.release.tag_name }}
PYTHON_VERSION: 3.11
CACHE: "poetry"
# TODO: create a bot user for this kind of tasks, like prowler-bot
GIT_COMMITTER_EMAIL: "sergio@prowler.com"
jobs:
release-prowler-job:
@@ -47,14 +45,6 @@ jobs:
python-version: ${{ env.PYTHON_VERSION }}
cache: ${{ env.CACHE }}
- name: Import GPG key
uses: crazy-max/ghaction-import-gpg@v6
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
git_user_signingkey: true
git_commit_gpgsign: true
- name: Build Prowler package
run: |
poetry build

View File

@@ -50,13 +50,13 @@ jobs:
# Create pull request
- name: Create Pull Request
uses: peter-evans/create-pull-request@v6
uses: peter-evans/create-pull-request@v7
with:
token: ${{ secrets.PROWLER_ACCESS_TOKEN }}
commit-message: "feat(regions_update): Update regions for AWS services."
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
commit-message: "feat(regions_update): Update regions for AWS services"
branch: "aws-services-regions-updated-${{ github.sha }}"
labels: "status/waiting-for-revision, severity/low, provider/aws, backport-v3"
title: "chore(regions_update): Changes in regions for AWS services."
labels: "status/waiting-for-revision, severity/low, provider/aws, backport-to-v3"
title: "chore(regions_update): Changes in regions for AWS services"
body: |
### Description

View File

@@ -12,7 +12,7 @@
<p align="center">
<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog"><img width="30" height="30" alt="Prowler community on Slack" src="https://github.com/prowler-cloud/prowler/assets/38561120/3c8b4ec5-6849-41a5-b5e1-52bbb94af73a"></a>
<br>
<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog">Join our Prowler community!</a>
<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-2oinmgmw6-cl7gOrljSEqo_aoripVPFA">Join our Prowler community!</a>
</p>
<hr>
<p align="center">
@@ -63,7 +63,7 @@ It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, Fe
| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) |
|---|---|---|---|---|
| AWS | 385 | 67 -> `prowler aws --list-services` | 28 -> `prowler aws --list-compliance` | 7 -> `prowler aws --list-categories` |
| AWS | 409 | 67 -> `prowler aws --list-services` | 28 -> `prowler aws --list-compliance` | 7 -> `prowler aws --list-categories` |
| GCP | 77 | 13 -> `prowler gcp --list-services` | 1 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories`|
| Azure | 135 | 16 -> `prowler azure --list-services` | 2 -> `prowler azure --list-compliance` | 2 -> `prowler azure --list-categories` |
| Kubernetes | 83 | 7 -> `prowler kubernetes --list-services` | 1 -> `prowler kubernetes --list-compliance` | 7 -> `prowler kubernetes --list-categories` |

View File

@@ -222,7 +222,7 @@ class ec2_securitygroup_with_many_ingress_egress_rules(Check):
max_security_group_rules = ec2_client.audit_config.get(
"max_security_group_rules", 50
)
for security_group in ec2_client.security_groups:
for security_group_arn, security_group in ec2_client.security_groups.items():
```
```yaml title="config.yaml"

View File

@@ -50,6 +50,8 @@ You can see all dependencies in file `pyproject.toml`.
Moreover, you will need to install the latest version of [`TruffleHog`](https://github.com/trufflesecurity/trufflehog) to check for secrets in the code. You can install it using the official installation guide [here](https://github.com/trufflesecurity/trufflehog?tab=readme-ov-file#floppy_disk-installation).
Additionally, please ensure to follow the code documentation practices outlined in this guide: [Google Python Style Guide - Comments and Docstrings](https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings).
???+ note
If you have any trouble when committing to the Prowler repository, add the `--no-verify` flag to the `git commit` command.

View File

@@ -592,7 +592,7 @@ is following the actual format, add one function where the client is passed to b
`mock_api_<endpoint>_calls` (*endpoint* refers to the first attribute pointed after *client*).
In the example of BigQuery the function is called `mock_api_dataset_calls`. And inside of this function we found an assignation to
be used in the `__get_datasets__` method in BigQuery class:
be used in the `_get_datasets` method in BigQuery class:
```python
# Mocking datasets
@@ -765,7 +765,7 @@ from tests.providers.azure.azure_fixtures import (
set_mocked_azure_provider,
)
# Function to mock the service function __get_components__; its task is to return a possible value that the real function could return
# Function to mock the service function _get_components; its task is to return a possible value that the real function could return
def mock_appinsights_get_components(_):
return {
AZURE_SUBSCRIPTION_ID: {
@@ -779,12 +779,12 @@ def mock_appinsights_get_components(_):
# Patch decorator to use the mocked function instead the function with the real API call
@patch(
"prowler.providers.azure.services.appinsights.appinsights_service.AppInsights.__get_components__",
"prowler.providers.azure.services.appinsights.appinsights_service.AppInsights._get_components",
new=mock_appinsights_get_components,
)
class Test_AppInsights_Service:
# Mandatory test for every service, this method test the instance of the client is correct
def test__get_client__(self):
def test_get_client(self):
app_insights = AppInsights(set_mocked_azure_provider())
assert (
app_insights.clients[AZURE_SUBSCRIPTION_ID].__class__.__name__
@@ -794,8 +794,8 @@ class Test_AppInsights_Service:
def test__get_subscriptions__(self):
app_insights = AppInsights(set_mocked_azure_provider())
assert app_insights.subscriptions.__class__.__name__ == "dict"
# Test for the function __get_components__, inside this client is used the mocked function
def test__get_components__(self):
# Test for the function _get_components, inside this client is used the mocked function
def test_get_components(self):
appinsights = AppInsights(set_mocked_azure_provider())
assert len(appinsights.components) == 1
assert (

View File

@@ -18,6 +18,7 @@ The following list includes all the AWS checks with configurable variables that
| `ec2_elastic_ip_shodan` | `shodan_api_key` | String |
| `ec2_securitygroup_with_many_ingress_egress_rules` | `max_security_group_rules` | Integer |
| `ec2_instance_older_than_specific_days` | `max_ec2_instance_age_in_days` | Integer |
| `ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports`| `ec2_sg_high_risk_ports` | List of Integer |
| `vpc_endpoint_connections_trust_boundaries` | `trusted_account_ids` | List of Strings |
| `vpc_endpoint_services_allowed_principals_trust_boundaries` | `trusted_account_ids` | List of Strings |
| `cloudwatch_log_group_retention_policy_specific_days_enabled` | `log_group_retention_days` | Integer |
@@ -39,11 +40,25 @@ The following list includes all the AWS checks with configurable variables that
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_entropy` | Integer |
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_minutes` | Integer |
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_actions` | List of Strings |
| `codebuild_project_no_secrets_in_variables` | `excluded_sensitive_environment_variables` | List of Strings |
| `rds_instance_backup_enabled` | `check_rds_instance_replicas` | Boolean |
| `ec2_securitygroup_allow_ingress_from_internet_to_any_port` | `ec2_allowed_interface_types` | List of Strings |
| `ec2_securitygroup_allow_ingress_from_internet_to_any_port` | `ec2_allowed_instance_owners` | List of Strings |
| `acm_certificates_expiration_check` | `days_to_expire_threshold` | Integer |
| `eks_control_plane_logging_all_types_enabled` | `eks_required_log_types` | List of Strings |
| `eks_cluster_uses_a_supported_version` | `eks_cluster_oldest_version_supported` | String |
| `elbv2_is_in_multiple_az` | `elbv2_min_azs` | Integer |
| `elb_is_in_multiple_az` | `elb_min_azs` | Integer |
| `autoscaling_find_secrets_ec2_launch_configuration` | `secrets_ignore_patterns` | List of Strings |
| `awslambda_function_no_secrets_in_code` | `secrets_ignore_patterns` | List of Strings |
| `awslambda_function_no_secrets_in_variables` | `secrets_ignore_patterns` | List of Strings |
| `cloudformation_stack_outputs_find_secrets` | `secrets_ignore_patterns` | List of Strings |
| `cloudwatch_log_group_no_secrets_in_logs` | `secrets_ignore_patterns` | List of Strings |
| `codebuild_project_no_secrets_in_variables` | `secrets_ignore_patterns` | List of Strings |
| `ec2_instance_secrets_user_data` | `secrets_ignore_patterns` | List of Strings |
| `ec2_launch_template_no_secrets` | `secrets_ignore_patterns` | List of Strings |
| `ecs_task_definitions_no_environment_secrets` | `secrets_ignore_patterns` | List of Strings |
| `ssm_document_secrets` | `secrets_ignore_patterns` | List of Strings |
## Azure
@@ -125,6 +140,21 @@ aws:
[
"amazon-elb"
]
# aws.ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports
ec2_sg_high_risk_ports:
[
25,
110,
135,
143,
445,
3000,
4333,
5000,
5500,
8080,
8088,
]
# AWS VPC Configuration (vpc_endpoint_connections_trust_boundaries, vpc_endpoint_services_allowed_principals_trust_boundaries)
# Single account environment: No action required. The AWS account number will be automatically added by the checks.
@@ -368,6 +398,18 @@ aws:
"scheduler",
]
# aws.eks_cluster_uses_a_supported_version
# EKS clusters must be version 1.28 or higher
eks_cluster_oldest_version_supported: "1.28"
# AWS CodeBuild Configuration
# aws.codebuild_project_no_secrets_in_variables
# CodeBuild sensitive variables that are excluded from the check
excluded_sensitive_environment_variables:
[
]
# Azure Configuration
azure:
# Azure Network Configuration

View File

@@ -10,9 +10,11 @@ prowler dashboard
To run Prowler local dashboard with Docker, use:
```sh
docker run --env HOST=0.0.0.0 --publish 127.0.0.1:11666:11666 toniblyx/prowler:latest dashboard
docker run -v /your/local/dir/prowler-output:/home/prowler/output --env HOST=0.0.0.0 --publish 127.0.0.1:11666:11666 toniblyx/prowler:latest dashboard
```
Make sure you update the `/your/local/dir/prowler-output` to match the path that contains your prowler output.
???+ note
**Remember that the `dashboard` server is not authenticated, if you expose it to the internet, you are running it at your own risk.**

View File

@@ -13,7 +13,7 @@ prowler <provider> -c <check_to_fix_1> <check_to_fix_2> ... --fixer
```sh
prowler <provider> --list-fixers
```
It's important to note that using the fixers for `Access Analyzer`, `GuardDuty`, and `SecurityHub` may incur additional costs. These AWS services might trigger actions or deploy resources that can lead to charges on your AWS account.
## Writing a Fixer
To write a fixer, you need to create a file called `<check_id>_fixer.py` inside the check folder, with a function called `fixer` that receives either the region or the resource to be fixed as a parameter, and returns a boolean value indicating if the fix was successful or not.

View File

@@ -40,20 +40,20 @@ Mutelist:
Regions:
- "us-east-1"
Resources:
- "user-1" # Will ignore user-1 in check iam_user_hardware_mfa_enabled
- "user-2" # Will ignore user-2 in check iam_user_hardware_mfa_enabled
- "user-1" # Will mute user-1 in check iam_user_hardware_mfa_enabled
- "user-2" # Will mute user-2 in check iam_user_hardware_mfa_enabled
"ec2_*":
Regions:
- "*"
Resources:
- "*" # Will ignore every EC2 check in every account and region
- "*" # Will mute every EC2 check in every account and region
"*":
Regions:
- "*"
Resources:
- "test"
Tags:
- "test=test" # Will ignore every resource containing the string "test" and the tags 'test=test' and
- "test=test" # Will mute every resource containing the string "test" and the tags 'test=test' and
- "project=test|project=stage" # either of ('project=test' OR project=stage) in account 123456789012 and every region
"*":
Regions:
@@ -86,9 +86,9 @@ Mutelist:
- "eu-west-1"
- "us-east-1"
Resources:
- "ci-logs" # Will ignore bucket "ci-logs" AND ALSO bucket "ci-logs-replica" in specified check and regions
- "logs" # Will ignore EVERY BUCKET containing the string "logs" in specified check and regions
- ".+-logs" # Will ignore all buckets containing the terms ci-logs, qa-logs, etc. in specified check and regions
- "ci-logs" # Will mute bucket "ci-logs" AND ALSO bucket "ci-logs-replica" in specified check and regions
- "logs" # Will mute EVERY BUCKET containing the string "logs" in specified check and regions
- ".+-logs" # Will mute all buckets containing the terms ci-logs, qa-logs, etc. in specified check and regions
"ecs_task_definitions_no_environment_secrets":
Regions:
- "*"
@@ -99,14 +99,14 @@ Mutelist:
- "0123456789012"
Regions:
- "eu-west-1"
- "eu-south-2" # Will ignore every resource in check ecs_task_definitions_no_environment_secrets except the ones in account 0123456789012 located in eu-south-2 or eu-west-1
- "eu-south-2" # Will mute every resource in check ecs_task_definitions_no_environment_secrets except the ones in account 0123456789012 located in eu-south-2 or eu-west-1
"*":
Regions:
- "*"
Resources:
- "*"
Tags:
- "environment=dev" # Will ignore every resource containing the tag 'environment=dev' in every account and region
- "environment=dev" # Will mute every resource containing the tag 'environment=dev' in every account and region
"123456789012":
Checks:
@@ -119,18 +119,26 @@ Mutelist:
Resources:
- "test"
Tags:
- "environment=prod" # Will ignore every resource in account 123456789012 except the ones containing the string "test" and tag environment=prod
- "environment=prod" # Will mute every resource in account 123456789012 except the ones containing the string "test" and tag environment=prod
"*":
Checks:
"ec2_*":
Regions:
- "*"
Resources:
- "test-resource" # Will mute the resource "test-resource" in all accounts and regions for whatever check from the EC2 service
```
### Account, Check, Region, Resource, and Tag
| Field | Description | Logic |
|----------|----------|----------|
| `<account_id>` | Use `*` to apply the mutelist to all accounts. | `ANDed` |
| `<check_name>` | The name of the Prowler check. Use `*` to apply the mutelist to all checks. | `ANDed` |
| `<region>` | The region identifier. Use `*` to apply the mutelist to all regions. | `ANDed` |
| `<resource>` | The resource identifier. Use `*` to apply the mutelist to all resources. | `ANDed` |
| `<tag>` | The tag value. | `ORed` |
| `account_id` | Use `*` to apply the mutelist to all accounts. | `ANDed` |
| `check_name` | The name of the Prowler check. Use `*` to apply the mutelist to all checks, or `service_*` to apply it to all checks of a service. | `ANDed` |
| `region` | The region identifier. Use `*` to apply the mutelist to all regions. | `ANDed` |
| `resource` | The resource identifier. Use `*` to apply the mutelist to all resources. | `ANDed` |
| `tag` | The tag value. | `ORed` |
## How to Use the Mutelist

624
poetry.lock generated
View File

@@ -11,92 +11,104 @@ files = [
{file = "about_time-4.2.1-py3-none-any.whl", hash = "sha256:8bbf4c75fe13cbd3d72f49a03b02c5c7dca32169b6d49117c257e7eb3eaee341"},
]
[[package]]
name = "aiohappyeyeballs"
version = "2.3.5"
description = "Happy Eyeballs for asyncio"
optional = false
python-versions = ">=3.8"
files = [
{file = "aiohappyeyeballs-2.3.5-py3-none-any.whl", hash = "sha256:4d6dea59215537dbc746e93e779caea8178c866856a721c9c660d7a5a7b8be03"},
{file = "aiohappyeyeballs-2.3.5.tar.gz", hash = "sha256:6fa48b9f1317254f122a07a131a86b71ca6946ca989ce6326fff54a99a920105"},
]
[[package]]
name = "aiohttp"
version = "3.9.5"
version = "3.10.2"
description = "Async http client/server framework (asyncio)"
optional = false
python-versions = ">=3.8"
files = [
{file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"},
{file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"},
{file = "aiohttp-3.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a"},
{file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430"},
{file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3"},
{file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b"},
{file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72"},
{file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0"},
{file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558"},
{file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db"},
{file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f"},
{file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832"},
{file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10"},
{file = "aiohttp-3.9.5-cp310-cp310-win32.whl", hash = "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb"},
{file = "aiohttp-3.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb"},
{file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342"},
{file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d"},
{file = "aiohttp-3.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424"},
{file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee"},
{file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2"},
{file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233"},
{file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595"},
{file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6"},
{file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d"},
{file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323"},
{file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9"},
{file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771"},
{file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75"},
{file = "aiohttp-3.9.5-cp311-cp311-win32.whl", hash = "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6"},
{file = "aiohttp-3.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a"},
{file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7a4b7a6cf5b6eb11e109a9755fd4fda7d57395f8c575e166d363b9fc3ec4678"},
{file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0a158704edf0abcac8ac371fbb54044f3270bdbc93e254a82b6c82be1ef08f3c"},
{file = "aiohttp-3.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d153f652a687a8e95ad367a86a61e8d53d528b0530ef382ec5aaf533140ed00f"},
{file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82a6a97d9771cb48ae16979c3a3a9a18b600a8505b1115cfe354dfb2054468b4"},
{file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60cdbd56f4cad9f69c35eaac0fbbdf1f77b0ff9456cebd4902f3dd1cf096464c"},
{file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8676e8fd73141ded15ea586de0b7cda1542960a7b9ad89b2b06428e97125d4fa"},
{file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da00da442a0e31f1c69d26d224e1efd3a1ca5bcbf210978a2ca7426dfcae9f58"},
{file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f634d540dd099c262e9f887c8bbacc959847cfe5da7a0e2e1cf3f14dbf2daf"},
{file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:320e8618eda64e19d11bdb3bd04ccc0a816c17eaecb7e4945d01deee2a22f95f"},
{file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2faa61a904b83142747fc6a6d7ad8fccff898c849123030f8e75d5d967fd4a81"},
{file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:8c64a6dc3fe5db7b1b4d2b5cb84c4f677768bdc340611eca673afb7cf416ef5a"},
{file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:393c7aba2b55559ef7ab791c94b44f7482a07bf7640d17b341b79081f5e5cd1a"},
{file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c671dc117c2c21a1ca10c116cfcd6e3e44da7fcde37bf83b2be485ab377b25da"},
{file = "aiohttp-3.9.5-cp312-cp312-win32.whl", hash = "sha256:5a7ee16aab26e76add4afc45e8f8206c95d1d75540f1039b84a03c3b3800dd59"},
{file = "aiohttp-3.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:5ca51eadbd67045396bc92a4345d1790b7301c14d1848feaac1d6a6c9289e888"},
{file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:694d828b5c41255e54bc2dddb51a9f5150b4eefa9886e38b52605a05d96566e8"},
{file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0605cc2c0088fcaae79f01c913a38611ad09ba68ff482402d3410bf59039bfb8"},
{file = "aiohttp-3.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4558e5012ee03d2638c681e156461d37b7a113fe13970d438d95d10173d25f78"},
{file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dbc053ac75ccc63dc3a3cc547b98c7258ec35a215a92bd9f983e0aac95d3d5b"},
{file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4109adee842b90671f1b689901b948f347325045c15f46b39797ae1bf17019de"},
{file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6ea1a5b409a85477fd8e5ee6ad8f0e40bf2844c270955e09360418cfd09abac"},
{file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3c2890ca8c59ee683fd09adf32321a40fe1cf164e3387799efb2acebf090c11"},
{file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3916c8692dbd9d55c523374a3b8213e628424d19116ac4308e434dbf6d95bbdd"},
{file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8d1964eb7617907c792ca00b341b5ec3e01ae8c280825deadbbd678447b127e1"},
{file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5ab8e1f6bee051a4bf6195e38a5c13e5e161cb7bad83d8854524798bd9fcd6e"},
{file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52c27110f3862a1afbcb2af4281fc9fdc40327fa286c4625dfee247c3ba90156"},
{file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7f64cbd44443e80094309875d4f9c71d0401e966d191c3d469cde4642bc2e031"},
{file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b4f72fbb66279624bfe83fd5eb6aea0022dad8eec62b71e7bf63ee1caadeafe"},
{file = "aiohttp-3.9.5-cp38-cp38-win32.whl", hash = "sha256:6380c039ec52866c06d69b5c7aad5478b24ed11696f0e72f6b807cfb261453da"},
{file = "aiohttp-3.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:da22dab31d7180f8c3ac7c7635f3bcd53808f374f6aa333fe0b0b9e14b01f91a"},
{file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed"},
{file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a"},
{file = "aiohttp-3.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372"},
{file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb"},
{file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24"},
{file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2"},
{file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106"},
{file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b"},
{file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475"},
{file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed"},
{file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c"},
{file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46"},
{file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2"},
{file = "aiohttp-3.9.5-cp39-cp39-win32.whl", hash = "sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09"},
{file = "aiohttp-3.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1"},
{file = "aiohttp-3.9.5.tar.gz", hash = "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551"},
{file = "aiohttp-3.10.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:95213b3d79c7e387144e9cb7b9d2809092d6ff2c044cb59033aedc612f38fb6d"},
{file = "aiohttp-3.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1aa005f060aff7124cfadaa2493f00a4e28ed41b232add5869e129a2e395935a"},
{file = "aiohttp-3.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eabe6bf4c199687592f5de4ccd383945f485779c7ffb62a9b9f1f8a3f9756df8"},
{file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96e010736fc16d21125c7e2dc5c350cd43c528b85085c04bf73a77be328fe944"},
{file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99f81f9c1529fd8e03be4a7bd7df32d14b4f856e90ef6e9cbad3415dbfa9166c"},
{file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d611d1a01c25277bcdea06879afbc11472e33ce842322496b211319aa95441bb"},
{file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00191d38156e09e8c81ef3d75c0d70d4f209b8381e71622165f22ef7da6f101"},
{file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74c091a5ded6cb81785de2d7a8ab703731f26de910dbe0f3934eabef4ae417cc"},
{file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:18186a80ec5a701816adbf1d779926e1069392cf18504528d6e52e14b5920525"},
{file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5a7ceb2a0d2280f23a02c64cd0afdc922079bb950400c3dd13a1ab2988428aac"},
{file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8bd7be6ff6c162a60cb8fce65ee879a684fbb63d5466aba3fa5b9288eb04aefa"},
{file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:fae962b62944eaebff4f4fddcf1a69de919e7b967136a318533d82d93c3c6bd1"},
{file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a0fde16d284efcacbe15fb0c1013f0967b6c3e379649239d783868230bf1db42"},
{file = "aiohttp-3.10.2-cp310-cp310-win32.whl", hash = "sha256:f81cd85a0e76ec7b8e2b6636fe02952d35befda4196b8c88f3cec5b4fb512839"},
{file = "aiohttp-3.10.2-cp310-cp310-win_amd64.whl", hash = "sha256:54ba10eb5a3481c28282eb6afb5f709aedf53cf9c3a31875ffbdc9fc719ffd67"},
{file = "aiohttp-3.10.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:87fab7f948e407444c2f57088286e00e2ed0003ceaf3d8f8cc0f60544ba61d91"},
{file = "aiohttp-3.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ec6ad66ed660d46503243cbec7b2b3d8ddfa020f984209b3b8ef7d98ce69c3f2"},
{file = "aiohttp-3.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a4be88807283bd96ae7b8e401abde4ca0bab597ba73b5e9a2d98f36d451e9aac"},
{file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01c98041f90927c2cbd72c22a164bb816fa3010a047d264969cf82e1d4bcf8d1"},
{file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:54e36c67e1a9273ecafab18d6693da0fb5ac48fd48417e4548ac24a918c20998"},
{file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7de3ddb6f424af54535424082a1b5d1ae8caf8256ebd445be68c31c662354720"},
{file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7dd9c7db94b4692b827ce51dcee597d61a0e4f4661162424faf65106775b40e7"},
{file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e57e21e1167705f8482ca29cc5d02702208d8bf4aff58f766d94bcd6ead838cd"},
{file = "aiohttp-3.10.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a1a50e59b720060c29e2951fd9f13c01e1ea9492e5a527b92cfe04dd64453c16"},
{file = "aiohttp-3.10.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:686c87782481fda5ee6ba572d912a5c26d9f98cc5c243ebd03f95222af3f1b0f"},
{file = "aiohttp-3.10.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:dafb4abb257c0ed56dc36f4e928a7341b34b1379bd87e5a15ce5d883c2c90574"},
{file = "aiohttp-3.10.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:494a6f77560e02bd7d1ab579fdf8192390567fc96a603f21370f6e63690b7f3d"},
{file = "aiohttp-3.10.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6fe8503b1b917508cc68bf44dae28823ac05e9f091021e0c41f806ebbb23f92f"},
{file = "aiohttp-3.10.2-cp311-cp311-win32.whl", hash = "sha256:4ddb43d06ce786221c0dfd3c91b4892c318eaa36b903f7c4278e7e2fa0dd5102"},
{file = "aiohttp-3.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:ca2f5abcb0a9a47e56bac173c01e9f6c6e7f27534d91451c5f22e6a35a5a2093"},
{file = "aiohttp-3.10.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:14eb6b17f6246959fb0b035d4f4ae52caa870c4edfb6170aad14c0de5bfbf478"},
{file = "aiohttp-3.10.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:465e445ec348d4e4bd349edd8b22db75f025da9d7b6dc1369c48e7935b85581e"},
{file = "aiohttp-3.10.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:341f8ece0276a828d95b70cd265d20e257f5132b46bf77d759d7f4e0443f2906"},
{file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c01fbb87b5426381cd9418b3ddcf4fc107e296fa2d3446c18ce6c76642f340a3"},
{file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c474af073e1a6763e1c5522bbb2d85ff8318197e4c6c919b8d7886e16213345"},
{file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d9076810a5621236e29b2204e67a68e1fe317c8727ee4c9abbfbb1083b442c38"},
{file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8f515d6859e673940e08de3922b9c4a2249653b0ac181169313bd6e4b1978ac"},
{file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:655e583afc639bef06f3b2446972c1726007a21003cd0ef57116a123e44601bc"},
{file = "aiohttp-3.10.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8da9449a575133828cc99985536552ea2dcd690e848f9d41b48d8853a149a959"},
{file = "aiohttp-3.10.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:19073d57d0feb1865d12361e2a1f5a49cb764bf81a4024a3b608ab521568093a"},
{file = "aiohttp-3.10.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c8e98e1845805f184d91fda6f9ab93d7c7b0dddf1c07e0255924bfdb151a8d05"},
{file = "aiohttp-3.10.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:377220a5efde6f9497c5b74649b8c261d3cce8a84cb661be2ed8099a2196400a"},
{file = "aiohttp-3.10.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:92f7f4a4dc9cdb5980973a74d43cdbb16286dacf8d1896b6c3023b8ba8436f8e"},
{file = "aiohttp-3.10.2-cp312-cp312-win32.whl", hash = "sha256:9bb2834a6f11d65374ce97d366d6311a9155ef92c4f0cee543b2155d06dc921f"},
{file = "aiohttp-3.10.2-cp312-cp312-win_amd64.whl", hash = "sha256:518dc3cb37365255708283d1c1c54485bbacccd84f0a0fb87ed8917ba45eda5b"},
{file = "aiohttp-3.10.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:7f98e70bbbf693086efe4b86d381efad8edac040b8ad02821453083d15ec315f"},
{file = "aiohttp-3.10.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9f6f0b252a009e98fe84028a4ec48396a948e7a65b8be06ccfc6ef68cf1f614d"},
{file = "aiohttp-3.10.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9360e3ffc7b23565600e729e8c639c3c50d5520e05fdf94aa2bd859eef12c407"},
{file = "aiohttp-3.10.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3988044d1635c7821dd44f0edfbe47e9875427464e59d548aece447f8c22800a"},
{file = "aiohttp-3.10.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30a9d59da1543a6f1478c3436fd49ec59be3868bca561a33778b4391005e499d"},
{file = "aiohttp-3.10.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9f49bdb94809ac56e09a310a62f33e5f22973d6fd351aac72a39cd551e98194"},
{file = "aiohttp-3.10.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddfd2dca3f11c365d6857a07e7d12985afc59798458a2fdb2ffa4a0332a3fd43"},
{file = "aiohttp-3.10.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:685c1508ec97b2cd3e120bfe309a4ff8e852e8a7460f1ef1de00c2c0ed01e33c"},
{file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:49904f38667c44c041a0b44c474b3ae36948d16a0398a8f8cd84e2bb3c42a069"},
{file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:352f3a4e5f11f3241a49b6a48bc5b935fabc35d1165fa0d87f3ca99c1fcca98b"},
{file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:fc61f39b534c5d5903490478a0dd349df397d2284a939aa3cbaa2fb7a19b8397"},
{file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:ad2274e707be37420d0b6c3d26a8115295fe9d8e6e530fa6a42487a8ca3ad052"},
{file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c836bf3c7512100219fe1123743fd8dd9a2b50dd7cfb0c3bb10d041309acab4b"},
{file = "aiohttp-3.10.2-cp38-cp38-win32.whl", hash = "sha256:53e8898adda402be03ff164b0878abe2d884e3ea03a4701e6ad55399d84b92dc"},
{file = "aiohttp-3.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:7cc8f65f5b22304693de05a245b6736b14cb5bc9c8a03da6e2ae9ef15f8b458f"},
{file = "aiohttp-3.10.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9dfc906d656e14004c5bc672399c1cccc10db38df2b62a13fb2b6e165a81c316"},
{file = "aiohttp-3.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:91b10208b222ddf655c3a3d5b727879d7163db12b634492df41a9182a76edaae"},
{file = "aiohttp-3.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9fd16b5e1a7bdd14668cd6bde60a2a29b49147a535c74f50d8177d11b38433a7"},
{file = "aiohttp-3.10.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2bfdda4971bd79201f59adbad24ec2728875237e1c83bba5221284dbbf57bda"},
{file = "aiohttp-3.10.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:69d73f869cf29e8a373127fc378014e2b17bcfbe8d89134bc6fb06a2f67f3cb3"},
{file = "aiohttp-3.10.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df59f8486507c421c0620a2c3dce81fbf1d54018dc20ff4fecdb2c106d6e6abc"},
{file = "aiohttp-3.10.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0df930015db36b460aa9badbf35eccbc383f00d52d4b6f3de2ccb57d064a6ade"},
{file = "aiohttp-3.10.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:562b1153ab7f766ee6b8b357ec777a302770ad017cf18505d34f1c088fccc448"},
{file = "aiohttp-3.10.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d984db6d855de58e0fde1ef908d48fe9a634cadb3cf715962722b4da1c40619d"},
{file = "aiohttp-3.10.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:14dc3fcb0d877911d775d511eb617a486a8c48afca0a887276e63db04d3ee920"},
{file = "aiohttp-3.10.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b52a27a5c97275e254704e1049f4b96a81e67d6205f52fa37a4777d55b0e98ef"},
{file = "aiohttp-3.10.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:cd33d9de8cfd006a0d0fe85f49b4183c57e91d18ffb7e9004ce855e81928f704"},
{file = "aiohttp-3.10.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1238fc979160bc03a92fff9ad021375ff1c8799c6aacb0d8ea1b357ea40932bb"},
{file = "aiohttp-3.10.2-cp39-cp39-win32.whl", hash = "sha256:e2f43d238eae4f0b04f58d4c0df4615697d4ca3e9f9b1963d49555a94f0f5a04"},
{file = "aiohttp-3.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:947847f07a8f81d7b39b2d0202fd73e61962ebe17ac2d8566f260679e467da7b"},
{file = "aiohttp-3.10.2.tar.gz", hash = "sha256:4d1f694b5d6e459352e5e925a42e05bac66655bfde44d81c59992463d2897014"},
]
[package.dependencies]
aiohappyeyeballs = ">=2.3.0"
aiosignal = ">=1.1.2"
async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""}
attrs = ">=17.3.0"
@@ -105,7 +117,7 @@ multidict = ">=4.5,<7.0"
yarl = ">=1.0,<2.0"
[package.extras]
speedups = ["Brotli", "aiodns", "brotlicffi"]
speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"]
[[package]]
name = "aiosignal"
@@ -372,13 +384,13 @@ isodate = ">=0.6.1,<1.0.0"
[[package]]
name = "azure-mgmt-compute"
version = "32.0.0"
version = "33.0.0"
description = "Microsoft Azure Compute Management Client Library for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "azure-mgmt-compute-32.0.0.tar.gz", hash = "sha256:8d5a86e0116c71a07bcedd8e69d2e09270db3880932656521f3143c6f9475072"},
{file = "azure_mgmt_compute-32.0.0-py3-none-any.whl", hash = "sha256:8578dbeee034a58c41331a71ddd2503e1e5c65a2cc233ebfe9adc5e16ca3d037"},
{file = "azure-mgmt-compute-33.0.0.tar.gz", hash = "sha256:a3cc0fe4f09c8e1d3523c1bfb92620dfe263a0a893b0ac13a33d7057e9ddddd2"},
{file = "azure_mgmt_compute-33.0.0-py3-none-any.whl", hash = "sha256:155f8d78a1fdedcea1725fd12b85b2d87fbcb6b53f8e77451c644f45701e3bcf"},
]
[package.dependencies]
@@ -581,29 +593,30 @@ msrest = ">=0.7.1"
[[package]]
name = "azure-mgmt-web"
version = "7.3.0"
version = "7.3.1"
description = "Microsoft Azure Web Apps Management Client Library for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "azure-mgmt-web-7.3.0.tar.gz", hash = "sha256:2032bf4d50df0bbb822ea00cac3bfbe0e67487d2c9cc1182c0858f83149cdea9"},
{file = "azure_mgmt_web-7.3.0-py3-none-any.whl", hash = "sha256:0e53db3fea6eae0d68d9a94eef2e19301f80d3d8a8a20eab599b75384ac0296e"},
{file = "azure-mgmt-web-7.3.1.tar.gz", hash = "sha256:87b771436bc99a7a8df59d0ad185b96879a06dce14764a06b3fc3dafa8fcb56b"},
{file = "azure_mgmt_web-7.3.1-py3-none-any.whl", hash = "sha256:ccf881e3ab31c3fdbf9cbff32773d9c0006b5dcd621ea074d7ec89e51049fb72"},
]
[package.dependencies]
azure-common = ">=1.1"
azure-mgmt-core = ">=1.3.2"
isodate = ">=0.6.1"
typing-extensions = ">=4.6.0"
[[package]]
name = "azure-storage-blob"
version = "12.21.0"
version = "12.22.0"
description = "Microsoft Azure Blob Storage Client Library for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "azure-storage-blob-12.21.0.tar.gz", hash = "sha256:b9722725072f5b7373c0f4dd6d78fbae2bb37bffc5c3e01731ab8c750ee8dd7e"},
{file = "azure_storage_blob-12.21.0-py3-none-any.whl", hash = "sha256:f9ede187dd5a0ef296b583a7c1861c6938ddd6708d6e70f4203a163c2ab42d43"},
{file = "azure-storage-blob-12.22.0.tar.gz", hash = "sha256:b3804bb4fe8ab1c32771fa464053da772a682c2737b19da438a3f4e5e3b3736e"},
{file = "azure_storage_blob-12.22.0-py3-none-any.whl", hash = "sha256:bb7d2d824ce3f11f14a27ee7d9281289f7e072ac8311c52e3652672455b7d5e8"},
]
[package.dependencies]
@@ -655,33 +668,33 @@ yaml = ["PyYAML"]
[[package]]
name = "black"
version = "24.4.2"
version = "24.8.0"
description = "The uncompromising code formatter."
optional = false
python-versions = ">=3.8"
files = [
{file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"},
{file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"},
{file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"},
{file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"},
{file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"},
{file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"},
{file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"},
{file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"},
{file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"},
{file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"},
{file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"},
{file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"},
{file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"},
{file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"},
{file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"},
{file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"},
{file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"},
{file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"},
{file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"},
{file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"},
{file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"},
{file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"},
{file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"},
{file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"},
{file = "black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"},
{file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"},
{file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"},
{file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"},
{file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"},
{file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"},
{file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"},
{file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"},
{file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"},
{file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"},
{file = "black-24.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd"},
{file = "black-24.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2"},
{file = "black-24.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e"},
{file = "black-24.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920"},
{file = "black-24.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c"},
{file = "black-24.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e"},
{file = "black-24.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47"},
{file = "black-24.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb"},
{file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"},
{file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"},
]
[package.dependencies]
@@ -712,17 +725,17 @@ files = [
[[package]]
name = "boto3"
version = "1.34.151"
version = "1.34.162"
description = "The AWS SDK for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "boto3-1.34.151-py3-none-any.whl", hash = "sha256:35bc76faacf1667d3fbb66c1966acf2230ef26206557efc26d9d9d79337bef43"},
{file = "boto3-1.34.151.tar.gz", hash = "sha256:30498a76b6f651ee2af7ae8edc1704379279ab8b91f1a8dd1f4ddf51259b0bc2"},
{file = "boto3-1.34.162-py3-none-any.whl", hash = "sha256:d6f6096bdab35a0c0deff469563b87d184a28df7689790f7fe7be98502b7c590"},
{file = "boto3-1.34.162.tar.gz", hash = "sha256:873f8f5d2f6f85f1018cbb0535b03cceddc7b655b61f66a0a56995238804f41f"},
]
[package.dependencies]
botocore = ">=1.34.151,<1.35.0"
botocore = ">=1.34.162,<1.35.0"
jmespath = ">=0.7.1,<2.0.0"
s3transfer = ">=0.10.0,<0.11.0"
@@ -731,13 +744,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
[[package]]
name = "botocore"
version = "1.34.151"
version = "1.34.162"
description = "Low-level, data-driven core of boto 3."
optional = false
python-versions = ">=3.8"
files = [
{file = "botocore-1.34.151-py3-none-any.whl", hash = "sha256:9018680d7d4a8060c26d127ceec5ab5b270879f423ea39b863d8a46f3e34c404"},
{file = "botocore-1.34.151.tar.gz", hash = "sha256:0d0968e427a94378f295b49d59170dad539938487ec948de3d030f06092ec6dc"},
{file = "botocore-1.34.162-py3-none-any.whl", hash = "sha256:2d918b02db88d27a75b48275e6fb2506e9adaaddbec1ffa6a8a0898b34e769be"},
{file = "botocore-1.34.162.tar.gz", hash = "sha256:adc23be4fb99ad31961236342b7cbf3c0bfc62532cd02852196032e8c0d682f3"},
]
[package.dependencies]
@@ -749,7 +762,7 @@ urllib3 = [
]
[package.extras]
crt = ["awscrt (==0.20.11)"]
crt = ["awscrt (==0.21.2)"]
[[package]]
name = "cachetools"
@@ -1003,63 +1016,83 @@ files = [
[[package]]
name = "coverage"
version = "7.6.0"
version = "7.6.1"
description = "Code coverage measurement for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "coverage-7.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dff044f661f59dace805eedb4a7404c573b6ff0cdba4a524141bc63d7be5c7fd"},
{file = "coverage-7.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8659fd33ee9e6ca03950cfdcdf271d645cf681609153f218826dd9805ab585c"},
{file = "coverage-7.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7792f0ab20df8071d669d929c75c97fecfa6bcab82c10ee4adb91c7a54055463"},
{file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4b3cd1ca7cd73d229487fa5caca9e4bc1f0bca96526b922d61053ea751fe791"},
{file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7e128f85c0b419907d1f38e616c4f1e9f1d1b37a7949f44df9a73d5da5cd53c"},
{file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a94925102c89247530ae1dab7dc02c690942566f22e189cbd53579b0693c0783"},
{file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dcd070b5b585b50e6617e8972f3fbbee786afca71b1936ac06257f7e178f00f6"},
{file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d50a252b23b9b4dfeefc1f663c568a221092cbaded20a05a11665d0dbec9b8fb"},
{file = "coverage-7.6.0-cp310-cp310-win32.whl", hash = "sha256:0e7b27d04131c46e6894f23a4ae186a6a2207209a05df5b6ad4caee6d54a222c"},
{file = "coverage-7.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:54dece71673b3187c86226c3ca793c5f891f9fc3d8aa183f2e3653da18566169"},
{file = "coverage-7.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7b525ab52ce18c57ae232ba6f7010297a87ced82a2383b1afd238849c1ff933"},
{file = "coverage-7.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bea27c4269234e06f621f3fac3925f56ff34bc14521484b8f66a580aacc2e7d"},
{file = "coverage-7.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed8d1d1821ba5fc88d4a4f45387b65de52382fa3ef1f0115a4f7a20cdfab0e94"},
{file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01c322ef2bbe15057bc4bf132b525b7e3f7206f071799eb8aa6ad1940bcf5fb1"},
{file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03cafe82c1b32b770a29fd6de923625ccac3185a54a5e66606da26d105f37dac"},
{file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0d1b923fc4a40c5832be4f35a5dab0e5ff89cddf83bb4174499e02ea089daf57"},
{file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4b03741e70fb811d1a9a1d75355cf391f274ed85847f4b78e35459899f57af4d"},
{file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a73d18625f6a8a1cbb11eadc1d03929f9510f4131879288e3f7922097a429f63"},
{file = "coverage-7.6.0-cp311-cp311-win32.whl", hash = "sha256:65fa405b837060db569a61ec368b74688f429b32fa47a8929a7a2f9b47183713"},
{file = "coverage-7.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:6379688fb4cfa921ae349c76eb1a9ab26b65f32b03d46bb0eed841fd4cb6afb1"},
{file = "coverage-7.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f7db0b6ae1f96ae41afe626095149ecd1b212b424626175a6633c2999eaad45b"},
{file = "coverage-7.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bbdf9a72403110a3bdae77948b8011f644571311c2fb35ee15f0f10a8fc082e8"},
{file = "coverage-7.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cc44bf0315268e253bf563f3560e6c004efe38f76db03a1558274a6e04bf5d5"},
{file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da8549d17489cd52f85a9829d0e1d91059359b3c54a26f28bec2c5d369524807"},
{file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0086cd4fc71b7d485ac93ca4239c8f75732c2ae3ba83f6be1c9be59d9e2c6382"},
{file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1fad32ee9b27350687035cb5fdf9145bc9cf0a094a9577d43e909948ebcfa27b"},
{file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:044a0985a4f25b335882b0966625270a8d9db3d3409ddc49a4eb00b0ef5e8cee"},
{file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:76d5f82213aa78098b9b964ea89de4617e70e0d43e97900c2778a50856dac605"},
{file = "coverage-7.6.0-cp312-cp312-win32.whl", hash = "sha256:3c59105f8d58ce500f348c5b56163a4113a440dad6daa2294b5052a10db866da"},
{file = "coverage-7.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:ca5d79cfdae420a1d52bf177de4bc2289c321d6c961ae321503b2ca59c17ae67"},
{file = "coverage-7.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d39bd10f0ae453554798b125d2f39884290c480f56e8a02ba7a6ed552005243b"},
{file = "coverage-7.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:beb08e8508e53a568811016e59f3234d29c2583f6b6e28572f0954a6b4f7e03d"},
{file = "coverage-7.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2e16f4cd2bc4d88ba30ca2d3bbf2f21f00f382cf4e1ce3b1ddc96c634bc48ca"},
{file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6616d1c9bf1e3faea78711ee42a8b972367d82ceae233ec0ac61cc7fec09fa6b"},
{file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad4567d6c334c46046d1c4c20024de2a1c3abc626817ae21ae3da600f5779b44"},
{file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d17c6a415d68cfe1091d3296ba5749d3d8696e42c37fca5d4860c5bf7b729f03"},
{file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9146579352d7b5f6412735d0f203bbd8d00113a680b66565e205bc605ef81bc6"},
{file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cdab02a0a941af190df8782aafc591ef3ad08824f97850b015c8c6a8b3877b0b"},
{file = "coverage-7.6.0-cp38-cp38-win32.whl", hash = "sha256:df423f351b162a702c053d5dddc0fc0ef9a9e27ea3f449781ace5f906b664428"},
{file = "coverage-7.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:f2501d60d7497fd55e391f423f965bbe9e650e9ffc3c627d5f0ac516026000b8"},
{file = "coverage-7.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7221f9ac9dad9492cecab6f676b3eaf9185141539d5c9689d13fd6b0d7de840c"},
{file = "coverage-7.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ddaaa91bfc4477d2871442bbf30a125e8fe6b05da8a0015507bfbf4718228ab2"},
{file = "coverage-7.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4cbe651f3904e28f3a55d6f371203049034b4ddbce65a54527a3f189ca3b390"},
{file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:831b476d79408ab6ccfadaaf199906c833f02fdb32c9ab907b1d4aa0713cfa3b"},
{file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46c3d091059ad0b9c59d1034de74a7f36dcfa7f6d3bde782c49deb42438f2450"},
{file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4d5fae0a22dc86259dee66f2cc6c1d3e490c4a1214d7daa2a93d07491c5c04b6"},
{file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:07ed352205574aad067482e53dd606926afebcb5590653121063fbf4e2175166"},
{file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:49c76cdfa13015c4560702574bad67f0e15ca5a2872c6a125f6327ead2b731dd"},
{file = "coverage-7.6.0-cp39-cp39-win32.whl", hash = "sha256:482855914928c8175735a2a59c8dc5806cf7d8f032e4820d52e845d1f731dca2"},
{file = "coverage-7.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:543ef9179bc55edfd895154a51792b01c017c87af0ebaae092720152e19e42ca"},
{file = "coverage-7.6.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:6fe885135c8a479d3e37a7aae61cbd3a0fb2deccb4dda3c25f92a49189f766d6"},
{file = "coverage-7.6.0.tar.gz", hash = "sha256:289cc803fa1dc901f84701ac10c9ee873619320f2f9aff38794db4a4a0268d51"},
{file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"},
{file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"},
{file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"},
{file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"},
{file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"},
{file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"},
{file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"},
{file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"},
{file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"},
{file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"},
{file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"},
{file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"},
{file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"},
{file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"},
{file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"},
{file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"},
{file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"},
{file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"},
{file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"},
{file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"},
{file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"},
{file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"},
{file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"},
{file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"},
{file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"},
{file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"},
{file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"},
{file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"},
{file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"},
{file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"},
{file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"},
{file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"},
{file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"},
{file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"},
{file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"},
{file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"},
{file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"},
{file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"},
{file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"},
{file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"},
{file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"},
{file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"},
{file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"},
{file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"},
{file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"},
{file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"},
{file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"},
{file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"},
{file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"},
{file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"},
{file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"},
{file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"},
{file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"},
{file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"},
{file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"},
{file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"},
{file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"},
{file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"},
{file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"},
{file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"},
{file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"},
{file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"},
{file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"},
{file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"},
{file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"},
{file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"},
{file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"},
{file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"},
{file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"},
{file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"},
{file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"},
{file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"},
]
[package.dependencies]
@@ -1070,38 +1103,38 @@ toml = ["tomli"]
[[package]]
name = "cryptography"
version = "43.0.0"
version = "43.0.1"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
optional = false
python-versions = ">=3.7"
files = [
{file = "cryptography-43.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:64c3f16e2a4fc51c0d06af28441881f98c5d91009b8caaff40cf3548089e9c74"},
{file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3dcdedae5c7710b9f97ac6bba7e1052b95c7083c9d0e9df96e02a1932e777895"},
{file = "cryptography-43.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d9a1eca329405219b605fac09ecfc09ac09e595d6def650a437523fcd08dd22"},
{file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ea9e57f8ea880eeea38ab5abf9fbe39f923544d7884228ec67d666abd60f5a47"},
{file = "cryptography-43.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9a8d6802e0825767476f62aafed40532bd435e8a5f7d23bd8b4f5fd04cc80ecf"},
{file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:cc70b4b581f28d0a254d006f26949245e3657d40d8857066c2ae22a61222ef55"},
{file = "cryptography-43.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4a997df8c1c2aae1e1e5ac49c2e4f610ad037fc5a3aadc7b64e39dea42249431"},
{file = "cryptography-43.0.0-cp37-abi3-win32.whl", hash = "sha256:6e2b11c55d260d03a8cf29ac9b5e0608d35f08077d8c087be96287f43af3ccdc"},
{file = "cryptography-43.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:31e44a986ceccec3d0498e16f3d27b2ee5fdf69ce2ab89b52eaad1d2f33d8778"},
{file = "cryptography-43.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:7b3f5fe74a5ca32d4d0f302ffe6680fcc5c28f8ef0dc0ae8f40c0f3a1b4fca66"},
{file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac1955ce000cb29ab40def14fd1bbfa7af2017cca696ee696925615cafd0dce5"},
{file = "cryptography-43.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:299d3da8e00b7e2b54bb02ef58d73cd5f55fb31f33ebbf33bd00d9aa6807df7e"},
{file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ee0c405832ade84d4de74b9029bedb7b31200600fa524d218fc29bfa371e97f5"},
{file = "cryptography-43.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cb013933d4c127349b3948aa8aaf2f12c0353ad0eccd715ca789c8a0f671646f"},
{file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fdcb265de28585de5b859ae13e3846a8e805268a823a12a4da2597f1f5afc9f0"},
{file = "cryptography-43.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2905ccf93a8a2a416f3ec01b1a7911c3fe4073ef35640e7ee5296754e30b762b"},
{file = "cryptography-43.0.0-cp39-abi3-win32.whl", hash = "sha256:47ca71115e545954e6c1d207dd13461ab81f4eccfcb1345eac874828b5e3eaaf"},
{file = "cryptography-43.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:0663585d02f76929792470451a5ba64424acc3cd5227b03921dab0e2f27b1709"},
{file = "cryptography-43.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c6d112bf61c5ef44042c253e4859b3cbbb50df2f78fa8fae6747a7814484a70"},
{file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:844b6d608374e7d08f4f6e6f9f7b951f9256db41421917dfb2d003dde4cd6b66"},
{file = "cryptography-43.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:51956cf8730665e2bdf8ddb8da0056f699c1a5715648c1b0144670c1ba00b48f"},
{file = "cryptography-43.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:aae4d918f6b180a8ab8bf6511a419473d107df4dbb4225c7b48c5c9602c38c7f"},
{file = "cryptography-43.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:232ce02943a579095a339ac4b390fbbe97f5b5d5d107f8a08260ea2768be8cc2"},
{file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5bcb8a5620008a8034d39bce21dc3e23735dfdb6a33a06974739bfa04f853947"},
{file = "cryptography-43.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:08a24a7070b2b6804c1940ff0f910ff728932a9d0e80e7814234269f9d46d069"},
{file = "cryptography-43.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e9c5266c432a1e23738d178e51c2c7a5e2ddf790f248be939448c0ba2021f9d1"},
{file = "cryptography-43.0.0.tar.gz", hash = "sha256:b88075ada2d51aa9f18283532c9f60e72170041bba88d7f37e49cbb10275299e"},
{file = "cryptography-43.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d"},
{file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062"},
{file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962"},
{file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277"},
{file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a"},
{file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042"},
{file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494"},
{file = "cryptography-43.0.1-cp37-abi3-win32.whl", hash = "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2"},
{file = "cryptography-43.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d"},
{file = "cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d"},
{file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806"},
{file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85"},
{file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c"},
{file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1"},
{file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa"},
{file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4"},
{file = "cryptography-43.0.1-cp39-abi3-win32.whl", hash = "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47"},
{file = "cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb"},
{file = "cryptography-43.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034"},
{file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d"},
{file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289"},
{file = "cryptography-43.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84"},
{file = "cryptography-43.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365"},
{file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96"},
{file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172"},
{file = "cryptography-43.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2"},
{file = "cryptography-43.0.1.tar.gz", hash = "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d"},
]
[package.dependencies]
@@ -1114,7 +1147,7 @@ nox = ["nox"]
pep8test = ["check-sdist", "click", "mypy", "ruff"]
sdist = ["build"]
ssh = ["bcrypt (>=3.1.5)"]
test = ["certifi", "cryptography-vectors (==43.0.0)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
test-randomorder = ["pytest-randomly"]
[[package]]
@@ -1375,13 +1408,13 @@ typing = ["typing-extensions (>=4.7.1)"]
[[package]]
name = "flake8"
version = "7.1.0"
version = "7.1.1"
description = "the modular source code checker: pep8 pyflakes and co"
optional = false
python-versions = ">=3.8.1"
files = [
{file = "flake8-7.1.0-py2.py3-none-any.whl", hash = "sha256:2e416edcc62471a64cea09353f4e7bdba32aeb079b6e360554c659a122b1bc6a"},
{file = "flake8-7.1.0.tar.gz", hash = "sha256:48a07b626b55236e0fb4784ee69a465fbf59d79eec1f5b4785c3d3bc57d17aa5"},
{file = "flake8-7.1.1-py2.py3-none-any.whl", hash = "sha256:597477df7860daa5aa0fdd84bf5208a043ab96b8e96ab708770ae0364dd03213"},
{file = "flake8-7.1.1.tar.gz", hash = "sha256:049d058491e228e03e67b390f311bbf88fce2dbaa8fa673e7aea87b7198b8d38"},
]
[package.dependencies]
@@ -1586,13 +1619,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"]
[[package]]
name = "google-api-python-client"
version = "2.139.0"
version = "2.143.0"
description = "Google API Client Library for Python"
optional = false
python-versions = ">=3.7"
files = [
{file = "google_api_python_client-2.139.0-py2.py3-none-any.whl", hash = "sha256:1850a92505d91a82e2ca1635ab2b8dff179f4b67082c2651e1db332e8039840c"},
{file = "google_api_python_client-2.139.0.tar.gz", hash = "sha256:ed4bc3abe2c060a87412465b4e8254620bbbc548eefc5388e2c5ff912d36a68b"},
{file = "google_api_python_client-2.143.0-py2.py3-none-any.whl", hash = "sha256:d5654134522b9b574b82234e96f7e0aeeabcbf33643fbabcd449ef0068e3a476"},
{file = "google_api_python_client-2.143.0.tar.gz", hash = "sha256:6a75441f9078e6e2fcdf4946a153fda1e2cc81b5e9c8d6e8c0750c85c7f8a566"},
]
[package.dependencies]
@@ -2365,13 +2398,13 @@ pendulum = ">=3.0.0"
[[package]]
name = "microsoft-kiota-serialization-json"
version = "1.2.0"
version = "1.3.0"
description = "Implementation of Kiota Serialization interfaces for JSON"
optional = false
python-versions = "*"
files = [
{file = "microsoft_kiota_serialization_json-1.2.0-py2.py3-none-any.whl", hash = "sha256:cf68ef323157b3566b043d2282b292479bca6af0ffcf08385c806c812e507a58"},
{file = "microsoft_kiota_serialization_json-1.2.0.tar.gz", hash = "sha256:89a4ec0128958bc92287db0cf5b6616a9f66ac42f6c7bcfe8894393d2156bed9"},
{file = "microsoft_kiota_serialization_json-1.3.0-py2.py3-none-any.whl", hash = "sha256:fbf82835d8b77ef21b496aa711a512fe4494fa94dfe88f7fd014dffe33778e20"},
{file = "microsoft_kiota_serialization_json-1.3.0.tar.gz", hash = "sha256:235b680e6eb646479ffb7b59d2a6f0216c4f7e1c2ff1219fd4d59e898fa6b124"},
]
[package.dependencies]
@@ -2440,13 +2473,13 @@ min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-imp
[[package]]
name = "mkdocs-git-revision-date-localized-plugin"
version = "1.2.6"
version = "1.2.7"
description = "Mkdocs plugin that enables displaying the localized date of the last git modification of a markdown file."
optional = false
python-versions = ">=3.8"
files = [
{file = "mkdocs_git_revision_date_localized_plugin-1.2.6-py3-none-any.whl", hash = "sha256:f015cb0f3894a39b33447b18e270ae391c4e25275cac5a626e80b243784e2692"},
{file = "mkdocs_git_revision_date_localized_plugin-1.2.6.tar.gz", hash = "sha256:e432942ce4ee8aa9b9f4493e993dee9d2cc08b3ea2b40a3d6b03ca0f2a4bcaa2"},
{file = "mkdocs_git_revision_date_localized_plugin-1.2.7-py3-none-any.whl", hash = "sha256:d2b30ccb74ec8e118298758d75ae4b4f02c620daf776a6c92fcbb58f2b78f19f"},
{file = "mkdocs_git_revision_date_localized_plugin-1.2.7.tar.gz", hash = "sha256:2f83b52b4dad642751a79465f80394672cbad022129286f40d36b03aebee490f"},
]
[package.dependencies]
@@ -2513,13 +2546,13 @@ test = ["pytest", "pytest-cov"]
[[package]]
name = "moto"
version = "5.0.11"
version = "5.0.13"
description = ""
optional = false
python-versions = ">=3.8"
files = [
{file = "moto-5.0.11-py2.py3-none-any.whl", hash = "sha256:bdba9bec0afcde9f99b58c5271d6458dbfcda0a0a1e9beaecd808d2591db65ea"},
{file = "moto-5.0.11.tar.gz", hash = "sha256:606b641f4c6ef69f28a84147d6d6806d052011e7ae7b0fe46ae8858e7a27a0a3"},
{file = "moto-5.0.13-py2.py3-none-any.whl", hash = "sha256:984377a9c4536543fc09f49a1d5210c61c4a4f55c79719f7d9f8dcdd9bf55ea5"},
{file = "moto-5.0.13.tar.gz", hash = "sha256:ddf8864f0d61af88fd07a4e5eac428c6bebf4fcd10023f8e756e65e9e7b7e4a5"},
]
[package.dependencies]
@@ -2558,6 +2591,7 @@ cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>
cognitoidp = ["joserfc (>=0.9.0)"]
dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.5)"]
dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.5.5)"]
events = ["jsonpath-ng"]
glue = ["pyparsing (>=3.0.7)"]
iotdata = ["jsondiff (>=1.1.2)"]
proxy = ["PyYAML (>=5.1)", "antlr4-python3-runtime", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "graphql-core", "joserfc (>=0.9.0)", "jsondiff (>=1.1.2)", "jsonpath-ng", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.5.5)", "pyparsing (>=3.0.7)", "setuptools"]
@@ -2646,13 +2680,13 @@ dev = ["bumpver", "isort", "mypy", "pylint", "pytest", "yapf"]
[[package]]
name = "msgraph-sdk"
version = "1.5.3"
version = "1.5.4"
description = "The Microsoft Graph Python SDK"
optional = false
python-versions = ">=3.8"
files = [
{file = "msgraph_sdk-1.5.3-py3-none-any.whl", hash = "sha256:47aecbeb3bdb76fcfb4b7535aa5defc336249c42b0d7cbb9480244045449cb12"},
{file = "msgraph_sdk-1.5.3.tar.gz", hash = "sha256:34dd5a9a55287cfc8a3d5e301acdf65e46462011d451f2992fdbf31851a94e8f"},
{file = "msgraph_sdk-1.5.4-py3-none-any.whl", hash = "sha256:9ea349f30cc4a03edb587e26554c7a4839a38c2ef30d4b5396882fd2be82dcac"},
{file = "msgraph_sdk-1.5.4.tar.gz", hash = "sha256:b0e146328d136d1db175938d8fc901f3bb32acf3ea6fe93c0dc7c5a0abc45e39"},
]
[package.dependencies]
@@ -2661,7 +2695,7 @@ microsoft-kiota-abstractions = ">=1.3.0,<2.0.0"
microsoft-kiota-authentication-azure = ">=1.0.0,<2.0.0"
microsoft-kiota-http = ">=1.0.0,<2.0.0"
microsoft-kiota-serialization-form = ">=0.1.0"
microsoft-kiota-serialization-json = ">=1.0.0,<2.0.0"
microsoft-kiota-serialization-json = ">=1.3.0,<2.0.0"
microsoft-kiota-serialization-multipart = ">=0.1.0"
microsoft-kiota-serialization-text = ">=1.0.0,<2.0.0"
msgraph_core = ">=1.0.0"
@@ -2842,56 +2876,56 @@ test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"]
[[package]]
name = "numpy"
version = "2.0.1"
version = "2.0.2"
description = "Fundamental package for array computing in Python"
optional = false
python-versions = ">=3.9"
files = [
{file = "numpy-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fbb536eac80e27a2793ffd787895242b7f18ef792563d742c2d673bfcb75134"},
{file = "numpy-2.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:69ff563d43c69b1baba77af455dd0a839df8d25e8590e79c90fcbe1499ebde42"},
{file = "numpy-2.0.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:1b902ce0e0a5bb7704556a217c4f63a7974f8f43e090aff03fcf262e0b135e02"},
{file = "numpy-2.0.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:f1659887361a7151f89e79b276ed8dff3d75877df906328f14d8bb40bb4f5101"},
{file = "numpy-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4658c398d65d1b25e1760de3157011a80375da861709abd7cef3bad65d6543f9"},
{file = "numpy-2.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4127d4303b9ac9f94ca0441138acead39928938660ca58329fe156f84b9f3015"},
{file = "numpy-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e5eeca8067ad04bc8a2a8731183d51d7cbaac66d86085d5f4766ee6bf19c7f87"},
{file = "numpy-2.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9adbd9bb520c866e1bfd7e10e1880a1f7749f1f6e5017686a5fbb9b72cf69f82"},
{file = "numpy-2.0.1-cp310-cp310-win32.whl", hash = "sha256:7b9853803278db3bdcc6cd5beca37815b133e9e77ff3d4733c247414e78eb8d1"},
{file = "numpy-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:81b0893a39bc5b865b8bf89e9ad7807e16717f19868e9d234bdaf9b1f1393868"},
{file = "numpy-2.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75b4e316c5902d8163ef9d423b1c3f2f6252226d1aa5cd8a0a03a7d01ffc6268"},
{file = "numpy-2.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6e4eeb6eb2fced786e32e6d8df9e755ce5be920d17f7ce00bc38fcde8ccdbf9e"},
{file = "numpy-2.0.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:a1e01dcaab205fbece13c1410253a9eea1b1c9b61d237b6fa59bcc46e8e89343"},
{file = "numpy-2.0.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:a8fc2de81ad835d999113ddf87d1ea2b0f4704cbd947c948d2f5513deafe5a7b"},
{file = "numpy-2.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a3d94942c331dd4e0e1147f7a8699a4aa47dffc11bf8a1523c12af8b2e91bbe"},
{file = "numpy-2.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15eb4eca47d36ec3f78cde0a3a2ee24cf05ca7396ef808dda2c0ddad7c2bde67"},
{file = "numpy-2.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b83e16a5511d1b1f8a88cbabb1a6f6a499f82c062a4251892d9ad5d609863fb7"},
{file = "numpy-2.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f87fec1f9bc1efd23f4227becff04bd0e979e23ca50cc92ec88b38489db3b55"},
{file = "numpy-2.0.1-cp311-cp311-win32.whl", hash = "sha256:36d3a9405fd7c511804dc56fc32974fa5533bdeb3cd1604d6b8ff1d292b819c4"},
{file = "numpy-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:08458fbf403bff5e2b45f08eda195d4b0c9b35682311da5a5a0a0925b11b9bd8"},
{file = "numpy-2.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6bf4e6f4a2a2e26655717a1983ef6324f2664d7011f6ef7482e8c0b3d51e82ac"},
{file = "numpy-2.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6fddc5fe258d3328cd8e3d7d3e02234c5d70e01ebe377a6ab92adb14039cb4"},
{file = "numpy-2.0.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5daab361be6ddeb299a918a7c0864fa8618af66019138263247af405018b04e1"},
{file = "numpy-2.0.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:ea2326a4dca88e4a274ba3a4405eb6c6467d3ffbd8c7d38632502eaae3820587"},
{file = "numpy-2.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:529af13c5f4b7a932fb0e1911d3a75da204eff023ee5e0e79c1751564221a5c8"},
{file = "numpy-2.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6790654cb13eab303d8402354fabd47472b24635700f631f041bd0b65e37298a"},
{file = "numpy-2.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cbab9fc9c391700e3e1287666dfd82d8666d10e69a6c4a09ab97574c0b7ee0a7"},
{file = "numpy-2.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:99d0d92a5e3613c33a5f01db206a33f8fdf3d71f2912b0de1739894668b7a93b"},
{file = "numpy-2.0.1-cp312-cp312-win32.whl", hash = "sha256:173a00b9995f73b79eb0191129f2455f1e34c203f559dd118636858cc452a1bf"},
{file = "numpy-2.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:bb2124fdc6e62baae159ebcfa368708867eb56806804d005860b6007388df171"},
{file = "numpy-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bfc085b28d62ff4009364e7ca34b80a9a080cbd97c2c0630bb5f7f770dae9414"},
{file = "numpy-2.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8fae4ebbf95a179c1156fab0b142b74e4ba4204c87bde8d3d8b6f9c34c5825ef"},
{file = "numpy-2.0.1-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:72dc22e9ec8f6eaa206deb1b1355eb2e253899d7347f5e2fae5f0af613741d06"},
{file = "numpy-2.0.1-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:ec87f5f8aca726117a1c9b7083e7656a9d0d606eec7299cc067bb83d26f16e0c"},
{file = "numpy-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f682ea61a88479d9498bf2091fdcd722b090724b08b31d63e022adc063bad59"},
{file = "numpy-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8efc84f01c1cd7e34b3fb310183e72fcdf55293ee736d679b6d35b35d80bba26"},
{file = "numpy-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3fdabe3e2a52bc4eff8dc7a5044342f8bd9f11ef0934fcd3289a788c0eb10018"},
{file = "numpy-2.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:24a0e1befbfa14615b49ba9659d3d8818a0f4d8a1c5822af8696706fbda7310c"},
{file = "numpy-2.0.1-cp39-cp39-win32.whl", hash = "sha256:f9cf5ea551aec449206954b075db819f52adc1638d46a6738253a712d553c7b4"},
{file = "numpy-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:e9e81fa9017eaa416c056e5d9e71be93d05e2c3c2ab308d23307a8bc4443c368"},
{file = "numpy-2.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:61728fba1e464f789b11deb78a57805c70b2ed02343560456190d0501ba37b0f"},
{file = "numpy-2.0.1-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:12f5d865d60fb9734e60a60f1d5afa6d962d8d4467c120a1c0cda6eb2964437d"},
{file = "numpy-2.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eacf3291e263d5a67d8c1a581a8ebbcfd6447204ef58828caf69a5e3e8c75990"},
{file = "numpy-2.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2c3a346ae20cfd80b6cfd3e60dc179963ef2ea58da5ec074fd3d9e7a1e7ba97f"},
{file = "numpy-2.0.1.tar.gz", hash = "sha256:485b87235796410c3519a699cfe1faab097e509e90ebb05dcd098db2ae87e7b3"},
{file = "numpy-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece"},
{file = "numpy-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04"},
{file = "numpy-2.0.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:8c5713284ce4e282544c68d1c3b2c7161d38c256d2eefc93c1d683cf47683e66"},
{file = "numpy-2.0.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:becfae3ddd30736fe1889a37f1f580e245ba79a5855bff5f2a29cb3ccc22dd7b"},
{file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2da5960c3cf0df7eafefd806d4e612c5e19358de82cb3c343631188991566ccd"},
{file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:496f71341824ed9f3d2fd36cf3ac57ae2e0165c143b55c3a035ee219413f3318"},
{file = "numpy-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a61ec659f68ae254e4d237816e33171497e978140353c0c2038d46e63282d0c8"},
{file = "numpy-2.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d731a1c6116ba289c1e9ee714b08a8ff882944d4ad631fd411106a30f083c326"},
{file = "numpy-2.0.2-cp310-cp310-win32.whl", hash = "sha256:984d96121c9f9616cd33fbd0618b7f08e0cfc9600a7ee1d6fd9b239186d19d97"},
{file = "numpy-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:c7b0be4ef08607dd04da4092faee0b86607f111d5ae68036f16cc787e250a131"},
{file = "numpy-2.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:49ca4decb342d66018b01932139c0961a8f9ddc7589611158cb3c27cbcf76448"},
{file = "numpy-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:11a76c372d1d37437857280aa142086476136a8c0f373b2e648ab2c8f18fb195"},
{file = "numpy-2.0.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:807ec44583fd708a21d4a11d94aedf2f4f3c3719035c76a2bbe1fe8e217bdc57"},
{file = "numpy-2.0.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:8cafab480740e22f8d833acefed5cc87ce276f4ece12fdaa2e8903db2f82897a"},
{file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a15f476a45e6e5a3a79d8a14e62161d27ad897381fecfa4a09ed5322f2085669"},
{file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13e689d772146140a252c3a28501da66dfecd77490b498b168b501835041f951"},
{file = "numpy-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9ea91dfb7c3d1c56a0e55657c0afb38cf1eeae4544c208dc465c3c9f3a7c09f9"},
{file = "numpy-2.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c1c9307701fec8f3f7a1e6711f9089c06e6284b3afbbcd259f7791282d660a15"},
{file = "numpy-2.0.2-cp311-cp311-win32.whl", hash = "sha256:a392a68bd329eafac5817e5aefeb39038c48b671afd242710b451e76090e81f4"},
{file = "numpy-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:286cd40ce2b7d652a6f22efdfc6d1edf879440e53e76a75955bc0c826c7e64dc"},
{file = "numpy-2.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:df55d490dea7934f330006d0f81e8551ba6010a5bf035a249ef61a94f21c500b"},
{file = "numpy-2.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8df823f570d9adf0978347d1f926b2a867d5608f434a7cff7f7908c6570dcf5e"},
{file = "numpy-2.0.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9a92ae5c14811e390f3767053ff54eaee3bf84576d99a2456391401323f4ec2c"},
{file = "numpy-2.0.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:a842d573724391493a97a62ebbb8e731f8a5dcc5d285dfc99141ca15a3302d0c"},
{file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05e238064fc0610c840d1cf6a13bf63d7e391717d247f1bf0318172e759e692"},
{file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0123ffdaa88fa4ab64835dcbde75dcdf89c453c922f18dced6e27c90d1d0ec5a"},
{file = "numpy-2.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:96a55f64139912d61de9137f11bf39a55ec8faec288c75a54f93dfd39f7eb40c"},
{file = "numpy-2.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec9852fb39354b5a45a80bdab5ac02dd02b15f44b3804e9f00c556bf24b4bded"},
{file = "numpy-2.0.2-cp312-cp312-win32.whl", hash = "sha256:671bec6496f83202ed2d3c8fdc486a8fc86942f2e69ff0e986140339a63bcbe5"},
{file = "numpy-2.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:cfd41e13fdc257aa5778496b8caa5e856dc4896d4ccf01841daee1d96465467a"},
{file = "numpy-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9059e10581ce4093f735ed23f3b9d283b9d517ff46009ddd485f1747eb22653c"},
{file = "numpy-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:423e89b23490805d2a5a96fe40ec507407b8ee786d66f7328be214f9679df6dd"},
{file = "numpy-2.0.2-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:2b2955fa6f11907cf7a70dab0d0755159bca87755e831e47932367fc8f2f2d0b"},
{file = "numpy-2.0.2-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:97032a27bd9d8988b9a97a8c4d2c9f2c15a81f61e2f21404d7e8ef00cb5be729"},
{file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e795a8be3ddbac43274f18588329c72939870a16cae810c2b73461c40718ab1"},
{file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26b258c385842546006213344c50655ff1555a9338e2e5e02a0756dc3e803dd"},
{file = "numpy-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fec9451a7789926bcf7c2b8d187292c9f93ea30284802a0ab3f5be8ab36865d"},
{file = "numpy-2.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9189427407d88ff25ecf8f12469d4d39d35bee1db5d39fc5c168c6f088a6956d"},
{file = "numpy-2.0.2-cp39-cp39-win32.whl", hash = "sha256:905d16e0c60200656500c95b6b8dca5d109e23cb24abc701d41c02d74c6b3afa"},
{file = "numpy-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:a3f4ab0caa7f053f6797fcd4e1e25caee367db3112ef2b6ef82d749530768c73"},
{file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7f0a0c6f12e07fa94133c8a67404322845220c06a9e80e85999afe727f7438b8"},
{file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:312950fdd060354350ed123c0e25a71327d3711584beaef30cdaa93320c392d4"},
{file = "numpy-2.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26df23238872200f63518dd2aa984cfca675d82469535dc7162dc2ee52d9dd5c"},
{file = "numpy-2.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a46288ec55ebbd58947d31d72be2c63cbf839f0a63b49cb755022310792a3385"},
{file = "numpy-2.0.2.tar.gz", hash = "sha256:883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78"},
]
[[package]]
@@ -3324,6 +3358,35 @@ files = [
{file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"},
]
[[package]]
name = "psutil"
version = "6.0.0"
description = "Cross-platform lib for process and system monitoring in Python."
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
files = [
{file = "psutil-6.0.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a021da3e881cd935e64a3d0a20983bda0bb4cf80e4f74fa9bfcb1bc5785360c6"},
{file = "psutil-6.0.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1287c2b95f1c0a364d23bc6f2ea2365a8d4d9b726a3be7294296ff7ba97c17f0"},
{file = "psutil-6.0.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a9a3dbfb4de4f18174528d87cc352d1f788b7496991cca33c6996f40c9e3c92c"},
{file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6ec7588fb3ddaec7344a825afe298db83fe01bfaaab39155fa84cf1c0d6b13c3"},
{file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:1e7c870afcb7d91fdea2b37c24aeb08f98b6d67257a5cb0a8bc3ac68d0f1a68c"},
{file = "psutil-6.0.0-cp27-none-win32.whl", hash = "sha256:02b69001f44cc73c1c5279d02b30a817e339ceb258ad75997325e0e6169d8b35"},
{file = "psutil-6.0.0-cp27-none-win_amd64.whl", hash = "sha256:21f1fb635deccd510f69f485b87433460a603919b45e2a324ad65b0cc74f8fb1"},
{file = "psutil-6.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c588a7e9b1173b6e866756dde596fd4cad94f9399daf99ad8c3258b3cb2b47a0"},
{file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ed2440ada7ef7d0d608f20ad89a04ec47d2d3ab7190896cd62ca5fc4fe08bf0"},
{file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd9a97c8e94059b0ef54a7d4baf13b405011176c3b6ff257c247cae0d560ecd"},
{file = "psutil-6.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e8d0054fc88153ca0544f5c4d554d42e33df2e009c4ff42284ac9ebdef4132"},
{file = "psutil-6.0.0-cp36-cp36m-win32.whl", hash = "sha256:fc8c9510cde0146432bbdb433322861ee8c3efbf8589865c8bf8d21cb30c4d14"},
{file = "psutil-6.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:34859b8d8f423b86e4385ff3665d3f4d94be3cdf48221fbe476e883514fdb71c"},
{file = "psutil-6.0.0-cp37-abi3-win32.whl", hash = "sha256:a495580d6bae27291324fe60cea0b5a7c23fa36a7cd35035a16d93bdcf076b9d"},
{file = "psutil-6.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:33ea5e1c975250a720b3a6609c490db40dae5d83a4eb315170c4fe0d8b1f34b3"},
{file = "psutil-6.0.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:ffe7fc9b6b36beadc8c322f84e1caff51e8703b88eee1da46d1e3a6ae11b4fd0"},
{file = "psutil-6.0.0.tar.gz", hash = "sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2"},
]
[package.extras]
test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"]
[[package]]
name = "py-ocsf-models"
version = "0.1.1"
@@ -3500,13 +3563,13 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]
[[package]]
name = "pylint"
version = "3.2.6"
version = "3.2.7"
description = "python code static checker"
optional = false
python-versions = ">=3.8.0"
files = [
{file = "pylint-3.2.6-py3-none-any.whl", hash = "sha256:03c8e3baa1d9fb995b12c1dbe00aa6c4bcef210c2a2634374aedeb22fb4a8f8f"},
{file = "pylint-3.2.6.tar.gz", hash = "sha256:a5d01678349454806cff6d886fb072294f56a58c4761278c97fb557d708e1eb3"},
{file = "pylint-3.2.7-py3-none-any.whl", hash = "sha256:02f4aedeac91be69fb3b4bea997ce580a4ac68ce58b89eaefeaf06749df73f4b"},
{file = "pylint-3.2.7.tar.gz", hash = "sha256:1b7a721b575eaeaa7d39db076b6e7743c993ea44f57979127c517c6c572c803e"},
]
[package.dependencies]
@@ -4228,13 +4291,13 @@ crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"]
[[package]]
name = "safety"
version = "3.2.4"
version = "3.2.7"
description = "Checks installed dependencies for known vulnerabilities and licenses."
optional = false
python-versions = ">=3.7"
files = [
{file = "safety-3.2.4-py3-none-any.whl", hash = "sha256:242ff7ae448d7fb2ea455c90f44e3f2ca45be9c8559b2fe9dfc89617164a0f17"},
{file = "safety-3.2.4.tar.gz", hash = "sha256:bac0202016d736a2118057964a0e3983fa20ff2563fd103cac3f3ac1ed3fea11"},
{file = "safety-3.2.7-py3-none-any.whl", hash = "sha256:7f111b689901f2e65f7dd524e4c84158c368bcbbdbb38cbbb06ba1249250da11"},
{file = "safety-3.2.7.tar.gz", hash = "sha256:cd6093b82d84e85815f037334f1b83ed89d31f767b7b324bfed6228c434f2667"},
]
[package.dependencies]
@@ -4245,11 +4308,12 @@ filelock = ">=3.12.2,<3.13.0"
jinja2 = ">=3.1.0"
marshmallow = ">=3.15.0"
packaging = ">=21.0"
psutil = ">=6.0.0,<6.1.0"
pydantic = ">=1.10.12"
requests = "*"
rich = "*"
"ruamel.yaml" = ">=0.17.21"
safety-schemas = ">=0.0.2"
safety-schemas = ">=0.0.4"
setuptools = ">=65.5.1"
typer = "*"
typing-extensions = ">=4.7.1"
@@ -4262,13 +4326,13 @@ spdx = ["spdx-tools (>=0.8.2)"]
[[package]]
name = "safety-schemas"
version = "0.0.2"
version = "0.0.4"
description = "Schemas for Safety tools"
optional = false
python-versions = ">=3.7"
files = [
{file = "safety_schemas-0.0.2-py3-none-any.whl", hash = "sha256:277c077ce6e53221874a87c29515ffdd2f3773a6db4d035a9f67cc98db3b8c7f"},
{file = "safety_schemas-0.0.2.tar.gz", hash = "sha256:7d1b040ec06480f05cff6b45ea7a93e09c8942df864fb0d01ddeb67c323cfa8c"},
{file = "safety_schemas-0.0.4-py3-none-any.whl", hash = "sha256:b8b93e447bbffe62e4bd4364877f8ac0dc9688056911b2618d6f48773f9c9011"},
{file = "safety_schemas-0.0.4.tar.gz", hash = "sha256:5ec6a8e2a80620a829a9d236165cce9d9e864b0345345d1fc983397eb5d2ac65"},
]
[package.dependencies]
@@ -4882,20 +4946,20 @@ multidict = ">=4.0"
[[package]]
name = "zipp"
version = "3.19.1"
version = "3.20.0"
description = "Backport of pathlib-compatible object wrapper for zip files"
optional = false
python-versions = ">=3.8"
files = [
{file = "zipp-3.19.1-py3-none-any.whl", hash = "sha256:2828e64edb5386ea6a52e7ba7cdb17bb30a73a858f5eb6eb93d8d36f5ea26091"},
{file = "zipp-3.19.1.tar.gz", hash = "sha256:35427f6d5594f4acf82d25541438348c26736fa9b3afa2754bcd63cdb99d8e8f"},
{file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"},
{file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"},
]
[package.extras]
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.9,<3.13"
content-hash = "324ea427d651cea1513f4a7be9b86f420eb75efcd4e54e7021835e517cd81525"
content-hash = "62d82a6d2b81429a749a20512c19836193bcf10aa9dddda5b3300535789d9e7d"

View File

@@ -19,8 +19,11 @@ Mutelist:
- "StackSet-AWSControlTowerSecurityResources-*"
- "StackSet-AWSControlTowerLoggingResources-*"
- "StackSet-AWSControlTowerExecutionRole-*"
- "AWSControlTowerBP-BASELINE-CLOUDTRAIL-MASTER"
- "AWSControlTowerBP-BASELINE-CONFIG-MASTER"
- "AWSControlTowerBP-BASELINE-CLOUDTRAIL-MASTER*"
- "AWSControlTowerBP-BASELINE-CONFIG-MASTER*"
- "StackSet-AWSControlTower*"
- "CLOUDTRAIL-ENABLED-ON-SHARED-ACCOUNTS-*"
- "AFT-Backend*"
"cloudtrail_*":
Regions:
- "*"

View File

@@ -11,7 +11,7 @@ from prowler.lib.logger import logger
timestamp = datetime.today()
timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
prowler_version = "4.3.4"
prowler_version = "4.4.0"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
square_logo_img = "https://prowler.com/wp-content/uploads/logo-html.png"
aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"

View File

@@ -41,6 +41,21 @@ aws:
[
"amazon-elb"
]
# aws.ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports
ec2_sg_high_risk_ports:
[
25,
110,
135,
143,
445,
3000,
4333,
5000,
5500,
8080,
8088,
]
# AWS VPC Configuration (vpc_endpoint_connections_trust_boundaries, vpc_endpoint_services_allowed_principals_trust_boundaries)
# Single account environment: No action required. The AWS account number will be automatically added by the checks.
@@ -271,6 +286,11 @@ aws:
# AWS ACM Configuration
# aws.acm_certificates_expiration_check
days_to_expire_threshold: 7
# aws.acm_certificates_rsa_key_length
insecure_key_algorithms:
[
"RSA-1024",
]
# AWS EKS Configuration
# aws.eks_control_plane_logging_all_types_enabled
@@ -284,6 +304,33 @@ aws:
"scheduler",
]
# aws.eks_cluster_uses_a_supported_version
# EKS clusters must be version 1.28 or higher
eks_cluster_oldest_version_supported: "1.28"
# AWS CodeBuild Configuration
# aws.codebuild_project_no_secrets_in_variables
# CodeBuild sensitive variables that are excluded from the check
excluded_sensitive_environment_variables:
[
]
# AWS ELB Configuration
# aws.elb_is_in_multiple_az
# Minimum number of Availability Zones that a CLB must be in
elb_min_azs: 2
# AWS ELBv2 Configuration
# aws.elbv2_is_in_multiple_az
# Minimum number of Availability Zones that an ELBv2 must be in
elbv2_min_azs: 2
# Known secrets to ignore on detection
# this will include a list of regex patterns to ignore on detection
secrets_ignore_patterns: []
# Azure Configuration
azure:
# Azure Network Configuration

View File

View File

@@ -0,0 +1,55 @@
class ProwlerException(Exception):
    """Base exception for all Prowler SDK errors.

    Subclasses declare their own ``ERROR_CODES`` mapping keyed by
    ``(code, class_name)`` so each concrete exception resolves its
    message/remediation pair at construction time.
    """

    ERROR_CODES = {
        (1901, "UnexpectedError"): {
            "message": "Unexpected error occurred.",
            "remediation": "Please review the error message and try again.",
            "file": "{file}",
            "provider": "{provider}",
        }
    }

    def __init__(
        self, code, provider=None, file=None, original_exception=None, error_info=None
    ):
        """
        Initialize the ProwlerException class.

        Args:
            code (int): The error code.
            provider (str): The provider name.
            file (str): The file name.
            original_exception (Exception): The original exception.
            error_info (dict): The error information; when omitted it is looked
                up in ``ERROR_CODES`` by ``(code, class name)``.

        Example:
            A ProwlerException is raised with the following parameters and format:
            >>> original_exception = Exception("Error occurred.")
            ProwlerException(1901, "AWS", "file.txt", original_exception)
            >>> [1901] Unexpected error occurred. - Exception: Error occurred.
        """
        self.code = code
        self.provider = provider
        self.file = file
        if error_info is None:
            # Fall back to a generic entry when the (code, class) pair is not
            # registered so construction never raises AttributeError on .get().
            error_info = self.ERROR_CODES.get(
                (code, self.__class__.__name__),
                {
                    "message": "Unknown error occurred.",
                    "remediation": "Please review the error message and try again.",
                },
            )
        self.message = error_info.get("message")
        self.remediation = error_info.get("remediation")
        self.original_exception = original_exception
        # Format -> [code] message - original_exception
        if original_exception is None:
            super().__init__(f"[{self.code}] {self.message}")
        else:
            super().__init__(
                f"[{self.code}] {self.message} - {self.original_exception}"
            )

    def __str__(self):
        """Return ClassName[code]: message, plus the original exception if any."""
        # Keep the suffix consistent with the args format: omit it when there
        # is no wrapped exception instead of printing a literal "None".
        base = f"{self.__class__.__name__}[{self.code}]: {self.message}"
        if self.original_exception is None:
            return base
        return f"{base} - {self.original_exception}"
class UnexpectedError(ProwlerException):
    """Raised for failures that do not map to a more specific Prowler error."""

    def __init__(self, provider, file, original_exception=None):
        # Error code 1901 selects the "UnexpectedError" entry in ERROR_CODES.
        super().__init__(
            1901,
            provider=provider,
            file=file,
            original_exception=original_exception,
        )

View File

@@ -17,7 +17,7 @@ import prowler
from prowler.config.config import orange_color
from prowler.lib.check.compliance_models import load_compliance_framework
from prowler.lib.check.custom_checks_metadata import update_check_metadata
from prowler.lib.check.models import Check, load_check_metadata
from prowler.lib.check.models import Check, CheckMetadata, load_check_metadata
from prowler.lib.logger import logger
from prowler.lib.outputs.outputs import report
from prowler.lib.utils.utils import open_file, parse_json_file, print_boxes
@@ -25,7 +25,15 @@ from prowler.providers.common.models import Audit_Metadata
# Load all checks metadata
def bulk_load_checks_metadata(provider: str) -> dict:
def bulk_load_checks_metadata(provider: str) -> dict[str, CheckMetadata]:
"""
Load the metadata of all checks for a given provider reading the check's metadata files.
Args:
provider (str): The name of the provider.
Returns:
dict[str, CheckMetadata]: A dictionary containing the metadata of all checks, with the CheckID as the key.
"""
bulk_check_metadata = {}
checks = recover_checks_from_provider(provider)
# Build list of check's metadata files
@@ -593,12 +601,17 @@ def execute_checks(
service,
check_name,
global_provider,
services_executed,
checks_executed,
custom_checks_metadata,
)
all_findings.extend(check_findings)
# Update Audit Status
services_executed.add(service)
checks_executed.add(check_name)
global_provider.audit_metadata = update_audit_metadata(
global_provider.audit_metadata, services_executed, checks_executed
)
# If check does not exists in the provider or is from another provider
except ModuleNotFoundError:
logger.error(
@@ -651,11 +664,16 @@ def execute_checks(
service,
check_name,
global_provider,
services_executed,
checks_executed,
custom_checks_metadata,
)
all_findings.extend(check_findings)
services_executed.add(service)
checks_executed.add(check_name)
global_provider.audit_metadata = update_audit_metadata(
global_provider.audit_metadata,
services_executed,
checks_executed,
)
# If check does not exists in the provider or is from another provider
except ModuleNotFoundError:
@@ -677,8 +695,6 @@ def execute(
service: str,
check_name: str,
global_provider: Any,
services_executed: set,
checks_executed: set,
custom_checks_metadata: Any,
):
try:
@@ -714,13 +730,6 @@ def execute(
if finding.status in global_provider.output_options.status
]
# Update Audit Status
services_executed.add(service)
checks_executed.add(check_name)
global_provider.audit_metadata = update_audit_metadata(
global_provider.audit_metadata, services_executed, checks_executed
)
# Mutelist findings
if hasattr(global_provider, "mutelist") and global_provider.mutelist.mutelist:
# TODO: make this prettier

View File

@@ -1,6 +1,6 @@
import sys
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.logger import logger
@@ -26,8 +26,8 @@ def update_checks_metadata_with_compliance(
if check in requirement.Checks:
# Include the requirement into the check's framework requirements
compliance_requirements.append(requirement)
# Create the Compliance_Model
compliance = ComplianceBaseModel(
# Create the Compliance
compliance = Compliance(
Framework=framework.Framework,
Provider=framework.Provider,
Version=framework.Version,

View File

@@ -188,8 +188,8 @@ class Compliance_Requirement(BaseModel):
Checks: list[str]
class ComplianceBaseModel(BaseModel):
"""ComplianceBaseModel holds the base model for every compliance framework"""
class Compliance(BaseModel):
"""Compliance holds the base model for every compliance framework"""
Framework: str
Provider: str
@@ -213,16 +213,107 @@ class ComplianceBaseModel(BaseModel):
raise ValueError("Framework or Provider must not be empty")
return values
@staticmethod
def list(bulk_compliance_frameworks: dict, provider: str = None) -> list[str]:
"""
Returns a list of compliance frameworks from bulk compliance frameworks
Args:
bulk_compliance_frameworks (dict): The bulk compliance frameworks
provider (str): The provider name
Returns:
list: The list of compliance frameworks
"""
if provider:
compliance_frameworks = [
compliance_framework
for compliance_framework in bulk_compliance_frameworks.keys()
if provider in compliance_framework
]
else:
compliance_frameworks = [
compliance_framework
for compliance_framework in bulk_compliance_frameworks.keys()
]
return compliance_frameworks
@staticmethod
def get(
bulk_compliance_frameworks: dict, compliance_framework_name: str
) -> "Compliance":
"""
Returns a compliance framework from bulk compliance frameworks
Args:
bulk_compliance_frameworks (dict): The bulk compliance frameworks
compliance_framework_name (str): The compliance framework name
Returns:
Compliance: The compliance framework
"""
return bulk_compliance_frameworks.get(compliance_framework_name, None)
@staticmethod
def list_requirements(
bulk_compliance_frameworks: dict, compliance_framework: str = None
) -> list:
"""
Returns a list of compliance requirements from a compliance framework
Args:
bulk_compliance_frameworks (dict): The bulk compliance frameworks
compliance_framework (str): The compliance framework name
Returns:
list: The list of compliance requirements for the provided compliance framework
"""
compliance_requirements = []
if bulk_compliance_frameworks and compliance_framework:
compliance_requirements = [
compliance_requirement.Id
for compliance_requirement in bulk_compliance_frameworks.get(
compliance_framework
).Requirements
]
return compliance_requirements
@staticmethod
def get_requirement(
bulk_compliance_frameworks: dict, compliance_framework: str, requirement_id: str
) -> Union[Mitre_Requirement, Compliance_Requirement]:
"""
Returns a compliance requirement from a compliance framework
Args:
bulk_compliance_frameworks (dict): The bulk compliance frameworks
compliance_framework (str): The compliance framework name
requirement_id (str): The compliance requirement ID
Returns:
Mitre_Requirement | Compliance_Requirement: The compliance requirement
"""
requirement = None
for compliance_requirement in bulk_compliance_frameworks.get(
compliance_framework
).Requirements:
if compliance_requirement.Id == requirement_id:
requirement = compliance_requirement
break
return requirement
# Testing Pending
def load_compliance_framework(
compliance_specification_file: str,
) -> ComplianceBaseModel:
) -> Compliance:
"""load_compliance_framework loads and parse a Compliance Framework Specification"""
try:
compliance_framework = ComplianceBaseModel.parse_file(
compliance_specification_file
)
compliance_framework = Compliance.parse_file(compliance_specification_file)
except ValidationError as error:
logger.critical(
f"Compliance Framework Specification from {compliance_specification_file} is not valid: {error}"

View File

@@ -11,7 +11,15 @@ from prowler.lib.logger import logger
class Code(BaseModel):
"""Check's remediation information using IaC like CloudFormation, Terraform or the native CLI"""
"""
Represents the remediation code using IaC like CloudFormation, Terraform or the native CLI.
Attributes:
NativeIaC (str): The NativeIaC code.
Terraform (str): The Terraform code.
CLI (str): The CLI code.
Other (str): Other code.
"""
NativeIaC: str
Terraform: str
@@ -20,21 +28,61 @@ class Code(BaseModel):
class Recommendation(BaseModel):
"""Check's recommendation information"""
"""
Represents a recommendation.
Attributes:
Text (str): The text of the recommendation.
Url (str): The URL associated with the recommendation.
"""
Text: str
Url: str
class Remediation(BaseModel):
"""Check's remediation: Code and Recommendation"""
"""
Represents a remediation action for a specific check.
Attributes:
Code (Code): The code associated with the remediation action.
Recommendation (Recommendation): The recommendation for the remediation action.
"""
Code: Code
Recommendation: Recommendation
class Check_Metadata_Model(BaseModel):
"""Check Metadata Model"""
class CheckMetadata(BaseModel):
"""
Model representing the metadata of a check.
Attributes:
Provider (str): The provider of the check.
CheckID (str): The ID of the check.
CheckTitle (str): The title of the check.
CheckType (list[str]): The type of the check.
CheckAliases (list[str], optional): The aliases of the check. Defaults to an empty list.
ServiceName (str): The name of the service.
SubServiceName (str): The name of the sub-service.
ResourceIdTemplate (str): The template for the resource ID.
Severity (str): The severity of the check.
ResourceType (str): The type of the resource.
Description (str): The description of the check.
Risk (str): The risk associated with the check.
RelatedUrl (str): The URL related to the check.
Remediation (Remediation): The remediation steps for the check.
Categories (list[str]): The categories of the check.
DependsOn (list[str]): The dependencies of the check.
RelatedTo (list[str]): The related checks.
Notes (str): Additional notes for the check.
Compliance (list, optional): The compliance information for the check. Defaults to None.
Validators:
valid_category(value): Validator function to validate the categories of the check.
severity_to_lower(severity): Validator function to convert the severity to lowercase.
valid_severity(severity): Validator function to validate the severity of the check.
"""
Provider: str
CheckID: str
@@ -82,7 +130,7 @@ class Check_Metadata_Model(BaseModel):
return severity
class Check(ABC, Check_Metadata_Model):
class Check(ABC, CheckMetadata):
"""Prowler Check"""
def __init__(self, **data):
@@ -93,7 +141,7 @@ class Check(ABC, Check_Metadata_Model):
+ ".metadata.json"
)
# Store it to validate them with Pydantic
data = Check_Metadata_Model.parse_file(metadata_file).dict()
data = CheckMetadata.parse_file(metadata_file).dict()
# Calls parents init function
super().__init__(**data)
# TODO: verify that the CheckID is the same as the filename and classname
@@ -114,14 +162,14 @@ class Check_Report:
status: str
status_extended: str
check_metadata: Check_Metadata_Model
check_metadata: CheckMetadata
resource_details: str
resource_tags: list
muted: bool
def __init__(self, metadata):
self.status = ""
self.check_metadata = Check_Metadata_Model.parse_raw(metadata)
self.check_metadata = CheckMetadata.parse_raw(metadata)
self.status_extended = ""
self.resource_details = ""
self.resource_tags = []
@@ -194,12 +242,22 @@ class Check_Report_Kubernetes(Check_Report):
# Testing Pending
def load_check_metadata(metadata_file: str) -> Check_Metadata_Model:
"""load_check_metadata loads and parse a Check's metadata file"""
def load_check_metadata(metadata_file: str) -> CheckMetadata:
"""
Load check metadata from a file.
Args:
metadata_file (str): The path to the metadata file.
Returns:
CheckMetadata: The loaded check metadata.
Raises:
ValidationError: If the metadata file is not valid.
"""
try:
check_metadata = Check_Metadata_Model.parse_file(metadata_file)
check_metadata = CheckMetadata.parse_file(metadata_file)
except ValidationError as error:
logger.critical(f"Metadata from {metadata_file} is not valid: {error}")
# TODO: remove this exit and raise an exception
sys.exit(1)
else:
return check_metadata

View File

@@ -314,6 +314,9 @@ class Mutelist(ABC):
Args:
matched_items (list): List of items to be matched.
finding_items (str): String to search for matched items.
tag (bool): If True the search uses tag-matching logic, since tags can be ANDed or ORed:
- AND logic -> True only if all the tags are present in the finding.
- OR logic -> True if any of the tags is present in the finding.
Returns:
bool: True if any of the matched_items are present in finding_items, otherwise False.

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.aws_well_architected.models import (
AWSWellArchitectedModel,
)
@@ -21,7 +21,7 @@ class AWSWellArchitected(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -29,7 +29,7 @@ class AWSWellArchitected(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.cis.models import AWSCISModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.finding import Finding
@@ -19,7 +19,7 @@ class AWSCIS(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -27,7 +27,7 @@ class AWSCIS(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.cis.models import AzureCISModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.finding import Finding
@@ -19,7 +19,7 @@ class AzureCIS(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -27,7 +27,7 @@ class AzureCIS(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.cis.models import GCPCISModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.finding import Finding
@@ -19,7 +19,7 @@ class GCPCIS(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -27,7 +27,7 @@ class GCPCIS(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:

View File

@@ -1,6 +1,6 @@
from datetime import datetime
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.cis.models import KubernetesCISModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.finding import Finding
@@ -21,7 +21,7 @@ class KubernetesCIS(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -29,7 +29,7 @@ class KubernetesCIS(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:

View File

@@ -2,7 +2,7 @@ from csv import DictWriter
from pathlib import Path
from typing import List
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.logger import logger
from prowler.lib.outputs.finding import Finding
from prowler.lib.outputs.output import Output
@@ -28,7 +28,7 @@ class ComplianceOutput(Output):
def __init__(
self,
findings: List[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
create_file_descriptor: bool = False,
file_path: str = None,
file_extension: str = "",

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.ens.models import AWSENSModel
from prowler.lib.outputs.finding import Finding
@@ -19,7 +19,7 @@ class AWSENS(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -27,7 +27,7 @@ class AWSENS(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.generic.models import GenericComplianceModel
from prowler.lib.outputs.finding import Finding
@@ -19,7 +19,7 @@ class GenericCompliance(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -27,7 +27,7 @@ class GenericCompliance(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.iso27001.models import AWSISO27001Model
from prowler.lib.outputs.finding import Finding
@@ -19,7 +19,7 @@ class AWSISO27001(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -27,7 +27,7 @@ class AWSISO27001(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.mitre_attack.models import AWSMitreAttackModel
from prowler.lib.outputs.finding import Finding
@@ -20,7 +20,7 @@ class AWSMitreAttack(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -28,7 +28,7 @@ class AWSMitreAttack(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.mitre_attack.models import AzureMitreAttackModel
from prowler.lib.outputs.finding import Finding
@@ -20,7 +20,7 @@ class AzureMitreAttack(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -28,7 +28,7 @@ class AzureMitreAttack(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.mitre_attack.models import GCPMitreAttackModel
from prowler.lib.outputs.finding import Finding
@@ -20,7 +20,7 @@ class GCPMitreAttack(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -28,7 +28,7 @@ class GCPMitreAttack(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:

View File

@@ -173,9 +173,15 @@ class HTML(Output):
<li class="list-group-item">
<b>Passed:</b> {str(stats.get("total_pass", 0))}
</li>
<li class="list-group-item">
<b>Passed (Muted):</b> {str(stats.get("total_muted_pass", 0))}
</li>
<li class="list-group-item">
<b>Failed:</b> {str(stats.get("total_fail", 0))}
</li>
<li class="list-group-item">
<b>Failed (Muted):</b> {str(stats.get("total_muted_fail", 0))}
</li>
<li class="list-group-item">
<b>Total Resources:</b> {str(stats.get("resources_count", 0))}
</li>

View File

@@ -72,6 +72,7 @@ class OCSF(Output):
title=finding.check_title,
uid=finding.finding_uid,
product_uid="prowler",
types=[finding.check_type],
),
event_time=finding.timestamp,
remediation=Remediation(
@@ -120,7 +121,6 @@ class OCSF(Output):
type_uid=DetectionFindingTypeID.Create,
type_name=DetectionFindingTypeID.Create.name,
unmapped={
"check_type": finding.check_type,
"related_url": finding.related_url,
"categories": finding.categories,
"depends_on": finding.depends_on,
@@ -220,8 +220,12 @@ class OCSF(Output):
StatusID: The StatusID based on the status and muted values
"""
status_id = StatusID.Other
if status == "FAIL":
if status == "PASS":
status_id = StatusID.Resolved
elif status == "FAIL":
status_id = StatusID.New
if muted:
status_id = StatusID.Suppressed
return status_id

View File

@@ -91,6 +91,8 @@ def extract_findings_statistics(findings: list) -> dict:
stats = {}
total_pass = 0
total_fail = 0
muted_pass = 0
muted_fail = 0
resources = set()
findings_count = 0
all_fails_are_muted = True
@@ -98,17 +100,25 @@ def extract_findings_statistics(findings: list) -> dict:
for finding in findings:
# Save the resource_id
resources.add(finding.resource_id)
if finding.status == "PASS":
total_pass += 1
findings_count += 1
if finding.muted is True:
muted_pass += 1
if finding.status == "FAIL":
total_fail += 1
findings_count += 1
if finding.muted is True:
muted_fail += 1
if not finding.muted and all_fails_are_muted:
all_fails_are_muted = False
stats["total_pass"] = total_pass
stats["total_muted_pass"] = muted_pass
stats["total_fail"] = total_fail
stats["total_muted_fail"] = muted_fail
stats["resources_count"] = len(resources)
stats["findings_count"] = findings_count
stats["all_fails_are_muted"] = all_fails_are_muted

View File

@@ -52,6 +52,14 @@ def unroll_tags(tags: list) -> dict:
>>> unroll_tags(tags)
{'name': 'John', 'age': '30'}
>>> tags = [{"key": "name"}]
>>> unroll_tags(tags)
{'name': ''}
>>> tags = [{"Key": "name"}]
>>> unroll_tags(tags)
{'name': ''}
>>> tags = [{"name": "John", "age": "30"}]
>>> unroll_tags(tags)
{'name': 'John', 'age': '30'}
@@ -74,9 +82,9 @@ def unroll_tags(tags: list) -> dict:
if isinstance(tags[0], str) and len(tags) > 0:
return {tag: "" for tag in tags}
if "key" in tags[0]:
return {item["key"]: item["value"] for item in tags}
return {item["key"]: item.get("value", "") for item in tags}
elif "Key" in tags[0]:
return {item["Key"]: item["Value"] for item in tags}
return {item["Key"]: item.get("Value", "") for item in tags}
else:
return {key: value for d in tags for key, value in d.items()}
return {}

View File

@@ -1,60 +1,202 @@
from typing import Any
from typing import Generator
from prowler.lib.check.check import execute
from prowler.lib.check.models import Check_Report
from prowler.lib.check.check import execute, update_audit_metadata
from prowler.lib.logger import logger
from prowler.lib.outputs.finding import Finding
from prowler.providers.common.models import Audit_Metadata
from prowler.providers.common.provider import Provider
def scan(
checks_to_execute: list,
global_provider: Any,
custom_checks_metadata: Any,
) -> list[Check_Report]:
try:
# List to store all the check's findings
all_findings = []
# Services and checks executed for the Audit Status
services_executed = set()
checks_executed = set()
class Scan:
_provider: Provider
# Refactor(Core): This should replace the Audit_Metadata
_number_of_checks_to_execute: int = 0
_number_of_checks_completed: int = 0
# TODO the str should be a set of Check objects
_checks_to_execute: list[str]
_service_checks_to_execute: dict[str, set[str]]
_service_checks_completed: dict[str, set[str]]
_progress: float = 0.0
_findings: list = []
# Initialize the Audit Metadata
# TODO: this should be done in the provider class
# Refactor(Core): Audit manager?
global_provider.audit_metadata = Audit_Metadata(
services_scanned=0,
expected_checks=checks_to_execute,
completed_checks=0,
audit_progress=0,
def __init__(self, provider: Provider, checks_to_execute: list[str]):
"""
Scan is the class that executes the checks and yields the progress and the findings.
Params:
provider: Provider -> The provider to scan
checks_to_execute: list[str] -> The checks to execute
"""
self._provider = provider
# Remove duplicated checks and sort them
self._checks_to_execute = sorted(list(set(checks_to_execute)))
self._number_of_checks_to_execute = len(checks_to_execute)
service_checks_to_execute = get_service_checks_to_execute(checks_to_execute)
service_checks_completed = dict()
self._service_checks_to_execute = service_checks_to_execute
self._service_checks_completed = service_checks_completed
@property
def checks_to_execute(self) -> set[str]:
return self._checks_to_execute
@property
def service_checks_to_execute(self) -> dict[str, set[str]]:
return self._service_checks_to_execute
@property
def service_checks_completed(self) -> dict[str, set[str]]:
return self._service_checks_completed
@property
def provider(self) -> Provider:
return self._provider
@property
def progress(self) -> float:
return (
self._number_of_checks_completed / self._number_of_checks_to_execute * 100
)
for check_name in checks_to_execute:
try:
# Recover service from check name
service = check_name.split("_")[0]
@property
def findings(self) -> list:
return self._findings
check_findings = execute(
service,
check_name,
global_provider,
services_executed,
checks_executed,
custom_checks_metadata,
)
all_findings.extend(check_findings)
def scan(
self,
custom_checks_metadata: dict = {},
) -> Generator[tuple[float, list[Finding]], None, None]:
"""
Executes the scan by iterating over the checks to execute and executing each check.
Yields the progress and findings for each check.
# If check does not exists in the provider or is from another provider
except ModuleNotFoundError:
logger.error(
f"Check '{check_name}' was not found for the {global_provider.type.upper()} provider"
)
except Exception as error:
logger.error(
f"{check_name} - {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
f"{check_name} - {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
Args:
custom_checks_metadata (dict): Custom metadata for the checks (default: {}).
return all_findings
Yields:
Tuple[float, list[Finding]]: A tuple containing the progress and findings for each check.
Raises:
ModuleNotFoundError: If the check does not exist in the provider or is from another provider.
Exception: If any other error occurs during the execution of a check.
"""
try:
checks_to_execute = self.checks_to_execute
# Initialize the Audit Metadata
# TODO: this should be done in the provider class
# Refactor(Core): Audit manager?
self._provider.audit_metadata = Audit_Metadata(
services_scanned=0,
expected_checks=checks_to_execute,
completed_checks=0,
audit_progress=0,
)
for check_name in checks_to_execute:
try:
# Recover service from check name
service = get_service_name_from_check_name(check_name)
# Execute the check
check_findings = execute(
service,
check_name,
self._provider,
custom_checks_metadata,
)
# Store findings
self._findings.extend(check_findings)
# Remove the executed check
self._service_checks_to_execute[service].remove(check_name)
if len(self._service_checks_to_execute[service]) == 0:
self._service_checks_to_execute.pop(service, None)
# Add the completed check
if service not in self._service_checks_completed:
self._service_checks_completed[service] = set()
self._service_checks_completed[service].add(check_name)
self._number_of_checks_completed += 1
# This should be done just once all the service's checks are completed
# This metadata needs to get to the services not within the provider
# since it is present in the Scan class
self._provider.audit_metadata = update_audit_metadata(
self._provider.audit_metadata,
self.get_completed_services(),
self.get_completed_checks(),
)
findings = [
Finding.generate_output(self._provider, finding)
for finding in check_findings
]
yield self.progress, findings
# If check does not exists in the provider or is from another provider
except ModuleNotFoundError:
logger.error(
f"Check '{check_name}' was not found for the {self._provider.type.upper()} provider"
)
except Exception as error:
logger.error(
f"{check_name} - {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
f"{check_name} - {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def get_completed_services(self) -> set[str]:
"""
get_completed_services returns the services that have been completed.
Example:
get_completed_services() -> {"ec2", "s3"}
"""
return self._service_checks_completed.keys()
def get_completed_checks(self) -> set[str]:
"""
get_completed_checks returns the checks that have been completed.
Example:
get_completed_checks() -> {"ec2_instance_public_ip", "s3_bucket_public"}
"""
completed_checks = set()
for checks in self._service_checks_completed.values():
completed_checks.update(checks)
return completed_checks
def get_service_name_from_check_name(check_name: str) -> str:
"""
get_service_name_from_check_name returns the service name for a given check name.
Example:
get_service_name_from_check_name("ec2_instance_public") -> "ec2"
"""
return check_name.split("_")[0]
def get_service_checks_to_execute(checks_to_execute: set[str]) -> dict[str, set[str]]:
"""
get_service_checks_to_execute returns a dictionary with the services and the checks to execute.
Example:
get_service_checks_to_execute({"accessanalyzer_enabled", "ec2_instance_public_ip"})
-> {"accessanalyzer": {"accessanalyzer_enabled"}, "ec2": {"ec2_instance_public_ip"}}
"""
service_checks_to_execute = dict()
for check in checks_to_execute:
# check -> accessanalyzer_enabled
# service -> accessanalyzer
service = get_service_name_from_check_name(check)
if service not in service_checks_to_execute:
service_checks_to_execute[service] = set()
service_checks_to_execute[service].add(check)
return service_checks_to_execute

View File

@@ -20,7 +20,7 @@ from typing import Optional
from colorama import Style
from detect_secrets import SecretsCollection
from detect_secrets.settings import default_settings
from detect_secrets.settings import transient_settings
from prowler.config.config import encoding_format_utf_8
from prowler.lib.logger import logger
@@ -80,20 +80,96 @@ def hash_sha512(string: str) -> str:
return sha512(string.encode(encoding_format_utf_8)).hexdigest()[0:9]
def detect_secrets_scan(data):
temp_data_file = tempfile.NamedTemporaryFile(delete=False)
temp_data_file.write(bytes(data, encoding="raw_unicode_escape"))
temp_data_file.close()
def detect_secrets_scan(
data: str = None, file=None, excluded_secrets: list[str] = None
) -> list[dict[str, str]]:
"""detect_secrets_scan scans the data or file for secrets using the detect-secrets library.
Args:
data (str): The data to scan for secrets.
file (str): The file to scan for secrets.
excluded_secrets (list): A list of regex patterns to exclude from the scan.
Returns:
dict: The secrets found in the
Raises:
Exception: If an error occurs during the scan.
Examples:
>>> detect_secrets_scan(data="password=password")
[{'filename': 'data', 'hashed_secret': 'f7c3bc1d808e04732adf679965ccc34ca7ae3441', 'is_verified': False, 'line_number': 1, 'type': 'Secret Keyword'}]
>>> detect_secrets_scan(file="file.txt")
{'file.txt': [{'filename': 'file.txt', 'hashed_secret': 'f7c3bc1d808e04732adf679965ccc34ca7ae3441', 'is_verified': False, 'line_number': 1, 'type': 'Secret Keyword'}]}
"""
try:
if not file:
temp_data_file = tempfile.NamedTemporaryFile(delete=False)
temp_data_file.write(bytes(data, encoding="raw_unicode_escape"))
temp_data_file.close()
secrets = SecretsCollection()
with default_settings():
secrets.scan_file(temp_data_file.name)
os.remove(temp_data_file.name)
secrets = SecretsCollection()
detect_secrets_output = secrets.json()
if detect_secrets_output:
return detect_secrets_output[temp_data_file.name]
else:
settings = {
"plugins_used": [
{"name": "ArtifactoryDetector"},
{"name": "AWSKeyDetector"},
{"name": "AzureStorageKeyDetector"},
{"name": "BasicAuthDetector"},
{"name": "CloudantDetector"},
{"name": "DiscordBotTokenDetector"},
{"name": "GitHubTokenDetector"},
{"name": "GitLabTokenDetector"},
{"name": "Base64HighEntropyString", "limit": 6.0},
{"name": "HexHighEntropyString", "limit": 3.0},
{"name": "IbmCloudIamDetector"},
{"name": "IbmCosHmacDetector"},
{"name": "IPPublicDetector"},
{"name": "JwtTokenDetector"},
{"name": "KeywordDetector"},
{"name": "MailchimpDetector"},
{"name": "NpmDetector"},
{"name": "OpenAIDetector"},
{"name": "PrivateKeyDetector"},
{"name": "PypiTokenDetector"},
{"name": "SendGridDetector"},
{"name": "SlackDetector"},
{"name": "SoftlayerDetector"},
{"name": "SquareOAuthDetector"},
{"name": "StripeDetector"},
# {"name": "TelegramBotTokenDetector"}, https://github.com/Yelp/detect-secrets/pull/878
{"name": "TwilioKeyDetector"},
],
"filters_used": [
{"path": "detect_secrets.filters.common.is_invalid_file"},
{"path": "detect_secrets.filters.common.is_known_false_positive"},
{"path": "detect_secrets.filters.heuristic.is_likely_id_string"},
{"path": "detect_secrets.filters.heuristic.is_potential_secret"},
],
}
if excluded_secrets and len(excluded_secrets) > 0:
settings["filters_used"].append(
{
"path": "detect_secrets.filters.regex.should_exclude_line",
"pattern": excluded_secrets,
}
)
with transient_settings(settings):
if file:
secrets.scan_file(file)
else:
secrets.scan_file(temp_data_file.name)
if not file:
os.remove(temp_data_file.name)
detect_secrets_output = secrets.json()
if detect_secrets_output:
if file:
return detect_secrets_output[file]
else:
return detect_secrets_output[temp_data_file.name]
else:
return None
except Exception as e:
logger.error(f"Error scanning for secrets: {e}")
return None

View File

@@ -1,14 +1,14 @@
import os
import pathlib
import sys
from argparse import Namespace
from datetime import datetime
from re import fullmatch
from boto3 import client, session
from boto3 import client
from boto3.session import Session
from botocore.config import Config
from botocore.credentials import RefreshableCredentials
from botocore.session import get_session
from botocore.exceptions import ClientError, NoCredentialsError, ProfileNotFound
from botocore.session import Session as BotocoreSession
from colorama import Fore, Style
from pytz import utc
from tzlocal import get_localzone
@@ -28,6 +28,20 @@ from prowler.providers.aws.config import (
BOTO3_USER_AGENT_EXTRA,
ROLE_SESSION_NAME,
)
from prowler.providers.aws.exceptions.exceptions import (
AWSArgumentTypeValidationError,
AWSAssumeRoleError,
AWSClientError,
AWSIAMRoleARNEmptyResource,
AWSIAMRoleARNInvalidAccountID,
AWSIAMRoleARNInvalidResourceType,
AWSIAMRoleARNPartitionEmpty,
AWSIAMRoleARNRegionNotEmtpy,
AWSIAMRoleARNServiceNotIAMnorSTS,
AWSNoCredentialsError,
AWSProfileNotFoundError,
AWSSetUpSessionError,
)
from prowler.providers.aws.lib.arn.arn import parse_iam_credentials_arn
from prowler.providers.aws.lib.arn.models import ARN
from prowler.providers.aws.lib.mutelist.mutelist import AWSMutelist
@@ -46,7 +60,7 @@ from prowler.providers.aws.models import (
AWSOutputOptions,
AWSSession,
)
from prowler.providers.common.models import Audit_Metadata
from prowler.providers.common.models import Audit_Metadata, Connection
from prowler.providers.common.provider import Provider
@@ -63,34 +77,57 @@ class AwsProvider(Provider):
# TODO: this is not optional, enforce for all providers
audit_metadata: Audit_Metadata
def __init__(self, arguments: Namespace):
def __init__(
self,
retries_max_attempts: int = 3,
role_arn: str = None,
session_duration: int = None,
external_id: str = None,
role_session_name: str = None,
mfa: bool = None,
profile: str = None,
regions: set = set(),
organizations_role_arn: str = None,
scan_unused_services: bool = None,
resource_tags: list[str] = [],
resource_arn: list[str] = [],
config_file: str = None,
fixer_config: str = None,
):
"""
Initializes the AWS provider.
Arguments:
- retries_max_attempts: The maximum number of retries for the AWS client.
- role_arn: The ARN of the IAM role to assume.
- session_duration: The duration of the session in seconds.
- external_id: The external ID to use when assuming the IAM role.
- role_session_name: The name of the session when assuming the IAM role.
- mfa: A boolean indicating whether MFA is enabled.
- profile: The name of the AWS CLI profile to use.
- regions: A set of regions to audit.
- organizations_role_arn: The ARN of the AWS Organizations IAM role to assume.
- scan_unused_services: A boolean indicating whether to scan unused services.
- resource_tags: A list of tags to filter the resources to audit.
- resource_arn: A list of ARNs of the resources to audit.
- config_file: The path to the configuration file.
- fixer_config: The path to the fixer configuration
Raises:
- ArgumentTypeError: If the input MFA ARN is invalid.
- ArgumentTypeError: If the input session duration is invalid.
- ArgumentTypeError: If the input external ID is invalid.
- ArgumentTypeError: If the input role session name is invalid.
"""
logger.info("Initializing AWS provider ...")
######## Parse Arguments
# Session
aws_retries_max_attempts = getattr(arguments, "aws_retries_max_attempts", None)
# Assume Role
input_role = getattr(arguments, "role", None)
input_session_duration = getattr(arguments, "session_duration", None)
input_external_id = getattr(arguments, "external_id", None)
input_role_session_name = getattr(arguments, "role_session_name", None)
# MFA Configuration (false by default)
input_mfa = getattr(arguments, "mfa", None)
input_profile = getattr(arguments, "profile", None)
input_regions = set(getattr(arguments, "region", set()))
organizations_role_arn = getattr(arguments, "organizations_role", None)
# Set if unused services must be scanned
scan_unused_services = getattr(arguments, "scan_unused_services", None)
########
######## AWS Session
logger.info("Generating original session ...")
# Configure the initial AWS Session using the local credentials: profile or environment variables
aws_session = self.setup_session(input_mfa, input_profile, input_role)
session_config = self.set_session_config(aws_retries_max_attempts)
aws_session = self.setup_session(mfa, profile)
session_config = self.set_session_config(retries_max_attempts)
# Current session and the original session points to the same session object until we get a new one, if needed
self._session = AWSSession(
current_session=aws_session,
@@ -103,12 +140,15 @@ class AwsProvider(Provider):
# After the session is created, validate it
logger.info("Validating credentials ...")
sts_region = get_aws_region_for_sts(
self.session.current_session.region_name, input_regions
self.session.current_session.region_name, regions
)
caller_identity = validate_aws_credentials(
self.session.current_session, sts_region
# Validate the credentials
caller_identity = self.validate_credentials(
session=self.session.current_session,
aws_region=sts_region,
)
logger.info("Credentials validated")
########
@@ -119,24 +159,24 @@ class AwsProvider(Provider):
# Set identity
self._identity = self.set_identity(
caller_identity=caller_identity,
input_profile=input_profile,
input_regions=input_regions,
profile=profile,
regions=regions,
profile_region=profile_region,
)
########
######## AWS Session with Assume Role (if needed)
if input_role:
if role_arn:
# Validate the input role
valid_role_arn = parse_iam_credentials_arn(input_role)
valid_role_arn = parse_iam_credentials_arn(role_arn)
# Set assume IAM Role information
assumed_role_information = self.set_assumed_role_info(
valid_role_arn,
input_external_id,
input_mfa,
input_session_duration,
input_role_session_name,
sts_region,
assumed_role_information = AWSAssumeRoleInfo(
role_arn=valid_role_arn,
session_duration=session_duration,
external_id=external_id,
mfa_enabled=mfa,
role_session_name=role_session_name,
sts_region=sts_region,
)
# Assume the IAM Role
logger.info(f"Assuming role: {assumed_role_information.role_arn.arn}")
@@ -175,14 +215,15 @@ class AwsProvider(Provider):
# Validate the input role
valid_role_arn = parse_iam_credentials_arn(organizations_role_arn)
# Set assume IAM Role information
organizations_assumed_role_information = self.set_assumed_role_info(
valid_role_arn,
input_external_id,
input_mfa,
input_session_duration,
input_role_session_name,
sts_region,
organizations_assumed_role_information = AWSAssumeRoleInfo(
role_arn=valid_role_arn,
session_duration=session_duration,
external_id=external_id,
mfa_enabled=mfa,
role_session_name=role_session_name,
sts_region=sts_region,
)
# Assume the Organizations IAM Role
logger.info(
f"Assuming the AWS Organizations IAM Role: {organizations_assumed_role_information.role_arn.arn}"
@@ -214,12 +255,12 @@ class AwsProvider(Provider):
########
# Parse Scan Tags
if getattr(arguments, "resource_tags", None):
self._audit_resources = self.get_tagged_resources(arguments.resource_tags)
if resource_tags:
self._audit_resources = self.get_tagged_resources(resource_tags)
# Parse Input Resource ARNs
if getattr(arguments, "resource_arn", None):
self._audit_resources = arguments.resource_arn
if resource_arn:
self._audit_resources = resource_arn
# Get Enabled Regions
self._enabled_regions = self.get_aws_enabled_regions(
@@ -232,14 +273,12 @@ class AwsProvider(Provider):
# TODO: move this to the providers, pending for AWS, GCP, AZURE and K8s
# Audit Config
self._audit_config = {}
if hasattr(arguments, "config_file"):
self._audit_config = load_and_validate_config_file(
self._type, arguments.config_file
)
if config_file:
self._audit_config = load_and_validate_config_file(self._type, config_file)
self._fixer_config = {}
if hasattr(arguments, "fixer_config"):
if fixer_config:
self._fixer_config = load_and_validate_fixer_config_file(
self._type, arguments.fixer_config
self._type, fixer_config
)
@property
@@ -383,8 +422,8 @@ class AwsProvider(Provider):
def set_identity(
self,
caller_identity: AWSCallerIdentity,
input_profile: str,
input_regions: set,
profile: str,
regions: set,
profile_region: str,
) -> AWSIdentityInfo:
logger.info(f"Original AWS Caller Identity UserId: {caller_identity.user_id}")
@@ -397,21 +436,20 @@ class AwsProvider(Provider):
user_id=caller_identity.user_id,
partition=partition,
identity_arn=caller_identity.arn.arn,
profile=input_profile,
profile=profile,
profile_region=profile_region,
audited_regions=input_regions,
audited_regions=regions,
)
@staticmethod
def setup_session(
self,
input_mfa: bool,
input_profile: str,
input_role: str = None,
mfa: bool = False,
profile: str = None,
) -> Session:
try:
logger.info("Creating original session ...")
if input_mfa and not input_role:
mfa_info = self.__input_role_mfa_token_and_code__()
if mfa:
mfa_info = AwsProvider.input_role_mfa_token_and_code()
# TODO: validate MFA ARN here
get_session_token_arguments = {
"SerialNumber": mfa_info.arn,
@@ -429,48 +467,51 @@ class AwsProvider(Provider):
aws_session_token=session_credentials["Credentials"][
"SessionToken"
],
profile_name=input_profile,
profile_name=profile,
)
else:
return Session(
profile_name=input_profile,
profile_name=profile,
)
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
f"AWSSetUpSessionError[{error.__traceback__.tb_lineno}]: {error}"
)
raise AWSSetUpSessionError(
original_exception=error,
file=pathlib.Path(__file__).name,
)
sys.exit(1)
def set_assumed_role_info(
self,
role_arn: str,
input_external_id: str,
input_mfa: str,
session_duration: int,
role_session_name: str,
sts_region: str = AWS_STS_GLOBAL_ENDPOINT_REGION,
) -> AWSAssumeRoleInfo:
"""
set_assumed_role_info returns a AWSAssumeRoleInfo object
"""
logger.info("Setting assume IAM Role information ...")
return AWSAssumeRoleInfo(
role_arn=role_arn,
session_duration=session_duration,
external_id=input_external_id,
mfa_enabled=input_mfa,
role_session_name=role_session_name,
sts_region=sts_region,
)
def setup_assumed_session(
self,
assumed_role_credentials: AWSCredentials,
) -> Session:
"""
Sets up an assumed session using the provided assumed role credentials.
This method creates a new session with temporary credentials obtained by assuming an AWS IAM role.
It uses the `RefreshableCredentials` class from the `botocore` library to manage the automatic
refreshing of the assumed role credentials.
Args:
assumed_role_credentials (AWSCredentials): The assumed role credentials.
Returns:
Session: The assumed session.
Raises:
Exception: If an error occurs during the setup process.
References:
- `RefreshableCredentials` class in botocore:
[GitHub](https://github.com/boto/botocore/blob/098cc255f81a25b852e1ecdeb7adebd94c7b1b73/botocore/credentials.py#L395)
- AWS STS AssumeRole API:
[AWS Documentation](https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html)
"""
try:
# From botocore we can use RefreshableCredentials class, which has an attribute (refresh_using)
# that needs to be a method without arguments that retrieves a new set of fresh credentials
# asuming the role again. -> https://github.com/boto/botocore/blob/098cc255f81a25b852e1ecdeb7adebd94c7b1b73/botocore/credentials.py#L395
# assuming the role again.
assumed_refreshable_credentials = RefreshableCredentials(
access_key=assumed_role_credentials.aws_access_key_id,
secret_key=assumed_role_credentials.aws_secret_access_key,
@@ -481,10 +522,10 @@ class AwsProvider(Provider):
)
# Here we need the botocore session since it needs to use refreshable credentials
assumed_session = get_session()
assumed_session = BotocoreSession()
assumed_session._credentials = assumed_refreshable_credentials
assumed_session.set_config_variable("region", self._identity.profile_region)
return session.Session(
return Session(
profile_name=self._identity.profile,
botocore_session=assumed_session,
)
@@ -492,7 +533,7 @@ class AwsProvider(Provider):
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
sys.exit(1)
raise error
# TODO: maybe this can be improved with botocore.credentials.DeferredRefreshableCredentials https://stackoverflow.com/a/75576540
def refresh_credentials(self) -> dict:
@@ -531,7 +572,7 @@ class AwsProvider(Provider):
token=assume_role_response.aws_session_token,
expiry_time=assume_role_response.expiration.isoformat(),
)
logger.info(f"Refreshed Credentials: {refreshed_credentials}")
logger.info("Refreshed Credentials")
return refreshed_credentials
@@ -555,11 +596,11 @@ class AwsProvider(Provider):
]
# If -A is set, print Assumed Role ARN
if (
hasattr(self, "_assumed_role")
and self._assumed_role.info.role_arn is not None
hasattr(self, "_assumed_role_configuration")
and self._assumed_role_configuration.info.role_arn is not None
):
report_lines.append(
f"Assumed Role ARN: {Fore.YELLOW}[{self._assumed_role.info.role_arn.arn}]{Style.RESET_ALL}"
f"Assumed Role ARN: {Fore.YELLOW}[{self._assumed_role_configuration.info.role_arn.arn}]{Style.RESET_ALL}"
)
report_title = (
f"{Style.BRIGHT}Using the AWS credentials below:{Style.RESET_ALL}"
@@ -687,12 +728,12 @@ class AwsProvider(Provider):
audited_regions.add(region)
return audited_regions
def get_tagged_resources(self, input_resource_tags: list[str]) -> list[str]:
def get_tagged_resources(self, resource_tags: list[str]) -> list[str]:
"""
Returns a list of the resources that are going to be scanned based on the given input tags.
Parameters:
- input_resource_tags: A list of strings representing the tags to filter the resources. Each string should be in the format "key=value".
- resource_tags: A list of strings representing the tags to filter the resources. Each string should be in the format "key=value".
Returns:
- A list of strings representing the ARNs (Amazon Resource Names) of the tagged resources.
@@ -703,16 +744,16 @@ class AwsProvider(Provider):
- The method paginates through the results of the 'get_resources' operation to retrieve all the tagged resources.
Example usage:
input_resource_tags = ["Environment=Production", "Owner=John Doe"]
tagged_resources = get_tagged_resources(input_resource_tags)
resource_tags = ["Environment=Production", "Owner=John Doe"]
tagged_resources = get_tagged_resources(resource_tags)
"""
try:
resource_tags = []
resource_tags_values = []
tagged_resources = []
for tag in input_resource_tags:
for tag in resource_tags:
key = tag.split("=")[0]
value = tag.split("=")[1]
resource_tags.append({"Key": key, "Values": [value]})
resource_tags_values.append({"Key": key, "Values": [value]})
# Get Resources with resource_tags for all regions
for regional_client in self.generate_regional_clients(
"resourcegroupstaggingapi"
@@ -722,7 +763,7 @@ class AwsProvider(Provider):
"get_resources"
)
for page in get_resources_paginator.paginate(
TagFilters=resource_tags
TagFilters=resource_tags_values
):
for resource in page["ResourceTagMappingList"]:
tagged_resources.append(resource["ResourceARN"])
@@ -730,13 +771,13 @@ class AwsProvider(Provider):
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
return tagged_resources
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
sys.exit(1)
else:
return tagged_resources
raise error
def get_default_region(self, service: str) -> str:
"""get_default_region returns the default region based on the profile and audited service regions"""
@@ -768,13 +809,14 @@ class AwsProvider(Provider):
global_region = "aws-iso-global"
return global_region
def __input_role_mfa_token_and_code__(self) -> AWSMFAInfo:
@staticmethod
def input_role_mfa_token_and_code() -> AWSMFAInfo:
"""input_role_mfa_token_and_code ask for the AWS MFA ARN and TOTP and returns it."""
mfa_ARN = input("Enter ARN of MFA: ")
mfa_TOTP = input("Enter MFA code: ")
return AWSMFAInfo(arn=mfa_ARN, totp=mfa_TOTP)
def set_session_config(self, aws_retries_max_attempts: int) -> Config:
def set_session_config(self, retries_max_attempts: int) -> Config:
"""
set_session_config returns a botocore Config object with the Prowler user agent and the default retrier configuration if nothing is passed as argument
"""
@@ -783,11 +825,11 @@ class AwsProvider(Provider):
retries={"max_attempts": 3, "mode": "standard"},
user_agent_extra=BOTO3_USER_AGENT_EXTRA,
)
if aws_retries_max_attempts:
if retries_max_attempts:
# Create the new config
config = Config(
retries={
"max_attempts": aws_retries_max_attempts,
"max_attempts": retries_max_attempts,
"mode": "standard",
},
)
@@ -796,8 +838,8 @@ class AwsProvider(Provider):
return default_session_config
@staticmethod
def assume_role(
self,
session: Session,
assumed_role_info: AWSAssumeRoleInfo,
) -> AWSCredentials:
@@ -822,10 +864,12 @@ class AwsProvider(Provider):
assume_role_arguments["ExternalId"] = assumed_role_info.external_id
if assumed_role_info.mfa_enabled:
mfa_info = self.__input_role_mfa_token_and_code__()
mfa_info = AwsProvider.input_role_mfa_token_and_code()
assume_role_arguments["SerialNumber"] = mfa_info.arn
assume_role_arguments["TokenCode"] = mfa_info.totp
sts_client = create_sts_session(session, assumed_role_info.sts_region)
sts_client = AwsProvider.create_sts_session(
session, assumed_role_info.sts_region
)
assumed_credentials = sts_client.assume_role(**assume_role_arguments)
# Convert the UTC datetime object to your local timezone
credentials_expiration_local_time = (
@@ -846,7 +890,10 @@ class AwsProvider(Provider):
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
)
sys.exit(1)
raise AWSAssumeRoleError(
original_exception=error,
file=pathlib.Path(__file__).name,
)
def get_aws_enabled_regions(self, current_session: Session) -> set:
"""get_aws_enabled_regions returns a set of enabled AWS regions"""
@@ -887,7 +934,243 @@ class AwsProvider(Provider):
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
sys.exit(1)
raise error
@staticmethod
def validate_credentials(
session: Session,
aws_region: str,
) -> AWSCallerIdentity:
"""
Validates the AWS credentials using the provided session and AWS region.
Args:
session (Session): The AWS session object.
aws_region (str): The AWS region to validate the credentials.
Returns:
AWSCallerIdentity: An object containing the caller identity information.
Raises:
Exception: If an error occurs during the validation process.
"""
try:
sts_client = AwsProvider.create_sts_session(session, aws_region)
caller_identity = sts_client.get_caller_identity()
# Include the region where the caller_identity has validated the credentials
return AWSCallerIdentity(
user_id=caller_identity.get("UserId"),
account=caller_identity.get("Account"),
arn=ARN(caller_identity.get("Arn")),
region=aws_region,
)
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
raise error
    @staticmethod
    def test_connection(
        profile: str = None,
        aws_region: str = AWS_STS_GLOBAL_ENDPOINT_REGION,
        role_arn: str = None,
        role_session_name: str = ROLE_SESSION_NAME,
        session_duration: int = 3600,
        external_id: str = None,
        mfa_enabled: bool = False,
        raise_on_exception: bool = True,
    ) -> Connection:
        """
        Test the connection to AWS with one of the Boto3 credentials methods.

        Args:
            profile (str): The AWS profile to use for the session.
            aws_region (str): The AWS region to validate the credentials in.
            role_arn (str): The ARN of the IAM role to assume.
            role_session_name (str): The name of the role session.
            session_duration (int): The duration of the assumed role session in seconds.
            external_id (str): The external ID to use when assuming the role.
            mfa_enabled (bool): Whether MFA (Multi-Factor Authentication) is enabled.
            raise_on_exception (bool): Whether to raise an exception if an error occurs.

        Returns:
            Connection: An object that contains the result of the test connection operation.
                - is_connected (bool): Indicates whether the validation was successful.
                - error (Exception): An exception object if an error occurs during the validation.

        Raises:
            ClientError: If there is an error with the AWS client.
            ProfileNotFound: If the specified profile is not found.
            NoCredentialsError: If there are no AWS credentials found.
            ArgumentTypeError: If there is a validation error with the arguments.
            Exception: If there is an unexpected error.

        Examples:
            >>> AwsProvider.test_connection(
                    role_arn="arn:aws:iam::111122223333:role/ProwlerRole",
                    external_id="67f7a641-ecb0-4f6d-921d-3587febd379c",
                    raise_on_exception=False,
                )
            Connection(is_connected=True, Error=None)
            >>> AwsProvider.test_connection(profile="test", raise_on_exception=False)
            Connection(is_connected=True, Error=None)
            >>> AwsProvider.test_connection(profile="not-found", raise_on_exception=False)
            Connection(is_connected=False, Error=ProfileNotFound('The config profile (not-found) could not be found'))
            >>> AwsProvider.test_connection(raise_on_exception=False)
            Connection(is_connected=False, Error=NoCredentialsError('Unable to locate credentials'))
        """
        try:
            session = AwsProvider.setup_session(mfa_enabled, profile)
            if role_arn:
                # Validate the assume-role arguments before calling STS
                session_duration = validate_session_duration(session_duration)
                role_session_name = validate_role_session_name(role_session_name)
                role_arn = parse_iam_credentials_arn(role_arn)
                assumed_role_information = AWSAssumeRoleInfo(
                    role_arn=role_arn,
                    session_duration=session_duration,
                    external_id=external_id,
                    mfa_enabled=mfa_enabled,
                    role_session_name=role_session_name,
                )
                assumed_role_credentials = AwsProvider.assume_role(
                    session,
                    assumed_role_information,
                )
                # Replace the base session with one built from the temporary
                # credentials returned by AssumeRole
                session = Session(
                    aws_access_key_id=assumed_role_credentials.aws_access_key_id,
                    aws_secret_access_key=assumed_role_credentials.aws_secret_access_key,
                    aws_session_token=assumed_role_credentials.aws_session_token,
                    region_name=aws_region,
                    profile_name=profile,
                )

            # sts:GetCallerIdentity is the cheapest call that proves the
            # credentials are usable in the target region
            sts_client = AwsProvider.create_sts_session(session, aws_region)
            _ = sts_client.get_caller_identity()

            return Connection(
                is_connected=True,
            )
        # Every known Prowler exception below follows the same pattern: log it,
        # then either re-raise or wrap it in the returned Connection object,
        # depending on raise_on_exception.
        except AWSSetUpSessionError as setup_session_error:
            logger.error(str(setup_session_error))
            if raise_on_exception:
                raise setup_session_error
            return Connection(error=setup_session_error)
        except AWSArgumentTypeValidationError as validation_error:
            logger.error(str(validation_error))
            if raise_on_exception:
                raise validation_error
            return Connection(error=validation_error)
        except AWSIAMRoleARNRegionNotEmtpy as arn_region_not_empty_error:
            logger.error(str(arn_region_not_empty_error))
            if raise_on_exception:
                raise arn_region_not_empty_error
            return Connection(error=arn_region_not_empty_error)
        except AWSIAMRoleARNPartitionEmpty as arn_partition_empty_error:
            logger.error(str(arn_partition_empty_error))
            if raise_on_exception:
                raise arn_partition_empty_error
            return Connection(error=arn_partition_empty_error)
        except AWSIAMRoleARNServiceNotIAMnorSTS as arn_service_not_iam_sts_error:
            logger.error(str(arn_service_not_iam_sts_error))
            if raise_on_exception:
                raise arn_service_not_iam_sts_error
            return Connection(error=arn_service_not_iam_sts_error)
        except AWSIAMRoleARNInvalidAccountID as arn_invalid_account_id_error:
            logger.error(str(arn_invalid_account_id_error))
            if raise_on_exception:
                raise arn_invalid_account_id_error
            return Connection(error=arn_invalid_account_id_error)
        except AWSIAMRoleARNInvalidResourceType as arn_invalid_resource_type_error:
            logger.error(str(arn_invalid_resource_type_error))
            if raise_on_exception:
                raise arn_invalid_resource_type_error
            return Connection(error=arn_invalid_resource_type_error)
        except AWSIAMRoleARNEmptyResource as arn_empty_resource_error:
            logger.error(str(arn_empty_resource_error))
            if raise_on_exception:
                raise arn_empty_resource_error
            return Connection(error=arn_empty_resource_error)
        except AWSAssumeRoleError as assume_role_error:
            logger.error(str(assume_role_error))
            if raise_on_exception:
                raise assume_role_error
            return Connection(error=assume_role_error)
        # Boto3/botocore errors are wrapped in Prowler exceptions when re-raised,
        # but returned unwrapped inside Connection for backward compatibility.
        except ClientError as client_error:
            logger.error(
                f"AWSClientError[{client_error.__traceback__.tb_lineno}]: {client_error}"
            )
            if raise_on_exception:
                raise AWSClientError(
                    file=os.path.basename(__file__), original_exception=client_error
                ) from client_error
            return Connection(error=client_error)
        except ProfileNotFound as profile_not_found_error:
            logger.error(
                f"AWSProfileNotFoundError[{profile_not_found_error.__traceback__.tb_lineno}]: {profile_not_found_error}"
            )
            if raise_on_exception:
                raise AWSProfileNotFoundError(
                    file=os.path.basename(__file__),
                    original_exception=profile_not_found_error,
                ) from profile_not_found_error
            return Connection(error=profile_not_found_error)
        except NoCredentialsError as no_credentials_error:
            logger.error(
                f"AWSNoCredentialsError[{no_credentials_error.__traceback__.tb_lineno}]: {no_credentials_error}"
            )
            if raise_on_exception:
                raise AWSNoCredentialsError(
                    file=os.path.basename(__file__),
                    original_exception=no_credentials_error,
                ) from no_credentials_error
            return Connection(error=no_credentials_error)
        # Unexpected errors are always re-raised, regardless of raise_on_exception
        except Exception as error:
            logger.critical(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
            raise error
@staticmethod
def create_sts_session(
session: Session, aws_region: str = AWS_STS_GLOBAL_ENDPOINT_REGION
) -> Session.client:
"""
Create an STS session client.
Parameters:
- session (session.Session): The AWS session object.
- aws_region (str): The AWS region to use for the session.
Returns:
- session.Session.client: The STS session client.
Example:
session = boto3.session.Session()
sts_client = create_sts_session(session, 'us-west-2')
"""
try:
sts_endpoint_url = (
f"https://sts.{aws_region}.amazonaws.com"
if not aws_region.startswith("cn-")
else f"https://sts.{aws_region}.amazonaws.com.cn"
)
return session.client("sts", aws_region, endpoint_url=sts_endpoint_url)
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
raise error
def read_aws_regions_file() -> dict:
@@ -927,35 +1210,9 @@ def get_aws_available_regions() -> set:
# TODO: This can be moved to another class since it doesn't need self
# TODO: rename to validate_credentials
def validate_aws_credentials(
session: Session,
aws_region: str,
) -> AWSCallerIdentity:
"""
validate_aws_credentials returns the get_caller_identity() answer, exits if something exception is raised.
"""
try:
validate_credentials_client = create_sts_session(session, aws_region)
caller_identity = validate_credentials_client.get_caller_identity()
# Include the region where the caller_identity has validated the credentials
return AWSCallerIdentity(
user_id=caller_identity.get("UserId"),
account=caller_identity.get("Account"),
arn=ARN(caller_identity.get("Arn")),
region=aws_region,
)
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
sys.exit(1)
# TODO: This can be moved to another class since it doesn't need self
def get_aws_region_for_sts(session_region: str, input_regions: set[str]) -> str:
def get_aws_region_for_sts(session_region: str, regions: set[str]) -> str:
# If there is no region passed with -f/--region/--filter-region
if input_regions is None or len(input_regions) == 0:
if regions is None or len(regions) == 0:
# If you have a region configured in your AWS config or credentials file
if session_region is not None:
aws_region = session_region
@@ -965,38 +1222,57 @@ def get_aws_region_for_sts(session_region: str, input_regions: set[str]) -> str:
aws_region = AWS_STS_GLOBAL_ENDPOINT_REGION
else:
# Get the first region passed to the -f/--region
aws_region = list(input_regions)[0]
aws_region = list(regions)[0]
return aws_region
# TODO: This can be moved to another class since it doesn't need self
def create_sts_session(
session: session.Session, aws_region: str = AWS_STS_GLOBAL_ENDPOINT_REGION
) -> session.Session.client:
# TODO: this duplicates the provider arguments validation library
def validate_session_duration(duration: int) -> int:
"""
Create an STS session client.
validate_session_duration validates that the AWS STS Assume Role Session Duration is between 900 and 43200 seconds.
Parameters:
- session (session.Session): The AWS session object.
- aws_region (str): The AWS region to use for the session.
Args:
duration (int): The session duration in seconds.
Returns:
- session.Session.client: The STS session client.
int: The validated session duration.
Example:
session = boto3.session.Session()
sts_client = create_sts_session(session, 'us-west-2')
Raises:
ArgumentTypeError: If the session duration is not within the valid range.
"""
try:
sts_endpoint_url = (
f"https://sts.{aws_region}.amazonaws.com"
if "cn-" not in aws_region
else f"https://sts.{aws_region}.amazonaws.com.cn"
duration = int(duration)
# Since the range(i,j) goes from i to j-1 we have to j+1
if duration not in range(900, 43201):
raise AWSArgumentTypeValidationError(
message="Session Duration must be between 900 and 43200 seconds.",
file=os.path.basename(__file__),
)
return session.client("sts", aws_region, endpoint_url=sts_endpoint_url)
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
else:
return duration
# TODO: this duplicates the provider arguments validation library
def validate_role_session_name(session_name) -> str:
    """
    Validate an AWS STS role session name.

    Args:
        session_name (str): The role session name to be validated.

    Returns:
        str: The validated role session name.

    Raises:
        AWSArgumentTypeValidationError: If the role session name is invalid.

    Documentation:
        - AWS STS AssumeRole API: https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html
    """
    # STS accepts 2-64 characters from [\w+=,.@-] as a session name
    if not fullmatch(r"[\w+=,.@-]{2,64}", session_name):
        raise AWSArgumentTypeValidationError(
            file=os.path.basename(__file__),
            message="Role Session Name must be between 2 and 64 characters and may contain alphanumeric characters, periods, hyphens, and underscores.",
        )
    return session_name
sys.exit(1)

View File

@@ -14,6 +14,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -95,6 +96,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -138,6 +140,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -327,6 +330,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -446,6 +450,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -489,6 +494,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -609,6 +615,7 @@
"ap-southeast-1",
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ca-central-1",
"eu-central-1",
"eu-central-2",
@@ -618,6 +625,7 @@
"eu-west-1",
"eu-west-2",
"eu-west-3",
"il-central-1",
"me-central-1",
"me-south-1",
"sa-east-1",
@@ -809,6 +817,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -852,6 +861,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -975,6 +985,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -1018,6 +1029,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -1072,6 +1084,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -1201,6 +1214,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -1261,6 +1275,28 @@
]
}
},
"bedrock-runtime": {
"regions": {
"aws": [
"ap-northeast-1",
"ap-south-1",
"ap-southeast-1",
"ap-southeast-2",
"ca-central-1",
"eu-central-1",
"eu-west-1",
"eu-west-2",
"eu-west-3",
"sa-east-1",
"us-east-1",
"us-west-2"
],
"aws-cn": [],
"aws-us-gov": [
"us-gov-west-1"
]
}
},
"billingconductor": {
"regions": {
"aws": [
@@ -1566,6 +1602,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -1692,6 +1729,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -1735,6 +1773,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -1864,6 +1903,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -1916,6 +1956,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -2494,6 +2535,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -2819,6 +2861,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -3008,6 +3051,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -3066,6 +3110,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -3109,6 +3154,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -3266,6 +3312,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -3309,6 +3356,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -3352,6 +3400,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -3395,6 +3444,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -3438,6 +3488,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -3491,6 +3542,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -3577,6 +3629,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -3620,6 +3673,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -3640,7 +3694,10 @@
"us-west-2"
],
"aws-cn": [],
"aws-us-gov": []
"aws-us-gov": [
"us-gov-east-1",
"us-gov-west-1"
]
}
},
"elastic-inference": {
@@ -3671,6 +3728,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -3767,6 +3825,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -3810,6 +3869,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -3853,6 +3913,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -3991,6 +4052,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -4034,6 +4096,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -4077,6 +4140,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -4137,6 +4201,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -4229,6 +4294,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -4918,6 +4984,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -5047,6 +5114,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -5724,6 +5792,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -5839,6 +5908,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -5925,6 +5995,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -6258,6 +6329,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -6528,6 +6600,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -7363,6 +7436,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -7386,7 +7460,10 @@
"cn-north-1",
"cn-northwest-1"
],
"aws-us-gov": []
"aws-us-gov": [
"us-gov-east-1",
"us-gov-west-1"
]
}
},
"omics": {
@@ -7418,6 +7495,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -7465,7 +7543,9 @@
"us-west-2"
],
"aws-cn": [],
"aws-us-gov": []
"aws-us-gov": [
"us-gov-west-1"
]
}
},
"opsworks": {
@@ -7556,6 +7636,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -7742,6 +7823,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -7956,6 +8038,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -8015,6 +8098,16 @@
"aws-us-gov": []
}
},
"qapps": {
"regions": {
"aws": [
"us-east-1",
"us-west-2"
],
"aws-cn": [],
"aws-us-gov": []
}
},
"qbusiness": {
"regions": {
"aws": [
@@ -8025,6 +8118,15 @@
"aws-us-gov": []
}
},
"qdeveloper": {
"regions": {
"aws": [
"us-east-1"
],
"aws-cn": [],
"aws-us-gov": []
}
},
"qldb": {
"regions": {
"aws": [
@@ -8108,6 +8210,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -8151,6 +8254,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -8194,6 +8298,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -8268,6 +8373,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -8311,6 +8417,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -8344,6 +8451,7 @@
"regions": {
"aws": [
"ap-south-1",
"ap-southeast-3",
"ca-central-1",
"eu-central-2",
"eu-south-2",
@@ -8483,6 +8591,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -8526,6 +8635,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -8667,6 +8777,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -8693,6 +8804,50 @@
"aws-us-gov": []
}
},
"route53-application-recovery-controller": {
"regions": {
"aws": [
"af-south-1",
"ap-east-1",
"ap-northeast-1",
"ap-northeast-2",
"ap-northeast-3",
"ap-south-1",
"ap-south-2",
"ap-southeast-1",
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
"eu-central-2",
"eu-north-1",
"eu-south-1",
"eu-south-2",
"eu-west-1",
"eu-west-2",
"eu-west-3",
"il-central-1",
"me-central-1",
"me-south-1",
"sa-east-1",
"us-east-1",
"us-east-2",
"us-west-1",
"us-west-2"
],
"aws-cn": [
"cn-north-1",
"cn-northwest-1"
],
"aws-us-gov": [
"us-gov-east-1",
"us-gov-west-1"
]
}
},
"route53-recovery-readiness": {
"regions": {
"aws": [
@@ -8711,6 +8866,46 @@
"aws-us-gov": []
}
},
"route53profiles": {
"regions": {
"aws": [
"af-south-1",
"ap-east-1",
"ap-northeast-1",
"ap-northeast-2",
"ap-northeast-3",
"ap-south-1",
"ap-south-2",
"ap-southeast-1",
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ca-central-1",
"ca-west-1",
"eu-central-1",
"eu-central-2",
"eu-north-1",
"eu-south-1",
"eu-south-2",
"eu-west-1",
"eu-west-2",
"eu-west-3",
"il-central-1",
"me-central-1",
"me-south-1",
"sa-east-1",
"us-east-1",
"us-east-2",
"us-west-1",
"us-west-2"
],
"aws-cn": [],
"aws-us-gov": [
"us-gov-east-1",
"us-gov-west-1"
]
}
},
"route53resolver": {
"regions": {
"aws": [
@@ -8725,6 +8920,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -8802,6 +8998,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -9094,6 +9291,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -9224,6 +9422,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -9370,6 +9569,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -9495,6 +9695,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -9773,6 +9974,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -9816,6 +10018,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -9859,6 +10062,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -10023,6 +10227,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -10109,6 +10314,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -10151,6 +10357,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -10205,6 +10412,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -10248,6 +10456,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -10349,9 +10558,12 @@
"ap-south-1",
"ap-southeast-1",
"ap-southeast-2",
"ca-central-1",
"eu-central-1",
"eu-north-1",
"eu-west-1",
"eu-west-2",
"eu-west-3",
"us-east-1",
"us-east-2",
"us-west-2"
@@ -10485,6 +10697,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -10554,6 +10767,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -10717,6 +10931,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -10749,6 +10964,7 @@
"vpc-lattice": {
"regions": {
"aws": [
"af-south-1",
"ap-northeast-1",
"ap-northeast-2",
"ap-south-1",
@@ -10757,6 +10973,7 @@
"ca-central-1",
"eu-central-1",
"eu-north-1",
"eu-south-1",
"eu-west-1",
"eu-west-2",
"eu-west-3",
@@ -10784,6 +11001,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",
@@ -11126,6 +11344,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ca-central-1",
"ca-west-1",
"eu-central-1",

View File

@@ -0,0 +1,177 @@
from prowler.exceptions.exceptions import ProwlerException
class AWSBaseException(ProwlerException):
    """Base class for AWS errors.

    Maps a (code, class-name) pair to its default message and remediation via
    AWS_ERROR_CODES, allowing subclasses to override the message per instance.
    """

    # Default message/remediation per (error code, exception class name).
    # Treated as read-only: instances must never mutate these entries.
    AWS_ERROR_CODES = {
        (1902, "AWSClientError"): {
            "message": "AWS ClientError occurred",
            "remediation": "Check your AWS client configuration and permissions.",
        },
        (1903, "AWSProfileNotFoundError"): {
            "message": "AWS Profile not found",
            "remediation": "Ensure the AWS profile is correctly configured, please visit https://docs.aws.amazon.com/cli/v1/userguide/cli-configure-files.html",
        },
        (1904, "AWSNoCredentialsError"): {
            "message": "No AWS credentials found",
            "remediation": "Verify that AWS credentials are properly set up, please visit https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/aws/authentication/ and https://docs.aws.amazon.com/cli/v1/userguide/cli-chap-configure.html",
        },
        (1905, "AWSArgumentTypeValidationError"): {
            "message": "AWS argument type validation error",
            "remediation": "Check the provided argument types specific to AWS and ensure they meet the required format. For session duration check: https://docs.aws.amazon.com/singlesignon/latest/userguide/howtosessionduration.html and for role session name check: https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_terms-and-concepts.html#iam-term-role-session-name",
        },
        (1906, "AWSSetUpSessionError"): {
            "message": "AWS session setup error",
            "remediation": "Check the AWS session setup and ensure it is properly configured, please visit https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-files.html and check if the provided profile has the necessary permissions.",
        },
        (1907, "AWSIAMRoleARNRegionNotEmtpy"): {
            "message": "AWS IAM Role ARN region is not empty",
            "remediation": "Check the AWS IAM Role ARN region and ensure it is empty, visit https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-arns for more information.",
        },
        (1908, "AWSIAMRoleARNPartitionEmpty"): {
            "message": "AWS IAM Role ARN partition is empty",
            "remediation": "Check the AWS IAM Role ARN partition and ensure it is not empty, visit https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-arns for more information.",
        },
        (1909, "AWSIAMRoleARNMissingFields"): {
            "message": "AWS IAM Role ARN missing fields",
            "remediation": "Check the AWS IAM Role ARN and ensure all required fields are present, visit https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-arns for more information.",
        },
        (1910, "AWSIAMRoleARNServiceNotIAMnorSTS"): {
            "message": "AWS IAM Role ARN service is not IAM nor STS",
            "remediation": "Check the AWS IAM Role ARN service and ensure it is either IAM or STS, visit https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-arns for more information.",
        },
        (1911, "AWSIAMRoleARNInvalidAccountID"): {
            "message": "AWS IAM Role ARN account ID is invalid",
            "remediation": "Check the AWS IAM Role ARN account ID and ensure it is a valid 12-digit number, visit https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-arns for more information.",
        },
        (1912, "AWSIAMRoleARNInvalidResourceType"): {
            "message": "AWS IAM Role ARN resource type is invalid",
            "remediation": "Check the AWS IAM Role ARN resource type and ensure it is valid, resources types are: role, user, assumed-role, root, federated-user, visit https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-arns for more information.",
        },
        (1913, "AWSIAMRoleARNEmptyResource"): {
            "message": "AWS IAM Role ARN resource is empty",
            "remediation": "Check the AWS IAM Role ARN resource and ensure it is not empty, visit https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-arns for more information.",
        },
        (1914, "AWSAssumeRoleError"): {
            "message": "AWS assume role error",
            "remediation": "Check the AWS assume role configuration and ensure it is properly set up, please visit https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/aws/role-assumption/ and https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_terms-and-concepts.html#iam-term-role-session-name",
        },
    }

    def __init__(self, code, file=None, original_exception=None, message=None):
        """Build the error info for this code and delegate to ProwlerException.

        Args:
            code (int): The numeric error code keyed in AWS_ERROR_CODES.
            file (str): The source file where the error originated, if known.
            original_exception (Exception): The underlying exception, if any.
            message (str): Optional override for the default message.
        """
        error_info = self.AWS_ERROR_CODES.get((code, self.__class__.__name__))
        if message:
            # BUGFIX: copy the entry before overriding so the shared
            # class-level AWS_ERROR_CODES table is not mutated — previously a
            # custom message permanently replaced the default message for every
            # later exception raised with the same code.
            error_info = {**error_info, "message": message}
        super().__init__(
            code,
            provider="AWS",
            file=file,
            original_exception=original_exception,
            error_info=error_info,
        )
class AWSCredentialsError(AWSBaseException):
    """Base class for AWS credentials errors."""

    def __init__(self, code, file=None, original_exception=None, message=None):
        super().__init__(
            code,
            file=file,
            original_exception=original_exception,
            message=message,
        )
class AWSClientError(AWSCredentialsError):
    """Credentials error carrying Prowler error code 1902."""

    def __init__(self, file=None, original_exception=None, message=None):
        # Bind the fixed code; all other context is forwarded to the base.
        super().__init__(
            1902,
            file=file,
            original_exception=original_exception,
            message=message,
        )
class AWSProfileNotFoundError(AWSCredentialsError):
    """Credentials error carrying Prowler error code 1903."""

    def __init__(self, file=None, original_exception=None, message=None):
        # Bind the fixed code; all other context is forwarded to the base.
        super().__init__(
            1903,
            file=file,
            original_exception=original_exception,
            message=message,
        )
class AWSNoCredentialsError(AWSCredentialsError):
    """Credentials error carrying Prowler error code 1904."""

    def __init__(self, file=None, original_exception=None, message=None):
        # Bind the fixed code; all other context is forwarded to the base.
        super().__init__(
            1904,
            file=file,
            original_exception=original_exception,
            message=message,
        )
class AWSArgumentTypeValidationError(AWSBaseException):
    """AWS exception carrying Prowler error code 1905."""

    def __init__(self, file=None, original_exception=None, message=None):
        # Bind the fixed code; all other context is forwarded to the base.
        super().__init__(
            1905,
            file=file,
            original_exception=original_exception,
            message=message,
        )
class AWSSetUpSessionError(AWSBaseException):
    """AWS exception carrying Prowler error code 1906."""

    def __init__(self, file=None, original_exception=None, message=None):
        # Bind the fixed code; all other context is forwarded to the base.
        super().__init__(
            1906,
            file=file,
            original_exception=original_exception,
            message=message,
        )
class AWSRoleArnError(AWSBaseException):
    """Base class for AWS role ARN errors."""

    def __init__(self, code, file=None, original_exception=None, message=None):
        # Pure pass-through: concrete ARN-validation subclasses supply the code.
        super().__init__(
            code,
            file,
            original_exception,
            message,
        )
class AWSIAMRoleARNRegionNotEmtpy(AWSRoleArnError):
    """Role ARN error carrying Prowler error code 1907.

    NOTE(review): the class name misspells "Empty"; renaming would break
    existing callers, so the historical spelling is kept.
    """

    def __init__(self, file=None, original_exception=None, message=None):
        # Bind the fixed code; all other context is forwarded to the base.
        super().__init__(
            1907,
            file=file,
            original_exception=original_exception,
            message=message,
        )
class AWSIAMRoleARNPartitionEmpty(AWSRoleArnError):
    """Role ARN error carrying Prowler error code 1908."""

    def __init__(self, file=None, original_exception=None, message=None):
        # Bind the fixed code; all other context is forwarded to the base.
        super().__init__(
            1908,
            file=file,
            original_exception=original_exception,
            message=message,
        )
class AWSIAMRoleARNMissingFields(AWSRoleArnError):
    """Role ARN error carrying Prowler error code 1909."""

    def __init__(self, file=None, original_exception=None, message=None):
        # Bind the fixed code; all other context is forwarded to the base.
        super().__init__(
            1909,
            file=file,
            original_exception=original_exception,
            message=message,
        )
class AWSIAMRoleARNServiceNotIAMnorSTS(AWSRoleArnError):
    """Role ARN error carrying Prowler error code 1910."""

    def __init__(self, file=None, original_exception=None, message=None):
        # Bind the fixed code; all other context is forwarded to the base.
        super().__init__(
            1910,
            file=file,
            original_exception=original_exception,
            message=message,
        )
class AWSIAMRoleARNInvalidAccountID(AWSRoleArnError):
    """Role ARN error 1911: the ARN account ID is not a valid 12-digit number."""

    def __init__(self, file=None, original_exception=None, message=None):
        # Bind the fixed code; all other context is forwarded to the base.
        super().__init__(
            1911,
            file=file,
            original_exception=original_exception,
            message=message,
        )
class AWSIAMRoleARNInvalidResourceType(AWSRoleArnError):
    """Role ARN error 1912: the ARN resource type is not one of
    role, user, assumed-role, root, federated-user."""

    def __init__(self, file=None, original_exception=None, message=None):
        # Bind the fixed code; all other context is forwarded to the base.
        super().__init__(
            1912,
            file=file,
            original_exception=original_exception,
            message=message,
        )
class AWSIAMRoleARNEmptyResource(AWSRoleArnError):
    """Role ARN error 1913: the ARN resource part is empty."""

    def __init__(self, file=None, original_exception=None, message=None):
        # Bind the fixed code; all other context is forwarded to the base.
        super().__init__(
            1913,
            file=file,
            original_exception=original_exception,
            message=message,
        )
class AWSAssumeRoleError(AWSBaseException):
    """AWS error 1914: assuming the configured role failed."""

    def __init__(self, file=None, original_exception=None, message=None):
        # Bind the fixed code; all other context is forwarded to the base.
        super().__init__(
            1914,
            file=file,
            original_exception=original_exception,
            message=message,
        )

View File

@@ -168,13 +168,40 @@ def init_parser(self):
)
def validate_session_duration(duration):
"""validate_session_duration validates that the AWS STS Assume Role Session Duration is between 900 and 43200 seconds."""
duration = int(duration)
def validate_session_duration(session_duration: int) -> int:
"""validate_session_duration validates that the input session_duration is valid"""
duration = int(session_duration)
# Since the range(i,j) goes from i to j-1 we have to j+1
if duration not in range(900, 43201):
raise ArgumentTypeError("Session duration must be between 900 and 43200")
return duration
raise ArgumentTypeError(
"Session duration must be between 900 and 43200 seconds"
)
else:
return duration
def validate_role_session_name(session_name) -> str:
    """
    Validates that the role session name is valid.

    Args:
        session_name (str): The role session name to be validated.

    Returns:
        str: The validated role session name.

    Raises:
        ArgumentTypeError: If the role session name is invalid.

    Documentation:
        - AWS STS AssumeRole API: https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html
    """
    # Same constraint the STS AssumeRole API enforces for RoleSessionName.
    allowed_pattern = r"[\w+=,.@-]{2,64}"
    if not fullmatch(allowed_pattern, session_name):
        raise ArgumentTypeError(
            "Role session name must be between 2 and 64 characters long and may contain alphanumeric characters, hyphens, underscores, plus signs, equal signs, commas, periods, at signs, and tildes."
        )
    return session_name
def validate_arguments(arguments: Namespace) -> tuple[bool, str]:
@@ -195,7 +222,7 @@ def validate_arguments(arguments: Namespace) -> tuple[bool, str]:
return (True, "")
def validate_bucket(bucket_name):
def validate_bucket(bucket_name: str) -> str:
"""validate_bucket validates that the input bucket_name is valid"""
if search("(?!(^xn--|.+-s3alias$))^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$", bucket_name):
return bucket_name
@@ -203,16 +230,3 @@ def validate_bucket(bucket_name):
raise ArgumentTypeError(
"Bucket name must be valid (https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html)"
)
def validate_role_session_name(session_name):
"""
validates that the role session name is valid
Documentation: https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html
"""
if fullmatch(r"[\w+=,.@-]{2,64}", session_name):
return session_name
else:
raise ArgumentTypeError(
"Role Session Name must be 2-64 characters long and consist only of upper- and lower-case alphanumeric characters with no spaces. You can also include underscores or any of the following characters: =,.@-"
)

View File

@@ -1,13 +1,14 @@
import os
import re
from argparse import ArgumentTypeError
from prowler.providers.aws.lib.arn.error import (
RoleArnParsingEmptyResource,
RoleArnParsingIAMRegionNotEmpty,
RoleArnParsingInvalidAccountID,
RoleArnParsingInvalidResourceType,
RoleArnParsingPartitionEmpty,
RoleArnParsingServiceNotIAMnorSTS,
from prowler.providers.aws.exceptions.exceptions import (
AWSIAMRoleARNEmptyResource,
AWSIAMRoleARNInvalidAccountID,
AWSIAMRoleARNInvalidResourceType,
AWSIAMRoleARNPartitionEmpty,
AWSIAMRoleARNRegionNotEmtpy,
AWSIAMRoleARNServiceNotIAMnorSTS,
)
from prowler.providers.aws.lib.arn.models import ARN
@@ -24,7 +25,7 @@ def parse_iam_credentials_arn(arn: str) -> ARN:
arn_parsed = ARN(arn)
# First check if region is empty (in IAM ARN's region is always empty)
if arn_parsed.region:
raise RoleArnParsingIAMRegionNotEmpty
raise AWSIAMRoleARNRegionNotEmtpy(file=os.path.basename(__file__))
else:
# check if needed fields are filled:
# - partition
@@ -33,15 +34,15 @@ def parse_iam_credentials_arn(arn: str) -> ARN:
# - resource_type
# - resource
if arn_parsed.partition is None or arn_parsed.partition == "":
raise RoleArnParsingPartitionEmpty
raise AWSIAMRoleARNPartitionEmpty(file=os.path.basename(__file__))
elif arn_parsed.service != "iam" and arn_parsed.service != "sts":
raise RoleArnParsingServiceNotIAMnorSTS
raise AWSIAMRoleARNServiceNotIAMnorSTS(file=os.path.basename(__file__))
elif (
arn_parsed.account_id is None
or len(arn_parsed.account_id) != 12
or not arn_parsed.account_id.isnumeric()
):
raise RoleArnParsingInvalidAccountID
raise AWSIAMRoleARNInvalidAccountID(file=os.path.basename(__file__))
elif (
arn_parsed.resource_type != "role"
and arn_parsed.resource_type != "user"
@@ -49,9 +50,9 @@ def parse_iam_credentials_arn(arn: str) -> ARN:
and arn_parsed.resource_type != "root"
and arn_parsed.resource_type != "federated-user"
):
raise RoleArnParsingInvalidResourceType
raise AWSIAMRoleARNInvalidResourceType(file=os.path.basename(__file__))
elif arn_parsed.resource == "":
raise RoleArnParsingEmptyResource
raise AWSIAMRoleARNEmptyResource(file=os.path.basename(__file__))
else:
return arn_parsed

View File

@@ -1,49 +0,0 @@
class RoleArnParsingFailedMissingFields(Exception):
# The ARN contains a number of fields different than six separated by :"
def __init__(self):
self.message = "The assumed role ARN contains an invalid number of fields separated by : or it does not start by arn, please input a valid ARN"
super().__init__(self.message)
class RoleArnParsingIAMRegionNotEmpty(Exception):
# The ARN contains a non-empty value for region, since it is an IAM ARN is not valid
def __init__(self):
self.message = "The assumed role ARN contains a non-empty value for region, since it is an IAM ARN is not valid, please input a valid ARN"
super().__init__(self.message)
class RoleArnParsingPartitionEmpty(Exception):
# The ARN contains an empty value for partition
def __init__(self):
self.message = "The assumed role ARN does not contain a value for partition, please input a valid ARN"
super().__init__(self.message)
class RoleArnParsingServiceNotIAMnorSTS(Exception):
def __init__(self):
self.message = "The assumed role ARN contains a value for service distinct than IAM or STS, please input a valid ARN"
super().__init__(self.message)
class RoleArnParsingServiceNotSTS(Exception):
def __init__(self):
self.message = "The assumed role ARN contains a value for service distinct than STS, please input a valid ARN"
super().__init__(self.message)
class RoleArnParsingInvalidAccountID(Exception):
def __init__(self):
self.message = "The assumed role ARN contains a value for account id empty or invalid, a valid account id must be composed of 12 numbers, please input a valid ARN"
super().__init__(self.message)
class RoleArnParsingInvalidResourceType(Exception):
def __init__(self):
self.message = "The assumed role ARN contains a value for resource type different than role, please input a valid ARN"
super().__init__(self.message)
class RoleArnParsingEmptyResource(Exception):
def __init__(self):
self.message = "The assumed role ARN does not contain a value for resource, please input a valid ARN"
super().__init__(self.message)

View File

@@ -1,8 +1,9 @@
import os
from typing import Optional
from pydantic import BaseModel
from prowler.providers.aws.lib.arn.error import RoleArnParsingFailedMissingFields
from prowler.providers.aws.exceptions.exceptions import AWSIAMRoleARNMissingFields
class ARN(BaseModel):
@@ -18,7 +19,7 @@ class ARN(BaseModel):
# Validate the ARN
## Check that arn starts with arn
if not arn.startswith("arn:"):
raise RoleArnParsingFailedMissingFields
raise AWSIAMRoleARNMissingFields(file=os.path.basename(__file__))
## Retrieve fields
arn_elements = arn.split(":", 5)
data = {

View File

@@ -40,6 +40,7 @@ class AWSService:
self.audited_account_arn = provider.identity.account_arn
self.audited_partition = provider.identity.partition
self.audit_resources = provider.audit_resources
# TODO: remove this
self.audited_checks = provider.audit_metadata.expected_checks
self.audit_config = provider.audit_config
self.fixer_config = provider.fixer_config

View File

@@ -5,6 +5,7 @@ from boto3.session import Session
from botocore.config import Config
from prowler.config.config import output_file_timestamp
from prowler.providers.aws.config import AWS_STS_GLOBAL_ENDPOINT_REGION
from prowler.providers.aws.lib.arn.models import ARN
from prowler.providers.common.models import ProviderOutputOptions
@@ -34,7 +35,7 @@ class AWSAssumeRoleInfo:
external_id: str
mfa_enabled: bool
role_session_name: str
sts_region: str
sts_region: str = AWS_STS_GLOBAL_ENDPOINT_REGION
@dataclass

View File

@@ -14,11 +14,11 @@ class AccessAnalyzer(AWSService):
# Call AWSService's __init__
super().__init__(__class__.__name__, provider)
self.analyzers = []
self.__threading_call__(self.__list_analyzers__)
self.__list_findings__()
self.__get_finding_status__()
self.__threading_call__(self._list_analyzers)
self._list_findings()
self._get_finding_status()
def __list_analyzers__(self, regional_client):
def _list_analyzers(self, regional_client):
logger.info("AccessAnalyzer - Listing Analyzers...")
try:
list_analyzers_paginator = regional_client.get_paginator("list_analyzers")
@@ -57,7 +57,7 @@ class AccessAnalyzer(AWSService):
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __get_finding_status__(self):
def _get_finding_status(self):
logger.info("AccessAnalyzer - Get Finding status...")
try:
for analyzer in self.analyzers:
@@ -87,7 +87,7 @@ class AccessAnalyzer(AWSService):
# TODO: We need to include ListFindingsV2
# https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/accessanalyzer/client/list_findings_v2.html
def __list_findings__(self):
def _list_findings(self):
logger.info("AccessAnalyzer - Listing Findings per Analyzer...")
try:
for analyzer in self.analyzers:

View File

@@ -13,10 +13,10 @@ class Account(AWSService):
# Call AWSService's __init__
super().__init__(__class__.__name__, provider)
self.number_of_contacts = 4
self.contact_base = self.__get_contact_information__()
self.contacts_billing = self.__get_alternate_contact__("BILLING")
self.contacts_security = self.__get_alternate_contact__("SECURITY")
self.contacts_operations = self.__get_alternate_contact__("OPERATIONS")
self.contact_base = self._get_contact_information()
self.contacts_billing = self._get_alternate_contact("BILLING")
self.contacts_security = self._get_alternate_contact("SECURITY")
self.contacts_operations = self._get_alternate_contact("OPERATIONS")
if self.contact_base:
# Set of contact phone numbers
@@ -42,7 +42,7 @@ class Account(AWSService):
self.contacts_operations.email,
}
def __get_contact_information__(self):
def _get_contact_information(self):
try:
primary_account_contact = self.client.get_contact_information()[
"ContactInformation"
@@ -65,7 +65,7 @@ class Account(AWSService):
)
return Contact(type="PRIMARY")
def __get_alternate_contact__(self, contact_type: str):
def _get_alternate_contact(self, contact_type: str):
try:
account_contact = self.client.get_alternate_contact(
AlternateContactType=contact_type

View File

@@ -6,29 +6,30 @@ class acm_certificates_transparency_logs_enabled(Check):
def execute(self):
findings = []
for certificate in acm_client.certificates:
report = Check_Report_AWS(self.metadata())
report.region = certificate.region
if certificate.type == "IMPORTED":
report.status = "PASS"
report.status_extended = f"ACM Certificate {certificate.id} for {certificate.name} is imported."
report.resource_id = certificate.id
report.resource_details = certificate.name
report.resource_arn = certificate.arn
report.resource_tags = certificate.tags
else:
if not certificate.transparency_logging:
report.status = "FAIL"
report.status_extended = f"ACM Certificate {certificate.id} for {certificate.name} has Certificate Transparency logging disabled."
if certificate.in_use or acm_client.provider.scan_unused_services:
report = Check_Report_AWS(self.metadata())
report.region = certificate.region
if certificate.type == "IMPORTED":
report.status = "PASS"
report.status_extended = f"ACM Certificate {certificate.id} for {certificate.name} is imported."
report.resource_id = certificate.id
report.resource_details = certificate.name
report.resource_arn = certificate.arn
report.resource_tags = certificate.tags
else:
report.status = "PASS"
report.status_extended = f"ACM Certificate {certificate.id} for {certificate.name} has Certificate Transparency logging enabled."
report.resource_id = certificate.id
report.resource_details = certificate.name
report.resource_arn = certificate.arn
report.resource_tags = certificate.tags
findings.append(report)
if not certificate.transparency_logging:
report.status = "FAIL"
report.status_extended = f"ACM Certificate {certificate.id} for {certificate.name} has Certificate Transparency logging disabled."
report.resource_id = certificate.id
report.resource_details = certificate.name
report.resource_arn = certificate.arn
report.resource_tags = certificate.tags
else:
report.status = "PASS"
report.status_extended = f"ACM Certificate {certificate.id} for {certificate.name} has Certificate Transparency logging enabled."
report.resource_id = certificate.id
report.resource_details = certificate.name
report.resource_arn = certificate.arn
report.resource_tags = certificate.tags
findings.append(report)
return findings

View File

@@ -0,0 +1,32 @@
{
"Provider": "aws",
"CheckID": "acm_certificates_with_secure_key_algorithms",
"CheckTitle": "Check if ACM Certificates use a secure key algorithm",
"CheckType": [
"Data Protection"
],
"ServiceName": "acm",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:acm:region:account-id:certificate/resource-id",
"Severity": "high",
"ResourceType": "AwsCertificateManagerCertificate",
"Description": "Check if ACM Certificates use a secure key algorithm (RSA 2048 bits or more, or ECDSA 256 bits or more). For example certificates that use RSA-1024 can be compromised because the encryption could be broken in no more than 2^80 guesses making it vulnerable to a factorization attack.",
"Risk": "Certificates with weak RSA or ECDSA keys can be compromised because the length of the key defines the security of the encryption. The number of bits in the key determines the number of guesses an attacker would have to make in order to decrypt the data. The more bits in the key, the more secure the encryption.",
"RelatedUrl": "https://docs.aws.amazon.com/acm/latest/userguide/acm-certificate.html",
"Remediation": {
"Code": {
"CLI": "",
"NativeIaC": "",
"Other": "",
"Terraform": ""
},
"Recommendation": {
"Text": "Ensure that all ACM certificates use a secure key algorithm. If any certificates use smaller keys, regenerate them with a secure key size and update any systems that rely on these certificates.",
"Url": "https://docs.aws.amazon.com/securityhub/latest/userguide/acm-controls.html#acm-2"
}
},
"Categories": [],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
}

View File

@@ -0,0 +1,26 @@
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.acm.acm_client import acm_client
class acm_certificates_with_secure_key_algorithms(Check):
    """Flag ACM certificates whose key algorithm is listed as insecure
    in the audit config (default: RSA-1024)."""

    def execute(self):
        findings = []
        for certificate in acm_client.certificates:
            # Skip unused certificates unless unused services are in scope.
            if not (certificate.in_use or acm_client.provider.scan_unused_services):
                continue
            report = Check_Report_AWS(self.metadata())
            report.region = certificate.region
            report.resource_id = certificate.id
            report.resource_details = certificate.name
            report.resource_arn = certificate.arn
            report.resource_tags = certificate.tags
            if certificate.key_algorithm in acm_client.audit_config.get(
                "insecure_algorithms", ["RSA-1024"]
            ):
                report.status = "FAIL"
                report.status_extended = f"ACM Certificate {certificate.id} for {certificate.name} does not use a secure key algorithm ({certificate.key_algorithm})."
            else:
                report.status = "PASS"
                report.status_extended = f"ACM Certificate {certificate.id} for {certificate.name} uses a secure key algorithm ({certificate.key_algorithm})."
            findings.append(report)
        return findings

View File

@@ -14,17 +14,28 @@ class ACM(AWSService):
# Call AWSService's __init__
super().__init__(__class__.__name__, provider)
self.certificates = []
self.__threading_call__(self.__list_certificates__)
self.__describe_certificates__()
self.__list_tags_for_certificate__()
self.__threading_call__(self._list_certificates)
self._describe_certificates()
self._list_tags_for_certificate()
def __list_certificates__(self, regional_client):
def _list_certificates(self, regional_client):
logger.info("ACM - Listing Certificates...")
try:
includes = {
"keyTypes": [
"RSA_1024",
"RSA_2048",
"RSA_3072",
"RSA_4096",
"EC_prime256v1",
"EC_secp384r1",
"EC_secp521r1",
]
}
list_certificates_paginator = regional_client.get_paginator(
"list_certificates"
)
for page in list_certificates_paginator.paginate():
for page in list_certificates_paginator.paginate(Includes=includes):
for certificate in page["CertificateSummaryList"]:
if not self.audit_resources or (
is_resource_filtered(
@@ -49,6 +60,7 @@ class ACM(AWSService):
name=certificate["DomainName"],
id=certificate["CertificateArn"].split("/")[-1],
type=certificate["Type"],
key_algorithm=certificate["KeyAlgorithm"],
expiration_days=certificate_expiration_time,
in_use=certificate.get("InUse", False),
transparency_logging=False,
@@ -60,7 +72,7 @@ class ACM(AWSService):
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __describe_certificates__(self):
def _describe_certificates(self):
logger.info("ACM - Describing Certificates...")
try:
for certificate in self.certificates:
@@ -78,7 +90,7 @@ class ACM(AWSService):
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __list_tags_for_certificate__(self):
def _list_tags_for_certificate(self):
logger.info("ACM - List Tags...")
try:
for certificate in self.certificates:
@@ -98,6 +110,7 @@ class Certificate(BaseModel):
name: str
id: str
type: str
key_algorithm: str
tags: Optional[list] = []
expiration_days: int
in_use: bool

View File

@@ -14,13 +14,13 @@ class APIGateway(AWSService):
# Call AWSService's __init__
super().__init__(__class__.__name__, provider)
self.rest_apis = []
self.__threading_call__(self.__get_rest_apis__)
self.__get_authorizers__()
self.__get_rest_api__()
self.__get_stages__()
self.__get_resources__()
self.__threading_call__(self._get_rest_apis)
self._get_authorizers()
self._get_rest_api()
self._get_stages()
self._get_resources()
def __get_rest_apis__(self, regional_client):
def _get_rest_apis(self, regional_client):
logger.info("APIGateway - Getting Rest APIs...")
try:
get_rest_apis_paginator = regional_client.get_paginator("get_rest_apis")
@@ -44,7 +44,7 @@ class APIGateway(AWSService):
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __get_authorizers__(self):
def _get_authorizers(self):
logger.info("APIGateway - Getting Rest APIs authorizer...")
try:
for rest_api in self.rest_apis:
@@ -75,7 +75,7 @@ class APIGateway(AWSService):
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __get_rest_api__(self):
def _get_rest_api(self):
logger.info("APIGateway - Describing Rest API...")
try:
for rest_api in self.rest_apis:
@@ -103,7 +103,7 @@ class APIGateway(AWSService):
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __get_stages__(self):
def _get_stages(self):
logger.info("APIGateway - Getting stages for Rest APIs...")
try:
for rest_api in self.rest_apis:
@@ -151,7 +151,7 @@ class APIGateway(AWSService):
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __get_resources__(self):
def _get_resources(self):
logger.info("APIGateway - Getting API resources...")
try:
for rest_api in self.rest_apis:

View File

@@ -13,11 +13,11 @@ class ApiGatewayV2(AWSService):
# Call AWSService's __init__
super().__init__(__class__.__name__, provider)
self.apis = []
self.__threading_call__(self.__get_apis__)
self.__get_authorizers__()
self.__get_stages__()
self.__threading_call__(self._get_apis)
self._get_authorizers()
self._get_stages()
def __get_apis__(self, regional_client):
def _get_apis(self, regional_client):
logger.info("APIGatewayv2 - Getting APIs...")
try:
get_apis_paginator = regional_client.get_paginator("get_apis")
@@ -41,7 +41,7 @@ class ApiGatewayV2(AWSService):
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __get_authorizers__(self):
def _get_authorizers(self):
logger.info("APIGatewayv2 - Getting APIs authorizer...")
try:
for api in self.apis:
@@ -54,7 +54,7 @@ class ApiGatewayV2(AWSService):
f"{error.__class__.__name__}:{error.__traceback__.tb_lineno} -- {error}"
)
def __get_stages__(self):
def _get_stages(self):
logger.info("APIGatewayv2 - Getting stages for APIs...")
try:
for api in self.apis:

View File

@@ -13,10 +13,10 @@ class AppStream(AWSService):
# Call AWSService's __init__
super().__init__(__class__.__name__, provider)
self.fleets = []
self.__threading_call__(self.__describe_fleets__)
self.__list_tags_for_resource__()
self.__threading_call__(self._describe_fleets)
self._list_tags_for_resource()
def __describe_fleets__(self, regional_client):
def _describe_fleets(self, regional_client):
logger.info("AppStream - Describing Fleets...")
try:
describe_fleets_paginator = regional_client.get_paginator("describe_fleets")
@@ -50,7 +50,7 @@ class AppStream(AWSService):
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __list_tags_for_resource__(self):
def _list_tags_for_resource(self):
logger.info("AppStream - List Tags...")
try:
for fleet in self.fleets:

View File

@@ -13,12 +13,12 @@ class Athena(AWSService):
# Call AWSService's __init__
super().__init__(__class__.__name__, provider)
self.workgroups = {}
self.__threading_call__(self.__list_workgroups__)
self.__get_workgroups__()
self.__list_query_executions__()
self.__list_tags_for_resource__()
self.__threading_call__(self._list_workgroups)
self._get_workgroups()
self._list_query_executions()
self._list_tags_for_resource()
def __list_workgroups__(self, regional_client):
def _list_workgroups(self, regional_client):
logger.info("Athena - Listing WorkGroups...")
try:
list_workgroups = regional_client.list_work_groups()
@@ -44,7 +44,7 @@ class Athena(AWSService):
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __get_workgroups__(self):
def _get_workgroups(self):
logger.info("Athena - Getting WorkGroups...")
try:
for workgroup in self.workgroups.values():
@@ -88,7 +88,7 @@ class Athena(AWSService):
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __list_query_executions__(self):
def _list_query_executions(self):
logger.info("Athena - Listing Queries...")
try:
for workgroup in self.workgroups.values():
@@ -109,7 +109,7 @@ class Athena(AWSService):
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __list_tags_for_resource__(self):
def _list_tags_for_resource(self):
logger.info("Athena - Listing Tags...")
try:
for workgroup in self.workgroups.values():

View File

@@ -1,14 +1,10 @@
import os
import tempfile
import zlib
from base64 import b64decode
from detect_secrets import SecretsCollection
from detect_secrets.settings import default_settings
from prowler.config.config import encoding_format_utf_8
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.lib.logger import logger
from prowler.lib.utils.utils import detect_secrets_scan
from prowler.providers.aws.services.autoscaling.autoscaling_client import (
autoscaling_client,
)
@@ -17,6 +13,9 @@ from prowler.providers.aws.services.autoscaling.autoscaling_client import (
class autoscaling_find_secrets_ec2_launch_configuration(Check):
def execute(self):
findings = []
secrets_ignore_patterns = autoscaling_client.audit_config.get(
"secrets_ignore_patterns", []
)
for configuration in autoscaling_client.launch_configurations:
report = Check_Report_AWS(self.metadata())
report.region = configuration.region
@@ -24,9 +23,7 @@ class autoscaling_find_secrets_ec2_launch_configuration(Check):
report.resource_arn = configuration.arn
if configuration.user_data:
temp_user_data_file = tempfile.NamedTemporaryFile(delete=False)
user_data = b64decode(configuration.user_data)
try:
if user_data[0:2] == b"\x1f\x8b": # GZIP magic number
user_data = zlib.decompress(
@@ -45,22 +42,16 @@ class autoscaling_find_secrets_ec2_launch_configuration(Check):
)
continue
temp_user_data_file.write(
bytes(user_data, encoding="raw_unicode_escape")
has_secrets = detect_secrets_scan(
data=user_data, excluded_secrets=secrets_ignore_patterns
)
temp_user_data_file.close()
secrets = SecretsCollection()
with default_settings():
secrets.scan_file(temp_user_data_file.name)
if secrets.json():
if has_secrets:
report.status = "FAIL"
report.status_extended = f"Potential secret found in autoscaling {configuration.name} User Data."
else:
report.status = "PASS"
report.status_extended = f"No secrets found in autoscaling {configuration.name} User Data."
os.remove(temp_user_data_file.name)
else:
report.status = "PASS"
report.status_extended = f"No secrets found in autoscaling {configuration.name} since User Data is empty."

View File

@@ -11,11 +11,11 @@ class AutoScaling(AWSService):
# Call AWSService's __init__
super().__init__(__class__.__name__, provider)
self.launch_configurations = []
self.__threading_call__(self.__describe_launch_configurations__)
self.__threading_call__(self._describe_launch_configurations)
self.groups = []
self.__threading_call__(self.__describe_auto_scaling_groups__)
self.__threading_call__(self._describe_auto_scaling_groups)
def __describe_launch_configurations__(self, regional_client):
def _describe_launch_configurations(self, regional_client):
logger.info("AutoScaling - Describing Launch Configurations...")
try:
describe_launch_configurations_paginator = regional_client.get_paginator(
@@ -44,7 +44,7 @@ class AutoScaling(AWSService):
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __describe_auto_scaling_groups__(self, regional_client):
def _describe_auto_scaling_groups(self, regional_client):
logger.info("AutoScaling - Describing AutoScaling Groups...")
try:
describe_auto_scaling_groups_paginator = regional_client.get_paginator(

View File

@@ -0,0 +1,30 @@
{
"Provider": "aws",
"CheckID": "awslambda_function_inside_vpc",
"CheckTitle": "Ensure AWS Lambda Functions Are Deployed Inside a VPC",
"CheckType": [],
"ServiceName": "lambda",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:lambda:region:account-id:function/function-name",
"Severity": "low",
"ResourceType": "AwsLambdaFunction",
"Description": "This check verifies whether an AWS Lambda function is deployed within a Virtual Private Cloud (VPC). Deploying Lambda functions inside a VPC improves security by allowing control over the network environment, reducing the exposure to public internet threats.",
"Risk": "Lambda functions not deployed in a VPC may expose your application to increased security risks, including unauthorized access and data breaches. Without the network isolation provided by a VPC, your Lambda functions are more vulnerable to attacks.",
"RelatedUrl": "https://docs.aws.amazon.com/lambda/latest/dg/configuration-vpc.html",
"Remediation": {
"Code": {
"CLI": "aws lambda update-function-configuration --region <region-name> --function-name <function-name> --vpc-config SubnetIds=<subnet-id-1>,<subnet-id-2>,SecurityGroupIds=<security-group-id>",
"NativeIaC": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/aws/Lambda/function-in-vpc.html",
"Other": "https://docs.aws.amazon.com/securityhub/latest/userguide/lambda-controls.html#lambda-3",
"Terraform": "https://docs.prowler.com/checks/aws/general-policies/ensure-that-aws-lambda-function-is-configured-inside-a-vpc-1/"
},
"Recommendation": {
"Text": "Configure your AWS Lambda functions to operate within a Virtual Private Cloud (VPC) to enhance security and control network access.",
"Url": ""
}
},
"Categories": [],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
}

View File

@@ -0,0 +1,27 @@
from typing import List
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.awslambda.awslambda_client import awslambda_client
class awslambda_function_inside_vpc(Check):
    """PASS for each Lambda function attached to a VPC, FAIL otherwise."""

    def execute(self) -> List[Check_Report_AWS]:
        findings = []
        for arn, function in awslambda_client.functions.items():
            report = Check_Report_AWS(self.metadata())
            report.region = function.region
            report.resource_id = function.name
            report.resource_arn = arn
            report.resource_tags = function.tags
            if function.vpc_id:
                report.status = "PASS"
                report.status_extended = f"Lambda function {function.name} is inside of VPC {function.vpc_id}"
            else:
                report.status = "FAIL"
                report.status_extended = (
                    f"Lambda function {function.name} is not inside a VPC"
                )
            findings.append(report)
        return findings

View File

@@ -1,10 +1,8 @@
import os
import tempfile
from detect_secrets import SecretsCollection
from detect_secrets.settings import default_settings
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.lib.utils.utils import detect_secrets_scan
from prowler.providers.aws.services.awslambda.awslambda_client import awslambda_client
@@ -12,7 +10,10 @@ class awslambda_function_no_secrets_in_code(Check):
def execute(self):
findings = []
if awslambda_client.functions:
for function, function_code in awslambda_client.__get_function_code__():
secrets_ignore_patterns = awslambda_client.audit_config.get(
"secrets_ignore_patterns", []
)
for function, function_code in awslambda_client._get_function_code():
if function_code:
report = Check_Report_AWS(self.metadata())
report.region = function.region
@@ -30,25 +31,23 @@ class awslambda_function_no_secrets_in_code(Check):
files_in_zip = next(os.walk(tmp_dir_name))[2]
secrets_findings = []
for file in files_in_zip:
secrets = SecretsCollection()
with default_settings():
secrets.scan_file(f"{tmp_dir_name}/{file}")
detect_secrets_output = secrets.json()
detect_secrets_output = detect_secrets_scan(
file=f"{tmp_dir_name}/{file}",
excluded_secrets=secrets_ignore_patterns,
)
if detect_secrets_output:
for (
file_name
secret
) in (
detect_secrets_output.keys()
detect_secrets_output
): # Appears that only 1 file is being scanned at a time, so could rework this
output_file_name = file_name.replace(
output_file_name = secret["filename"].replace(
f"{tmp_dir_name}/", ""
)
secrets_string = ", ".join(
[
f"{secret['type']} on line {secret['line_number']}"
for secret in detect_secrets_output[
file_name
]
for secret in detect_secrets_output
]
)
secrets_findings.append(

View File

@@ -1,17 +1,16 @@
import json
import os
import tempfile
from detect_secrets import SecretsCollection
from detect_secrets.settings import default_settings
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.lib.utils.utils import detect_secrets_scan
from prowler.providers.aws.services.awslambda.awslambda_client import awslambda_client
class awslambda_function_no_secrets_in_variables(Check):
def execute(self):
findings = []
secrets_ignore_patterns = awslambda_client.audit_config.get(
"secrets_ignore_patterns", []
)
for function in awslambda_client.functions.values():
report = Check_Report_AWS(self.metadata())
report.region = function.region
@@ -25,32 +24,21 @@ class awslambda_function_no_secrets_in_variables(Check):
)
if function.environment:
temp_env_data_file = tempfile.NamedTemporaryFile(delete=False)
temp_env_data_file.write(
bytes(
json.dumps(function.environment, indent=2),
encoding="raw_unicode_escape",
)
detect_secrets_output = detect_secrets_scan(
data=json.dumps(function.environment, indent=2),
excluded_secrets=secrets_ignore_patterns,
)
temp_env_data_file.close()
secrets = SecretsCollection()
with default_settings():
secrets.scan_file(temp_env_data_file.name)
detect_secrets_output = secrets.json()
if detect_secrets_output:
environment_variable_names = list(function.environment.keys())
secrets_string = ", ".join(
[
f"{secret['type']} in variable {environment_variable_names[int(secret['line_number']) - 2]}"
for secret in detect_secrets_output[temp_env_data_file.name]
for secret in detect_secrets_output
]
)
report.status = "FAIL"
report.status_extended = f"Potential secret found in Lambda function {function.name} variables -> {secrets_string}."
os.remove(temp_env_data_file.name)
findings.append(report)
return findings

View File

@@ -19,12 +19,12 @@ class Lambda(AWSService):
# Call AWSService's __init__
super().__init__(__class__.__name__, provider)
self.functions = {}
self.__threading_call__(self.__list_functions__)
self.__list_tags_for_resource__()
self.__threading_call__(self.__get_policy__)
self.__threading_call__(self.__get_function_url_config__)
self.__threading_call__(self._list_functions)
self._list_tags_for_resource()
self.__threading_call__(self._get_policy)
self.__threading_call__(self._get_function_url_config)
def __list_functions__(self, regional_client):
def _list_functions(self, regional_client):
logger.info("Lambda - Listing Functions...")
try:
list_functions_paginator = regional_client.get_paginator("list_functions")
@@ -37,13 +37,13 @@ class Lambda(AWSService):
):
lambda_name = function["FunctionName"]
lambda_arn = function["FunctionArn"]
vpc_config = function.get("VpcConfig", {})
# We must use the Lambda ARN as the dict key since we could have Lambdas in different regions with the same name
self.functions[lambda_arn] = Function(
name=lambda_name,
arn=lambda_arn,
security_groups=function.get("VpcConfig", {}).get(
"SecurityGroupIds", []
),
security_groups=vpc_config.get("SecurityGroupIds", []),
vpc_id=vpc_config.get("VpcId"),
region=regional_client.region,
)
if "Runtime" in function:
@@ -61,12 +61,12 @@ class Lambda(AWSService):
f" {error}"
)
def __get_function_code__(self):
def _get_function_code(self):
logger.info("Lambda - Getting Function Code...")
# Use a thread pool handle the queueing and execution of the __fetch_function_code__ tasks, up to max_workers tasks concurrently.
# Use a thread pool to handle the queueing and execution of the _fetch_function_code tasks, up to max_workers tasks concurrently.
lambda_functions_to_fetch = {
self.thread_pool.submit(
self.__fetch_function_code__, function.name, function.region
self._fetch_function_code, function.name, function.region
): function
for function in self.functions.values()
}
@@ -82,7 +82,7 @@ class Lambda(AWSService):
f"{function.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __fetch_function_code__(self, function_name, function_region):
def _fetch_function_code(self, function_name, function_region):
try:
regional_client = self.regional_clients[function_region]
function_information = regional_client.get_function(
@@ -101,7 +101,7 @@ class Lambda(AWSService):
)
raise
def __get_policy__(self, regional_client):
def _get_policy(self, regional_client):
logger.info("Lambda - Getting Policy...")
try:
for function in self.functions.values():
@@ -124,7 +124,7 @@ class Lambda(AWSService):
f" {error}"
)
def __get_function_url_config__(self, regional_client):
def _get_function_url_config(self, regional_client):
logger.info("Lambda - Getting Function URL Config...")
try:
for function in self.functions.values():
@@ -153,7 +153,7 @@ class Lambda(AWSService):
f" {error}"
)
def __list_tags_for_resource__(self):
def _list_tags_for_resource(self):
logger.info("Lambda - List Tags...")
try:
for function in self.functions.values():
@@ -201,4 +201,5 @@ class Function(BaseModel):
policy: dict = None
code: LambdaCode = None
url_config: URLConfig = None
vpc_id: Optional[str]
tags: Optional[list] = []

View File

@@ -18,13 +18,13 @@ class Backup(AWSService):
self.report_plan_arn_template = f"arn:{self.audited_partition}:backup:{self.region}:{self.audited_account}:report-plan"
self.backup_vault_arn_template = f"arn:{self.audited_partition}:backup:{self.region}:{self.audited_account}:backup-vault"
self.backup_vaults = []
self.__threading_call__(self.__list_backup_vaults__)
self.__threading_call__(self._list_backup_vaults)
self.backup_plans = []
self.__threading_call__(self.__list_backup_plans__)
self.__threading_call__(self._list_backup_plans)
self.backup_report_plans = []
self.__threading_call__(self.__list_backup_report_plans__)
self.__threading_call__(self._list_backup_report_plans)
def __list_backup_vaults__(self, regional_client):
def _list_backup_vaults(self, regional_client):
logger.info("Backup - Listing Backup Vaults...")
try:
list_backup_vaults_paginator = regional_client.get_paginator(
@@ -70,7 +70,7 @@ class Backup(AWSService):
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __list_backup_plans__(self, regional_client):
def _list_backup_plans(self, regional_client):
logger.info("Backup - Listing Backup Plans...")
try:
list_backup_plans_paginator = regional_client.get_paginator(
@@ -105,7 +105,7 @@ class Backup(AWSService):
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __list_backup_report_plans__(self, regional_client):
def _list_backup_report_plans(self, regional_client):
logger.info("Backup - Listing Backup Report Plans...")
try:

View File

@@ -14,10 +14,10 @@ class CloudFormation(AWSService):
# Call AWSService's __init__
super().__init__(__class__.__name__, provider)
self.stacks = []
self.__threading_call__(self.__describe_stacks__)
self.__describe_stack__()
self.__threading_call__(self._describe_stacks)
self._describe_stack()
def __describe_stacks__(self, regional_client):
def _describe_stacks(self, regional_client):
"""Get ALL CloudFormation Stacks"""
logger.info("CloudFormation - Describing Stacks...")
try:
@@ -47,7 +47,7 @@ class CloudFormation(AWSService):
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __describe_stack__(self):
def _describe_stack(self):
"""Get Details for a CloudFormation Stack"""
logger.info("CloudFormation - Describing Stack to get specific details...")
for stack in self.stacks:

View File

@@ -1,10 +1,5 @@
import os
import tempfile
from detect_secrets import SecretsCollection
from detect_secrets.settings import default_settings
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.lib.utils.utils import detect_secrets_scan
from prowler.providers.aws.services.cloudformation.cloudformation_client import (
cloudformation_client,
)
@@ -16,6 +11,9 @@ class cloudformation_stack_outputs_find_secrets(Check):
def execute(self):
"""Execute the cloudformation_stack_outputs_find_secrets check"""
findings = []
secrets_ignore_patterns = cloudformation_client.audit_config.get(
"secrets_ignore_patterns", []
)
for stack in cloudformation_client.stacks:
report = Check_Report_AWS(self.metadata())
report.region = stack.region
@@ -25,26 +23,25 @@ class cloudformation_stack_outputs_find_secrets(Check):
report.status = "PASS"
report.status_extended = f"No secrets found in Stack {stack.name} Outputs."
if stack.outputs:
temp_output_file = tempfile.NamedTemporaryFile(delete=False)
data = ""
# Store the CloudFormation Stack Outputs into a file
for output in stack.outputs:
temp_output_file.write(f"{output}".encode())
temp_output_file.close()
data += f"{output}\n"
# Init detect_secrets
secrets = SecretsCollection()
# Scan file for secrets
with default_settings():
secrets.scan_file(temp_output_file.name)
if secrets.json():
report.status = "FAIL"
report.status_extended = (
f"Potential secret found in Stack {stack.name} Outputs."
detect_secrets_output = detect_secrets_scan(
data=data, excluded_secrets=secrets_ignore_patterns
)
# If secrets are found, update the report status
if detect_secrets_output:
secrets_string = ", ".join(
[
f"{secret['type']} in Output {int(secret['line_number'])}"
for secret in detect_secrets_output
]
)
report.status = "FAIL"
report.status_extended = f"Potential secret found in Stack {stack.name} Outputs -> {secrets_string}."
os.remove(temp_output_file.name)
else:
report.status = "PASS"
report.status_extended = f"CloudFormation {stack.name} has no Outputs."

View File

@@ -14,11 +14,11 @@ class CloudFront(AWSService):
# Call AWSService's __init__
super().__init__(__class__.__name__, provider, global_service=True)
self.distributions = {}
self.__list_distributions__(self.client, self.region)
self.__get_distribution_config__(self.client, self.distributions, self.region)
self.__list_tags_for_resource__(self.client, self.distributions, self.region)
self._list_distributions(self.client, self.region)
self._get_distribution_config(self.client, self.distributions, self.region)
self._list_tags_for_resource(self.client, self.distributions, self.region)
def __list_distributions__(self, client, region) -> dict:
def _list_distributions(self, client, region) -> dict:
logger.info("CloudFront - Listing Distributions...")
try:
list_ditributions_paginator = client.get_paginator("list_distributions")
@@ -44,7 +44,7 @@ class CloudFront(AWSService):
f"{region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __get_distribution_config__(self, client, distributions, region) -> dict:
def _get_distribution_config(self, client, distributions, region) -> dict:
logger.info("CloudFront - Getting Distributions...")
try:
for distribution_id in distributions.keys():
@@ -87,7 +87,7 @@ class CloudFront(AWSService):
f"{region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __list_tags_for_resource__(self, client, distributions, region):
def _list_tags_for_resource(self, client, distributions, region):
logger.info("CloudFront - List Tags...")
try:
for distribution in distributions.values():

View File

@@ -20,7 +20,7 @@ class cloudtrail_bucket_requires_mfa_delete(Check):
report.resource_tags = trail.tags
report.status = "FAIL"
report.status_extended = f"Trail {trail.name} bucket ({trail_bucket}) does not have MFA delete enabled."
for bucket in s3_client.buckets:
for bucket in s3_client.buckets.values():
if trail_bucket == bucket.name:
trail_bucket_is_in_account = True
if bucket.mfa_delete:

View File

@@ -23,7 +23,7 @@ class cloudtrail_logs_s3_bucket_access_logging_enabled(Check):
report.status_extended = f"Multiregion Trail {trail.name} S3 bucket access logging is not enabled for bucket {trail_bucket}."
else:
report.status_extended = f"Single region Trail {trail.name} S3 bucket access logging is not enabled for bucket {trail_bucket}."
for bucket in s3_client.buckets:
for bucket in s3_client.buckets.values():
if trail_bucket == bucket.name:
trail_bucket_is_in_account = True
if bucket.logging:

View File

@@ -23,7 +23,7 @@ class cloudtrail_logs_s3_bucket_is_not_publicly_accessible(Check):
report.status_extended = f"S3 Bucket {trail_bucket} from multiregion trail {trail.name} is not publicly accessible."
else:
report.status_extended = f"S3 Bucket {trail_bucket} from single region trail {trail.name} is not publicly accessible."
for bucket in s3_client.buckets:
for bucket in s3_client.buckets.values():
# Here we need to ensure that acl_grantee is filled since if we don't have permissions to query the api for a concrete region
# (for example due to a SCP) we are going to try access an attribute from a None type
if trail_bucket == bucket.name:

View File

@@ -32,7 +32,7 @@ class cloudtrail_multi_region_enabled(Check):
"No CloudTrail trails enabled with logging were found."
)
report.resource_arn = (
cloudtrail_client.__get_trail_arn_template__(region)
cloudtrail_client._get_trail_arn_template(region)
)
report.resource_id = cloudtrail_client.audited_account
# If there are no trails logging it is needed to store the FAIL once all the trails have been checked

View File

@@ -14,9 +14,7 @@ class cloudtrail_multi_region_enabled_logging_management_events(Check):
report.status_extended = "No CloudTrail trails enabled and logging management events were found."
report.region = region
report.resource_id = cloudtrail_client.audited_account
report.resource_arn = cloudtrail_client.__get_trail_arn_template__(
region
)
report.resource_arn = cloudtrail_client._get_trail_arn_template(region)
trail_is_logging_management_events = False
for trail in cloudtrail_client.trails.values():
if trail.region == region or trail.is_multiregion:

View File

@@ -17,7 +17,7 @@
"Code": {
"CLI": "aws cloudtrail put-event-selectors --trail-name <YOUR_TRAIL_NAME_HERE> --event-selectors '[{ 'ReadWriteType': 'ReadOnly', 'IncludeManagementEvents':true, 'DataResources': [{ 'Type': 'AWS::S3::Object', 'Values': ['arn:aws:s3'] }] }]'",
"NativeIaC": "",
"Other": "",
"Other": "https://docs.aws.amazon.com/securityhub/latest/userguide/s3-controls.html#s3-23",
"Terraform": ""
},
"Recommendation": {

View File

@@ -17,7 +17,7 @@
"Code": {
"CLI": "aws cloudtrail put-event-selectors --trail-name <YOUR_TRAIL_NAME_HERE> --event-selectors '[{ 'ReadWriteType': 'WriteOnly', 'IncludeManagementEvents':true, 'DataResources': [{ 'Type': 'AWS::S3::Object', 'Values': ['arn:aws:s3'] }] }]'",
"NativeIaC": "",
"Other": "",
"Other": "https://docs.aws.amazon.com/securityhub/latest/userguide/s3-controls.html#s3-22",
"Terraform": ""
},
"Recommendation": {

View File

@@ -16,21 +16,21 @@ class Cloudtrail(AWSService):
super().__init__(__class__.__name__, provider)
self.trail_arn_template = f"arn:{self.audited_partition}:cloudtrail:{self.region}:{self.audited_account}:trail"
self.trails = {}
self.__threading_call__(self.__get_trails__)
self.__threading_call__(self._get_trails)
if self.trails:
self.__get_trail_status__()
self.__get_insight_selectors__()
self.__get_event_selectors__()
self.__list_tags_for_resource__()
self._get_trail_status()
self._get_insight_selectors()
self._get_event_selectors()
self._list_tags_for_resource()
def __get_trail_arn_template__(self, region):
def _get_trail_arn_template(self, region):
return (
f"arn:{self.audited_partition}:cloudtrail:{region}:{self.audited_account}:trail"
if region
else f"arn:{self.audited_partition}:cloudtrail:{self.region}:{self.audited_account}:trail"
)
def __get_trails__(self, regional_client):
def _get_trails(self, regional_client):
logger.info("Cloudtrail - Getting trails...")
try:
describe_trails = regional_client.describe_trails()["trailList"]
@@ -70,7 +70,7 @@ class Cloudtrail(AWSService):
if trails_count == 0:
if self.trails is None:
self.trails = {}
self.trails[self.__get_trail_arn_template__(regional_client.region)] = (
self.trails[self._get_trail_arn_template(regional_client.region)] = (
Trail(
region=regional_client.region,
)
@@ -91,7 +91,7 @@ class Cloudtrail(AWSService):
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __get_trail_status__(self):
def _get_trail_status(self):
logger.info("Cloudtrail - Getting trail status")
try:
for trail in self.trails.values():
@@ -109,7 +109,7 @@ class Cloudtrail(AWSService):
f"{client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __get_event_selectors__(self):
def _get_event_selectors(self):
logger.info("Cloudtrail - Getting event selector")
try:
for trail in self.trails.values():
@@ -142,7 +142,7 @@ class Cloudtrail(AWSService):
f"{client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __get_insight_selectors__(self):
def _get_insight_selectors(self):
logger.info("Cloudtrail - Getting trail insight selectors...")
try:
@@ -192,7 +192,7 @@ class Cloudtrail(AWSService):
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __lookup_events__(self, trail, event_name, minutes):
def _lookup_events(self, trail, event_name, minutes):
logger.info("CloudTrail - Lookup Events...")
try:
regional_client = self.regional_clients[trail.region]
@@ -208,7 +208,7 @@ class Cloudtrail(AWSService):
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __list_tags_for_resource__(self):
def _list_tags_for_resource(self):
logger.info("CloudTrail - List Tags...")
try:
for trail in self.trails.values():

View File

@@ -33,7 +33,7 @@ class cloudtrail_threat_detection_enumeration(Check):
)
for trail in trails_to_scan:
for event_name in enumeration_actions:
for event_log in cloudtrail_client.__lookup_events__(
for event_log in cloudtrail_client._lookup_events(
trail=trail,
event_name=event_name,
minutes=threat_detection_minutes,
@@ -52,7 +52,7 @@ class cloudtrail_threat_detection_enumeration(Check):
report = Check_Report_AWS(self.metadata())
report.region = cloudtrail_client.region
report.resource_id = cloudtrail_client.audited_account
report.resource_arn = cloudtrail_client.__get_trail_arn_template__(
report.resource_arn = cloudtrail_client._get_trail_arn_template(
cloudtrail_client.region
)
report.status = "FAIL"
@@ -62,7 +62,7 @@ class cloudtrail_threat_detection_enumeration(Check):
report = Check_Report_AWS(self.metadata())
report.region = cloudtrail_client.region
report.resource_id = cloudtrail_client.audited_account
report.resource_arn = cloudtrail_client.__get_trail_arn_template__(
report.resource_arn = cloudtrail_client._get_trail_arn_template(
cloudtrail_client.region
)
report.status = "PASS"

View File

@@ -34,7 +34,7 @@ class cloudtrail_threat_detection_privilege_escalation(Check):
)
for trail in trails_to_scan:
for event_name in privilege_escalation_actions:
for event_log in cloudtrail_client.__lookup_events__(
for event_log in cloudtrail_client._lookup_events(
trail=trail,
event_name=event_name,
minutes=threat_detection_minutes,
@@ -58,7 +58,7 @@ class cloudtrail_threat_detection_privilege_escalation(Check):
report = Check_Report_AWS(self.metadata())
report.region = cloudtrail_client.region
report.resource_id = cloudtrail_client.audited_account
report.resource_arn = cloudtrail_client.__get_trail_arn_template__(
report.resource_arn = cloudtrail_client._get_trail_arn_template(
cloudtrail_client.region
)
report.status = "FAIL"
@@ -68,7 +68,7 @@ class cloudtrail_threat_detection_privilege_escalation(Check):
report = Check_Report_AWS(self.metadata())
report.region = cloudtrail_client.region
report.resource_id = cloudtrail_client.audited_account
report.resource_arn = cloudtrail_client.__get_trail_arn_template__(
report.resource_arn = cloudtrail_client._get_trail_arn_template(
cloudtrail_client.region
)
report.status = "PASS"

View File

@@ -12,6 +12,9 @@ class cloudwatch_log_group_no_secrets_in_logs(Check):
def execute(self):
findings = []
if logs_client.log_groups:
secrets_ignore_patterns = logs_client.audit_config.get(
"secrets_ignore_patterns", []
)
for log_group in logs_client.log_groups:
report = Check_Report_AWS(self.metadata())
report.status = "PASS"
@@ -31,7 +34,10 @@ class cloudwatch_log_group_no_secrets_in_logs(Check):
for event in log_group.log_streams[log_stream_name]
]
)
log_stream_secrets_output = detect_secrets_scan(log_stream_data)
log_stream_secrets_output = detect_secrets_scan(
data=log_stream_data,
excluded_secrets=secrets_ignore_patterns,
)
if log_stream_secrets_output:
for secret in log_stream_secrets_output:
@@ -63,7 +69,7 @@ class cloudwatch_log_group_no_secrets_in_logs(Check):
# Can get more informative output if there is more than 1 line.
# Will rescan just this event to get the type of secret and the line number
event_detect_secrets_output = detect_secrets_scan(
log_event_data
data=log_event_data
)
if event_detect_secrets_output:
for secret in event_detect_secrets_output:

View File

@@ -15,11 +15,11 @@ class CloudWatch(AWSService):
# Call AWSService's __init__
super().__init__(__class__.__name__, provider)
self.metric_alarms = []
self.__threading_call__(self.__describe_alarms__)
self.__threading_call__(self._describe_alarms)
if self.metric_alarms:
self.__list_tags_for_resource__()
self._list_tags_for_resource()
def __describe_alarms__(self, regional_client):
def _describe_alarms(self, regional_client):
logger.info("CloudWatch - Describing alarms...")
try:
describe_alarms_paginator = regional_client.get_paginator("describe_alarms")
@@ -61,7 +61,7 @@ class CloudWatch(AWSService):
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __list_tags_for_resource__(self):
def _list_tags_for_resource(self):
logger.info("CloudWatch - List Tags...")
try:
for metric_alarm in self.metric_alarms:
@@ -84,8 +84,8 @@ class Logs(AWSService):
self.log_group_arn_template = f"arn:{self.audited_partition}:logs:{self.region}:{self.audited_account}:log-group"
self.metric_filters = []
self.log_groups = []
self.__threading_call__(self.__describe_metric_filters__)
self.__threading_call__(self.__describe_log_groups__)
self.__threading_call__(self._describe_metric_filters)
self.__threading_call__(self._describe_log_groups)
if self.log_groups:
if (
"cloudwatch_log_group_no_secrets_in_logs"
@@ -94,10 +94,10 @@ class Logs(AWSService):
self.events_per_log_group_threshold = (
1000 # The threshold for number of events to return per log group.
)
self.__threading_call__(self.__get_log_events__)
self.__threading_call__(self.__list_tags_for_resource__, self.log_groups)
self.__threading_call__(self._get_log_events)
self.__threading_call__(self._list_tags_for_resource, self.log_groups)
def __describe_metric_filters__(self, regional_client):
def _describe_metric_filters(self, regional_client):
logger.info("CloudWatch Logs - Describing metric filters...")
try:
describe_metric_filters_paginator = regional_client.get_paginator(
@@ -137,7 +137,7 @@ class Logs(AWSService):
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __describe_log_groups__(self, regional_client):
def _describe_log_groups(self, regional_client):
logger.info("CloudWatch Logs - Describing log groups...")
try:
describe_log_groups_paginator = regional_client.get_paginator(
@@ -182,7 +182,7 @@ class Logs(AWSService):
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __get_log_events__(self, regional_client):
def _get_log_events(self, regional_client):
regional_log_groups = [
log_group
for log_group in self.log_groups
@@ -214,7 +214,7 @@ class Logs(AWSService):
f"CloudWatch Logs - Finished retrieving log events in {regional_client.region}..."
)
def __list_tags_for_resource__(self, log_group):
def _list_tags_for_resource(self, log_group):
logger.info(f"CloudWatch Logs - List Tags for Log Group {log_group.name}...")
try:
regional_client = self.regional_clients[log_group.region]

View File

@@ -16,11 +16,11 @@ class CodeArtifact(AWSService):
super().__init__(__class__.__name__, provider)
# repositories is a dictionary containing all the codeartifact service information
self.repositories = {}
self.__threading_call__(self.__list_repositories__)
self.__threading_call__(self.__list_packages__)
self.__list_tags_for_resource__()
self.__threading_call__(self._list_repositories)
self.__threading_call__(self._list_packages)
self._list_tags_for_resource()
def __list_repositories__(self, regional_client):
def _list_repositories(self, regional_client):
logger.info("CodeArtifact - Listing Repositories...")
try:
list_repositories_paginator = regional_client.get_paginator(
@@ -52,7 +52,7 @@ class CodeArtifact(AWSService):
f" {error}"
)
def __list_packages__(self, regional_client):
def _list_packages(self, regional_client):
logger.info("CodeArtifact - Listing Packages and retrieving information...")
for repository in self.repositories:
try:
@@ -169,7 +169,7 @@ class CodeArtifact(AWSService):
f" {error}"
)
def __list_tags_for_resource__(self):
def _list_tags_for_resource(self):
logger.info("CodeArtifact - List Tags...")
try:
for repository in self.repositories.values():

View File

@@ -0,0 +1,32 @@
{
"Provider": "aws",
"CheckID": "codebuild_project_no_secrets_in_variables",
"CheckTitle": "Ensure CodeBuild projects do not contain secrets in plaintext environment variables",
"CheckType": [
"Security Best Practices"
],
"ServiceName": "codebuild",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"Severity": "critical",
"ResourceType": "AwsCodeBuildProject",
"Description": "This check ensures that AWS CodeBuild projects do not contain secrets in environment variables.",
"Risk": "Using plaintext AWS credentials in CodeBuild project environment variables can expose these sensitive keys, leading to unauthorized access and potential security breaches.",
"RelatedUrl": "https://docs.aws.amazon.com/codebuild/latest/userguide/how-to-create-project-console.html",
"Remediation": {
"Code": {
"CLI": "",
"NativeIaC": "",
"Other": "https://docs.aws.amazon.com/securityhub/latest/userguide/codebuild-controls.html#codebuild-2",
"Terraform": ""
},
"Recommendation": {
"Text": "",
"Url": "https://docs.aws.amazon.com/codebuild/latest/userguide/change-project.html"
}
},
"Categories": [],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
}

View File

@@ -0,0 +1,49 @@
import json
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.lib.utils.utils import detect_secrets_scan
from prowler.providers.aws.services.codebuild.codebuild_client import codebuild_client
class codebuild_project_no_secrets_in_variables(Check):
    """Flag CodeBuild projects whose plaintext environment variables contain secrets."""

    def execute(self):
        """Scan every project's PLAINTEXT environment variables with detect-secrets."""
        reports = []
        excluded_variables = codebuild_client.audit_config.get(
            "excluded_sensitive_environment_variables", []
        )
        ignore_patterns = codebuild_client.audit_config.get(
            "secrets_ignore_patterns", []
        )
        for project in codebuild_client.projects.values():
            report = Check_Report_AWS(self.metadata())
            report.region = project.region
            report.resource_id = project.name
            report.resource_arn = project.arn
            report.status = "PASS"
            report.status_extended = f"CodeBuild project {project.name} does not have sensitive environment plaintext credentials."
            leaked = []
            for env_var in project.environment_variables or []:
                # Only plaintext variables can leak; explicitly excluded names are trusted.
                if env_var.type != "PLAINTEXT" or env_var.name in excluded_variables:
                    continue
                scan_result = detect_secrets_scan(
                    data=json.dumps({env_var.name: env_var.value}),
                    excluded_secrets=ignore_patterns,
                )
                if scan_result:
                    leaked.extend(
                        f"{secret['type']} in variable {env_var.name}"
                        for secret in scan_result
                    )
            if leaked:
                report.status = "FAIL"
                report.status_extended = f"CodeBuild project {project.name} has sensitive environment plaintext credentials in variables: {', '.join(leaked)}."
            reports.append(report)
        return reports

View File

@@ -0,0 +1,32 @@
{
"Provider": "aws",
"CheckID": "codebuild_project_source_repo_url_no_sensitive_credentials",
"CheckTitle": "Ensure CodeBuild project source repository URLs do not contain sensitive credentials",
"CheckType": [
"Security Best Practices"
],
"ServiceName": "codebuild",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"Severity": "critical",
"ResourceType": "AwsCodeBuildProject",
"Description": "This check ensures an AWS CodeBuild project source repository URL doesn't contain personal access tokens or a user name and password. The check fails if the source repository URL contains personal access tokens or a user name and password.",
"Risk": "Storing or transmitting sign-in credentials in clear text or including them in the source repository URL can lead to unintended data exposure or unauthorized access, potentially compromising the security of the system.",
"RelatedUrl": "https://docs.aws.amazon.com/config/latest/developerguide/codebuild-project-source-repo-url-check.html",
"Remediation": {
"Code": {
"CLI": "",
"NativeIaC": "",
"Other": "https://docs.aws.amazon.com/securityhub/latest/userguide/codebuild-controls.html#codebuild-1",
"Terraform": ""
},
"Recommendation": {
"Text": "Update your CodeBuild project to use OAuth instead of personal access tokens or basic authentication in your repository URLs.",
"Url": "https://docs.aws.amazon.com/codebuild/latest/userguide/use-case-based-samples.html"
}
},
"Categories": [],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
}

View File

@@ -0,0 +1,44 @@
import re
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.codebuild.codebuild_client import codebuild_client
class codebuild_project_source_repo_url_no_sensitive_credentials(Check):
    """Detect Bitbucket repository URLs that embed tokens or basic-auth credentials."""

    def execute(self):
        """Check the primary and secondary source URLs of every CodeBuild project."""
        reports = []
        # Bitbucket URL carrying an access token: https://x-token-auth:<token>@bitbucket.org/...
        token_re = re.compile(r"https://x-token-auth:[^@]+@bitbucket\.org/.+\.git")
        # Bitbucket URL carrying user:password basic-auth credentials.
        basic_auth_re = re.compile(r"https://[^:]+:[^@]+@bitbucket\.org/.+\.git")
        for project in codebuild_client.projects.values():
            report = Check_Report_AWS(self.metadata())
            report.region = project.region
            report.resource_id = project.name
            report.resource_arn = project.arn
            report.status = "PASS"
            report.status_extended = f"CodeBuild project {project.name} does not contain sensitive credentials in any source repository URLs."

            # Collect primary source first, then secondary sources, keeping original order.
            bitbucket_sources = []
            if project.source and project.source.type == "BITBUCKET":
                bitbucket_sources.append(project.source)
            bitbucket_sources.extend(
                url for url in project.secondary_sources if url.type == "BITBUCKET"
            )

            exposed = []
            for source in bitbucket_sources:
                # Token check takes precedence; the basic-auth pattern would also match it.
                if token_re.match(source.location):
                    exposed.append(f"Token in {source.type} URL {source.location}")
                elif basic_auth_re.match(source.location):
                    exposed.append(
                        f"Basic Auth Credentials in {source.type} URL {source.location}"
                    )

            if exposed:
                report.status = "FAIL"
                report.status_extended = f"CodeBuild project {project.name} has sensitive credentials in source repository URLs: {', '.join(exposed)}."
            reports.append(report)
        return reports

View File

@@ -1,5 +1,5 @@
import datetime
from typing import Optional
from typing import List, Optional
from pydantic import BaseModel
@@ -8,20 +8,17 @@ from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.lib.service.service import AWSService
################### Codebuild
class Codebuild(AWSService):
def __init__(self, provider):
# Call AWSService's __init__
super().__init__(__class__.__name__, provider)
self.projects = {}
self.__threading_call__(self.__list_projects__)
self.__threading_call__(
self.__list_builds_for_project__, self.projects.values()
)
self.__threading_call__(self.__batch_get_builds__, self.projects.values())
self.__threading_call__(self.__batch_get_projects__, self.projects.values())
self.__threading_call__(self._list_projects)
self.__threading_call__(self._list_builds_for_project, self.projects.values())
self.__threading_call__(self._batch_get_builds, self.projects.values())
self.__threading_call__(self._batch_get_projects, self.projects.values())
def __list_projects__(self, regional_client):
def _list_projects(self, regional_client):
logger.info("Codebuild - Listing projects...")
try:
list_projects_paginator = regional_client.get_paginator("list_projects")
@@ -42,7 +39,7 @@ class Codebuild(AWSService):
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __list_builds_for_project__(self, project):
def _list_builds_for_project(self, project):
logger.info("Codebuild - Listing builds...")
try:
regional_client = self.regional_clients[project.region]
@@ -56,7 +53,7 @@ class Codebuild(AWSService):
f"{project.region}: {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __batch_get_builds__(self, project):
def _batch_get_builds(self, project):
logger.info("Codebuild - Getting builds...")
try:
if project.last_build and project.last_build.id:
@@ -71,14 +68,31 @@ class Codebuild(AWSService):
f"{regional_client.region}: {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __batch_get_projects__(self, project):
def _batch_get_projects(self, project):
logger.info("Codebuild - Getting projects...")
try:
regional_client = self.regional_clients[project.region]
project_source = regional_client.batch_get_projects(names=[project.name])[
project_info = regional_client.batch_get_projects(names=[project.name])[
"projects"
][0]["source"]
project.buildspec = project_source.get("buildspec", "")
][0]
project.buildspec = project_info["source"].get("buildspec")
if project_info["source"]["type"] != "NO_SOURCE":
project.source = Source(
type=project_info["source"]["type"],
location=project_info["source"]["location"],
)
project.secondary_sources = []
for secondary_source in project_info.get("secondarySources", []):
source_obj = Source(
type=secondary_source["type"], location=secondary_source["location"]
)
project.secondary_sources.append(source_obj)
environment = project_info.get("environment", {})
env_vars = environment.get("environmentVariables", [])
project.environment_variables = [
EnvironmentVariable(**var) for var in env_vars
]
project.buildspec = project_info.get("source", {}).get("buildspec", "")
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -89,6 +103,17 @@ class Build(BaseModel):
id: str
class Source(BaseModel):
    """A CodeBuild project source as returned by batch_get_projects."""

    # Source type string from the API, e.g. "BITBUCKET"; "NO_SOURCE" entries
    # are filtered out before this model is built.
    type: str
    # Repository location/URL from the API response.
    location: str
class EnvironmentVariable(BaseModel):
    """One entry of a CodeBuild project's environment.environmentVariables."""

    # Variable name as returned by the CodeBuild API.
    name: str
    # Variable value as returned by the CodeBuild API.
    value: str
    # Variable type — presumably "PLAINTEXT" / "PARAMETER_STORE" /
    # "SECRETS_MANAGER" per the CodeBuild API; confirm against boto3 docs.
    type: str
class Project(BaseModel):
name: str
arn: str
@@ -96,3 +121,6 @@ class Project(BaseModel):
last_build: Optional[Build]
last_invoked_time: Optional[datetime.datetime]
buildspec: Optional[str]
source: Optional[Source]
secondary_sources: Optional[list[Source]] = []
environment_variables: Optional[List[EnvironmentVariable]]

View File

@@ -14,14 +14,14 @@ class CognitoIDP(AWSService):
super().__init__("cognito-idp", provider)
self.user_pools = {}
self.__threading_call__(self.__list_user_pools__)
self.__describe_user_pools__()
self.__list_user_pool_clients__()
self.__describe_user_pool_clients__()
self.__get_user_pool_mfa_config__()
self.__get_user_pool_risk_configuration__()
self.__threading_call__(self._list_user_pools)
self._describe_user_pools()
self._list_user_pool_clients()
self._describe_user_pool_clients()
self._get_user_pool_mfa_config()
self._get_user_pool_risk_configuration()
def __list_user_pools__(self, regional_client):
def _list_user_pools(self, regional_client):
logger.info("Cognito - Listing User Pools...")
try:
user_pools_paginator = regional_client.get_paginator("list_user_pools")
@@ -51,7 +51,7 @@ class CognitoIDP(AWSService):
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __describe_user_pools__(self):
def _describe_user_pools(self):
logger.info("Cognito - Describing User Pools...")
try:
for user_pool in self.user_pools.values():
@@ -114,7 +114,7 @@ class CognitoIDP(AWSService):
f"{user_pool.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __list_user_pool_clients__(self):
def _list_user_pool_clients(self):
logger.info("Cognito - Listing User Pool Clients...")
try:
for user_pool in self.user_pools.values():
@@ -143,7 +143,7 @@ class CognitoIDP(AWSService):
f"{user_pool.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __describe_user_pool_clients__(self):
def _describe_user_pool_clients(self):
logger.info("Cognito - Describing User Pool Clients...")
try:
for user_pool in self.user_pools.values():
@@ -175,7 +175,7 @@ class CognitoIDP(AWSService):
f"{user_pool.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __get_user_pool_mfa_config__(self):
def _get_user_pool_mfa_config(self):
logger.info("Cognito - Getting User Pool MFA Configuration...")
try:
for user_pool in self.user_pools.values():
@@ -202,7 +202,7 @@ class CognitoIDP(AWSService):
f"{user_pool.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __get_user_pool_risk_configuration__(self):
def _get_user_pool_risk_configuration(self):
logger.info("Cognito - Getting User Pool Risk Configuration...")
try:
for user_pool in self.user_pools.values():
@@ -265,11 +265,11 @@ class CognitoIdentity(AWSService):
def __init__(self, provider):
super().__init__("cognito-identity", provider)
self.identity_pools = {}
self.__threading_call__(self.__list_identity_pools__)
self.__describe_identity_pools__()
self.__get_identity_pool_roles__()
self.__threading_call__(self._list_identity_pools)
self._describe_identity_pools()
self._get_identity_pool_roles()
def __list_identity_pools__(self, regional_client):
def _list_identity_pools(self, regional_client):
logger.info("Cognito - Listing Identity Pools...")
try:
identity_pools_paginator = regional_client.get_paginator(
@@ -297,7 +297,7 @@ class CognitoIdentity(AWSService):
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __describe_identity_pools__(self):
def _describe_identity_pools(self):
logger.info("Cognito - Describing Identity Pools...")
try:
for identity_pool in self.identity_pools.values():
@@ -325,7 +325,7 @@ class CognitoIdentity(AWSService):
f"{identity_pool.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __get_identity_pool_roles__(self):
def _get_identity_pool_roles(self):
logger.info("Cognito - Getting Identity Pool Roles...")
try:
for identity_pool in self.identity_pools.values():

Some files were not shown because too many files have changed in this diff Show More