Compare commits

...

592 Commits

Author SHA1 Message Date
pedrooot
aa69563fc2 feat(checks_loader): check for none check_list 2024-11-11 11:21:53 +01:00
pedrooot
ef4750856c feat(checks_loader): discard cloudwatch_log_group_no_secrets_in_logs and cloudwatch_log_group_no_critical_pii_in_logs 2024-11-08 13:25:25 +01:00
dependabot[bot]
b0af1390b5 chore(deps): bump trufflesecurity/trufflehog from 3.83.3 to 3.83.4 (#5692)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-11-08 11:48:59 +01:00
dependabot[bot]
bc3cd43126 chore(deps): bump slack-sdk from 3.33.1 to 3.33.3 (#5688)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-11-08 11:19:02 +01:00
dependabot[bot]
087dae07d8 chore(deps-dev): bump coverage from 7.6.1 to 7.6.4 (#5686)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-11-08 09:04:20 +01:00
dependabot[bot]
0baf4fb224 chore(deps): bump boto3 from 1.35.29 to 1.35.55 (#5685)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-11-08 08:15:43 +01:00
dependabot[bot]
0f8ea48f2f chore(deps): bump azure-mgmt-containerservice from 32.0.0 to 32.1.0 (#5664)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-11-07 15:49:57 -05:00
dependabot[bot]
ec207c50ce chore(deps): bump microsoft-kiota-abstractions from 1.3.3 to 1.6.0 (#5662)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-11-07 14:54:13 -05:00
dependabot[bot]
b59b40b822 chore(deps): bump azure-keyvault-keys from 4.9.0 to 4.10.0 (#5660)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-11-07 13:26:51 -05:00
dependabot[bot]
aa51045329 chore(deps-dev): bump mkdocs-material from 9.5.39 to 9.5.44 (#5659)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-11-07 12:36:15 -05:00
dependabot[bot]
1a9f854063 chore(deps): bump google-api-python-client from 2.147.0 to 2.151.0 (#5661)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-11-07 11:42:31 -05:00
Mario Rodriguez Lopez
6bdcb509e1 feat(appsync): add new check appsync_graphql_apis_no_api_key_authentication (#5591)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-11-07 11:42:07 -05:00
Sergio Garcia
ce1e9de104 chore(aws): deprecate glue_etl_jobs_logging_enabled check (#5670) 2024-11-07 10:25:32 -05:00
dependabot[bot]
2471bc569a chore(deps): bump botocore from 1.35.29 to 1.35.55 (#5663)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-11-07 10:22:14 -05:00
Daniel Barranquero
d0ef75d8d9 feat(dms): add new check dms_replication_task_target_logging_enabled (#5631)
Co-authored-by: Sergio <sergio@prowler.com>
2024-11-07 10:19:44 -05:00
Sergio Garcia
aa79a289ce fix(aws): update EKS check in compliance frameworks (#5672) 2024-11-07 15:56:55 +01:00
dependabot[bot]
0340ab9570 chore(deps-dev): bump pytest-cov from 5.0.0 to 6.0.0 (#5666)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-11-07 09:17:18 -05:00
thomscode
a2929f2efb fix(mutelist): set arguments while loading providers (#5653)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-11-07 09:12:29 -05:00
Prowler Bot
bf4db86dec chore(regions_update): Changes in regions for AWS services (#5655)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-11-07 08:22:22 -05:00
Daniel Barranquero
a339dafcc6 fix(guardduty): fix guardduty_is_enabled_fixer test (#5668) 2024-11-07 08:21:49 -05:00
dependabot[bot]
f376516aad chore(deps-dev): bump vulture from 2.12 to 2.13 (#5665)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-11-07 08:20:54 -05:00
dependabot[bot]
816b49fac5 chore(deps-dev): bump black from 24.8.0 to 24.10.0 (#5667)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-11-07 12:55:16 +01:00
Pepe Fagoaga
6851350093 fix(lock): Use detect-secrets from package not repo (#5656) 2024-11-07 11:30:46 +01:00
Daniel Barranquero
d5873c0437 feat(dms): add new check dms_replication_task_source_logging_enabled (#5627)
Co-authored-by: Sergio <sergio@prowler.com>
2024-11-06 15:50:48 -05:00
Mario Rodriguez Lopez
a2dba30869 feat(servicecatalog): Add new service servicecatalog (#5618)
Co-authored-by: Sergio <sergio@prowler.com>
2024-11-06 12:02:14 -05:00
Mario Rodriguez Lopez
0662dff13f feat(appsync): Add new service AppSync (#5589)
Co-authored-by: Sergio <sergio@prowler.com>
2024-11-06 11:50:27 -05:00
Daniel Barranquero
0ae26bddfc feat(dms): add new check dms_endpoint_redis_tls_enabled (#5583)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-11-06 11:03:13 -05:00
Sergio Garcia
43efabef6c fix(docker): add g++ to Dockerfile for presidio-analyzer compatibility (#5645) 2024-11-06 10:45:16 -05:00
dependabot[bot]
e73fc14f62 chore(deps): bump trufflesecurity/trufflehog from 3.83.2 to 3.83.3 (#5647)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-11-06 10:13:09 +01:00
Sergio Garcia
89fe8fa8e2 chore(version): update Prowler version (#5642) 2024-11-06 08:11:13 +01:00
Drew Kerrigan
634ef2e599 fix(docs): Update misc tutorial categories example (#5644) 2024-11-05 15:37:20 -05:00
Sergio Garcia
4efb70a508 chore(README): update summary table (#5633) 2024-11-05 13:24:46 -05:00
Pepe Fagoaga
c3ae0aa873 fix(connection): return Connection on generic exception (#5636) 2024-11-05 12:24:18 -05:00
Sergio Garcia
a109cd2816 fix(gcp): do not require organization id to get projects (#5637) 2024-11-05 12:24:07 -05:00
sansns-aws
78fb540bbb feat(rds): add rds_cluster_protected_by_backup_plan check (#5638) 2024-11-05 11:30:45 -05:00
sansns-aws
5b543bf058 feat(aws): Update check metadata with redundancy category (#5640) 2024-11-05 11:27:24 -05:00
Daniel Barranquero
9802fc141a feat(dms): add new check dms_endpoint_mongodb_authentication_enabled (#5578)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-11-05 09:58:55 -05:00
Daniel Barranquero
ea038085ba feat(dms): add new check dms_endpoint_neptune_iam_authorization_enabled (#5549)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-11-05 08:43:57 -05:00
Sergio Garcia
6ff1c436a0 fix(aws): handle global WAFv2 ACLs in service (#5628) 2024-11-05 08:42:20 -05:00
Rubén De la Torre Vico
1b50fdba28 feat(secretsmanager): add new check secretsmanager_secret_rotated_periodically (#5450)
Co-authored-by: Sergio <sergio@prowler.com>
2024-11-04 16:08:38 -05:00
Sergio Garcia
230d2571f9 fix(k8s): do not raise error when unable to list roles (#5630) 2024-11-04 13:47:18 -05:00
sansns-aws
6c818cbcc3 fix(rds): Check Aurora clusters properly for backup plan (#5594)
Co-authored-by: Sergio <sergio@prowler.com>
2024-11-04 13:20:22 -05:00
Mario Rodriguez Lopez
694cee1afb feat(kafka): add new check kafka_connector_in_transit_encryption_enabled (#5577)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-11-04 12:46:32 -05:00
Mario Rodriguez Lopez
bc89f4383e feat(ecs): add new check ecs_task_set_no_assign_public_ip (#5603)
Co-authored-by: Sergio <sergio@prowler.com>
2024-11-04 11:49:22 -05:00
Sergio Garcia
84d4e4a604 fix(aws): solve invalid ECR Registry ARN (#5622) 2024-11-04 11:47:49 -05:00
Hugo Pereira Brito
5fbf8ddfe9 feat(transfer): add new check transfer_server_encryption_in_transit (#5590)
Co-authored-by: Sergio <sergio@prowler.com>
2024-11-04 11:30:18 -05:00
Prowler Bot
ddcd06d9be chore(regions_update): Changes in regions for AWS services (#5617)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-11-04 11:20:16 -05:00
Pepe Fagoaga
5214a37d6d chore: add dependabot labels (#5624) 2024-11-04 10:45:53 -05:00
sansns-aws
a1f4ae73cf feat(aws): Update check metadata with category (#5607) 2024-11-04 10:45:36 -05:00
Prowler Bot
d0bc37c281 chore(regions_update): Changes in regions for AWS services (#5600)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-11-04 10:24:55 -05:00
Prowler Bot
85393e6f78 chore(regions_update): Changes in regions for AWS services (#5613)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-11-04 10:20:42 -05:00
Sergio Garcia
e3104ae5ee feat(aws): add new check cloudwatch_log_group_no_critical_pii_in_logs (#5494) 2024-11-04 10:20:35 -05:00
Sergio Garcia
be523c11c8 fix(aws): do not flag cross-service confused deputy as public (#5593) 2024-11-04 15:51:52 +01:00
Hugo Pereira Brito
797b627695 feat(aws): add new service transfer (#5585) 2024-11-04 08:55:47 -05:00
dependabot[bot]
5ac670ed4f chore(deps): bump trufflesecurity/trufflehog from 3.82.13 to 3.83.2 (#5611)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-11-04 08:51:15 -05:00
Pedro Martín
bf9111397b feat(mutelist): add mute_finding method (#5563) 2024-11-04 12:05:03 +01:00
Pedro Martín
17dd9de6d8 fix(main): set attributes on load_checks_to_execute (#5606) 2024-11-04 09:05:02 +01:00
Pedro Martín
e4400ecf10 fix(checks_loader): solve issue related with checks from compliance (#5601) 2024-10-31 17:28:23 +01:00
sansns-aws
cbba5acc31 chore(aws): cleanup tests in dynamodb and cw (#5588) 2024-10-31 07:59:33 +01:00
Mario Rodriguez Lopez
046f1b2e5f feat(guardduty): add new check guardduty_eks_runtime_monitoring_enabled (#5582)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-30 15:01:50 -05:00
sansns-aws
9e8f88c889 feat(elbv2): add elbv2_nlb_tls_termination_enabled check (#5550)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-30 15:00:55 -05:00
sansns-aws
2d73b9b8f4 feat(elbv2): add elbv2_cross_zone_load_balancing_enabled check (#5548)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-30 14:42:56 -05:00
sansns-aws
9a7190c9c2 chore(aws): cleanup tests (#5592) 2024-10-30 14:04:05 -05:00
sansns-aws
a2b6bdc461 feat(ecs): Add ecs_task_definitions_logging_block_mode check (#5526) 2024-10-30 12:02:36 -05:00
Hugo Pereira Brito
056d482023 feat(glue): add new check glue_etl_jobs_logging_enabled (#5581)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-30 10:56:46 -05:00
Sergio Garcia
239b248935 feat(aws): add new check bedrock_agent_guardrail_enabled (#5509) 2024-10-30 09:41:44 -05:00
Sergio Garcia
5bd394dffe fix(gcp): enforce correct severity levels in CloudSQL PostgreSQL log_min_messages (#5571) 2024-10-30 09:41:06 -05:00
Prowler Bot
1195b75acc chore(regions_update): Changes in regions for AWS services (#5580)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-10-30 09:03:02 -05:00
Daniel Barranquero
fee70bc9b4 chore(rds): improve metadata title and description for check rds_instance_transport_encrypted (#5584) 2024-10-30 09:02:03 -05:00
Sergio Garcia
01716d9020 feat(aws): add new check cloudwatch_log_group_not_publicly_accessible (#5495)
Co-authored-by: Rubén De la Torre Vico <rubendltv22@gmail.com>
2024-10-30 08:50:17 -05:00
Pedro Martín
b87e6d20d7 feat(s3): add test_connection method (#5332) 2024-10-30 11:45:22 +01:00
Pedro Martín
11592634f2 fix(check): add .value to severity enum (#5579) 2024-10-30 11:44:42 +01:00
Sergio Garcia
bc308de571 feat(SecurityHub): add test_connection method (#5350)
Co-authored-by: pedrooot <pedromarting3@gmail.com>
2024-10-30 10:02:13 +01:00
Hugo Pereira Brito
4bee4d482a feat(s3): add new check s3_bucket_event_notifications_enabled (#5562)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-29 15:38:38 -05:00
Hugo Pereira Brito
82ec3e8779 feat(s3): add new check s3_multi_region_access_point_public_access_block (#5552)
Co-authored-by: Sergio <sergio@prowler.com>
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-10-29 14:56:56 -05:00
sansns-aws
85777546e8 feat(autoscaling): Add autoscaling_group_capacity_rebalance_enabled check (#5523)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-29 14:51:21 -05:00
Kay Agahd
ec69d8073a fix(aws): findings in IAM policies were not reported (#5560) 2024-10-29 14:29:29 -05:00
Sergio Garcia
e6053ce218 feat(slack): add test_connection method (#5340) 2024-10-29 18:14:06 +01:00
Sergio Garcia
f01910e4f2 feat(gcp): add --organization-id flag (#5524) 2024-10-29 18:11:53 +01:00
sansns-aws
8848cadc0a chore(aws): Cleanup RDS and S3 tests (#5569) 2024-10-29 12:06:12 -05:00
sansns-aws
2c7d71a0d9 chore(glue): Cleanup tests (#5568) 2024-10-29 12:06:03 -05:00
sansns-aws
dcd1b1121a chore(codebuild): Cleanup tests (#5567) 2024-10-29 11:31:19 -05:00
Hugo Pereira Brito
8a6e222f7a feat(wafv2): set us-east-1 region for global acls (#5558)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-29 10:16:48 -05:00
Prowler Bot
a4c39c25f1 chore(regions_update): Changes in regions for AWS services (#5559)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-10-29 08:55:45 -05:00
Rubén De la Torre Vico
628d50cf0d chore(azure): deprecate AzureGermanCloud (#5561) 2024-10-29 08:54:55 -05:00
sansns-aws
f70e3deade feat(aws): add DirectConnect service and checks (#5522) 2024-10-28 16:48:22 -05:00
sansns-aws
14f06d6497 chore(elbv2): cleanup tests (#5553) 2024-10-28 15:45:01 -05:00
Sergio Garcia
3c6e06837c fix(dependabot): security update werkzeug (#5551) 2024-10-28 13:49:47 -05:00
Sergio Garcia
e778444d1d fix(PyPi): solve detect-secrets dependency (#5514) 2024-10-28 11:36:19 -05:00
NIRBHAY KUMAR
a4cca188ef chore(slack): add text argument per best practice (#5541)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-28 11:19:05 -05:00
Hugo Pereira Brito
76ee608ef8 fix: added s3 origin verification in cloudfront_distributions_s3_origin_non_existent_bucket (#5543) 2024-10-28 10:01:03 -05:00
Prowler Bot
7af5c82371 chore(regions_update): Changes in regions for AWS services (#5540)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-10-28 15:16:51 +01:00
MrSecure
172530153c feat(color): add --no-color flag (#5368)
Co-authored-by: pedrooot <pedromarting3@gmail.com>
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-10-28 12:23:21 +01:00
Pedro Martín
0114d0462f feat(check): add check methods (#5462) 2024-10-28 12:22:34 +01:00
Pepe Fagoaga
6502330512 chore(providers): Remove get_output_mapping (#5484) 2024-10-28 11:40:31 +01:00
Prowler Bot
9bf9ebe4fd chore(regions_update): Changes in regions for AWS services (#5542)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-10-28 11:17:35 +01:00
Sergio Garcia
406d5864ee fix(kubernetes): handle input kube config file (#5502) 2024-10-28 08:51:37 +01:00
Sergio Garcia
0f9ebecbb7 fix(aws): review checks with wrong attributes (#5503) 2024-10-28 08:45:03 +01:00
Hugo Pereira Brito
0331af02ac feat(mq): add new check mq_broker_logging_enabled (#5483)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-25 14:33:35 -07:00
Sergio Garcia
64fb823276 fix(aws): review checks in compliance frameworks (#5513) 2024-10-25 18:32:46 +02:00
Prowler Bot
33f2c80a78 chore(regions_update): Changes in regions for AWS services (#5533)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-10-25 09:07:51 -07:00
dependabot[bot]
84ce7a8b52 chore(deps): bump trufflesecurity/trufflehog from 3.82.12 to 3.82.13 (#5531)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-10-24 16:13:42 -07:00
Hugo Pereira Brito
1a6b2eaa7d feat(mq): add new check mq_broker_active_deployment_mode (#5433)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-24 16:13:11 -07:00
Hugo Pereira Brito
df373279e9 feat(mq): add new check mq_broker_cluster_deployment_mode (#5481)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-24 12:00:13 -07:00
Prowler Bot
6a09171851 chore(regions_update): Changes in regions for AWS services (#5519)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-10-24 07:54:54 -07:00
sansns-aws
93d257941b feat(aws): add new check fsx_windows_file_system_multi_az (#5491)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-23 14:07:57 -07:00
sansns-aws
28f8915f6f feat(aws): Add Fault Tolerance Checks (#5488)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-23 14:07:43 -07:00
Daniel Barranquero
fef99fd5fb feat(backup): add new check backup_recovery_point_encrypted (#5426)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-23 11:16:43 -07:00
sansns-aws
1e1c7cc1ce feat(aws): add new check redshift_cluster_multi_az_enabled (#5492)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-23 10:32:25 -07:00
sansns-aws
7e7d86f14a feat(aws): add new check dynamodb_accelerator_cluster_multi_az (#5493)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-23 10:16:56 -07:00
Prowler Bot
41cdc2bcc7 chore(regions_update): Changes in regions for AWS services (#5511)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-10-23 07:55:04 -07:00
dependabot[bot]
c41866db38 chore(deps): bump trufflesecurity/trufflehog from 3.82.11 to 3.82.12 (#5508)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-10-22 16:27:05 -07:00
Mario Rodriguez Lopez
f36d23c9a7 feat(elb): add new check elb_desync_mitigation_mode (#5500)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-22 11:08:13 -07:00
Hugo Pereira Brito
8ac28fbcfd feat(waf): add new check waf_global_webacl_with_rules (#5469)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-22 10:26:22 -07:00
Hugo Pereira Brito
7f41ae7385 feat(waf): add new check waf_global_webacl_logging_enabled (#5479)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-22 09:41:00 -07:00
Prowler Bot
4c5f3a212c chore(regions_update): Changes in regions for AWS services (#5499)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-10-22 07:54:22 -07:00
Hugo Pereira Brito
ffa29f2f6e feat(waf): add new check waf_global_rulegroup_not_empty (#5467)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-21 12:53:06 -07:00
Rubén De la Torre Vico
2ef9e27ee3 fix(kinesis): add missing init file (#5490) 2024-10-21 10:26:36 -07:00
Mario Rodriguez Lopez
d4b93d79b5 feat(elb): add new check elb_ssl_listeners_use_acm_certificate (#5424)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-21 10:15:12 -07:00
Daniel Barranquero
d00afbdc87 feat(apigateway): add new check apigateway_restapi_tracing_enabled (#5470)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-21 10:14:38 -07:00
Hugo Pereira Brito
5b0868e26c feat(waf): add new check waf_global_rule_with_conditions (#5465)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-21 10:14:15 -07:00
Mario Rodriguez Lopez
415c319208 feat(iam): add new check iam_policy_cloudshell_admin_not_attached (#5437)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-21 07:41:58 -07:00
Daniel Barranquero
1aca7a754c feat(apigateway): add new check apigateway_restapi_cache_encrypted (#5448) 2024-10-21 07:38:55 -07:00
Prowler Bot
147c3c455b chore(regions_update): Changes in regions for AWS services (#5477)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-10-21 07:29:06 -07:00
Rubén De la Torre Vico
d997ebb2cc feat(athena): add new check athena_workgroup_logging_enabled (#5468) 2024-10-18 16:40:57 -04:00
Mario Rodriguez Lopez
50cb79ee2f feat(aws): Add new checks ses_identities/glue_data_catalogs/secretsmanager_not_publicly_accessible (#5471) 2024-10-18 16:40:12 -04:00
johannes-engler-mw
2b34fd39f6 feat(containerregistry): add new check containerregistry_uses_private_link (#5375)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-10-18 16:15:52 -04:00
Mario Rodriguez Lopez
0c82137834 feat(eventbridge): add new check eventbridge_global_endpoint_event_replication_enabled (#5396)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-10-18 15:36:39 -04:00
Hugo Pereira Brito
413b86e7cf chore(wafv2): migrated testing from magicmock to moto (#5464) 2024-10-18 14:55:49 -04:00
Pepe Fagoaga
23a20a582e chore(findings): add new properties (#5463) 2024-10-18 13:36:41 +02:00
Pepe Fagoaga
8411fcb5fc chore(severities): Use enum (#5460) 2024-10-18 11:39:48 +02:00
Pedro Martín
41e585643b feat(scan): add mutelist and config file to scan (#5310)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-10-18 10:34:46 +02:00
dependabot[bot]
aca5824240 chore(deps): bump trufflesecurity/trufflehog from 3.82.9 to 3.82.11 (#5458)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-10-18 09:29:38 +02:00
Pedro Martín
e65b346afd feat(exceptions): modify custom exceptions (#5451) 2024-10-18 09:28:58 +02:00
Pepe Fagoaga
98cb954f74 refactor(finding): Add metadata object (#5447)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-10-18 09:26:50 +02:00
Hugo Pereira Brito
778edd5fec feat(mq): add new check mq_broker_auto_minor_version_upgrades (#5431)
Co-authored-by: Rubén De la Torre Vico <rubendltv22@gmail.com>
2024-10-17 14:33:42 -04:00
Rubén De la Torre Vico
06deda7e5f feat(opensearch): add new check opensearch_domain_master_nodes_fault_tolerant (#5393) 2024-10-17 14:32:42 -04:00
Adrián Jesús Peña Rodríguez
26a00a14df feat(datasync): add datasync service and check datasync_task_logging_enabled (#5444) 2024-10-17 13:07:18 -04:00
Sergio Garcia
12abea371d fix(bedrock): add filtering and handle different ARNs (#5453) 2024-10-17 12:53:18 -04:00
Rubén De la Torre Vico
a17cf1bbb6 feat(secretsmanager): add new check secretsmanager_secret_unused (#5428) 2024-10-17 10:24:12 -04:00
Mario Rodriguez Lopez
5d51942768 feat(fsx): add new check fsx_file_system_copy_tags_to_backups_enabled (#5417)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-10-17 10:23:21 -04:00
Pepe Fagoaga
3122d727a5 chore(aws): Add AWSSessionTokenExpired (#5378) 2024-10-17 15:43:27 +02:00
Mario Rodriguez Lopez
e5f89d5bc7 feat(fsx): add new check fsx_file_system_copy_tags_to_volumes_enabled (#5414)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-10-17 09:40:42 -04:00
Pedro Martín
efc60d2bf4 feat(scan): add status argument (#5443) 2024-10-17 15:27:27 +02:00
Mario Rodriguez Lopez
f7fd355dc1 refactor(acm): Change certificates from list to dict in acm_service (#5420) 2024-10-17 09:16:21 -04:00
Prowler Bot
7bd402bf4e chore(regions_update): Changes in regions for AWS services (#5445)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-10-17 08:49:42 -04:00
Pedro Martín
b69962efb6 feat(scan): add excluded_checks and services (#5442) 2024-10-17 10:25:07 +02:00
Pedro Martín
2b8b223403 feat(k8s): Add kubeconfig content authentication (#5397)
Co-authored-by: Sergio <sergio@prowler.com>
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-10-17 09:56:51 +02:00
Pedro Martín
a024ab31a0 feat(scan): add arguments (#5427) 2024-10-17 09:29:02 +02:00
Hugo Pereira Brito
9969e271ed feat(waf): add new check waf_regional_rulegroup_not_empty (#5415)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-16 15:22:24 -04:00
Sergio Garcia
f1449b66d6 feat(k8s): Add kubeconfig content static authentication (#5370)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-10-16 18:52:15 +02:00
Hugo Pereira Brito
3c0f360244 feat(waf): add new check waf_regional_rule_with_conditions (#5411)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-16 12:41:57 -04:00
Sergio Garcia
6e3c008a89 chore(aws): improve logic for determining if resources are publicly accessible (#5195) 2024-10-16 12:10:38 -04:00
Hugo Pereira Brito
9d97b1a7ee feat(waf): add new check waf_regional_webacl_with_rules (#5392)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-10-16 11:58:03 -04:00
Hugo Pereira Brito
d07f1e982a feat(wafv2): add new check wafv2_webacl_with_rules (#5376)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-10-16 11:44:41 -04:00
Hugo Pereira Brito
402e0e3107 feat(wafv2): add new check wafv2_webacl_rule_logging_enabled (#5362)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-16 10:19:19 -04:00
dependabot[bot]
c5716bf9b6 chore(deps): bump trufflesecurity/trufflehog from 3.82.8 to 3.82.9 (#5421)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-10-16 09:03:57 -04:00
Pepe Fagoaga
bfdff563e6 chore(aws): Set scan_unused_services False by default (#5425) 2024-10-16 13:19:10 +02:00
Pedro Martín
4be83f240a feat(azure): add provider id validation inside test_connection (#5391)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-10-16 12:02:40 +02:00
Daniel Barranquero
45c32abcdf feat(autoscaling): add new check autoscaling_group_multiple_instance_types (#5325)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-15 15:36:48 -04:00
Hugo Pereira Brito
c0ac4c7c30 feat(MQ): add new service MQ (#5419) 2024-10-15 14:32:28 -04:00
Daniel Barranquero
c90cb3712b feat(cloudwatch): add new check cloudwatch_alarm_actions_alarm_state_configured (#5404)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-15 10:51:02 -04:00
Daniel Barranquero
23c3884ab7 feat(cloudwatch): add new check cloudwatch_alarm_actions_enabled (#5416)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-15 09:50:50 -04:00
Mario Rodriguez Lopez
a491e39a18 feat(fsx): Add new service FSx (#5412)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-15 09:01:25 -04:00
Rubén De la Torre Vico
78d2fb9fd5 feat(codebuild): add new check codebuild_report_group_export_encrypted (#5384)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-15 08:39:18 -04:00
Rubén De la Torre Vico
aac6038565 feat(codebuild): add new check codebuild_project_logging_enabled (#5365)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-10-15 07:42:45 -04:00
Prowler Bot
0449d6372c chore(regions_update): Changes in regions for AWS services (#5413)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-10-15 07:41:07 -04:00
Pedro Martín
bc1e6c0626 feat(azure): add authentication method from static credentials (#5358) 2024-10-15 09:37:17 +02:00
Pedro Martín
c1d061ef70 feat(gcp): add provider id validation inside test_connection (#5381) 2024-10-15 09:04:17 +02:00
Mario Rodriguez Lopez
9788fe4236 feat(macie): add new check macie_automated_sensitive_data_discovery_enabled (#5390)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-14 17:58:44 -04:00
Mario Rodriguez Lopez
7fd0798b7c feat(opensearch): add new check opensearch_service_domains_fault_tolerant_data_nodes (#5366)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-14 14:49:46 -04:00
Rubén De la Torre Vico
82ab439e9a feat(codebuild): add new check codebuild_project_s3_logs_encrypted (#5363)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-14 14:40:04 -04:00
Sergio Garcia
54280ee2dc fix(iam): update AWS Support policy (#5399) 2024-10-14 13:58:42 -04:00
Sergio Garcia
434460b978 fix(organizations): no finding for access denied in listing policies (#5400) 2024-10-14 13:58:30 -04:00
Daniel Barranquero
808fa96407 feat(autoscaling): add new check autoscaling_group_launch_configuration_requires_imdsv2 (#5356)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-14 13:16:02 -04:00
Hugo Pereira Brito
2c0c1f7d09 refactor(WAF): Rename WAF to WAFRegional and Add Global WAF Service (#5389)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-14 11:49:26 -04:00
Rubén De la Torre Vico
037e40f8e4 feat(config): add new check config_recorder_using_aws_service_role (#5357) 2024-10-14 11:19:35 -04:00
Daniel Barranquero
e0ed891fc4 feat(autoscaling): add new check autoscaling_group_launch_configuration_no_public_ip (#5359) 2024-10-14 11:17:36 -04:00
Daniel Barranquero
dfc8e3e38f feat(autoscaling): add new check autoscaling_group_using_ec2_launch_template (#5346)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-10-14 10:55:19 -04:00
Sergio Garcia
aef4a68c46 feat(bedrock): add checks for guardrails configuration and log encryption (#5385) 2024-10-14 10:49:58 -04:00
Sergio Garcia
3c929bd68f feat(aws): add checks for Bedrock logging configuration and CloudTrail LLM Jacking detection (#5314) 2024-10-11 15:01:45 -04:00
Jonny
444d820f98 chore(lambda): update obsolete lambda runtime (#5379)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-10-11 12:05:04 -04:00
Hugo Pereira Brito
304bb27502 feat(waf): change WAF Classic web_acls from list to dict (#5380)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-11 11:05:37 -04:00
Mario Rodriguez Lopez
a6db526eec feat(elasticbeanstalk): add new check elasticbeanstalk_enhanced_health_reporting_enabled (#5348)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-11 10:27:48 -04:00
Sergio Garcia
3ace44979a chore(aws): add more cases to public IAM resource policies (#5336) 2024-10-11 10:27:23 -04:00
Prowler Bot
493d6a9210 chore(regions_update): Changes in regions for AWS services (#5377)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-10-11 09:29:26 -04:00
dependabot[bot]
3762d70ba3 chore(deps): bump trufflesecurity/trufflehog from 3.82.7 to 3.82.8 (#5371)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-10-11 09:28:43 -04:00
Pedro Martín
03a26ec507 feat(gcp): add static credentials for gcp provider (#5364) 2024-10-11 11:01:37 +02:00
Mario Rodriguez Lopez
c3e3381c63 feat(elasticbeanstalk): add new check elasticbeanstalk_cloudwatch_enabled (#5335)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-10 15:32:31 -04:00
Mario Rodriguez Lopez
f8a8266c9d feat(elasticbeanstalk): add new check elasticbeanstalk_managed_platform_updates_enabled (#5324)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-10 15:31:11 -04:00
Pepe Fagoaga
d9c2933dc5 feat(test_connection): Add optional AWS Account ID validation (#5361) 2024-10-10 12:45:16 -04:00
Pepe Fagoaga
cad99c5e0f feat(aws): Add static credentials authentication (#5360) 2024-10-10 11:47:05 -04:00
johannes-engler-mw
9f2de7d2f9 feat(containerregistry): add new check containerregistry_not_publicly_accessible (#5291)
Co-authored-by: Rubén De la Torre Vico <rubendltv22@gmail.com>
2024-10-10 11:39:16 -04:00
Daniel Barranquero
4181ca56be feat(autoscaling): add new check autoscaling_group_elb_health_check_enabled (#5330)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-09 14:56:18 -04:00
Daniel Barranquero
d45750b042 feat(redshift): add new check redshift_cluster_enhanced_vpc_routing (#5281)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-10-09 12:40:36 -04:00
Mario Rodriguez Lopez
16191a7b15 feat(elasticbeanstalk): Add new service ElasticBeanstalk (#5322)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-09 09:29:19 -04:00
Rubén De la Torre Vico
0c149461b3 chore(sns): manage ResourceNotFoundException and add parallelism (#5345) 2024-10-09 08:56:39 -04:00
Pedro Martín
3ee39cff2a feat(scan): execute all checks if no checks are provided (#5307) 2024-10-09 11:46:38 +02:00
Pedro Martín
41ba118cc4 feat(scan): add scan duration (#5305)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: Hugo Pereira Brito <101209179+HugoPBrito@users.noreply.github.com>
Co-authored-by: Sergio <sergio@prowler.com>
Co-authored-by: Prowler Bot <bot@prowler.com>
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
Co-authored-by: Rubén De la Torre Vico <rubendltv22@gmail.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
Co-authored-by: Daniel Barranquero <74871504+danibarranqueroo@users.noreply.github.com>
2024-10-09 11:12:39 +02:00
Sergio Garcia
e0587fe0cf fix(Dockerfile): install git dependency (#5339) 2024-10-09 08:58:55 +02:00
Daniel Barranquero
50481665ce feat(redshift): add new check redshift_cluster_in_transit_encryption_enabled (#5271)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-10-08 14:15:32 -04:00
Prowler Bot
a49c744e08 chore(regions_update): Changes in regions for AWS services (#5323)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-08 14:13:17 -04:00
Rubén De la Torre Vico
aa32634105 chore(guardduty): mock failing tests using moto (#5334) 2024-10-08 13:27:37 -04:00
Rubén De la Torre Vico
b27898de1d chore(ecs): mock all tests using moto (#5326) 2024-10-08 12:11:33 -04:00
Sergio Garcia
b703357027 chore(secrets): use master branch of Yelp/detect-secrets (#5298)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-10-08 09:55:46 -04:00
Rubén De la Torre Vico
27cd9b22df feat(guardduty): add new check guardduty_lambda_protection_enabled (#5299)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-08 08:20:23 -04:00
Pepe Fagoaga
5bf85366e0 chore(secrets): Add TelegramBotToken detector (#5321) 2024-10-08 08:09:26 -04:00
dependabot[bot]
30bc971f4b chore(deps): bump trufflesecurity/trufflehog from 3.82.6 to 3.82.7 (#5315)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-10-08 10:44:56 +02:00
Sergio Garcia
3950d7eba8 fix(threat detection): ignore AWS services events (#5276) 2024-10-07 14:25:09 -04:00
Rubén De la Torre Vico
2f8a3d2ef8 feat(guardduty): add new check guardduty_ec2_malware_protection_enabled (#5297)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-07 13:03:36 -04:00
Prowler Bot
3b64bbd3a8 chore(regions_update): Changes in regions for AWS services (#5302)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-10-07 10:58:40 -04:00
Hugo Pereira Brito
09d099891a feat(wafv2): change web_acls from list to dict (#5308)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-07 10:23:58 -04:00
Mario Rodriguez Lopez
a6b10a8611 feat(efs): add new check efs_access_point_enforce_user_identity (#5285)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-10-04 15:16:10 -04:00
Lefteris
c239ede3f9 feat(glue): add check glue_ml_transform_encrypted_at_rest (#5272)
Co-authored-by: Lefteris Gilmaz <lefterisgilmaz@Lefteriss-MacBook-Pro.local>
Co-authored-by: Rubén De la Torre Vico <rubendltv22@gmail.com>
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-04 14:13:11 -04:00
Hugo Pereira Brito
66f2754017 feat(networkfirewall): add new check networkfirewall_policy_default_action_full_packets (#5284)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-10-04 14:00:25 -04:00
Hugo Pereira Brito
9138ecdce9 feat(kinesis): add new check kinesis_stream_encrypted_at_rest (#5292)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-10-04 13:59:49 -04:00
Rubén De la Torre Vico
2b66368cf2 feat(guardduty): add new check guardduty_eks_audit_log_enabled (#5293)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-04 13:43:04 -04:00
Mario Rodriguez Lopez
aa3425a7de feat(efs): add new check efs_access_point_enforce_root_directory (#5277)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-10-04 13:12:47 -04:00
Mario Rodriguez Lopez
a31b15c26c feat(efs): add new check efs_mount_target_not_publicly_accesible (#5275)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-04 11:41:51 -04:00
Hugo Pereira Brito
f2301d5ed6 feat(networkfirewall): add new check networkfirewall_policy_default_action_fragmented_packets (#5244)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-10-04 11:41:26 -04:00
Rubén De la Torre Vico
df10253056 chore(cloudwatch): Improve checks related with function check_cloudwatch_log_metric_filter (#5286) 2024-10-04 11:18:46 -04:00
Sergio Garcia
d5acdc766a chore(ocsf): adapt mapping for version 1.3.0 (#5287) 2024-10-04 10:59:51 -04:00
Rubén De la Torre Vico
e389e0136f chore(cloudwatch): add tags to missing checks report (#5261)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-03 18:04:43 -04:00
Rubén De la Torre Vico
8bb3bd0dcb chore(iam): add tags to missing checks report (#5280)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-03 13:47:10 -04:00
Hugo Pereira Brito
4d4bf3fa11 feat(networkfirewall): add new check networkfirewall_multi_az (#5247)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-03 13:46:44 -04:00
Daniel Barranquero
e99c58405c feat(redshift): add new check redshift_cluster_non_default_database_name (#5283)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-03 11:28:54 -04:00
Daniel Barranquero
2177704b4b feat(redshift): add new check redshift_cluster_encrypted_at_rest (#5262)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-02 17:06:19 -04:00
Mario Rodriguez Lopez
2ffe7f3ef7 feat(ecs): add new check ecs_service_fargate_latest_platform_version (#5258)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-02 16:50:20 -04:00
dependabot[bot]
158263a8bf chore(deps-dev): bump moto from 5.0.15 to 5.0.16 (#5256)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-02 15:40:34 -04:00
Daniel Barranquero
469986dd28 feat(redshift): add new check redshift_cluster_non_default_username (#5268) 2024-10-02 13:54:12 -04:00
Hugo Pereira Brito
ff101087bf feat(networkfirewall): add new check networkfirewall_logging_enabled (#5145)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-10-02 12:09:13 -04:00
dependabot[bot]
b2151e2e9c chore(deps): bump boto3 from 1.35.28 to 1.35.29 (#5257)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-02 11:27:39 -04:00
Sergio Garcia
2c4244b1fb chore(version): update Prowler version (#5251) 2024-10-02 11:14:26 -04:00
Hugo Pereira Brito
260cdf575a feat(kinesis): add new service Kinesis (#5228)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-10-02 10:59:59 -04:00
Michael St.Onge
ab4190c215 chore(contrib): update aws-multi-account-securityhub deployment (#5263) 2024-10-02 10:58:02 -04:00
Mario Rodriguez Lopez
7f97b0a57f feat(ecs): Ensure ECS clusters use Container Insights (#5241)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-02 10:42:52 -04:00
Daniel Barranquero
2c2dd82d0c feat(dynamodb): add new check dynamodb_table_autoscaling_enabled (#5129)
Co-authored-by: Sergio <sergio@prowler.com>
2024-10-02 10:42:36 -04:00
Mario Rodriguez Lopez
2511df1732 fix(ecs): Adjust code to the new ARN formats in the ECS service (#5259) 2024-10-02 09:40:32 -04:00
Rubén De la Torre Vico
f955dd76d9 test(aws): fix failing tests for ecs_task_definitions_logging_enabled and ssm_managed_compliant_patching (#5267) 2024-10-02 09:35:27 -04:00
Prowler Bot
a08cc769c8 chore(regions_update): Changes in regions for AWS services (#5269)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-10-02 08:50:30 -04:00
Prowler Bot
77ac5e3b91 chore(regions_update): Changes in regions for AWS services (#5260)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-10-01 14:10:38 -04:00
dependabot[bot]
2da8f2b1eb chore(deps-dev): bump mkdocs-material from 9.5.38 to 9.5.39 (#5255)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-10-01 14:10:17 -04:00
Sergio Garcia
38e024216c chore(ec2): enhance security group with any open port check (#5215) 2024-09-30 14:53:04 -04:00
Rubén De la Torre Vico
8e4847ec89 fix(rds): add checks before listing tags (#5249)
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-30 13:34:22 -04:00
Sergio Garcia
c6d34e8089 chore(README): update summary table (#5248) 2024-09-30 12:56:42 -04:00
Hugo Pereira Brito
880523076d feat(networkfirewall): add new check networkfirewall_policy_rule_group_associated (#5225) 2024-09-30 12:04:32 -04:00
Sergio Garcia
3d2f1a3aa7 fix(aws): handle none type attributes (#5216) 2024-09-30 18:04:14 +02:00
Rubén De la Torre Vico
c9ff96144d chore(ssm): add tags to ssm_managed_compliant_patching (#5245)
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-30 12:00:43 -04:00
johannes-engler-mw
234f8c2958 feat(azure containerregistry): gather service info and check for disabled admin user (#5191)
Co-authored-by: Pedro Martín <pedromarting3@gmail.com>
Co-authored-by: Rubén De la Torre Vico <rubendltv22@gmail.com>
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-30 11:52:48 -04:00
Sergio Garcia
da87c0d81e fix(tests): patch head_bucket function correctly (#5246) 2024-09-30 11:00:30 -04:00
dependabot[bot]
7732ec7d34 chore(deps-dev): bump safety from 3.2.7 to 3.2.8 (#5238)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-30 10:54:36 -04:00
Rubén De la Torre Vico
a1b9b2171f feat(securityhub): add tags securityhub_enabled (#5231)
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-30 10:13:41 -04:00
Mario Rodriguez Lopez
30e3fd9e46 feat(ecs): Ensure ECS containers have a logging configuration specified (#5234)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-09-30 09:43:20 -04:00
dependabot[bot]
3db541a42a chore(deps): bump botocore from 1.35.28 to 1.35.29 (#5239)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-30 09:38:12 -04:00
Rubén De la Torre Vico
d5abe16180 feat(wafv2): add tags to wafv2_webacl_logging_enabled (#5243) 2024-09-30 09:37:16 -04:00
dependabot[bot]
564b18c388 chore(deps): bump azure-storage-blob from 12.23.0 to 12.23.1 (#5240)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-30 08:30:28 -04:00
Rubén De la Torre Vico
13e40eb03e feat(aws): add tags to Global Accelerator (#5233) 2024-09-27 12:37:19 -04:00
Rubén De la Torre Vico
b402ced402 docs: change installation methods (#5192) 2024-09-27 12:15:14 -04:00
dependabot[bot]
6bbb9d04a6 chore(deps): bump boto3 from 1.35.26 to 1.35.28 (#5232)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-27 12:13:56 -04:00
dependabot[bot]
6616657c91 chore(deps): bump botocore from 1.35.27 to 1.35.28 (#5220)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-27 11:30:21 -04:00
Amogh Bantwal
853b833cfb feat(aws): Add new check opensearch_service_domains_access_control_enabled (#5203)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-09-27 10:13:43 -04:00
Rubén De la Torre Vico
c047b29140 feat(rds): add missing tags to RDS checks (#5230) 2024-09-27 09:34:25 -04:00
Prowler Bot
c4a39662ae chore(regions_update): Changes in regions for AWS services (#5224)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-09-27 12:30:05 +02:00
dependabot[bot]
66e804f212 chore(deps): bump trufflesecurity/trufflehog from 3.82.5 to 3.82.6 (#5222)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-27 08:42:07 +02:00
Mario Rodriguez Lopez
9d4fa55c13 feat(ecs): Ensure ECS task definitions do not share the host's process namespace (#5146)
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-26 18:24:21 -04:00
Mario Rodriguez Lopez
ff05ce4da1 feat(ecs): Ensure ECS containers have read-only access to root filesystems (#5168)
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-26 14:37:24 -04:00
Mario Rodriguez Lopez
0474c7995c feat(ecs): Ensure ECS containers run as non-privileged (#5214) 2024-09-26 14:05:11 -04:00
Mario Rodriguez Lopez
1a679f371f feat(ecr): Ensure ECR repositories have tag immutability configured (#5144)
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-26 13:51:57 -04:00
Rubén De la Torre Vico
05f7170add feat(dms): add tags to DMS checks (#5209) 2024-09-26 13:33:28 -04:00
Rubén De la Torre Vico
19acb873af feat(glue): add tags to Glue checks (#5213) 2024-09-26 13:11:44 -04:00
Daniel Barranquero
0b566f9666 feat(dynamodb): add new check dynamodb_table_deletion_protection_enabled (#5148) 2024-09-26 11:19:57 -04:00
Rubén De la Torre Vico
67bf89537a chore(ec2): add tags to report of EC2 launch templates (#5210)
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-26 10:50:02 -04:00
Daniel Barranquero
d0681a9e20 fix(aws): change protected_by_backup_plan checks (#5204)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-09-26 10:33:12 -04:00
Rubén De la Torre Vico
31bff99b3d feat(codebuild): add tags support to projects (#5207) 2024-09-26 10:14:02 -04:00
Rubén De la Torre Vico
48c7e65a39 chore(autoscaling): deprecate check autoscaling_find_secrets_ec2_launch_configuration (#5205) 2024-09-26 10:11:54 -04:00
dependabot[bot]
1b407639f0 chore(deps): bump azure-mgmt-network from 26.0.0 to 27.0.0 (#5201)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-26 15:36:42 +02:00
Prowler Bot
4d7d5718d5 chore(regions_update): Changes in regions for AWS services (#5208)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-09-26 08:20:13 -04:00
dependabot[bot]
7955048e79 chore(deps-dev): bump mkdocs-material from 9.5.36 to 9.5.38 (#5206)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-26 10:51:29 +02:00
dependabot[bot]
8e0b715f12 chore(deps): bump trufflesecurity/trufflehog from 3.82.3 to 3.82.5 (#5202)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-26 10:07:17 +02:00
dependabot[bot]
1d81261d97 chore(deps): bump botocore from 1.35.26 to 1.35.27 (#5199)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-26 10:06:40 +02:00
Mario Rodriguez Lopez
114a3088a4 feat(ecs): Ensure public IP addresses are not assigned automatically (#5128)
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-25 16:24:39 -04:00
Rubén De la Torre Vico
bc8f3eba4d feat(backup): add tags to backup vaults and backup plans (#5194) 2024-09-25 11:02:53 -04:00
Hugo Pereira Brito
8e087196c9 feat(s3): Add new check s3_bucket_cross_account_access (#5082)
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-25 10:01:52 -04:00
Amogh Bantwal
744e7ff5ac feat(threat-detection): Use IAM Identity for Cloudtrail Threat Detection instead of IP (#5166) 2024-09-25 09:15:47 -04:00
Prowler Bot
90b84b57d3 chore(regions_update): Changes in regions for AWS services (#5190)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-09-25 09:07:42 -04:00
Sergio Garcia
0a2b7cf152 chore(aws): improve IAM Resource Policy public logic (#5067)
Co-authored-by: Rubén De la Torre Vico <rubendltv22@gmail.com>
2024-09-25 08:33:41 -04:00
Pedro Martín
ebbccd04f1 refactor(execute_check): refactor execute method (#4975) 2024-09-25 14:19:42 +02:00
dependabot[bot]
2b431fc79f chore(deps-dev): bump pylint from 3.3.0 to 3.3.1 (#5187)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-25 12:19:02 +02:00
dependabot[bot]
fe7c3e7548 chore(deps): bump google-api-python-client from 2.146.0 to 2.147.0 (#5185)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-25 10:34:04 +02:00
dependabot[bot]
0e5f929044 chore(deps): bump boto3 from 1.35.24 to 1.35.26 (#5189)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-25 09:39:06 +02:00
Pedro Martín
47a6e28d71 refactor(output_options): remove output options from provider (#5149) 2024-09-25 09:38:21 +02:00
Jude Bae(Bae cheongho)
de5742433b feat(compliance): add KISA ISMS-P compliance framework (#5086)
Co-authored-by: MZC01-JUDE <mzc01-jude@MZC01-JUDE-2.local>
2024-09-25 09:06:05 +02:00
dependabot[bot]
3fcccd0bcd chore(deps): bump botocore from 1.35.25 to 1.35.26 (#5184)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-25 08:42:35 +02:00
dependabot[bot]
00938cadb1 chore(deps): bump trufflesecurity/trufflehog from 3.82.2 to 3.82.3 (#5183)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-25 08:42:06 +02:00
Daniel Barranquero
9fb26643ba feat(dynamodb): add new check dynamodb_accelerator_cluster_in_transit_encryption_enabled (#5173)
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-24 16:32:37 -04:00
Daniel Barranquero
e4890f9d9d feat(dynamodb): add new check dynamodb_table_protected_by_backup_plan (#5175)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-24 12:45:12 -04:00
Hugo Pereira Brito
980b9b4770 feat(networkfirewall): change network_firewalls from list to dict (#5169) 2024-09-24 12:43:19 -04:00
Sergio Garcia
348cea67c0 fix(aws): always use audited partition (#5174) 2024-09-24 11:38:11 -04:00
Sergio Garcia
f4d89066d9 feat(aws): add new check organizations_opt_out_ai_services_policy (#5152) 2024-09-24 11:37:03 -04:00
dependabot[bot]
b26dc899be chore(deps-dev): bump moto from 5.0.14 to 5.0.15 (#5158)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-24 09:04:52 -04:00
Sergio Garcia
25327d618d chore(aws): handle NotAction cases in IAM policies (#5035) 2024-09-24 08:36:11 -04:00
Sergio Garcia
3951295c0c chore(organizations): improve AWS Organizations service (#5151) 2024-09-24 08:28:21 -04:00
Prowler Bot
ff9c3b52d6 chore(regions_update): Changes in regions for AWS services (#5167)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-09-24 08:17:19 -04:00
dependabot[bot]
af8c18eb4e chore(deps-dev): bump bandit from 1.7.9 to 1.7.10 (#5157)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-24 08:16:27 -04:00
dependabot[bot]
6fbfcc7f5f chore(deps): bump botocore from 1.35.24 to 1.35.25 (#5155)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-24 12:33:29 +02:00
dependabot[bot]
7c7132f9c4 chore(deps-dev): bump mkdocs-material from 9.5.35 to 9.5.36 (#5156)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-24 11:04:06 +02:00
dependabot[bot]
62e30f929c chore(deps): bump boto3 from 1.35.23 to 1.35.24 (#5154)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-24 10:06:27 +02:00
Pepe Fagoaga
ddaafd5876 chore(bot): Use bot Token (#5163) 2024-09-24 10:06:00 +02:00
Mario Rodriguez Lopez
1f43e6eff9 feat(inspector2): Add more tests to inspector2_is_enabled check (#5150) 2024-09-23 15:06:34 -04:00
Daniel Barranquero
aa118c05c5 feat(rds): add new check rds_cluster_non_default_port (#5113) 2024-09-23 15:05:56 -04:00
Hugo Pereira Brito
cca17b9378 feat(cloudfront): add new check cloudfront_distributions_s3_origin_non_existing_bucket (#4996)
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-23 12:43:03 -04:00
Pedro Martín
14ed19e3a8 fix(iam): fill resource id with inline policy entity (#5120) 2024-09-23 10:54:38 -04:00
dependabot[bot]
8caf8f794c chore(deps): bump azure-mgmt-cosmosdb from 9.5.1 to 9.6.0 (#5111)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-23 09:38:15 -04:00
dependabot[bot]
cba9ad61e4 chore(deps): bump msgraph-sdk from 1.7.0 to 1.8.0 (#5110)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-23 08:48:42 -04:00
dependabot[bot]
e64a0eff0f chore(deps): bump botocore from 1.35.23 to 1.35.24 (#5140)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-23 08:01:40 -04:00
dependabot[bot]
23c65b8fde chore(deps): bump pandas from 2.2.2 to 2.2.3 (#5139)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-23 10:33:38 +02:00
dependabot[bot]
a7c93f3237 chore(deps-dev): bump pylint from 3.2.7 to 3.3.0 (#5138)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-23 09:41:04 +02:00
dependabot[bot]
7b9402f3d0 chore(deps): bump kubernetes from 30.1.0 to 31.0.0 (#5137)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-23 08:29:49 +02:00
Sergio Garcia
4badcca4f8 fix(gcp): add default project for org level checks (#5003) 2024-09-20 20:39:35 +02:00
Hugo Pereira Brito
c6daa60f26 feat(elasticache): add check elasticache_redis_cluster_auth_enabled (#4830)
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-20 12:18:08 -04:00
Harshit Raj Singh
f9aa2bb8be fix(lightsail): Remove second call to is_resource_filtered (#5044) 2024-09-20 11:39:03 -04:00
Rubén De la Torre Vico
66ac395705 chore(README): update checks summary table (#5119) 2024-09-20 11:27:19 -04:00
Sergio Garcia
16a251254e fix(gcp): solve errors in GCP services (#5016) 2024-09-20 11:06:57 -04:00
Sergio Garcia
751958907c fix(vpc): check all routes tables in subnet (#5081) 2024-09-20 10:13:16 -04:00
Sergio Garcia
60012ab19d chore(deps): update docs dependencies (#5098) 2024-09-20 10:13:09 -04:00
dependabot[bot]
65d7ba020b chore(deps): bump boto3 from 1.35.21 to 1.35.23 (#5115)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-20 09:13:09 -04:00
Sergio Garcia
9456c6198a chore(ssm): add trusted accounts variable to ssm check (#5005)
Co-authored-by: pedrooot <pedromarting3@gmail.com>
2024-09-20 09:12:48 -04:00
Sergio Garcia
45ce1a0650 fix(asff): include status extended in ASFF output (#5097) 2024-09-20 09:08:13 -04:00
dependabot[bot]
4c5db5295c chore(deps): bump botocore from 1.35.22 to 1.35.23 (#5109)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-20 14:18:55 +02:00
dependabot[bot]
a2ad0cdf30 chore(deps): bump azure-identity from 1.17.1 to 1.18.0 (#5108)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-20 13:29:43 +02:00
dependabot[bot]
0c70a64e84 chore(deps): bump slack-sdk from 3.33.0 to 3.33.1 (#5107)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-20 11:57:41 +02:00
Mario Rodriguez Lopez
73c96f8346 feat(sagemaker): Ensure SageMaker Endpoint Production Variants have Initial Instance Count greater than one (#5045)
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-19 15:16:56 -04:00
Amogh Bantwal
0974c5f333 feat(slack): add more information about critical findings (#5042) 2024-09-19 14:02:09 -04:00
Hugo Pereira Brito
7db0746416 feat(guardduty): add new check guardduty_rds_protection_enabled (#5100) 2024-09-19 13:52:17 -04:00
dependabot[bot]
8f0bf5e896 chore(deps-dev): bump pytest-env from 1.1.4 to 1.1.5 (#5090)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-19 12:29:43 -04:00
Pedro Martín
57abe1c839 fix(accessanalyzer): refactor accessanalyzer enabled fixer test (#5026) 2024-09-19 11:09:03 -04:00
Daniel Barranquero
43183962ad feat(aws): Add new check to ensure RDS instances are not using default database engine ports (#4973)
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-19 10:14:46 -04:00
Daniel Barranquero
87948b458e feat(guardduty): add new check guardduty_s3_protection_enabled (#5087)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-09-19 10:10:39 -04:00
dependabot[bot]
ab5c3eb4f8 chore(deps): bump botocore from 1.35.21 to 1.35.22 (#5089)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-19 09:30:24 -04:00
Rubén De la Torre Vico
320a2a2c77 feat(awslambda): add new check awslambda_function_vpc_multi_az (#4816)
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-19 09:30:04 -04:00
Sergio Garcia
dbc8e140e3 chore(docs): change ResourceType link of Security Hub (#5063) 2024-09-19 07:25:41 -04:00
Hugo Pereira Brito
21ac395d4c fix(elasticache): get correct automatic failover attribute (#5084)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-09-18 18:29:43 -04:00
Mario Rodriguez Lopez
8a8c2b5097 feat(ecs): add new check ecs_task_definitions_host_networking_mode_users (#5088)
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-18 18:28:31 -04:00
dependabot[bot]
3bea772c6b chore(deps): bump slack-sdk from 3.32.0 to 3.33.0 (#5069)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-18 18:28:20 -04:00
Lefteris
34679c98d6 feat(dms): new check dms_endpoint_ssl_enabled (#4968)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
Co-authored-by: Rubén De la Torre Vico <rubendltv22@gmail.com>
2024-09-18 17:46:56 -04:00
dependabot[bot]
2b41445d57 chore(deps): bump boto3 from 1.35.19 to 1.35.21 (#5085)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-18 17:21:02 -04:00
dependabot[bot]
796c87bc93 chore(deps): bump google-api-python-client from 2.145.0 to 2.146.0 (#5070)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-18 16:32:09 -04:00
dependabot[bot]
a83e08aa9e chore(deps-dev): bump vulture from 2.11 to 2.12 (#5071)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-18 13:59:25 -04:00
Hugo Pereira Brito
ae794c7c32 feat(cloudfront): Ensure Cloudfront distributions have origin failover configured (#4868)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-09-18 13:26:35 -04:00
dependabot[bot]
edc78bfd6b chore(deps): bump botocore from 1.35.20 to 1.35.21 (#5073)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-18 13:18:17 -04:00
dependabot[bot]
9263adeb78 chore(deps): bump azure-storage-blob from 12.22.0 to 12.23.0 (#5072)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-18 12:19:46 -04:00
Prowler Bot
bfdc87723b chore(regions_update): Changes in regions for AWS services (#5080)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-09-18 11:33:01 -04:00
Rubén De la Torre Vico
8d23e81b1c feat(elb): add new check elb_connection_draining_enabled (#5014)
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-18 10:49:33 -04:00
Daniel Barranquero
f0cd924016 feat(neptune): add new check neptune_cluster_copy_tags_to_snapshots (#5062)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-09-18 09:23:44 -04:00
Mario Rodriguez Lopez
c425e8249b fix(inspector2): Ensure Inspector2 is enabled for ECR, EC2, Lambda and Lambda Code (#5061)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-09-17 14:01:19 -04:00
Daniel Barranquero
1ece8bbcd6 feat(neptune): add new check neptune_cluster_snapshot_encrypted (#5058)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-09-17 13:16:43 -04:00
Daniel Barranquero
5fb2d7c3ce feat(neptune): add new check neptune_cluster_integration_cloudwatch_logs (#5048)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-09-17 12:20:25 -04:00
Prowler Bot
64aebe84fe chore(regions_update): Changes in regions for AWS services (#5059)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-09-17 11:52:41 -04:00
Rubén De la Torre Vico
de831b0abe chore(AWS): match all AWS resource types with SecurityHub supported types in metadata (#4882)
Co-authored-by: Sergio <sergio@prowler.com>
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-09-17 11:40:45 -04:00
dependabot[bot]
68af4f6c73 chore(deps): bump botocore from 1.35.19 to 1.35.20 (#5053)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-17 11:39:24 -04:00
dependabot[bot]
52981b54b9 chore(deps): bump trufflesecurity/trufflehog from 3.82.1 to 3.82.2 (#5052)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-17 08:36:42 -04:00
dependabot[bot]
a366594714 chore(deps): bump boto3 from 1.35.16 to 1.35.19 (#5049)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-16 15:16:44 -04:00
Hugo Pereira Brito
1fb36f316b fix(cloudfront): duplicated link in cloudfront_distributions_https_sni_enabled check (#5047) 2024-09-16 15:16:26 -04:00
dependabot[bot]
30ffa8f00b chore(deps): bump azure-mgmt-containerservice from 31.0.0 to 32.0.0 (#5036)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-16 13:45:01 -04:00
Prowler Bot
5855918ade chore(regions_update): Changes in regions for AWS services (#5041)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-09-16 13:44:47 -04:00
dependabot[bot]
f9005c875f chore(deps): bump botocore from 1.35.18 to 1.35.19 (#5037)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-16 12:52:59 -04:00
Mario Rodriguez Lopez
91bf99ca45 feat(ec2): Ensure EC2 launch templates do not assign public IPs (#4852)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-09-16 12:52:40 -04:00
dependabot[bot]
8176063fef chore(deps): bump dash from 2.18.0 to 2.18.1 (#5024)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-16 11:25:08 -04:00
Mario Rodriguez Lopez
3373822240 feat(ec2): EBS Volumes Should Be Covered by a Backup Plan (#5028)
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-16 11:23:23 -04:00
Hugo Pereira Brito
7e16702b2f feat(cloudfront): add cloudfront_distributions_origin_traffic_encrypted check to ensure traffic encryption to custom origins (#4958)
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-16 09:12:37 -04:00
Daniel Barranquero
f54b64f1f8 feat(rds): add new check rds_instance_inside_vpc (#5029)
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-16 08:56:39 -04:00
dependabot[bot]
2c337ab3f6 chore(deps-dev): bump mkdocs-git-revision-date-localized-plugin from 1.2.8 to 1.2.9 (#5023)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-13 14:44:33 -04:00
dependabot[bot]
5279d937d7 chore(deps): bump botocore from 1.35.17 to 1.35.18 (#5021)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-13 11:24:55 -04:00
Hugo Pereira Brito
48c31a1616 feat(cloudfront): Add new cloudfront_distributions_s3_origin_access_control check to ensure OAC is configured in distributions (#4939)
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-13 10:51:49 -04:00
Rubén De la Torre Vico
917a2ad0fe docs(check): change where extract ResourceTypes (#5030) 2024-09-13 10:51:09 -04:00
Rubén De la Torre Vico
8cfc4c56cf docs(dev-guide): refer poetry docs for installation (#5031) 2024-09-13 10:45:57 -04:00
Prowler Bot
99e9e42a17 chore(regions_update): Changes in regions for AWS services (#5027)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-09-13 10:38:08 -04:00
dependabot[bot]
13c95ba131 chore(deps): bump trufflesecurity/trufflehog from 3.81.10 to 3.82.1 (#5025)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-13 08:59:06 -04:00
LefterisXefteris
600a8c7804 chore(aws): add mixed regions test for s3_access_point_public_access_block (#4877)
Co-authored-by: Lefteris Gilmaz <lefterisgilmaz@Lefteriss-MacBook-Pro.local>
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-12 15:58:39 -04:00
Hugo Pereira Brito
64fb52fc5e feat(cloudfront): add new check cloudfront_distributions_custom_ssl_certificate (#4959)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-09-12 15:00:48 -04:00
Mario Rodriguez Lopez
92b6e7230d feat(ec2): Amazon EC2 Instances Should Not Use Multiple ENIs (#4935)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-09-12 14:29:36 -04:00
Hugo Pereira Brito
cc8bc781c1 feat(elasticache): Ensure Redis replication groups have automatic failover enabled (#4853)
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-12 12:23:15 -04:00
Hugo Pereira Brito
edbe463d73 feat(cloudfront): Add new check cloudfront_distributions_default_root_object (#4938)
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-12 10:58:24 -04:00
Sergio Garcia
8ace8c01cf chore(refactor): make Provider generation global (#4961)
Co-authored-by: pedrooot <pedromarting3@gmail.com>
2024-09-12 16:56:58 +02:00
Hugo Pereira Brito
8f37252676 feat(cloudfront): Ensure distributions use SNI to serve HTTPS requests (#4888)
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-12 09:28:26 -04:00
Mario Rodriguez Lopez
c0c59968bf feat(ec2): Ensure both VPN tunnels for an AWS Site-to-Site VPN connection are UP (#4948) 2024-09-12 08:26:35 -04:00
dependabot[bot]
9f5a909be3 chore(deps): bump msgraph-sdk from 1.6.0 to 1.7.0 (#5013)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-12 14:10:50 +02:00
dependabot[bot]
90975bdadc chore(deps): bump pytz from 2024.1 to 2024.2 (#5012)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-12 11:32:58 +02:00
dependabot[bot]
7d1fad9eb7 chore(deps): bump botocore from 1.35.16 to 1.35.17 (#5011)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-12 10:50:07 +02:00
dependabot[bot]
983c79ad3b chore(deps): bump boto3 from 1.35.15 to 1.35.16 (#5010)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-12 09:16:53 +02:00
Mario Rodriguez Lopez
96e73fcb63 feat(ec2): Amazon EC2 Paravirtual Instance Types Should Not Be Used (#4922)
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-11 15:56:20 -04:00
Pedro Martín
70a3736073 fix(awslambda): add audit config to lambda_client in tests (#4999) 2024-09-11 12:15:22 -04:00
Pedro Martín
1e8e8ba65c fix(iam-gcp): add getters in iam_service for gcp (#4998) 2024-09-11 11:01:58 -04:00
dependabot[bot]
359a1f2c8e chore(deps): bump botocore from 1.35.15 to 1.35.16 (#4989)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-11 10:53:18 -04:00
Mario Rodriguez Lopez
2e4f8cbfc7 feat(ec2): Ensure non-default Network Access Control Lists are used (#4917)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-09-11 09:55:18 -04:00
Prowler Bot
482aee0d9d chore(regions_update): Changes in regions for AWS services (#4995)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-09-11 09:52:28 -04:00
Daniel Barranquero
0ae3374e81 feat(aws): Add new check to ensure Aurora MySQL DB Clusters publish audit logs to CloudWatch logs (#4916)
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-11 09:10:49 -04:00
Mario Rodriguez Lopez
ddc088859e feat(vpc): Ensure Amazon EC2 Is Configured to Use VPC Endpoints Created for the Amazon EC2 Service (#4872) 2024-09-11 09:08:25 -04:00
dependabot[bot]
5e3da2d687 chore(deps): bump google-api-python-client from 2.144.0 to 2.145.0 (#4990)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-11 13:28:13 +02:00
Pedro Martín
1af7f658a8 refactor(azure): remove validate_arguments for CLI (#4985) 2024-09-11 13:13:06 +02:00
dependabot[bot]
1298620da8 chore(deps-dev): bump pytest from 8.3.2 to 8.3.3 (#4991)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-11 08:59:46 +02:00
Hugo Pereira Brito
75c48cfaa3 refactor(cloudfront): replace origins dictionary with custom Origin class (#4981)
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-10 16:04:57 -04:00
Sergio Garcia
3406a07ae5 fix(audit): solve resources audit (#4983) 2024-09-10 15:41:59 -04:00
Sergio Garcia
cc9e1c5af8 chore(dependencies): update boto3 and botocore packages (#4976) 2024-09-10 15:36:23 -04:00
Sergio Garcia
0343f01cca chore(README): update summary table (#4984) 2024-09-10 21:17:33 +02:00
dependabot[bot]
cad7985c28 chore(deps-dev): bump moto from 5.0.13 to 5.0.14 (#4965)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-10 14:36:21 -04:00
Pedro Martín
71030f6f42 fix(main): logic for resource_tag and resource_arn usage (#4979)
Co-authored-by: Sergio <sergio@prowler.com>
2024-09-10 14:07:07 -04:00
Daniel Barranquero
6883467d2f feat(aws): Add new check to ensure RDS DB clusters are encrypted at rest (#4931) 2024-09-10 13:40:08 -04:00
Sergio Garcia
2c6944176f fix(rds): handle new rds arn template function syntax (#4980) 2024-09-10 13:24:19 -04:00
Daniel Barranquero
1ef15f0b24 feat(aws): Add new check to ensure RDS event notification subscriptions are configured for critical database parameter group events (#4907) 2024-09-10 11:10:57 -04:00
dependabot[bot]
f5b0583df5 chore(deps-dev): bump pytest-env from 1.1.3 to 1.1.4 (#4966)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-10 10:17:36 -04:00
Daniel Barranquero
db225e9d2a feat(aws): Add new RDS check to ensure db instances are protected by a backup plan (#4879)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-09-10 10:14:40 -04:00
Daniel Barranquero
c9ae9df87f feat(aws): Add new check to ensure RDS event notification subscriptions are configured for critical database instance events (#4891) 2024-09-10 09:26:15 -04:00
Daniel Barranquero
159a090c02 feat(aws): Add new check to ensure RDS event notification subscriptions are configured for critical cluster events (#4887) 2024-09-10 09:25:42 -04:00
Daniel Barranquero
605c6770e5 fix(rds): Modify RDS Event Notification Subscriptions for Security Groups Events check (#4969) 2024-09-10 09:13:46 -04:00
Pedro Martín
ae950484ed fix(aws): make intersection to retrieve checks to execute (#4970) 2024-09-10 13:24:35 +02:00
Prowler Bot
c54b815b90 chore(regions_update): Changes in regions for AWS services (#4971)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-09-10 12:55:06 +02:00
Pedro Martín
7a937c7708 refactor(provider): move audit and fixer config inside the provider (#4960) 2024-09-10 09:48:11 +02:00
dependabot[bot]
d62e74853e chore(deps-dev): bump mkdocs-git-revision-date-localized-plugin from 1.2.7 to 1.2.8 (#4967)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-10 09:22:10 +02:00
Mario Rodriguez Lopez
bab59bc86e feat(EC2): Change service to adjust the data saved in template_data in LaunchTemplateVersion (#4848) 2024-09-09 12:32:39 -04:00
dependabot[bot]
39e8485fc1 chore(deps): bump slack-sdk from 3.31.0 to 3.32.0 (#4955)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-09 11:10:40 +02:00
Prowler Bot
b9f46cafff chore(regions_update): Changes in regions for AWS services (#4956)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-09-09 09:15:40 +02:00
Pedro Martín
48377ca865 feat(azure): add custom exception class (#4871) 2024-09-06 14:50:27 +02:00
Pedro Martín
4d902e02bb fix(security-groups): remove RFC1918 from ec2_securitygroup_allow_wide_open_public_ipv4 (#4951) 2024-09-06 13:42:28 +02:00
Pedro Martín
e146491d4b fix(aws): change check metadata ec2_securitygroup_allow_wide_open_public_ipv4 (#4946) 2024-09-06 12:31:19 +02:00
Pedro Martín
4eed5c7a99 refactor(check_metadata): move bulk_load_checks_metadata inside class (#4934) 2024-09-06 09:50:14 +02:00
dependabot[bot]
f169599a56 chore(deps): bump msgraph-sdk from 1.5.4 to 1.6.0 (#4940)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: pedrooot <pedromarting3@gmail.com>
2024-09-06 09:49:20 +02:00
dependabot[bot]
95768baa9e chore(deps): bump google-api-python-client from 2.143.0 to 2.144.0 (#4943)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-06 08:24:31 +02:00
Pedro Martín
d8d348f609 feat(kubernetes): add custom exception class (#4912) 2024-09-05 16:52:34 +02:00
dependabot[bot]
bd336250ee chore(deps): bump dash from 2.17.1 to 2.18.0 (#4932)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-05 09:16:51 +02:00
Pedro Martín
a975e96a45 feat(compliance): add method list_compliance_requirements (#4890)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-09-04 20:35:26 +02:00
Pedro Martín
3933440a08 feat(secrets): improve detect secrets checks and add config (#4915) 2024-09-04 16:54:55 +02:00
Prowler Bot
36e7bf0912 chore(regions_update): Changes in regions for AWS services (#4929)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-09-04 11:45:59 +02:00
dependabot[bot]
897e25dd3c chore(deps): bump cryptography from 43.0.0 to 43.0.1 (#4928)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-04 09:46:58 +02:00
dependabot[bot]
f4a8059f9b chore(deps): bump cryptography from 43.0.0 to 43.0.1 (#4923)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-04 08:54:56 +02:00
dependabot[bot]
71d844c101 chore(deps): bump peter-evans/create-pull-request from 6 to 7 (#4926)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-04 08:53:26 +02:00
Pedro Martín
c2b2754926 feat(gcp): add custom exceptions class (#4908) 2024-09-03 15:56:49 +02:00
Pedro Martín
cfd4019281 fix(aws): raise ArgumentTypeError for parser (#4921) 2024-09-03 13:47:43 +02:00
dependabot[bot]
989fce300d chore(deps-dev): bump pylint from 3.2.6 to 3.2.7 (#4920)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-09-03 07:21:52 +02:00
Amogh Bantwal
70fdc2693e feat(html): Add number of muted findings in HTML report #4703 (#4895) 2024-09-02 10:13:06 +02:00
Rubén De la Torre Vico
9797c11152 chore(prowler): change all methods from services from format double underscore to single underscore (#4910) 2024-09-02 10:07:21 +02:00
Pedro Martín
007c1febf7 fix(metadata): change description from documentdb_cluster_deletion_protection (#4909) 2024-09-02 09:59:29 +02:00
Pepe Fagoaga
163027a49d chore(aws): Remove token from log line (#4903) 2024-08-30 11:50:18 +02:00
Pepe Fagoaga
80c4802b36 chore(aws_mutelist): Add more Control Tower resources and tests (#4900) 2024-08-30 10:13:00 +02:00
dependabot[bot]
285eb45673 chore(deps): bump trufflesecurity/trufflehog from 3.81.9 to 3.81.10 (#4898)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-30 09:44:12 +02:00
dependabot[bot]
5c2f2ee3b3 chore(deps-dev): bump safety from 3.2.6 to 3.2.7 (#4899)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-30 09:43:58 +02:00
Pedro Martín
1f83e4fe7b chore(pull-request): add check for backport (#4901) 2024-08-30 09:42:52 +02:00
Pedro Martín
b29f99441a feat(aws): add custom exceptions class (#4847) 2024-08-29 19:08:47 +02:00
Pedro Martín
82c065bff4 feat(compliance): rename Compliance class and add list_compliance (#4883) 2024-08-29 16:55:22 +02:00
Pedro Martín
168d44d14b docs(fixers): improve docs about fixers (#4889) 2024-08-29 14:15:31 +02:00
dependabot[bot]
910a72140b chore(deps): bump google-api-python-client from 2.142.0 to 2.143.0 (#4884)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-29 07:56:38 +02:00
Prowler Bot
d988877173 chore(regions_update): Changes in regions for AWS services (#4880)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-08-28 11:45:12 +02:00
Toni de la Fuente
4fd673fd7c chore(readme): Update Slack invite link (#4875) 2024-08-27 21:44:12 +02:00
Pepe Fagoaga
1bff2451e5 chore(release): Remove unused step (#4874) 2024-08-27 16:40:15 +02:00
Pepe Fagoaga
0921daf18b chore: remove not used variable (#4873) 2024-08-27 16:31:13 +02:00
Pedro Martín
7ff80dbb8f fix(rds): get the db_instances values (#4866) 2024-08-27 13:22:54 +02:00
dependabot[bot]
f487bda1fe chore(deps): bump numpy from 2.0.1 to 2.0.2 (#4869)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-27 08:05:57 +02:00
Pepe Fagoaga
d61e999b8f chore(check_metadata): Rename to CheckMetadata (#4864) 2024-08-26 15:25:19 +02:00
Rubén De la Torre Vico
bcb63d0b2d feat(elb): add new check elb_is_in_multiple_az (#4829)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-08-26 13:27:08 +02:00
Pepe Fagoaga
71f50422ad chore(aws-region): Use Prowler Bot (#4863) 2024-08-26 11:04:02 +02:00
Rubén De la Torre Vico
2b49aa8e89 chore(readme): Update the number of AWS checks (#4860) 2024-08-26 10:09:54 +02:00
Pedro Martín
921b6b1e85 fix(aws): enhance check cloudformation_stack_outputs_find_secrets (#4859) 2024-08-26 10:08:19 +02:00
dependabot[bot]
fc155e8368 chore(deps): bump azure-mgmt-compute from 32.0.0 to 33.0.0 (#4856)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-26 08:01:31 +02:00
Rubén De la Torre Vico
79f1cf89cf feat(elb): add new check elb_cross_zone_load_balancing_enabled (#4818)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-08-23 10:09:32 -04:00
Pedro Martín
496d4daf01 refactor(azure): refactor azure provider (#4653)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-08-23 10:01:35 -04:00
Daniel Barranquero
559c0d4e0b chore(aws): Change RDS instance type from list to dict (#4851) 2024-08-23 09:26:53 -04:00
Pedro Martín
2fda2388bb refactor(aws): Refactor provider (#4808)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-08-23 09:19:05 -04:00
Pepe Fagoaga
0f79312c33 chore(backport): Use Prowler-Bot PAT (#4855) 2024-08-23 09:18:24 -04:00
Daniel Barranquero
472aea6a91 feat(aws): Add new check to ensure RDS db clusters copy tags to snapshots (#4846) 2024-08-23 09:09:52 -04:00
Pedro Martín
0d18406f80 refactor(kubernetes): refactor provider (#4805)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-08-23 14:22:03 +02:00
Pedro Martín
05da5d1796 refactor(gcp): refactor GCP provider (#4790)
Co-authored-by: Sergio <sergio@prowler.com>
2024-08-23 07:37:02 -04:00
Sergio Garcia
fb449cede8 fix(aws): handle AWS key-only tags (#4845) 2024-08-23 13:02:59 +02:00
Pepe Fagoaga
61df2ce0c2 chore(regions_update): Changes in regions for AWS services. (#4849)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-08-23 11:45:45 +02:00
Pedro Martín
b7e20344a8 docs(is_item_matched): update docstrings for method (#4836)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-08-23 10:15:15 +02:00
Sergio Garcia
c2552ee508 fix: handle empty input regions (#4841) 2024-08-22 13:54:18 -04:00
Hugo Pereira Brito
57f1fa5bfa feat(s3): add s3_bucket_lifecycle_enabled check (#4801)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-08-22 12:24:59 -04:00
Rubén De la Torre Vico
0b238243b1 feat(elbv2): add new check elbv2_is_in_multiple_az (#4800)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-08-22 11:08:49 -04:00
Sergio Garcia
df405254c6 fix(aws): enhance resource arn filtering (#4821) 2024-08-22 16:48:25 +02:00
Daniel Barranquero
460acf2860 feat(aws): Add new RDS check to verify that db instances copy tags to snapshots (#4806) 2024-08-22 10:44:26 -04:00
Rubén De la Torre Vico
dec3e652c5 feat(IAM): add new check iam_group_administrator_access_policy (#4831)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-08-22 10:14:45 -04:00
Mario Rodriguez Lopez
fc03188bfb feat(ec2): Client VPN Endpoints Should Have Client Connection Logging Enabled (#4804)
Co-authored-by: Sergio <sergio@prowler.com>
2024-08-22 09:57:33 -04:00
Mario Rodriguez Lopez
ff244138d9 feat(ec2): Ensure automatic acceptance of VPC attachment requests is disabled (#4765) 2024-08-22 08:26:01 -04:00
Sergio Garcia
903f9c576f chore(test): improve iam_root_hardware_mfa_enabled tests (#4833) 2024-08-22 08:08:25 -04:00
Daniel Barranquero
0005f86a5f feat(aws): Add new RDS check to ensure db clusters are configured for multiple availability zones (#4781) 2024-08-22 07:49:59 -04:00
Daniel Barranquero
a2144ad353 chore(rds): Revert changes on inherited instance checks (#4827) 2024-08-22 07:33:25 -04:00
Pepe Fagoaga
5f075b296d chore(regions_update): Changes in regions for AWS services. (#4826)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
Co-authored-by: Pedro Martín <pedromarting3@gmail.com>
2024-08-22 13:21:45 +02:00
dependabot[bot]
0c7b960e08 chore(deps-dev): bump safety from 3.2.5 to 3.2.6 (#4825)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-22 08:26:58 +02:00
dependabot[bot]
c65e91f834 chore(deps): bump tj-actions/changed-files from 44 to 45 (#4822)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-22 08:25:43 +02:00
Pedro Martín
5876fea163 fix(outputs): refactor unroll_tags to use str as tags (#4817) 2024-08-21 12:40:46 -04:00
Pepe Fagoaga
a557d62d84 chore(regions_update): Changes in regions for AWS services. (#4814)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-08-21 13:07:03 +02:00
dependabot[bot]
f25319f3f6 chore(deps): bump azure-mgmt-web from 7.3.0 to 7.3.1 (#4813)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-21 11:11:54 +02:00
dependabot[bot]
1e02b05d2d chore(deps): bump google-api-python-client from 2.141.0 to 2.142.0 (#4812)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-21 08:33:53 +02:00
Rubén De la Torre Vico
78042063cb feat(iam): add new check to ensure user does not have policies with admin access (#4802) 2024-08-20 11:08:51 -04:00
Mario Rodriguez Lopez
8129b174f1 feat(CodeBuild): Ensure source repository URLs do not contain sensitive credentials (#4731)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-08-20 09:44:55 -04:00
Daniel Barranquero
3f78fb4220 feat(aws): Add new RDS check for deletion protection enabled on clusters (#4738) 2024-08-20 09:07:11 -04:00
Pedro Martín
e11bb478d6 fix(mutelist): change logic for tags in aws mutelist (#4786) 2024-08-20 07:38:06 -04:00
dependabot[bot]
dec5fb6428 chore(deps-dev): bump mkdocs-git-revision-date-localized-plugin from 1.2.6 to 1.2.7 (#4796)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-20 09:34:40 +02:00
dependabot[bot]
256ccfea79 chore(deps-dev): bump moto from 5.0.12 to 5.0.13 (#4795)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-20 08:16:18 +02:00
Rubén De la Torre Vico
1a8bc14587 feat(awslambda): New check to ensure that a function is inside VPC (#4783)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-08-19 14:22:21 -04:00
Rubén De la Torre Vico
8483486095 chore(elbv2): Add SecurityHub link to elbv2_ssl_listeners metadata (#4787) 2024-08-19 13:06:34 -04:00
Rubén De la Torre Vico
7aaecbabab chore(elbv2): add SecurityHub link to elbv2_desync_mitigation_mode metadata (#4791) 2024-08-19 13:04:48 -04:00
Rubén De la Torre Vico
5cc9554c23 chore(awslambda): Enhance function public access check called from other resource (#4679)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-08-19 13:03:30 -04:00
Hugo Pereira Brito
5d42ae6e6f feat(s3): add s3_bucket_cross_region_replication check (#4761)
Co-authored-by: Sergio <sergio@prowler.com>
2024-08-19 12:42:42 -04:00
Sergio Garcia
38b73fb0c0 feat(kubernetes): add a test_connection method (#4684) 2024-08-19 12:12:00 -04:00
Sergio Garcia
84a76f4535 feat(gcp): add a test_connection method (#4616)
Co-authored-by: Rubén De la Torre Vico <rubendltv22@gmail.com>
2024-08-19 12:11:20 -04:00
Rubén De la Torre Vico
a126fd82b3 fix(ec2): Manage UnicodeDecodeError when reading user data (#4785) 2024-08-19 11:34:39 -04:00
Rubén De la Torre Vico
bf139138e0 chore(azure): Fix CIS 2.1 mapping (#4760) 2024-08-19 11:44:34 +02:00
dependabot[bot]
0fcf4243f5 chore(deps): bump boto3 from 1.34.160 to 1.34.162 (#4778)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-19 09:14:39 +02:00
dependabot[bot]
bbb0248bc1 chore(deps): bump google-api-python-client from 2.140.0 to 2.141.0 (#4751)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-16 12:11:28 -04:00
Sergio Garcia
e6581255c2 fix(iam): update logic of Root Hardware MFA check (#4726) 2024-08-16 11:49:30 -04:00
Sergio Garcia
717932ae26 fix(aws): run Prowler as IAM Root or Federated User (#4712) 2024-08-16 11:49:14 -04:00
Sergio Garcia
3f56731e6d fix(version): update version flag logic (#4688) 2024-08-16 11:48:57 -04:00
Pepe Fagoaga
0f837f658e chore(regions_update): Changes in regions for AWS services. (#4753)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-08-16 11:45:12 -04:00
Sergio Garcia
b70977163e fix(ecr): change log level of non-scanned images (#4747) 2024-08-16 11:43:04 -04:00
Sergio Garcia
98fc624010 fix(ecr): handle non-existing findingSeverityCounts key (#4746) 2024-08-16 11:42:53 -04:00
dependabot[bot]
ccb755340f chore(deps): bump botocore from 1.34.160 to 1.34.162 (#4758)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-16 11:28:04 -04:00
Mario Rodriguez Lopez
49ff901195 feat(EC2): Add new check for security group port restrictions (#4594) 2024-08-16 09:43:00 -04:00
dependabot[bot]
e7d0d49809 chore(deps): bump trufflesecurity/trufflehog from 3.81.8 to 3.81.9 (#4756)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-16 09:35:08 -04:00
Hugo Pereira Brito
47bb97961c chore(cloudtrail): add remediation link to check cloudtrail_s3_dataevents_read_enabled (#4764) 2024-08-16 09:33:09 -04:00
Hugo Pereira Brito
1178317567 chore(cloudtrail): add remediation link to check cloudtrail_s3_dataevents_write_enabled (#4762) 2024-08-16 09:32:35 -04:00
dependabot[bot]
edd0dd1080 chore(deps): bump boto3 from 1.34.159 to 1.34.160 (#4750)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-16 09:18:48 -04:00
Hugo Pereira Brito
ae1b114a13 refactor(s3): Changed buckets variable type from list to dict (#4742) 2024-08-14 10:28:06 -04:00
dependabot[bot]
3c9c28f351 chore(deps): bump botocore from 1.34.159 to 1.34.160 (#4735)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-14 10:20:15 -04:00
dependabot[bot]
93e6751e35 chore(deps): bump boto3 from 1.34.158 to 1.34.159 (#4734)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-14 09:30:11 -04:00
Daniel Barranquero
680781656b feat(aws): Add new RDS check to verify that cluster minor version upgrade is enabled (#4725) 2024-08-14 09:04:27 -04:00
Pepe Fagoaga
21382efd07 chore(regions_update): Changes in regions for AWS services. (#4739)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-08-14 08:31:50 -04:00
Hugo Pereira Brito
097e61ab9d feat(elasticache): Ensure Redis Cache Clusters Automatically Install Minor Updates (#4699) 2024-08-14 08:28:16 -04:00
Daniel Barranquero
52d83bd83b feat(aws): Split the checks that mix RDS Instances and Clusters (#4730) 2024-08-13 10:16:50 -04:00
dependabot[bot]
49cfe15abc chore(deps): bump botocore from 1.34.158 to 1.34.159 (#4728)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-13 09:03:15 -04:00
Mario Rodriguez Lopez
0ef30c655a fix(ACM): Change check logic to scan only in-use certificates (#4732) 2024-08-13 08:39:27 -04:00
Daniel Barranquero
e2d211c188 feat(aws): Add new Neptune check for cluster snapshot visibility (#4709) 2024-08-13 08:27:35 -04:00
Daniel Barranquero
62a1d91869 feat(aws): Add new CodeBuild check to validate environment variables (#4632)
Co-authored-by: Sergio <sergio@prowler.com>
2024-08-13 08:15:45 -04:00
dependabot[bot]
8c1347323e chore(deps): bump boto3 from 1.34.157 to 1.34.158 (#4727)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-13 08:13:00 -04:00
Mario Rodriguez Lopez
cb807e4aed feat(DocumentDB): Add new DocumentDB check for cluster snapshot visibility (#4702) 2024-08-12 14:05:04 -04:00
dependabot[bot]
bcc8d5f1fe chore(deps-dev): bump safety from 3.2.4 to 3.2.5 (#4722)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Sergio <sergio@prowler.com>
2024-08-12 10:03:00 -04:00
dependabot[bot]
59acd303fb chore(deps): bump botocore from 1.34.157 to 1.34.158 (#4721)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-12 08:40:42 -04:00
dependabot[bot]
0675cc8fdb chore(deps): bump boto3 from 1.34.156 to 1.34.157 (#4719)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-12 08:02:17 -04:00
dependabot[bot]
ed27491118 chore(deps): bump trufflesecurity/trufflehog from 3.81.7 to 3.81.8 (#4720)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-12 07:59:29 -04:00
dependabot[bot]
abb28af68e chore(deps): bump aiohttp from 3.9.5 to 3.10.2 (#4713)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-09 14:52:42 -04:00
Rubén De la Torre Vico
18885d0cd7 chore(ec2): Change security groups to dict (#4700) 2024-08-09 14:40:34 -04:00
Pedro Martín
ca56ac4e77 feat(azure): add test_connection method (#4615)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-08-09 14:38:12 -04:00
Pedro Martín
8f2b39b3ce fix(iam): handle no arn serial numbers for MFA devices (#4697)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-08-09 12:57:34 -04:00
Pepe Fagoaga
761eebac1e feat(aws): Add a test_connection method (#4563)
Co-authored-by: pedrooot <pedromarting3@gmail.com>
2024-08-09 12:01:40 +02:00
Pepe Fagoaga
8bdff0d681 fix(backport): Workaround not to fail if no backport is needed (#4707) 2024-08-09 09:56:02 +02:00
dependabot[bot]
55e0656375 chore(deps): bump botocore from 1.34.156 to 1.34.157 (#4704)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-09 07:56:26 +02:00
dependabot[bot]
e666b66ec0 chore(deps): bump boto3 from 1.34.154 to 1.34.156 (#4698)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-08 11:54:40 +02:00
Pedro Martín
cdb4f73803 docs(developer-guide): add info about docstrings (#4701) 2024-08-08 11:41:32 +02:00
dependabot[bot]
b4c7345124 chore(deps): bump botocore from 1.34.155 to 1.34.156 (#4694)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-08 10:49:13 +02:00
dependabot[bot]
af8cc37eea chore(deps): bump trufflesecurity/trufflehog from 3.81.6 to 3.81.7 (#4693)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-08 10:48:41 +02:00
Sergio Garcia
28bed98ee4 chore(version): update version logic in Prowler (#4654) 2024-08-07 18:15:10 +02:00
Sergio Garcia
3d39eb7db6 chore(backport): update backport PR title (#4686)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-08-07 16:59:47 +02:00
Pepe Fagoaga
2c5f2e9f5c chore(labeler): Run also for v4.* (#4687) 2024-08-07 10:30:49 -04:00
Hugo Pereira Brito
5ce54e5605 feat(aws): Add new S3 check for public access block configuration in access points (#4608) 2024-08-07 10:23:12 -04:00
Daniel Barranquero
6c029a9d7d feat(aws): Add new KMS check to prevent unintentional key deletion (#4595)
Co-authored-by: Sergio <sergio@prowler.com>
2024-08-07 09:15:22 -04:00
Sergio Garcia
96f893c3ec chore(version): update master version (#4681) 2024-08-07 14:53:45 +02:00
Pepe Fagoaga
f0047cf5a7 chore(actions): Run for v4.* branch (#4682) 2024-08-07 14:11:38 +02:00
Mario Rodriguez Lopez
1b18aef0f0 feat(acm): Add new check for insecure algorithms in certificates (#4551)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-08-07 08:00:24 -04:00
dependabot[bot]
80e13bffa2 chore(deps): bump botocore from 1.34.154 to 1.34.155 (#4665)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-07 11:33:45 +02:00
dependabot[bot]
384d16749c chore(deps): bump azure-storage-blob from 12.21.0 to 12.22.0 (#4664)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-07 11:01:14 +02:00
Pepe Fagoaga
9c4ba1183b chore(regions): Update labels for backporting (#4678) 2024-08-07 11:00:41 +02:00
Pepe Fagoaga
40a88e07d1 chore(backport): Automate all the things! (#4669) 2024-08-07 10:40:14 +02:00
dependabot[bot]
692ed760e0 chore(deps): bump google-api-python-client from 2.139.0 to 2.140.0 (#4666)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-07 10:26:48 +02:00
dependabot[bot]
6c3e451f32 chore(deps): bump boto3 from 1.34.152 to 1.34.154 (#4663)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-07 09:01:28 +02:00
dependabot[bot]
24f511b567 chore(deps): bump trufflesecurity/trufflehog from 3.81.5 to 3.81.6 (#4662)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-07 09:00:56 +02:00
Sergio Garcia
89c6652bd6 fix(tags): handle AWS dictionary type tags (#4656) 2024-08-07 08:34:57 +02:00
dependabot[bot]
8aca456285 chore(deps-dev): bump moto from 5.0.11 to 5.0.12 (#4642)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: Sergio <sergio@prowler.com>
2024-08-06 14:59:29 -04:00
Rubén De la Torre Vico
824a465667 test(awslambda): Cover possible checks with moto instead MagicMock (#4609)
Co-authored-by: Sergio <sergio@prowler.com>
2024-08-06 13:40:51 -04:00
Amogh Bantwal
086c203e6b feat(aws): Add check to make sure EKS clusters have a supported version (#4604)
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-08-06 13:40:05 -04:00
dependabot[bot]
f746a9e742 chore(deps-dev): bump flake8 from 7.1.0 to 7.1.1 (#4643)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-06 09:19:05 +02:00
Pepe Fagoaga
90810d9098 chore: change SaaS for Prowler (#4651) 2024-08-06 08:56:04 +02:00
Pepe Fagoaga
75b3f52309 docs(mutelist): Add service_* documentation (#4650) 2024-08-06 08:55:55 +02:00
dependabot[bot]
8ecb4696d4 chore(deps): bump botocore from 1.34.152 to 1.34.154 (#4641)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-06 08:44:43 +02:00
dependabot[bot]
7b22c9c97b chore(deps): bump trufflesecurity/trufflehog from 3.81.4 to 3.81.5 (#4645)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-06 08:24:27 +02:00
dependabot[bot]
84f0542b98 chore(deps-dev): bump coverage from 7.6.0 to 7.6.1 (#4640)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-06 08:07:24 +02:00
Rubén De la Torre Vico
8faa40dfb6 feat(opensearch): Add domain inside VPC case for public domain check (#4570) 2024-08-05 13:04:49 -04:00
Pepe Fagoaga
47f7555d05 refactor(mutelist): Remove re.match and improve docs (#4637)
Co-authored-by: Sergio <sergio@prowler.com>
2024-08-05 12:59:30 -04:00
Pedro Martín
96d9cbd8af fix(gcp): check cloudsql sslMode (#4635) 2024-08-05 12:12:00 -04:00
Pedro Martín
c8bc54aa48 fix(gcp): check next rotation time in KMS keys (#4633) 2024-08-05 11:31:38 -04:00
Rubén De la Torre Vico
fad0b8995a chore(aws): Convert ELB and ELBv2 attributes to dictionaries (#4575)
Co-authored-by: Sergio <sergio@prowler.com>
2024-08-05 11:14:19 -04:00
dependabot[bot]
d4b6fa27e2 chore(deps): bump msgraph-sdk from 1.5.3 to 1.5.4 (#4629)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-05 15:02:49 +02:00
dependabot[bot]
a37723fd32 chore(deps): bump boto3 from 1.34.151 to 1.34.152 (#4628)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-05 08:14:55 -04:00
Pedro Martín
fc5eefe532 fix(scan_test): change resource_tags to a dict (#4631) 2024-08-05 10:02:41 +02:00
Pedro Martín
ffd9b2a2f6 chore(scan-class): add new scan class (#4564)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-08-05 08:21:13 +02:00
dependabot[bot]
112f48ac08 chore(deps-dev): bump black from 24.4.2 to 24.8.0 (#4627)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-05 08:19:54 +02:00
Sergio Garcia
95ec3d91b4 refactor(tags): convert tags to a dictionary (#4598)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2024-08-05 08:17:43 +02:00
Sergio Garcia
b0709d08cd fix(gcp): use KMS key id in checks (#4610) 2024-08-05 08:16:56 +02:00
dependabot[bot]
a0e3cb87a4 chore(deps): bump trufflesecurity/trufflehog from 3.80.5 to 3.81.4 (#4625)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-05 08:15:49 +02:00
Pepe Fagoaga
1b9cc9e3db chore(regions_update): Changes in regions for AWS services. (#4630)
Co-authored-by: sergargar <38561120+sergargar@users.noreply.github.com>
2024-08-05 08:14:49 +02:00
Jon Young
d9fb67bc43 docs(Tutorials): include volume option when running dashboard in docker (#4620) 2024-08-05 08:06:24 +02:00
dependabot[bot]
a79022dce8 chore(deps): bump botocore from 1.34.151 to 1.34.152 (#4611)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-02 08:03:55 +02:00
dependabot[bot]
0a2ce690f4 chore(deps): bump trufflesecurity/trufflehog from 3.80.4 to 3.80.5 (#4612)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-08-02 07:57:22 +02:00
Pedro Martín
bbc51114b0 fix(sns): add condition to sns topics (#4498)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
2024-08-01 11:54:36 -04:00
1726 changed files with 94857 additions and 15261 deletions

.backportrc.json (new file, 14 lines)

@@ -0,0 +1,14 @@
{
"repoOwner": "prowler-cloud",
"repoName": "prowler",
"targetPRLabels": [
"backport"
],
"sourcePRLabels": [
"was-backported"
],
"copySourcePRLabels": false,
"copySourcePRReviewers": true,
"prTitle": "{{sourcePullRequest.title}}",
"commitConflicts": true
}
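
For reference, this config is read by the sorenlouv/backport CLI, the same tool the new Automatic Backport workflow below wraps. A minimal sketch of a manual run, assuming the backport package from npm; the PR number and token are placeholders, not values from this repository:

    # Hypothetical manual backport of a merged PR using .backportrc.json (placeholders only)
    npx backport --pr 1234 --accessToken "$GITHUB_TOKEN"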


@@ -20,6 +20,9 @@ updates:
interval: "daily"
open-pull-requests-limit: 10
target-branch: master
+ labels:
+ - "dependencies"
+ - "github_actions"
- package-ecosystem: "pip"
directory: "/"
@@ -38,5 +41,6 @@ updates:
open-pull-requests-limit: 10
target-branch: v3
labels:
- "dependencies"
- "github_actions"
- "v3"


@@ -14,6 +14,7 @@ Please include a summary of the change and which issue is fixed. List any depend
- If so, do we need to update permissions for the provider? Please review this carefully.
- [ ] Review if the code is being covered by tests.
- [ ] Review if code is being documented following this specification https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings
+ - [ ] Review if backport is needed.
### License

.github/workflows/backport.yml (new vendored file, 42 lines)

@@ -0,0 +1,42 @@
name: Automatic Backport
on:
pull_request_target:
branches: ['master']
types: ['labeled', 'closed']
jobs:
backport:
name: Backport PR
if: github.event.pull_request.merged == true && !(contains(github.event.pull_request.labels.*.name, 'backport'))
runs-on: ubuntu-latest
permissions:
id-token: write
pull-requests: write
contents: write
steps:
# Workaround not to fail the workflow if the PR does not need a backport
# https://github.com/sorenlouv/backport-github-action/issues/127#issuecomment-2258561266
- name: Check for backport labels
id: check_labels
run: |-
labels='${{ toJSON(github.event.pull_request.labels.*.name) }}'
echo "$labels"
matched=$(echo "${labels}" | jq '. | map(select(startswith("backport-to-"))) | length')
echo "matched=$matched"
echo "matched=$matched" >> $GITHUB_OUTPUT
- name: Backport Action
if: fromJSON(steps.check_labels.outputs.matched) > 0
uses: sorenlouv/backport-github-action@v9.5.1
with:
github_token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
auto_backport_label_prefix: backport-to-
- name: Info log
if: ${{ success() && fromJSON(steps.check_labels.outputs.matched) > 0 }}
run: cat ~/.backport/backport.info.log
- name: Debug log
if: ${{ failure() && fromJSON(steps.check_labels.outputs.matched) > 0 }}
run: cat ~/.backport/backport.debug.log
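
The check_labels step above gates the whole job: it counts labels that start with backport-to- and only runs the action when at least one matches. The same jq expression can be tried locally; the label array below is a made-up sample, not repository data:

    # Sample input mirroring toJSON(github.event.pull_request.labels.*.name)
    labels='["backport-to-v4.5","status/waiting-for-revision"]'
    matched=$(echo "${labels}" | jq '. | map(select(startswith("backport-to-"))) | length')
    echo "matched=${matched}"   # prints matched=1, so the Backport Action step would run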


@@ -16,9 +16,9 @@ jobs:
name: Documentation Link
runs-on: ubuntu-latest
steps:
- - name: Leave PR comment with the SaaS Documentation URI
+ - name: Leave PR comment with the Prowler Documentation URI
uses: peter-evans/create-or-update-comment@v4
with:
issue-number: ${{ env.PR_NUMBER }}
body: |
- You can check the documentation for this PR here -> [SaaS Documentation](https://prowler-prowler-docs--${{ env.PR_NUMBER }}.com.readthedocs.build/projects/prowler-open-source/en/${{ env.PR_NUMBER }}/)
+ You can check the documentation for this PR here -> [Prowler Documentation](https://prowler-prowler-docs--${{ env.PR_NUMBER }}.com.readthedocs.build/projects/prowler-open-source/en/${{ env.PR_NUMBER }}/)


@@ -43,7 +43,7 @@ jobs:
runs-on: ubuntu-latest
outputs:
prowler_version_major: ${{ steps.get-prowler-version.outputs.PROWLER_VERSION_MAJOR }}
- prowler_version: ${{ steps.update-prowler-version.outputs.PROWLER_VERSION }}
+ prowler_version: ${{ steps.get-prowler-version.outputs.PROWLER_VERSION }}
env:
POETRY_VIRTUALENVS_CREATE: "false"
@@ -65,6 +65,8 @@ jobs:
id: get-prowler-version
run: |
PROWLER_VERSION="$(poetry version -s 2>/dev/null)"
echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_ENV}"
echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_OUTPUT}"
# Store prowler version major just for the release
PROWLER_VERSION_MAJOR="${PROWLER_VERSION%%.*}"
@@ -89,15 +91,6 @@ jobs:
;;
esac
- - name: Update Prowler version (release)
- id: update-prowler-version
- if: github.event_name == 'release'
- run: |
- PROWLER_VERSION="${{ github.event.release.tag_name }}"
- poetry version "${PROWLER_VERSION}"
- echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_ENV}"
- echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_OUTPUT}"
- name: Login to DockerHub
uses: docker/login-action@v3
with:
@@ -160,7 +153,7 @@ jobs:
run: |
curl https://api.github.com/repos/${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}/dispatches \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${{ secrets.ACCESS_TOKEN }}" \
-H "Authorization: Bearer ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
--data '{"event_type":"dispatch","client_payload":{"version":"v3-latest", "tag": "${{ env.LATEST_COMMIT_HASH }}"}}'
@@ -169,6 +162,6 @@ jobs:
run: |
curl https://api.github.com/repos/${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}/dispatches \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${{ secrets.ACCESS_TOKEN }}" \
-H "Authorization: Bearer ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
--data '{"event_type":"dispatch","client_payload":{"version":"release", "tag":"${{ needs.container-build-push.outputs.prowler_version }}"}}'


@@ -13,10 +13,10 @@ name: "CodeQL"
on:
push:
branches: [ "master", "v3" ]
branches: [ "master", "v3", "v4.*" ]
pull_request:
# The branches below must be a subset of the branches above
branches: [ "master", "v3" ]
branches: [ "master", "v3", "v4.*" ]
schedule:
- cron: '00 12 * * *'


@@ -11,7 +11,7 @@ jobs:
with:
fetch-depth: 0
- name: TruffleHog OSS
- uses: trufflesecurity/trufflehog@3.80.4
+ uses: trufflesecurity/trufflehog@v3.83.4
with:
path: ./
base: ${{ github.event.repository.default_branch }}


@@ -5,6 +5,7 @@ on:
branches:
- "master"
- "v3"
- "v4.*"
jobs:
labeler:


@@ -5,10 +5,12 @@ on:
branches:
- "master"
- "v3"
- "v4.*"
pull_request:
branches:
- "master"
- "v3"
- "v4.*"
jobs:
build:
runs-on: ubuntu-latest
@@ -20,7 +22,7 @@ jobs:
- uses: actions/checkout@v4
- name: Test if changes are in not ignored paths
id: are-non-ignored-files-changed
- uses: tj-actions/changed-files@v44
+ uses: tj-actions/changed-files@v45
with:
files: ./**
files_ignore: |
@@ -29,6 +31,7 @@ jobs:
docs/**
permissions/**
mkdocs.yml
+ .backportrc.json
- name: Install poetry
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |


@@ -8,8 +8,6 @@ env:
RELEASE_TAG: ${{ github.event.release.tag_name }}
PYTHON_VERSION: 3.11
CACHE: "poetry"
- # TODO: create a bot user for this kind of tasks, like prowler-bot
- GIT_COMMITTER_EMAIL: "sergio@prowler.com"
jobs:
release-prowler-job:
@@ -40,7 +38,6 @@ jobs:
- name: Install dependencies
run: |
pipx install poetry
- pipx inject poetry poetry-bumpversion
- name: Setup Python
uses: actions/setup-python@v5
@@ -48,34 +45,6 @@ jobs:
python-version: ${{ env.PYTHON_VERSION }}
cache: ${{ env.CACHE }}
- - name: Update Poetry and config version
- run: |
- poetry version ${{ env.RELEASE_TAG }}
- - name: Import GPG key
- uses: crazy-max/ghaction-import-gpg@v6
- with:
- gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
- passphrase: ${{ secrets.GPG_PASSPHRASE }}
- git_user_signingkey: true
- git_commit_gpgsign: true
- - name: Push updated version to the release tag
- run: |
- # Configure Git
- git config user.name "github-actions"
- git config user.email "${{ env.GIT_COMMITTER_EMAIL }}"
- # Add the files with the version changed
- git add prowler/config/config.py pyproject.toml
- git commit -m "chore(release): ${{ env.RELEASE_TAG }}" --no-verify -S
- # Replace the tag with the version updated
- git tag -fa ${{ env.RELEASE_TAG }} -m "chore(release): ${{ env.RELEASE_TAG }}" --sign
- # Push the tag
- git push -f origin ${{ env.RELEASE_TAG }}
- name: Build Prowler package
run: |
poetry build


@@ -50,13 +50,13 @@ jobs:
# Create pull request
- name: Create Pull Request
- uses: peter-evans/create-pull-request@v6
+ uses: peter-evans/create-pull-request@v7
with:
- token: ${{ secrets.PROWLER_ACCESS_TOKEN }}
- commit-message: "feat(regions_update): Update regions for AWS services."
+ token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
+ commit-message: "feat(regions_update): Update regions for AWS services"
branch: "aws-services-regions-updated-${{ github.sha }}"
labels: "status/waiting-for-revision, severity/low, provider/aws, backport-v3"
title: "chore(regions_update): Changes in regions for AWS services."
labels: "status/waiting-for-revision, severity/low, provider/aws, backport-to-v3"
title: "chore(regions_update): Changes in regions for AWS services"
body: |
### Description


@@ -85,7 +85,7 @@ repos:
# For running trufflehog in docker, use the following entry instead:
# entry: bash -c 'docker run -v "$(pwd):/workdir" -i --rm trufflesecurity/trufflehog:latest git file:///workdir --only-verified --fail'
language: system
stages: ["commit", "push"]
stages: ["pre-commit", "pre-push"]
- id: bandit
name: bandit
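
The stages rename follows pre-commit's newer stage identifiers, where commit and push became pre-commit and pre-push. Assuming the hook id for this entry is trufflehog (the id line is not shown in the hunk), it could be exercised at a given stage with:

    # Run the hook at the pre-push stage on demand (hook id is an assumption)
    pre-commit run trufflehog --hook-stage pre-push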


@@ -2,29 +2,28 @@ FROM python:3.12-alpine
LABEL maintainer="https://github.com/prowler-cloud/prowler"
- # Update system dependencies
+ # Update system dependencies and install essential tools
#hadolint ignore=DL3018
RUN apk --no-cache upgrade && apk --no-cache add curl
RUN apk --no-cache upgrade && apk --no-cache add curl git g++
# Create nonroot user
# Create non-root user
RUN mkdir -p /home/prowler && \
echo 'prowler:x:1000:1000:prowler:/home/prowler:' > /etc/passwd && \
echo 'prowler:x:1000:' > /etc/group && \
chown -R prowler:prowler /home/prowler
USER prowler
# Copy necessary files
WORKDIR /home/prowler
COPY prowler/ /home/prowler/prowler/
COPY dashboard/ /home/prowler/dashboard/
COPY pyproject.toml /home/prowler
COPY README.md /home/prowler
# Install dependencies
# Install Python dependencies
ENV HOME='/home/prowler'
ENV PATH="$HOME/.local/bin:$PATH"
#hadolint ignore=DL3013
RUN pip install --no-cache-dir --upgrade pip && \
RUN pip install --no-cache-dir --upgrade pip setuptools wheel && \
pip install --no-cache-dir .
# Remove deprecated dash dependencies

View File

@@ -12,7 +12,7 @@
<p align="center">
<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog"><img width="30" height="30" alt="Prowler community on Slack" src="https://github.com/prowler-cloud/prowler/assets/38561120/3c8b4ec5-6849-41a5-b5e1-52bbb94af73a"></a>
<br>
<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog">Join our Prowler community!</a>
<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-2oinmgmw6-cl7gOrljSEqo_aoripVPFA">Join our Prowler community!</a>
</p>
<hr>
<p align="center">
@@ -63,9 +63,9 @@ It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, Fe
| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) |
|---|---|---|---|---|
| AWS | 385 | 67 -> `prowler aws --list-services` | 28 -> `prowler aws --list-compliance` | 7 -> `prowler aws --list-categories` |
| GCP | 77 | 13 -> `prowler gcp --list-services` | 1 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories`|
| Azure | 135 | 16 -> `prowler azure --list-services` | 2 -> `prowler azure --list-compliance` | 2 -> `prowler azure --list-categories` |
| AWS | 553 | 77 -> `prowler aws --list-services` | 30 -> `prowler aws --list-compliance` | 9 -> `prowler aws --list-categories` |
| GCP | 77 | 13 -> `prowler gcp --list-services` | 2 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories`|
| Azure | 138 | 17 -> `prowler azure --list-services` | 3 -> `prowler azure --list-compliance` | 2 -> `prowler azure --list-categories` |
| Kubernetes | 83 | 7 -> `prowler kubernetes --list-services` | 1 -> `prowler kubernetes --list-compliance` | 7 -> `prowler kubernetes --list-categories` |
# 💻 Installation

View File

@@ -12,7 +12,11 @@ Originally based on [org-multi-account](https://github.com/prowler-cloud/prowler
## Architecture Explanation
The solution is designed to be very simple. Prowler is run via an ECS Task definition that launches a single Fargate container. This Task Definition is executed on a schedule using an EventBridge Rule.
## Prerequisites
This solution assumes that you have a VPC architecture with two redundant subnets that can reach the AWS API endpoints (e.g. PrivateLink, NAT Gateway, etc.).
## CloudFormation Templates
@@ -59,9 +63,9 @@ The logs that are generated and sent to Cloudwatch are error logs, and assessmen
## Instructions
1. Create a Private ECR Repository in the account that will host the Prowler container. The Audit account is recommended, but any account can be used.
2. Configure the .awsvariables file. Note the ROLE name chosen as it will be the CrossAccountRole.
3. Follow the steps from "View Push Commands" to build and upload the container image. You need to have Docker and AWS CLI installed, and use the cli to login to the account first. After upload note the Image URI, as it is required for the CF-Prowler-ECS template.
3. Follow the steps from "View Push Commands" to build and upload the container image. Substitute step 2 with the build command provided in the Dockerfile. You need to have Docker and the AWS CLI installed, and use the CLI to log in to the account first. After upload, note the Image URI, as it is required for the CF-Prowler-ECS template. Pay attention to the target architecture when running the docker build command: a common mistake is not specifying the architecture and then building on Apple silicon, in which case your task will fail with *exec /home/prowler/.local/bin/prowler: exec format error* (see the build sketch after this list).
4. Make sure SecurityHub is enabled in every account in AWS Organizations, and that the SecurityHub integration is enabled as explained in [Prowler - Security Hub Integration](https://github.com/prowler-cloud/prowler#security-hub-integration)
5. Deploy **CF-Prowler-CrossAccountRole.yml** in the Master Account as a single stack. You will have to choose the CrossAccountRole name (ProwlerXA-Role by default) and the ProwlerTaskRoleName (ProwlerECSTask-Role by default)
6. Deploy **CF-Prowler-CrossAccountRole.yml** in every Member Account as a StackSet. Choose the same CrossAccountName and ProwlerTaskRoleName as the previous step.
7. Deploy **CF-Prowler-IAM.yml** in the account that will host the Prowler container (the same from step 1). The following template parameters must be provided:
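As referenced in step 3, here is a minimal sketch of the build and push with an explicit target platform; the account ID, region and repository name below are illustrative placeholders, not values from this guide:
```sh
# Log in to the private ECR registry (replace the account ID and region with yours)
aws ecr get-login-password --region us-east-1 | docker login --username AWS --password-stdin 123456789012.dkr.ecr.us-east-1.amazonaws.com
# Build for the Fargate task architecture explicitly, even when building on Apple silicon
docker build --platform linux/amd64 -t prowler-ecs .
# Tag and push the image; note the resulting Image URI for the CF-Prowler-ECS template
docker tag prowler-ecs:latest 123456789012.dkr.ecr.us-east-1.amazonaws.com/prowler-ecs:latest
docker push 123456789012.dkr.ecr.us-east-1.amazonaws.com/prowler-ecs:latest
```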
@@ -91,4 +95,4 @@ If you find permission errors in the CloudWatch logs, the culprit might be a [Se
## Upgrading Prowler
Prowler version is controlled by the PROWLERVER argument in the Dockerfile, change it to the desired version and follow the ECR Push Commands to update the container image.
Old images can be deleted from the ECR Repository after the new image is confirmed to work. They will show as "untagged" as only one image can hold the "latest" tag.

View File

@@ -68,7 +68,7 @@ for accountId in ${ACCOUNTS_IN_ORGS}; do
# Run Prowler
echo -e "Assessing AWS Account: ${accountId}, using Role: ${ROLE} on $(date)"
# Pipe stdout to /dev/null to reduce unnecessary Cloudwatch logs
prowler aws -R arn:"${PARTITION}":iam::"${accountId}":role/"${ROLE}" -q -S -f "${REGION}" > /dev/null
prowler aws -R arn:"${PARTITION}":iam::"${accountId}":role/"${ROLE}" --security-hub --send-sh-only-fails -f "${REGION}" > /dev/null
TOTAL_SEC=$((SECONDS - START_TIME))
printf "Completed AWS Account: ${accountId} in %02dh:%02dm:%02ds" $((TOTAL_SEC / 3600)) $((TOTAL_SEC % 3600 / 60)) $((TOTAL_SEC % 60))
echo ""

View File

@@ -60,24 +60,42 @@ Resources:
Effect: Allow
Resource: "*"
Action:
- ds:ListAuthorizedApplications
- account:Get*
- appstream:Describe*
- appstream:List*
- backup:List*
- cloudtrail:GetInsightSelectors
- codeartifact:List*
- codebuild:BatchGet*
- cognito-idp:GetUserPoolMfaConfig
- dlm:Get*
- drs:Describe*
- ds:Describe*
- ds:Get*
- ds:List*
- dynamodb:GetResourcePolicy
- ec2:GetEbsEncryptionByDefault
- ec2:GetSnapshotBlockPublicAccessState
- ec2:GetInstanceMetadataDefaults
- ecr:Describe*
- ecr:GetRegistryScanningConfiguration
- elasticfilesystem:DescribeBackupPolicy
- glue:GetConnections
- glue:GetSecurityConfiguration
- glue:GetSecurityConfiguration*
- glue:SearchTables
- lambda:GetFunction
- lambda:GetFunction*
- logs:FilterLogEvents
- lightsail:GetRelationalDatabases
- macie2:GetMacieSession
- s3:GetAccountPublicAccessBlock
- shield:DescribeProtection
- shield:GetSubscriptionState
- ssm:GetDocument
- ssm-incidents:List*
- support:Describe*
- tag:GetTagKeys
- PolicyName: Prowler-Security-Hub
PolicyDocument:
Version: 2012-10-17
Statement:
- wellarchitected:List*
- Sid: AllowProwlerSecurityHub
Effect: Allow
Resource: "*"

View File

@@ -62,7 +62,7 @@ Resources:
awslogs-stream-prefix: ecs
Cpu: 1024
ExecutionRoleArn: !Ref ECSExecutionRole
Memory: 2048
Memory: 8192
NetworkMode: awsvpc
TaskRoleArn: !Ref ProwlerTaskRole
Family: SecurityHubProwlerTask

View File

@@ -97,9 +97,15 @@ Outputs:
ECSExecutionRoleARN:
Description: ARN of the ECS Task Execution Role
Value: !GetAtt ECSExecutionRole.Arn
Export:
Name: ECSExecutionRoleArn
ProwlerTaskRoleARN:
Description: ARN of the ECS Prowler Task Role
Value: !GetAtt ProwlerTaskRole.Arn
Export:
Name: ProwlerTaskRoleArn
ECSEventRoleARN:
Description: ARN of the Eventbridge Task Role
Value: !GetAtt ECSEventRole.Arn
Export:
Name: ECSEventRoleARN
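For reference, these `Export` names allow other stacks to consume the role ARNs without hardcoding them; a minimal sketch of importing one with `!ImportValue` (the consuming resource is hypothetical, only the export name comes from this template):
```yaml
Resources:
  ExampleTaskDefinition:
    Type: AWS::ECS::TaskDefinition
    Properties:
      Family: example-task
      # Resolves to the value exported above as ECSExecutionRoleArn
      ExecutionRoleArn: !ImportValue ECSExecutionRoleArn
```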

View File

@@ -2223,3 +2223,232 @@ def get_section_containers_ens(data, section_1, section_2, section_3, section_4)
section_containers.append(section_container)
return html.Div(section_containers, className="compliance-data-layout")
# This function extracts and compares up to two numeric values, ensuring correct sorting for version-like strings.
def extract_numeric_values(value):
numbers = re.findall(r"\d+", str(value))
if len(numbers) >= 2:
return int(numbers[0]), int(numbers[1])
elif len(numbers) == 1:
return int(numbers[0]), 0
return 0, 0
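# For example (illustrative): extract_numeric_values("2.10") -> (2, 10) while
# extract_numeric_values("2.2") -> (2, 2), so "2.2" sorts before "2.10" numerically
# rather than in the lexicographic order plain string sorting would produce.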
def get_section_containers_kisa_ismsp(data, section_1, section_2):
data["STATUS"] = data["STATUS"].apply(map_status_to_icon)
data[section_1] = data[section_1].astype(str)
data[section_2] = data[section_2].astype(str)
data.sort_values(
by=section_1,
key=lambda x: x.map(extract_numeric_values),
ascending=True,
inplace=True,
)
findings_counts_section = (
data.groupby([section_2, "STATUS"]).size().unstack(fill_value=0)
)
findings_counts_name = (
data.groupby([section_1, "STATUS"]).size().unstack(fill_value=0)
)
section_containers = []
for name in data[section_1].unique():
success_name = (
findings_counts_name.loc[name, pass_emoji]
if pass_emoji in findings_counts_name.columns
else 0
)
failed_name = (
findings_counts_name.loc[name, fail_emoji]
if fail_emoji in findings_counts_name.columns
else 0
)
fig_name = go.Figure(
data=[
go.Bar(
name="Failed",
x=[failed_name],
y=[""],
orientation="h",
marker=dict(color="#e77676"),
width=[0.8],
),
go.Bar(
name="Success",
x=[success_name],
y=[""],
orientation="h",
marker=dict(color="#45cc6e"),
width=[0.8],
),
]
)
fig_name.update_layout(
barmode="stack",
margin=dict(l=10, r=10, t=10, b=10),
paper_bgcolor="rgba(0,0,0,0)",
plot_bgcolor="rgba(0,0,0,0)",
showlegend=False,
width=350,
height=30,
xaxis=dict(showticklabels=False, showgrid=False, zeroline=False),
yaxis=dict(showticklabels=False, showgrid=False, zeroline=False),
annotations=[
dict(
x=success_name + failed_name,
y=0,
xref="x",
yref="y",
text=str(success_name),
showarrow=False,
font=dict(color="#45cc6e", size=14),
xanchor="left",
yanchor="middle",
),
dict(
x=0,
y=0,
xref="x",
yref="y",
text=str(failed_name),
showarrow=False,
font=dict(color="#e77676", size=14),
xanchor="right",
yanchor="middle",
),
],
)
graph_name = dcc.Graph(
figure=fig_name, config={"staticPlot": True}, className="info-bar"
)
graph_div = html.Div(graph_name, className="graph-section")
direct_internal_items = []
for section in data[data[section_1] == name][section_2].unique():
specific_data = data[
(data[section_1] == name) & (data[section_2] == section)
]
success_section = (
findings_counts_section.loc[section, pass_emoji]
if pass_emoji in findings_counts_section.columns
else 0
)
failed_section = (
findings_counts_section.loc[section, fail_emoji]
if fail_emoji in findings_counts_section.columns
else 0
)
data_table = dash_table.DataTable(
data=specific_data.to_dict("records"),
columns=[
{"name": i, "id": i}
for i in ["CHECKID", "STATUS", "REGION", "ACCOUNTID", "RESOURCEID"]
],
style_table={"overflowX": "auto"},
style_as_list_view=True,
style_cell={"textAlign": "left", "padding": "5px"},
)
fig_section = go.Figure(
data=[
go.Bar(
name="Failed",
x=[failed_section],
y=[""],
orientation="h",
marker=dict(color="#e77676"),
),
go.Bar(
name="Success",
x=[success_section],
y=[""],
orientation="h",
marker=dict(color="#45cc6e"),
),
]
)
fig_section.update_layout(
barmode="stack",
margin=dict(l=10, r=10, t=10, b=10),
paper_bgcolor="rgba(0,0,0,0)",
plot_bgcolor="rgba(0,0,0,0)",
showlegend=False,
width=350,
height=30,
xaxis=dict(showticklabels=False, showgrid=False, zeroline=False),
yaxis=dict(showticklabels=False, showgrid=False, zeroline=False),
annotations=[
dict(
x=success_section + failed_section,
y=0,
xref="x",
yref="y",
text=str(success_section),
showarrow=False,
font=dict(color="#45cc6e", size=14),
xanchor="left",
yanchor="middle",
),
dict(
x=0,
y=0,
xref="x",
yref="y",
text=str(failed_section),
showarrow=False,
font=dict(color="#e77676", size=14),
xanchor="right",
yanchor="middle",
),
],
)
graph_section = dcc.Graph(
figure=fig_section,
config={"staticPlot": True},
className="info-bar-child",
)
graph_div_section = html.Div(graph_section, className="graph-section-req")
internal_accordion_item = dbc.AccordionItem(
title=section,
children=[html.Div([data_table], className="inner-accordion-content")],
)
internal_section_container = html.Div(
[
graph_div_section,
dbc.Accordion(
[internal_accordion_item], start_collapsed=True, flush=True
),
],
className="accordion-inner--child",
)
direct_internal_items.append(internal_section_container)
accordion_item = dbc.AccordionItem(
title=f"{name}", children=direct_internal_items
)
section_container = html.Div(
[
graph_div,
dbc.Accordion([accordion_item], start_collapsed=True, flush=True),
],
className="accordion-inner",
)
section_containers.append(section_container)
return html.Div(section_containers, className="compliance-data-layout")

View File

@@ -0,0 +1,25 @@
import warnings
from dashboard.common_methods import get_section_containers_kisa_ismsp
warnings.filterwarnings("ignore")
def get_table(data):
aux = data[
[
"REQUIREMENTS_ID",
"REQUIREMENTS_ATTRIBUTES_SUBDOMAIN",
"REQUIREMENTS_ATTRIBUTES_SECTION",
# "REQUIREMENTS_DESCRIPTION",
"CHECKID",
"STATUS",
"REGION",
"ACCOUNTID",
"RESOURCEID",
]
].copy()
return get_section_containers_kisa_ismsp(
aux, "REQUIREMENTS_ATTRIBUTES_SUBDOMAIN", "REQUIREMENTS_ATTRIBUTES_SECTION"
)

View File

@@ -0,0 +1,25 @@
import warnings
from dashboard.common_methods import get_section_containers_kisa_ismsp
warnings.filterwarnings("ignore")
def get_table(data):
aux = data[
[
"REQUIREMENTS_ID",
"REQUIREMENTS_ATTRIBUTES_SUBDOMAIN",
"REQUIREMENTS_ATTRIBUTES_SECTION",
# "REQUIREMENTS_DESCRIPTION",
"CHECKID",
"STATUS",
"REGION",
"ACCOUNTID",
"RESOURCEID",
]
].copy()
return get_section_containers_kisa_ismsp(
aux, "REQUIREMENTS_ATTRIBUTES_SUBDOMAIN", "REQUIREMENTS_ATTRIBUTES_SECTION"
)

View File

@@ -222,7 +222,7 @@ class ec2_securitygroup_with_many_ingress_egress_rules(Check):
max_security_group_rules = ec2_client.audit_config.get(
"max_security_group_rules", 50
)
for security_group in ec2_client.security_groups:
for security_group_arn, security_group in ec2_client.security_groups.items():
```
```yaml title="config.yaml"
@@ -272,7 +272,7 @@ Each Prowler check has metadata associated which is stored at the same level of
# Severity holds the check's severity, always in lowercase (critical, high, medium, low or informational)
"Severity": "critical",
# ResourceType only for AWS, holds the type from here
# https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-template-resource-type-ref.html
# https://docs.aws.amazon.com/securityhub/latest/userguide/asff-resources.html
"ResourceType": "Other",
# Description holds the title of the check, for now it is the same as CheckTitle
"Description": "Ensure there are no EC2 AMIs set as Public.",

View File

@@ -14,10 +14,8 @@ Once that is satisfied go ahead and clone your forked repo:
git clone https://github.com/<your-github-user>/prowler
cd prowler
```
For isolation and avoid conflicts with other environments, we recommend usage of `poetry`:
```
pip install poetry
```
For isolation and to avoid conflicts with other environments, we recommend using `poetry`, a Python dependency management tool. You can install it by following the instructions [here](https://python-poetry.org/docs/#installation).
Then install all dependencies including the ones for developers:
```
poetry install --with dev
@@ -50,6 +48,8 @@ You can see all dependencies in file `pyproject.toml`.
Moreover, you need to install the latest version of [`TruffleHog`](https://github.com/trufflesecurity/trufflehog) to check for secrets in the code. You can install it using the official installation guide [here](https://github.com/trufflesecurity/trufflehog?tab=readme-ov-file#floppy_disk-installation).
Additionally, please ensure to follow the code documentation practices outlined in this guide: [Google Python Style Guide - Comments and Docstrings](https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings).
???+ note
If you have any trouble when committing to the Prowler repository, add the `--no-verify` flag to the `git commit` command.

View File

@@ -44,7 +44,6 @@ class Provider(ABC):
Methods:
print_credentials(): Displays the provider's credentials used for auditing in the command-line interface.
setup_session(): Sets up the session for the provider.
get_output_mapping(): Returns the output mapping between the provider and the generic model.
validate_arguments(): Validates the arguments for the provider.
get_checks_to_execute_by_audit_resources(): Returns a set of checks based on the input resources to scan.
@@ -131,15 +130,6 @@ class Provider(ABC):
"""
raise NotImplementedError()
@abstractmethod
def get_output_mapping(self) -> dict:
"""
get_output_mapping returns the output mapping between the provider and the generic model.
This method needs to be created in each provider.
"""
raise NotImplementedError()
def validate_arguments(self) -> None:
"""
validate_arguments validates the arguments for the provider.

View File

@@ -592,7 +592,7 @@ is following the actual format, add one function where the client is passed to b
`mock_api_<endpoint>_calls` (*endpoint* refers to the first attribute accessed after *client*).
In the example of BigQuery the function is called `mock_api_dataset_calls`, and inside this function there is an assignment to
be used in the `__get_datasets__` method in BigQuery class:
be used in the `_get_datasets` method in BigQuery class:
```python
# Mocking datasets
@@ -765,7 +765,7 @@ from tests.providers.azure.azure_fixtures import (
set_mocked_azure_provider,
)
# Function to mock the service function __get_components__, this function task is to return a possible value that real function could returns
# Function to mock the service function _get_components; this function's task is to return a possible value that the real function could return
def mock_appinsights_get_components(_):
return {
AZURE_SUBSCRIPTION_ID: {
@@ -779,12 +779,12 @@ def mock_appinsights_get_components(_):
# Patch decorator to use the mocked function instead of the function with the real API call
@patch(
"prowler.providers.azure.services.appinsights.appinsights_service.AppInsights.__get_components__",
"prowler.providers.azure.services.appinsights.appinsights_service.AppInsights._get_components",
new=mock_appinsights_get_components,
)
class Test_AppInsights_Service:
# Mandatory test for every service, this method test the instance of the client is correct
def test__get_client__(self):
def test_get_client(self):
app_insights = AppInsights(set_mocked_azure_provider())
assert (
app_insights.clients[AZURE_SUBSCRIPTION_ID].__class__.__name__
@@ -794,8 +794,8 @@ class Test_AppInsights_Service:
def test__get_subscriptions__(self):
app_insights = AppInsights(set_mocked_azure_provider())
assert app_insights.subscriptions.__class__.__name__ == "dict"
# Test for the function __get_components__, inside this client is used the mocked function
def test__get_components__(self):
# Test for the function _get_components, inside this client is used the mocked function
def test_get_components(self):
appinsights = AppInsights(set_mocked_azure_provider())
assert len(appinsights.components) == 1
assert (

View File

@@ -19,14 +19,40 @@ It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, Fe
## Quick Start
### Installation
Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/), thus can be installed using pip with `Python >= 3.9`:
Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/), thus it can be installed as a Python package with `Python >= 3.9`:
=== "Generic"
=== "pipx"
[pipx](https://pipx.pypa.io/stable/) is a tool to install Python applications in isolated environments. It is recommended to use `pipx` for a global installation.
_Requirements_:
* `Python >= 3.9`
* `Python pip >= 3.9`
* `pipx` installed: [pipx installation](https://pipx.pypa.io/stable/installation/).
* AWS, GCP, Azure and/or Kubernetes credentials
_Commands_:
``` bash
pipx install prowler
prowler -v
```
To upgrade Prowler to the latest version, run:
``` bash
pipx upgrade prowler
```
=== "pip"
???+ warning
This method is not recommended because it will modify the environment in which you choose to install it. Consider using [pipx](https://docs.prowler.com/projects/prowler-open-source/en/latest/#__tabbed_1_1) for a global installation.
_Requirements_:
* `Python >= 3.9`
* `Python pip >= 21.0.0`
* AWS, GCP, Azure and/or Kubernetes credentials
_Commands_:
@@ -36,13 +62,19 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
prowler -v
```
To upgrade Prowler to the latest version, run:
``` bash
pip install --upgrade prowler
```
=== "Docker"
_Requirements_:
* Have `docker` installed: https://docs.docker.com/get-docker/.
* AWS, GCP, Azure and/or Kubernetes credentials
* In the command below, change `-v` to your local directory path in order to access the reports.
_Commands_:
@@ -54,41 +86,21 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
--env AWS_SESSION_TOKEN toniblyx/prowler:latest
```
=== "Ubuntu"
_Requirements for Ubuntu 20.04.3 LTS_:
* AWS, GCP, Azure and/or Kubernetes credentials
* Install python 3.9 with: `sudo apt-get install python3.9`
* Remove python 3.8 to avoid conflicts if you can: `sudo apt-get remove python3.8`
* Make sure you have the python3 distutils package installed: `sudo apt-get install python3-distutils`
* To make sure you use pip for 3.9 get the get-pip script with: `curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py`
* Execute it with the proper python version: `sudo python3.9 get-pip.py`
* Now you should have pip for 3.9 ready: `pip3.9 --version`
_Commands_:
```
pip3.9 install prowler
export PATH=$PATH:/home/$HOME/.local/bin/
prowler -v
```
=== "GitHub"
_Requirements for Developers_:
* `git`
* `poetry` installed: [poetry installation](https://python-poetry.org/docs/#installation).
* AWS, GCP, Azure and/or Kubernetes credentials
* `git`, `Python >= 3.9`, `pip` and `poetry` installed (`pip install poetry`)
_Commands_:
```
git clone https://github.com/prowler-cloud/prowler
cd prowler
poetry shell
poetry install
python prowler.py -v
poetry run python prowler.py -v
```
???+ note
If you want to clone Prowler from Windows, use `git config core.longpaths true` to allow long file paths.
@@ -97,15 +109,33 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
_Requirements_:
* `Python >= 3.9`
* AWS, GCP, Azure and/or Kubernetes credentials
* Latest Amazon Linux 2 should come with Python 3.9 already installed however it may need pip. Install Python pip 3.9 with: `sudo yum install -y python3-pip`.
* Make sure setuptools for python is already installed with: `pip3 install setuptools`
_Commands_:
```
pip3.9 install prowler
export PATH=$PATH:/home/$HOME/.local/bin/
python3 -m pip install --user pipx
python3 -m pipx ensurepath
pipx install prowler
prowler -v
```
=== "Ubuntu"
_Requirements_:
* `Ubuntu 23.04` or above; if you are using an older version of Ubuntu, check [pipx installation](https://docs.prowler.com/projects/prowler-open-source/en/latest/#__tabbed_1_1) and ensure you have `Python >= 3.9`.
* `Python >= 3.9`
* AWS, GCP, Azure and/or Kubernetes credentials
_Commands_:
``` bash
sudo apt update
sudo apt install pipx
pipx ensurepath
pipx install prowler
prowler -v
```
@@ -125,7 +155,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
=== "AWS CloudShell"
After the migration of AWS CloudShell from Amazon Linux 2 to Amazon Linux 2023 [[1]](https://aws.amazon.com/about-aws/whats-new/2023/12/aws-cloudshell-migrated-al2023/) [2](https://docs.aws.amazon.com/cloudshell/latest/userguide/cloudshell-AL2023-migration.html), there is no longer a need to manually compile Python 3.9 as it's already included in AL2023. Prowler can thus be easily installed following the Generic method of installation via pip. Follow the steps below to successfully execute Prowler v4 in AWS CloudShell:
After the migration of AWS CloudShell from Amazon Linux 2 to Amazon Linux 2023 [[1]](https://aws.amazon.com/about-aws/whats-new/2023/12/aws-cloudshell-migrated-al2023/) [[2]](https://docs.aws.amazon.com/cloudshell/latest/userguide/cloudshell-AL2023-migration.html), there is no longer a need to manually compile Python 3.9 as it's already included in AL2023. Prowler can thus be easily installed following the Generic method of installation via pip. Follow the steps below to successfully execute Prowler v4 in AWS CloudShell:
_Requirements_:
@@ -133,11 +163,13 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
_Commands_:
```
```bash
sudo bash
adduser prowler
su prowler
pip install prowler
python3 -m pip install --user pipx
python3 -m pipx ensurepath
pipx install prowler
cd /tmp
prowler aws
```
@@ -153,9 +185,12 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
_Commands_:
```
pip install prowler
prowler -v
```bash
python3 -m pip install --user pipx
python3 -m pipx ensurepath
pipx install prowler
cd /tmp
prowler azure --az-cli-auth
```
## Prowler container versions

View File

@@ -4,21 +4,25 @@ Prowler allows you to do threat detection in AWS based on the CloudTrail log rec
```
prowler aws --category threat-detection
```
This comand will run these checks:
This command will run these checks:
* `cloudtrail_threat_detection_privilege_escalation`
* `cloudtrail_threat_detection_enumeration`
* `cloudtrail_threat_detection_privilege_escalation` -> Detects privilege escalation attacks.
* `cloudtrail_threat_detection_enumeration` -> Detects enumeration attacks.
* `cloudtrail_threat_detection_llm_jacking` -> Detects LLM Jacking attacks.
???+ note
Threat Detection checks will be only executed using `--category threat-detection` flag due to preformance.
Threat Detection checks will only be executed when using the `--category threat-detection` flag, due to performance.
## Config File
If you want to manage the behavior of the Threat Detection checks you can edit the `config.yaml` file in `/prowler/config`. In this file you can edit the following attributes related to Threat Detection:
* `threat_detection_privilege_escalation_threshold`: determines the percentage of actions found to decide if it is an privilege_scalation attack event, by default is 0.1 (10%)
* `threat_detection_privilege_escalation_threshold`: determines the percentage of actions found to decide if it is a privilege escalation attack event; by default it is 0.2 (20%)
* `threat_detection_privilege_escalation_minutes`: how many minutes in the past to search from now for privilege escalation attacks; by default it is 1440 minutes (24 hours)
* `threat_detection_privilege_escalation_actions`: these are the default actions related with priviledge scalation.
* `threat_detection_enumeration_threshold`: determines the percentage of actions found to decide if it is an enumeration attack event, by default is 0.1 (10%)
* `threat_detection_privilege_escalation_actions`: these are the default actions related to privilege escalation.
* `threat_detection_enumeration_threshold`: determines the percentage of actions found to decide if it is an enumeration attack event; by default it is 0.3 (30%)
* `threat_detection_enumeration_minutes`: how many minutes in the past to search from now for enumeration attacks; by default it is 1440 minutes (24 hours)
* `threat_detection_enumeration_actions`: these are the default actions related to enumeration attacks.
* `threat_detection_llm_jacking_threshold`: determines the percentage of actions found to decide if it is an LLM Jacking attack event; by default it is 0.4 (40%)
* `threat_detection_llm_jacking_minutes`: how many minutes in the past to search from now for LLM Jacking attacks; by default it is 1440 minutes (24 hours)
* `threat_detection_llm_jacking_actions`: these are the default actions related to LLM Jacking attacks.
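For instance, a minimal sketch of tuning these attributes in `config.yaml` (keys taken from the list above; the values shown simply repeat the documented defaults):
```yaml
aws:
  threat_detection_privilege_escalation_threshold: 0.2
  threat_detection_privilege_escalation_minutes: 1440
  threat_detection_enumeration_threshold: 0.3
  threat_detection_llm_jacking_threshold: 0.4
```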

View File

@@ -7,7 +7,6 @@ At the time of writing this documentation the available Azure Clouds from differ
- AzureCloud
- AzureChinaCloud
- AzureUSGovernment
- AzureGermanCloud
If you want to change the default one you must include the flag `--azure-region`, i.e.:
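For example, to target the China cloud (any of the values listed above works the same way):
```console
prowler azure --az-cli-auth --azure-region AzureChinaCloud
```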

View File

@@ -13,37 +13,57 @@ The following list includes all the AWS checks with configurable variables that
| Check Name | Value | Type |
|---------------------------------------------------------------|--------------------------------------------------|-----------------|
| `iam_user_accesskey_unused` | `max_unused_access_keys_days` | Integer |
| `iam_user_console_access_unused` | `max_console_access_days` | Integer |
| `ec2_elastic_ip_shodan` | `shodan_api_key` | String |
| `ec2_securitygroup_with_many_ingress_egress_rules` | `max_security_group_rules` | Integer |
| `ec2_instance_older_than_specific_days` | `max_ec2_instance_age_in_days` | Integer |
| `vpc_endpoint_connections_trust_boundaries` | `trusted_account_ids` | List of Strings |
| `vpc_endpoint_services_allowed_principals_trust_boundaries` | `trusted_account_ids` | List of Strings |
| `cloudwatch_log_group_retention_policy_specific_days_enabled` | `log_group_retention_days` | Integer |
| `appstream_fleet_session_idle_disconnect_timeout` | `max_idle_disconnect_timeout_in_seconds` | Integer |
| `appstream_fleet_session_disconnect_timeout` | `max_disconnect_timeout_in_seconds` | Integer |
| `acm_certificates_expiration_check` | `days_to_expire_threshold` | Integer |
| `appstream_fleet_maximum_session_duration` | `max_session_duration_seconds` | Integer |
| `appstream_fleet_session_disconnect_timeout` | `max_disconnect_timeout_in_seconds` | Integer |
| `appstream_fleet_session_idle_disconnect_timeout` | `max_idle_disconnect_timeout_in_seconds` | Integer |
| `autoscaling_find_secrets_ec2_launch_configuration` | `secrets_ignore_patterns` | List of Strings |
| `awslambda_function_no_secrets_in_code` | `secrets_ignore_patterns` | List of Strings |
| `awslambda_function_no_secrets_in_variables` | `secrets_ignore_patterns` | List of Strings |
| `awslambda_function_using_supported_runtimes` | `obsolete_lambda_runtimes` | Integer |
| `organizations_scp_check_deny_regions` | `organizations_enabled_regions` | List of Strings |
| `organizations_delegated_administrators` | `organizations_trusted_delegated_administrators` | List of Strings |
| `ecr_repositories_scan_vulnerabilities_in_latest_image` | `ecr_repository_vulnerability_minimum_severity` | String |
| `trustedadvisor_premium_support_plan_subscribed` | `verify_premium_support_plans` | Boolean |
| `config_recorder_all_regions_enabled` | `mute_non_default_regions` | Boolean |
| `drs_job_exist` | `mute_non_default_regions` | Boolean |
| `guardduty_is_enabled` | `mute_non_default_regions` | Boolean |
| `securityhub_enabled` | `mute_non_default_regions` | Boolean |
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_entropy` | Integer |
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_minutes` | Integer |
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_actions` | List of Strings |
| `awslambda_function_vpc_is_in_multi_azs` | `lambda_min_azs` | Integer |
| `cloudformation_stack_outputs_find_secrets` | `secrets_ignore_patterns` | List of Strings |
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_actions` | List of Strings |
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_entropy` | Integer |
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_minutes` | Integer |
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_actions` | List of Strings |
| `rds_instance_backup_enabled` | `check_rds_instance_replicas` | Boolean |
| `ec2_securitygroup_allow_ingress_from_internet_to_any_port` | `ec2_allowed_interface_types` | List of Strings |
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_actions` | List of Strings |
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_entropy` | Integer |
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_minutes` | Integer |
| `cloudwatch_log_group_no_secrets_in_logs` | `secrets_ignore_patterns` | List of Strings |
| `cloudwatch_log_group_no_critical_pii_in_logs` | `critical_pii_entities` | List of Strings |
| `cloudwatch_log_group_no_critical_pii_in_logs` | `pii_language` | String |
| `cloudwatch_log_group_retention_policy_specific_days_enabled` | `log_group_retention_days` | Integer |
| `codebuild_project_no_secrets_in_variables` | `excluded_sensitive_environment_variables` | List of Strings |
| `codebuild_project_no_secrets_in_variables` | `secrets_ignore_patterns` | List of Strings |
| `config_recorder_all_regions_enabled` | `mute_non_default_regions` | Boolean |
| `drs_job_exist` | `mute_non_default_regions` | Boolean |
| `ec2_elastic_ip_shodan` | `shodan_api_key` | String |
| `ec2_instance_older_than_specific_days` | `max_ec2_instance_age_in_days` | Integer |
| `ec2_instance_secrets_user_data` | `secrets_ignore_patterns` | List of Strings |
| `ec2_launch_template_no_secrets` | `secrets_ignore_patterns` | List of Strings |
| `ec2_securitygroup_allow_ingress_from_internet_to_any_port` | `ec2_allowed_instance_owners` | List of Strings |
| `acm_certificates_expiration_check` | `days_to_expire_threshold` | Integer |
| `ec2_securitygroup_allow_ingress_from_internet_to_any_port` | `ec2_allowed_interface_types` | List of Strings |
| `ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports`| `ec2_sg_high_risk_ports` | List of Integer |
| `ec2_securitygroup_with_many_ingress_egress_rules` | `max_security_group_rules` | Integer |
| `ecs_task_definitions_no_environment_secrets` | `secrets_ignore_patterns` | List of Strings |
| `ecr_repositories_scan_vulnerabilities_in_latest_image` | `ecr_repository_vulnerability_minimum_severity` | String |
| `eks_cluster_uses_a_supported_version` | `eks_cluster_oldest_version_supported` | String |
| `eks_control_plane_logging_all_types_enabled` | `eks_required_log_types` | List of Strings |
| `elb_is_in_multiple_az` | `elb_min_azs` | Integer |
| `elbv2_is_in_multiple_az` | `elbv2_min_azs` | Integer |
| `guardduty_is_enabled` | `mute_non_default_regions` | Boolean |
| `iam_user_accesskey_unused` | `max_unused_access_keys_days` | Integer |
| `iam_user_console_access_unused` | `max_console_access_days` | Integer |
| `organizations_delegated_administrators` | `organizations_trusted_delegated_administrators` | List of Strings |
| `organizations_scp_check_deny_regions` | `organizations_enabled_regions` | List of Strings |
| `rds_instance_backup_enabled` | `check_rds_instance_replicas` | Boolean |
| `securityhub_enabled` | `mute_non_default_regions` | Boolean |
| `secretsmanager_secret_unused` | `max_days_secret_unused` | Integer |
| `secretsmanager_secret_rotated_periodically` | `max_days_secret_unrotated` | Integer |
| `ssm_document_secrets` | `secrets_ignore_patterns` | List of Strings |
| `trustedadvisor_premium_support_plan_subscribed` | `verify_premium_support_plans` | Boolean |
| `vpc_endpoint_connections_trust_boundaries` | `trusted_account_ids` | List of Strings |
| `vpc_endpoint_services_allowed_principals_trust_boundaries` | `trusted_account_ids` | List of Strings |
## Azure
@@ -125,8 +145,24 @@ aws:
[
"amazon-elb"
]
# aws.ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports
ec2_sg_high_risk_ports:
[
25,
110,
135,
143,
445,
3000,
4333,
5000,
5500,
8080,
8088,
]
# AWS VPC Configuration (vpc_endpoint_connections_trust_boundaries, vpc_endpoint_services_allowed_principals_trust_boundaries)
# AWS SSM Configuration (aws.ssm_documents_set_as_public)
# Single account environment: No action required. The AWS account number will be automatically added by the checks.
# Multi account environment: Any additional trusted account number should be added as a space separated list, e.g.
# trusted_account_ids : ["123456789012", "098765432109", "678901234567"]
@@ -194,7 +230,7 @@ aws:
# AWS CloudTrail Configuration
# aws.cloudtrail_threat_detection_privilege_escalation
threat_detection_privilege_escalation_threshold: 0.1 # Percentage of actions found to decide if it is an privilege_escalation attack event, by default is 0.1 (10%)
threat_detection_privilege_escalation_threshold: 0.2 # Percentage of actions found to decide if it is a privilege escalation attack event; by default it is 0.2 (20%)
threat_detection_privilege_escalation_minutes: 1440 # Past minutes to search from now for privilege_escalation attacks, by default is 1440 minutes (24 hours)
threat_detection_privilege_escalation_actions:
[
@@ -251,7 +287,7 @@ aws:
"UpdateLoginProfile",
]
# aws.cloudtrail_threat_detection_enumeration
threat_detection_enumeration_threshold: 0.1 # Percentage of actions found to decide if it is an enumeration attack event, by default is 0.1 (10%)
threat_detection_enumeration_threshold: 0.3 # Percentage of actions found to decide if it is an enumeration attack event; by default it is 0.3 (30%)
threat_detection_enumeration_minutes: 1440 # Past minutes to search from now for enumeration attacks, by default is 1440 minutes (24 hours)
threat_detection_enumeration_actions:
[
@@ -346,6 +382,24 @@ aws:
"LookupEvents",
"Search",
]
# aws.cloudtrail_threat_detection_llm_jacking
threat_detection_llm_jacking_threshold: 0.4 # Percentage of actions found to decide if it is an LLM Jacking attack event; by default it is 0.4 (40%)
threat_detection_llm_jacking_minutes: 1440 # Past minutes to search from now for LLM Jacking attacks, by default is 1440 minutes (24 hours)
threat_detection_llm_jacking_actions:
[
"PutUseCaseForModelAccess", # Submits a use case for model access, providing justification (Write).
"PutFoundationModelEntitlement", # Grants entitlement for accessing a foundation model (Write).
"PutModelInvocationLoggingConfiguration", # Configures logging for model invocations (Write).
"CreateFoundationModelAgreement", # Creates a new agreement to use a foundation model (Write).
"InvokeModel", # Invokes a specified Bedrock model for inference using provided prompt and parameters (Read).
"InvokeModelWithResponseStream", # Invokes a Bedrock model for inference with real-time token streaming (Read).
"GetUseCaseForModelAccess", # Retrieves an existing use case for model access (Read).
"GetModelInvocationLoggingConfiguration", # Fetches the logging configuration for model invocations (Read).
"GetFoundationModelAvailability", # Checks the availability of a foundation model for use (Read).
"ListFoundationModelAgreementOffers", # Lists available agreement offers for accessing foundation models (List).
"ListFoundationModels", # Lists the available foundation models in Bedrock (List).
"ListProvisionedModelThroughputs", # Lists the provisioned throughput for previously created models (List).
]
# AWS RDS Configuration
# aws.rds_instance_backup_enabled
@@ -368,6 +422,18 @@ aws:
"scheduler",
]
# aws.eks_cluster_uses_a_supported_version
# EKS clusters must be version 1.28 or higher
eks_cluster_oldest_version_supported: "1.28"
# AWS CodeBuild Configuration
# aws.codebuild_project_no_secrets_in_variables
# CodeBuild sensitive variables that are excluded from the check
excluded_sensitive_environment_variables:
[
]
# Azure Configuration
azure:
# Azure Network Configuration

View File

@@ -10,9 +10,11 @@ prowler dashboard
To run Prowler local dashboard with Docker, use:
```sh
docker run --env HOST=0.0.0.0 --publish 127.0.0.1:11666:11666 toniblyx/prowler:latest dashboard
docker run -v /your/local/dir/prowler-output:/home/prowler/output --env HOST=0.0.0.0 --publish 127.0.0.1:11666:11666 toniblyx/prowler:latest dashboard
```
Make sure you update the `/your/local/dir/prowler-output` to match the path that contains your prowler output.
???+ note
**Remember that the `dashboard` server is not authenticated, if you expose it to the internet, you are running it at your own risk.**

View File

@@ -13,7 +13,7 @@ prowler <provider> -c <check_to_fix_1> <check_to_fix_2> ... --fixer
```sh
prowler <provider> --list-fixers
```
It's important to note that using the fixers for `Access Analyzer`, `GuardDuty`, and `SecurityHub` may incur additional costs. These AWS services might trigger actions or deploy resources that can lead to charges on your AWS account.
## Writing a Fixer
To write a fixer, you need to create a file called `<check_id>_fixer.py` inside the check folder, with a function called `fixer` that receives either the region or the resource to be fixed as a parameter, and returns a boolean value indicating if the fix was successful or not.
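A minimal sketch of a region-based fixer, assuming a hypothetical fixer for an EC2 check; the client import and its `regional_clients` attribute follow the pattern used by Prowler's AWS services, but the exact API call is illustrative:
```python
# ec2_ebs_default_encryption_fixer.py — illustrative sketch of a region-based fixer
from prowler.lib.logger import logger
from prowler.providers.aws.services.ec2.ec2_client import ec2_client


def fixer(region: str) -> bool:
    """Enable EBS encryption by default in the given region.

    Returns True if the fix succeeded, False otherwise.
    """
    try:
        regional_client = ec2_client.regional_clients[region]
        regional_client.enable_ebs_encryption_by_default()
        return True
    except Exception as error:
        logger.error(f"{region} -- {error.__class__.__name__}[{error}]")
        return False
```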

View File

@@ -0,0 +1,22 @@
# GCP Organization
By default, Prowler scans all Google Cloud projects accessible to the authenticated user.
To limit the scan to projects within a specific Google Cloud organization, use the `--organization-id` option with the GCP organization ID:
```console
prowler gcp --organization-id organization-id
```
???+ warning
Make sure that the credentials used have the Cloud Asset Viewer (`roles/cloudasset.viewer`) or Cloud Asset Owner (`roles/cloudasset.owner`) role at the organization level.
???+ note
With this option, Prowler retrieves all projects within the specified organization, including those organized in folders and nested subfolders. This ensures that every project under the organization's hierarchy is scanned, providing full visibility across the entire organization.
???+ note
To find the organization ID, use the following command:
```console
gcloud organizations list
```

View File

@@ -24,6 +24,11 @@ Prowler can run without showing its banner:
```console
prowler <provider> -b/--no-banner
```
## Disable Colors
Prowler can run without showing colors:
```console
prowler <provider> --no-color
```
## Checks
Prowler has checks per provider, there are options related with them:
@@ -120,5 +125,5 @@ prowler <provider> --list-categories
```
- Execute specific category(s):
```console
prowler <provider> --categories
prowler <provider> --categories secrets
```

View File

@@ -7,97 +7,155 @@ Mutelist option works along with other options and will modify the output in the
- CSV: `muted` is `True`. The field `status` will keep the original status, `MANUAL`, `PASS` or `FAIL`, of the finding.
You can use `-w`/`--mutelist-file` with the path of your mutelist yaml file:
## How the Mutelist Works
The **Mutelist** uses both "AND" and "OR" logic to determine which resources, checks, regions, and tags should be muted. For each check, the Mutelist evaluates whether the account, region, and resource match the specified criteria using "AND" logic. If tags are specified, the Mutelist can apply either "AND" or "OR" logic.
If any of the criteria do not match, the check is not muted.
???+ note
Remember that mutelist can be used with regular expressions.
## Mutelist Specification
???+ note
- For Azure provider, the Account ID is the Subscription Name and the Region is the Location.
- For GCP provider, the Account ID is the Project ID and the Region is the Zone.
- For Kubernetes provider, the Account ID is the Cluster Name and the Region is the Namespace.
The Mutelist file uses the [YAML](https://en.wikipedia.org/wiki/YAML) format with the following syntax:
```yaml
### Account, Check and/or Region can be * to apply for all the cases.
### Resources and tags are lists that can have either Regex or Keywords.
### Tags is an optional list that matches on tuples of 'key=value' and are "ANDed" together.
### Use an alternation Regex to match one of multiple tags with "ORed" logic.
### For each check you can except Accounts, Regions, Resources and/or Tags.
########################### MUTELIST EXAMPLE ###########################
Mutelist:
Accounts:
"123456789012":
Checks:
"iam_user_hardware_mfa_enabled":
Regions:
- "us-east-1"
Resources:
- "user-1" # Will mute user-1 in check iam_user_hardware_mfa_enabled
- "user-2" # Will mute user-2 in check iam_user_hardware_mfa_enabled
"ec2_*":
Regions:
- "*"
Resources:
- "*" # Will mute every EC2 check in every account and region
"*":
Regions:
- "*"
Resources:
- "test"
Tags:
- "test=test" # Will mute every resource containing the string "test" and the tags 'test=test' and
- "project=test|project=stage" # either of ('project=test' OR project=stage) in account 123456789012 and every region
"*":
Regions:
- "*"
Resources:
- "test"
Tags:
- "test=test"
- "project=test" # This will mute every resource containing the string "test" and BOTH tags at the same time.
"*":
Regions:
- "*"
Resources:
- "test"
Tags: # This will mute every resource containing the string "test" and the ones that contain EITHER the `test=test` OR `project=test` OR `project=dev`
- "test=test|project=(test|dev)"
"*":
Regions:
- "*"
Resources:
- "test"
Tags:
- "test=test" # This will mute every resource containing the string "test" and the tags `test=test` and either `project=test` OR `project=stage` in every account and region.
- "project=test|project=stage"
"*":
Checks:
"s3_bucket_object_versioning":
Regions:
- "eu-west-1"
- "us-east-1"
Resources:
- "ci-logs" # Will mute bucket "ci-logs" AND ALSO bucket "ci-logs-replica" in specified check and regions
- "logs" # Will mute EVERY BUCKET containing the string "logs" in specified check and regions
- ".+-logs" # Will mute all buckets containing the terms ci-logs, qa-logs, etc. in specified check and regions
"ecs_task_definitions_no_environment_secrets":
Regions:
- "*"
Resources:
- "*"
Exceptions:
Accounts:
- "0123456789012"
Regions:
- "eu-west-1"
- "eu-south-2" # Will mute every resource in check ecs_task_definitions_no_environment_secrets except the ones in account 0123456789012 located in eu-south-2 or eu-west-1
"*":
Regions:
- "*"
Resources:
- "*"
Tags:
- "environment=dev" # Will mute every resource containing the tag 'environment=dev' in every account and region
"123456789012":
Checks:
"*":
Regions:
- "*"
Resources:
- "*"
Exceptions:
Resources:
- "test"
Tags:
- "environment=prod" # Will mute every resource except in account 123456789012 except the ones containing the string "test" and tag environment=prod
"*":
Checks:
"ec2_*":
Regions:
- "*"
Resources:
- "test-resource" # Will mute the resource "test-resource" in all accounts and regions for whatever check from the EC2 service
```
### Account, Check, Region, Resource, and Tag
| Field | Description | Logic |
|----------|----------|----------|
| `account_id` | Use `*` to apply the mutelist to all accounts. | `ANDed` |
| `check_name` | The name of the Prowler check. Use `*` to apply the mutelist to all checks, or `service_*` to apply it to all service's checks. | `ANDed` |
| `region` | The region identifier. Use `*` to apply the mutelist to all regions. | `ANDed` |
| `resource` | The resource identifier. Use `*` to apply the mutelist to all resources. | `ANDed` |
| `tag` | The tag value. | `ORed` |
## How to Use the Mutelist
To use the Mutelist, you need to specify the path to the Mutelist YAML file using the `-w` or `--mutelist-file` option when running Prowler:
```
prowler <provider> -w mutelist.yaml
```
## Mutelist YAML File Syntax
Replace `<provider>` with the appropriate provider name.
???+ note
For Azure provider, the Account ID is the Subscription Name and the Region is the Location.
## Considerations
???+ note
For GCP provider, the Account ID is the Project ID and the Region is the Zone.
- The Mutelist can be used in combination with other Prowler options, such as the `--service` or `--checks` option, to further customize the scanning process.
- Make sure to review and update the Mutelist regularly to ensure it reflects the desired exclusions and remains up to date with your infrastructure.
???+ note
For Kubernetes provider, the Account ID is the Cluster Name and the Region is the Namespace.
The Mutelist file is a YAML file with the following syntax:
```yaml
### Account, Check and/or Region can be * to apply for all the cases.
### Resources and tags are lists that can have either Regex or Keywords.
### Tags is an optional list that matches on tuples of 'key=value' and are "ANDed" together.
### Use an alternation Regex to match one of multiple tags with "ORed" logic.
### For each check you can except Accounts, Regions, Resources and/or Tags.
########################### MUTELIST EXAMPLE ###########################
Mutelist:
Accounts:
"123456789012":
Checks:
"iam_user_hardware_mfa_enabled":
Regions:
- "us-east-1"
Resources:
- "user-1" # Will ignore user-1 in check iam_user_hardware_mfa_enabled
- "user-2" # Will ignore user-2 in check iam_user_hardware_mfa_enabled
"ec2_*":
Regions:
- "*"
Resources:
- "*" # Will ignore every EC2 check in every account and region
"*":
Regions:
- "*"
Resources:
- "test"
Tags:
- "test=test" # Will ignore every resource containing the string "test" and the tags 'test=test' and
- "project=test|project=stage" # either of ('project=test' OR project=stage) in account 123456789012 and every region
"*":
Checks:
"s3_bucket_object_versioning":
Regions:
- "eu-west-1"
- "us-east-1"
Resources:
- "ci-logs" # Will ignore bucket "ci-logs" AND ALSO bucket "ci-logs-replica" in specified check and regions
- "logs" # Will ignore EVERY BUCKET containing the string "logs" in specified check and regions
- ".+-logs" # Will ignore all buckets containing the terms ci-logs, qa-logs, etc. in specified check and regions
"ecs_task_definitions_no_environment_secrets":
Regions:
- "*"
Resources:
- "*"
Exceptions:
Accounts:
- "0123456789012"
Regions:
- "eu-west-1"
- "eu-south-2" # Will ignore every resource in check ecs_task_definitions_no_environment_secrets except the ones in account 0123456789012 located in eu-south-2 or eu-west-1
"*":
Regions:
- "*"
Resources:
- "*"
Tags:
- "environment=dev" # Will ignore every resource containing the tag 'environment=dev' in every account and region
"123456789012":
Checks:
"*":
Regions:
- "*"
Resources:
- "*"
Exceptions:
Resources:
- "test"
Tags:
- "environment=prod" # Will ignore every resource except in account 123456789012 except the ones containing the string "test" and tag environment=prod
```
## AWS Mutelist
### Mute specific AWS regions

View File

@@ -142,7 +142,8 @@ The JSON-OCSF output format implements the [Detection Finding](https://schema.oc
"desc": "Ensure CloudTrail is enabled in all regions",
"product_uid": "prowler",
"title": "Ensure CloudTrail is enabled in all regions",
"uid": "prowler-aws-cloudtrail_multi_region_enabled-123456789012-ap-northeast-1-123456789012"
"uid": "prowler-aws-cloudtrail_multi_region_enabled-123456789012-ap-northeast-1-123456789012",
"types": ["Software and Configuration Checks","Industry and Regulatory Standards","CIS AWS Foundations Benchmark"],
},
"resources": [
{
@@ -189,11 +190,10 @@ The JSON-OCSF output format implements the [Detection Finding](https://schema.oc
"type_uid": 200401,
"type_name": "Create",
"unmapped": {
"check_type": "Software and Configuration Checks,Industry and Regulatory Standards,CIS AWS Foundations Benchmark",
"related_url": "",
"categories": "forensics-ready",
"depends_on": "",
"related_to": "",
"categories": ["forensics-ready"],
"depends_on": [],
"related_to": [],
"notes": "",
"compliance": {
"CISA": [
@@ -336,7 +336,7 @@ The following is the mapping between the native JSON and the Detection Finding f
| Provider | cloud.provider |
| CheckID | metadata.event_code |
| CheckTitle | finding_info.title |
| CheckType | unmapped.check_type |
| CheckType | finding_info.types |
| ServiceName | resources.group.name |
| SubServiceName | _Not mapped yet_ |
| Status | status_code |

View File

@@ -36,10 +36,11 @@ If EBS default encyption is not enabled, sensitive information at rest is not pr
- `ec2_ebs_default_encryption`
If your Security groups are not properly configured the attack surface is increased, nonetheless, Prowler will detect those security groups that are being used (they are attached) to only notify those that are being used. This logic applies to the 15 checks related to open ports in security groups and the check for the default security group.
If your Security groups are not properly configured, the attack surface is increased; nonetheless, Prowler will detect the security groups that are in use (attached) and only report on those. This logic applies to the 15 checks related to open ports in security groups, the check for the default security group, and the checks for security groups that allow wide-open ingress and egress traffic.
- `ec2_securitygroup_allow_ingress_from_internet_to_port_X` (15 checks)
- `ec2_securitygroup_default_restrict_traffic`
- `ec2_securitygroup_allow_wide_open_public_ipv4`
Prowler will also check for Network ACLs that are in use, alerting only on those with open ports.

View File

@@ -87,6 +87,7 @@ nav:
- Google Cloud:
- Authentication: tutorials/gcp/authentication.md
- Projects: tutorials/gcp/projects.md
- Organization: tutorials/gcp/organization.md
- Kubernetes:
- In-Cluster Execution: tutorials/kubernetes/in-cluster.md
- Non In-Cluster Execution: tutorials/kubernetes/outside-cluster.md

View File

@@ -59,9 +59,12 @@ Resources:
- 'appstream:Describe*'
- 'appstream:List*'
- 'backup:List*'
- 'bedrock:List*'
- 'bedrock:Get*'
- 'cloudtrail:GetInsightSelectors'
- 'codeartifact:List*'
- 'codebuild:BatchGet*'
- 'codebuild:ListReportGroups'
- 'cognito-idp:GetUserPoolMfaConfig'
- 'dlm:Get*'
- 'drs:Describe*'
@@ -82,6 +85,7 @@ Resources:
- 'logs:FilterLogEvents'
- 'lightsail:GetRelationalDatabases'
- 'macie2:GetMacieSession'
- 'macie2:GetAutomatedDiscoveryConfiguration'
- 's3:GetAccountPublicAccessBlock'
- 'shield:DescribeProtection'
- 'shield:GetSubscriptionState'

View File

@@ -7,9 +7,12 @@
"appstream:Describe*",
"appstream:List*",
"backup:List*",
"bedrock:List*",
"bedrock:Get*",
"cloudtrail:GetInsightSelectors",
"codeartifact:List*",
"codebuild:BatchGet*",
"codebuild:ListReportGroups",
"cognito-idp:GetUserPoolMfaConfig",
"dlm:Get*",
"drs:Describe*",
@@ -30,6 +33,7 @@
"logs:FilterLogEvents",
"lightsail:GetRelationalDatabases",
"macie2:GetMacieSession",
"macie2:GetAutomatedDiscoveryConfiguration",
"s3:GetAccountPublicAccessBlock",
"shield:DescribeProtection",
"shield:GetSubscriptionState",

4273
poetry.lock generated

File diff suppressed because it is too large

View File

@@ -5,6 +5,7 @@ import sys
from os import environ
from colorama import Fore, Style
from colorama import init as colorama_init
from prowler.config.config import (
csv_file_suffix,
@@ -15,8 +16,6 @@ from prowler.config.config import (
)
from prowler.lib.banner import print_banner
from prowler.lib.check.check import (
bulk_load_checks_metadata,
bulk_load_compliance_frameworks,
exclude_checks_to_run,
exclude_services_to_run,
execute_checks,
@@ -36,10 +35,12 @@ from prowler.lib.check.check import (
)
from prowler.lib.check.checks_loader import load_checks_to_execute
from prowler.lib.check.compliance import update_checks_metadata_with_compliance
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.custom_checks_metadata import (
parse_custom_checks_metadata_file,
update_checks_metadata,
)
from prowler.lib.check.models import CheckMetadata
from prowler.lib.cli.parser import ProwlerArgumentParser
from prowler.lib.logger import logger, set_logging_config
from prowler.lib.outputs.asff.asff import ASFF
@@ -54,6 +55,7 @@ from prowler.lib.outputs.compliance.compliance import display_compliance_table
from prowler.lib.outputs.compliance.ens.ens_aws import AWSENS
from prowler.lib.outputs.compliance.generic.generic import GenericCompliance
from prowler.lib.outputs.compliance.iso27001.iso27001_aws import AWSISO27001
from prowler.lib.outputs.compliance.kisa_ismsp.kisa_ismsp_aws import AWSKISAISMSP
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_aws import AWSMitreAttack
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_azure import (
AzureMitreAttack,
@@ -68,8 +70,12 @@ from prowler.lib.outputs.slack.slack import Slack
from prowler.lib.outputs.summary_table import display_summary_table
from prowler.providers.aws.lib.s3.s3 import S3
from prowler.providers.aws.lib.security_hub.security_hub import SecurityHub
from prowler.providers.aws.models import AWSOutputOptions
from prowler.providers.azure.models import AzureOutputOptions
from prowler.providers.common.provider import Provider
from prowler.providers.common.quick_inventory import run_provider_quick_inventory
from prowler.providers.gcp.models import GCPOutputOptions
from prowler.providers.kubernetes.models import KubernetesOutputOptions
def prowler():
@@ -107,6 +113,9 @@ def prowler():
and not checks_folder
)
if args.no_color:
colorama_init(strip=True)
if not args.no_banner:
legend = args.verbose or getattr(args, "fixer", None)
print_banner(legend)
@@ -131,7 +140,7 @@ def prowler():
# Load checks metadata
logger.debug("Loading checks metadata from .metadata.json files")
bulk_checks_metadata = bulk_load_checks_metadata(provider)
bulk_checks_metadata = CheckMetadata.get_bulk(provider)
if args.list_categories:
print_categories(list_categories(bulk_checks_metadata))
@@ -141,7 +150,7 @@ def prowler():
# Load compliance frameworks
logger.debug("Loading compliance frameworks from .json files")
bulk_compliance_frameworks = bulk_load_compliance_frameworks(provider)
bulk_compliance_frameworks = Compliance.get_bulk(provider)
# Complete checks metadata with the compliance framework specification
bulk_checks_metadata = update_checks_metadata_with_compliance(
bulk_compliance_frameworks, bulk_checks_metadata
@@ -168,15 +177,15 @@ def prowler():
# Load checks to execute
checks_to_execute = load_checks_to_execute(
bulk_checks_metadata,
bulk_compliance_frameworks,
checks_file,
checks,
services,
severities,
compliance_framework,
categories,
provider,
bulk_checks_metadata=bulk_checks_metadata,
bulk_compliance_frameworks=bulk_compliance_frameworks,
checks_file=checks_file,
check_list=checks,
service_list=services,
severities=severities,
compliance_frameworks=compliance_framework,
categories=categories,
provider=provider,
)
# if --list-checks-json, dump a json file and exit
@@ -190,7 +199,7 @@ def prowler():
sys.exit()
# Provider to scan
Provider.set_global_provider(args)
Provider.init_global_provider(args)
global_provider = Provider.get_global_provider()
# Print Provider Credentials
@@ -224,17 +233,30 @@ def prowler():
# Once the provider is set and we have the eventual checks based on the resource identifier,
# it is time to check what Prowler's checks are going to be executed
checks_from_resources = global_provider.get_checks_to_execute_by_audit_resources()
if checks_from_resources:
# Intersect checks from resources with checks to execute so we only run the checks that apply to the resources with the specified ARNs or tags
if getattr(args, "resource_arn", None) or getattr(args, "resource_tag", None):
checks_to_execute = checks_to_execute.intersection(checks_from_resources)
# Sort final check list
checks_to_execute = sorted(checks_to_execute)
# Setup Mutelist
global_provider.mutelist = args.mutelist_file
# Setup Output Options
global_provider.output_options = (args, bulk_checks_metadata)
if provider == "aws":
output_options = AWSOutputOptions(
args, bulk_checks_metadata, global_provider.identity
)
elif provider == "azure":
output_options = AzureOutputOptions(
args, bulk_checks_metadata, global_provider.identity
)
elif provider == "gcp":
output_options = GCPOutputOptions(
args, bulk_checks_metadata, global_provider.identity
)
elif provider == "kubernetes":
output_options = KubernetesOutputOptions(
args, bulk_checks_metadata, global_provider.identity
)
# Run the quick inventory for the provider if available
if hasattr(args, "quick_inventory") and args.quick_inventory:
@@ -250,6 +272,7 @@ def prowler():
global_provider,
custom_checks_metadata,
args.config_file,
output_options,
)
else:
logger.error(
@@ -257,7 +280,7 @@ def prowler():
)
# Prowler Fixer
if global_provider.output_options.fixer:
if output_options.fixer:
print(f"{Style.BRIGHT}\nRunning Prowler Fixer, please wait...{Style.RESET_ALL}")
# Check if there are any FAIL findings
if any("FAIL" in finding.status for finding in findings):
@@ -303,7 +326,8 @@ def prowler():
# TODO: this part is needed since the checks generates a Check_Report_XXX and the output uses Finding
# This will be refactored for the outputs generate directly the Finding
finding_outputs = [
Finding.generate_output(global_provider, finding) for finding in findings
Finding.generate_output(global_provider, finding, output_options)
for finding in findings
]
generated_outputs = {"regular": [], "compliance": []}
@@ -311,8 +335,8 @@ def prowler():
if args.output_formats:
for mode in args.output_formats:
filename = (
f"{global_provider.output_options.output_directory}/"
f"{global_provider.output_options.output_filename}"
f"{output_options.output_directory}/"
f"{output_options.output_filename}"
)
if mode == "csv":
csv_output = CSV(
@@ -354,16 +378,16 @@ def prowler():
)
# Compliance Frameworks
input_compliance_frameworks = set(
global_provider.output_options.output_modes
).intersection(get_available_compliance_frameworks(provider))
input_compliance_frameworks = set(output_options.output_modes).intersection(
get_available_compliance_frameworks(provider)
)
if provider == "aws":
for compliance_name in input_compliance_frameworks:
if compliance_name.startswith("cis_"):
# Generate CIS Finding Object
filename = (
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
)
cis = AWSCIS(
findings=finding_outputs,
@@ -376,8 +400,8 @@ def prowler():
elif compliance_name == "mitre_attack_aws":
# Generate MITRE ATT&CK Finding Object
filename = (
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
)
mitre_attack = AWSMitreAttack(
findings=finding_outputs,
@@ -390,8 +414,8 @@ def prowler():
elif compliance_name.startswith("ens_"):
# Generate ENS Finding Object
filename = (
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
)
ens = AWSENS(
findings=finding_outputs,
@@ -404,8 +428,8 @@ def prowler():
elif compliance_name.startswith("aws_well_architected_framework"):
# Generate AWS Well-Architected Finding Object
filename = (
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
)
aws_well_architected = AWSWellArchitected(
findings=finding_outputs,
@@ -418,8 +442,8 @@ def prowler():
elif compliance_name.startswith("iso27001_"):
# Generate ISO27001 Finding Object
filename = (
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
)
iso27001 = AWSISO27001(
findings=finding_outputs,
@@ -429,10 +453,24 @@ def prowler():
)
generated_outputs["compliance"].append(iso27001)
iso27001.batch_write_data_to_file()
elif compliance_name.startswith("kisa"):
# Generate KISA-ISMS-P Finding Object
filename = (
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
)
kisa_ismsp = AWSKISAISMSP(
findings=finding_outputs,
compliance=bulk_compliance_frameworks[compliance_name],
create_file_descriptor=True,
file_path=filename,
)
generated_outputs["compliance"].append(kisa_ismsp)
kisa_ismsp.batch_write_data_to_file()
else:
filename = (
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
)
generic_compliance = GenericCompliance(
findings=finding_outputs,
@@ -448,8 +486,8 @@ def prowler():
if compliance_name.startswith("cis_"):
# Generate CIS Finding Object
filename = (
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
)
cis = AzureCIS(
findings=finding_outputs,
@@ -462,8 +500,8 @@ def prowler():
elif compliance_name == "mitre_attack_azure":
# Generate MITRE ATT&CK Finding Object
filename = (
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
)
mitre_attack = AzureMitreAttack(
findings=finding_outputs,
@@ -475,8 +513,8 @@ def prowler():
mitre_attack.batch_write_data_to_file()
else:
filename = (
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
)
generic_compliance = GenericCompliance(
findings=finding_outputs,
@@ -492,8 +530,8 @@ def prowler():
if compliance_name.startswith("cis_"):
# Generate CIS Finding Object
filename = (
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
)
cis = GCPCIS(
findings=finding_outputs,
@@ -506,8 +544,8 @@ def prowler():
elif compliance_name == "mitre_attack_gcp":
# Generate MITRE ATT&CK Finding Object
filename = (
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
)
mitre_attack = GCPMitreAttack(
findings=finding_outputs,
@@ -519,8 +557,8 @@ def prowler():
mitre_attack.batch_write_data_to_file()
else:
filename = (
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
)
generic_compliance = GenericCompliance(
findings=finding_outputs,
@@ -536,8 +574,8 @@ def prowler():
if compliance_name.startswith("cis_"):
# Generate CIS Finding Object
filename = (
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
)
cis = KubernetesCIS(
findings=finding_outputs,
@@ -549,8 +587,8 @@ def prowler():
cis.batch_write_data_to_file()
else:
filename = (
f"{global_provider.output_options.output_directory}/compliance/"
f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
)
generic_compliance = GenericCompliance(
findings=finding_outputs,
@@ -583,7 +621,11 @@ def prowler():
)
security_hub_regions = (
global_provider.get_available_aws_service_regions("securityhub")
global_provider.get_available_aws_service_regions(
"securityhub",
global_provider.identity.partition,
global_provider.identity.audited_regions,
)
if not global_provider.identity.audited_regions
else global_provider.identity.audited_regions
)
@@ -593,7 +635,7 @@ def prowler():
aws_partition=global_provider.identity.partition,
aws_session=global_provider.session.current_session,
findings=asff_output.data,
send_only_fails=global_provider.output_options.send_sh_only_fails,
send_only_fails=output_options.send_sh_only_fails,
aws_security_hub_available_regions=security_hub_regions,
)
# Send the findings to Security Hub
@@ -619,7 +661,7 @@ def prowler():
display_summary_table(
findings,
global_provider,
global_provider.output_options,
output_options,
)
# Only display compliance table if there are findings (not all MANUAL) and it is a default execution
if (
@@ -638,13 +680,13 @@ def prowler():
findings,
bulk_checks_metadata,
compliance,
global_provider.output_options.output_filename,
global_provider.output_options.output_directory,
output_options.output_filename,
output_options.output_directory,
compliance_overview,
)
if compliance_overview:
print(
f"\nDetailed compliance results are in {Fore.YELLOW}{global_provider.output_options.output_directory}/compliance/{Style.RESET_ALL}\n"
f"\nDetailed compliance results are in {Fore.YELLOW}{output_options.output_directory}/compliance/{Style.RESET_ALL}\n"
)
# If custom checks were passed, remove the modules
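
The new per-provider output options above are dispatched through a plain if/elif chain; a hedged sketch of an equivalent table-driven form, assuming `provider`, `args`, `bulk_checks_metadata`, and `global_provider` are bound as in `prowler()` above (the mapping itself is illustrative, not part of the change — the four classes and their shared constructor signature are taken from the diff).

```python
from prowler.providers.aws.models import AWSOutputOptions
from prowler.providers.azure.models import AzureOutputOptions
from prowler.providers.gcp.models import GCPOutputOptions
from prowler.providers.kubernetes.models import KubernetesOutputOptions

# Illustrative alternative to the if/elif chain; not part of the actual change.
OUTPUT_OPTIONS_BY_PROVIDER = {
    "aws": AWSOutputOptions,
    "azure": AzureOutputOptions,
    "gcp": GCPOutputOptions,
    "kubernetes": KubernetesOutputOptions,
}

output_options = OUTPUT_OPTIONS_BY_PROVIDER[provider](
    args, bulk_checks_metadata, global_provider.identity
)
```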

View File

@@ -557,7 +557,7 @@
}
],
"Checks": [
"inspector2_findings_exist"
"inspector2_is_enabled"
]
},
{
@@ -587,7 +587,8 @@
}
],
"Checks": [
"inspector2_findings_exist",
"inspector2_active_findings_exist",
"inspector2_is_enabled",
"ecr_registry_scan_images_on_push_enabled",
"ecr_repositories_scan_vulnerabilities_in_latest_image",
"ecr_repositories_scan_images_on_push_enabled"

View File

@@ -485,7 +485,7 @@
"codeartifact_packages_external_public_publishing_disabled",
"ecr_repositories_not_publicly_accessible",
"efs_not_publicly_accessible",
"eks_endpoints_not_publicly_accessible",
"eks_cluster_not_publicly_accessible",
"elb_internet_facing",
"elbv2_internet_facing",
"s3_account_level_public_access_blocks",
@@ -664,7 +664,7 @@
"awslambda_function_not_publicly_accessible",
"apigateway_restapi_waf_acl_attached",
"cloudfront_distributions_using_waf",
"eks_control_plane_endpoint_access_restricted",
"eks_cluster_not_publicly_accessible",
"sagemaker_models_network_isolation_enabled",
"sagemaker_models_vpc_settings_configured",
"sagemaker_notebook_instance_vpc_settings_configured",

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -19,7 +19,7 @@
"ec2_ebs_public_snapshot",
"ec2_instance_profile_attached",
"ec2_instance_public_ip",
"eks_endpoints_not_publicly_accessible",
"eks_cluster_not_publicly_accessible",
"emr_cluster_master_nodes_no_public_ip",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
@@ -61,7 +61,7 @@
"ec2_ebs_public_snapshot",
"ec2_instance_profile_attached",
"ec2_instance_public_ip",
"eks_endpoints_not_publicly_accessible",
"eks_cluster_not_publicly_accessible",
"emr_cluster_master_nodes_no_public_ip",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
@@ -102,7 +102,7 @@
"Checks": [
"ec2_ebs_public_snapshot",
"ec2_instance_public_ip",
"eks_endpoints_not_publicly_accessible",
"eks_cluster_not_publicly_accessible",
"emr_cluster_master_nodes_no_public_ip",
"awslambda_function_not_publicly_accessible",
"awslambda_function_url_public",

View File

@@ -971,7 +971,7 @@
"Checks": [
"ec2_ebs_public_snapshot",
"ec2_instance_public_ip",
"eks_endpoints_not_publicly_accessible",
"eks_cluster_not_publicly_accessible",
"emr_cluster_master_nodes_no_public_ip",
"awslambda_function_url_public",
"rds_instance_no_public_access",

View File

@@ -3043,9 +3043,7 @@
{
"Id": "9.4",
"Description": "Ensure that Register with Entra ID is enabled on App Service",
"Checks": [
"app_client_certificates_on"
],
"Checks": [],
"Attributes": [
{
"Section": "9. AppService",
@@ -3066,7 +3064,7 @@
"Id": "9.5",
"Description": "Ensure That 'PHP version' is the Latest, If Used to Run the Web App",
"Checks": [
"app_register_with_identity"
"app_ensure_php_version_is_latest"
],
"Attributes": [
{
@@ -3088,7 +3086,7 @@
"Id": "9.6",
"Description": "Ensure that 'Python version' is the Latest Stable Version, if Used to Run the Web App",
"Checks": [
"app_ensure_php_version_is_latest"
"app_ensure_python_version_is_latest"
],
"Attributes": [
{
@@ -3110,7 +3108,7 @@
"Id": "9.7",
"Description": "Ensure that 'Java version' is the latest, if used to run the Web App",
"Checks": [
"app_ensure_python_version_is_latest"
"app_ensure_java_version_is_latest"
],
"Attributes": [
{
@@ -3132,7 +3130,7 @@
"Id": "9.8",
"Description": "Ensure that 'HTTP Version' is the Latest, if Used to Run the Web App",
"Checks": [
"app_ensure_java_version_is_latest"
"app_ensure_using_http20"
],
"Attributes": [
{
@@ -3154,7 +3152,7 @@
"Id": "9.9",
"Description": "Ensure FTP deployments are Disabled",
"Checks": [
"app_ensure_using_http20"
"app_ftp_deployment_disabled"
],
"Attributes": [
{
@@ -3175,9 +3173,7 @@
{
"Id": "9.10",
"Description": "Ensure Azure Key Vaults are Used to Store Secrets",
"Checks": [
"app_ftp_deployment_disabled"
],
"Checks": [],
"Attributes": [
{
"Section": "9. AppService",
@@ -3213,66 +3209,6 @@
"References": "https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-group-lock-resources:https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-subscription-governance#azure-resource-locks:https://docs.microsoft.com/en-us/azure/governance/blueprints/concepts/resource-locking:https://learn.microsoft.com/en-us/security/benchmark/azure/mcsb-asset-management#am-4-limit-access-to-asset-management"
}
]
},
{
"Id": "9.10",
"Description": "Ensure FTP deployments are Disabled",
"Checks": [],
"Attributes": [
{
"Section": "9. AppService",
"Profile": "Level 1",
"AssessmentStatus": "Automated",
"Description": "By default, Azure Functions, Web, and API Services can be deployed over FTP. If FTP is required for an essential deployment workflow, FTPS should be required for FTP login for all App Service Apps and Functions.",
"RationaleStatement": "Azure FTP deployment endpoints are public. An attacker listening to traffic on a wifi network used by a remote employee or a corporate network could see login traffic in clear-text which would then grant them full control of the code base of the app or service. This finding is more severe if User Credentials for deployment are set at the subscription level rather than using the default Application Credentials which are unique per App.",
"ImpactStatement": "Any deployment workflows that rely on FTP or FTPs rather than the WebDeploy or HTTPs endpoints may be affected.",
"RemediationProcedure": "**From Azure Portal** 1. Go to the Azure Portal 2. Select `App Services` 3. Click on an app 4. Select `Settings` and then `Configuration` 5. Under `General Settings`, for the `Platform Settings`, the `FTP state` should be set to `Disabled` or `FTPS Only` **From Azure CLI** For each out of compliance application, run the following choosing either 'disabled' or 'FtpsOnly' as appropriate: ``` az webapp config set --resource-group <resource group name> --name <app name> --ftps-state [disabled|FtpsOnly] ``` **From PowerShell** For each out of compliance application, run the following: ``` Set-AzWebApp -ResourceGroupName <resource group name> -Name <app name> -FtpsState <Disabled or FtpsOnly> ```",
"AuditProcedure": "**From Azure Portal** 1. Go to the Azure Portal 2. Select `App Services` 3. Click on an app 4. Select `Settings` and then `Configuration` 5. Under `General Settings`, for the `Platform Settings`, the `FTP state` should not be set to `All allowed` **From Azure CLI** List webapps to obtain the ids. ``` az webapp list ``` List the publish profiles to obtain the username, password and ftp server url. ``` az webapp deployment list-publishing-profiles --ids <ids> { publishUrl: <URL_FOR_WEB_APP>, userName: <USER_NAME>, userPWD: <USER_PASSWORD>, } ``` **From PowerShell** List all Web Apps: ``` Get-AzWebApp ``` For each app: ``` Get-AzWebApp -ResourceGroupName <resource group name> -Name <app name> | Select-Object -ExpandProperty SiteConfig ``` In the output, look for the value of **FtpsState**. If its value is **AllAllowed** the setting is out of compliance. Any other value is considered in compliance with this check.",
"AdditionalInformation": "",
"DefaultValue": "[Azure Web Service Deploy via FTP](https://docs.microsoft.com/en-us/azure/app-service/deploy-ftp):[Azure Web Service Deployment](https://docs.microsoft.com/en-us/azure/app-service/overview-security):https://docs.microsoft.com/en-us/security/benchmark/azure/security-controls-v3-data-protection#dp-4-encrypt-sensitive-information-in-transit:https://docs.microsoft.com/en-us/security/benchmark/azure/security-controls-v3-posture-vulnerability-management#pv-7-rapidly-and-automatically-remediate-software-vulnerabilities",
"References": "TA0008, T1570, M1031"
}
]
},
{
"Id": "9.11",
"Description": "Ensure Azure Key Vaults are Used to Store Secrets",
"Checks": [],
"Attributes": [
{
"Section": "9. AppService",
"Profile": "Level 2",
"AssessmentStatus": "Manual",
"Description": "Azure Key Vault will store multiple types of sensitive information such as encryption keys, certificate thumbprints, and Managed Identity Credentials. Access to these 'Secrets' can be controlled through granular permissions.",
"RationaleStatement": "The credentials given to an application have permissions to create, delete, or modify data stored within the systems they access. If these credentials are stored within the application itself, anyone with access to the application or a copy of the code has access to them. Storing within Azure Key Vault as secrets increases security by controlling access. This also allows for updates of the credentials without redeploying the entire application.",
"ImpactStatement": "Integrating references to secrets within the key vault are required to be specifically integrated within the application code. This will require additional configuration to be made during the writing of an application, or refactoring of an already written one. There are also additional costs that are charged per 10000 requests to the Key Vault.",
"RemediationProcedure": "Remediation has 2 steps 1. Setup the Key Vault 2. Setup the App Service to use the Key Vault **Step 1: Set up the Key Vault** **From Azure CLI** ``` az keyvault create --name <name> --resource-group <myResourceGroup> --location myLocation ``` **From Powershell** ``` New-AzKeyvault -name <name> -ResourceGroupName <myResourceGroup> -Location <myLocation> ``` **Step 2: Set up the App Service to use the Key Vault** Sample JSON Template for App Service Configuration: ``` { //... resources: [ { type: Microsoft.Storage/storageAccounts, name: [variables('storageAccountName')], //... }, { type: Microsoft.Insights/components, name: [variables('appInsightsName')], //... }, { type: Microsoft.Web/sites, name: [variables('functionAppName')], identity: { type: SystemAssigned }, //... resources: [ { type: config, name: appsettings, //... dependsOn: [ [resourceId('Microsoft.Web/sites', variables('functionAppName'))], [resourceId('Microsoft.KeyVault/vaults/', variables('keyVaultName'))], [resourceId('Microsoft.KeyVault/vaults/secrets', variables('keyVaultName'), variables('storageConnectionStringName'))], [resourceId('Microsoft.KeyVault/vaults/secrets', variables('keyVaultName'), variables('appInsightsKeyName'))] ], properties: { AzureWebJobsStorage: [concat('@Microsoft.KeyVault(SecretUri=', reference(variables('storageConnectionStringResourceId')).secretUriWithVersion, ')')], WEBSITE_CONTENTAZUREFILECONNECTIONSTRING: [concat('@Microsoft.KeyVault(SecretUri=', reference(variables('storageConnectionStringResourceId')).secretUriWithVersion, ')')], APPINSIGHTS_INSTRUMENTATIONKEY: [concat('@Microsoft.KeyVault(SecretUri=', reference(variables('appInsightsKeyResourceId')).secretUriWithVersion, ')')], WEBSITE_ENABLE_SYNC_UPDATE_SITE: true //... } }, { type: sourcecontrols, name: web, //... dependsOn: [ [resourceId('Microsoft.Web/sites', variables('functionAppName'))], [resourceId('Microsoft.Web/sites/config', variables('functionAppName'), 'appsettings')] ], } ] }, { type: Microsoft.KeyVault/vaults, name: [variables('keyVaultName')], //... dependsOn: [ [resourceId('Microsoft.Web/sites', variables('functionAppName'))] ], properties: { //... accessPolicies: [ { tenantId: [reference(concat('Microsoft.Web/sites/', variables('functionAppName'), '/providers/Microsoft.ManagedIdentity/Identities/default'), '2015-08-31-PREVIEW').tenantId], objectId: [reference(concat('Microsoft.Web/sites/', variables('functionAppName'), '/providers/Microsoft.ManagedIdentity/Identities/default'), '2015-08-31-PREVIEW').principalId], permissions: { secrets: [ get ] } } ] }, resources: [ { type: secrets, name: [variables('storageConnectionStringName')], //... dependsOn: [ [resourceId('Microsoft.KeyVault/vaults/', variables('keyVaultName'))], [resourceId('Microsoft.Storage/storageAccounts', variables('storageAccountName'))] ], properties: { value: [concat('DefaultEndpointsProtocol=https;AccountName=', variables('storageAccountName'), ';AccountKey=', listKeys(variables('storageAccountResourceId'),'2015-05-01-preview').key1)] } }, { type: secrets, name: [variables('appInsightsKeyName')], //... dependsOn: [ [resourceId('Microsoft.KeyVault/vaults/', variables('keyVaultName'))], [resourceId('Microsoft.Insights/components', variables('appInsightsName'))] ], properties: { value: [reference(resourceId('microsoft.insights/components/', variables('appInsightsName')), '2015-05-01').InstrumentationKey] } } ] } ] } ```",
"AuditProcedure": "**From Azure Portal** 1. Login to Azure Portal 2. In the expandable menu on the left go to `Key Vaults` 3. View the Key Vaults listed. **From Azure CLI** To list key vaults within a subscription run the following command: ``` Get-AzKeyVault ``` To list the secrets within these key vaults run the following command: ``` Get-AzKeyVaultSecret [-VaultName] <vault name> ``` **From Powershell** To list key vaults within a subscription run the following command: ``` Get-AzKeyVault ``` To list all secrets in a key vault run the following command: ``` Get-AzKeyVaultSecret -VaultName '<vaultName' ```",
"AdditionalInformation": "",
"DefaultValue": "https://docs.microsoft.com/en-us/azure/app-service/app-service-key-vault-references:https://docs.microsoft.com/en-us/security/benchmark/azure/security-controls-v3-identity-management#im-2-manage-application-identities-securely-and-automatically:https://docs.microsoft.com/en-us/cli/azure/keyvault?view=azure-cli-latest:https://docs.microsoft.com/en-us/cli/azure/keyvault?view=azure-cli-latest",
"References": "TA0006, T1552, M1041"
}
]
},
{
"Id": "10.1",
"Description": "Ensure that Resource Locks are set for Mission-Critical Azure Resources",
"Checks": [],
"Attributes": [
{
"Section": "10. Miscellaneous",
"Profile": "Level 2",
"AssessmentStatus": "Manual",
"Description": "Resource Manager Locks provide a way for administrators to lock down Azure resources to prevent deletion of, or modifications to, a resource. These locks sit outside of the Role Based Access Controls (RBAC) hierarchy and, when applied, will place restrictions on the resource for all users. These locks are very useful when there is an important resource in a subscription that users should not be able to delete or change. Locks can help prevent accidental and malicious changes or deletion.",
"RationaleStatement": "As an administrator, it may be necessary to lock a subscription, resource group, or resource to prevent other users in the organization from accidentally deleting or modifying critical resources. The lock level can be set to to `CanNotDelete` or `ReadOnly` to achieve this purpose. - `CanNotDelete` means authorized users can still read and modify a resource, but they cannot delete the resource. - `ReadOnly` means authorized users can read a resource, but they cannot delete or update the resource. Applying this lock is similar to restricting all authorized users to the permissions granted by the Reader role.",
"ImpactStatement": "There can be unintended outcomes of locking a resource. Applying a lock to a parent service will cause it to be inherited by all resources within. Conversely, applying a lock to a resource may not apply to connected storage, leaving it unlocked. Please see the documentation for further information.",
"RemediationProcedure": "**From Azure Portal** 1. Navigate to the specific Azure Resource or Resource Group 2. For each mission critical resource, click on `Locks` 3. Click `Add` 4. Give the lock a name and a description, then select the type, `Read-only` or `Delete` as appropriate 5. Click OK **From Azure CLI** To lock a resource, provide the name of the resource, its resource type, and its resource group name. ``` az lock create --name <LockName> --lock-type <CanNotDelete/Read-only> --resource-group <resourceGroupName> --resource-name <resourceName> --resource-type <resourceType> ``` **From Powershell** ``` Get-AzResourceLock -ResourceName <Resource Name> -ResourceType <Resource Type> -ResourceGroupName <Resource Group Name> -Locktype <CanNotDelete/Read-only> ```",
"AuditProcedure": "**From Azure Portal** 1. Navigate to the specific Azure Resource or Resource Group 2. Click on `Locks` 3. Ensure the lock is defined with name and description, with type `Read-only` or `Delete` as appropriate. **From Azure CLI** Review the list of all locks set currently: ``` az lock list --resource-group <resourcegroupname> --resource-name <resourcename> --namespace <Namespace> --resource-type <type> --parent ``` **From Powershell** Run the following command to list all resources. ``` Get-AzResource ``` For each resource, run the following command to check for Resource Locks. ``` Get-AzResourceLock -ResourceName <Resource Name> -ResourceType <Resource Type> -ResourceGroupName <Resource Group Name> ``` Review the output of the `Properties` setting. Compliant settings will have the `CanNotDelete` or `ReadOnly` value.",
"AdditionalInformation": "",
"DefaultValue": "https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-group-lock-resources:https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-subscription-governance#azure-resource-locks:https://docs.microsoft.com/en-us/azure/governance/blueprints/concepts/resource-locking:https://docs.microsoft.com/en-us/security/benchmark/azure/security-controls-v3-asset-management#am-4-limit-access-to-asset-management",
"References": ""
}
]
}
]
}

View File

@@ -19,8 +19,11 @@ Mutelist:
- "StackSet-AWSControlTowerSecurityResources-*"
- "StackSet-AWSControlTowerLoggingResources-*"
- "StackSet-AWSControlTowerExecutionRole-*"
- "AWSControlTowerBP-BASELINE-CLOUDTRAIL-MASTER"
- "AWSControlTowerBP-BASELINE-CONFIG-MASTER"
- "AWSControlTowerBP-BASELINE-CLOUDTRAIL-MASTER*"
- "AWSControlTowerBP-BASELINE-CONFIG-MASTER*"
- "StackSet-AWSControlTower*"
- "CLOUDTRAIL-ENABLED-ON-SHARED-ACCOUNTS-*"
- "AFT-Backend*"
"cloudtrail_*":
Regions:
- "*"

View File

@@ -1,16 +1,18 @@
import os
import pathlib
from datetime import datetime, timezone
from enum import Enum
from os import getcwd
import requests
import yaml
from packaging import version
from prowler.lib.logger import logger
timestamp = datetime.today()
timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
prowler_version = "4.3.1"
prowler_version = "4.6.0"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
square_logo_img = "https://prowler.com/wp-content/uploads/logo-html.png"
aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"
@@ -20,8 +22,13 @@ gcp_logo = "https://user-images.githubusercontent.com/38561120/235928332-eb4accd
orange_color = "\033[38;5;208m"
banner_color = "\033[1;92m"
finding_statuses = ["PASS", "FAIL", "MANUAL"]
valid_severities = ["critical", "high", "medium", "low", "informational"]
class Provider(str, Enum):
AWS = "aws"
GCP = "gcp"
AZURE = "azure"
KUBERNETES = "kubernetes"
# Compliance
actual_directory = pathlib.Path(os.path.dirname(os.path.realpath(__file__)))
@@ -29,7 +36,7 @@ actual_directory = pathlib.Path(os.path.dirname(os.path.realpath(__file__)))
def get_available_compliance_frameworks(provider=None):
available_compliance_frameworks = []
providers = ["aws", "gcp", "azure", "kubernetes"]
providers = [p.value for p in Provider]
if provider:
providers = [provider]
for provider in providers:
@@ -86,7 +93,7 @@ def check_current_version():
"https://api.github.com/repos/prowler-cloud/prowler/tags", timeout=1
)
latest_version = release_response.json()[0]["name"]
if latest_version != prowler_version:
if version.parse(latest_version) > version.parse(prowler_version):
return f"{prowler_version_string} (latest is {latest_version}, upgrade for the latest features)"
else:
return (
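
The switch from a plain `!=` to `packaging.version` matters for two reasons: string comparison misorders multi-digit components, and inequality alone would flag a locally newer build as outdated. A quick demonstration:

```python
from packaging import version

installed, latest = "4.9.1", "4.10.0"

# Lexicographic comparison gets this wrong: "9" > "1" character-wise.
print(installed > latest)  # True, i.e. "4.9.1" looks newer than "4.10.0"

# packaging.version compares release segments numerically.
print(version.parse(latest) > version.parse(installed))  # True (correct)
```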

View File

@@ -41,8 +41,29 @@ aws:
[
"amazon-elb"
]
# aws.ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports
ec2_sg_high_risk_ports:
[
25,
110,
135,
143,
445,
3000,
4333,
5000,
5500,
8080,
8088,
]
# AWS ECS Configuration
# aws.ecs_service_fargate_latest_platform_version
fargate_linux_latest_version: "1.4.0"
fargate_windows_latest_version: "1.0.0"
# AWS VPC Configuration (vpc_endpoint_connections_trust_boundaries, vpc_endpoint_services_allowed_principals_trust_boundaries)
# AWS SSM Configuration (aws.ssm_documents_set_as_public)
# Single account environment: No action required. The AWS account number will be automatically added by the checks.
# Multi account environment: Any additional trusted account numbers should be added to the list, e.g.
# trusted_account_ids : ["123456789012", "098765432109", "678901234567"]
@@ -51,6 +72,31 @@ aws:
# AWS Cloudwatch Configuration
# aws.cloudwatch_log_group_retention_policy_specific_days_enabled --> by default is 365 days
log_group_retention_days: 365
# aws.cloudwatch_log_group_no_critical_pii_in_logs --> see all available entities in https://microsoft.github.io/presidio/supported_entities/
critical_pii_entities : [
"CREDIT_CARD", # Credit card numbers are highly sensitive financial information.
"CRYPTO", # Crypto wallet numbers (e.g., Bitcoin addresses) can give access to cryptocurrency.
"IBAN_CODE", # International Bank Account Numbers are critical financial information.
"US_BANK_NUMBER", # US bank account numbers are sensitive and should be protected.
"US_SSN", # US Social Security Numbers are critical PII used for identity verification.
"US_PASSPORT", # US passport numbers can be used for identity theft.
"US_ITIN", # US Individual Taxpayer Identification Numbers are sensitive personal identifiers.
#"UK_NHS", # UK NHS numbers can be used to access medical records and other private information.
#"ES_NIF", # Spanish NIF (Personal tax ID) is critical for identification and tax purposes.
#"ES_NIE", # Spanish NIE (Foreigners ID card) is a critical identifier for foreign residents.
#"IT_FISCAL_CODE", # Italian personal identification code is sensitive PII for tax and legal purposes.
#"IT_PASSPORT", # Italian passport numbers are critical PII.
#"IT_IDENTITY_CARD", # Italian identity card numbers are critical for personal identification.
#"PL_PESEL", # Polish PESEL numbers are sensitive personal identifiers.
#"SG_NRIC_FIN", # Singapore National Registration Identification Card is critical PII.
#"AU_ABN", # Australian Business Numbers are critical for business identification.
#"AU_TFN", # Australian Tax File Numbers are sensitive and used for taxation purposes.
#"AU_MEDICARE", # Australian Medicare numbers are sensitive medical identifiers.
#"IN_PAN", # Indian Permanent Account Numbers are critical for tax purposes and identity.
#"IN_AADHAAR", # Indian Aadhaar numbers are highly sensitive and serve as a universal identity number.
#"FI_PERSONAL_IDENTITY_CODE" # Finnish Personal Identity Code is sensitive PII for personal identification.
]
pii_language: "en" # Language for recognizing PII entities
# AWS AppStream Session Configuration
# aws.appstream_fleet_session_idle_disconnect_timeout
@@ -78,7 +124,9 @@ aws:
"nodejs10.x",
"nodejs12.x",
"nodejs14.x",
"nodejs16.x",
"dotnet5.0",
"dotnet7",
"dotnetcore1.0",
"dotnetcore2.0",
"dotnetcore2.1",
@@ -86,6 +134,8 @@ aws:
"ruby2.5",
"ruby2.7",
]
# aws.awslambda_function_vpc_is_in_multi_azs
lambda_min_azs: 2
# AWS Organizations
# aws.organizations_scp_check_deny_regions
@@ -110,7 +160,7 @@ aws:
# AWS CloudTrail Configuration
# aws.cloudtrail_threat_detection_privilege_escalation
threat_detection_privilege_escalation_threshold: 0.1 # Percentage of actions found to decide if it is a privilege_escalation attack event, by default is 0.1 (10%)
threat_detection_privilege_escalation_threshold: 0.2 # Percentage of actions found to decide if it is a privilege_escalation attack event, by default is 0.2 (20%)
threat_detection_privilege_escalation_minutes: 1440 # Past minutes to search from now for privilege_escalation attacks, by default is 1440 minutes (24 hours)
threat_detection_privilege_escalation_actions:
[
@@ -167,7 +217,7 @@ aws:
"UpdateLoginProfile",
]
# aws.cloudtrail_threat_detection_enumeration
threat_detection_enumeration_threshold: 0.1 # Percentage of actions found to decide if it is an enumeration attack event, by default is 0.1 (10%)
threat_detection_enumeration_threshold: 0.3 # Percentage of actions found to decide if it is an enumeration attack event, by default is 0.3 (30%)
threat_detection_enumeration_minutes: 1440 # Past minutes to search from now for enumeration attacks, by default is 1440 minutes (24 hours)
threat_detection_enumeration_actions:
[
@@ -262,6 +312,24 @@ aws:
"LookupEvents",
"Search",
]
# aws.cloudtrail_threat_detection_llm_jacking
threat_detection_llm_jacking_threshold: 0.4 # Percentage of actions found to decide if it is an LLM Jacking attack event, by default is 0.4 (40%)
threat_detection_llm_jacking_minutes: 1440 # Past minutes to search from now for LLM Jacking attacks, by default is 1440 minutes (24 hours)
threat_detection_llm_jacking_actions:
[
"PutUseCaseForModelAccess", # Submits a use case for model access, providing justification (Write).
"PutFoundationModelEntitlement", # Grants entitlement for accessing a foundation model (Write).
"PutModelInvocationLoggingConfiguration", # Configures logging for model invocations (Write).
"CreateFoundationModelAgreement", # Creates a new agreement to use a foundation model (Write).
"InvokeModel", # Invokes a specified Bedrock model for inference using provided prompt and parameters (Read).
"InvokeModelWithResponseStream", # Invokes a Bedrock model for inference with real-time token streaming (Read).
"GetUseCaseForModelAccess", # Retrieves an existing use case for model access (Read).
"GetModelInvocationLoggingConfiguration", # Fetches the logging configuration for model invocations (Read).
"GetFoundationModelAvailability", # Checks the availability of a foundation model for use (Read).
"ListFoundationModelAgreementOffers", # Lists available agreement offers for accessing foundation models (List).
"ListFoundationModels", # Lists the available foundation models in Bedrock (List).
"ListProvisionedModelThroughputs", # Lists the provisioned throughput for previously created models (List).
]
# AWS RDS Configuration
# aws.rds_instance_backup_enabled
@@ -271,6 +339,11 @@ aws:
# AWS ACM Configuration
# aws.acm_certificates_expiration_check
days_to_expire_threshold: 7
# aws.acm_certificates_rsa_key_length
insecure_key_algorithms:
[
"RSA-1024",
]
# AWS EKS Configuration
# aws.eks_control_plane_logging_all_types_enabled
@@ -284,6 +357,42 @@ aws:
"scheduler",
]
# aws.eks_cluster_uses_a_supported_version
# EKS clusters must be version 1.28 or higher
eks_cluster_oldest_version_supported: "1.28"
# AWS CodeBuild Configuration
# aws.codebuild_project_no_secrets_in_variables
# CodeBuild sensitive variables that are excluded from the check
excluded_sensitive_environment_variables:
[
]
# AWS ELB Configuration
# aws.elb_is_in_multiple_az
# Minimum number of Availability Zones that a CLB must be in
elb_min_azs: 2
# AWS ELBv2 Configuration
# aws.elbv2_is_in_multiple_az
# Minimum number of Availability Zones that an ELBv2 must be in
elbv2_min_azs: 2
# AWS Secrets Configuration
# Patterns to ignore in the secrets checks
secrets_ignore_patterns: []
# AWS Secrets Manager Configuration
# aws.secretsmanager_secret_unused
# Maximum number of days a secret can be unused
max_days_secret_unused: 90
# aws.secretsmanager_secret_rotated_periodically
# Maximum number of days a secret can go without being rotated
max_days_secret_unrotated: 90
# Azure Configuration
azure:
# Azure Network Configuration
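
The threat-detection thresholds above (privilege escalation, enumeration, LLM jacking) each express a ratio: the share of watched CloudTrail actions observed within the lookback window. A hedged sketch of that decision logic for the LLM Jacking case follows; the exact aggregation Prowler performs may differ.

```python
# The 12 watched actions from threat_detection_llm_jacking_actions above.
llm_jacking_actions = {
    "PutUseCaseForModelAccess", "PutFoundationModelEntitlement",
    "PutModelInvocationLoggingConfiguration", "CreateFoundationModelAgreement",
    "InvokeModel", "InvokeModelWithResponseStream",
    "GetUseCaseForModelAccess", "GetModelInvocationLoggingConfiguration",
    "GetFoundationModelAvailability", "ListFoundationModelAgreementOffers",
    "ListFoundationModels", "ListProvisionedModelThroughputs",
}
threshold = 0.4  # threat_detection_llm_jacking_threshold

def looks_like_llm_jacking(seen_actions: set) -> bool:
    """Flag when the share of watched actions seen in the window meets the threshold."""
    ratio = len(seen_actions & llm_jacking_actions) / len(llm_jacking_actions)
    return ratio >= threshold

# 5 of 12 watched actions observed in the last 24 hours -> ~0.42 >= 0.4 -> True
print(looks_like_llm_jacking({
    "InvokeModel", "InvokeModelWithResponseStream", "ListFoundationModels",
    "GetFoundationModelAvailability", "PutUseCaseForModelAccess",
}))
```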

View File

@@ -0,0 +1,56 @@
class ProwlerException(Exception):
"""Base exception for all Prowler SDK errors."""
ERROR_CODES = {
(1901, "UnexpectedError"): {
"message": "Unexpected error occurred.",
"remediation": "Please review the error message and try again.",
}
}
def __init__(
self, code, source=None, file=None, original_exception=None, error_info=None
):
"""
Initialize the ProwlerException class.
Args:
code (int): The error code.
source (str): The source name. This can be the provider name, module name, service name, etc.
file (str): The file name.
original_exception (Exception): The original exception.
error_info (dict): The error information.
Example:
A ProwlerException is raised with the following parameters and format:
>>> original_exception = Exception("Error occurred.")
ProwlerException(1901, "AWS", "file.txt", original_exception)
>>> [1901] Unexpected error occurred. - Exception: Error occurred.
"""
self.code = code
self.source = source
self.file = file
if error_info is None:
error_info = self.ERROR_CODES.get((code, self.__class__.__name__))
self.message = error_info.get("message")
self.remediation = error_info.get("remediation")
self.original_exception = original_exception
# Format -> [code] message - original_exception
if original_exception is None:
super().__init__(f"[{self.code}] {self.message}")
else:
super().__init__(
f"[{self.code}] {self.message} - {self.original_exception}"
)
def __str__(self):
"""Overriding the __str__ method"""
default_str = f"{self.__class__.__name__}[{self.code}]: {self.message}"
if self.original_exception:
default_str += f" - {self.original_exception}"
return default_str
class UnexpectedError(ProwlerException):
def __init__(self, source, file, original_exception=None):
super().__init__(1901, source, file, original_exception)
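
Following this pattern, more specific errors subclass `ProwlerException` and register their own `ERROR_CODES` entry; a hedged usage sketch in which the `S3TestConnectionError` name and code 1902 are invented for illustration.

```python
class S3TestConnectionError(ProwlerException):
    """Illustrative subclass; the name and code 1902 are hypothetical."""

    ERROR_CODES = {
        (1902, "S3TestConnectionError"): {
            "message": "Unable to connect to the S3 bucket.",
            "remediation": "Verify the bucket name and your credentials.",
        }
    }

    def __init__(self, source, file=None, original_exception=None):
        super().__init__(1902, source, file, original_exception)


try:
    raise S3TestConnectionError("AWS", original_exception=Exception("timeout"))
except ProwlerException as error:
    print(error)  # S3TestConnectionError[1902]: Unable to connect ... - timeout
```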

View File

@@ -1,4 +1,3 @@
import functools
import importlib
import json
import os
@@ -6,7 +5,6 @@ import re
import shutil
import sys
import traceback
from pkgutil import walk_packages
from types import ModuleType
from typing import Any
@@ -15,68 +13,15 @@ from colorama import Fore, Style
import prowler
from prowler.config.config import orange_color
from prowler.lib.check.compliance_models import load_compliance_framework
from prowler.lib.check.custom_checks_metadata import update_check_metadata
from prowler.lib.check.models import Check, load_check_metadata
from prowler.lib.check.models import Check
from prowler.lib.check.utils import recover_checks_from_provider
from prowler.lib.logger import logger
from prowler.lib.outputs.outputs import report
from prowler.lib.utils.utils import open_file, parse_json_file, print_boxes
from prowler.providers.common.models import Audit_Metadata
# Load all checks metadata
def bulk_load_checks_metadata(provider: str) -> dict:
bulk_check_metadata = {}
checks = recover_checks_from_provider(provider)
# Build list of check's metadata files
for check_info in checks:
# Build check path name
check_name = check_info[0]
check_path = check_info[1]
# Ignore fixer files
if check_name.endswith("_fixer"):
continue
# Append metadata file extension
metadata_file = f"{check_path}/{check_name}.metadata.json"
# Load metadata
check_metadata = load_check_metadata(metadata_file)
bulk_check_metadata[check_metadata.CheckID] = check_metadata
return bulk_check_metadata
# Bulk load all compliance frameworks specification
def bulk_load_compliance_frameworks(provider: str) -> dict:
"""Bulk load all compliance frameworks specification into a dict"""
try:
bulk_compliance_frameworks = {}
available_compliance_framework_modules = list_compliance_modules()
for compliance_framework in available_compliance_framework_modules:
if provider in compliance_framework.name:
compliance_specification_dir_path = (
f"{compliance_framework.module_finder.path}/{provider}"
)
# for compliance_framework in available_compliance_framework_modules:
for filename in os.listdir(compliance_specification_dir_path):
file_path = os.path.join(
compliance_specification_dir_path, filename
)
# Check if it is a file and its size is greater than 0
if os.path.isfile(file_path) and os.stat(file_path).st_size > 0:
# Open Compliance file in JSON
# cis_v1.4_aws.json --> cis_v1.4_aws
compliance_framework_name = filename.split(".json")[0]
# Store the compliance info
bulk_compliance_frameworks[compliance_framework_name] = (
load_compliance_framework(file_path)
)
except Exception as e:
logger.error(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")
return bulk_compliance_frameworks
# Exclude checks to run
def exclude_checks_to_run(checks_to_execute: set, excluded_checks: list) -> set:
for check in excluded_checks:
@@ -328,7 +273,7 @@ def print_checks(
for check in check_list:
try:
print(
f"[{bulk_checks_metadata[check].CheckID}] {bulk_checks_metadata[check].CheckTitle} - {Fore.MAGENTA}{bulk_checks_metadata[check].ServiceName} {Fore.YELLOW}[{bulk_checks_metadata[check].Severity}]{Style.RESET_ALL}"
f"[{bulk_checks_metadata[check].CheckID}] {bulk_checks_metadata[check].CheckTitle} - {Fore.MAGENTA}{bulk_checks_metadata[check].ServiceName} {Fore.YELLOW}[{bulk_checks_metadata[check].Severity.value}]{Style.RESET_ALL}"
)
except KeyError as error:
logger.error(
@@ -347,126 +292,12 @@ def print_checks(
print(message)
# Parse checks from compliance frameworks specification
def parse_checks_from_compliance_framework(
compliance_frameworks: list, bulk_compliance_frameworks: dict
) -> list:
"""parse_checks_from_compliance_framework returns a set of checks from the given compliance_frameworks"""
checks_to_execute = set()
try:
for framework in compliance_frameworks:
# compliance_framework_json["Requirements"][*]["Checks"]
compliance_framework_checks_list = [
requirement.Checks
for requirement in bulk_compliance_frameworks[framework].Requirements
]
# Reduce nested list into a list
# Pythonic functional magic
compliance_framework_checks = functools.reduce(
lambda x, y: x + y, compliance_framework_checks_list
)
# Then union this list of checks with the initial one
checks_to_execute = checks_to_execute.union(compliance_framework_checks)
except Exception as e:
logger.error(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")
return checks_to_execute
def recover_checks_from_provider(
provider: str, service: str = None, include_fixers: bool = False
) -> list[tuple]:
"""
Recover all checks from the selected provider and service
Returns a list of tuples with the following format (check_name, check_path)
"""
try:
checks = []
modules = list_modules(provider, service)
for module_name in modules:
# Format: "prowler.providers.{provider}.services.{service}.{check_name}.{check_name}"
check_module_name = module_name.name
# We need to exclude common shared libraries in services
if (
check_module_name.count(".") == 6
and "lib" not in check_module_name
and (not check_module_name.endswith("_fixer") or include_fixers)
):
check_path = module_name.module_finder.path
# Check name is the last part of the check_module_name
check_name = check_module_name.split(".")[-1]
check_info = (check_name, check_path)
checks.append(check_info)
except ModuleNotFoundError:
logger.critical(f"Service {service} was not found for the {provider} provider.")
sys.exit(1)
except Exception as e:
logger.critical(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}]: {e}")
sys.exit(1)
else:
return checks
def list_compliance_modules():
"""
list_compliance_modules returns the available compliance framework modules and their paths
"""
# This module path requires the full path including "prowler."
module_path = "prowler.compliance"
return walk_packages(
importlib.import_module(module_path).__path__,
importlib.import_module(module_path).__name__ + ".",
)
# List all available modules in the selected provider and service
def list_modules(provider: str, service: str):
# This module path requires the full path including "prowler."
module_path = f"prowler.providers.{provider}.services"
if service:
module_path += f".{service}"
return walk_packages(
importlib.import_module(module_path).__path__,
importlib.import_module(module_path).__name__ + ".",
)
# Import an input check using its path
def import_check(check_path: str) -> ModuleType:
lib = importlib.import_module(f"{check_path}")
return lib
def run_check(check: Check, verbose: bool = False, only_logs: bool = False) -> list:
"""
Run the check and return the findings
Args:
check (Check): check class
output_options (Any): output options
Returns:
list: list of findings
"""
findings = []
if verbose:
print(
f"\nCheck ID: {check.CheckID} - {Fore.MAGENTA}{check.ServiceName}{Fore.YELLOW} [{check.Severity}]{Style.RESET_ALL}"
)
logger.debug(f"Executing check: {check.CheckID}")
try:
findings = check.execute()
except Exception as error:
if not only_logs:
print(
f"Something went wrong in {check.CheckID}, please use --log-level ERROR"
)
logger.error(
f"{check.CheckID} -- {error.__class__.__name__}[{traceback.extract_tb(error.__traceback__)[-1].lineno}]: {error}"
)
finally:
return findings
def run_fixer(check_findings: list) -> int:
"""
Run the fixer for the check if it exists and there are any FAIL findings
@@ -548,6 +379,7 @@ def execute_checks(
global_provider: Any,
custom_checks_metadata: Any,
config_file: str,
output_options: Any,
) -> list:
# List to store all the check's findings
all_findings = []
@@ -583,22 +415,51 @@ def execute_checks(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
# Set verbose flag
verbose = False
if hasattr(output_options, "verbose"):
verbose = output_options.verbose
elif hasattr(output_options, "fixer"):
verbose = output_options.fixer
# Execution with the --only-logs flag
if global_provider.output_options.only_logs:
if output_options.only_logs:
for check_name in checks_to_execute:
# Recover service from check name
service = check_name.split("_")[0]
try:
try:
# Import check module
check_module_path = f"prowler.providers.{global_provider.type}.services.{service}.{check_name}.{check_name}"
lib = import_check(check_module_path)
# Recover functions from check
check_to_execute = getattr(lib, check_name)
check = check_to_execute()
except ModuleNotFoundError:
logger.error(
f"Check '{check_name}' was not found for the {global_provider.type.upper()} provider"
)
continue
if verbose:
print(
f"\nCheck ID: {check.CheckID} - {Fore.MAGENTA}{check.ServiceName}{Fore.YELLOW} [{check.Severity.value}]{Style.RESET_ALL}"
)
check_findings = execute(
service,
check_name,
check,
global_provider,
services_executed,
checks_executed,
custom_checks_metadata,
output_options,
)
report(check_findings, global_provider, output_options)
all_findings.extend(check_findings)
# Update Audit Status
services_executed.add(service)
checks_executed.add(check_name)
global_provider.audit_metadata = update_audit_metadata(
global_provider.audit_metadata, services_executed, checks_executed
)
# If the check does not exist in the provider or is from another provider
except ModuleNotFoundError:
logger.error(
@@ -647,15 +508,39 @@ def execute_checks(
f"-> Scanning {orange_color}{service}{Style.RESET_ALL} service"
)
try:
try:
# Import check module
check_module_path = f"prowler.providers.{global_provider.type}.services.{service}.{check_name}.{check_name}"
lib = import_check(check_module_path)
# Recover functions from check
check_to_execute = getattr(lib, check_name)
check = check_to_execute()
except ModuleNotFoundError:
logger.error(
f"Check '{check_name}' was not found for the {global_provider.type.upper()} provider"
)
continue
if verbose:
print(
f"\nCheck ID: {check.CheckID} - {Fore.MAGENTA}{check.ServiceName}{Fore.YELLOW} [{check.Severity.value}]{Style.RESET_ALL}"
)
check_findings = execute(
service,
check_name,
check,
global_provider,
custom_checks_metadata,
output_options,
)
report(check_findings, global_provider, output_options)
all_findings.extend(check_findings)
services_executed.add(service)
checks_executed.add(check_name)
global_provider.audit_metadata = update_audit_metadata(
global_provider.audit_metadata,
services_executed,
checks_executed,
custom_checks_metadata,
)
all_findings.extend(check_findings)
# If the check does not exist in the provider or is from another provider
except ModuleNotFoundError:
@@ -670,60 +555,79 @@ def execute_checks(
)
bar()
bar.title = f"-> {Fore.GREEN}Scan completed!{Style.RESET_ALL}"
# Custom report interface
if os.environ.get("PROWLER_REPORT_LIB_PATH"):
try:
logger.info("Using custom report interface ...")
lib = os.environ["PROWLER_REPORT_LIB_PATH"]
outputs_module = importlib.import_module(lib)
custom_report_interface = getattr(outputs_module, "report")
# TODO: review this call and see if we can remove the global_provider.output_options since it is contained in the global_provider
custom_report_interface(check_findings, output_options, global_provider)
except Exception:
sys.exit(1)
return all_findings
def execute(
service: str,
check_name: str,
check: Check,
global_provider: Any,
services_executed: set,
checks_executed: set,
custom_checks_metadata: Any,
output_options: Any = None,
):
try:
# Import check module
check_module_path = f"prowler.providers.{global_provider.type}.services.{service}.{check_name}.{check_name}"
lib = import_check(check_module_path)
# Recover functions from check
check_to_execute = getattr(lib, check_name)
check_class = check_to_execute()
"""
Execute the check and report the findings
Args:
service (str): service name
check_name (str): check name
global_provider (Any): provider object
custom_checks_metadata (Any): custom checks metadata
output_options (Any): output options, depending on the provider
Returns:
list: list of findings
"""
try:
# Update check metadata to reflect that in the outputs
if custom_checks_metadata and custom_checks_metadata["Checks"].get(
check_class.CheckID
check.CheckID
):
check_class = update_check_metadata(
check_class, custom_checks_metadata["Checks"][check_class.CheckID]
check = update_check_metadata(
check, custom_checks_metadata["Checks"][check.CheckID]
)
# Run check
verbose = (
global_provider.output_options.verbose
or global_provider.output_options.fixer
)
check_findings = run_check(
check_class, verbose, global_provider.output_options.only_logs
)
only_logs = False
if hasattr(output_options, "only_logs"):
only_logs = output_options.only_logs
# Execute the check
check_findings = []
logger.debug(f"Executing check: {check.CheckID}")
try:
check_findings = check.execute()
except Exception as error:
if not only_logs:
print(
f"Something went wrong in {check.CheckID}, please use --log-level ERROR"
)
logger.error(
f"{check.CheckID} -- {error.__class__.__name__}[{traceback.extract_tb(error.__traceback__)[-1].lineno}]: {error}"
)
# Exclude findings per status
if global_provider.output_options.status:
if hasattr(output_options, "status") and output_options.status:
check_findings = [
finding
for finding in check_findings
if finding.status in global_provider.output_options.status
if finding.status in output_options.status
]
# Update Audit Status
services_executed.add(service)
checks_executed.add(check_name)
global_provider.audit_metadata = update_audit_metadata(
global_provider.audit_metadata, services_executed, checks_executed
)
# Mutelist findings
# Before returning the findings, we need to apply the mute list logic
if hasattr(global_provider, "mutelist") and global_provider.mutelist.mutelist:
# TODO: make this prettier
is_finding_muted_args = {}
if global_provider.type == "aws":
is_finding_muted_args["aws_account_id"] = (
@@ -738,27 +642,9 @@ def execute(
**is_finding_muted_args
)
# Refactor(Outputs)
# Report the check's findings
report(check_findings, global_provider)
# Refactor(Outputs)
if os.environ.get("PROWLER_REPORT_LIB_PATH"):
try:
logger.info("Using custom report interface ...")
lib = os.environ["PROWLER_REPORT_LIB_PATH"]
outputs_module = importlib.import_module(lib)
custom_report_interface = getattr(outputs_module, "report")
# TODO: review this call and see if we can remove the global_provider.output_options since it is contained in the global_provider
custom_report_interface(
check_findings, global_provider.output_options, global_provider
)
except Exception:
sys.exit(1)
except ModuleNotFoundError:
logger.error(
f"Check '{check_name}' was not found for the {global_provider.type.upper()} provider"
f"Check '{check.CheckID}' was not found for the {global_provider.type.upper()} provider"
)
check_findings = []
except Exception as error:
@@ -788,34 +674,3 @@ def update_audit_metadata(
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
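Per the docstring above, execute returns the check's findings. A hedged sketch of a single invocation, assuming the check instance and provider objects are built as in the surrounding scan loop (the check name is hypothetical):

# One execute() call for an already-instantiated check (illustrative)
services_executed, checks_executed = set(), set()
findings = execute(
    service="s3",
    check_name="s3_bucket_public_access",  # hypothetical check name
    check=check,                            # Check instance
    global_provider=global_provider,
    services_executed=services_executed,
    checks_executed=checks_executed,
    custom_checks_metadata=None,
    output_options=output_options,
)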
def recover_checks_from_service(service_list: list, provider: str) -> set:
"""
Recover all checks from the selected provider and service
Returns a set of checks from the given services
"""
try:
checks = set()
service_list = [
"awslambda" if service == "lambda" else service for service in service_list
]
for service in service_list:
service_checks = recover_checks_from_provider(provider, service)
if not service_checks:
logger.error(f"Service '{service}' does not have checks.")
else:
for check in service_checks:
# Recover check name and module name from import path
# Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
check_name = check[0].split(".")[-1]
# If the service is present in the group list passed as parameters
# if service_name in group_list: checks_from_arn.add(check_name)
checks.add(check_name)
return checks
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)

View File

@@ -1,35 +1,33 @@
from colorama import Fore, Style
from prowler.config.config import valid_severities
from prowler.lib.check.check import (
parse_checks_from_compliance_framework,
parse_checks_from_file,
recover_checks_from_provider,
recover_checks_from_service,
)
from prowler.lib.check.check import parse_checks_from_file
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.models import CheckMetadata, Severity
from prowler.lib.logger import logger
# Generate the list of checks to execute
def load_checks_to_execute(
bulk_checks_metadata: dict,
bulk_compliance_frameworks: dict,
checks_file: str,
check_list: list,
service_list: list,
severities: list,
compliance_frameworks: list,
categories: set,
provider: str,
bulk_checks_metadata: dict = None,
bulk_compliance_frameworks: dict = None,
checks_file: str = None,
check_list: list = None,
service_list: list = None,
severities: list = None,
compliance_frameworks: list = None,
categories: set = None,
) -> set:
"""Generate the list of checks to execute based on the cloud provider and the input arguments given"""
try:
# Local subsets
checks_to_execute = set()
check_aliases = {}
check_severities = {key: [] for key in valid_severities}
check_categories = {}
check_severities = {severity.value: [] for severity in Severity}
if not bulk_checks_metadata:
bulk_checks_metadata = CheckMetadata.get_bulk(provider=provider)
# First, loop over the bulk_checks_metadata to extract the needed subsets
for check, metadata in bulk_checks_metadata.items():
try:
@@ -64,24 +62,41 @@ def load_checks_to_execute(
checks_to_execute.update(check_severities[severity])
if service_list:
checks_to_execute = (
recover_checks_from_service(service_list, provider)
& checks_to_execute
)
for service in service_list:
checks_to_execute = (
set(
CheckMetadata.list(
bulk_checks_metadata=bulk_checks_metadata,
service=service,
)
)
& checks_to_execute
)
# Handle if there are checks passed using -C/--checks-file
elif checks_file:
checks_to_execute = parse_checks_from_file(checks_file, provider)
# Handle if there are services passed using -s/--services
elif service_list:
checks_to_execute = recover_checks_from_service(service_list, provider)
for service in service_list:
checks_to_execute.update(
CheckMetadata.list(
bulk_checks_metadata=bulk_checks_metadata,
service=service,
)
)
# Handle if there are compliance frameworks passed using --compliance
elif compliance_frameworks:
checks_to_execute = parse_checks_from_compliance_framework(
compliance_frameworks, bulk_compliance_frameworks
)
if not bulk_compliance_frameworks:
bulk_compliance_frameworks = Compliance.get_bulk(provider=provider)
for compliance_framework in compliance_frameworks:
checks_to_execute.update(
CheckMetadata.list(
bulk_compliance_frameworks=bulk_compliance_frameworks,
compliance_framework=compliance_framework,
)
)
# Handle if there are categories passed using --categories
elif categories:
@@ -90,20 +105,30 @@ def load_checks_to_execute(
# If there are no checks passed as argument
else:
# Get all check modules to run with the specific provider
checks = recover_checks_from_provider(provider)
for check_info in checks:
# Recover check name from import path (last part)
# Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
check_name = check_info[0]
# get all checks
for check_name in CheckMetadata.list(
bulk_checks_metadata=bulk_checks_metadata
):
checks_to_execute.add(check_name)
# Only execute threat detection checks if threat-detection category is set
if "threat-detection" not in categories:
if categories and categories != [] and "threat-detection" not in categories:
for threat_detection_check in check_categories.get("threat-detection", []):
checks_to_execute.discard(threat_detection_check)
# Exclude check cloudwatch_log_group_no_secrets_in_logs from the checks to execute if not in check_list
if (
check_list
and "cloudwatch_log_group_no_secrets_in_logs" not in check_list
and "cloudwatch_log_group_no_secrets_in_logs" in checks_to_execute
):
checks_to_execute.remove("cloudwatch_log_group_no_secrets_in_logs")
# Exclude check cloudwatch_log_group_no_critical_pii_in_logs from the checks to execute if not in check_list
if (
check_list
and "cloudwatch_log_group_no_critical_pii_in_logs" not in check_list
and "cloudwatch_log_group_no_critical_pii_in_logs" in checks_to_execute
):
checks_to_execute.remove("cloudwatch_log_group_no_critical_pii_in_logs")
# Check Aliases
checks_to_execute = update_checks_to_execute_with_aliases(
checks_to_execute, check_aliases
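With the new keyword defaults in the signature above, a minimal hedged call looks like this (argument values are hypothetical):

# Illustrative invocation of load_checks_to_execute
checks_to_run = load_checks_to_execute(
    provider="aws",
    service_list=["s3", "iam"],
    severities=["critical", "high"],
)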

View File

@@ -1,6 +1,6 @@
import sys
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.logger import logger
@@ -26,8 +26,8 @@ def update_checks_metadata_with_compliance(
if check in requirement.Checks:
# Include the requirement into the check's framework requirements
compliance_requirements.append(requirement)
# Create the Compliance_Model
compliance = ComplianceBaseModel(
# Create the Compliance
compliance = Compliance(
Framework=framework.Framework,
Provider=framework.Provider,
Version=framework.Version,

View File

@@ -1,9 +1,11 @@
import os
import sys
from enum import Enum
from typing import Optional, Union
from pydantic import BaseModel, ValidationError, root_validator
from prowler.lib.check.utils import list_compliance_modules
from prowler.lib.logger import logger
@@ -167,6 +169,19 @@ class Mitre_Requirement(BaseModel):
Checks: list[str]
# KISA-ISMS-P Requirement Attribute
class KISA_ISMSP_Requirement_Attribute(BaseModel):
"""KISA ISMS-P Requirement Attribute"""
Domain: str
Subdomain: str
Section: str
AuditChecklist: Optional[list[str]]
RelatedRegulations: Optional[list[str]]
AuditEvidence: Optional[list[str]]
NonComplianceCases: Optional[list[str]]
# Base Compliance Model
# TODO: move this to compliance folder
class Compliance_Requirement(BaseModel):
@@ -181,6 +196,7 @@ class Compliance_Requirement(BaseModel):
ENS_Requirement_Attribute,
ISO27001_2013_Requirement_Attribute,
AWS_Well_Architected_Requirement_Attribute,
KISA_ISMSP_Requirement_Attribute,
# Generic_Compliance_Requirement_Attribute must be the last one since it is the fallback for generic compliance framework
Generic_Compliance_Requirement_Attribute,
]
@@ -188,8 +204,8 @@ class Compliance_Requirement(BaseModel):
Checks: list[str]
class ComplianceBaseModel(BaseModel):
"""ComplianceBaseModel holds the base model for every compliance framework"""
class Compliance(BaseModel):
"""Compliance holds the base model for every compliance framework"""
Framework: str
Provider: str
@@ -213,16 +229,137 @@ class ComplianceBaseModel(BaseModel):
raise ValueError("Framework or Provider must not be empty")
return values
@staticmethod
def list(bulk_compliance_frameworks: dict, provider: str = None) -> list[str]:
"""
Returns a list of compliance frameworks from bulk compliance frameworks
Args:
bulk_compliance_frameworks (dict): The bulk compliance frameworks
provider (str): The provider name
Returns:
list: The list of compliance frameworks
"""
if provider:
compliance_frameworks = [
compliance_framework
for compliance_framework in bulk_compliance_frameworks.keys()
if provider in compliance_framework
]
else:
compliance_frameworks = [
compliance_framework
for compliance_framework in bulk_compliance_frameworks.keys()
]
return compliance_frameworks
@staticmethod
def get(
bulk_compliance_frameworks: dict, compliance_framework_name: str
) -> "Compliance":
"""
Returns a compliance framework from bulk compliance frameworks
Args:
bulk_compliance_frameworks (dict): The bulk compliance frameworks
compliance_framework_name (str): The compliance framework name
Returns:
Compliance: The compliance framework
"""
return bulk_compliance_frameworks.get(compliance_framework_name, None)
@staticmethod
def list_requirements(
bulk_compliance_frameworks: dict, compliance_framework: str = None
) -> list:
"""
Returns a list of compliance requirements from a compliance framework
Args:
bulk_compliance_frameworks (dict): The bulk compliance frameworks
compliance_framework (str): The compliance framework name
Returns:
list: The list of compliance requirements for the provided compliance framework
"""
compliance_requirements = []
if bulk_compliance_frameworks and compliance_framework:
compliance_requirements = [
compliance_requirement.Id
for compliance_requirement in bulk_compliance_frameworks.get(
compliance_framework
).Requirements
]
return compliance_requirements
@staticmethod
def get_requirement(
bulk_compliance_frameworks: dict, compliance_framework: str, requirement_id: str
) -> Union[Mitre_Requirement, Compliance_Requirement]:
"""
Returns a compliance requirement from a compliance framework
Args:
bulk_compliance_frameworks (dict): The bulk compliance frameworks
compliance_framework (str): The compliance framework name
requirement_id (str): The compliance requirement ID
Returns:
Mitre_Requirement | Compliance_Requirement: The compliance requirement
"""
requirement = None
for compliance_requirement in bulk_compliance_frameworks.get(
compliance_framework
).Requirements:
if compliance_requirement.Id == requirement_id:
requirement = compliance_requirement
break
return requirement
@staticmethod
def get_bulk(provider: str) -> dict:
"""Bulk load all compliance frameworks specification into a dict"""
try:
bulk_compliance_frameworks = {}
available_compliance_framework_modules = list_compliance_modules()
for compliance_framework in available_compliance_framework_modules:
if provider in compliance_framework.name:
compliance_specification_dir_path = (
f"{compliance_framework.module_finder.path}/{provider}"
)
# for compliance_framework in available_compliance_framework_modules:
for filename in os.listdir(compliance_specification_dir_path):
file_path = os.path.join(
compliance_specification_dir_path, filename
)
# Check if it is a file and its size is greater than 0
if os.path.isfile(file_path) and os.stat(file_path).st_size > 0:
# Open Compliance file in JSON
# cis_v1.4_aws.json --> cis_v1.4_aws
compliance_framework_name = filename.split(".json")[0]
# Store the compliance info
bulk_compliance_frameworks[compliance_framework_name] = (
load_compliance_framework(file_path)
)
except Exception as e:
logger.error(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")
return bulk_compliance_frameworks
# Testing Pending
def load_compliance_framework(
compliance_specification_file: str,
) -> ComplianceBaseModel:
) -> Compliance:
"""load_compliance_framework loads and parse a Compliance Framework Specification"""
try:
compliance_framework = ComplianceBaseModel.parse_file(
compliance_specification_file
)
compliance_framework = Compliance.parse_file(compliance_specification_file)
except ValidationError as error:
logger.critical(
f"Compliance Framework Specification from {compliance_specification_file} is not valid: {error}"

View File

@@ -3,7 +3,7 @@ import sys
import yaml
from jsonschema import validate
from prowler.config.config import valid_severities
from prowler.lib.check.models import Severity
from prowler.lib.logger import logger
custom_checks_metadata_schema = {
@@ -17,7 +17,7 @@ custom_checks_metadata_schema = {
"properties": {
"Severity": {
"type": "string",
"enum": valid_severities,
"enum": [severity.value for severity in Severity],
},
"CheckTitle": {
"type": "string",

View File

@@ -1,17 +1,30 @@
import functools
import os
import re
import sys
from abc import ABC, abstractmethod
from dataclasses import dataclass
from enum import Enum
from typing import Set
from pydantic import BaseModel, ValidationError, validator
from prowler.config.config import valid_severities
from prowler.config.config import Provider
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.utils import recover_checks_from_provider
from prowler.lib.logger import logger
class Code(BaseModel):
"""Check's remediation information using IaC like CloudFormation, Terraform or the native CLI"""
"""
Represents the remediation code using IaC like CloudFormation, Terraform or the native CLI.
Attributes:
NativeIaC (str): The NativeIaC code.
Terraform (str): The Terraform code.
CLI (str): The CLI code.
Other (str): Other code.
"""
NativeIaC: str
Terraform: str
@@ -20,21 +33,69 @@ class Code(BaseModel):
class Recommendation(BaseModel):
"""Check's recommendation information"""
"""
Represents a recommendation.
Attributes:
Text (str): The text of the recommendation.
Url (str): The URL associated with the recommendation.
"""
Text: str
Url: str
class Remediation(BaseModel):
"""Check's remediation: Code and Recommendation"""
"""
Represents a remediation action for a specific check.
Attributes:
Code (Code): The code associated with the remediation action.
Recommendation (Recommendation): The recommendation for the remediation action.
"""
Code: Code
Recommendation: Recommendation
class Check_Metadata_Model(BaseModel):
"""Check Metadata Model"""
class Severity(str, Enum):
critical = "critical"
high = "high"
medium = "medium"
low = "low"
informational = "informational"
class CheckMetadata(BaseModel):
"""
Model representing the metadata of a check.
Attributes:
Provider (str): The provider of the check.
CheckID (str): The ID of the check.
CheckTitle (str): The title of the check.
CheckType (list[str]): The type of the check.
CheckAliases (list[str], optional): The aliases of the check. Defaults to an empty list.
ServiceName (str): The name of the service.
SubServiceName (str): The name of the sub-service.
ResourceIdTemplate (str): The template for the resource ID.
Severity (str): The severity of the check.
ResourceType (str): The type of the resource.
Description (str): The description of the check.
Risk (str): The risk associated with the check.
RelatedUrl (str): The URL related to the check.
Remediation (Remediation): The remediation steps for the check.
Categories (list[str]): The categories of the check.
DependsOn (list[str]): The dependencies of the check.
RelatedTo (list[str]): The related checks.
Notes (str): Additional notes for the check.
Compliance (list, optional): The compliance information for the check. Defaults to None.
Validators:
valid_category(value): Validator function to validate the categories of the check.
severity_to_lower(severity): Validator function to convert the severity to lowercase.
valid_severity(severity): Validator function to validate the severity of the check.
"""
Provider: str
CheckID: str
@@ -44,7 +105,7 @@ class Check_Metadata_Model(BaseModel):
ServiceName: str
SubServiceName: str
ResourceIdTemplate: str
Severity: str
Severity: Severity
ResourceType: str
Description: str
Risk: str
@@ -73,16 +134,244 @@ class Check_Metadata_Model(BaseModel):
def severity_to_lower(severity):
return severity.lower()
@validator("Severity")
def valid_severity(severity):
if severity not in valid_severities:
raise ValueError(
f"Invalid severity: {severity}. Severity must be one of {', '.join(valid_severities)}"
@staticmethod
def get_bulk(provider: str) -> dict[str, "CheckMetadata"]:
"""
Load the metadata of all checks for a given provider reading the check's metadata files.
Args:
provider (str): The name of the provider.
Returns:
dict[str, CheckMetadata]: A dictionary containing the metadata of all checks, with the CheckID as the key.
"""
bulk_check_metadata = {}
checks = recover_checks_from_provider(provider)
# Build list of check's metadata files
for check_info in checks:
# Build check path name
check_name = check_info[0]
check_path = check_info[1]
# Ignore fixer files
if check_name.endswith("_fixer"):
continue
# Append metadata file extension
metadata_file = f"{check_path}/{check_name}.metadata.json"
# Load metadata
check_metadata = load_check_metadata(metadata_file)
bulk_check_metadata[check_metadata.CheckID] = check_metadata
return bulk_check_metadata
@staticmethod
def list(
bulk_checks_metadata: dict = None,
bulk_compliance_frameworks: dict = None,
provider: str = None,
severity: str = None,
category: str = None,
service: str = None,
compliance_framework: str = None,
) -> Set["CheckMetadata"]:
"""
Returns a set of checks from the bulk checks metadata.
Args:
provider (str): The provider of the checks.
bulk_checks_metadata (dict): The bulk checks metadata.
bulk_compliance_frameworks (dict): The bulk compliance frameworks.
severity (str): The severity of the checks.
category (str): The category of the checks.
service (str): The service of the checks.
compliance_framework (str): The compliance framework of the checks.
Returns:
set: A set of checks.
"""
checks_from_provider = set()
checks_from_severity = set()
checks_from_category = set()
checks_from_service = set()
checks_from_compliance_framework = set()
# If the bulk checks metadata is not provided, get it
if not bulk_checks_metadata:
bulk_checks_metadata = {}
available_providers = [p.value for p in Provider]
for provider_name in available_providers:
bulk_checks_metadata.update(CheckMetadata.get_bulk(provider_name))
if provider:
checks_from_provider = {
check_name
for check_name, check_metadata in bulk_checks_metadata.items()
if check_metadata.Provider == provider
}
if severity:
checks_from_severity = CheckMetadata.list_by_severity(
bulk_checks_metadata=bulk_checks_metadata, severity=severity
)
if category:
checks_from_category = CheckMetadata.list_by_category(
bulk_checks_metadata=bulk_checks_metadata, category=category
)
if service:
checks_from_service = CheckMetadata.list_by_service(
bulk_checks_metadata=bulk_checks_metadata, service=service
)
if compliance_framework:
# Loaded here, as it is not always needed
if not bulk_compliance_frameworks:
bulk_compliance_frameworks = {}
available_providers = [p.value for p in Provider]
for provider_name in available_providers:
# Merge each provider's frameworks instead of overwriting the dict
bulk_compliance_frameworks.update(
Compliance.get_bulk(provider=provider_name)
)
checks_from_compliance_framework = (
CheckMetadata.list_by_compliance_framework(
bulk_compliance_frameworks=bulk_compliance_frameworks,
compliance_framework=compliance_framework,
)
)
# Get all the checks:
checks = set(bulk_checks_metadata.keys())
# Get the intersection of the checks
if len(checks_from_provider) > 0 or provider:
checks = checks & checks_from_provider
if len(checks_from_severity) > 0 or severity:
checks = checks & checks_from_severity
if len(checks_from_category) > 0 or category:
checks = checks & checks_from_category
if len(checks_from_service) > 0 or service:
checks = checks & checks_from_service
if len(checks_from_compliance_framework) > 0 or compliance_framework:
checks = checks & checks_from_compliance_framework
return checks
@staticmethod
def get(bulk_checks_metadata: dict, check_id: str) -> "CheckMetadata":
"""
Returns the check metadata from the bulk checks metadata.
Args:
bulk_checks_metadata (dict): The bulk checks metadata.
check_id (str): The check ID.
Returns:
CheckMetadata: The check metadata.
"""
return bulk_checks_metadata.get(check_id, None)
@staticmethod
def list_by_severity(bulk_checks_metadata: dict, severity: str = None) -> set:
"""
Returns a set of checks by severity from the bulk checks metadata.
Args:
bulk_checks_metadata (dict): The bulk checks metadata.
severity (str): The severity.
Returns:
set: A set of checks by severity.
"""
checks = set()
if severity:
checks = {
check_name
for check_name, check_metadata in bulk_checks_metadata.items()
if check_metadata.Severity == severity
}
return checks
@staticmethod
def list_by_category(bulk_checks_metadata: dict, category: str = None) -> set:
"""
Returns a set of checks by category from the bulk checks metadata.
Args:
bulk_checks_metadata (dict): The bulk checks metadata.
category (str): The category.
Returns:
set: A set of checks by category.
"""
checks = set()
if category:
checks = {
check_name
for check_name, check_metadata in bulk_checks_metadata.items()
if category in check_metadata.Categories
}
return checks
@staticmethod
def list_by_service(bulk_checks_metadata: dict, service: str = None) -> set:
"""
Returns a set of checks by service from the bulk checks metadata.
Args:
bulk_checks_metadata (dict): The bulk checks metadata.
service (str): The service.
Returns:
set: A set of checks by service.
"""
checks = set()
if service:
if service == "lambda":
service = "awslambda"
checks = {
check_name
for check_name, check_metadata in bulk_checks_metadata.items()
if check_metadata.ServiceName == service
}
return checks
@staticmethod
def list_by_compliance_framework(
bulk_compliance_frameworks: dict, compliance_framework: str = None
) -> set:
"""
Returns a set of checks by compliance framework from the bulk compliance frameworks.
Args:
bulk_compliance_frameworks (dict): The bulk compliance frameworks.
compliance_framework (str): The compliance framework.
Returns:
set: A set of checks by compliance framework.
"""
checks = set()
if compliance_framework:
try:
checks_from_framework_list = [
requirement.Checks
for requirement in bulk_compliance_frameworks[
compliance_framework
].Requirements
]
# Reduce nested list into a list
# Pythonic functional magic
checks_from_framework = functools.reduce(
lambda x, y: x + y, checks_from_framework_list
)
# Then union this list of checks with the initial one
checks = checks.union(checks_from_framework)
except Exception as e:
logger.error(
f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}"
)
return checks
class Check(ABC, Check_Metadata_Model):
class Check(ABC, CheckMetadata):
"""Prowler Check"""
def __init__(self, **data):
@@ -93,7 +382,7 @@ class Check(ABC, Check_Metadata_Model):
+ ".metadata.json"
)
# Store it to validate them with Pydantic
data = Check_Metadata_Model.parse_file(metadata_file).dict()
data = CheckMetadata.parse_file(metadata_file).dict()
# Calls parents init function
super().__init__(**data)
# TODO: verify that the CheckID is the same as the filename and classname
@@ -114,14 +403,14 @@ class Check_Report:
status: str
status_extended: str
check_metadata: Check_Metadata_Model
check_metadata: CheckMetadata
resource_details: str
resource_tags: list
muted: bool
def __init__(self, metadata):
self.status = ""
self.check_metadata = Check_Metadata_Model.parse_raw(metadata)
self.check_metadata = CheckMetadata.parse_raw(metadata)
self.status_extended = ""
self.resource_details = ""
self.resource_tags = []
@@ -194,12 +483,22 @@ class Check_Report_Kubernetes(Check_Report):
# Testing Pending
def load_check_metadata(metadata_file: str) -> Check_Metadata_Model:
"""load_check_metadata loads and parse a Check's metadata file"""
def load_check_metadata(metadata_file: str) -> CheckMetadata:
"""
Load check metadata from a file.
Args:
metadata_file (str): The path to the metadata file.
Returns:
CheckMetadata: The loaded check metadata.
Raises:
ValidationError: If the metadata file is not valid.
"""
try:
check_metadata = Check_Metadata_Model.parse_file(metadata_file)
check_metadata = CheckMetadata.parse_file(metadata_file)
except ValidationError as error:
logger.critical(f"Metadata from {metadata_file} is not valid: {error}")
# TODO: remove this exit and raise an exception
sys.exit(1)
else:
return check_metadata
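A hedged usage sketch of the CheckMetadata helpers above (the check ID is hypothetical):

# Illustrative usage of CheckMetadata's static helpers
bulk = CheckMetadata.get_bulk(provider="aws")
s3_checks = CheckMetadata.list(bulk_checks_metadata=bulk, service="s3")
high_checks = CheckMetadata.list_by_severity(bulk_checks_metadata=bulk, severity="high")
metadata = CheckMetadata.get(
    bulk_checks_metadata=bulk, check_id="s3_bucket_public_access"  # hypothetical ID
)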

View File

@@ -0,0 +1,95 @@
import importlib
import sys
from pkgutil import walk_packages
from prowler.lib.logger import logger
def recover_checks_from_provider(
provider: str, service: str = None, include_fixers: bool = False
) -> list[tuple]:
"""
Recover all checks from the selected provider and service
Returns a list of tuples with the following format (check_name, check_path)
"""
try:
checks = []
modules = list_modules(provider, service)
for module_name in modules:
# Format: "prowler.providers.{provider}.services.{service}.{check_name}.{check_name}"
check_module_name = module_name.name
# We need to exclude common shared libraries in services
if (
check_module_name.count(".") == 6
and "lib" not in check_module_name
and (not check_module_name.endswith("_fixer") or include_fixers)
):
check_path = module_name.module_finder.path
# Check name is the last part of the check_module_name
check_name = check_module_name.split(".")[-1]
check_info = (check_name, check_path)
checks.append(check_info)
except ModuleNotFoundError:
logger.critical(f"Service {service} was not found for the {provider} provider.")
sys.exit(1)
except Exception as e:
logger.critical(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}]: {e}")
sys.exit(1)
else:
return checks
# List all available modules in the selected provider and service
def list_modules(provider: str, service: str):
# This module path requires the full path including "prowler."
module_path = f"prowler.providers.{provider}.services"
if service:
module_path += f".{service}"
return walk_packages(
importlib.import_module(module_path).__path__,
importlib.import_module(module_path).__name__ + ".",
)
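An illustrative call to the discovery helper above; each tuple pairs a check name with its on-disk path:

# Enumerate the AWS S3 checks discovered under prowler.providers.aws.services.s3
for check_name, check_path in recover_checks_from_provider("aws", service="s3"):
    print(check_name, check_path)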
def recover_checks_from_service(service_list: list, provider: str) -> set:
"""
Recover all checks from the selected provider and service
Returns a set of checks from the given services
"""
try:
checks = set()
service_list = [
"awslambda" if service == "lambda" else service for service in service_list
]
for service in service_list:
service_checks = recover_checks_from_provider(provider, service)
if not service_checks:
logger.error(f"Service '{service}' does not have checks.")
else:
for check in service_checks:
# Recover check name and module name from import path
# Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
check_name = check[0].split(".")[-1]
# If the service is present in the group list passed as parameters
# if service_name in group_list: checks_from_arn.add(check_name)
checks.add(check_name)
return checks
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def list_compliance_modules():
"""
list_compliance_modules returns the available compliance framework modules and their paths
"""
# This module path requires the full path including "prowler."
module_path = "prowler.compliance"
return walk_packages(
importlib.import_module(module_path).__path__,
importlib.import_module(module_path).__name__ + ".",
)

View File

@@ -10,9 +10,9 @@ from prowler.config.config import (
default_config_file_path,
default_fixer_config_file_path,
default_output_directory,
finding_statuses,
valid_severities,
)
from prowler.lib.check.models import Severity
from prowler.lib.outputs.common import Status
from prowler.providers.common.arguments import (
init_providers_parser,
validate_provider_arguments,
@@ -138,8 +138,8 @@ Detailed documentation at https://docs.prowler.com
common_outputs_parser.add_argument(
"--status",
nargs="+",
help=f"Filter by the status of the findings {finding_statuses}",
choices=finding_statuses,
help=f"Filter by the status of the findings {[status.value for status in Status]}",
choices=[status.value for status in Status],
)
common_outputs_parser.add_argument(
"--output-formats",
@@ -177,6 +177,12 @@ Detailed documentation at https://docs.prowler.com
common_outputs_parser.add_argument(
"--no-banner", "-b", action="store_true", help="Hide Prowler banner"
)
common_outputs_parser.add_argument(
"--no-color",
action="store_true",
help="Disable color codes in output",
)
common_outputs_parser.add_argument(
"--unix-timestamp",
action="store_true",
@@ -257,8 +263,8 @@ Detailed documentation at https://docs.prowler.com
"--severity",
"--severities",
nargs="+",
help=f"Severities to be executed {valid_severities}",
choices=valid_severities,
help=f"Severities to be executed {[severity.value for severity in Severity]}",
choices=[severity.value for severity in Severity],
)
group.add_argument(
"--compliance",

View File

@@ -5,6 +5,8 @@ import yaml
from prowler.lib.logger import logger
from prowler.lib.mutelist.models import mutelist_schema
from prowler.lib.outputs.common import Status
from prowler.lib.outputs.utils import unroll_dict, unroll_tags
class Mutelist(ABC):
@@ -237,6 +239,35 @@ class Mutelist(ABC):
)
return False
def mute_finding(self, finding):
"""
Check if the provided finding is muted
Args:
finding (Finding): The finding to be evaluated for muting.
Returns:
Finding: The finding with its status updated to MUTED if it is muted; otherwise the finding is returned unchanged
"""
try:
if self.is_muted(
finding.account_uid,
finding.metadata.CheckID,
finding.region,
finding.resource_uid,
unroll_dict(unroll_tags(finding.resource_tags)),
):
finding.raw["status"] = finding.status
finding.status = Status.MUTED
finding.muted = True
return finding
except Exception as error:
logger.error(
f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
)
return finding
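A hedged usage sketch of the hook above, where mutelist is any concrete Mutelist subclass instance:

# Apply the mutelist before reporting (illustrative)
findings = [mutelist.mute_finding(finding) for finding in findings]
muted_findings = [finding for finding in findings if finding.muted]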
def is_excepted(
self,
exceptions,
@@ -307,13 +338,16 @@ class Mutelist(ABC):
return False
@staticmethod
def is_item_matched(matched_items, finding_items, tag=False):
def is_item_matched(matched_items, finding_items, tag=False) -> bool:
"""
Check if any of the items in matched_items are present in finding_items.
Args:
matched_items (list): List of items to be matched.
finding_items (str): String to search for matched items.
tag (bool): If True, tag-matching semantics are used:
- Tags are ANDed -> True only if all the tag patterns are present in the finding.
- Otherwise items are ORed -> True if any single item matches the finding.
Returns:
bool: True if any of the matched_items are present in finding_items, otherwise False.
@@ -321,17 +355,19 @@ class Mutelist(ABC):
try:
is_item_matched = False
if matched_items and (finding_items or finding_items == ""):
# If we use tags, we need to use re.search instead of re.match because we need to match the tags in the format key1=value1 | key2=value2
if tag:
operation = re.search
else:
operation = re.match
is_item_matched = True
for item in matched_items:
if item.startswith("*"):
item = ".*" + item[1:]
if operation(item, finding_items):
is_item_matched = True
break
if tag:
if not re.search(item, finding_items):
is_item_matched = False
break
else:
if re.search(item, finding_items):
is_item_matched = True
break
return is_item_matched
except Exception as error:
logger.error(
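A hedged illustration of the matching semantics implemented above (patterns and tag strings are hypothetical):

# Tags are ANDed: every pattern must be found in the finding's tag string
Mutelist.is_item_matched(["env=prod", "team=*"], "env=prod | team=sec", tag=True)    # True
Mutelist.is_item_matched(["env=prod", "owner=qa"], "env=prod | team=sec", tag=True)  # False
# Other items are ORed: a single match is enough
Mutelist.is_item_matched(["*-west-1", "us-east-2"], "eu-west-1")                     # True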

View File

@@ -25,7 +25,6 @@ class ASFF(Output):
- transform(findings: list[Finding]) -> None: Transforms a list of findings into ASFF format.
- batch_write_data_to_file() -> None: Writes the findings data to a file in JSON ASFF format.
- generate_status(status: str, muted: bool = False) -> str: Generates the ASFF status based on the provided status and muted flag.
- format_resource_tags(tags: str) -> dict: Transforms a string of tags into a dictionary format.
References:
- AWS Security Hub API Reference: https://docs.aws.amazon.com/securityhub/1.0/APIReference/API_Compliance.html
@@ -62,7 +61,6 @@ class ASFF(Output):
if finding.status == "MANUAL":
continue
timestamp = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
resource_tags = ASFF.format_resource_tags(finding.resource_tags)
associated_standards, compliance_summary = ASFF.format_compliance(
finding.compliance
@@ -70,36 +68,39 @@ class ASFF(Output):
# Ensures finding_status matches allowed values in ASFF
finding_status = ASFF.generate_status(finding.status, finding.muted)
self._data.append(
AWSSecurityFindingFormat(
# The following line cannot be changed because it is the format we use to generate unique findings for AWS Security Hub
# If changed, some findings could be lost because the unique identifier would be different
Id=f"prowler-{finding.check_id}-{finding.account_uid}-{finding.region}-{hash_sha512(finding.resource_uid)}",
Id=f"prowler-{finding.metadata.CheckID}-{finding.account_uid}-{finding.region}-{hash_sha512(finding.resource_uid)}",
ProductArn=f"arn:{finding.partition}:securityhub:{finding.region}::product/prowler/prowler",
ProductFields=ProductFields(
ProwlerResourceName=finding.resource_uid,
),
GeneratorId="prowler-" + finding.check_id,
GeneratorId="prowler-" + finding.metadata.CheckID,
AwsAccountId=finding.account_uid,
Types=(
finding.check_type.split(",")
if finding.check_type
finding.metadata.CheckType
if finding.metadata.CheckType
else ["Software and Configuration Checks"]
),
FirstObservedAt=timestamp,
UpdatedAt=timestamp,
CreatedAt=timestamp,
Severity=Severity(Label=finding.severity.value),
Title=finding.check_title,
Description=finding.description,
Severity=Severity(Label=finding.metadata.Severity.value),
Title=finding.metadata.CheckTitle,
Description=(
(finding.status_extended[:1000] + "...")
if len(finding.status_extended) > 1000
else finding.status_extended
),
Resources=[
Resource(
Id=finding.resource_uid,
Type=finding.resource_type,
Type=finding.metadata.ResourceType,
Partition=finding.partition,
Region=finding.region,
Tags=resource_tags,
Tags=finding.resource_tags,
)
],
Compliance=Compliance(
@@ -109,8 +110,8 @@ class ASFF(Output):
),
Remediation=Remediation(
Recommendation=Recommendation(
Text=finding.remediation_recommendation_text,
Url=finding.remediation_recommendation_url,
Text=finding.metadata.Remediation.Recommendation.Text,
Url=finding.metadata.Remediation.Recommendation.Url,
)
),
)
@@ -195,42 +196,6 @@ class ASFF(Output):
return json_asff_status
@staticmethod
def format_resource_tags(tags: str) -> dict:
"""
Transforms a string of tags into a dictionary format.
Parameters:
- tags (str): A string containing tags separated by ' | ' and key-value pairs separated by '='.
Returns:
- dict: A dictionary where keys are tag names and values are tag values.
Notes:
- If the input string is empty or None, it returns None.
- Each tag in the input string should be in the format 'key=value'.
- If the input string is not formatted correctly, it logs an error and returns None.
"""
try:
tags_dict = None
if tags:
tags = tags.split(" | ")
tags_dict = {}
for tag in tags:
value = tag.split("=")
tags_dict[value[0]] = value[1]
return tags_dict
except IndexError as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
return None
except AttributeError as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
return None
@staticmethod
def format_compliance(compliance: dict) -> tuple[list[dict], list[str]]:
"""
@@ -316,6 +281,12 @@ class Resource(BaseModel):
Region: str
Tags: Optional[dict]
@validator("Tags", pre=True, always=True)
def tags_cannot_be_empty_dict(tags):
if not tags:
return None
return tags
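The validator added above normalizes falsy tag values; a hedged illustration with hypothetical field values:

# An empty Tags dict is coerced to None, so ASFF output omits the field
resource = Resource(
    Id="arn:aws:s3:::example-bucket",
    Type="AwsS3Bucket",
    Partition="aws",
    Region="us-east-1",
    Tags={},
)
assert resource.Tags is None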
class Compliance(BaseModel):
"""

View File

@@ -1,62 +1,26 @@
from operator import attrgetter
from enum import Enum
from prowler.config.config import timestamp
from prowler.lib.logger import logger
from prowler.lib.outputs.utils import unroll_list, unroll_tags
from prowler.lib.outputs.utils import unroll_tags
from prowler.lib.utils.utils import outputs_unix_timestamp
def get_provider_data_mapping(provider) -> dict:
data = {}
for generic_field, provider_field in provider.get_output_mapping.items():
try:
provider_value = attrgetter(provider_field)(provider)
data[generic_field] = provider_value
except AttributeError:
data[generic_field] = ""
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
return data
# TODO: add test for outputs_unix_timestamp
def fill_common_finding_data(finding: dict, unix_timestamp: bool) -> dict:
finding_data = {
"metadata": finding.check_metadata,
"timestamp": outputs_unix_timestamp(unix_timestamp, timestamp),
"check_id": finding.check_metadata.CheckID,
"check_title": finding.check_metadata.CheckTitle,
"check_type": ",".join(finding.check_metadata.CheckType),
"status": finding.status,
"status_extended": finding.status_extended,
"muted": finding.muted,
"service_name": finding.check_metadata.ServiceName,
"subservice_name": finding.check_metadata.SubServiceName,
"severity": finding.check_metadata.Severity,
"resource_type": finding.check_metadata.ResourceType,
"resource_details": finding.resource_details,
"resource_tags": unroll_tags(finding.resource_tags),
"description": finding.check_metadata.Description,
"risk": finding.check_metadata.Risk,
"related_url": finding.check_metadata.RelatedUrl,
"remediation_recommendation_text": (
finding.check_metadata.Remediation.Recommendation.Text
),
"remediation_recommendation_url": (
finding.check_metadata.Remediation.Recommendation.Url
),
"remediation_code_nativeiac": (
finding.check_metadata.Remediation.Code.NativeIaC
),
"remediation_code_terraform": (
finding.check_metadata.Remediation.Code.Terraform
),
"remediation_code_cli": (finding.check_metadata.Remediation.Code.CLI),
"remediation_code_other": (finding.check_metadata.Remediation.Code.Other),
"categories": unroll_list(finding.check_metadata.Categories),
"depends_on": unroll_list(finding.check_metadata.DependsOn),
"related_to": unroll_list(finding.check_metadata.RelatedTo),
"notes": finding.check_metadata.Notes,
}
return finding_data
class Status(str, Enum):
PASS = "PASS"
FAIL = "FAIL"
MANUAL = "MANUAL"
MUTED = "MUTED"

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.aws_well_architected.models import (
AWSWellArchitectedModel,
)
@@ -21,7 +21,7 @@ class AWSWellArchitected(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -29,7 +29,7 @@ class AWSWellArchitected(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.cis.models import AWSCISModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.finding import Finding
@@ -19,7 +19,7 @@ class AWSCIS(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -27,7 +27,7 @@ class AWSCIS(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.cis.models import AzureCISModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.finding import Finding
@@ -19,7 +19,7 @@ class AzureCIS(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -27,7 +27,7 @@ class AzureCIS(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.cis.models import GCPCISModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.finding import Finding
@@ -19,7 +19,7 @@ class GCPCIS(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -27,7 +27,7 @@ class GCPCIS(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:

View File

@@ -1,6 +1,6 @@
from datetime import datetime
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.cis.models import KubernetesCISModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.finding import Finding
@@ -21,7 +21,7 @@ class KubernetesCIS(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -29,7 +29,7 @@ class KubernetesCIS(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:

View File

@@ -7,6 +7,7 @@ from prowler.lib.outputs.compliance.ens.ens import get_ens_table
from prowler.lib.outputs.compliance.generic.generic_table import (
get_generic_compliance_table,
)
from prowler.lib.outputs.compliance.kisa_ismsp.kisa_ismsp import get_kisa_ismsp_table
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack import (
get_mitre_attack_table,
)
@@ -62,6 +63,15 @@ def display_compliance_table(
output_directory,
compliance_overview,
)
elif "kisa_isms_" in compliance_framework:
get_kisa_ismsp_table(
findings,
bulk_checks_metadata,
compliance_framework,
output_filename,
output_directory,
compliance_overview,
)
else:
get_generic_compliance_table(
findings,

View File

@@ -2,7 +2,7 @@ from csv import DictWriter
from pathlib import Path
from typing import List
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.logger import logger
from prowler.lib.outputs.finding import Finding
from prowler.lib.outputs.output import Output
@@ -28,7 +28,7 @@ class ComplianceOutput(Output):
def __init__(
self,
findings: List[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
create_file_descriptor: bool = False,
file_path: str = None,
file_extension: str = "",

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.ens.models import AWSENSModel
from prowler.lib.outputs.finding import Finding
@@ -19,7 +19,7 @@ class AWSENS(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -27,7 +27,7 @@ class AWSENS(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.generic.models import GenericComplianceModel
from prowler.lib.outputs.finding import Finding
@@ -19,7 +19,7 @@ class GenericCompliance(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -27,7 +27,7 @@ class GenericCompliance(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.iso27001.models import AWSISO27001Model
from prowler.lib.outputs.finding import Finding
@@ -19,7 +19,7 @@ class AWSISO27001(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -27,7 +27,7 @@ class AWSISO27001(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:

View File

@@ -0,0 +1,89 @@
from colorama import Fore, Style
from tabulate import tabulate
from prowler.config.config import orange_color
def get_kisa_ismsp_table(
findings: list,
bulk_checks_metadata: dict,
compliance_framework: str,
output_filename: str,
output_directory: str,
compliance_overview: bool,
):
sections = {}
kisa_ismsp_compliance_table = {
"Provider": [],
"Section": [],
"Status": [],
"Muted": [],
}
pass_count = []
fail_count = []
muted_count = []
for index, finding in enumerate(findings):
check = bulk_checks_metadata[finding.check_metadata.CheckID]
check_compliances = check.Compliance
for compliance in check_compliances:
if (
compliance.Framework.startswith("KISA")
and compliance.Version in compliance_framework
):
for requirement in compliance.Requirements:
for attribute in requirement.Attributes:
section = attribute.Section
# Check if Section exists
if section not in sections:
sections[section] = {
"Status": f"{Fore.GREEN}PASS{Style.RESET_ALL}",
"Muted": 0,
}
if finding.muted:
if index not in muted_count:
muted_count.append(index)
sections[section]["Muted"] += 1
else:
if finding.status == "FAIL" and index not in fail_count:
fail_count.append(index)
elif finding.status == "PASS" and index not in pass_count:
pass_count.append(index)
# Add results to table
sections = dict(sorted(sections.items()))
for section in sections:
kisa_ismsp_compliance_table["Provider"].append(compliance.Provider)
kisa_ismsp_compliance_table["Section"].append(section)
kisa_ismsp_compliance_table["Muted"].append(
f"{orange_color}{sections[section]['Muted']}{Style.RESET_ALL}"
)
if len(fail_count) + len(pass_count) + len(muted_count) > 1:
print(
f"\nCompliance Status of {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Framework:"
)
overview_table = [
[
f"{Fore.RED}{round(len(fail_count) / len(findings) * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}",
f"{Fore.GREEN}{round(len(pass_count) / len(findings) * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}",
f"{orange_color}{round(len(muted_count) / len(findings) * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}",
]
]
print(tabulate(overview_table, tablefmt="rounded_grid"))
if not compliance_overview:
print(
f"\nFramework {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Results:"
)
print(
tabulate(
kisa_ismsp_compliance_table,
headers="keys",
tablefmt="rounded_grid",
)
)
print(
f"{Style.BRIGHT}* Only sections containing results appear.{Style.RESET_ALL}"
)
print(f"\nDetailed results of {compliance_framework.upper()} are in:")
print(
f" - CSV: {output_directory}/compliance/{output_filename}_{compliance_framework}.csv\n"
)

View File

@@ -0,0 +1,93 @@
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.kisa_ismsp.models import AWSKISAISMSPModel
from prowler.lib.outputs.finding import Finding
class AWSKISAISMSP(ComplianceOutput):
"""
This class represents the AWS KISA-ISMS-P compliance output.
Attributes:
- _data (list): A list to store transformed data from findings.
- _file_descriptor (TextIOWrapper): A file descriptor to write data to a file.
Methods:
- transform: Transforms findings into AWS KISA-ISMS-P compliance format.
"""
def transform(
self,
findings: list[Finding],
compliance: Compliance,
compliance_name: str,
) -> None:
"""
Transforms a list of findings into AWS KISA-ISMS-P compliance format.
Parameters:
- findings (list): A list of findings.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:
- None
"""
for finding in findings:
# Get the compliance requirements for the finding
finding_requirements = finding.compliance.get(compliance_name, [])
for requirement in compliance.Requirements:
if requirement.Id in finding_requirements:
for attribute in requirement.Attributes:
compliance_row = AWSKISAISMSPModel(
Provider=finding.provider,
Description=compliance.Description,
AccountId=finding.account_uid,
Region=finding.region,
AssessmentDate=str(finding.timestamp),
Requirements_Id=requirement.Id,
Requirements_Name=requirement.Name,
Requirements_Description=requirement.Description,
Requirements_Attributes_Domain=attribute.Domain,
Requirements_Attributes_Subdomain=attribute.Subdomain,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_AuditChecklist=attribute.AuditChecklist,
Requirements_Attributes_RelatedRegulations=attribute.RelatedRegulations,
Requirements_Attributes_AuditEvidence=attribute.AuditEvidence,
Requirements_Attributes_NonComplianceCases=attribute.NonComplianceCases,
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_uid,
ResourceName=finding.resource_name,
CheckId=finding.check_id,
Muted=finding.muted,
)
self._data.append(compliance_row)
# Add manual requirements to the compliance output
for requirement in compliance.Requirements:
if not requirement.Checks:
for attribute in requirement.Attributes:
compliance_row = AWSKISAISMSPModel(
Provider=compliance.Provider.lower(),
Description=compliance.Description,
AccountId="",
Region="",
AssessmentDate=str(finding.timestamp),
Requirements_Id=requirement.Id,
Requirements_Name=requirement.Name,
Requirements_Description=requirement.Description,
Requirements_Attributes_Domain=attribute.Domain,
Requirements_Attributes_Subdomain=attribute.Subdomain,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_AuditChecklist=attribute.AuditChecklist,
Requirements_Attributes_RelatedRegulations=attribute.RelatedRegulations,
Requirements_Attributes_AuditEvidence=attribute.AuditEvidence,
Requirements_Attributes_NonComplianceCases=attribute.NonComplianceCases,
Status="MANUAL",
StatusExtended="Manual check",
ResourceId="manual_check",
ResourceName="Manual check",
CheckId="manual",
Muted=False,
)
self._data.append(compliance_row)

View File

@@ -0,0 +1,31 @@
from typing import Optional
from pydantic import BaseModel
class AWSKISAISMSPModel(BaseModel):
"""
The AWS KISA-ISMS-P model outputs AWS findings in a format aligned with the KISA-ISMS-P compliance standard
"""
Provider: str
Description: str
AccountId: str
Region: str
AssessmentDate: str
Requirements_Id: str
Requirements_Name: str
Requirements_Description: str
Requirements_Attributes_Domain: str
Requirements_Attributes_Subdomain: str
Requirements_Attributes_Section: str
Requirements_Attributes_AuditChecklist: Optional[list[str]]
Requirements_Attributes_RelatedRegulations: Optional[list[str]]
Requirements_Attributes_AuditEvidence: Optional[list[str]]
Requirements_Attributes_NonComplianceCases: Optional[list[str]]
Status: str
StatusExtended: str
ResourceId: str
ResourceName: str
CheckId: str
Muted: bool

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.mitre_attack.models import AWSMitreAttackModel
from prowler.lib.outputs.finding import Finding
@@ -20,7 +20,7 @@ class AWSMitreAttack(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -28,7 +28,7 @@ class AWSMitreAttack(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.mitre_attack.models import AzureMitreAttackModel
from prowler.lib.outputs.finding import Finding
@@ -20,7 +20,7 @@ class AzureMitreAttack(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -28,7 +28,7 @@ class AzureMitreAttack(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:

View File

@@ -1,4 +1,4 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.mitre_attack.models import GCPMitreAttackModel
from prowler.lib.outputs.finding import Finding
@@ -20,7 +20,7 @@ class GCPMitreAttack(ComplianceOutput):
def transform(
self,
findings: list[Finding],
compliance: ComplianceBaseModel,
compliance: Compliance,
compliance_name: str,
) -> None:
"""
@@ -28,7 +28,7 @@ class GCPMitreAttack(ComplianceOutput):
Parameters:
- findings (list): A list of findings.
- compliance (ComplianceBaseModel): A compliance model.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:

View File

@@ -1,4 +1,5 @@
from csv import DictWriter
from typing import List
from prowler.lib.logger import logger
from prowler.lib.outputs.finding import Finding
@@ -7,7 +8,7 @@ from prowler.lib.outputs.utils import unroll_dict, unroll_list
class CSV(Output):
def transform(self, findings: list[Finding]) -> None:
def transform(self, findings: List[Finding]) -> None:
"""Transforms the findings into the CSV format.
Args:
@@ -16,11 +17,68 @@ class CSV(Output):
"""
try:
for finding in findings:
finding_dict = {k.upper(): v for k, v in finding.dict().items()}
finding_dict["COMPLIANCE"] = unroll_dict(finding.compliance)
finding_dict["ACCOUNT_TAGS"] = unroll_list(finding.account_tags)
finding_dict = {}
finding_dict["AUTH_METHOD"] = finding.auth_method
finding_dict["TIMESTAMP"] = finding.timestamp
finding_dict["ACCOUNT_UID"] = finding.account_uid
finding_dict["ACCOUNT_NAME"] = finding.account_name
finding_dict["ACCOUNT_EMAIL"] = finding.account_email
finding_dict["ACCOUNT_ORGANIZATION_UID"] = (
finding.account_organization_uid
)
finding_dict["ACCOUNT_ORGANIZATION_NAME"] = (
finding.account_organization_name
)
finding_dict["ACCOUNT_TAGS"] = unroll_dict(
finding.account_tags, separator=":"
)
finding_dict["FINDING_UID"] = finding.uid
finding_dict["PROVIDER"] = finding.metadata.Provider
finding_dict["CHECK_ID"] = finding.metadata.CheckID
finding_dict["CHECK_TITLE"] = finding.metadata.CheckTitle
finding_dict["CHECK_TYPE"] = unroll_list(finding.metadata.CheckType)
finding_dict["STATUS"] = finding.status.value
finding_dict["SEVERITY"] = finding.severity.value
finding_dict["STATUS_EXTENDED"] = finding.status_extended
finding_dict["MUTED"] = finding.muted
finding_dict["SERVICE_NAME"] = finding.metadata.ServiceName
finding_dict["SUBSERVICE_NAME"] = finding.metadata.SubServiceName
finding_dict["SEVERITY"] = finding.metadata.Severity.value
finding_dict["RESOURCE_TYPE"] = finding.metadata.ResourceType
finding_dict["RESOURCE_UID"] = finding.resource_uid
finding_dict["RESOURCE_NAME"] = finding.resource_name
finding_dict["RESOURCE_DETAILS"] = finding.resource_details
finding_dict["RESOURCE_TAGS"] = unroll_dict(finding.resource_tags)
finding_dict["PARTITION"] = finding.partition
finding_dict["REGION"] = finding.region
finding_dict["DESCRIPTION"] = finding.metadata.Description
finding_dict["RISK"] = finding.metadata.Risk
finding_dict["RELATED_URL"] = finding.metadata.RelatedUrl
finding_dict["REMEDIATION_RECOMMENDATION_TEXT"] = (
finding.metadata.Remediation.Recommendation.Text
)
finding_dict["REMEDIATION_RECOMMENDATION_URL"] = (
finding.metadata.Remediation.Recommendation.Url
)
finding_dict["REMEDIATION_CODE_NATIVEIAC"] = (
finding.metadata.Remediation.Code.NativeIaC
)
finding_dict["REMEDIATION_CODE_TERRAFORM"] = (
finding.metadata.Remediation.Code.Terraform
)
finding_dict["REMEDIATION_CODE_CLI"] = (
finding.metadata.Remediation.Code.CLI
)
finding_dict["REMEDIATION_CODE_OTHER"] = (
finding.metadata.Remediation.Code.Other
)
finding_dict["COMPLIANCE"] = unroll_dict(
finding.compliance, separator=": "
)
finding_dict["CATEGORIES"] = unroll_list(finding.metadata.Categories)
finding_dict["DEPENDS_ON"] = unroll_list(finding.metadata.DependsOn)
finding_dict["RELATED_TO"] = unroll_list(finding.metadata.RelatedTo)
finding_dict["NOTES"] = finding.metadata.Notes
finding_dict["PROWLER_VERSION"] = finding.prowler_version
self._data.append(finding_dict)
except Exception as error:
logger.error(

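For reference, a minimal sketch of how the two unroll helpers flatten nested finding data into the CSV columns above; the sample values are made up and assume the helpers behave as documented in prowler.lib.outputs.utils:

from prowler.lib.outputs.utils import unroll_dict, unroll_list

# COMPLIANCE column: requirement lists keyed by framework
compliance = {"CIS-1.4": ["2.1.1", "2.1.2"], "MITRE-ATTACK": ["T1530"]}
print(unroll_dict(compliance, separator=": "))
# -> 'CIS-1.4: 2.1.1, 2.1.2 | MITRE-ATTACK: T1530'

# ACCOUNT_TAGS column: plain key/value tags
account_tags = {"env": "prod", "team": "security"}
print(unroll_dict(account_tags, separator=":"))
# -> 'env:prod | team:security'

# CHECK_TYPE column: a list joined with the default "|" separator
print(unroll_list(["Software and Configuration Checks"]))
# -> 'Software and Configuration Checks'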
View File

@@ -1,34 +1,17 @@
from datetime import datetime
from enum import Enum
from typing import Optional, Union
from pydantic import BaseModel
from pydantic import BaseModel, Field
from prowler.config.config import prowler_version
from prowler.lib.check.models import Check_Report
from prowler.lib.check.models import Check_Report, CheckMetadata
from prowler.lib.logger import logger
from prowler.lib.outputs.common import (
fill_common_finding_data,
get_provider_data_mapping,
)
from prowler.lib.outputs.common import Status, fill_common_finding_data
from prowler.lib.outputs.compliance.compliance import get_check_compliance
from prowler.lib.utils.utils import dict_to_lowercase, get_nested_attribute
from prowler.providers.common.provider import Provider
class Status(str, Enum):
PASS = "PASS"
FAIL = "FAIL"
MANUAL = "MANUAL"
class Severity(str, Enum):
critical = "critical"
high = "high"
medium = "medium"
low = "low"
informational = "informational"
class Finding(BaseModel):
"""
Represents the output model for a finding across different providers.
@@ -41,81 +24,130 @@ class Finding(BaseModel):
auth_method: str
timestamp: Union[int, datetime]
account_uid: str
# Optional since it depends on permissions
account_name: Optional[str]
# Optional since it depends on permissions
account_email: Optional[str]
# Optional since it depends on permissions
account_organization_uid: Optional[str]
# Optional since it depends on permissions
account_organization_name: Optional[str]
# Optional since it depends on permissions
account_tags: Optional[list[str]]
finding_uid: str
provider: str
check_id: str
check_title: str
check_type: str
account_name: Optional[str] = None
account_email: Optional[str] = None
account_organization_uid: Optional[str] = None
account_organization_name: Optional[str] = None
metadata: CheckMetadata
account_tags: dict = {}
uid: str
status: Status
status_extended: str
muted: bool = False
service_name: str
subservice_name: str
severity: Severity
resource_type: str
resource_uid: str
resource_name: str
resource_details: str
resource_tags: str
# Only present for AWS and Azure
partition: Optional[str]
resource_tags: dict = Field(default_factory=dict)
partition: Optional[str] = None
region: str
description: str
risk: str
related_url: str
remediation_recommendation_text: str
remediation_recommendation_url: str
remediation_code_nativeiac: str
remediation_code_terraform: str
remediation_code_cli: str
remediation_code_other: str
compliance: dict
categories: str
depends_on: str
related_to: str
notes: str
prowler_version: str = prowler_version
raw: dict = Field(default_factory=dict)
@property
def provider(self) -> str:
"""
Returns the provider from the finding check's metadata.
"""
return self.metadata.Provider
@property
def check_id(self) -> str:
"""
Returns the ID from the finding check's metadata.
"""
return self.metadata.CheckID
@property
def severity(self) -> str:
"""
Returns the severity from the finding check's metadata.
"""
return self.metadata.Severity
@property
def resource_type(self) -> str:
"""
Returns the resource type from the finding check's metadata.
"""
return self.metadata.ResourceType
@property
def service_name(self) -> str:
"""
Returns the service name from the finding check's metadata.
"""
return self.metadata.ServiceName
def get_metadata(self) -> dict:
"""
Retrieves the metadata of the object and returns it as a dictionary with all keys in lowercase.
Returns:
dict: A dictionary containing the metadata with keys converted to lowercase.
"""
return dict_to_lowercase(self.metadata.dict())
@classmethod
def generate_output(
cls, provider: Provider, check_output: Check_Report
cls, provider: Provider, check_output: Check_Report, output_options
) -> "Finding":
"""Generates the output for a finding based on the provider and output options
Args:
provider (Provider): the provider object
check_output (Check_Report): the check output object
output_options: the output options object, depending on the provider
Returns:
finding_output (Finding): the finding output object
"""
output_options = provider.output_options
# TODO: think about get_provider_data_mapping
provider_data_mapping = get_provider_data_mapping(provider)
# TODO: move fill_common_finding_data
common_finding_data = fill_common_finding_data(
check_output, output_options.unix_timestamp
)
unix_timestamp = False
if hasattr(output_options, "unix_timestamp"):
unix_timestamp = output_options.unix_timestamp
common_finding_data = fill_common_finding_data(check_output, unix_timestamp)
output_data = {}
output_data.update(provider_data_mapping)
output_data.update(common_finding_data)
bulk_checks_metadata = {}
if hasattr(output_options, "bulk_checks_metadata"):
bulk_checks_metadata = output_options.bulk_checks_metadata
output_data["compliance"] = get_check_compliance(
check_output, provider.type, output_options.bulk_checks_metadata
check_output, provider.type, bulk_checks_metadata
)
try:
output_data["provider"] = provider.type
if provider.type == "aws":
output_data["account_uid"] = get_nested_attribute(
provider, "identity.account"
)
output_data["account_name"] = get_nested_attribute(
provider, "organizations_metadata.account_name"
)
output_data["account_email"] = get_nested_attribute(
provider, "organizations_metadata.account_email"
)
output_data["account_organization_uid"] = get_nested_attribute(
provider, "organizations_metadata.organization_arn"
)
output_data["account_organization_name"] = get_nested_attribute(
provider, "organizations_metadata.organization_id"
)
output_data["account_tags"] = get_nested_attribute(
provider, "organizations_metadata.account_tags"
)
output_data["partition"] = get_nested_attribute(
provider, "identity.partition"
)
# TODO: probably Organization UID is without the account id
output_data["auth_method"] = f"profile: {output_data['auth_method']}"
output_data["auth_method"] = (
f"profile: {get_nested_attribute(provider, 'identity.profile')}"
)
output_data["resource_name"] = check_output.resource_id
output_data["resource_uid"] = check_output.resource_arn
output_data["region"] = check_output.region
@@ -126,9 +158,9 @@ class Finding(BaseModel):
f"{provider.identity.identity_type}: {provider.identity.identity_id}"
)
# Get the first tenant domain ID, just in case
output_data["account_organization_uid"] = output_data[
"account_organization_uid"
][0]
output_data["account_organization_uid"] = get_nested_attribute(
provider, "identity.tenant_ids"
)[0]
output_data["account_uid"] = (
output_data["account_organization_uid"]
if "Tenant:" in check_output.subscription
@@ -138,15 +170,33 @@ class Finding(BaseModel):
output_data["resource_name"] = check_output.resource_name
output_data["resource_uid"] = check_output.resource_id
output_data["region"] = check_output.location
# TODO: check the tenant_ids
# TODO: we have to get the account organization, the tenant is not that
output_data["account_organization_name"] = get_nested_attribute(
provider, "identity.tenant_domain"
)
output_data["partition"] = get_nested_attribute(
provider, "region_config.name"
)
# TODO: pending to get the subscription tags
# "account_tags": "organizations_metadata.account_details_tags",
# TODO: store subscription_name + id pairs
# "account_name": "organizations_metadata.account_details_name",
# "account_email": "organizations_metadata.account_details_email",
elif provider.type == "gcp":
output_data["auth_method"] = f"Principal: {output_data['auth_method']}"
output_data["auth_method"] = (
f"Principal: {get_nested_attribute(provider, 'identity.profile')}"
)
output_data["account_uid"] = provider.projects[
check_output.project_id
].id
output_data["account_name"] = provider.projects[
check_output.project_id
].name
# There is no concept of a project email in GCP
# "account_email": "organizations_metadata.account_details_email",
output_data["account_tags"] = provider.projects[
check_output.project_id
].labels
@@ -165,7 +215,7 @@ class Finding(BaseModel):
check_output.project_id
].organization.id
# TODO: for now is None since we don't retrieve that data
output_data["account_organization"] = provider.projects[
output_data["account_organization_name"] = provider.projects[
check_output.project_id
].organization.display_name
@@ -177,12 +227,15 @@ class Finding(BaseModel):
output_data["resource_name"] = check_output.resource_name
output_data["resource_uid"] = check_output.resource_id
output_data["account_name"] = f"context: {provider.identity.context}"
output_data["account_uid"] = get_nested_attribute(
provider, "identity.cluster"
)
output_data["region"] = f"namespace: {check_output.namespace}"
# check_output Unique ID
# TODO: move this to a function
# TODO: in Azure, GCP and K8s there are findings without resource_name
output_data["finding_uid"] = (
output_data["uid"] = (
f"prowler-{provider.type}-{check_output.check_metadata.CheckID}-{output_data['account_uid']}-"
f"{output_data['region']}-{output_data['resource_name']}"
)
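The model now keeps a single CheckMetadata object and derives provider, check_id, severity, resource_type and service_name from it through properties instead of duplicating those columns. A sketch of the uid composition above, with made-up values that mirror the f-string:

# Made-up values, mirroring the f-string that builds the finding uid above.
provider_type = "aws"
check_id = "s3_bucket_public_access"
account_uid = "123456789012"
region = "eu-west-1"
resource_name = "my-bucket"

uid = (
    f"prowler-{provider_type}-{check_id}-{account_uid}-"
    f"{region}-{resource_name}"
)
print(uid)
# -> 'prowler-aws-s3_bucket_public_access-123456789012-eu-west-1-my-bucket'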

View File

@@ -39,17 +39,17 @@ class HTML(Output):
f"""
<tr class="{row_class}">
<td>{finding_status}</td>
<td>{finding.severity.value}</td>
<td>{finding.service_name}</td>
<td>{finding.metadata.Severity.value}</td>
<td>{finding.metadata.ServiceName}</td>
<td>{finding.region.lower()}</td>
<td>{finding.check_id.replace("_", "<wbr />_")}</td>
<td>{finding.check_title}</td>
<td>{finding.metadata.CheckID.replace("_", "<wbr />_")}</td>
<td>{finding.metadata.CheckTitle}</td>
<td>{finding.resource_uid.replace("<", "&lt;").replace(">", "&gt;").replace("_", "<wbr />_")}</td>
<td>{parse_html_string(finding.resource_tags)}</td>
<td>{parse_html_string(unroll_dict(finding.resource_tags))}</td>
<td>{finding.status_extended.replace("<", "&lt;").replace(">", "&gt;").replace("_", "<wbr />_")}</td>
<td><p class="show-read-more">{html.escape(finding.risk)}</p></td>
<td><p class="show-read-more">{html.escape(finding.remediation_recommendation_text)}</p> <a class="read-more" href="{finding.remediation_recommendation_url}"><i class="fas fa-external-link-alt"></i></a></td>
<td><p class="show-read-more">{parse_html_string(unroll_dict(finding.compliance))}</p></td>
<td><p class="show-read-more">{html.escape(finding.metadata.Risk)}</p></td>
<td><p class="show-read-more">{html.escape(finding.metadata.Remediation.Recommendation.Text)}</p> <a class="read-more" href="{finding.metadata.Remediation.Recommendation.Url}"><i class="fas fa-external-link-alt"></i></a></td>
<td><p class="show-read-more">{parse_html_string(unroll_dict(finding.compliance, separator=": "))}</p></td>
</tr>
"""
)
@@ -173,9 +173,15 @@ class HTML(Output):
<li class="list-group-item">
<b>Passed:</b> {str(stats.get("total_pass", 0))}
</li>
<li class="list-group-item">
<b>Passed (Muted):</b> {str(stats.get("total_muted_pass", 0))}
</li>
<li class="list-group-item">
<b>Failed:</b> {str(stats.get("total_fail", 0))}
</li>
<li class="list-group-item">
<b>Failed (Muted):</b> {str(stats.get("total_muted_fail", 0))}
</li>
<li class="list-group-item">
<b>Total Resources:</b> {str(stats.get("resources_count", 0))}
</li>

View File

@@ -9,7 +9,6 @@ from py_ocsf_models.events.findings.detection_finding import (
from py_ocsf_models.events.findings.finding import ActivityID, FindingInformation
from py_ocsf_models.objects.account import Account, TypeID
from py_ocsf_models.objects.cloud import Cloud
from py_ocsf_models.objects.container import Container
from py_ocsf_models.objects.group import Group
from py_ocsf_models.objects.metadata import Metadata
from py_ocsf_models.objects.organization import Organization
@@ -20,6 +19,7 @@ from py_ocsf_models.objects.resource_details import ResourceDetails
from prowler.lib.logger import logger
from prowler.lib.outputs.finding import Finding
from prowler.lib.outputs.output import Output
from prowler.lib.outputs.utils import unroll_dict_to_list
class OCSF(Output):
@@ -36,7 +36,7 @@ class OCSF(Output):
- transform(findings: List[Finding]) -> None: Transforms the findings into the OCSF Detection Finding format.
- batch_write_data_to_file() -> None: Writes the findings to a file using the OCSF Detection Finding format using the `Output._file_descriptor`.
- get_account_type_id_by_provider(provider: str) -> TypeID: Returns the TypeID based on the provider.
- get_finding_status_id(status: str, muted: bool) -> StatusID: Returns the StatusID based on the status and muted values.
- get_finding_status_id(muted: bool) -> StatusID: Returns the StatusID based on the muted value.
References:
- OCSF: https://schema.ocsf.io/1.2.0/classes/detection_finding
@@ -53,37 +53,42 @@ class OCSF(Output):
for finding in findings:
finding_activity = ActivityID.Create
cloud_account_type = self.get_account_type_id_by_provider(
finding.provider
finding.metadata.Provider
)
finding_severity = getattr(
SeverityID, finding.severity.capitalize(), SeverityID.Unknown
)
finding_status = self.get_finding_status_id(
finding.status, finding.muted
SeverityID,
finding.metadata.Severity.capitalize(),
SeverityID.Unknown,
)
finding_status = self.get_finding_status_id(finding.muted)
detection_finding = DetectionFinding(
message=finding.status_extended,
activity_id=finding_activity.value,
activity_name=finding_activity.name,
finding_info=FindingInformation(
created_time=finding.timestamp,
desc=finding.description,
title=finding.check_title,
uid=finding.finding_uid,
created_time_dt=finding.timestamp,
created_time=int(finding.timestamp.timestamp()),
desc=finding.metadata.Description,
title=finding.metadata.CheckTitle,
uid=finding.uid,
name=finding.resource_name,
product_uid="prowler",
types=finding.metadata.CheckType,
),
event_time=finding.timestamp,
time_dt=finding.timestamp,
time=int(finding.timestamp.timestamp()),
remediation=Remediation(
desc=finding.remediation_recommendation_text,
desc=finding.metadata.Remediation.Recommendation.Text,
references=list(
filter(
None,
[
finding.remediation_code_nativeiac,
finding.remediation_code_terraform,
finding.remediation_code_cli,
finding.remediation_code_other,
finding.remediation_recommendation_url,
finding.metadata.Remediation.Code.NativeIaC,
finding.metadata.Remediation.Code.Terraform,
finding.metadata.Remediation.Code.CLI,
finding.metadata.Remediation.Code.Other,
finding.metadata.Remediation.Recommendation.Url,
],
)
),
@@ -94,65 +99,73 @@ class OCSF(Output):
status=finding_status.name,
status_code=finding.status,
status_detail=finding.status_extended,
risk_details=finding.risk,
resources=[
ResourceDetails(
# TODO: Check labels for other providers
labels=(
finding.resource_tags.split(",")
if finding.resource_tags
else []
),
name=finding.resource_name,
uid=finding.resource_uid,
group=Group(name=finding.service_name),
type=finding.resource_type,
# TODO: this should be included only if using the Cloud profile
cloud_partition=finding.partition,
region=finding.region,
data={"details": finding.resource_details},
)
],
risk_details=finding.metadata.Risk,
resources=(
[
ResourceDetails(
labels=unroll_dict_to_list(finding.resource_tags),
name=finding.resource_name,
uid=finding.resource_uid,
group=Group(name=finding.metadata.ServiceName),
type=finding.metadata.ResourceType,
# TODO: this should be included only if using the Cloud profile
cloud_partition=finding.partition,
region=finding.region,
data={"details": finding.resource_details},
)
]
if finding.metadata.Provider != "kubernetes"
else [
ResourceDetails(
labels=unroll_dict_to_list(finding.resource_tags),
name=finding.resource_name,
uid=finding.resource_uid,
group=Group(name=finding.metadata.ServiceName),
type=finding.metadata.ResourceType,
data={"details": finding.resource_details},
namespace=finding.region.replace("namespace: ", ""),
)
]
),
metadata=Metadata(
event_code=finding.check_id,
event_code=finding.metadata.CheckID,
product=Product(
uid="prowler",
name="Prowler",
vendor_name="Prowler",
version=finding.prowler_version,
),
profiles=(
["cloud", "datetime"]
if finding.metadata.Provider != "kubernetes"
else ["container", "datetime"]
),
tenant_uid=finding.account_organization_uid,
),
type_uid=DetectionFindingTypeID.Create,
type_name=DetectionFindingTypeID.Create.name,
type_name=f"Detection Finding: {DetectionFindingTypeID.Create.name}",
unmapped={
"check_type": finding.check_type,
"related_url": finding.related_url,
"categories": finding.categories,
"depends_on": finding.depends_on,
"related_to": finding.related_to,
"notes": finding.notes,
"related_url": finding.metadata.RelatedUrl,
"categories": finding.metadata.Categories,
"depends_on": finding.metadata.DependsOn,
"related_to": finding.metadata.RelatedTo,
"notes": finding.metadata.Notes,
"compliance": finding.compliance,
},
)
if finding.provider == "kubernetes":
detection_finding.container = Container(
name=finding.resource_name,
uid=finding.resource_uid,
)
# TODO: Get the PID of the namespace (we only have the name of the namespace)
# detection_finding.namespace_pid=,
else:
if finding.provider != "kubernetes":
detection_finding.cloud = Cloud(
account=Account(
name=finding.account_name,
type_id=cloud_account_type.value,
type=cloud_account_type.name,
type=cloud_account_type.name.replace("_", " "),
uid=finding.account_uid,
labels=finding.account_tags,
labels=unroll_dict_to_list(finding.account_tags),
),
org=Organization(
uid=finding.account_organization_uid,
name=finding.account_organization_name,
# TODO: add the org unit id and name
),
provider=finding.provider,
region=finding.region,
@@ -212,20 +225,17 @@ class OCSF(Output):
return type_id
@staticmethod
def get_finding_status_id(status: str, muted: bool) -> StatusID:
def get_finding_status_id(muted: bool) -> StatusID:
"""
Returns the StatusID based on the status and muted values.
Returns the StatusID based on the muted value.
Args:
status (str): The status value
muted (bool): The muted value
Returns:
StatusID: The StatusID based on the status and muted values
StatusID: The StatusID based on the muted value
"""
status_id = StatusID.Other
if status == "FAIL":
status_id = StatusID.New
status_id = StatusID.New
if muted:
status_id = StatusID.Suppressed
return status_id
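With this refactor the OCSF status no longer depends on PASS/FAIL: every finding starts as New and only muted findings become Suppressed. A sketch of the resulting decision table, using placeholder strings instead of the real StatusID members:

def status_name_for(muted: bool) -> str:
    # Placeholder for StatusID: muted wins, everything else is "New".
    return "Suppressed" if muted else "New"

assert status_name_for(False) == "New"  # PASS and FAIL alike
assert status_name_for(True) == "Suppressed"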

View File

@@ -25,10 +25,12 @@ def stdout_report(finding, color, verbose, status, fix):
)
# TODO: Only pass check_findings, provider.output_options and provider.type
def report(check_findings, provider):
# TODO: Only pass check_findings, output_options and provider.type
def report(check_findings, provider, output_options):
try:
output_options = provider.output_options
verbose = False
if hasattr(output_options, "verbose"):
verbose = output_options.verbose
if check_findings:
# TO-DO Generic Function
if provider.type == "aws":
@@ -39,21 +41,27 @@ def report(check_findings, provider):
for finding in check_findings:
# Print findings by stdout
status = []
if hasattr(output_options, "status"):
status = output_options.status
fixer = False
if hasattr(output_options, "fixer"):
fixer = output_options.fixer
color = set_report_color(finding.status, finding.muted)
stdout_report(
finding,
color,
output_options.verbose,
output_options.status,
output_options.fixer,
verbose,
status,
fixer,
)
else: # No service resources in the whole account
color = set_report_color("MANUAL")
if output_options.verbose:
if verbose:
print(f"\t{color}INFO{Style.RESET_ALL} There are no resources")
# Separator between findings and bar
if output_options.verbose:
if verbose:
print()
except Exception as error:
logger.error(
@@ -82,35 +90,82 @@ def extract_findings_statistics(findings: list) -> dict:
extract_findings_statistics takes a list of findings and returns the following dict with the aggregated statistics
{
    "total_pass": 0,
    "total_muted_pass": 0,
    "total_fail": 0,
    "total_muted_fail": 0,
    "resources_count": 0,
    "findings_count": 0,
    "total_critical_severity_fail": 0,
    "total_critical_severity_pass": 0,
    "total_high_severity_fail": 0,
    "total_high_severity_pass": 0,
    "total_medium_severity_fail": 0,
    "total_medium_severity_pass": 0,
    "total_low_severity_fail": 0,
    "total_low_severity_pass": 0,
    "all_fails_are_muted": False
}
"""
logger.info("Extracting audit statistics...")
stats = {}
total_pass = 0
total_fail = 0
muted_pass = 0
muted_fail = 0
resources = set()
findings_count = 0
all_fails_are_muted = True
critical_severity_pass = 0
critical_severity_fail = 0
high_severity_pass = 0
high_severity_fail = 0
medium_severity_pass = 0
medium_severity_fail = 0
low_severity_pass = 0
low_severity_fail = 0
for finding in findings:
# Save the resource_id
resources.add(finding.resource_id)
if finding.status == "PASS":
if finding.check_metadata.Severity == "critical":
critical_severity_pass += 1
if finding.check_metadata.Severity == "high":
high_severity_pass += 1
if finding.check_metadata.Severity == "medium":
medium_severity_pass += 1
if finding.check_metadata.Severity == "low":
low_severity_pass += 1
total_pass += 1
findings_count += 1
if finding.muted is True:
muted_pass += 1
if finding.status == "FAIL":
if finding.check_metadata.Severity == "critical":
critical_severity_fail += 1
if finding.check_metadata.Severity == "high":
high_severity_fail += 1
if finding.check_metadata.Severity == "medium":
medium_severity_fail += 1
if finding.check_metadata.Severity == "low":
low_severity_fail += 1
total_fail += 1
findings_count += 1
if finding.muted is True:
muted_fail += 1
if not finding.muted and all_fails_are_muted:
all_fails_are_muted = False
stats["total_pass"] = total_pass
stats["total_muted_pass"] = muted_pass
stats["total_fail"] = total_fail
stats["total_muted_fail"] = muted_fail
stats["resources_count"] = len(resources)
stats["findings_count"] = findings_count
stats["total_critical_severity_fail"] = critical_severity_fail
stats["total_critical_severity_pass"] = critical_severity_pass
stats["total_high_severity_fail"] = high_severity_fail
stats["total_high_severity_pass"] = high_severity_pass
stats["total_medium_severity_fail"] = medium_severity_fail
stats["total_medium_severity_pass"] = medium_severity_pass
stats["total_low_severity_fail"] = medium_severity_fail
stats["total_low_severity_pass"] = medium_severity_pass
stats["all_fails_are_muted"] = all_fails_are_muted
return stats
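A small sketch of feeding the function stand-in findings (SimpleNamespace objects carrying exactly the attributes read in the loop above); the expected counters follow from the code:

from types import SimpleNamespace

findings = [
    SimpleNamespace(
        resource_id="bucket-1",
        status="FAIL",
        muted=False,
        check_metadata=SimpleNamespace(Severity="critical"),
    ),
    SimpleNamespace(
        resource_id="bucket-2",
        status="PASS",
        muted=True,
        check_metadata=SimpleNamespace(Severity="low"),
    ),
]

stats = extract_findings_statistics(findings)
assert stats["total_fail"] == 1
assert stats["total_muted_pass"] == 1
assert stats["total_critical_severity_fail"] == 1
assert stats["resources_count"] == 2
assert stats["all_fails_are_muted"] is False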

View File

@@ -0,0 +1,61 @@
from prowler.exceptions.exceptions import ProwlerException
# Exceptions codes from 8000 to 8999 are reserved for Slack exceptions
class SlackBaseException(ProwlerException):
"""Base class for Slack errors."""
SLACK_ERROR_CODES = {
(8000, "SlackClientError"): {
"message": "Slack ClientError occurred",
"remediation": "Check your Slack client configuration and permissions.",
},
(8001, "SlackNoCredentialsError"): {
"message": "Invalid Slack credentials found",
"remediation": "Some aspect of authentication cannot be validated. Either the provided token is invalid or the request originates from an IP address disallowed from making the request.",
},
(8002, "SlackChannelNotFound"): {
"message": "Slack channel not found",
"remediation": "Check the channel name and ensure it exists.",
},
}
def __init__(self, code, file=None, original_exception=None, message=None):
error_info = self.SLACK_ERROR_CODES.get((code, self.__class__.__name__))
if message:
error_info["message"] = message
super().__init__(
code,
source="Slack",
file=file,
original_exception=original_exception,
error_info=error_info,
)
class SlackCredentialsError(SlackBaseException):
"""Base class for Slack credentials errors."""
def __init__(self, code, file=None, original_exception=None, message=None):
super().__init__(code, file, original_exception, message)
class SlackClientError(SlackCredentialsError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
8000, file=file, original_exception=original_exception, message=message
)
class SlackNoCredentialsError(SlackCredentialsError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
8001, file=file, original_exception=original_exception, message=message
)
class SlackChannelNotFound(SlackCredentialsError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
8002, file=file, original_exception=original_exception, message=message
)
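Usage sketch: the optional message argument overrides the default message registered in SLACK_ERROR_CODES, while the code (8002), source ("Slack") and remediation text stay attached to the exception:

try:
    raise SlackChannelNotFound(
        file="slack.py", message="channel_not_found"
    )
except SlackBaseException as error:
    # Any Slack error (codes 8000-8999) can be handled here.
    print(error)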

View File

@@ -1,3 +1,4 @@
import os
from typing import Any
from slack_sdk import WebClient
@@ -5,6 +6,12 @@ from slack_sdk.web.base_client import SlackResponse
from prowler.config.config import aws_logo, azure_logo, gcp_logo, square_logo_img
from prowler.lib.logger import logger
from prowler.lib.outputs.slack.exceptions.exceptions import (
SlackChannelNotFound,
SlackClientError,
SlackNoCredentialsError,
)
from prowler.providers.common.models import Connection
class Slack:
@@ -43,6 +50,7 @@ class Slack:
username="Prowler",
icon_url=square_logo_img,
channel=f"#{self.channel}",
text="Prowler Scan Summary",
blocks=self.__create_message_blocks__(identity, logo, stats, args),
)
return response
@@ -50,7 +58,6 @@ class Slack:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
return error
def __create_message_identity__(self, provider: Any):
"""
@@ -121,6 +128,19 @@ class Slack:
"text": f"\n:white_check_mark: *{stats['total_pass']} Passed findings* ({round(stats['total_pass'] / stats['findings_count'] * 100 , 2)}%)\n",
},
},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": (
"*Severities:*\n"
f"• *Critical:* {stats['total_critical_severity_pass']} "
f"• *High:* {stats['total_high_severity_pass']} "
f"• *Medium:* {stats['total_medium_severity_pass']} "
f"• *Low:* {stats['total_low_severity_pass']}"
),
},
},
{
"type": "section",
"text": {
@@ -128,6 +148,19 @@ class Slack:
"text": f"\n:x: *{stats['total_fail']} Failed findings* ({round(stats['total_fail'] / stats['findings_count'] * 100 , 2)}%)\n ",
},
},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": (
"*Severities:*\n"
f"• *Critical:* {stats['total_critical_severity_fail']} "
f"• *High:* {stats['total_high_severity_fail']} "
f"• *Medium:* {stats['total_medium_severity_fail']} "
f"• *Low:* {stats['total_low_severity_fail']}"
),
},
},
{
"type": "section",
"text": {
@@ -204,3 +237,66 @@ class Slack:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
@staticmethod
def test_connection(
token: str,
channel: str,
raise_on_exception: bool = True,
) -> Connection:
"""
Test the Slack connection by validating the provided token and channel.
Args:
token (str): The Slack token to be tested.
channel (str): The Slack channel to be validated.
raise_on_exception (bool): Whether to raise the exception if the connection fails. Defaults to True.
Returns:
Connection: A Connection object.
"""
try:
client = WebClient(token=token)
# Test if the token is valid
auth_response = client.auth_test()
if auth_response["ok"]:
# Test if the channel is accessible
channels_response = client.conversations_info(
token=token, channel=channel
)
if channels_response["ok"]:
return Connection(is_connected=True)
else:
exception = SlackChannelNotFound(
file=os.path.basename(__file__),
message=(
channels_response["error"]
if "error" in channels_response
else "Unknown error"
),
)
if raise_on_exception:
raise exception
return Connection(error=exception)
else:
exception = SlackNoCredentialsError(
file=os.path.basename(__file__),
message=(
auth_response["error"]
if "error" in auth_response
else "Unknown error"
),
)
if raise_on_exception:
raise exception
return Connection(error=exception)
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
if raise_on_exception:
raise SlackClientError(
file=os.path.basename(__file__),
original_exception=error,
) from error
return Connection(error=error)
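An illustrative call (the token and channel ID are placeholders, and this assumes Connection defaults is_connected to False when only an error is set); with raise_on_exception=False the failure comes back inside the Connection object instead of being raised:

connection = Slack.test_connection(
    token="xoxb-not-a-real-token",
    channel="C0123456789",
    raise_on_exception=False,
)
if connection.is_connected:
    print("Slack is reachable")
else:
    print(f"Slack connection failed: {connection.error}")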

View File

@@ -1,4 +1,24 @@
def unroll_list(listed_items: list, separator: str = "|"):
def unroll_list(listed_items: list, separator: str = "|") -> str:
"""
Unrolls a list of items into a single string, separated by a specified separator.
Args:
listed_items (list): The list of items to be unrolled.
separator (str, optional): The separator to be used between the items. Defaults to "|".
Returns:
str: The unrolled string.
Examples:
>>> unroll_list(['apple', 'banana', 'orange'])
'apple | banana | orange'
>>> unroll_list(['apple', 'banana', 'orange'], separator=',')
'apple, banana, orange'
>>> unroll_list([])
''
"""
unrolled_items = ""
if listed_items:
for item in listed_items:
@@ -13,70 +33,138 @@ def unroll_list(listed_items: list, separator: str = "|"):
return unrolled_items
def unroll_tags(tags: list):
def unroll_tags(tags: list) -> dict:
"""
Unrolls a list of tags into a dictionary.
Args:
tags (list): A list of tags.
Returns:
dict: A dictionary containing the unrolled tags.
Examples:
>>> tags = [{"key": "name", "value": "John"}, {"key": "age", "value": "30"}]
>>> unroll_tags(tags)
{'name': 'John', 'age': '30'}
>>> tags = [{"Key": "name", "Value": "John"}, {"Key": "age", "Value": "30"}]
>>> unroll_tags(tags)
{'name': 'John', 'age': '30'}
>>> tags = [{"key": "name"}]
>>> unroll_tags(tags)
{'name': ''}
>>> tags = [{"Key": "name"}]
>>> unroll_tags(tags)
{'name': ''}
>>> tags = [{"name": "John", "age": "30"}]
>>> unroll_tags(tags)
{'name': 'John', 'age': '30'}
>>> tags = []
>>> unroll_tags(tags)
{}
>>> tags = {"name": "John", "age": "30"}
>>> unroll_tags(tags)
{'name': 'John', 'age': '30'}
>>> tags = ["name", "age"]
>>> unroll_tags(tags)
{'name': '', 'age': ''}
"""
if tags and tags != [{}] and tags != [None] and tags != []:
if isinstance(tags, dict):
return tags
if isinstance(tags[0], str) and len(tags) > 0:
return {tag: "" for tag in tags}
if "key" in tags[0]:
return {item["key"]: item.get("value", "") for item in tags}
elif "Key" in tags[0]:
return {item["Key"]: item.get("Value", "") for item in tags}
else:
return {key: value for d in tags for key, value in d.items()}
return {}
def unroll_dict(dict: dict, separator: str = "=") -> str:
"""
Unrolls a dictionary into a string representation.
Args:
dict (dict): The dictionary to be unrolled.
separator (str, optional): The separator between each key and its value. Defaults to "=".
Returns:
str: The unrolled string representation of the dictionary.
Examples:
>>> my_dict = {'name': 'John', 'age': 30, 'hobbies': ['reading', 'coding']}
>>> unroll_dict(my_dict)
'name=John | age=30 | hobbies=reading, coding'
"""
unrolled_items = ""
separator = "|"
if tags and tags != [{}] and tags != [None]:
for item in tags:
# Check if there are tags in list
if isinstance(item, dict):
for key, value in item.items():
if not unrolled_items:
# Check the pattern of tags (Key:Value or Key:key/Value:value)
if "Key" != key and "Value" != key:
unrolled_items = f"{key}={value}"
else:
if "Key" == key:
unrolled_items = f"{value}="
else:
unrolled_items = f"{value}"
else:
if "Key" != key and "Value" != key:
unrolled_items = (
f"{unrolled_items} {separator} {key}={value}"
)
else:
if "Key" == key:
unrolled_items = (
f"{unrolled_items} {separator} {value}="
)
else:
unrolled_items = f"{unrolled_items}{value}"
elif not unrolled_items:
unrolled_items = f"{item}"
else:
unrolled_items = f"{unrolled_items} {separator} {item}"
return unrolled_items
def unroll_dict(dict: dict):
unrolled_items = ""
separator = "|"
for key, value in dict.items():
if isinstance(value, list):
value = ", ".join(value)
if not unrolled_items:
unrolled_items = f"{key}: {value}"
unrolled_items = f"{key}{separator}{value}"
else:
unrolled_items = f"{unrolled_items} {separator} {key}: {value}"
unrolled_items = f"{unrolled_items} | {key}{separator}{value}"
return unrolled_items
def unroll_dict_to_list(dict: dict):
def unroll_dict_to_list(dict: dict) -> list:
"""
Unrolls a dictionary into a list of key-value pairs.
Args:
dict (dict): The dictionary to be unrolled.
Returns:
list: A list of key-value pairs, where each pair is represented as a string.
Examples:
>>> my_dict = {'name': 'John', 'age': 30, 'hobbies': ['reading', 'coding']}
>>> unroll_dict_to_list(my_dict)
['name:John', 'age:30', 'hobbies:reading, coding']
"""
dict_list = []
for key, value in dict.items():
if isinstance(value, list):
value = ", ".join(value)
dict_list.append(f"{key}: {value}")
dict_list.append(f"{key}:{value}")
else:
dict_list.append(f"{key}: {value}")
dict_list.append(f"{key}:{value}")
return dict_list
def parse_json_tags(tags: list):
def parse_json_tags(tags: list) -> dict[str, str]:
"""
Parses a list of JSON tags and returns a dictionary of key-value pairs.
Args:
tags (list): A list of JSON tags.
Returns:
dict: A dictionary containing the parsed key-value pairs from the tags.
Examples:
>>> tags = [
... {"Key": "Name", "Value": "John"},
... {"Key": "Age", "Value": "30"},
... {"Key": "City", "Value": "New York"}
... ]
>>> parse_json_tags(tags)
{'Name': 'John', 'Age': '30', 'City': 'New York'}
"""
dict_tags = {}
if tags and tags != [{}] and tags != [None]:
for tag in tags:
@@ -88,7 +176,23 @@ def parse_json_tags(tags: list):
return dict_tags
def parse_html_string(str: str):
def parse_html_string(str: str) -> str:
"""
Parses a string and returns a formatted HTML string.
This function takes an input string and splits it using the delimiter " | ".
It then formats each element of the split string as a bullet point in HTML format.
Args:
str (str): The input string to be parsed.
Returns:
str: The formatted HTML string.
Example:
>>> parse_html_string("item1 | item2 | item3")
'\n&#x2022;item1\n\n&#x2022;item2\n\n&#x2022;item3\n'
"""
string = ""
for elem in str.split(" | "):
if elem:

View File

@@ -0,0 +1,88 @@
from prowler.exceptions.exceptions import ProwlerException
# Exceptions codes from 5000 to 5999 are reserved for Scan exceptions
class ScanBaseException(ProwlerException):
"""Base class for Scan errors."""
SCAN_ERROR_CODES = {
(5000, "ScanInvalidSeverityError"): {
"message": "Invalid severity level provided.",
"remediation": "Please provide a valid severity level. Valid severities are: critical, high, medium, low, informational.",
},
(5001, "ScanInvalidCheckError"): {
"message": "Invalid check provided.",
"remediation": "Please provide a valid check name.",
},
(5002, "ScanInvalidServiceError"): {
"message": "Invalid service provided.",
"remediation": "Please provide a valid service name.",
},
(5003, "ScanInvalidComplianceFrameworkError"): {
"message": "Invalid compliance framework provided.",
"remediation": "Please provide a valid compliance framework name for the chosen provider.",
},
(5004, "ScanInvalidCategoryError"): {
"message": "Invalid category provided.",
"remediation": "Please provide a valid category name.",
},
(5005, "ScanInvalidStatusError"): {
"message": "Invalid status provided.",
"remediation": "Please provide a valid status: FAIL, PASS, MANUAL.",
},
}
def __init__(self, code, file=None, original_exception=None, message=None):
module = "Scan"
error_info = self.SCAN_ERROR_CODES.get((code, self.__class__.__name__))
if message:
error_info["message"] = message
super().__init__(
code=code,
source=module,
file=file,
original_exception=original_exception,
error_info=error_info,
)
class ScanInvalidSeverityError(ScanBaseException):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
5000, file=file, original_exception=original_exception, message=message
)
class ScanInvalidCheckError(ScanBaseException):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
5001, file=file, original_exception=original_exception, message=message
)
class ScanInvalidServiceError(ScanBaseException):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
5002, file=file, original_exception=original_exception, message=message
)
class ScanInvalidComplianceFrameworkError(ScanBaseException):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
5003, file=file, original_exception=original_exception, message=message
)
class ScanInvalidCategoryError(ScanBaseException):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
5004, file=file, original_exception=original_exception, message=message
)
class ScanInvalidStatusError(ScanBaseException):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
5005, file=file, original_exception=original_exception, message=message
)
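Since every validation error derives from ScanBaseException, a caller configuring a Scan can handle all invalid-input cases (codes 5000-5005) with a single handler; a sketch:

try:
    raise ScanInvalidSeverityError(
        message="Invalid severity provided: urgent."
    )
except ScanBaseException as error:
    print(f"Scan could not be configured: {error}")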

View File

@@ -1,60 +1,379 @@
from typing import Any
import datetime
from typing import Generator
from prowler.lib.check.check import execute
from prowler.lib.check.models import Check_Report
from prowler.lib.check.check import (
execute,
import_check,
list_services,
update_audit_metadata,
)
from prowler.lib.check.checks_loader import load_checks_to_execute
from prowler.lib.check.compliance import update_checks_metadata_with_compliance
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.models import CheckMetadata, Severity
from prowler.lib.logger import logger
from prowler.lib.outputs.common import Status
from prowler.lib.outputs.finding import Finding
from prowler.lib.scan.exceptions.exceptions import (
ScanInvalidCategoryError,
ScanInvalidCheckError,
ScanInvalidComplianceFrameworkError,
ScanInvalidServiceError,
ScanInvalidSeverityError,
ScanInvalidStatusError,
)
from prowler.providers.common.models import Audit_Metadata
from prowler.providers.common.provider import Provider
def scan(
checks_to_execute: list,
global_provider: Any,
custom_checks_metadata: Any,
) -> list[Check_Report]:
try:
# List to store all the check's findings
all_findings = []
# Services and checks executed for the Audit Status
services_executed = set()
checks_executed = set()
class Scan:
_provider: Provider
# Refactor(Core): This should replace the Audit_Metadata
_number_of_checks_to_execute: int = 0
_number_of_checks_completed: int = 0
# TODO the str should be a set of Check objects
_checks_to_execute: list[str]
_service_checks_to_execute: dict[str, set[str]]
_service_checks_completed: dict[str, set[str]]
_progress: float = 0.0
_findings: list = []
_duration: int = 0
_status: list[str] = None
# Initialize the Audit Metadata
# TODO: this should be done in the provider class
# Refactor(Core): Audit manager?
global_provider.audit_metadata = Audit_Metadata(
services_scanned=0,
expected_checks=checks_to_execute,
completed_checks=0,
audit_progress=0,
)
def __init__(
self,
provider: Provider,
checks: list[str] = None,
services: list[str] = None,
compliances: list[str] = None,
categories: list[str] = None,
severities: list[str] = None,
excluded_checks: list[str] = None,
excluded_services: list[str] = None,
status: list[str] = None,
):
"""
Scan is the class that executes the checks and yields the progress and the findings.
for check_name in checks_to_execute:
Params:
provider: Provider -> The provider to scan
checks: list[str] -> The checks to execute
services: list[str] -> The services to scan
compliances: list[str] -> The compliances to check
categories: list[str] -> The categories of the checks
severities: list[str] -> The severities of the checks
excluded_checks: list[str] -> The checks to exclude
excluded_services: list[str] -> The services to exclude
status: list[str] -> The status of the checks
Raises:
ScanInvalidCheckError: If the check does not exist in the provider or is from another provider.
ScanInvalidServiceError: If the service does not exist in the provider.
ScanInvalidComplianceFrameworkError: If the compliance framework does not exist in the provider.
ScanInvalidCategoryError: If the category does not exist in the provider.
ScanInvalidSeverityError: If the severity does not exist in the provider.
ScanInvalidStatusError: If the status does not exist in the provider.
"""
self._provider = provider
# Validate the status
if status:
try:
# Recover service from check name
service = check_name.split("_")[0]
for s in status:
Status(s)
if not self._status:
self._status = []
if s not in self._status:
self._status.append(s)
except ValueError:
raise ScanInvalidStatusError(message=f"Invalid status provided: {s}.")
check_findings = execute(
service,
check_name,
global_provider,
services_executed,
checks_executed,
custom_checks_metadata,
)
all_findings.extend(check_findings)
# Load bulk compliance frameworks
bulk_compliance_frameworks = Compliance.get_bulk(provider.type)
# If the check does not exist in the provider or is from another provider
except ModuleNotFoundError:
logger.error(
f"Check '{check_name}' was not found for the {global_provider.type.upper()} provider"
)
except Exception as error:
logger.error(
f"{check_name} - {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
f"{check_name} - {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
# Get bulk checks metadata for the provider
bulk_checks_metadata = CheckMetadata.get_bulk(provider.type)
# Complete checks metadata with the compliance framework specification
bulk_checks_metadata = update_checks_metadata_with_compliance(
bulk_compliance_frameworks, bulk_checks_metadata
)
return all_findings
# Create a set of valid categories
valid_categories = set()
for check, metadata in bulk_checks_metadata.items():
for category in metadata.Categories:
if category not in valid_categories:
valid_categories.add(category)
# Validate checks
if checks:
for check in checks:
if check not in bulk_checks_metadata.keys():
raise ScanInvalidCheckError(message=f"Invalid check provided: {check}.")
# Validate services
if services:
for service in services:
if service not in list_services(provider.type):
raise ScanInvalidServiceError(
f"Invalid service provided: {service}."
)
# Validate compliances
if compliances:
for compliance in compliances:
if compliance not in bulk_compliance_frameworks.keys():
raise ScanInvalidComplianceFrameworkError(
f"Invalid compliance provided: {compliance}."
)
# Validate categories
if categories:
for category in categories:
if category not in valid_categories:
raise ScanInvalidCategoryError(
f"Invalid category provided: {category}."
)
# Validate severity
if severities:
for severity in severities:
try:
Severity(severity)
except ValueError:
raise ScanInvalidSeverityError(
f"Invalid severity provided: {severity}."
)
# Load checks to execute
self._checks_to_execute = sorted(
load_checks_to_execute(
bulk_checks_metadata=bulk_checks_metadata,
bulk_compliance_frameworks=bulk_compliance_frameworks,
check_list=checks,
service_list=services,
compliance_frameworks=compliances,
categories=categories,
severities=severities,
provider=provider.type,
checks_file=None,
)
)
# Exclude checks
if excluded_checks:
for check in excluded_checks:
if check in self._checks_to_execute:
self._checks_to_execute.remove(check)
else:
raise ScanInvalidCheckError(
    message=f"Invalid check provided: {check}. Check does not exist in the provider."
)
# Exclude services
if excluded_services:
    for service in excluded_services:
        if service not in list_services(provider.type):
            raise ScanInvalidServiceError(
                message=f"Invalid service provided: {service}. Service does not exist in the provider."
            )
    # Filter with a new list instead of removing while iterating
    self._checks_to_execute = [
        check
        for check in self._checks_to_execute
        if get_service_name_from_check_name(check) not in excluded_services
    ]
self._number_of_checks_to_execute = len(self._checks_to_execute)
service_checks_to_execute = get_service_checks_to_execute(
self._checks_to_execute
)
service_checks_completed = dict()
self._service_checks_to_execute = service_checks_to_execute
self._service_checks_completed = service_checks_completed
@property
def checks_to_execute(self) -> list[str]:
return self._checks_to_execute
@property
def service_checks_to_execute(self) -> dict[str, set[str]]:
return self._service_checks_to_execute
@property
def service_checks_completed(self) -> dict[str, set[str]]:
return self._service_checks_completed
@property
def provider(self) -> Provider:
return self._provider
@property
def progress(self) -> float:
return (
self._number_of_checks_completed / self._number_of_checks_to_execute * 100
)
@property
def duration(self) -> int:
return self._duration
@property
def findings(self) -> list:
return self._findings
def scan(
self,
custom_checks_metadata: dict = {},
) -> Generator[tuple[float, list[Finding]], None, None]:
"""
Executes the scan by iterating over the checks to execute and executing each check.
Yields the progress and findings for each check.
Args:
custom_checks_metadata (dict): Custom metadata for the checks (default: {}).
Yields:
Tuple[float, list[Finding]]: A tuple containing the progress and findings for each check.
Raises:
ModuleNotFoundError: If the check does not exist in the provider or is from another provider.
Exception: If any other error occurs during the execution of a check.
"""
try:
checks_to_execute = self.checks_to_execute
# Initialize the Audit Metadata
# TODO: this should be done in the provider class
# Refactor(Core): Audit manager?
self._provider.audit_metadata = Audit_Metadata(
services_scanned=0,
expected_checks=checks_to_execute,
completed_checks=0,
audit_progress=0,
)
start_time = datetime.datetime.now()
for check_name in checks_to_execute:
try:
# Recover service from check name
service = get_service_name_from_check_name(check_name)
try:
# Import check module
check_module_path = f"prowler.providers.{self._provider.type}.services.{service}.{check_name}.{check_name}"
lib = import_check(check_module_path)
# Recover functions from check
check_to_execute = getattr(lib, check_name)
check = check_to_execute()
except ModuleNotFoundError:
logger.error(
f"Check '{check_name}' was not found for the {self._provider.type.upper()} provider"
)
continue
# Execute the check
check_findings = execute(
check,
self._provider,
custom_checks_metadata,
output_options=None,
)
# Filter the findings by the status
if self._status:
    # Rebuild the list instead of removing while iterating,
    # which would skip findings
    check_findings = [
        finding
        for finding in check_findings
        if finding.status in self._status
    ]
# Store findings
self._findings.extend(check_findings)
# Remove the executed check
self._service_checks_to_execute[service].remove(check_name)
if len(self._service_checks_to_execute[service]) == 0:
self._service_checks_to_execute.pop(service, None)
# Add the completed check
if service not in self._service_checks_completed:
self._service_checks_completed[service] = set()
self._service_checks_completed[service].add(check_name)
self._number_of_checks_completed += 1
# This should be done only once all the service's checks are completed
# This metadata needs to get to the services not within the provider
# since it is present in the Scan class
self._provider.audit_metadata = update_audit_metadata(
self._provider.audit_metadata,
self.get_completed_services(),
self.get_completed_checks(),
)
findings = [
Finding.generate_output(
self._provider, finding, output_options=None
)
for finding in check_findings
]
yield self.progress, findings
# If the check does not exist in the provider or is from another provider
except ModuleNotFoundError:
logger.error(
f"Check '{check_name}' was not found for the {self._provider.type.upper()} provider"
)
except Exception as error:
logger.error(
f"{check_name} - {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
# Update the scan duration when all checks are completed
self._duration = int((datetime.datetime.now() - start_time).total_seconds())
except Exception as error:
logger.error(
f"{check_name} - {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def get_completed_services(self) -> set[str]:
"""
get_completed_services returns the services that have been completed.
Example:
get_completed_services() -> {"ec2", "s3"}
"""
return set(self._service_checks_completed.keys())
def get_completed_checks(self) -> set[str]:
"""
get_completed_checks returns the checks that have been completed.
Example:
get_completed_checks() -> {"ec2_instance_public_ip", "s3_bucket_public"}
"""
completed_checks = set()
for checks in self._service_checks_completed.values():
completed_checks.update(checks)
return completed_checks
def get_service_name_from_check_name(check_name: str) -> str:
"""
get_service_name_from_check_name returns the service name for a given check name.
Example:
get_service_name_from_check_name("ec2_instance_public") -> "ec2"
"""
return check_name.split("_")[0]
def get_service_checks_to_execute(checks_to_execute: set[str]) -> dict[str, set[str]]:
"""
get_service_checks_to_execute returns a dictionary with the services and the checks to execute.
Example:
get_service_checks_to_execute({"accessanalyzer_enabled", "ec2_instance_public_ip"})
-> {"accessanalyzer": {"accessanalyzer_enabled"}, "ec2": {"ec2_instance_public_ip"}}
"""
service_checks_to_execute = dict()
for check in checks_to_execute:
# check -> accessanalyzer_enabled
# service -> accessanalyzer
service = get_service_name_from_check_name(check)
if service not in service_checks_to_execute:
service_checks_to_execute[service] = set()
service_checks_to_execute[service].add(check)
return service_checks_to_execute
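A hedged sketch of driving the new Scan class; provider stands for an already-initialized Provider object (e.g. the AWS provider) and is not constructed here:

scan = Scan(provider, severities=["critical", "high"])
for progress, findings in scan.scan():
    print(f"{progress:.1f}% complete, {len(findings)} new findings")
print(
    f"Scan took {scan.duration}s and produced "
    f"{len(scan.findings)} findings in total"
)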

View File

@@ -1,5 +1,6 @@
import json
import os
from operator import attrgetter
try:
import grp
@@ -16,11 +17,11 @@ from io import TextIOWrapper
from ipaddress import ip_address
from os.path import exists
from time import mktime
from typing import Optional
from typing import Any, Optional
from colorama import Style
from detect_secrets import SecretsCollection
from detect_secrets.settings import default_settings
from detect_secrets.settings import transient_settings
from prowler.config.config import encoding_format_utf_8
from prowler.lib.logger import logger
@@ -80,20 +81,96 @@ def hash_sha512(string: str) -> str:
return sha512(string.encode(encoding_format_utf_8)).hexdigest()[0:9]
def detect_secrets_scan(data):
temp_data_file = tempfile.NamedTemporaryFile(delete=False)
temp_data_file.write(bytes(data, encoding="raw_unicode_escape"))
temp_data_file.close()
def detect_secrets_scan(
data: str = None, file=None, excluded_secrets: list[str] = None
) -> list[dict[str, str]]:
"""detect_secrets_scan scans the data or file for secrets using the detect-secrets library.
Args:
data (str): The data to scan for secrets.
file (str): The file to scan for secrets.
excluded_secrets (list): A list of regex patterns to exclude from the scan.
Returns:
    list: The secrets found in the data or file, otherwise None. Errors during the scan are logged and None is returned.
Examples:
    >>> detect_secrets_scan(data="password=password")
    [{'filename': 'data', 'hashed_secret': 'f7c3bc1d808e04732adf679965ccc34ca7ae3441', 'is_verified': False, 'line_number': 1, 'type': 'Secret Keyword'}]
    >>> detect_secrets_scan(file="file.txt")
    [{'filename': 'file.txt', 'hashed_secret': 'f7c3bc1d808e04732adf679965ccc34ca7ae3441', 'is_verified': False, 'line_number': 1, 'type': 'Secret Keyword'}]
"""
try:
if not file:
temp_data_file = tempfile.NamedTemporaryFile(delete=False)
temp_data_file.write(bytes(data, encoding="raw_unicode_escape"))
temp_data_file.close()
secrets = SecretsCollection()
with default_settings():
secrets.scan_file(temp_data_file.name)
os.remove(temp_data_file.name)
secrets = SecretsCollection()
detect_secrets_output = secrets.json()
if detect_secrets_output:
return detect_secrets_output[temp_data_file.name]
else:
settings = {
"plugins_used": [
{"name": "ArtifactoryDetector"},
{"name": "AWSKeyDetector"},
{"name": "AzureStorageKeyDetector"},
{"name": "BasicAuthDetector"},
{"name": "CloudantDetector"},
{"name": "DiscordBotTokenDetector"},
{"name": "GitHubTokenDetector"},
{"name": "GitLabTokenDetector"},
{"name": "Base64HighEntropyString", "limit": 6.0},
{"name": "HexHighEntropyString", "limit": 3.0},
{"name": "IbmCloudIamDetector"},
{"name": "IbmCosHmacDetector"},
# {"name": "IPPublicDetector"}, https://github.com/Yelp/detect-secrets/pull/885
{"name": "JwtTokenDetector"},
{"name": "KeywordDetector"},
{"name": "MailchimpDetector"},
{"name": "NpmDetector"},
{"name": "OpenAIDetector"},
{"name": "PrivateKeyDetector"},
{"name": "PypiTokenDetector"},
{"name": "SendGridDetector"},
{"name": "SlackDetector"},
{"name": "SoftlayerDetector"},
{"name": "SquareOAuthDetector"},
{"name": "StripeDetector"},
# {"name": "TelegramBotTokenDetector"}, https://github.com/Yelp/detect-secrets/pull/878
{"name": "TwilioKeyDetector"},
],
"filters_used": [
{"path": "detect_secrets.filters.common.is_invalid_file"},
{"path": "detect_secrets.filters.common.is_known_false_positive"},
{"path": "detect_secrets.filters.heuristic.is_likely_id_string"},
{"path": "detect_secrets.filters.heuristic.is_potential_secret"},
],
}
if excluded_secrets and len(excluded_secrets) > 0:
settings["filters_used"].append(
{
"path": "detect_secrets.filters.regex.should_exclude_line",
"pattern": excluded_secrets,
}
)
with transient_settings(settings):
if file:
secrets.scan_file(file)
else:
secrets.scan_file(temp_data_file.name)
if not file:
os.remove(temp_data_file.name)
detect_secrets_output = secrets.json()
if detect_secrets_output:
if file:
return detect_secrets_output[file]
else:
return detect_secrets_output[temp_data_file.name]
else:
return None
except Exception as e:
logger.error(f"Error scanning for secrets: {e}")
return None
@@ -196,3 +273,44 @@ def print_boxes(messages: list, report_title: str):
f"{Style.BRIGHT}{Style.RESET_ALL} · {message}{Style.BRIGHT}{Style.RESET_ALL}"
)
print()
def dict_to_lowercase(d):
"""
Convert all keys in a dictionary to lowercase.
This function takes a dictionary and returns a new dictionary
with all the keys converted to lowercase. If a value in the
dictionary is another dictionary, the function will recursively
convert the keys of that dictionary to lowercase as well.
Args:
d (dict): The dictionary to convert.
Returns:
dict: A new dictionary with all keys in lowercase.
"""
new_dict = {}
for k, v in d.items():
if isinstance(v, dict):
v = dict_to_lowercase(v)
new_dict[k.lower()] = v
return new_dict
def get_nested_attribute(obj: Any, attr: str) -> Any:
"""
Get a nested attribute from an object.
Args:
obj (Any): The object to get the attribute from.
attr (str): The attribute to get.
Returns:
Any: The attribute value if present, otherwise "".
"""
try:
return attrgetter(attr)(obj)
except AttributeError:
return ""
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
return ""

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -0,0 +1,222 @@
from prowler.exceptions.exceptions import ProwlerException
# Exceptions codes from 1000 to 1999 are reserved for AWS exceptions
class AWSBaseException(ProwlerException):
"""Base class for AWS errors."""
AWS_ERROR_CODES = {
(1000, "AWSClientError"): {
"message": "AWS ClientError occurred",
"remediation": "Check your AWS client configuration and permissions.",
},
(1001, "AWSProfileNotFoundError"): {
"message": "AWS Profile not found",
"remediation": "Ensure the AWS profile is correctly configured, please visit https://docs.aws.amazon.com/cli/v1/userguide/cli-configure-files.html",
},
(1002, "AWSNoCredentialsError"): {
"message": "No AWS credentials found",
"remediation": "Verify that AWS credentials are properly set up, please visit https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/aws/authentication/ and https://docs.aws.amazon.com/cli/v1/userguide/cli-chap-configure.html",
},
(1003, "AWSArgumentTypeValidationError"): {
"message": "AWS argument type validation error",
"remediation": "Check the provided argument types specific to AWS and ensure they meet the required format. For session duration check: https://docs.aws.amazon.com/singlesignon/latest/userguide/howtosessionduration.html and for role session name check: https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_terms-and-concepts.html#iam-term-role-session-name",
},
(1004, "AWSSetUpSessionError"): {
"message": "AWS session setup error",
"remediation": "Check the AWS session setup and ensure it is properly configured, please visit https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-files.html and check if the provided profile has the necessary permissions.",
},
(1005, "AWSIAMRoleARNRegionNotEmtpyError"): {
"message": "AWS IAM Role ARN region is not empty",
"remediation": "Check the AWS IAM Role ARN region and ensure it is empty, visit https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-arns for more information.",
},
(1006, "AWSIAMRoleARNPartitionEmptyError"): {
"message": "AWS IAM Role ARN partition is empty",
"remediation": "Check the AWS IAM Role ARN partition and ensure it is not empty, visit https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-arns for more information.",
},
(1007, "AWSIAMRoleARNMissingFieldsError"): {
"message": "AWS IAM Role ARN missing fields",
"remediation": "Check the AWS IAM Role ARN and ensure all required fields are present, visit https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-arns for more information.",
},
(1008, "AWSIAMRoleARNServiceNotIAMnorSTSError"): {
"message": "AWS IAM Role ARN service is not IAM nor STS",
"remediation": "Check the AWS IAM Role ARN service and ensure it is either IAM or STS, visit https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-arns for more information.",
},
(1009, "AWSIAMRoleARNInvalidAccountIDError"): {
"message": "AWS IAM Role ARN account ID is invalid",
"remediation": "Check the AWS IAM Role ARN account ID and ensure it is a valid 12-digit number, visit https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-arns for more information.",
},
(1010, "AWSIAMRoleARNInvalidResourceTypeError"): {
"message": "AWS IAM Role ARN resource type is invalid",
"remediation": "Check the AWS IAM Role ARN resource type and ensure it is valid, resources types are: role, user, assumed-role, root, federated-user, visit https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-arns for more information.",
},
(1011, "AWSIAMRoleARNEmptyResourceError"): {
"message": "AWS IAM Role ARN resource is empty",
"remediation": "Check the AWS IAM Role ARN resource and ensure it is not empty, visit https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-arns for more information.",
},
(1012, "AWSAssumeRoleError"): {
"message": "AWS assume role error",
"remediation": "Check the AWS assume role configuration and ensure it is properly set up, please visit https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/aws/role-assumption/ and https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_terms-and-concepts.html#iam-term-role-session-name",
},
(1013, "AWSAccessKeyIDInvalidError"): {
"message": "AWS Access Key ID or Session Token is invalid",
"remediation": "Check your AWS Access Key ID or Session Token and ensure it is valid.",
},
(1014, "AWSSecretAccessKeyInvalidError"): {
"message": "AWS Secret Access Key is invalid",
"remediation": "Check your AWS Secret Access Key and signing method and ensure it is valid.",
},
(1015, "AWSInvalidProviderIdError"): {
"message": "The provided AWS credentials belong to a different account",
"remediation": "Check the provided AWS credentials and review if belong to the account you want to use.",
},
(1016, "AWSSessionTokenExpiredError"): {
"message": "The provided AWS Session Token is expired",
"remediation": "Get a new AWS Session Token and configure it for the provider.",
},
}
def __init__(self, code, file=None, original_exception=None, message=None):
# Copy the entry so a custom message does not mutate the shared AWS_ERROR_CODES mapping
error_info = dict(self.AWS_ERROR_CODES.get((code, self.__class__.__name__)))
if message:
    error_info["message"] = message
super().__init__(
code,
source="AWS",
file=file,
original_exception=original_exception,
error_info=error_info,
)
class AWSCredentialsError(AWSBaseException):
"""Base class for AWS credentials errors."""
def __init__(self, code, file=None, original_exception=None, message=None):
super().__init__(code, file, original_exception, message)
class AWSClientError(AWSCredentialsError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1000, file=file, original_exception=original_exception, message=message
)
class AWSProfileNotFoundError(AWSCredentialsError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1001, file=file, original_exception=original_exception, message=message
)
class AWSNoCredentialsError(AWSCredentialsError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1002, file=file, original_exception=original_exception, message=message
)
class AWSArgumentTypeValidationError(AWSBaseException):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1003, file=file, original_exception=original_exception, message=message
)
class AWSSetUpSessionError(AWSBaseException):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1004, file=file, original_exception=original_exception, message=message
)
class AWSRoleArnError(AWSBaseException):
"""Base class for AWS role ARN errors."""
def __init__(self, code, file=None, original_exception=None, message=None):
super().__init__(code, file, original_exception, message)
class AWSIAMRoleARNRegionNotEmtpyError(AWSRoleArnError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1005, file=file, original_exception=original_exception, message=message
)
class AWSIAMRoleARNPartitionEmptyError(AWSRoleArnError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1006, file=file, original_exception=original_exception, message=message
)
class AWSIAMRoleARNMissingFieldsError(AWSRoleArnError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1007, file=file, original_exception=original_exception, message=message
)
class AWSIAMRoleARNServiceNotIAMnorSTSError(AWSRoleArnError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1008, file=file, original_exception=original_exception, message=message
)
class AWSIAMRoleARNInvalidAccountIDError(AWSRoleArnError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1009, file=file, original_exception=original_exception, message=message
)
class AWSIAMRoleARNInvalidResourceTypeError(AWSRoleArnError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1010, file=file, original_exception=original_exception, message=message
)
class AWSIAMRoleARNEmptyResourceError(AWSRoleArnError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1011, file=file, original_exception=original_exception, message=message
)
class AWSAssumeRoleError(AWSBaseException):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1012, file=file, original_exception=original_exception, message=message
)
class AWSAccessKeyIDInvalidError(AWSCredentialsError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1013, file=file, original_exception=original_exception, message=message
)
class AWSSecretAccessKeyInvalidError(AWSCredentialsError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1014, file=file, original_exception=original_exception, message=message
)
class AWSInvalidProviderIdError(AWSCredentialsError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1015, file=file, original_exception=original_exception, message=message
)
class AWSSessionTokenExpiredError(AWSCredentialsError):
def __init__(self, file=None, original_exception=None, message=None):
super().__init__(
1016, file=file, original_exception=original_exception, message=message
)
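A hedged usage sketch (assuming ProwlerException keeps the error_info it receives; that attribute is not shown in this diff): each subclass pins a numeric code, and the (code, class name) tuple selects the message/remediation pair.
try:
    raise AWSProfileNotFoundError(file="aws_provider.py")  # hypothetical call site
except AWSBaseException as error:
    # Resolved from AWS_ERROR_CODES[(1001, "AWSProfileNotFoundError")]
    print(error.error_info["message"])  # "AWS Profile not found", assuming the attribute is stored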


@@ -168,13 +168,40 @@ def init_parser(self):
)
def validate_session_duration(duration):
"""validate_session_duration validates that the AWS STS Assume Role Session Duration is between 900 and 43200 seconds."""
duration = int(duration)
def validate_session_duration(session_duration: int) -> int:
"""validate_session_duration validates that the input session_duration is valid"""
duration = int(session_duration)
# Since the range(i,j) goes from i to j-1 we have to j+1
if duration not in range(900, 43201):
raise ArgumentTypeError("Session duration must be between 900 and 43200")
return duration
raise ArgumentTypeError(
"Session duration must be between 900 and 43200 seconds"
)
else:
return duration
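Wired into argparse as a type callback (hypothetical parser), the validator behaves like this:
from argparse import ArgumentParser

parser = ArgumentParser()
parser.add_argument("--session-duration", type=validate_session_duration, default=3600)
print(parser.parse_args(["--session-duration", "900"]).session_duration)  # 900
# "--session-duration 899" exits with: Session duration must be between 900 and 43200 seconds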
def validate_role_session_name(session_name: str) -> str:
"""
Validates that the role session name is valid.
Args:
session_name (str): The role session name to be validated.
Returns:
str: The validated role session name.
Raises:
ArgumentTypeError: If the role session name is invalid.
Documentation:
- AWS STS AssumeRole API: https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html
"""
if fullmatch(r"[\w+=,.@-]{2,64}", session_name):
return session_name
else:
raise ArgumentTypeError(
"Role session name must be between 2 and 64 characters long and may contain alphanumeric characters, hyphens, underscores, plus signs, equal signs, commas, periods, at signs, and tildes."
)
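Illustrative checks (hypothetical inputs) against the [\w+=,.@-]{2,64} pattern:
print(validate_role_session_name("ProwlerAssessmentSession"))  # returned unchanged
# validate_role_session_name("x") raises ArgumentTypeError (shorter than 2 characters)
# validate_role_session_name("bad~name") raises ArgumentTypeError (tilde is not allowed)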
def validate_arguments(arguments: Namespace) -> tuple[bool, str]:
@@ -195,7 +222,7 @@ def validate_arguments(arguments: Namespace) -> tuple[bool, str]:
return (True, "")
def validate_bucket(bucket_name):
def validate_bucket(bucket_name: str) -> str:
"""validate_bucket validates that the input bucket_name is valid"""
if search("(?!(^xn--|.+-s3alias$))^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$", bucket_name):
return bucket_name
@@ -203,16 +230,3 @@ def validate_bucket(bucket_name):
raise ArgumentTypeError(
"Bucket name must be valid (https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html)"
)
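Illustrative checks (hypothetical inputs) against the bucket-name rules above:
print(validate_bucket("prowler-findings-2024"))  # returned unchanged
# validate_bucket("Invalid_Bucket") raises ArgumentTypeError (uppercase and underscore)
# validate_bucket("xn--reserved") raises ArgumentTypeError (reserved "xn--" prefix)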
def validate_role_session_name(session_name):
"""
validates that the role session name is valid
Documentation: https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html
"""
if fullmatch(r"[\w+=,.@-]{2,64}", session_name):
return session_name
else:
raise ArgumentTypeError(
"Role Session Name must be 2-64 characters long and consist only of upper- and lower-case alphanumeric characters with no spaces. You can also include underscores or any of the following characters: =,.@-"
)

Some files were not shown because too many files have changed in this diff