mirror of
https://github.com/prowler-cloud/prowler.git
synced 2025-12-19 05:17:47 +00:00
feat(gcp): add CIS checks (#2544)
This commit is contained in:
@@ -40,7 +40,7 @@ It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, Fe
|
||||
| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.cloud/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.cloud/en/latest/tutorials/misc/#categories) |
|
||||
|---|---|---|---|---|
|
||||
| AWS | 283 | 55 -> `prowler aws --list-services` | 21 -> `prowler aws --list-compliance` | 5 -> `prowler aws --list-categories` |
|
||||
| GCP | 59 | 10 -> `prowler gcp --list-services` | CIS soon | 0 -> `prowler gcp --list-categories`|
|
||||
| GCP | 73 | 11 -> `prowler gcp --list-services` | 1 -> `prowler gcp --list-compliance` | 0 -> `prowler gcp --list-categories`|
|
||||
| Azure | 20 | 3 -> `prowler azure --list-services` | CIS soon | 1 -> `prowler azure --list-categories` |
|
||||
| Kubernetes | Planned | - | - | - |
|
||||
|
||||
|
||||
1750
prowler/compliance/gcp/cis_2.0_gcp.json
Normal file
1750
prowler/compliance/gcp/cis_2.0_gcp.json
Normal file
File diff suppressed because one or more lines are too long
@@ -24,17 +24,16 @@ banner_color = "\033[1;92m"
|
||||
|
||||
# Compliance
|
||||
actual_directory = pathlib.Path(os.path.dirname(os.path.realpath(__file__)))
|
||||
compliance_aws_dir = f"{actual_directory}/../compliance/aws"
|
||||
available_compliance_frameworks = []
|
||||
with os.scandir(compliance_aws_dir) as files:
|
||||
files = [
|
||||
file.name
|
||||
for file in files
|
||||
if file.is_file()
|
||||
and file.name.endswith(".json")
|
||||
and available_compliance_frameworks.append(file.name.removesuffix(".json"))
|
||||
]
|
||||
|
||||
for provider in ["aws", "gcp"]:
|
||||
with os.scandir(f"{actual_directory}/../compliance/{provider}") as files:
|
||||
files = [
|
||||
file.name
|
||||
for file in files
|
||||
if file.is_file()
|
||||
and file.name.endswith(".json")
|
||||
and available_compliance_frameworks.append(file.name.removesuffix(".json"))
|
||||
]
|
||||
# AWS services-regions matrix json
|
||||
aws_services_json_file = "aws_regions_by_service.json"
|
||||
|
||||
|
||||
@@ -8,10 +8,11 @@ from prowler.config.config import orange_color, timestamp
|
||||
from prowler.lib.check.models import Check_Report
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.lib.outputs.models import (
|
||||
Check_Output_CSV_AWS_CIS,
|
||||
Check_Output_CSV_AWS_ISO27001_2013,
|
||||
Check_Output_CSV_AWS_Well_Architected,
|
||||
Check_Output_CSV_CIS,
|
||||
Check_Output_CSV_ENS_RD2022,
|
||||
Check_Output_CSV_GCP_CIS,
|
||||
Check_Output_CSV_Generic_Compliance,
|
||||
Check_Output_MITRE_ATTACK,
|
||||
generate_csv_fields,
|
||||
@@ -29,7 +30,10 @@ def add_manual_controls(output_options, audit_info, file_descriptors):
|
||||
manual_finding.status = "INFO"
|
||||
manual_finding.status_extended = "Manual check"
|
||||
manual_finding.resource_id = "manual_check"
|
||||
manual_finding.resource_name = "Manual check"
|
||||
manual_finding.region = ""
|
||||
manual_finding.location = ""
|
||||
manual_finding.project_id = ""
|
||||
fill_compliance(
|
||||
output_options, manual_finding, audit_info, file_descriptors
|
||||
)
|
||||
@@ -86,38 +90,70 @@ def fill_compliance(output_options, finding, audit_info, file_descriptors):
|
||||
elif compliance.Framework == "CIS" and "cis_" in str(
|
||||
output_options.output_modes
|
||||
):
|
||||
compliance_output = "cis_" + compliance.Version + "_aws"
|
||||
compliance_output = (
|
||||
"cis_" + compliance.Version + "_" + compliance.Provider.lower()
|
||||
)
|
||||
# Only with the version of CIS that was selected
|
||||
if compliance_output in str(output_options.output_modes):
|
||||
for requirement in compliance.Requirements:
|
||||
requirement_description = requirement.Description
|
||||
requirement_id = requirement.Id
|
||||
for attribute in requirement.Attributes:
|
||||
compliance_row = Check_Output_CSV_CIS(
|
||||
Provider=finding.check_metadata.Provider,
|
||||
Description=compliance.Description,
|
||||
AccountId=audit_info.audited_account,
|
||||
Region=finding.region,
|
||||
AssessmentDate=timestamp.isoformat(),
|
||||
Requirements_Id=requirement_id,
|
||||
Requirements_Description=requirement_description,
|
||||
Requirements_Attributes_Section=attribute.Section,
|
||||
Requirements_Attributes_Profile=attribute.Profile,
|
||||
Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
|
||||
Requirements_Attributes_Description=attribute.Description,
|
||||
Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
|
||||
Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
|
||||
Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
|
||||
Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
|
||||
Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
|
||||
Requirements_Attributes_References=attribute.References,
|
||||
Status=finding.status,
|
||||
StatusExtended=finding.status_extended,
|
||||
ResourceId=finding.resource_id,
|
||||
CheckId=finding.check_metadata.CheckID,
|
||||
)
|
||||
|
||||
csv_header = generate_csv_fields(Check_Output_CSV_CIS)
|
||||
if compliance.Provider == "AWS":
|
||||
compliance_row = Check_Output_CSV_AWS_CIS(
|
||||
Provider=finding.check_metadata.Provider,
|
||||
Description=compliance.Description,
|
||||
AccountId=audit_info.audited_account,
|
||||
Region=finding.region,
|
||||
AssessmentDate=timestamp.isoformat(),
|
||||
Requirements_Id=requirement_id,
|
||||
Requirements_Description=requirement_description,
|
||||
Requirements_Attributes_Section=attribute.Section,
|
||||
Requirements_Attributes_Profile=attribute.Profile,
|
||||
Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
|
||||
Requirements_Attributes_Description=attribute.Description,
|
||||
Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
|
||||
Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
|
||||
Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
|
||||
Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
|
||||
Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
|
||||
Requirements_Attributes_References=attribute.References,
|
||||
Status=finding.status,
|
||||
StatusExtended=finding.status_extended,
|
||||
ResourceId=finding.resource_id,
|
||||
CheckId=finding.check_metadata.CheckID,
|
||||
)
|
||||
csv_header = generate_csv_fields(
|
||||
Check_Output_CSV_AWS_CIS
|
||||
)
|
||||
elif compliance.Provider == "GCP":
|
||||
compliance_row = Check_Output_CSV_GCP_CIS(
|
||||
Provider=finding.check_metadata.Provider,
|
||||
Description=compliance.Description,
|
||||
ProjectId=finding.project_id,
|
||||
Location=finding.location,
|
||||
AssessmentDate=timestamp.isoformat(),
|
||||
Requirements_Id=requirement_id,
|
||||
Requirements_Description=requirement_description,
|
||||
Requirements_Attributes_Section=attribute.Section,
|
||||
Requirements_Attributes_Profile=attribute.Profile,
|
||||
Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
|
||||
Requirements_Attributes_Description=attribute.Description,
|
||||
Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
|
||||
Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
|
||||
Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
|
||||
Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
|
||||
Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
|
||||
Requirements_Attributes_References=attribute.References,
|
||||
Status=finding.status,
|
||||
StatusExtended=finding.status_extended,
|
||||
ResourceId=finding.resource_id,
|
||||
ResourceName=finding.resource_name,
|
||||
CheckId=finding.check_metadata.CheckID,
|
||||
)
|
||||
csv_header = generate_csv_fields(
|
||||
Check_Output_CSV_GCP_CIS
|
||||
)
|
||||
|
||||
elif (
|
||||
"AWS-Well-Architected-Framework" in compliance.Framework
|
||||
@@ -412,7 +448,7 @@ def display_compliance_table(
|
||||
print(
|
||||
f" - CSV: {output_directory}/{output_filename}_{compliance_framework}.csv\n"
|
||||
)
|
||||
elif "cis_1." in compliance_framework:
|
||||
elif "cis_" in compliance_framework:
|
||||
sections = {}
|
||||
cis_compliance_table = {
|
||||
"Provider": [],
|
||||
|
||||
@@ -14,10 +14,11 @@ from prowler.lib.outputs.html import add_html_header
|
||||
from prowler.lib.outputs.models import (
|
||||
Aws_Check_Output_CSV,
|
||||
Azure_Check_Output_CSV,
|
||||
Check_Output_CSV_AWS_CIS,
|
||||
Check_Output_CSV_AWS_ISO27001_2013,
|
||||
Check_Output_CSV_AWS_Well_Architected,
|
||||
Check_Output_CSV_CIS,
|
||||
Check_Output_CSV_ENS_RD2022,
|
||||
Check_Output_CSV_GCP_CIS,
|
||||
Check_Output_CSV_Generic_Compliance,
|
||||
Check_Output_MITRE_ATTACK,
|
||||
Gcp_Check_Output_CSV,
|
||||
@@ -120,6 +121,14 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit
|
||||
)
|
||||
file_descriptors.update({output_mode: file_descriptor})
|
||||
|
||||
elif isinstance(audit_info, GCP_Audit_Info):
|
||||
if output_mode == "cis_2.0_gcp":
|
||||
filename = f"{output_directory}/{output_filename}_cis_2.0_gcp{csv_file_suffix}"
|
||||
file_descriptor = initialize_file_descriptor(
|
||||
filename, output_mode, audit_info, Check_Output_CSV_GCP_CIS
|
||||
)
|
||||
file_descriptors.update({output_mode: file_descriptor})
|
||||
|
||||
elif isinstance(audit_info, AWS_Audit_Info):
|
||||
if output_mode == "json-asff":
|
||||
filename = f"{output_directory}/{output_filename}{json_asff_file_suffix}"
|
||||
@@ -141,14 +150,14 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit
|
||||
elif output_mode == "cis_1.5_aws":
|
||||
filename = f"{output_directory}/{output_filename}_cis_1.5_aws{csv_file_suffix}"
|
||||
file_descriptor = initialize_file_descriptor(
|
||||
filename, output_mode, audit_info, Check_Output_CSV_CIS
|
||||
filename, output_mode, audit_info, Check_Output_CSV_AWS_CIS
|
||||
)
|
||||
file_descriptors.update({output_mode: file_descriptor})
|
||||
|
||||
elif output_mode == "cis_1.4_aws":
|
||||
filename = f"{output_directory}/{output_filename}_cis_1.4_aws{csv_file_suffix}"
|
||||
file_descriptor = initialize_file_descriptor(
|
||||
filename, output_mode, audit_info, Check_Output_CSV_CIS
|
||||
filename, output_mode, audit_info, Check_Output_CSV_AWS_CIS
|
||||
)
|
||||
file_descriptors.update({output_mode: file_descriptor})
|
||||
|
||||
|
||||
@@ -539,7 +539,7 @@ class Check_Output_CSV_ENS_RD2022(BaseModel):
|
||||
CheckId: str
|
||||
|
||||
|
||||
class Check_Output_CSV_CIS(BaseModel):
|
||||
class Check_Output_CSV_AWS_CIS(BaseModel):
|
||||
"""
|
||||
Check_Output_CSV_CIS generates a finding's output in CSV CIS format.
|
||||
"""
|
||||
@@ -567,6 +567,35 @@ class Check_Output_CSV_CIS(BaseModel):
|
||||
CheckId: str
|
||||
|
||||
|
||||
class Check_Output_CSV_GCP_CIS(BaseModel):
|
||||
"""
|
||||
Check_Output_CSV_CIS generates a finding's output in CSV CIS format.
|
||||
"""
|
||||
|
||||
Provider: str
|
||||
Description: str
|
||||
ProjectId: str
|
||||
Location: str
|
||||
AssessmentDate: str
|
||||
Requirements_Id: str
|
||||
Requirements_Description: str
|
||||
Requirements_Attributes_Section: str
|
||||
Requirements_Attributes_Profile: str
|
||||
Requirements_Attributes_AssessmentStatus: str
|
||||
Requirements_Attributes_Description: str
|
||||
Requirements_Attributes_RationaleStatement: str
|
||||
Requirements_Attributes_ImpactStatement: str
|
||||
Requirements_Attributes_RemediationProcedure: str
|
||||
Requirements_Attributes_AuditProcedure: str
|
||||
Requirements_Attributes_AdditionalInformation: str
|
||||
Requirements_Attributes_References: str
|
||||
Status: str
|
||||
StatusExtended: str
|
||||
ResourceId: str
|
||||
ResourceName: str
|
||||
CheckId: str
|
||||
|
||||
|
||||
class Check_Output_CSV_Generic_Compliance(BaseModel):
|
||||
"""
|
||||
Check_Output_CSV_Generic_Compliance generates a finding's output in CSV Generic Compliance format.
|
||||
|
||||
@@ -86,25 +86,24 @@ def report(check_findings, output_options, audit_info):
|
||||
if file_descriptors:
|
||||
# Check if --quiet to only add fails to outputs
|
||||
if not (finding.status != "FAIL" and output_options.is_quiet):
|
||||
if any(
|
||||
compliance in output_options.output_modes
|
||||
for compliance in available_compliance_frameworks
|
||||
):
|
||||
fill_compliance(
|
||||
output_options,
|
||||
finding,
|
||||
audit_info,
|
||||
file_descriptors,
|
||||
)
|
||||
|
||||
add_manual_controls(
|
||||
output_options,
|
||||
audit_info,
|
||||
file_descriptors,
|
||||
)
|
||||
# AWS specific outputs
|
||||
if finding.check_metadata.Provider == "aws":
|
||||
if any(
|
||||
compliance in output_options.output_modes
|
||||
for compliance in available_compliance_frameworks
|
||||
):
|
||||
fill_compliance(
|
||||
output_options,
|
||||
finding,
|
||||
audit_info,
|
||||
file_descriptors,
|
||||
)
|
||||
|
||||
add_manual_controls(
|
||||
output_options,
|
||||
audit_info,
|
||||
file_descriptors,
|
||||
)
|
||||
|
||||
if "json-asff" in file_descriptors:
|
||||
finding_output = Check_Output_JSON_ASFF()
|
||||
fill_json_asff(
|
||||
|
||||
@@ -11,7 +11,8 @@ class apikeys_api_restrictions_configured(Check):
|
||||
report.resource_id = key.id
|
||||
report.resource_name = key.name
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"API key {key.name} have restrictions configured."
|
||||
report.location = apikeys_client.region
|
||||
report.status_extended = f"API key {key.name} has restrictions configured."
|
||||
if key.restrictions == {} or any(
|
||||
[
|
||||
target.get("service") == "cloudapis.googleapis.com"
|
||||
|
||||
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"Provider": "gcp",
|
||||
"CheckID": "apikeys_key_exists",
|
||||
"CheckTitle": "Ensure API Keys Only Exist for Active Services",
|
||||
"CheckType": [],
|
||||
"ServiceName": "apikeys",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "API Key",
|
||||
"Description": "API Keys should only be used for services in cases where other authentication methods are unavailable. Unused keys with their permissions in tact may still exist within a project. Keys are insecure because they can be viewed publicly, such as from within a browser, or they can be accessed on a device where the key resides. It is recommended to use standard authentication flow instead.",
|
||||
"Risk": "Security risks involved in using API-Keys appear below: API keys are simple encrypted strings, API keys do not identify the user or the application making the API request, API keys are typically accessible to clients, making it easy to discover and steal an API key.",
|
||||
"RelatedUrl": "",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "gcloud alpha services api-keys delete",
|
||||
"NativeIaC": "",
|
||||
"Other": "",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "To avoid the security risk in using API keys, it is recommended to use standard authentication flow instead.",
|
||||
"Url": "https://cloud.google.com/docs/authentication/api-keys"
|
||||
}
|
||||
},
|
||||
"Categories": [],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -0,0 +1,22 @@
|
||||
from prowler.lib.check.models import Check, Check_Report_GCP
|
||||
from prowler.providers.gcp.services.apikeys.apikeys_client import apikeys_client
|
||||
|
||||
|
||||
class apikeys_key_exists(Check):
|
||||
def execute(self) -> Check_Report_GCP:
|
||||
findings = []
|
||||
for project in apikeys_client.project_ids:
|
||||
report = Check_Report_GCP(self.metadata())
|
||||
report.project_id = project
|
||||
report.resource_id = project
|
||||
report.location = apikeys_client.region
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"Project {project} does not have active API Keys."
|
||||
for key in apikeys_client.keys:
|
||||
if key.project_id == project:
|
||||
report.status = "FAIL"
|
||||
report.status_extended = f"Project {project} has active API Keys."
|
||||
break
|
||||
findings.append(report)
|
||||
|
||||
return findings
|
||||
@@ -12,6 +12,7 @@ class apikeys_key_rotated_in_90_days(Check):
|
||||
report.project_id = key.project_id
|
||||
report.resource_id = key.id
|
||||
report.resource_name = key.name
|
||||
report.location = apikeys_client.region
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"API key {key.name} created in less than 90 days."
|
||||
if (
|
||||
@@ -20,7 +21,7 @@ class apikeys_key_rotated_in_90_days(Check):
|
||||
).days > 90:
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
f"API key {key.name} creation date have more than 90 days."
|
||||
f"API key {key.name} creation date has more than 90 days."
|
||||
)
|
||||
findings.append(report)
|
||||
|
||||
|
||||
@@ -11,6 +11,7 @@ class APIKeys:
|
||||
self.api_version = "v2"
|
||||
self.project_ids = audit_info.project_ids
|
||||
self.default_project_id = audit_info.default_project_id
|
||||
self.region = "global"
|
||||
self.client = generate_client(self.service, self.api_version, audit_info)
|
||||
self.keys = []
|
||||
self.__get_keys__()
|
||||
|
||||
@@ -13,7 +13,10 @@ class CloudResourceManager:
|
||||
self.project_ids = audit_info.project_ids
|
||||
self.client = generate_client(self.service, self.api_version, audit_info)
|
||||
self.bindings = []
|
||||
self.projects = []
|
||||
self.organizations = []
|
||||
self.__get_iam_policy__()
|
||||
self.__get_organizations__()
|
||||
|
||||
def __get_client__(self):
|
||||
return self.client
|
||||
@@ -24,6 +27,12 @@ class CloudResourceManager:
|
||||
policy = (
|
||||
self.client.projects().getIamPolicy(resource=project_id).execute()
|
||||
)
|
||||
audit_logging = False
|
||||
if policy.get("auditConfigs"):
|
||||
audit_logging = True
|
||||
self.projects.append(
|
||||
Project(id=project_id, audit_logging=audit_logging)
|
||||
)
|
||||
for binding in policy["bindings"]:
|
||||
self.bindings.append(
|
||||
Binding(
|
||||
@@ -37,8 +46,30 @@ class CloudResourceManager:
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
def __get_organizations__(self):
|
||||
try:
|
||||
response = self.client.organizations().search().execute()
|
||||
for org in response["organizations"]:
|
||||
self.organizations.append(
|
||||
Organization(id=org["name"].split("/")[-1], name=org["displayName"])
|
||||
)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
|
||||
class Binding(BaseModel):
|
||||
role: str
|
||||
members: list
|
||||
project_id: str
|
||||
|
||||
|
||||
class Project(BaseModel):
|
||||
id: str
|
||||
audit_logging: bool
|
||||
|
||||
|
||||
class Organization(BaseModel):
|
||||
id: str
|
||||
name: str
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
{
|
||||
"Provider": "gcp",
|
||||
"CheckID": "cloudsql_instance_postgres_log_min_duration_statement_flag",
|
||||
"CheckTitle": "Ensure that the Log_min_error_statement Flag for a Cloud SQL PostgreSQL Instance Is Set to -1",
|
||||
"CheckTitle": "Ensure that the Log_min_duration_statement Flag for a Cloud SQL PostgreSQL Instance Is Set to -1",
|
||||
"CheckType": [],
|
||||
"ServiceName": "cloudsql",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "DatabaseInstance",
|
||||
"Description": "Ensure that the Log_min_error_statement Flag for a Cloud SQL PostgreSQL Instance Is Set to -1",
|
||||
"Description": "Ensure that the Log_min_duration_statement Flag for a Cloud SQL PostgreSQL Instance Is Set to -1",
|
||||
"Risk": "The log_min_duration_statement flag defines the minimum amount of execution time of a statement in milliseconds where the total duration of the statement is logged. Ensure that log_min_duration_statement is disabled, i.e., a value of -1 is set.",
|
||||
"RelatedUrl": "",
|
||||
"Remediation": {
|
||||
|
||||
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"Provider": "gcp",
|
||||
"CheckID": "cloudstorage_bucket_log_retention_policy_lock",
|
||||
"CheckTitle": "Ensure That Retention Policies on Cloud Storage Buckets Used for Exporting Logs Are Configured Using Bucket Lock",
|
||||
"CheckType": [],
|
||||
"ServiceName": "cloudstorage",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "Bucket",
|
||||
"Description": "Enabling retention policies on log buckets will protect logs stored in cloud storage buckets from being overwritten or accidentally deleted.",
|
||||
"Risk": "Sinks can be configured to export logs in storage buckets. It is recommended to configure a data retention policy for these cloud storage buckets and to lock the data retention policy; thus permanently preventing the policy from being reduced or removed. This way, if the system is ever compromised by an attacker or a malicious insider who wants to cover their tracks, the activity logs are definitely preserved for forensics and security investigations.",
|
||||
"RelatedUrl": "",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/gcp/CloudStorage/retention-policies-with-bucket-lock.html",
|
||||
"Terraform": "https://docs.bridgecrew.io/docs/ensure-that-retention-policies-on-log-buckets-are-configured-using-bucket-lock#terraform"
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "It is recommended to set up retention policies and configure Bucket Lock on all storage buckets that are used as log sinks.",
|
||||
"Url": "https://cloud.google.com/storage/docs/using-uniform-bucket-level-access"
|
||||
}
|
||||
},
|
||||
"Categories": [],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -0,0 +1,35 @@
|
||||
from prowler.lib.check.models import Check, Check_Report_GCP
|
||||
from prowler.providers.gcp.services.cloudstorage.cloudstorage_client import (
|
||||
cloudstorage_client,
|
||||
)
|
||||
from prowler.providers.gcp.services.logging.logging_client import logging_client
|
||||
|
||||
|
||||
class cloudstorage_bucket_log_retention_policy_lock(Check):
|
||||
def execute(self) -> Check_Report_GCP:
|
||||
findings = []
|
||||
# Get Log Sink Buckets
|
||||
log_buckets = []
|
||||
for sink in logging_client.sinks:
|
||||
if "storage.googleapis.com" in sink.destination:
|
||||
log_buckets.append(sink.destination.split("/")[-1])
|
||||
for bucket in cloudstorage_client.buckets:
|
||||
if bucket.name in log_buckets:
|
||||
report = Check_Report_GCP(self.metadata())
|
||||
report.project_id = bucket.project_id
|
||||
report.resource_id = bucket.id
|
||||
report.resource_name = bucket.name
|
||||
report.location = bucket.region
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
f"Log Sink Bucket {bucket.name} has no Retention Policy"
|
||||
)
|
||||
if bucket.retention_policy:
|
||||
report.status = "FAIL"
|
||||
report.status_extended = f"Log Sink Bucket {bucket.name} has no Retention Policy but without Bucket Lock"
|
||||
if bucket.retention_policy["isLocked"]:
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"Log Sink Bucket {bucket.name} has a Retention Policy with Bucket Lock"
|
||||
findings.append(report)
|
||||
|
||||
return findings
|
||||
@@ -1,3 +1,5 @@
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
@@ -40,6 +42,7 @@ class CloudStorage:
|
||||
"uniformBucketLevelAccess"
|
||||
]["enabled"],
|
||||
public=public,
|
||||
retention_policy=bucket.get("retentionPolicy"),
|
||||
project_id=project_id,
|
||||
)
|
||||
)
|
||||
@@ -60,3 +63,4 @@ class Bucket(BaseModel):
|
||||
uniform_bucket_level_access: bool
|
||||
public: bool
|
||||
project_id: str
|
||||
retention_policy: Optional[dict]
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"Provider": "gcp",
|
||||
"CheckID": "compute_rdp_access_from_the_internet_allowed",
|
||||
"CheckID": "compute_firewall_rdp_access_from_the_internet_allowed",
|
||||
"CheckTitle": "Ensure That RDP Access Is Restricted From the Internet",
|
||||
"CheckType": [],
|
||||
"ServiceName": "networking",
|
||||
@@ -2,7 +2,7 @@ from prowler.lib.check.models import Check, Check_Report_GCP
|
||||
from prowler.providers.gcp.services.compute.compute_client import compute_client
|
||||
|
||||
|
||||
class compute_rdp_access_from_the_internet_allowed(Check):
|
||||
class compute_firewall_rdp_access_from_the_internet_allowed(Check):
|
||||
def execute(self) -> Check_Report_GCP:
|
||||
findings = []
|
||||
for firewall in compute_client.firewalls:
|
||||
@@ -10,6 +10,7 @@ class compute_rdp_access_from_the_internet_allowed(Check):
|
||||
report.project_id = firewall.project_id
|
||||
report.resource_id = firewall.id
|
||||
report.resource_name = firewall.name
|
||||
report.location = compute_client.region
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"Firewall {firewall.name} does not expose port 3389 (RDP) to the internet."
|
||||
opened_port = False
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"Provider": "gcp",
|
||||
"CheckID": "compute_ssh_access_from_the_internet_allowed",
|
||||
"CheckID": "compute_firewall_ssh_access_from_the_internet_allowed",
|
||||
"CheckTitle": "Ensure That SSH Access Is Restricted From the Internet",
|
||||
"CheckType": [],
|
||||
"ServiceName": "networking",
|
||||
@@ -2,7 +2,7 @@ from prowler.lib.check.models import Check, Check_Report_GCP
|
||||
from prowler.providers.gcp.services.compute.compute_client import compute_client
|
||||
|
||||
|
||||
class compute_ssh_access_from_the_internet_allowed(Check):
|
||||
class compute_firewall_ssh_access_from_the_internet_allowed(Check):
|
||||
def execute(self) -> Check_Report_GCP:
|
||||
findings = []
|
||||
for firewall in compute_client.firewalls:
|
||||
@@ -10,6 +10,7 @@ class compute_ssh_access_from_the_internet_allowed(Check):
|
||||
report.project_id = firewall.project_id
|
||||
report.resource_id = firewall.id
|
||||
report.resource_name = firewall.name
|
||||
report.location = compute_client.region
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"Firewall {firewall.name} does not expose port 22 (SSH) to the internet."
|
||||
opened_port = False
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"Provider": "gcp",
|
||||
"CheckID": "compute_block_project_wide_ssh_keys_disabled",
|
||||
"CheckID": "compute_instance_block_project_wide_ssh_keys_disabled",
|
||||
"CheckTitle": "Ensure “Block Project-Wide SSH Keys” Is Enabled for VM Instances",
|
||||
"CheckType": [],
|
||||
"ServiceName": "compute",
|
||||
@@ -2,7 +2,7 @@ from prowler.lib.check.models import Check, Check_Report_GCP
|
||||
from prowler.providers.gcp.services.compute.compute_client import compute_client
|
||||
|
||||
|
||||
class compute_block_project_wide_ssh_keys_disabled(Check):
|
||||
class compute_instance_block_project_wide_ssh_keys_disabled(Check):
|
||||
def execute(self) -> Check_Report_GCP:
|
||||
findings = []
|
||||
for instance in compute_client.instances:
|
||||
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"Provider": "gcp",
|
||||
"CheckID": "compute_instance_confidential_computing_enabled",
|
||||
"CheckTitle": "Ensure Compute Instances Have Confidential Computing Enabled",
|
||||
"CheckType": [],
|
||||
"ServiceName": "compute",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "VMInstance",
|
||||
"Description": "Ensure that the Confidential Computing security feature is enabled for your Google Cloud virtual machine (VM) instances in order to add protection to your sensitive data in use by keeping it encrypted in memory and using encryption keys that Google doesn't have access to. Confidential Computing is a breakthrough technology which encrypts data while it is being processed. This technology keeps data encrypted in memory, outside the CPU.",
|
||||
"Risk": "Confidential Computing keeps your sensitive data encrypted while it is used, indexed, queried, or trained on, and does not allow Google to access the encryption keys (these keys are generated in hardware, per VM instance, and can't be exported). In this way, the Confidential Computing feature can help alleviate concerns about risk related to either dependency on Google Cloud infrastructure or Google insiders' access to your data in the clear.",
|
||||
"RelatedUrl": "",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/gcp/ComputeEngine/confidential-computing.html",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Ensure that the Confidential Computing security feature is enabled for your Google Cloud virtual machine (VM) instances in order to add protection to your sensitive data in use by keeping it encrypted in memory and using encryption keys that Google doesn't have access to. Confidential Computing is a breakthrough technology which encrypts data while it is being processed. This technology keeps data encrypted in memory, outside the CPU.",
|
||||
"Url": "https://cloud.google.com/compute/confidential-vm/docs/creating-cvm-instance:https://cloud.google.com/compute/confidential-vm/docs/about-cvm:https://cloud.google.com/confidential-computing:https://cloud.google.com/blog/products/identity-security/introducing-google-cloud-confidential-computing-with-confidential-vms"
|
||||
}
|
||||
},
|
||||
"Categories": [],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -0,0 +1,23 @@
|
||||
from prowler.lib.check.models import Check, Check_Report_GCP
|
||||
from prowler.providers.gcp.services.compute.compute_client import compute_client
|
||||
|
||||
|
||||
class compute_instance_confidential_computing_enabled(Check):
|
||||
def execute(self) -> Check_Report_GCP:
|
||||
findings = []
|
||||
for instance in compute_client.instances:
|
||||
report = Check_Report_GCP(self.metadata())
|
||||
report.project_id = instance.project_id
|
||||
report.resource_id = instance.id
|
||||
report.resource_name = instance.name
|
||||
report.location = instance.zone
|
||||
report.status = "PASS"
|
||||
report.status_extended = (
|
||||
f"VM Instance {instance.name} has Confidential Computing enabled"
|
||||
)
|
||||
if not instance.confidential_computing:
|
||||
report.status = "FAIL"
|
||||
report.status_extended = f"VM Instance {instance.name} does not have Confidential Computing enabled"
|
||||
findings.append(report)
|
||||
|
||||
return findings
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"Provider": "gcp",
|
||||
"CheckID": "compute_default_service_account_in_use",
|
||||
"CheckID": "compute_instance_default_service_account_in_use",
|
||||
"CheckTitle": "Ensure That Instances Are Not Configured To Use the Default Service Account",
|
||||
"CheckType": [],
|
||||
"ServiceName": "compute",
|
||||
@@ -2,7 +2,7 @@ from prowler.lib.check.models import Check, Check_Report_GCP
|
||||
from prowler.providers.gcp.services.compute.compute_client import compute_client
|
||||
|
||||
|
||||
class compute_default_service_account_in_use(Check):
|
||||
class compute_instance_default_service_account_in_use(Check):
|
||||
def execute(self) -> Check_Report_GCP:
|
||||
findings = []
|
||||
for instance in compute_client.instances:
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"Provider": "gcp",
|
||||
"CheckID": "compute_default_service_account_in_use_with_full_api_access",
|
||||
"CheckID": "compute_instance_default_service_account_in_use_with_full_api_access",
|
||||
"CheckTitle": "Ensure That Instances Are Not Configured To Use the Default Service Account With Full Access to All Cloud APIs",
|
||||
"CheckType": [],
|
||||
"ServiceName": "compute",
|
||||
@@ -2,7 +2,7 @@ from prowler.lib.check.models import Check, Check_Report_GCP
|
||||
from prowler.providers.gcp.services.compute.compute_client import compute_client
|
||||
|
||||
|
||||
class compute_default_service_account_in_use_with_full_api_access(Check):
|
||||
class compute_instance_default_service_account_in_use_with_full_api_access(Check):
|
||||
def execute(self) -> Check_Report_GCP:
|
||||
findings = []
|
||||
for instance in compute_client.instances:
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"Provider": "gcp",
|
||||
"CheckID": "compute_encryption_with_csek_is_disabled",
|
||||
"CheckID": "compute_instance_encryption_with_csek_enabled",
|
||||
"CheckTitle": "Ensure VM Disks for Critical VMs Are Encrypted With Customer-Supplied Encryption Keys (CSEK)",
|
||||
"CheckType": [],
|
||||
"ServiceName": "compute",
|
||||
@@ -2,7 +2,7 @@ from prowler.lib.check.models import Check, Check_Report_GCP
|
||||
from prowler.providers.gcp.services.compute.compute_client import compute_client
|
||||
|
||||
|
||||
class compute_encryption_with_csek_is_disabled(Check):
|
||||
class compute_instance_encryption_with_csek_enabled(Check):
|
||||
def execute(self) -> Check_Report_GCP:
|
||||
findings = []
|
||||
for instance in compute_client.instances:
|
||||
@@ -12,11 +12,11 @@ class compute_encryption_with_csek_is_disabled(Check):
|
||||
report.resource_name = instance.name
|
||||
report.location = instance.zone
|
||||
report.status = "FAIL"
|
||||
report.status_extended = f"The VM Instance {instance.name} have the following unencrypted disks: '{', '.join([i[0] for i in instance.disks_encryption if not i[1]])}'"
|
||||
report.status_extended = f"The VM Instance {instance.name} has the following unencrypted disks: '{', '.join([i[0] for i in instance.disks_encryption if not i[1]])}'"
|
||||
if all([i[1] for i in instance.disks_encryption]):
|
||||
report.status = "PASS"
|
||||
report.status_extended = (
|
||||
f"The VM Instance {instance.name} have every disk encrypted."
|
||||
f"The VM Instance {instance.name} has every disk encrypted."
|
||||
)
|
||||
findings.append(report)
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"Provider": "gcp",
|
||||
"CheckID": "compute_ip_forwarding_is_enabled",
|
||||
"CheckID": "compute_instance_ip_forwarding_is_enabled",
|
||||
"CheckTitle": "Ensure That IP Forwarding Is Not Enabled on Instances",
|
||||
"CheckType": [],
|
||||
"ServiceName": "compute",
|
||||
@@ -2,7 +2,7 @@ from prowler.lib.check.models import Check, Check_Report_GCP
|
||||
from prowler.providers.gcp.services.compute.compute_client import compute_client
|
||||
|
||||
|
||||
class compute_ip_forwarding_is_enabled(Check):
|
||||
class compute_instance_ip_forwarding_is_enabled(Check):
|
||||
def execute(self) -> Check_Report_GCP:
|
||||
findings = []
|
||||
for instance in compute_client.instances:
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"Provider": "gcp",
|
||||
"CheckID": "compute_serial_ports_in_use",
|
||||
"CheckID": "compute_instance_serial_ports_in_use",
|
||||
"CheckTitle": "Ensure ‘Enable Connecting to Serial Ports’ Is Not Enabled for VM Instance",
|
||||
"CheckType": [],
|
||||
"ServiceName": "compute",
|
||||
@@ -2,7 +2,7 @@ from prowler.lib.check.models import Check, Check_Report_GCP
|
||||
from prowler.providers.gcp.services.compute.compute_client import compute_client
|
||||
|
||||
|
||||
class compute_serial_ports_in_use(Check):
|
||||
class compute_instance_serial_ports_in_use(Check):
|
||||
def execute(self) -> Check_Report_GCP:
|
||||
findings = []
|
||||
for instance in compute_client.instances:
|
||||
@@ -12,7 +12,9 @@ class compute_serial_ports_in_use(Check):
|
||||
report.resource_name = instance.name
|
||||
report.location = instance.zone
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"VM Instance {instance.name} have ‘Enable Connecting to Serial Ports’ off"
|
||||
report.status_extended = (
|
||||
f"VM Instance {instance.name} has Enable Connecting to Serial Ports off"
|
||||
)
|
||||
if instance.metadata.get("items"):
|
||||
for item in instance.metadata["items"]:
|
||||
if item["key"] == "serial-port-enable" and item["value"] in [
|
||||
@@ -20,7 +22,7 @@ class compute_serial_ports_in_use(Check):
|
||||
"true",
|
||||
]:
|
||||
report.status = "FAIL"
|
||||
report.status_extended = f"VM Instance {instance.name} have ‘Enable Connecting to Serial Ports’ set to on"
|
||||
report.status_extended = f"VM Instance {instance.name} has Enable Connecting to Serial Ports set to on"
|
||||
break
|
||||
findings.append(report)
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"Provider": "gcp",
|
||||
"CheckID": "compute_shielded_vm_enabled",
|
||||
"CheckID": "compute_instance_shielded_vm_enabled",
|
||||
"CheckTitle": "Ensure Compute Instances Are Launched With Shielded VM Enabled",
|
||||
"CheckType": [],
|
||||
"ServiceName": "compute",
|
||||
@@ -2,7 +2,7 @@ from prowler.lib.check.models import Check, Check_Report_GCP
|
||||
from prowler.providers.gcp.services.compute.compute_client import compute_client
|
||||
|
||||
|
||||
class compute_shielded_vm_enabled(Check):
|
||||
class compute_instance_shielded_vm_enabled(Check):
|
||||
def execute(self) -> Check_Report_GCP:
|
||||
findings = []
|
||||
for instance in compute_client.instances:
|
||||
@@ -12,13 +12,13 @@ class compute_shielded_vm_enabled(Check):
|
||||
report.resource_name = instance.name
|
||||
report.location = instance.zone
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"VM Instance {instance.name} have vTPM or Integrity Monitoring set to on"
|
||||
report.status_extended = f"VM Instance {instance.name} has vTPM or Integrity Monitoring set to on"
|
||||
if (
|
||||
not instance.shielded_enabled_vtpm
|
||||
or not instance.shielded_enabled_integrity_monitoring
|
||||
):
|
||||
report.status = "FAIL"
|
||||
report.status_extended = f"VM Instance {instance.name} don't have vTPM and Integrity Monitoring set to on"
|
||||
report.status_extended = f"VM Instance {instance.name} doesn't have vTPM and Integrity Monitoring set to on"
|
||||
findings.append(report)
|
||||
|
||||
return findings
|
||||
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"Provider": "gcp",
|
||||
"CheckID": "compute_loadbalancer_logging_enabled",
|
||||
"CheckTitle": "Ensure Logging is enabled for HTTP(S) Load Balancer",
|
||||
"CheckType": [],
|
||||
"ServiceName": "compute",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "LoadBalancer",
|
||||
"Description": "Logging enabled on a HTTPS Load Balancer will show all network traffic and its destination.",
|
||||
"Risk": "HTTP(S) load balancing log entries contain information useful for monitoring and debugging web traffic. Google Cloud exports this logging data to Cloud Monitoring service so that monitoring metrics can be created to evaluate a load balancer's configuration, usage, and performance, troubleshoot problems, and improve resource utilization and user experience.",
|
||||
"RelatedUrl": "",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "gcloud compute backend-services update <serviceName> --region=REGION --enable-logging --logging-sample-rate=<percentageAsADecimal>",
|
||||
"NativeIaC": "",
|
||||
"Other": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/gcp/CloudLoadBalancing/enableLoad-balancing-backend-service-logging.html",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Logging will allow you to view HTTPS network traffic to your web applications.",
|
||||
"Url": "https://cloud.google.com/load-balancing/docs/https/https-logging-monitoring#gcloud:-global-mode"
|
||||
}
|
||||
},
|
||||
"Categories": [],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -0,0 +1,23 @@
|
||||
from prowler.lib.check.models import Check, Check_Report_GCP
|
||||
from prowler.providers.gcp.services.compute.compute_client import compute_client
|
||||
|
||||
|
||||
class compute_loadbalancer_logging_enabled(Check):
    """Check that every HTTP(S) load balancer has request logging enabled
    on its default backend service."""

    def execute(self) -> Check_Report_GCP:
        """Return one finding per load balancer: PASS when logging is on."""
        results = []
        for balancer in compute_client.load_balancers:
            finding = Check_Report_GCP(self.metadata())
            finding.project_id = balancer.project_id
            finding.resource_id = balancer.id
            finding.resource_name = balancer.name
            finding.location = compute_client.region
            if balancer.logging:
                finding.status = "PASS"
                finding.status_extended = f"LoadBalancer {balancer.name} has logging enabled"
            else:
                finding.status = "FAIL"
                finding.status_extended = (
                    f"LoadBalancer {balancer.name} does not have logging enabled"
                )
            results.append(finding)

        return results
|
||||
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"Provider": "gcp",
|
||||
"CheckID": "compute_network_dns_logging_enabled",
|
||||
"CheckTitle": "Enable Cloud DNS Logging for VPC Networks",
|
||||
"CheckType": [],
|
||||
"ServiceName": "compute",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "Network",
|
||||
"Description": "Ensure that Cloud DNS logging is enabled for all your Virtual Private Cloud (VPC) networks using DNS server policies. Cloud DNS logging records queries that the name servers resolve for your Google Cloud VPC networks, as well as queries from external entities directly to a public DNS zone. Recorded queries can come from virtual machine (VM) instances, GKE containers running in the same VPC network, peering zones, or other Google Cloud resources provisioned within your VPC.",
|
||||
"Risk": "Cloud DNS logging is disabled by default on each Google Cloud VPC network. By enabling monitoring of Cloud DNS logs, you can increase visibility into the DNS names requested by the clients within your VPC network. Cloud DNS logs can be monitored for anomalous domain names and evaluated against threat intelligence.",
|
||||
"RelatedUrl": "",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/gcp/CloudVPC/dns-logging-for-vpcs.html",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Cloud DNS logging records the queries from the name servers within your VPC to Stackdriver. Logged queries can come from Compute Engine VMs, GKE containers, or other GCP resources provisioned within the VPC.",
|
||||
"Url": "https://cloud.google.com/dns/docs/monitoring"
|
||||
}
|
||||
},
|
||||
"Categories": [],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -0,0 +1,28 @@
|
||||
from prowler.lib.check.models import Check, Check_Report_GCP
|
||||
from prowler.providers.gcp.services.compute.compute_client import compute_client
|
||||
from prowler.providers.gcp.services.dns.dns_client import dns_client
|
||||
|
||||
|
||||
class compute_network_dns_logging_enabled(Check):
    """Check that every VPC network is covered by a Cloud DNS server policy
    that has DNS query logging enabled."""

    def execute(self) -> Check_Report_GCP:
        """Return one finding per network: PASS when at least one DNS policy
        both targets the network and has logging enabled."""
        results = []
        for net in compute_client.networks:
            finding = Check_Report_GCP(self.metadata())
            finding.project_id = net.project_id
            finding.resource_id = net.id
            finding.resource_name = net.name
            finding.location = compute_client.region
            # A network passes as soon as any logging-enabled policy covers it.
            covered = any(
                net.name in pol.networks and pol.logging
                for pol in dns_client.policies
            )
            if covered:
                finding.status = "PASS"
                finding.status_extended = (
                    f"Network {net.name} has DNS logging enabled"
                )
            else:
                finding.status = "FAIL"
                finding.status_extended = (
                    f"Network {net.name} does not have DNS logging enabled"
                )
            results.append(finding)

        return results
|
||||
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"Provider": "gcp",
|
||||
"CheckID": "compute_network_not_legacy",
|
||||
"CheckTitle": "Ensure Legacy Networks Do Not Exist",
|
||||
"CheckType": [],
|
||||
"ServiceName": "compute",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "Network",
|
||||
"Description": "In order to prevent use of legacy networks, a project should not have a legacy network configured. As of now, Legacy Networks are gradually being phased out, and you can no longer create projects with them. This recommendation is to check older projects to ensure that they are not using Legacy Networks.",
|
||||
"Risk": "Google Cloud legacy networks have a single global IPv4 range which cannot be divided into subnets, and a single gateway IP address for the whole network. Legacy networks do not support several Google Cloud networking features such as subnets, alias IP ranges, multiple network interfaces, Cloud NAT (Network Address Translation), Virtual Private Cloud (VPC) Peering, and private access options for GCP services. Legacy networks are not recommended for high network traffic projects and are subject to a single point of contention or failure.",
|
||||
"RelatedUrl": "",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "gcloud compute networks delete <LEGACY_NETWORK_NAME>",
|
||||
"NativeIaC": "",
|
||||
"Other": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/gcp/CloudVPC/legacy-vpc-in-use.html#",
|
||||
"Terraform": "https://docs.bridgecrew.io/docs/ensure-legacy-networks-do-not-exist-for-a-project#terraform"
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Ensure that your Google Cloud Platform (GCP) projects are not using legacy networks as this type of network is no longer recommended for production environments because it does not support advanced networking features. Instead, it is strongly recommended to use Virtual Private Cloud (VPC) networks for existing and future GCP projects.",
|
||||
"Url": "https://cloud.google.com/vpc/docs/using-legacy#deleting_a_legacy_network"
|
||||
}
|
||||
},
|
||||
"Categories": [],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -0,0 +1,21 @@
|
||||
from prowler.lib.check.models import Check, Check_Report_GCP
|
||||
from prowler.providers.gcp.services.compute.compute_client import compute_client
|
||||
|
||||
|
||||
class compute_network_not_legacy(Check):
|
||||
def execute(self) -> Check_Report_GCP:
|
||||
findings = []
|
||||
for network in compute_client.networks:
|
||||
report = Check_Report_GCP(self.metadata())
|
||||
report.project_id = network.project_id
|
||||
report.resource_id = network.id
|
||||
report.resource_name = network.name
|
||||
report.location = compute_client.region
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"Network {network.name} is not legacy"
|
||||
if network.subnet_mode == "legacy":
|
||||
report.status = "FAIL"
|
||||
report.status_extended = f"Legacy network {network.name} exists"
|
||||
findings.append(report)
|
||||
|
||||
return findings
|
||||
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"Provider": "gcp",
|
||||
"CheckID": "compute_project_os_login_enabled",
|
||||
"CheckTitle": "Ensure Os Login Is Enabled for a Project",
|
||||
"CheckType": [],
|
||||
"ServiceName": "compute",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "low",
|
||||
"ResourceType": "Project",
|
||||
"Description": "Ensure that the OS Login feature is enabled at the Google Cloud Platform (GCP) project level in order to provide you with centralized and automated SSH key pair management.",
|
||||
"Risk": "Enabling OS Login feature ensures that the SSH keys used to connect to VM instances are mapped with Google Cloud IAM users. Revoking access to corresponding IAM users will revoke all the SSH keys associated with these users, therefore it facilitates centralized SSH key pair management, which is extremely useful in handling compromised or stolen SSH key pairs and/or revocation of external/third-party/vendor users.",
|
||||
"RelatedUrl": "",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "https://docs.bridgecrew.io/docs/bc_gcp_networking_9#cli-command",
|
||||
"NativeIaC": "",
|
||||
"Other": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/gcp/ComputeEngine/enable-os-login.html",
|
||||
"Terraform": "https://docs.bridgecrew.io/docs/bc_gcp_networking_9#terraform"
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Ensure that the OS Login feature is enabled at the Google Cloud Platform (GCP) project level in order to provide you with centralized and automated SSH key pair management.",
|
||||
"Url": "https://cloud.google.com/compute/confidential-vm/docs/creating-cvm-instance:https://cloud.google.com/compute/confidential-vm/docs/about-cvm:https://cloud.google.com/confidential-computing:https://cloud.google.com/blog/products/identity-security/introducing-google-cloud-confidential-computing-with-confidential-vms"
|
||||
}
|
||||
},
|
||||
"Categories": [],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -0,0 +1,22 @@
|
||||
from prowler.lib.check.models import Check, Check_Report_GCP
|
||||
from prowler.providers.gcp.services.compute.compute_client import compute_client
|
||||
|
||||
|
||||
class compute_project_os_login_enabled(Check):
    """Check that OS Login is enabled at the project level for centralized
    SSH key management."""

    def execute(self) -> Check_Report_GCP:
        """Return one finding per project: PASS when OS Login is enabled."""
        results = []
        for proj in compute_client.projects:
            finding = Check_Report_GCP(self.metadata())
            finding.project_id = proj.id
            finding.resource_id = proj.id
            finding.location = "global"
            if proj.enable_oslogin:
                finding.status = "PASS"
                finding.status_extended = f"Project {proj.id} has OS Login enabled"
            else:
                finding.status = "FAIL"
                finding.status_extended = (
                    f"Project {proj.id} does not have OS Login enabled"
                )
            results.append(finding)

        return results
|
||||
@@ -12,15 +12,23 @@ class Compute:
|
||||
self.project_ids = audit_info.project_ids
|
||||
self.default_project_id = audit_info.default_project_id
|
||||
self.client = generate_client(self.service, self.api_version, audit_info)
|
||||
self.region = "global"
|
||||
self.regions = set()
|
||||
self.zones = set()
|
||||
self.instances = []
|
||||
self.networks = []
|
||||
self.subnets = []
|
||||
self.firewalls = []
|
||||
self.projects = []
|
||||
self.load_balancers = []
|
||||
self.__get_url_maps__()
|
||||
self.__describe_backend_service__()
|
||||
self.__get_regions__()
|
||||
self.__get_projects__()
|
||||
self.__get_zones__()
|
||||
self.__get_instances__()
|
||||
self.__get_networks__()
|
||||
self.__get_subnetworks__()
|
||||
self.__get_firewalls__()
|
||||
|
||||
def __get_regions__(self):
|
||||
@@ -59,6 +67,22 @@ class Compute:
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
def __get_projects__(self):
    """Fetch project-level Compute settings (currently only the OS Login
    flag from common instance metadata) into ``self.projects``."""
    for project_id in self.project_ids:
        try:
            response = self.client.projects().get(project=project_id).execute()
            # NOTE(review): only the literal value "TRUE" counts as enabled
            # here — confirm the API never returns lowercase "true".
            oslogin_on = any(
                item["key"] == "enable-oslogin" and item["value"] == "TRUE"
                for item in response["commonInstanceMetadata"].get("items", [])
            )
            self.projects.append(
                Project(id=project_id, enable_oslogin=oslogin_on)
            )
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
|
||||
|
||||
def __get_instances__(self):
|
||||
for project_id in self.project_ids:
|
||||
try:
|
||||
@@ -88,6 +112,9 @@ class Compute:
|
||||
shielded_enabled_integrity_monitoring=instance[
|
||||
"shieldedInstanceConfig"
|
||||
]["enableIntegrityMonitoring"],
|
||||
confidential_computing=instance[
|
||||
"confidentialInstanceConfig"
|
||||
]["enableConfidentialCompute"],
|
||||
service_accounts=instance["serviceAccounts"],
|
||||
ip_forward=instance.get("canIpForward", False),
|
||||
disks_encryption=[
|
||||
@@ -120,10 +147,18 @@ class Compute:
|
||||
while request is not None:
|
||||
response = request.execute()
|
||||
for network in response.get("items", []):
|
||||
subnet_mode = (
|
||||
"legacy"
|
||||
if "autoCreateSubnetworks" not in network
|
||||
else "auto"
|
||||
if network["autoCreateSubnetworks"]
|
||||
else "custom"
|
||||
)
|
||||
self.networks.append(
|
||||
Network(
|
||||
name=network["name"],
|
||||
id=network["id"],
|
||||
subnet_mode=subnet_mode,
|
||||
project_id=project_id,
|
||||
)
|
||||
)
|
||||
@@ -136,6 +171,35 @@ class Compute:
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
def __get_subnetworks__(self):
|
||||
for project_id in self.project_ids:
|
||||
try:
|
||||
for region in self.regions:
|
||||
request = self.client.subnetworks().list(
|
||||
project=project_id, region=region
|
||||
)
|
||||
while request is not None:
|
||||
response = request.execute()
|
||||
for subnet in response.get("items", []):
|
||||
self.subnets.append(
|
||||
Subnet(
|
||||
name=subnet["name"],
|
||||
id=subnet["id"],
|
||||
project_id=project_id,
|
||||
flow_logs=subnet.get("enableFlowLogs", False),
|
||||
network=subnet["network"].split("/")[-1],
|
||||
region=region,
|
||||
)
|
||||
)
|
||||
|
||||
request = self.client.subnetworks().list_next(
|
||||
previous_request=request, previous_response=response
|
||||
)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
def __get_firewalls__(self):
|
||||
for project_id in self.project_ids:
|
||||
try:
|
||||
@@ -163,6 +227,47 @@ class Compute:
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
def __get_url_maps__(self):
    """Discover HTTP(S) load balancers by listing each project's URL maps
    and record them (with their default backend service) in
    ``self.load_balancers``."""
    for project_id in self.project_ids:
        try:
            request = self.client.urlMaps().list(project=project_id)
            while request is not None:
                response = request.execute()
                for url_map in response.get("items", []):
                    # NOTE(review): assumes every URL map carries a
                    # defaultService key — confirm for maps that only use
                    # path matchers.
                    self.load_balancers.append(
                        LoadBalancer(
                            name=url_map["name"],
                            id=url_map["id"],
                            service=url_map["defaultService"],
                            project_id=project_id,
                        )
                    )
                request = self.client.urlMaps().list_next(
                    previous_request=request, previous_response=response
                )
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
|
||||
|
||||
def __describe_backend_service__(self):
    """Resolve each load balancer's default backend service and record
    whether request logging is enabled on it.

    Mutates ``balancer.logging`` in place for every entry in
    ``self.load_balancers``. API errors are logged and the balancer keeps
    its default ``logging`` value (False).
    """
    for balancer in self.load_balancers:
        try:
            response = (
                self.client.backendServices()
                .get(
                    project=balancer.project_id,
                    # the stored service is a full URL; the API takes the name
                    backendService=balancer.service.split("/")[-1],
                )
                .execute()
            )
            # Default to {} (not False): a backend service without a
            # logConfig section must read as "logging disabled" instead of
            # raising AttributeError on False.get(...).
            balancer.logging = response.get("logConfig", {}).get("enable", False)
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
|
||||
|
||||
|
||||
class Instance(BaseModel):
|
||||
name: str
|
||||
@@ -173,6 +278,7 @@ class Instance(BaseModel):
|
||||
metadata: dict
|
||||
shielded_enabled_vtpm: bool
|
||||
shielded_enabled_integrity_monitoring: bool
|
||||
confidential_computing: bool
|
||||
service_accounts: list
|
||||
ip_forward: bool
|
||||
disks_encryption: list
|
||||
@@ -181,9 +287,19 @@ class Instance(BaseModel):
|
||||
class Network(BaseModel):
    """A VPC network as collected by ``Compute.__get_networks__``."""

    name: str
    id: str
    # "legacy", "auto", or "custom" — derived from autoCreateSubnetworks
    subnet_mode: str
    project_id: str
|
||||
|
||||
|
||||
class Subnet(BaseModel):
    """A VPC subnetwork together with its flow-logs setting."""

    name: str
    id: str
    # short name of the parent network (last URL segment)
    network: str
    project_id: str
    # maps the API's enableFlowLogs; False when the key is absent
    flow_logs: bool
    region: str
|
||||
|
||||
|
||||
class Firewall(BaseModel):
|
||||
name: str
|
||||
id: str
|
||||
@@ -191,3 +307,16 @@ class Firewall(BaseModel):
|
||||
direction: str
|
||||
allowed_rules: list
|
||||
project_id: str
|
||||
|
||||
|
||||
class Project(BaseModel):
    """Project-level Compute settings used by the project checks."""

    id: str
    # True only when common instance metadata has enable-oslogin == "TRUE"
    enable_oslogin: bool
|
||||
|
||||
|
||||
class LoadBalancer(BaseModel):
    """An HTTP(S) load balancer discovered through its URL map."""

    name: str
    id: str
    # full URL of the default backend service
    service: str
    # filled in later by __describe_backend_service__
    logging: bool = False
    project_id: str
|
||||
|
||||
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"Provider": "gcp",
|
||||
"CheckID": "compute_subnet_flow_logs_enabled",
|
||||
"CheckTitle": "Enable VPC Flow Logs for VPC Subnets",
|
||||
"CheckType": [],
|
||||
"ServiceName": "compute",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "Subnet",
|
||||
"Description": "Ensure that VPC Flow Logs is enabled for every subnet created within your production Virtual Private Cloud (VPC) network. Flow Logs is a logging feature that enables users to capture information about the IP traffic (accepted, rejected, or all traffic) going to and from the network interfaces (ENIs) available within your VPC subnets.",
|
||||
"Risk": "By default, the VPC Flow Logs feature is disabled when a new VPC network subnet is created. Once enabled, VPC Flow Logs will start collecting network traffic data to and from your Virtual Private Cloud (VPC) subnets, logging data that can be useful for understanding network usage, network traffic expense optimization, network forensics, and real-time security analysis. To enhance Google Cloud VPC network visibility and security it is strongly recommended to enable Flow Logs for every business-critical or production VPC subnet.",
|
||||
"RelatedUrl": "",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "gcloud compute networks subnets update [SUBNET_NAME] --region [REGION] --enable-flow-logs",
|
||||
"NativeIaC": "",
|
||||
"Other": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/gcp/CloudVPC/enable-vpc-flow-logs.html",
|
||||
"Terraform": "https://docs.bridgecrew.io/docs/bc_gcp_logging_1#terraform"
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Ensure that VPC Flow Logs is enabled for every subnet created within your production Virtual Private Cloud (VPC) network. Flow Logs is a logging feature that enables users to capture information about the IP traffic (accepted, rejected, or all traffic) going to and from the network interfaces (ENIs) available within your VPC subnets.",
|
||||
"Url": "https://cloud.google.com/vpc/docs/using-flow-logs#enabling_vpc_flow_logging"
|
||||
}
|
||||
},
|
||||
"Categories": [],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -0,0 +1,21 @@
|
||||
from prowler.lib.check.models import Check, Check_Report_GCP
|
||||
from prowler.providers.gcp.services.compute.compute_client import compute_client
|
||||
|
||||
|
||||
class compute_subnet_flow_logs_enabled(Check):
    """Check that VPC Flow Logs are enabled on every subnetwork."""

    def execute(self) -> Check_Report_GCP:
        """Return one finding per subnet: PASS when flow logs are enabled."""
        results = []
        for sn in compute_client.subnets:
            finding = Check_Report_GCP(self.metadata())
            finding.project_id = sn.project_id
            finding.resource_id = sn.id
            finding.resource_name = sn.name
            finding.location = sn.region
            if sn.flow_logs:
                finding.status = "PASS"
                finding.status_extended = f"Subnet {sn.name} in network {sn.network} has flow logs enabled"
            else:
                finding.status = "FAIL"
                finding.status_extended = f"Subnet {sn.name} in network {sn.network} does not have flow logs enabled"
            results.append(finding)

        return results
|
||||
@@ -12,12 +12,12 @@ class dns_dnssec_disabled(Check):
|
||||
report.resource_name = managed_zone.name
|
||||
report.status = "PASS"
|
||||
report.status_extended = (
|
||||
f"Cloud DNS {managed_zone.name} have DNSSEC enabled."
|
||||
f"Cloud DNS {managed_zone.name} has DNSSEC enabled."
|
||||
)
|
||||
if not managed_zone.dnssec:
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
f"Cloud DNS {managed_zone.name} doens't have DNSSEC enabled."
|
||||
f"Cloud DNS {managed_zone.name} doesn't have DNSSEC enabled."
|
||||
)
|
||||
findings.append(report)
|
||||
|
||||
|
||||
@@ -12,8 +12,11 @@ class DNS:
|
||||
self.project_ids = audit_info.project_ids
|
||||
self.default_project_id = audit_info.default_project_id
|
||||
self.client = generate_client(self.service, self.api_version, audit_info)
|
||||
self.region = "global"
|
||||
self.managed_zones = []
|
||||
self.__get_managed_zones__()
|
||||
self.policies = []
|
||||
self.__get_policies__()
|
||||
|
||||
def __get_managed_zones__(self):
|
||||
for project_id in self.project_ids:
|
||||
@@ -42,6 +45,35 @@ class DNS:
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
def __get_policies__(self):
    """Collect every project's Cloud DNS server policies, storing each
    policy's logging flag and the short names of the networks it covers."""
    for project_id in self.project_ids:
        try:
            request = self.client.policies().list(project=project_id)
            while request is not None:
                response = request.execute()

                for policy in response.get("policies", []):
                    # keep only the last URL segment (the network name)
                    covered_networks = [
                        net["networkUrl"].split("/")[-1]
                        for net in policy.get("networks", [])
                    ]
                    self.policies.append(
                        Policy(
                            name=policy["name"],
                            id=policy["id"],
                            logging=policy.get("enableLogging", False),
                            networks=covered_networks,
                            project_id=project_id,
                        )
                    )

                request = self.client.policies().list_next(
                    previous_request=request, previous_response=response
                )
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
|
||||
|
||||
|
||||
class ManagedZone(BaseModel):
|
||||
name: str
|
||||
@@ -49,3 +81,11 @@ class ManagedZone(BaseModel):
|
||||
dnssec: bool
|
||||
key_specs: list
|
||||
project_id: str
|
||||
|
||||
|
||||
class Policy(BaseModel):
    """A Cloud DNS server policy."""

    name: str
    id: str
    # maps the API's enableLogging flag
    logging: bool
    # short names of the VPC networks the policy is attached to
    networks: list
    project_id: str
|
||||
|
||||
@@ -0,0 +1,4 @@
|
||||
from prowler.providers.gcp.lib.audit_info.audit_info import gcp_audit_info
|
||||
from prowler.providers.gcp.services.iam.iam_service import AccessApproval
|
||||
|
||||
accessapproval_client = AccessApproval(gcp_audit_info)
|
||||
@@ -0,0 +1,4 @@
|
||||
from prowler.providers.gcp.lib.audit_info.audit_info import gcp_audit_info
|
||||
from prowler.providers.gcp.services.iam.iam_service import EssentialContacts
|
||||
|
||||
essentialcontacts_client = EssentialContacts(gcp_audit_info)
|
||||
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"Provider": "gcp",
|
||||
"CheckID": "iam_account_access_approval_enabled",
|
||||
"CheckTitle": "Ensure Access Approval is Enabled in your account",
|
||||
"CheckType": [],
|
||||
"ServiceName": "iam",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "high",
|
||||
"ResourceType": "Account",
|
||||
"Description": "Ensure that Access Approval is enabled within your Google Cloud Platform (GCP) account in order to allow you to require your explicit approval whenever Google personnel need to access your GCP projects. Once the Access Approval feature is enabled, you can delegate users within your organization who can approve the access requests by giving them a security role in Identity and Access Management (IAM). These requests show the requester name/ID in an email or Pub/Sub message that you can choose to approve. This creates a new control and logging layer that reveals who in your organization approved/denied access requests to your projects.",
|
||||
"Risk": "Controlling access to your Google Cloud data is crucial when working with business-critical and sensitive data. With Access Approval, you can be certain that your cloud information is accessed by approved Google personnel only. The Access Approval feature ensures that a cryptographically-signed approval is available for Google Cloud support and engineering teams when they need to access your cloud data (certain exceptions apply). By default, Access Approval and its dependency of Access Transparency are not enabled.",
|
||||
"RelatedUrl": "",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/gcp/CloudIAM/enable-access-approval.html",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Ensure that Access Approval is enabled within your Google Cloud Platform (GCP) account in order to allow you to require your explicit approval whenever Google personnel need to access your GCP projects. Once the Access Approval feature is enabled, you can delegate users within your organization who can approve the access requests by giving them a security role in Identity and Access Management (IAM). These requests show the requester name/ID in an email or Pub/Sub message that you can choose to approve. This creates a new control and logging layer that reveals who in your organization approved/denied access requests to your projects.",
|
||||
"Url": "https://cloud.google.com/cloud-provider-access-management/access-approval/docs"
|
||||
}
|
||||
},
|
||||
"Categories": [],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -0,0 +1,24 @@
|
||||
from prowler.lib.check.models import Check, Check_Report_GCP
from prowler.providers.gcp.services.iam.accessapproval_client import (
    accessapproval_client,
)


class iam_account_access_approval_enabled(Check):
    """Report whether each audited project has Access Approval enabled."""

    def execute(self) -> Check_Report_GCP:
        findings = []
        for project_id in accessapproval_client.project_ids:
            report = Check_Report_GCP(self.metadata())
            report.project_id = project_id
            report.resource_id = project_id
            report.location = accessapproval_client.region
            # A project absent from the client's settings map has the
            # Access Approval feature disabled.
            if project_id in accessapproval_client.settings:
                report.status = "PASS"
                report.status_extended = (
                    f"Project {project_id} has Access Approval enabled"
                )
            else:
                report.status = "FAIL"
                report.status_extended = (
                    f"Project {project_id} does not have Access Approval enabled"
                )
            findings.append(report)

        return findings
|
||||
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"Provider": "gcp",
|
||||
"CheckID": "iam_audit_logs_enabled",
|
||||
"CheckTitle": "Configure Google Cloud Audit Logs to Track All Activities",
|
||||
"CheckType": [],
|
||||
"ServiceName": "iam",
|
||||
"SubServiceName": "Audit Logs",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "",
|
||||
"Description": "Ensure that Google Cloud Audit Logs feature is configured to track Data Access logs for all Google Cloud Platform (GCP) services and users, in order to enhance overall access security and meet compliance requirements. Once configured, the feature can record all admin related activities, as well as all the read and write access requests to user data.",
|
||||
"Risk": "In order to maintain an effective Google Cloud audit configuration for your project, folder, and organization, all 3 types of Data Access logs (ADMIN_READ, DATA_READ and DATA_WRITE) must be enabled for all supported GCP services. Also, Data Access logs should be captured for all IAM users, without exempting any of them. Exemptions let you control which users generate audit logs. When you add an exempted user to your log configuration, audit logs are not created for that user, for the selected log type(s). Data Access audit logs are disabled by default and must be explicitly enabled based on your business requirements.",
|
||||
"RelatedUrl": "",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/gcp/CloudIAM/record-all-activities.html",
|
||||
"Terraform": "https://docs.bridgecrew.io/docs/ensure-that-cloud-audit-logging-is-configured-properly-across-all-services-and-all-users-from-a-project#terraform"
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "It is recommended that Cloud Audit Logging is configured to track all admin activities and read, write access to user data.",
|
||||
"Url": "https://cloud.google.com/logging/docs/audit/"
|
||||
}
|
||||
},
|
||||
"Categories": [],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -0,0 +1,24 @@
|
||||
from prowler.lib.check.models import Check, Check_Report_GCP
from prowler.providers.gcp.services.cloudresourcemanager.cloudresourcemanager_client import (
    cloudresourcemanager_client,
)


class iam_audit_logs_enabled(Check):
    """Report whether Cloud Audit Logging is configured for each project."""

    def execute(self) -> Check_Report_GCP:
        findings = []
        for project in cloudresourcemanager_client.projects:
            report = Check_Report_GCP(self.metadata())
            report.project_id = project.id
            report.location = cloudresourcemanager_client.region
            report.resource_id = project.id
            if project.audit_logging:
                report.status = "PASS"
                report.status_extended = (
                    f"Audit Logs are enabled for project {project.id}"
                )
            else:
                report.status = "FAIL"
                report.status_extended = (
                    f"Audit Logs are not enabled for project {project.id}"
                )
            findings.append(report)

        return findings
|
||||
@@ -13,6 +13,7 @@ class iam_no_service_roles_at_project_level(Check):
|
||||
report.project_id = binding.project_id
|
||||
report.resource_id = binding.role
|
||||
report.resource_name = binding.role
|
||||
report.location = cloudresourcemanager_client.region
|
||||
if binding.role in [
|
||||
"roles/iam.serviceAccountUser",
|
||||
"roles/iam.serviceAccountTokenCreator",
|
||||
|
||||
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"Provider": "gcp",
|
||||
"CheckID": "iam_organization_essential_contacts_configured",
|
||||
"CheckTitle": "Ensure Essential Contacts is Configured for Organization",
|
||||
"CheckType": [],
|
||||
"ServiceName": "iam",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "Organization",
|
||||
"Description": "It is recommended that Essential Contacts is configured to designate email addresses for Google Cloud services to notify of important technical or security information.",
|
||||
"Risk": "Google Cloud Platform (GCP) services, such as Cloud Billing, send out billing notifications to share important information with the cloud platform users. By default, these types of notifications are sent to members with certain Identity and Access Management (IAM) roles such as 'roles/owner' and 'roles/billing.admin'. With Essential Contacts, you can specify exactly who receives important notifications by providing your own list of contacts (i.e. email addresses).",
|
||||
"RelatedUrl": "",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "gcloud essential-contacts create --email=<EMAIL> --notification-categories=<NOTIFICATION_CATEGORIES> --organization=<ORGANIZATION_ID>",
|
||||
"NativeIaC": "",
|
||||
"Other": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/gcp/CloudIAM/essential-contacts.html",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "It is recommended that Essential Contacts is configured to designate email addresses for Google Cloud services to notify of important technical or security information.",
|
||||
"Url": "https://cloud.google.com/resource-manager/docs/managing-notification-contacts"
|
||||
}
|
||||
},
|
||||
"Categories": [],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -0,0 +1,27 @@
|
||||
from prowler.lib.check.models import Check, Check_Report_GCP
from prowler.providers.gcp.services.iam.essentialcontacts_client import (
    essentialcontacts_client,
)


class iam_organization_essential_contacts_configured(Check):
    """Report whether each organization has Essential Contacts configured."""

    def execute(self) -> Check_Report_GCP:
        findings = []
        for organization in essentialcontacts_client.organizations:
            report = Check_Report_GCP(self.metadata())
            # Organizations have no project, so the org id doubles as both.
            report.project_id = organization.id
            report.resource_id = organization.id
            report.resource_name = organization.name
            report.location = essentialcontacts_client.region
            if organization.contacts:
                report.status = "PASS"
                report.status_extended = (
                    f"Organization {organization.name} has essential contacts configured"
                )
            else:
                report.status = "FAIL"
                report.status_extended = (
                    f"Organization {organization.name} does not have essential contacts configured"
                )
            findings.append(report)

        return findings
|
||||
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"Provider": "gcp",
|
||||
"CheckID": "iam_role_kms_enforce_separation_of_duties",
|
||||
"CheckTitle": "Enforce Separation of Duties for KMS-Related Roles",
|
||||
"CheckType": [],
|
||||
"ServiceName": "iam",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "IAMRole",
|
||||
"Description": "Ensure that separation of duties is enforced for all Cloud Key Management Service (KMS) related roles. The principle of separation of duties (also known as segregation of duties) has as its primary objective the prevention of fraud and human error. This objective is achieved by dismantling the tasks and the associated privileges for a specific business process among multiple users/identities. Google Cloud provides predefined roles that can be used to implement the principle of separation of duties, where it is needed. The predefined Cloud KMS Admin role is meant for users to manage KMS keys but not to use them. The Cloud KMS CryptoKey Encrypter/Decrypter roles are meant for services who can use keys to encrypt and decrypt data, but not to manage them. To adhere to cloud security best practices, your IAM users should not have the Admin role and any of the CryptoKey Encrypter/Decrypter roles assigned at the same time.",
|
||||
"Risk": "The principle of separation of duties can be enforced in order to eliminate the need for the IAM user/identity that has all the permissions needed to perform unwanted actions, such as using a cryptographic key to access and decrypt data which the user should not normally have access to.",
|
||||
"RelatedUrl": "",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/gcp/CloudIAM/enforce-separation-of-duties-for-kms-related-roles.html",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "It is recommended that the principle of 'Separation of Duties' is enforced while assigning KMS related roles to users.",
|
||||
"Url": "https://cloud.google.com/kms/docs/separation-of-duties"
|
||||
}
|
||||
},
|
||||
"Categories": [],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -0,0 +1,36 @@
|
||||
from prowler.lib.check.models import Check, Check_Report_GCP
from prowler.providers.gcp.services.cloudresourcemanager.cloudresourcemanager_client import (
    cloudresourcemanager_client,
)


class iam_role_kms_enforce_separation_of_duties(Check):
    """Check separation of duties for KMS-related roles.

    A project FAILs when the same member holds the Cloud KMS Admin role and
    any of the CryptoKey Encrypter/Decrypter roles at the same time.
    """

    def execute(self) -> Check_Report_GCP:
        findings = []
        for project in cloudresourcemanager_client.project_ids:
            report = Check_Report_GCP(self.metadata())
            report.project_id = project
            report.location = cloudresourcemanager_client.region
            report.resource_id = project
            report.status = "PASS"
            report.status_extended = f"Principle of separation of duties was enforced for KMS-Related Roles in project {project}"

            # First pass: collect every member holding the KMS Admin role.
            # BUG FIX: the original collected admins in the same single pass
            # that inspected the crypto-key roles, so an admin binding listed
            # AFTER an encrypter/decrypter binding was never matched — the
            # result depended on the order the API returned the bindings.
            kms_admin_members = set()
            for binding in cloudresourcemanager_client.bindings:
                if (
                    binding.project_id == project
                    and "roles/cloudkms.admin" in binding.role
                ):
                    kms_admin_members.update(binding.members)

            # Second pass: flag admins that also hold an encrypt/decrypt role.
            non_compliant_members = set()
            for binding in cloudresourcemanager_client.bindings:
                if binding.project_id == project and (
                    "roles/cloudkms.cryptoKeyEncrypterDecrypter" in binding.role
                    or "roles/cloudkms.cryptoKeyEncrypter" in binding.role
                    or "roles/cloudkms.cryptoKeyDecrypter" in binding.role
                ):
                    non_compliant_members.update(
                        member
                        for member in binding.members
                        if member in kms_admin_members
                    )

            if non_compliant_members:
                report.status = "FAIL"
                # sorted() keeps the message deterministic and de-duplicated.
                report.status_extended = f"Principle of separation of duties was not enforced for KMS-Related Roles in project {project} in members {','.join(sorted(non_compliant_members))}"
            findings.append(report)

        return findings
|
||||
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"Provider": "gcp",
|
||||
"CheckID": "iam_role_sa_enforce_separation_of_duties",
|
||||
"CheckTitle": "Enforce Separation of Duties for Service-Account Related Roles",
|
||||
"CheckType": [],
|
||||
"ServiceName": "iam",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "IAMRole",
|
||||
"Description": "Ensure that separation of duties (also known as segregation of duties - SoD) is enforced for all Google Cloud Platform (GCP) service-account related roles. The security principle of separation of duties has as its primary objective the prevention of fraud and human error. This objective is achieved by disbanding the tasks and associated privileges for a specific business process among multiple users/members. To follow security best practices, your GCP service accounts should not have the Service Account Admin and Service Account User roles assigned at the same time.",
|
||||
"Risk": "The principle of separation of duties should be enforced in order to eliminate the need for high-privileged IAM members, as the permissions granted to these members can allow them to perform malicious or unwanted actions.",
|
||||
"RelatedUrl": "",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/gcp/CloudIAM/enforce-separation-of-duties-for-service-account-roles.html",
|
||||
"Terraform": "https://docs.bridgecrew.io/docs/bc_gcp_iam_10#terraform"
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Ensure that separation of duties (also known as segregation of duties - SoD) is enforced for all Google Cloud Platform (GCP) service-account related roles. The security principle of separation of duties has as its primary objective the prevention of fraud and human error. This objective is achieved by disbanding the tasks and associated privileges for a specific business process among multiple users/members. To follow security best practices, your GCP service accounts should not have the Service Account Admin and Service Account User roles assigned at the same time.",
|
||||
"Url": "https://cloud.google.com/iam/docs/understanding-roles"
|
||||
}
|
||||
},
|
||||
"Categories": [],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -0,0 +1,29 @@
|
||||
from prowler.lib.check.models import Check, Check_Report_GCP
from prowler.providers.gcp.services.cloudresourcemanager.cloudresourcemanager_client import (
    cloudresourcemanager_client,
)


class iam_role_sa_enforce_separation_of_duties(Check):
    """Check separation of duties for service-account related roles.

    A project FAILs when the same member holds BOTH the Service Account
    Admin and the Service Account User role (CIS GCP 1.8). Holding only one
    of the two roles is compliant.
    """

    def execute(self) -> Check_Report_GCP:
        findings = []
        for project in cloudresourcemanager_client.project_ids:
            report = Check_Report_GCP(self.metadata())
            report.project_id = project
            report.location = cloudresourcemanager_client.region
            report.resource_id = project
            report.status = "PASS"
            report.status_extended = f"Principle of separation of duties was enforced for Service-Account Related Roles in project {project}"

            # BUG FIX: the original flagged every member that held EITHER
            # role, failing projects with perfectly segregated assignments.
            # Separation of duties is only violated by members holding BOTH.
            sa_user_members = set()
            sa_admin_members = set()
            for binding in cloudresourcemanager_client.bindings:
                if binding.project_id != project:
                    continue
                if "roles/iam.serviceAccountUser" in binding.role:
                    sa_user_members.update(binding.members)
                if "roles/iam.serviceAccountAdmin" in binding.role:
                    sa_admin_members.update(binding.members)

            non_compliant_members = sa_user_members & sa_admin_members
            if non_compliant_members:
                report.status = "FAIL"
                # sorted() keeps the message deterministic.
                report.status_extended = f"Principle of separation of duties was not enforced for Service-Account Related Roles in project {project} in members {','.join(sorted(non_compliant_members))}"
            findings.append(report)

        return findings
|
||||
@@ -4,6 +4,9 @@ from pydantic import BaseModel
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.providers.gcp.gcp_provider import generate_client
|
||||
from prowler.providers.gcp.services.cloudresourcemanager.cloudresourcemanager_client import (
|
||||
cloudresourcemanager_client,
|
||||
)
|
||||
|
||||
|
||||
################## IAM
|
||||
@@ -103,3 +106,86 @@ class ServiceAccount(BaseModel):
|
||||
display_name: str
|
||||
keys: list[Key] = []
|
||||
project_id: str
|
||||
|
||||
|
||||
################## AccessApproval
|
||||
class AccessApproval:
    """Wrapper around the GCP Access Approval API.

    ``settings`` maps each project id that has Access Approval configured to
    its ``Setting``; projects without settings are absent from the dict,
    which is exactly what iam_account_access_approval_enabled tests for.
    """

    def __init__(self, audit_info):
        self.service = "accessapproval"
        self.api_version = "v1"
        self.project_ids = audit_info.project_ids
        self.region = "global"
        self.client = generate_client(self.service, self.api_version, audit_info)
        self.settings = {}
        self.__get_settings__()

    def __get_client__(self):
        return self.client

    def __get_settings__(self):
        """Fetch per-project Access Approval settings, logging API errors."""
        for project_id in self.project_ids:
            try:
                response = (
                    self.client.projects().getAccessApprovalSettings(
                        name=f"projects/{project_id}/accessApprovalSettings"
                    )
                ).execute()
                # BUG FIX: the original did self.settings[project_id].append(...)
                # on a key that was never initialized, raising KeyError into the
                # except branch — settings stayed empty and the dependent check
                # always reported FAIL. Store the Setting directly instead.
                self.settings[project_id] = Setting(
                    name=response["name"],
                    project_id=project_id,
                )
            except Exception as error:
                logger.error(
                    f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
                )
|
||||
|
||||
|
||||
class Setting(BaseModel):
    """Access Approval settings resource for a single project."""

    # Full resource name, e.g. "projects/<id>/accessApprovalSettings".
    name: str
    # Project the settings belong to.
    project_id: str
|
||||
|
||||
|
||||
################## EssentialContacts
|
||||
class EssentialContacts:
    """Wrapper around the GCP Essential Contacts API.

    Populates ``organizations`` with one Organization per org visible to the
    Cloud Resource Manager client, flagging whether it has any contacts.
    """

    def __init__(self, audit_info):
        self.service = "essentialcontacts"
        self.api_version = "v1"
        self.region = "global"
        self.client = generate_client(self.service, self.api_version, audit_info)
        self.organizations = []
        self.__get_contacts__()

    def __get_client__(self):
        return self.client

    def __get_contacts__(self):
        """List contacts per organization, logging (and skipping) API errors."""
        for org in cloudresourcemanager_client.organizations:
            try:
                response = (
                    self.client.organizations()
                    .contacts()
                    .list(parent="organizations/" + org.id)
                ).execute()
                # "contacts" is omitted from the response when the org has
                # none; .get avoids the KeyError the original raised here.
                contacts = len(response.get("contacts", [])) > 0

                # BUG FIX: the original passed email=org.id, which failed
                # Organization validation (the model has "id", not "email"),
                # so every org fell into the except branch and the list
                # stayed empty.
                self.organizations.append(
                    Organization(
                        name=org.name,
                        id=org.id,
                        contacts=contacts,
                    )
                )
            except Exception as error:
                logger.error(
                    f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
                )
|
||||
|
||||
|
||||
class Organization(BaseModel):
    """Organization with its Essential Contacts state."""

    # Display name of the organization.
    name: str
    # Numeric organization id (without the "organizations/" prefix).
    id: str
    # True when the organization has at least one essential contact.
    contacts: bool
|
||||
|
||||
@@ -0,0 +1,6 @@
|
||||
from prowler.providers.gcp.lib.audit_info.audit_info import gcp_audit_info
from prowler.providers.gcp.services.serviceusage.serviceusage_service import (
    ServiceUsage,
)

# Module-level singleton shared by every Service Usage check; instantiated
# once at import time against the global GCP audit info.
serviceusage_client = ServiceUsage(gcp_audit_info)
|
||||
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"Provider": "gcp",
|
||||
"CheckID": "serviceusage_cloudasset_inventory_enabled",
|
||||
"CheckTitle": "Ensure Cloud Asset Inventory Is Enabled",
|
||||
"CheckType": [],
|
||||
"ServiceName": "serviceusage",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "high",
|
||||
"ResourceType": "Service",
|
||||
"Description": "GCP Cloud Asset Inventory is services that provides a historical view of GCP resources and IAM policies through a time-series database. The information recorded includes metadata on Google Cloud resources, metadata on policies set on Google Cloud projects or resources, and runtime information gathered within a Google Cloud resource.",
|
||||
"Risk": "Gaining insight into Google Cloud resources and policies is vital for tasks such as DevOps, security analytics, multi-cluster and fleet management, auditing, and governance. With Cloud Asset Inventory you can discover, monitor, and analyze all GCP assets in one place, achieving a better understanding of all your cloud assets across projects and services.",
|
||||
"RelatedUrl": "",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "gcloud services enable cloudasset.googleapis.com",
|
||||
"NativeIaC": "",
|
||||
"Other": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/gcp/CloudAPI/enabled-cloud-asset-inventory.html",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Ensure that Cloud Asset Inventory is enabled for all your GCP projects in order to efficiently manage the history and the inventory of your cloud resources. Google Cloud Asset Inventory is a fully managed metadata inventory service that allows you to view, monitor, analyze, and gain insights for your Google Cloud and Anthos assets. Cloud Asset Inventory is disabled by default in each GCP project.",
|
||||
"Url": "https://cloud.google.com/asset-inventory/docs"
|
||||
}
|
||||
},
|
||||
"Categories": [],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -0,0 +1,31 @@
|
||||
from prowler.lib.check.models import Check, Check_Report_GCP
from prowler.providers.gcp.services.serviceusage.serviceusage_client import (
    serviceusage_client,
)


class serviceusage_cloudasset_inventory_enabled(Check):
    """Report whether the Cloud Asset Inventory API is enabled per project."""

    def execute(self) -> Check_Report_GCP:
        findings = []
        for project_id in serviceusage_client.project_ids:
            report = Check_Report_GCP(self.metadata())
            report.project_id = project_id
            report.resource_id = "cloudasset.googleapis.com"
            report.resource_name = "Cloud Asset Inventory"
            report.location = serviceusage_client.region
            # The service is enabled when it appears among the project's
            # active (state:ENABLED) services.
            enabled = any(
                active_service.name == "cloudasset.googleapis.com"
                for active_service in serviceusage_client.active_services.get(
                    project_id, []
                )
            )
            if enabled:
                report.status = "PASS"
                report.status_extended = (
                    f"Cloud Asset Inventory is enabled in project {project_id}"
                )
            else:
                report.status = "FAIL"
                report.status_extended = (
                    f"Cloud Asset Inventory is not enabled in project {project_id}"
                )
            findings.append(report)

        return findings
|
||||
@@ -0,0 +1,51 @@
|
||||
from pydantic import BaseModel
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.providers.gcp.gcp_provider import generate_client
|
||||
|
||||
|
||||
################## ServiceUsage
|
||||
class ServiceUsage:
    """Wrapper around the GCP Service Usage API.

    Collects, per audited project, the services currently in state ENABLED
    so checks can test whether a given API is turned on.
    """

    def __init__(self, audit_info):
        self.service = "serviceusage"
        self.api_version = "v1"
        self.region = "global"
        self.project_ids = audit_info.project_ids
        self.client = generate_client(self.service, self.api_version, audit_info)
        # project_id -> list[Service] of enabled services.
        self.active_services = {}
        self.__get_active_services__()

    def __get_client__(self):
        return self.client

    def __get_active_services__(self):
        """Page through enabled services per project, logging API errors."""
        for project_id in self.project_ids:
            self.active_services[project_id] = []
            try:
                request = self.client.services().list(
                    parent="projects/" + project_id, filter="state:ENABLED"
                )
                while request is not None:
                    response = request.execute()
                    # ROBUSTNESS: "services" is omitted from a page with no
                    # results; .get avoids a KeyError that would abort the
                    # whole project via the except branch.
                    for service in response.get("services", []):
                        self.active_services[project_id].append(
                            Service(
                                # name comes as "projects/<id>/services/<api>";
                                # keep only the trailing API identifier.
                                name=service["name"].split("/")[-1],
                                title=service["config"]["title"],
                                project_id=project_id,
                            )
                        )

                    request = self.client.services().list_next(
                        previous_request=request, previous_response=response
                    )
            except Exception as error:
                logger.error(
                    f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
                )
|
||||
|
||||
|
||||
class Service(BaseModel):
    """An enabled GCP service within a project."""

    # API identifier, e.g. "cloudasset.googleapis.com".
    name: str
    # Human-readable title from the service config.
    title: str
    # Project in which the service is enabled.
    project_id: str
|
||||
@@ -40,6 +40,7 @@ class Test_apikeys_api_restrictions_configured:
|
||||
apikeys_client = mock.MagicMock
|
||||
apikeys_client.project_ids = [GCP_PROJECT_ID]
|
||||
apikeys_client.keys = [key]
|
||||
apikeys_client.region = "global"
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.gcp.services.apikeys.apikeys_api_restrictions_configured.apikeys_api_restrictions_configured.apikeys_client",
|
||||
@@ -55,7 +56,7 @@ class Test_apikeys_api_restrictions_configured:
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "PASS"
|
||||
assert search(
|
||||
f"API key {key.name} have restrictions configured.",
|
||||
f"API key {key.name} has restrictions configured.",
|
||||
result[0].status_extended,
|
||||
)
|
||||
assert result[0].resource_id == key.id
|
||||
@@ -74,6 +75,7 @@ class Test_apikeys_api_restrictions_configured:
|
||||
apikeys_client = mock.MagicMock
|
||||
apikeys_client.project_ids = [GCP_PROJECT_ID]
|
||||
apikeys_client.keys = [key]
|
||||
apikeys_client.region = "global"
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.gcp.services.apikeys.apikeys_api_restrictions_configured.apikeys_api_restrictions_configured.apikeys_client",
|
||||
@@ -114,6 +116,7 @@ class Test_apikeys_api_restrictions_configured:
|
||||
apikeys_client = mock.MagicMock
|
||||
apikeys_client.project_ids = [GCP_PROJECT_ID]
|
||||
apikeys_client.keys = [key]
|
||||
apikeys_client.region = "global"
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.gcp.services.apikeys.apikeys_api_restrictions_configured.apikeys_api_restrictions_configured.apikeys_client",
|
||||
|
||||
@@ -0,0 +1,65 @@
|
||||
from re import search
from unittest import mock

GCP_PROJECT_ID = "123456789012"


class Test_apikeys_key_exists:
    """Unit tests for the apikeys_key_exists check."""

    def test_apikeys_no_keys(self):
        """A project with no API keys must PASS."""
        # BUG FIX: the original assigned attributes on the MagicMock CLASS
        # (mock.MagicMock without parentheses), which sets class attributes
        # on MagicMock itself and leaks state into every other test. Use an
        # instance instead.
        apikeys_client = mock.MagicMock()
        apikeys_client.project_ids = [GCP_PROJECT_ID]
        apikeys_client.keys = []
        apikeys_client.region = "global"

        with mock.patch(
            "prowler.providers.gcp.services.apikeys.apikeys_key_exists.apikeys_key_exists.apikeys_client",
            new=apikeys_client,
        ):
            from prowler.providers.gcp.services.apikeys.apikeys_key_exists.apikeys_key_exists import (
                apikeys_key_exists,
            )

            check = apikeys_key_exists()
            result = check.execute()
            assert len(result) == 1
            assert result[0].status == "PASS"
            assert search(
                f"Project {GCP_PROJECT_ID} does not have active API Keys.",
                result[0].status_extended,
            )
            assert result[0].resource_id == GCP_PROJECT_ID

    def test_one_compliant_key(self):
        """A project with an active API key must FAIL."""
        from prowler.providers.gcp.services.apikeys.apikeys_service import Key

        key = Key(
            name="test",
            id="123",
            creation_time="2023-06-01T11:21:41.627509Z",
            restrictions={},
            project_id=GCP_PROJECT_ID,
        )

        # Same MagicMock-instance fix as above.
        apikeys_client = mock.MagicMock()
        apikeys_client.project_ids = [GCP_PROJECT_ID]
        apikeys_client.keys = [key]
        apikeys_client.region = "global"

        with mock.patch(
            "prowler.providers.gcp.services.apikeys.apikeys_key_exists.apikeys_key_exists.apikeys_client",
            new=apikeys_client,
        ):
            from prowler.providers.gcp.services.apikeys.apikeys_key_exists.apikeys_key_exists import (
                apikeys_key_exists,
            )

            check = apikeys_key_exists()
            result = check.execute()

            assert len(result) == 1
            assert result[0].status == "FAIL"
            assert search(
                f"Project {GCP_PROJECT_ID} has active API Keys.",
                result[0].status_extended,
            )
            assert result[0].resource_id == GCP_PROJECT_ID
|
||||
@@ -35,6 +35,7 @@ class Test_apikeys_key_rotated_in_90_days:
|
||||
apikeys_client = mock.MagicMock
|
||||
apikeys_client.project_ids = [GCP_PROJECT_ID]
|
||||
apikeys_client.keys = [key]
|
||||
apikeys_client.region = "global"
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.gcp.services.apikeys.apikeys_key_rotated_in_90_days.apikeys_key_rotated_in_90_days.apikeys_client",
|
||||
@@ -69,6 +70,7 @@ class Test_apikeys_key_rotated_in_90_days:
|
||||
apikeys_client = mock.MagicMock
|
||||
apikeys_client.project_ids = [GCP_PROJECT_ID]
|
||||
apikeys_client.keys = [key]
|
||||
apikeys_client.region = "global"
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.gcp.services.apikeys.apikeys_key_rotated_in_90_days.apikeys_key_rotated_in_90_days.apikeys_client",
|
||||
@@ -84,7 +86,7 @@ class Test_apikeys_key_rotated_in_90_days:
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
assert search(
|
||||
f"API key {key.name} creation date have more than 90 days.",
|
||||
f"API key {key.name} creation date has more than 90 days.",
|
||||
result[0].status_extended,
|
||||
)
|
||||
assert result[0].resource_id == key.id
|
||||
|
||||
@@ -4,21 +4,21 @@ from unittest import mock
|
||||
GCP_PROJECT_ID = "123456789012"
|
||||
|
||||
|
||||
class Test_compute_block_project_wide_ssh_keys_disabled:
|
||||
class Test_compute_instance_block_project_wide_ssh_keys_disabled:
|
||||
def test_compute_no_instances(self):
|
||||
compute_client = mock.MagicMock
|
||||
compute_client.project_ids = [GCP_PROJECT_ID]
|
||||
compute_client.instances = []
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_block_project_wide_ssh_keys_disabled.compute_block_project_wide_ssh_keys_disabled.compute_client",
|
||||
"prowler.providers.gcp.services.compute.compute_instance_block_project_wide_ssh_keys_disabled.compute_instance_block_project_wide_ssh_keys_disabled.compute_client",
|
||||
new=compute_client,
|
||||
):
|
||||
from prowler.providers.gcp.services.compute.compute_block_project_wide_ssh_keys_disabled.compute_block_project_wide_ssh_keys_disabled import (
|
||||
compute_block_project_wide_ssh_keys_disabled,
|
||||
from prowler.providers.gcp.services.compute.compute_instance_block_project_wide_ssh_keys_disabled.compute_instance_block_project_wide_ssh_keys_disabled import (
|
||||
compute_instance_block_project_wide_ssh_keys_disabled,
|
||||
)
|
||||
|
||||
check = compute_block_project_wide_ssh_keys_disabled()
|
||||
check = compute_instance_block_project_wide_ssh_keys_disabled()
|
||||
result = check.execute()
|
||||
assert len(result) == 0
|
||||
|
||||
@@ -33,6 +33,7 @@ class Test_compute_block_project_wide_ssh_keys_disabled:
|
||||
metadata={"items": [{"key": "block-project-ssh-keys", "value": "true"}]},
|
||||
shielded_enabled_vtpm=True,
|
||||
shielded_enabled_integrity_monitoring=True,
|
||||
confidential_computing=True,
|
||||
service_accounts=[],
|
||||
ip_forward=False,
|
||||
disks_encryption=[("disk1", False), ("disk2", False)],
|
||||
@@ -44,14 +45,14 @@ class Test_compute_block_project_wide_ssh_keys_disabled:
|
||||
compute_client.instances = [instance]
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_block_project_wide_ssh_keys_disabled.compute_block_project_wide_ssh_keys_disabled.compute_client",
|
||||
"prowler.providers.gcp.services.compute.compute_instance_block_project_wide_ssh_keys_disabled.compute_instance_block_project_wide_ssh_keys_disabled.compute_client",
|
||||
new=compute_client,
|
||||
):
|
||||
from prowler.providers.gcp.services.compute.compute_block_project_wide_ssh_keys_disabled.compute_block_project_wide_ssh_keys_disabled import (
|
||||
compute_block_project_wide_ssh_keys_disabled,
|
||||
from prowler.providers.gcp.services.compute.compute_instance_block_project_wide_ssh_keys_disabled.compute_instance_block_project_wide_ssh_keys_disabled import (
|
||||
compute_instance_block_project_wide_ssh_keys_disabled,
|
||||
)
|
||||
|
||||
check = compute_block_project_wide_ssh_keys_disabled()
|
||||
check = compute_instance_block_project_wide_ssh_keys_disabled()
|
||||
result = check.execute()
|
||||
|
||||
assert len(result) == 1
|
||||
@@ -73,6 +74,7 @@ class Test_compute_block_project_wide_ssh_keys_disabled:
|
||||
metadata={},
|
||||
shielded_enabled_vtpm=True,
|
||||
shielded_enabled_integrity_monitoring=True,
|
||||
confidential_computing=True,
|
||||
service_accounts=[],
|
||||
ip_forward=False,
|
||||
disks_encryption=[("disk1", False), ("disk2", False)],
|
||||
@@ -84,14 +86,14 @@ class Test_compute_block_project_wide_ssh_keys_disabled:
|
||||
compute_client.instances = [instance]
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_block_project_wide_ssh_keys_disabled.compute_block_project_wide_ssh_keys_disabled.compute_client",
|
||||
"prowler.providers.gcp.services.compute.compute_instance_block_project_wide_ssh_keys_disabled.compute_instance_block_project_wide_ssh_keys_disabled.compute_client",
|
||||
new=compute_client,
|
||||
):
|
||||
from prowler.providers.gcp.services.compute.compute_block_project_wide_ssh_keys_disabled.compute_block_project_wide_ssh_keys_disabled import (
|
||||
compute_block_project_wide_ssh_keys_disabled,
|
||||
from prowler.providers.gcp.services.compute.compute_instance_block_project_wide_ssh_keys_disabled.compute_instance_block_project_wide_ssh_keys_disabled import (
|
||||
compute_instance_block_project_wide_ssh_keys_disabled,
|
||||
)
|
||||
|
||||
check = compute_block_project_wide_ssh_keys_disabled()
|
||||
check = compute_instance_block_project_wide_ssh_keys_disabled()
|
||||
result = check.execute()
|
||||
|
||||
assert len(result) == 1
|
||||
@@ -113,6 +115,7 @@ class Test_compute_block_project_wide_ssh_keys_disabled:
|
||||
metadata={"items": [{"key": "block-project-ssh-keys", "value": "false"}]},
|
||||
shielded_enabled_vtpm=True,
|
||||
shielded_enabled_integrity_monitoring=True,
|
||||
confidential_computing=True,
|
||||
service_accounts=[],
|
||||
ip_forward=False,
|
||||
disks_encryption=[("disk1", False), ("disk2", False)],
|
||||
@@ -124,14 +127,14 @@ class Test_compute_block_project_wide_ssh_keys_disabled:
|
||||
compute_client.instances = [instance]
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_block_project_wide_ssh_keys_disabled.compute_block_project_wide_ssh_keys_disabled.compute_client",
|
||||
"prowler.providers.gcp.services.compute.compute_instance_block_project_wide_ssh_keys_disabled.compute_instance_block_project_wide_ssh_keys_disabled.compute_client",
|
||||
new=compute_client,
|
||||
):
|
||||
from prowler.providers.gcp.services.compute.compute_block_project_wide_ssh_keys_disabled.compute_block_project_wide_ssh_keys_disabled import (
|
||||
compute_block_project_wide_ssh_keys_disabled,
|
||||
from prowler.providers.gcp.services.compute.compute_instance_block_project_wide_ssh_keys_disabled.compute_instance_block_project_wide_ssh_keys_disabled import (
|
||||
compute_instance_block_project_wide_ssh_keys_disabled,
|
||||
)
|
||||
|
||||
check = compute_block_project_wide_ssh_keys_disabled()
|
||||
check = compute_instance_block_project_wide_ssh_keys_disabled()
|
||||
result = check.execute()
|
||||
|
||||
assert len(result) == 1
|
||||
|
||||
@@ -4,20 +4,20 @@ from unittest import mock
|
||||
GCP_PROJECT_ID = "123456789012"
|
||||
|
||||
|
||||
class Test_compute_default_service_account_in_use:
|
||||
class Test_compute_instance_default_service_account_in_use:
|
||||
def test_compute_no_instances(self):
|
||||
compute_client = mock.MagicMock
|
||||
compute_client.instances = []
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_default_service_account_in_use.compute_default_service_account_in_use.compute_client",
|
||||
"prowler.providers.gcp.services.compute.compute_instance_default_service_account_in_use.compute_instance_default_service_account_in_use.compute_client",
|
||||
new=compute_client,
|
||||
):
|
||||
from prowler.providers.gcp.services.compute.compute_default_service_account_in_use.compute_default_service_account_in_use import (
|
||||
compute_default_service_account_in_use,
|
||||
from prowler.providers.gcp.services.compute.compute_instance_default_service_account_in_use.compute_instance_default_service_account_in_use import (
|
||||
compute_instance_default_service_account_in_use,
|
||||
)
|
||||
|
||||
check = compute_default_service_account_in_use()
|
||||
check = compute_instance_default_service_account_in_use()
|
||||
result = check.execute()
|
||||
assert len(result) == 0
|
||||
|
||||
@@ -32,6 +32,7 @@ class Test_compute_default_service_account_in_use:
|
||||
metadata={},
|
||||
shielded_enabled_vtpm=True,
|
||||
shielded_enabled_integrity_monitoring=True,
|
||||
confidential_computing=True,
|
||||
ip_forward=False,
|
||||
disks_encryption=[("disk1", False), ("disk2", False)],
|
||||
service_accounts=[{"email": "custom@developer.gserviceaccount.com"}],
|
||||
@@ -43,14 +44,14 @@ class Test_compute_default_service_account_in_use:
|
||||
compute_client.instances = [instance]
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_default_service_account_in_use.compute_default_service_account_in_use.compute_client",
|
||||
"prowler.providers.gcp.services.compute.compute_instance_default_service_account_in_use.compute_instance_default_service_account_in_use.compute_client",
|
||||
new=compute_client,
|
||||
):
|
||||
from prowler.providers.gcp.services.compute.compute_default_service_account_in_use.compute_default_service_account_in_use import (
|
||||
compute_default_service_account_in_use,
|
||||
from prowler.providers.gcp.services.compute.compute_instance_default_service_account_in_use.compute_instance_default_service_account_in_use import (
|
||||
compute_instance_default_service_account_in_use,
|
||||
)
|
||||
|
||||
check = compute_default_service_account_in_use()
|
||||
check = compute_instance_default_service_account_in_use()
|
||||
result = check.execute()
|
||||
|
||||
assert len(result) == 1
|
||||
@@ -72,6 +73,7 @@ class Test_compute_default_service_account_in_use:
|
||||
metadata={},
|
||||
shielded_enabled_vtpm=True,
|
||||
shielded_enabled_integrity_monitoring=True,
|
||||
confidential_computing=True,
|
||||
service_accounts=[
|
||||
{"email": f"{GCP_PROJECT_ID}-compute@developer.gserviceaccount.com"}
|
||||
],
|
||||
@@ -85,14 +87,14 @@ class Test_compute_default_service_account_in_use:
|
||||
compute_client.instances = [instance]
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_default_service_account_in_use.compute_default_service_account_in_use.compute_client",
|
||||
"prowler.providers.gcp.services.compute.compute_instance_default_service_account_in_use.compute_instance_default_service_account_in_use.compute_client",
|
||||
new=compute_client,
|
||||
):
|
||||
from prowler.providers.gcp.services.compute.compute_default_service_account_in_use.compute_default_service_account_in_use import (
|
||||
compute_default_service_account_in_use,
|
||||
from prowler.providers.gcp.services.compute.compute_instance_default_service_account_in_use.compute_instance_default_service_account_in_use import (
|
||||
compute_instance_default_service_account_in_use,
|
||||
)
|
||||
|
||||
check = compute_default_service_account_in_use()
|
||||
check = compute_instance_default_service_account_in_use()
|
||||
result = check.execute()
|
||||
|
||||
assert len(result) == 1
|
||||
@@ -114,6 +116,7 @@ class Test_compute_default_service_account_in_use:
|
||||
metadata={},
|
||||
shielded_enabled_vtpm=True,
|
||||
shielded_enabled_integrity_monitoring=True,
|
||||
confidential_computing=True,
|
||||
service_accounts=[
|
||||
{"email": f"{GCP_PROJECT_ID}-compute@developer.gserviceaccount.com"}
|
||||
],
|
||||
@@ -127,14 +130,14 @@ class Test_compute_default_service_account_in_use:
|
||||
compute_client.instances = [instance]
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_default_service_account_in_use.compute_default_service_account_in_use.compute_client",
|
||||
"prowler.providers.gcp.services.compute.compute_instance_default_service_account_in_use.compute_instance_default_service_account_in_use.compute_client",
|
||||
new=compute_client,
|
||||
):
|
||||
from prowler.providers.gcp.services.compute.compute_default_service_account_in_use.compute_default_service_account_in_use import (
|
||||
compute_default_service_account_in_use,
|
||||
from prowler.providers.gcp.services.compute.compute_instance_default_service_account_in_use.compute_instance_default_service_account_in_use import (
|
||||
compute_instance_default_service_account_in_use,
|
||||
)
|
||||
|
||||
check = compute_default_service_account_in_use()
|
||||
check = compute_instance_default_service_account_in_use()
|
||||
result = check.execute()
|
||||
|
||||
assert len(result) == 1
|
||||
|
||||
@@ -4,20 +4,22 @@ from unittest import mock
|
||||
GCP_PROJECT_ID = "123456789012"
|
||||
|
||||
|
||||
class Test_compute_default_service_account_in_use_with_full_api_access:
|
||||
class Test_compute_instance_default_service_account_in_use_with_full_api_access:
|
||||
def test_compute_no_instances(self):
|
||||
compute_client = mock.MagicMock
|
||||
compute_client.instances = []
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_default_service_account_in_use_with_full_api_access.compute_default_service_account_in_use_with_full_api_access.compute_client",
|
||||
"prowler.providers.gcp.services.compute.compute_instance_default_service_account_in_use_with_full_api_access.compute_instance_default_service_account_in_use_with_full_api_access.compute_client",
|
||||
new=compute_client,
|
||||
):
|
||||
from prowler.providers.gcp.services.compute.compute_default_service_account_in_use_with_full_api_access.compute_default_service_account_in_use_with_full_api_access import (
|
||||
compute_default_service_account_in_use_with_full_api_access,
|
||||
from prowler.providers.gcp.services.compute.compute_instance_default_service_account_in_use_with_full_api_access.compute_instance_default_service_account_in_use_with_full_api_access import (
|
||||
compute_instance_default_service_account_in_use_with_full_api_access,
|
||||
)
|
||||
|
||||
check = compute_default_service_account_in_use_with_full_api_access()
|
||||
check = (
|
||||
compute_instance_default_service_account_in_use_with_full_api_access()
|
||||
)
|
||||
result = check.execute()
|
||||
assert len(result) == 0
|
||||
|
||||
@@ -32,6 +34,7 @@ class Test_compute_default_service_account_in_use_with_full_api_access:
|
||||
metadata={},
|
||||
shielded_enabled_vtpm=True,
|
||||
shielded_enabled_integrity_monitoring=True,
|
||||
confidential_computing=True,
|
||||
service_accounts=[
|
||||
{"email": "123-compute@developer.gserviceaccount.com", "scopes": []}
|
||||
],
|
||||
@@ -45,14 +48,16 @@ class Test_compute_default_service_account_in_use_with_full_api_access:
|
||||
compute_client.instances = [instance]
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_default_service_account_in_use_with_full_api_access.compute_default_service_account_in_use_with_full_api_access.compute_client",
|
||||
"prowler.providers.gcp.services.compute.compute_instance_default_service_account_in_use_with_full_api_access.compute_instance_default_service_account_in_use_with_full_api_access.compute_client",
|
||||
new=compute_client,
|
||||
):
|
||||
from prowler.providers.gcp.services.compute.compute_default_service_account_in_use_with_full_api_access.compute_default_service_account_in_use_with_full_api_access import (
|
||||
compute_default_service_account_in_use_with_full_api_access,
|
||||
from prowler.providers.gcp.services.compute.compute_instance_default_service_account_in_use_with_full_api_access.compute_instance_default_service_account_in_use_with_full_api_access import (
|
||||
compute_instance_default_service_account_in_use_with_full_api_access,
|
||||
)
|
||||
|
||||
check = compute_default_service_account_in_use_with_full_api_access()
|
||||
check = (
|
||||
compute_instance_default_service_account_in_use_with_full_api_access()
|
||||
)
|
||||
result = check.execute()
|
||||
|
||||
assert len(result) == 1
|
||||
@@ -74,6 +79,7 @@ class Test_compute_default_service_account_in_use_with_full_api_access:
|
||||
metadata={},
|
||||
shielded_enabled_vtpm=True,
|
||||
shielded_enabled_integrity_monitoring=True,
|
||||
confidential_computing=True,
|
||||
service_accounts=[
|
||||
{
|
||||
"email": f"{GCP_PROJECT_ID}-compute@developer.gserviceaccount.com",
|
||||
@@ -90,14 +96,16 @@ class Test_compute_default_service_account_in_use_with_full_api_access:
|
||||
compute_client.instances = [instance]
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_default_service_account_in_use_with_full_api_access.compute_default_service_account_in_use_with_full_api_access.compute_client",
|
||||
"prowler.providers.gcp.services.compute.compute_instance_default_service_account_in_use_with_full_api_access.compute_instance_default_service_account_in_use_with_full_api_access.compute_client",
|
||||
new=compute_client,
|
||||
):
|
||||
from prowler.providers.gcp.services.compute.compute_default_service_account_in_use_with_full_api_access.compute_default_service_account_in_use_with_full_api_access import (
|
||||
compute_default_service_account_in_use_with_full_api_access,
|
||||
from prowler.providers.gcp.services.compute.compute_instance_default_service_account_in_use_with_full_api_access.compute_instance_default_service_account_in_use_with_full_api_access import (
|
||||
compute_instance_default_service_account_in_use_with_full_api_access,
|
||||
)
|
||||
|
||||
check = compute_default_service_account_in_use_with_full_api_access()
|
||||
check = (
|
||||
compute_instance_default_service_account_in_use_with_full_api_access()
|
||||
)
|
||||
result = check.execute()
|
||||
|
||||
assert len(result) == 1
|
||||
@@ -119,6 +127,7 @@ class Test_compute_default_service_account_in_use_with_full_api_access:
|
||||
metadata={},
|
||||
shielded_enabled_vtpm=True,
|
||||
shielded_enabled_integrity_monitoring=True,
|
||||
confidential_computing=True,
|
||||
service_accounts=[
|
||||
{
|
||||
"email": f"{GCP_PROJECT_ID}-compute@developer.gserviceaccount.com",
|
||||
@@ -135,14 +144,16 @@ class Test_compute_default_service_account_in_use_with_full_api_access:
|
||||
compute_client.instances = [instance]
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_default_service_account_in_use_with_full_api_access.compute_default_service_account_in_use_with_full_api_access.compute_client",
|
||||
"prowler.providers.gcp.services.compute.compute_instance_default_service_account_in_use_with_full_api_access.compute_instance_default_service_account_in_use_with_full_api_access.compute_client",
|
||||
new=compute_client,
|
||||
):
|
||||
from prowler.providers.gcp.services.compute.compute_default_service_account_in_use_with_full_api_access.compute_default_service_account_in_use_with_full_api_access import (
|
||||
compute_default_service_account_in_use_with_full_api_access,
|
||||
from prowler.providers.gcp.services.compute.compute_instance_default_service_account_in_use_with_full_api_access.compute_instance_default_service_account_in_use_with_full_api_access import (
|
||||
compute_instance_default_service_account_in_use_with_full_api_access,
|
||||
)
|
||||
|
||||
check = compute_default_service_account_in_use_with_full_api_access()
|
||||
check = (
|
||||
compute_instance_default_service_account_in_use_with_full_api_access()
|
||||
)
|
||||
result = check.execute()
|
||||
|
||||
assert len(result) == 1
|
||||
|
||||
@@ -4,21 +4,21 @@ from unittest import mock
|
||||
GCP_PROJECT_ID = "123456789012"
|
||||
|
||||
|
||||
class Test_compute_encryption_with_csek_is_disabled:
|
||||
class Test_compute_instance_encryption_with_csek_enabled:
|
||||
def test_compute_no_instances(self):
|
||||
compute_client = mock.MagicMock
|
||||
compute_client.project_ids = [GCP_PROJECT_ID]
|
||||
compute_client.instances = []
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_encryption_with_csek_is_disabled.compute_encryption_with_csek_is_disabled.compute_client",
|
||||
"prowler.providers.gcp.services.compute.compute_instance_encryption_with_csek_enabled.compute_instance_encryption_with_csek_enabled.compute_client",
|
||||
new=compute_client,
|
||||
):
|
||||
from prowler.providers.gcp.services.compute.compute_encryption_with_csek_is_disabled.compute_encryption_with_csek_is_disabled import (
|
||||
compute_encryption_with_csek_is_disabled,
|
||||
from prowler.providers.gcp.services.compute.compute_instance_encryption_with_csek_enabled.compute_instance_encryption_with_csek_enabled import (
|
||||
compute_instance_encryption_with_csek_enabled,
|
||||
)
|
||||
|
||||
check = compute_encryption_with_csek_is_disabled()
|
||||
check = compute_instance_encryption_with_csek_enabled()
|
||||
result = check.execute()
|
||||
assert len(result) == 0
|
||||
|
||||
@@ -33,6 +33,7 @@ class Test_compute_encryption_with_csek_is_disabled:
|
||||
metadata={"items": [{"key": "block-project-ssh-keys", "value": "true"}]},
|
||||
shielded_enabled_vtpm=True,
|
||||
shielded_enabled_integrity_monitoring=True,
|
||||
confidential_computing=True,
|
||||
service_accounts=[],
|
||||
ip_forward=False,
|
||||
disks_encryption=[("disk1", True), ("disk2", True)],
|
||||
@@ -44,20 +45,20 @@ class Test_compute_encryption_with_csek_is_disabled:
|
||||
compute_client.instances = [instance]
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_encryption_with_csek_is_disabled.compute_encryption_with_csek_is_disabled.compute_client",
|
||||
"prowler.providers.gcp.services.compute.compute_instance_encryption_with_csek_enabled.compute_instance_encryption_with_csek_enabled.compute_client",
|
||||
new=compute_client,
|
||||
):
|
||||
from prowler.providers.gcp.services.compute.compute_encryption_with_csek_is_disabled.compute_encryption_with_csek_is_disabled import (
|
||||
compute_encryption_with_csek_is_disabled,
|
||||
from prowler.providers.gcp.services.compute.compute_instance_encryption_with_csek_enabled.compute_instance_encryption_with_csek_enabled import (
|
||||
compute_instance_encryption_with_csek_enabled,
|
||||
)
|
||||
|
||||
check = compute_encryption_with_csek_is_disabled()
|
||||
check = compute_instance_encryption_with_csek_enabled()
|
||||
result = check.execute()
|
||||
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "PASS"
|
||||
assert search(
|
||||
f"The VM Instance {instance.name} have every disk encrypted.",
|
||||
f"The VM Instance {instance.name} has every disk encrypted.",
|
||||
result[0].status_extended,
|
||||
)
|
||||
assert result[0].resource_id == instance.id
|
||||
@@ -73,6 +74,7 @@ class Test_compute_encryption_with_csek_is_disabled:
|
||||
metadata={},
|
||||
shielded_enabled_vtpm=True,
|
||||
shielded_enabled_integrity_monitoring=True,
|
||||
confidential_computing=True,
|
||||
service_accounts=[],
|
||||
ip_forward=False,
|
||||
disks_encryption=[("disk1", False), ("disk2", True)],
|
||||
@@ -84,20 +86,20 @@ class Test_compute_encryption_with_csek_is_disabled:
|
||||
compute_client.instances = [instance]
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_encryption_with_csek_is_disabled.compute_encryption_with_csek_is_disabled.compute_client",
|
||||
"prowler.providers.gcp.services.compute.compute_instance_encryption_with_csek_enabled.compute_instance_encryption_with_csek_enabled.compute_client",
|
||||
new=compute_client,
|
||||
):
|
||||
from prowler.providers.gcp.services.compute.compute_encryption_with_csek_is_disabled.compute_encryption_with_csek_is_disabled import (
|
||||
compute_encryption_with_csek_is_disabled,
|
||||
from prowler.providers.gcp.services.compute.compute_instance_encryption_with_csek_enabled.compute_instance_encryption_with_csek_enabled import (
|
||||
compute_instance_encryption_with_csek_enabled,
|
||||
)
|
||||
|
||||
check = compute_encryption_with_csek_is_disabled()
|
||||
check = compute_instance_encryption_with_csek_enabled()
|
||||
result = check.execute()
|
||||
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
assert search(
|
||||
f"The VM Instance {instance.name} have the following unencrypted disks: '{', '.join([i[0] for i in instance.disks_encryption if not i[1]])}'",
|
||||
f"The VM Instance {instance.name} has the following unencrypted disks: '{', '.join([i[0] for i in instance.disks_encryption if not i[1]])}'",
|
||||
result[0].status_extended,
|
||||
)
|
||||
assert result[0].resource_id == instance.id
|
||||
@@ -113,6 +115,7 @@ class Test_compute_encryption_with_csek_is_disabled:
|
||||
metadata={"items": [{"key": "block-project-ssh-keys", "value": "false"}]},
|
||||
shielded_enabled_vtpm=True,
|
||||
shielded_enabled_integrity_monitoring=True,
|
||||
confidential_computing=True,
|
||||
service_accounts=[],
|
||||
ip_forward=False,
|
||||
disks_encryption=[("disk1", False), ("disk2", False)],
|
||||
@@ -124,20 +127,20 @@ class Test_compute_encryption_with_csek_is_disabled:
|
||||
compute_client.instances = [instance]
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_encryption_with_csek_is_disabled.compute_encryption_with_csek_is_disabled.compute_client",
|
||||
"prowler.providers.gcp.services.compute.compute_instance_encryption_with_csek_enabled.compute_instance_encryption_with_csek_enabled.compute_client",
|
||||
new=compute_client,
|
||||
):
|
||||
from prowler.providers.gcp.services.compute.compute_encryption_with_csek_is_disabled.compute_encryption_with_csek_is_disabled import (
|
||||
compute_encryption_with_csek_is_disabled,
|
||||
from prowler.providers.gcp.services.compute.compute_instance_encryption_with_csek_enabled.compute_instance_encryption_with_csek_enabled import (
|
||||
compute_instance_encryption_with_csek_enabled,
|
||||
)
|
||||
|
||||
check = compute_encryption_with_csek_is_disabled()
|
||||
check = compute_instance_encryption_with_csek_enabled()
|
||||
result = check.execute()
|
||||
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
assert search(
|
||||
f"The VM Instance {instance.name} have the following unencrypted disks: '{', '.join([i[0] for i in instance.disks_encryption if not i[1]])}'",
|
||||
f"The VM Instance {instance.name} has the following unencrypted disks: '{', '.join([i[0] for i in instance.disks_encryption if not i[1]])}'",
|
||||
result[0].status_extended,
|
||||
)
|
||||
assert result[0].resource_id == instance.id
|
||||
|
||||
@@ -0,0 +1,111 @@
|
||||
from re import search
|
||||
from unittest import mock
|
||||
|
||||
GCP_PROJECT_ID = "123456789012"
|
||||
|
||||
|
||||
class Test_compute_instance_confidential_computing_enabled:
|
||||
def test_compute_no_instances(self):
|
||||
compute_client = mock.MagicMock
|
||||
compute_client.project_ids = [GCP_PROJECT_ID]
|
||||
compute_client.instances = []
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_instance_confidential_computing_enabled.compute_instance_confidential_computing_enabled.compute_client",
|
||||
new=compute_client,
|
||||
):
|
||||
from prowler.providers.gcp.services.compute.compute_instance_confidential_computing_enabled.compute_instance_confidential_computing_enabled import (
|
||||
compute_instance_confidential_computing_enabled,
|
||||
)
|
||||
|
||||
check = compute_instance_confidential_computing_enabled()
|
||||
result = check.execute()
|
||||
assert len(result) == 0
|
||||
|
||||
def test_one_compliant_instance(self):
|
||||
from prowler.providers.gcp.services.compute.compute_service import Instance
|
||||
|
||||
instance = Instance(
|
||||
name="test",
|
||||
id="1234567890",
|
||||
zone="us-central1-a",
|
||||
public_ip=True,
|
||||
metadata={},
|
||||
shielded_enabled_vtpm=True,
|
||||
shielded_enabled_integrity_monitoring=True,
|
||||
confidential_computing=True,
|
||||
service_accounts=[],
|
||||
ip_forward=False,
|
||||
disks_encryption=[("disk1", False), ("disk2", False)],
|
||||
project_id=GCP_PROJECT_ID,
|
||||
)
|
||||
|
||||
compute_client = mock.MagicMock
|
||||
compute_client.project_ids = [GCP_PROJECT_ID]
|
||||
compute_client.instances = [instance]
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_instance_confidential_computing_enabled.compute_instance_confidential_computing_enabled.compute_client",
|
||||
new=compute_client,
|
||||
):
|
||||
from prowler.providers.gcp.services.compute.compute_instance_confidential_computing_enabled.compute_instance_confidential_computing_enabled import (
|
||||
compute_instance_confidential_computing_enabled,
|
||||
)
|
||||
|
||||
check = compute_instance_confidential_computing_enabled()
|
||||
result = check.execute()
|
||||
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "PASS"
|
||||
assert search(
|
||||
f"VM Instance {instance.name} has Confidential Computing enabled",
|
||||
result[0].status_extended,
|
||||
)
|
||||
assert result[0].resource_id == instance.id
|
||||
assert result[0].resource_name == instance.name
|
||||
assert result[0].location == instance.zone
|
||||
assert result[0].project_id == GCP_PROJECT_ID
|
||||
|
||||
def test_one_instance_with_shielded_vtpm_disabled(self):
|
||||
from prowler.providers.gcp.services.compute.compute_service import Instance
|
||||
|
||||
instance = Instance(
|
||||
name="test",
|
||||
id="1234567890",
|
||||
zone="us-central1-a",
|
||||
public_ip=True,
|
||||
metadata={},
|
||||
shielded_enabled_vtpm=False,
|
||||
shielded_enabled_integrity_monitoring=True,
|
||||
confidential_computing=False,
|
||||
service_accounts=[],
|
||||
ip_forward=False,
|
||||
disks_encryption=[("disk1", False), ("disk2", False)],
|
||||
project_id=GCP_PROJECT_ID,
|
||||
)
|
||||
|
||||
compute_client = mock.MagicMock
|
||||
compute_client.project_ids = [GCP_PROJECT_ID]
|
||||
compute_client.instances = [instance]
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_instance_confidential_computing_enabled.compute_instance_confidential_computing_enabled.compute_client",
|
||||
new=compute_client,
|
||||
):
|
||||
from prowler.providers.gcp.services.compute.compute_instance_confidential_computing_enabled.compute_instance_confidential_computing_enabled import (
|
||||
compute_instance_confidential_computing_enabled,
|
||||
)
|
||||
|
||||
check = compute_instance_confidential_computing_enabled()
|
||||
result = check.execute()
|
||||
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
assert search(
|
||||
f"VM Instance {instance.name} does not have Confidential Computing enabled",
|
||||
result[0].status_extended,
|
||||
)
|
||||
assert result[0].resource_id == instance.id
|
||||
assert result[0].resource_name == instance.name
|
||||
assert result[0].location == instance.zone
|
||||
assert result[0].project_id == GCP_PROJECT_ID
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user