feat(gcp): add check to enforce On Host Maintenance set to MIGRATE (#9834)
@@ -20,6 +20,7 @@ All notable changes to the **Prowler SDK** are documented in this file.
- `compute_image_not_publicly_shared` check for GCP provider [(#9718)](https://github.com/prowler-cloud/prowler/pull/9718)
- `TLS/SSL`, `records` and `email` checks for `zone` service [(#9424)](https://github.com/prowler-cloud/prowler/pull/9424)
- `compute_snapshot_not_outdated` check for GCP provider [(#9774)](https://github.com/prowler-cloud/prowler/pull/9774)
- `compute_instance_on_host_maintenance_migrate` check for GCP provider [(#9834)](https://github.com/prowler-cloud/prowler/pull/9834)
- CIS 1.12 compliance framework for Kubernetes [(#9778)](https://github.com/prowler-cloud/prowler/pull/9778)
- CIS 6.0 for M365 provider [(#9779)](https://github.com/prowler-cloud/prowler/pull/9779)
- CIS 5.0 compliance framework for the Azure provider [(#9777)](https://github.com/prowler-cloud/prowler/pull/9777)
@@ -0,0 +1,39 @@
{
  "Provider": "gcp",
  "CheckID": "compute_instance_on_host_maintenance_migrate",
  "CheckTitle": "Compute Engine VM instance has On Host Maintenance set to MIGRATE",
  "CheckType": [],
  "ServiceName": "compute",
  "SubServiceName": "",
  "ResourceIdTemplate": "",
  "Severity": "medium",
  "ResourceType": "compute.googleapis.com/Instance",
  "ResourceGroup": "compute",
  "Description": "**Compute Engine VM instances** should have their **On Host Maintenance** setting configured to `MIGRATE` for live migration during host maintenance events, ensuring continuous availability without downtime.",
  "Risk": "VM instances configured with On Host Maintenance set to `TERMINATE` will be shut down during host maintenance events, causing **service interruptions** and **unplanned downtime**. This can impact application availability and may require manual intervention to restart services.",
  "RelatedUrl": "",
  "AdditionalURLs": [
    "https://www.trendmicro.com/cloudoneconformity/knowledge-base/gcp/ComputeEngine/configure-maintenance-behavior.html",
    "https://cloud.google.com/compute/docs/instances/setting-instance-scheduling-options"
  ],
  "Remediation": {
    "Code": {
      "CLI": "gcloud compute instances set-scheduling <INSTANCE_NAME> --maintenance-policy=MIGRATE --zone=<ZONE>",
      "NativeIaC": "",
      "Other": "1. Open Google Cloud Console and navigate to Compute Engine > VM instances\n2. Click on the instance name to view details\n3. Click 'Edit' at the top of the page\n4. Under 'Availability policies', set 'On host maintenance' to 'Migrate VM instance (recommended)'\n5. Click 'Save' at the bottom of the page",
      "Terraform": "```hcl\n# Example: configure On Host Maintenance to MIGRATE for a Compute Engine VM instance\nresource \"google_compute_instance\" \"example\" {\n  name         = var.instance_name\n  machine_type = var.machine_type\n  zone         = var.zone\n\n  scheduling {\n    # Live migrate during host maintenance events\n    on_host_maintenance = \"MIGRATE\"\n  }\n}\n```"
    },
    "Recommendation": {
      "Text": "Configure VM instances to use **live migration** during host maintenance events to ensure continuous availability. This is the recommended setting for production workloads that require high availability.",
      "Url": "https://hub.prowler.com/check/compute_instance_on_host_maintenance_migrate"
    }
  },
  "Categories": [
    "resilience"
  ],
  "DependsOn": [],
  "RelatedTo": [
    "compute_instance_automatic_restart_enabled"
  ],
  "Notes": "Preemptible and Spot VMs cannot use MIGRATE and will always be TERMINATE. The default value for this setting is MIGRATE."
}
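Beyond the console steps and the `set-scheduling` command in the remediation above, the current value can also be read programmatically. Below is a minimal sketch, assuming `google-api-python-client` and Application Default Credentials; the project, zone, and instance names are placeholders, and the `scheduling.onHostMaintenance` field path matches the mocked API payloads used in the tests further down.

```python
# Sketch: read a VM's On Host Maintenance setting via the Compute Engine API
# (assumes google-api-python-client and Application Default Credentials).
from googleapiclient import discovery


def get_on_host_maintenance(project: str, zone: str, instance: str) -> str:
    compute = discovery.build("compute", "v1")
    resource = (
        compute.instances()
        .get(project=project, zone=zone, instance=instance)
        .execute()
    )
    # Compute Engine defaults to MIGRATE when the field is absent.
    return resource.get("scheduling", {}).get("onHostMaintenance", "MIGRATE")


# Placeholder identifiers:
# print(get_on_host_maintenance("my-project", "us-central1-a", "my-instance"))
```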
@@ -0,0 +1,41 @@
from prowler.lib.check.models import Check, Check_Report_GCP
from prowler.providers.gcp.services.compute.compute_client import compute_client


class compute_instance_on_host_maintenance_migrate(Check):
    """
    Ensure Compute Engine VM instances have On Host Maintenance set to MIGRATE.

    This check evaluates whether VM instances are configured to live migrate during
    host maintenance events, preventing downtime when Google performs maintenance.

    - PASS: VM instance has On Host Maintenance set to MIGRATE.
    - FAIL: VM instance has On Host Maintenance set to TERMINATE.
    """

    def execute(self) -> list[Check_Report_GCP]:
        findings = []
        for instance in compute_client.instances:
            report = Check_Report_GCP(metadata=self.metadata(), resource=instance)

            if instance.on_host_maintenance == "MIGRATE":
                report.status = "PASS"
                report.status_extended = f"VM Instance {instance.name} has On Host Maintenance set to MIGRATE."
            else:
                report.status = "FAIL"
                if instance.preemptible or instance.provisioning_model == "SPOT":
                    vm_type = "preemptible" if instance.preemptible else "Spot"
                    report.status_extended = (
                        f"VM Instance {instance.name} is a {vm_type} VM and has On Host Maintenance set to TERMINATE. "
                        f"{vm_type.capitalize()} VMs cannot use MIGRATE and must always use TERMINATE. "
                        f"If high availability is required, consider using a non-preemptible VM instead."
                    )
                else:
                    report.status_extended = (
                        f"VM Instance {instance.name} has On Host Maintenance set to "
                        f"{instance.on_host_maintenance} instead of MIGRATE."
                    )

            findings.append(report)

        return findings
@@ -191,6 +191,9 @@ class Compute(GCPService):
                            "deletionProtection", False
                        ),
                        network_interfaces=network_interfaces,
                        on_host_maintenance=instance.get("scheduling", {}).get(
                            "onHostMaintenance", "MIGRATE"
                        ),
                    )
                )
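A quick illustration of the fallback above (hypothetical payloads, not part of the change): when an API response omits the `scheduling` block entirely, the parser resolves the value to `MIGRATE`, which is also Compute Engine's default, so such instances are reported as PASS.

```python
# Hypothetical API payloads illustrating the parser's fallback behaviour.
explicit = {"scheduling": {"onHostMaintenance": "TERMINATE"}}
missing = {}  # scheduling block absent from the API response

for payload in (explicit, missing):
    value = payload.get("scheduling", {}).get("onHostMaintenance", "MIGRATE")
    print(value)  # prints "TERMINATE", then "MIGRATE"
```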
@@ -690,6 +693,7 @@ class Instance(BaseModel):
    provisioning_model: str = "STANDARD"
    deletion_protection: bool = False
    network_interfaces: list[NetworkInterface] = []
    on_host_maintenance: str = "MIGRATE"


class Network(BaseModel):
@@ -771,6 +771,7 @@ def mock_api_instances_calls(client: MagicMock, service: str):
                "automaticRestart": False,
                "preemptible": False,
                "provisioningModel": "STANDARD",
                "onHostMaintenance": "MIGRATE",
            },
        },
        {
@@ -802,6 +803,7 @@ def mock_api_instances_calls(client: MagicMock, service: str):
                "automaticRestart": False,
                "preemptible": False,
                "provisioningModel": "STANDARD",
                "onHostMaintenance": "TERMINATE",
            },
        },
    ]
@@ -0,0 +1,515 @@
from unittest import mock

from tests.providers.gcp.gcp_fixtures import GCP_PROJECT_ID, set_mocked_gcp_provider


class TestComputeInstanceOnHostMaintenanceMigrate:
    def test_compute_no_instances(self):
        compute_client = mock.MagicMock()
        compute_client.instances = []

        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=set_mocked_gcp_provider(),
            ),
            mock.patch(
                "prowler.providers.gcp.services.compute.compute_instance_on_host_maintenance_migrate.compute_instance_on_host_maintenance_migrate.compute_client",
                new=compute_client,
            ),
        ):
            from prowler.providers.gcp.services.compute.compute_instance_on_host_maintenance_migrate.compute_instance_on_host_maintenance_migrate import (
                compute_instance_on_host_maintenance_migrate,
            )

            check = compute_instance_on_host_maintenance_migrate()
            result = check.execute()
            assert len(result) == 0

    def test_instance_with_on_host_maintenance_migrate(self):
        compute_client = mock.MagicMock()

        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=set_mocked_gcp_provider(),
            ),
            mock.patch(
                "prowler.providers.gcp.services.compute.compute_instance_on_host_maintenance_migrate.compute_instance_on_host_maintenance_migrate.compute_client",
                new=compute_client,
            ),
        ):
            from prowler.providers.gcp.services.compute.compute_instance_on_host_maintenance_migrate.compute_instance_on_host_maintenance_migrate import (
                compute_instance_on_host_maintenance_migrate,
            )
            from prowler.providers.gcp.services.compute.compute_service import Instance

            compute_client.project_ids = [GCP_PROJECT_ID]
            compute_client.instances = [
                Instance(
                    name="test-instance",
                    id="1234567890",
                    zone="us-central1-a",
                    region="us-central1",
                    public_ip=True,
                    metadata={},
                    shielded_enabled_vtpm=True,
                    shielded_enabled_integrity_monitoring=True,
                    confidential_computing=False,
                    service_accounts=[],
                    ip_forward=False,
                    disks_encryption=[("disk1", False)],
                    automatic_restart=True,
                    project_id=GCP_PROJECT_ID,
                    on_host_maintenance="MIGRATE",
                )
            ]

            check = compute_instance_on_host_maintenance_migrate()
            result = check.execute()

            assert len(result) == 1
            assert result[0].status == "PASS"
            assert (
                result[0].status_extended
                == "VM Instance test-instance has On Host Maintenance set to MIGRATE."
            )
            assert result[0].resource_id == compute_client.instances[0].id
            assert result[0].resource_name == compute_client.instances[0].name
            assert result[0].location == "us-central1"
            assert result[0].project_id == GCP_PROJECT_ID

    def test_instance_with_on_host_maintenance_terminate(self):
        compute_client = mock.MagicMock()

        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=set_mocked_gcp_provider(),
            ),
            mock.patch(
                "prowler.providers.gcp.services.compute.compute_instance_on_host_maintenance_migrate.compute_instance_on_host_maintenance_migrate.compute_client",
                new=compute_client,
            ),
        ):
            from prowler.providers.gcp.services.compute.compute_instance_on_host_maintenance_migrate.compute_instance_on_host_maintenance_migrate import (
                compute_instance_on_host_maintenance_migrate,
            )
            from prowler.providers.gcp.services.compute.compute_service import Instance

            compute_client.project_ids = [GCP_PROJECT_ID]
            compute_client.instances = [
                Instance(
                    name="test-instance-terminate",
                    id="0987654321",
                    zone="us-west1-b",
                    region="us-west1",
                    public_ip=False,
                    metadata={},
                    shielded_enabled_vtpm=False,
                    shielded_enabled_integrity_monitoring=False,
                    confidential_computing=False,
                    service_accounts=[],
                    ip_forward=False,
                    disks_encryption=[],
                    automatic_restart=False,
                    project_id=GCP_PROJECT_ID,
                    on_host_maintenance="TERMINATE",
                )
            ]

            check = compute_instance_on_host_maintenance_migrate()
            result = check.execute()

            assert len(result) == 1
            assert result[0].status == "FAIL"
            assert (
                result[0].status_extended
                == "VM Instance test-instance-terminate has On Host Maintenance set to TERMINATE instead of MIGRATE."
            )
            assert result[0].resource_id == compute_client.instances[0].id
            assert result[0].resource_name == compute_client.instances[0].name
            assert result[0].location == "us-west1"
            assert result[0].project_id == GCP_PROJECT_ID

    def test_multiple_instances_mixed(self):
        compute_client = mock.MagicMock()

        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=set_mocked_gcp_provider(),
            ),
            mock.patch(
                "prowler.providers.gcp.services.compute.compute_instance_on_host_maintenance_migrate.compute_instance_on_host_maintenance_migrate.compute_client",
                new=compute_client,
            ),
        ):
            from prowler.providers.gcp.services.compute.compute_instance_on_host_maintenance_migrate.compute_instance_on_host_maintenance_migrate import (
                compute_instance_on_host_maintenance_migrate,
            )
            from prowler.providers.gcp.services.compute.compute_service import Instance

            compute_client.project_ids = [GCP_PROJECT_ID]
            compute_client.instances = [
                Instance(
                    name="compliant-instance",
                    id="1111111111",
                    zone="us-central1-a",
                    region="us-central1",
                    public_ip=True,
                    metadata={},
                    shielded_enabled_vtpm=True,
                    shielded_enabled_integrity_monitoring=True,
                    confidential_computing=False,
                    service_accounts=[],
                    ip_forward=False,
                    disks_encryption=[],
                    automatic_restart=True,
                    project_id=GCP_PROJECT_ID,
                    on_host_maintenance="MIGRATE",
                ),
                Instance(
                    name="non-compliant-instance",
                    id="2222222222",
                    zone="us-west1-b",
                    region="us-west1",
                    public_ip=False,
                    metadata={},
                    shielded_enabled_vtpm=False,
                    shielded_enabled_integrity_monitoring=False,
                    confidential_computing=False,
                    service_accounts=[],
                    ip_forward=False,
                    disks_encryption=[],
                    automatic_restart=False,
                    project_id=GCP_PROJECT_ID,
                    on_host_maintenance="TERMINATE",
                ),
            ]

            check = compute_instance_on_host_maintenance_migrate()
            result = check.execute()

            assert len(result) == 2

            compliant_result = next(r for r in result if r.resource_id == "1111111111")
            non_compliant_result = next(
                r for r in result if r.resource_id == "2222222222"
            )

            assert compliant_result.status == "PASS"
            assert (
                compliant_result.status_extended
                == "VM Instance compliant-instance has On Host Maintenance set to MIGRATE."
            )
            assert compliant_result.resource_id == "1111111111"
            assert compliant_result.resource_name == "compliant-instance"
            assert compliant_result.location == "us-central1"
            assert compliant_result.project_id == GCP_PROJECT_ID

            assert non_compliant_result.status == "FAIL"
            assert (
                non_compliant_result.status_extended
                == "VM Instance non-compliant-instance has On Host Maintenance set to TERMINATE instead of MIGRATE."
            )
            assert non_compliant_result.resource_id == "2222222222"
            assert non_compliant_result.resource_name == "non-compliant-instance"
            assert non_compliant_result.location == "us-west1"
            assert non_compliant_result.project_id == GCP_PROJECT_ID

    def test_instance_with_default_on_host_maintenance(self):
        compute_client = mock.MagicMock()

        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=set_mocked_gcp_provider(),
            ),
            mock.patch(
                "prowler.providers.gcp.services.compute.compute_instance_on_host_maintenance_migrate.compute_instance_on_host_maintenance_migrate.compute_client",
                new=compute_client,
            ),
        ):
            from prowler.providers.gcp.services.compute.compute_instance_on_host_maintenance_migrate.compute_instance_on_host_maintenance_migrate import (
                compute_instance_on_host_maintenance_migrate,
            )
            from prowler.providers.gcp.services.compute.compute_service import Instance

            compute_client.project_ids = [GCP_PROJECT_ID]
            compute_client.instances = [
                Instance(
                    name="default-instance",
                    id="3333333333",
                    zone="us-east1-b",
                    region="us-east1",
                    public_ip=False,
                    metadata={},
                    shielded_enabled_vtpm=True,
                    shielded_enabled_integrity_monitoring=True,
                    confidential_computing=False,
                    service_accounts=[],
                    ip_forward=False,
                    disks_encryption=[],
                    automatic_restart=True,
                    project_id=GCP_PROJECT_ID,
                )
            ]

            check = compute_instance_on_host_maintenance_migrate()
            result = check.execute()

            assert len(result) == 1
            assert result[0].status == "PASS"
            assert (
                result[0].status_extended
                == "VM Instance default-instance has On Host Maintenance set to MIGRATE."
            )
            assert result[0].resource_id == "3333333333"
            assert result[0].resource_name == "default-instance"
            assert result[0].location == "us-east1"
            assert result[0].project_id == GCP_PROJECT_ID

    def test_preemptible_instance_fails_with_explanation(self):
        compute_client = mock.MagicMock()

        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=set_mocked_gcp_provider(),
            ),
            mock.patch(
                "prowler.providers.gcp.services.compute.compute_instance_on_host_maintenance_migrate.compute_instance_on_host_maintenance_migrate.compute_client",
                new=compute_client,
            ),
        ):
            from prowler.providers.gcp.services.compute.compute_instance_on_host_maintenance_migrate.compute_instance_on_host_maintenance_migrate import (
                compute_instance_on_host_maintenance_migrate,
            )
            from prowler.providers.gcp.services.compute.compute_service import Instance

            compute_client.project_ids = [GCP_PROJECT_ID]
            compute_client.instances = [
                Instance(
                    name="preemptible-instance",
                    id="4444444444",
                    zone="us-central1-a",
                    region="us-central1",
                    public_ip=False,
                    metadata={},
                    shielded_enabled_vtpm=False,
                    shielded_enabled_integrity_monitoring=False,
                    confidential_computing=False,
                    service_accounts=[],
                    ip_forward=False,
                    disks_encryption=[],
                    automatic_restart=False,
                    project_id=GCP_PROJECT_ID,
                    preemptible=True,
                    provisioning_model="STANDARD",
                    on_host_maintenance="TERMINATE",
                )
            ]

            check = compute_instance_on_host_maintenance_migrate()
            result = check.execute()

            assert len(result) == 1
            assert result[0].status == "FAIL"
            assert (
                result[0].status_extended
                == "VM Instance preemptible-instance is a preemptible VM and has On Host Maintenance set to TERMINATE. Preemptible VMs cannot use MIGRATE and must always use TERMINATE. If high availability is required, consider using a non-preemptible VM instead."
            )
            assert result[0].resource_id == "4444444444"
            assert result[0].resource_name == "preemptible-instance"

    def test_spot_instance_fails_with_explanation(self):
        compute_client = mock.MagicMock()

        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=set_mocked_gcp_provider(),
            ),
            mock.patch(
                "prowler.providers.gcp.services.compute.compute_instance_on_host_maintenance_migrate.compute_instance_on_host_maintenance_migrate.compute_client",
                new=compute_client,
            ),
        ):
            from prowler.providers.gcp.services.compute.compute_instance_on_host_maintenance_migrate.compute_instance_on_host_maintenance_migrate import (
                compute_instance_on_host_maintenance_migrate,
            )
            from prowler.providers.gcp.services.compute.compute_service import Instance

            compute_client.project_ids = [GCP_PROJECT_ID]
            compute_client.instances = [
                Instance(
                    name="spot-instance",
                    id="5555555555",
                    zone="us-west1-a",
                    region="us-west1",
                    public_ip=False,
                    metadata={},
                    shielded_enabled_vtpm=False,
                    shielded_enabled_integrity_monitoring=False,
                    confidential_computing=False,
                    service_accounts=[],
                    ip_forward=False,
                    disks_encryption=[],
                    automatic_restart=False,
                    project_id=GCP_PROJECT_ID,
                    preemptible=False,
                    provisioning_model="SPOT",
                    on_host_maintenance="TERMINATE",
                )
            ]

            check = compute_instance_on_host_maintenance_migrate()
            result = check.execute()

            assert len(result) == 1
            assert result[0].status == "FAIL"
            assert (
                result[0].status_extended
                == "VM Instance spot-instance is a Spot VM and has On Host Maintenance set to TERMINATE. Spot VMs cannot use MIGRATE and must always use TERMINATE. If high availability is required, consider using a non-preemptible VM instead."
            )
            assert result[0].resource_id == "5555555555"
            assert result[0].resource_name == "spot-instance"

    def test_mixed_with_preemptible_and_spot(self):
        compute_client = mock.MagicMock()

        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=set_mocked_gcp_provider(),
            ),
            mock.patch(
                "prowler.providers.gcp.services.compute.compute_instance_on_host_maintenance_migrate.compute_instance_on_host_maintenance_migrate.compute_client",
                new=compute_client,
            ),
        ):
            from prowler.providers.gcp.services.compute.compute_instance_on_host_maintenance_migrate.compute_instance_on_host_maintenance_migrate import (
                compute_instance_on_host_maintenance_migrate,
            )
            from prowler.providers.gcp.services.compute.compute_service import Instance

            compute_client.project_ids = [GCP_PROJECT_ID]
            compute_client.instances = [
                Instance(
                    name="regular-instance-pass",
                    id="6666666666",
                    zone="us-central1-a",
                    region="us-central1",
                    public_ip=True,
                    metadata={},
                    shielded_enabled_vtpm=True,
                    shielded_enabled_integrity_monitoring=True,
                    confidential_computing=False,
                    service_accounts=[],
                    ip_forward=False,
                    disks_encryption=[],
                    automatic_restart=True,
                    project_id=GCP_PROJECT_ID,
                    preemptible=False,
                    provisioning_model="STANDARD",
                    on_host_maintenance="MIGRATE",
                ),
                Instance(
                    name="preemptible-instance",
                    id="7777777777",
                    zone="us-west1-a",
                    region="us-west1",
                    public_ip=False,
                    metadata={},
                    shielded_enabled_vtpm=False,
                    shielded_enabled_integrity_monitoring=False,
                    confidential_computing=False,
                    service_accounts=[],
                    ip_forward=False,
                    disks_encryption=[],
                    automatic_restart=False,
                    project_id=GCP_PROJECT_ID,
                    preemptible=True,
                    provisioning_model="STANDARD",
                    on_host_maintenance="TERMINATE",
                ),
                Instance(
                    name="spot-instance",
                    id="8888888888",
                    zone="us-east1-b",
                    region="us-east1",
                    public_ip=False,
                    metadata={},
                    shielded_enabled_vtpm=False,
                    shielded_enabled_integrity_monitoring=False,
                    confidential_computing=False,
                    service_accounts=[],
                    ip_forward=False,
                    disks_encryption=[],
                    automatic_restart=False,
                    project_id=GCP_PROJECT_ID,
                    preemptible=False,
                    provisioning_model="SPOT",
                    on_host_maintenance="TERMINATE",
                ),
                Instance(
                    name="regular-instance-fail",
                    id="9999999999",
                    zone="us-central1-b",
                    region="us-central1",
                    public_ip=False,
                    metadata={},
                    shielded_enabled_vtpm=False,
                    shielded_enabled_integrity_monitoring=False,
                    confidential_computing=False,
                    service_accounts=[],
                    ip_forward=False,
                    disks_encryption=[],
                    automatic_restart=False,
                    project_id=GCP_PROJECT_ID,
                    preemptible=False,
                    provisioning_model="STANDARD",
                    on_host_maintenance="TERMINATE",
                ),
            ]

            check = compute_instance_on_host_maintenance_migrate()
            result = check.execute()

            assert len(result) == 4

            pass_result = next(r for r in result if r.resource_id == "6666666666")
            preemptible_result = next(
                r for r in result if r.resource_id == "7777777777"
            )
            spot_result = next(r for r in result if r.resource_id == "8888888888")
            fail_result = next(r for r in result if r.resource_id == "9999999999")

            assert pass_result.status == "PASS"
            assert (
                pass_result.status_extended
                == "VM Instance regular-instance-pass has On Host Maintenance set to MIGRATE."
            )
            assert pass_result.resource_name == "regular-instance-pass"

            assert preemptible_result.status == "FAIL"
            assert (
                preemptible_result.status_extended
                == "VM Instance preemptible-instance is a preemptible VM and has On Host Maintenance set to TERMINATE. Preemptible VMs cannot use MIGRATE and must always use TERMINATE. If high availability is required, consider using a non-preemptible VM instead."
            )
            assert preemptible_result.resource_name == "preemptible-instance"

            assert spot_result.status == "FAIL"
            assert (
                spot_result.status_extended
                == "VM Instance spot-instance is a Spot VM and has On Host Maintenance set to TERMINATE. Spot VMs cannot use MIGRATE and must always use TERMINATE. If high availability is required, consider using a non-preemptible VM instead."
            )
            assert spot_result.resource_name == "spot-instance"

            assert fail_result.status == "FAIL"
            assert (
                fail_result.status_extended
                == "VM Instance regular-instance-fail has On Host Maintenance set to TERMINATE instead of MIGRATE."
            )
            assert fail_result.resource_name == "regular-instance-fail"