mirror of
https://github.com/prowler-cloud/prowler.git
synced 2026-04-06 02:58:15 +00:00
Merge branch 'cloudflare-pr3-bot-config-checks' into cloudflare-pr4-dns-firewall-waf
And add ResourceGroup
This commit is contained in:
@@ -18,10 +18,12 @@ All notable changes to the **Prowler SDK** are documented in this file.
|
||||
- `Cloudflare` provider with critical security checks [(#9423)](https://github.com/prowler-cloud/prowler/pull/9423)
|
||||
- `compute_instance_single_network_interface` check for GCP provider [(#9702)](https://github.com/prowler-cloud/prowler/pull/9702)
|
||||
- `compute_image_not_publicly_shared` check for GCP provider [(#9718)](https://github.com/prowler-cloud/prowler/pull/9718)
|
||||
- `TLS/SSL`, `records` and `email` checks for `zones` service [(#9424)](https://github.com/prowler-cloud/prowler/pull/9424)
|
||||
- `TLS/SSL`, `records` and `email` checks for `zone` service [(#9424)](https://github.com/prowler-cloud/prowler/pull/9424)
|
||||
- `compute_snapshot_not_outdated` check for GCP provider [(#9774)](https://github.com/prowler-cloud/prowler/pull/9774)
|
||||
- CIS 1.12 compliance framework for Kubernetes [(#9778)](https://github.com/prowler-cloud/prowler/pull/9778)
|
||||
- CIS 6.0 for M365 provider [(#9779)](https://github.com/prowler-cloud/prowler/pull/9779)
|
||||
- CIS 5.0 compliance framework for the Azure provider [(#9777)](https://github.com/prowler-cloud/prowler/pull/9777)
|
||||
- `Cloudflare` Bot protection, WAF, Privacy, Anti-Scraping and Zone configuration checks [(#9425)](https://github.com/prowler-cloud/prowler/pull/9425)
|
||||
|
||||
### Changed
|
||||
- Update AWS Step Functions service metadata to new format [(#9432)](https://github.com/prowler-cloud/prowler/pull/9432)
|
||||
|
||||
@@ -510,6 +510,9 @@ gcp:
|
||||
# gcp.compute_instance_group_multiple_zones
|
||||
# Minimum number of zones a MIG should span for high availability
|
||||
mig_min_zones: 2
|
||||
# gcp.compute_snapshot_not_outdated
|
||||
# Maximum age in days for disk snapshots before they are considered outdated
|
||||
max_snapshot_age_days: 90
|
||||
# GCP Service Account and user-managed keys unused configuration
|
||||
# gcp.iam_service_account_unused
|
||||
# gcp.iam_sa_user_managed_key_unused
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "high",
|
||||
"ResourceType": "DNSRecord",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Cloudflare DNS CNAME records** are assessed for **dangling CNAME** vulnerabilities by checking if the target domain resolves to a valid address, preventing **subdomain takeover** attacks.",
|
||||
"Risk": "Dangling **CNAME records** pointing to non-existent targets create subdomain takeover vulnerabilities.\n- **Confidentiality**: attackers can host malicious content on your subdomain to phish users\n- **Integrity**: attackers can impersonate your organization and damage brand reputation\n- **Availability**: legitimate services may be disrupted or redirected",
|
||||
"RelatedUrl": "",
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "high",
|
||||
"ResourceType": "DNSRecord",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Cloudflare DNS records** are assessed for **internal IP exposure** by checking if A or AAAA records point to private, loopback, or reserved IP addresses which could **leak internal network structure**.",
|
||||
"Risk": "DNS records exposing **internal IP addresses** leak sensitive network information.\n- **Confidentiality**: reveals internal network topology and addressing schemes to attackers\n- **Integrity**: provides reconnaissance data for targeted attacks on internal infrastructure\n- **Availability**: internal IPs in public DNS may indicate misconfiguration affecting service routing",
|
||||
"RelatedUrl": "",
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "DNSRecord",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Cloudflare DNS records** are assessed for **wildcard usage** by checking if A, AAAA, or CNAME records use wildcard entries (*.example.com) which can **increase attack surface** and expose unintended services.",
|
||||
"Risk": "**Wildcard DNS records** can expose unintended services and increase attack surface.\n- **Confidentiality**: any subdomain resolves, potentially exposing internal naming conventions\n- **Integrity**: attackers can access unintended services via arbitrary subdomains\n- **Availability**: wildcard records may route traffic to services not designed for public access",
|
||||
"RelatedUrl": "",
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "DNSRecord",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Cloudflare DNS records** are assessed for **proxy configuration** by checking if A, AAAA, and CNAME records are proxied through Cloudflare to benefit from **DDoS protection**, **WAF**, and **caching** capabilities.",
|
||||
"Risk": "Unproxied **DNS records** expose origin server IP addresses directly to the internet.\n- **Confidentiality**: origin IP exposure enables targeted reconnaissance and attacks\n- **Integrity**: direct access to origin bypasses WAF and security controls\n- **Availability**: origin is exposed to DDoS attacks without Cloudflare protection",
|
||||
"RelatedUrl": "",
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "high",
|
||||
"ResourceType": "Zone",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Cloudflare zones** are assessed for **Always Online** configuration by checking if it is disabled to prevent serving **stale cached content** when the origin server is unavailable, which could expose outdated or sensitive information.",
|
||||
"Risk": "With **Always Online** enabled, Cloudflare serves cached pages when the origin is unavailable.\n- **Confidentiality**: stale cache may expose sensitive information that was subsequently removed\n- **Integrity**: outdated content may contain incorrect or superseded information\n- **Availability**: reliance on cached content masks origin failures requiring attention",
|
||||
"RelatedUrl": "",
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "Zone",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Cloudflare zones** are assessed for **Automatic HTTPS Rewrites** configuration by checking if it is enabled to automatically rewrite insecure HTTP links to HTTPS, resolving **mixed content issues** and enhancing site security.",
|
||||
"Risk": "Without **Automatic HTTPS Rewrites**, pages may contain mixed content where HTTP resources load over HTTPS pages.\n- **Confidentiality**: insecure resources can be intercepted and modified by attackers\n- **Integrity**: browsers block or warn about mixed content, indicating potential tampering\n- **User Experience**: security warnings degrade trust and some browsers block mixed content entirely",
|
||||
"RelatedUrl": "",
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "Zone",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Cloudflare zones** are assessed for **Bot Fight Mode** configuration by checking if it is enabled to detect and mitigate **automated bot traffic** targeting the zone through browser integrity checks.",
|
||||
"Risk": "Without **Bot Fight Mode**, zones are vulnerable to automated attacks.\n- **Confidentiality**: web scraping bots can harvest sensitive data from your site\n- **Integrity**: credential stuffing attacks can compromise user accounts\n- **Availability**: bot traffic can overwhelm resources causing service degradation",
|
||||
"RelatedUrl": "",
|
||||
|
||||
@@ -5,17 +5,17 @@ from prowler.providers.cloudflare.services.zone.zone_client import zone_client
|
||||
class zone_bot_fight_mode_enabled(Check):
|
||||
"""Ensure that Bot Fight Mode is enabled for Cloudflare zones.
|
||||
|
||||
Bot Fight Mode (Browser Integrity Check) detects and mitigates automated bot
|
||||
traffic by analyzing browser characteristics and behavior patterns. It challenges
|
||||
requests that appear to come from bots or clients with missing/invalid browser
|
||||
headers, protecting against scraping, spam, and automated attacks.
|
||||
Bot Fight Mode is a free Cloudflare feature that detects and mitigates automated
|
||||
bot traffic. It uses JavaScript challenges and behavioral analysis to identify
|
||||
bots and block malicious automated traffic, protecting against scraping, spam,
|
||||
credential stuffing, and other automated attacks.
|
||||
"""
|
||||
|
||||
def execute(self) -> list[CheckReportCloudflare]:
|
||||
"""Execute the Bot Fight Mode enabled check.
|
||||
|
||||
Iterates through all Cloudflare zones and verifies that Bot Fight Mode
|
||||
(Browser Integrity Check) is enabled. This feature helps identify and
|
||||
is enabled via the Bot Management API. This feature helps identify and
|
||||
block malicious bot traffic.
|
||||
|
||||
Returns:
|
||||
@@ -28,12 +28,15 @@ class zone_bot_fight_mode_enabled(Check):
|
||||
metadata=self.metadata(),
|
||||
resource=zone,
|
||||
)
|
||||
browser_check = (zone.settings.browser_check or "").lower()
|
||||
if browser_check == "on":
|
||||
if zone.settings.bot_fight_mode_enabled:
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"Bot Fight Mode (Browser Integrity Check) is enabled for zone {zone.name}."
|
||||
report.status_extended = (
|
||||
f"Bot Fight Mode is enabled for zone {zone.name}."
|
||||
)
|
||||
else:
|
||||
report.status = "FAIL"
|
||||
report.status_extended = f"Bot Fight Mode (Browser Integrity Check) is not enabled for zone {zone.name}."
|
||||
report.status_extended = (
|
||||
f"Bot Fight Mode is not enabled for zone {zone.name}."
|
||||
)
|
||||
findings.append(report)
|
||||
return findings
|
||||
|
||||
@@ -0,0 +1,36 @@
|
||||
{
|
||||
"Provider": "cloudflare",
|
||||
"CheckID": "zone_browser_integrity_check_enabled",
|
||||
"CheckTitle": "Cloudflare Zone Browser Integrity Check Is Enabled",
|
||||
"CheckType": [],
|
||||
"ServiceName": "zone",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "low",
|
||||
"ResourceType": "Zone",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Cloudflare zones** are assessed for **Browser Integrity Check** configuration by verifying that HTTP headers are analyzed to identify requests from bots or clients with missing/invalid browser signatures.",
|
||||
"Risk": "Without **Browser Integrity Check**, malformed or suspicious requests reach the origin.\n- **Confidentiality**: basic bots can access and scrape content without challenge\n- **Integrity**: requests with invalid headers may exploit application vulnerabilities\n- **Availability**: automated traffic without browser signatures consumes resources",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://developers.cloudflare.com/waf/tools/browser-integrity-check/"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Log in to the Cloudflare dashboard and select your account and domain\n2. Go to Security > Settings\n3. Enable Browser Integrity Check\n4. This feature is enabled by default on most Cloudflare plans",
|
||||
"Terraform": "```hcl\n# Enable Browser Integrity Check\nresource \"cloudflare_zone_settings_override\" \"browser_check\" {\n zone_id = \"<ZONE_ID>\"\n settings {\n browser_check = \"on\"\n }\n}\n```"
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Enable **Browser Integrity Check** to filter basic bot traffic.\n- Validates HTTP headers to identify non-browser requests\n- Challenges requests with missing or invalid browser signatures\n- Enabled by default on most Cloudflare plans\n- Low impact on legitimate users with standard browsers",
|
||||
"Url": "https://hub.prowler.com/checks/cloudflare/zone_browser_integrity_check_enabled"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"internet-exposed"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": "Browser Integrity Check is enabled by default on most Cloudflare plans. It provides basic protection against requests with invalid or missing browser headers."
|
||||
}
|
||||
@@ -0,0 +1,43 @@
|
||||
from prowler.lib.check.models import Check, CheckReportCloudflare
|
||||
from prowler.providers.cloudflare.services.zone.zone_client import zone_client
|
||||
|
||||
|
||||
class zone_browser_integrity_check_enabled(Check):
    """Ensure that Browser Integrity Check is enabled for Cloudflare zones.

    Browser Integrity Check inspects HTTP headers to spot requests coming
    from bots or from clients with missing/invalid browser signatures. It
    challenges suspicious requests that lack valid browser characteristics,
    protecting against basic automated attacks and malformed requests.
    """

    def execute(self) -> list[CheckReportCloudflare]:
        """Execute the Browser Integrity Check enabled check.

        Walks every Cloudflare zone and confirms whether Browser Integrity
        Check is turned on. This feature validates browser headers so that
        basic bot traffic can be filtered out before reaching the origin.

        Returns:
            A list of CheckReportCloudflare objects with PASS status if Browser
            Integrity Check is enabled, or FAIL status if it is disabled.
        """
        reports = []
        for zone in zone_client.zones.values():
            finding = CheckReportCloudflare(
                metadata=self.metadata(),
                resource=zone,
            )
            # Setting may be None; normalize to a lowercase string before comparing.
            setting_value = (zone.settings.browser_check or "").lower()
            if setting_value == "on":
                finding.status = "PASS"
                finding.status_extended = (
                    f"Browser Integrity Check is enabled for zone {zone.name}."
                )
            else:
                finding.status = "FAIL"
                finding.status_extended = (
                    f"Browser Integrity Check is not enabled for zone {zone.name}."
                )
            reports.append(finding)
        return reports
|
||||
@@ -1,14 +1,15 @@
|
||||
{
|
||||
"Provider": "cloudflare",
|
||||
"CheckID": "zone_challenge_passage_configured",
|
||||
"CheckTitle": "Challenge Passage is configured with an appropriate security window",
|
||||
"CheckTitle": "Cloudflare Zone Challenge Passage Is Configured Between 15 and 45 Minutes",
|
||||
"CheckType": [],
|
||||
"ServiceName": "zone",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "low",
|
||||
"ResourceType": "Zone",
|
||||
"Description": "**Cloudflare zones** are assessed for **Challenge Passage** (challenge TTL) configuration by checking if it is set to an appropriate value (recommended: 1 hour) to balance **security** with **user experience** for verified visitors.",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Cloudflare zones** are assessed for **Challenge Passage** (challenge TTL) configuration by checking if it is set between **15 minutes** and **45 minutes** to balance security with user experience.",
|
||||
"Risk": "Improperly configured **Challenge Passage** can impact security or user experience.\n- **Confidentiality**: TTL set too long may allow attackers extended access after passing initial challenge\n- **Integrity**: security controls become less effective with overly permissive TTL settings\n- **Availability**: TTL set too short causes excessive challenges degrading user experience",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
@@ -18,11 +19,11 @@
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Log in to the Cloudflare dashboard and select your account and domain\n2. Go to Security > Settings\n3. Scroll to Challenge Passage\n4. Set the value to an appropriate duration (recommended: 1 hour / 3600 seconds)\n5. Adjust based on your specific threat model and user experience requirements",
|
||||
"Terraform": "```hcl\n# Configure Challenge Passage with recommended 1-hour security window\nresource \"cloudflare_zone_settings_override\" \"challenge_passage\" {\n zone_id = \"<ZONE_ID>\"\n settings {\n challenge_ttl = 3600 # 1 hour: balances security with user experience\n }\n}\n```"
|
||||
"Other": "1. Log in to the Cloudflare dashboard and select your account and domain\n2. Go to Security > Settings\n3. Scroll to Challenge Passage\n4. Set the value between 15 and 45 minutes\n5. The default value of 30 minutes is recommended for most use cases",
|
||||
"Terraform": "```hcl\n# Configure Challenge Passage between 15-45 minutes\nresource \"cloudflare_zone_settings_override\" \"challenge_passage\" {\n zone_id = \"<ZONE_ID>\"\n settings {\n challenge_ttl = 1800 # 30 minutes - recommended default\n }\n}\n```"
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Configure **Challenge Passage** to balance security and usability.\n- A 1-hour window is generally recommended for most use cases\n- Adjust based on your threat model and user experience requirements\n- Shorter TTLs increase security but may frustrate legitimate users\n- Longer TTLs improve UX but give attackers more time after passing challenges",
|
||||
"Text": "Configure **Challenge Passage** between 15 and 45 minutes.\n- Values below 15 minutes may frustrate legitimate users with excessive challenges\n- Values above 45 minutes give attackers too much time after passing challenges\n- The default Cloudflare value of 30 minutes is recommended for most use cases\n- Adjust based on your specific threat model and user experience requirements",
|
||||
"Url": "https://hub.prowler.com/checks/cloudflare/zone_challenge_passage_configured"
|
||||
}
|
||||
},
|
||||
@@ -31,5 +32,5 @@
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": "Challenge Passage determines how long a visitor who passes a challenge can access the site without being challenged again. The default value is 30 minutes."
|
||||
"Notes": "Challenge Passage determines how long a visitor who passes a challenge can access the site without being challenged again. Setting this value too low can frustrate legitimate users with excessive security challenges, while setting it too high reduces security effectiveness. The default value is 30 minutes."
|
||||
}
|
||||
|
||||
@@ -3,43 +3,43 @@ from prowler.providers.cloudflare.services.zone.zone_client import zone_client
|
||||
|
||||
|
||||
class zone_challenge_passage_configured(Check):
|
||||
"""Ensure that Challenge Passage is configured appropriately for Cloudflare zones.
|
||||
"""Ensure that Challenge Passage is configured between 15 and 45 minutes for Cloudflare zones.
|
||||
|
||||
Challenge Passage (Challenge TTL) determines how long a visitor who has passed
|
||||
a security challenge can access the site before being challenged again. A value
|
||||
of 1 hour (3600 seconds) balances security with user experience, requiring
|
||||
re-verification periodically without excessive friction for legitimate users.
|
||||
between 15 and 45 minutes balances security with user experience.
|
||||
"""
|
||||
|
||||
def execute(self) -> list[CheckReportCloudflare]:
|
||||
"""Execute the Challenge Passage configured check.
|
||||
|
||||
Iterates through all Cloudflare zones and verifies that Challenge Passage
|
||||
is set to the recommended value of 1 hour (3600 seconds). This balances
|
||||
security requirements with user experience.
|
||||
is set between 15 and 45 minutes.
|
||||
|
||||
Returns:
|
||||
A list of CheckReportCloudflare objects with PASS status if Challenge
|
||||
Passage is set to 3600 seconds, or FAIL status if it differs.
|
||||
Passage is between 15 and 45 minutes, or FAIL status otherwise.
|
||||
"""
|
||||
findings = []
|
||||
# Recommended challenge TTL is 1 hour (3600 seconds)
|
||||
recommended_ttl = 3600
|
||||
min_minutes = 15
|
||||
max_minutes = 45
|
||||
|
||||
for zone in zone_client.zones.values():
|
||||
report = CheckReportCloudflare(
|
||||
metadata=self.metadata(),
|
||||
resource=zone,
|
||||
)
|
||||
challenge_ttl = zone.settings.challenge_ttl or 0
|
||||
if challenge_ttl == recommended_ttl:
|
||||
# API returns seconds, convert to minutes
|
||||
challenge_ttl_minutes = zone.settings.challenge_ttl // 60
|
||||
|
||||
if min_minutes <= challenge_ttl_minutes <= max_minutes:
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"Challenge Passage is set to {challenge_ttl} seconds for zone {zone.name}."
|
||||
report.status_extended = f"Challenge Passage is set to {challenge_ttl_minutes} minutes for zone {zone.name}."
|
||||
else:
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
f"Challenge Passage is set to {challenge_ttl} seconds for zone {zone.name} "
|
||||
f"(recommended: {recommended_ttl})."
|
||||
f"Challenge Passage is set to {challenge_ttl_minutes} minutes for zone {zone.name} "
|
||||
f"(recommended: between {min_minutes} and {max_minutes} minutes)."
|
||||
)
|
||||
findings.append(report)
|
||||
return findings
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "low",
|
||||
"ResourceType": "Zone",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Cloudflare zones** are assessed for **Development Mode** configuration by checking if it is disabled to ensure **caching**, **security features**, and **performance optimizations** are active in production environments.",
|
||||
"Risk": "With **Development Mode** enabled, Cloudflare bypasses caching and some optimizations.\n- **Confidentiality**: some security features may be affected or bypassed\n- **Integrity**: performance optimizations are disabled impacting site reliability\n- **Availability**: origin server is exposed to increased load without caching protection",
|
||||
"RelatedUrl": "",
|
||||
|
||||
@@ -37,8 +37,7 @@ class zone_development_mode_disabled(Check):
|
||||
else:
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
f"Development mode is enabled for zone {zone.name}. "
|
||||
"This bypasses Cloudflare caching and should be disabled in production."
|
||||
f"Development mode is enabled for zone {zone.name}."
|
||||
)
|
||||
findings.append(report)
|
||||
return findings
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "high",
|
||||
"ResourceType": "Zone",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Cloudflare zones** are assessed for **DNSSEC** configuration by checking if it is enabled to **cryptographically sign DNS responses** and protect against DNS spoofing and cache poisoning attacks.",
|
||||
"Risk": "Without **DNSSEC**, DNS responses can be spoofed or modified by attackers.\n- **Confidentiality**: users can be redirected to malicious sites that harvest credentials\n- **Integrity**: DNS hijacking enables man-in-the-middle attacks and content modification\n- **Availability**: cache poisoning can cause denial of service by directing traffic to non-existent servers",
|
||||
"RelatedUrl": "",
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "low",
|
||||
"ResourceType": "Zone",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Cloudflare zones** are assessed for **Email Obfuscation** (Scrape Shield) configuration by checking if it is enabled to protect email addresses on the website from **automated harvesting** by bots and spammers.",
|
||||
"Risk": "Without **Email Obfuscation**, email addresses displayed on your website can be harvested by bots.\n- **Confidentiality**: harvested emails become targets for spam and phishing campaigns\n- **Integrity**: employees may fall victim to targeted social engineering attacks\n- **Availability**: increased spam volume can overwhelm email systems",
|
||||
"RelatedUrl": "",
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "Zone",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Cloudflare zones** are assessed for **firewall blocking rules** by checking if custom rules use block, challenge, js_challenge, or managed_challenge actions to actively protect against threats rather than only logging.",
|
||||
"Risk": "Firewall rules configured only for **logging** provide visibility but no protection.\n- **Confidentiality**: malicious traffic can access and exfiltrate sensitive data\n- **Integrity**: application exploits can modify data without being blocked\n- **Availability**: credential stuffing and abuse attacks reach the origin unimpeded",
|
||||
"RelatedUrl": "",
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "low",
|
||||
"ResourceType": "Zone",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Cloudflare zones** are assessed for **Hotlink Protection** (Scrape Shield) configuration by checking if it is enabled to prevent other websites from directly linking to **images and media**, consuming bandwidth without authorization.",
|
||||
"Risk": "Without **Hotlink Protection**, external websites can embed your media directly.\n- **Confidentiality**: content may be used without proper attribution or permission\n- **Integrity**: unauthorized use of media may misrepresent your brand\n- **Availability**: bandwidth theft increases costs and may degrade performance",
|
||||
"RelatedUrl": "",
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "high",
|
||||
"ResourceType": "Zone",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Cloudflare zones** are assessed for **HTTP Strict Transport Security (HSTS)** by checking if it is enabled with a `max-age` of at least **6 months** (15768000 seconds) and **includes subdomains** to instruct browsers to always use HTTPS across the entire domain.",
|
||||
"Risk": "Without **HSTS**, browsers may initially connect over HTTP before redirecting to HTTPS.\n- **Confidentiality**: creates a window for SSL stripping attacks where attackers downgrade connections to unencrypted HTTP\n- **Integrity**: first request can be intercepted and modified before HTTPS redirect\n- **Session hijacking**: cookies and credentials may be captured during initial HTTP request",
|
||||
"RelatedUrl": "",
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "Zone",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Cloudflare zones** are assessed for **Always Use HTTPS** setting by checking if it is enabled to automatically redirect all **HTTP requests to HTTPS**, enforcing encrypted transport for all visitors.",
|
||||
"Risk": "Without **automatic HTTPS redirects**, users may access resources over unencrypted HTTP.\n- **Confidentiality**: traffic can be intercepted and read by attackers on the network path\n- **Integrity**: HTTP responses can be modified in transit (content injection, malware insertion)\n- **Authentication**: session cookies and credentials may be transmitted in plaintext",
|
||||
"RelatedUrl": "",
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "low",
|
||||
"ResourceType": "Zone",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Cloudflare zones** are assessed for **IP Geolocation** configuration by checking if it is enabled to add the **CF-IPCountry header** to requests, enabling geographic-based access controls, firewall rules, and analytics.",
|
||||
"Risk": "Without **IP Geolocation**, geographic-based security controls cannot be implemented.\n- **Confidentiality**: unable to restrict access from high-risk regions\n- **Integrity**: cannot enforce geographic data residency requirements\n- **Availability**: limited visibility into traffic origins for threat analysis",
|
||||
"RelatedUrl": "",
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "high",
|
||||
"ResourceType": "Zone",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Cloudflare zones** are assessed for **minimum TLS version** configuration by checking if the version is set to at least `TLS 1.2` to ensure connections use **secure, modern cryptographic protocols**.",
|
||||
"Risk": "Allowing **legacy TLS versions** (1.0, 1.1) exposes connections to known protocol vulnerabilities.\n- **Confidentiality**: BEAST, POODLE, and weak cipher suites can be exploited for traffic decryption\n- **Compliance**: TLS 1.0/1.1 are deprecated by PCI-DSS, NIST, and major browsers\n- **Integrity**: downgrade attacks can force weaker encryption that is susceptible to tampering",
|
||||
"RelatedUrl": "",
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "Zone",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Cloudflare zones** are assessed for **Rate Limiting** configuration by checking if rules are configured to protect against **DDoS attacks**, **brute force attempts**, and **API abuse**.",
|
||||
"Risk": "Without **Rate Limiting**, applications are vulnerable to volumetric attacks.\n- **Confidentiality**: credential brute forcing can compromise user accounts\n- **Integrity**: API abuse can manipulate data through excessive requests\n- **Availability**: volumetric attacks can exhaust resources causing service degradation",
|
||||
"RelatedUrl": "",
|
||||
|
||||
@@ -39,9 +39,9 @@ class zone_rate_limiting_enabled(Check):
|
||||
|
||||
if rate_limit_rules:
|
||||
report.status = "PASS"
|
||||
rules_str = ", ".join(rule.description for rule in rate_limit_rules)
|
||||
report.status_extended = (
|
||||
f"Rate limiting is configured for zone {zone.name} "
|
||||
f"({len(rate_limit_rules)} rule(s))."
|
||||
f"Rate limiting is configured for zone {zone.name}: {rules_str}."
|
||||
)
|
||||
else:
|
||||
report.status = "FAIL"
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "low",
|
||||
"ResourceType": "Zone",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Cloudflare zones** are assessed for **CAA (Certificate Authority Authorization)** DNS records by checking if they exist with **`issue` or `issuewild` tags** that specify which **certificate authorities** are permitted to issue SSL/TLS certificates for the domain.",
|
||||
"Risk": "Without **CAA** records or without `issue`/`issuewild` tags, any certificate authority can issue certificates for your domain.\n- **Confidentiality**: unauthorized certificates enable man-in-the-middle attacks\n- **Integrity**: attackers can impersonate your domain with fraudulently obtained certificates\n- **Trust**: CA compromise or social engineering can result in unauthorized certificate issuance\n- **Missing tags**: CAA records without `issue` or `issuewild` tags (e.g., only `iodef`) do not restrict certificate issuance",
|
||||
"RelatedUrl": "",
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "Zone",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Cloudflare zones** are assessed for **DKIM (DomainKeys Identified Mail)** records by checking if TXT records exist at `*._domainkey` subdomains containing a **cryptographically valid public key** in the `p=` parameter used to **verify email signatures**.",
|
||||
"Risk": "Without **DKIM** or with a revoked/empty public key, email recipients cannot verify that messages were sent by authorized servers.\n- **Confidentiality**: attackers can forge emails appearing to come from your domain\n- **Integrity**: no cryptographic proof that email content hasn't been modified in transit\n- **Reputation**: DMARC policies relying on DKIM will fail, affecting email deliverability\n- **Revoked keys**: A DKIM record with `p=` empty (e.g., `p=;`) indicates a revoked key that cannot authenticate emails",
|
||||
"RelatedUrl": "",
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "Zone",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Cloudflare zones** are assessed for **DMARC (Domain-based Message Authentication, Reporting, and Conformance)** records by checking if a TXT record exists at `_dmarc` subdomain with an **enforcement policy (`p=reject` or `p=quarantine`)** to actively block or quarantine spoofed emails.",
|
||||
"Risk": "Without **DMARC** or with a monitoring-only policy (`p=none`), there is no active protection against email spoofing.\n- **Confidentiality**: attackers can spoof emails for phishing campaigns to steal credentials\n- **Integrity**: no visibility into email authentication failures or abuse attempts\n- **Reputation**: domain reputation damage from spoofed emails sent in your name\n- **Monitoring-only policy**: `p=none` only generates reports but does not block or quarantine spoofed emails, providing no real protection",
|
||||
"RelatedUrl": "",
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "Zone",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Cloudflare zones** are assessed for **SPF (Sender Policy Framework)** records by checking if a TXT record exists that specifies which mail servers are **authorized to send email** on behalf of the domain, and verifies that the record uses a **strict policy (`-all`)** to reject unauthorized senders.",
|
||||
"Risk": "Without **SPF** or with a permissive policy (`~all`, `?all`, `+all`), attackers can forge emails appearing to come from your domain.\n- **Confidentiality**: phishing attacks can harvest sensitive information from recipients who trust spoofed emails\n- **Integrity**: brand reputation damage from fraudulent emails sent in your name\n- **Availability**: email deliverability issues as receiving servers may reject or quarantine legitimate emails\n- **Permissive policies**: `~all` (softfail) only marks emails as suspicious but does not reject them, `?all` (neutral) provides no protection",
|
||||
"RelatedUrl": "",
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "low",
|
||||
"ResourceType": "Zone",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Cloudflare zones** are assessed for **Under Attack Mode** configuration by checking if it is disabled during normal operations, as this mode performs additional security checks including an **interstitial JavaScript challenge page** that significantly impacts user experience.",
|
||||
"Risk": "Keeping **Under Attack Mode** permanently enabled causes operational issues.\n- **Availability**: all visitors face a 5-second interstitial challenge page before accessing the site\n- **Accessibility**: visitors without JavaScript support cannot access the site at all\n- **User Experience**: legitimate users experience unnecessary delays and third-party analytics show degraded performance",
|
||||
"RelatedUrl": "",
|
||||
|
||||
@@ -1,35 +0,0 @@
|
||||
{
|
||||
"Provider": "cloudflare",
|
||||
"CheckID": "zone_server_side_excludes_enabled",
|
||||
"CheckTitle": "Server Side Excludes is enabled",
|
||||
"CheckType": [],
|
||||
"ServiceName": "zone",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "low",
|
||||
"ResourceType": "Zone",
|
||||
"Description": "**Cloudflare zones** are assessed for **Server Side Excludes** (Scrape Shield) configuration by checking if it is enabled to automatically hide content wrapped in SSE tags from visitors identified as **suspicious bots or crawlers**.",
|
||||
"Risk": "Without **Server Side Excludes**, sensitive content remains visible to malicious bots.\n- **Confidentiality**: sensitive information marked for exclusion can be scraped by bots\n- **Integrity**: content may be indexed or misused by unauthorized crawlers\n- **Availability**: scraped data may be used to plan further attacks",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://developers.cloudflare.com/waf/tools/scrape-shield/server-side-excludes/"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Log in to the Cloudflare dashboard and select your account and domain\n2. Go to Scrape Shield (or Security > Settings in newer UI)\n3. Scroll to Server Side Excludes\n4. Toggle the setting to On\n5. Wrap sensitive content in SSE HTML comments in your page source",
|
||||
"Terraform": "```hcl\n# Enable Server Side Excludes to hide sensitive content from bots\nresource \"cloudflare_zone_settings_override\" \"server_side_excludes\" {\n zone_id = \"<ZONE_ID>\"\n settings {\n server_side_exclude = \"on\" # Hides SSE-wrapped content from suspicious visitors\n }\n}\n```"
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Enable **Server Side Excludes** and wrap sensitive content in SSE HTML comments.\n- Provides selective content protection while maintaining functionality for legitimate visitors\n- Use HTML comments to mark content for exclusion from suspicious visitors\n- Part of the Scrape Shield feature set for comprehensive protection\n- Combine with other anti-scraping measures for defense in depth",
|
||||
"Url": "https://hub.prowler.com/checks/cloudflare/zone_server_side_excludes_enabled"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"internet-exposed"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": "Server Side Excludes requires wrapping sensitive content with special HTML comments to mark content for exclusion. The syntax is: <!--sse-->sensitive content<!--/sse-->"
|
||||
}
|
||||
@@ -1,43 +0,0 @@
|
||||
from prowler.lib.check.models import Check, CheckReportCloudflare
|
||||
from prowler.providers.cloudflare.services.zone.zone_client import zone_client
|
||||
|
||||
|
||||
class zone_server_side_excludes_enabled(Check):
    """Verify that Server Side Excludes (SSE) is turned on for each zone.

    SSE belongs to Cloudflare's Scrape Shield feature set: content wrapped
    in SSE HTML comments is hidden from visitors that Cloudflare flags as
    suspicious bots or crawlers, shielding data such as email addresses
    and phone numbers from being scraped.
    """

    def execute(self) -> list[CheckReportCloudflare]:
        """Run the check over every known Cloudflare zone.

        Returns:
            One CheckReportCloudflare per zone: PASS when the
            ``server_side_exclude`` setting equals ``"on"``
            (case-insensitively), FAIL otherwise — including when the
            setting is unset (None).
        """
        reports = []
        for zone in zone_client.zones.values():
            report = CheckReportCloudflare(
                metadata=self.metadata(),
                resource=zone,
            )
            # Treat a missing setting as disabled.
            enabled = (zone.settings.server_side_exclude or "").lower() == "on"
            if enabled:
                report.status = "PASS"
                report.status_extended = (
                    f"Server Side Excludes is enabled for zone {zone.name}."
                )
            else:
                report.status = "FAIL"
                report.status_extended = (
                    f"Server Side Excludes is not enabled for zone {zone.name}."
                )
            reports.append(report)
        return reports
|
||||
@@ -45,6 +45,7 @@ class Zone(CloudflareService):
|
||||
self._get_zones_settings()
|
||||
self._get_zones_dnssec()
|
||||
self._get_zones_universal_ssl()
|
||||
self._get_zones_bot_management()
|
||||
self._get_zones_firewall_rules()
|
||||
self._get_zones_waf_rulesets()
|
||||
|
||||
@@ -152,6 +153,20 @@ class Zone(CloudflareService):
|
||||
f"{zone.id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
def _get_zones_bot_management(self) -> None:
|
||||
"""Get Bot Management settings for all zones."""
|
||||
logger.info("Zone - Getting Bot Management settings...")
|
||||
for zone in self.zones.values():
|
||||
try:
|
||||
bot_management = self.client.bot_management.get(zone_id=zone.id)
|
||||
zone.settings.bot_fight_mode_enabled = getattr(
|
||||
bot_management, "fight_mode", False
|
||||
)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{zone.id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
def _get_zones_firewall_rules(self) -> None:
|
||||
"""Get firewall rules for all zones."""
|
||||
logger.info("Zones - Getting firewall rules...")
|
||||
@@ -301,7 +316,7 @@ class Zone(CloudflareService):
|
||||
waf=settings.get("waf"),
|
||||
security_level=settings.get("security_level"),
|
||||
browser_check=settings.get("browser_check"),
|
||||
challenge_ttl=settings.get("challenge_ttl"),
|
||||
challenge_ttl=settings.get("challenge_ttl" or 0),
|
||||
ip_geolocation=settings.get("ip_geolocation"),
|
||||
email_obfuscation=settings.get("email_obfuscation"),
|
||||
server_side_exclude=settings.get("server_side_exclude"),
|
||||
@@ -374,6 +389,8 @@ class CloudflareZoneSettings(BaseModel):
|
||||
# Zone state
|
||||
development_mode: Optional[str] = None
|
||||
always_online: Optional[str] = None
|
||||
# Bot management
|
||||
bot_fight_mode_enabled: bool = False
|
||||
|
||||
|
||||
class CloudflareZone(BaseModel):
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "high",
|
||||
"ResourceType": "Zone",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Cloudflare zones** are assessed for **SSL/TLS encryption mode** by checking if the mode is set to `Full (Strict)` to ensure **end-to-end encryption** with certificate validation.",
|
||||
"Risk": "Without **strict SSL mode**, traffic between Cloudflare and origin may use unvalidated or unencrypted connections.\n- **Confidentiality**: sensitive data can be intercepted in transit via man-in-the-middle attacks\n- **Integrity**: responses can be modified without detection between Cloudflare and origin\n- **Compliance**: may violate PCI-DSS, HIPAA, and other regulatory requirements for encrypted transport",
|
||||
"RelatedUrl": "",
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "Zone",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Cloudflare zones** are assessed for **TLS 1.3** configuration by checking if it is enabled to benefit from **improved security** through simplified cipher suites and **faster handshakes** with zero round-trip time resumption.",
|
||||
"Risk": "Without **TLS 1.3**, connections use older TLS versions with less secure characteristics.\n- **Confidentiality**: legacy TLS versions have more complex cipher negotiations that may expose weaknesses\n- **Integrity**: older protocols lack protection against downgrade attacks that TLS 1.3 provides\n- **Performance**: slower handshakes impact user experience and increase latency",
|
||||
"RelatedUrl": "",
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "high",
|
||||
"ResourceType": "Zone",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Cloudflare zones** are assessed for **Universal SSL** configuration by checking if it is enabled to provide **free SSL/TLS certificates** for the domain and its subdomains, enabling secure HTTPS connections.",
|
||||
"Risk": "Without **Universal SSL**, visitors cannot establish HTTPS connections to your site.\n- **Confidentiality**: all traffic is unencrypted and vulnerable to interception and eavesdropping\n- **Integrity**: HTTP responses can be modified in transit by attackers (content injection, malware)\n- **Trust**: browsers display security warnings degrading user trust and experience",
|
||||
"RelatedUrl": "",
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "high",
|
||||
"ResourceType": "Zone",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Cloudflare zones** are assessed for **Web Application Firewall (WAF)** configuration by checking if it is enabled to protect against common web vulnerabilities including **SQL injection**, **XSS**, and **OWASP Top 10** threats.",
|
||||
"Risk": "Without **WAF**, web applications are exposed to common attack vectors.\n- **Confidentiality**: SQL injection attacks can exfiltrate sensitive database contents\n- **Integrity**: XSS attacks can modify page content and steal session tokens\n- **Availability**: application-layer attacks can cause service disruption",
|
||||
"RelatedUrl": "",
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "high",
|
||||
"ResourceType": "Zone",
|
||||
"ResourceGroup": "network",
|
||||
"Description": "**Cloudflare zones** are assessed for **OWASP managed rulesets** by checking if they are enabled to protect against common web application vulnerabilities including **SQL injection**, **XSS**, and other **OWASP Top 10** threats.",
|
||||
"Risk": "Without **OWASP managed rulesets**, web applications are exposed to well-known attack vectors.\n- **Confidentiality**: SQL injection attacks can exfiltrate sensitive database contents\n- **Integrity**: XSS attacks can modify page content and steal session tokens\n- **Availability**: remote code execution can compromise server availability",
|
||||
"RelatedUrl": "",
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from pydantic.v1 import BaseModel
|
||||
@@ -22,6 +23,7 @@ class Compute(GCPService):
|
||||
self.load_balancers = []
|
||||
self.instance_groups = []
|
||||
self.images = []
|
||||
self.snapshots = []
|
||||
self._get_regions()
|
||||
self._get_projects()
|
||||
self._get_url_maps()
|
||||
@@ -36,6 +38,7 @@ class Compute(GCPService):
|
||||
self.__threading_call__(self._get_zonal_instance_groups, self.zones)
|
||||
self._associate_migs_with_load_balancers()
|
||||
self._get_images()
|
||||
self._get_snapshots()
|
||||
|
||||
def _get_regions(self):
|
||||
for project_id in self.project_ids:
|
||||
@@ -602,6 +605,57 @@ class Compute(GCPService):
|
||||
f"{project_id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
def _get_snapshots(self) -> None:
    """Collect Compute Engine disk snapshots for every audited project.

    Pages through the snapshots.list API and appends one ``Snapshot``
    model per entry to ``self.snapshots``. Creation timestamps are parsed
    from RFC 3339; values that cannot be parsed are logged and stored as
    ``None``. API errors are logged per project and do not abort the scan.
    """
    for project_id in self.project_ids:
        try:
            request = self.client.snapshots().list(project=project_id)
            # Walk every page of results for this project.
            while request is not None:
                response = request.execute(num_retries=DEFAULT_RETRY_ATTEMPTS)
                for snapshot in response.get("items", []):
                    creation_timestamp_str = snapshot.get("creationTimestamp", "")
                    parsed_creation = None
                    if creation_timestamp_str:
                        # GCP returns RFC 3339 timestamps; normalize the
                        # trailing "Z" so fromisoformat accepts it.
                        try:
                            parsed_creation = datetime.fromisoformat(
                                creation_timestamp_str.replace("Z", "+00:00")
                            )
                        except ValueError:
                            logger.error(
                                f"Could not parse timestamp {creation_timestamp_str} for snapshot {snapshot['name']}"
                            )

                    # Keep only the trailing path segment of the source
                    # disk URL as the disk name.
                    disk_url = snapshot.get("sourceDisk", "")
                    disk_name = disk_url.rsplit("/", 1)[-1] if disk_url else ""

                    self.snapshots.append(
                        Snapshot(
                            name=snapshot["name"],
                            id=snapshot["id"],
                            project_id=project_id,
                            creation_timestamp=parsed_creation,
                            source_disk=disk_name,
                            source_disk_id=snapshot.get("sourceDiskId"),
                            disk_size_gb=int(snapshot.get("diskSizeGb", 0)),
                            storage_bytes=int(snapshot.get("storageBytes", 0)),
                            storage_locations=snapshot.get("storageLocations", []),
                            status=snapshot.get("status", ""),
                            auto_created=snapshot.get("autoCreated", False),
                        )
                    )

                request = self.client.snapshots().list_next(
                    previous_request=request, previous_response=response
                )
        except Exception as error:
            logger.error(
                f"{project_id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
|
||||
|
||||
|
||||
class NetworkInterface(BaseModel):
|
||||
name: str
|
||||
@@ -708,3 +762,17 @@ class Image(BaseModel):
|
||||
id: str
|
||||
project_id: str
|
||||
publicly_shared: bool = False
|
||||
|
||||
|
||||
class Snapshot(BaseModel):
    """Compute Engine disk snapshot as collected by the Compute service."""

    # Snapshot name (unique within the project).
    name: str
    # Numeric snapshot identifier assigned by GCP (kept as a string).
    id: str
    # Project the snapshot belongs to.
    project_id: str
    # Creation time parsed from the RFC 3339 API timestamp; None when the
    # timestamp was missing or could not be parsed.
    creation_timestamp: Optional[datetime] = None
    # Short name of the source disk (last path segment of the sourceDisk URL).
    source_disk: str = ""
    # Unique ID of the source disk, when reported by the API.
    source_disk_id: Optional[str] = None
    # Size of the source disk in GB.
    disk_size_gb: int = 0
    # Bytes actually consumed by the snapshot in storage.
    storage_bytes: int = 0
    # Storage locations reported for the snapshot data.
    # NOTE: mutable default is safe here — pydantic copies field defaults
    # per instance.
    storage_locations: list[str] = []
    # Snapshot status string as returned by the API.
    status: str = ""
    # True when the snapshot was auto-created (e.g. by a snapshot schedule).
    auto_created: bool = False
|
||||
|
||||
@@ -0,0 +1,38 @@
|
||||
{
|
||||
"Provider": "gcp",
|
||||
"CheckID": "compute_snapshot_not_outdated",
|
||||
"CheckTitle": "Compute Engine disk snapshot is not outdated",
|
||||
"CheckType": [],
|
||||
"ServiceName": "compute",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "low",
|
||||
"ResourceType": "compute.googleapis.com/Snapshot",
|
||||
"ResourceGroup": "storage",
|
||||
"Description": "Compute Engine **disk snapshots** are evaluated against a configurable age threshold (default `90` days) to identify snapshots exceeding the organization's retention policy.",
|
||||
"Risk": "Outdated snapshots containing **sensitive data** expand the **attack surface** and risk data exposure if compromised.\n\nStale snapshots may violate compliance requirements, complicate disaster recovery efforts, and introduce configuration drift that affects system **integrity**.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://www.trendmicro.com/cloudoneconformity/knowledge-base/gcp/ComputeEngine/remove-old-disk-snapshots.html",
|
||||
"https://cloud.google.com/compute/docs/disks/create-snapshots",
|
||||
"https://cloud.google.com/compute/docs/disks/snapshot-best-practices"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "gcloud compute snapshots delete SNAPSHOT_NAME --project=PROJECT_ID",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Open Google Cloud Console and navigate to Compute Engine > Snapshots\n2. Identify snapshots older than your retention policy\n3. Select outdated snapshots and click **Delete**\n4. Confirm the deletion\n\nTo automate cleanup, create a snapshot schedule with auto-delete policies under Compute Engine > Snapshots > Snapshot schedules.",
|
||||
"Terraform": "```hcl\nresource \"google_compute_resource_policy\" \"snapshot_schedule\" {\n name = \"snapshot-schedule-with-retention\"\n region = var.region\n\n snapshot_schedule_policy {\n schedule {\n daily_schedule {\n days_in_cycle = 1\n start_time = \"04:00\"\n }\n }\n\n # Automatically delete snapshots older than 90 days\n retention_policy {\n max_retention_days = 90\n on_source_disk_delete = \"KEEP_AUTO_SNAPSHOTS\"\n }\n }\n}\n\nresource \"google_compute_disk_resource_policy_attachment\" \"attachment\" {\n name = google_compute_resource_policy.snapshot_schedule.name\n disk = google_compute_disk.example.name\n zone = var.zone\n}\n```"
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Implement a snapshot lifecycle policy to automatically delete snapshots older than your organization's retention requirements. Regularly review and clean up outdated snapshots to reduce storage costs and minimize data exposure risks. Consider using scheduled snapshots with automatic deletion policies.",
|
||||
"Url": "https://hub.prowler.com/check/compute_snapshot_not_outdated"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"resilience"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": "The age threshold is configurable via the `max_snapshot_age_days` parameter in the configuration file (default: 90 days). Snapshots without a creation timestamp will be flagged for manual review."
|
||||
}
|
||||
@@ -0,0 +1,60 @@
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from prowler.lib.check.models import Check, Check_Report_GCP
|
||||
from prowler.providers.gcp.services.compute.compute_client import compute_client
|
||||
|
||||
|
||||
class compute_snapshot_not_outdated(Check):
    """Ensure Compute Engine disk snapshots are not older than the threshold.

    The maximum age in days is read from ``max_snapshot_age_days`` in the
    audit configuration (default: 90).

    - PASS: snapshot age is within the configured threshold.
    - FAIL: snapshot age exceeds the threshold, or no creation timestamp
      is available so the age cannot be evaluated.
    """

    def execute(self) -> list[Check_Report_GCP]:
        """Evaluate every collected snapshot against the age threshold."""
        findings = []

        max_snapshot_age_days = compute_client.audit_config.get(
            "max_snapshot_age_days", 90
        )
        now = datetime.now(timezone.utc)

        for snapshot in compute_client.snapshots:
            report = Check_Report_GCP(
                metadata=self.metadata(),
                resource=snapshot,
                location="global",
            )

            if snapshot.creation_timestamp is None:
                # Age cannot be computed without a timestamp; flag the
                # snapshot so it gets manual review.
                report.status = "FAIL"
                report.status_extended = (
                    f"Disk snapshot {snapshot.name} timestamp could not be retrieved "
                    "and cannot be evaluated for age."
                )
            else:
                age_days = (now - snapshot.creation_timestamp).days
                outdated = age_days > max_snapshot_age_days
                report.status = "FAIL" if outdated else "PASS"
                qualifier = "exceeding" if outdated else "within"
                report.status_extended = (
                    f"Disk snapshot {snapshot.name} is {age_days} days old, "
                    f"{qualifier} the {max_snapshot_age_days} day threshold."
                )

            findings.append(report)

        return findings
|
||||
@@ -43,7 +43,7 @@ class Test_zone_bot_fight_mode_enabled:
|
||||
status="active",
|
||||
paused=False,
|
||||
settings=CloudflareZoneSettings(
|
||||
browser_check="on",
|
||||
bot_fight_mode_enabled=True,
|
||||
),
|
||||
)
|
||||
}
|
||||
@@ -80,7 +80,7 @@ class Test_zone_bot_fight_mode_enabled:
|
||||
status="active",
|
||||
paused=False,
|
||||
settings=CloudflareZoneSettings(
|
||||
browser_check="off",
|
||||
bot_fight_mode_enabled=False,
|
||||
),
|
||||
)
|
||||
}
|
||||
@@ -104,36 +104,3 @@ class Test_zone_bot_fight_mode_enabled:
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
assert "not enabled" in result[0].status_extended
|
||||
|
||||
def test_zone_bot_fight_mode_none(self):
    """A zone with no bot-management settings must be reported as FAIL."""
    # Instantiate the mock. Assigning the MagicMock *class* (the original
    # `mock.MagicMock` without parentheses) and then setting `.zones` on it
    # mutates the shared class object and leaks state into every other test
    # that does the same.
    zone_client = mock.MagicMock()
    zone_client.zones = {
        ZONE_ID: CloudflareZone(
            id=ZONE_ID,
            name=ZONE_NAME,
            status="active",
            paused=False,
            settings=CloudflareZoneSettings(
                browser_check=None,
            ),
        )
    }

    with (
        mock.patch(
            "prowler.providers.common.provider.Provider.get_global_provider",
            return_value=set_mocked_cloudflare_provider(),
        ),
        mock.patch(
            "prowler.providers.cloudflare.services.zone.zone_bot_fight_mode_enabled.zone_bot_fight_mode_enabled.zone_client",
            new=zone_client,
        ),
    ):
        from prowler.providers.cloudflare.services.zone.zone_bot_fight_mode_enabled.zone_bot_fight_mode_enabled import (
            zone_bot_fight_mode_enabled,
        )

        check = zone_bot_fight_mode_enabled()
        result = check.execute()
        # bot_fight_mode_enabled defaults to False, so the check must FAIL.
        assert len(result) == 1
        assert result[0].status == "FAIL"
|
||||
|
||||
@@ -11,7 +11,7 @@ from tests.providers.cloudflare.cloudflare_fixtures import (
|
||||
)
|
||||
|
||||
|
||||
class Test_zone_server_side_excludes_enabled:
|
||||
class Test_zone_browser_integrity_check_enabled:
|
||||
def test_no_zones(self):
|
||||
zone_client = mock.MagicMock
|
||||
zone_client.zones = {}
|
||||
@@ -22,19 +22,19 @@ class Test_zone_server_side_excludes_enabled:
|
||||
return_value=set_mocked_cloudflare_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.cloudflare.services.zone.zone_server_side_excludes_enabled.zone_server_side_excludes_enabled.zone_client",
|
||||
"prowler.providers.cloudflare.services.zone.zone_browser_integrity_check_enabled.zone_browser_integrity_check_enabled.zone_client",
|
||||
new=zone_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.cloudflare.services.zone.zone_server_side_excludes_enabled.zone_server_side_excludes_enabled import (
|
||||
zone_server_side_excludes_enabled,
|
||||
from prowler.providers.cloudflare.services.zone.zone_browser_integrity_check_enabled.zone_browser_integrity_check_enabled import (
|
||||
zone_browser_integrity_check_enabled,
|
||||
)
|
||||
|
||||
check = zone_server_side_excludes_enabled()
|
||||
check = zone_browser_integrity_check_enabled()
|
||||
result = check.execute()
|
||||
assert len(result) == 0
|
||||
|
||||
def test_zone_server_side_excludes_enabled(self):
|
||||
def test_zone_browser_integrity_check_enabled(self):
|
||||
zone_client = mock.MagicMock
|
||||
zone_client.zones = {
|
||||
ZONE_ID: CloudflareZone(
|
||||
@@ -43,7 +43,7 @@ class Test_zone_server_side_excludes_enabled:
|
||||
status="active",
|
||||
paused=False,
|
||||
settings=CloudflareZoneSettings(
|
||||
server_side_exclude="on",
|
||||
browser_check="on",
|
||||
),
|
||||
)
|
||||
}
|
||||
@@ -54,23 +54,24 @@ class Test_zone_server_side_excludes_enabled:
|
||||
return_value=set_mocked_cloudflare_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.cloudflare.services.zone.zone_server_side_excludes_enabled.zone_server_side_excludes_enabled.zone_client",
|
||||
"prowler.providers.cloudflare.services.zone.zone_browser_integrity_check_enabled.zone_browser_integrity_check_enabled.zone_client",
|
||||
new=zone_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.cloudflare.services.zone.zone_server_side_excludes_enabled.zone_server_side_excludes_enabled import (
|
||||
zone_server_side_excludes_enabled,
|
||||
from prowler.providers.cloudflare.services.zone.zone_browser_integrity_check_enabled.zone_browser_integrity_check_enabled import (
|
||||
zone_browser_integrity_check_enabled,
|
||||
)
|
||||
|
||||
check = zone_server_side_excludes_enabled()
|
||||
check = zone_browser_integrity_check_enabled()
|
||||
result = check.execute()
|
||||
assert len(result) == 1
|
||||
assert result[0].resource_id == ZONE_ID
|
||||
assert result[0].resource_name == ZONE_NAME
|
||||
assert result[0].status == "PASS"
|
||||
assert "Server Side Excludes is enabled" in result[0].status_extended
|
||||
assert "Browser Integrity Check" in result[0].status_extended
|
||||
assert "enabled" in result[0].status_extended
|
||||
|
||||
def test_zone_server_side_excludes_disabled(self):
|
||||
def test_zone_browser_integrity_check_disabled(self):
|
||||
zone_client = mock.MagicMock
|
||||
zone_client.zones = {
|
||||
ZONE_ID: CloudflareZone(
|
||||
@@ -79,7 +80,7 @@ class Test_zone_server_side_excludes_enabled:
|
||||
status="active",
|
||||
paused=False,
|
||||
settings=CloudflareZoneSettings(
|
||||
server_side_exclude="off",
|
||||
browser_check="off",
|
||||
),
|
||||
)
|
||||
}
|
||||
@@ -90,21 +91,21 @@ class Test_zone_server_side_excludes_enabled:
|
||||
return_value=set_mocked_cloudflare_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.cloudflare.services.zone.zone_server_side_excludes_enabled.zone_server_side_excludes_enabled.zone_client",
|
||||
"prowler.providers.cloudflare.services.zone.zone_browser_integrity_check_enabled.zone_browser_integrity_check_enabled.zone_client",
|
||||
new=zone_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.cloudflare.services.zone.zone_server_side_excludes_enabled.zone_server_side_excludes_enabled import (
|
||||
zone_server_side_excludes_enabled,
|
||||
from prowler.providers.cloudflare.services.zone.zone_browser_integrity_check_enabled.zone_browser_integrity_check_enabled import (
|
||||
zone_browser_integrity_check_enabled,
|
||||
)
|
||||
|
||||
check = zone_server_side_excludes_enabled()
|
||||
check = zone_browser_integrity_check_enabled()
|
||||
result = check.execute()
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
assert "Server Side Excludes is not enabled" in result[0].status_extended
|
||||
assert "not enabled" in result[0].status_extended
|
||||
|
||||
def test_zone_server_side_excludes_none(self):
|
||||
def test_zone_browser_integrity_check_none(self):
|
||||
zone_client = mock.MagicMock
|
||||
zone_client.zones = {
|
||||
ZONE_ID: CloudflareZone(
|
||||
@@ -113,7 +114,7 @@ class Test_zone_server_side_excludes_enabled:
|
||||
status="active",
|
||||
paused=False,
|
||||
settings=CloudflareZoneSettings(
|
||||
server_side_exclude=None,
|
||||
browser_check=None,
|
||||
),
|
||||
)
|
||||
}
|
||||
@@ -124,15 +125,15 @@ class Test_zone_server_side_excludes_enabled:
|
||||
return_value=set_mocked_cloudflare_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.cloudflare.services.zone.zone_server_side_excludes_enabled.zone_server_side_excludes_enabled.zone_client",
|
||||
"prowler.providers.cloudflare.services.zone.zone_browser_integrity_check_enabled.zone_browser_integrity_check_enabled.zone_client",
|
||||
new=zone_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.cloudflare.services.zone.zone_server_side_excludes_enabled.zone_server_side_excludes_enabled import (
|
||||
zone_server_side_excludes_enabled,
|
||||
from prowler.providers.cloudflare.services.zone.zone_browser_integrity_check_enabled.zone_browser_integrity_check_enabled import (
|
||||
zone_browser_integrity_check_enabled,
|
||||
)
|
||||
|
||||
check = zone_server_side_excludes_enabled()
|
||||
check = zone_browser_integrity_check_enabled()
|
||||
result = check.execute()
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
@@ -34,7 +34,7 @@ class Test_zone_challenge_passage_configured:
|
||||
result = check.execute()
|
||||
assert len(result) == 0
|
||||
|
||||
def test_zone_challenge_passage_correct(self):
|
||||
def test_zone_challenge_passage_at_min(self):
|
||||
zone_client = mock.MagicMock
|
||||
zone_client.zones = {
|
||||
ZONE_ID: CloudflareZone(
|
||||
@@ -43,7 +43,7 @@ class Test_zone_challenge_passage_configured:
|
||||
status="active",
|
||||
paused=False,
|
||||
settings=CloudflareZoneSettings(
|
||||
challenge_ttl=3600, # Recommended value
|
||||
challenge_ttl=900, # 15 minutes - minimum recommended
|
||||
),
|
||||
)
|
||||
}
|
||||
@@ -68,9 +68,9 @@ class Test_zone_challenge_passage_configured:
|
||||
assert result[0].resource_id == ZONE_ID
|
||||
assert result[0].resource_name == ZONE_NAME
|
||||
assert result[0].status == "PASS"
|
||||
assert "3600" in result[0].status_extended
|
||||
assert "15 minutes" in result[0].status_extended
|
||||
|
||||
def test_zone_challenge_passage_too_long(self):
|
||||
def test_zone_challenge_passage_at_max(self):
|
||||
zone_client = mock.MagicMock
|
||||
zone_client.zones = {
|
||||
ZONE_ID: CloudflareZone(
|
||||
@@ -79,7 +79,7 @@ class Test_zone_challenge_passage_configured:
|
||||
status="active",
|
||||
paused=False,
|
||||
settings=CloudflareZoneSettings(
|
||||
challenge_ttl=86400, # Too long (24 hours)
|
||||
challenge_ttl=2700, # 45 minutes - maximum recommended
|
||||
),
|
||||
)
|
||||
}
|
||||
@@ -101,9 +101,42 @@ class Test_zone_challenge_passage_configured:
|
||||
check = zone_challenge_passage_configured()
|
||||
result = check.execute()
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
assert "86400" in result[0].status_extended
|
||||
assert "recommended" in result[0].status_extended
|
||||
assert result[0].status == "PASS"
|
||||
assert "45 minutes" in result[0].status_extended
|
||||
|
||||
def test_zone_challenge_passage_default(self):
|
||||
zone_client = mock.MagicMock
|
||||
zone_client.zones = {
|
||||
ZONE_ID: CloudflareZone(
|
||||
id=ZONE_ID,
|
||||
name=ZONE_NAME,
|
||||
status="active",
|
||||
paused=False,
|
||||
settings=CloudflareZoneSettings(
|
||||
challenge_ttl=1800, # 30 minutes - default and secure
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_cloudflare_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.cloudflare.services.zone.zone_challenge_passage_configured.zone_challenge_passage_configured.zone_client",
|
||||
new=zone_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.cloudflare.services.zone.zone_challenge_passage_configured.zone_challenge_passage_configured import (
|
||||
zone_challenge_passage_configured,
|
||||
)
|
||||
|
||||
check = zone_challenge_passage_configured()
|
||||
result = check.execute()
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "PASS"
|
||||
assert "30 minutes" in result[0].status_extended
|
||||
|
||||
def test_zone_challenge_passage_too_short(self):
|
||||
zone_client = mock.MagicMock
|
||||
@@ -114,7 +147,7 @@ class Test_zone_challenge_passage_configured:
|
||||
status="active",
|
||||
paused=False,
|
||||
settings=CloudflareZoneSettings(
|
||||
challenge_ttl=300, # Too short (5 minutes)
|
||||
challenge_ttl=300, # 5 minutes - too short
|
||||
),
|
||||
)
|
||||
}
|
||||
@@ -137,7 +170,43 @@ class Test_zone_challenge_passage_configured:
|
||||
result = check.execute()
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
assert "300" in result[0].status_extended
|
||||
assert "5 minutes" in result[0].status_extended
|
||||
assert "recommended" in result[0].status_extended
|
||||
|
||||
def test_zone_challenge_passage_too_long(self):
|
||||
zone_client = mock.MagicMock
|
||||
zone_client.zones = {
|
||||
ZONE_ID: CloudflareZone(
|
||||
id=ZONE_ID,
|
||||
name=ZONE_NAME,
|
||||
status="active",
|
||||
paused=False,
|
||||
settings=CloudflareZoneSettings(
|
||||
challenge_ttl=3600, # 60 minutes - exceeds recommended
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_cloudflare_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.cloudflare.services.zone.zone_challenge_passage_configured.zone_challenge_passage_configured.zone_client",
|
||||
new=zone_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.cloudflare.services.zone.zone_challenge_passage_configured.zone_challenge_passage_configured import (
|
||||
zone_challenge_passage_configured,
|
||||
)
|
||||
|
||||
check = zone_challenge_passage_configured()
|
||||
result = check.execute()
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
assert "60 minutes" in result[0].status_extended
|
||||
assert "recommended" in result[0].status_extended
|
||||
|
||||
def test_zone_challenge_passage_none(self):
|
||||
zone_client = mock.MagicMock
|
||||
|
||||
@@ -64,6 +64,7 @@ def mock_api_client(GCPService, service, api_version, _):
|
||||
mock_api_access_policies_calls(client)
|
||||
mock_api_instance_group_managers_calls(client)
|
||||
mock_api_images_calls(client)
|
||||
mock_api_snapshots_calls(client)
|
||||
|
||||
return client
|
||||
|
||||
@@ -1344,3 +1345,8 @@ def mock_api_images_calls(client: MagicMock):
|
||||
return return_value
|
||||
|
||||
client.images().getIamPolicy = mock_get_image_iam_policy
|
||||
|
||||
|
||||
def mock_api_snapshots_calls(client: MagicMock):
|
||||
client.snapshots().list().execute.return_value = {"items": []}
|
||||
client.snapshots().list_next.return_value = None
|
||||
|
||||
@@ -92,6 +92,7 @@ class TestGCPProvider:
|
||||
"max_unused_account_days": 180,
|
||||
"storage_min_retention_days": 90,
|
||||
"mig_min_zones": 2,
|
||||
"max_snapshot_age_days": 90,
|
||||
}
|
||||
|
||||
@freeze_time(datetime.today())
|
||||
|
||||
@@ -0,0 +1,324 @@
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from unittest import mock
|
||||
|
||||
from tests.providers.gcp.gcp_fixtures import GCP_PROJECT_ID, set_mocked_gcp_provider
|
||||
|
||||
|
||||
class TestComputeSnapshotNotOutdated:
|
||||
def test_compute_no_snapshots(self):
|
||||
compute_client = mock.MagicMock()
|
||||
compute_client.snapshots = []
|
||||
compute_client.audit_config = {"max_snapshot_age_days": 90}
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_gcp_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_snapshot_not_outdated.compute_snapshot_not_outdated.compute_client",
|
||||
new=compute_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.gcp.services.compute.compute_snapshot_not_outdated.compute_snapshot_not_outdated import (
|
||||
compute_snapshot_not_outdated,
|
||||
)
|
||||
|
||||
check = compute_snapshot_not_outdated()
|
||||
result = check.execute()
|
||||
assert len(result) == 0
|
||||
|
||||
def test_snapshot_within_threshold(self):
|
||||
compute_client = mock.MagicMock()
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_gcp_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_snapshot_not_outdated.compute_snapshot_not_outdated.compute_client",
|
||||
new=compute_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.gcp.services.compute.compute_service import Snapshot
|
||||
from prowler.providers.gcp.services.compute.compute_snapshot_not_outdated.compute_snapshot_not_outdated import (
|
||||
compute_snapshot_not_outdated,
|
||||
)
|
||||
|
||||
creation_time = datetime.now(timezone.utc) - timedelta(days=30)
|
||||
|
||||
compute_client.project_ids = [GCP_PROJECT_ID]
|
||||
compute_client.audit_config = {"max_snapshot_age_days": 90}
|
||||
compute_client.snapshots = [
|
||||
Snapshot(
|
||||
name="test-snapshot-recent",
|
||||
id="1234567890",
|
||||
project_id=GCP_PROJECT_ID,
|
||||
creation_timestamp=creation_time,
|
||||
source_disk="test-disk",
|
||||
source_disk_id="disk-123",
|
||||
disk_size_gb=100,
|
||||
storage_bytes=1073741824,
|
||||
storage_locations=["us-central1"],
|
||||
status="READY",
|
||||
auto_created=False,
|
||||
)
|
||||
]
|
||||
|
||||
check = compute_snapshot_not_outdated()
|
||||
result = check.execute()
|
||||
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "PASS"
|
||||
assert "30 days old" in result[0].status_extended
|
||||
assert "within the 90 day threshold" in result[0].status_extended
|
||||
assert result[0].resource_id == "1234567890"
|
||||
assert result[0].resource_name == "test-snapshot-recent"
|
||||
assert result[0].location == "global"
|
||||
assert result[0].project_id == GCP_PROJECT_ID
|
||||
|
||||
def test_snapshot_exceeds_threshold(self):
|
||||
compute_client = mock.MagicMock()
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_gcp_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_snapshot_not_outdated.compute_snapshot_not_outdated.compute_client",
|
||||
new=compute_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.gcp.services.compute.compute_service import Snapshot
|
||||
from prowler.providers.gcp.services.compute.compute_snapshot_not_outdated.compute_snapshot_not_outdated import (
|
||||
compute_snapshot_not_outdated,
|
||||
)
|
||||
|
||||
creation_time = datetime.now(timezone.utc) - timedelta(days=120)
|
||||
|
||||
compute_client.project_ids = [GCP_PROJECT_ID]
|
||||
compute_client.audit_config = {"max_snapshot_age_days": 90}
|
||||
compute_client.snapshots = [
|
||||
Snapshot(
|
||||
name="test-snapshot-old",
|
||||
id="0987654321",
|
||||
project_id=GCP_PROJECT_ID,
|
||||
creation_timestamp=creation_time,
|
||||
source_disk="test-disk",
|
||||
source_disk_id="disk-456",
|
||||
disk_size_gb=200,
|
||||
storage_bytes=2147483648,
|
||||
storage_locations=["us-east1"],
|
||||
status="READY",
|
||||
auto_created=False,
|
||||
)
|
||||
]
|
||||
|
||||
check = compute_snapshot_not_outdated()
|
||||
result = check.execute()
|
||||
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
assert "120 days old" in result[0].status_extended
|
||||
assert "exceeding the 90 day threshold" in result[0].status_extended
|
||||
assert result[0].resource_id == "0987654321"
|
||||
assert result[0].resource_name == "test-snapshot-old"
|
||||
assert result[0].location == "global"
|
||||
assert result[0].project_id == GCP_PROJECT_ID
|
||||
|
||||
def test_snapshot_no_creation_timestamp(self):
|
||||
compute_client = mock.MagicMock()
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_gcp_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_snapshot_not_outdated.compute_snapshot_not_outdated.compute_client",
|
||||
new=compute_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.gcp.services.compute.compute_service import Snapshot
|
||||
from prowler.providers.gcp.services.compute.compute_snapshot_not_outdated.compute_snapshot_not_outdated import (
|
||||
compute_snapshot_not_outdated,
|
||||
)
|
||||
|
||||
compute_client.project_ids = [GCP_PROJECT_ID]
|
||||
compute_client.audit_config = {"max_snapshot_age_days": 90}
|
||||
compute_client.snapshots = [
|
||||
Snapshot(
|
||||
name="test-snapshot-no-timestamp",
|
||||
id="1111111111",
|
||||
project_id=GCP_PROJECT_ID,
|
||||
creation_timestamp=None,
|
||||
source_disk="test-disk",
|
||||
source_disk_id="disk-789",
|
||||
disk_size_gb=50,
|
||||
storage_bytes=536870912,
|
||||
storage_locations=["eu-west1"],
|
||||
status="READY",
|
||||
auto_created=False,
|
||||
)
|
||||
]
|
||||
|
||||
check = compute_snapshot_not_outdated()
|
||||
result = check.execute()
|
||||
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
assert "timestamp could not be retrieved" in result[0].status_extended
|
||||
assert result[0].resource_id == "1111111111"
|
||||
assert result[0].resource_name == "test-snapshot-no-timestamp"
|
||||
assert result[0].project_id == GCP_PROJECT_ID
|
||||
|
||||
def test_multiple_snapshots_mixed(self):
|
||||
compute_client = mock.MagicMock()
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_gcp_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_snapshot_not_outdated.compute_snapshot_not_outdated.compute_client",
|
||||
new=compute_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.gcp.services.compute.compute_service import Snapshot
|
||||
from prowler.providers.gcp.services.compute.compute_snapshot_not_outdated.compute_snapshot_not_outdated import (
|
||||
compute_snapshot_not_outdated,
|
||||
)
|
||||
|
||||
current_time = datetime.now(timezone.utc)
|
||||
|
||||
compute_client.project_ids = [GCP_PROJECT_ID]
|
||||
compute_client.audit_config = {"max_snapshot_age_days": 90}
|
||||
compute_client.snapshots = [
|
||||
Snapshot(
|
||||
name="recent-snapshot",
|
||||
id="1111111111",
|
||||
project_id=GCP_PROJECT_ID,
|
||||
creation_timestamp=current_time - timedelta(days=10),
|
||||
source_disk="disk-1",
|
||||
status="READY",
|
||||
),
|
||||
Snapshot(
|
||||
name="old-snapshot",
|
||||
id="2222222222",
|
||||
project_id=GCP_PROJECT_ID,
|
||||
creation_timestamp=current_time - timedelta(days=150),
|
||||
source_disk="disk-2",
|
||||
status="READY",
|
||||
),
|
||||
Snapshot(
|
||||
name="boundary-snapshot",
|
||||
id="3333333333",
|
||||
project_id=GCP_PROJECT_ID,
|
||||
creation_timestamp=current_time - timedelta(days=91),
|
||||
source_disk="disk-3",
|
||||
status="READY",
|
||||
),
|
||||
]
|
||||
|
||||
check = compute_snapshot_not_outdated()
|
||||
result = check.execute()
|
||||
|
||||
assert len(result) == 3
|
||||
|
||||
recent_result = next(r for r in result if r.resource_id == "1111111111")
|
||||
old_result = next(r for r in result if r.resource_id == "2222222222")
|
||||
boundary_result = next(r for r in result if r.resource_id == "3333333333")
|
||||
|
||||
assert recent_result.status == "PASS"
|
||||
assert recent_result.resource_name == "recent-snapshot"
|
||||
|
||||
assert old_result.status == "FAIL"
|
||||
assert old_result.resource_name == "old-snapshot"
|
||||
|
||||
assert boundary_result.status == "FAIL"
|
||||
assert boundary_result.resource_name == "boundary-snapshot"
|
||||
|
||||
def test_custom_threshold(self):
|
||||
compute_client = mock.MagicMock()
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_gcp_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_snapshot_not_outdated.compute_snapshot_not_outdated.compute_client",
|
||||
new=compute_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.gcp.services.compute.compute_service import Snapshot
|
||||
from prowler.providers.gcp.services.compute.compute_snapshot_not_outdated.compute_snapshot_not_outdated import (
|
||||
compute_snapshot_not_outdated,
|
||||
)
|
||||
|
||||
creation_time = datetime.now(timezone.utc) - timedelta(days=45)
|
||||
|
||||
compute_client.project_ids = [GCP_PROJECT_ID]
|
||||
compute_client.audit_config = {"max_snapshot_age_days": 30}
|
||||
compute_client.snapshots = [
|
||||
Snapshot(
|
||||
name="test-snapshot-custom",
|
||||
id="4444444444",
|
||||
project_id=GCP_PROJECT_ID,
|
||||
creation_timestamp=creation_time,
|
||||
source_disk="test-disk",
|
||||
status="READY",
|
||||
)
|
||||
]
|
||||
|
||||
check = compute_snapshot_not_outdated()
|
||||
result = check.execute()
|
||||
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
assert "45 days old" in result[0].status_extended
|
||||
assert "exceeding the 30 day threshold" in result[0].status_extended
|
||||
|
||||
def test_default_threshold_when_not_configured(self):
|
||||
compute_client = mock.MagicMock()
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_gcp_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_snapshot_not_outdated.compute_snapshot_not_outdated.compute_client",
|
||||
new=compute_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.gcp.services.compute.compute_service import Snapshot
|
||||
from prowler.providers.gcp.services.compute.compute_snapshot_not_outdated.compute_snapshot_not_outdated import (
|
||||
compute_snapshot_not_outdated,
|
||||
)
|
||||
|
||||
creation_time = datetime.now(timezone.utc) - timedelta(days=85)
|
||||
|
||||
compute_client.project_ids = [GCP_PROJECT_ID]
|
||||
compute_client.audit_config = {}
|
||||
compute_client.snapshots = [
|
||||
Snapshot(
|
||||
name="test-snapshot-default",
|
||||
id="5555555555",
|
||||
project_id=GCP_PROJECT_ID,
|
||||
creation_timestamp=creation_time,
|
||||
source_disk="test-disk",
|
||||
status="READY",
|
||||
)
|
||||
]
|
||||
|
||||
check = compute_snapshot_not_outdated()
|
||||
result = check.execute()
|
||||
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "PASS"
|
||||
assert "85 days old" in result[0].status_extended
|
||||
assert "within the 90 day threshold" in result[0].status_extended
|
||||
Reference in New Issue
Block a user