Compare commits

...

10 Commits

Author SHA1 Message Date
César Arroba
ef39a500b4 chore: rollback last change 2025-07-31 15:44:15 +02:00
César Arroba
9b399409d5 chore: build code with preview image tag 2025-07-31 15:36:39 +02:00
Pablo Lara
ccb1ca53ff chore: some SSR tweaks 2025-07-09 14:23:58 +02:00
Chandrapal Badshah
2225cc48a5 feat: add lighthouse caching 2025-07-08 12:45:09 +05:30
Chandrapal Badshah
e3c63c01bb feat: add caching recommendations in valkey 2025-07-04 11:36:25 +05:30
Chandrapal Badshah
5fa44572d5 feat: get summary of all scans in last 24 hours 2025-07-03 14:37:40 +05:30
Chandrapal Badshah
0da9bd80fb feat: add tool to detect new failed findings in a scan 2025-07-02 14:36:00 +05:30
Chandrapal Badshah
b03a807d33 feat: add lighthouse summary generator 2025-07-02 14:33:25 +05:30
Chandrapal Badshah
c3e55b383b fix: update schema to get check details 2025-07-02 11:24:22 +05:30
Chandrapal Badshah
5daed282d5 feat: add getLatestFindings tool 2025-06-27 15:10:13 +05:30
18 changed files with 1651 additions and 70 deletions

View File

@@ -0,0 +1,78 @@
"use server";
import { apiBaseUrl, getAuthHeaders, parseStringify } from "@/lib";
/**
 * Fetch a page of findings from the Prowler API for Lighthouse.
 *
 * @param page - 1-based page number; invalid values (NaN, < 1) clamp to 1.
 * @param pageSize - number of findings per page.
 * @param query - free-text search forwarded as `filter[search]`.
 * @param sort - sort expression forwarded verbatim.
 * @param filters - extra query params appended as-is (e.g. `filter[severity]`).
 * @returns the parsed API payload, or `undefined` when the request throws.
 */
export const getLighthouseFindings = async ({
  page = 1,
  pageSize = 10,
  query = "",
  sort = "",
  filters = {},
}: {
  page?: number;
  pageSize?: number;
  query?: string;
  sort?: string;
  filters?: Record<string, unknown>;
} = {}) => {
  const headers = await getAuthHeaders({ contentType: false });

  // Coerce to a number first, then clamp: this guarantees the value we
  // serialize into page[number] is numeric even if a caller passed a string.
  const numericPage = Number(page);
  const validPage =
    Number.isNaN(numericPage) || numericPage < 1 ? 1 : numericPage;

  const url = new URL(`${apiBaseUrl}/findings`);
  // validPage is always >= 1, so append it unconditionally.
  url.searchParams.append("page[number]", validPage.toString());
  if (pageSize) url.searchParams.append("page[size]", pageSize.toString());
  if (query) url.searchParams.append("filter[search]", query);
  if (sort) url.searchParams.append("sort", sort);
  Object.entries(filters).forEach(([key, value]) => {
    url.searchParams.append(key, String(value));
  });

  try {
    const findings = await fetch(url.toString(), {
      headers,
    });
    // NOTE(review): non-2xx responses are still parsed and returned as-is —
    // confirm callers expect the API's JSON error payload in that case.
    const data = await findings.json();
    return parseStringify(data);
  } catch (error) {
    // eslint-disable-next-line no-console
    console.error("Error fetching lighthouse findings:", error);
    return undefined;
  }
};
/**
 * Fetch a page of the latest-scan findings (`/findings/latest`) for Lighthouse.
 *
 * @param page - 1-based page number; invalid values (NaN, < 1) clamp to 1.
 * @param pageSize - number of findings per page.
 * @param query - free-text search forwarded as `filter[search]`.
 * @param sort - sort expression forwarded verbatim.
 * @param filters - extra query params appended as-is (e.g. `filter[severity]`).
 * @returns the parsed API payload, or `undefined` when the request throws.
 */
export const getLighthouseLatestFindings = async ({
  page = 1,
  pageSize = 10,
  query = "",
  sort = "",
  filters = {},
}: {
  page?: number;
  pageSize?: number;
  query?: string;
  sort?: string;
  filters?: Record<string, unknown>;
} = {}) => {
  const headers = await getAuthHeaders({ contentType: false });

  // Coerce to a number first, then clamp, so page[number] is always numeric
  // (keeps behavior consistent with getLighthouseFindings).
  const numericPage = Number(page);
  const validPage =
    Number.isNaN(numericPage) || numericPage < 1 ? 1 : numericPage;

  const url = new URL(`${apiBaseUrl}/findings/latest`);
  // validPage is always >= 1, so append it unconditionally.
  url.searchParams.append("page[number]", validPage.toString());
  if (pageSize) url.searchParams.append("page[size]", pageSize.toString());
  if (query) url.searchParams.append("filter[search]", query);
  if (sort) url.searchParams.append("sort", sort);
  Object.entries(filters).forEach(([key, value]) => {
    url.searchParams.append(key, String(value));
  });

  try {
    const findings = await fetch(url.toString(), {
      headers,
    });
    // NOTE(review): non-2xx responses are still parsed and returned as-is —
    // confirm callers expect the API's JSON error payload in that case.
    const data = await findings.json();
    return parseStringify(data);
  } catch (error) {
    // eslint-disable-next-line no-console
    console.error("Error fetching lighthouse latest findings:", error);
    return undefined;
  }
};

View File

@@ -1,16 +1,55 @@
import { getLighthouseConfig } from "@/actions/lighthouse/lighthouse";
import { Chat } from "@/components/lighthouse";
import { ContentLayout } from "@/components/ui";
import { CacheService } from "@/lib/lighthouse/cache";
import { suggestedActions } from "@/lib/lighthouse/suggested-actions";
export default async function AIChatbot() {
interface LighthousePageProps {
searchParams: { cachedMessage?: string };
}
export default async function AIChatbot({ searchParams }: LighthousePageProps) {
const config = await getLighthouseConfig();
const hasConfig = !!config;
const isActive = config?.attributes?.is_active ?? false;
// Fetch cached content if a cached message type is specified
let cachedContent = null;
if (searchParams.cachedMessage) {
const cached = await CacheService.getCachedMessage(
searchParams.cachedMessage,
);
cachedContent = cached.success ? cached.data : null;
}
// Pre-fetch all question answers and processing status
const isProcessing = await CacheService.isRecommendationProcessing();
const questionAnswers: Record<string, string> = {};
if (!isProcessing) {
for (const action of suggestedActions) {
if (action.questionRef) {
const cached = await CacheService.getCachedMessage(
`question_${action.questionRef}`,
);
if (cached.success && cached.data) {
questionAnswers[action.questionRef] = cached.data;
}
}
}
}
return (
<ContentLayout title="Lighthouse" icon="lucide:bot">
<Chat hasConfig={hasConfig} isActive={isActive} />
<Chat
hasConfig={hasConfig}
isActive={isActive}
cachedContent={cachedContent}
messageType={searchParams.cachedMessage}
isProcessing={isProcessing}
questionAnswers={questionAnswers}
/>
</ContentLayout>
);
}

View File

@@ -9,6 +9,7 @@ import {
getProvidersOverview,
} from "@/actions/overview/overview";
import { FilterControls } from "@/components/filters";
import { LighthouseBanner } from "@/components/lighthouse";
import {
FindingsBySeverityChart,
FindingsByStatusChart,
@@ -143,7 +144,6 @@ const SSRDataNewFindingsTable = async () => {
sort,
filters: defaultFilters,
});
// Create dictionaries for resources, scans, and providers
const resourceDict = createDict("resources", findingsData);
const scanDict = createDict("scans", findingsData);
@@ -187,6 +187,9 @@ const SSRDataNewFindingsTable = async () => {
</div>
</div>
<Spacer y={4} />
<LighthouseBanner />
<DataTable
columns={ColumnNewFindingsToDate}
data={expandedResponse?.data || []}

View File

@@ -0,0 +1,91 @@
import { Bot } from "lucide-react";
import Link from "next/link";
import { getLighthouseConfig } from "@/actions/lighthouse/lighthouse";
import { CacheService, initializeTenantCache } from "@/lib/lighthouse/cache";
/** Visual and navigation configuration for one Lighthouse banner variant. */
interface BannerConfig {
  // Text displayed inside the banner card.
  message: string;
  // Navigation target when the banner is clicked.
  href: string;
  // Tailwind gradient + hover/focus classes for the banner background.
  gradient: string;
  // When true, the bot icon pulses (used for the "processing" state below).
  animate?: boolean;
}

// Purely presentational: renders a full-width clickable banner card with a
// bot icon and a single line of text. All state decisions happen in the
// caller (LighthouseBanner).
const renderBanner = ({
  message,
  href,
  gradient,
  animate = false,
}: BannerConfig) => (
  <Link href={href} className="mb-4 block w-full">
    <div
      className={`w-full rounded-lg ${gradient} shadow-lg transition-all duration-200 hover:shadow-xl focus:outline-none focus:ring-2 focus:ring-opacity-50`}
    >
      <div className="p-6">
        <div className="flex items-center gap-4">
          <div className="flex h-12 w-12 items-center justify-center rounded-lg bg-white/20 backdrop-blur-sm">
            <Bot
              size={24}
              className={`text-white ${animate ? "animate-pulse" : ""}`}
            />
          </div>
          <div className="text-left">
            <p className="text-xl font-semibold text-white">{message}</p>
          </div>
        </div>
      </div>
    </div>
  </Link>
);
/**
 * Server component that decides which Lighthouse banner (if any) to show:
 * 1. Lighthouse not configured  -> "enable Lighthouse" call-to-action.
 * 2. Cached recommendation ready -> clickable recommendation banner.
 * 3. Recommendation being generated -> animated "processing" banner.
 * 4. Configured, nothing cached, not processing -> no banner.
 * Any error renders nothing rather than breaking the page.
 */
export const LighthouseBanner = async () => {
  try {
    // Kick off/refresh the tenant's scan-summary cache before reading it.
    await initializeTenantCache();

    // Check if Lighthouse is configured
    const lighthouseConfig = await getLighthouseConfig();
    if (!lighthouseConfig?.attributes) {
      return renderBanner({
        message: "Enable Lighthouse to secure your cloud with AI insights",
        href: "/lighthouse/config",
        gradient:
          "bg-gradient-to-r from-green-500 to-blue-500 hover:from-green-600 hover:to-blue-600 focus:ring-green-500/50 dark:from-green-600 dark:to-blue-600 dark:hover:from-green-700 dark:hover:to-blue-700 dark:focus:ring-green-400/50",
      });
    }

    // Check if recommendation exists
    const cachedRecommendations = await CacheService.getRecommendations();
    if (
      cachedRecommendations.success &&
      cachedRecommendations.data &&
      cachedRecommendations.data.trim().length > 0
    ) {
      return renderBanner({
        message: cachedRecommendations.data,
        // The chat page reads this query param to replay the cached message.
        href: "/lighthouse?cachedMessage=recommendation",
        gradient:
          "bg-gradient-to-r from-blue-500 to-purple-600 hover:from-blue-600 hover:to-purple-700 focus:ring-blue-500/50 dark:from-blue-600 dark:to-purple-700 dark:hover:from-blue-700 dark:hover:to-purple-800 dark:focus:ring-blue-400/50",
      });
    }

    // Check if recommendation is being processed
    const isProcessing = await CacheService.isRecommendationProcessing();
    if (isProcessing) {
      return renderBanner({
        message: "Lighthouse is reviewing your findings for insights",
        // NOTE(review): empty href on a Next.js <Link> — confirm this is
        // intended; Link expects a valid href, so this banner may misbehave.
        href: "",
        gradient:
          "bg-gradient-to-r from-orange-500 to-yellow-500 hover:from-orange-600 hover:to-yellow-600 focus:ring-orange-500/50 dark:from-orange-600 dark:to-yellow-600 dark:hover:from-orange-700 dark:hover:to-yellow-700 dark:focus:ring-orange-400/50",
        animate: true,
      });
    }

    // Lighthouse configured but no recommendation and not processing - don't show banner
    return null;
  } catch (error) {
    // eslint-disable-next-line no-console
    console.error("Error getting banner state:", error);
    return null;
  }
};

View File

@@ -2,43 +2,64 @@
import { useChat } from "@ai-sdk/react";
import Link from "next/link";
import { useEffect, useRef } from "react";
import { useCallback, useEffect, useRef, useState } from "react";
import { useForm } from "react-hook-form";
import { MemoizedMarkdown } from "@/components/lighthouse/memoized-markdown";
import { CustomButton, CustomTextarea } from "@/components/ui/custom";
import { Form } from "@/components/ui/form";
interface SuggestedAction {
title: string;
label: string;
action: string;
}
import {
SuggestedAction,
suggestedActions,
} from "@/lib/lighthouse/suggested-actions";
interface ChatProps {
hasConfig: boolean;
isActive: boolean;
cachedContent?: string | null;
messageType?: string;
isProcessing: boolean;
questionAnswers: Record<string, string>;
}
interface ChatFormData {
message: string;
}
export const Chat = ({ hasConfig, isActive }: ChatProps) => {
const { messages, handleSubmit, handleInputChange, append, status } = useChat(
{
api: "/api/lighthouse/analyst",
credentials: "same-origin",
experimental_throttle: 100,
sendExtraMessageFields: true,
onFinish: () => {
// Handle chat completion
},
onError: (error) => {
console.error("Chat error:", error);
},
export const Chat = ({
hasConfig,
isActive,
cachedContent,
messageType,
isProcessing,
questionAnswers,
}: ChatProps) => {
const {
messages,
handleSubmit,
handleInputChange,
append,
status,
setMessages,
} = useChat({
api: "/api/lighthouse/analyst",
credentials: "same-origin",
experimental_throttle: 100,
sendExtraMessageFields: true,
onFinish: () => {
// Handle chat completion
},
onError: (error) => {
console.error("Chat error:", error);
},
});
// State for cached response streaming simulation
const [isStreamingCached, setIsStreamingCached] = useState(false);
const [streamingMessageId, setStreamingMessageId] = useState<string | null>(
null,
);
const [currentStreamText, setCurrentStreamText] = useState("");
const form = useForm<ChatFormData>({
defaultValues: {
@@ -50,6 +71,149 @@ export const Chat = ({ hasConfig, isActive }: ChatProps) => {
const messagesContainerRef = useRef<HTMLDivElement | null>(null);
const latestUserMsgRef = useRef<HTMLDivElement | null>(null);
// Function to simulate streaming text
const simulateStreaming = useCallback(
async (text: string, messageId: string) => {
setIsStreamingCached(true);
setStreamingMessageId(messageId);
setCurrentStreamText("");
// Stream word by word with realistic delays
const words = text.split(" ");
let currentText = "";
for (let i = 0; i < words.length; i++) {
currentText += (i > 0 ? " " : "") + words[i];
setCurrentStreamText(currentText);
// Shorter delay between words for faster streaming
const delay = Math.random() * 80 + 40; // 40-120ms delay per word
await new Promise((resolve) => setTimeout(resolve, delay));
}
setIsStreamingCached(false);
setStreamingMessageId(null);
setCurrentStreamText("");
},
[],
);
// Function to handle cached response for suggested actions
const handleCachedResponse = useCallback(
async (action: SuggestedAction) => {
if (!action.questionRef) {
// No question ref, use normal flow
append({
role: "user",
content: action.action,
});
return;
}
try {
if (isProcessing) {
// Processing in progress, fallback to real-time LLM
append({
role: "user",
content: action.action,
});
return;
}
// Check if we have cached answer
const cachedAnswer = questionAnswers[action.questionRef];
if (cachedAnswer) {
// Cache hit - use cached content with streaming simulation
const userMessageId = `user-cached-${Date.now()}`;
const assistantMessageId = `assistant-cached-${Date.now()}`;
const userMessage = {
id: userMessageId,
role: "user" as const,
content: action.action,
};
const assistantMessage = {
id: assistantMessageId,
role: "assistant" as const,
content: "",
};
const updatedMessages = [...messages, userMessage, assistantMessage];
setMessages(updatedMessages);
// Start streaming simulation
setTimeout(() => {
simulateStreaming(cachedAnswer, assistantMessageId);
}, 300);
} else {
// Cache miss/expired/error - fallback to real-time LLM
append({
role: "user",
content: action.action,
});
}
} catch (error) {
console.error("Error handling cached response:", error);
// Fall back to normal API flow
append({
role: "user",
content: action.action,
});
}
},
[
messages,
setMessages,
append,
simulateStreaming,
isProcessing,
questionAnswers,
],
);
// Load cached message on mount if cachedContent is provided
useEffect(() => {
const loadCachedMessage = () => {
if (cachedContent && messages.length === 0) {
// Create different user questions based on message type
let userQuestion = "Tell me more about this";
if (messageType === "recommendation") {
userQuestion =
"Tell me more about the security issues Lighthouse found";
}
// Future: handle other message types
// else if (messageType === "question_1") {
// userQuestion = "Previously cached question here";
// }
// Create message IDs
const userMessageId = `user-cached-${messageType}-${Date.now()}`;
const assistantMessageId = `assistant-cached-${messageType}-${Date.now()}`;
// Add user message
const userMessage = {
id: userMessageId,
role: "user" as const,
content: userQuestion,
};
// Add assistant message with the cached content
const assistantMessage = {
id: assistantMessageId,
role: "assistant" as const,
content: cachedContent,
};
setMessages([userMessage, assistantMessage]);
}
};
loadCachedMessage();
}, [cachedContent, messageType, messages.length, setMessages]);
// Sync form value with chat input
useEffect(() => {
const syntheticEvent = {
@@ -86,6 +250,19 @@ export const Chat = ({ hasConfig, isActive }: ChatProps) => {
return () => document.removeEventListener("keydown", handleKeyDown);
}, [messageValue, onFormSubmit]);
// Update assistant message content during streaming simulation
useEffect(() => {
if (isStreamingCached && streamingMessageId && currentStreamText) {
setMessages((prevMessages) =>
prevMessages.map((msg) =>
msg.id === streamingMessageId
? { ...msg, content: currentStreamText }
: msg,
),
);
}
}, [currentStreamText, isStreamingCached, streamingMessageId, setMessages]);
useEffect(() => {
if (messagesContainerRef.current && latestUserMsgRef.current) {
const container = messagesContainerRef.current;
@@ -96,30 +273,6 @@ export const Chat = ({ hasConfig, isActive }: ChatProps) => {
}
}, [messages]);
const suggestedActions: SuggestedAction[] = [
{
title: "Are there any exposed S3",
label: "buckets in my AWS accounts?",
action: "List exposed S3 buckets in my AWS accounts",
},
{
title: "What is the risk of having",
label: "RDS databases unencrypted?",
action: "What is the risk of having RDS databases unencrypted?",
},
{
title: "What is the CIS 1.10 compliance status",
label: "of my Kubernetes cluster?",
action:
"What is the CIS 1.10 compliance status of my Kubernetes cluster?",
},
{
title: "List my highest privileged",
label: "AWS IAM users with full admin access?",
action: "List my highest privileged AWS IAM users with full admin access",
},
];
// Determine if chat should be disabled
const shouldDisableChat = !hasConfig || !isActive;
@@ -158,10 +311,7 @@ export const Chat = ({ hasConfig, isActive }: ChatProps) => {
key={`suggested-action-${index}`}
ariaLabel={`Send message: ${action.action}`}
onPress={() => {
append({
role: "user",
content: action.action,
});
handleCachedResponse(action); // Use cached response handler
}}
className="hover:bg-muted flex h-auto w-full flex-col items-start justify-start rounded-xl border bg-gray-50 px-4 py-3.5 text-left font-sans text-sm dark:bg-gray-900"
>
@@ -211,10 +361,12 @@ export const Chat = ({ hasConfig, isActive }: ChatProps) => {
</div>
);
})}
{status === "submitted" && (
{(status === "submitted" || isStreamingCached) && (
<div className="flex justify-start">
<div className="bg-muted max-w-[80%] rounded-lg px-4 py-2">
<div className="animate-pulse">Thinking...</div>
<div className="animate-pulse">
{isStreamingCached ? "" : "Thinking..."}
</div>
</div>
</div>
)}
@@ -245,10 +397,18 @@ export const Chat = ({ hasConfig, isActive }: ChatProps) => {
ariaLabel={
status === "submitted" ? "Stop generation" : "Send message"
}
isDisabled={status === "submitted" || !messageValue?.trim()}
isDisabled={
status === "submitted" ||
isStreamingCached ||
!messageValue?.trim()
}
className="flex h-10 w-10 flex-shrink-0 items-center justify-center rounded-lg bg-primary p-2 text-primary-foreground hover:bg-primary/90 disabled:opacity-50 dark:bg-primary/90"
>
{status === "submitted" ? <span></span> : <span></span>}
{status === "submitted" || isStreamingCached ? (
<span></span>
) : (
<span></span>
)}
</CustomButton>
</div>
</form>

View File

@@ -1,2 +1,3 @@
export * from "./banner";
export * from "./chat";
export * from "./chatbot-config";

434
ui/lib/lighthouse/cache.ts Normal file
View File

@@ -0,0 +1,434 @@
import Valkey from "iovalkey";
import { auth } from "@/auth.config";
import {
generateBannerFromDetailed,
generateDetailedRecommendation,
generateQuestionAnswers,
} from "./recommendations";
import { suggestedActions } from "./suggested-actions";
import {
compareProcessedScanIds,
generateSecurityScanSummary,
getCompletedScansLast24h,
} from "./summary";
// Module-level singleton so every cache call shares one Valkey connection.
let valkeyClient: Valkey | null = null;

/**
 * Lazily create (and memoize) the shared Valkey client.
 *
 * Connection settings come from VALKEY_HOST / VALKEY_PORT, with the port
 * defaulting to 6379. `lazyConnect` defers the TCP connection until the
 * first command is issued, so construction never blocks.
 */
export async function getValkeyClient(): Promise<Valkey> {
  if (!valkeyClient) {
    valkeyClient = new Valkey({
      host: process.env.VALKEY_HOST,
      // Explicit radix 10 so the env var is always parsed as decimal.
      port: parseInt(process.env.VALKEY_PORT || "6379", 10),
      connectTimeout: 5000,
      lazyConnect: true,
    });
  }
  return valkeyClient;
}
/**
 * Tenant-scoped cache layer on top of Valkey for Lighthouse data.
 *
 * Every key is namespaced as `_lighthouse:<tenantId>:...`, with the tenant id
 * resolved from the auth session. Methods fail soft: on Valkey or auth errors
 * they return a neutral value (null / false / [] / { success: false }) instead
 * of throwing, so cache problems never break page rendering.
 */
export class CacheService {
  // Resolve the tenant id from the auth session; null when unauthenticated.
  private static async getTenantId(): Promise<string | null> {
    const session = await auth();
    return session?.tenantId || null;
  }

  /**
   * Try to take a per-tenant processing lock via SET ... EX <ttl> NX.
   * Returns true only when this caller created the lock key.
   * NOTE(review): releaseProcessingLock deletes unconditionally, so if a
   * holder outlives the TTL another process can acquire and then have its
   * lock deleted by the first holder — confirm this race is acceptable.
   */
  private static async acquireProcessingLock(
    tenantId: string,
    lockKey: string,
    lockTtlSeconds: number = 300,
  ): Promise<boolean> {
    try {
      const client = await getValkeyClient();
      const fullLockKey = `_lighthouse:${tenantId}:lock:${lockKey}`;
      // NX: only set if the key does not exist; EX: auto-expire after TTL.
      const result = await client.set(
        fullLockKey,
        Date.now().toString(),
        "EX",
        lockTtlSeconds,
        "NX",
      );
      return result === "OK";
    } catch (error) {
      // Treat any cache failure as "lock not acquired".
      return false;
    }
  }

  // Delete the lock key; errors are ignored (the TTL is the safety net).
  private static async releaseProcessingLock(
    tenantId: string,
    lockKey: string,
  ): Promise<void> {
    try {
      const client = await getValkeyClient();
      const fullLockKey = `_lighthouse:${tenantId}:lock:${lockKey}`;
      await client.del([fullLockKey]);
    } catch (error) {
      // Silent failure
    }
  }

  /**
   * Read the list of already-processed scan ids for this tenant.
   * Stored as a single comma-separated string; returns [] when missing,
   * unauthenticated, or on any cache error.
   */
  static async getProcessedScanIds(): Promise<string[]> {
    const tenantId = await this.getTenantId();
    if (!tenantId) return [];
    try {
      const client = await getValkeyClient();
      const dataKey = `_lighthouse:${tenantId}:processed_scan_ids`;
      const result = await client.get(dataKey);
      if (!result) return [];
      const scanIdsString = result.toString();
      return scanIdsString ? scanIdsString.split(",") : [];
    } catch (error) {
      return [];
    }
  }

  /**
   * Persist the processed scan id list (comma-joined, no TTL).
   * Returns false when unauthenticated or the write fails.
   */
  static async setProcessedScanIds(scanIds: string[]): Promise<boolean> {
    const tenantId = await this.getTenantId();
    if (!tenantId) return false;
    try {
      const client = await getValkeyClient();
      const dataKey = `_lighthouse:${tenantId}:processed_scan_ids`;
      const scanIdsString = scanIds.join(",");
      await client.set(dataKey, scanIdsString);
      return true;
    } catch (error) {
      return false;
    }
  }

  /**
   * Summarize the given scans under a tenant-wide lock (20 min TTL) so only
   * one process does the expensive work at a time.
   *
   * - Lock not acquired => another process is working; returns success with
   *   no data.
   * - Summary produced => cached as "scan-summary", scans marked processed,
   *   and recommendation generation is kicked off fire-and-forget.
   * - No summary => scans are still marked processed to avoid reprocessing.
   */
  static async processScansWithLock(scanIds: string[]): Promise<{
    success: boolean;
    data?: string;
  }> {
    const tenantId = await this.getTenantId();
    if (!tenantId) return { success: false };
    const lockKey = "scan-processing";
    const lockTtlSeconds = 1200; // 20 minutes
    try {
      // Try to acquire processing lock
      const lockAcquired = await this.acquireProcessingLock(
        tenantId,
        lockKey,
        lockTtlSeconds,
      );
      if (!lockAcquired) {
        // Processing is happening in background, return success but no data
        return { success: true };
      }
      try {
        // Generate the scan summary for the provided scan IDs
        const scanSummary = await generateSecurityScanSummary(scanIds);
        // Only process if we have valid scan summary
        if (scanSummary) {
          // Cache the scan summary
          await this.set("scan-summary", scanSummary);
          // Mark scans as processed
          await this.setProcessedScanIds(scanIds);
          // Generate and cache recommendations asynchronously
          // (intentionally not awaited — failures are only logged).
          this.generateAndCacheRecommendations(scanSummary).catch((error) => {
            console.error(
              "Background recommendation generation failed:",
              error,
            );
          });
          return {
            success: true,
            data: scanSummary,
          };
        } else {
          // Even if no summary, mark scans as processed to avoid reprocessing
          await this.setProcessedScanIds(scanIds);
        }
        return { success: true };
      } finally {
        await this.releaseProcessingLock(tenantId, lockKey);
      }
    } catch (error) {
      console.error("Error processing scans with lock:", error);
      return { success: false };
    }
  }

  // Generic cache methods for future use

  /** Read a tenant-namespaced string value; null when absent or on error. */
  static async get(key: string): Promise<string | null> {
    const tenantId = await this.getTenantId();
    if (!tenantId) return null;
    try {
      const client = await getValkeyClient();
      const fullKey = `_lighthouse:${tenantId}:${key}`;
      const result = await client.get(fullKey);
      return result?.toString() || null;
    } catch (error) {
      return null;
    }
  }

  /**
   * Write a tenant-namespaced string value, with an optional TTL in seconds
   * (no TTL => the key persists until overwritten or deleted).
   */
  static async set(
    key: string,
    value: string,
    ttlSeconds?: number,
  ): Promise<boolean> {
    const tenantId = await this.getTenantId();
    if (!tenantId) return false;
    try {
      const client = await getValkeyClient();
      const fullKey = `_lighthouse:${tenantId}:${key}`;
      if (ttlSeconds) {
        await client.set(fullKey, value, "EX", ttlSeconds);
      } else {
        await client.set(fullKey, value);
      }
      return true;
    } catch (error) {
      return false;
    }
  }

  /**
   * Read the cached banner recommendation.
   * `{ success: true, data: undefined }` means "no recommendation cached";
   * `{ success: false }` means the cache itself failed.
   */
  static async getRecommendations(): Promise<{
    success: boolean;
    data?: string;
  }> {
    const tenantId = await this.getTenantId();
    if (!tenantId) return { success: false };
    try {
      const client = await getValkeyClient();
      const dataKey = `_lighthouse:${tenantId}:recommendations`;
      const cachedData = await client.get(dataKey);
      if (cachedData) {
        return {
          success: true,
          data: cachedData.toString(),
        };
      }
      return { success: true, data: undefined };
    } catch (error) {
      return { success: false };
    }
  }

  /**
   * Generate + cache the recommendation set from a scan summary, guarded by
   * a lock (double-checked: read, lock, re-read) so the LLM work runs once.
   *
   * Produces and caches: the banner recommendation, the detailed
   * recommendation (under cached-messages:recommendation), and per-question
   * answers (24h TTL). Both banner and detailed text must be non-empty or
   * nothing is cached.
   */
  static async generateAndCacheRecommendations(scanSummary: string): Promise<{
    success: boolean;
    data?: string;
  }> {
    const tenantId = await this.getTenantId();
    if (!tenantId) return { success: false };
    const lockKey = "recommendations-processing";
    const dataKey = `_lighthouse:${tenantId}:recommendations`;
    const detailedDataKey = `_lighthouse:${tenantId}:cached-messages:recommendation`;
    try {
      const client = await getValkeyClient();
      // Check if data already exists
      const existingData = await client.get(dataKey);
      if (existingData) {
        return {
          success: true,
          data: existingData.toString(),
        };
      }
      // Lock TTL 10 minutes
      const lockAcquired = await this.acquireProcessingLock(
        tenantId,
        lockKey,
        600,
      );
      if (!lockAcquired) {
        // Processing is happening in background, return success but no data
        return { success: true };
      }
      try {
        // Double-check after acquiring lock
        const doubleCheckData = await client.get(dataKey);
        if (doubleCheckData) {
          return {
            success: true,
            data: doubleCheckData.toString(),
          };
        }
        // Generate detailed recommendation first
        const detailedRecommendation =
          await generateDetailedRecommendation(scanSummary);
        if (!detailedRecommendation.trim()) {
          return { success: true, data: "" };
        }
        // Generate banner from detailed content
        const bannerRecommendation = await generateBannerFromDetailed(
          detailedRecommendation,
        );
        // Both must succeed - no point in detailed without banner
        if (!bannerRecommendation.trim()) {
          return { success: true, data: "" };
        }
        // Generate question answers
        const questionAnswers = await generateQuestionAnswers(suggestedActions);
        // Cache both versions
        await client.set(dataKey, bannerRecommendation);
        await client.set(detailedDataKey, detailedRecommendation);
        // Cache question answers with 24h TTL
        for (const [questionRef, answer] of Object.entries(questionAnswers)) {
          const questionKey = `_lighthouse:${tenantId}:cached-messages:question_${questionRef}`;
          await client.set(questionKey, answer, "EX", 86400); // 24 hours
        }
        return {
          success: true,
          data: bannerRecommendation,
        };
      } finally {
        await this.releaseProcessingLock(tenantId, lockKey);
      }
    } catch (error) {
      console.error("Error generating and caching recommendations:", error);
      return { success: false };
    }
  }

  /**
   * True while the recommendations-processing lock key exists, i.e. a
   * recommendation generation run is (probably) in flight for this tenant.
   */
  static async isRecommendationProcessing(): Promise<boolean> {
    const tenantId = await this.getTenantId();
    if (!tenantId) return false;
    try {
      const client = await getValkeyClient();
      const lockKey = `_lighthouse:${tenantId}:lock:recommendations-processing`;
      const result = await client.get(lockKey);
      return result !== null;
    } catch (error) {
      return false;
    }
  }

  // New method to get cached message by type
  // `{ success: true, data: undefined }` means no message of that type cached.
  static async getCachedMessage(messageType: string): Promise<{
    success: boolean;
    data?: string;
  }> {
    const tenantId = await this.getTenantId();
    if (!tenantId) return { success: false };
    try {
      const client = await getValkeyClient();
      const dataKey = `_lighthouse:${tenantId}:cached-messages:${messageType}`;
      const cachedData = await client.get(dataKey);
      if (cachedData) {
        return {
          success: true,
          data: cachedData.toString(),
        };
      }
      return { success: true, data: undefined };
    } catch (error) {
      console.error(`Error getting cached message ${messageType}:`, error);
      return { success: false };
    }
  }

  // New method to set cached message by type (no TTL).
  static async setCachedMessage(
    messageType: string,
    content: string,
  ): Promise<boolean> {
    const tenantId = await this.getTenantId();
    if (!tenantId) return false;
    try {
      const client = await getValkeyClient();
      const dataKey = `_lighthouse:${tenantId}:cached-messages:${messageType}`;
      await client.set(dataKey, content);
      return true;
    } catch (error) {
      console.error(`Error caching message type ${messageType}:`, error);
      return false;
    }
  }
}
export async function initializeTenantCache(): Promise<{
success: boolean;
data?: string;
scanSummary?: string;
}> {
try {
// Quick pre-check: Do we need to process anything?
const currentScanIds = await getCompletedScansLast24h();
if (currentScanIds.length === 0) {
// No scans in last 24h, return existing cached data if any
const existingSummary = await CacheService.get("scan-summary");
return {
success: true,
data: existingSummary || undefined,
scanSummary: existingSummary || undefined,
};
}
// Check if we need to process these scans
const processedScanIds = await CacheService.getProcessedScanIds();
const shouldProcess = !compareProcessedScanIds(
currentScanIds,
processedScanIds,
);
if (!shouldProcess) {
// Scans already processed, return existing cached data
const existingSummary = await CacheService.get("scan-summary");
return {
success: true,
data: existingSummary || undefined,
scanSummary: existingSummary || undefined,
};
}
// New scans found, trigger full processing with lock
const result = await CacheService.processScansWithLock(currentScanIds);
return {
success: result.success,
data: result.data,
scanSummary: result.data,
};
} catch (error) {
console.error("Error initializing tenant cache:", error);
return {
success: false,
};
}
}

View File

@@ -127,8 +127,8 @@ You operate in an agent loop, iterating through these steps:
- Fetches information related to:
- All findings data across providers. Supports filtering by severity, status, etc.
- Unique metadata values from findings
- Remediation for checks
- Check IDs supported by different provider types
- Available checks for a specific provider (aws, gcp, azure, kubernetes, etc)
- Details of a specific check including details about severity, risk, remediation, compliances that are associated with the check, etc
### roles_agent

View File

@@ -0,0 +1,199 @@
import { ChatOpenAI } from "@langchain/openai";
import { getAIKey, getLighthouseConfig } from "@/actions/lighthouse/lighthouse";
import { type SuggestedAction } from "./suggested-actions";
import { initLighthouseWorkflow } from "./workflow";
/**
 * Ask the configured LLM for a detailed, single-focus security recommendation
 * based on a scan summary.
 *
 * Returns "" (never throws) when the AI key or Lighthouse config is missing,
 * or when the LLM call fails — callers treat empty string as "no result".
 */
export const generateDetailedRecommendation = async (
  scanSummary: string,
): Promise<string> => {
  try {
    const apiKey = await getAIKey();
    if (!apiKey) {
      return "";
    }
    const lighthouseConfig = await getLighthouseConfig();
    if (!lighthouseConfig?.attributes) {
      return "";
    }
    const config = lighthouseConfig.attributes;
    const businessContext = config.business_context || "";
    const llm = new ChatOpenAI({
      // Fall back to gpt-4o / temperature 0 when unset in the config.
      model: config.model || "gpt-4o",
      temperature: config.temperature || 0,
      maxTokens: 1500,
      apiKey: apiKey,
    });
    let systemPrompt = `You are a cloud security analyst providing focused, actionable recommendations.
IMPORTANT: Focus on ONE of these high-impact opportunities:
1. The most CRITICAL finding that needs immediate attention
2. A pattern where fixing one check ID resolves many findings (e.g., "Fix aws_s3_bucket_public_access_block to resolve 15 findings")
3. The issue with highest business impact
Your response should be a comprehensive analysis of this ONE focus area including:
**Issue Description:**
- What exactly is the problem
- Why it's critical or high-impact
- How many findings it affects
**Affected Resources:**
- Specific resources, services, or configurations involved
- Number of affected resources
**Business Impact:**
- Security risks and potential consequences
- Compliance violations (mention specific frameworks if applicable)
- Operational impact
**Remediation Steps:**
- Clear, step-by-step instructions
- Specific commands or configuration changes where applicable
- Expected outcome after fix
Be specific with numbers (e.g., "affects 12 S3 buckets", "resolves 15 findings"). Focus on actionable guidance that will have the biggest security improvement.`;
    // Tenant-specific business context is appended so the model can weigh
    // impact against what actually matters to this organization.
    if (businessContext) {
      systemPrompt += `\n\nBUSINESS CONTEXT: ${businessContext}`;
    }
    systemPrompt += `\n\nSecurity Scan Summary:\n${scanSummary}`;
    // Single system message; no user turn is needed for this one-shot prompt.
    const response = await llm.invoke([
      {
        role: "system",
        content: systemPrompt,
      },
    ]);
    return response.content.toString().trim();
  } catch (error) {
    console.error("Error generating detailed recommendation:", error);
    return "";
  }
};
/**
 * Condense a detailed recommendation into a short (<= 80 char) banner line
 * via the configured LLM.
 *
 * Returns "" (never throws) when the AI key or Lighthouse config is missing,
 * or when the LLM call fails — callers treat empty string as "no banner".
 */
export const generateBannerFromDetailed = async (
  detailedRecommendation: string,
): Promise<string> => {
  try {
    const apiKey = await getAIKey();
    if (!apiKey) {
      return "";
    }
    const lighthouseConfig = await getLighthouseConfig();
    if (!lighthouseConfig?.attributes) {
      return "";
    }
    const config = lighthouseConfig.attributes;
    const llm = new ChatOpenAI({
      // Fall back to gpt-4o / temperature 0; 100 tokens is plenty for a
      // one-line banner.
      model: config.model || "gpt-4o",
      temperature: config.temperature || 0,
      maxTokens: 100,
      apiKey: apiKey,
    });
    const systemPrompt = `Create a short, engaging banner message from this detailed security analysis.
REQUIREMENTS:
- Maximum 80 characters
- Include "Lighthouse" in the message
- Focus on the key insight or opportunity
- Make it clickable and business-focused
- Use action words like "detected", "found", "identified"
- Don't end with punctuation
EXAMPLES:
- Lighthouse found fixing 1 S3 check resolves 15 findings
- Lighthouse detected critical RDS encryption gaps
- Lighthouse identified 3 exposed databases needing attention
Based on this detailed analysis, create one engaging banner message:
${detailedRecommendation}`;
    const response = await llm.invoke([
      {
        role: "system",
        content: systemPrompt,
      },
    ]);
    return response.content.toString().trim();
  } catch (error) {
    console.error(
      "Error generating banner from detailed recommendation:",
      error,
    );
    return "";
  }
};
/**
 * Legacy one-shot helper kept for backward compatibility: produces the
 * banner text directly from a scan summary by chaining the detailed
 * recommendation and banner generation steps. Returns "" when the detailed
 * step yields nothing.
 */
export const generateRecommendation = async (
  scanSummary: string,
): Promise<string> => {
  const detailedText = await generateDetailedRecommendation(scanSummary);
  return detailedText ? generateBannerFromDetailed(detailedText) : "";
};
/**
 * Pre-compute answers for every suggested question that carries a
 * questionRef, using the Lighthouse agent workflow.
 *
 * Per-question failures are logged and skipped, so the returned map only
 * contains questions that produced an answer. Returns an empty map when the
 * AI key is missing or the workflow cannot be initialized.
 */
export const generateQuestionAnswers = async (
  questions: SuggestedAction[],
): Promise<Record<string, string>> => {
  const answersByRef: Record<string, string> = {};

  try {
    const apiKey = await getAIKey();
    if (!apiKey) {
      return answersByRef;
    }

    // Build the agent workflow once and reuse it for every question.
    const workflow = await initLighthouseWorkflow();

    for (const question of questions) {
      if (!question.questionRef) continue;

      try {
        const result = await workflow.invoke({
          messages: [{ role: "user", content: question.action }],
        });

        // The last message in the transcript is the workflow's final answer.
        const lastMessage = result.messages[result.messages.length - 1];
        if (lastMessage?.content) {
          answersByRef[question.questionRef] = lastMessage.content
            .toString()
            .trim();
        }
      } catch (error) {
        console.error(
          `Error generating answer for question ${question.questionRef}:`,
          error,
        );
      }
    }
  } catch (error) {
    console.error("Error generating question answers:", error);
  }

  return answersByRef;
};

View File

@@ -0,0 +1,33 @@
/**
 * A pre-canned prompt offered to the user.
 * `action` is the full prompt text sent to the assistant; `questionRef`,
 * when present, is the key under which a pre-generated answer is stored
 * (see generateQuestionAnswers).
 */
export interface SuggestedAction {
  // First display line of the suggestion — presumably a card headline; confirm against the UI.
  title: string;
  // Second display line continuing the suggestion text.
  label: string;
  // Complete prompt submitted as the user message.
  action: string;
  // Stable lookup key for pre-generated answers; optional.
  questionRef?: string;
}
// Default set of suggested prompts. Each entry's title/label pair splits
// one prompt across two display lines, while `action` carries the full
// prompt actually sent; `questionRef` keys the pre-generated answer
// produced by generateQuestionAnswers.
export const suggestedActions: SuggestedAction[] = [
  {
    title: "Are there any exposed S3",
    label: "buckets in my AWS accounts?",
    action: "List exposed S3 buckets in my AWS accounts",
    questionRef: "1",
  },
  {
    title: "What is the risk of having",
    label: "RDS databases unencrypted?",
    action: "What is the risk of having RDS databases unencrypted?",
    questionRef: "2",
  },
  {
    title: "What is the CIS 1.10 compliance status",
    label: "of my Kubernetes cluster?",
    action: "What is the CIS 1.10 compliance status of my Kubernetes cluster?",
    questionRef: "3",
  },
  {
    title: "List my highest privileged",
    label: "AWS IAM users with full admin access?",
    action: "List my highest privileged AWS IAM users with full admin access",
    questionRef: "4",
  },
];

View File

@@ -0,0 +1,305 @@
import { getLighthouseCheckDetails } from "@/actions/lighthouse/checks";
import { getLighthouseFindings } from "@/actions/lighthouse/findings";
import { getScans } from "@/actions/scans/scans";
import { CheckDetails, FindingSummary } from "@/types/lighthouse/summary";
import { getNewFailedFindingsSummary } from "./tools/findings";
/**
 * Lists the IDs of scans that completed during the last 24 hours.
 *
 * @returns Scan UUIDs (up to 50, newest first), or [] when none are found.
 */
export const getCompletedScansLast24h = async (): Promise<string[]> => {
  // Window start = now minus 24 hours; setHours handles day/month rollover.
  const windowStart = new Date();
  windowStart.setHours(windowStart.getHours() - 24);
  const response = await getScans({
    page: 1,
    pageSize: 50,
    filters: {
      "fields[scans]": "completed_at",
      "filter[state]": "completed",
      "filter[started_at__gte]": windowStart.toISOString(),
    },
    sort: "-updated_at",
  });
  const scans = response?.data;
  if (!scans || scans.length === 0) {
    return [];
  }
  return scans.map((scan: any) => scan.id);
};
/**
 * Checks whether two lists of scan IDs contain exactly the same IDs,
 * ignoring order but counting duplicates (multiset equality).
 *
 * @param currentScanIds - IDs from the latest lookup.
 * @param processedScanIds - IDs recorded as already processed.
 * @returns true when both lists hold the same IDs.
 */
export const compareProcessedScanIds = (
  currentScanIds: string[],
  processedScanIds: string[],
): boolean => {
  // Different lengths can never match — check before paying for two sorts.
  if (currentScanIds.length !== processedScanIds.length) {
    return false;
  }
  // Sort copies (never the caller's arrays) and compare element-wise.
  const sortedCurrent = [...currentScanIds].sort();
  const sortedProcessed = [...processedScanIds].sort();
  return sortedCurrent.every((id, index) => id === sortedProcessed[index]);
};
// Fetches up to `limit` failed, unmuted findings across the given scans
// (server-sorted by severity) and maps each API row to a FindingSummary.
// Note: rows are not aggregated here — every entry has count 1.
const getTopFailedFindingsSummary = async (
  scanIds: string[],
  limit: number = 10,
): Promise<FindingSummary[]> => {
  const response = await getLighthouseFindings({
    page: 1,
    pageSize: limit,
    sort: "severity",
    filters: {
      "fields[findings]": "check_id,severity",
      "filter[scan__in]": scanIds.join(","),
      "filter[status]": "FAIL",
      "filter[muted]": "false",
    },
  });
  const rows = response?.data;
  if (!rows) {
    return [];
  }
  return rows.map((finding: any) => {
    const { check_id: checkId, severity } = finding.attributes;
    return { checkId, severity, count: 1, findingIds: [finding.id] };
  });
};
// Helper function to collect new failed findings across multiple scans
const collectNewFailedFindings = async (
scanIds: string[],
): Promise<Record<string, FindingSummary[]>> => {
const findingsByScan: Record<string, FindingSummary[]> = {};
for (const scanId of scanIds) {
try {
const newFailedFindingsSummary =
await getNewFailedFindingsSummary(scanId);
if (Object.keys(newFailedFindingsSummary).length > 0) {
const scanFindings: FindingSummary[] = [];
// Convert to FindingSummary format
Object.entries(newFailedFindingsSummary).forEach(
([severity, checks]) => {
Object.entries(checks).forEach(([checkId, summary]) => {
scanFindings.push({
checkId,
severity,
count: summary.count,
findingIds: summary.finding_ids,
});
});
},
);
if (scanFindings.length > 0) {
findingsByScan[scanId] = scanFindings;
}
}
} catch (error) {
console.error(
`Error fetching new failed findings for scan ${scanId}:`,
error,
);
}
}
return findingsByScan;
};
// Helper function to enrich findings with check details.
// Fetches each distinct check ID at most once; when a lookup throws, a
// stub entry (title = check ID, empty text) is stored so downstream
// formatting never hits a missing key.
const enrichFindingsWithCheckDetails = async (
  findings: FindingSummary[],
): Promise<Map<string, CheckDetails>> => {
  const detailsByCheckId = new Map<string, CheckDetails>();
  // De-duplicate the check IDs up front.
  const uniqueCheckIds = new Set(findings.map((finding) => finding.checkId));
  for (const checkId of uniqueCheckIds) {
    try {
      const details = await getLighthouseCheckDetails({ checkId });
      if (details) {
        detailsByCheckId.set(checkId, details);
      }
    } catch (error) {
      console.error(`Error fetching check details for ${checkId}:`, error);
      // Fall back to a placeholder so the caller can still render something.
      detailsByCheckId.set(checkId, {
        id: checkId,
        title: checkId,
        description: "",
        risk: "",
        remediation: {},
      });
    }
  }
  return detailsByCheckId;
};
// Relative ordering for known severities; lower rank sorts first.
const SEVERITY_RANK: Record<string, number> = {
  critical: 0,
  high: 1,
  medium: 2,
  low: 3,
  informational: 4,
};

/**
 * Returns the findings ordered from most to least severe.
 *
 * Sorts a copy instead of mutating the caller's array, and ranks any
 * unrecognized severity label last (the previous subtraction comparator
 * produced NaN for unknown labels, leaving the order unspecified).
 *
 * @param findings - Finding summaries to order.
 * @returns A new array sorted critical -> informational.
 */
const sortFindingsBySeverity = (
  findings: FindingSummary[],
): FindingSummary[] => {
  const rankOf = (severity: string): number =>
    SEVERITY_RANK[severity] ?? Number.MAX_SAFE_INTEGER;
  return [...findings].sort((a, b) => rankOf(a.severity) - rankOf(b.severity));
};
// Remediation channels in display order, paired with their labels.
const REMEDIATION_CHANNELS: Array<
  [keyof CheckDetails["remediation"], string]
> = [
  ["terraform", "Terraform"],
  ["cli", "AWS CLI"],
  ["nativeiac", "Native IAC"],
  ["other", "Other"],
  ["wui", "WUI"],
];

/**
 * Renders one finding as a markdown fragment: title, severity, check
 * summary, failure count, finding IDs, and every remediation channel the
 * check provides (or a "none available" line when it provides none).
 *
 * @param finding - The finding summary to render.
 * @param checkDetailsMap - Check metadata keyed by check ID; missing
 *   entries fall back to the check ID / placeholder text.
 * @returns Markdown text ending with a blank line.
 */
const buildSingleFindingDetails = (
  finding: FindingSummary,
  checkDetailsMap: Map<string, CheckDetails>,
): string => {
  const checkDetails = checkDetailsMap.get(finding.checkId);
  let detailsText = "";
  detailsText += `**Title:** ${checkDetails?.title || finding.checkId}\n`;
  detailsText += `**Severity:** ${finding.severity.toUpperCase()}\n`;
  detailsText += `**Check Summary:** ${checkDetails?.description || "Description not available"}\n`;
  detailsText += `**Number of failed findings:** ${finding.count}\n`;
  detailsText += `**Finding IDs:** ${finding.findingIds.join(", ")}\n`;
  detailsText += "**Remediation:**\n";
  const remediation = checkDetails?.remediation;
  // One table-driven pass replaces five copy-pasted branches; output is
  // identical, including channel order and labels.
  let hasRemediation = false;
  for (const [key, label] of REMEDIATION_CHANNELS) {
    const channel = remediation?.[key];
    if (!channel) continue;
    hasRemediation = true;
    detailsText += `- ${label}: ${channel.description}\n`;
    if (channel.reference) {
      detailsText += ` Reference: ${channel.reference}\n`;
    }
  }
  if (!hasRemediation) {
    detailsText += "- No specific remediation commands available\n";
  }
  detailsText += "\n";
  return detailsText;
};
// Generates a summary of failed findings for the provided scan IDs
// Returns an empty string if no failed findings in any scan or unexpected error
// Else it returns a string with the summary of the failed findings
export const generateSecurityScanSummary = async (
  scanIds: string[],
): Promise<string> => {
  try {
    // Collect new failed findings by scan (per-scan failures are logged
    // inside the helper and simply drop that scan from the map)
    const newFindingsByScan = await collectNewFailedFindings(scanIds);
    // Get top failed findings across all scans
    let topFailedFindings: FindingSummary[] = [];
    try {
      topFailedFindings = await getTopFailedFindingsSummary(scanIds, 10);
    } catch (error) {
      // Best-effort: a failure here only loses the "top findings" section
      console.error("Error fetching top failed findings:", error);
    }
    // Combine all findings for check details enrichment
    const newFindings = Object.values(newFindingsByScan).flat();
    const allFindings = [...newFindings, ...topFailedFindings];
    // If no findings at all, return empty string
    if (allFindings.length === 0) {
      return "";
    }
    // Enrich all findings with check details (one fetch per distinct check)
    const checkDetailsMap = await enrichFindingsWithCheckDetails(allFindings);
    // Build the summary as markdown
    let summaryText = "";
    // Header: single-scan runs get a terse heading; multi-scan runs list all IDs
    if (scanIds.length === 1) {
      summaryText += `# Scan ID: ${scanIds[0]}\n\n`;
    } else {
      summaryText += `# Scans processed (${scanIds.length} scans from last 24h)\n`;
      summaryText += `**Scan IDs:** ${scanIds.join(", ")}\n\n`;
    }
    // New findings section (if any), grouped per scan, each group sorted
    // from most to least severe
    if (newFindings.length > 0) {
      summaryText += "## New Failed Findings by Scan\n";
      summaryText += `${newFindings.length} new findings detected.\n\n`;
      Object.entries(newFindingsByScan).forEach(([scanId, scanFindings]) => {
        summaryText += `### Scan ID: ${scanId}\n`;
        const sortedScanFindings = sortFindingsBySeverity(scanFindings);
        for (const finding of sortedScanFindings) {
          summaryText += buildSingleFindingDetails(finding, checkDetailsMap);
        }
        summaryText += "\n";
      });
    }
    // Top findings section, also sorted by severity
    if (topFailedFindings.length > 0) {
      summaryText += "## Top Failed Findings Across All Scans\n";
      summaryText += `Showing top ${topFailedFindings.length} critical findings.\n\n`;
      const sortedTopFindings = sortFindingsBySeverity(topFailedFindings);
      for (const finding of sortedTopFindings) {
        summaryText += buildSingleFindingDetails(finding, checkDetailsMap);
      }
    }
    return summaryText;
  } catch (error) {
    // Summary generation is non-critical: swallow the error and signal
    // "no summary" with an empty string
    console.error("Error generating security scan summary:", error);
    return "";
  }
};

View File

@@ -1,11 +1,22 @@
import { tool } from "@langchain/core/tools";
import { z } from "zod";
import { getFindings, getMetadataInfo } from "@/actions/findings";
import { getMetadataInfo } from "@/actions/findings";
import {
getLighthouseFindings,
getLighthouseLatestFindings,
} from "@/actions/lighthouse/findings";
import { getFindingsSchema, getMetadataInfoSchema } from "@/types/lighthouse";
export const getFindingsTool = tool(
async ({ page, pageSize, query, sort, filters }) => {
return await getFindings({ page, pageSize, query, sort, filters });
return await getLighthouseFindings({
page,
pageSize,
query,
sort,
filters,
});
},
{
name: "getFindings",
@@ -15,6 +26,104 @@ export const getFindingsTool = tool(
},
);
// LangChain tool wrapper exposing the latest-findings endpoint to the
// agent; pagination, search, sort and filters pass straight through.
export const getLatestFindingsTool = tool(
  async ({ page, pageSize, query, sort, filters }) => {
    return await getLighthouseLatestFindings({
      page,
      pageSize,
      query,
      sort,
      filters,
    });
  },
  {
    name: "getLatestFindings",
    description:
      "Retrieves a list of the latest findings from the latest scans of all providers with options for filtering by various criteria.",
    // getLatestFindings uses the same schema as getFindings
    schema: getFindingsSchema,
  },
);
// Function to get a summary of new and changed failed findings that appeared in a particular scan.
// Pages through every matching finding, then groups them as
// severity -> check_id -> { count, finding_ids }.
export const getNewFailedFindingsSummary = async (scanId: string) => {
  const pageSize = 100;
  const collected: any[] = [];
  let totalPages = 1;

  // Fetch all pages of new/changed, unmuted FAIL findings for this scan.
  for (let pageNumber = 1; pageNumber <= totalPages; pageNumber++) {
    const response = await getLighthouseFindings({
      page: pageNumber,
      pageSize: pageSize,
      sort: "severity",
      filters: {
        "fields[findings]": "check_id,severity",
        "filter[scan]": scanId,
        "filter[status]": "FAIL",
        "filter[muted]": "false",
        "filter[delta__in]": "new,changed",
      },
    });
    if (response?.data) {
      collected.push(...response.data);
    }
    // The page count is only read from the first response.
    if (pageNumber === 1 && response?.meta?.pagination) {
      totalPages = response.meta.pagination.pages;
    }
  }

  // Group the flat list into the nested summary shape.
  const summary: Record<
    string,
    Record<string, { count: number; finding_ids: string[] }>
  > = {};
  for (const finding of collected) {
    const { severity, check_id: checkId } = finding.attributes;
    const bySeverity = (summary[severity] ??= {});
    const byCheck = (bySeverity[checkId] ??= { count: 0, finding_ids: [] });
    byCheck.count++;
    byCheck.finding_ids.push(finding.id);
  }
  return summary;
};
// LangChain tool wrapper around getNewFailedFindingsSummary so the agent
// can request the per-scan summary of new/changed failed findings.
export const getNewFailedFindingsSummaryTool = tool(
  async ({ scanId }) => {
    return await getNewFailedFindingsSummary(scanId);
  },
  {
    name: "getNewFailedFindingsSummary",
    description:
      "Fetches summary of new and changed failed findings that appeared in a particular scan. Summary includes count of findings by severity, check_id and finding_ids.",
    schema: z.object({
      scanId: z
        .string()
        .describe("The UUID of the scan to fetch failed findings summary for."),
    }),
  },
);
export const getMetadataInfoTool = tool(
async ({ query, sort, filters }) => {
return await getMetadataInfo({ query, sort, filters });

View File

@@ -24,7 +24,9 @@ import {
} from "@/lib/lighthouse/tools/compliances";
import {
getFindingsTool,
getLatestFindingsTool,
getMetadataInfoTool,
getNewFailedFindingsSummaryTool,
} from "@/lib/lighthouse/tools/findings";
import {
getFindingsBySeverityTool,
@@ -101,7 +103,9 @@ export async function initLighthouseWorkflow() {
llm: llm,
tools: [
getFindingsTool,
getLatestFindingsTool,
getMetadataInfoTool,
getNewFailedFindingsSummaryTool,
getProviderChecksTool,
getProviderCheckDetailsTool,
],

84
ui/package-lock.json generated
View File

@@ -38,6 +38,7 @@
"framer-motion": "^11.16.0",
"immer": "^10.1.1",
"intl-messageformat": "^10.5.0",
"iovalkey": "^0.3.3",
"jose": "^5.9.3",
"jwt-decode": "^4.0.0",
"lucide-react": "^0.471.0",
@@ -985,6 +986,12 @@
"@swc/helpers": "^0.5.0"
}
},
"node_modules/@iovalkey/commands": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/@iovalkey/commands/-/commands-0.1.0.tgz",
"integrity": "sha512-/B9W4qKSSITDii5nkBCHyPkIkAi+ealUtr1oqBJsLxjSRLka4pxun2VvMNSmcwgAMxgXtQfl0qRv7TE+udPJzg==",
"license": "MIT"
},
"node_modules/@isaacs/cliui": {
"version": "8.0.2",
"resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz",
@@ -9112,6 +9119,15 @@
"node": ">=6"
}
},
"node_modules/cluster-key-slot": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.2.tgz",
"integrity": "sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA==",
"license": "Apache-2.0",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/cmdk": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/cmdk/-/cmdk-1.0.0.tgz",
@@ -9985,6 +10001,15 @@
"node": ">=0.4.0"
}
},
"node_modules/denque": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/denque/-/denque-2.1.0.tgz",
"integrity": "sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==",
"license": "Apache-2.0",
"engines": {
"node": ">=0.10"
}
},
"node_modules/dequal": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz",
@@ -11902,6 +11927,26 @@
"tslib": "^2.4.0"
}
},
"node_modules/iovalkey": {
"version": "0.3.3",
"resolved": "https://registry.npmjs.org/iovalkey/-/iovalkey-0.3.3.tgz",
"integrity": "sha512-4rTJX6Q5wTYEvxboXi8DsEiUo+OvqJGtLYOSGm37KpdRXsG5XJjbVtYKGJpPSWP+QT7rWscA4vsrdmzbEbenpw==",
"license": "MIT",
"dependencies": {
"@iovalkey/commands": "^0.1.0",
"cluster-key-slot": "^1.1.0",
"debug": "^4.3.4",
"denque": "^2.1.0",
"lodash.defaults": "^4.2.0",
"lodash.isarguments": "^3.1.0",
"redis-errors": "^1.2.0",
"redis-parser": "^3.0.0",
"standard-as-callback": "^2.1.0"
},
"engines": {
"node": ">=18.12.0"
}
},
"node_modules/is-alphabetical": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-2.0.1.tgz",
@@ -12850,6 +12895,12 @@
"resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz",
"integrity": "sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow=="
},
"node_modules/lodash.defaults": {
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz",
"integrity": "sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==",
"license": "MIT"
},
"node_modules/lodash.foreach": {
"version": "4.5.0",
"resolved": "https://registry.npmjs.org/lodash.foreach/-/lodash.foreach-4.5.0.tgz",
@@ -12860,6 +12911,12 @@
"resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz",
"integrity": "sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ=="
},
"node_modules/lodash.isarguments": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz",
"integrity": "sha512-chi4NHZlZqZD18a0imDHnZPrDeBbTtVN7GXMwuGdRH9qotxAjYs3aVLKc7zNOG9eddR5Ksd8rvFEBc9SsggPpg==",
"license": "MIT"
},
"node_modules/lodash.isplainobject": {
"version": "4.0.6",
"resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz",
@@ -15309,6 +15366,27 @@
"resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz",
"integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg=="
},
"node_modules/redis-errors": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz",
"integrity": "sha512-1qny3OExCf0UvUV/5wpYKf2YwPcOqXzkwKKSmKHiE6ZMQs5heeE/c8eXK+PNllPvmjgAbfnsbpkGZWy8cBpn9w==",
"license": "MIT",
"engines": {
"node": ">=4"
}
},
"node_modules/redis-parser": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-3.0.0.tgz",
"integrity": "sha512-DJnGAeenTdpMEH6uAJRK/uiyEIH9WVsUmoLwzudwGJUwZPp80PDBWPHXSAGNPwNvIXAbe7MSUB1zQFugFml66A==",
"license": "MIT",
"dependencies": {
"redis-errors": "^1.0.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/reflect.getprototypeof": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.6.tgz",
@@ -16010,6 +16088,12 @@
"url": "https://github.com/sponsors/wooorm"
}
},
"node_modules/standard-as-callback": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.1.0.tgz",
"integrity": "sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A==",
"license": "MIT"
},
"node_modules/stdin-discarder": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/stdin-discarder/-/stdin-discarder-0.1.0.tgz",

View File

@@ -30,6 +30,7 @@
"framer-motion": "^11.16.0",
"immer": "^10.1.1",
"intl-messageformat": "^10.5.0",
"iovalkey": "^0.3.3",
"jose": "^5.9.3",
"jwt-decode": "^4.0.0",
"lucide-react": "^0.471.0",

View File

@@ -10,5 +10,5 @@ export const checkSchema = z.object({
});
export const checkDetailsSchema = z.object({
id: z.string(),
checkId: z.string(),
});

View File

@@ -71,21 +71,12 @@ export const getFindingsSchema = z.object({
.optional()
.describe("Comma-separated list of UUID values"),
// Impact and Severity filters
// Impact filters
"filter[impact]": impactEnum.optional(),
"filter[impact__in]": z
.string()
.optional()
.describe("Comma-separated list of impact values"),
"filter[severity]": z
.enum(["critical", "high", "medium", "low", "informational"])
.optional(),
"filter[severity__in]": z
.string()
.optional()
.describe(
"Comma-separated list of severity values. Do not use it with severity filter.",
),
// Date filters
"filter[inserted_at]": z
@@ -105,6 +96,9 @@ export const getFindingsSchema = z.object({
.optional()
.describe("Date in format YYYY-MM-DD"),
// Muted filter
"filter[muted]": z.boolean().optional(),
// Provider filters
"filter[provider]": z.string().optional().describe("Provider UUID"),
"filter[provider__in]": z
@@ -176,6 +170,17 @@ export const getFindingsSchema = z.object({
.optional()
.describe("Comma-separated list of service values"),
// Severity filters
"filter[severity]": z
.enum(["critical", "high", "medium", "low", "informational"])
.optional(),
"filter[severity__in]": z
.string()
.optional()
.describe(
"Comma-separated list of severity values. Do not use it with severity filter.",
),
// Status filters
"filter[status]": statusEnum.optional(),
"filter[status__in]": z

View File

@@ -0,0 +1,35 @@
/**
 * Metadata for a single security check, including optional remediation
 * guidance per channel. Channel keys mirror the labels rendered in the
 * scan summary (cli -> "AWS CLI", nativeiac -> "Native IAC", wui -> "WUI").
 */
export interface CheckDetails {
  id: string;
  title: string;
  description: string;
  risk: string;
  // Each channel, when present, carries a description and a reference URL/text.
  remediation: {
    cli?: {
      description: string;
      reference: string;
    };
    terraform?: {
      description: string;
      reference: string;
    };
    nativeiac?: {
      description: string;
      reference: string;
    };
    other?: {
      description: string;
      reference: string;
    };
    wui?: {
      description: string;
      reference: string;
    };
  };
}
/**
 * Aggregated view of failed findings for one check at one severity.
 */
export interface FindingSummary {
  // ID of the check these findings belong to.
  checkId: string;
  // Severity label, e.g. "critical" | "high" | "medium" | "low" | "informational".
  severity: string;
  // Number of findings grouped under this entry.
  count: number;
  // IDs of the individual findings that make up `count`.
  findingIds: string[];
}