Fixed: HTTP llm.toolHook does not support OpenAI

This commit is contained in:
xquanluu
2026-05-06 15:27:12 +07:00
parent 4b8fc65cdb
commit aff2724fd6
+8 -2
View File
@@ -114,8 +114,14 @@ class TaskLlm extends Task {
const tool_response = await this.cs?.requestor.request('llm:tool-call', this.toolHook, {tool_call_id, ...data});
// if the toolHook was a websocket it will return undefined, otherwise it should return an object
if (typeof tool_response != 'undefined') {
tool_response.type = 'client_tool_result';
tool_response.invocation_id = tool_call_id;
// If the webhook didn't declare a `type`, assume the legacy Ultravox-style
// envelope and fill it in. Otherwise pass the response through unchanged so
// vendors like OpenAI ('conversation.item.create') receive the exact shape
// their realtime API expects.
if (!tool_response.type) {
tool_response.type = 'client_tool_result';
tool_response.invocation_id = tool_call_id;
}
this.processToolOutput(tool_call_id, tool_response);
}
}