Mirror of https://github.com/jambonz/jambonz-feature-server.git, synced 2026-05-06 08:47:08 +00:00.
Fixed: HTTP llm.toolHook does not support OpenAI
This commit is contained in:
@@ -114,8 +114,14 @@ class TaskLlm extends Task {
       const tool_response = await this.cs?.requestor.request('llm:tool-call', this.toolHook, {tool_call_id, ...data});
       // if the toolHook was a websocket it will return undefined, otherwise it should return an object
       if (typeof tool_response != 'undefined') {
-        tool_response.type = 'client_tool_result';
-        tool_response.invocation_id = tool_call_id;
+        // If the webhook didn't declare a `type`, assume the legacy Ultravox-style
+        // envelope and fill it in. Otherwise pass the response through unchanged so
+        // vendors like OpenAI ('conversation.item.create') receive the exact shape
+        // their realtime API expects.
+        if (!tool_response.type) {
+          tool_response.type = 'client_tool_result';
+          tool_response.invocation_id = tool_call_id;
+        }
         this.processToolOutput(tool_call_id, tool_response);
       }
     }
||||
Reference in New Issue
Block a user