Mirror of https://github.com/jambonz/jambonz-feature-server.git (synced 2025-12-20 08:40:38 +00:00)
Feat/llm update (#936)
* add support for llm:update during LLM session
* make sure to end openai session when Llm task is killed
@@ -2,6 +2,7 @@ const Task = require('../../task');
 const TaskName = 'Llm_OpenAI_s2s';
 const {LlmEvents_OpenAI} = require('../../../utils/constants');
 const ClientEvent = 'client.event';
+const SessionDelete = 'session.delete';
 
 const openai_server_events = [
   'error',
@@ -125,6 +126,13 @@ class TaskLlmOpenAI_S2S extends Task {
     }
   }
 
+  async _api(ep, args) {
+    const res = await ep.api('uuid_openai_s2s', `^^|${args.join('|')}`);
+    if (!res.body?.startsWith('+OK')) {
+      throw new Error({args}, `Error calling uuid_openai_s2s: ${res.body}`);
+    }
+  }
+
   async exec(cs, {ep}) {
     await super.exec(cs);
 
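As a side note on the `_api` helper above, here is a small sketch (not part of the commit, with a made-up endpoint uuid) of the command string it hands to the `uuid_openai_s2s` API. The leading `^^|` is the FreeSWITCH convention for declaring `|` as the argument delimiter, so the uuid, the event name and the JSON payload arrive as separate arguments.

// Sketch only: how the uuid_openai_s2s command string is assembled.
const args = [
  'a1b2c3d4-e5f6-7890-abcd-ef0123456789',      // hypothetical endpoint uuid
  'client.event',                              // the ClientEvent constant from this file
  JSON.stringify({type: 'response.create'})    // payload forwarded to OpenAI
];
const command = `^^|${args.join('|')}`;
// => '^^|a1b2c3d4-e5f6-7890-abcd-ef0123456789|client.event|{"type":"response.create"}'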
@@ -140,26 +148,57 @@ class TaskLlmOpenAI_S2S extends Task {
 
   async kill(cs) {
     super.kill(cs);
 
+    this._api(cs.ep, [cs.ep.uuid, SessionDelete])
+      .catch((err) => this.logger.info({err}, 'TaskLlmOpenAI_S2S:kill - error deleting session'));
+
     this.notifyTaskDone();
   }
 
+  /**
+   * Send function call output to the OpenAI server in the form of conversation.item.create
+   * per https://platform.openai.com/docs/guides/realtime/function-calls
+   */
   async processToolOutput(ep, tool_call_id, data) {
     try {
       this.logger.debug({tool_call_id, data}, 'TaskLlmOpenAI_S2S:processToolOutput');
 
-      await this._api(ep, [ep.uuid, ClientEvent, JSON.stringify(data)]);
-      // spec also recommends to send immediate response.create
-      await this._api(ep, [ep.uuid, ClientEvent, JSON.stringify({type: 'response.create'})]);
+      if (!data.type || data.type !== 'conversation.item.create') {
+        this.logger.info({data},
+          'TaskLlmOpenAI_S2S:processToolOutput - invalid tool output, must be conversation.item.create');
+      }
+      else {
+        await this._api(ep, [ep.uuid, ClientEvent, JSON.stringify(data)]);
+
+        // send immediate response.create per https://platform.openai.com/docs/guides/realtime/function-calls
+        await this._api(ep, [ep.uuid, ClientEvent, JSON.stringify({type: 'response.create'})]);
+      }
     } catch (err) {
       this.logger.info({err}, 'TaskLlmOpenAI_S2S:processToolOutput');
     }
   }
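For context on the validation above, a tool output that passes the `conversation.item.create` check might look like the following sketch. It follows the shape described in the OpenAI Realtime function-call guide linked in the comment; the call id and result values are invented for illustration.

// Hypothetical payload an application could hand to processToolOutput.
const data = {
  type: 'conversation.item.create',
  item: {
    type: 'function_call_output',
    call_id: 'call_abc123',                      // the tool_call_id received with the function_call
    output: JSON.stringify({temperature: 71})    // stringified result of the tool invocation
  }
};
// processToolOutput forwards this as a client.event and then sends
// {type: 'response.create'} so the assistant can speak the result.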
 
-  async _api(ep, args) {
-    const res = await ep.api('uuid_openai_s2s', `^^|${args.join('|')}`);
-    if (!res.body?.startsWith('+OK')) {
-      throw new Error({args}, `Error calling uuid_openai_s2s: ${res.body}`);
-    }
-  }
+  /**
+   * Send a session.update to the OpenAI server
+   * Note: creating and deleting conversation items also supported as well as interrupting the assistant
+   */
+  async processLlmUpdate(ep, data, _callSid) {
+    try {
+      this.logger.debug({data, _callSid}, 'TaskLlmOpenAI_S2S:processLlmUpdate');
+
+      if (!data.type || ![
+        'session.update',
+        'conversation.item.create',
+        'conversation.item.delete',
+        'response.cancel'
+      ].includes(data.type)) {
+        this.logger.info({data}, 'TaskLlmOpenAI_S2S:processLlmUpdate - invalid mid-call request');
+      }
+      else {
+        await this._api(ep, [ep.uuid, ClientEvent, JSON.stringify(data)]);
+      }
+    } catch (err) {
+      this.logger.info({err}, 'TaskLlmOpenAI_S2S:processLlmUpdate');
+    }
+  }
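To round out the new llm:update path, here is a hedged sketch of a session.update event that processLlmUpdate would forward. The session fields follow the OpenAI Realtime API rather than anything defined in this commit, and the instruction text and temperature are placeholders.

// Hypothetical mid-call update: any of the four whitelisted event types is accepted,
// session.update being the typical case.
const update = {
  type: 'session.update',
  session: {
    instructions: 'From now on, answer only in French and keep replies brief.',
    temperature: 0.8
  }
};
// processLlmUpdate(ep, update, callSid) forwards it to OpenAI as a client.event;
// any other event type is logged as an invalid mid-call request and dropped.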