Compare commits


19 Commits

Author SHA1 Message Date
Hoan Luu Huu
f241faa871 update speech utils version (#1070) 2025-02-07 08:00:33 -05:00
rammohan-y
65d35c893c Feat/1067 set default language if language is undefined (#1068)
* sending recognition mode channel variable

* change verb-specifications version

* feat/1067 - setting default language to previously set language for the recognizer object if the vendor is default

* added undefined check for fallbackVendor and fallbackLanguage
2025-02-06 08:06:56 -05:00
Hoan Luu Huu
dbdc1cd43d support voxist stt (#1066)
* support voxist stt

* wip
2025-02-05 08:33:35 -05:00
Hoan Luu Huu
7105453d81 support caching tts audio with model/model_id (#1062)
* support caching tts audio with model/model_id

* update speech utils version
2025-02-03 08:47:44 -05:00
Hoan Luu Huu
8487a4be68 support elevenlabs private agent (#1063) 2025-02-02 22:10:51 -05:00
Hoan Luu Huu
2ddcd53d6b support elevenlabs s2s (#1052)
* support elevenlabs s2s

* wip

* wip

* wip
2025-02-02 10:29:48 -05:00
rammohan-y
a4d07ddce0 Feat/1057 recognition mode (#1060)
* sending recognition mode channel variable

* change verb-specifications version
2025-01-28 08:06:04 -05:00
rammohan-y
16e044cabf feat/1053: added empty check on this.currentTask (#1054) 2025-01-22 07:16:44 -05:00
Hoan Luu Huu
ba282d775d support rimelabs tts streaming (#1047) 2025-01-18 08:17:33 -05:00
Dave Horton
a194ba833e Feat/1041 (#1045)
* initial changes for stream synonym to listen

* listen on B endpoint if nested listen in dial has channel === 2
2025-01-17 08:48:39 -05:00
rammohan-y
77f3d9d7ec feat/1034: sending socket close code when there is no response from the websocket app (#1035) 2025-01-16 10:13:00 -05:00
Sam Machin
4dbc7df93d new error for HTTP responses without stack trace (#1044)
* new error for HTTP responses without stack trace

* lint
2025-01-16 08:05:17 -05:00
Dave Horton
f71f0ac69a Fix/speechmatics (#1042)
* add speechmatics options

* wip

* speechmatics does not do endpointing for us so we need to flip on continuousAsr

* speechmatics: continousAsr should be at least equal to max_delay, if set
2025-01-15 19:12:15 -05:00
Dave Horton
edb7e21ff9 update deps 2025-01-14 10:45:38 -05:00
Dave Horton
cafd9530a2 update drachtio-srf and fsmrf to main branch releases (#1038) 2025-01-14 10:01:33 -05:00
Hoan Luu Huu
ca8cace284 support custom tts streaming (#1023)
* support custom tts streaming

* wip

* wip

* wip

* wip

* wip

* wip

* fix review comments
2025-01-14 07:24:06 -05:00
Hoan Luu Huu
499c800213 Feat/ultravox s2s (#1032)
* support ultravox_s2s

* support ultravox_s2s

* support ultravox_s2s

* wip

* wip

* wip

* wip

* fix ultravox toolcall

* wip
2025-01-14 07:11:55 -05:00
Sam Machin
97952afb1d add deepgram filler words (#1036)
* add deepgram filler words

* Update package.json

* Update package-lock.json
2025-01-13 11:07:24 -05:00
Hoan Luu Huu
f4e68d0ea1 fix openai_s2s is using wrong model (#1031)
* fix openai_s2s is using wrong model

* wip

* wip
2025-01-11 08:38:14 -05:00
23 changed files with 1529 additions and 9010 deletions

View File

@@ -1,5 +1,5 @@
const uuidv4 = require('uuid-random');
const {CallDirection, AllowedSipRecVerbs} = require('./utils/constants');
const {CallDirection, AllowedSipRecVerbs, WS_CLOSE_CODES} = require('./utils/constants');
const {parseSiprecPayload} = require('./utils/siprec-utils');
const CallInfo = require('./session/call-info');
const HttpRequestor = require('./utils/http-requestor');
@@ -460,7 +460,7 @@ module.exports = function(srf, logger) {
}).catch((err) => this.logger.info({err}, 'Error generating alert for parsing application'));
logger.info({err}, `Error retrieving or parsing application: ${err?.message}`);
res.send(480, {headers: {'X-Reason': err?.message || 'unknown'}});
app.requestor.close();
app.requestor.close(WS_CLOSE_CODES.GoingAway);
}
}

View File

@@ -1084,6 +1084,12 @@ class CallSession extends Emitter {
api_key: credential.api_key
};
}
else if ('voxist' === vendor) {
return {
speech_credential_sid: credential.speech_credential_sid,
api_key: credential.api_key
};
}
else if ('whisper' === vendor) {
return {
api_key: credential.api_key,
@@ -1109,7 +1115,8 @@ class CallSession extends Emitter {
speech_credential_sid: credential.speech_credential_sid,
auth_token: credential.auth_token,
custom_stt_url: credential.custom_stt_url,
custom_tts_url: credential.custom_tts_url
custom_tts_url: credential.custom_tts_url,
custom_tts_streaming_url: credential.custom_tts_streaming_url
};
}
}
@@ -1467,7 +1474,7 @@ class CallSession extends Emitter {
if (!listenTask) {
return this.logger.info('CallSession:_lccListenStatus - invalid listen_status: Dial does not have a listen');
}
listenTask.updateListen(opts.listen_status);
listenTask.updateListen(opts.listen_status || opts.stream_status);
}
/**
@@ -1609,7 +1616,7 @@ Duration=${duration} `
// this whole thing requires us to be in a Dial verb
const task = this.currentTask;
if (!task || ![TaskName.Dial, TaskName.Listen].includes(task.name)) {
return this.logger.info('CallSession:_lccWhisper - invalid command since we are not in a dial or listen');
return this.logger.info('CallSession:_lccWhisper - invalid command since we are not in a dial or stream/listen');
}
// allow user to provide a url object, a url string, an array of tasks, or a single task
@@ -1675,7 +1682,7 @@ Duration=${duration} `
const t = normalizeJambones(this.logger, [
{
verb: 'dub',
...opts.dub
...opts
}
])
.map((tdata) => makeTask(this.logger, tdata));
@@ -1798,7 +1805,7 @@ Duration=${duration} `
if (opts.call_hook || opts.child_call_hook) {
return await this._lccCallHook(opts);
}
if (opts.listen_status) {
if (opts.listen_status || opts.stream_status) {
await this._lccListenStatus(opts);
}
if (opts.transcribe_status) {
@@ -2108,6 +2115,7 @@ Duration=${duration} `
break;
case 'listen:status':
case 'stream:status':
this._lccListenStatus(data);
break;
@@ -2482,7 +2490,7 @@ Duration=${duration} `
res.send(200, {body: this.ep.local.sdp});
}
else {
if (this.currentTask.name === TaskName.Dial && this.currentTask.isOnHoldEnabled) {
if (this.currentTask && this.currentTask.name === TaskName.Dial && this.currentTask.isOnHoldEnabled) {
this.logger.info('onholdMusic reINVITE after media has been released');
await this.currentTask.handleReinviteAfterMediaReleased(req, res);
} else {
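
A rough sketch, not part of the diff, condensing the aliasing introduced above: a live-call-control payload may now use stream_status in place of listen_status, and both drive the same handler. The status values and stand-in task object are illustrative only.

const updateFromLcc = (listenTask, opts) => {
  const status = opts.listen_status || opts.stream_status;   // either alias is accepted
  if (status) listenTask.updateListen(status);
};

// stand-in task object just for illustration
const listenTask = {updateListen: (s) => console.log(`listen status -> ${s}`)};
updateFromLcc(listenTask, {stream_status: 'pause'});   // same effect as {listen_status: 'pause'}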

View File

@@ -187,18 +187,20 @@ class TaskConfig extends Task {
: cs.speechRecognizerVendor;
cs.speechRecognizerLabel = this.recognizer.label === 'default'
? cs.speechRecognizerLabel : this.recognizer.label;
cs.speechRecognizerLanguage = this.recognizer.language !== 'default'
cs.speechRecognizerLanguage = this.recognizer.language !== undefined && this.recognizer.language !== 'default'
? this.recognizer.language
: cs.speechRecognizerLanguage;
//fallback
cs.fallbackSpeechRecognizerVendor = this.recognizer.fallbackVendor !== 'default'
cs.fallbackSpeechRecognizerVendor = this.recognizer.fallbackVendor !== undefined &&
this.recognizer.fallbackVendor !== 'default'
? this.recognizer.fallbackVendor
: cs.fallbackSpeechRecognizerVendor;
cs.fallbackSpeechRecognizerLabel = this.recognizer.fallbackLabel === 'default' ?
cs.fallbackSpeechRecognizerLabel :
this.recognizer.fallbackLabel;
cs.fallbackSpeechRecognizerLanguage = this.recognizer.fallbackLanguage !== 'default'
cs.fallbackSpeechRecognizerLanguage = this.recognizer.fallbackLanguage !== undefined &&
this.recognizer.fallbackLanguage !== 'default'
? this.recognizer.fallbackLanguage
: cs.fallbackSpeechRecognizerLanguage;
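
A minimal restatement of the rule these hunks implement: an undefined value is now treated like 'default' and falls back to whatever the session already has. pick() is only an illustrative helper, not part of the codebase.

const pick = (requested, current) =>
  requested !== undefined && requested !== 'default' ? requested : current;

pick('fr-FR', 'en-US');     // 'fr-FR'  – explicit language wins
pick('default', 'en-US');   // 'en-US'  – keep the previously set language
pick(undefined, 'en-US');   // 'en-US'  – the newly handled case from this change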

View File

@@ -121,8 +121,9 @@ class TaskDial extends Task {
}
}
if (this.data.listen) {
this.listenTask = makeTask(logger, {'listen': this.data.listen}, this);
const listenData = this.data.listen || this.data.stream;
if (listenData) {
this.listenTask = makeTask(logger, {'listen': listenData }, this);
}
if (this.data.transcribe) {
this.transcribeTask = makeTask(logger, {'transcribe' : this.data.transcribe}, this);
@@ -873,7 +874,7 @@ class TaskDial extends Task {
if (cs.sipRequestWithinDialogHook) this._initSipIndialogRequestListener(cs, this.dlg);
if (this.transcribeTask) this.transcribeTask.exec(cs, {ep: this.epOther, ep2:this.ep});
if (this.listenTask) this.listenTask.exec(cs, {ep: this.epOther});
if (this.listenTask) this.listenTask.exec(cs, {ep: this.listenTask.channel === 2 ? this.ep : this.epOther});
if (this.startAmd) {
try {
this.startAmd(cs, this.ep, this, this.data.amd);
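
For illustration only, an application snippet that would exercise the new behaviour — a stream (synonym for listen) nested in dial and attached to the B leg. Only channel, mixType and the stream/listen nesting are visible in the diff; the other fields are assumed.

{
  "verb": "dial",
  "target": [{"type": "phone", "number": "+15551234567"}],
  "stream": {
    "url": "wss://example.com/media",
    "channel": 2,
    "mixType": "mono"
  }
}
// channel 2 makes TaskDial exec the listen/stream task on this.ep (the B endpoint) instead of this.epOther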

View File

@@ -11,6 +11,7 @@ const {
NvidiaTranscriptionEvents,
JambonzTranscriptionEvents,
AssemblyAiTranscriptionEvents,
VoxistTranscriptionEvents,
VadDetection,
VerbioTranscriptionEvents,
SpeechmaticsTranscriptionEvents
@@ -24,6 +25,7 @@ const makeTask = require('./make_task');
const assert = require('assert');
const SttTask = require('./stt-task');
const { SpeechCredentialError } = require('../utils/error');
const SPEECHMATICS_DEFAULT_ASR_TIMEOUT = 1200;
class TaskGather extends SttTask {
constructor(logger, opts, parentTask) {
@@ -162,6 +164,16 @@ class TaskGather extends SttTask {
this.logger.debug({hints: this.data.recognizer.hints, hintsBoost: this.data.recognizer.hintsBoost},
'Gather:exec - applying global sttHints');
}
// specials case for speechmatics: they dont do endpointing so we need to enable continuous ASR
if (this.vendor === 'speechmatics' && !this.isContinuousAsr) {
const maxDelay = this.recognizer?.speechmaticsOptions?.transcription_config?.max_delay;
if (maxDelay) this.asrTimeout = Math.min(SPEECHMATICS_DEFAULT_ASR_TIMEOUT, maxDelay * 1000);
else this.asrTimeout = SPEECHMATICS_DEFAULT_ASR_TIMEOUT;
this.isContinuousAsr = true;
this.logger.debug(`Gather:exec - auto-enabling continuous ASR for speechmatics w/ timeout ${this.asrTimeout}`);
}
if (!this.isContinuousAsr && cs.isContinuousAsr) {
this.isContinuousAsr = true;
this.asrTimeout = cs.asrTimeout * 1000;
@@ -513,6 +525,17 @@ class TaskGather extends SttTask {
this._onVendorConnectFailure.bind(this, cs, ep));
break;
case 'voxist':
this.bugname = `${this.bugname_prefix}voxist_transcribe`;
this.addCustomEventListener(ep, VoxistTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep));
this.addCustomEventListener(
ep, VoxistTranscriptionEvents.Connect, this._onVendorConnect.bind(this, cs, ep));
this.addCustomEventListener(ep, VoxistTranscriptionEvents.Error, this._onVendorError.bind(this, cs, ep));
this.addCustomEventListener(ep, VoxistTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep));
break;
case 'speechmatics':
this.bugname = `${this.bugname_prefix}speechmatics_transcribe`;
this.addCustomEventListener(
@@ -832,7 +855,7 @@ class TaskGather extends SttTask {
const t = evt.alternatives[0].transcript;
if (t) {
/* remove trailing punctuation */
if (/[,;:\.!\?]$/.test(t)) {
if (this.vendor !== 'speechmatics' && /[,;:\.!\?]$/.test(t)) {
this.logger.debug('TaskGather:_onTranscription - removing trailing punctuation');
evt.alternatives[0].transcript = t.slice(0, -1);
}
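
A worked example of the speechmatics hunk above, assuming transcription_config.max_delay is set to 0.9 seconds:

const SPEECHMATICS_DEFAULT_ASR_TIMEOUT = 1200;            // ms
const maxDelay = 0.9;                                      // transcription_config.max_delay, in seconds
const asrTimeout = maxDelay
  ? Math.min(SPEECHMATICS_DEFAULT_ASR_TIMEOUT, maxDelay * 1000)   // 900 ms here
  : SPEECHMATICS_DEFAULT_ASR_TIMEOUT;                              // 1200 ms when max_delay is absent
console.log(asrTimeout);   // 900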

View File

@@ -17,7 +17,7 @@ class TaskListen extends Task {
[
'action', 'auth', 'method', 'url', 'finishOnKey', 'maxLength', 'metadata', 'mixType', 'passDtmf', 'playBeep',
'sampleRate', 'timeout', 'transcribe', 'wsAuth', 'disableBidirectionalAudio'
'sampleRate', 'timeout', 'transcribe', 'wsAuth', 'disableBidirectionalAudio', 'channel'
].forEach((k) => this[k] = this.data[k]);
this.mixType = this.mixType || 'mono';

View File

@@ -2,6 +2,8 @@ const Task = require('../task');
const {TaskPreconditions} = require('../../utils/constants');
const TaskLlmOpenAI_S2S = require('./llms/openai_s2s');
const TaskLlmVoiceAgent_S2S = require('./llms/voice_agent_s2s');
const TaskLlmUltravox_S2S = require('./llms/ultravox_s2s');
const TaskLlmElevenlabs_S2S = require('./llms/elevenlabs_s2s');
class TaskLlm extends Task {
constructor(logger, opts) {
@@ -49,6 +51,14 @@ class TaskLlm extends Task {
llm = new TaskLlmVoiceAgent_S2S(this.logger, this.data, this);
break;
case 'ultravox':
llm = new TaskLlmUltravox_S2S(this.logger, this.data, this);
break;
case 'elevenlabs':
llm = new TaskLlmElevenlabs_S2S(this.logger, this.data, this);
break;
default:
throw new Error(`Unsupported vendor ${this.vendor} for LLM`);
}

View File

@@ -0,0 +1,302 @@
const Task = require('../../task');
const TaskName = 'Llm_Elevenlabs_s2s';
const {LlmEvents_Elevenlabs} = require('../../../utils/constants');
const {request} = require('undici');
const ClientEvent = 'client.event';
const SessionDelete = 'session.delete';
const elevenlabs_server_events = [
'conversation_initiation_metadata',
'user_transcript',
'agent_response',
'client_tool_call'
];
const expandWildcards = (events) => {
const expandedEvents = [];
events.forEach((evt) => {
if (evt.endsWith('.*')) {
const prefix = evt.slice(0, -2); // Remove the wildcard ".*"
const matchingEvents = elevenlabs_server_events.filter((e) => e.startsWith(prefix));
expandedEvents.push(...matchingEvents);
} else {
expandedEvents.push(evt);
}
});
return expandedEvents;
};
class TaskLlmElevenlabs_S2S extends Task {
constructor(logger, opts, parentTask) {
super(logger, opts, parentTask);
this.parent = parentTask;
this.vendor = this.parent.vendor;
this.auth = this.parent.auth;
const {agent_id, api_key} = this.auth || {};
if (!agent_id) throw new Error('auth.agent_id is required for Elevenlabs S2S');
this.agent_id = agent_id;
this.api_key = api_key;
this.actionHook = this.data.actionHook;
this.eventHook = this.data.eventHook;
this.toolHook = this.data.toolHook;
const {
conversation_initiation_client_data,
input_sample_rate = 16000,
output_sample_rate = 16000
} = this.data.llmOptions;
this.conversation_initiation_client_data = conversation_initiation_client_data;
this.input_sample_rate = input_sample_rate;
this.output_sample_rate = output_sample_rate;
this.results = {
completionReason: 'normal conversation end'
};
/**
* only one of these will have items,
* if includeEvents, then these are the events to include
* if excludeEvents, then these are the events to exclude
*/
this.includeEvents = [];
this.excludeEvents = [];
/* default to all events if user did not specify */
this._populateEvents(this.data.events || elevenlabs_server_events);
this.addCustomEventListener = parentTask.addCustomEventListener.bind(parentTask);
this.removeCustomEventListeners = parentTask.removeCustomEventListeners.bind(parentTask);
}
get name() { return TaskName; }
async getSignedUrl() {
if (!this.api_key) {
return {
host: 'api.elevenlabs.io',
path: `/v1/convai/conversation?agent_id=${this.agent_id}`,
};
}
const {statusCode, body} = await request(
`https://api.elevenlabs.io/v1/convai/conversation/get_signed_url?agent_id=${this.agent_id}`, {
method: 'GET',
headers: {
'xi-api-key': this.api_key
},
}
);
const data = await body.json();
if (statusCode !== 200 || !data?.signed_url) {
this.logger.error({statusCode, data}, 'Elevenlabs Error registering call');
throw new Error(`Elevenlabs Error registering call: ${data.message}`);
}
const url = new URL(data.signed_url);
return {
host: url.hostname,
path: url.pathname + url.search,
};
}
async _api(ep, args) {
const res = await ep.api('uuid_elevenlabs_s2s', `^^|${args.join('|')}`);
if (!res.body?.startsWith('+OK')) {
throw new Error({args}, `Error calling uuid_elevenlabs_s2s: ${res.body}`);
}
}
async exec(cs, {ep}) {
await super.exec(cs);
await this._startListening(cs, ep);
await this.awaitTaskDone();
/* note: the parent llm verb started the span, which is why this is necessary */
await this.parent.performAction(this.results);
this._unregisterHandlers();
}
async kill(cs) {
super.kill(cs);
this._api(cs.ep, [cs.ep.uuid, SessionDelete])
.catch((err) => this.logger.info({err}, 'TaskLlmElevenlabs_S2S:kill - error deleting session'));
this.notifyTaskDone();
}
/**
* Send function call output to the Elevenlabs server in the form of conversation.item.create
* per https://elevenlabs.io/docs/conversational-ai/api-reference/conversational-ai/websocket
*/
async processToolOutput(ep, tool_call_id, rawData) {
try {
const {data} = rawData;
this.logger.debug({tool_call_id, data}, 'TaskLlmElevenlabs_S2S:processToolOutput');
if (!data.type || data.type !== 'client_tool_result') {
this.logger.info({data},
'TaskLlmElevenlabs_S2S:processToolOutput - invalid tool output, must be client_tool_result');
}
else {
await this._api(ep, [ep.uuid, ClientEvent, JSON.stringify(data)]);
}
} catch (err) {
this.logger.info({err}, 'TaskLlmElevenlabs_S2S:processToolOutput');
}
}
/**
* Send a session.update to the Elevenlabs server
* Note: creating and deleting conversation items also supported as well as interrupting the assistant
*/
async processLlmUpdate(ep, data, _callSid) {
this.logger.debug({data, _callSid}, 'TaskLlmElevenlabs_S2S:processLlmUpdate, ignored');
}
async _startListening(cs, ep) {
this._registerHandlers(ep);
try {
const {host, path} = await this.getSignedUrl();
const args = [ep.uuid, 'session.create', this.input_sample_rate, this.output_sample_rate, host, path];
await this._api(ep, args);
} catch (err) {
this.logger.error({err}, 'TaskLlmElevenlabs_S2S:_startListening');
this.notifyTaskDone();
}
}
async _sendClientEvent(ep, obj) {
let ok = true;
this.logger.debug({obj}, 'TaskLlmElevenlabs_S2S:_sendClientEvent');
try {
const args = [ep.uuid, ClientEvent, JSON.stringify(obj)];
await this._api(ep, args);
} catch (err) {
ok = false;
this.logger.error({err}, 'TaskLlmElevenlabs_S2S:_sendClientEvent - Error');
}
return ok;
}
async _sendInitialMessage(ep) {
if (this.conversation_initiation_client_data) {
if (!await this._sendClientEvent(ep, {
type: 'conversation_initiation_client_data',
conversation_initiation_client_data: this.conversation_initiation_client_data
})) {
this.notifyTaskDone();
}
}
}
_registerHandlers(ep) {
this.addCustomEventListener(ep, LlmEvents_Elevenlabs.Connect, this._onConnect.bind(this, ep));
this.addCustomEventListener(ep, LlmEvents_Elevenlabs.ConnectFailure, this._onConnectFailure.bind(this, ep));
this.addCustomEventListener(ep, LlmEvents_Elevenlabs.Disconnect, this._onDisconnect.bind(this, ep));
this.addCustomEventListener(ep, LlmEvents_Elevenlabs.ServerEvent, this._onServerEvent.bind(this, ep));
}
_unregisterHandlers() {
this.removeCustomEventListeners();
}
_onError(ep, evt) {
this.logger.info({evt}, 'TaskLlmElevenlabs_S2S:_onError');
this.notifyTaskDone();
}
_onConnect(ep) {
this.logger.debug('TaskLlmElevenlabs_S2S:_onConnect');
this._sendInitialMessage(ep);
}
_onConnectFailure(_ep, evt) {
this.logger.info(evt, 'TaskLlmElevenlabs_S2S:_onConnectFailure');
this.results = {completionReason: 'connection failure'};
this.notifyTaskDone();
}
_onDisconnect(_ep, evt) {
this.logger.info(evt, 'TaskLlmElevenlabs_S2S:_onConnectFailure');
this.results = {completionReason: 'disconnect from remote end'};
this.notifyTaskDone();
}
async _onServerEvent(ep, evt) {
let endConversation = false;
const type = evt.type;
this.logger.info({evt}, 'TaskLlmElevenlabs_S2S:_onServerEvent');
if (type === 'error') {
endConversation = true;
this.results = {
completionReason: 'server error',
error: evt.error
};
}
/* tool calls */
else if (type === 'client_tool_call') {
this.logger.debug({evt}, 'TaskLlmElevenlabs_S2S:_onServerEvent - function_call');
if (!this.toolHook) {
this.logger.warn({evt}, 'TaskLlmElevenlabs_S2S:_onServerEvent - no toolHook defined!');
}
else {
const {client_tool_call} = evt;
const {tool_name: name, tool_call_id: call_id, parameters: args} = client_tool_call;
try {
await this.parent.sendToolHook(call_id, {name, args});
} catch (err) {
this.logger.info({err, evt}, 'TaskLlmElevenlabs_S2S - error calling function');
this.results = {
completionReason: 'client error calling function',
error: err
};
endConversation = true;
}
}
}
/* check whether we should notify on this event */
if (this.includeEvents.length > 0 ? this.includeEvents.includes(type) : !this.excludeEvents.includes(type)) {
this.parent.sendEventHook(evt)
.catch((err) => this.logger.info({err},
'TaskLlmElevenlabs_S2S:_onServerEvent - error sending event hook'));
}
if (endConversation) {
this.logger.info({results: this.results},
'TaskLlmElevenlabs_S2S:_onServerEvent - ending conversation due to error');
this.notifyTaskDone();
}
}
_populateEvents(events) {
if (events.includes('all')) {
/* work by excluding specific events */
const exclude = events
.filter((evt) => evt.startsWith('-'))
.map((evt) => evt.slice(1));
if (exclude.length === 0) this.includeEvents = elevenlabs_server_events;
else this.excludeEvents = expandWildcards(exclude);
}
else {
/* work by including specific events */
const include = events
.filter((evt) => !evt.startsWith('-'));
this.includeEvents = expandWildcards(include);
}
this.logger.debug({
includeEvents: this.includeEvents,
excludeEvents: this.excludeEvents
}, 'TaskLlmElevenlabs_S2S:_populateEvents');
}
}
module.exports = TaskLlmElevenlabs_S2S;
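
A small sketch (not from the diff) of the include/exclude decision this class makes before forwarding a server event to the eventHook, mirroring _populateEvents and the check in _onServerEvent:

const shouldNotify = (type, includeEvents, excludeEvents) =>
  includeEvents.length > 0 ? includeEvents.includes(type) : !excludeEvents.includes(type);

// events: ['agent_response'] resolves to includeEvents = ['agent_response']
shouldNotify('agent_response', ['agent_response'], []);     // true
// events: ['all', '-user_transcript'] resolves to excludeEvents = ['user_transcript']
shouldNotify('user_transcript', [], ['user_transcript']);   // false
shouldNotify('agent_response', [], ['user_transcript']);    // true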

View File

@@ -120,7 +120,7 @@ class TaskLlmOpenAI_S2S extends Task {
switch (this.vendor) {
case 'openai':
return 'v1/realtime?model=${this.model}';
return `v1/realtime?model=${this.model}`;
case 'microsoft':
return `openai/realtime?api-version=2024-10-01-preview&deployment=${this.model}`;
}
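
The one-line fix above swaps ordinary quotes for backticks; with single quotes the placeholder was sent literally instead of being interpolated. A quick illustration with an assumed model name:

const model = 'gpt-4o-realtime-preview';   // illustrative value
'v1/realtime?model=${this.model}';   // old: the literal text "${this.model}" ends up in the path
`v1/realtime?model=${model}`;        // new: "v1/realtime?model=gpt-4o-realtime-preview"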

View File

@@ -0,0 +1,245 @@
const Task = require('../../task');
const TaskName = 'Llm_Ultravox_s2s';
const {request} = require('undici');
const {LlmEvents_Ultravox} = require('../../../utils/constants');
const ultravox_server_events = [
'pong',
'state',
'transcript',
'conversationText',
'clientToolInvocation',
'playbackClearBuffer',
];
const ClientEvent = 'client.event';
const expandWildcards = (events) => {
// no-op for deepgram
return events;
};
const SessionDelete = 'session.delete';
class TaskLlmUltravox_S2S extends Task {
constructor(logger, opts, parentTask) {
super(logger, opts, parentTask);
this.parent = parentTask;
this.vendor = this.parent.vendor;
this.model = this.parent.model || 'fixie-ai/ultravox';
this.auth = this.parent.auth;
this.connectionOptions = this.parent.connectOptions;
const {apiKey} = this.auth || {};
if (!apiKey) throw new Error('auth.apiKey is required for Vendor: Ultravox');
this.apiKey = apiKey;
this.actionHook = this.data.actionHook;
this.eventHook = this.data.eventHook;
this.toolHook = this.data.toolHook;
/**
* only one of these will have items,
* if includeEvents, then these are the events to include
* if excludeEvents, then these are the events to exclude
*/
this.includeEvents = [];
this.excludeEvents = [];
/* default to all events if user did not specify */
this._populateEvents(this.data.events || ultravox_server_events);
this.addCustomEventListener = parentTask.addCustomEventListener.bind(parentTask);
this.removeCustomEventListeners = parentTask.removeCustomEventListeners.bind(parentTask);
}
get name() { return TaskName; }
async _api(ep, args) {
const res = await ep.api('uuid_ultravox_s2s', `^^|${args.join('|')}`);
if (!res.body?.startsWith('+OK')) {
throw new Error(`Error calling uuid_ultravox_s2s: ${JSON.stringify(res.body)}`);
}
}
async createCall() {
const payload = {
...this.data.llmOptions,
model: this.model,
medium: {
...(this.data.llmOptions.medium || {}),
serverWebSocket: {
inputSampleRate: 8000,
outputSampleRate: 8000,
}
}
};
const {statusCode, body} = await request('https://api.ultravox.ai/api/calls', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'X-API-Key': this.apiKey
},
body: JSON.stringify(payload)
});
const data = await body.json();
if (statusCode !== 201 || !data?.joinUrl) {
this.logger.error({statusCode, data}, 'Ultravox Error registering call');
throw new Error(`Ultravox Error registering call: ${data.message}`);
}
this.logger.info({joinUrl: data.joinUrl}, 'Ultravox Call registered');
return data.joinUrl;
}
_unregisterHandlers() {
this.removeCustomEventListeners();
}
_registerHandlers(ep) {
this.addCustomEventListener(ep, LlmEvents_Ultravox.Connect, this._onConnect.bind(this, ep));
this.addCustomEventListener(ep, LlmEvents_Ultravox.ConnectFailure, this._onConnectFailure.bind(this, ep));
this.addCustomEventListener(ep, LlmEvents_Ultravox.Disconnect, this._onDisconnect.bind(this, ep));
this.addCustomEventListener(ep, LlmEvents_Ultravox.ServerEvent, this._onServerEvent.bind(this, ep));
}
async _startListening(cs, ep) {
this._registerHandlers(ep);
const joinUrl = await this.createCall();
// split the joinUrl into host and path
const {host, pathname, search} = new URL(joinUrl);
try {
const args = [ep.uuid, 'session.create', host, pathname + search];
await this._api(ep, args);
} catch (err) {
this.logger.error({err}, 'TaskLlmUltraVox_S2S:_startListening');
this.notifyTaskDone();
}
}
async exec(cs, {ep}) {
await super.exec(cs);
await this._startListening(cs, ep);
await this.awaitTaskDone();
/* note: the parent llm verb started the span, which is why this is necessary */
await this.parent.performAction(this.results);
this._unregisterHandlers();
}
async kill(cs) {
super.kill(cs);
this._api(cs.ep, [cs.ep.uuid, SessionDelete])
.catch((err) => this.logger.info({err}, 'TaskLlmUltravox_S2S:kill - error deleting session'));
this.notifyTaskDone();
}
_onConnect(ep) {
this.logger.debug('TaskLlmUltravox_S2S:_onConnect');
}
_onConnectFailure(_ep, evt) {
this.logger.info(evt, 'TaskLlmUltravox_S2S:_onConnectFailure');
this.results = {completionReason: 'connection failure'};
this.notifyTaskDone();
}
_onDisconnect(_ep, evt) {
this.logger.info(evt, 'TaskLlmUltravox_S2S:_onConnectFailure');
this.results = {completionReason: 'disconnect from remote end'};
this.notifyTaskDone();
}
async _onServerEvent(_ep, evt) {
let endConversation = false;
const type = evt.type;
this.logger.info({evt}, 'TaskLlmUltravox_S2S:_onServerEvent');
/* server errors of some sort */
if (type === 'error') {
endConversation = true;
this.results = {
completionReason: 'server error',
error: evt.error
};
}
/* tool calls */
else if (type === 'client_tool_invocation') {
this.logger.debug({evt}, 'TaskLlmUltravox_S2S:_onServerEvent - function_call');
if (!this.toolHook) {
this.logger.warn({evt}, 'TaskLlmUltravox_S2S:_onServerEvent - no toolHook defined!');
}
else {
const {toolName: name, invocationId: call_id, parameters: args} = evt;
try {
await this.parent.sendToolHook(call_id, {name, args});
} catch (err) {
this.logger.info({err, evt}, 'TaskLlmUltravox_S2S - error calling function');
this.results = {
completionReason: 'client error calling function',
error: err
};
endConversation = true;
}
}
}
/* check whether we should notify on this event */
if (this.includeEvents.length > 0 ? this.includeEvents.includes(type) : !this.excludeEvents.includes(type)) {
this.parent.sendEventHook(evt)
.catch((err) => this.logger.info({err}, 'TaskLlmUltravox_S2S:_onServerEvent - error sending event hook'));
}
if (endConversation) {
this.logger.info({results: this.results},
'TaskLlmUltravox_S2S:_onServerEvent - ending conversation due to error');
this.notifyTaskDone();
}
}
async processToolOutput(ep, tool_call_id, data) {
try {
this.logger.debug({tool_call_id, data}, 'TaskLlmUltravox_S2S:processToolOutput');
if (!data.type || data.type !== 'client_tool_result') {
this.logger.info({data},
'TaskLlmUltravox_S2S:processToolOutput - invalid tool output, must be client_tool_result');
}
else {
await this._api(ep, [ep.uuid, ClientEvent, JSON.stringify(data)]);
}
} catch (err) {
this.logger.info({err}, 'TaskLlmUltravox_S2S:processToolOutput');
}
}
_populateEvents(events) {
if (events.includes('all')) {
/* work by excluding specific events */
const exclude = events
.filter((evt) => evt.startsWith('-'))
.map((evt) => evt.slice(1));
if (exclude.length === 0) this.includeEvents = ultravox_server_events;
else this.excludeEvents = expandWildcards(exclude);
}
else {
/* work by including specific events */
const include = events
.filter((evt) => !evt.startsWith('-'));
this.includeEvents = expandWildcards(include);
}
this.logger.debug({
includeEvents: this.includeEvents,
excludeEvents: this.excludeEvents
}, 'TaskLlmUltravox_S2S:_populateEvents');
}
}
module.exports = TaskLlmUltravox_S2S;
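
A self-contained sketch of the call registration that createCall() performs. Only the URL, headers, model and serverWebSocket medium come from the diff; the spread llmOptions fields are assumptions, and the function name is illustrative.

const {request} = require('undici');

async function registerUltravoxCall(apiKey, llmOptions = {}) {
  const {statusCode, body} = await request('https://api.ultravox.ai/api/calls', {
    method: 'POST',
    headers: {'Content-Type': 'application/json', 'X-API-Key': apiKey},
    body: JSON.stringify({
      ...llmOptions,                                    // e.g. systemPrompt, voice – assumed fields
      model: 'fixie-ai/ultravox',
      medium: {serverWebSocket: {inputSampleRate: 8000, outputSampleRate: 8000}}
    })
  });
  const data = await body.json();
  if (statusCode !== 201 || !data?.joinUrl) throw new Error(`Ultravox error registering call: ${data.message}`);
  return data.joinUrl;                                  // URL the media server then connects to
}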

View File

@@ -84,6 +84,7 @@ function makeTask(logger, obj, parent) {
const TaskTranscribe = require('./transcribe');
return new TaskTranscribe(logger, data, parent);
case TaskName.Listen:
case TaskName.Stream:
const TaskListen = require('./listen');
return new TaskListen(logger, data, parent);
case TaskName.Redirect:

View File

@@ -240,6 +240,7 @@ class TaskSay extends TtsTask {
language,
voice,
engine,
model: this.model || this.model_id,
text
}).catch((err) => this.logger.info({err}, 'Error adding file to cache'));
}

View File

@@ -13,6 +13,7 @@ const {
JambonzTranscriptionEvents,
TranscribeStatus,
AssemblyAiTranscriptionEvents,
VoxistTranscriptionEvents,
VerbioTranscriptionEvents,
SpeechmaticsTranscriptionEvents
} = require('../utils/constants.json');
@@ -300,6 +301,17 @@ class TaskTranscribe extends SttTask {
this._onVendorConnectFailure.bind(this, cs, ep, channel));
break;
case 'voxist':
this.bugname = `${this.bugname_prefix}voxist_transcribe`;
this.addCustomEventListener(ep, VoxistTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
this.addCustomEventListener(ep,
VoxistTranscriptionEvents.Connect, this._onVendorConnect.bind(this, cs, ep));
this.addCustomEventListener(ep, VoxistTranscriptionEvents.Error, this._onVendorError.bind(this, cs, ep));
this.addCustomEventListener(ep, VoxistTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep, channel));
break;
case 'speechmatics':
this.bugname = `${this.bugname_prefix}speechmatics_transcribe`;
this.addCustomEventListener(

View File

@@ -59,7 +59,7 @@ class TtsTask extends Task {
}
async setTtsStreamingChannelVars(vendor, language, voice, credentials, ep) {
const {api_key, model_id} = credentials;
const {api_key, model_id, custom_tts_streaming_url, auth_token} = credentials;
const {stability, similarity_boost, use_speaker_boost, style} = this.options;
let obj;
@@ -94,8 +94,36 @@ class TtsTask extends Task {
...(style && {ELEVENLABS_TTS_STREAMING_VOICE_SETTINGS_STYLE: style})
};
break;
case 'rimelabs':
const {
pauseBetweenBrackets, phonemizeBetweenBrackets, inlineSpeedAlpha, speedAlpha, reduceLatency
} = this.options;
obj = {
RIMELABS_API_KEY: api_key,
RIMELABS_TTS_STREAMING_MODEL_ID: model_id,
RIMELABS_TTS_STREAMING_VOICE_ID: voice,
RIMELABS_TTS_STREAMING_LANGUAGE: language || 'en',
...(pauseBetweenBrackets && {RIMELABS_TTS_STREAMING_PAUSE_BETWEEN_BRACKETS: pauseBetweenBrackets}),
...(phonemizeBetweenBrackets &&
{RIMELABS_TTS_STREAMING_PHONEMIZE_BETWEEN_BRACKETS: phonemizeBetweenBrackets}),
...(inlineSpeedAlpha && {RIMELABS_TTS_STREAMING_INLINE_SPEED_ALPHA: inlineSpeedAlpha}),
...(speedAlpha && {RIMELABS_TTS_STREAMING_SPEED_ALPHA: speedAlpha}),
...(reduceLatency && {RIMELABS_TTS_STREAMING_REDUCE_LATENCY: reduceLatency})
};
break;
default:
throw new Error(`vendor ${vendor} is not supported for tts streaming yet`);
if (vendor.startsWith('custom:')) {
const use_tls = custom_tts_streaming_url.startsWith('wss://');
obj = {
CUSTOM_TTS_STREAMING_HOST: custom_tts_streaming_url.replace(/^(ws|wss):\/\//, ''),
CUSTOM_TTS_STREAMING_API_KEY: auth_token,
CUSTOM_TTS_STREAMING_VOICE_ID: voice,
CUSTOM_TTS_STREAMING_LANGUAGE: language || 'en',
CUSTOM_TTS_STREAMING_USE_TLS: use_tls
};
} else {
throw new Error(`vendor ${vendor} is not supported for tts streaming yet`);
}
}
this.logger.info({vendor, credentials, obj}, 'setTtsStreamingChannelVars');
@@ -115,16 +143,16 @@ class TtsTask extends Task {
`No text-to-speech service credentials for ${vendor} with labels: ${label} have been configured`);
}
/* parse Nuance voices into name and model */
let model;
if (vendor === 'nuance' && voice) {
const arr = /([A-Za-z-]*)\s+-\s+(enhanced|standard)/.exec(voice);
if (arr) {
voice = arr[1];
model = arr[2];
this.model = arr[2];
}
} else if (vendor === 'deepgram') {
model = voice;
this.model = voice;
}
this.model_id = credentials.model_id;
/* allow for microsoft custom region voice and api_key to be specified as an override */
if (vendor === 'microsoft' && this.options.deploymentId) {
@@ -187,7 +215,8 @@ class TtsTask extends Task {
// If vendor is changed from the previous one, then reset the cache_speech_handles flag
//cs.currentTtsVendor = vendor;
if (!preCache && !this._disableTracing) this.logger.info({vendor, language, voice, model}, 'TaskSay:exec');
if (!preCache && !this._disableTracing)
this.logger.info({vendor, language, voice, model: this.model}, 'TaskSay:exec');
try {
if (!credentials) {
writeAlerts({
@@ -222,7 +251,7 @@ class TtsTask extends Task {
language,
voice,
engine,
model,
model: this.model,
salt,
credentials,
options: this.options,
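
A worked example of the custom: vendor branch added above, with an assumed credential URL:

const custom_tts_streaming_url = 'wss://tts.example.com/stream';      // assumed value
const use_tls = custom_tts_streaming_url.startsWith('wss://');         // true
const host = custom_tts_streaming_url.replace(/^(ws|wss):\/\//, '');   // 'tts.example.com/stream'
// channel vars sent to the media server:
// CUSTOM_TTS_STREAMING_HOST='tts.example.com/stream', CUSTOM_TTS_STREAMING_USE_TLS=true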

View File

@@ -28,6 +28,7 @@
"SipRedirect": "sip:redirect",
"Say": "say",
"SayLegacy": "say:legacy",
"Stream": "stream",
"Tag": "tag",
"Transcribe": "transcribe"
},
@@ -148,6 +149,12 @@
"ConnectFailure": "assemblyai_transcribe::connect_failed",
"Connect": "assemblyai_transcribe::connect"
},
"VoxistTranscriptionEvents": {
"Transcription": "voxist_transcribe::transcription",
"Error": "voxist_transcribe::error",
"ConnectFailure": "voxist_transcribe::connect_failed",
"Connect": "voxist_transcribe::connect"
},
"VadDetection": {
"Detection": "vad_detect:detection"
},
@@ -175,6 +182,13 @@
"Disconnect": "openai_s2s::disconnect",
"ServerEvent": "openai_s2s::server_event"
},
"LlmEvents_Elevenlabs": {
"Error": "error",
"Connect": "elevenlabs_s2s::connect",
"ConnectFailure": "elevenlabs_s2s::connect_failed",
"Disconnect": "elevenlabs_s2s::disconnect",
"ServerEvent": "elevenlabs_s2s::server_event"
},
"LlmEvents_VoiceAgent": {
"Error": "error",
"Connect": "voice_agent_s2s::connect",
@@ -182,6 +196,13 @@
"Disconnect": "voice_agent_s2s::disconnect",
"ServerEvent": "voice_agent_s2s::server_event"
},
"LlmEvents_Ultravox": {
"Error": "error",
"Connect": "ultravox_s2s::connect",
"ConnectFailure": "ultravox_s2s::connect_failed",
"Disconnect": "ultravox_s2s::disconnect",
"ServerEvent": "ultravox_s2s::server_event"
},
"QueueResults": {
"Bridged": "bridged",
"Error": "error",
@@ -251,6 +272,16 @@
"ConnectFailure": "elevenlabs_tts_streaming::connect_failed",
"Connect": "elevenlabs_tts_streaming::connect"
},
"RimelabsTtsStreamingEvents": {
"Empty": "rimelabs_tts_streaming::empty",
"ConnectFailure": "rimelabs_tts_streaming::connect_failed",
"Connect": "rimelabs_tts_streaming::connect"
},
"CustomTtsStreamingEvents": {
"Empty": "custom_tts_streaming::empty",
"ConnectFailure": "custom_tts_streaming::connect_failed",
"Connect": "custom_tts_streaming::connect"
},
"TtsStreamingEvents": {
"Empty": "tts_streaming::empty",
"Pause": "tts_streaming::pause",
@@ -271,5 +302,9 @@
"Offline": "OFFLINE",
"GracefulShutdownInProgress":"SHUTDOWN_IN_PROGRESS"
},
"FEATURE_SERVER" : "feature-server"
"FEATURE_SERVER" : "feature-server",
"WS_CLOSE_CODES": {
"NormalClosure": 1000,
"GoingAway": 1001
}
}

View File

@@ -122,6 +122,10 @@ const speechMapper = (cred) => {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
}
else if ('voxist' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
}
else if ('whisper' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
@@ -143,6 +147,7 @@ const speechMapper = (cred) => {
obj.auth_token = o.auth_token;
obj.custom_stt_url = o.custom_stt_url;
obj.custom_tts_url = o.custom_tts_url;
obj.custom_tts_streaming_url = o.custom_tts_streaming_url;
}
} catch (err) {
console.log(err);

View File

@@ -17,8 +17,17 @@ class PlayFileNotFoundError extends NonFatalTaskError {
}
}
class HTTPResponseError extends Error {
constructor(statusCode) {
super('Unexpected HTTP Response');
delete this.stack;
this.statusCode = statusCode;
}
}
module.exports = {
SpeechCredentialError,
NonFatalTaskError,
PlayFileNotFoundError
PlayFileNotFoundError,
HTTPResponseError
};

View File

@@ -16,6 +16,7 @@ const {
NODE_ENV,
HTTP_USER_AGENT_HEADER,
} = require('../config');
const {HTTPResponseError} = require('./error');
const toBase64 = (str) => Buffer.from(str || '', 'utf8').toString('base64');
@@ -190,8 +191,7 @@ class HttpRequestor extends BaseRequestor {
followRedirects: false
});
if (![200, 202, 204].includes(statusCode)) {
const err = new Error();
err.statusCode = statusCode;
const err = new HTTPResponseError(statusCode);
throw err;
}
if (headers['content-type']?.includes('application/json')) {
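
A hedged usage sketch of the new error class; only the class itself comes from the diff, the calling code here is assumed.

const {HTTPResponseError} = require('./error');

try {
  // some webhook call made by HttpRequestor …
  throw new HTTPResponseError(404);
} catch (err) {
  if (err instanceof HTTPResponseError) {
    // statusCode is preserved, but no stack trace is carried (the constructor deletes it)
    console.log(err.message, err.statusCode);   // 'Unexpected HTTP Response' 404
  } else throw err;
}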

View File

@@ -43,7 +43,8 @@ const stickyVars = {
'DEEPGRAM_SPEECH_UTTERANCE_END_MS',
'DEEPGRAM_SPEECH_VAD_TURNOFF',
'DEEPGRAM_SPEECH_TAG',
'DEEPGRAM_SPEECH_MODEL_VERSION'
'DEEPGRAM_SPEECH_MODEL_VERSION',
'DEEPGRAM_SPEECH_FILLER_WORDS'
],
aws: [
'AWS_VOCABULARY_NAME',
@@ -104,6 +105,9 @@ const stickyVars = {
'ASSEMBLYAI_API_KEY',
'ASSEMBLYAI_WORD_BOOST'
],
voxist: [
'VOXIST_API_KEY',
],
speechmatics: [
'SPEECHMATICS_API_KEY',
'SPEECHMATICS_HOST',
@@ -208,7 +212,7 @@ const consolidateTranscripts = (bufferedTranscripts, channel, language, vendor)
const lastChar = acc.alternatives[0].transcript.slice(-1);
const firstChar = newTranscript.charAt(0);
if (lastChar.match(/\d/) && firstChar.match(/\d/)) {
if (vendor === 'speechmatics' || (lastChar.match(/\d/) && firstChar.match(/\d/))) {
acc.alternatives[0].transcript += newTranscript;
} else {
acc.alternatives[0].transcript += ` ${newTranscript}`;
@@ -516,6 +520,25 @@ const normalizeAssemblyAi = (evt, channel, language) => {
};
};
const normalizeVoxist = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
return {
language_code: language,
channel_tag: channel,
is_final: evt.type === 'final',
alternatives: [
{
confidence: 1.00,
transcript: evt.text,
}
],
vendor: {
name: 'voxist',
evt: copy
}
};
};
const normalizeSpeechmatics = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
const is_final = evt.message === 'AddTranscript';
@@ -566,6 +589,8 @@ module.exports = (logger) => {
return normalizeCobalt(evt, channel, language);
case 'assemblyai':
return normalizeAssemblyAi(evt, channel, language, shortUtterance);
case 'voxist':
return normalizeVoxist(evt, channel, language);
case 'verbio':
return normalizeVerbio(evt, channel, language);
case 'speechmatics':
@@ -704,6 +729,8 @@ module.exports = (logger) => {
//azureSttEndpointId overrides sttCredentials.custom_stt_endpoint
...(rOpts.azureSttEndpointId &&
{AZURE_SERVICE_ENDPOINT_ID: rOpts.azureSttEndpointId}),
...(azureOptions.speechRecognitionMode &&
{AZURE_RECOGNITION_MODE: azureOptions.speechRecognitionMode}),
};
}
else if ('nuance' === vendor) {
@@ -812,7 +839,9 @@ module.exports = (logger) => {
...(deepgramOptions.tag) &&
{DEEPGRAM_SPEECH_TAG: deepgramOptions.tag},
...(deepgramOptions.version) &&
{DEEPGRAM_SPEECH_MODEL_VERSION: deepgramOptions.version}
{DEEPGRAM_SPEECH_MODEL_VERSION: deepgramOptions.version},
...(deepgramOptions.fillerWords) &&
{DEEPGRAM_SPEECH_FILLER_WORDS: deepgramOptions.fillerWords}
};
}
else if ('soniox' === vendor) {
@@ -921,6 +950,13 @@ module.exports = (logger) => {
{ASSEMBLYAI_WORD_BOOST: JSON.stringify(rOpts.hints)})
};
}
else if ('voxist' === vendor) {
opts = {
...opts,
...(sttCredentials.api_key) &&
{VOXIST_API_KEY: sttCredentials.api_key},
};
}
else if ('verbio' === vendor) {
const {verbioOptions = {}} = rOpts;
opts = {
@@ -954,6 +990,36 @@ module.exports = (logger) => {
SPEECHMATICS_TRANSLATION_PARTIALS: speechmaticsOptions.translation_config.enable_partials ? 1 : 0
}
),
...(speechmaticsOptions.transcription_config?.domain &&
{SPEECHMATICS_DOMAIN: speechmaticsOptions.transcription_config.domain}),
...{SPEECHMATICS_MAX_DELAY: speechmaticsOptions.transcription_config?.max_delay || 0.7},
...{SPEECHMATICS_MAX_DELAY_MODE: speechmaticsOptions.transcription_config?.max_delay_mode || 'flexible'},
...(speechmaticsOptions.transcription_config?.diarization &&
{SPEECHMATICS_DIARIZATION: speechmaticsOptions.transcription_config.diarization}),
...(speechmaticsOptions.transcription_config?.speaker_diarization_config?.speaker_sensitivity &&
{SPEECHMATICS_DIARIZATION_SPEAKER_SENSITIVITY:
speechmaticsOptions.transcription_config.speaker_diarization_config.speaker_sensitivity}),
...(speechmaticsOptions.transcription_config?.speaker_diarization_config?.max_speakers &&
{SPEECHMATICS_DIARIZATION_MAX_SPEAKERS:
speechmaticsOptions.transcription_config.speaker_diarization_config.max_speakers}),
...(speechmaticsOptions.transcription_config?.output_locale &&
{SPEECHMATICS_OUTPUT_LOCALE: speechmaticsOptions.transcription_config.output_locale}),
...(speechmaticsOptions.transcription_config?.punctuation_overrides?.permitted_marks &&
{SPEECHMATICS_PUNCTUATION_ALLOWED:
speechmaticsOptions.transcription_config.punctuation_overrides.permitted_marks.join(',')}),
...(speechmaticsOptions.transcription_config?.punctuation_overrides?.sensitivity &&
{SPEECHMATICS_PUNCTUATION_SENSITIVITY:
speechmaticsOptions.transcription_config?.punctuation_overrides?.sensitivity}),
...(speechmaticsOptions.transcription_config?.operating_point &&
{SPEECHMATICS_OPERATING_POINT: speechmaticsOptions.transcription_config.operating_point}),
...(speechmaticsOptions.transcription_config?.enable_entities &&
{SPEECHMATICS_ENABLE_ENTTIES: speechmaticsOptions.transcription_config.enable_entities}),
...(speechmaticsOptions.transcription_config?.audio_filtering_config?.volume_threshold &&
{SPEECHMATICS_VOLUME_THRESHOLD:
speechmaticsOptions.transcription_config.audio_filtering_config.volume_threshold}),
...(speechmaticsOptions.transcription_config?.transcript_filtering_config?.remove_disfluencies &&
{SPEECHMATICS_REMOVE_DISFLUENCIES:
speechmaticsOptions.transcription_config.transcript_filtering_config.remove_disfluencies})
};
}
else if (vendor.startsWith('custom:')) {
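
An illustrative input/output pair for the normalizeVoxist helper added above; the event shape ({type, text}) is taken from the code, the values are made up.

const evt = {type: 'final', text: 'bonjour tout le monde'};
normalizeVoxist(evt, 1, 'fr-FR');
// => {
//      language_code: 'fr-FR', channel_tag: 1, is_final: true,
//      alternatives: [{confidence: 1.00, transcript: 'bonjour tout le monde'}],
//      vendor: {name: 'voxist', evt: {…}}
//    }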

View File

@@ -214,7 +214,7 @@ class TtsStreamingBuffer extends Emitter {
}
async _api(ep, args) {
const apiCmd = `uuid_${this.vendor}_tts_streaming`;
const apiCmd = `uuid_${this.vendor.startsWith('custom:') ? 'custom' : this.vendor}_tts_streaming`;
const res = await ep.api(apiCmd, `^^|${args.join('|')}`);
if (!res.body?.startsWith('+OK')) {
this.logger.info({args}, `Error calling ${apiCmd}: ${res.body}`);
@@ -277,7 +277,9 @@ class TtsStreamingBuffer extends Emitter {
// DH: add other vendors here as modules are added
'deepgram',
'cartesia',
'elevenlabs'
'elevenlabs',
'rimelabs',
'custom'
].forEach((vendor) => {
const eventClassName = `${vendor.charAt(0).toUpperCase() + vendor.slice(1)}TtsStreamingEvents`;
const eventClass = require('../utils/constants')[eventClassName];

View File

@@ -1,7 +1,7 @@
const assert = require('assert');
const BaseRequestor = require('./base-requestor');
const short = require('short-uuid');
const {HookMsgTypes} = require('./constants.json');
const {HookMsgTypes, WS_CLOSE_CODES} = require('./constants.json');
const Websocket = require('ws');
const snakeCaseKeys = require('./snakecase-keys');
const {
@@ -261,13 +261,13 @@ class WsRequestor extends BaseRequestor {
}
}
close() {
close(code = WS_CLOSE_CODES.NormalClosure) {
this.closedGracefully = true;
this.logger.debug('WsRequestor:close closing socket');
this.logger.debug(`WsRequestor:close closing socket with code ${code}`);
this._stopPingTimer();
try {
if (this.ws) {
this.ws.close(1000);
this.ws.close(code);
this.ws.removeAllListeners();
this.ws = null;
}
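
A short sketch of how the new close code flows end to end; the constants come from the diff, the requestor and app objects are assumed to be in scope.

const {WS_CLOSE_CODES} = require('./constants.json');   // {NormalClosure: 1000, GoingAway: 1001}

requestor.close();                               // ws.close(1000) – graceful shutdown, as before
app.requestor.close(WS_CLOSE_CODES.GoingAway);   // ws.close(1001) – e.g. app retrieval/parsing failed (see call-session hunk above)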

package-lock.json (generated) — 9702 changed lines
File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
{
"name": "jambonz-feature-server",
"version": "0.9.2",
"version": "0.9.3",
"main": "app.js",
"engines": {
"node": ">= 18.x"
@@ -31,9 +31,9 @@
"@jambonz/http-health-check": "^0.0.1",
"@jambonz/mw-registrar": "^0.2.7",
"@jambonz/realtimedb-helpers": "^0.8.8",
"@jambonz/speech-utils": "^0.2.1",
"@jambonz/speech-utils": "^0.2.3",
"@jambonz/stats-collector": "^0.1.10",
"@jambonz/verb-specifications": "^0.0.91",
"@jambonz/verb-specifications": "^0.0.95",
"@jambonz/time-series": "^0.2.13",
"@opentelemetry/api": "^1.8.0",
"@opentelemetry/exporter-jaeger": "^1.23.0",
@@ -47,8 +47,8 @@
"bent": "^7.3.12",
"debug": "^4.3.4",
"deepcopy": "^2.1.0",
"drachtio-fsmrf": "^3.0.46",
"drachtio-srf": "^4.5.35",
"drachtio-fsmrf": "^4.0.1",
"drachtio-srf": "^5.0.1",
"express": "^4.19.2",
"express-validator": "^7.0.1",
"moment": "^2.30.1",
@@ -56,7 +56,7 @@
"pino": "^8.20.0",
"polly-ssml-split": "^0.1.0",
"proxyquire": "^2.1.3",
"sdp-transform": "^2.14.2",
"sdp-transform": "^2.15.0",
"short-uuid": "^5.1.0",
"sinon": "^17.0.1",
"to-snake-case": "^1.0.0",