Compare commits

..

98 Commits

Author SHA1 Message Date
Dave Horton
f670626cf7 google gemini set default model to models/gemini-2.0-flash-live-001 2025-05-22 08:12:58 -04:00
Hoan Luu Huu
b92a9c700e fixed google s2s mcp initiate wrong functionDeclarations (#1203)
* fixed google s2s mcp initiate wrong functionDeclarations

* populate model from llm verb to google setup message
2025-05-18 20:15:26 -04:00
Anton Voylenko
761b7f26e7 fix: member mute on conference (#1048) 2025-05-18 14:11:56 -04:00
Dave Horton
76df58bfc2 fix logging in start task msg (#1202)
* fix logging in start task msg

* generate uuids using native crypto lib
2025-05-16 16:54:25 -04:00
Dave Horton
c1cb57c3f6 update version 2025-05-14 15:38:15 -04:00
Dave Horton
610c9af274 update db-helpers 2025-05-13 10:32:30 -04:00
Dave Horton
c0a35af591 update to 0.2.10 speech-utils (#1199) 2025-05-13 10:11:26 -04:00
Hoan Luu Huu
9585018147 support whisper instructions (#1198)
* support whisper instructions

* wip

* update speech utils and verb specification
2025-05-13 09:44:00 -04:00
Hoan Luu Huu
d7884a837a update deepgram voice agent (#1191)
* update deepgram voice agent

* fix lint

* wip

* wip
2025-05-13 07:43:48 -04:00
Dave Horton
ca0bf36815 dont apply snake casing to either env vars or tool call args (#1194) (#1197) 2025-05-12 12:56:58 -04:00
Sam Machin
6b68d32e2c end if last_word_end is -1 (#1196)
* end if last_word_end is -1

* lint
2025-05-12 12:11:32 -04:00
rammohan-y
8217a76697 Removed this.name from Task constructor, as LLM's names are populated post calling the base construction (#1192)
Also fixed a jslint error
2025-05-12 09:14:33 -04:00
rammohan-y
5c8237b382 Feat 1179 race issue with play verb (#1183)
* Fixed race issue between queueCommand false and queueCommand true when play task is involved

https://github.com/jambonz/jambonz-feature-server/issues/1179

* removed unnecessary emitter

* added destroy mechanism for stickyEventEmitter

* clearing stickyEventEmitter

* memory leak fix
2025-05-11 20:25:48 -04:00
Vasudev Anubrolu
4ff5c845de feat/864 update speech utils for playht on prem (#1187)
* feat/864 update speech utils for playht on prem

* feat/864 update speech utils version package lock
2025-05-09 12:34:14 -04:00
Anton Voylenko
78ebd08490 feat: prioritize JAMBONES_LOGLEVEL over db setting (#1188) 2025-05-09 09:41:23 -04:00
Hoan Luu Huu
8b18532f31 fixed tts streaming buffer cannot reset timeoutwhen lastUpdateTime is short (#1184)
* fixed tts streaming buffer cannot reset timeoutwhen lastUpdateTime is short

* wip
2025-05-07 10:26:11 -04:00
rammohan-y
e4bb00b382 Send stop-playback event (#1186)
* Send stop-playback event

https://github.com/jambonz/jambonz-feature-server/issues/1185

* check if not notified in playback-stop, ensure that the stop-playback is sent when kill-playback is sent
2025-05-07 08:59:59 -04:00
Hoan Luu Huu
14295dcebc support google s2s (#1169)
* support google s2s

* wip
2025-05-07 07:20:33 -04:00
Hoan Luu Huu
4d68c179ea sip_decline release callSession if ws requestor is used (#1182) 2025-05-06 10:01:36 -04:00
Hoan Luu Huu
6205959f53 fix microsoft stt max client buffer size error for transcribe verb (#1173) 2025-04-29 09:41:24 -04:00
Hoan Luu Huu
ed92cb2632 update speech utils 0.2.7 (#1177)
* update speech utils 0.2.7

* wip
2025-04-29 08:26:09 -04:00
Sam Machin
3098e04ed6 send env_vars in callHook (#1175)
* send env_vars in callHook

* lint

* add try/catch
2025-04-28 09:51:37 -04:00
Hoan Luu Huu
7e2fe72b6c fix say verb cannot failover if tts_response-code != 2xx (#1174) 2025-04-28 08:46:08 -04:00
Hoan Luu Huu
c2666b7a09 fixed deepgram gather cannot be timeout on empty transcription with continueAsr (#1171) 2025-04-28 08:36:31 -04:00
Hoan Luu Huu
9d54ca8116 Jambonz support Model context protocol (MCP) (#1150)
* Jambonz support Model context protocol (MCP)

* merged mcp tools with existing llmOptions.tools

* support list of mcp servers

* wip

* wip

* wip

* fix voice agent

* fix open-ai

* fix review comment

* fix deepgram voice agent

* update verb specification version
2025-04-24 06:50:53 -04:00
Sam Machin
472f4f4532 clientTools over webhooks (#1167)
* clientTools over webhooks

* lint

* simpler toolHook response
2025-04-23 09:15:16 -04:00
Hoan Luu Huu
63899d0091 update speech utils version 0.2.6 (#1172) 2025-04-23 08:22:47 -04:00
rammohan-y
31e6997746 Updated drachtio-srf version (#1170)
https://github.com/drachtio/drachtio-server/issues/424
2025-04-22 11:06:59 -04:00
Hoan Luu Huu
15b583ef2c only connect to drachtio server if connected to freeswitch (#1123)
* only connect to drachtio server if connected to freeswitch

* wip

* wip
2025-04-22 09:55:39 -04:00
Sam Machin
0bf2013934 add default model (#1147) 2025-04-22 09:49:24 -04:00
rammohan-y
182c310191 remove video from sdp in case of reInvite if the call is audio call (#1159)
https://github.com/jambonz/jambonz-feature-server/issues/1158
2025-04-18 09:33:16 -04:00
Sam Machin
4e74bab728 handle errors in createing call (#1164)
* move createCall into the try/catch and add a completionReason to results for errors

* add default completionReason

fixes #1165

* lint
2025-04-17 07:43:22 -04:00
rammohan-y
87195b6444 Stop tts streaming, when bargeIn is enabled for gather verb and input is detected (#1154)
https://github.com/jambonz/jambonz-feature-server/issues/1153
2025-04-14 09:18:50 -04:00
rammohan-y
eb5e6fa515 Updated db-helper to 0.9.11 (#1152)
https://github.com/jambonz/jambonz-feature-server/issues/1151
2025-04-14 08:22:28 -04:00
Dave Horton
305facb03b Fix/11labs no client config (#1149)
* update to verb specs

* add parameter to api call when there is not client config provided
2025-04-12 10:36:35 -04:00
Dave Horton
d310ba0ed1 reduce verbosity of logging (#1145) 2025-04-09 15:36:58 -04:00
Hoan Luu Huu
77f0fc85a3 ell tts support speech and pronunciation_dictionary_locators (#1137) 2025-04-09 12:32:06 -04:00
Sam Machin
c708b7d007 fix initial message format (#1144) 2025-04-09 10:43:07 -04:00
Hoan Luu Huu
343b382373 fixed ws-requestor missing hook for dial:confirm (#1143) 2025-04-09 07:29:08 -04:00
rammohan-y
0a541e089d Fix for https://github.com/jambonz/jambonz-feature-server/issues/1138 (#1139) 2025-04-04 09:02:18 -04:00
rammohan-y
d910981b1a Allow hangup verb on siprec call (#1136)
* Allow hangup verb on siprec call
https://github.com/jambonz/jambonz-feature-server/issues/1135

* added sip:decline to AllowedSipRecVerbs
2025-04-04 08:23:39 -04:00
Hoan Luu Huu
3f2744f032 fixed replaceEndpoint offer single codec that callee does not support (#1131) 2025-04-03 07:58:39 -04:00
Dave Horton
fcaf2e59e7 initial changes for openai stt (#1127)
* initial changes for openai stt

* wip

* wip

* wip

* wip

* wip

* make minBargeinWordCount work for openai

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wipp

* wip

* wip

* wip

* openai stt: support for prompt templates

* lint

* wip

* support openai semantic_vad

* wip

* transcribe supports openai stt

* sip

* wip

* wip

* refactor list of stt vendors that dont need to be restarted after a final transcript

* cleanup

* wip

* cleanup

* wip

* wip

* wip

* remove credentials from log

* comment
2025-03-28 13:14:58 -04:00
rammohan-y
ee846b283d Feat 1120 video call - remove video media from SDP if the call is audio call (#1124)
* sending jambonz:error when the incoming message is not parsable

https://github.com/jambonz/jambonz-feature-server/issues/1094

* writing an alert when incoming paylod is invalid

* added content to the jambonz:error payload

* removing video media from sdp if the call is an audio call. This is to avoid sending video media to destination if the incoming call is an audio call

* calling removeVideoSdp only when the environment variable JAMBONES_VIDEO_CALLS_ENABLED_IN_FS is set to true, this will ensure there are no regression issues for audio calls

* fixed jslint errors
2025-03-28 12:56:08 -04:00
Hoan Luu Huu
acdb8695a0 allow cartesia model_id is override from synthesizer option (#1130) 2025-03-27 13:37:57 -04:00
Hoan Luu Huu
f33f197e8d gather say support ttstream (#1128) 2025-03-27 07:19:19 -04:00
Sam Machin
9c437ab687 use deepgramOptions.model (#1126)
* use deepgramOptions.model

* lint

* Update transcription-utils.js
2025-03-24 12:25:29 -04:00
Dave Horton
1873694784 update to dractio-fsmrf@4.0.2 2025-03-17 08:50:10 -04:00
rammohan-y
d36e6b4c22 set the detected language as language_code when deepgram detects the language (#1116)
https://github.com/jambonz/jambonz-feature-server/issues/1115
2025-03-11 12:16:29 -04:00
rammohan-y
0470168757 updated realtimedb-helper to 0.8.13 (#1113) 2025-03-10 09:49:04 -04:00
Sam Machin
3120dbc3e0 Feature: add digitCount to amd-utils (#1111)
* add digitCount to amd-utils

* linting

* bump verb-specs
2025-03-06 12:01:51 -05:00
Hoan Luu Huu
8b8283e603 ws requestor should store initial sessionData when sending session:adulting (#1110) 2025-03-06 07:42:47 -05:00
Dave Horton
29de4b8878 fix crashing error with some media timeout scenarios (#1108) 2025-03-05 09:48:40 -05:00
Sam Machin
fa5fc1af9f allow transcribe_status update on Listen/Transcribe tasks (#1107) 2025-03-04 12:41:27 -05:00
Sam Machin
a5e778d7f3 call jambonzHangup when API ends call (#1104) 2025-03-03 07:23:03 -05:00
Dave Horton
bf4ae5b618 #1101 - allow listen url to have relative url and use base url of app… (#1102)
* #1101 - allow listen url to have relative url and use base url of application if ws

* remove logging
2025-02-28 14:19:45 -05:00
Sam Machin
ad2d99c417 if redirect to new server update requestor for baseURL (#1096) 2025-02-28 08:04:37 -05:00
Hoan Luu Huu
af4e17f447 fixed dial transcribe is not able to receive final transcribe when closing the call (#1073)
* fixed dial transcribe is not able to received final transcribe when close call.

* wip

* fix review comment

* support call session delay detroy ep when current task is transcribe

* wip

* wip

* fixed review comments

* fixed review comments
2025-02-27 07:25:01 -05:00
Hoan Luu Huu
cd2563ce17 support ultravox send user_input_message (#1100) 2025-02-26 19:50:09 -05:00
Sam Machin
af475cbea4 Update place-outdial.js (#1090)
* Update place-outdial.js

* update baseURL on redirect

* Revert "update baseURL on redirect"

This reverts commit 55778ba37edf029fa8687cd971b202af15478f95.
2025-02-25 15:09:21 -05:00
Anton Voylenko
69ba18acd1 Support sipindialog for conference (#1050)
* fix: add _onRequestWithinDialog catch block

* feat: support sipindialog for conference

* fix: remove any existing listener before adding new
2025-02-24 13:59:32 -05:00
rammohan-y
8bed44cce3 sending jambonz:error when the incoming message is not parsable (#1095)
* sending jambonz:error when the incoming message is not parsable

https://github.com/jambonz/jambonz-feature-server/issues/1094

* writing an alert when incoming paylod is invalid

* added content to the jambonz:error payload
2025-02-24 12:44:25 -05:00
Dave Horton
8ede41714b fix typo: change AWS_SNS_TOPIC_ARM to AWS_SNS_TOPIC_ARN (#1093) 2025-02-24 10:51:07 -05:00
Dave Horton
ee54e4341a update drachtio-srf 2025-02-20 10:17:53 -05:00
Hoan Luu Huu
4bf2f42f33 support ultravox sends createCall response to app (#1091)
* support ultravox sends createCall response to app

* update type issue

Co-authored-by: Matt Hertogs <matthertogs@gmail.com>

---------

Co-authored-by: Matt Hertogs <matthertogs@gmail.com>
2025-02-20 07:07:03 -05:00
Dave Horton
e09c763d3a #1088 ignore UtteranceEnd if we have unprocessed words (#1089)
* #1088 ignore UtteranceEnd if we have unprocessed words

* wip
2025-02-18 16:30:59 -05:00
Dave Horton
e8a7366526 handle exceptions if we invoke _lccCallHook with new url and it rejects for some reason (#1087) 2025-02-18 13:03:34 -05:00
Dave Horton
122d267816 better handling of flush commands (#1081)
* better handling of flush commands

* rework buffering of tokens

* gather: when returning low confidence also provide the transcript

* better error handling in tts:tokens

* special handling of asr timeout for speechmatics

* remove some logs that were excessively wordy
2025-02-18 09:31:11 -05:00
Hoan Luu Huu
33bca8e67c tts stream should save tts.response_time metric (#1086)
* tts stream should save tts.response_time metric

* wip
2025-02-18 08:45:21 -05:00
Hoan Luu Huu
9c05fd3deb fix dialMusic keep running in infinity loop (#1085) 2025-02-18 07:08:19 -05:00
Hoan Luu Huu
7fa0041f6b support deepgram options noDelay (#1083)
* support deepgram options noDelay

* update verb specification version
2025-02-15 16:39:30 -05:00
Hoan Luu Huu
59d9c62cbe support create call with target.proxy (#1075) 2025-02-11 09:24:04 -05:00
Dave Horton
55b408eecb add support for deepgram keyterms (#1071) 2025-02-07 13:12:25 -05:00
Hoan Luu Huu
f241faa871 update speech utils version (#1070) 2025-02-07 08:00:33 -05:00
rammohan-y
65d35c893c Feat/1067 set default language if language is undefined (#1068)
* sending recognition mode channel variable

* change verb-specifications version

* feat/1067 - setting default language to previously set language for the recognizer object if the vendor is default

* added undefined check for fallbackVendor and fallbackLanguage
2025-02-06 08:06:56 -05:00
Hoan Luu Huu
dbdc1cd43d support voxist stt (#1066)
* support voxist stt

* wip
2025-02-05 08:33:35 -05:00
Hoan Luu Huu
7105453d81 support caching tts audio with model/model_id (#1062)
* support caching tts audio with model/model_id

* update speech utils version
2025-02-03 08:47:44 -05:00
Hoan Luu Huu
8487a4be68 support elevenlabs private agent (#1063) 2025-02-02 22:10:51 -05:00
Hoan Luu Huu
2ddcd53d6b support elevenlabs s2s (#1052)
* support elevenlabs s2s

* wip

* wip

* wip
2025-02-02 10:29:48 -05:00
rammohan-y
a4d07ddce0 Feat/1057 recognition mode (#1060)
* sending recognition mode channel variable

* change verb-specifications version
2025-01-28 08:06:04 -05:00
rammohan-y
16e044cabf feat/1053: added empty check on this.currentTask (#1054) 2025-01-22 07:16:44 -05:00
Hoan Luu Huu
ba282d775d support rimelabs tts streaming (#1047) 2025-01-18 08:17:33 -05:00
Dave Horton
a194ba833e Feat/1041 (#1045)
* initial changes for stream synonym to listen

* listen on B endpoint if nested listen in dial has channel === 2
2025-01-17 08:48:39 -05:00
rammohan-y
77f3d9d7ec feat/1034: sending socket close code when there is no response from the websocket app (#1035) 2025-01-16 10:13:00 -05:00
Sam Machin
4dbc7df93d new error for HTTP responses without stack trace (#1044)
* new error for HTTP responses without stack trace

* lint
2025-01-16 08:05:17 -05:00
Dave Horton
f71f0ac69a Fix/speechmatics (#1042)
* add speechmatics options

* wip

* speechmatics does not do endpointing for us so we need to flip on continuousAsr

* speechmatics: continousAsr should be at least equal to max_delay, if set
2025-01-15 19:12:15 -05:00
Dave Horton
edb7e21ff9 update deps 2025-01-14 10:45:38 -05:00
Dave Horton
cafd9530a2 update drachtio-srf and fsmrf to main branch releases (#1038) 2025-01-14 10:01:33 -05:00
Hoan Luu Huu
ca8cace284 support custom tts streaming (#1023)
* support custom tts streaming

* wip

* wip

* wip

* wip

* wip

* wip

* fix review comments
2025-01-14 07:24:06 -05:00
Hoan Luu Huu
499c800213 Feat/ultravox s2s (#1032)
* support ultravox_s2s

* support ultravox_s2s

* support ultravox_s2s

* wip

* wip

* wip

* wip

* fix ultravox toolcall

* wip
2025-01-14 07:11:55 -05:00
Sam Machin
97952afb1d add deepgram filler words (#1036)
* add deepgram filler words

* Update package.json

* Update package-lock.json
2025-01-13 11:07:24 -05:00
Hoan Luu Huu
f4e68d0ea1 fix openai_s2s is using wrong model (#1031)
* fix openai_s2s is using wrong model

* wip

* wip
2025-01-11 08:38:14 -05:00
Dave Horton
6bad1a22f3 fix #1025 (#1026)
* fix #1025

* redirect verb should be able to redirect to a new websocket endpoint
2025-01-09 15:45:20 -05:00
Hoan Luu Huu
fcefa1ff31 fix inband dtmf does not work in dial verb (#1018) 2025-01-08 18:29:43 -05:00
Hoan Luu Huu
67cd53c930 rest:dial support timeLimit (#1024)
* rest:dial support timeLimit

* wip

* wip

* clear maxCallDuration timer
2025-01-07 12:21:09 -05:00
Dave Horton
a59784b8ab update base image to node:20-alpine (#1022) 2025-01-04 16:38:25 -05:00
Dave Horton
a2581eaeb4 tts throttling and send user_interruption event (#1019)
* tts throttling and send user_interruption event

* tts streaming: if we get a flush with tokens pending, send the flush after the tokens

* wip
2025-01-04 16:34:01 -05:00
Dave Horton
3706aa4d98 #1020 - fix for sticky bargein (#1021) 2025-01-03 10:41:35 -05:00
57 changed files with 4289 additions and 9542 deletions

View File

@@ -1,4 +1,4 @@
FROM --platform=linux/amd64 node:18.15-alpine3.16 as base
FROM --platform=linux/amd64 node:20-alpine as base
RUN apk --update --no-cache add --virtual .builds-deps build-base python3

View File

@@ -13,7 +13,7 @@ Configuration is provided via environment variables:
|AWS_ACCESS_KEY_ID| aws access key id, used for TTS/STT as well SNS notifications|no|
|AWS_REGION| aws region| no|
|AWS_SECRET_ACCESS_KEY| aws secret access key, used per above|no|
|AWS_SNS_TOPIC_ARM| aws sns topic arn that scale-in lifecycle notifications will be published to|no|
|AWS_SNS_TOPIC_ARN| aws sns topic arn that scale-in lifecycle notifications will be published to|no|
|DRACHTIO_HOST| ip address of drachtio server (typically '127.0.0.1')|yes|
|DRACHTIO_PORT| listening port of drachtio server for control connections (typically 9022)|yes|
|DRACHTIO_SECRET| shared secret|yes|
@@ -72,7 +72,7 @@ module.exports = {
STATS_PORT: 8125,
STATS_PROTOCOL: 'tcp',
STATS_TELEGRAF: 1,
AWS_SNS_TOPIC_ARM: 'arn:aws:sns:us-west-1:xxxxxxxxxxx:terraform-20201107200347128600000002',
AWS_SNS_TOPIC_ARN: 'arn:aws:sns:us-west-1:xxxxxxxxxxx:terraform-20201107200347128600000002',
JAMBONES_NETWORK_CIDR: '172.31.0.0/16',
JAMBONES_MYSQL_HOST: 'aurora-cluster-jambonz.cluster-yyyyyyyyyyy.us-west-1.rds.amazonaws.com',
JAMBONES_MYSQL_USER: 'admin',

95
app.js
View File

@@ -27,8 +27,61 @@ const pino = require('pino');
const logger = pino(opts, pino.destination({sync: false}));
const {LifeCycleEvents, FS_UUID_SET_NAME, SystemState, FEATURE_SERVER} = require('./lib/utils/constants');
const installSrfLocals = require('./lib/utils/install-srf-locals');
installSrfLocals(srf, logger);
const createHttpListener = require('./lib/utils/http-listener');
const healthCheck = require('@jambonz/http-health-check');
logger.on('level-change', (lvl, _val, prevLvl, _prevVal, instance) => {
if (logger !== instance) {
return;
}
logger.info('system log level %s was changed to %s', prevLvl, lvl);
});
// Install the srf locals
installSrfLocals(srf, logger, {
onFreeswitchConnect: (wraper) => {
// Only connect to drachtio if freeswitch is connected
logger.info(`connected to freeswitch at ${wraper.ms.address}, start drachtio server`);
if (DRACHTIO_HOST) {
srf.connect({host: DRACHTIO_HOST, port: DRACHTIO_PORT, secret: DRACHTIO_SECRET });
srf.on('connect', (err, hp) => {
const arr = /^(.*)\/(.*)$/.exec(hp.split(',').pop());
srf.locals.localSipAddress = `${arr[2]}`;
logger.info(`connected to drachtio listening on ${hp}, local sip address is ${srf.locals.localSipAddress}`);
});
}
else {
logger.info(`listening for drachtio requests on port ${DRACHTIO_PORT}`);
srf.listen({port: DRACHTIO_PORT, secret: DRACHTIO_SECRET});
}
// Start Http server
createHttpListener(logger, srf)
.then(({server, app}) => {
httpServer = server;
healthCheck({app, logger, path: '/', fn: getCount});
return {server, app};
})
.catch((err) => {
logger.error(err, 'Error creating http listener');
});
},
onFreeswitchDisconnect: (wraper) => {
// check if all freeswitch connections are lost, disconnect drachtio server
logger.info(`lost connection to freeswitch at ${wraper.ms.address}`);
const ms = srf.locals.getFreeswitch();
if (!ms) {
logger.info('no freeswitch connections, stopping drachtio server');
disconnect();
}
}
});
if (NODE_ENV === 'test') {
srf.on('error', (err) => {
logger.info(err, 'Error connecting to drachtio');
});
}
// Init services
const writeSystemAlerts = srf.locals?.writeSystemAlerts;
if (writeSystemAlerts) {
writeSystemAlerts({
@@ -54,24 +107,6 @@ const {
const InboundCallSession = require('./lib/session/inbound-call-session');
const SipRecCallSession = require('./lib/session/siprec-call-session');
if (DRACHTIO_HOST) {
srf.connect({host: DRACHTIO_HOST, port: DRACHTIO_PORT, secret: DRACHTIO_SECRET });
srf.on('connect', (err, hp) => {
const arr = /^(.*)\/(.*)$/.exec(hp.split(',').pop());
srf.locals.localSipAddress = `${arr[2]}`;
logger.info(`connected to drachtio listening on ${hp}, local sip address is ${srf.locals.localSipAddress}`);
});
}
else {
logger.info(`listening for drachtio requests on port ${DRACHTIO_PORT}`);
srf.listen({port: DRACHTIO_PORT, secret: DRACHTIO_SECRET});
}
if (NODE_ENV === 'test') {
srf.on('error', (err) => {
logger.info(err, 'Error connecting to drachtio');
});
}
srf.use('invite', [
initLocals,
createRootSpan,
@@ -97,27 +132,20 @@ sessionTracker.on('idle', () => {
}
});
const getCount = () => sessionTracker.count;
const healthCheck = require('@jambonz/http-health-check');
let httpServer;
const createHttpListener = require('./lib/utils/http-listener');
createHttpListener(logger, srf)
.then(({server, app}) => {
httpServer = server;
healthCheck({app, logger, path: '/', fn: getCount});
return {server, app};
})
.catch((err) => {
logger.error(err, 'Error creating http listener');
});
const monInterval = setInterval(async() => {
srf.locals.stats.gauge('fs.sip.calls.count', sessionTracker.count);
try {
const systemInformation = await srf.locals.dbHelpers.lookupSystemInformation();
if (systemInformation && systemInformation.log_level) {
logger.level = systemInformation.log_level;
const envLogLevel = logger.levels.values[JAMBONES_LOGLEVEL.toLowerCase()];
const dbLogLevel = logger.levels.values[systemInformation.log_level];
const appliedLogLevel = Math.min(envLogLevel, dbLogLevel);
if (logger.levelVal !== appliedLogLevel) {
logger.level = logger.levels.labels[Math.min(envLogLevel, dbLogLevel)];
}
}
} catch (err) {
if (process.env.NODE_ENV === 'test') {
@@ -133,6 +161,7 @@ const disconnect = () => {
httpServer?.on('close', resolve);
httpServer?.close();
srf.disconnect();
srf.removeAllListeners();
srf.locals.mediaservers?.forEach((ms) => ms.disconnect());
});
};

View File

@@ -93,7 +93,7 @@ const AWS_REGION = process.env.AWS_REGION;
const AWS_ACCESS_KEY_ID = process.env.AWS_ACCESS_KEY_ID;
const AWS_SECRET_ACCESS_KEY = process.env.AWS_SECRET_ACCESS_KEY;
const AWS_SNS_PORT = parseInt(process.env.AWS_SNS_PORT, 10) || 3001;
const AWS_SNS_TOPIC_ARM = process.env.AWS_SNS_TOPIC_ARM;
const AWS_SNS_TOPIC_ARN = process.env.AWS_SNS_TOPIC_ARN;
const AWS_SNS_PORT_MAX = parseInt(process.env.AWS_SNS_PORT_MAX, 10) || 3005;
const GCP_JSON_KEY = process.env.GCP_JSON_KEY;
@@ -139,6 +139,9 @@ const JAMBONES_USE_FREESWITCH_TIMER_FD = process.env.JAMBONES_USE_FREESWITCH_TIM
const JAMBONES_DIAL_SBC_FOR_REGISTERED_USER = process.env.JAMBONES_DIAL_SBC_FOR_REGISTERED_USER || false;
const JAMBONES_MEDIA_TIMEOUT_MS = process.env.JAMBONES_MEDIA_TIMEOUT_MS || 0;
const JAMBONES_MEDIA_HOLD_TIMEOUT_MS = process.env.JAMBONES_MEDIA_HOLD_TIMEOUT_MS || 0;
// jambonz
const JAMBONES_TRANSCRIBE_EP_DESTROY_DELAY_MS =
process.env.JAMBONES_TRANSCRIBE_EP_DESTROY_DELAY_MS;
module.exports = {
JAMBONES_MYSQL_HOST,
@@ -189,7 +192,7 @@ module.exports = {
AWS_ACCESS_KEY_ID,
AWS_SECRET_ACCESS_KEY,
AWS_SNS_PORT,
AWS_SNS_TOPIC_ARM,
AWS_SNS_TOPIC_ARN,
AWS_SNS_PORT_MAX,
ANCHOR_MEDIA_ALWAYS,
@@ -227,5 +230,6 @@ module.exports = {
JAMBONES_USE_FREESWITCH_TIMER_FD,
JAMBONES_DIAL_SBC_FOR_REGISTERED_USER,
JAMBONES_MEDIA_TIMEOUT_MS,
JAMBONES_MEDIA_HOLD_TIMEOUT_MS
JAMBONES_MEDIA_HOLD_TIMEOUT_MS,
JAMBONES_TRANSCRIBE_EP_DESTROY_DELAY_MS
};

View File

@@ -3,7 +3,7 @@ const makeTask = require('../../tasks/make_task');
const RestCallSession = require('../../session/rest-call-session');
const CallInfo = require('../../session/call-info');
const {CallDirection, CallStatus} = require('../../utils/constants');
const uuidv4 = require('uuid-random');
const crypto = require('crypto');
const SipError = require('drachtio-srf').SipError;
const { validationResult, body } = require('express-validator');
const { validate } = require('@jambonz/verb-specifications');
@@ -80,7 +80,7 @@ router.post('/',
const {lookupTeamsByAccount, lookupAccountBySid} = srf.locals.dbHelpers;
const account = await lookupAccountBySid(req.body.account_sid);
const accountInfo = await lookupAccountDetails(req.body.account_sid);
const callSid = uuidv4();
const callSid = crypto.randomUUID();
const application = req.body.application_sid ? await lookupAppBySid(req.body.application_sid) : null;
const record_all_calls = account.record_all_calls || (application && application.record_all_calls);
const recordOutputFormat = account.record_format || 'mp3';
@@ -100,6 +100,7 @@ router.post('/',
...(req.body?.application_sid && {'X-Application-Sid': req.body.application_sid}),
...(restDial.fromHost && {'X-Preferred-From-Host': restDial.fromHost}),
...(record_all_calls && {'X-Record-All-Calls': recordOutputFormat}),
...(target.proxy && {'X-SIP-Proxy': target.proxy}),
...target.headers
};

View File

@@ -1,5 +1,5 @@
const uuidv4 = require('uuid-random');
const {CallDirection, AllowedSipRecVerbs} = require('./utils/constants');
const crypto = require('crypto');
const {CallDirection, AllowedSipRecVerbs, WS_CLOSE_CODES} = require('./utils/constants');
const {parseSiprecPayload} = require('./utils/siprec-utils');
const CallInfo = require('./session/call-info');
const HttpRequestor = require('./utils/http-requestor');
@@ -15,6 +15,7 @@ const {
JAMBONES_DISABLE_DIRECT_P2P_CALL
} = require('./config');
const { createJambonzApp } = require('./dynamic-apps');
const { decrypt } = require('./utils/encrypt-decrypt');
module.exports = function(srf, logger) {
const {
@@ -45,7 +46,7 @@ module.exports = function(srf, logger) {
logger.info('getAccountDetails - rejecting call due to missing X-Account-Sid header');
return res.send(500);
}
const callSid = req.has('X-Retain-Call-Sid') ? req.get('X-Retain-Call-Sid') : uuidv4();
const callSid = req.has('X-Retain-Call-Sid') ? req.get('X-Retain-Call-Sid') : crypto.randomUUID();
const account_sid = req.get('X-Account-Sid');
req.locals = {callSid, account_sid, callId};
@@ -348,11 +349,10 @@ module.exports = function(srf, logger) {
}
req.locals.application = app2;
// eslint-disable-next-line no-unused-vars
const {call_hook, call_status_hook, ...appInfo} = app; // mask sensitive data like user/pass on webhook
// eslint-disable-next-line no-unused-vars
const {requestor, notifier, ...loggable} = appInfo;
const {requestor, notifier, env_vars, ...loggable} = appInfo;
logger.info({app: loggable}, `retrieved application for incoming call to ${req.locals.calledNumber}`);
req.locals.callInfo = new CallInfo({
req,
@@ -417,10 +417,22 @@ module.exports = function(srf, logger) {
...(app.fallback_speech_recognizer_language && {fallback_language: app.fallback_speech_recognizer_language})
}
};
let env_vars;
try {
if (app.env_vars) {
const d_env_vars = JSON.parse(decrypt(app.env_vars));
logger.info(`Setting env_vars: ${Object.keys(d_env_vars)}`); // Only log the keys not the values
env_vars = d_env_vars;
}
} catch (error) {
logger.info('Unable to set env_vars', error);
}
const params = Object.assign(['POST', 'WS'].includes(app.call_hook.method) ? { sip: req.msg } : {},
req.locals.callInfo,
{ service_provider_sid: req.locals.service_provider_sid },
{ defaults });
{ defaults },
{ env_vars }
);
logger.debug({ params }, 'sending initial webhook');
const obj = rootSpan.startChildSpan('performAppWebhook');
span = obj.span;
@@ -460,7 +472,7 @@ module.exports = function(srf, logger) {
}).catch((err) => this.logger.info({err}, 'Error generating alert for parsing application'));
logger.info({err}, `Error retrieving or parsing application: ${err?.message}`);
res.send(480, {headers: {'X-Reason': err?.message || 'unknown'}});
app.requestor.close();
app.requestor.close(WS_CLOSE_CODES.GoingAway);
}
}

View File

@@ -1,6 +1,6 @@
const {CallDirection, CallStatus} = require('../utils/constants');
const parseUri = require('drachtio-srf').parseUri;
const uuidv4 = require('uuid-random');
const crypto = require('crypto');
const {JAMBONES_API_BASE_URL} = require('../config');
/**
* @classdesc Represents the common information for all calls
@@ -57,7 +57,7 @@ class CallInfo {
// outbound call that is a child of an existing call
const {req, parentCallInfo, to, callSid} = opts;
srf = req.srf;
this.callSid = callSid || uuidv4();
this.callSid = callSid || crypto.randomUUID();
this.parentCallSid = parentCallInfo.callSid;
this.accountSid = parentCallInfo.accountSid;
this.applicationSid = parentCallInfo.applicationSid;

View File

@@ -23,6 +23,7 @@ const HttpRequestor = require('../utils/http-requestor');
const WsRequestor = require('../utils/ws-requestor');
const ActionHookDelayProcessor = require('../utils/action-hook-delay');
const TtsStreamingBuffer = require('../utils/tts-streaming-buffer');
const StickyEventEmitter = require('../utils/sticky-event-emitter');
const {parseUri} = require('drachtio-srf');
const {
JAMBONES_INJECT_CONTENT,
@@ -30,7 +31,8 @@ const {
AWS_REGION,
JAMBONES_USE_FREESWITCH_TIMER_FD,
JAMBONES_MEDIA_TIMEOUT_MS,
JAMBONES_MEDIA_HOLD_TIMEOUT_MS
JAMBONES_MEDIA_HOLD_TIMEOUT_MS,
JAMBONES_TRANSCRIBE_EP_DESTROY_DELAY_MS
} = require('../config');
const bent = require('bent');
const BackgroundTaskManager = require('../utils/background-task-manager');
@@ -38,6 +40,7 @@ const dbUtils = require('../utils/db-utils');
const BADPRECONDITIONS = 'preconditions not met';
const CALLER_CANCELLED_ERR_MSG = 'Response not sent due to unknown transaction';
const { NonFatalTaskError} = require('../utils/error');
const { sleepFor } = require('../utils/helpers');
const sqlRetrieveQueueEventHook = `SELECT * FROM webhooks
WHERE webhook_sid =
(
@@ -77,6 +80,10 @@ class CallSession extends Emitter {
this.callGone = false;
this.notifiedComplete = false;
this.rootSpan = rootSpan;
this.stickyEventEmitter = new StickyEventEmitter();
this.stickyEventEmitter.onSuccess = () => {
this.taskInProgress = null;
};
this.backgroundTaskManager = new BackgroundTaskManager({
cs: this,
logger,
@@ -133,6 +140,15 @@ class CallSession extends Emitter {
this.requestor.on('handover', handover.bind(this));
this.requestor.on('reconnect-error', this._onSessionReconnectError.bind(this));
}
/**
* Currently this is used for openai STT, which has a prompt paramater and
* we have an experimental feature where you can send the conversation
* history in the prompt
*/
this.conversationTurns = [];
this.on('userSaid', this._onUserSaid.bind(this));
this.on('botSaid', this._onBotSaid.bind(this));
}
/**
@@ -866,6 +882,8 @@ class CallSession extends Emitter {
}
}
clearTtsStream() {
this.requestor?.request('tts:streaming-event', '/streaming-event', {event_type: 'user_interruption'})
.catch((err) => this.logger.info({err}, 'CallSession:clearTtsStream - Error sending user_interruption'));
this.ttsStreamingBuffer?.clear();
}
@@ -949,7 +967,7 @@ class CallSession extends Emitter {
(type === 'tts' && credential.use_for_tts) ||
(type === 'stt' && credential.use_for_stt)
)) {
this.logger.info(
this.logger.debug(
`${type}: ${credential.vendor} ${credential.label ? `, label: ${credential.label}` : ''} `);
if ('google' === vendor) {
if (type === 'tts' && !credential.tts_tested_ok ||
@@ -1082,6 +1100,12 @@ class CallSession extends Emitter {
api_key: credential.api_key
};
}
else if ('voxist' === vendor) {
return {
speech_credential_sid: credential.speech_credential_sid,
api_key: credential.api_key
};
}
else if ('whisper' === vendor) {
return {
api_key: credential.api_key,
@@ -1096,18 +1120,24 @@ class CallSession extends Emitter {
};
}
else if ('speechmatics' === vendor) {
this.logger.info({credential}, 'CallSession:getSpeechCredentials - speechmatics credential');
return {
api_key: credential.api_key,
speechmatics_stt_uri: credential.speechmatics_stt_uri,
};
}
else if ('openai' === vendor) {
return {
api_key: credential.api_key,
model_id: credential.model_id,
};
}
else if (vendor.startsWith('custom:')) {
return {
speech_credential_sid: credential.speech_credential_sid,
auth_token: credential.auth_token,
custom_stt_url: credential.custom_stt_url,
custom_tts_url: credential.custom_tts_url
custom_tts_url: credential.custom_tts_url,
custom_tts_streaming_url: credential.custom_tts_streaming_url
};
}
}
@@ -1155,7 +1185,10 @@ class CallSession extends Emitter {
const taskNum = ++this.taskIdx;
const stackNum = this.stackIdx;
const task = this.tasks.shift();
this.logger.info(`CallSession:exec starting task #${stackNum}:${taskNum}: ${task.name}`);
this.isCurTaskPlay = TaskName.Play === task.name;
this.taskInProgress = task;
this.logger.info(
`CallSession:exec starting task #${stackNum}:${taskNum}: ${task.name} (task id: ${task.taskId})`);
this._notifyTaskStatus(task, {event: 'starting'});
// Register verbhook span wait for end
task.on('VerbHookSpanWaitForEnd', ({span}) => {
@@ -1229,7 +1262,7 @@ class CallSession extends Emitter {
this.logger.info('CallSession:exec all tasks complete');
this._stopping = true;
this._onTasksDone();
this._clearResources();
await this._clearResources();
if (!this.isConfirmCallSession && !this.isSmsCallSession) sessionTracker.remove(this.callSid);
@@ -1285,6 +1318,11 @@ class CallSession extends Emitter {
this.wakeupResolver({reason: 'session ended'});
this.wakeupResolver = null;
}
if (this._maxCallDurationTimer) {
clearTimeout(this._maxCallDurationTimer);
this._maxCallDurationTimer = null;
}
}
/**
@@ -1295,7 +1333,7 @@ class CallSession extends Emitter {
_lccCallStatus(opts) {
if (opts.call_status === CallStatus.Completed && this.dlg) {
this.logger.info('CallSession:_lccCallStatus hanging up call due to request from api');
this._callerHungup();
this._jambonzHangup();
}
else if (opts.call_status === CallStatus.NoAnswer) {
if (this.direction === CallDirection.Inbound) {
@@ -1429,7 +1467,7 @@ class CallSession extends Emitter {
}
if (tasks) {
const t = normalizeJambones(this.logger, tasks).map((tdata) => makeTask(this.logger, tdata));
this.logger.info({tasks: listTaskNames(t)}, 'CallSession:_lccCallHook new task list');
this.logger.debug({tasks: listTaskNames(t)}, 'CallSession:_lccCallHook new task list');
this.replaceApplication(t);
if (this.wakeupResolver) {
//this.logger.debug({resolution}, 'CallSession:_onCommand - got commands, waking up..');
@@ -1460,7 +1498,7 @@ class CallSession extends Emitter {
if (!listenTask) {
return this.logger.info('CallSession:_lccListenStatus - invalid listen_status: Dial does not have a listen');
}
listenTask.updateListen(opts.listen_status);
listenTask.updateListen(opts.listen_status || opts.stream_status);
}
/**
@@ -1473,7 +1511,7 @@ class CallSession extends Emitter {
this.backgroundTaskManager.getTask('transcribe').updateTranscribe(opts.transcribe_status);
}
const task = this.currentTask;
if (!task || ![TaskName.Dial, TaskName.Transcribe].includes(task.name)) {
if (!task || ![TaskName.Dial, TaskName.Transcribe, TaskName.Listen].includes(task.name)) {
return this.logger.info(`CallSession:_lccTranscribeStatus - invalid transcribe_status in task ${task.name}`);
}
const transcribeTask = task.name === TaskName.Transcribe ? task : task.transcribeTask;
@@ -1602,7 +1640,7 @@ Duration=${duration} `
// this whole thing requires us to be in a Dial verb
const task = this.currentTask;
if (!task || ![TaskName.Dial, TaskName.Listen].includes(task.name)) {
return this.logger.info('CallSession:_lccWhisper - invalid command since we are not in a dial or listen');
return this.logger.info('CallSession:_lccWhisper - invalid command since we are not in a dial or stream/listen');
}
// allow user to provide a url object, a url string, an array of tasks, or a single task
@@ -1696,10 +1734,10 @@ Duration=${duration} `
this.currentTask.ep :
this.ep;
const db = parseDecibels(opts);
this.logger.info(`_lccBoostAudioSignal: boosting audio signal by ${db} dB`);
this.logger.debug(`_lccBoostAudioSignal: boosting audio signal by ${db} dB`);
const args = [ep.uuid, 'setGain', db];
const response = await ep.api('uuid_dub', args);
this.logger.info({response}, '_lccBoostAudioSignal: response from freeswitch');
this.logger.debug({response}, '_lccBoostAudioSignal: response from freeswitch');
}
async _lccMediaPath(desiredPath) {
@@ -1752,7 +1790,6 @@ Duration=${duration} `
let res;
try {
res = await this.ttsStreamingBuffer?.bufferTokens(tokens);
this.logger.info({id, res}, 'CallSession:_lccTtsTokens - tts:tokens-result');
} catch (err) {
this.logger.info(err, 'CallSession:_lccTtsTokens');
}
@@ -1785,62 +1822,66 @@ Duration=${duration} `
async updateCall(opts, callSid) {
this.logger.debug(opts, 'CallSession:updateCall');
if (opts.call_status) {
return this._lccCallStatus(opts);
}
if (opts.call_hook || opts.child_call_hook) {
return await this._lccCallHook(opts);
}
if (opts.listen_status) {
await this._lccListenStatus(opts);
}
if (opts.transcribe_status) {
await this._lccTranscribeStatus(opts);
}
else if (opts.mute_status) {
await this._lccMuteStatus(opts.mute_status === 'mute', callSid);
}
else if (opts.conf_hold_status) {
await this._lccConfHoldStatus(opts);
}
else if (opts.conf_mute_status) {
await this._lccConfMuteStatus(opts);
}
else if (opts.sip_request) {
const res = await this._lccSipRequest(opts, callSid);
return {status: res.status, reason: res.reason};
} else if (opts.dtmf) {
await this._lccDtmf(opts, callSid);
}
else if (opts.record) {
await this.notifyRecordOptions(opts.record);
}
else if (opts.tag) {
return this._lccTag(opts);
}
else if (opts.conferenceParticipantAction) {
return this._lccConferenceParticipantAction(opts.conferenceParticipantAction);
}
else if (opts.dub) {
return this._lccDub(opts.dub, callSid);
}
else if (opts.boostAudioSignal) {
return this._lccBoostAudioSignal(opts, callSid);
}
else if (opts.media_path) {
return this._lccMediaPath(opts.media_path, callSid);
}
else if (opts.llm_tool_output) {
return this._lccToolOutput(opts.tool_call_id, opts.llm_tool_output, callSid);
}
else if (opts.llm_update) {
return this._lccLlmUpdate(opts.llm_update, callSid);
}
try {
if (opts.call_status) {
return this._lccCallStatus(opts);
}
if (opts.call_hook || opts.child_call_hook) {
return await this._lccCallHook(opts);
}
if (opts.listen_status || opts.stream_status) {
await this._lccListenStatus(opts);
}
if (opts.transcribe_status) {
await this._lccTranscribeStatus(opts);
}
else if (opts.mute_status) {
await this._lccMuteStatus(opts.mute_status === 'mute', callSid);
}
else if (opts.conf_hold_status) {
await this._lccConfHoldStatus(opts);
}
else if (opts.conf_mute_status) {
await this._lccConfMuteStatus(opts);
}
else if (opts.sip_request) {
const res = await this._lccSipRequest(opts, callSid);
return {status: res.status, reason: res.reason};
} else if (opts.dtmf) {
await this._lccDtmf(opts, callSid);
}
else if (opts.record) {
await this.notifyRecordOptions(opts.record);
}
else if (opts.tag) {
return this._lccTag(opts);
}
else if (opts.conferenceParticipantAction) {
return this._lccConferenceParticipantAction(opts.conferenceParticipantAction);
}
else if (opts.dub) {
return this._lccDub(opts.dub, callSid);
}
else if (opts.boostAudioSignal) {
return this._lccBoostAudioSignal(opts, callSid);
}
else if (opts.media_path) {
return this._lccMediaPath(opts.media_path, callSid);
}
else if (opts.llm_tool_output) {
return this._lccToolOutput(opts.tool_call_id, opts.llm_tool_output, callSid);
}
else if (opts.llm_update) {
return this._lccLlmUpdate(opts.llm_update, callSid);
}
// whisper may be the only thing we are asked to do, or it may be that
// we are doing a whisper after having muted, paused recording etc..
if (opts.whisper) {
return this._lccWhisper(opts, callSid);
// whisper may be the only thing we are asked to do, or it may be that
// we are doing a whisper after having muted, paused recording etc..
if (opts.whisper) {
return this._lccWhisper(opts, callSid);
}
} catch (err) {
this.logger.info({err, opts, callSid}, 'CallSession:updateCall - error updating call');
}
}
@@ -1883,9 +1924,11 @@ Duration=${duration} `
this.tasks = tasks;
this.taskIdx = 0;
this.stackIdx++;
this.logger.info({tasks: listTaskNames(tasks)},
this.logger.debug({tasks: listTaskNames(tasks)},
`CallSession:replaceApplication reset with ${tasks.length} new tasks, stack depth is ${this.stackIdx}`);
if (this.currentTask) {
this.logger.debug('CallSession:replaceApplication - killing current task ' +
this.currentTask?.name + ', taskId: ' + this.currentTask.taskId);
this.currentTask.kill(this, KillReason.Replaced);
this.currentTask = null;
}
@@ -1894,11 +1937,15 @@ Duration=${duration} `
this.wakeupResolver({reason: 'new tasks'});
this.wakeupResolver = null;
}
if ((!this.currentTask || this.currentTask === undefined) && this.isCurTaskPlay) {
this.logger.debug(`CallSession:replaceApplication - emitting uuid_break, taskId: ${this.taskInProgress?.taskId}`);
this.stickyEventEmitter.emit('uuid_break', this.taskInProgress);
}
}
kill(onBackgroundGatherBargein = false) {
if (this.isConfirmCallSession) this.logger.debug('CallSession:kill (ConfirmSession)');
else this.logger.info('CallSession:kill');
else this.logger.debug('CallSession:kill');
this._endVerbHookSpan();
if (this.currentTask) {
this.currentTask.kill(this);
@@ -1963,7 +2010,7 @@ Duration=${duration} `
task.synthesizer.label :
this.speechSynthesisLabel;
this.logger.info({vendor, language, voice, label},
this.logger.debug({vendor, language, voice, label},
'CallSession:_preCacheAudio - precaching audio for future prompt');
task._synthesizeWithSpecificVendor(this, this.ep, {vendor, language, voice, label, preCache: true})
.catch((err) => this.logger.error(err, 'CallSession:_preCacheAudio - error precaching audio'));
@@ -2033,7 +2080,7 @@ Duration=${duration} `
}
async _onCommand({msgid, command, call_sid, queueCommand, tool_call_id, data}) {
this.logger.info({msgid, command, queueCommand, data}, 'CallSession:_onCommand - received command');
this.logger.debug({msgid, command, queueCommand, data}, 'CallSession:_onCommand - received command');
let resolution;
switch (command) {
case 'redirect':
@@ -2042,18 +2089,18 @@ Duration=${duration} `
const t = normalizeJambones(this.logger, data)
.map((tdata) => makeTask(this.logger, tdata));
if (!queueCommand) {
this.logger.info({tasks: listTaskNames(t)}, 'CallSession:_onCommand new task list');
this.logger.debug({tasks: listTaskNames(t)}, 'CallSession:_onCommand new task list');
this.replaceApplication(t);
}
else if (JAMBONES_INJECT_CONTENT) {
if (JAMBONES_EAGERLY_PRE_CACHE_AUDIO) this._preCacheAudio(t);
this._injectTasks(t);
this.logger.info({tasks: listTaskNames(this.tasks)}, 'CallSession:_onCommand - updated task list');
this.logger.debug({tasks: listTaskNames(this.tasks)}, 'CallSession:_onCommand - updated task list');
}
else {
if (JAMBONES_EAGERLY_PRE_CACHE_AUDIO) this._preCacheAudio(t);
this.tasks.push(...t);
this.logger.info({tasks: listTaskNames(this.tasks)}, 'CallSession:_onCommand - updated task list');
this.logger.debug({tasks: listTaskNames(this.tasks)}, 'CallSession:_onCommand - updated task list');
}
resolution = {reason: 'received command, new tasks', queue: queueCommand, command};
resolution.command = listTaskNames(t);
@@ -2101,6 +2148,7 @@ Duration=${duration} `
break;
case 'listen:status':
case 'stream:status':
this._lccListenStatus(data);
break;
@@ -2319,10 +2367,32 @@ Duration=${duration} `
// Destroy previous ep if it's still running.
if (this.ep?.connected) this.ep.destroy();
this.ep = await this.ms.createEndpoint({remoteSdp: this.dlg.remote.sdp});
/* Codec negotiation issue explanation:
*
* Problem scenario:
* 1. Initial negotiation:
* - FreeSWITCH → SBC: offers multiple codecs (PCMU, PCMA, G722)
* - SBC → Callee: passes all codecs (PCMU, PCMA, G722)
* - Callee → SBC: responds with PCMA (its supported codec)
* - SBC → FreeSWITCH: responds with PCMU (after transcoding)
*
* 2. After endpoint replacement:
* - If we only offer PCMU in the new endpoint
* - FreeSWITCH → SBC: offers only PCMU
* - SBC → Callee: offers only PCMU
* - Call fails: Callee rejects since it only supports PCMA
*
* Solution:
* Always have FreeSWITCH offer multiple codecs to the SBC, don't pass remote sdp here to ensure
* the SBC can reoffer the same codecs that the callee originally accepted.
* This prevents call failures during media renegotiation.
*/
this.ep = await this.ms.createEndpoint();
this._configMsEndpoint();
await this.dlg.modify(this.ep.local.sdp);
const sdp = await this.dlg.modify(this.ep.local.sdp);
await this.ep.modify(sdp);
this.logger.debug('CallSession:replaceEndpoint completed');
return this.ep;
}
@@ -2330,9 +2400,16 @@ Duration=${duration} `
/**
* Hang up the call and free the media endpoint
*/
_clearResources() {
async _clearResources() {
this.stickyEventEmitter.destroy();
this.stickyEventEmitter = null;
this.taskInProgress = null;
for (const resource of [this.dlg, this.ep, this.ep2]) {
if (resource && resource.connected) resource.destroy();
try {
if (resource && resource.connected) await resource.destroy();
} catch (err) {
this.logger.info({err}, 'CallSession:_clearResources - error clearing resources');
}
}
this.dlg = null;
this.ep = null;
@@ -2459,12 +2536,14 @@ Duration=${duration} `
} else if (sip_method === 'MESSAGE') {
res.send(202);
} else {
this.logger.info(`CallSession:_onRequestWithinDialog unsported method: ${req.method}`);
this.logger.warn(`CallSession:_onRequestWithinDialog unsupported method: ${req.method}`);
res.send(501);
return;
}
const params = {sip_method, sip_body: req.body, sip_headers: req.headers};
this.currentTask.performHook(this, this.sipRequestWithinDialogHook, params);
this.currentTask.performHook(this, this.sipRequestWithinDialogHook, params).catch((err) => {
this.logger.error({err}, 'CallSession:_onRequestWithinDialog - error calling sipRequestWithinDialogHook');
});
}
async _onReinvite(req, res) {
@@ -2475,7 +2554,7 @@ Duration=${duration} `
res.send(200, {body: this.ep.local.sdp});
}
else {
if (this.currentTask.name === TaskName.Dial && this.currentTask.isOnHoldEnabled) {
if (this.currentTask && this.currentTask.name === TaskName.Dial && this.currentTask.isOnHoldEnabled) {
this.logger.info('onholdMusic reINVITE after media has been released');
await this.currentTask.handleReinviteAfterMediaReleased(req, res);
} else {
@@ -2544,7 +2623,7 @@ Duration=${duration} `
if (json && Array.isArray(json)) {
const tasks = normalizeJambones(this.logger, json).map((tdata) => makeTask(this.logger, tdata));
if (tasks && tasks.length > 0) {
this.logger.info('CallSession:handleRefer received REFER, get new tasks');
this.logger.debug('CallSession:handleRefer received REFER, get new tasks');
this.replaceApplication(tasks);
if (this.wakeupResolver) {
this.wakeupResolver({reason: 'CallSession: referHook new taks'});
@@ -2591,14 +2670,14 @@ Duration=${duration} `
if (typeof this.queueEventHookRequestor === 'undefined') {
const pp = this._pool.promise();
try {
this.logger.info({accountSid: this.accountSid}, 'performQueueWebhook: looking up account');
this.logger.debug({accountSid: this.accountSid}, 'performQueueWebhook: looking up account');
const [r] = await pp.query(sqlRetrieveQueueEventHook, [this.accountSid]);
if (0 === r.length) {
this.logger.info({accountSid: this.accountSid}, 'performQueueWebhook: no webhook provisioned');
this.queueEventHookRequestor = null;
}
else {
this.logger.info({accountSid: this.accountSid, webhook: r[0]}, 'performQueueWebhook: webhook found');
this.logger.debug({accountSid: this.accountSid, webhook: r[0]}, 'performQueueWebhook: webhook found');
this.queueEventHookRequestor = new HttpRequestor(this.logger, this.accountSid,
r[0], this.webhook_secret);
this.queueEventHook = r[0];
@@ -2612,7 +2691,7 @@ Duration=${duration} `
/* send webhook */
const params = {...obj, ...this.callInfo.toJSON()};
this.logger.info({accountSid: this.accountSid, params}, 'performQueueWebhook: sending webhook');
this.logger.debug({accountSid: this.accountSid, params}, 'performQueueWebhook: sending webhook');
this.queueEventHookRequestor.request('queue:status', this.queueEventHook, params)
.catch((err) => {
this.logger.info({err, accountSid: this.accountSid, obj}, 'Error sending queue notification event');
@@ -2747,7 +2826,7 @@ Duration=${duration} `
async handleReinviteAfterMediaReleased(req, res) {
assert(this.dlg && this.dlg.connected && !this.ep);
const sdp = await this.dlg.modify(req.body);
this.logger.info({sdp}, 'CallSession:handleReinviteAfterMediaReleased - reinvite to A leg returned sdp');
this.logger.debug({sdp}, 'CallSession:handleReinviteAfterMediaReleased - reinvite to A leg returned sdp');
res.send(200, {body: sdp});
}
@@ -2819,6 +2898,24 @@ Duration=${duration} `
if (Object.keys(opts).length > 0) {
this.ep.set(opts);
}
const origDestroy = this.ep.destroy.bind(this.ep);
this.ep.destroy = async() => {
try {
if (this.currentTask?.name === TaskName.Transcribe && JAMBONES_TRANSCRIBE_EP_DESTROY_DELAY_MS) {
// transcribe task is being used, wait for some time before destroy
// if final transcription is received but endpoint is already closed,
// freeswitch module will not be able to send the transcription
this.logger.debug('callSession:_configMsEndpoint -' +
' transcribe task, wait for some time before destroy');
await sleepFor(JAMBONES_TRANSCRIBE_EP_DESTROY_DELAY_MS);
}
await origDestroy();
} catch (err) {
this.logger.error(err, 'callSession:_configMsEndpoint - error destroying endpoint');
}
};
}
async _handleMediaTimeout(evt) {
@@ -2864,7 +2961,7 @@ Duration=${duration} `
_awaitCommandsOrHangup() {
assert(!this.wakeupResolver);
return new Promise((resolve, reject) => {
this.logger.info('_awaitCommandsOrHangup - waiting...');
this.logger.debug('_awaitCommandsOrHangup - waiting...');
this.wakeupResolver = resolve;
if (this._actionHookDelayProcessor) {
@@ -2884,7 +2981,7 @@ Duration=${duration} `
this.ep.play(this.fillerNoise.url);
this.ep.once('playback-start', (evt) => {
if (evt.file === this.fillerNoise.url && !this._isPlayingFillerNoise) {
this.logger.info('CallSession:_awaitCommandsOrHangup - filler noise started');
this.logger.debug('CallSession:_awaitCommandsOrHangup - filler noise started');
this.ep.api('uuid_break', this.ep.uuid)
.catch((err) => this.logger.info(err, 'Error killing filler noise'));
}
@@ -2895,7 +2992,7 @@ Duration=${duration} `
_clearTasks(backgroundGather, evt) {
if (this.requestor instanceof WsRequestor && !backgroundGather.cleared) {
this.logger.info({evt}, 'CallSession:_clearTasks on event from background gather');
this.logger.debug({evt}, 'CallSession:_clearTasks on event from background gather');
try {
backgroundGather.cleared = true;
this.kill(true);
@@ -2952,6 +3049,64 @@ Duration=${duration} `
}
this.logger.info({vendor}, 'CallSession:_onTtsStreamingConnectFailure - tts streaming connect failure');
}
async startMaxCallDurationTimer(timeLimit) {
if (!this._maxCallDurationTimer && timeLimit > 0) {
this.timeLimit = timeLimit;
this._maxCallDurationTimer = setTimeout(this._onMaxCallDuration.bind(this), timeLimit * 1000);
this.logger.debug(`CallSession:startMaxCallDurationTimer - started max call duration timer for ${timeLimit}s`);
}
}
/**
* _onMaxCallDuration - called when the call has reached the maximum duration
*/
_onMaxCallDuration() {
this.logger.info(`callSession:_onMaxCallDuration tearing down call as it has reached ${this.timeLimit}s`);
if (!this.dlg) {
this.logger.debug('CallSession:_onMaxCallDuration - no dialog, call already gone');
return;
}
this._jambonzHangup('Max Call Duration');
this._maxCallDurationTimer = null;
}
_onUserSaid(transcript) {
const count = this.conversationTurns.length;
if (count === 0 || this.conversationTurns[count - 1].type === 'assistant') {
this.conversationTurns.push({
type: 'user',
text: transcript
});
}
else {
this.conversationTurns[count - 1].text += ` ${transcript}`;
}
}
_onBotSaid(transcript) {
const count = this.conversationTurns.length;
if (count === 0 || this.conversationTurns[count - 1].type === 'user') {
this.conversationTurns.push({
type: 'assistant',
text: transcript
});
}
else {
this.conversationTurns[count - 1].text += ` ${transcript}`;
}
}
getFormattedConversation(numTurns) {
const turns = this.conversationTurns.slice(-numTurns);
if (turns.length === 0) return null;
return turns.map((t) => {
if (t.type === 'user') {
return `user: ${t.text}`;
}
return `assistant: ${t.text}`;
}).join('\n');
}
}
module.exports = CallSession;

View File

@@ -63,7 +63,7 @@ class RestCallSession extends CallSession {
this.callInfo.callTerminationBy = terminatedBy;
const duration = moment().diff(this.dlg.connectTime, 'seconds');
this.emit('callStatusChange', {callStatus: CallStatus.Completed, duration});
this.logger.debug(`RestCallSession: called party hung up by ${terminatedBy}`);
this.logger.info(`RestCallSession: called party hung up by ${terminatedBy}`);
this._callReleased();
}

View File

@@ -83,7 +83,11 @@ class Conference extends Task {
// reset answer time if we were transferred from another feature server
if (this.connectTime) dlg.connectTime = this.connectTime;
if (cs.sipRequestWithinDialogHook) {
/* remove any existing listener to escape from duplicating events */
this._removeSipIndialogRequestListener(this.dlg);
this._initSipIndialogRequestListener(cs, dlg);
}
this.ep.on('destroy', this._kicked.bind(this, cs, dlg));
try {
@@ -103,6 +107,7 @@ class Conference extends Task {
this.logger.debug(`Conference:exec - conference ${this.confName} is over`);
if (this.callMoved !== false) await this.performAction(this.results);
this._removeSipIndialogRequestListener(dlg);
} catch (err) {
this.logger.info(err, `TaskConference:exec - error in conference ${this.confName}`);
}
@@ -416,6 +421,20 @@ class Conference extends Task {
}
}
_initSipIndialogRequestListener(cs, dlg) {
dlg.on('info', this._onRequestWithinDialog.bind(this, cs));
dlg.on('message', this._onRequestWithinDialog.bind(this, cs));
}
_removeSipIndialogRequestListener(dlg) {
dlg && dlg.removeAllListeners('message');
dlg && dlg.removeAllListeners('info');
}
// Thin adapter: delegate in-dialog SIP requests (INFO/MESSAGE) received on
// the conference dialog to the CallSession's common handler.
_onRequestWithinDialog(cs, req, res) {
  cs._onRequestWithinDialog(req, res);
}
/**
* The conference we have been waiting for has started.
* It may be on this server or a different one, and we are
@@ -446,7 +465,7 @@ class Conference extends Task {
doConferenceMute(cs, opts) {
assert (cs.isInConference);
this.logger.info(`Conference:doConferenceMute ${mute ? 'muting' : 'unmuting'} member`);
const mute = opts.conf_mute_status === 'mute';
this.ep.api(`conference ${this.confName} ${mute ? 'mute' : 'unmute'} ${this.memberId}`)
.catch((err) => this.logger.info({err}, 'Error muting or unmuting participant'));
@@ -549,8 +568,8 @@ class Conference extends Task {
/**
* mute or unmute side of the call
*/
mute(callSid, doMute) {
this.doConferenceMute(this.callSession, {conf_mute_status: doMute});
/**
 * Mute or unmute this conference member.
 * @param {string} callSid - unused here; presumably kept to match the shared mute API shape — TODO confirm against callers
 * @param {boolean} doMute - true to mute the member, false to unmute
 */
async mute(callSid, doMute) {
  this.doConferenceMute(this.callSession, {conf_mute_status: doMute ? 'mute' : 'unmute'});
}
/**

View File

@@ -187,18 +187,20 @@ class TaskConfig extends Task {
: cs.speechRecognizerVendor;
cs.speechRecognizerLabel = this.recognizer.label === 'default'
? cs.speechRecognizerLabel : this.recognizer.label;
cs.speechRecognizerLanguage = this.recognizer.language !== 'default'
cs.speechRecognizerLanguage = this.recognizer.language !== undefined && this.recognizer.language !== 'default'
? this.recognizer.language
: cs.speechRecognizerLanguage;
//fallback
cs.fallbackSpeechRecognizerVendor = this.recognizer.fallbackVendor !== 'default'
cs.fallbackSpeechRecognizerVendor = this.recognizer.fallbackVendor !== undefined &&
this.recognizer.fallbackVendor !== 'default'
? this.recognizer.fallbackVendor
: cs.fallbackSpeechRecognizerVendor;
cs.fallbackSpeechRecognizerLabel = this.recognizer.fallbackLabel === 'default' ?
cs.fallbackSpeechRecognizerLabel :
this.recognizer.fallbackLabel;
cs.fallbackSpeechRecognizerLanguage = this.recognizer.fallbackLanguage !== 'default'
cs.fallbackSpeechRecognizerLanguage = this.recognizer.fallbackLanguage !== undefined &&
this.recognizer.fallbackLanguage !== 'default'
? this.recognizer.fallbackLanguage
: cs.fallbackSpeechRecognizerLanguage;

View File

@@ -3,8 +3,7 @@ const {TaskName, TaskPreconditions, DequeueResults, BONG_TONE} = require('../uti
const Emitter = require('events');
const bent = require('bent');
const assert = require('assert');
const sleepFor = (ms) => new Promise((resolve) => setTimeout(() => resolve(), ms));
const { sleepFor } = require('../utils/helpers');
const getUrl = (cs) => `${cs.srf.locals.serviceUrl}/v1/dequeue/${cs.callSid}`;

View File

@@ -24,6 +24,7 @@ const {ANCHOR_MEDIA_ALWAYS,
const { isOnhold, isOpusFirst } = require('../utils/sdp-utils');
const { normalizeJambones } = require('@jambonz/verb-specifications');
const { selectHostPort } = require('../utils/network');
const { sleepFor } = require('../utils/helpers');
function parseDtmfOptions(logger, dtmfCapture) {
let parentDtmfCollector, childDtmfCollector;
@@ -86,8 +87,6 @@ function filterAndLimit(logger, tasks) {
return unique;
}
const sleepFor = (ms) => new Promise((resolve) => setTimeout(() => resolve(), ms));
class TaskDial extends Task {
constructor(logger, opts) {
super(logger, opts);
@@ -121,8 +120,9 @@ class TaskDial extends Task {
}
}
if (this.data.listen) {
this.listenTask = makeTask(logger, {'listen': this.data.listen}, this);
const listenData = this.data.listen || this.data.stream;
if (listenData) {
this.listenTask = makeTask(logger, {'listen': listenData }, this);
}
if (this.data.transcribe) {
this.transcribeTask = makeTask(logger, {'transcribe' : this.data.transcribe}, this);
@@ -229,10 +229,10 @@ class TaskDial extends Task {
try {
await this.epOther.play(this.dialMusic);
} catch (err) {
this.logger.error(err, `TaskDial:exec error playing ${this.dialMusic}`);
this.logger.error(err, `TaskDial:exec error playing dialMusic ${this.dialMusic}`);
await sleepFor(1000);
}
} while (!this.killed || !this.bridged);
} while (!this.killed && !this.bridged && this._mediaPath === MediaPath.FullMedia);
})();
}
}
@@ -498,7 +498,7 @@ class TaskDial extends Task {
dlg && dlg.removeAllListeners('info');
}
async _onRequestWithinDialog(cs, req, res) {
_onRequestWithinDialog(cs, req, res) {
cs._onRequestWithinDialog(req, res);
}
@@ -521,7 +521,7 @@ class TaskDial extends Task {
const {req, callInfo, direction, srf} = cs;
const {getSBC} = srf.locals;
const {lookupTeamsByAccount, lookupAccountBySid} = srf.locals.dbHelpers;
const {lookupCarrier, lookupCarrierByPhoneNumber} = dbUtils(this.logger, cs.srf);
const {lookupCarrier, lookupCarrierByPhoneNumber, lookupVoipCarrierBySid} = dbUtils(this.logger, cs.srf);
let sbcAddress = this.proxy || getSBC();
const teamsInfo = {};
let fqdn;
@@ -540,6 +540,8 @@ class TaskDial extends Task {
...this.headers
};
// default to inband dtmf if not specified
this.inbandDtmfEnabled = cs.inbandDtmfEnabled;
// get calling user from From header
const parsedFrom = req.getParsedHeader('from');
const fromUri = parseUri(parsedFrom.uri);
@@ -548,7 +550,8 @@ class TaskDial extends Task {
proxy: `sip:${sbcAddress}`,
callingNumber: this.callerId || fromUri.user,
...(this.callerName && {callingName: this.callerName}),
opusFirst: isOpusFirst(this.cs.ep.remote.sdp)
opusFirst: isOpusFirst(this.cs.ep.remote.sdp),
isVideoCall: this.cs.ep.remote.sdp.includes('m=video')
};
const t = this.target.find((t) => t.type === 'teams');
@@ -617,10 +620,17 @@ class TaskDial extends Task {
const str = this.callerId || req.callingNumber || '';
const callingNumber = str.startsWith('+') ? str.substring(1) : str;
const voip_carrier_sid = await lookupCarrierByPhoneNumber(cs.accountSid, callingNumber);
const req_voip_carrier_sid = req.has('X-Voip-Carrier-Sid') ? req.get('X-Voip-Carrier-Sid') : null;
if (voip_carrier_sid) {
this.logger.info(
`Dial:_attemptCalls: selected voip_carrier_sid ${voip_carrier_sid} for callingNumber: ${callingNumber}`);
opts.headers['X-Requested-Carrier-Sid'] = voip_carrier_sid;
// Checking if outbound carrier is different from inbound carrier and has dtmf type tones
if (voip_carrier_sid !== req_voip_carrier_sid) {
const [voipCarrier] = await lookupVoipCarrierBySid(voip_carrier_sid);
this.inbandDtmfEnabled = voipCarrier?.dtmf_type === 'tones';
}
}
}
@@ -861,10 +871,14 @@ class TaskDial extends Task {
if (this.parentDtmfCollector) this._installDtmfDetection(cs, cs.dlg);
if (this.childDtmfCollector) this._installDtmfDetection(cs, this.dlg);
if (cs.sipRequestWithinDialogHook) this._initSipIndialogRequestListener(cs, this.dlg);
if (cs.sipRequestWithinDialogHook) {
/* remove any existing listener to escape from duplicating events */
this._removeSipIndialogRequestListener(this.dlg);
this._initSipIndialogRequestListener(cs, this.dlg);
}
if (this.transcribeTask) this.transcribeTask.exec(cs, {ep: this.epOther, ep2:this.ep});
if (this.listenTask) this.listenTask.exec(cs, {ep: this.epOther});
if (this.listenTask) this.listenTask.exec(cs, {ep: this.listenTask.channel === 2 ? this.ep : this.epOther});
if (this.startAmd) {
try {
this.startAmd(cs, this.ep, this, this.data.amd);
@@ -894,7 +908,7 @@ class TaskDial extends Task {
}
_handleMediaTimeout(evt) {
if (evt.reason === 'MEDIA_TIMEOUT' && this.sd && this.bridged) {
if (evt?.reason === 'MEDIA_TIMEOUT' && this.sd && this.bridged) {
this.kill(this.cs, KillReason.MediaTimeout);
}
}

View File

@@ -11,6 +11,8 @@ const {
NvidiaTranscriptionEvents,
JambonzTranscriptionEvents,
AssemblyAiTranscriptionEvents,
VoxistTranscriptionEvents,
OpenAITranscriptionEvents,
VadDetection,
VerbioTranscriptionEvents,
SpeechmaticsTranscriptionEvents
@@ -24,6 +26,7 @@ const makeTask = require('./make_task');
const assert = require('assert');
const SttTask = require('./stt-task');
const { SpeechCredentialError } = require('../utils/error');
const SPEECHMATICS_DEFAULT_ASR_TIMEOUT = 1200;
class TaskGather extends SttTask {
constructor(logger, opts, parentTask) {
@@ -81,6 +84,7 @@ class TaskGather extends SttTask {
this._bufferedTranscripts = [];
this.partialTranscriptsCount = 0;
this.bugname_prefix = 'gather_';
}
get name() { return TaskName.Gather; }
@@ -108,6 +112,12 @@ class TaskGather extends SttTask {
return this.fillerNoise.startDelaySecs;
}
get isStreamingTts() { return this.sayTask && this.sayTask.isStreamingTts; }
getTtsVendorData() {
if (this.sayTask) return this.sayTask.getTtsVendorData(this.cs);
}
get summary() {
let s = `${this.name}{`;
if (this.input.length === 2) s += 'inputs=[speech,digits],';
@@ -162,6 +172,16 @@ class TaskGather extends SttTask {
this.logger.debug({hints: this.data.recognizer.hints, hintsBoost: this.data.recognizer.hintsBoost},
'Gather:exec - applying global sttHints');
}
// specials case for speechmatics: they dont do endpointing so we need to enable continuous ASR
if (this.vendor === 'speechmatics' && !this.isContinuousAsr) {
const maxDelay = this.recognizer?.speechmaticsOptions?.transcription_config?.max_delay;
if (maxDelay) this.asrTimeout = Math.min(SPEECHMATICS_DEFAULT_ASR_TIMEOUT, maxDelay * 1000);
else this.asrTimeout = SPEECHMATICS_DEFAULT_ASR_TIMEOUT;
this.isContinuousAsr = true;
this.logger.debug(`Gather:exec - auto-enabling continuous ASR for speechmatics w/ timeout ${this.asrTimeout}`);
}
if (!this.isContinuousAsr && cs.isContinuousAsr) {
this.isContinuousAsr = true;
this.asrTimeout = cs.asrTimeout * 1000;
@@ -221,6 +241,7 @@ class TaskGather extends SttTask {
const {span, ctx} = this.startChildSpan(`nested:${this.sayTask.summary}`);
const process = () => {
this.logger.debug('Gather: nested say task completed');
this.playComplete = true;
if (!this.listenDuringPrompt) {
startDtmfListener();
}
@@ -251,6 +272,7 @@ class TaskGather extends SttTask {
const {span, ctx} = this.startChildSpan(`nested:${this.playTask.summary}`);
const process = () => {
this.logger.debug('Gather: nested play task completed');
this.playComplete = true;
if (!this.listenDuringPrompt) {
startDtmfListener();
}
@@ -513,6 +535,17 @@ class TaskGather extends SttTask {
this._onVendorConnectFailure.bind(this, cs, ep));
break;
case 'voxist':
this.bugname = `${this.bugname_prefix}voxist_transcribe`;
this.addCustomEventListener(ep, VoxistTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep));
this.addCustomEventListener(
ep, VoxistTranscriptionEvents.Connect, this._onVendorConnect.bind(this, cs, ep));
this.addCustomEventListener(ep, VoxistTranscriptionEvents.Error, this._onVendorError.bind(this, cs, ep));
this.addCustomEventListener(ep, VoxistTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep));
break;
case 'speechmatics':
this.bugname = `${this.bugname_prefix}speechmatics_transcribe`;
this.addCustomEventListener(
@@ -530,6 +563,31 @@ class TaskGather extends SttTask {
break;
case 'openai':
this.bugname = `${this.bugname_prefix}openai_transcribe`;
this.addCustomEventListener(
ep, OpenAITranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
this.addCustomEventListener(
ep, OpenAITranscriptionEvents.SpeechStarted, this._onOpenAISpeechStarted.bind(this, cs, ep));
this.addCustomEventListener(
ep, OpenAITranscriptionEvents.SpeechStopped, this._onOpenAISpeechStopped.bind(this, cs, ep));
this.addCustomEventListener(ep, OpenAITranscriptionEvents.Connect,
this._onVendorConnect.bind(this, cs, ep));
this.addCustomEventListener(ep, OpenAITranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep));
this.addCustomEventListener(ep, OpenAITranscriptionEvents.Error,
this._onOpenAIErrror.bind(this, cs, ep));
/* openai delta transcripts are useful only for minBargeinWordCount eval */
if (this.minBargeinWordCount > 1) {
this.openaiPartials = [];
opts.OPENAI_WANT_PARTIALS = 1;
this.addCustomEventListener(
ep, OpenAITranscriptionEvents.PartialTranscript, this._onOpenAIPartialTranscript.bind(this, cs, ep));
}
this.modelSupportsConversationTracking = opts.OPENAI_MODEL !== 'whisper-1';
break;
default:
if (this.vendor.startsWith('custom:')) {
this.bugname = `${this.bugname_prefix}${this.vendor}_transcribe`;
@@ -561,6 +619,25 @@ class TaskGather extends SttTask {
bugname: this.bugname
}, 'Gather:_startTranscribing');
/* special feature for openai: we can provide a prompt that includes recent conversation history */
let prompt;
if (this.vendor === 'openai') {
if (this.modelSupportsConversationTracking) {
prompt = this.formatOpenAIPrompt(this.cs, {
prompt: this.data.recognizer?.openaiOptions?.prompt,
hintsTemplate: this.data.recognizer?.openaiOptions?.promptTemplates?.hintsTemplate,
// eslint-disable-next-line max-len
conversationHistoryTemplate: this.data.recognizer?.openaiOptions?.promptTemplates?.conversationHistoryTemplate,
hints: this.data.recognizer?.hints,
});
this.logger.debug({prompt}, 'Gather:_startTranscribing - created an openai prompt');
}
else if (this.data.recognizer?.hints?.length > 0) {
prompt = this.data.recognizer?.hints.join(', ');
}
}
/**
* Note: we don't need to ask deepgram for interim results, because they
* already send us words as they are finalized (is_final=true) even before
@@ -572,6 +649,7 @@ class TaskGather extends SttTask {
interim: this.interim,
bugname: this.bugname,
hostport: this.hostport,
prompt
}).catch((err) => {
const {writeAlerts, AlertType} = this.cs.srf.locals;
this.logger.error(err, 'TaskGather:_startTranscribing error');
@@ -607,12 +685,21 @@ class TaskGather extends SttTask {
}
_startAsrTimer() {
if (this.vendor === 'deepgram') return; // no need
// Deepgram has a case that UtteranceEnd is not sent to cover the last word end time.
// So we need to wait for the asrTimeout to be sure that the last word is sent.
// if (this.vendor === 'deepgram') return; // no need
assert(this.isContinuousAsr);
this._clearAsrTimer();
this._asrTimer = setTimeout(() => {
this.logger.debug('_startAsrTimer - asr timer went off');
const evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language, this.vendor);
/* special case for speechmatics - keep listening if we dont have any transcripts */
if (this.vendor === 'speechmatics' && this._bufferedTranscripts.length === 0) {
this.logger.debug('Gather:_startAsrTimer - speechmatics, no transcripts yet, keep listening');
this._startAsrTimer();
return;
}
this._resolve(this._bufferedTranscripts.length > 0 ? 'speech' : 'timeout', evt);
}, this.asrTimeout);
this.logger.debug(`_startAsrTimer: set for ${this.asrTimeout}ms`);
@@ -745,7 +832,11 @@ class TaskGather extends SttTask {
const bugname = fsEvent.getHeader('media-bugname');
const finished = fsEvent.getHeader('transcription-session-finished');
this.logger.debug({evt, bugname, finished, vendor: this.vendor}, 'Gather:_onTranscription raw transcript');
if (bugname && this.bugname !== bugname) return;
if (bugname && this.bugname !== bugname) {
this.logger.debug(
`Gather:_onTranscription - ignoring transcript from ${bugname} because our bug is ${this.bugname}`);
return;
}
if (finished === 'true') return;
if (this.vendor === 'ibm' && evt?.state === 'listening') return;
@@ -755,10 +846,17 @@ class TaskGather extends SttTask {
this.logger.debug('Gather:_onTranscription - got UtteranceEnd event from deepgram but no buffered transcripts');
}
else {
this.logger.debug('Gather:_onTranscription - got UtteranceEnd event from deepgram, return buffered transcript');
evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language, this.vendor);
this._bufferedTranscripts = [];
this._resolve('speech', evt);
const utteranceTime = evt.last_word_end;
// eslint-disable-next-line max-len
if (utteranceTime && this._dgTimeOfLastUnprocessedWord && utteranceTime < this._dgTimeOfLastUnprocessedWord && utteranceTime != -1) {
this.logger.debug('Gather:_onTranscription - got UtteranceEnd with unprocessed words, continue listening');
}
else {
this.logger.debug('Gather:_onTranscription - got UtteranceEnd from deepgram, return buffered transcript');
evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language, this.vendor);
this._bufferedTranscripts = [];
this._resolve('speech', evt);
}
}
return;
}
@@ -769,7 +867,7 @@ class TaskGather extends SttTask {
evt = this.normalizeTranscription(evt, this.vendor, 1, this.language,
this.shortUtterance, this.data.recognizer.punctuation);
this.logger.debug({evt, bugname, finished, vendor: this.vendor}, 'Gather:_onTranscription normalized transcript');
//this.logger.debug({evt, bugname, finished, vendor: this.vendor}, 'Gather:_onTranscription normalized transcript');
if (evt.alternatives.length === 0) {
this.logger.info({evt}, 'TaskGather:_onTranscription - got empty transcript, continue listening');
@@ -777,8 +875,6 @@ class TaskGather extends SttTask {
}
const confidence = evt.alternatives[0].confidence;
const minConfidence = this.data.recognizer?.minConfidence;
this.logger.debug({evt},
`TaskGather:_onTranscription - confidence (${confidence}), minConfidence (${minConfidence})`);
if (confidence && minConfidence && confidence < minConfidence) {
this.logger.info({evt},
'TaskGather:_onTranscription - Transcript confidence ' +
@@ -832,7 +928,7 @@ class TaskGather extends SttTask {
const t = evt.alternatives[0].transcript;
if (t) {
/* remove trailing punctuation */
if (/[,;:\.!\?]$/.test(t)) {
if (this.vendor !== 'speechmatics' && /[,;:\.!\?]$/.test(t)) {
this.logger.debug('TaskGather:_onTranscription - removing trailing punctuation');
evt.alternatives[0].transcript = t.slice(0, -1);
}
@@ -894,8 +990,21 @@ class TaskGather extends SttTask {
if (originalEvent.is_final && evt.alternatives[0].transcript !== '') {
this.logger.debug({evt}, 'Gather:_onTranscription - buffering a completed (partial) deepgram transcript');
this._bufferedTranscripts.push(evt);
this._dgTimeOfLastUnprocessedWord = null;
}
if (evt.alternatives[0].transcript === '') {
emptyTranscript = true;
}
else if (!originalEvent.is_final) {
/* Deepgram: we have unprocessed words — save the last word's end time so we can later compare it to UtteranceEnd */
const words = originalEvent.channel.alternatives[0].words;
if (words?.length > 0) {
this._dgTimeOfLastUnprocessedWord = words.slice(-1)[0].end;
this.logger.debug(
`TaskGather:_onTranscription - saving word end time: ${this._dgTimeOfLastUnprocessedWord}`);
}
}
if (evt.alternatives[0].transcript === '') emptyTranscript = true;
}
if (!emptyTranscript) {
@@ -1031,6 +1140,33 @@ class TaskGather extends SttTask {
this._onVendorError(cs, _ep, {error: JSON.stringify(e)});
}
async _onOpenAIErrror(cs, _ep, evt) {
// eslint-disable-next-line no-unused-vars
const {message, ...e} = evt;
this._onVendorError(cs, _ep, {error: JSON.stringify(e)});
}
/* Informational only: OpenAI signalled start of caller speech; just log it. */
async _onOpenAISpeechStarted(cs, _ep, evt) {
  this.logger.debug({evt}, 'TaskGather:_onOpenAISpeechStarted');
}
/* Informational only: OpenAI signalled end of caller speech; just log it. */
async _onOpenAISpeechStopped(cs, _ep, evt) {
  this.logger.debug({evt}, 'TaskGather:_onOpenAISpeechStopped');
}
async _onOpenAIPartialTranscript(cs, _ep, evt) {
if (!this.playComplete) {
const words = evt.delta.split(' ').filter((w) => /[A-Za-z0-0]/.test(w));
this.openaiPartials.push(...words);
this.logger.debug({words, partials: this.openaiPartials, evt}, 'TaskGather:_onOpenAIPartialTranscript - words');
if (this.openaiPartials.length >= this.minBargeinWordCount) {
this.logger.debug({partials: this.openaiPartials}, 'killing audio due to speech (openai)');
this._killAudio(cs);
this.notifyStatus({event: 'speech-bargein-detected', words: this.openaiPartials});
}
}
}
async _onVendorError(cs, _ep, evt) {
super._onVendorError(cs, _ep, evt);
if (!(await this._startFallback(cs, _ep, evt))) {
@@ -1140,6 +1276,7 @@ class TaskGather extends SttTask {
}
}
else if (reason.startsWith('speech')) {
this.cs.emit('userSaid', evt.alternatives[0].transcript);
if (this.parentTask) this.parentTask.emit('transcription', evt);
else {
this.emit('transcription', evt);
@@ -1165,7 +1302,7 @@ class TaskGather extends SttTask {
if (this.parentTask) this.parentTask.emit('stt-low-confidence', evt);
else {
this.emit('stt-low-confidence', evt);
returnedVerbs = await this.performAction({reason: 'stt-low-confidence'});
returnedVerbs = await this.performAction({speech:evt, reason: 'stt-low-confidence'});
}
}
} catch (err) { /*already logged error*/ }

View File

@@ -17,7 +17,7 @@ class TaskListen extends Task {
[
'action', 'auth', 'method', 'url', 'finishOnKey', 'maxLength', 'metadata', 'mixType', 'passDtmf', 'playBeep',
'sampleRate', 'timeout', 'transcribe', 'wsAuth', 'disableBidirectionalAudio'
'sampleRate', 'timeout', 'transcribe', 'wsAuth', 'disableBidirectionalAudio', 'channel'
].forEach((k) => this[k] = this.data[k]);
this.mixType = this.mixType || 'mono';
@@ -221,7 +221,7 @@ class TaskListen extends Task {
}
}
_onConnect(ep) {
this.logger.debug('TaskListen:_onConnect');
this.logger.info('TaskListen:_onConnect');
}
_onConnectFailure(ep, evt) {
this.logger.info(evt, 'TaskListen:_onConnectFailure');

View File

@@ -2,6 +2,10 @@ const Task = require('../task');
const {TaskPreconditions} = require('../../utils/constants');
const TaskLlmOpenAI_S2S = require('./llms/openai_s2s');
const TaskLlmVoiceAgent_S2S = require('./llms/voice_agent_s2s');
const TaskLlmUltravox_S2S = require('./llms/ultravox_s2s');
const TaskLlmElevenlabs_S2S = require('./llms/elevenlabs_s2s');
const TaskLlmGoogle_S2S = require('./llms/google_s2s');
const LlmMcpService = require('../../utils/llm-mcp');
class TaskLlm extends Task {
constructor(logger, opts) {
@@ -16,6 +20,8 @@ class TaskLlm extends Task {
// delegate to the specific llm model
this.llm = this.createSpecificLlm();
// MCP
this.mcpServers = this.data.mcpServers || [];
}
get name() { return this.llm.name ; }
@@ -26,14 +32,32 @@ class TaskLlm extends Task {
get ep() { return this.cs.ep; }
get mcpService() {
return this.llmMcpService;
}
get isMcpEnabled() {
return this.mcpServers.length > 0;
}
async exec(cs, {ep}) {
await super.exec(cs, {ep});
// create the MCP service if we have MCP servers
if (this.isMcpEnabled) {
this.llmMcpService = new LlmMcpService(this.logger, this.mcpServers);
await this.llmMcpService.init();
}
await this.llm.exec(cs, {ep});
}
async kill(cs) {
super.kill(cs);
await this.llm.kill(cs);
// clean up MCP clients
if (this.isMcpEnabled) {
await this.mcpService.close();
}
}
createSpecificLlm() {
@@ -41,9 +65,7 @@ class TaskLlm extends Task {
switch (this.vendor) {
case 'openai':
case 'microsoft':
if (this.model.startsWith('gpt-4o-realtime')) {
llm = new TaskLlmOpenAI_S2S(this.logger, this.data, this);
}
llm = new TaskLlmOpenAI_S2S(this.logger, this.data, this);
break;
case 'voiceagent':
@@ -51,6 +73,18 @@ class TaskLlm extends Task {
llm = new TaskLlmVoiceAgent_S2S(this.logger, this.data, this);
break;
case 'ultravox':
llm = new TaskLlmUltravox_S2S(this.logger, this.data, this);
break;
case 'elevenlabs':
llm = new TaskLlmElevenlabs_S2S(this.logger, this.data, this);
break;
case 'google':
llm = new TaskLlmGoogle_S2S(this.logger, this.data, this);
break;
default:
throw new Error(`Unsupported vendor ${this.vendor} for LLM`);
}
@@ -74,8 +108,15 @@ class TaskLlm extends Task {
await this.cs?.requestor.request('llm:event', this.eventHook, data);
}
async sendToolHook(tool_call_id, data) {
await this.cs?.requestor.request('llm:tool-call', this.toolHook, {tool_call_id, ...data});
const tool_response = await this.cs?.requestor.request('llm:tool-call', this.toolHook, {tool_call_id, ...data});
// if the toolHook was a websocket it will return undefined, otherwise it should return an object
if (typeof tool_response != 'undefined') {
tool_response.type = 'client_tool_result';
tool_response.invocation_id = tool_call_id;
this.processToolOutput(tool_call_id, tool_response);
}
}
async processToolOutput(tool_call_id, data) {

View File

@@ -0,0 +1,327 @@
const Task = require('../../task');
const TaskName = 'Llm_Elevenlabs_s2s';
const {LlmEvents_Elevenlabs} = require('../../../utils/constants');
const {request} = require('undici');
const ClientEvent = 'client.event';
const SessionDelete = 'session.delete';
const elevenlabs_server_events = [
  'conversation_initiation_metadata',
  'user_transcript',
  'agent_response',
  'client_tool_call'
];

/**
 * Expand any entries ending in the ".*" wildcard into the concrete
 * elevenlabs server event names that share the prefix; all other
 * entries pass through unchanged. Order is preserved.
 * @param {string[]} events - requested event names, possibly with wildcards
 * @returns {string[]} the expanded list
 */
const expandWildcards = (events) => events.flatMap((evt) => {
  if (!evt.endsWith('.*')) return [evt];
  const prefix = evt.slice(0, -2); // strip the trailing ".*"
  return elevenlabs_server_events.filter((name) => name.startsWith(prefix));
});
/**
 * Speech-to-speech LLM task for the Elevenlabs Conversational AI websocket API.
 * Created by the parent `llm` verb; drives the freeswitch uuid_elevenlabs_s2s
 * module and relays server events, tool calls, and lifecycle changes back
 * through the parent task's event/tool hooks.
 */
class TaskLlmElevenlabs_S2S extends Task {
  /**
   * @param {Object} logger - pino-style logger
   * @param {Object} opts - verb payload (exposed as this.data by the Task base)
   * @param {Task} parentTask - the owning llm verb task
   * @throws {Error} if auth.agent_id is missing
   */
  constructor(logger, opts, parentTask) {
    super(logger, opts, parentTask);
    this.parent = parentTask;
    this.vendor = this.parent.vendor;
    this.auth = this.parent.auth;

    const {agent_id, api_key} = this.auth || {};
    if (!agent_id) throw new Error('auth.agent_id is required for Elevenlabs S2S');
    this.agent_id = agent_id;
    this.api_key = api_key;

    this.actionHook = this.data.actionHook;
    this.eventHook = this.data.eventHook;
    this.toolHook = this.data.toolHook;

    const {
      conversation_initiation_client_data,
      input_sample_rate = 16000,
      output_sample_rate = 16000
    } = this.data.llmOptions;
    this.conversation_initiation_client_data = conversation_initiation_client_data;
    this.input_sample_rate = input_sample_rate;
    this.output_sample_rate = output_sample_rate;

    this.results = {
      completionReason: 'normal conversation end'
    };

    /**
     * only one of these will have items:
     * if includeEvents is non-empty, only those events go to the eventHook;
     * otherwise any event listed in excludeEvents is suppressed
     */
    this.includeEvents = [];
    this.excludeEvents = [];

    /* default to all events if user did not specify */
    this._populateEvents(this.data.events || elevenlabs_server_events);

    this.addCustomEventListener = parentTask.addCustomEventListener.bind(parentTask);
    this.removeCustomEventListeners = parentTask.removeCustomEventListeners.bind(parentTask);
  }

  get name() { return TaskName; }

  /**
   * Resolve the websocket host/path to connect to.
   * Without an api_key we use the public (unauthenticated) agent endpoint;
   * with one, we request a signed url from Elevenlabs.
   * @returns {Promise<{host: string, path: string}>}
   * @throws {Error} if the signed-url request fails
   */
  async getSignedUrl() {
    if (!this.api_key) {
      return {
        host: 'api.elevenlabs.io',
        path: `/v1/convai/conversation?agent_id=${this.agent_id}`,
      };
    }
    const {statusCode, body} = await request(
      `https://api.elevenlabs.io/v1/convai/conversation/get_signed_url?agent_id=${this.agent_id}`, {
        method: 'GET',
        headers: {
          'xi-api-key': this.api_key
        },
      }
    );
    const data = await body.json();
    if (statusCode !== 200 || !data?.signed_url) {
      this.logger.error({statusCode, data}, 'Elevenlabs Error registering call');
      throw new Error(`Elevenlabs Error registering call: ${data.message}`);
    }
    const url = new URL(data.signed_url);
    return {
      host: url.hostname,
      path: url.pathname + url.search,
    };
  }

  /**
   * Invoke the freeswitch uuid_elevenlabs_s2s api with pipe-delimited args.
   * @throws {Error} if the module does not respond +OK
   */
  async _api(ep, args) {
    const res = await ep.api('uuid_elevenlabs_s2s', `^^|${args.join('|')}`);
    if (!res.body?.startsWith('+OK')) {
      /* bugfix: Error() takes a message string; previously an object was
       * passed as the first argument, yielding "[object Object]" */
      throw new Error(`Error calling uuid_elevenlabs_s2s with args ${args.join('|')}: ${res.body}`);
    }
  }

  /**
   * Run the conversation until the remote end disconnects, an error occurs,
   * or the task is killed; then report results through the parent's actionHook.
   */
  async exec(cs, {ep}) {
    await super.exec(cs);
    await this._startListening(cs, ep);

    await this.awaitTaskDone();

    /* note: the parent llm verb started the span, which is why this is necessary */
    await this.parent.performAction(this.results);
    this._unregisterHandlers();
  }

  /* Tear down the session (best effort) and release the exec() waiter. */
  async kill(cs) {
    super.kill(cs);
    this._api(cs.ep, [cs.ep.uuid, SessionDelete])
      .catch((err) => this.logger.info({err}, 'TaskLlmElevenlabs_S2S:kill - error deleting session'));
    this.notifyTaskDone();
  }

  /**
   * Send a tool (function call) result back to the Elevenlabs server.
   * The payload must be a client_tool_result event per the Elevenlabs
   * conversational websocket protocol; anything else is logged and dropped.
   */
  async processToolOutput(ep, tool_call_id, rawData) {
    try {
      const {data} = rawData;
      this.logger.debug({tool_call_id, data}, 'TaskLlmElevenlabs_S2S:processToolOutput');
      if (!data.type || data.type !== 'client_tool_result') {
        this.logger.info({data},
          'TaskLlmElevenlabs_S2S:processToolOutput - invalid tool output, must be client_tool_result');
      }
      else {
        await this._api(ep, [ep.uuid, ClientEvent, JSON.stringify(data)]);
      }
    } catch (err) {
      this.logger.info({err}, 'TaskLlmElevenlabs_S2S:processToolOutput');
    }
  }

  /* Mid-session llm:update requests are not supported for Elevenlabs; log and ignore. */
  async processLlmUpdate(ep, data, _callSid) {
    this.logger.debug({data, _callSid}, 'TaskLlmElevenlabs_S2S:processLlmUpdate, ignored');
  }

  /* Register event handlers then ask the module to open the websocket session. */
  async _startListening(cs, ep) {
    this._registerHandlers(ep);

    try {
      const {host, path} = await this.getSignedUrl();
      /* 'no_initial_config' tells the module we will NOT follow up with
       * conversation_initiation_client_data after connect */
      const args = this.conversation_initiation_client_data ?
        [ep.uuid, 'session.create', this.input_sample_rate, this.output_sample_rate, host, path] :
        [ep.uuid, 'session.create', this.input_sample_rate, this.output_sample_rate, host, path, 'no_initial_config'];
      await this._api(ep, args);
    } catch (err) {
      this.logger.error({err}, 'TaskLlmElevenlabs_S2S:_startListening');
      this.notifyTaskDone();
    }
  }

  /**
   * Serialize and send a client event to the Elevenlabs server.
   * @returns {Promise<boolean>} true on success, false on failure
   */
  async _sendClientEvent(ep, obj) {
    let ok = true;
    this.logger.debug({obj}, 'TaskLlmElevenlabs_S2S:_sendClientEvent');
    try {
      const args = [ep.uuid, ClientEvent, JSON.stringify(obj)];
      await this._api(ep, args);
    } catch (err) {
      ok = false;
      this.logger.error({err}, 'TaskLlmElevenlabs_S2S:_sendClientEvent - Error');
    }
    return ok;
  }

  /* After connect, send conversation_initiation_client_data if the app supplied it. */
  async _sendInitialMessage(ep) {
    if (this.conversation_initiation_client_data) {
      if (!await this._sendClientEvent(ep, {
        type: 'conversation_initiation_client_data',
        ...this.conversation_initiation_client_data
      })) {
        this.notifyTaskDone();
      }
    }
  }

  _registerHandlers(ep) {
    /* NOTE(review): _onError is defined below but never registered here — confirm
     * whether an Error event listener should also be added */
    this.addCustomEventListener(ep, LlmEvents_Elevenlabs.Connect, this._onConnect.bind(this, ep));
    this.addCustomEventListener(ep, LlmEvents_Elevenlabs.ConnectFailure, this._onConnectFailure.bind(this, ep));
    this.addCustomEventListener(ep, LlmEvents_Elevenlabs.Disconnect, this._onDisconnect.bind(this, ep));
    this.addCustomEventListener(ep, LlmEvents_Elevenlabs.ServerEvent, this._onServerEvent.bind(this, ep));
  }

  _unregisterHandlers() {
    this.removeCustomEventListeners();
  }

  _onError(ep, evt) {
    this.logger.info({evt}, 'TaskLlmElevenlabs_S2S:_onError');
    this.notifyTaskDone();
  }

  _onConnect(ep) {
    this.logger.debug('TaskLlmElevenlabs_S2S:_onConnect');
    this._sendInitialMessage(ep);
  }

  _onConnectFailure(_ep, evt) {
    this.logger.info(evt, 'TaskLlmElevenlabs_S2S:_onConnectFailure');
    this.results = {completionReason: 'connection failure'};
    this.notifyTaskDone();
  }

  _onDisconnect(_ep, evt) {
    /* bugfix: log label previously said _onConnectFailure */
    this.logger.info(evt, 'TaskLlmElevenlabs_S2S:_onDisconnect');
    this.results = {completionReason: 'disconnect from remote end'};
    this.notifyTaskDone();
  }

  /**
   * Dispatch a server event: handle errors and client_tool_call (via MCP when
   * the tool is an MCP tool, otherwise via the app's toolHook), then forward
   * the event to the eventHook if it passes the include/exclude filter.
   */
  async _onServerEvent(ep, evt) {
    let endConversation = false;
    const type = evt.type;
    this.logger.info({evt}, 'TaskLlmElevenlabs_S2S:_onServerEvent');
    if (type === 'error') {
      endConversation = true;
      this.results = {
        completionReason: 'server error',
        error: evt.error
      };
    }

    /* tool calls */
    else if (type === 'client_tool_call') {
      this.logger.debug({evt}, 'TaskLlmElevenlabs_S2S:_onServerEvent - function_call');
      const {tool_name: name, tool_call_id: call_id, parameters: args} = evt.client_tool_call;
      const mcpTools = this.parent.isMcpEnabled ? await this.parent.mcpService.getAvailableMcpTools() : [];
      if (mcpTools.some((tool) => tool.name === name)) {
        this.logger.debug({name, args}, 'TaskLlmElevenlabs_S2S:_onServerEvent - calling mcp tool');
        try {
          const res = await this.parent.mcpService.callMcpTool(name, args);
          this.logger.debug({res}, 'TaskLlmElevenlabs_S2S:_onServerEvent - function_call - mcp result');
          this.processToolOutput(ep, call_id, {
            data: {
              type: 'client_tool_result',
              tool_call_id: call_id,
              result: res.content?.length ? res.content[0] : res.content,
              is_error: false
            }
          });
          /* NOTE(review): this early return skips the eventHook notification
           * below for mcp-handled tool calls — confirm that is intended */
          return;
        }
        catch (err) {
          this.logger.info({err, evt}, 'TaskLlmElevenlabs_S2S - error calling mcp tool');
          this.results = {
            completionReason: 'client error calling mcp function',
            error: err
          };
          endConversation = true;
        }
      } else if (!this.toolHook) {
        this.logger.warn({evt}, 'TaskLlmElevenlabs_S2S:_onServerEvent - no toolHook defined!');
      }
      else {
        try {
          await this.parent.sendToolHook(call_id, {name, args});
        } catch (err) {
          this.logger.info({err, evt}, 'TaskLlmElevenlabs_S2S - error calling function');
          this.results = {
            completionReason: 'client error calling function',
            error: err
          };
          endConversation = true;
        }
      }
    }

    /* check whether we should notify on this event */
    if (this.includeEvents.length > 0 ? this.includeEvents.includes(type) : !this.excludeEvents.includes(type)) {
      this.parent.sendEventHook(evt)
        .catch((err) => this.logger.info({err},
          'TaskLlmElevenlabs_S2S:_onServerEvent - error sending event hook'));
    }

    if (endConversation) {
      this.logger.info({results: this.results},
        'TaskLlmElevenlabs_S2S:_onServerEvent - ending conversation due to error');
      this.notifyTaskDone();
    }
  }

  /**
   * Build the include/exclude filter from the app-supplied event list.
   * 'all' selects everything, optionally minus '-'-prefixed exclusions;
   * otherwise only the listed (wildcard-expanded) events are included.
   */
  _populateEvents(events) {
    if (events.includes('all')) {
      /* work by excluding specific events */
      const exclude = events
        .filter((evt) => evt.startsWith('-'))
        .map((evt) => evt.slice(1));
      if (exclude.length === 0) this.includeEvents = elevenlabs_server_events;
      else this.excludeEvents = expandWildcards(exclude);
    }
    else {
      /* work by including specific events */
      const include = events
        .filter((evt) => !evt.startsWith('-'));
      this.includeEvents = expandWildcards(include);
    }

    this.logger.debug({
      includeEvents: this.includeEvents,
      excludeEvents: this.excludeEvents
    }, 'TaskLlmElevenlabs_S2S:_populateEvents');
  }
}
module.exports = TaskLlmElevenlabs_S2S;

View File

@@ -0,0 +1,319 @@
const Task = require('../../task');
const TaskName = 'Llm_Google_s2s';
const {LlmEvents_Google} = require('../../../utils/constants');
const ClientEvent = 'client.event';
const SessionDelete = 'session.delete';
const google_server_events = [
  'error',
  'session.created',
  'session.updated',
];

/**
 * Expand any entries ending in the ".*" wildcard into the concrete
 * google server event names that share the prefix; all other entries
 * pass through unchanged. Order is preserved.
 * @param {string[]} events - requested event names, possibly with wildcards
 * @returns {string[]} the expanded list
 */
const expandWildcards = (events) => events.flatMap((evt) => {
  if (!evt.endsWith('.*')) return [evt];
  const prefix = evt.slice(0, -2); // strip the trailing ".*"
  return google_server_events.filter((name) => name.startsWith(prefix));
});
/**
 * Speech-to-speech LLM task for the Google Gemini Live (bidiGenerateContent)
 * websocket API. Created by the parent `llm` verb; drives the freeswitch
 * uuid_google_s2s module and relays server events, tool calls, and lifecycle
 * changes back through the parent task's event/tool hooks.
 */
class TaskLlmGoogle_S2S extends Task {
  /**
   * @param {Object} logger - pino-style logger
   * @param {Object} opts - verb payload (exposed as this.data by the Task base)
   * @param {Task} parentTask - the owning llm verb task
   * @throws {Error} if auth.apiKey or llmOptions.setup is missing
   */
  constructor(logger, opts, parentTask) {
    super(logger, opts, parentTask);
    this.parent = parentTask;
    /* bugfix: this.vendor was assigned twice */
    this.vendor = this.parent.vendor;
    this.model = this.parent.model || 'models/gemini-2.0-flash-live-001';
    this.auth = this.parent.auth;
    /* NOTE(review): parent exposes connectOptions but we store connectionOptions —
     * confirm the intended property name; it is not read elsewhere in this class */
    this.connectionOptions = this.parent.connectOptions;

    const {apiKey} = this.auth || {};
    if (!apiKey) throw new Error('auth.apiKey is required for Google S2S');
    this.apiKey = apiKey;

    this.actionHook = this.data.actionHook;
    this.eventHook = this.data.eventHook;
    this.toolHook = this.data.toolHook;

    const {setup} = this.data.llmOptions;
    if (typeof setup !== 'object') {
      throw new Error('llmOptions with an initial setup is required for Google S2S');
    }
    this.setup = {
      ...setup,
      model: this.model,
      // make sure output is always audio
      generationConfig: {
        ...(setup.generationConfig || {}),
        responseModalities: 'audio'
      }
    };

    this.results = {
      completionReason: 'normal conversation end'
    };

    /**
     * only one of these will have items:
     * if includeEvents is non-empty, only those events go to the eventHook;
     * otherwise any event listed in excludeEvents is suppressed
     */
    this.includeEvents = [];
    this.excludeEvents = [];

    /* default to all events if user did not specify */
    this._populateEvents(this.data.events || google_server_events);

    this.addCustomEventListener = parentTask.addCustomEventListener.bind(parentTask);
    this.removeCustomEventListeners = parentTask.removeCustomEventListeners.bind(parentTask);
  }

  get name() { return TaskName; }

  /**
   * Invoke the freeswitch uuid_google_s2s api with pipe-delimited args.
   * @throws {Error} if the module does not respond +OK
   */
  async _api(ep, args) {
    const res = await ep.api('uuid_google_s2s', `^^|${args.join('|')}`);
    if (!res.body?.startsWith('+OK')) {
      /* bugfix: Error() takes a message string (an object was being passed),
       * and the message referenced uuid_openai_s2s instead of uuid_google_s2s */
      throw new Error(`Error calling uuid_google_s2s with args ${args.join('|')}: ${res.body}`);
    }
  }

  /**
   * Run the conversation until the remote end disconnects, an error occurs,
   * or the task is killed; then report results through the parent's actionHook.
   */
  async exec(cs, {ep}) {
    await super.exec(cs);
    await this._startListening(cs, ep);

    await this.awaitTaskDone();

    /* note: the parent llm verb started the span, which is why this is necessary */
    await this.parent.performAction(this.results);
    this._unregisterHandlers();
  }

  /* Tear down the session (best effort) and release the exec() waiter. */
  async kill(cs) {
    super.kill(cs);
    this._api(cs.ep, [cs.ep.uuid, SessionDelete])
      .catch((err) => this.logger.info({err}, 'TaskLlmGoogle_S2S:kill - error deleting session'));
    this.notifyTaskDone();
  }

  /**
   * Build the include/exclude filter from the app-supplied event list.
   * 'all' selects everything, optionally minus '-'-prefixed exclusions;
   * otherwise only the listed (wildcard-expanded) events are included.
   */
  _populateEvents(events) {
    if (events.includes('all')) {
      /* work by excluding specific events */
      const exclude = events
        .filter((evt) => evt.startsWith('-'))
        .map((evt) => evt.slice(1));
      if (exclude.length === 0) this.includeEvents = google_server_events;
      else this.excludeEvents = expandWildcards(exclude);
    }
    else {
      /* work by including specific events */
      const include = events
        .filter((evt) => !evt.startsWith('-'));
      this.includeEvents = expandWildcards(include);
    }

    this.logger.debug({
      includeEvents: this.includeEvents,
      excludeEvents: this.excludeEvents
    }, 'TaskLlmGoogle_S2S:_populateEvents');
  }

  /* Register event handlers then ask the module to open the websocket session. */
  async _startListening(cs, ep) {
    this._registerHandlers(ep);

    try {
      const args = [ep.uuid, 'session.create', this.apiKey];
      await this._api(ep, args);
    } catch (err) {
      this.logger.error({err}, 'TaskLlmGoogle_S2S:_startListening');
      this.notifyTaskDone();
    }
  }

  /**
   * Serialize and send a client event to the Google server.
   * @returns {Promise<boolean>} true on success, false on failure
   */
  async _sendClientEvent(ep, obj) {
    let ok = true;
    this.logger.debug({obj}, 'TaskLlmGoogle_S2S:_sendClientEvent');
    try {
      const args = [ep.uuid, ClientEvent, JSON.stringify(obj)];
      await this._api(ep, args);
    } catch (err) {
      ok = false;
      this.logger.error({err}, 'TaskLlmGoogle_S2S:_sendClientEvent - Error');
    }
    return ok;
  }

  /**
   * After connect, send the setup message. If MCP is enabled, convert the
   * available MCP tools to Gemini functionDeclarations and merge them with
   * any tools the app supplied in the setup.
   */
  async _sendInitialMessage(ep) {
    const setup = this.setup;
    const mcpTools = this.parent.isMcpEnabled ? await this.parent.mcpService.getAvailableMcpTools() : [];
    if (mcpTools && mcpTools.length > 0) {
      const convertedTools = [
        {
          functionDeclarations: mcpTools.map((tool) => {
            /* Gemini rejects JSON-schema keywords it does not understand;
             * note this mutates the cached tool's inputSchema in place */
            if (tool.inputSchema) {
              delete tool.inputSchema.additionalProperties;
              delete tool.inputSchema['$schema'];
            }
            return {
              name: tool.name,
              description: tool.description,
              parameters: tool.inputSchema,
            };
          })
        }
      ];
      // merge with any existing tools
      setup.tools = [...convertedTools, ...(this.setup.tools || [])];
    }
    /* bugfix: the debug line was inside the failure branch (and claimed to be
     * "sending" after the send had already failed) */
    this.logger.debug({setup}, 'TaskLlmGoogle_S2S:_sendInitialMessage - sending setup');
    if (!await this._sendClientEvent(ep, {
      setup,
    })) {
      this.notifyTaskDone();
    }
  }

  _registerHandlers(ep) {
    /* NOTE(review): _onError is defined below but never registered here — confirm
     * whether an Error event listener should also be added */
    this.addCustomEventListener(ep, LlmEvents_Google.Connect, this._onConnect.bind(this, ep));
    this.addCustomEventListener(ep, LlmEvents_Google.ConnectFailure, this._onConnectFailure.bind(this, ep));
    this.addCustomEventListener(ep, LlmEvents_Google.Disconnect, this._onDisconnect.bind(this, ep));
    this.addCustomEventListener(ep, LlmEvents_Google.ServerEvent, this._onServerEvent.bind(this, ep));
  }

  _unregisterHandlers() {
    this.removeCustomEventListeners();
  }

  _onError(ep, evt) {
    this.logger.info({evt}, 'TaskLlmGoogle_S2S:_onError');
    this.notifyTaskDone();
  }

  _onConnect(ep) {
    this.logger.debug('TaskLlmGoogle_S2S:_onConnect');
    this._sendInitialMessage(ep);
  }

  _onConnectFailure(_ep, evt) {
    this.logger.info(evt, 'TaskLlmGoogle_S2S:_onConnectFailure');
    this.results = {completionReason: 'connection failure'};
    this.notifyTaskDone();
  }

  _onDisconnect(_ep, evt) {
    /* bugfix: log label previously said _onConnectFailure */
    this.logger.info(evt, 'TaskLlmGoogle_S2S:_onDisconnect');
    this.results = {completionReason: 'disconnect from remote end'};
    this.notifyTaskDone();
  }

  /**
   * Dispatch a server event: resolve toolCall requests (via MCP when the
   * named tool is an MCP tool, otherwise via the app's toolHook), then
   * forward the event to the eventHook if it passes the filter.
   */
  async _onServerEvent(ep, evt) {
    let endConversation = false;
    this.logger.debug({evt}, 'TaskLlmGoogle_S2S:_onServerEvent');
    const {toolCall /**toolCallCancellation*/} = evt;
    if (toolCall) {
      this.logger.debug({toolCall}, 'TaskLlmGoogle_S2S:_onServerEvent - toolCall');
      if (!this.toolHook) {
        this.logger.info({evt}, 'TaskLlmGoogle_S2S:_onServerEvent - no toolHook defined!');
      }
      else {
        const {functionCalls} = toolCall;
        const mcpTools = this.parent.isMcpEnabled ? await this.parent.mcpService.getAvailableMcpTools() : [];
        const functionResponses = [];
        if (mcpTools && mcpTools.length > 0) {
          for (const functionCall of functionCalls) {
            const {name, args, id} = functionCall;
            const tool = mcpTools.find((tool) => tool.name === name);
            if (tool) {
              const response = await this.parent.mcpService.callMcpTool(name, args);
              functionResponses.push({
                response: {
                  output: response,
                },
                id
              });
            }
          }
        }
        if (functionResponses && functionResponses.length > 0) {
          this.logger.debug({functionResponses}, 'TaskLlmGoogle_S2S:_onServerEvent - function_call - mcp result');
          this.processToolOutput(ep, 'tool_call_id', {
            toolResponse: {
              functionResponses
            }
          });
        } else {
          try {
            await this.parent.sendToolHook('function_call_id', {type: 'toolCall', functionCalls});
          } catch (err) {
            this.logger.info({err, evt}, 'TaskLlmGoogle_S2S - error calling function');
            this.results = {
              completionReason: 'client error calling function',
              error: err
            };
            endConversation = true;
          }
        }
      }
    }
    /* NOTE(review): the filter is always evaluated against the literal string
     * 'llm_event', not the actual event type, so per-event include/exclude
     * lists have no effect here — confirm intended behavior */
    this._sendLlmEvent('llm_event', evt);
    if (endConversation) {
      this.logger.info({results: this.results},
        'TaskLlmGoogle_S2S:_onServerEvent - ending conversation due to error');
      this.notifyTaskDone();
    }
  }

  /* Forward evt to the app's eventHook when `type` passes the include/exclude filter. */
  _sendLlmEvent(type, evt) {
    /* check whether we should notify on this event */
    if (this.includeEvents.length > 0 ? this.includeEvents.includes(type) : !this.excludeEvents.includes(type)) {
      this.parent.sendEventHook(evt)
        .catch((err) => this.logger.info({err}, 'TaskLlmGoogle_S2S:_onServerEvent - error sending event hook'));
    }
  }

  /* Forward a mid-session llm:update payload verbatim to the Google session. */
  async processLlmUpdate(ep, data, _callSid) {
    try {
      this.logger.debug({data, _callSid}, 'TaskLlmGoogle_S2S:processLlmUpdate');
      await this._api(ep, [ep.uuid, ClientEvent, JSON.stringify(data)]);
    } catch (err) {
      this.logger.info({err, data}, 'TaskLlmGoogle_S2S:processLlmUpdate - Error processing LLM update');
    }
  }

  /**
   * Send a tool (function call) result back to the Google server.
   * The payload must carry a toolResponse with functionResponses per the
   * Gemini Live API; anything else is logged and dropped.
   */
  async processToolOutput(ep, tool_call_id, data) {
    try {
      this.logger.debug({tool_call_id, data}, 'TaskLlmGoogle_S2S:processToolOutput');
      const {toolResponse} = data;
      if (!toolResponse) {
        this.logger.info({data},
          'TaskLlmGoogle_S2S:processToolOutput - invalid tool output, must be functionResponses');
      }
      else {
        await this._api(ep, [ep.uuid, ClientEvent, JSON.stringify(data)]);
      }
    } catch (err) {
      this.logger.info({err, data}, 'TaskLlmGoogle_S2S:processToolOutput - Error processing tool output');
    }
  }
}
module.exports = TaskLlmGoogle_S2S;

View File

@@ -120,7 +120,7 @@ class TaskLlmOpenAI_S2S extends Task {
switch (this.vendor) {
case 'openai':
return 'v1/realtime?model=${this.model}';
return `v1/realtime?model=${this.model}`;
case 'microsoft':
return `openai/realtime?api-version=2024-10-01-preview&deployment=${this.model}`;
}
@@ -235,6 +235,23 @@ class TaskLlmOpenAI_S2S extends Task {
/* send immediate session.update if present */
else if (this.session_update) {
if (this.parent.isMcpEnabled) {
this.logger.debug('TaskLlmOpenAI_S2S:_sendInitialMessage - mcp enabled');
const tools = await this.parent.mcpService.getAvailableMcpTools();
if (tools && tools.length > 0 && this.session_update) {
const convertedTools = tools.map((tool) => ({
name: tool.name,
type: 'function',
description: tool.description,
parameters: tool.inputSchema
}));
this.session_update.tools = [
...convertedTools,
...(this.session_update.tools || [])
];
}
}
obj = {type: 'session.update', session: this.session_update};
this.logger.debug({obj}, 'TaskLlmOpenAI_S2S:_sendInitialMessage - sending session.update');
if (!await this._sendClientEvent(ep, obj)) {
@@ -299,13 +316,37 @@ class TaskLlmOpenAI_S2S extends Task {
/* tool calls */
else if (type === 'response.output_item.done' && evt.item?.type === 'function_call') {
this.logger.debug({evt}, 'TaskLlmOpenAI_S2S:_onServerEvent - function_call');
if (!this.toolHook) {
const {name, call_id} = evt.item;
const args = JSON.parse(evt.item.arguments);
const mcpTools = this.parent.isMcpEnabled ? await this.parent.mcpService.getAvailableMcpTools() : [];
if (mcpTools.some((tool) => tool.name === name)) {
this.logger.debug({call_id, name, args}, 'TaskLlmOpenAI_S2S:_onServerEvent - calling mcp tool');
try {
const res = await this.parent.mcpService.callMcpTool(name, args);
this.logger.debug({res}, 'TaskLlmOpenAI_S2S:_onServerEvent - function_call - mcp result');
this.processToolOutput(ep, call_id, {
type: 'conversation.item.create',
item: {
type: 'function_call_output',
call_id,
output: res.content[0]?.text || 'There is no output from the function call',
}
});
return;
} catch (err) {
this.logger.info({err, evt}, 'TaskLlmOpenAI_S2S - error calling function');
this.results = {
completionReason: 'client error calling mcp function',
error: err
};
endConversation = true;
}
}
else if (!this.toolHook) {
this.logger.warn({evt}, 'TaskLlmOpenAI_S2S:_onServerEvent - no toolHook defined!');
}
else {
const {name, call_id} = evt.item;
const args = JSON.parse(evt.item.arguments);
try {
await this.parent.sendToolHook(call_id, {name, args});
} catch (err) {

View File

@@ -0,0 +1,344 @@
const Task = require('../../task');
const TaskName = 'Llm_Ultravox_s2s';
const {request} = require('undici');
const {LlmEvents_Ultravox} = require('../../../utils/constants');
const ultravox_server_events = [
'createCall',
'pong',
'state',
'transcript',
'conversationText',
'clientToolInvocation',
'playbackClearBuffer',
];
const ClientEvent = 'client.event';
/* Ultravox event names carry no wildcard patterns, so expansion is a
 * pass-through: the caller's list is returned unchanged. */
const expandWildcards = (events) => events;
const SessionDelete = 'session.delete';
class TaskLlmUltravox_S2S extends Task {
  /**
   * Speech-to-speech task for the Ultravox realtime voice API.
   * The parent llm verb supplies vendor, model, auth and connection options;
   * this task registers a call with the Ultravox REST API, connects the media
   * endpoint to the returned joinUrl via the freeswitch module, and relays
   * server events and tool invocations back to the application.
   * @param {Object} logger - logger instance
   * @param {Object} opts - verb payload (available as this.data)
   * @param {Task} parentTask - enclosing llm verb task
   * @throws {Error} if auth.apiKey is not provided
   */
  constructor(logger, opts, parentTask) {
    super(logger, opts, parentTask);
    this.parent = parentTask;
    this.vendor = this.parent.vendor;
    this.model = this.parent.model || 'fixie-ai/ultravox';
    this.auth = this.parent.auth;
    this.connectionOptions = this.parent.connectOptions;

    const {apiKey} = this.auth || {};
    if (!apiKey) throw new Error('auth.apiKey is required for Vendor: Ultravox');
    this.apiKey = apiKey;

    this.actionHook = this.data.actionHook;
    this.eventHook = this.data.eventHook;
    this.toolHook = this.data.toolHook;

    /* default result; overwritten if the session ends abnormally */
    this.results = {
      completionReason: 'normal conversation end'
    };

    /**
     * only one of these will have items:
     * if includeEvents has entries, only those events are forwarded;
     * if excludeEvents has entries, everything except those is forwarded
     */
    this.includeEvents = [];
    this.excludeEvents = [];

    /* default to all events if user did not specify */
    this._populateEvents(this.data.events || ultravox_server_events);

    this.addCustomEventListener = parentTask.addCustomEventListener.bind(parentTask);
    this.removeCustomEventListeners = parentTask.removeCustomEventListeners.bind(parentTask);
  }

  get name() { return TaskName; }

  /**
   * Invoke the freeswitch uuid_ultravox_s2s api on the endpoint.
   * @param {Object} ep - media endpoint
   * @param {Array} args - api arguments, joined with the ^^| pipe-delimiter syntax
   * @throws {Error} if the api response does not start with +OK
   */
  async _api(ep, args) {
    const res = await ep.api('uuid_ultravox_s2s', `^^|${args.join('|')}`);
    if (!res.body?.startsWith('+OK')) {
      throw new Error(`Error calling uuid_ultravox_s2s: ${JSON.stringify(res.body)}`);
    }
  }

  /**
   * Converts a JSON Schema to the dynamic parameters format used in the Ultravox API
   * @param {Object} jsonSchema - A JSON Schema object defining parameters
   * @param {string} locationDefault - Default location value for parameters (default: 'PARAMETER_LOCATION_BODY')
   * @returns {Array} Array of dynamic parameter objects ({name, location, schema, required})
   */
  transformSchemaToParameters(jsonSchema, locationDefault = 'PARAMETER_LOCATION_BODY') {
    if (jsonSchema.properties) {
      const required = jsonSchema.required || [];
      /* carry each property's schema through so the model knows the parameter types */
      return Object.entries(jsonSchema.properties).map(([name, schema]) => {
        return {
          name,
          location: locationDefault,
          schema,
          required: required.includes(name)
        };
      });
    }
    return [];
  }

  /**
   * Register a new call with the Ultravox REST API.
   * When MCP is enabled, available MCP tools are merged into
   * llmOptions.selectedTools as client tools so that invocations flow back
   * through the freeswitch module.
   * @returns {Promise<Object>} the Ultravox call object (includes joinUrl)
   * @throws {Error} if the API does not return 201 with a joinUrl
   */
  async createCall() {
    const mcpTools = this.parent.isMcpEnabled ? await this.parent.mcpService.getAvailableMcpTools() : [];
    if (mcpTools && mcpTools.length > 0) {
      const convertedTools = mcpTools.map((tool) => {
        return {
          temporaryTool: {
            modelToolName: tool.name,
            description: tool.description,
            dynamicParameters: this.transformSchemaToParameters(tool.inputSchema),
            // use client tool that ultravox call tool via freeswitch module.
            client: {}
          }
        };
      }
      );
      // merge with any existing tools
      this.data.llmOptions.selectedTools = [
        ...convertedTools,
        ...(this.data.llmOptions.selectedTools || [])
      ];
    }
    const payload = {
      ...this.data.llmOptions,
      model: this.model,
      medium: {
        ...(this.data.llmOptions.medium || {}),
        /* jambonz media streams are 8kHz narrowband */
        serverWebSocket: {
          inputSampleRate: 8000,
          outputSampleRate: 8000,
        }
      }
    };
    const {statusCode, body} = await request('https://api.ultravox.ai/api/calls', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'X-API-Key': this.apiKey
      },
      body: JSON.stringify(payload)
    });
    const data = await body.json();
    if (statusCode !== 201 || !data?.joinUrl) {
      this.logger.info({statusCode, data}, 'Ultravox Error registering call');
      /* data may be nullish here; optional-chain so we throw the intended Error */
      throw new Error(`Ultravox Error registering call:${statusCode} - ${data?.detail}`);
    }
    this.logger.debug({joinUrl: data.joinUrl}, 'Ultravox Call registered');
    return data;
  }

  _unregisterHandlers() {
    this.removeCustomEventListeners();
  }

  /* wire up the freeswitch module's connect/disconnect/server-event callbacks */
  _registerHandlers(ep) {
    this.addCustomEventListener(ep, LlmEvents_Ultravox.Connect, this._onConnect.bind(this, ep));
    this.addCustomEventListener(ep, LlmEvents_Ultravox.ConnectFailure, this._onConnectFailure.bind(this, ep));
    this.addCustomEventListener(ep, LlmEvents_Ultravox.Disconnect, this._onDisconnect.bind(this, ep));
    this.addCustomEventListener(ep, LlmEvents_Ultravox.ServerEvent, this._onServerEvent.bind(this, ep));
  }

  /**
   * Register the call with Ultravox, then instruct the freeswitch module to
   * open the websocket media session to the returned joinUrl.
   */
  async _startListening(cs, ep) {
    this._registerHandlers(ep);
    try {
      const data = await this.createCall();
      const {joinUrl} = data;
      // split the joinUrl into host and path
      const {host, pathname, search} = new URL(joinUrl);
      const args = [ep.uuid, 'session.create', host, pathname + search];
      await this._api(ep, args);
      // Notify the application that the session has been created with detail information
      this._sendLlmEvent('createCall', {
        type: 'createCall',
        ...data
      });
    } catch (err) {
      this.logger.info({err}, 'TaskLlmUltraVox_S2S:_startListening - Error sending createCall');
      this.results = {completionReason: `connection failure - ${err}`};
      this.notifyTaskDone();
    }
  }

  async exec(cs, {ep}) {
    await super.exec(cs);
    await this._startListening(cs, ep);
    await this.awaitTaskDone();
    /* note: the parent llm verb started the span, which is why this is necessary */
    await this.parent.performAction(this.results);
    this._unregisterHandlers();
  }

  /* tear down the ultravox session; best-effort, errors are only logged */
  async kill(cs) {
    super.kill(cs);
    this._api(cs.ep, [cs.ep.uuid, SessionDelete])
      .catch((err) => this.logger.info({err}, 'TaskLlmUltravox_S2S:kill - error deleting session'));
    this.notifyTaskDone();
  }

  _onConnect(ep) {
    this.logger.info('TaskLlmUltravox_S2S:_onConnect');
  }

  _onConnectFailure(_ep, evt) {
    this.logger.info(evt, 'TaskLlmUltravox_S2S:_onConnectFailure');
    this.results = {completionReason: 'connection failure'};
    this.notifyTaskDone();
  }

  _onDisconnect(_ep, evt) {
    /* fixed copy-paste: previously logged as _onConnectFailure */
    this.logger.info(evt, 'TaskLlmUltravox_S2S:_onDisconnect');
    this.results = {completionReason: 'disconnect from remote end'};
    this.notifyTaskDone();
  }

  /**
   * Handle an event from the Ultravox server. Errors end the conversation;
   * client tool invocations are dispatched to MCP (if the tool is an MCP
   * tool) or to the application's toolHook; all events are forwarded to the
   * application subject to the include/exclude filters.
   */
  async _onServerEvent(_ep, evt) {
    let endConversation = false;
    const type = evt.type;
    this.logger.debug({evt}, 'TaskLlmUltravox_S2S:_onServerEvent');

    /* server errors of some sort */
    if (type === 'error') {
      endConversation = true;
      this.results = {
        completionReason: 'server error',
        error: evt.error
      };
    }

    /* tool calls */
    else if (type === 'client_tool_invocation') {
      this.logger.debug({evt}, 'TaskLlmUltravox_S2S:_onServerEvent - function_call');
      const {toolName: name, invocationId: call_id, parameters: args} = evt;
      const mcpTools = this.parent.isMcpEnabled ? await this.parent.mcpService.getAvailableMcpTools() : [];
      if (mcpTools.some((tool) => tool.name === name)) {
        this.logger.debug({
          name,
          input: args
        }, 'TaskLlmUltravox_S2S:_onServerEvent - function_call - mcp tool');
        try {
          const res = await this.parent.mcpService.callMcpTool(name, args);
          this.logger.debug({res}, 'TaskLlmUltravox_S2S:_onServerEvent - function_call - mcp result');
          /* fire-and-forget; processToolOutput handles its own errors */
          this.processToolOutput(_ep, call_id, {
            type: 'client_tool_result',
            invocation_id: call_id,
            result: res.content
          });
          return;
        } catch (err) {
          this.logger.info({err, evt}, 'TaskLlmUltravox_S2S - error calling mcp tool');
          this.results = {
            completionReason: 'client error calling mcp function',
            error: err
          };
          endConversation = true;
        }
      } else if (!this.toolHook) {
        this.logger.info({evt}, 'TaskLlmUltravox_S2S:_onServerEvent - no toolHook defined!');
      }
      else {
        try {
          await this.parent.sendToolHook(call_id, {name, args});
        } catch (err) {
          this.logger.info({err, evt}, 'TaskLlmUltravox_S2S - error calling function');
          this.results = {
            completionReason: 'client error calling function',
            error: err
          };
          endConversation = true;
        }
      }
    }

    this._sendLlmEvent(type, evt);

    if (endConversation) {
      this.logger.info({results: this.results},
        'TaskLlmUltravox_S2S:_onServerEvent - ending conversation due to error');
      this.notifyTaskDone();
    }
  }

  _sendLlmEvent(type, evt) {
    /* check whether we should notify on this event */
    if (this.includeEvents.length > 0 ? this.includeEvents.includes(type) : !this.excludeEvents.includes(type)) {
      this.parent.sendEventHook(evt)
        .catch((err) => this.logger.info({err}, 'TaskLlmUltravox_S2S:_onServerEvent - error sending event hook'));
    }
  }

  /**
   * Relay a mid-call update from the application; only input_text_message
   * is supported by the Ultravox module. Errors are logged, not rethrown.
   */
  async processLlmUpdate(ep, data, _callSid) {
    try {
      this.logger.debug({data, _callSid}, 'TaskLlmUltravox_S2S:processLlmUpdate');
      if (!data.type || ![
        'input_text_message'
      ].includes(data.type)) {
        this.logger.info({data},
          'TaskLlmUltravox_S2S:processLlmUpdate - invalid mid-call request, only input_text_message supported');
      }
      else {
        await this._api(ep, [ep.uuid, ClientEvent, JSON.stringify(data)]);
      }
    } catch (err) {
      this.logger.info({err, data}, 'TaskLlmUltravox_S2S:processLlmUpdate - Error processing LLM update');
    }
  }

  /**
   * Deliver a tool result back to Ultravox; the payload must be a
   * client_tool_result event. Errors are logged, not rethrown.
   */
  async processToolOutput(ep, tool_call_id, data) {
    try {
      this.logger.debug({tool_call_id, data}, 'TaskLlmUltravox_S2S:processToolOutput');
      if (!data.type || data.type !== 'client_tool_result') {
        this.logger.info({data},
          'TaskLlmUltravox_S2S:processToolOutput - invalid tool output, must be client_tool_result');
      }
      else {
        await this._api(ep, [ep.uuid, ClientEvent, JSON.stringify(data)]);
      }
    } catch (err) {
      this.logger.info({err, data}, 'TaskLlmUltravox_S2S:processToolOutput - Error processing tool output');
    }
  }

  /**
   * Populate includeEvents/excludeEvents from the user-supplied event list.
   * 'all' means everything, optionally minus entries prefixed with '-';
   * otherwise only the listed (non-negated) events are included.
   */
  _populateEvents(events) {
    if (events.includes('all')) {
      /* work by excluding specific events */
      const exclude = events
        .filter((evt) => evt.startsWith('-'))
        .map((evt) => evt.slice(1));
      if (exclude.length === 0) this.includeEvents = ultravox_server_events;
      else this.excludeEvents = expandWildcards(exclude);
    }
    else {
      /* work by including specific events */
      const include = events
        .filter((evt) => !evt.startsWith('-'));
      this.includeEvents = expandWildcards(include);
    }
    this.logger.debug({
      includeEvents: this.includeEvents,
      excludeEvents: this.excludeEvents
    }, 'TaskLlmUltravox_S2S:_populateEvents');
  }
}
module.exports = TaskLlmUltravox_S2S;

View File

@@ -29,7 +29,7 @@ class TaskLlmVoiceAgent_S2S extends Task {
this.parent = parentTask;
this.vendor = this.parent.vendor;
this.model = this.parent.model;
this.model = this.parent.model || 'voice-agent';
this.auth = this.parent.auth;
this.connectionOptions = this.parent.connectOptions;
@@ -41,25 +41,25 @@ class TaskLlmVoiceAgent_S2S extends Task {
this.actionHook = this.data.actionHook;
this.eventHook = this.data.eventHook;
this.toolHook = this.data.toolHook;
const {settingsConfiguration} = this.data.llmOptions;
const {Settings} = this.data.llmOptions;
if (typeof settingsConfiguration !== 'object') {
throw new Error('llmOptions with an initial settingsConfiguration is required for VoiceAgent S2S');
if (typeof Settings !== 'object') {
throw new Error('llmOptions with an initial Settings is required for VoiceAgent S2S');
}
// eslint-disable-next-line no-unused-vars
const {audio, ...rest} = settingsConfiguration;
const cfg = this.settingsConfiguration = rest;
const {audio, ...rest} = Settings;
const cfg = this.Settings = rest;
if (!cfg.agent) throw new Error('llmOptions.settingsConfiguration.agent is required for VoiceAgent S2S');
if (!cfg.agent) throw new Error('llmOptions.Settings.agent is required for VoiceAgent S2S');
if (!cfg.agent.think) {
throw new Error('llmOptions.settingsConfiguration.agent.think is required for VoiceAgent S2S');
throw new Error('llmOptions.Settings.agent.think is required for VoiceAgent S2S');
}
if (!cfg.agent.think.model) {
throw new Error('llmOptions.settingsConfiguration.agent.think.model is required for VoiceAgent S2S');
if (!cfg.agent.think.provider?.model) {
throw new Error('llmOptions.Settings.agent.think.provider.model is required for VoiceAgent S2S');
}
if (!cfg.agent.think.provider?.type) {
throw new Error('llmOptions.settingsConfiguration.agent.think.provider.type is required for VoiceAgent S2S');
throw new Error('llmOptions.Settings.agent.think.provider.type is required for VoiceAgent S2S');
}
this.results = {
@@ -92,13 +92,13 @@ class TaskLlmVoiceAgent_S2S extends Task {
const {path} = this.connectionOptions || {};
if (path) return path;
return '/agent';
return '/v1/agent/converse';
}
async _api(ep, args) {
const res = await ep.api('uuid_voice_agent_s2s', `^^|${args.join('|')}`);
if (!res.body?.startsWith('+OK')) {
throw new Error({args}, `Error calling uuid_voice_agent_s2s: ${JSON.stringify(res.body)}`);
throw new Error(`Error calling uuid_voice_agent_s2s: ${JSON.stringify(res.body)}`);
}
}
@@ -193,7 +193,20 @@ class TaskLlmVoiceAgent_S2S extends Task {
}
async _sendInitialMessage(ep) {
if (!await this._sendClientEvent(ep, this.settingsConfiguration)) {
const mcpTools = this.parent.isMcpEnabled ? await this.parent.mcpService.getAvailableMcpTools() : [];
if (mcpTools && mcpTools.length > 0 && this.Settings.agent?.think) {
const convertedTools = mcpTools.map((tool) => ({
name: tool.name,
description: tool.description,
parameters: tool.inputSchema
}));
this.Settings.agent.think.functions = [
...convertedTools,
...(this.Settings.agent.think?.functions || [])
];
}
if (!await this._sendClientEvent(ep, this.Settings)) {
this.notifyTaskDone();
}
}
@@ -254,17 +267,43 @@ class TaskLlmVoiceAgent_S2S extends Task {
/* tool calls */
else if (type === 'FunctionCallRequest') {
this.logger.debug({evt}, 'TaskLlmVoiceAgent_S2S:_onServerEvent - function_call');
if (!this.toolHook) {
const mcpTools = this.parent.isMcpEnabled ? await this.parent.mcpService.getAvailableMcpTools() : [];
if (!this.toolHook && mcpTools.length === 0) {
this.logger.warn({evt}, 'TaskLlmVoiceAgent_S2S:_onServerEvent - no toolHook defined!');
}
else {
const {function_name:name, function_call_id:call_id} = evt;
const args = evt.input;
} else {
const {functions} = evt;
const handledFunctions = [];
try {
await this.parent.sendToolHook(call_id, {name, args});
if (mcpTools && mcpTools.length > 0) {
for (const func of functions) {
const {name, arguments: args, id} = func;
const tool = mcpTools.find((tool) => tool.name === name);
if (tool) {
handledFunctions.push(name);
const response = await this.parent.mcpService.callMcpTool(name, JSON.parse(args));
this.logger.debug({response}, 'TaskLlmVoiceAgent_S2S:_onServerEvent - function_call - mcp result');
this.processToolOutput(_ep, id, {
data: {
type: 'FunctionCallResponse',
id,
name,
content: response.length > 0 ? response[0].text : 'There is no output from the function call'
}
});
}
}
}
for (const func of functions) {
const {name, arguments: args, id} = func;
if (!handledFunctions.includes(name)) {
await this.parent.sendToolHook(id, {name, args: JSON.parse(args)});
}
}
} catch (err) {
this.logger.info({err, evt}, 'TaskLlmVoiceAgent - error calling function');
this.logger.info({err, evt}, 'TaskLlmVoiceAgent_S2S:_onServerEvent - error calling function');
this.results = {
completionReason: 'client error calling function',
error: err

View File

@@ -84,6 +84,7 @@ function makeTask(logger, obj, parent) {
const TaskTranscribe = require('./transcribe');
return new TaskTranscribe(logger, data, parent);
case TaskName.Listen:
case TaskName.Stream:
const TaskListen = require('./listen');
return new TaskListen(logger, data, parent);
case TaskName.Redirect:

View File

@@ -1,7 +1,7 @@
const Task = require('./task');
const {TaskName, TaskPreconditions} = require('../utils/constants');
const bent = require('bent');
const uuidv4 = require('uuid-random');
const crypto = require('crypto');
const {K8S} = require('../config');
class TaskMessage extends Task {
constructor(logger, opts) {
@@ -9,7 +9,7 @@ class TaskMessage extends Task {
this.preconditions = TaskPreconditions.None;
this.payload = {
message_sid: this.data.message_sid || uuidv4(),
message_sid: this.data.message_sid || crypto.randomUUID(),
carrier: this.data.carrier,
to: this.data.to,
from: this.data.from,

View File

@@ -1,7 +1,6 @@
const Task = require('./task');
const {TaskName, TaskPreconditions} = require('../utils/constants');
const { PlayFileNotFoundError } = require('../utils/error');
class TaskPlay extends Task {
constructor(logger, opts) {
super(logger, opts);
@@ -27,6 +26,7 @@ class TaskPlay extends Task {
let playbackSeconds = 0;
let playbackMilliseconds = 0;
let completed = !(this.timeoutSecs > 0 || this.loop);
cs.playingAudio = true;
if (this.timeoutSecs > 0) {
timeout = setTimeout(async() => {
completed = true;
@@ -40,6 +40,22 @@ class TaskPlay extends Task {
try {
this.notifyStatus({event: 'start-playback'});
while (!this.killed && (this.loop === 'forever' || this.loop--) && this.ep.connected) {
/* Listen for playback-start event and set up a one-time listener for uuid_break
* that will kill the audio playback if the taskIds match. This ensures that
* we only kill the currently playing audio and not audio from other tasks.
* As we are using stickyEventEmitter, even if the event is emitted before the listener is registered,
* the listener will receive the most recent event.
*/
ep.once('playback-start', (evt) => {
this.logger.debug({evt}, 'Play got playback-start');
this.cs.stickyEventEmitter.once('uuid_break', (t) => {
if (t?.taskId === this.taskId) {
this.logger.debug(`Play got kill-playback, executing uuid_break, taskId: ${t?.taskId}`);
this.ep.api('uuid_break', this.ep.uuid).catch((err) => this.logger.info(err, 'Error killing audio'));
this.notifyStatus({event: 'kill-playback'});
}
});
});
if (cs.isInConference) {
const {memberId, confName, confUuid} = cs;
if (Array.isArray(this.url)) {
@@ -87,15 +103,15 @@ class TaskPlay extends Task {
async kill(cs) {
super.kill(cs);
if (this.ep.connected && !this.playComplete) {
if (this.ep?.connected && !this.playComplete) {
this.logger.debug('TaskPlay:kill - killing audio');
if (cs.isInConference) {
const {memberId, confName} = cs;
this.killPlayToConfMember(this.ep, memberId, confName);
}
else {
this.notifyStatus({event: 'kill-playback'});
this.ep.api('uuid_break', this.ep.uuid).catch((err) => this.logger.info(err, 'Error killing audio'));
//this.ep.api('uuid_break', this.ep.uuid).catch((err) => this.logger.info(err, 'Error killing audio'));
cs.stickyEventEmitter.emit('uuid_break', this);
}
}
}

View File

@@ -1,5 +1,8 @@
const Task = require('./task');
const {TaskName} = require('../utils/constants');
const WsRequestor = require('../utils/ws-requestor');
const URL = require('url');
const HttpRequestor = require('../utils/http-requestor');
/**
* Redirects to a new application
@@ -13,6 +16,32 @@ class TaskRedirect extends Task {
async exec(cs) {
await super.exec(cs);
if (cs.requestor instanceof WsRequestor && cs.application.requestor._isAbsoluteUrl(this.actionHook)) {
this.logger.info(`Task:performAction redirecting to ${this.actionHook}, requires new ws connection`);
try {
this.cs.requestor.close();
const requestor = new WsRequestor(this.logger, cs.accountSid, {url: this.actionHook}, this.webhook_secret) ;
this.cs.application.requestor = requestor;
} catch (err) {
this.logger.info(err, `Task:performAction error redirecting to ${this.actionHook}`);
}
} else if (cs.application.requestor._isAbsoluteUrl(this.actionHook)) {
const baseUrl = this.cs.application.requestor.baseUrl;
const newUrl = URL.parse(this.actionHook);
const newBaseUrl = newUrl.protocol + '//' + newUrl.host;
if (baseUrl != newBaseUrl) {
try {
this.logger.info(`Task:redirect updating base url to ${newBaseUrl}`);
const newRequestor = new HttpRequestor(this.logger, cs.accountSid, {url: this.actionHook},
cs.accountInfo.account.webhook_secret);
this.cs.requestor.removeAllListeners();
this.cs.application.requestor = newRequestor;
} catch (err) {
this.logger.info(err, `Task:redirect error updating base url to ${this.actionHook}`);
}
}
}
await this.performAction();
}
}

View File

@@ -12,6 +12,7 @@ class TaskRestDial extends Task {
this.from = this.data.from;
this.callerName = this.data.callerName;
this.timeLimit = this.data.timeLimit;
this.fromHost = this.data.fromHost;
this.to = this.data.to;
this.call_hook = this.data.call_hook;
@@ -66,6 +67,9 @@ class TaskRestDial extends Task {
const cs = this.callSession;
cs.setDialog(dlg);
cs.referHook = this.referHook;
if (this.timeLimit) {
cs.startMaxCallDurationTimer(this.timeLimit);
}
this.logger.debug('TaskRestDial:_onConnect - call connected');
if (this.sipRequestWithinDialogHook) this._initSipRequestWithinDialogHandler(cs, dlg);
try {

View File

@@ -107,7 +107,7 @@ class TaskSay extends TtsTask {
throw new SpeechCredentialError(
`No text-to-speech service credentials for ${vendor} with labels: ${label} have been configured`);
}
this.ep = ep;
try {
await this.setTtsStreamingChannelVars(vendor, language, voice, credentials, ep);
@@ -213,7 +213,7 @@ class TaskSay extends TtsTask {
ep.once('playback-start', (evt) => {
this.logger.debug({evt}, 'Say got playback-start');
if (this.otelSpan) {
this._addStreamingTtsAttributes(this.otelSpan, evt);
this._addStreamingTtsAttributes(this.otelSpan, evt, vendor);
this.otelSpan.end();
this.otelSpan = null;
if (evt.variable_tts_cache_filename) {
@@ -223,7 +223,19 @@ class TaskSay extends TtsTask {
});
ep.once('playback-stop', (evt) => {
this.logger.debug({evt}, 'Say got playback-stop');
if (evt.variable_tts_error) {
this.notifyStatus({event: 'stop-playback'});
this.notifiedPlayBackStop = true;
const tts_error = evt.variable_tts_error;
let response_code = 200;
// Check if any property ends with _response_code
for (const [key, value] of Object.entries(evt)) {
if (key.endsWith('_response_code')) {
response_code = parseInt(value, 10) || 200;
break;
}
}
if (tts_error) {
writeAlerts({
account_sid,
alert_type: AlertType.TTS_FAILURE,
@@ -232,7 +244,7 @@ class TaskSay extends TtsTask {
target_sid
}).catch((err) => this.logger.info({err}, 'Error generating alert for no tts'));
}
if (evt.variable_tts_cache_filename && !this.killed) {
if (!tts_error && response_code < 300 && evt.variable_tts_cache_filename && !this.killed) {
const text = parseTextFromSayString(this.text[segment]);
addFileToCache(evt.variable_tts_cache_filename, {
account_sid,
@@ -240,12 +252,15 @@ class TaskSay extends TtsTask {
language,
voice,
engine,
text
model: this.model || this.model_id,
text,
instructions: this.instructions
}).catch((err) => this.logger.info({err}, 'Error adding file to cache'));
}
if (this._playResolve) {
evt.variable_tts_error ? this._playReject(new Error(evt.variable_tts_error)) : this._playResolve();
(tts_error || response_code >= 300) ? this._playReject(new Error(evt.variable_tts_error)) :
this._playResolve();
}
});
// wait for playback-stop event received to confirm if the playback is successful
@@ -291,8 +306,13 @@ class TaskSay extends TtsTask {
if (cs.isInConference) {
const {memberId, confName} = cs;
this.killPlayToConfMember(this.ep, memberId, confName);
}
else {
} else if (this.isStreamingTts) {
this.logger.debug('TaskSay:kill - clearing TTS stream for streaming audio');
cs.clearTtsStream();
} else {
if (!this.notifiedPlayBackStop) {
this.notifyStatus({event: 'stop-playback'});
}
this.notifyStatus({event: 'kill-playback'});
this.ep.api('uuid_break', this.ep.uuid);
}
@@ -307,12 +327,13 @@ class TaskSay extends TtsTask {
this.notifyTaskDone();
}
_addStreamingTtsAttributes(span, evt) {
_addStreamingTtsAttributes(span, evt, vendor) {
const attrs = {'tts.cached': false};
for (const [key, value] of Object.entries(evt)) {
if (key.startsWith('variable_tts_')) {
let newKey = key.substring('variable_tts_'.length)
.replace('whisper_', 'whisper.')
.replace('nvidia_', 'nvidia.')
.replace('deepgram_', 'deepgram.')
.replace('playht_', 'playht.')
.replace('cartesia_', 'cartesia.')
@@ -321,6 +342,9 @@ class TaskSay extends TtsTask {
.replace('elevenlabs_', 'elevenlabs.');
if (spanMapping[newKey]) newKey = spanMapping[newKey];
attrs[newKey] = value;
if (key === 'variable_tts_time_to_first_byte_ms' && value) {
this.cs.srf.locals.stats.histogram('tts.response_time', value, [`vendor:${vendor}`]);
}
}
}
delete attrs['cache_filename']; //no value in adding this to the span

View File

@@ -18,6 +18,11 @@ class TaskSipDecline extends Task {
super.exec(cs);
res.send(this.data.status, this.data.reason, {
headers: this.headers
}, (err) => {
if (!err) {
// Call was successfully declined
cs._callReleased();
}
});
cs.emit('callStatusChange', {
callStatus: CallStatus.Failed,

View File

@@ -5,6 +5,30 @@ const { TaskPreconditions, CobaltTranscriptionEvents } = require('../utils/const
const { SpeechCredentialError } = require('../utils/error');
const {JAMBONES_AWS_TRANSCRIBE_USE_GRPC} = require('../config');
/**
 * Extracts an optional turn count from a "{{turns[:N]}}" placeholder and
 * normalizes the placeholder, e.g.:
 *   processTurnString("Please insert turns here: {{turns:4}}")
 *   // -> { processed: 'Please insert turns here: {{turns}}', turns: 4 }
 *   processTurnString("Please insert turns here: {{turns}}")
 *   // -> { processed: 'Please insert turns here: {{turns}}', turns: null }
 */
/* Find an optional "{{turns}}" / "{{turns:N}}" placeholder, returning the
 * input with the placeholder normalized to "{{turns}}" plus the parsed
 * count (null when absent or unspecified). */
const processTurnString = (input) => {
  const TURNS_RE = /\{\{turns(?::(\d+))?\}\}/;
  const m = TURNS_RE.exec(input);
  if (m === null) {
    return {
      processed: input,
      turns: null
    };
  }
  return {
    processed: input.replace(TURNS_RE, '{{turns}}'),
    turns: m[1] ? Number.parseInt(m[1], 10) : null
  };
};
class SttTask extends Task {
constructor(logger, data, parentTask) {
@@ -290,6 +314,57 @@ class SttTask extends Task {
});
}
formatOpenAIPrompt(cs, {prompt, hintsTemplate, conversationHistoryTemplate, hints}) {
let conversationHistoryPrompt, hintsPrompt;
/* generate conversation history from template */
if (conversationHistoryTemplate) {
const {processed, turns} = processTurnString(conversationHistoryTemplate);
this.logger.debug({processed, turns}, 'SttTask: processed conversation history template');
conversationHistoryPrompt = cs.getFormattedConversation(turns || 4);
//this.logger.debug({conversationHistoryPrompt}, 'SttTask: conversation history');
if (conversationHistoryPrompt) {
conversationHistoryPrompt = processed.replace('{{turns}}', `\n${conversationHistoryPrompt}\nuser: `);
}
}
/* generate hints from template */
if (hintsTemplate && Array.isArray(hints) && hints.length > 0) {
hintsPrompt = hintsTemplate.replace('{{hints}}', hints);
}
/* combine into final prompt */
let finalPrompt = prompt || '';
if (hintsPrompt) {
finalPrompt = `${finalPrompt}\n${hintsPrompt}`;
}
if (conversationHistoryPrompt) {
finalPrompt = `${finalPrompt}\n${conversationHistoryPrompt}`;
}
this.logger.debug({
finalPrompt,
hints,
hintsPrompt,
conversationHistoryTemplate,
conversationHistoryPrompt
}, 'SttTask: formatted OpenAI prompt');
return finalPrompt?.trimStart();
}
/* some STT engines will keep listening after a final response, so no need to restart */
doesVendorContinueListeningAfterFinalTranscript(vendor) {
return (vendor.startsWith('custom:') || [
'soniox',
'aws',
'microsoft',
'deepgram',
'google',
'speechmatics',
'openai',
].includes(vendor));
}
_onCompileContext(ep, key, evt) {
const {addKey} = this.cs.srf.locals.dbHelpers;
this.logger.debug({evt}, `received cobalt compile context event, will cache under ${key}`);

View File

@@ -1,5 +1,5 @@
const Emitter = require('events');
const uuidv4 = require('uuid-random');
const crypto = require('crypto');
const {TaskPreconditions} = require('../utils/constants');
const { normalizeJambones } = require('@jambonz/verb-specifications');
const WsRequestor = require('../utils/ws-requestor');
@@ -19,6 +19,7 @@ class Task extends Emitter {
this.data = data;
this.actionHook = this.data.actionHook;
this.id = data.id;
this.taskId = crypto.randomUUID();
this._killInProgress = false;
this._completionPromise = new Promise((resolve) => this._completionResolver = resolve);
@@ -272,7 +273,7 @@ class Task extends Emitter {
}
async transferCallToFeatureServer(cs, sipAddress, opts) {
const uuid = uuidv4();
const uuid = crypto.randomUUID();
const {addKey} = cs.srf.locals.dbHelpers;
const obj = Object.assign({}, cs.application);
delete obj.requestor;

View File

@@ -13,6 +13,8 @@ const {
JambonzTranscriptionEvents,
TranscribeStatus,
AssemblyAiTranscriptionEvents,
VoxistTranscriptionEvents,
OpenAITranscriptionEvents,
VerbioTranscriptionEvents,
SpeechmaticsTranscriptionEvents
} = require('../utils/constants.json');
@@ -29,7 +31,6 @@ class TaskTranscribe extends SttTask {
this.transcriptionHook = this.data.transcriptionHook;
this.translationHook = this.data.translationHook;
this.earlyMedia = this.data.earlyMedia === true || (parentTask && parentTask.earlyMedia);
if (this.data.recognizer) {
this.interim = !!this.data.recognizer.interim;
this.separateRecognitionPerChannel = this.data.recognizer.separateRecognitionPerChannel;
@@ -104,7 +105,7 @@ class TaskTranscribe extends SttTask {
if (cs.hasGlobalSttHints) {
const {hints, hintsBoost} = cs.globalSttHints;
this.data.recognizer.hints = this.data.recognizer.hints.concat(hints);
this.data.recognizer.hints = this.data.recognizer?.hints?.concat(hints);
if (!this.data.recognizer.hintsBoost && hintsBoost) this.data.recognizer.hintsBoost = hintsBoost;
this.logger.debug({hints: this.data.recognizer.hints, hintsBoost: this.data.recognizer.hintsBoost},
'Transcribe:exec - applying global sttHints');
@@ -300,6 +301,17 @@ class TaskTranscribe extends SttTask {
this._onVendorConnectFailure.bind(this, cs, ep, channel));
break;
case 'voxist':
this.bugname = `${this.bugname_prefix}voxist_transcribe`;
this.addCustomEventListener(ep, VoxistTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
this.addCustomEventListener(ep,
VoxistTranscriptionEvents.Connect, this._onVendorConnect.bind(this, cs, ep));
this.addCustomEventListener(ep, VoxistTranscriptionEvents.Error, this._onVendorError.bind(this, cs, ep));
this.addCustomEventListener(ep, VoxistTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep, channel));
break;
case 'speechmatics':
this.bugname = `${this.bugname_prefix}speechmatics_transcribe`;
this.addCustomEventListener(
@@ -318,6 +330,20 @@ class TaskTranscribe extends SttTask {
this._onSpeechmaticsError.bind(this, cs, ep));
break;
case 'openai':
this.bugname = `${this.bugname_prefix}openai_transcribe`;
this.addCustomEventListener(
ep, OpenAITranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep, channel));
this.addCustomEventListener(ep, OpenAITranscriptionEvents.Connect,
this._onVendorConnect.bind(this, cs, ep));
this.addCustomEventListener(ep, OpenAITranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep));
this.addCustomEventListener(ep, OpenAITranscriptionEvents.Error,
this._onOpenAIErrror.bind(this, cs, ep));
this.modelSupportsConversationTracking = opts.OPENAI_MODEL !== 'whisper-1';
break;
default:
if (this.vendor.startsWith('custom:')) {
this.bugname = `${this.bugname_prefix}${this.vendor}_transcribe`;
@@ -353,6 +379,25 @@ class TaskTranscribe extends SttTask {
async _transcribe(ep) {
this.logger.debug(
`TaskTranscribe:_transcribe - starting transcription vendor ${this.vendor} bugname ${this.bugname}`);
/* special feature for openai: we can provide a prompt that includes recent conversation history */
let prompt;
if (this.vendor === 'openai') {
if (this.modelSupportsConversationTracking) {
prompt = this.formatOpenAIPrompt(this.cs, {
prompt: this.data.recognizer?.openaiOptions?.prompt,
hintsTemplate: this.data.recognizer?.openaiOptions?.promptTemplates?.hintsTemplate,
// eslint-disable-next-line max-len
conversationHistoryTemplate: this.data.recognizer?.openaiOptions?.promptTemplates?.conversationHistoryTemplate,
hints: this.data.recognizer?.hints,
});
this.logger.debug({prompt}, 'Gather:_startTranscribing - created an openai prompt');
}
else if (this.data.recognizer?.hints?.length > 0) {
prompt = this.data.recognizer?.hints.join(', ');
}
}
await ep.startTranscription({
vendor: this.vendor,
interim: this.interim ? true : false,
@@ -444,8 +489,9 @@ class TaskTranscribe extends SttTask {
this._startAsrTimer(channel);
/* some STT engines will keep listening after a final response, so no need to restart */
if (!['soniox', 'aws', 'microsoft', 'deepgram', 'google', 'speechmatics']
.includes(this.vendor)) this._startTranscribing(cs, ep, channel);
if (!this.doesVendorContinueListeningAfterFinalTranscript(this.vendor)) {
this._startTranscribing(cs, ep, channel);
}
}
else {
if (this.vendor === 'soniox') {
@@ -468,9 +514,7 @@ class TaskTranscribe extends SttTask {
this.logger.debug({evt}, 'TaskTranscribe:_onTranscription - sending final transcript');
this._resolve(channel, evt);
/* some STT engines will keep listening after a final response, so no need to restart */
if (!['soniox', 'aws', 'microsoft', 'deepgram', 'google', 'speechmatics'].includes(this.vendor) &&
!this.vendor.startsWith('custom:')) {
if (!this.doesVendorContinueListeningAfterFinalTranscript(this.vendor)) {
this.logger.debug('TaskTranscribe:_onTranscription - restarting transcribe');
this._startTranscribing(cs, ep, channel);
}
@@ -609,12 +653,21 @@ class TaskTranscribe extends SttTask {
}
_onMaxDurationExceeded(cs, ep, channel) {
this.logger.debug(`TaskTranscribe:_onMaxDurationExceeded on channel ${channel}`);
this.restartDueToError(ep, channel, 'Max duration exceeded');
}
_onMaxBufferExceeded(cs, ep, channel) {
this.restartDueToError(ep, channel, 'Max buffer exceeded');
}
restartDueToError(ep, channel, reason) {
this.logger.debug(`TaskTranscribe:${reason} on channel ${channel}`);
if (this.paused) return;
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
this.childSpan[channel - 1].span.setAttributes({
channel,
'stt.resolve': 'max duration exceeded',
'stt.resolve': reason,
'stt.label': this.label || 'None',
});
this.childSpan[channel - 1].span.end();
@@ -671,6 +724,14 @@ class TaskTranscribe extends SttTask {
return;
}
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError');
if (this.vendor === 'microsoft' &&
evt.error?.includes('Due to service inactivity, the client buffer exceeded maximum size. Resetting the buffer')) {
let channel = 1;
if (this.ep !== _ep) {
channel = 2;
}
return this._onMaxBufferExceeded(cs, _ep, channel);
}
if (this.paused) return;
const {writeAlerts, AlertType} = cs.srf.locals;
@@ -721,6 +782,12 @@ class TaskTranscribe extends SttTask {
this._onVendorError(cs, _ep, {error: JSON.stringify(e)});
}
async _onOpenAIErrror(cs, _ep, evt) {
// eslint-disable-next-line no-unused-vars
const {message, ...e} = evt;
this._onVendorError(cs, _ep, {error: JSON.stringify(e)});
}
_startAsrTimer(channel) {
if (this.vendor === 'deepgram') return; // no need
assert(this.isContinuousAsr);

View File

@@ -21,6 +21,7 @@ class TtsTask extends Task {
this.synthesizer = this.data.synthesizer || {};
this.disableTtsCache = this.data.disableTtsCache;
this.options = this.synthesizer.options || {};
this.instructions = this.data.instructions;
}
async exec(cs) {
@@ -42,6 +43,11 @@ class TtsTask extends Task {
}
}
}
const fullText = Array.isArray(this.text) ? this.text.join(' ') : this.text;
if (fullText.length > 0) {
cs.emit('botSaid', fullText);
}
}
getTtsVendorData(cs) {
@@ -59,8 +65,7 @@ class TtsTask extends Task {
}
async setTtsStreamingChannelVars(vendor, language, voice, credentials, ep) {
const {api_key, model_id} = credentials;
const {stability, similarity_boost, use_speaker_boost, style} = this.options;
const {api_key, model_id, custom_tts_streaming_url, auth_token} = credentials;
let obj;
this.logger.debug({credentials},
@@ -82,6 +87,7 @@ class TtsTask extends Task {
};
break;
case 'elevenlabs':
const {stability, similarity_boost, use_speaker_boost, style, speed} = this.options.voice_settings || {};
obj = {
ELEVENLABS_API_KEY: api_key,
ELEVENLABS_TTS_STREAMING_MODEL_ID: model_id,
@@ -91,13 +97,48 @@ class TtsTask extends Task {
...(stability && {ELEVENLABS_TTS_STREAMING_VOICE_SETTINGS_STABILITY: stability}),
...(similarity_boost && {ELEVENLABS_TTS_STREAMING_VOICE_SETTINGS_SIMILARITY_BOOST: similarity_boost}),
...(use_speaker_boost && {ELEVENLABS_TTS_STREAMING_VOICE_SETTINGS_USE_SPEAKER_BOOST: use_speaker_boost}),
...(style && {ELEVENLABS_TTS_STREAMING_VOICE_SETTINGS_STYLE: style})
...(style && {ELEVENLABS_TTS_STREAMING_VOICE_SETTINGS_STYLE: style}),
// speed has value 0.7 to 1.2, 1.0 is default; make sure we send the value even if it's 0
...(speed !== null && speed !== undefined && {ELEVENLABS_TTS_STREAMING_VOICE_SETTINGS_SPEED: `${speed}`}),
...(this.options.pronunciation_dictionary_locators &&
Array.isArray(this.options.pronunciation_dictionary_locators) && {
ELEVENLABS_TTS_STREAMING_PRONUNCIATION_DICTIONARY_LOCATORS:
JSON.stringify(this.options.pronunciation_dictionary_locators)
}),
};
break;
case 'rimelabs':
const {
pauseBetweenBrackets, phonemizeBetweenBrackets, inlineSpeedAlpha, speedAlpha, reduceLatency
} = this.options;
obj = {
RIMELABS_API_KEY: api_key,
RIMELABS_TTS_STREAMING_MODEL_ID: model_id,
RIMELABS_TTS_STREAMING_VOICE_ID: voice,
RIMELABS_TTS_STREAMING_LANGUAGE: language || 'en',
...(pauseBetweenBrackets && {RIMELABS_TTS_STREAMING_PAUSE_BETWEEN_BRACKETS: pauseBetweenBrackets}),
...(phonemizeBetweenBrackets &&
{RIMELABS_TTS_STREAMING_PHONEMIZE_BETWEEN_BRACKETS: phonemizeBetweenBrackets}),
...(inlineSpeedAlpha && {RIMELABS_TTS_STREAMING_INLINE_SPEED_ALPHA: inlineSpeedAlpha}),
...(speedAlpha && {RIMELABS_TTS_STREAMING_SPEED_ALPHA: speedAlpha}),
...(reduceLatency && {RIMELABS_TTS_STREAMING_REDUCE_LATENCY: reduceLatency})
};
break;
default:
throw new Error(`vendor ${vendor} is not supported for tts streaming yet`);
if (vendor.startsWith('custom:')) {
const use_tls = custom_tts_streaming_url.startsWith('wss://');
obj = {
CUSTOM_TTS_STREAMING_HOST: custom_tts_streaming_url.replace(/^(ws|wss):\/\//, ''),
CUSTOM_TTS_STREAMING_API_KEY: auth_token,
CUSTOM_TTS_STREAMING_VOICE_ID: voice,
CUSTOM_TTS_STREAMING_LANGUAGE: language || 'en',
CUSTOM_TTS_STREAMING_USE_TLS: use_tls
};
} else {
throw new Error(`vendor ${vendor} is not supported for tts streaming yet`);
}
}
this.logger.info({vendor, credentials, obj}, 'setTtsStreamingChannelVars');
this.logger.debug({vendor, credentials, obj}, 'setTtsStreamingChannelVars');
await ep.set(obj);
}
@@ -115,15 +156,14 @@ class TtsTask extends Task {
`No text-to-speech service credentials for ${vendor} with labels: ${label} have been configured`);
}
/* parse Nuance voices into name and model */
let model;
if (vendor === 'nuance' && voice) {
const arr = /([A-Za-z-]*)\s+-\s+(enhanced|standard)/.exec(voice);
if (arr) {
voice = arr[1];
model = arr[2];
this.model = arr[2];
}
} else if (vendor === 'deepgram') {
model = voice;
this.model = voice;
}
/* allow for microsoft custom region voice and api_key to be specified as an override */
@@ -165,8 +205,12 @@ class TtsTask extends Task {
};
}
}
} else if (vendor === 'cartesia') {
credentials.model_id = this.options.model_id || credentials.model_id;
}
this.model_id = credentials.model_id;
/**
* note on cache_speech_handles. This was found to be risky.
* It can cause a crash in the following sequence on a single call:
@@ -187,7 +231,8 @@ class TtsTask extends Task {
// If vendor is changed from the previous one, then reset the cache_speech_handles flag
//cs.currentTtsVendor = vendor;
if (!preCache && !this._disableTracing) this.logger.info({vendor, language, voice, model}, 'TaskSay:exec');
if (!preCache && !this._disableTracing)
this.logger.debug({vendor, language, voice, model: this.model}, 'TaskSay:exec');
try {
if (!credentials) {
writeAlerts({
@@ -218,11 +263,12 @@ class TtsTask extends Task {
const {filePath, servedFromCache, rtt} = await synthAudio(stats, {
account_sid,
text,
instructions: this.instructions,
vendor,
language,
voice,
engine,
model,
model: this.model,
salt,
credentials,
options: this.options,

View File

@@ -45,6 +45,7 @@ if (VMD_HINTS_FILE) {
});
}
class Amd extends Emitter {
constructor(logger, cs, opts) {
super();
@@ -68,6 +69,8 @@ class Amd extends Emitter {
this.getIbmAccessToken = getIbmAccessToken;
const {setChannelVarsForStt} = require('./transcription-utils')(logger);
this.setChannelVarsForStt = setChannelVarsForStt;
this.digitCount = opts.digitCount || 0;
this.numberRegEx = RegExp(`[0-9]{${this.digitCount}}`);
const {
noSpeechTimeoutMs = 5000,
@@ -163,6 +166,14 @@ class Amd extends Emitter {
language: t.language_code
});
}
else if (this.digitCount != 0 && this.numberRegEx.test(t.alternatives[0].transcript)) {
/* a string of numbers is typically a machine */
this.emit(this.decision = AmdEvents.MachineDetected, {
reason: 'digit count',
greeting: t.alternatives[0].transcript,
language: t.language_code
});
}
else if (final && wordCount < this.thresholdWordCount) {
/* a short greeting is typically a human */
this.emit(this.decision = AmdEvents.HumanDetected, {

View File

@@ -4,7 +4,7 @@ const assert = require('assert');
const {
AWS_REGION,
AWS_SNS_PORT: PORT,
AWS_SNS_TOPIC_ARM,
AWS_SNS_TOPIC_ARN,
AWS_SNS_PORT_MAX,
} = require('../config');
const {LifeCycleEvents} = require('./constants');
@@ -55,12 +55,12 @@ class SnsNotifier extends Emitter {
async _handlePost(req, res) {
try {
const parsedBody = JSON.parse(req.body);
this.logger.debug({headers: req.headers, body: parsedBody}, 'Received HTTP POST from AWS');
this.logger.info({headers: req.headers, body: parsedBody}, 'Received HTTP POST from AWS');
if (!validatePayload(parsedBody)) {
this.logger.info('incoming AWS SNS HTTP POST failed signature validation');
return res.sendStatus(403);
}
this.logger.debug('incoming HTTP POST passed validation');
this.logger.info('incoming HTTP POST passed validation');
res.sendStatus(200);
switch (parsedBody.Type) {
@@ -74,7 +74,18 @@ class SnsNotifier extends Emitter {
subscriptionRequestId: this.subscriptionRequestId
}, 'response from SNS SubscribeURL');
const data = await this.describeInstance();
this.lifecycleState = data.AutoScalingGroups[0].Instances[0].LifecycleState;
const group = data.AutoScalingGroups.find((group) =>
group.Instances && group.Instances.some((instance) => instance.InstanceId === this.instanceId)
);
if (!group) {
this.logger.error('Current instance not found in any Auto Scaling group', data);
} else {
const instance = group.Instances.find((instance) => instance.InstanceId === this.instanceId);
this.lifecycleState = instance.LifecycleState;
}
//this.lifecycleState = data.AutoScalingGroups[0].Instances[0].LifecycleState;
this.emit('SubscriptionConfirmation', {publicIp: this.publicIp});
break;
@@ -94,7 +105,7 @@ class SnsNotifier extends Emitter {
this.unsubscribe();
}
else {
this.logger.debug(`SnsNotifier - instance ${msg.EC2InstanceId} is scaling in (not us)`);
this.logger.info(`SnsNotifier - instance ${msg.EC2InstanceId} is scaling in (not us)`);
}
}
break;
@@ -111,7 +122,7 @@ class SnsNotifier extends Emitter {
async init() {
try {
this.logger.debug('SnsNotifier: retrieving instance data');
this.logger.info('SnsNotifier: retrieving instance data');
this.instanceId = await getString('http://169.254.169.254/latest/meta-data/instance-id');
this.publicIp = await getString('http://169.254.169.254/latest/meta-data/public-ipv4');
this.logger.info({
@@ -142,13 +153,13 @@ class SnsNotifier extends Emitter {
try {
const params = {
Protocol: 'http',
TopicArn: AWS_SNS_TOPIC_ARM,
TopicArn: AWS_SNS_TOPIC_ARN,
Endpoint: this.snsEndpoint
};
const response = await snsClient.send(new SubscribeCommand(params));
this.logger.info({response}, `response to SNS subscribe to ${AWS_SNS_TOPIC_ARM}`);
this.logger.info({response}, `response to SNS subscribe to ${AWS_SNS_TOPIC_ARN}`);
} catch (err) {
this.logger.error({err}, `Error subscribing to SNS topic arn ${AWS_SNS_TOPIC_ARM}`);
this.logger.error({err}, `Error subscribing to SNS topic arn ${AWS_SNS_TOPIC_ARN}`);
}
}
@@ -159,9 +170,9 @@ class SnsNotifier extends Emitter {
SubscriptionArn: this.subscriptionArn
};
const response = await snsClient.send(new UnsubscribeCommand(params));
this.logger.info({response}, `response to SNS unsubscribe to ${AWS_SNS_TOPIC_ARM}`);
this.logger.info({response}, `response to SNS unsubscribe to ${AWS_SNS_TOPIC_ARN}`);
} catch (err) {
this.logger.error({err}, `Error unsubscribing to SNS topic arn ${AWS_SNS_TOPIC_ARM}`);
this.logger.error({err}, `Error unsubscribing to SNS topic arn ${AWS_SNS_TOPIC_ARN}`);
}
}

View File

@@ -103,6 +103,7 @@ class BackgroundTaskManager extends Emitter {
async _initBargeIn(opts) {
let task;
try {
const copy = JSON.parse(JSON.stringify(opts));
const t = normalizeJambones(this.logger, [opts]);
task = makeTask(this.logger, t[0]);
task
@@ -121,7 +122,7 @@ class BackgroundTaskManager extends Emitter {
if (task.sticky && !this.cs.callGone && !this.cs._stopping) {
this.logger.info('BackgroundTaskManager:_initBargeIn: restarting background bargeIn');
this._bargeInHandled = false;
this.newTask('bargeIn', opts, true);
this.newTask('bargeIn', copy, true);
}
return;
})

View File

@@ -1,6 +1,7 @@
const assert = require('assert');
const Emitter = require('events');
const crypto = require('crypto');
const parseUrl = require('parse-url');
const timeSeries = require('@jambonz/time-series');
const {NODE_ENV, JAMBONES_TIME_SERIES_HOST} = require('../config');
let alerter ;
@@ -21,6 +22,10 @@ class BaseRequestor extends Emitter {
const {stats} = require('../../').srf.locals;
this.stats = stats;
const u = this._parsedUrl = parseUrl(this.url);
if (u.port) this._baseUrl = `${u.protocol}://${u.resource}:${u.port}`;
else this._baseUrl = `${u.protocol}://${u.resource}`;
if (!alerter) {
alerter = timeSeries(logger, {
host: JAMBONES_TIME_SERIES_HOST,
@@ -30,6 +35,10 @@ class BaseRequestor extends Emitter {
}
}
get baseUrl() {
return this._baseUrl;
}
get Alerter() {
return alerter;
}

View File

@@ -28,10 +28,11 @@
"SipRedirect": "sip:redirect",
"Say": "say",
"SayLegacy": "say:legacy",
"Stream": "stream",
"Tag": "tag",
"Transcribe": "transcribe"
},
"AllowedSipRecVerbs": ["answer", "config", "gather", "transcribe", "listen", "tag"],
"AllowedSipRecVerbs": ["answer", "config", "gather", "transcribe", "listen", "tag", "hangup", "sip:decline"],
"AllowedConfirmSessionVerbs": ["config", "gather", "plays", "say", "tag"],
"CallStatus": {
"Trying": "trying",
@@ -136,6 +137,18 @@
"Connect": "speechmatics_transcribe::connect",
"Error": "speechmatics_transcribe::error"
},
"OpenAITranscriptionEvents": {
"Transcription": "openai_transcribe::transcription",
"Translation": "openai_transcribe::translation",
"SpeechStarted": "openai_transcribe::speech_started",
"SpeechStopped": "openai_transcribe::speech_stopped",
"PartialTranscript": "openai_transcribe::partial_transcript",
"Info": "openai_transcribe::info",
"RecognitionStarted": "openai_transcribe::recognition_started",
"ConnectFailure": "openai_transcribe::connect_failed",
"Connect": "openai_transcribe::connect",
"Error": "openai_transcribe::error"
},
"JambonzTranscriptionEvents": {
"Transcription": "jambonz_transcribe::transcription",
"ConnectFailure": "jambonz_transcribe::connect_failed",
@@ -148,6 +161,12 @@
"ConnectFailure": "assemblyai_transcribe::connect_failed",
"Connect": "assemblyai_transcribe::connect"
},
"VoxistTranscriptionEvents": {
"Transcription": "voxist_transcribe::transcription",
"Error": "voxist_transcribe::error",
"ConnectFailure": "voxist_transcribe::connect_failed",
"Connect": "voxist_transcribe::connect"
},
"VadDetection": {
"Detection": "vad_detect:detection"
},
@@ -175,6 +194,20 @@
"Disconnect": "openai_s2s::disconnect",
"ServerEvent": "openai_s2s::server_event"
},
"LlmEvents_Google": {
"Error": "error",
"Connect": "google_s2s::connect",
"ConnectFailure": "google_s2s::connect_failed",
"Disconnect": "google_s2s::disconnect",
"ServerEvent": "google_s2s::server_event"
},
"LlmEvents_Elevenlabs": {
"Error": "error",
"Connect": "elevenlabs_s2s::connect",
"ConnectFailure": "elevenlabs_s2s::connect_failed",
"Disconnect": "elevenlabs_s2s::disconnect",
"ServerEvent": "elevenlabs_s2s::server_event"
},
"LlmEvents_VoiceAgent": {
"Error": "error",
"Connect": "voice_agent_s2s::connect",
@@ -182,6 +215,13 @@
"Disconnect": "voice_agent_s2s::disconnect",
"ServerEvent": "voice_agent_s2s::server_event"
},
"LlmEvents_Ultravox": {
"Error": "error",
"Connect": "ultravox_s2s::connect",
"ConnectFailure": "ultravox_s2s::connect_failed",
"Disconnect": "ultravox_s2s::disconnect",
"ServerEvent": "ultravox_s2s::server_event"
},
"QueueResults": {
"Bridged": "bridged",
"Error": "error",
@@ -251,6 +291,16 @@
"ConnectFailure": "elevenlabs_tts_streaming::connect_failed",
"Connect": "elevenlabs_tts_streaming::connect"
},
"RimelabsTtsStreamingEvents": {
"Empty": "rimelabs_tts_streaming::empty",
"ConnectFailure": "rimelabs_tts_streaming::connect_failed",
"Connect": "rimelabs_tts_streaming::connect"
},
"CustomTtsStreamingEvents": {
"Empty": "custom_tts_streaming::empty",
"ConnectFailure": "custom_tts_streaming::connect_failed",
"Connect": "custom_tts_streaming::connect"
},
"TtsStreamingEvents": {
"Empty": "tts_streaming::empty",
"Pause": "tts_streaming::pause",
@@ -271,5 +321,9 @@
"Offline": "OFFLINE",
"GracefulShutdownInProgress":"SHUTDOWN_IN_PROGRESS"
},
"FEATURE_SERVER" : "feature-server"
"FEATURE_SERVER" : "feature-server",
"WS_CLOSE_CODES": {
"NormalClosure": 1000,
"GoingAway": 1001
}
}

View File

@@ -122,6 +122,10 @@ const speechMapper = (cred) => {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
}
else if ('voxist' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
}
else if ('whisper' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
@@ -138,11 +142,17 @@ const speechMapper = (cred) => {
obj.api_key = o.api_key;
obj.speechmatics_stt_uri = o.speechmatics_stt_uri;
}
else if ('openai' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
obj.model_id = o.model_id;
}
else if (obj.vendor.startsWith('custom:')) {
const o = JSON.parse(decrypt(credential));
obj.auth_token = o.auth_token;
obj.custom_stt_url = o.custom_stt_url;
obj.custom_tts_url = o.custom_tts_url;
obj.custom_tts_streaming_url = o.custom_tts_streaming_url;
}
} catch (err) {
console.log(err);

View File

@@ -17,8 +17,17 @@ class PlayFileNotFoundError extends NonFatalTaskError {
}
}
/**
 * Error thrown when an outbound HTTP request (e.g. a webhook) returns a
 * status code outside the accepted set.  Carries the offending status code
 * so callers can branch on it (see HttpRequestor, which checks
 * `err.statusCode` rather than the message).
 */
class HTTPResponseError extends Error {
  /**
   * @param {number} statusCode - the unexpected HTTP status code received
   */
  constructor(statusCode) {
    super('Unexpected HTTP Response');
    // identify the subclass in logs instead of the generic "Error"
    this.name = 'HTTPResponseError';
    // NOTE(review): the stack is deliberately discarded, presumably to keep
    // logged/serialized errors compact -- confirm before relying on it
    delete this.stack;
    this.statusCode = statusCode;
  }
}
module.exports = {
SpeechCredentialError,
NonFatalTaskError,
PlayFileNotFoundError
PlayFileNotFoundError,
HTTPResponseError
};

5
lib/utils/helpers.js Normal file
View File

@@ -0,0 +1,5 @@
// Promise-based setTimeout: resolves (with undefined) after `ms` milliseconds.
const sleepFor = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
module.exports = {
sleepFor
};

View File

@@ -16,6 +16,7 @@ const {
NODE_ENV,
HTTP_USER_AGENT_HEADER,
} = require('../config');
const {HTTPResponseError} = require('./error');
const toBase64 = (str) => Buffer.from(str || '', 'utf8').toString('base64');
@@ -47,8 +48,6 @@ class HttpRequestor extends BaseRequestor {
assert(['GET', 'POST'].includes(this.method));
const u = this._parsedUrl = parseUrl(this.url);
if (u.port) this._baseUrl = `${u.protocol}://${u.resource}:${u.port}`;
else this._baseUrl = `${u.protocol}://${u.resource}`;
this._protocol = u.protocol;
this._resource = u.resource;
this._port = u.port;
@@ -56,18 +55,18 @@ class HttpRequestor extends BaseRequestor {
this._usePools = HTTP_POOL && parseInt(HTTP_POOL);
if (this._usePools) {
if (pools.has(this._baseUrl)) {
this.client = pools.get(this._baseUrl);
if (pools.has(this.baseUrl)) {
this.client = pools.get(this.baseUrl);
}
else {
const connections = HTTP_POOLSIZE ? parseInt(HTTP_POOLSIZE) : 10;
const pipelining = HTTP_PIPELINING ? parseInt(HTTP_PIPELINING) : 1;
const pool = this.client = new Pool(this._baseUrl, {
const pool = this.client = new Pool(this.baseUrl, {
connections,
pipelining
});
pools.set(this._baseUrl, pool);
this.logger.debug(`HttpRequestor:created pool for ${this._baseUrl}`);
pools.set(this.baseUrl, pool);
this.logger.debug(`HttpRequestor:created pool for ${this.baseUrl}`);
}
}
else {
@@ -88,10 +87,6 @@ class HttpRequestor extends BaseRequestor {
}
}
get baseUrl() {
return this._baseUrl;
}
close() {
if (!this._usePools && !this.client?.closed) this.client.close();
}
@@ -113,7 +108,7 @@ class HttpRequestor extends BaseRequestor {
assert(HookMsgTypes.includes(type));
const payload = params ? snakeCaseKeys(params, ['customerData', 'sip']) : null;
const payload = params ? snakeCaseKeys(params, ['customerData', 'sip', 'env_vars', 'args']) : null;
const url = hook.url || hook;
const method = hook.method || 'POST';
let buf = '';
@@ -190,8 +185,7 @@ class HttpRequestor extends BaseRequestor {
followRedirects: false
});
if (![200, 202, 204].includes(statusCode)) {
const err = new Error();
err.statusCode = statusCode;
const err = new HTTPResponseError(statusCode);
throw err;
}
if (headers['content-type']?.includes('application/json')) {
@@ -225,7 +219,7 @@ class HttpRequestor extends BaseRequestor {
const rtt = this._roundTrip(startAt);
if (buf) this.stats.histogram('app.hook.response_time', rtt, ['hook_type:app']);
if (buf && Array.isArray(buf)) {
if (buf && (Array.isArray(buf) || type == 'llm:tool-call')) {
this.logger.info({response: buf}, `HttpRequestor:request ${method} ${url} succeeded in ${rtt}ms`);
return buf;
}

View File

@@ -31,18 +31,26 @@ function getLocalIp() {
return '127.0.0.1'; // Fallback to localhost if no suitable interface found
}
function initMS(logger, wrapper, ms) {
function initMS(logger, wrapper, ms, {
onFreeswitchConnect,
onFreeswitchDisconnect
}) {
Object.assign(wrapper, {ms, active: true, connects: 1});
logger.info(`connected to freeswitch at ${ms.address}`);
onFreeswitchConnect(wrapper);
ms.conn
.on('esl::end', () => {
wrapper.active = false;
wrapper.connects = 0;
logger.info(`lost connection to freeswitch at ${ms.address}`);
onFreeswitchDisconnect(wrapper);
ms.removeAllListeners();
})
.on('esl::ready', () => {
if (wrapper.connects > 0) {
logger.info(`connected to freeswitch at ${ms.address}`);
logger.info(`esl::ready connected to freeswitch at ${ms.address}`);
}
wrapper.connects = 1;
wrapper.active = true;
@@ -56,7 +64,10 @@ function initMS(logger, wrapper, ms) {
});
}
function installSrfLocals(srf, logger) {
function installSrfLocals(srf, logger, {
onFreeswitchConnect = () => {},
onFreeswitchDisconnect = () => {}
}) {
logger.debug('installing srf locals');
assert(!srf.locals.dbHelpers);
const {tracer} = srf.locals.otel;
@@ -91,7 +102,10 @@ function installSrfLocals(srf, logger) {
mediaservers.push(val);
try {
const ms = await mrf.connect(fs);
initMS(logger, val, ms);
initMS(logger, val, ms, {
onFreeswitchConnect,
onFreeswitchDisconnect
});
}
catch (err) {
logger.info({err}, `failed connecting to freeswitch at ${fs.address}, will retry shortly: ${err.message}`);
@@ -102,9 +116,15 @@ function installSrfLocals(srf, logger) {
for (const val of mediaservers) {
if (val.connects === 0) {
try {
// make sure all listeners are removed before reconnecting
val.ms?.disconnect();
val.ms = null;
logger.info({mediaserver: val.opts}, 'Retrying initial connection to media server');
const ms = await mrf.connect(val.opts);
initMS(logger, val, ms);
initMS(logger, val, ms, {
onFreeswitchConnect,
onFreeswitchDisconnect
});
} catch (err) {
logger.info({err}, `failed connecting to freeswitch at ${val.opts.address}, will retry shortly`);
}

103
lib/utils/llm-mcp.js Normal file
View File

@@ -0,0 +1,103 @@
const { Client } = require('@modelcontextprotocol/sdk/client/index.js');
/**
 * Manages connections to one or more MCP (Model Context Protocol) servers
 * and exposes their advertised tools for use by the LLM verbs.
 *
 * Call init() before using any other method: it establishes the connection
 * to each configured MCP server and collects the available tools.
 */
class LlmMcpService {
  /**
   * @param {Object} logger - pino-style logger
   * @param {Array<{url: string}>} mcpServers - MCP server descriptors
   */
  constructor(logger, mcpServers) {
    this.logger = logger;
    this.mcpServers = mcpServers || [];
    this.mcpClients = [];
  }

  /**
   * Connect to every configured MCP server (over SSE) and cache its tool
   * list.  A no-op if connections have already been established; servers
   * that fail to connect are logged and skipped.
   */
  async init() {
    if (this.mcpClients.length > 0) return;

    const { SSEClientTransport } = await import('@modelcontextprotocol/sdk/client/sse.js');
    for (const { url } of this.mcpServers) {
      if (!url) continue;
      try {
        const transport = new SSEClientTransport(new URL(url), {});
        const client = new Client({ name: 'Jambonz MCP Client', version: '1.0.0' });
        await client.connect(transport);
        // collect available tools
        const { tools } = await client.listTools();
        this.mcpClients.push({ url, client, tools });
      } catch (err) {
        this.logger.error(`LlmMcpService: Failed to connect to MCP server at ${url}: ${err.message}`);
      }
    }
  }

  /**
   * @returns {Promise<Array>} the tools advertised by all connected servers
   */
  async getAvailableMcpTools() {
    const collected = [];
    for (const { tools } of this.mcpClients) {
      if (tools) collected.push(...tools);
    }
    return collected;
  }

  /**
   * Locate the client whose server advertises a tool with the given name.
   * @returns {Promise<Object|null>} the client, or null if no server has it
   */
  async getMcpClientByToolName(name) {
    const match = this.mcpClients.find(
      ({ tools }) => tools && tools.some((tool) => tool.name === name));
    return match ? match.client : null;
  }

  /**
   * Locate the client whose server advertises a tool with the given id.
   * @returns {Promise<Object|null>} the client, or null if no server has it
   */
  async getMcpClientByToolId(id) {
    const match = this.mcpClients.find(
      ({ tools }) => tools && tools.some((tool) => tool.id === id));
    return match ? match.client : null;
  }

  /**
   * Invoke a named tool on whichever MCP server provides it.
   * Resolves to undefined when no connected server advertises the tool;
   * rethrows (after logging) any error raised by the tool call itself.
   */
  async callMcpTool(name, input) {
    const client = await this.getMcpClientByToolName(name);
    if (!client) return;
    try {
      const result = await client.callTool({
        name,
        arguments: input,
      });
      this.logger.debug({result}, 'LlmMcpService - result');
      return result;
    } catch (err) {
      this.logger.error({err}, 'LlmMcpService - error calling tool');
      throw err;
    }
  }

  /**
   * Close every connected client and reset local state so init() may be
   * called again.
   */
  async close() {
    for (const entry of this.mcpClients) {
      if (entry.client) {
        await entry.client.close();
        this.logger.debug({url: entry.url}, 'LlmMcpService - mcp client closed');
      }
    }
    this.mcpClients = [];
  }
}
module.exports = LlmMcpService;

View File

@@ -12,15 +12,17 @@ const deepcopy = require('deepcopy');
const moment = require('moment');
const stripCodecs = require('./strip-ancillary-codecs');
const RootSpan = require('./call-tracer');
const uuidv4 = require('uuid-random');
const crypto = require('crypto');
const HttpRequestor = require('./http-requestor');
const WsRequestor = require('./ws-requestor');
const {makeOpusFirst} = require('./sdp-utils');
const {makeOpusFirst, removeVideoSdp} = require('./sdp-utils');
const {
JAMBONES_USE_FREESWITCH_TIMER_FD,
JAMBONES_MEDIA_TIMEOUT_MS,
JAMBONES_MEDIA_HOLD_TIMEOUT_MS
JAMBONES_MEDIA_HOLD_TIMEOUT_MS,
JAMBONES_TRANSCRIBE_EP_DESTROY_DELAY_MS
} = require('../config');
const { sleepFor } = require('./helpers');
class SingleDialer extends Emitter {
constructor({logger, sbcAddress, target, opts, application, callInfo, accountInfo, rootSpan, startSpan, dialTask,
@@ -45,7 +47,7 @@ class SingleDialer extends Emitter {
this.callGone = false;
this.callSid = uuidv4();
this.callSid = crypto.randomUUID();
this.dialTask = dialTask;
this.onHoldMusic = onHoldMusic;
@@ -152,15 +154,21 @@ class SingleDialer extends Emitter {
return;
}
let lastSdp;
const connectStream = async(remoteSdp) => {
const connectStream = async(remoteSdp, isVideoCall) => {
if (remoteSdp === lastSdp) return;
if (process.env.JAMBONES_VIDEO_CALLS_ENABLED_IN_FS && !isVideoCall) {
remoteSdp = removeVideoSdp(remoteSdp);
}
lastSdp = remoteSdp;
return this.ep.modify(remoteSdp);
};
let localSdp = this.ep.local.sdp;
if (process.env.JAMBONES_VIDEO_CALLS_ENABLED_IN_FS && !opts.isVideoCall) {
localSdp = removeVideoSdp(localSdp);
}
Object.assign(opts, {
proxy: `sip:${this.sbcAddress}`,
localSdp: opts.opusFirst ? makeOpusFirst(this.ep.local.sdp) : this.ep.local.sdp
localSdp: opts.opusFirst ? makeOpusFirst(localSdp) : localSdp
});
if (this.target.auth) opts.auth = this.target.auth;
inviteSpan = this.startSpan('invite', {
@@ -222,13 +230,13 @@ class SingleDialer extends Emitter {
status.callStatus = CallStatus.EarlyMedia;
this.emit('earlyMedia');
}
connectStream(prov.body);
connectStream(prov.body, opts.isVideoCall);
}
else status.callStatus = CallStatus.Ringing;
this.emit('callStatusChange', status);
}
});
await connectStream(this.dlg.remote.sdp);
await connectStream(this.dlg.remote.sdp, opts.isVideoCall);
this.dlg.callSid = this.callSid;
this.inviteInProgress = null;
this.emit('callStatusChange', {
@@ -271,7 +279,12 @@ class SingleDialer extends Emitter {
this.logger.info('dial is onhold, emit event');
this.emit('reinvite', req, res);
} else {
const newSdp = await this.ep.modify(req.body);
let newSdp = await this.ep.modify(req.body);
// in case of reINVITE if video call is enabled in FS and the call is not a video call,
// remove video media from the SDP
if (process.env.JAMBONES_VIDEO_CALLS_ENABLED_IN_FS && !this.opts?.isVideoCall) {
newSdp = removeVideoSdp(newSdp);
}
res.send(200, {body: newSdp});
this.logger.info({offer: req.body, answer: newSdp}, 'SingleDialer:exec: handling reINVITE');
}
@@ -349,6 +362,33 @@ class SingleDialer extends Emitter {
if (Object.keys(opts).length > 0) {
this.ep.set(opts);
}
if (this.dialTask?.inbandDtmfEnabled && !this.ep.inbandDtmfEnabled) {
// https://developer.signalwire.com/freeswitch/FreeSWITCH-Explained/Modules/mod-dptools/6587132/#0-about
try {
this.ep.execute('start_dtmf');
this.ep.inbandDtmfEnabled = true;
} catch (err) {
this.logger.info(err, 'place-outdial:_configMsEndpoint - error enable inband DTMF');
}
}
const origDestroy = this.ep.destroy.bind(this.ep);
this.ep.destroy = async() => {
try {
if (this.dialTask.transcribeTask && JAMBONES_TRANSCRIBE_EP_DESTROY_DELAY_MS) {
// transcribe task is being used, wait for some time before destroy
// if final transcription is received but endpoint is already closed,
// freeswitch module will not be able to send the transcription
this.logger.info('SingleDialer:_configMsEndpoint -' +
' Dial with transcribe task, wait for some time before destroy');
await sleepFor(JAMBONES_TRANSCRIBE_EP_DESTROY_DELAY_MS);
}
await origDestroy();
} catch (err) {
this.logger.error(err, 'SingleDialer:_configMsEndpoint - error destroying endpoint');
}
};
}
/**
@@ -400,7 +440,10 @@ class SingleDialer extends Emitter {
} catch (err) {
this.logger.debug(err, 'SingleDialer:_executeApp: error');
this.emit('decline');
if (this.dlg.connected) this.dlg.destroy();
if (this.dlg.connected) {
this.dlg.destroy();
this.ep.destroy();
}
}
}
@@ -527,7 +570,8 @@ function placeOutdial({
}) {
const myOpts = deepcopy(opts);
const sd = new SingleDialer({
logger, sbcAddress, target, myOpts, application, callInfo, accountInfo, rootSpan, startSpan, dialTask, onHoldMusic
logger, sbcAddress, target, opts: myOpts, application, callInfo,
accountInfo, rootSpan, startSpan, dialTask, onHoldMusic
});
sd.exec(srf, ms, myOpts);
return sd;

View File

@@ -1,5 +1,5 @@
const assert = require('assert');
const uuidv4 = require('uuid-random');
const crypto = require('crypto');
const {LifeCycleEvents, FS_UUID_SET_NAME} = require('./constants');
const Emitter = require('events');
const debug = require('debug')('jambonz:feature-server');
@@ -8,7 +8,7 @@ const {
JAMBONES_SBCS,
K8S,
K8S_SBC_SIP_SERVICE_NAME,
AWS_SNS_TOPIC_ARM,
AWS_SNS_TOPIC_ARN,
OPTIONS_PING_INTERVAL,
AWS_REGION,
NODE_ENV,
@@ -35,7 +35,7 @@ module.exports = (logger) => {
// listen for SNS lifecycle changes
let lifecycleEmitter = new Emitter();
let dryUpCalls = false;
if (AWS_SNS_TOPIC_ARM && AWS_REGION) {
if (AWS_SNS_TOPIC_ARN && AWS_REGION) {
(async function() {
try {
@@ -130,7 +130,7 @@ module.exports = (logger) => {
logger.info('disabling OPTIONS pings since we are running as a kubernetes service');
const {srf} = require('../..');
const {addToSet} = srf.locals.dbHelpers;
const uuid = srf.locals.fsUUID = uuidv4();
const uuid = srf.locals.fsUUID = crypto.randomUUID();
/* in case redis is restarted, re-insert our key every so often */
setInterval(() => {

View File

@@ -35,6 +35,12 @@ const makeOpusFirst = (sdp) => {
}
return sdpTransform.write(parsedSdp);
};
const removeVideoSdp = (sdp) => {
  /**
   * Strip every video m-line from an SDP body, leaving audio (and any other
   * non-video media sections) intact. Used when video is enabled in FS but
   * the current call is audio-only.
   */
  const session = sdpTransform.parse(sdp);
  const nonVideoMedia = session.media.filter(({type}) => type !== 'video');
  session.media = nonVideoMedia;
  return sdpTransform.write(session);
};
const extractSdpMedia = (sdp) => {
const parsedSdp1 = sdpTransform.parse(sdp);
@@ -54,5 +60,6 @@ module.exports = {
mergeSdpMedia,
extractSdpMedia,
isOpusFirst,
makeOpusFirst
makeOpusFirst,
removeVideoSdp
};

View File

@@ -1,5 +1,5 @@
const xmlParser = require('xml2js').parseString;
const uuidv4 = require('uuid-random');
const crypto = require('crypto');
const parseUri = require('drachtio-srf').parseUri;
const transform = require('sdp-transform');
const debug = require('debug')('jambonz:feature-server');
@@ -52,7 +52,7 @@ const parseSiprecPayload = (req, logger) => {
const arr = /^([^]+)(m=[^]+?)(m=[^]+?)$/.exec(sdp);
opts.sdp1 = `${arr[1]}${arr[2]}`;
opts.sdp2 = `${arr[1]}${arr[3]}\r\n`;
opts.sessionId = uuidv4();
opts.sessionId = crypto.randomUUID();
logger.info({ payload: req.payload }, 'SIPREC payload with no metadata (e.g. Cisco NBR)');
resolve(opts);
} else if (!sdp || !meta) {
@@ -64,7 +64,7 @@ const parseSiprecPayload = (req, logger) => {
if (err) { throw err; }
opts.recordingData = result ;
opts.sessionId = uuidv4() ;
opts.sessionId = crypto.randomUUID();
const arr = /^([^]+)(m=[^]+?)(m=[^]+?)$/.exec(sdp) ;
opts.sdp1 = `${arr[1]}${arr[2]}` ;

View File

@@ -0,0 +1,70 @@
const EventEmitter = require('events');
/**
* A specialized EventEmitter that caches the most recent event emissions.
* When new listeners are added, they immediately receive the most recent
* event if it was previously emitted. This is useful for handling state
* changes where late subscribers need to know the current state.
*
* Features:
* - Caches the most recent emission for each event type
* - New listeners immediately receive the cached event if available
* - Supports both regular (on) and one-time (once) listeners
* - Maintains compatibility with Node's EventEmitter interface
*/
/**
 * A specialized EventEmitter that caches the most recent emission of each
 * event. Listeners added after an event has fired immediately receive the
 * cached payload — useful for state changes where late subscribers need the
 * current state.
 *
 * Bug fix vs. the previous version: `once()` used to queue the listener in
 * `_onceListeners` AND register it with `super.once`; `emit()` then invoked
 * the queued listener manually and `super.emit` invoked the native
 * registration as well, so a once-listener fired twice for a single
 * emission. Queued once-listeners are now dispatched exclusively by emit().
 */
class StickyEventEmitter extends EventEmitter {
  constructor() {
    super();
    // event name -> args array of the most recent emission
    this._eventCache = new Map();
    // event name -> array of once-listeners waiting for the first emission
    this._onceListeners = new Map();
  }
  /** Release all cached events and listeners. */
  destroy() {
    this._eventCache.clear();
    this._onceListeners.clear();
    this.removeAllListeners();
  }
  /**
   * Emit an event, caching its args for future subscribers and draining any
   * once-listeners that subscribed before the first emission.
   * @returns {boolean} whatever the native emit returns
   */
  emit(event, ...args) {
    this._eventCache.set(event, args);
    if (this._onceListeners.has(event)) {
      const listeners = this._onceListeners.get(event);
      // delete before dispatch so a re-entrant once() during a callback
      // queues for the NEXT emission rather than being dropped with this batch
      this._onceListeners.delete(event);
      for (const listener of listeners) {
        listener(...args);
      }
      if (this.onSuccess) {
        this.onSuccess();
      }
    }
    return super.emit(event, ...args);
  }
  /**
   * Subscribe; if the event already fired, the listener is invoked
   * immediately with the cached args and also registered for future emits.
   */
  on(event, listener) {
    if (this._eventCache.has(event)) {
      listener(...this._eventCache.get(event));
    }
    return super.on(event, listener);
  }
  /**
   * Subscribe for a single invocation; fires immediately if the event is
   * cached, otherwise queues until the next emit.
   */
  once(event, listener) {
    if (this._eventCache.has(event)) {
      listener(...this._eventCache.get(event));
      if (this.onSuccess) {
        this.onSuccess();
      }
    } else {
      if (!this._onceListeners.has(event)) {
        this._onceListeners.set(event, []);
      }
      this._onceListeners.get(event).push(listener);
      // deliberately NOT calling super.once here: emit() dispatches queued
      // listeners itself; a native registration too would invoke the
      // listener twice on a single emit
    }
    return this;
  }
}
module.exports = StickyEventEmitter;

View File

@@ -30,6 +30,7 @@ const stickyVars = {
'DEEPGRAM_SPEECH_TIER',
'DEEPGRAM_SPEECH_MODEL',
'DEEPGRAM_SPEECH_ENABLE_SMART_FORMAT',
'DEEPGRAM_SPEECH_ENABLE_NO_DELAY',
'DEEPGRAM_SPEECH_ENABLE_AUTOMATIC_PUNCTUATION',
'DEEPGRAM_SPEECH_PROFANITY_FILTER',
'DEEPGRAM_SPEECH_REDACT',
@@ -43,7 +44,9 @@ const stickyVars = {
'DEEPGRAM_SPEECH_UTTERANCE_END_MS',
'DEEPGRAM_SPEECH_VAD_TURNOFF',
'DEEPGRAM_SPEECH_TAG',
'DEEPGRAM_SPEECH_MODEL_VERSION'
'DEEPGRAM_SPEECH_MODEL_VERSION',
'DEEPGRAM_SPEECH_FILLER_WORDS',
'DEEPGRAM_SPEECH_KEYTERMS',
],
aws: [
'AWS_VOCABULARY_NAME',
@@ -104,6 +107,9 @@ const stickyVars = {
'ASSEMBLYAI_API_KEY',
'ASSEMBLYAI_WORD_BOOST'
],
voxist: [
'VOXIST_API_KEY',
],
speechmatics: [
'SPEECHMATICS_API_KEY',
'SPEECHMATICS_HOST',
@@ -111,7 +117,16 @@ const stickyVars = {
'SPEECHMATICS_SPEECH_HINTS',
'SPEECHMATICS_TRANSLATION_LANGUAGES',
'SPEECHMATICS_TRANSLATION_PARTIALS'
]
],
openai: [
'OPENAI_API_KEY',
'OPENAI_MODEL',
'OPENAI_INPUT_AUDIO_NOISE_REDUCTION',
'OPENAI_TURN_DETECTION_TYPE',
'OPENAI_TURN_DETECTION_THRESHOLD',
'OPENAI_TURN_DETECTION_PREFIX_PADDING_MS',
'OPENAI_TURN_DETECTION_SILENCE_DURATION_MS',
],
};
/**
@@ -208,7 +223,7 @@ const consolidateTranscripts = (bufferedTranscripts, channel, language, vendor)
const lastChar = acc.alternatives[0].transcript.slice(-1);
const firstChar = newTranscript.charAt(0);
if (lastChar.match(/\d/) && firstChar.match(/\d/)) {
if (vendor === 'speechmatics' || (lastChar.match(/\d/) && firstChar.match(/\d/))) {
acc.alternatives[0].transcript += newTranscript;
} else {
acc.alternatives[0].transcript += ` ${newTranscript}`;
@@ -298,13 +313,18 @@ const normalizeDeepgram = (evt, channel, language, shortUtterance) => {
confidence: alt.confidence,
transcript: alt.transcript,
}));
/**
* Some models (nova-2-general) return the detected language in the
* alternatives.languages array if the language is set as multi.
* If the language is detected, we use it as the language_code.
*/
const detectedLanguage = evt.channel?.alternatives?.[0]?.languages?.[0];
/**
* note difference between is_final and speech_final in Deepgram:
* https://developers.deepgram.com/docs/understand-endpointing-interim-results
*/
return {
language_code: language,
language_code: detectedLanguage || language,
channel_tag: channel,
is_final: shortUtterance ? evt.is_final : evt.speech_final,
alternatives: alternatives.length ? [alternatives[0]] : [],
@@ -516,6 +536,25 @@ const normalizeAssemblyAi = (evt, channel, language) => {
};
};
/**
 * Normalize a Voxist STT event into the common transcription shape.
 * Voxist events carry {type, text}; a type of 'final' marks end of utterance.
 * Voxist supplies no per-alternative confidence, so 1.00 is reported.
 */
const normalizeVoxist = (evt, channel, language) => {
  const rawEvent = JSON.parse(JSON.stringify(evt));
  const alternative = {
    confidence: 1.00,
    transcript: evt.text,
  };
  return {
    language_code: language,
    channel_tag: channel,
    is_final: evt.type === 'final',
    alternatives: [alternative],
    vendor: {
      name: 'voxist',
      evt: rawEvent
    }
  };
};
const normalizeSpeechmatics = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
const is_final = evt.message === 'AddTranscript';
@@ -541,6 +580,35 @@ const normalizeSpeechmatics = (evt, channel, language) => {
return obj;
};
/**
 * Derive an overall confidence from an array of per-token log probabilities
 * (each item: {logprob}). The joint probability of the token sequence is
 * exp of the summed log-probs.
 * @param {Array<{logprob: number}>} logprobsArray
 * @returns {number} probability in (0, 1]
 */
const calculateConfidence = (logprobsArray) => {
  let totalLogProb = 0;
  for (const {logprob} of logprobsArray) {
    totalLogProb += logprob;
  }
  return Math.exp(totalLogProb);
};
/**
 * Normalize an OpenAI transcription event into the common shape.
 * OpenAI events are always treated as final; confidence is derived from the
 * token logprobs when present, else defaults to 1.0.
 */
const normalizeOpenAI = (evt, channel, language) => {
  const rawEvent = JSON.parse(JSON.stringify(evt));
  const confidence = evt.logprobs ? calculateConfidence(evt.logprobs) : 1.0;
  return {
    language_code: language,
    channel_tag: channel,
    is_final: true,
    alternatives: [
      {
        transcript: evt.transcript,
        confidence,
      }
    ],
    vendor: {
      name: 'openai',
      evt: rawEvent
    }
  };
};
module.exports = (logger) => {
const normalizeTranscription = (evt, vendor, channel, language, shortUtterance, punctuation) => {
@@ -566,10 +634,14 @@ module.exports = (logger) => {
return normalizeCobalt(evt, channel, language);
case 'assemblyai':
return normalizeAssemblyAi(evt, channel, language, shortUtterance);
case 'voxist':
return normalizeVoxist(evt, channel, language);
case 'verbio':
return normalizeVerbio(evt, channel, language);
case 'speechmatics':
return normalizeSpeechmatics(evt, channel, language);
case 'openai':
return normalizeOpenAI(evt, channel, language);
default:
if (vendor.startsWith('custom:')) {
return normalizeCustom(evt, channel, language, vendor);
@@ -704,6 +776,8 @@ module.exports = (logger) => {
//azureSttEndpointId overrides sttCredentials.custom_stt_endpoint
...(rOpts.azureSttEndpointId &&
{AZURE_SERVICE_ENDPOINT_ID: rOpts.azureSttEndpointId}),
...(azureOptions.speechRecognitionMode &&
{AZURE_RECOGNITION_MODE: azureOptions.speechRecognitionMode}),
};
}
else if ('nuance' === vendor) {
@@ -755,7 +829,7 @@ module.exports = (logger) => {
};
}
else if ('deepgram' === vendor) {
let {model} = rOpts;
let model = rOpts.deepgramOptions?.model || rOpts.model;
const {deepgramOptions = {}} = rOpts;
const deepgramUri = deepgramOptions.deepgramSttUri || sttCredentials.deepgram_stt_uri;
const useTls = deepgramOptions.deepgramSttUseTls || sttCredentials.deepgram_stt_use_tls;
@@ -777,6 +851,8 @@ module.exports = (logger) => {
{DEEPGRAM_SPEECH_ENABLE_AUTOMATIC_PUNCTUATION: 1},
...(deepgramOptions.smartFormatting) &&
{DEEPGRAM_SPEECH_ENABLE_SMART_FORMAT: 1},
...(deepgramOptions.noDelay) &&
{DEEPGRAM_SPEECH_ENABLE_NO_DELAY: 1},
...(deepgramOptions.profanityFilter) &&
{DEEPGRAM_SPEECH_PROFANITY_FILTER: 1},
...(deepgramOptions.redact) &&
@@ -812,7 +888,11 @@ module.exports = (logger) => {
...(deepgramOptions.tag) &&
{DEEPGRAM_SPEECH_TAG: deepgramOptions.tag},
...(deepgramOptions.version) &&
{DEEPGRAM_SPEECH_MODEL_VERSION: deepgramOptions.version}
{DEEPGRAM_SPEECH_MODEL_VERSION: deepgramOptions.version},
...(deepgramOptions.fillerWords) &&
{DEEPGRAM_SPEECH_FILLER_WORDS: deepgramOptions.fillerWords},
...((Array.isArray(deepgramOptions.keyterms) && deepgramOptions.keyterms.length > 0) &&
{DEEPGRAM_SPEECH_KEYTERMS: deepgramOptions.keyterms.join(',')})
};
}
else if ('soniox' === vendor) {
@@ -921,6 +1001,43 @@ module.exports = (logger) => {
{ASSEMBLYAI_WORD_BOOST: JSON.stringify(rOpts.hints)})
};
}
else if ('voxist' === vendor) {
opts = {
...opts,
...(sttCredentials.api_key) &&
{VOXIST_API_KEY: sttCredentials.api_key},
};
}
else if ('openai' === vendor) {
const {openaiOptions = {}} = rOpts;
const model = openaiOptions.model || rOpts.model || sttCredentials.model_id || 'whisper-1';
const apiKey = openaiOptions.apiKey || sttCredentials.api_key;
opts = {
OPENAI_MODEL: model,
OPENAI_API_KEY: apiKey,
...opts,
...(openaiOptions.prompt && {OPENAI_PROMPT: openaiOptions.prompt}),
...(openaiOptions.input_audio_noise_reduction &&
{OPENAI_INPUT_AUDIO_NOISE_REDUCTION: openaiOptions.input_audio_noise_reduction}),
};
if (openaiOptions.turn_detection) {
opts = {
...opts,
OPENAI_TURN_DETECTION_TYPE: openaiOptions.turn_detection.type,
...(openaiOptions.turn_detection.threshold && {
OPENAI_TURN_DETECTION_THRESHOLD: openaiOptions.turn_detection.threshold
}),
...(openaiOptions.turn_detection.prefix_padding_ms && {
OPENAI_TURN_DETECTION_PREFIX_PADDING_MS: openaiOptions.turn_detection.prefix_padding_ms
}),
...(openaiOptions.turn_detection.silence_duration_ms && {
OPENAI_TURN_DETECTION_SILENCE_DURATION_MS: openaiOptions.turn_detection.silence_duration_ms
}),
};
}
}
else if ('verbio' === vendor) {
const {verbioOptions = {}} = rOpts;
opts = {
@@ -954,6 +1071,36 @@ module.exports = (logger) => {
SPEECHMATICS_TRANSLATION_PARTIALS: speechmaticsOptions.translation_config.enable_partials ? 1 : 0
}
),
...(speechmaticsOptions.transcription_config?.domain &&
{SPEECHMATICS_DOMAIN: speechmaticsOptions.transcription_config.domain}),
...{SPEECHMATICS_MAX_DELAY: speechmaticsOptions.transcription_config?.max_delay || 0.7},
...{SPEECHMATICS_MAX_DELAY_MODE: speechmaticsOptions.transcription_config?.max_delay_mode || 'flexible'},
...(speechmaticsOptions.transcription_config?.diarization &&
{SPEECHMATICS_DIARIZATION: speechmaticsOptions.transcription_config.diarization}),
...(speechmaticsOptions.transcription_config?.speaker_diarization_config?.speaker_sensitivity &&
{SPEECHMATICS_DIARIZATION_SPEAKER_SENSITIVITY:
speechmaticsOptions.transcription_config.speaker_diarization_config.speaker_sensitivity}),
...(speechmaticsOptions.transcription_config?.speaker_diarization_config?.max_speakers &&
{SPEECHMATICS_DIARIZATION_MAX_SPEAKERS:
speechmaticsOptions.transcription_config.speaker_diarization_config.max_speakers}),
...(speechmaticsOptions.transcription_config?.output_locale &&
{SPEECHMATICS_OUTPUT_LOCALE: speechmaticsOptions.transcription_config.output_locale}),
...(speechmaticsOptions.transcription_config?.punctuation_overrides?.permitted_marks &&
{SPEECHMATICS_PUNCTUATION_ALLOWED:
speechmaticsOptions.transcription_config.punctuation_overrides.permitted_marks.join(',')}),
...(speechmaticsOptions.transcription_config?.punctuation_overrides?.sensitivity &&
{SPEECHMATICS_PUNCTUATION_SENSITIVITY:
speechmaticsOptions.transcription_config?.punctuation_overrides?.sensitivity}),
...(speechmaticsOptions.transcription_config?.operating_point &&
{SPEECHMATICS_OPERATING_POINT: speechmaticsOptions.transcription_config.operating_point}),
...(speechmaticsOptions.transcription_config?.enable_entities &&
{SPEECHMATICS_ENABLE_ENTTIES: speechmaticsOptions.transcription_config.enable_entities}),
...(speechmaticsOptions.transcription_config?.audio_filtering_config?.volume_threshold &&
{SPEECHMATICS_VOLUME_THRESHOLD:
speechmaticsOptions.transcription_config.audio_filtering_config.volume_threshold}),
...(speechmaticsOptions.transcription_config?.transcript_filtering_config?.remove_disfluencies &&
{SPEECHMATICS_REMOVE_DISFLUENCIES:
speechmaticsOptions.transcription_config.transcript_filtering_config.remove_disfluencies})
};
}
else if (vendor.startsWith('custom:')) {

View File

@@ -4,37 +4,50 @@ const {
TtsStreamingEvents,
TtsStreamingConnectionStatus
} = require('../utils/constants');
const MAX_CHUNK_SIZE = 1800;
const HIGH_WATER_BUFFER_SIZE = 5000;
const LOW_WATER_BUFFER_SIZE = 1000;
const TIMEOUT_RETRY_MSECS = 3000;
const MAX_CHUNK_SIZE = 1800;
const HIGH_WATER_BUFFER_SIZE = 1000;
const LOW_WATER_BUFFER_SIZE = 200;
const TIMEOUT_RETRY_MSECS = 1000; // 1 second
const isWhitespace = (str) => /^\s*$/.test(str);
/**
* Each queue item is an object:
* - { type: 'text', value: '…' } for text tokens.
* - { type: 'flush' } for a flush command.
*/
class TtsStreamingBuffer extends Emitter {
constructor(cs) {
super();
this.cs = cs;
this.logger = cs.logger;
this.tokens = '';
// Use an array to hold our structured items.
this.queue = [];
// Track total number of characters in text items.
this.bufferedLength = 0;
this.eventHandlers = [];
this._isFull = false;
this._connectionStatus = TtsStreamingConnectionStatus.NotConnected;
this._flushPending = false;
this.timer = null;
// Record the last time the text buffer was updated.
this.lastUpdateTime = 0;
}
get isEmpty() {
return this.tokens.length === 0;
return this.queue.length === 0;
}
get size() {
return this.bufferedLength;
}
get isFull() {
return this._isFull;
}
get size() {
return this.tokens.length;
}
get ep() {
return this.cs?.ep;
}
@@ -42,7 +55,8 @@ class TtsStreamingBuffer extends Emitter {
async start() {
assert.ok(
this._connectionStatus === TtsStreamingConnectionStatus.NotConnected,
'TtsStreamingBuffer:start already started, or has failed');
'TtsStreamingBuffer:start already started, or has failed'
);
this.vendor = this.cs.getTsStreamingVendor();
if (!this.vendor) {
@@ -55,9 +69,9 @@ class TtsStreamingBuffer extends Emitter {
this._connectionStatus = TtsStreamingConnectionStatus.Connecting;
try {
if (this.eventHandlers.length === 0) this._initHandlers(this.ep);
await this._api(this.ep, [this.ep.uuid, 'connect']);
await this._api(this.ep, [this.ep.uuid, 'connect']);
} catch (err) {
this.logger.info({err}, 'TtsStreamingBuffer:start Error connecting to TTS streaming');
this.logger.info({ err }, 'TtsStreamingBuffer:start Error connecting to TTS streaming');
this._connectionStatus = TtsStreamingConnectionStatus.Failed;
}
}
@@ -67,188 +81,319 @@ class TtsStreamingBuffer extends Emitter {
this.removeCustomEventListeners();
if (this.ep) {
this._api(this.ep, [this.ep.uuid, 'close'])
.catch((err) => this.logger.info({err}, 'TtsStreamingBuffer:kill Error closing TTS streaming'));
.catch((err) =>
this.logger.info({ err }, 'TtsStreamingBuffer:stop Error closing TTS streaming')
);
}
this.timer = null;
this.tokens = '';
this.queue = [];
this.bufferedLength = 0;
this._connectionStatus = TtsStreamingConnectionStatus.NotConnected;
}
/**
* Add tokens to the buffer and start feeding them to the endpoint if necessary.
* Buffer new text tokens.
*/
async bufferTokens(tokens) {
if (this._connectionStatus === TtsStreamingConnectionStatus.Failed) {
this.logger.info('TtsStreamingBuffer:bufferTokens TTS streaming connection failed, rejecting request');
return {status: 'failed', reason: `connection to ${this.vendor} failed`};
return { status: 'failed', reason: `connection to ${this.vendor} failed` };
}
if (0 === this.bufferedLength && isWhitespace(tokens)) {
this.logger.debug({tokens}, 'TtsStreamingBuffer:bufferTokens discarded whitespace tokens');
return { status: 'ok' };
}
const displayedTokens = tokens.length <= 40 ? tokens : tokens.substring(0, 40);
const totalLength = tokens.length;
/* if we crossed the high water mark, reject the request */
if (this.tokens.length + totalLength > HIGH_WATER_BUFFER_SIZE) {
if (this.bufferedLength + totalLength > HIGH_WATER_BUFFER_SIZE) {
this.logger.info(
`TtsStreamingBuffer:bufferTokensTTS buffer is full, rejecting request to buffer ${totalLength} tokens`);
`TtsStreamingBuffer throttling: buffer is full, rejecting request to buffer ${totalLength} tokens`
);
if (!this._isFull) {
this._isFull = true;
this.emit(TtsStreamingEvents.Pause);
}
return {status: 'failed', reason: 'full'};
return { status: 'failed', reason: 'full' };
}
this.logger.debug(
`TtsStreamingBuffer:bufferTokens "${displayedTokens}" (length: ${totalLength}), starting? ${this.isEmpty}`
`TtsStreamingBuffer:bufferTokens "${displayedTokens}" (length: ${totalLength})`
);
this.tokens += (tokens || '');
this.queue.push({ type: 'text', value: tokens });
this.bufferedLength += totalLength;
// Update the last update time each time new text is buffered.
this.lastUpdateTime = Date.now();
await this._feedTokens();
return {status: 'ok'};
await this._feedQueue();
return { status: 'ok' };
}
/**
* Insert a flush command. If no text is queued, flush immediately.
* Otherwise, append a flush marker so that all text preceding it will be sent
* (regardless of sentence boundaries) before the flush is issued.
*/
flush() {
this.logger.debug('TtsStreamingBuffer:flush');
if (this._connectionStatus === TtsStreamingConnectionStatus.Connecting) {
this.logger.debug('TtsStreamingBuffer:flush TTS stream is not quite ready - wait for connect');
this._flushPending = true;
if (this.queue.length === 0 || this.queue[this.queue.length - 1].type !== 'flush') {
this.queue.push({ type: 'flush' });
}
return;
}
else if (this._connectionStatus === TtsStreamingConnectionStatus.Connected) {
this._api(this.ep, [this.ep.uuid, 'flush'])
.catch((err) => this.logger.info({err},
`TtsStreamingBuffer:flush Error flushing TTS streaming: ${JSON.stringify(err)}`));
if (this.isEmpty) {
this._doFlush();
}
else {
if (this.queue[this.queue.length - 1].type !== 'flush') {
this.queue.push({ type: 'flush' });
this.logger.debug('TtsStreamingBuffer:flush added flush marker to queue');
}
}
}
else {
this.logger.debug(
`TtsStreamingBuffer:flush TTS stream is not connected, status: ${this._connectionStatus}`
);
}
}
clear() {
this.logger.debug('TtsStreamingBuffer:clear');
if (this._connectionStatus !== TtsStreamingConnectionStatus.Connected) return;
clearTimeout(this.timer);
this._api(this.ep, [this.ep.uuid, 'clear'])
.catch((err) => this.logger.info({err}, 'TtsStreamingBuffer:clear Error clearing TTS streaming'));
this.tokens = '';
this._api(this.ep, [this.ep.uuid, 'clear']).catch((err) =>
this.logger.info({ err }, 'TtsStreamingBuffer:clear Error clearing TTS streaming')
);
this.queue = [];
this.bufferedLength = 0;
this.timer = null;
this._isFull = false;
}
/**
* Send tokens to the TTS engine in sentence chunks for best playout
* Process the queue in two phases.
*
* Phase 1: Look for flush markers. When a flush marker is found (even if not at the very front),
* send all text tokens that came before it immediately (ignoring sentence boundaries)
* and then send the flush command. Repeat until there are no flush markers left.
*
* Phase 2: With the remaining queue (now containing only text items), accumulate text
* up to MAX_CHUNK_SIZE and use sentence-boundary logic to determine a chunk.
* Then, remove the exact tokens (or portions thereof) that were consumed.
*/
async _feedTokens(handlingTimeout = false) {
this.logger.debug({tokens: this.tokens}, '_feedTokens');
async _feedQueue(handlingTimeout = false) {
this.logger.debug({ queue: this.queue }, 'TtsStreamingBuffer:_feedQueue');
try {
/* are we in a state where we can feed tokens to the TTS? */
if (!this.cs.isTtsStreamOpen || !this.ep || !this.tokens) {
this.logger.debug('TTS stream is not open or no tokens to send');
return this.tokens?.length || 0;
if (!this.cs.isTtsStreamOpen || !this.ep) {
this.logger.debug('TtsStreamingBuffer:_feedQueue TTS stream is not open or no endpoint available');
return;
}
if (this._connectionStatus === TtsStreamingConnectionStatus.NotConnected ||
this._connectionStatus === TtsStreamingConnectionStatus.Failed) {
this.logger.debug('TtsStreamingBuffer:_feedTokens TTS stream is not connected');
if (
this._connectionStatus === TtsStreamingConnectionStatus.NotConnected ||
this._connectionStatus === TtsStreamingConnectionStatus.Failed
) {
this.logger.debug('TtsStreamingBuffer:_feedQueue TTS stream is not connected');
return;
}
if (this._connectionStatus === TtsStreamingConnectionStatus.Connecting) {
this.logger.debug('TtsStreamingBuffer:_feedTokens TTS stream is not ready, waiting for connect');
// --- Phase 1: Process flush markers ---
// Process any flush marker that isn't in the very first position.
let flushIndex = this.queue.findIndex((item, idx) => item.type === 'flush' && idx > 0);
while (flushIndex !== -1) {
let flushText = '';
// Accumulate all text tokens preceding the flush marker.
for (let i = 0; i < flushIndex; i++) {
if (this.queue[i].type === 'text') {
flushText += this.queue[i].value;
}
}
// Remove those text items.
for (let i = 0; i < flushIndex; i++) {
const item = this.queue.shift();
if (item.type === 'text') {
this.bufferedLength -= item.value.length;
}
}
// Remove the flush marker (now at the front).
if (this.queue.length > 0 && this.queue[0].type === 'flush') {
this.queue.shift();
}
// Immediately send all accumulated text (ignoring sentence boundaries).
if (flushText.length > 0) {
const modifiedFlushText = flushText.replace(/\n\n/g, '\n \n');
try {
await this._api(this.ep, [this.ep.uuid, 'send', modifiedFlushText]);
} catch (err) {
this.logger.info({ err, flushText }, 'TtsStreamingBuffer:_feedQueue Error sending TTS chunk');
}
}
// Send the flush command.
await this._doFlush();
flushIndex = this.queue.findIndex((item, idx) => item.type === 'flush' && idx > 0);
}
// If a flush marker is at the very front, process it.
while (this.queue.length > 0 && this.queue[0].type === 'flush') {
this.queue.shift();
await this._doFlush();
}
// --- Phase 2: Process remaining text tokens ---
if (this.queue.length === 0) {
this._removeTimer();
return;
}
/* must send at least one sentence */
const limit = Math.min(MAX_CHUNK_SIZE, this.tokens.length);
let chunkEnd = findSentenceBoundary(this.tokens, limit);
// Accumulate contiguous text tokens (from the front) up to MAX_CHUNK_SIZE.
let combinedText = '';
for (const item of this.queue) {
if (item.type !== 'text') break;
combinedText += item.value;
if (combinedText.length >= MAX_CHUNK_SIZE) break;
}
if (combinedText.length === 0) {
this._removeTimer();
return;
}
const limit = Math.min(MAX_CHUNK_SIZE, combinedText.length);
let chunkEnd = findSentenceBoundary(combinedText, limit);
if (chunkEnd <= 0) {
if (handlingTimeout) {
/* on a timeout we've left some tokens sitting around, so be more aggressive now in sending them */
chunkEnd = findWordBoundary(this.tokens, limit);
chunkEnd = findWordBoundary(combinedText, limit);
if (chunkEnd <= 0) {
this.logger.debug('TtsStreamingBuffer:_feedTokens: no word boundary found');
this._setTimerIfNeeded();
return;
}
}
else {
/* if we just received tokens, we won't send unless we have at least a full sentence */
this.logger.debug('TtsStreamingBuffer:_feedTokens: no sentence boundary found');
} else {
this._setTimerIfNeeded();
return;
}
}
const chunk = combinedText.slice(0, chunkEnd);
const chunk = this.tokens.slice(0, chunkEnd);
this.tokens = this.tokens.slice(chunkEnd);
// Now we iterate over the queue items
// and deduct their lengths until we've accounted for chunkEnd characters.
let remaining = chunkEnd;
let tokensProcessed = 0;
for (let i = 0; i < this.queue.length; i++) {
const token = this.queue[i];
if (token.type !== 'text') break;
if (remaining >= token.value.length) {
remaining -= token.value.length;
tokensProcessed = i + 1;
} else {
// Partially consumed token: update its value to remove the consumed part.
token.value = token.value.slice(remaining);
tokensProcessed = i;
remaining = 0;
break;
}
}
// Remove the fully consumed tokens from the front of the queue.
this.queue.splice(0, tokensProcessed);
this.bufferedLength -= chunkEnd;
/* freeswitch looks for sequence of 2 newlines to determine end of message, so insert a space */
const modifiedChunk = chunk.replace(/\n\n/g, '\n \n');
await this._api(this.ep, [this.ep.uuid, 'send', modifiedChunk]);
this.logger.debug(`TtsStreamingBuffer:_feedTokens: sent ${chunk.length}, remaining: ${this.tokens.length}`);
this.logger.debug(`TtsStreamingBuffer:_feedQueue sending chunk to tts: ${modifiedChunk}`);
if (this.isFull && this.tokens.length <= LOW_WATER_BUFFER_SIZE) {
this.logger.info('TtsStreamingBuffer:_feedTokens TTS streaming buffer is no longer full');
try {
await this._api(this.ep, [this.ep.uuid, 'send', modifiedChunk]);
} catch (err) {
this.logger.info({ err, chunk }, 'TtsStreamingBuffer:_feedQueue Error sending TTS chunk');
}
if (this._isFull && this.bufferedLength <= LOW_WATER_BUFFER_SIZE) {
this.logger.info('TtsStreamingBuffer throttling: buffer is no longer full - resuming');
this._isFull = false;
this.emit(TtsStreamingEvents.Resume);
}
} catch (err) {
this.logger.info({err}, 'TtsStreamingBuffer:_feedTokens Error sending TTS chunk');
this.tokens = '';
}
return;
return this._feedQueue();
} catch (err) {
this.logger.info({ err }, 'TtsStreamingBuffer:_feedQueue Error sending TTS chunk');
this.queue = [];
this.bufferedLength = 0;
}
}
async _api(ep, args) {
const apiCmd = `uuid_${this.vendor}_tts_streaming`;
const apiCmd = `uuid_${this.vendor.startsWith('custom:') ? 'custom' : this.vendor}_tts_streaming`;
const res = await ep.api(apiCmd, `^^|${args.join('|')}`);
if (!res.body?.startsWith('+OK')) {
this.logger.info({args}, `Error calling ${apiCmd}: ${res.body}`);
this.logger.info({ args }, `Error calling ${apiCmd}: ${res.body}`);
throw new Error(`Error calling ${apiCmd}: ${res.body}`);
}
}
_onConnectFailure(vendor) {
this.logger.info(`streaming tts connection failed to ${vendor}`);
this._connectionStatus = TtsStreamingConnectionStatus.Failed;
this.tokens = '';
this.emit(TtsStreamingEvents.ConnectFailure, {vendor});
_doFlush() {
return this._api(this.ep, [this.ep.uuid, 'flush'])
.then(() => this.logger.debug('TtsStreamingBuffer:_doFlush sent flush command'))
.catch((err) =>
this.logger.info(
{ err },
`TtsStreamingBuffer:_doFlush Error flushing TTS streaming: ${JSON.stringify(err)}`
)
);
}
async _onConnect(vendor) {
this.logger.info(`streaming tts connection made to ${vendor}`);
this.logger.info(`TtsStreamingBuffer:_onConnect streaming tts connection made to ${vendor} successful`);
this._connectionStatus = TtsStreamingConnectionStatus.Connected;
if (this.tokens.length > 0) {
await this._feedTokens();
}
if (this._flushPending) {
this.flush();
this._flushPending = false;
if (this.queue.length > 0) {
await this._feedQueue();
}
}
_onConnectFailure(vendor) {
this.logger.info(`TtsStreamingBuffer:_onConnectFailure streaming tts connection failed to ${vendor}`);
this._connectionStatus = TtsStreamingConnectionStatus.Failed;
this.queue = [];
this.bufferedLength = 0;
this.emit(TtsStreamingEvents.ConnectFailure, { vendor });
}
_setTimerIfNeeded() {
if (this.tokens.length > 0 && !this.timer) {
if (this.bufferedLength > 0 && !this.timer) {
this.logger.debug({queue: this.queue},
`TtsStreamingBuffer:_setTimerIfNeeded setting timer because ${this.bufferedLength} buffered`);
this.timer = setTimeout(this._onTimeout.bind(this), TIMEOUT_RETRY_MSECS);
}
}
_removeTimer() {
if (this.timer) {
this.logger.debug('TtsStreamingBuffer:_removeTimer clearing timer');
clearTimeout(this.timer);
this.timer = null;
}
}
_onTimeout() {
this.logger.info('TtsStreamingBuffer:_onTimeout');
this.logger.debug('TtsStreamingBuffer:_onTimeout Timeout waiting for sentence boundary');
this.timer = null;
this._feedTokens(true);
// Check if new text has been added since the timer was set.
const now = Date.now();
if (now - this.lastUpdateTime < TIMEOUT_RETRY_MSECS) {
this.logger.debug('TtsStreamingBuffer:_onTimeout New text received recently; postponing flush.');
this._setTimerIfNeeded();
return;
}
this._feedQueue(true);
}
_onTtsEmpty(vendor) {
this.emit(TtsStreamingEvents.Empty, {vendor});
this.emit(TtsStreamingEvents.Empty, { vendor });
}
addCustomEventListener(ep, event, handler) {
this.eventHandlers.push({ep, event, handler});
this.eventHandlers.push({ ep, event, handler });
ep.addCustomEventListener(event, handler);
}
@@ -258,10 +403,11 @@ class TtsStreamingBuffer extends Emitter {
_initHandlers(ep) {
[
// DH: add other vendors here as modules are added
'deepgram',
'cartesia',
'elevenlabs'
'elevenlabs',
'rimelabs',
'custom'
].forEach((vendor) => {
const eventClassName = `${vendor.charAt(0).toUpperCase() + vendor.slice(1)}TtsStreamingEvents`;
const eventClass = require('../utils/constants')[eventClassName];
@@ -275,23 +421,21 @@ class TtsStreamingBuffer extends Emitter {
}
const findSentenceBoundary = (text, limit) => {
// Match traditional sentence boundaries or double newlines
// Look for punctuation or double newline that signals sentence end.
const sentenceEndRegex = /[.!?](?=\s|$)|\n\n/g;
let lastSentenceBoundary = -1;
let match;
while ((match = sentenceEndRegex.exec(text)) && match.index < limit) {
const precedingText = text.slice(0, match.index).trim(); // Extract text before the match and trim whitespace
if (precedingText.length > 0) { // Check if there's actual content
const precedingText = text.slice(0, match.index).trim();
if (precedingText.length > 0) {
if (
match[0] === '\n\n' || // It's a double newline
(match.index === 0 || !/\d$/.test(text[match.index - 1])) // Standard punctuation rules
match[0] === '\n\n' ||
(match.index === 0 || !/\d$/.test(text[match.index - 1]))
) {
lastSentenceBoundary = match.index + (match[0] === '\n\n' ? 2 : 1); // Include the boundary
lastSentenceBoundary = match.index + (match[0] === '\n\n' ? 2 : 1);
}
}
}
return lastSentenceBoundary;
};
@@ -299,7 +443,6 @@ const findWordBoundary = (text, limit) => {
const wordBoundaryRegex = /\s+/g;
let lastWordBoundary = -1;
let match;
while ((match = wordBoundaryRegex.exec(text)) && match.index < limit) {
lastWordBoundary = match.index;
}

View File

@@ -1,7 +1,7 @@
const assert = require('assert');
const BaseRequestor = require('./base-requestor');
const short = require('short-uuid');
const {HookMsgTypes} = require('./constants.json');
const {HookMsgTypes, WS_CLOSE_CODES} = require('./constants.json');
const Websocket = require('ws');
const snakeCaseKeys = require('./snakecase-keys');
const {
@@ -132,8 +132,8 @@ class WsRequestor extends BaseRequestor {
assert(this.ws);
/* prepare and send message */
let payload = params ? snakeCaseKeys(params, ['customerData', 'sip']) : null;
if (type === 'session:new') this._sessionData = payload;
let payload = params ? snakeCaseKeys(params, ['customerData', 'sip', 'env_vars', 'args']) : null;
if (type === 'session:new' || type === 'session:adulting') this._sessionData = payload;
if (type === 'session:reconnect') payload = this._sessionData;
assert.ok(url, 'WsRequestor:request url was not provided');
@@ -146,7 +146,9 @@ class WsRequestor extends BaseRequestor {
type,
msgid,
call_sid: this.call_sid,
hook: ['verb:hook', 'session:redirect', 'llm:event', 'llm:tool-call'].includes(type) ? url : undefined,
hook: [
'verb:hook', 'dial:confirm', 'session:redirect', 'llm:event', 'llm:tool-call'
].includes(type) ? url : undefined,
data: {...payload},
...b3
};
@@ -261,13 +263,13 @@ class WsRequestor extends BaseRequestor {
}
}
close() {
close(code = WS_CLOSE_CODES.NormalClosure) {
this.closedGracefully = true;
this.logger.debug('WsRequestor:close closing socket');
this.logger.debug(`WsRequestor:close closing socket with code ${code}`);
this._stopPingTimer();
try {
if (this.ws) {
this.ws.close(1000);
this.ws.close(code);
this.ws.removeAllListeners();
this.ws = null;
}
@@ -431,6 +433,21 @@ class WsRequestor extends BaseRequestor {
}
} catch (err) {
this.logger.info({err, content}, 'WsRequestor:_onMessage - invalid incoming message');
const params = {
msg: 'InvalidMessage',
details: err.message,
content: Buffer.from(content).toString('utf-8')
};
const {writeAlerts, AlertType} = this.Alerter;
writeAlerts({
account_sid: this.account_sid,
alert_type: AlertType.INVALID_APP_PAYLOAD,
target_sid: this.call_sid,
message: err.message,
}).catch((err) => this.logger.info({err}, 'Error generating alert for invalid message'));
this.request('jambonz:error', '/error', params)
.catch((err) => this.logger.debug({err}, 'WsRequestor:_onMessage - Error sending'));
}
}

10501
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
{
"name": "jambonz-feature-server",
"version": "0.9.2",
"version": "0.9.4",
"main": "app.js",
"engines": {
"node": ">= 18.x"
@@ -27,14 +27,15 @@
"dependencies": {
"@aws-sdk/client-auto-scaling": "^3.549.0",
"@aws-sdk/client-sns": "^3.549.0",
"@jambonz/db-helpers": "^0.9.6",
"@jambonz/db-helpers": "^0.9.12",
"@jambonz/http-health-check": "^0.0.1",
"@jambonz/mw-registrar": "^0.2.7",
"@jambonz/realtimedb-helpers": "^0.8.8",
"@jambonz/speech-utils": "^0.2.1",
"@jambonz/realtimedb-helpers": "^0.8.13",
"@jambonz/speech-utils": "^0.2.10",
"@jambonz/stats-collector": "^0.1.10",
"@jambonz/verb-specifications": "^0.0.90",
"@jambonz/time-series": "^0.2.13",
"@jambonz/verb-specifications": "^0.0.104",
"@modelcontextprotocol/sdk": "^1.9.0",
"@opentelemetry/api": "^1.8.0",
"@opentelemetry/exporter-jaeger": "^1.23.0",
"@opentelemetry/exporter-trace-otlp-http": "^0.50.0",
@@ -47,8 +48,8 @@
"bent": "^7.3.12",
"debug": "^4.3.4",
"deepcopy": "^2.1.0",
"drachtio-fsmrf": "^3.0.46",
"drachtio-srf": "^4.5.35",
"drachtio-fsmrf": "^4.0.3",
"drachtio-srf": "^5.0.5",
"express": "^4.19.2",
"express-validator": "^7.0.1",
"moment": "^2.30.1",
@@ -56,12 +57,11 @@
"pino": "^8.20.0",
"polly-ssml-split": "^0.1.0",
"proxyquire": "^2.1.3",
"sdp-transform": "^2.14.2",
"sdp-transform": "^2.15.0",
"short-uuid": "^5.1.0",
"sinon": "^17.0.1",
"to-snake-case": "^1.0.0",
"undici": "^6.20.0",
"uuid-random": "^1.3.2",
"undici": "^7.5.0",
"verify-aws-sns-signature": "^0.1.0",
"ws": "^8.18.0",
"xml2js": "^0.6.2"

View File

@@ -222,3 +222,62 @@ test('test create-call app_json', async(t) => {
t.error(err);
}
});
test('test create-call timeLimit', async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let from = 'create-call-app-json';
let account_sid = 'bb845d4b-83a9-4cde-a6e9-50f3743bab3f';
// Give UAS app time to come up
const p = sippUac('uas.xml', '172.38.0.10', from);
await waitFor(1000);
const startTime = Date.now();
const app_json = `[
{
"verb": "pause",
"length": 7
}
]`;
const post = bent('http://127.0.0.1:3000/', 'POST', 'json', 201);
post('v1/createCall', {
'account_sid':account_sid,
"call_hook": {
"url": "http://127.0.0.1:3100/",
"method": "POST",
"username": "username",
"password": "password"
},
app_json,
"from": from,
"to": {
"type": "phone",
"number": "15583084809"
},
"timeLimit": 1,
"speech_recognizer_vendor": "google",
"speech_recognizer_language": "en"
});
//THEN
await p;
const endTime = Date.now();
t.ok(endTime - startTime < 2000, 'create-call: timeLimit is respected');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});

View File

@@ -3,9 +3,8 @@ const { sippUac } = require('./sipp')('test_fs');
const bent = require('bent');
const getJSON = bent('json')
const clearModule = require('clear-module');
const {provisionCallHook} = require('./utils')
const sleepFor = (ms) => new Promise((r) => setTimeout(r, ms));
const {provisionCallHook} = require('./utils');
const { sleepFor } = require('../lib/utils/helpers');
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);

View File

@@ -3,6 +3,7 @@ const { sippUac } = require('./sipp')('test_fs');
const clearModule = require('clear-module');
const {provisionCallHook, provisionActionHook, provisionAnyHook} = require('./utils');
const bent = require('bent');
const { sleepFor } = require('../lib/utils/helpers');
const getJSON = bent('json');
process.on('unhandledRejection', (reason, p) => {
@@ -17,8 +18,6 @@ function connect(connectable) {
});
}
const sleepFor = (ms) => new Promise((resolve) => setTimeout(() => resolve(), ms));
test('\'enqueue-dequeue\' tests', async(t) => {
clearModule.all();

View File

@@ -3,10 +3,9 @@ const { sippUac } = require('./sipp')('test_fs');
const clearModule = require('clear-module');
const {provisionCallHook, provisionCustomHook, provisionActionHook} = require('./utils')
const bent = require('bent');
const { sleepFor } = require('../lib/utils/helpers');
const getJSON = bent('json')
const sleepFor = async(ms) => new Promise(resolve => setTimeout(resolve, ms));
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
});