Compare commits

..

28 Commits

Author SHA1 Message Date
rammohan-y
28ff85225f Fixed issue for punctuation (#1344)
https://github.com/jambonz/jambonz-feature-server/issues/1343
2025-09-03 13:33:38 -04:00
Dave Horton
f2fe7c4d24 Fix/playback race by fs generates playback (#1331)
* update to speech-utils that generates playback id

* modify tts and say task to track current playback id and match against start and stop events

* bump speech utils

* wip

* wip

* fix race condition where say with playbackId gets stop event from previous play from cache file

* logging

* wip

* fix comparison when playing cached files

* logging
2025-08-26 09:39:25 -04:00
Dave Horton
97408c7d3b fix uncaught exception with llm streaming 2025-08-22 13:24:59 -04:00
Dave Horton
db5f0a0dce Feat/startup logging (#1333)
* turn down some logging

* add startup logging

* wip

* lint
2025-08-21 14:09:20 -04:00
Sam Machin
654ccd9d9d start timeout on bargein digits (#1312) 2025-08-19 08:34:33 -04:00
Dave Horton
ea27b20ac5 update speech-utils (#1324) 2025-08-17 10:09:55 -04:00
Hoan Luu Huu
96aa705378 update speech util version (#1323) 2025-08-14 08:39:12 -04:00
rammohan-y
5e51849839 Sending synthesized-audio notification for servedFromCache as false (#1320)
* Sending synthesized-audio notification for servedFromCache as well
https://github.com/jambonz/jambonz-feature-server/issues/1319

* Sending back the id that was set, to track the synthesized-audio
e.g. if we send a say verb with id 100, its synthesized-audio event will return 100 in the data to correlate the say verb and the synthesized-audio event
2025-08-13 20:56:56 -04:00
Hoan Luu Huu
44f69fa76d Support resemble tts (#1322)
* support resemble tts

* update speech utils version
2025-08-13 08:15:29 -04:00
Dave Horton
73c77bea71 add support for deepgram entity_prompt 2025-08-11 20:58:14 -04:00
rammohan-y
babc0d0dbb Fix for issue https://github.com/jambonz/jambonz-feature-server/issues/1317. (#1318)
Unable to use mod_aws_transcribe module due to security error as sessionId is not populated
2025-08-11 09:17:32 -04:00
Hoan Luu Huu
6b8d0fe1a8 update db-helper 0.9.16 (#1316) 2025-08-08 10:43:59 -04:00
Dave Horton
66bb466297 fix bug where task.kill is not passed cs (#1315) 2025-08-07 16:01:45 -04:00
Dave Horton
1933f4ec0b Feat/freeswitch logging (#1309)
* include callSid on INVITEs to freeswitch

* remove unnecessary warning
2025-08-04 09:19:47 -04:00
Sam Machin
b1089a1ae9 pass recogniser opts in amd to stt (#1308) 2025-08-01 22:26:51 -04:00
sathish kumar pasham
93e06d887e Fix security vulnerabilities by upgrading @jambonz/realtimedb-helpers (#1305) 2025-07-30 13:24:19 -04:00
Sam Machin
b478e0ecd2 fix assert, and force methods to upper case (#1304)
* fix assert, and force methods to upper case

* add alert for updateCall errors

* lint

* handle missing method
2025-07-30 08:32:15 -04:00
Sam Machin
94d43d4b70 use tmpFiles list of parent call-session (#1301)
fixes #1299
2025-07-29 22:08:00 -04:00
Hoan Luu Huu
eb449e9169 support deepgram river (#1273)
* support deepgram river

* wip

* rebase

* fix review comment

---------

Co-authored-by: Dave Horton <daveh@beachdognet.com>
2025-07-29 13:49:43 -04:00
Hoan Luu Huu
158d9d7d25 support stt latency metrics (#1252)
* support stt latency metrics

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* enable stt latency calculator by config verb

* wip

* wip

* wip

* fix jslint

* fixed gather timeout does not have latency calculation

* update verb specification to use notifySttLatency

* move stt latency metric from call session to stt-latency calculator

* wip
2025-07-29 09:56:37 -04:00
Hoan Luu Huu
5886d1d945 allow pause/resume background listen with silence/blank (#1300)
* allow pause/resume background listen with silence/blank

* wip

* wip

* wip

* wip

* update drachtio-fsmrf version
2025-07-28 07:58:30 -04:00
Hoan Luu Huu
352106ec0c support referTo with tel: prefix (#1297)
* support referTo with tel: prefix

* fix review comment
2025-07-23 10:52:10 -04:00
Sam Machin
05a6bf51a7 corrected typos (#1295)
🧙‍♀️
2025-07-21 07:22:51 -04:00
Sam Machin
bd1c763e72 urlencode the . in a url querystring on play (#1293)
* urlencode the . in a url querystring on play

* lint

* fix for non querystring urls too

* lint
2025-07-20 14:03:21 -04:00
Sam Machin
d831a4ca7f don't fetch if whisper is an object with a single verb in it (#1290)
* don't fetch if whisper is an object with a single verb in it

* disable URL fetching of verbs on whisper

* fixes for lint

* lint
2025-07-20 13:47:31 -04:00
Sam Machin
0cc6ea987f Fix/1277 (#1278)
* wip

* forwardPAI control in verb

* update deps

* package-lock

* Update package-lock.json
2025-07-20 13:26:33 -04:00
rammohan-y
6e7521c91c Fix for issue where we should not delay from gather when JAMBONES_TRANSCRIBE_EP_DESTROY_DELAY_MS is set (#1283)
* Fix for issue https://github.com/jambonz/jambonz-feature-server/issues/1281

* Commented the reason for the change
2025-07-17 10:36:22 -04:00
Hoan Luu Huu
e0e2ade289 fixed gather cannot timeout if listenDuringPrompt is true (#1276)
* fixed gather cannot timeout if listenDuringPrompt is true

* wip

* wip

* wip
2025-07-15 22:53:22 -04:00
30 changed files with 3931 additions and 2324 deletions

6
app.js
View File

@@ -29,6 +29,12 @@ const {LifeCycleEvents, FS_UUID_SET_NAME, SystemState, FEATURE_SERVER} = require
const installSrfLocals = require('./lib/utils/install-srf-locals');
const createHttpListener = require('./lib/utils/http-listener');
const healthCheck = require('@jambonz/http-health-check');
const ProcessMonitor = require('./lib/utils/process-monitor');
const monitor = new ProcessMonitor(logger);
// Log startup
monitor.logStartup();
monitor.setupSignalHandlers();
logger.on('level-change', (lvl, _val, prevLvl, _prevVal, instance) => {
if (logger !== instance) {

View File

@@ -130,7 +130,7 @@ const OPTIONS_PING_INTERVAL = parseInt(process.env.OPTIONS_PING_INTERVAL, 10) ||
const JAMBONZ_RECORD_WS_BASE_URL = process.env.JAMBONZ_RECORD_WS_BASE_URL || process.env.JAMBONES_RECORD_WS_BASE_URL;
const JAMBONZ_RECORD_WS_USERNAME = process.env.JAMBONZ_RECORD_WS_USERNAME || process.env.JAMBONES_RECORD_WS_USERNAME;
const JAMBONZ_RECORD_WS_PASSWORD = process.env.JAMBONZ_RECORD_WS_PASSWORD || process.env.JAMBONES_RECORD_WS_PASSWORD;
const JAMBONZ_DISABLE_DIAL_PAI_HEADER = process.env.JAMBONZ_DISABLE_DIAL_PAI_HEADER || false;
const JAMBONZ_DIAL_PAI_HEADER = process.env.JAMBONZ_DIAL_PAI_HEADER || false;
const JAMBONES_DISABLE_DIRECT_P2P_CALL = process.env.JAMBONES_DISABLE_DIRECT_P2P_CALL || false;
const JAMBONES_EAGERLY_PRE_CACHE_AUDIO = parseInt(process.env.JAMBONES_EAGERLY_PRE_CACHE_AUDIO, 10) || 0;
@@ -225,11 +225,11 @@ module.exports = {
JAMBONZ_RECORD_WS_BASE_URL,
JAMBONZ_RECORD_WS_USERNAME,
JAMBONZ_RECORD_WS_PASSWORD,
JAMBONZ_DISABLE_DIAL_PAI_HEADER,
JAMBONZ_DIAL_PAI_HEADER,
JAMBONES_DISABLE_DIRECT_P2P_CALL,
JAMBONES_USE_FREESWITCH_TIMER_FD,
JAMBONES_DIAL_SBC_FOR_REGISTERED_USER,
JAMBONES_MEDIA_TIMEOUT_MS,
JAMBONES_MEDIA_HOLD_TIMEOUT_MS,
JAMBONES_TRANSCRIBE_EP_DESTROY_DELAY_MS
JAMBONES_TRANSCRIBE_EP_DESTROY_DELAY_MS,
};

View File

@@ -10,8 +10,9 @@ const {
RecordState,
AllowedSipRecVerbs,
AllowedConfirmSessionVerbs,
TtsStreamingEvents
} = require('../utils/constants');
TtsStreamingEvents,
ListenStatus
} = require('../utils/constants.json');
const moment = require('moment');
const assert = require('assert');
const sessionTracker = require('./session-tracker');
@@ -37,6 +38,7 @@ const BADPRECONDITIONS = 'preconditions not met';
const CALLER_CANCELLED_ERR_MSG = 'Response not sent due to unknown transaction';
const { NonFatalTaskError} = require('../utils/error');
const { createMediaEndpoint } = require('../utils/media-endpoint');
const SttLatencyCalculator = require('../utils/stt-latency-calculator');
const sqlRetrieveQueueEventHook = `SELECT * FROM webhooks
WHERE webhook_sid =
(
@@ -146,6 +148,30 @@ class CallSession extends Emitter {
this.conversationTurns = [];
this.on('userSaid', this._onUserSaid.bind(this));
this.on('botSaid', this._onBotSaid.bind(this));
/**
* Support STT latency
*/
this.sttLatencyCalculator = new SttLatencyCalculator({
logger,
cs: this
});
this.on('transcribe-start', () => {
this.sttLatencyCalculator.resetTime();
});
this.on('on-transcription', () => {
this.sttLatencyCalculator.onTranscriptionReceived();
});
this.on('transcribe-stop', () => {
this.sttLatencyCalculator.onTranscribeStop();
});
}
get notifySttLatencyEnabled() {
return this._notifySttLatencyEnabled || false;
}
set notifySttLatencyEnabled(enabled) {
this._notifySttLatencyEnabled = enabled;
}
/**
@@ -684,7 +710,7 @@ class CallSession extends Emitter {
}
hasGlobalSttPunctuation() {
get hasGlobalSttPunctuation() {
return this._globalSttPunctuation !== undefined;
}
@@ -716,10 +742,25 @@ class CallSession extends Emitter {
return this._fillerNoise;
}
async pauseOrResumeBackgroundListenIfRequired(action, silence = false) {
if ((action == 'pauseCallRecording' || action == 'resumeCallRecording') &&
this.backgroundTaskManager.isTaskRunning('record')) {
this.logger.debug({action, silence}, 'CallSession:pauseOrResumeBackgroundListenIfRequired');
const backgroundListenTask = this.backgroundTaskManager.getTask('record');
const status = action === 'pauseCallRecording' ? ListenStatus.Pause : ListenStatus.Resume;
backgroundListenTask.updateListen(
status,
silence
);
}
}
async notifyRecordOptions(opts) {
const {action} = opts;
const {action, silence} = opts;
this.logger.debug({opts}, 'CallSession:notifyRecordOptions');
this.pauseOrResumeBackgroundListenIfRequired(action, silence);
/* if we have not answered yet, just save the details for later */
if (!this.dlg) {
if (action === 'startCallRecording') {
@@ -976,8 +1017,6 @@ class CallSession extends Emitter {
(type === 'tts' && credential.use_for_tts) ||
(type === 'stt' && credential.use_for_stt)
)) {
this.logger.debug(
`${type}: ${credential.vendor} ${credential.label ? `, label: ${credential.label}` : ''} `);
if ('google' === vendor) {
if (type === 'tts' && !credential.tts_tested_ok ||
type === 'stt' && !credential.stt_tested_ok) {
@@ -1105,6 +1144,13 @@ class CallSession extends Emitter {
options: credential.options
};
}
else if ('resemble' === vendor) {
return {
api_key: credential.api_key,
resemble_tts_use_tls: credential.resemble_tts_use_tls,
resemble_tts_uri: credential.resemble_tts_uri,
};
}
else if ('inworld' === vendor) {
return {
api_key: credential.api_key,
@@ -1119,6 +1165,12 @@ class CallSession extends Emitter {
service_version: credential.service_version
};
}
else if ('deepgramriver' === vendor) {
return {
speech_credential_sid: credential.speech_credential_sid,
api_key: credential.api_key,
};
}
else if ('voxist' === vendor) {
return {
speech_credential_sid: credential.speech_credential_sid,
@@ -1661,8 +1713,8 @@ Duration=${duration} `
async _lccWhisper(opts, callSid) {
const {whisper} = opts;
let tasks;
const b3 = this.b3;
const httpHeaders = b3 && {b3};
//const b3 = this.b3;
//const httpHeaders = b3 && {b3};
// this whole thing requires us to be in a Dial verb
const task = this.currentTask;
@@ -1671,12 +1723,15 @@ Duration=${duration} `
}
// allow user to provide a url object, a url string, an array of tasks, or a single task
if (typeof whisper === 'string' || (typeof whisper === 'object' && whisper.url)) {
// retrieve a url
const json = await this.requestor(opts.call_hook, this.callInfo.toJSON(), httpHeaders);
tasks = normalizeJambones(this.logger, json).map((tdata) => makeTask(this.logger, tdata));
}
else if (Array.isArray(whisper)) {
// Disable passing a URL as not functional Sam Machin - 17/07/2025
//if (typeof whisper === 'string' || (typeof whisper === 'object' && whisper.url && !whisper.verb)) {
// // retrieve a url
// const json = await this.requestor(opts.call_hook, this.callInfo.toJSON(), httpHeaders);
// tasks = normalizeJambones(this.logger, json).map((tdata) => makeTask(this.logger, tdata));
//}
//else if (Array.isArray(whisper)) {
if (Array.isArray(whisper)) {
// an inline array of tasks
tasks = normalizeJambones(this.logger, whisper).map((tdata) => makeTask(this.logger, tdata));
}
@@ -1913,6 +1968,17 @@ Duration=${duration} `
}
} catch (err) {
this.logger.info({err, opts, callSid}, 'CallSession:updateCall - error updating call');
const {writeAlerts} = this.srf.locals;
try {
writeAlerts({
alert_type: 'error-updating-call',
account_sid: this.accountSid,
message: err.message,
target_sid: callSid
});
} catch (err) {
this.logger.error({err}, 'Error writing error-updating-call alert');
}
}
}
@@ -2317,6 +2383,7 @@ Duration=${duration} `
const ep = await this._createMediaEndpoint({
headers: {
'X-Jambones-Call-ID': this.callId,
'X-Call-Sid': this.callSid,
},
remoteSdp: this.req.body
});
@@ -2461,6 +2528,8 @@ Duration=${duration} `
this.clearOrRestoreActionHookDelayProcessor().catch((err) => {});
this.ttsStreamingBuffer?.stop();
this.sttLatencyCalculator?.stop();
}
/**
@@ -3009,6 +3078,11 @@ Duration=${duration} `
const task = this.currentTask;
if (task && TaskName.Say === task.name) {
task.notifyTtsStreamIsEmpty();
} else if (
// If Gather nested say task is streaming
task && TaskName.Gather === task.name && task.sayTask && task.sayTask.isStreamingTts) {
const sayTask = task.sayTask;
sayTask.notifyTtsStreamIsEmpty();
}
}
@@ -3103,6 +3177,20 @@ Duration=${duration} `
return `assistant: ${t.text}`;
}).join('\n');
}
startSttLatencyVad() {
if (this.notifySttLatencyEnabled) {
this.sttLatencyCalculator.start();
}
}
stopSttLatencyVad() {
this.sttLatencyCalculator.stop();
}
calculateSttLatency() {
return this.sttLatencyCalculator.calculateLatency();
}
}
module.exports = CallSession;
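
A minimal sketch of how the new STT latency plumbing hangs together, assuming an active CallSession `cs` with a connected endpoint (event and method names are taken from the changes above; the sequence shown is illustrative):

// normally set by a config verb with notifySttLatency: true
cs.notifySttLatencyEnabled = true;
cs.startSttLatencyVad();                  // begins VAD-based talkspurt tracking on the endpoint
cs.emit('transcribe-start');              // resets the calculator's clock when transcription begins
cs.emit('on-transcription');              // stamps the moment a transcript arrives
cs.emit('transcribe-stop');               // stamps the end of transcription
const metrics = cs.calculateSttLatency(); // => { stt_latency_ms, talkspurts, ... } or null if not running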

View File

@@ -8,7 +8,8 @@ const CallSession = require('./call-session');
*/
class ConfirmCallSession extends CallSession {
constructor({logger, application, dlg, ep, tasks, callInfo, accountInfo, memberId, confName, rootSpan, req}) {
// eslint-disable-next-line max-len
constructor({logger, application, dlg, ep, tasks, callInfo, accountInfo, memberId, confName, rootSpan, req, tmpFiles}) {
super({
logger,
application,
@@ -24,6 +25,7 @@ class ConfirmCallSession extends CallSession {
this.dlg = dlg;
this.ep = ep;
this.req = req;
this.tmpFiles = tmpFiles;
}
/**

View File

@@ -673,7 +673,8 @@ class Conference extends Task {
confName: this.confName,
tasks,
rootSpan: cs.rootSpan,
req: cs.req
req: cs.req,
tmpFiles: cs.tmpFiles,
});
await this._playSession.exec();
this._playSession = null;

View File

@@ -24,6 +24,9 @@ class TaskConfig extends Task {
if ('notifyEvents' in this.data) {
this.notifyEvents = !!this.data.notifyEvents;
}
if (this.hasNotifySttLatency) {
this.notifySttLatency = !!this.data.notifySttLatency;
}
if (this.bargeIn.enable) {
this.gatherOpts = {
@@ -83,6 +86,7 @@ class TaskConfig extends Task {
get hasVad() { return Object.keys(this.vad).length; }
get hasFillerNoise() { return Object.keys(this.fillerNoise).length; }
get hasReferHook() { return Object.keys(this.data).includes('referHook'); }
get hasNotifySttLatency() { return Object.keys(this.data).includes('notifySttLatency'); }
get hasTtsStream() { return Object.keys(this.ttsStream).length; }
get summary() {
@@ -112,6 +116,8 @@ class TaskConfig extends Task {
if (this.hasFillerNoise) phrase.push(`fillerNoise ${this.fillerNoise.enable ? 'on' : 'off'}`);
if (this.data.amd) phrase.push('enable amd');
if (this.notifyEvents) phrase.push(`event notification ${this.notifyEvents ? 'on' : 'off'}`);
if (this.hasNotifySttLatency) phrase.push(
`notifySttLatency ${this.notifySttLatency ? 'on' : 'off'}`);
if (this.onHoldMusic) phrase.push(`onHoldMusic: ${this.onHoldMusic}`);
if ('boostAudioSignal' in this.data) phrase.push(`setGain ${this.data.boostAudioSignal}`);
if (this.hasReferHook) phrase.push('set referHook');
@@ -130,6 +136,11 @@ class TaskConfig extends Task {
cs.notifyEvents = !!this.data.notifyEvents;
}
if (this.hasNotifySttLatency) {
this.logger.debug(`turning notifySttLatency ${this.notifySttLatency ? 'on' : 'off'}`);
cs.notifySttLatencyEnabled = this.notifySttLatency;
}
if (this.onHoldMusic) {
cs.onHoldMusic = this.onHoldMusic;
}
@@ -318,7 +329,10 @@ class TaskConfig extends Task {
voiceMs: this.vad.voiceMs || 250,
silenceMs: this.vad.silenceMs || 150,
strategy: this.vad.strategy || 'one-shot',
mode: (this.vad.mode !== undefined && this.vad.mode !== null) ? this.vad.mode : 2
mode: (this.vad.mode !== undefined && this.vad.mode !== null) ? this.vad.mode : 2,
vendor: this.vad.vendor || 'silero',
threshold: this.vad.threshold || 0.5,
speechPadMs: this.vad.speechPadMs || 30,
};
}
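
For reference, a minimal config verb payload that flips the new flag on (only notifySttLatency is shown; the property name comes from hasNotifySttLatency above, everything else about the verb is unchanged):

const configVerb = {
  verb: 'config',
  notifySttLatency: true   // sets cs.notifySttLatencyEnabled, enabling the latency VAD in STT tasks
};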

View File

@@ -19,7 +19,7 @@ const parseDecibels = require('../utils/parse-decibels');
const debug = require('debug')('jambonz:feature-server');
const {parseUri} = require('drachtio-srf');
const {ANCHOR_MEDIA_ALWAYS,
JAMBONZ_DISABLE_DIAL_PAI_HEADER,
JAMBONZ_DIAL_PAI_HEADER,
JAMBONES_DIAL_SBC_FOR_REGISTERED_USER} = require('../config');
const { isOnhold, isOpusFirst } = require('../utils/sdp-utils');
const { normalizeJambones } = require('@jambonz/verb-specifications');
@@ -109,6 +109,7 @@ class TaskDial extends Task {
this.tag = this.data.tag;
this.boostAudioSignal = this.data.boostAudioSignal;
this._mediaPath = MediaPath.FullMedia;
this.forwardPAI = this.data.forwardPAI;
if (this.dtmfHook) {
const {parentDtmfCollector, childDtmfCollector} = parseDtmfOptions(logger, this.data.dtmfCapture || {});
@@ -548,13 +549,14 @@ class TaskDial extends Task {
let sbcAddress = this.proxy || getSBC();
const teamsInfo = {};
let fqdn;
const forwardPAI = this.forwardPAI ?? JAMBONZ_DIAL_PAI_HEADER; // dial verb overrides env var
this.logger.debug(forwardPAI, 'forwardPAI value');
if (!sbcAddress) throw new Error('no SBC found for outbound call');
this.headers = {
'X-Account-Sid': cs.accountSid,
...(req && req.has('X-CID') && {'X-CID': req.get('X-CID')}),
...(direction === 'outbound' && callInfo.sbcCallid && {'X-CID': callInfo.sbcCallid}),
...(!JAMBONZ_DISABLE_DIAL_PAI_HEADER && req && {
...(req && forwardPAI && {
...(req.has('P-Asserted-Identity') && {'P-Asserted-Identity': req.get('P-Asserted-Identity')}),
...(req.has('Privacy') && {'Privacy': req.get('Privacy')}),
}),
@@ -995,7 +997,7 @@ class TaskDial extends Task {
this.epOther = null;
this._mediaPath = releaseEntirely ? MediaPath.NoMedia : MediaPath.PartialMedia;
this.logger.info(
`Dial:_releaseMedia - successfully released media from freewitch, media path is now ${this._mediaPath}`);
`Dial:_releaseMedia - successfully released media from freeswitch, media path is now ${this._mediaPath}`);
} catch (err) {
this.logger.info({err}, 'Dial:_releaseMedia error');
}
@@ -1004,7 +1006,7 @@ class TaskDial extends Task {
async reAnchorMedia(cs, sd) {
if (cs.ep && sd.ep) return;
this.logger.info('Dial:reAnchorMedia - re-anchoring media to freewitch');
this.logger.info('Dial:reAnchorMedia - re-anchoring media to freeswitch');
await Promise.all([sd.reAnchorMedia(this._mediaPath), cs.reAnchorMedia(this._mediaPath)]);
this.epOther = cs.ep;
@@ -1012,7 +1014,7 @@ class TaskDial extends Task {
this._mediaPath = MediaPath.FullMedia;
this.logger.info(
`Dial:_releaseMedia - successfully re-anchored media to freewitch, media path is now ${this._mediaPath}`);
`Dial:_releaseMedia - successfully re-anchored media to freeswitch, media path is now ${this._mediaPath}`);
}
// Handle RE-INVITE hold from caller leg.
@@ -1096,7 +1098,8 @@ class TaskDial extends Task {
accountInfo: this.cs.accountInfo,
tasks,
rootSpan: this.cs.rootSpan,
req: this.cs.req
req: this.cs.req,
tmpFiles: this.cs.tmpFiles,
});
await this._onHoldSession.exec();
this._onHoldSession = null;
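
A hedged sketch of the new P-Asserted-Identity precedence: the verb-level forwardPAI (read from this.data.forwardPAI) wins, and JAMBONZ_DIAL_PAI_HEADER is only the fallback. The dial payload below is illustrative:

const dialVerb = {
  verb: 'dial',
  forwardPAI: false,   // explicit false suppresses PAI/Privacy forwarding even if the env var is set
  target: [{ type: 'phone', number: '15083084809' }]
};
// inside the task: const forwardPAI = this.forwardPAI ?? JAMBONZ_DIAL_PAI_HEADER;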

View File

@@ -370,7 +370,8 @@ class TaskEnqueue extends Task {
accountInfo: cs.accountInfo,
tasks: tasksToRun,
rootSpan: cs.rootSpan,
req: cs.req
req: cs.req,
tmpFiles: cs.tmpFiles,
});
await this._playSession.exec();
this._playSession = null;

View File

@@ -11,6 +11,7 @@ const {
NvidiaTranscriptionEvents,
JambonzTranscriptionEvents,
AssemblyAiTranscriptionEvents,
DeepgramRiverTranscriptionEvents,
VoxistTranscriptionEvents,
CartesiaTranscriptionEvents,
OpenAITranscriptionEvents,
@@ -267,11 +268,16 @@ class TaskGather extends SttTask {
.catch((err) => {
process();
});
this.sayTask.on('playDone', (err) => {
span.end();
if (err) this.logger.error({err}, 'Gather:exec Error playing tts');
if (this.sayTask.isStreamingTts && !this.sayTask.closeOnStreamEmpty) {
// if streaming tts, we do not wait for it to complete if it is not closing the stream automatically
process();
});
} else {
this.sayTask.on('playDone', (err) => {
span.end();
if (err) this.logger.error({err}, 'Gather:exec Error playing tts');
process();
});
}
}
else if (this.playTask) {
const {span, ctx} = this.startChildSpan(`nested:${this.playTask.summary}`);
@@ -346,6 +352,7 @@ class TaskGather extends SttTask {
this.sayTask?.span.end();
this._stopVad();
this._resolve('killed');
cs.stopSttLatencyVad();
}
updateTaskInProgress(opts) {
@@ -361,6 +368,9 @@ class TaskGather extends SttTask {
_onDtmf(cs, ep, evt) {
this.logger.debug(evt, 'TaskGather:_onDtmf');
if (!this._timeoutTimer && this.timeout > 0) {
this._startTimer();
}
clearTimeout(this.interDigitTimer);
let resolved = false;
if (this.dtmfBargein) {
@@ -385,12 +395,7 @@ class TaskGather extends SttTask {
if (this.digitBuffer.length === 0 && this.needsStt) {
// DTMF is higher priority than STT.
this.removeCustomEventListeners();
ep.stopTranscription({
vendor: this.vendor,
bugname: this.bugname,
})
.catch((err) => this.logger.error({err},
` Received DTMF, Error stopping transcription for vendor ${this.vendor}`));
this._stopTranscribing(ep);
}
this.digitBuffer += evt.dtmf;
const len = this.digitBuffer.length;
@@ -463,6 +468,16 @@ class TaskGather extends SttTask {
this._onVendorConnectFailure.bind(this, cs, ep));
break;
case 'deepgramriver':
this.bugname = `${this.bugname_prefix}deepgramriver_transcribe`;
this.addCustomEventListener(
ep, DeepgramRiverTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
this.addCustomEventListener(
ep, DeepgramRiverTranscriptionEvents.Connect, this._onVendorConnect.bind(this, cs, ep));
this.addCustomEventListener(ep, DeepgramRiverTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep));
break;
case 'soniox':
this.bugname = `${this.bugname_prefix}soniox_transcribe`;
this.addCustomEventListener(
@@ -677,10 +692,14 @@ class TaskGather extends SttTask {
target_sid: this.cs.callSid
});
}).catch((err) => this.logger.info({err}, 'Error generating alert for tts failure'));
// Some vendors use a single connection, so we cannot use the onConnect event to track transcription start
this.cs.emit('transcribe-start');
}
_startTimer() {
if (0 === this.timeout) return;
this.logger.debug(`Starting timeoutTimer of ${this.timeout}ms`);
this._clearTimer();
this._timeoutTimer = setTimeout(() => {
if (this.isContinuousAsr) this._startAsrTimer();
@@ -860,6 +879,10 @@ class TaskGather extends SttTask {
if (finished === 'true') return;
if (this.vendor === 'ibm' && evt?.state === 'listening') return;
// emit an event to the call session to track the time transcription is received
cs.emit('on-transcription');
if (this.vendor === 'deepgram' && evt.type === 'UtteranceEnd') {
/* we will only get this when we have set utterance_end_ms */
if (this._bufferedTranscripts.length === 0) {
@@ -943,6 +966,12 @@ class TaskGather extends SttTask {
}
}
// received a final transcript, calculate the stt latency for this transcript
const sttLatency = this.cs.calculateSttLatency();
if (!emptyTranscript && sttLatency) {
this.stt_latency_ms += `${sttLatency.stt_latency_ms},`;
}
if (this.isContinuousAsr) {
/* append the transcript and start listening again for asrTimeout */
const t = evt.alternatives[0].transcript;
@@ -1094,12 +1123,7 @@ class TaskGather extends SttTask {
async _startFallback(cs, ep, evt) {
if (this.canFallback) {
ep.stopTranscription({
vendor: this.vendor,
bugname: this.bugname,
gracefulShutdown: false
})
.catch((err) => this.logger.error({err}, `Error stopping transcription for primary vendor ${this.vendor}`));
this._stopTranscribing(ep);
try {
this.logger.debug('gather:_startFallback');
this.notifyError({ msg: 'ASR error',
@@ -1228,17 +1252,26 @@ class TaskGather extends SttTask {
}
}
async _stopTranscribing(ep) {
// Fix for https://github.com/jambonz/jambonz-feature-server/issues/1281
// We should immediately call stop transcription from gather
// so that next gather can start transcription immediately
ep.stopTranscription({
vendor: this.vendor,
bugname: this.bugname,
gracefulShutdown: false
})
.catch((err) => {
if (this.resolved) return;
this.logger.error({err}, 'Error stopping transcription');
});
this.cs.emit('transcribe-stop');
}
async _resolve(reason, evt) {
this.logger.info({evt}, `TaskGather:resolve with reason ${reason}`);
if (this.needsStt && this.ep && this.ep.connected) {
this.ep.stopTranscription({
vendor: this.vendor,
bugname: this.bugname
})
.catch((err) => {
if (this.resolved) return;
this.logger.error({err}, 'Error stopping transcription');
});
this._stopTranscribing(this.ep);
}
if (this.resolved) {
this.logger.debug('TaskGather:_resolve - already resolved');
@@ -1257,11 +1290,28 @@ class TaskGather extends SttTask {
this._clearAsrTimer();
this._clearFinalAsrTimer();
let sttLatencyMetrics = {};
if (this.needsStt) {
const sttLatency = this.cs.calculateSttLatency();
if (sttLatency) {
this.stt_latency_ms = this.stt_latency_ms.endsWith(',') ?
this.stt_latency_ms.slice(0, -1) : this.stt_latency_ms;
sttLatencyMetrics = {
'stt.latency_ms': this.stt_latency_ms,
'stt.talkspurts': JSON.stringify(sttLatency.talkspurts),
'stt.start_time': sttLatency.stt_start_time,
'stt.stop_time': sttLatency.stt_stop_time,
'stt.usage': sttLatency.stt_usage,
};
}
}
this.span.setAttributes({
channel: 1,
'stt.label': this.label || 'None',
'stt.resolve': reason,
'stt.result': JSON.stringify(evt)
'stt.result': JSON.stringify(evt),
...sttLatencyMetrics
});
if (this.callSession && this.callSession.callGone) {
@@ -1289,6 +1339,9 @@ class TaskGather extends SttTask {
let returnedVerbs = false;
try {
const latencies = Object.fromEntries(
Object.entries(sttLatencyMetrics).map(([key, value]) => [key.replace('stt.', 'stt_'), value])
);
if (reason.startsWith('dtmf')) {
if (this.parentTask) this.parentTask.emit('dtmf', evt);
else {
@@ -1302,7 +1355,7 @@ class TaskGather extends SttTask {
else {
this.emit('transcription', evt);
this.logger.debug('TaskGather:_resolve - invoking performAction');
returnedVerbs = await this.performAction({speech: evt, reason: 'speechDetected'});
returnedVerbs = await this.performAction({speech: evt, reason: 'speechDetected', ...latencies});
this.logger.debug({returnedVerbs}, 'TaskGather:_resolve - back from performAction');
}
}
@@ -1310,20 +1363,20 @@ class TaskGather extends SttTask {
if (this.parentTask) this.parentTask.emit('timeout', evt);
else {
this.emit('timeout', evt);
returnedVerbs = await this.performAction({reason: 'timeout'});
returnedVerbs = await this.performAction({reason: 'timeout', ...latencies});
}
}
else if (reason.startsWith('stt-error')) {
if (this.parentTask) this.parentTask.emit('stt-error', evt);
else {
this.emit('stt-error', evt);
returnedVerbs = await this.performAction({reason: 'error', details: evt.error});
returnedVerbs = await this.performAction({reason: 'error', details: evt.error, ...latencies});
}
} else if (reason.startsWith('stt-low-confidence')) {
if (this.parentTask) this.parentTask.emit('stt-low-confidence', evt);
else {
this.emit('stt-low-confidence', evt);
returnedVerbs = await this.performAction({speech:evt, reason: 'stt-low-confidence'});
returnedVerbs = await this.performAction({speech:evt, reason: 'stt-low-confidence', ...latencies});
}
}
} catch (err) { /*already logged error*/ }
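
When latency metrics were collected, the stt.* span attributes are flattened to stt_* keys and spread into the action-hook payload; a sketch of the extra fields a webhook may now receive (values are illustrative):

const latencies = {
  stt_latency_ms: '420',                          // comma-separated when several final transcripts arrived
  stt_talkspurts: '[[1725000001,1725000003]]',    // unix-second [start, stop] pairs
  stt_start_time: 1725000000,
  stt_stop_time: 1725000004,
  stt_usage: '4.00'                               // seconds of transcription
};
returnedVerbs = await this.performAction({speech: evt, reason: 'speechDetected', ...latencies});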

View File

@@ -1,5 +1,5 @@
const Task = require('./task');
const {TaskName, TaskPreconditions, ListenEvents, ListenStatus} = require('../utils/constants');
const {TaskName, TaskPreconditions, ListenEvents, ListenStatus} = require('../utils/constants.json');
const makeTask = require('./make_task');
const moment = require('moment');
const MAX_PLAY_AUDIO_QUEUE_SIZE = 10;
@@ -72,7 +72,7 @@ class TaskListen extends Task {
} catch (err) {
this.logger.info(err, `TaskListen:exec - error ${this.url}`);
}
if (this.transcribeTask) this.transcribeTask.kill();
if (this.transcribeTask) this.transcribeTask.kill(cs);
this._removeListeners(ep);
}
@@ -103,9 +103,12 @@ class TaskListen extends Task {
this.notifyTaskDone();
}
async updateListen(status) {
async updateListen(status, silence = false) {
if (!this.killed && this.ep && this.ep.connected) {
const args = this._bugname ? [this._bugname] : [];
const args = [
...(this._bugname ? [this._bugname] : []),
...(status === ListenStatus.Pause ? ([silence]) : []),
];
this.logger.info(`TaskListen:updateListen status ${status}`);
switch (status) {
case ListenStatus.Pause:

View File

@@ -218,7 +218,7 @@ class TaskLlmUltravox_S2S extends Task {
async _onServerEvent(_ep, evt) {
let endConversation = false;
const type = evt.type;
this.logger.debug({evt}, 'TaskLlmUltravox_S2S:_onServerEvent');
//this.logger.debug({evt}, 'TaskLlmUltravox_S2S:_onServerEvent');
/* server errors of some sort */
if (type === 'error') {

View File

@@ -6,7 +6,9 @@ class TaskPlay extends Task {
super(logger, opts);
this.preconditions = TaskPreconditions.Endpoint;
this.url = this.data.url;
this.url = this.data.url.includes('?')
? this.data.url.split('?')[0] + '?' + this.data.url.split('?')[1].replaceAll('.', '%2E')
: this.data.url;
this.seekOffset = this.data.seekOffset || -1;
this.timeoutSecs = this.data.timeoutSecs || -1;
this.loop = this.data.loop || 1;
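
A quick sketch of what the new querystring handling in play does: only dots after the first '?' are escaped, the path portion is left alone (the sample URL is hypothetical):

const url = 'https://example.com/prompts/welcome.mp3?file=greeting.v2.wav';
const encoded = url.includes('?')
  ? url.split('?')[0] + '?' + url.split('?')[1].replaceAll('.', '%2E')
  : url;
// => 'https://example.com/prompts/welcome.mp3?file=greeting%2Ev2%2Ewav'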

View File

@@ -5,6 +5,28 @@ const pollySSMLSplit = require('polly-ssml-split');
const { SpeechCredentialError } = require('../utils/error');
const { sleepFor } = require('../utils/helpers');
/**
* Discard non-matching responses:
* (1) I sent a playback id but get a response with a different playback id
* (2) I sent a playback id but get a response with no playback id
* (3) I did not send a playback id but get a response with a playback id
* (4) I sent a cache file but get a response with a different cache file
*/
const isMatchingEvent = (logger, filename, playbackId, evt) => {
if (!!playbackId && !!evt.variable_tts_playback_id && evt.variable_tts_playback_id === playbackId) {
//logger.debug({filename, playbackId, evt}, 'Say:isMatchingEvent - playbackId matched');
return true;
}
if (!!filename && !!evt.file && evt.file === filename) {
//logger.debug({filename, playbackId, evt}, 'Say:isMatchingEvent - filename matched');
return true;
}
logger.info({filename, playbackId, evt}, 'Say:isMatchingEvent - no match');
return false;
};
const breakLengthyTextIfNeeded = (logger, text) => {
// As the text can be used for tts streaming, we need to break lengthy text into smaller chunks
// HIGH_WATER_BUFFER_SIZE defined in tts-streaming-buffer.js
@@ -259,40 +281,32 @@ class TaskSay extends TtsTask {
while (!this.killed && (this.loop === 'forever' || this.loop--) && ep?.connected) {
let segment = 0;
while (!this.killed && segment < filepath.length) {
const filename = filepath[segment];
if (cs.isInConference) {
const {memberId, confName, confUuid} = cs;
await this.playToConfMember(ep, memberId, confName, confUuid, filepath[segment]);
await this.playToConfMember(ep, memberId, confName, confUuid, filename);
}
else {
let playbackId;
const isStreaming = filepath[segment].startsWith('say:{');
const isStreaming = filename.startsWith('say:{');
if (isStreaming) {
const arr = /^say:\{.*\}\s*(.*)$/.exec(filepath[segment]);
if (arr) this.logger.debug(`Say:exec sending streaming tts request: ${arr[1].substring(0, 64)}..`);
}
else {
this.logger.debug(`Say:exec sending ${filepath[segment].substring(0, 64)}`);
const arr = /^say:\{.*\}\s*(.*)$/.exec(filename);
if (arr) this.logger.debug(`Say:exec sending streaming tts request ${arr[1].substring(0, 64)}..`);
else this.logger.debug(`Say:exec sending ${filename.substring(0, 64)}`);
}
const onPlaybackStop = (evt) => {
try {
this.logger.debug({evt},
`Say got playback-stop ${evt.variable_tts_playback_id ? evt.variable_tts_playback_id : ''}`);
/**
* If we got a playback id on both the start and stop events, and they don't match,
* then we must have received a playback-stop event for an earlier play request.
*/
const unmatchedResponse = (!!playbackId && !!evt.variable_tts_playback_id) &&
evt.variable_tts_playback_id !== playbackId;
if (unmatchedResponse) {
this.logger.info({currentPlaybackId: playbackId, stopPPlaybackId: evt.variable_tts_playback_id},
const playbackId = this.getPlaybackId(segment);
const isMatch = isMatchingEvent(this.logger, filename, playbackId, evt);
if (!isMatch) {
this.logger.info({currentPlaybackId: playbackId, stopPlaybackId: evt.variable_tts_playback_id},
'Say:exec discarding playback-stop for earlier play');
ep.once('playback-stop', this._boundOnPlaybackStop);
return;
}
this.logger.debug({evt},
`Say got playback-stop ${evt.variable_tts_playback_id ? evt.variable_tts_playback_id : ''}`);
this.notifyStatus({event: 'stop-playback'});
this.notifiedPlayBackStop = true;
const tts_error = evt.variable_tts_error;
@@ -331,6 +345,7 @@ class TaskSay extends TtsTask {
!this.disableTtsCache
) {
const text = parseTextFromSayString(this.text[segment]);
this.logger.debug({text, cacheFile: evt.variable_tts_cache_filename}, 'Say:exec cache tts');
addFileToCache(evt.variable_tts_cache_filename, {
account_sid,
vendor,
@@ -358,9 +373,17 @@ class TaskSay extends TtsTask {
};
this._boundOnPlaybackStop = onPlaybackStop.bind(this);
ep.once('playback-start', (evt) => {
const onPlaybackStart = (evt) => {
try {
playbackId = evt.variable_tts_playback_id;
const playbackId = this.getPlaybackId(segment);
const isMatch = isMatchingEvent(this.logger, filename, playbackId, evt);
if (!isMatch) {
this.logger.info({currentPlaybackId: playbackId, startPlaybackId: evt.variable_tts_playback_id},
'Say:exec playback-start - unmatched playback_id');
ep.once('playback-start', this._boundOnPlaybackStart);
return;
}
ep.once('playback-stop', this._boundOnPlaybackStop);
this.logger.debug({evt},
`Say got playback-start ${evt.variable_tts_playback_id ? evt.variable_tts_playback_id : ''}`);
if (this.otelSpan) {
@@ -374,15 +397,17 @@ class TaskSay extends TtsTask {
} catch (err) {
this.logger.info({err}, 'Error handling playback-start event');
}
});
ep.once('playback-stop', this._boundOnPlaybackStop);
};
this._boundOnPlaybackStart = onPlaybackStart.bind(this);
ep.once('playback-start', this._boundOnPlaybackStart);
// wait for playback-stop event received to confirm if the playback is successful
this._playPromise = new Promise((resolve, reject) => {
this._playResolve = resolve;
this._playReject = reject;
});
const r = await ep.play(filepath[segment]);
const r = await ep.play(filename);
this.logger.debug({r}, 'Say:exec play result');
try {
// wait for playback-stop event received to confirm if the playback is successful
@@ -400,12 +425,12 @@ class TaskSay extends TtsTask {
this._playResolve = null;
this._playReject = null;
}
if (filepath[segment].startsWith('say:{')) {
const arr = /^say:\{.*\}\s*(.*)$/.exec(filepath[segment]);
if (filename.startsWith('say:{')) {
const arr = /^say:\{.*\}\s*(.*)$/.exec(filename);
if (arr) this.logger.debug(`Say:exec complete playing streaming tts request: ${arr[1].substring(0, 64)}..`);
} else {
// This log will print speech credentials in say command for tts stream mode
this.logger.debug(`Say:exec completed play file ${filepath[segment]}`);
this.logger.debug(`Say:exec completed play file ${filename}`);
}
}
segment++;
@@ -452,6 +477,7 @@ class TaskSay extends TtsTask {
.replace('playht_', 'playht.')
.replace('cartesia_', 'cartesia.')
.replace('rimelabs_', 'rimelabs.')
.replace('resemble_', 'resemble.')
.replace('inworld_', 'inworld.')
.replace('verbio_', 'verbio.')
.replace('elevenlabs_', 'elevenlabs.');
@@ -517,6 +543,9 @@ const spanMapping = {
'rimelabs.name_lookup_time_ms': 'name_lookup_ms',
'rimelabs.connect_time_ms': 'connect_ms',
'rimelabs.final_response_time_ms': 'final_response_ms',
// Resemble
'resemble.connect_time_ms': 'connect_ms',
'resemble.final_response_time_ms': 'final_response_ms',
// inworld
'inworld.name_lookup_time_ms': 'name_lookup_ms',
'inworld.connect_time_ms': 'connect_ms',
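
A short sketch of the matching rules in isMatchingEvent above: an event is kept when either its playback id or its cache filename matches what this play sent (sample values are hypothetical):

isMatchingEvent(logger, null, 'pb-42', {variable_tts_playback_id: 'pb-42'});  // => true, same playback id
isMatchingEvent(logger, null, 'pb-42', {variable_tts_playback_id: 'pb-41'});  // => false, stop event from an earlier play
isMatchingEvent(logger, '/tmp/tts-cache/abc.mp3', null, {file: '/tmp/tts-cache/abc.mp3'});  // => true, cached file match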

View File

@@ -111,7 +111,12 @@ class TaskSipRefer extends Task {
/* get IP address of the SBC to use as hostname if needed */
const {host} = parseUri(dlg.remote.uri);
if (!referTo.startsWith('<') && !referTo.startsWith('sip') && !referTo.startsWith('"')) {
if (
!referTo.startsWith('<') &&
!referTo.startsWith('sip:') &&
!referTo.startsWith('"') &&
!referTo.startsWith('tel:')
) {
/* they may have only provided a phone number/user */
referTo = `sip:${referTo}@${host}`;
}
@@ -124,7 +129,12 @@ class TaskSipRefer extends Task {
if (!referredByDisplayName) {
referredByDisplayName = cs.req?.callingName;
}
if (!referredBy.startsWith('<') && !referredBy.startsWith('sip') && !referredBy.startsWith('"')) {
if (
!referredBy.startsWith('<') &&
!referredBy.startsWith('sip:') &&
!referredBy.startsWith('"') &&
!referredBy.startsWith('tel:')
) {
/* they may have only provided a phone number/user */
referredBy = `${referredByDisplayName ? `"${referredByDisplayName}"` : ''}<sip:${referredBy}@${host}>`;
}

View File

@@ -4,6 +4,7 @@ const crypto = require('crypto');
const { TaskPreconditions, CobaltTranscriptionEvents } = require('../utils/constants');
const { SpeechCredentialError } = require('../utils/error');
const {JAMBONES_AWS_TRANSCRIBE_USE_GRPC} = require('../config');
const {TaskName} = require('../utils/constants.json');
/**
* "Please insert turns here: {{turns:4}}"
@@ -84,6 +85,9 @@ class SttTask extends Task {
/*bug name prefix */
this.bugname_prefix = '';
// stt latency calculator
this.stt_latency_ms = '';
}
async exec(cs, {ep, ep2}) {
@@ -91,6 +95,12 @@ class SttTask extends Task {
this.ep = ep;
this.ep2 = ep2;
// start vad from stt latency calculator
if (this.name !== TaskName.Gather ||
this.name === TaskName.Gather && this.needsStt) {
cs.startSttLatencyVad();
}
// use session preferences if we don't have specific verb-level settings.
if (cs.recognizer) {
for (const k in cs.recognizer) {
@@ -400,7 +410,7 @@ class SttTask extends Task {
dgOptions.utteranceEndMs = dgOptions.utteranceEndMs || asrTimeout;
}
_onVendorConnect(_cs, _ep) {
_onVendorConnect(cs, _ep) {
this.logger.debug(`TaskGather:_on${this.vendor}Connect`);
}

View File

@@ -6,6 +6,7 @@ const {
AwsTranscriptionEvents,
AzureTranscriptionEvents,
DeepgramTranscriptionEvents,
DeepgramRiverTranscriptionEvents,
SonioxTranscriptionEvents,
CobaltTranscriptionEvents,
IbmTranscriptionEvents,
@@ -152,12 +153,15 @@ class TaskTranscribe extends SttTask {
.catch((err) => this.logger.info(err, 'Error TaskTranscribe:kill'));
}
this.cs.emit('transcribe-stop');
return stopTranscription;
}
async kill(cs) {
super.kill(cs);
const stopTranscription = this._stopTranscription();
cs.stopSttLatencyVad();
// hangup after 1 sec if we don't get a final transcription
if (stopTranscription) this._timer = setTimeout(() => this.notifyTaskDone(), 1500);
else this.notifyTaskDone();
@@ -238,6 +242,15 @@ class TaskTranscribe extends SttTask {
//if (opts.DEEPGRAM_SPEECH_UTTERANCE_END_MS) this.isContinuousAsr = true;
break;
case 'deepgramriver':
this.bugname = `${this.bugname_prefix}deepgramriver_transcribe`;
this.addCustomEventListener(ep, DeepgramRiverTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
this.addCustomEventListener(ep, DeepgramRiverTranscriptionEvents.Connect,
this._onVendorConnect.bind(this, cs, ep));
this.addCustomEventListener(ep, DeepgramRiverTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep, channel));
break;
case 'soniox':
this.bugname = `${this.bugname_prefix}soniox_transcribe`;
this.addCustomEventListener(ep, SonioxTranscriptionEvents.Transcription,
@@ -423,6 +436,9 @@ class TaskTranscribe extends SttTask {
bugname: this.bugname,
hostport: this.hostport
});
// Some vendors use a single connection, so we cannot use the onConnect event to track transcription start
this.cs.emit('transcribe-start');
}
async _onTranscription(cs, ep, channel, evt, fsEvent) {
@@ -441,6 +457,9 @@ class TaskTranscribe extends SttTask {
if (this.vendor === 'ibm' && evt?.state === 'listening') return;
// emit an event to the call session to track the time transcription is received
cs.emit('on-transcription');
if (this.vendor === 'deepgram' && evt.type === 'UtteranceEnd') {
/* we will only get this when we have set utterance_end_ms */
@@ -602,14 +621,28 @@ class TaskTranscribe extends SttTask {
}
async _resolve(channel, evt) {
let sttLatencyMetrics = {};
if (evt.is_final) {
const sttLatency = this.cs.calculateSttLatency();
if (sttLatency) {
sttLatencyMetrics = {
'stt.latency_ms': `${sttLatency.stt_latency_ms}`,
'stt.talkspurts': JSON.stringify(sttLatency.talkspurts),
'stt.start_time': sttLatency.stt_start_time,
'stt.stop_time': sttLatency.stt_stop_time,
'stt.usage': sttLatency.stt_usage,
};
}
// time to reset the stt latency
this.cs.emit('transcribe-start');
/* we've got a final transcript, so end the otel child span for this channel */
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
this.childSpan[channel - 1].span.setAttributes({
channel,
'stt.label': this.label || 'None',
'stt.resolve': 'transcript',
'stt.result': JSON.stringify(evt)
'stt.result': JSON.stringify(evt),
...sttLatencyMetrics
});
this.childSpan[channel - 1].span.end();
}
@@ -618,9 +651,13 @@ class TaskTranscribe extends SttTask {
if (this.transcriptionHook) {
const b3 = this.getTracingPropagation();
const httpHeaders = b3 && {b3};
const latencies = Object.fromEntries(
Object.entries(sttLatencyMetrics).map(([key, value]) => [key.replace('stt.', 'stt_'), value])
);
const payload = {
...this.cs.callInfo,
...httpHeaders,
...latencies,
...(evt.alternatives && {speech: evt}),
...(evt.type && {speechEvent: evt})
};

View File

@@ -3,6 +3,16 @@ const { TaskPreconditions } = require('../utils/constants');
const { SpeechCredentialError } = require('../utils/error');
const dbUtils = require('../utils/db-utils');
const extractPlaybackId = (str) => {
// Match say:{...} and capture the content inside braces
const match = str.match(/say:\{([^}]*)\}/);
if (!match) return null;
// Look for playback_id=value within the captured content
const playbackMatch = match[1].match(/playback_id=([^,]*)/);
return playbackMatch ? playbackMatch[1] : null;
};
class TtsTask extends Task {
constructor(logger, data, parentTask) {
@@ -22,6 +32,11 @@ class TtsTask extends Task {
this.disableTtsCache = this.data.disableTtsCache;
this.options = this.synthesizer.options || {};
this.instructions = this.data.instructions;
this.playbackIds = [];
}
getPlaybackId(offset) {
return this.playbackIds[offset];
}
async exec(cs) {
@@ -280,6 +295,7 @@ class TtsTask extends Task {
renderForCaching: preCache
});
if (!filePath.startsWith('say:')) {
this.playbackIds.push(null);
this.logger.debug(`Say: file ${filePath}, served from cache ${servedFromCache}`);
if (filePath) cs.trackTmpFile(filePath);
if (this.otelSpan) {
@@ -293,12 +309,24 @@ class TtsTask extends Task {
vendor,
language,
characters: text.length,
elapsedTime: rtt
elapsedTime: rtt,
servedFromCache,
'id': this.id
});
}
if (servedFromCache) {
this.notifyStatus({
event: 'synthesized-audio',
vendor,
language,
servedFromCache,
'id': this.id
});
}
}
else {
this.logger.debug('Say: a streaming tts api will be used');
this.playbackIds.push(extractPlaybackId(filePath));
this.logger.debug({playbackIds: this.playbackIds}, 'Say: a streaming tts api will be used');
const modifiedPath = filePath.replace('say:{', `say:{session-uuid=${ep.uuid},`);
return modifiedPath;
}
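
Usage sketch of the extractPlaybackId helper defined above (the sample strings are hypothetical; the key layout follows the regexes):

extractPlaybackId('say:{playback_id=3f2a,voice=en-US}Hello there');  // => '3f2a'
extractPlaybackId('say:{voice=en-US}Hello there');                   // => null, no playback_id present
extractPlaybackId('/tmp/tts-cache/abc.mp3');                         // => null, not a streaming say: uri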

View File

@@ -281,13 +281,17 @@ module.exports = (logger) => {
/* set stt options */
logger.info(`starting amd for vendor ${vendor} and language ${language}`);
const sttOpts = amd.setChannelVarsForStt({name: TaskName.Gather}, sttCredentials, language, {
vendor,
hints,
enhancedModel: true,
altLanguages: opts.recognizer?.altLanguages || [],
initialSpeechTimeoutMs: opts.resolveTimeoutMs,
});
/* if opts contains recognizer object use that config for stt, otherwise use defaults */
const rOpts = opts.recognizer ?
opts.recognizer :
{
vendor,
hints,
enhancedModel: true,
altLanguages: opts.recognizer?.altLanguages || [],
initialSpeechTimeoutMs: opts.resolveTimeoutMs,
};
const sttOpts = amd.setChannelVarsForStt({name: TaskName.Gather}, sttCredentials, language, rOpts);
await ep.set(sttOpts).catch((err) => logger.info(err, 'Error setting channel variables'));

View File

@@ -65,7 +65,7 @@ class BackgroundTaskManager extends Emitter {
this.logger.info(`stopping background task: ${type}`);
task.removeAllListeners();
task.span.end();
task.kill();
task.kill(this.cs);
// Remove task from managed List
this.tasks.delete(type);
}

View File

@@ -96,6 +96,11 @@
"ConnectFailure": "deepgram_transcribe::connect_failed",
"Connect": "deepgram_transcribe::connect"
},
"DeepgramRiverTranscriptionEvents": {
"Transcription": "deepgramriver_transcribe::transcription",
"ConnectFailure": "deepgramriver_transcribe::connect_failed",
"Connect": "deepgramriver_transcribe::connect"
},
"SonioxTranscriptionEvents": {
"Transcription": "soniox_transcribe::transcription",
"Error": "soniox_transcribe::error"
@@ -177,6 +182,9 @@
"VadDetection": {
"Detection": "vad_detect:detection"
},
"SileroVadDetection": {
"Detection": "vad_silero:detect"
},
"ListenEvents": {
"Connect": "mod_audio_fork::connect",
"ConnectFailure": "mod_audio_fork::connect_failed",

View File

@@ -81,6 +81,10 @@ const speechMapper = (cred) => {
obj.deepgram_tts_uri = o.deepgram_tts_uri;
obj.deepgram_stt_use_tls = o.deepgram_stt_use_tls;
}
else if ('deepgramriver' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
}
else if ('soniox' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
@@ -120,6 +124,12 @@ const speechMapper = (cred) => {
obj.model_id = o.model_id;
obj.options = o.options;
}
else if ('resemble' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
obj.resemble_tts_use_tls = o.resemble_tts_use_tls;
obj.resemble_tts_uri = o.resemble_tts_uri;
}
else if ('inworld' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;

View File

@@ -41,7 +41,7 @@ class HttpRequestor extends BaseRequestor {
constructor(logger, account_sid, hook, secret) {
super(logger, account_sid, hook, secret);
this.method = hook.method || 'POST';
this.method = hook.method?.toUpperCase() || 'POST';
this.authHeader = basicAuth(hook.username, hook.password);
this.backoffMs = 500;
@@ -111,7 +111,7 @@ class HttpRequestor extends BaseRequestor {
const payload = params ? snakeCaseKeys(params, ['customerData', 'sip', 'env_vars', 'args']) : null;
const url = hook.url || hook;
const method = hook.method || 'POST';
const method = hook.method?.toUpperCase() || 'POST';
let buf = '';
httpHeaders = {
...httpHeaders,
@@ -119,7 +119,7 @@ class HttpRequestor extends BaseRequestor {
};
assert.ok(url, 'HttpRequestor:request url was not provided');
assert.ok, (['GET', 'POST'].includes(method), `HttpRequestor:request method must be 'GET' or 'POST' not ${method}`);
assert.ok(['GET', 'POST'].includes(method), `HttpRequestor:request method must be 'GET' or 'POST' not ${method}`);
const startAt = process.hrtime();
/* if we have an absolute url, and it is ws then do a websocket connection */
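
The replaced line was a silent no-op: `assert.ok, (cond, msg)` parses as the comma operator, so the condition was evaluated but never asserted. A two-line illustration:

const assert = require('assert');
assert.ok, (false, 'never thrown');  // comma operator: assert.ok is merely referenced, nothing is checked
assert.ok(false, 'now thrown');      // the corrected call throws an AssertionError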

View File

@@ -401,7 +401,8 @@ class SingleDialer extends Emitter {
accountInfo: this.accountInfo,
tasks,
rootSpan: this.rootSpan,
req: this.req
req: this.req,
tmpFiles: cs.tmpFiles,
});
await cs.exec();

View File

@@ -0,0 +1,91 @@
// lib/utils/process-monitor.js
const fs = require('fs');
const path = require('path');
class ProcessMonitor {
constructor(logger) {
this.logger = logger;
this.packageInfo = this.getPackageInfo();
this.processName = this.packageInfo.name || 'unknown-app';
}
getPackageInfo() {
try {
const packagePath = path.join(process.cwd(), 'package.json');
return JSON.parse(fs.readFileSync(packagePath, 'utf8'));
} catch (e) {
return { name: 'unknown', version: 'unknown' };
}
}
logStartup(additionalInfo = {}) {
const startupInfo = {
msg: `${this.processName} started`,
app_name: this.processName,
app_version: this.packageInfo.version,
pid: process.pid,
ppid: process.ppid,
pm2_instance_id: process.env.NODE_APP_INSTANCE || 'not_pm2',
pm2_id: process.env.pm_id,
is_pm2: !!process.env.PM2,
node_version: process.version,
uptime: process.uptime(),
timestamp: new Date().toISOString(),
...additionalInfo
};
this.logger.info(startupInfo);
return startupInfo;
}
setupSignalHandlers() {
// Log when we receive signals that would cause restart
process.on('SIGINT', () => {
this.logger.info({
msg: 'SIGINT received',
app_name: this.processName,
pid: process.pid,
ppid: process.ppid,
uptime: process.uptime(),
timestamp: new Date().toISOString()
});
process.exit(0);
});
process.on('SIGTERM', () => {
this.logger.info({
msg: 'SIGTERM received',
app_name: this.processName,
pid: process.pid,
ppid: process.ppid,
uptime: process.uptime(),
timestamp: new Date().toISOString()
});
process.exit(0);
});
process.on('uncaughtException', (error) => {
this.logger.error({
msg: 'Uncaught exception - process will restart',
app_name: this.processName,
error: error.message,
stack: error.stack,
pid: process.pid,
timestamp: new Date().toISOString()
});
process.exit(1);
});
process.on('unhandledRejection', (reason, promise) => {
this.logger.error({
msg: 'Unhandled rejection',
app_name: this.processName,
reason,
pid: process.pid,
timestamp: new Date().toISOString()
});
});
}
}
module.exports = ProcessMonitor;
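
A minimal usage sketch mirroring the wiring added to app.js above (the pino logger is an assumption; any logger with info/error methods works):

const pino = require('pino');
const ProcessMonitor = require('./lib/utils/process-monitor');
const monitor = new ProcessMonitor(pino());
monitor.logStartup({deployment: 'example'});  // extra fields are merged into the startup log line
monitor.setupSignalHandlers();                // logs SIGINT/SIGTERM/uncaught errors before exiting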

View File

@@ -0,0 +1,197 @@
const { assert } = require('console');
const Emitter = require('events');
const {
VadDetection,
SileroVadDetection
} = require('../utils/constants.json');
class SttLatencyCalculator extends Emitter {
constructor({ logger, cs}) {
super();
this.logger = logger;
this.cs = cs;
this.isRunning = false;
this.isInTalkSpurt = false;
this.start_talking_time = 0;
this.talkspurts = [];
this.vendor = this.cs.vad?.vendor || 'silero';
this.stt_start_time = 0;
this.stt_stop_time = 0;
this.stt_on_transcription_time = 0;
}
set sttStartTime(time) {
this.stt_start_time = time;
}
get sttStartTime() {
return this.stt_start_time || 0;
}
set sttStopTime(time) {
this.stt_stop_time = time;
}
get sttStopTime() {
return this.stt_stop_time || 0;
}
set sttOnTranscriptionTime(time) {
this.stt_on_transcription_time = time;
}
get sttOnTranscriptionTime() {
return this.stt_on_transcription_time || 0;
}
_onVadDetected(_ep, _evt, fsEvent) {
if (fsEvent.getHeader('detected-event') === 'stop_talking') {
if (this.isInTalkSpurt) {
this.talkspurts.push({
start: this.start_talking_time,
stop: Date.now()
});
}
this.start_talking_time = 0;
this.isInTalkSpurt = false;
} else if (fsEvent.getHeader('detected-event') === 'start_talking') {
this.start_talking_time = Date.now();
this.isInTalkSpurt = true;
}
}
_startVad() {
assert(!this.isRunning, 'Latency calculator is already running');
assert(this.cs.ep, 'Callsession has no endpoint to start the latency calculator');
const ep = this.cs.ep;
if (!ep.sttLatencyVadHandler) {
ep.sttLatencyVadHandler = this._onVadDetected.bind(this, ep);
if (this.vendor === 'silero') {
ep.addCustomEventListener(SileroVadDetection.Detection, ep.sttLatencyVadHandler);
} else {
ep.addCustomEventListener(VadDetection.Detection, ep.sttLatencyVadHandler);
}
}
this.stop_talking_time = 0;
this.start_talking_time = 0;
this.vad = {
...(this.cs.vad || {}),
strategy: 'continuous',
bugname: 'stt-latency-calculator-vad',
vendor: this.vendor
};
ep.startVadDetection(this.vad);
this.isRunning = true;
}
_stopVad() {
if (this.isRunning) {
this.logger.warn('Latency calculator is still running, stopping VAD detection');
const ep = this.cs.ep;
ep.stopVadDetection(this.vad);
if (ep.sttLatencyVadHandler) {
if (this.vendor === 'silero') {
this.ep?.removeCustomEventListener(SileroVadDetection.Detection, ep.sttLatencyVadHandler);
} else {
this.ep?.removeCustomEventListener(VadDetection.Detection, ep.sttLatencyVadHandler);
}
ep.sttLatencyVadHandler = null;
}
this.isRunning = false;
this.logger.info('STT Latency Calculator stopped');
}
}
start() {
if (this.isRunning) {
this.logger.warn('Latency calculator is already running');
return;
}
if (!this.cs.ep) {
this.logger.error('Callsession has no endpoint to start the latency calculator');
return;
}
this._startVad();
this.logger.debug('STT Latency Calculator started');
}
stop() {
this._stopVad();
}
toUnixTimestamp(date) {
return Math.floor(date / 1000);
}
calculateLatency() {
if (!this.isRunning) {
this.logger.debug('Latency calculator is not running, cannot calculate latency, returning default values');
return null;
}
const stt_stop_time = this.stt_stop_time || Date.now();
if (this.isInTalkSpurt) {
this.talkspurts.push({
start: this.start_talking_time,
stop: stt_stop_time
});
this.isInTalkSpurt = false;
this.start_talking_time = 0;
}
const stt_on_transcription_time = this.stt_on_transcription_time || stt_stop_time;
const start_talking_time = this.talkspurts[0]?.start;
let lastIdx = this.talkspurts.length - 1;
lastIdx = lastIdx < 0 ? 0 : lastIdx;
const stop_talking_time = this.talkspurts[lastIdx]?.stop || stt_stop_time;
return {
stt_start_time: this.toUnixTimestamp(this.stt_start_time),
stt_stop_time: this.toUnixTimestamp(stt_stop_time),
start_talking_time: this.toUnixTimestamp(start_talking_time),
stop_talking_time: this.toUnixTimestamp(stop_talking_time),
stt_latency: parseFloat((Math.abs(stt_on_transcription_time - stop_talking_time)) / 1000).toFixed(2),
stt_latency_ms: Math.abs(stt_on_transcription_time - stop_talking_time),
stt_usage: parseFloat((stt_stop_time - this.stt_start_time) / 1000).toFixed(2),
talkspurts: this.talkspurts.map((ts) =>
([this.toUnixTimestamp(ts.start || 0), this.toUnixTimestamp(ts.stop || 0)]))
};
}
resetTime() {
if (!this.isRunning) {
return;
}
this.stt_start_time = Date.now();
this.stt_stop_time = 0;
this.stt_on_transcription_time = 0;
this.clearTalkspurts();
this.logger.info('STT Latency Calculator reset');
}
onTranscriptionReceived() {
if (!this.isRunning) {
return;
}
this.stt_on_transcription_time = Date.now();
this.logger.debug(`CallSession:on-transcription set to ${this.stt_on_transcription_time}`);
}
onTranscribeStop() {
if (!this.isRunning) {
return;
}
this.stt_stop_time = Date.now();
this.logger.debug(`CallSession:transcribe-stop set to ${this.stt_stop_time}`);
}
clearTalkspurts() {
this.talkspurts = [];
if (!this.isInTalkSpurt) {
this.start_talking_time = 0;
}
}
}
module.exports = SttLatencyCalculator;
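
For reference, a hedged sketch of the shape calculateLatency() returns while the calculator is running (values are illustrative; times are unix seconds except stt_latency_ms):

const example = {
  stt_start_time: 1725000000,
  stt_stop_time: 1725000004,
  start_talking_time: 1725000001,
  stop_talking_time: 1725000003,
  stt_latency: '0.40',      // seconds, string from toFixed(2)
  stt_latency_ms: 400,
  stt_usage: '4.00',        // seconds of transcription, string from toFixed(2)
  talkspurts: [[1725000001, 1725000003]]
};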

View File

@@ -339,6 +339,25 @@ const normalizeDeepgram = (evt, channel, language, shortUtterance) => {
};
};
const normalizeDeepgramRiver = (evt, channel, language, shortUtterance) => {
const copy = JSON.parse(JSON.stringify(evt));
return {
language_code: language,
channel_tag: channel,
is_final: evt.event === 'EndOfTurn',
alternatives: [
{
confidence: evt.end_of_turn_confidence,
transcript: evt.transcript,
}
],
vendor: {
name: 'deepgramriver',
evt: copy
}
};
};
const normalizeNvidia = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
const alternatives = (evt.alternatives || [])
@@ -650,6 +669,8 @@ module.exports = (logger) => {
switch (vendor) {
case 'deepgram':
return normalizeDeepgram(evt, channel, language, shortUtterance);
case 'deepgramriver':
return normalizeDeepgramRiver(evt, channel, language, shortUtterance);
case 'microsoft':
return normalizeMicrosoft(evt, channel, language, punctuation);
case 'google':
@@ -764,12 +785,15 @@ module.exports = (logger) => {
AWS_ACCESS_KEY_ID: sttCredentials.accessKeyId,
AWS_SECRET_ACCESS_KEY: sttCredentials.secretAccessKey,
AWS_REGION: sttCredentials.region,
AWS_SECURITY_TOKEN: sttCredentials.securityToken
AWS_SECURITY_TOKEN: sttCredentials.securityToken,
AWS_SESSION_TOKEN: sttCredentials.sessionToken ? sttCredentials.sessionToken : sttCredentials.securityToken
}),
...(awsOptions.accessKey && {AWS_ACCESS_KEY_ID: awsOptions.accessKey}),
...(awsOptions.secretKey && {AWS_SECRET_ACCESS_KEY: awsOptions.secretKey}),
...(awsOptions.region && {AWS_REGION: awsOptions.region}),
...(awsOptions.securityToken && {AWS_SECURITY_TOKEN: awsOptions.securityToken}),
...(awsOptions.sessionToken && {AWS_SESSION_TOKEN: awsOptions.sessionToken ?
awsOptions.sessionToken : awsOptions.securityToken}),
...(awsOptions.languageModelName && {AWS_LANGUAGE_MODEL_NAME: awsOptions.languageModelName}),
...(awsOptions.piiEntityTypes?.length && {AWS_PII_ENTITY_TYPES: awsOptions.piiEntityTypes.join(',')}),
...(awsOptions.piiIdentifyEntities && {AWS_PII_IDENTIFY_ENTITIES: true}),
@@ -870,6 +894,14 @@ module.exports = (logger) => {
const deepgramUri = deepgramOptions.deepgramSttUri || sttCredentials.deepgram_stt_uri;
const useTls = deepgramOptions.deepgramSttUseTls || sttCredentials.deepgram_stt_use_tls;
// DH (2025-08-11) entity_prompt is currently limited to 100 words
const entityPrompt = deepgramOptions.entityPrompt ?
deepgramOptions.entityPrompt
.split(/\s+/)
.slice(0, 100)
.join(' ')
: undefined;
/* default to a sensible model if not supplied */
if (!model) {
model = selectDefaultDeepgramModel(task, language);
@@ -928,7 +960,24 @@ module.exports = (logger) => {
...(deepgramOptions.fillerWords) &&
{DEEPGRAM_SPEECH_FILLER_WORDS: deepgramOptions.fillerWords},
...((Array.isArray(deepgramOptions.keyterms) && deepgramOptions.keyterms.length > 0) &&
{DEEPGRAM_SPEECH_KEYTERMS: deepgramOptions.keyterms.join(',')})
{DEEPGRAM_SPEECH_KEYTERMS: deepgramOptions.keyterms.join(',')}),
...(deepgramOptions.mipOptOut && {DEEPGRAM_SPEECH_MIP_OPT_OUT: deepgramOptions.mipOptOut}),
...(entityPrompt && {DEEPGRAM_SPEECH_ENTITY_PROMPT: entityPrompt}),
};
}
else if ('deepgramriver' === vendor) {
const {
preflightThreshold,
eotThreshold,
eotTimeoutMs,
mipOptOut
} = rOpts.deepgramOptions || {};
opts = {
DEEPGRAM_API_KEY: sttCredentials.api_key,
...(preflightThreshold && {DEEPGRAM_SPEECH_PRELIGHT_THRESHOLD: preflightThreshold}),
...(eotThreshold && {DEEPGRAM_SPEECH_EOT_THRESHOLD: eotThreshold}),
...(eotTimeoutMs && {DEEPGRAM_SPEECH_EOT_TIMEOUT_MS: eotTimeoutMs}),
...(mipOptOut && {DEEPGRAM_SPEECH_MIP_OPT_OUT: mipOptOut}),
};
}
else if ('soniox' === vendor) {
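
A hedged sketch of how a Deepgram River end-of-turn event is normalized by the helper above (field names follow the code; sample values are made up):

const evt = {event: 'EndOfTurn', transcript: 'hello world', end_of_turn_confidence: 0.93};
// normalizeDeepgramRiver(evt, 1, 'en-US') yields roughly:
// {
//   language_code: 'en-US',
//   channel_tag: 1,
//   is_final: true,
//   alternatives: [{confidence: 0.93, transcript: 'hello world'}],
//   vendor: {name: 'deepgramriver', evt: {...copy of evt}}
// }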

View File

@@ -293,7 +293,7 @@ class WsRequestor extends BaseRequestor {
/* send the message */
this.ws.send(JSON.stringify(obj), async() => {
this.logger.debug({obj}, `WsRequestor:request websocket: sent (${url})`);
if (obj.type !== 'llm:event') this.logger.debug({obj}, `WsRequestor:request websocket: sent (${url})`);
// If session:reconnect is waiting for ack, hold here until ack to send queuedMsgs
if (this._reconnectPromise) {
try {

5334
package-lock.json generated

File diff suppressed because it is too large

View File

@@ -27,14 +27,14 @@
"dependencies": {
"@aws-sdk/client-auto-scaling": "^3.549.0",
"@aws-sdk/client-sns": "^3.549.0",
"@jambonz/db-helpers": "^0.9.12",
"@jambonz/db-helpers": "^0.9.16",
"@jambonz/http-health-check": "^0.0.1",
"@jambonz/mw-registrar": "^0.2.7",
"@jambonz/realtimedb-helpers": "^0.8.13",
"@jambonz/speech-utils": "^0.2.15",
"@jambonz/realtimedb-helpers": "^0.8.15",
"@jambonz/speech-utils": "^0.2.22",
"@jambonz/stats-collector": "^0.1.10",
"@jambonz/time-series": "^0.2.14",
"@jambonz/verb-specifications": "^0.0.108",
"@jambonz/verb-specifications": "^0.0.113",
"@modelcontextprotocol/sdk": "^1.9.0",
"@opentelemetry/api": "^1.8.0",
"@opentelemetry/exporter-jaeger": "^1.23.0",
@@ -48,7 +48,7 @@
"bent": "^7.3.12",
"debug": "^4.3.4",
"deepcopy": "^2.1.0",
"drachtio-fsmrf": "^4.0.4",
"drachtio-fsmrf": "^4.1.2",
"drachtio-srf": "^5.0.5",
"express": "^4.19.2",
"express-validator": "^7.0.1",

View File

@@ -58,6 +58,46 @@ test('\'refer\' tests w/202 and NOTIFY', {timeout: 25000}, async(t) => {
}
});
test('\'refer\' tests tel:', {timeout: 25000}, async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
const verbs = [
{
verb: 'say',
text: 'silence_stream://100'
},
{
verb: 'sip:refer',
referTo: 'tel:+1234567890',
actionHook: '/actionHook'
}
];
const noVerbs = [];
const from = 'refer_with_tel';
await provisionCallHook(from, verbs);
await provisionActionHook(from, noVerbs)
// THEN
await sippUac('uac-refer-with-notify.xml', '172.38.0.10', from);
t.pass('refer: successfully received 202 Accepted');
await sleepFor(1000);
const obj = await getJSON(`http:127.0.0.1:3100/lastRequest/${from}_actionHook`);
t.ok(obj.body.final_referred_call_status === 200, 'refer: successfully received NOTIFY with 200 OK');
// console.log(`obj: ${JSON.stringify(obj)}`);
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'refer\' tests w/202 but no NOTIFY', {timeout: 25000}, async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');