Compare commits


1 Commit

Author: snyk-bot
SHA1: 2b0424d462
Date: 2025-12-03 11:59:24 +00:00

fix: package.json & package-lock.json to reduce vulnerabilities

The following vulnerabilities are fixed with an upgrade:
- https://snyk.io/vuln/SNYK-JS-MODELCONTEXTPROTOCOLSDK-14171914
- https://snyk.io/vuln/SNYK-JS-EXPRESS-14157151
26 changed files with 1977 additions and 4311 deletions

View File

@@ -139,11 +139,6 @@ const JAMBONES_USE_FREESWITCH_TIMER_FD = process.env.JAMBONES_USE_FREESWITCH_TIM
const JAMBONES_DIAL_SBC_FOR_REGISTERED_USER = process.env.JAMBONES_DIAL_SBC_FOR_REGISTERED_USER || false;
const JAMBONES_MEDIA_TIMEOUT_MS = process.env.JAMBONES_MEDIA_TIMEOUT_MS || 0;
const JAMBONES_MEDIA_HOLD_TIMEOUT_MS = process.env.JAMBONES_MEDIA_HOLD_TIMEOUT_MS || 0;
const JAMBONES_WEBHOOK_ERROR_RETURN = parseInt(process.env.JAMBONES_WEBHOOK_ERROR_RETURN, 10) || 480;
/* say / tts */
const JAMBONES_SAY_CHUNK_SIZE = parseInt(process.env.JAMBONES_SAY_CHUNK_SIZE, 10) || 900;
// jambonz
const JAMBONES_TRANSCRIBE_EP_DESTROY_DELAY_MS =
process.env.JAMBONES_TRANSCRIBE_EP_DESTROY_DELAY_MS;
@@ -236,7 +231,5 @@ module.exports = {
JAMBONES_DIAL_SBC_FOR_REGISTERED_USER,
JAMBONES_MEDIA_TIMEOUT_MS,
JAMBONES_MEDIA_HOLD_TIMEOUT_MS,
JAMBONES_SAY_CHUNK_SIZE,
JAMBONES_TRANSCRIBE_EP_DESTROY_DELAY_MS,
JAMBONES_WEBHOOK_ERROR_RETURN
};
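The constants removed and kept in this hunk follow one pattern: read an env var, parse it, and fall back to a default when unset or unparseable. A minimal standalone sketch of that parseInt-with-fallback idiom (illustrative, not jambonz code):

// parseInt(undefined, 10) yields NaN, and NaN || 480 yields 480,
// so an unset variable falls back cleanly to the default.
// The same idiom means an explicit "0" also falls back, since 0 || 480 is 480.
const webhookErrorReturn = parseInt(process.env.JAMBONES_WEBHOOK_ERROR_RETURN, 10) || 480;
console.log(webhookErrorReturn); // 480 when JAMBONES_WEBHOOK_ERROR_RETURN is unset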

View File

@@ -291,7 +291,7 @@ router.post('/',
}, {
...(account.enable_debug_log && {level: 'debug'})
});
app.requestor.logger = app.notifier.logger = restDial.logger = sipLogger;
app.requestor.logger = app.notifier.logger = sipLogger;
const callInfo = new CallInfo({
direction: CallDirection.Outbound,
req: inviteReq,

View File

@@ -12,8 +12,7 @@ const RootSpan = require('./utils/call-tracer');
const listTaskNames = require('./utils/summarize-tasks');
const {
JAMBONES_MYSQL_REFRESH_TTL,
JAMBONES_DISABLE_DIRECT_P2P_CALL,
JAMBONES_WEBHOOK_ERROR_RETURN
JAMBONES_DISABLE_DIRECT_P2P_CALL
} = require('./config');
const { createJambonzApp } = require('./dynamic-apps');
const { decrypt } = require('./utils/encrypt-decrypt');
@@ -481,7 +480,7 @@ module.exports = function(srf, logger) {
message: `${err?.message}`.trim()
}).catch((err) => this.logger.info({err}, 'Error generating alert for parsing application'));
logger.info({err}, `Error retrieving or parsing application: ${err?.message}`);
res.send(JAMBONES_WEBHOOK_ERROR_RETURN, {headers: {'X-Reason': err?.message || 'unknown'}});
res.send(480, {headers: {'X-Reason': err?.message || 'unknown'}});
app.requestor.close(WS_CLOSE_CODES.GoingAway);
}
}

View File

@@ -12,7 +12,6 @@ class CallInfo {
let srf;
this.direction = opts.direction;
this.traceId = opts.traceId;
this.hasRecording = false;
this.callTerminationBy = undefined;
if (opts.req) {
const u = opts.req.getParsedHeader('from');

View File

@@ -504,12 +504,7 @@ class CallSession extends Emitter {
}
get isTtsStreamEnabled() {
// 1st background tts stream
return this.backgroundTaskManager.isTaskRunning('ttsStream') ||
// 2nd current task streaming tts
TaskName.Say === this.currentTask?.name && this.currentTask?.isStreamingTts ||
// 3rd nested verb is streaming tts
TaskName.Gather === this.currentTask?.name && this.currentTask.sayTask?.isStreamingTts;
return this.backgroundTaskManager.isTaskRunning('ttsStream');
}
get isListenEnabled() {
@@ -756,101 +751,69 @@ class CallSession extends Emitter {
return this._fillerNoise;
}
async pauseOrResumeBackgroundListenIfRequired(action, silence = false) {
if ((action == 'pauseCallRecording' || action == 'resumeCallRecording') &&
this.backgroundTaskManager.isTaskRunning('record')) {
this.logger.debug({action, silence}, 'CallSession:pauseOrResumeBackgroundListenIfRequired');
const backgroundListenTask = this.backgroundTaskManager.getTask('record');
const status = action === 'pauseCallRecording' ? ListenStatus.Pause : ListenStatus.Resume;
backgroundListenTask.updateListen(
status,
silence
);
}
}
async notifyRecordOptions(opts) {
const {action, silence = false, type = 'siprec'} = opts;
const {action, silence} = opts;
this.logger.debug({opts}, 'CallSession:notifyRecordOptions');
if (type == 'cloud') {
switch (action) {
case 'pauseCallRecording':
if (this.backgroundTaskManager.isTaskRunning('record')) {
this.logger.debug({action, silence, type}, 'CallSession:cloudRecording');
const backgroundListenTask = this.backgroundTaskManager.getTask('record');
backgroundListenTask.updateListen(
ListenStatus.Pause,
silence
);
return true;
} else { return false; }
case 'resumeCallRecording':
if (this.backgroundTaskManager.isTaskRunning('record')) {
this.logger.debug({action, silence, type}, 'CallSession:cloudRecording');
const backgroundListenTask = this.backgroundTaskManager.getTask('record');
backgroundListenTask.updateListen(
ListenStatus.Resume,
silence
);
return true;
} else { return false; }
case 'startCallRecording':
if (!this.backgroundTaskManager.isTaskRunning('record')) {
this.logger.debug({action, silence, type}, 'CallSession:cloudRecording');
this.callInfo.hasRecording = true;
this.updateCallStatus(Object.assign({}, this.callInfo.toJSON()), this.serviceUrl)
.catch((err) => this.logger.error(err, 'redis error'));
if (!this.dlg) {
// Call not yet answered so set flag to record on status change
this.application.record_all_calls = true;
} else {
this.backgroundTaskManager.newTask('record');
}
return true;
} else { return false; }
case 'stopCallRecording':
if (this.backgroundTaskManager.isTaskRunning('record')) {
this.logger.debug({action, silence, type}, 'CallSession:cloudRecording');
this.backgroundTaskManager.stop('record');
return true;
} else { return false; }
}
} else {
// SIPREC
/* if we have not answered yet, just save the details for later */
if (!this.dlg) {
if (action === 'startCallRecording') {
this.recordOptions = opts;
return true;
}
return false;
}
this.pauseOrResumeBackgroundListenIfRequired(action, silence);
/* check validity of request */
if (action == 'startCallRecording' && this.recordState !== RecordState.RecordingOff) {
this.logger.info({recordState: this.recordState},
'CallSession:notifyRecordOptions: recording is already started, ignoring request');
return false;
}
if (action == 'stopCallRecording' && this.recordState === RecordState.RecordingOff) {
this.logger.info({recordState: this.recordState},
'CallSession:notifyRecordOptions: recording is already stopped, ignoring request');
return false;
}
if (action == 'pauseCallRecording' && this.recordState !== RecordState.RecordingOn) {
this.logger.info({recordState: this.recordState},
'CallSession:notifyRecordOptions: cannot pause recording, ignoring request ');
return false;
}
if (action == 'resumeCallRecording' && this.recordState !== RecordState.RecordingPaused) {
this.logger.info({recordState: this.recordState},
'CallSession:notifyRecordOptions: cannot resume recording, ignoring request ');
return false;
/* if we have not answered yet, just save the details for later */
if (!this.dlg) {
if (action === 'startCallRecording') {
this.recordOptions = opts;
return true;
}
return false;
}
this.recordOptions = opts;
/* check validity of request */
if (action == 'startCallRecording' && this.recordState !== RecordState.RecordingOff) {
this.logger.info({recordState: this.recordState},
'CallSession:notifyRecordOptions: recording is already started, ignoring request');
return false;
}
if (action == 'stopCallRecording' && this.recordState === RecordState.RecordingOff) {
this.logger.info({recordState: this.recordState},
'CallSession:notifyRecordOptions: recording is already stopped, ignoring request');
return false;
}
if (action == 'pauseCallRecording' && this.recordState !== RecordState.RecordingOn) {
this.logger.info({recordState: this.recordState},
'CallSession:notifyRecordOptions: cannot pause recording, ignoring request ');
return false;
}
if (action == 'resumeCallRecording' && this.recordState !== RecordState.RecordingPaused) {
this.logger.info({recordState: this.recordState},
'CallSession:notifyRecordOptions: cannot resume recording, ignoring request ');
return false;
}
switch (action) {
case 'startCallRecording':
return await this.startRecording();
case 'stopCallRecording':
return await this.stopRecording();
case 'pauseCallRecording':
return await this.pauseRecording();
case 'resumeCallRecording':
return await this.resumeRecording();
default:
throw new Error(`invalid record action ${action}`);
}
this.recordOptions = opts;
switch (action) {
case 'startCallRecording':
return await this.startRecording();
case 'stopCallRecording':
return await this.stopRecording();
case 'pauseCallRecording':
return await this.pauseRecording();
case 'resumeCallRecording':
return await this.resumeRecording();
default:
throw new Error(`invalid record action ${action}`);
}
}
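The siprec path kept above validates each action against the current recording state before dispatching. A compact sketch of those same rules as a lookup table (a hypothetical condensation, assuming the RecordState values referenced in the hunk):

// Each record action is only legal from specific states; anything else is
// logged and ignored by notifyRecordOptions.
const RecordState = {RecordingOff: 'off', RecordingOn: 'on', RecordingPaused: 'paused'};
const legalStates = {
  startCallRecording: [RecordState.RecordingOff],
  stopCallRecording: [RecordState.RecordingOn, RecordState.RecordingPaused],
  pauseCallRecording: [RecordState.RecordingOn],
  resumeCallRecording: [RecordState.RecordingPaused]
};
const isLegal = (action, state) => (legalStates[action] || []).includes(state);

console.log(isLegal('pauseCallRecording', RecordState.RecordingOn));  // true
console.log(isLegal('resumeCallRecording', RecordState.RecordingOn)); // false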
@@ -964,7 +927,7 @@ class CallSession extends Emitter {
this.logger.debug('CallSession:enableBackgroundTtsStream - ttsStream enabled');
} else {
this.logger.debug(
'CallSession:enableBackgroundTtsStream - ignoring request; conditions not met (probably not using ws api)');
'CallSession:enableBackgroundTtsStream - ignoring request as call does not have required conditions');
}
} catch (err) {
this.logger.info({err, say}, 'CallSession:enableBackgroundTtsStream - Error creating background tts stream task');
@@ -978,11 +941,9 @@ class CallSession extends Emitter {
}
}
clearTtsStream() {
if (this.isTtsStreamEnabled) {
this.requestor?.request('tts:streaming-event', '/streaming-event', {event_type: 'user_interruption'})
.catch((err) => this.logger.info({err}, 'CallSession:clearTtsStream - Error sending user_interruption'));
this.ttsStreamingBuffer?.clear();
}
this.requestor?.request('tts:streaming-event', '/streaming-event', {event_type: 'user_interruption'})
.catch((err) => this.logger.info({err}, 'CallSession:clearTtsStream - Error sending user_interruption'));
this.ttsStreamingBuffer?.clear();
}
startTtsStream() {
@@ -990,7 +951,7 @@ class CallSession extends Emitter {
}
stopTtsStream() {
if (this.isTtsStreamEnabled) {
if (this.appIsUsingWebsockets) {
this.requestor?.request('tts:streaming-event', '/streaming-event', {event_type: 'stream_closed'})
.catch((err) => this.logger.info({err}, 'CallSession:stopTtsStream - Error sending stream_closed'));
this.ttsStreamingBuffer?.stop();
@@ -1082,7 +1043,7 @@ class CallSession extends Emitter {
const cred = JSON.parse(credential.service_key.replace(/\n/g, '\\n'));
return {
speech_credential_sid: credential.speech_credential_sid,
credentials: cred,
credentials: cred
};
} catch (err) {
const sid = this.accountInfo.account.account_sid;
@@ -1287,10 +1248,9 @@ class CallSession extends Emitter {
}
else {
writeAlerts({
alert_type: type === 'tts' ? AlertType.TTS_NOT_PROVISIONED : AlertType.STT_NOT_PROVISIONED,
alert_type: AlertType.STT_NOT_PROVISIONED,
account_sid: this.accountSid,
vendor,
label,
target_sid: this.callSid
}).catch((err) => this.logger.error({err}, 'Error writing tts alert'));
}
@@ -2028,7 +1988,7 @@ Duration=${duration} `
return this._lccDub(opts.dub, callSid);
}
else if (opts.boostAudioSignal) {
return this._lccBoostAudioSignal(opts.boostAudioSignal, callSid);
return this._lccBoostAudioSignal(opts, callSid);
}
else if (opts.media_path) {
return this._lccMediaPath(opts.media_path, callSid);
@@ -2500,36 +2460,6 @@ Duration=${duration} `
}
else {
this.logger.error(err, `Error attempting to allocate endpoint for task ${task.name}`);
// Check for SipError type (e.g., 488 codec incompatibility)
const isSipError = err.name === 'SipError';
if (isSipError && err.status) {
// Extract Reason header from SIP response if available (e.g., Q.850;cause=88;text="INCOMPATIBLE_DESTINATION")
const sipReasonHeader = err.res?.msg?.headers?.reason;
this._endpointAllocationError = {
status: err.status,
reason: err.reason || 'Endpoint Allocation Failed',
sipReasonHeader
};
this.logger.info({endpointAllocationError: this._endpointAllocationError},
'Captured SipError for propagation to SBC');
// Send SIP error response immediately for inbound calls
if (this.res && !this.res.finalResponseSent) {
this.logger.info(`Sending ${err.status} response to SBC due to SipError`);
this.res.send(err.status, {
headers: {
'X-Reason': `endpoint allocation failure: ${err.reason || 'Endpoint Allocation Failed'}`,
...(sipReasonHeader && {'Reason': sipReasonHeader})
}
});
this._notifyCallStatusChange({
callStatus: CallStatus.Failed,
sipStatus: err.status,
sipReason: err.reason || 'Endpoint Allocation Failed'
});
this._callReleased();
}
}
throw new Error(`${BADPRECONDITIONS}: unable to allocate endpoint`);
}
}
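The removed block extracted the Reason header from a failed SIP response (e.g. Q.850;cause=88;text="INCOMPATIBLE_DESTINATION") and echoed it back to the SBC. For reference, a simplified standalone parser for that header shape (hypothetical helper, not part of this codebase; quoted text containing semicolons is not handled):

// Parse a SIP Reason header like: Q.850;cause=88;text="INCOMPATIBLE_DESTINATION"
const parseReasonHeader = (header) => {
  const [protocol, ...params] = header.split(';').map((s) => s.trim());
  const result = {protocol};
  for (const p of params) {
    const [key, raw] = p.split('=');
    result[key] = raw?.replace(/^"|"$/g, '');
  }
  return result;
};

console.log(parseReasonHeader('Q.850;cause=88;text="INCOMPATIBLE_DESTINATION"'));
// { protocol: 'Q.850', cause: '88', text: 'INCOMPATIBLE_DESTINATION' }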
@@ -3042,7 +2972,8 @@ Duration=${duration} `
// manage record all call.
if (callStatus === CallStatus.InProgress) {
if (this.accountInfo.account.record_all_calls || this.application.record_all_calls) {
if (this.accountInfo.account.record_all_calls ||
this.application.record_all_calls) {
this.backgroundTaskManager.newTask('record');
}
} else if (callStatus == CallStatus.Completed) {

View File

@@ -60,19 +60,6 @@ class InboundCallSession extends CallSession {
}
});
}
else if (this._endpointAllocationError) {
// Propagate SIP error from endpoint allocation failure back to the client
const {status, reason, sipReasonHeader} = this._endpointAllocationError;
this.rootSpan.setAttributes({'call.termination': `endpoint allocation SIP error ${status}`});
this.logger.info({endpointAllocationError: this._endpointAllocationError},
`InboundCallSession:_onTasksDone generating ${status} due to endpoint allocation failure`);
this.res.send(status, {
headers: {
'X-Reason': `endpoint allocation failure: ${reason}`,
...(sipReasonHeader && {'Reason': sipReasonHeader})
}
});
}
else {
this.rootSpan.setAttributes({'call.termination': 'tasks completed without answering call'});
this.logger.info('InboundCallSession:_onTasksDone auto-generating non-success response to invite');

View File

@@ -158,7 +158,7 @@ class TaskDial extends Task {
get canReleaseMedia() {
const keepAnchor = this.data.anchorMedia ||
this.isTranscoding ||
this.weAreTranscoding ||
this.cs.isBackGroundListen ||
this.cs.onHoldMusic ||
ANCHOR_MEDIA_ALWAYS ||
@@ -195,9 +195,6 @@ class TaskDial extends Task {
async exec(cs) {
await super.exec(cs);
/* capture whether A leg was already answered before this dial task started */
this._aLegAlreadyAnswered = !!cs.dlg;
if (this.data.anchorMedia && this.data.exitMediaPath) {
this.logger.info('Dial:exec - incompatible anchorMedia and exitMediaPath are both set, will obey anchorMedia');
delete this.data.exitMediaPath;
@@ -553,7 +550,7 @@ class TaskDial extends Task {
let sbcAddress = this.proxy || getSBC();
const teamsInfo = {};
let fqdn;
const forwardPAI = this.forwardPAI ?? !JAMBONZ_DIAL_PAI_HEADER; // dial verb overrides env var
const forwardPAI = this.forwardPAI ?? JAMBONZ_DIAL_PAI_HEADER; // dial verb overrides env var
this.logger.debug(forwardPAI, 'forwardPAI value');
if (!sbcAddress) throw new Error('no SBC found for outbound call');
this.headers = {
@@ -776,15 +773,6 @@ class TaskDial extends Task {
}
async _connectSingleDial(cs, sd) {
// start connect with dialed leg, this is the soonest we can identify transcoding
if (this.epOther && sd.ep) {
const codecA = getLeadingCodec(this.epOther.local.sdp);
const codecB = getLeadingCodec(sd.ep.remote.sdp);
this.isTranscoding = (codecA !== codecB);
if (this.isTranscoding) {
this.logger.info(`Dial:_connectSingleDial - transcoding from ${codecA} (A leg) to ${codecB} (B leg)`);
}
}
if (!this.bridged && !this.canReleaseMedia) {
this.logger.debug('Dial:_connectSingleDial bridging endpoints');
if (this.epOther) {
@@ -875,12 +863,8 @@ class TaskDial extends Task {
this.sd = sd;
this.callSid = sd.callSid;
if (this.earlyMedia) {
if (this._aLegAlreadyAnswered) {
debug('Dial:_selectSingleDial A leg was already answered, skipping propagateAnswer');
} else {
debug('Dial:_selectSingleDial propagating answer supervision on A leg now that B is connected');
await cs.propagateAnswer();
}
debug('Dial:_selectSingleDial propagating answer supervision on A leg now that B is connected');
await cs.propagateAnswer();
}
if (this.timeLimit) {
this.timerMaxCallDuration = setTimeout(this._onMaxCallDuration.bind(this, cs), this.timeLimit * 1000);
@@ -946,6 +930,13 @@ class TaskDial extends Task {
this.logger.info({err}, 'Dial:_selectSingleDial - Error boosting audio signal');
}
}
/* basic determination to see if call is being transcoded */
const codecA = getLeadingCodec(this.epOther.local.sdp);
const codecB = getLeadingCodec(this.ep.remote.sdp);
this.weAreTranscoding = (codecA !== codecB);
if (this.weAreTranscoding) {
this.logger.info(`Dial:_selectSingleDial - transcoding from ${codecA} (A leg) to ${codecB} (B leg)`);
}
/* if we can release the media back to the SBC, do so now */
if (this.canReleaseMedia || this.shouldExitMediaPathEntirely) {
setTimeout(this._releaseMedia.bind(this, cs, sd, this.shouldExitMediaPathEntirely), 200);
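Both the removed and added transcoding checks compare the leading codec of each leg's SDP via getLeadingCodec, which this diff does not show. A plausible sketch, assuming it returns the codec name of the first payload type on the audio m-line:

// Hypothetical getLeadingCodec-style helper: take the first payload type on
// the audio m-line, then resolve its codec name from the matching rtpmap.
const getLeadingCodec = (sdp) => {
  const mline = sdp.split('\n').find((l) => l.startsWith('m=audio'));
  if (!mline) return null;
  const firstPt = mline.trim().split(' ')[3]; // m=audio <port> <proto> <pt> ...
  const rtpmap = sdp.split('\n').find((l) => l.startsWith(`a=rtpmap:${firstPt} `));
  return rtpmap ? rtpmap.split(' ')[1].split('/')[0] : firstPt;
};

const sdp = 'm=audio 49170 RTP/AVP 0 8\na=rtpmap:0 PCMU/8000\na=rtpmap:8 PCMA/8000';
console.log(getLeadingCodec(sdp)); // PCMU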

View File

@@ -500,10 +500,6 @@ class TaskGather extends SttTask {
this.addCustomEventListener(ep, GladiaTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep));
this.addCustomEventListener(ep, GladiaTranscriptionEvents.Error, this._onVendorError.bind(this, cs, ep));
// gladia requires a unique url for each session
const {host, path} = await this.createGladiaLiveSession();
opts.GLADIA_SPEECH_HOST = host;
opts.GLADIA_SPEECH_PATH = path;
break;
case 'soniox':
@@ -885,7 +881,7 @@ class TaskGather extends SttTask {
this._fillerNoiseOn = false; // in a race, if we just started audio it may sneak through here
this.ep.api('uuid_break', this.ep.uuid)
.catch((err) => this.logger.info(err, 'Error killing audio'));
if (cs.isTtsStreamEnabled) cs.clearTtsStream();
cs.clearTtsStream();
}
return;
}
@@ -1322,8 +1318,6 @@ class TaskGather extends SttTask {
}
this.resolved = true;
// gather is resolved, prevent any further transcription events while resolve in progress
this.removeCustomEventListeners();
// If bargeIn is false and the ws application returns an ack to verb:hook
// the gather should not play any audio
this._killAudio(this.cs);

View File

@@ -152,17 +152,9 @@ class TaskListen extends Task {
async _startListening(cs, ep) {
this._initListeners(ep);
const tempci = this.nested ? this.parentTask.sd.callInfo : cs.callInfo.toJSON();
const ci = structuredClone(tempci);
const ci = this.nested ? this.parentTask.sd.callInfo : cs.callInfo.toJSON();
if (this._ignoreCustomerData) {
delete ci.customerData;
} else {
for (const key in ci.customerData) {
if (ci.customerData.hasOwnProperty(key)) {
const value = ci.customerData[key];
ci.customerData[key] = typeof value === 'string' ? escapeString(value) : value;
}
}
}
const metadata = Object.assign(
{sampleRate: this.sampleRate, mixType: this.mixType},

View File

@@ -36,9 +36,6 @@ class TaskLlmGoogle_S2S extends Task {
this.model = this.parent.model || 'models/gemini-2.0-flash-live-001';
this.auth = this.parent.auth;
this.connectionOptions = this.parent.connectOptions;
const {host, version} = this.connectionOptions || {};
this.host = host;
this.version = version;
const {apiKey} = this.auth || {};
if (!apiKey) throw new Error('auth.apiKey is required for Google S2S');
@@ -49,7 +46,7 @@ class TaskLlmGoogle_S2S extends Task {
this.eventHook = this.data.eventHook;
this.toolHook = this.data.toolHook;
const {setup, sessionResumption} = this.data.llmOptions;
const {setup} = this.data.llmOptions;
if (typeof setup !== 'object') {
throw new Error('llmOptions with an initial setup is required for Google S2S');
@@ -57,7 +54,6 @@ class TaskLlmGoogle_S2S extends Task {
this.setup = {
...setup,
model: this.model,
...(sessionResumption && {sessionResumption}),
// make sure output is always audio
generationConfig: {
...(setup.generationConfig || {}),
@@ -142,10 +138,6 @@ class TaskLlmGoogle_S2S extends Task {
try {
const args = [ep.uuid, 'session.create', this.apiKey];
if (this.host) {
args.push(this.host);
if (this.version) args.push(this.version);
}
await this._api(ep, args);
} catch (err) {
this.logger.error({err}, 'TaskLlmGoogle_S2S:_startListening');
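This hunk removes the custom host/version connection options and the sessionResumption passthrough. For orientation, an illustrative llmOptions payload for the verb as it reads after the change (field names follow the Google Live API setup message; treat the exact shape as an assumption, not the verb spec):

// Illustrative llmOptions for the Google S2S task; the model and audio-only
// output are forced by the task itself, so only setup content is supplied.
const llmOptions = {
  setup: {
    systemInstruction: {parts: [{text: 'You are a helpful voice agent.'}]},
    generationConfig: {temperature: 0.7}
    // sessionResumption: {handle: '...'} // accepted before this diff, now dropped
  }
};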

View File

@@ -146,9 +146,8 @@ class TaskLlmUltravox_S2S extends Task {
return data;
}
_unregisterHandlers(ep) {
_unregisterHandlers() {
this.removeCustomEventListeners();
ep.removeAllListeners('dtmf');
}
_registerHandlers(ep) {
@@ -156,7 +155,6 @@ class TaskLlmUltravox_S2S extends Task {
this.addCustomEventListener(ep, LlmEvents_Ultravox.ConnectFailure, this._onConnectFailure.bind(this, ep));
this.addCustomEventListener(ep, LlmEvents_Ultravox.Disconnect, this._onDisconnect.bind(this, ep));
this.addCustomEventListener(ep, LlmEvents_Ultravox.ServerEvent, this._onServerEvent.bind(this, ep));
ep.on('dtmf', this._onDtmf.bind(this, ep));
}
async _startListening(cs, ep) {
@@ -191,7 +189,7 @@ class TaskLlmUltravox_S2S extends Task {
/* note: the parent llm verb started the span, which is why this is necessary */
await this.parent.performAction(this.results);
this._unregisterHandlers(ep);
this._unregisterHandlers();
}
async kill(cs) {
@@ -348,18 +346,6 @@ class TaskLlmUltravox_S2S extends Task {
excludeEvents: this.excludeEvents
}, 'TaskLlmUltravox_S2S:_populateEvents');
}
_onDtmf(ep, evt) {
this.logger.info({evt}, 'TaskLlmUltravox_S2S:_onDtmf - DTMF received');
const {dtmf} = evt;
const data = {
type: 'user_text_message',
text: `DTMF received: ${dtmf}`,
urgency: 'immediate'
};
this._api(ep, [ep.uuid, ClientEvent, JSON.stringify(data)])
.catch((err) => this.logger.info({err, evt}, 'TaskLlmUltravox_S2S:_onDtmf - Error sending DTMF as text message'));
}
}
module.exports = TaskLlmUltravox_S2S;

View File

@@ -1,6 +1,7 @@
const Task = require('./task');
const {TaskName} = require('../utils/constants');
const WsRequestor = require('../utils/ws-requestor');
const URL = require('url');
const HttpRequestor = require('../utils/http-requestor');
/**
@@ -9,7 +10,6 @@ const HttpRequestor = require('../utils/http-requestor');
class TaskRedirect extends Task {
constructor(logger, opts) {
super(logger, opts);
this.statusHook = opts.statusHook || false;
}
get name() { return TaskName.Redirect; }
@@ -33,7 +33,7 @@ class TaskRedirect extends Task {
}
else {
const baseUrl = this.cs.application.requestor.baseUrl;
const newUrl = new URL(this.actionHook);
const newUrl = URL.parse(this.actionHook);
const newBaseUrl = newUrl.protocol + '//' + newUrl.host;
if (baseUrl != newBaseUrl) {
try {
@@ -47,30 +47,6 @@ class TaskRedirect extends Task {
}
}
}
/* update the notifier if a new statusHook was provided */
if (this.statusHook) {
this.logger.info(`TaskRedirect updating statusHook to ${this.statusHook}`);
try {
const oldNotifier = cs.application.notifier;
const isStatusHookAbsolute = cs.notifier?._isAbsoluteUrl(this.statusHook);
if (isStatusHookAbsolute) {
if (cs.notifier instanceof WsRequestor) {
cs.application.notifier = new WsRequestor(this.logger, cs.accountSid, {url: this.statusHook},
cs.accountInfo.account.webhook_secret);
} else {
cs.application.notifier = new HttpRequestor(this.logger, cs.accountSid, {url: this.statusHook},
cs.accountInfo.account.webhook_secret);
}
if (oldNotifier?.close) oldNotifier.close();
}
/* update the call_status_hook URL that gets passed to the notifier */
cs.application.call_status_hook = this.statusHook;
} catch (err) {
this.logger.info(err, `TaskRedirect error updating statusHook to ${this.statusHook}`);
}
}
await this.performAction();
}
}
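The hunk also swaps WHATWG new URL(...) for the legacy url.parse(...). Both expose the protocol and host needed for the base-URL comparison, but they fail differently: new URL throws on invalid input, while url.parse returns an object with null fields. A quick standalone sketch of the comparison both ways:

// Base-URL extraction with the legacy parser (as in this diff) and WHATWG URL.
const urlLegacy = require('url');

const actionHook = 'https://example.com/next-app';
const legacy = urlLegacy.parse(actionHook); // { protocol: 'https:', host: 'example.com', ... }
const whatwg = new URL(actionHook);         // would throw on a malformed actionHook

console.log(legacy.protocol + '//' + legacy.host); // https://example.com
console.log(whatwg.protocol + '//' + whatwg.host); // https://example.com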

View File

@@ -1,7 +1,6 @@
const assert = require('assert');
const TtsTask = require('./tts-task');
const {TaskName, TaskPreconditions} = require('../utils/constants');
const {JAMBONES_SAY_CHUNK_SIZE} = require('../config');
const pollySSMLSplit = require('polly-ssml-split');
const { SpeechCredentialError, NonFatalTaskError } = require('../utils/error');
const { sleepFor } = require('../utils/helpers');
@@ -32,7 +31,7 @@ const isMatchingEvent = (logger, filename, playbackId, evt) => {
const breakLengthyTextIfNeeded = (logger, text) => {
// As the text can be used for tts streaming, we need to break lengthy text into smaller chunks
// HIGH_WATER_BUFFER_SIZE defined in tts-streaming-buffer.js
const chunkSize = JAMBONES_SAY_CHUNK_SIZE;
const chunkSize = 900;
const isSSML = text.startsWith('<speak>');
const options = {
softLimit: 100,
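With this change the chunk size reverts from the configurable JAMBONES_SAY_CHUNK_SIZE to a fixed 900 characters, handed to polly-ssml-split. A sketch of driving that splitter with the options visible in the hunk (softLimit 100; using the chunk size as the hard limit is an assumption here):

// Break lengthy SSML into TTS-sized chunks with polly-ssml-split.
const pollySSMLSplit = require('polly-ssml-split');

const chunkSize = 900;
pollySSMLSplit.configure({softLimit: 100, hardLimit: chunkSize});
const chunks = pollySSMLSplit.split('<speak>' + 'hello world. '.repeat(200) + '</speak>');
console.log(chunks.length); // several chunks, each under the hard limit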

View File

@@ -203,14 +203,26 @@ class SttTask extends Task {
if (cs.hasGlobalSttPunctuation && !this.data.recognizer.punctuation) {
this.data.recognizer.punctuation = cs.globalSttPunctuation;
}
if (this.vendor === 'gladia') {
const { api_key, region } = this.sttCredentials;
const {url} = await this.createGladiaLiveSession({
api_key, region,
model: this.data.recognizer.model || 'solaria-1',
options: this.data.recognizer.gladiaOptions || {}
});
const {host, pathname, search} = new URL(url);
this.sttCredentials.host = host;
this.sttCredentials.path = `${pathname}${search}`;
}
}
async createGladiaLiveSession() {
const { api_key, region = 'us-west' } = this.sttCredentials;
const model = this.data.recognizer.model || 'solaria-1';
const options = this.data.recognizer.gladiaOptions || {};
async createGladiaLiveSession({
api_key,
region = 'us-west',
model = 'solaria-1',
options = {},
}) {
const url = `https://api.gladia.io/v2/live?region=${region}`;
const response = await fetch(url, {
method: 'POST',
@@ -240,9 +252,7 @@ class SttTask extends Task {
const data = await response.json();
this.logger.debug({url: data.url}, 'Gladia Call registered');
const {host, pathname, search} = new URL(data.url);
return {host, path: `${pathname}${search}`};
return data;
}
addCustomEventListener(ep, event, handler) {
@@ -276,7 +286,6 @@ class SttTask extends Task {
account_sid: cs.accountSid,
alert_type: AlertType.STT_NOT_PROVISIONED,
vendor,
label,
target_sid: cs.callSid
}).catch((err) => this.logger.info({err}, 'Error generating alert for no stt'));
// the ASR might have fallback configuration, so we should not mark the task done here.
@@ -477,7 +486,6 @@ class SttTask extends Task {
message: 'STT failure reported by vendor',
detail: evt.error,
vendor: this.vendor,
label: this.label,
target_sid: cs.callSid
}).catch((err) => this.logger.info({err}, `Error generating alert for ${this.vendor} connection failure`));
}
@@ -491,7 +499,6 @@ class SttTask extends Task {
alert_type: AlertType.STT_FAILURE,
message: `Failed connecting to ${this.vendor} speech recognizer: ${reason}`,
vendor: this.vendor,
label: this.label,
target_sid: cs.callSid
}).catch((err) => this.logger.info({err}, `Error generating alert for ${this.vendor} connection failure`));
}
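After this refactor the caller supplies credentials and recognizer options explicitly and parses the returned websocket URL itself. A consolidated view of that flow as it would run inside SttTask (mirrors the hunk; this-bound, not standalone):

// api_key/region come from sttCredentials; model and options from the recognizer.
const {url} = await this.createGladiaLiveSession({
  api_key: this.sttCredentials.api_key,
  region: this.sttCredentials.region, // 'us-west' when omitted
  model: this.data.recognizer.model || 'solaria-1',
  options: this.data.recognizer.gladiaOptions || {}
});
const {host, pathname, search} = new URL(url);
this.sttCredentials.host = host;
this.sttCredentials.path = `${pathname}${search}`;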

View File

@@ -459,14 +459,6 @@ class TaskTranscribe extends SttTask {
else if (this.data.recognizer?.hints?.length > 0) {
prompt = this.data.recognizer?.hints.join(', ');
}
} else if (this.vendor === 'gladia') {
// gladia requires a unique url for each session
const {host, path} = await this.createGladiaLiveSession();
await ep.set({
GLADIA_SPEECH_HOST: host,
GLADIA_SPEECH_PATH: path,
})
.catch((err) => this.logger.info(err, 'Error setting channel variables'));
}
await ep.startTranscription({

View File

@@ -31,9 +31,8 @@ class TtsTask extends Task {
this.synthesizer = this.data.synthesizer || {};
this.disableTtsCache = this.data.disableTtsCache;
this.options = this.synthesizer.options || {};
this.instructions = this.data.instructions || this.options.instructions;
this.instructions = this.data.instructions;
this.playbackIds = [];
this.useGeminiTts = this.options.useGeminiTts;
}
getPlaybackId(offset) {
@@ -157,13 +156,6 @@ class TtsTask extends Task {
...(reduceLatency && {RIMELABS_TTS_STREAMING_REDUCE_LATENCY: reduceLatency})
};
break;
case 'google':
obj = {
GOOGLE_TTS_LANGUAGE_CODE: language,
GOOGLE_TTS_VOICE_NAME: voice,
GOOGLE_APPLICATION_CREDENTIALS: JSON.stringify(credentials.credentials)
};
break;
default:
if (vendor.startsWith('custom:')) {
const use_tls = custom_tts_streaming_url.startsWith('wss://');
@@ -250,8 +242,6 @@ class TtsTask extends Task {
}
} else if (vendor === 'cartesia') {
credentials.model_id = this.options.model_id || credentials.model_id;
} else if (vendor === 'google') {
this.model = this.options.model || credentials.credentials.model_id;
}
this.model_id = credentials.model_id;
@@ -284,7 +274,6 @@ class TtsTask extends Task {
account_sid,
alert_type: AlertType.TTS_NOT_PROVISIONED,
vendor,
label,
target_sid: cs.callSid
}).catch((err) => this.logger.info({err}, 'Error generating alert for no tts'));
throw new SpeechCredentialError('no provisioned speech credentials for TTS');
@@ -371,7 +360,6 @@ class TtsTask extends Task {
account_sid: cs.accountSid,
alert_type: AlertType.TTS_FAILURE,
vendor,
label,
detail: err.message,
target_sid: cs.callSid
}).catch((err) => this.logger.info({err}, 'Error generating alert for tts failure'));

View File

@@ -118,13 +118,6 @@ class ActionHookDelayProcessor extends Emitter {
this.logger.debug('ActionHookDelayProcessor#_onNoResponseTimer');
this._noResponseTimer = null;
/* check if endpoint is still available (call may have ended) */
if (!this.ep) {
this.logger.debug('ActionHookDelayProcessor#_onNoResponseTimer: endpoint is null, call may have ended');
this._active = false;
return;
}
/* get the next play or say action */
const verb = this.actions[this._retryCount % this.actions.length];
@@ -136,8 +129,8 @@ class ActionHookDelayProcessor extends Emitter {
this._taskInProgress.exec(this.cs, {ep: this.ep}).catch((err) => {
this.logger.info(`ActionHookDelayProcessor#_onNoResponseTimer: error playing file: ${err.message}`);
this._taskInProgress = null;
this.ep?.removeAllListeners('playback-start');
this.ep?.removeAllListeners('playback-stop');
this.ep.removeAllListeners('playback-start');
this.ep.removeAllListeners('playback-stop');
});
} catch (err) {
this.logger.info(err, 'ActionHookDelayProcessor#_onNoResponseTimer: error starting action');

View File

@@ -405,21 +405,19 @@ module.exports = (logger) => {
if (ep.amd) {
vendor = ep.amd.vendor;
ep.amd.stopAllTimers();
try {
ep.removeListener(GoogleTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
ep.removeListener(GoogleTranscriptionEvents.EndOfUtterance, ep.amd.EndOfUtteranceHandler);
ep.removeListener(AwsTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
ep.removeListener(AzureTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
ep.removeListener(AzureTranscriptionEvents.NoSpeechDetected, ep.amd.noSpeechHandler);
ep.removeListener(NuanceTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
ep.removeListener(DeepgramTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
ep.removeListener(SonioxTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
ep.removeListener(IbmTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
ep.removeListener(NvidiaTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
ep.removeListener(JambonzTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
} catch (error) {
logger.error('Unable to Remove AMD Listener', error);
}
ep.removeListener(GoogleTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
ep.removeListener(GoogleTranscriptionEvents.EndOfUtterance, ep.amd.EndOfUtteranceHandler);
ep.removeListener(AwsTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
ep.removeListener(AzureTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
ep.removeListener(AzureTranscriptionEvents.NoSpeechDetected, ep.amd.noSpeechHandler);
ep.removeListener(NuanceTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
ep.removeListener(DeepgramTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
ep.removeListener(SonioxTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
ep.removeListener(IbmTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
ep.removeListener(NvidiaTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
ep.removeListener(JambonzTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
ep.amd = null;
}

View File

@@ -135,24 +135,26 @@ class BackgroundTaskManager extends Emitter {
// Initiate Record
async _initRecord() {
if (!JAMBONZ_RECORD_WS_BASE_URL || !this.cs.accountInfo.account.bucket_credential) {
this.logger.error('_initRecord: invalid cfg - missing JAMBONZ_RECORD_WS_BASE_URL or bucket config');
return undefined;
}
const listenOpts = {
url: `${JAMBONZ_RECORD_WS_BASE_URL}/record/${this.cs.accountInfo.account.bucket_credential.vendor}`,
disableBidirectionalAudio: true,
mixType : 'stereo',
passDtmf: true
};
if (JAMBONZ_RECORD_WS_USERNAME && JAMBONZ_RECORD_WS_PASSWORD) {
listenOpts.wsAuth = {
username: JAMBONZ_RECORD_WS_USERNAME,
password: JAMBONZ_RECORD_WS_PASSWORD
if (this.cs.accountInfo.account.record_all_calls || this.cs.application.record_all_calls) {
if (!JAMBONZ_RECORD_WS_BASE_URL || !this.cs.accountInfo.account.bucket_credential) {
this.logger.error('_initRecord: invalid cfg - missing JAMBONZ_RECORD_WS_BASE_URL or bucket config');
return undefined;
}
const listenOpts = {
url: `${JAMBONZ_RECORD_WS_BASE_URL}/record/${this.cs.accountInfo.account.bucket_credential.vendor}`,
disableBidirectionalAudio: true,
mixType : 'stereo',
passDtmf: true
};
if (JAMBONZ_RECORD_WS_USERNAME && JAMBONZ_RECORD_WS_PASSWORD) {
listenOpts.wsAuth = {
username: JAMBONZ_RECORD_WS_USERNAME,
password: JAMBONZ_RECORD_WS_PASSWORD
};
}
this.logger.debug({listenOpts}, '_initRecord: enabling listen');
return await this._initListen({verb: 'listen', ...listenOpts}, 'jambonz-session-record', true, 'record');
}
this.logger.debug({listenOpts}, '_initRecord: enabling listen');
return await this._initListen({verb: 'listen', ...listenOpts}, 'jambonz-session-record', true, 'record');
}
// Initiate Transcribe

View File

@@ -311,11 +311,6 @@
"ConnectFailure": "deepgram_tts_streaming::connect_failed",
"Connect": "deepgram_tts_streaming::connect"
},
"GoogleTtsStreamingEvents": {
"Empty": "google_tts_streaming::empty",
"ConnectFailure": "google_tts_streaming::connect_failed",
"Connect": "google_tts_streaming::connect"
},
"CartesiaTtsStreamingEvents": {
"Empty": "cartesia_tts_streaming::empty",
"ConnectFailure": "cartesia_tts_streaming::connect_failed",

View File

@@ -100,30 +100,6 @@ module.exports = (logger) => {
else if (K8S) {
lifecycleEmitter.scaleIn = () => process.exit(0);
}
else {
process.on('SIGUSR1', () => {
logger.info('received SIGUSR1: begin drying up calls for scale-in');
dryUpCalls = true;
const {srf} = require('../..');
const {writeSystemAlerts} = srf.locals;
if (writeSystemAlerts) {
const {SystemState, FEATURE_SERVER} = require('./constants');
writeSystemAlerts({
system_component: FEATURE_SERVER,
state : SystemState.GracefulShutdownInProgress,
fields : {
detail: `feature-server with process_id ${process.pid} shutdown in progress`,
host: srf.locals?.ipv4
}
});
}
pingProxies(srf);
// Note: in response to SIGUSR1 we start drying up but do not exit when calls reach zero.
// This is to allow external scripts that sent the signal to manage the lifecycle.
});
}
async function pingProxies(srf) {
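The removed branch treated SIGUSR1 as an externally driven drain: stop taking new calls, raise a system alert, but leave process exit to whatever sent the signal. A minimal sketch of triggering that drain from another Node process (assumes you know the feature-server pid):

// process.kill sends a signal; it does not terminate the target by itself.
// Usage: node drain.js <pid>
const pid = Number(process.argv[2]);
process.kill(pid, 'SIGUSR1'); // feature-server starts drying up calls but keeps running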

View File

@@ -127,6 +127,7 @@ class SttLatencyCalculator extends Emitter {
calculateLatency() {
if (!this.isRunning) {
this.logger.debug('Latency calculator is not running, cannot calculate latency, returning default values');
return null;
}

View File

@@ -1085,6 +1085,13 @@ module.exports = (logger) => {
...(keyterms && keyterms.length > 0 && {DEEPGRAMFLUX_SPEECH_KEYTERMS: keyterms.join(',')}),
};
}
else if ('gladia' === vendor) {
const {host, path} = sttCredentials;
opts = {
GLADIA_SPEECH_HOST: host,
GLADIA_SPEECH_PATH: path,
};
}
else if ('soniox' === vendor) {
const {sonioxOptions = {}} = rOpts;
const {storage = {}} = sonioxOptions;
@@ -1310,9 +1317,6 @@ module.exports = (logger) => {
...(openaiOptions.turn_detection.silence_duration_ms && {
OPENAI_TURN_DETECTION_SILENCE_DURATION_MS: openaiOptions.turn_detection.silence_duration_ms
}),
...(openaiOptions.turn_detection.eagerness && {
OPENAI_TURN_DETECTION_EAGERNESS: openaiOptions.turn_detection.eagerness
})
};
}
}
@@ -1378,9 +1382,7 @@ module.exports = (logger) => {
speechmaticsOptions.transcription_config.audio_filtering_config.volume_threshold}),
...(speechmaticsOptions.transcription_config?.transcript_filtering_config?.remove_disfluencies &&
{SPEECHMATICS_REMOVE_DISFLUENCIES:
speechmaticsOptions.transcription_config.transcript_filtering_config.remove_disfluencies}),
SPEECHMATICS_END_OF_UTTERANCE_SILENCE_TRIGGER:
speechmaticsOptions.transcription_config?.conversation_config?.end_of_utterance_silence_trigger || 0.5
speechmaticsOptions.transcription_config.transcript_filtering_config.remove_disfluencies})
};
}
else if (vendor.startsWith('custom:')) {

View File

@@ -163,6 +163,7 @@ class TtsStreamingBuffer extends Emitter {
}
clear() {
this.logger.debug('TtsStreamingBuffer:clear');
if (this._connectionStatus !== TtsStreamingConnectionStatus.Connected) return;
clearTimeout(this.timer);
this._api(this.ep, [this.ep.uuid, 'clear']).catch((err) =>
@@ -421,7 +422,6 @@ class TtsStreamingBuffer extends Emitter {
'cartesia',
'elevenlabs',
'rimelabs',
'google',
'custom'
].forEach((vendor) => {
const eventClassName = `${vendor.charAt(0).toUpperCase() + vendor.slice(1)}TtsStreamingEvents`;
@@ -437,15 +437,7 @@ class TtsStreamingBuffer extends Emitter {
const findSentenceBoundary = (text, limit) => {
// Look for punctuation or double newline that signals sentence end.
// Includes:
// - ASCII: . ! ?
// - Arabic: ؟ (question mark), ۔ (full stop)
// - Japanese: 。 (full stop), !? (full-width exclamation/question)
//
// For languages that use spaces between sentences, we still require
// whitespace or end-of-string after the mark. For Japanese (no spaces),
// we treat the punctuation itself as a boundary regardless of following char.
const sentenceEndRegex = /[.!?؟۔](?=\s|$)|[。!?]|\n\n/g;
const sentenceEndRegex = /[.!?](?=\s|$)|\n\n/g;
let lastSentenceBoundary = -1;
let match;
while ((match = sentenceEndRegex.exec(text)) && match.index < limit) {
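The reverted regex keeps only ASCII sentence terminators (followed by whitespace or end-of-string) and blank lines, dropping the Arabic and CJK boundary classes. A quick demonstration of the difference on sample strings (illustrative inputs):

// Compare the two sentence-boundary regexes from the hunk above.
const oldRegex = /[.!?؟۔](?=\s|$)|[。!?]|\n\n/g;
const newRegex = /[.!?](?=\s|$)|\n\n/g;

const ja = 'こんにちは。元気ですか?';
const en = 'Hello there. How are you?';

console.log(ja.match(oldRegex)); // [ '。', '?' ] - CJK punctuation counts as a boundary
console.log(ja.match(newRegex)); // null - no ASCII terminator before whitespace/EOS
console.log(en.match(newRegex)); // [ '.', '?' ]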

package-lock.json (generated; 5759 lines changed)

File diff suppressed because it is too large.

View File

@@ -31,11 +31,11 @@
"@jambonz/http-health-check": "^0.0.1",
"@jambonz/mw-registrar": "^0.2.7",
"@jambonz/realtimedb-helpers": "^0.8.15",
"@jambonz/speech-utils": "^0.2.30",
"@jambonz/speech-utils": "^0.2.26",
"@jambonz/stats-collector": "^0.1.10",
"@jambonz/time-series": "^0.2.15",
"@jambonz/verb-specifications": "^0.0.125",
"@modelcontextprotocol/sdk": "^1.9.0",
"@jambonz/time-series": "^0.2.14",
"@jambonz/verb-specifications": "^0.0.122",
"@modelcontextprotocol/sdk": "^1.24.0",
"@opentelemetry/api": "^1.8.0",
"@opentelemetry/exporter-jaeger": "^1.23.0",
"@opentelemetry/exporter-trace-otlp-http": "^0.50.0",
@@ -49,8 +49,8 @@
"debug": "^4.3.4",
"deepcopy": "^2.1.0",
"drachtio-fsmrf": "^4.1.2",
"drachtio-srf": "^5.0.18",
"express": "^4.19.2",
"drachtio-srf": "^5.0.14",
"express": "^4.22.0",
"express-validator": "^7.0.1",
"moment": "^2.30.1",
"parse-url": "^9.2.0",