Mirror of https://github.com/jambonz/jambonz-feature-server.git (synced 2026-02-09 02:30:17 +00:00)

Compare commits: v0.9.5-rc4...v0.9.6-rc1 (91 commits)
| SHA1 |
|---|
| ceb9a7a3bd |
| ff5f9acaf8 |
| 96cdc2936b |
| 6120dcbe96 |
| 96d72216e2 |
| faee30278b |
| 325af42946 |
| 9848152d5b |
| 2468557aef |
| 3c3dfa81d3 |
| 961c2589ac |
| e4ec0025c3 |
| ba275ef547 |
| 83a8cf6d25 |
| 09220872ae |
| fdce05fa40 |
| 3bd1dd6323 |
| 54dc172ebd |
| e007e0e2d3 |
| c5cd488fdf |
| 57982335e0 |
| 5cea91e18a |
| e396b6aa98 |
| 9104ebb603 |
| 1ad0261336 |
| 7802822773 |
| edb4d21ce1 |
| 8048e9cf88 |
| 451feafed4 |
| 7f1543a0f3 |
| 83955ba972 |
| a5fa5fce5b |
| cc1751f500 |
| 1a1f53aede |
| 1984b6d3ea |
| 769b66f57e |
| 98b845f489 |
| f92b1dbc97 |
| 0442144793 |
| 2de24af169 |
| a884880321 |
| b307df79d0 |
| 77bd11dd47 |
| 46d56fe546 |
| 30ab281ea2 |
| 0869a73052 |
| a0a579ccee |
| 4218653852 |
| 89cc39f726 |
| b231593bff |
| 4309d25376 |
| a00703a067 |
| 89c985b564 |
| b4ed4c8c46 |
| 581d309f36 |
| d1baf2fe37 |
| 28bf0d3477 |
| d2d3b4583e |
| 854c26db11 |
| e77666a1a7 |
| 5acb19225b |
| 1d6f84c2d7 |
| de9b970a93 |
| ec786ef1dd |
| a95a6d1683 |
| 65b3066866 |
| 057f52e56c |
| b46be57eba |
| f950d19d1c |
| 859132bb1c |
| acaadceaa2 |
| add8d63e8e |
| a05b72a420 |
| 28ff85225f |
| f2fe7c4d24 |
| 97408c7d3b |
| db5f0a0dce |
| 654ccd9d9d |
| ea27b20ac5 |
| 96aa705378 |
| 5e51849839 |
| 44f69fa76d |
| 73c77bea71 |
| babc0d0dbb |
| 6b8d0fe1a8 |
| 66bb466297 |
| 1933f4ec0b |
| b1089a1ae9 |
| 93e06d887e |
| b478e0ecd2 |
| 94d43d4b70 |
app.js
@@ -29,6 +29,12 @@ const {LifeCycleEvents, FS_UUID_SET_NAME, SystemState, FEATURE_SERVER} = require
const installSrfLocals = require('./lib/utils/install-srf-locals');
const createHttpListener = require('./lib/utils/http-listener');
const healthCheck = require('@jambonz/http-health-check');
const ProcessMonitor = require('./lib/utils/process-monitor');
const monitor = new ProcessMonitor(logger);

// Log startup
monitor.logStartup();
monitor.setupSignalHandlers();

logger.on('level-change', (lvl, _val, prevLvl, _prevVal, instance) => {
if (logger !== instance) {
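
The app.js hunk wires in a new ProcessMonitor that logs startup details and installs signal handlers. The module itself is not shown in this compare view; the following is only a minimal sketch of what such a helper could look like, limited to the two methods the hunk actually calls (logStartup and setupSignalHandlers). The internals are assumptions, not the real lib/utils/process-monitor implementation.

```js
// Illustrative sketch only -- the real lib/utils/process-monitor is not shown in this diff.
class ProcessMonitor {
  constructor(logger) {
    this.logger = logger;
  }

  // log basic process details once at startup
  logStartup() {
    this.logger.info({pid: process.pid, node: process.version}, 'feature-server starting');
  }

  // log termination signals as they arrive
  setupSignalHandlers() {
    for (const sig of ['SIGTERM', 'SIGINT']) {
      process.on(sig, () => this.logger.info(`received ${sig}`));
    }
  }
}

module.exports = ProcessMonitor;
```
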
@@ -174,5 +174,61 @@
"non è raggiungibile",
"lascia pure un messaggio",
"puoi lasciare un messaggio"
],
"ja-JP": [
"この通話は留守番電話に転送されました",
"発信先は現在電話に出ることができません",
"発信音の後でメッセージを録音してください",
"録音を完了したら電話を切ることができます",
"只今電話に出ることができません",
"ただ今電話に出ることができません",
"ただいま電話に出ることができません",
"ピーという発信音の後にお名前とご用件をお話しください",
"ファックスを送られる方はスタートボタンを押してください",
"FAXを送られる方はスタートボタンを押してください",
"おかけになった電話をお呼びしましたが",
"お出になりません",
"おでになりません",
"お掛けになった電話番号は",
"おかけになった電話番号は",
"お掛けになった電話は",
"おかけになった電話は",
"現在使われておりません",
"番号をお確かめになって",
"お掛け直し下さい",
"おかけ直し下さい",
"おかけ直しください",
"こちらはNTTドコモです",
"こちらはエーユーです",
"こちらはソフトバンクです",
"電波の届かない",
"電源が入っていない",
"掛かりません",
"かかりません",
"お繋ぎすることが出来ません",
"お繋ぎ出来ません",
"お繋ぎすることができません",
"お繋ぎできません",
"おつなぎすることができません",
"おつなぎできません",
"メッセージを録音",
"留守番電話",
"お留守番サービス",
"留守番",
"留守電",
"留守",
"接続します",
"合図の音",
"ピーと",
"発信音",
"ご用件",
"伝言",
"お話しください",
"ファックス",
"FAX",
"終了",
"終了しました",
"終了いたしました",
"営業時間"
]
}
@@ -119,7 +119,7 @@ const ENCRYPTION_SECRET = process.env.ENCRYPTION_SECRET;
const HTTP_POOL = process.env.HTTP_POOL && parseInt(process.env.HTTP_POOL);
const HTTP_POOLSIZE = parseInt(process.env.HTTP_POOLSIZE, 10) || 10;
const HTTP_PIPELINING = parseInt(process.env.HTTP_PIPELINING, 10) || 1;
const HTTP_TIMEOUT = 10000;
const HTTP_TIMEOUT = parseInt(process.env.JAMBONES_HTTP_TIMEOUT, 10) || 10000;
const HTTP_PROXY_IP = process.env.JAMBONES_HTTP_PROXY_IP;
const HTTP_PROXY_PORT = process.env.JAMBONES_HTTP_PROXY_PORT;
const HTTP_PROXY_PROTOCOL = process.env.JAMBONES_HTTP_PROXY_PROTOCOL || 'http';

@@ -139,6 +139,11 @@ const JAMBONES_USE_FREESWITCH_TIMER_FD = process.env.JAMBONES_USE_FREESWITCH_TIM
const JAMBONES_DIAL_SBC_FOR_REGISTERED_USER = process.env.JAMBONES_DIAL_SBC_FOR_REGISTERED_USER || false;
const JAMBONES_MEDIA_TIMEOUT_MS = process.env.JAMBONES_MEDIA_TIMEOUT_MS || 0;
const JAMBONES_MEDIA_HOLD_TIMEOUT_MS = process.env.JAMBONES_MEDIA_HOLD_TIMEOUT_MS || 0;
const JAMBONES_WEBHOOK_ERROR_RETURN = parseInt(process.env.JAMBONES_WEBHOOK_ERROR_RETURN, 10) || 480;

/* say / tts */
const JAMBONES_SAY_CHUNK_SIZE = parseInt(process.env.JAMBONES_SAY_CHUNK_SIZE, 10) || 900;

// jambonz
const JAMBONES_TRANSCRIBE_EP_DESTROY_DELAY_MS =
process.env.JAMBONES_TRANSCRIBE_EP_DESTROY_DELAY_MS;

@@ -231,5 +236,7 @@ module.exports = {
JAMBONES_DIAL_SBC_FOR_REGISTERED_USER,
JAMBONES_MEDIA_TIMEOUT_MS,
JAMBONES_MEDIA_HOLD_TIMEOUT_MS,
JAMBONES_SAY_CHUNK_SIZE,
JAMBONES_TRANSCRIBE_EP_DESTROY_DELAY_MS,
JAMBONES_WEBHOOK_ERROR_RETURN
};
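
The config hunk makes the webhook HTTP timeout configurable and adds JAMBONES_WEBHOOK_ERROR_RETURN, the SIP status returned when an application cannot be retrieved or parsed (default 480). A quick sketch of how these parseInt-with-fallback settings resolve; the values shown are examples, not recommendations.

```js
// Example only: how the new env-driven settings resolve at startup.
process.env.JAMBONES_HTTP_TIMEOUT = '5000';        // webhook HTTP timeout in ms
process.env.JAMBONES_WEBHOOK_ERROR_RETURN = '503'; // SIP status on webhook failure

const HTTP_TIMEOUT = parseInt(process.env.JAMBONES_HTTP_TIMEOUT, 10) || 10000;
const JAMBONES_WEBHOOK_ERROR_RETURN = parseInt(process.env.JAMBONES_WEBHOOK_ERROR_RETURN, 10) || 480;

console.log(HTTP_TIMEOUT);                  // 5000
console.log(JAMBONES_WEBHOOK_ERROR_RETURN); // 503 (falls back to 480 when unset or not numeric)
```
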
@@ -147,7 +147,7 @@ router.post('/',

// find handling sbc sip for called user
if (JAMBONES_DIAL_SBC_FOR_REGISTERED_USER && target.type === 'user') {
const { registrar } = srf.locals.dbHelpers;
const { registrar} = srf.locals.dbHelpers;
const reg = await registrar.query(target.name);
if (reg) {
sbcAddress = selectHostPort(logger, reg.sbcAddress, 'tcp')[1];

@@ -159,7 +159,9 @@ router.post('/',
* trunk isn't specified,
* check if from-number matches any existing numbers on Jambonz
* */
if (target.type === 'phone' && !target.trunk) {
const { lookupLcrByAccount} = srf.locals.dbHelpers;
const lcrs = await lookupLcrByAccount(req.body.account_sid);
if (target.type === 'phone' && !target.trunk && lcrs.length == 0) {
const str = restDial.from || '';
const callingNumber = str.startsWith('+') ? str.substring(1) : str;
const voip_carrier_sid = await lookupCarrierByPhoneNumber(req.body.account_sid, callingNumber);

@@ -289,7 +291,7 @@ router.post('/',
}, {
...(account.enable_debug_log && {level: 'debug'})
});
app.requestor.logger = app.notifier.logger = sipLogger;
app.requestor.logger = app.notifier.logger = restDial.logger = sipLogger;
const callInfo = new CallInfo({
direction: CallDirection.Outbound,
req: inviteReq,
@@ -12,7 +12,8 @@ const RootSpan = require('./utils/call-tracer');
const listTaskNames = require('./utils/summarize-tasks');
const {
JAMBONES_MYSQL_REFRESH_TTL,
JAMBONES_DISABLE_DIRECT_P2P_CALL
JAMBONES_DISABLE_DIRECT_P2P_CALL,
JAMBONES_WEBHOOK_ERROR_RETURN
} = require('./config');
const { createJambonzApp } = require('./dynamic-apps');
const { decrypt } = require('./utils/encrypt-decrypt');

@@ -112,6 +113,14 @@ module.exports = function(srf, logger) {
req.locals.callingNumber = sipURIs[1];
}
}

// Feature server INVITE request pipelines taking time to finish,
// while connecting and fetch application from db and invoking webhook.
// call can be canceled without any handling, so we add a listener here
req.once('cancel', (sipMsg) => {
logger.info(`${callId} got CANCEL request`);
req.locals.canceled = true;
});
next();
}

@@ -362,13 +371,14 @@ module.exports = function(srf, logger) {
});
// if transferred call contains callInfo, let update original data to newly created callInfo in this instance.
if (app.transferredCall && app.callInfo) {
const {direction, callerName, from, to, originatingSipIp, originatingSipTrunkName} = app.callInfo;
const {direction, callerName, from, to, originatingSipIp, originatingSipTrunkName, customerData} = app.callInfo;
req.locals.callInfo.direction = direction;
req.locals.callInfo.callerName = callerName;
req.locals.callInfo.from = from;
req.locals.callInfo.to = to;
req.locals.callInfo.originatingSipIp = originatingSipIp;
req.locals.callInfo.originatingSipTrunkName = originatingSipTrunkName;
if (customerData) req.locals.callInfo.customerData = customerData;
delete app.callInfo;
}
next();

@@ -471,7 +481,7 @@ module.exports = function(srf, logger) {
message: `${err?.message}`.trim()
}).catch((err) => this.logger.info({err}, 'Error generating alert for parsing application'));
logger.info({err}, `Error retrieving or parsing application: ${err?.message}`);
res.send(480, {headers: {'X-Reason': err?.message || 'unknown'}});
res.send(JAMBONES_WEBHOOK_ERROR_RETURN, {headers: {'X-Reason': err?.message || 'unknown'}});
app.requestor.close(WS_CLOSE_CODES.GoingAway);
}
}
@@ -12,6 +12,7 @@ class CallInfo {
let srf;
this.direction = opts.direction;
this.traceId = opts.traceId;
this.hasRecording = false;
this.callTerminationBy = undefined;
if (opts.req) {
const u = opts.req.getParsedHeader('from');
@@ -504,7 +504,12 @@ class CallSession extends Emitter {
}

get isTtsStreamEnabled() {
return this.backgroundTaskManager.isTaskRunning('ttsStream');
// 1st background tts stream
return this.backgroundTaskManager.isTaskRunning('ttsStream') ||
// 2nd current task streaming tts
TaskName.Say === this.currentTask?.name && this.currentTask?.isStreamingTts ||
// 3rd nested verb is streaming tts
TaskName.Gather === this.currentTask?.name && this.currentTask.sayTask?.isStreamingTts;
}

get isListenEnabled() {

@@ -658,6 +663,15 @@ class CallSession extends Emitter {
}
}

// disableTtsCache
get disableTtsCache() {
return this._disableTtsCache || false;
}

set disableTtsCache(d) {
this._disableTtsCache = d;
}

getTsStreamingVendor() {
let v;
if (this.currentTask?.isStreamingTts) {

@@ -710,7 +724,7 @@ class CallSession extends Emitter {
}

hasGlobalSttPunctuation() {
get hasGlobalSttPunctuation() {
return this._globalSttPunctuation !== undefined;
}
@@ -742,69 +756,101 @@ class CallSession extends Emitter {
return this._fillerNoise;
}

async pauseOrResumeBackgroundListenIfRequired(action, silence = false) {
if ((action == 'pauseCallRecording' || action == 'resumeCallRecording') &&
this.backgroundTaskManager.isTaskRunning('record')) {
this.logger.debug({action, silence}, 'CallSession:pauseOrResumeBackgroundListenIfRequired');
const backgroundListenTask = this.backgroundTaskManager.getTask('record');
const status = action === 'pauseCallRecording' ? ListenStatus.Pause : ListenStatus.Resume;
backgroundListenTask.updateListen(
status,
silence
);
}
}

async notifyRecordOptions(opts) {
const {action, silence} = opts;
const {action, silence = false, type = 'siprec'} = opts;
this.logger.debug({opts}, 'CallSession:notifyRecordOptions');

this.pauseOrResumeBackgroundListenIfRequired(action, silence);

/* if we have not answered yet, just save the details for later */
if (!this.dlg) {
if (action === 'startCallRecording') {
this.recordOptions = opts;
return true;
if (type == 'cloud') {
switch (action) {
case 'pauseCallRecording':
if (this.backgroundTaskManager.isTaskRunning('record')) {
this.logger.debug({action, silence, type}, 'CallSession:cloudRecording');
const backgroundListenTask = this.backgroundTaskManager.getTask('record');
backgroundListenTask.updateListen(
ListenStatus.Pause,
silence
);
return true;
} else { return false; }
case 'resumeCallRecording':
if (this.backgroundTaskManager.isTaskRunning('record')) {
this.logger.debug({action, silence, type}, 'CallSession:cloudRecording');
const backgroundListenTask = this.backgroundTaskManager.getTask('record');
backgroundListenTask.updateListen(
ListenStatus.Resume,
silence
);
return true;
} else { return false; }
case 'startCallRecording':
if (!this.backgroundTaskManager.isTaskRunning('record')) {
this.logger.debug({action, silence, type}, 'CallSession:cloudRecording');
this.callInfo.hasRecording = true;
this.updateCallStatus(Object.assign({}, this.callInfo.toJSON()), this.serviceUrl)
.catch((err) => this.logger.error(err, 'redis error'));
if (!this.dlg) {
// Call not yet answered so set flag to record on status change
this.application.record_all_calls = true;
} else {
this.backgroundTaskManager.newTask('record');
}
return true;
} else { return false; }
case 'stopCallRecording':
if (this.backgroundTaskManager.isTaskRunning('record')) {
this.logger.debug({action, silence, type}, 'CallSession:cloudRecording');
this.backgroundTaskManager.stop('record');
return true;
} else { return false; }
}
} else {
// SIPREC
/* if we have not answered yet, just save the details for later */
if (!this.dlg) {
if (action === 'startCallRecording') {
this.recordOptions = opts;
return true;
}
return false;
}
return false;
}

/* check validity of request */
if (action == 'startCallRecording' && this.recordState !== RecordState.RecordingOff) {
this.logger.info({recordState: this.recordState},
'CallSession:notifyRecordOptions: recording is already started, ignoring request');
return false;
}
if (action == 'stopCallRecording' && this.recordState === RecordState.RecordingOff) {
this.logger.info({recordState: this.recordState},
'CallSession:notifyRecordOptions: recording is already stopped, ignoring request');
return false;
}
if (action == 'pauseCallRecording' && this.recordState !== RecordState.RecordingOn) {
this.logger.info({recordState: this.recordState},
'CallSession:notifyRecordOptions: cannot pause recording, ignoring request ');
return false;
}
if (action == 'resumeCallRecording' && this.recordState !== RecordState.RecordingPaused) {
this.logger.info({recordState: this.recordState},
'CallSession:notifyRecordOptions: cannot resume recording, ignoring request ');
return false;
}
/* check validity of request */
if (action == 'startCallRecording' && this.recordState !== RecordState.RecordingOff) {
this.logger.info({recordState: this.recordState},
'CallSession:notifyRecordOptions: recording is already started, ignoring request');
return false;
}
if (action == 'stopCallRecording' && this.recordState === RecordState.RecordingOff) {
this.logger.info({recordState: this.recordState},
'CallSession:notifyRecordOptions: recording is already stopped, ignoring request');
return false;
}
if (action == 'pauseCallRecording' && this.recordState !== RecordState.RecordingOn) {
this.logger.info({recordState: this.recordState},
'CallSession:notifyRecordOptions: cannot pause recording, ignoring request ');
return false;
}
if (action == 'resumeCallRecording' && this.recordState !== RecordState.RecordingPaused) {
this.logger.info({recordState: this.recordState},
'CallSession:notifyRecordOptions: cannot resume recording, ignoring request ');
return false;
}

this.recordOptions = opts;
this.recordOptions = opts;

switch (action) {
case 'startCallRecording':
return await this.startRecording();
case 'stopCallRecording':
return await this.stopRecording();
case 'pauseCallRecording':
return await this.pauseRecording();
case 'resumeCallRecording':
return await this.resumeRecording();
default:
throw new Error(`invalid record action ${action}`);
switch (action) {
case 'startCallRecording':
return await this.startRecording();
case 'stopCallRecording':
return await this.stopRecording();
case 'pauseCallRecording':
return await this.pauseRecording();
case 'resumeCallRecording':
return await this.resumeRecording();
default:
throw new Error(`invalid record action ${action}`);
}
}
}
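
The reworked notifyRecordOptions now branches on a record type: 'cloud' drives the background 'record' task directly, while the previous SIPREC path is kept for type 'siprec' (the default). A hedged example of the options object the method now destructures; the caller object name is hypothetical and the transport that delivers these options is not shown in this compare view.

```js
// Illustrative opts shapes, based on the fields destructured in the new code:
// {action, silence = false, type = 'siprec'}
await callSession.notifyRecordOptions({action: 'startCallRecording', type: 'cloud'});
await callSession.notifyRecordOptions({action: 'pauseCallRecording', type: 'cloud', silence: true});
await callSession.notifyRecordOptions({action: 'stopCallRecording', type: 'cloud'});

// omitting type keeps the original SIPREC behavior
await callSession.notifyRecordOptions({action: 'startCallRecording'});
```
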
@@ -918,7 +964,7 @@ class CallSession extends Emitter {
this.logger.debug('CallSession:enableBackgroundTtsStream - ttsStream enabled');
} else {
this.logger.debug(
'CallSession:enableBackgroundTtsStream - ignoring request as call does not have required conditions');
'CallSession:enableBackgroundTtsStream - ignoring request; conditions not met (probably not using ws api)');
}
} catch (err) {
this.logger.info({err, say}, 'CallSession:enableBackgroundTtsStream - Error creating background tts stream task');

@@ -932,15 +978,25 @@ class CallSession extends Emitter {
}
}
clearTtsStream() {
this.requestor?.request('tts:streaming-event', '/streaming-event', {event_type: 'user_interruption'})
.catch((err) => this.logger.info({err}, 'CallSession:clearTtsStream - Error sending user_interruption'));
this.ttsStreamingBuffer?.clear();
if (this.isTtsStreamEnabled) {
this.requestor?.request('tts:streaming-event', '/streaming-event', {event_type: 'user_interruption'})
.catch((err) => this.logger.info({err}, 'CallSession:clearTtsStream - Error sending user_interruption'));
this.ttsStreamingBuffer?.clear();
}
}

startTtsStream() {
this.ttsStreamingBuffer?.start();
}

stopTtsStream() {
if (this.isTtsStreamEnabled) {
this.requestor?.request('tts:streaming-event', '/streaming-event', {event_type: 'stream_closed'})
.catch((err) => this.logger.info({err}, 'CallSession:clearTtsStream - Error sending user_interruption'));
this.ttsStreamingBuffer?.stop();
}
}

async enableBotMode(gather, autoEnable) {
try {
let task;

@@ -964,7 +1020,7 @@ class CallSession extends Emitter {
task.sticky = autoEnable;
// listen to the bargein-done from background manager
this.backgroundTaskManager.on('bargeIn-done', () => {
if (this.requestor instanceof WsRequestor) {
if (this.appIsUsingWebsockets) {
try {
this.kill(true);
} catch (err) {}
@@ -1017,8 +1073,6 @@ class CallSession extends Emitter {
(type === 'tts' && credential.use_for_tts) ||
(type === 'stt' && credential.use_for_stt)
)) {
this.logger.debug(
`${type}: ${credential.vendor} ${credential.label ? `, label: ${credential.label}` : ''} `);
if ('google' === vendor) {
if (type === 'tts' && !credential.tts_tested_ok ||
type === 'stt' && !credential.stt_tested_ok) {

@@ -1028,7 +1082,7 @@ class CallSession extends Emitter {
const cred = JSON.parse(credential.service_key.replace(/\n/g, '\\n'));
return {
speech_credential_sid: credential.speech_credential_sid,
credentials: cred
credentials: cred,
};
} catch (err) {
const sid = this.accountInfo.account.account_sid;

@@ -1088,6 +1142,13 @@ class CallSession extends Emitter {
deepgram_stt_use_tls: credential.deepgram_stt_use_tls
};
}
else if ('gladia' === vendor) {
return {
speech_credential_sid: credential.speech_credential_sid,
api_key: credential.api_key,
region: credential.region,
};
}
else if ('soniox' === vendor) {
return {
speech_credential_sid: credential.speech_credential_sid,

@@ -1119,6 +1180,7 @@ class CallSession extends Emitter {
return {
api_key: credential.api_key,
model_id: credential.model_id,
api_uri: credential.api_uri,
options: credential.options
};
}

@@ -1146,6 +1208,13 @@ class CallSession extends Emitter {
options: credential.options
};
}
else if ('resemble' === vendor) {
return {
api_key: credential.api_key,
resemble_tts_use_tls: credential.resemble_tts_use_tls,
resemble_tts_uri: credential.resemble_tts_uri,
};
}
else if ('inworld' === vendor) {
return {
api_key: credential.api_key,

@@ -1160,7 +1229,16 @@ class CallSession extends Emitter {
service_version: credential.service_version
};
}
else if ('deepgramriver' === vendor) {
else if ('houndify' === vendor) {
return {
speech_credential_sid: credential.speech_credential_sid,
client_id: credential.client_id,
client_key: credential.client_key,
user_id: credential.user_id,
houndify_server_uri: credential.houndify_server_uri
};
}
else if ('deepgramflux' === vendor) {
return {
speech_credential_sid: credential.speech_credential_sid,
api_key: credential.api_key,

@@ -1209,9 +1287,10 @@ class CallSession extends Emitter {
}
else {
writeAlerts({
alert_type: AlertType.STT_NOT_PROVISIONED,
alert_type: type === 'tts' ? AlertType.TTS_NOT_PROVISIONED : AlertType.STT_NOT_PROVISIONED,
account_sid: this.accountSid,
vendor,
label,
target_sid: this.callSid
}).catch((err) => this.logger.error({err}, 'Error writing tts alert'));
}

@@ -1242,6 +1321,7 @@ class CallSession extends Emitter {
this.ttsStreamingBuffer.on(TtsStreamingEvents.Pause, this._onTtsStreamingPause.bind(this));
this.ttsStreamingBuffer.on(TtsStreamingEvents.Resume, this._onTtsStreamingResume.bind(this));
this.ttsStreamingBuffer.on(TtsStreamingEvents.ConnectFailure, this._onTtsStreamingConnectFailure.bind(this));
this.ttsStreamingBuffer.on(TtsStreamingEvents.Connected, this._onTtsStreamingConnected.bind(this));
}
else {
this.logger.info(`CallSession:exec - not a normal call session: ${this.constructor.name}`);
@@ -1300,7 +1380,7 @@ class CallSession extends Emitter {
}

if (0 === this.tasks.length &&
this.requestor instanceof WsRequestor &&
this.appIsUsingWebsockets &&
!this.requestor.closedGracefully &&
!this.callGone &&
!this.isConfirmCallSession

@@ -1410,7 +1490,11 @@ class CallSession extends Emitter {
}
else {
if (this.req && !this.dlg) {
this.req.cancel();
try {
this.req.cancel();
} catch (err) {
this.logger.error({err}, 'CallSession:_lccCallStatus error cancelling request');
}
this._callReleased();
}
}

@@ -1857,7 +1941,7 @@ Duration=${duration} `
return;
}
else if (tokens === undefined) {
this.logger.info({opts}, 'CallSession:_lccTtsTokens - invalid command since id is missing');
this.logger.info({opts}, 'CallSession:_lccTtsTokens - invalid command since tokens is missing');
return this.requestor.request('tts:tokens-result', '/tokens-result', {
id,
status: 'failed',

@@ -1944,7 +2028,7 @@ Duration=${duration} `
return this._lccDub(opts.dub, callSid);
}
else if (opts.boostAudioSignal) {
return this._lccBoostAudioSignal(opts, callSid);
return this._lccBoostAudioSignal(opts.boostAudioSignal, callSid);
}
else if (opts.media_path) {
return this._lccMediaPath(opts.media_path, callSid);

@@ -1963,6 +2047,17 @@ Duration=${duration} `
}
} catch (err) {
this.logger.info({err, opts, callSid}, 'CallSession:updateCall - error updating call');
const {writeAlerts} = this.srf.locals;
try {
writeAlerts({
alert_type: 'error-updating-call',
account_sid: this.accountSid,
message: err.message,
target_sid: callSid
});
} catch (err) {
this.logger.error({err}, 'Error writing error-updating-call alert');
}
}
}
@@ -2367,6 +2462,7 @@ Duration=${duration} `
const ep = await this._createMediaEndpoint({
headers: {
'X-Jambones-Call-ID': this.callId,
'X-Call-Sid': this.callSid,
},
remoteSdp: this.req.body
});

@@ -2378,7 +2474,7 @@ Duration=${duration} `
this.logger.debug(`endpoint was destroyed!! ${this.ep.uuid}`);
});

if (this.direction === CallDirection.Inbound) {
if (this.direction === CallDirection.Inbound || this.application?.transferredCall) {
if (task.earlyMedia && !this.req.finalResponseSent) {
this.res.send(183, {body: ep.local.sdp});
return {ep};

@@ -2404,6 +2500,36 @@ Duration=${duration} `
}
else {
this.logger.error(err, `Error attempting to allocate endpoint for for task ${task.name}`);
// Check for SipError type (e.g., 488 codec incompatibility)
const isSipError = err.name === 'SipError';
if (isSipError && err.status) {
// Extract Reason header from SIP response if available (e.g., Q.850;cause=88;text="INCOMPATIBLE_DESTINATION")
const sipReasonHeader = err.res?.msg?.headers?.reason;
this._endpointAllocationError = {
status: err.status,
reason: err.reason || 'Endpoint Allocation Failed',
sipReasonHeader
};
this.logger.info({endpointAllocationError: this._endpointAllocationError},
'Captured SipError for propagation to SBC');

// Send SIP error response immediately for inbound calls
if (this.res && !this.res.finalResponseSent) {
this.logger.info(`Sending ${err.status} response to SBC due to SipError`);
this.res.send(err.status, {
headers: {
'X-Reason': `endpoint allocation failure: ${err.reason || 'Endpoint Allocation Failed'}`,
...(sipReasonHeader && {'Reason': sipReasonHeader})
}
});
this._notifyCallStatusChange({
callStatus: CallStatus.Failed,
sipStatus: err.status,
sipReason: err.reason || 'Endpoint Allocation Failed'
});
this._callReleased();
}
}
throw new Error(`${BADPRECONDITIONS}: unable to allocate endpoint`);
}
}

@@ -2510,7 +2636,7 @@ Duration=${duration} `
this.backgroundTaskManager.stopAll();
this.clearOrRestoreActionHookDelayProcessor().catch((err) => {});

this.ttsStreamingBuffer?.stop();
this.stopTtsStream();

this.sttLatencyCalculator?.stop();
}
@@ -2664,7 +2790,7 @@ Duration=${duration} `
*/
_onRefer(req, res) {
const task = this.currentTask;
const sd = task.sd;
const sd = task?.sd;
if (task && TaskName.Dial === task.name && sd && task.referHook) {
task.handleRefer(this, req, res);
}

@@ -2916,8 +3042,7 @@ Duration=${duration} `

// manage record all call.
if (callStatus === CallStatus.InProgress) {
if (this.accountInfo.account.record_all_calls ||
this.application.record_all_calls) {
if (this.accountInfo.account.record_all_calls || this.application.record_all_calls) {
this.backgroundTaskManager.newTask('record');
}
} else if (callStatus == CallStatus.Completed) {

@@ -2970,14 +3095,14 @@ Duration=${duration} `
*/

_notifyTaskError(obj) {
if (this.requestor instanceof WsRequestor) {
if (this.appIsUsingWebsockets) {
this.requestor.request('jambonz:error', '/error', obj)
.catch((err) => this.logger.debug({err}, 'CallSession:_notifyTaskError - Error sending'));
}
}

_notifyTaskStatus(task, evt) {
if (this.notifyEvents && this.requestor instanceof WsRequestor) {
if (this.notifyEvents && this.appIsUsingWebsockets) {
const obj = {...evt, id: task.id, name: task.name};
this.requestor.request('verb:status', '/status', obj)
.catch((err) => this.logger.debug({err}, 'CallSession:_notifyTaskStatus - Error sending'));

@@ -3029,7 +3154,7 @@ Duration=${duration} `
}

_clearTasks(backgroundGather, evt) {
if (this.requestor instanceof WsRequestor && !backgroundGather.cleared) {
if (this.appIsUsingWebsockets && !backgroundGather.cleared) {
this.logger.debug({evt}, 'CallSession:_clearTasks on event from background gather');
try {
backgroundGather.cleared = true;

@@ -3057,13 +3182,18 @@ Duration=${duration} `
}
}

_onTtsStreamingConnected() {
this.requestor?.request('tts:streaming-event', '/streaming-event', {event_type: 'stream_open'})
.catch((err) => this.logger.info({err}, 'CallSession:_onTtsStreamingConnected - Error sending'));
}

_onTtsStreamingEmpty() {
const task = this.currentTask;
if (task && TaskName.Say === task.name) {
task.notifyTtsStreamIsEmpty();
} else if (
// If Gather nested say task is streaming
TaskName.Gather === task.name && task.sayTask && task.sayTask.isStreamingTts) {
task && TaskName.Gather === task.name && task.sayTask && task.sayTask.isStreamingTts) {
const sayTask = task.sayTask;
sayTask.notifyTtsStreamIsEmpty();
}
@@ -8,7 +8,8 @@ const CallSession = require('./call-session');

*/
class ConfirmCallSession extends CallSession {
constructor({logger, application, dlg, ep, tasks, callInfo, accountInfo, memberId, confName, rootSpan, req}) {
// eslint-disable-next-line max-len
constructor({logger, application, dlg, ep, tasks, callInfo, accountInfo, memberId, confName, rootSpan, req, tmpFiles}) {
super({
logger,
application,

@@ -24,6 +25,7 @@ class ConfirmCallSession extends CallSession {
this.dlg = dlg;
this.ep = ep;
this.req = req;
this.tmpFiles = tmpFiles;
}

/**
@@ -22,6 +22,12 @@ class InboundCallSession extends CallSession {
this.req = req;
this.res = res;

// if the call was canceled before we got here, handle it
if (this.req.locals.canceled) {
req.locals.logger.info('InboundCallSession: constructor - call was already canceled');
this._onCancel();
}

req.once('cancel', this._onCancel.bind(this));

this.on('callStatusChange', this._notifyCallStatusChange.bind(this));

@@ -54,6 +60,19 @@ class InboundCallSession extends CallSession {
}
});
}
else if (this._endpointAllocationError) {
// Propagate SIP error from endpoint allocation failure back to the client
const {status, reason, sipReasonHeader} = this._endpointAllocationError;
this.rootSpan.setAttributes({'call.termination': `endpoint allocation SIP error ${status}`});
this.logger.info({endpointAllocationError: this._endpointAllocationError},
`InboundCallSession:_onTasksDone generating ${status} due to endpoint allocation failure`);
this.res.send(status, {
headers: {
'X-Reason': `endpoint allocation failure: ${reason}`,
...(sipReasonHeader && {'Reason': sipReasonHeader})
}
});
}
else {
this.rootSpan.setAttributes({'call.termination': 'tasks completed without answering call'});
this.logger.info('InboundCallSession:_onTasksDone auto-generating non-success response to invite');
@@ -49,7 +49,8 @@ class Conference extends Task {
this.confName = this.data.name;
[
'beep', 'startConferenceOnEnter', 'endConferenceOnExit', 'joinMuted',
'maxParticipants', 'waitHook', 'statusHook', 'endHook', 'enterHook', 'endConferenceDuration'
'maxParticipants', 'waitHook', 'statusHook', 'endHook', 'enterHook',
'endConferenceDuration', 'distributeDtmf'
].forEach((attr) => this[attr] = this.data[attr]);
this.record = this.data.record || {};
this.statusEvents = [];

@@ -356,6 +357,7 @@ class Conference extends Task {
//https://developer.signalwire.com/freeswitch/FreeSWITCH-Explained/Modules/mod_conference_3965534/
// mute | Enter conference muted
...((this.joinMuted || this.speakOnlyTo) && {mute: true}),
...(this.distributeDtmf && {'dist-dtmf': true})
}});

/**

@@ -673,7 +675,8 @@ class Conference extends Task {
confName: this.confName,
tasks,
rootSpan: cs.rootSpan,
req: cs.req
req: cs.req,
tmpFiles: cs.tmpFiles,
});
await this._playSession.exec();
this._playSession = null;
@@ -18,7 +18,8 @@ class TaskConfig extends Task {
'boostAudioSignal',
'vad',
'ttsStream',
'autoStreamTts'
'autoStreamTts',
'disableTtsCache'
].forEach((k) => this[k] = this.data[k] || {});

if ('notifyEvents' in this.data) {

@@ -88,6 +89,7 @@ class TaskConfig extends Task {
get hasReferHook() { return Object.keys(this.data).includes('referHook'); }
get hasNotifySttLatency() { return Object.keys(this.data).includes('notifySttLatency'); }
get hasTtsStream() { return Object.keys(this.ttsStream).length; }
get hasDisableTtsCache() { return Object.keys(this.data).includes('disableTtsCache'); }

get summary() {
const phrase = [];

@@ -125,6 +127,7 @@ class TaskConfig extends Task {
phrase.push(`${this.ttsStream.enable ? 'enable' : 'disable'} ttsStream`);
}
if ('autoStreamTts' in this.data) phrase.push(`enable Say.stream value ${this.data.autoStreamTts ? 'on' : 'off'}`);
if (this.hasDisableTtsCache) phrase.push(`disableTtsCache ${this.data.disableTtsCache ? 'on' : 'off'}`);
return `${this.name}{${phrase.join(',')}}`;
}

@@ -357,6 +360,11 @@ class TaskConfig extends Task {
this.logger.info('Config: disabling ttsStream');
cs.disableTtsStream();
}

if (this.hasDisableTtsCache) {
this.logger.info(`set disableTtsCache = ${this.disableTtsCache}`);
cs.disableTtsCache = this.data.disableTtsCache;
}
}

async kill(cs) {
@@ -21,7 +21,7 @@ const {parseUri} = require('drachtio-srf');
const {ANCHOR_MEDIA_ALWAYS,
JAMBONZ_DIAL_PAI_HEADER,
JAMBONES_DIAL_SBC_FOR_REGISTERED_USER} = require('../config');
const { isOnhold, isOpusFirst } = require('../utils/sdp-utils');
const { isOnhold, isOpusFirst, getLeadingCodec } = require('../utils/sdp-utils');
const { normalizeJambones } = require('@jambonz/verb-specifications');
const { selectHostPort } = require('../utils/network');
const { sleepFor } = require('../utils/helpers');

@@ -158,6 +158,7 @@ class TaskDial extends Task {

get canReleaseMedia() {
const keepAnchor = this.data.anchorMedia ||
this.isTranscoding ||
this.cs.isBackGroundListen ||
this.cs.onHoldMusic ||
ANCHOR_MEDIA_ALWAYS ||

@@ -194,6 +195,9 @@ class TaskDial extends Task {
async exec(cs) {
await super.exec(cs);

/* capture whether A leg was already answered before this dial task started */
this._aLegAlreadyAnswered = !!cs.dlg;

if (this.data.anchorMedia && this.data.exitMediaPath) {
this.logger.info('Dial:exec - incompatible anchorMedia and exitMediaPath are both set, will obey anchorMedia');
delete this.data.exitMediaPath;

@@ -549,7 +553,7 @@ class TaskDial extends Task {
let sbcAddress = this.proxy || getSBC();
const teamsInfo = {};
let fqdn;
const forwardPAI = this.forwardPAI ?? JAMBONZ_DIAL_PAI_HEADER; // dial verb overides env var
const forwardPAI = this.forwardPAI ?? !JAMBONZ_DIAL_PAI_HEADER; // dial verb overides env var
this.logger.debug(forwardPAI, 'forwardPAI value');
if (!sbcAddress) throw new Error('no SBC found for outbound call');
this.headers = {

@@ -575,7 +579,7 @@ class TaskDial extends Task {
proxy: `sip:${sbcAddress}`,
callingNumber: this.callerId || fromUri.user,
...(this.callerName && {callingName: this.callerName}),
opusFirst: isOpusFirst(this.cs.ep.remote.sdp),
opusFirst: isOpusFirst(this.cs.ep.local.sdp),
isVideoCall: this.cs.ep.remote.sdp.includes('m=video')
};

@@ -641,7 +645,9 @@ class TaskDial extends Task {
* trunk isn't specified,
* check if number matches any existing numbers
* */
if (t.type === 'phone' && !t.trunk) {
const { lookupLcrByAccount} = srf.locals.dbHelpers;
const lcrs = await lookupLcrByAccount(cs.accountSid);
if (t.type === 'phone' && !t.trunk && lcrs.length == 0) {
const str = this.callerId || req.callingNumber || '';
const callingNumber = str.startsWith('+') ? str.substring(1) : str;
const voip_carrier_sid = await lookupCarrierByPhoneNumber(cs.accountSid, callingNumber);

@@ -674,7 +680,8 @@ class TaskDial extends Task {
rootSpan: cs.rootSpan,
startSpan: this.startSpan.bind(this),
dialTask: this,
onHoldMusic: this.cs.onHoldMusic
onHoldMusic: this.cs.onHoldMusic,
tmpFiles: this.cs.tmpFiles,
});
this.dials.set(sd.callSid, sd);

@@ -769,12 +776,24 @@ class TaskDial extends Task {
}

async _connectSingleDial(cs, sd) {
// start connect with dialed leg, this is the soonest we can identify transcoding
if (this.epOther && sd.ep) {
const codecA = getLeadingCodec(this.epOther.local.sdp);
const codecB = getLeadingCodec(sd.ep.remote.sdp);
this.isTranscoding = (codecA !== codecB);
if (this.isTranscoding) {
this.logger.info(`Dial:_connectSingleDial - transcoding from ${codecA} (A leg) to ${codecB} (B leg)`);
}
}
if (!this.bridged && !this.canReleaseMedia) {
this.logger.debug('Dial:_connectSingleDial bridging endpoints');
if (this.epOther) {
this.epOther.api('uuid_break', this.epOther.uuid);
this.epOther.bridge(sd.ep);
}
else {
this.logger.error('Dial:_connectSingleDial - no other endpoint to bridge!');
}
this.bridged = true;
}
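
_connectSingleDial now compares the leading codec on each leg's SDP (via a new getLeadingCodec helper imported from sdp-utils) to decide whether the bridge is transcoding, which in turn keeps the media anchored. The helper's implementation is not part of this compare view; below is a minimal, hypothetical sketch of what such a function might do, assuming a standard m=audio line with rtpmap attributes.

```js
// Hypothetical sketch of getLeadingCodec -- not the actual sdp-utils implementation.
function getLeadingCodec(sdp) {
  // first payload type on the m=audio line, e.g. "m=audio 49170 RTP/AVP 0 8 101" -> "0"
  const m = /m=audio \d+ [^ ]+ (\d+)/.exec(sdp);
  if (!m) return undefined;
  const pt = m[1];
  // map the payload type to its codec name via the matching rtpmap, e.g. "a=rtpmap:0 PCMU/8000"
  const rtpmap = new RegExp(`a=rtpmap:${pt} ([^/\\r\\n]+)`).exec(sdp);
  return rtpmap ? rtpmap[1].toUpperCase() : pt;
}

// getLeadingCodec(aLegSdp) !== getLeadingCodec(bLegSdp) => the two legs would be transcoding
```
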
@@ -856,8 +875,12 @@ class TaskDial extends Task {
this.sd = sd;
this.callSid = sd.callSid;
if (this.earlyMedia) {
debug('Dial:_selectSingleDial propagating answer supervision on A leg now that B is connected');
await cs.propagateAnswer();
if (this._aLegAlreadyAnswered) {
debug('Dial:_selectSingleDial A leg was already answered, skipping propagateAnswer');
} else {
debug('Dial:_selectSingleDial propagating answer supervision on A leg now that B is connected');
await cs.propagateAnswer();
}
}
if (this.timeLimit) {
this.timerMaxCallDuration = setTimeout(this._onMaxCallDuration.bind(this, cs), this.timeLimit * 1000);

@@ -923,7 +946,6 @@ class TaskDial extends Task {
this.logger.info({err}, 'Dial:_selectSingleDial - Error boosting audio signal');
}
}

/* if we can release the media back to the SBC, do so now */
if (this.canReleaseMedia || this.shouldExitMediaPathEntirely) {
setTimeout(this._releaseMedia.bind(this, cs, sd, this.shouldExitMediaPathEntirely), 200);

@@ -1098,7 +1120,8 @@ class TaskDial extends Task {
accountInfo: this.cs.accountInfo,
tasks,
rootSpan: this.cs.rootSpan,
req: this.cs.req
req: this.cs.req,
tmpFiles: this.cs.tmpFiles,
});
await this._onHoldSession.exec();
this._onHoldSession = null;
@@ -370,7 +370,8 @@ class TaskEnqueue extends Task {
accountInfo: cs.accountInfo,
tasks: tasksToRun,
rootSpan: cs.rootSpan,
req: cs.req
req: cs.req,
tmpFiles: cs.tmpFiles,
});
await this._playSession.exec();
this._playSession = null;
@@ -5,13 +5,15 @@ const {
AwsTranscriptionEvents,
AzureTranscriptionEvents,
DeepgramTranscriptionEvents,
GladiaTranscriptionEvents,
SonioxTranscriptionEvents,
CobaltTranscriptionEvents,
IbmTranscriptionEvents,
NvidiaTranscriptionEvents,
JambonzTranscriptionEvents,
AssemblyAiTranscriptionEvents,
DeepgramRiverTranscriptionEvents,
HoundifyTranscriptionEvents,
DeepgramfluxTranscriptionEvents,
VoxistTranscriptionEvents,
CartesiaTranscriptionEvents,
OpenAITranscriptionEvents,

@@ -93,6 +95,8 @@ class TaskGather extends SttTask {

get needsStt() { return this.input.includes('speech'); }

get isBackgroundGather() { return this.bugname_prefix === 'background_bargeIn_'; }

get wantsSingleUtterance() {
return this.data.recognizer?.singleUtterance === true;
}

@@ -227,7 +231,9 @@ class TaskGather extends SttTask {

const startListening = async(cs, ep) => {
this._startTimer();
if (this.isContinuousAsr && 0 === this.timeout) this._startAsrTimer();
if (this.isContinuousAsr && 0 === this.timeout && !this.isBackgroundGather) {
this._startAsrTimer();
}
if (this.input.includes('speech') && !this.listenDuringPrompt) {
try {
await this._setSpeechHandlers(cs, ep);

@@ -252,7 +258,7 @@ class TaskGather extends SttTask {
startDtmfListener();
}
this._stopVad();
if (!this.killed) {
if (!this.killed && !this.resolved) {
startListening(cs, ep);
if (this.input.includes('speech') && this.vendor === 'nuance' && this.listenDuringPrompt) {
this.logger.debug('Gather:exec - starting transcription timers after say completes');

@@ -264,19 +270,21 @@ class TaskGather extends SttTask {
};
this.sayTask.span = span;
this.sayTask.ctx = ctx;
this.sayTask.exec(cs, {ep}) // kicked off, _not_ waiting for it to complete
this.sayTask
.exec(cs, {ep}) // kicked off, _not_ waiting for it to complete
.then(() => {
if (this.sayTask.isStreamingTts) return;
this.logger.debug('Gather:exec - nested say task completed');
span.end();
process();
return;
})
.catch((err) => {
process();
});
if (this.sayTask.isStreamingTts && !this.sayTask.closeOnStreamEmpty) {
// if streaming tts, we do not wait for it to complete if it is not closing the stream automatically
process();
} else {
this.sayTask.on('playDone', (err) => {
span.end();
if (err) this.logger.error({err}, 'Gather:exec Error playing tts');
process();
});
}
}
else if (this.playTask) {

@@ -288,7 +296,7 @@ class TaskGather extends SttTask {
startDtmfListener();
}
this._stopVad();
if (!this.killed) {
if (!this.killed && !this.resolved) {
startListening(cs, ep);
if (this.input.includes('speech') && this.vendor === 'nuance' && this.listenDuringPrompt) {
this.logger.debug('Gather:exec - starting transcription timers after play completes');

@@ -300,15 +308,17 @@ class TaskGather extends SttTask {
};
this.playTask.span = span;
this.playTask.ctx = ctx;
this.playTask.exec(cs, {ep}) // kicked off, _not_ waiting for it to complete
this.playTask
.exec(cs, {ep}) // kicked off, _not_ waiting for it to complete
.then(() => {
this.logger.debug('Gather:exec - nested play task completed');
span.end();
process();
return;
})
.catch((err) => {
process();
});
this.playTask.on('playDone', (err) => {
span.end();
if (err) this.logger.error({err}, 'Gather:exec Error playing url');
process();
});
}
else {
if (this.killed) {
@@ -368,6 +378,9 @@ class TaskGather extends SttTask {

_onDtmf(cs, ep, evt) {
this.logger.debug(evt, 'TaskGather:_onDtmf');
if (!this._timeoutTimer && this.timeout > 0) {
this._startTimer();
}
clearTimeout(this.interDigitTimer);
let resolved = false;
if (this.dtmfBargein) {

@@ -392,6 +405,7 @@ class TaskGather extends SttTask {
if (this.digitBuffer.length === 0 && this.needsStt) {
// DTMF is higher priority than STT.
this.removeCustomEventListeners();
this._clearAsrTimer(); //clear ASR timer as we're now using dtmf
this._stopTranscribing(ep);
}
this.digitBuffer += evt.dtmf;

@@ -406,6 +420,7 @@ class TaskGather extends SttTask {
const ms = this.interDigitTimeout * 1000;
this.logger.debug(`starting interdigit timer of ${ms}`);
this.interDigitTimer = setTimeout(() => this._resolve('dtmf-interdigit-timeout'), ms);
this._clearTimer(); //clear main timer as we're now using interdigit dtmf timer
}
}

@@ -463,16 +478,32 @@ class TaskGather extends SttTask {
this.addCustomEventListener(ep, DeepgramTranscriptionEvents.Connect, this._onVendorConnect.bind(this, cs, ep));
this.addCustomEventListener(ep, DeepgramTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep));
this.addCustomEventListener(ep, DeepgramTranscriptionEvents.Error, this._onVendorError.bind(this, cs, ep));
break;

case 'deepgramriver':
this.bugname = `${this.bugname_prefix}deepgramriver_transcribe`;
case 'deepgramflux':
this.bugname = `${this.bugname_prefix}deepgramflux_transcribe`;
this.addCustomEventListener(
ep, DeepgramRiverTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep, DeepgramfluxTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
this.addCustomEventListener(
ep, DeepgramRiverTranscriptionEvents.Connect, this._onVendorConnect.bind(this, cs, ep));
this.addCustomEventListener(ep, DeepgramRiverTranscriptionEvents.ConnectFailure,
ep, DeepgramfluxTranscriptionEvents.Connect, this._onVendorConnect.bind(this, cs, ep));
this.addCustomEventListener(ep, DeepgramfluxTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep));
this.addCustomEventListener(ep, DeepgramfluxTranscriptionEvents.Error, this._onVendorError.bind(this, cs, ep));
break;

case 'gladia':
this.bugname = `${this.bugname_prefix}gladia_transcribe`;
this.addCustomEventListener(
ep, GladiaTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
this.addCustomEventListener(ep, GladiaTranscriptionEvents.Connect, this._onVendorConnect.bind(this, cs, ep));
this.addCustomEventListener(ep, GladiaTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep));
this.addCustomEventListener(ep, GladiaTranscriptionEvents.Error, this._onVendorError.bind(this, cs, ep));
// gladia require unique url for each session
const {host, path} = await this.createGladiaLiveSession();
opts.GLADIA_SPEECH_HOST = host;
opts.GLADIA_SPEECH_PATH = path;
break;

case 'soniox':
@@ -552,6 +583,18 @@ class TaskGather extends SttTask {
this._onVendorConnectFailure.bind(this, cs, ep));
break;

case 'houndify':
this.bugname = `${this.bugname_prefix}houndify_transcribe`;
this.addCustomEventListener(ep, HoundifyTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep));
this.addCustomEventListener(ep, HoundifyTranscriptionEvents.Error,
this._onVendorError.bind(this, cs, ep));
this.addCustomEventListener(ep, HoundifyTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep));
this.addCustomEventListener(ep, HoundifyTranscriptionEvents.Connect,
this._onVendorConnect.bind(this, cs, ep));
break;

case 'voxist':
this.bugname = `${this.bugname_prefix}voxist_transcribe`;
this.addCustomEventListener(ep, VoxistTranscriptionEvents.Transcription,

@@ -696,10 +739,11 @@ class TaskGather extends SttTask {

_startTimer() {
if (0 === this.timeout) return;
this.logger.debug(`Starting timoutTimer of ${this.timeout}ms`);
this._clearTimer();
this._timeoutTimer = setTimeout(() => {
// If continuousASR in use then extend by the asr window for more transcripts.
if (this.isContinuousAsr) this._startAsrTimer();
if (this.interDigitTimer) return; // let the inter-digit timer complete
else {
this._resolve(this.digitBuffer.length >= this.minDigits ? 'dtmf-num-digits' : 'timeout');
}

@@ -841,17 +885,15 @@ class TaskGather extends SttTask {
this._fillerNoiseOn = false; // in a race, if we just started audio it may sneak through here
this.ep.api('uuid_break', this.ep.uuid)
.catch((err) => this.logger.info(err, 'Error killing audio'));
cs.clearTtsStream();
if (cs.isTtsStreamEnabled) cs.clearTtsStream();
}
return;
}
if (this.sayTask && !this.sayTask.killed) {
this.sayTask.removeAllListeners('playDone');
this.sayTask.kill(cs);
this.sayTask = null;
}
if (this.playTask && !this.playTask.killed) {
this.playTask.removeAllListeners('playDone');
this.playTask.kill(cs);
this.playTask = null;
}

@@ -906,7 +948,7 @@ class TaskGather extends SttTask {

evt = this.normalizeTranscription(evt, this.vendor, 1, this.language,
this.shortUtterance, this.data.recognizer.punctuation);
//this.logger.debug({evt, bugname, finished, vendor: this.vendor}, 'Gather:_onTranscription normalized transcript');
this.logger.debug({evt, bugname, finished, vendor: this.vendor}, 'Gather:_onTranscription normalized transcript');

if (evt.alternatives.length === 0) {
this.logger.info({evt}, 'TaskGather:_onTranscription - got empty transcript, continue listening');

@@ -1072,6 +1114,11 @@ class TaskGather extends SttTask {
this.cs.requestor.request('verb:hook', this.partialResultHook, Object.assign({speech: evt},
this.cs.callInfo, httpHeaders));
}
else if (this.vendor === 'deepgramflux' &&
['EagerEndOfTurn', 'TurnResumed'].includes(evt.vendor.evt?.event)) {
this.logger.debug(`Gather:_onTranscription - deepgramflux event detected: ${evt.event}`);
this.performAction({speech: evt, reason: 'speechDetected'}, false);
}
if (this.vendor === 'soniox') {
if (evt.vendor.finalWords.length) {
this.logger.debug({evt}, 'TaskGather:_onTranscription - buffering soniox transcript');

@@ -1118,7 +1165,7 @@ class TaskGather extends SttTask {
}

async _startFallback(cs, ep, evt) {
if (this.canFallback) {
if (this.canFallback()) {
this._stopTranscribing(ep);
try {
this.logger.debug('gather:_startFallback');

@@ -1275,6 +1322,8 @@ class TaskGather extends SttTask {
}

this.resolved = true;
// gather is resolved, prevent any further transcription events while resolve in progress
this.removeCustomEventListeners();
// If bargin is false and ws application return ack to verb:hook
// the gather should not play any audio
this._killAudio(this.cs);
@@ -5,6 +5,17 @@ const moment = require('moment');
const MAX_PLAY_AUDIO_QUEUE_SIZE = 10;
const DTMF_SPAN_NAME = 'dtmf';

function escapeString(str) {
return str
.replace(/\\/g, '\\\\') // Escape backslashes
.replace(/"/g, '\\"') // Escape double quotes
.replace(/[\b]/g, '\\b') // Escape backspace (NOTE: [\b] not \b)
.replace(/\f/g, '\\f') // Escape formfeed
.replace(/\n/g, '\\n') // Escape newlines
.replace(/\r/g, '\\r') // Escape carriage returns
.replace(/\t/g, '\\t'); // Escape tabs
}

class TaskListen extends Task {
constructor(logger, opts, parentTask) {
super(logger, opts);

@@ -16,10 +27,21 @@ class TaskListen extends Task {
this.preconditions = TaskPreconditions.Endpoint;

[
'action', 'auth', 'method', 'url', 'finishOnKey', 'maxLength', 'metadata', 'mixType', 'passDtmf', 'playBeep',
'action', 'auth', 'method', 'url', 'finishOnKey', 'maxLength', 'mixType', 'passDtmf', 'playBeep',
'sampleRate', 'timeout', 'transcribe', 'wsAuth', 'disableBidirectionalAudio', 'channel'
].forEach((k) => this[k] = this.data[k]);

//Escape JSON special characters in metadata
if (this.data.metadata) {
this.metadata = {};
for (const key in this.data.metadata) {
if (this.data.metadata.hasOwnProperty(key)) {
const value = this.data.metadata[key];
this.metadata[key] = typeof value === 'string' ? escapeString(value) : value;
}
}
}

this.mixType = this.mixType || 'mono';
this.sampleRate = this.sampleRate || 8000;
this.earlyMedia = this.data.earlyMedia === true;
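
The listen verb now escapes JSON special characters in string metadata values before use ('metadata' is also dropped from the plain copy loop since it is handled separately). A small usage example of the escapeString helper shown above; the sample metadata values are made up.

```js
// Example input/output for the escapeString helper above (sample values only).
const metadata = {
  note: 'line1\nline2 with "quotes" and a \t tab',
  attempts: 3,
};

const escaped = {};
for (const key in metadata) {
  const value = metadata[key];
  escaped[key] = typeof value === 'string' ? escapeString(value) : value;
}

console.log(escaped.note);
// line1\nline2 with \"quotes\" and a \t tab   (escapes survive JSON embedding)
console.log(escaped.attempts); // 3 (non-strings pass through unchanged)
```
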
@@ -72,7 +94,7 @@ class TaskListen extends Task {
} catch (err) {
this.logger.info(err, `TaskListen:exec - error ${this.url}`);
}
if (this.transcribeTask) this.transcribeTask.kill();
if (this.transcribeTask) this.transcribeTask.kill(cs);
this._removeListeners(ep);
}
@@ -36,6 +36,9 @@ class TaskLlmGoogle_S2S extends Task {
this.model = this.parent.model || 'models/gemini-2.0-flash-live-001';
this.auth = this.parent.auth;
this.connectionOptions = this.parent.connectOptions;
const {host, version} = this.connectionOptions || {};
this.host = host;
this.version = version;

const {apiKey} = this.auth || {};
if (!apiKey) throw new Error('auth.apiKey is required for Google S2S');

@@ -46,7 +49,7 @@
this.eventHook = this.data.eventHook;
this.toolHook = this.data.toolHook;

const {setup} = this.data.llmOptions;
const {setup, sessionResumption} = this.data.llmOptions;

if (typeof setup !== 'object') {
throw new Error('llmOptions with an initial setup is required for Google S2S');

@@ -54,6 +57,7 @@
this.setup = {
...setup,
model: this.model,
...(sessionResumption && {sessionResumption}),
// make sure output is always audio
generationConfig: {
...(setup.generationConfig || {}),

@@ -138,6 +142,10 @@

try {
const args = [ep.uuid, 'session.create', this.apiKey];
if (this.host) {
args.push(this.host);
if (this.version) args.push(this.version);
}
await this._api(ep, args);
} catch (err) {
this.logger.error({err}, 'TaskLlmGoogle_S2S:_startListening');
@@ -146,8 +146,9 @@ class TaskLlmUltravox_S2S extends Task {
|
||||
return data;
|
||||
}
|
||||
|
||||
_unregisterHandlers() {
|
||||
_unregisterHandlers(ep) {
|
||||
this.removeCustomEventListeners();
|
||||
ep.removeAllListeners('dtmf');
|
||||
}
|
||||
|
||||
_registerHandlers(ep) {
|
||||
@@ -155,6 +156,7 @@ class TaskLlmUltravox_S2S extends Task {
|
||||
this.addCustomEventListener(ep, LlmEvents_Ultravox.ConnectFailure, this._onConnectFailure.bind(this, ep));
|
||||
this.addCustomEventListener(ep, LlmEvents_Ultravox.Disconnect, this._onDisconnect.bind(this, ep));
|
||||
this.addCustomEventListener(ep, LlmEvents_Ultravox.ServerEvent, this._onServerEvent.bind(this, ep));
|
||||
ep.on('dtmf', this._onDtmf.bind(this, ep));
|
||||
}
|
||||
|
||||
async _startListening(cs, ep) {
|
||||
@@ -189,7 +191,7 @@ class TaskLlmUltravox_S2S extends Task {
|
||||
/* note: the parent llm verb started the span, which is why this is necessary */
|
||||
await this.parent.performAction(this.results);
|
||||
|
||||
this._unregisterHandlers();
|
||||
this._unregisterHandlers(ep);
|
||||
}
|
||||
|
||||
async kill(cs) {
|
||||
@@ -218,7 +220,7 @@ class TaskLlmUltravox_S2S extends Task {
|
||||
async _onServerEvent(_ep, evt) {
|
||||
let endConversation = false;
|
||||
const type = evt.type;
|
||||
this.logger.debug({evt}, 'TaskLlmUltravox_S2S:_onServerEvent');
|
||||
//this.logger.debug({evt}, 'TaskLlmUltravox_S2S:_onServerEvent');
|
||||
|
||||
/* server errors of some sort */
|
||||
if (type === 'error') {
|
||||
@@ -346,6 +348,18 @@ class TaskLlmUltravox_S2S extends Task {
|
||||
excludeEvents: this.excludeEvents
|
||||
}, 'TaskLlmUltravox_S2S:_populateEvents');
|
||||
}
|
||||
|
||||
_onDtmf(ep, evt) {
|
||||
this.logger.info({evt}, 'TaskLlmUltravox_S2S:_onDtmf - DTMF received');
|
||||
const {dtmf} = evt;
|
||||
const data = {
|
||||
type: 'user_text_message',
|
||||
text: `DTMF received: ${dtmf}`,
|
||||
urgency: 'immediate'
|
||||
};
|
||||
this._api(ep, [ep.uuid, ClientEvent, JSON.stringify(data)])
|
||||
.catch((err) => this.logger.info({err, evt}, 'TaskLlmUltravox_S2S:_onDtmf - Error sending DTMF as text message'));
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = TaskLlmUltravox_S2S;
|
||||
|
||||
@@ -6,9 +6,21 @@ class TaskPlay extends Task {
|
||||
super(logger, opts);
|
||||
this.preconditions = TaskPreconditions.Endpoint;
|
||||
|
||||
this.url = this.data.url.includes('?')
|
||||
? this.data.url.split('?')[0] + '?' + this.data.url.split('?')[1].replaceAll('.', '%2E')
|
||||
: this.data.url;
|
||||
// Clean up URLs whose querystring contains a '.' unless the URL ends with an audio file extension
|
||||
// see https://github.com/jambonz/jambonz-feature-server/pull/1293
|
||||
// and https://github.com/jambonz/jambonz-feature-server/issues/1394 for background
|
||||
if (this.data.url.includes('?')) {
|
||||
if (['.mp3', '.wav'].includes(this.data.url.slice(-4))) {
|
||||
this.url = this.data.url;
|
||||
}
|
||||
else {
|
||||
this.url = this.data.url.split('?')[0] + '?' + this.data.url.split('?')[1].replaceAll('.', '%2E');
|
||||
}
|
||||
}
|
||||
else {
|
||||
this.url = this.data.url;
|
||||
}
|
||||
|
||||
this.seekOffset = this.data.seekOffset || -1;
|
||||
this.timeoutSecs = this.data.timeoutSecs || -1;
|
||||
this.loop = this.data.loop || 1;
|
||||
|
||||
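A small sketch of the url handling above (illustrative only; the real logic lives in the play verb constructor). URLs ending in a recognized audio extension are kept as-is; otherwise any '.' in the querystring is percent-escaped:

const normalizePlayUrl = (url) => {
  if (!url.includes('?')) return url;
  if (['.mp3', '.wav'].includes(url.slice(-4))) return url;   // filename extension in the querystring
  const [base, qs] = url.split('?');
  return `${base}?${qs.replaceAll('.', '%2E')}`;              // escape '.' elsewhere in the querystring
};

console.log(normalizePlayUrl('https://example.com/audio?file=greeting.v2&x=1'));
// => https://example.com/audio?file=greeting%2Ev2&x=1
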
@@ -1,7 +1,6 @@
|
||||
const Task = require('./task');
|
||||
const {TaskName} = require('../utils/constants');
|
||||
const WsRequestor = require('../utils/ws-requestor');
|
||||
const URL = require('url');
|
||||
const HttpRequestor = require('../utils/http-requestor');
|
||||
|
||||
/**
|
||||
@@ -10,6 +9,7 @@ const HttpRequestor = require('../utils/http-requestor');
|
||||
class TaskRedirect extends Task {
|
||||
constructor(logger, opts) {
|
||||
super(logger, opts);
|
||||
this.statusHook = opts.statusHook || false;
|
||||
}
|
||||
|
||||
get name() { return TaskName.Redirect; }
|
||||
@@ -33,7 +33,7 @@ class TaskRedirect extends Task {
|
||||
}
|
||||
else {
|
||||
const baseUrl = this.cs.application.requestor.baseUrl;
|
||||
const newUrl = URL.parse(this.actionHook);
|
||||
const newUrl = new URL(this.actionHook);
|
||||
const newBaseUrl = newUrl.protocol + '//' + newUrl.host;
|
||||
if (baseUrl != newBaseUrl) {
|
||||
try {
|
||||
@@ -47,6 +47,30 @@ class TaskRedirect extends Task {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* update the notifier if a new statusHook was provided */
|
||||
if (this.statusHook) {
|
||||
this.logger.info(`TaskRedirect updating statusHook to ${this.statusHook}`);
|
||||
try {
|
||||
const oldNotifier = cs.application.notifier;
|
||||
const isStatusHookAbsolute = cs.notifier?._isAbsoluteUrl(this.statusHook);
|
||||
if (isStatusHookAbsolute) {
|
||||
if (cs.notifier instanceof WsRequestor) {
|
||||
cs.application.notifier = new WsRequestor(this.logger, cs.accountSid, {url: this.statusHook},
|
||||
cs.accountInfo.account.webhook_secret);
|
||||
} else {
|
||||
cs.application.notifier = new HttpRequestor(this.logger, cs.accountSid, {url: this.statusHook},
|
||||
cs.accountInfo.account.webhook_secret);
|
||||
}
|
||||
if (oldNotifier?.close) oldNotifier.close();
|
||||
}
|
||||
/* update the call_status_hook URL that gets passed to the notifier */
|
||||
cs.application.call_status_hook = this.statusHook;
|
||||
} catch (err) {
|
||||
this.logger.info(err, `TaskRedirect error updating statusHook to ${this.statusHook}`);
|
||||
}
|
||||
}
|
||||
|
||||
await this.performAction();
|
||||
}
|
||||
}
|
||||
|
||||
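A hedged sketch of the statusHook update flow added above, assuming the requestor classes from lib/utils; this is a simplification for illustration, not the actual method:

const WsRequestor = require('../utils/ws-requestor');     // paths assume the repo layout
const HttpRequestor = require('../utils/http-requestor');

const updateNotifier = (logger, cs, statusHook) => {
  const oldNotifier = cs.application.notifier;
  // keep the same transport family as the current notifier (ws stays ws, http stays http)
  const Requestor = oldNotifier instanceof WsRequestor ? WsRequestor : HttpRequestor;
  cs.application.notifier = new Requestor(logger, cs.accountSid, {url: statusHook},
    cs.accountInfo.account.webhook_secret);
  if (oldNotifier?.close) oldNotifier.close();      // release the previous connection, if any
  cs.application.call_status_hook = statusHook;     // later call-status webhooks use the new hook
};
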
@@ -19,6 +19,7 @@ class TaskRestDial extends Task {
|
||||
this.timeout = this.data.timeout || 60;
|
||||
this.sipRequestWithinDialogHook = this.data.sipRequestWithinDialogHook;
|
||||
this.referHook = this.data.referHook;
|
||||
this.recentCallStatus = 0;
|
||||
|
||||
this.on('connect', this._onConnect.bind(this));
|
||||
this.on('callStatus', this._onCallStatus.bind(this));
|
||||
@@ -57,7 +58,11 @@ class TaskRestDial extends Task {
|
||||
this._clearCallTimer();
|
||||
if (this.canCancel) {
|
||||
this.canCancel = false;
|
||||
cs?.req?.cancel();
|
||||
try {
|
||||
cs?.req?.cancel();
|
||||
} catch (err) {
|
||||
this.logger.error({err}, 'TaskRestDial: error cancelling call');
|
||||
}
|
||||
}
|
||||
this.notifyTaskDone();
|
||||
}
|
||||
@@ -118,7 +123,8 @@ class TaskRestDial extends Task {
|
||||
}
|
||||
|
||||
_onCallStatus(status) {
|
||||
this.logger.debug(`CallStatus: ${status}`);
|
||||
this.logger.debug(`RestDial CallStatus: ${status}`);
|
||||
this.recentCallStatus = status;
|
||||
if (status >= 200) {
|
||||
this.canCancel = false;
|
||||
this._clearCallTimer();
|
||||
@@ -136,11 +142,16 @@ class TaskRestDial extends Task {
|
||||
}
|
||||
|
||||
_onCallTimeout() {
|
||||
this.logger.debug('TaskRestDial: timeout expired without answer, killing task');
|
||||
this.logger.debug(`TaskRestDial: timeout expired without answer, last status ${this.recentCallStatus}`);
|
||||
this.timer = null;
|
||||
if (this.canCancel) {
|
||||
if (this.canCancel && this.recentCallStatus < 200) {
|
||||
this.logger.debug('TaskRestDial: cancelling call attempt');
|
||||
this.canCancel = false;
|
||||
this.cs?.req?.cancel();
|
||||
try {
|
||||
this.cs?.req?.cancel();
|
||||
} catch (err) {
|
||||
this.logger.error({err}, 'TaskRestDial: error cancelling call');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
113  lib/tasks/say.js
@@ -1,14 +1,38 @@
|
||||
const assert = require('assert');
|
||||
const TtsTask = require('./tts-task');
|
||||
const {TaskName, TaskPreconditions} = require('../utils/constants');
|
||||
const {JAMBONES_SAY_CHUNK_SIZE} = require('../config');
|
||||
const pollySSMLSplit = require('polly-ssml-split');
|
||||
const { SpeechCredentialError } = require('../utils/error');
|
||||
const { SpeechCredentialError, NonFatalTaskError } = require('../utils/error');
|
||||
const { sleepFor } = require('../utils/helpers');
|
||||
const { NON_FANTAL_ERRORS } = require('../utils/constants.json');
|
||||
|
||||
/**
|
||||
* Discard unmatching responses:
|
||||
* (1) I sent a playback id but get a response with a different playback id
|
||||
* (2) I sent a playback id but get a response with no playback id
|
||||
* (3) I did not send a playback id but get a response with a playback id
|
||||
* (4) I sent a cache file but get a response with a different cache file
|
||||
*/
|
||||
|
||||
const isMatchingEvent = (logger, filename, playbackId, evt) => {
|
||||
|
||||
if (!!playbackId && !!evt.variable_tts_playback_id && evt.variable_tts_playback_id === playbackId) {
|
||||
//logger.debug({filename, playbackId, evt}, 'Say:isMatchingEvent - playbackId matched');
|
||||
return true;
|
||||
}
|
||||
if (!!filename && !!evt.file && evt.file === filename) {
|
||||
//logger.debug({filename, playbackId, evt}, 'Say:isMatchingEvent - filename matched');
|
||||
return true;
|
||||
}
|
||||
logger.info({filename, playbackId, evt}, 'Say:isMatchingEvent - no match');
|
||||
return false;
|
||||
};
|
||||
|
||||
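A brief usage sketch of the matcher above, assuming a console-like logger and freeswitch-style event fields (the event values are hypothetical):

const logger = console;
const evt = { variable_tts_playback_id: 'abc-123', file: '/tmp/tts-1.wav' };    // hypothetical event
isMatchingEvent(logger, '/tmp/tts-1.wav', 'abc-123', evt);   // true: playback id matches
isMatchingEvent(logger, '/tmp/tts-2.wav', 'xyz-999', evt);   // false: neither id nor filename matches
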
const breakLengthyTextIfNeeded = (logger, text) => {
|
||||
// As the text can be used for tts streaming, we need to break lengthy text into smaller chunks
|
||||
// HIGH_WATER_BUFFER_SIZE defined in tts-streaming-buffer.js
|
||||
const chunkSize = 900;
|
||||
const chunkSize = JAMBONES_SAY_CHUNK_SIZE;
|
||||
const isSSML = text.startsWith('<speak>');
|
||||
const options = {
|
||||
softLimit: 100,
|
||||
@@ -98,13 +122,11 @@ class TaskSay extends TtsTask {
|
||||
}
|
||||
if (this.isStreamingTts) await this.handlingStreaming(cs, obj);
|
||||
else await this.handling(cs, obj);
|
||||
this.emit('playDone');
|
||||
} catch (error) {
|
||||
if (error instanceof SpeechCredentialError) {
|
||||
// if say failed due to speech credentials, an alarm is written and an error notification is sent;
|
||||
// finish this say to move on to the next task.
|
||||
this.logger.info({error}, 'Say failed due to SpeechCredentialError, finished!');
|
||||
this.emit('playDone');
|
||||
return;
|
||||
}
|
||||
throw error;
|
||||
@@ -125,9 +147,6 @@ class TaskSay extends TtsTask {
|
||||
|
||||
await cs.startTtsStream();
|
||||
|
||||
cs.requestor?.request('tts:streaming-event', '/streaming-event', {event_type: 'stream_open'})
|
||||
.catch((err) => this.logger.info({err}, 'TaskSay:handlingStreaming - Error sending'));
|
||||
|
||||
if (this.text.length !== 0) {
|
||||
this.logger.info('TaskSay:handlingStreaming - sending text to TTS stream');
|
||||
for (const t of this.text) {
|
||||
@@ -259,40 +278,32 @@ class TaskSay extends TtsTask {
|
||||
while (!this.killed && (this.loop === 'forever' || this.loop--) && ep?.connected) {
|
||||
let segment = 0;
|
||||
while (!this.killed && segment < filepath.length) {
|
||||
const filename = filepath[segment];
|
||||
if (cs.isInConference) {
|
||||
const {memberId, confName, confUuid} = cs;
|
||||
await this.playToConfMember(ep, memberId, confName, confUuid, filepath[segment]);
|
||||
await this.playToConfMember(ep, memberId, confName, confUuid, filename);
|
||||
}
|
||||
else {
|
||||
let playbackId;
|
||||
const isStreaming = filepath[segment].startsWith('say:{');
|
||||
const isStreaming = filename.startsWith('say:{');
|
||||
if (isStreaming) {
|
||||
const arr = /^say:\{.*\}\s*(.*)$/.exec(filepath[segment]);
|
||||
if (arr) this.logger.debug(`Say:exec sending streaming tts request: ${arr[1].substring(0, 64)}..`);
|
||||
}
|
||||
else {
|
||||
this.logger.debug(`Say:exec sending ${filepath[segment].substring(0, 64)}`);
|
||||
const arr = /^say:\{.*\}\s*(.*)$/.exec(filename);
|
||||
if (arr) this.logger.debug(`Say:exec sending streaming tts request ${arr[1].substring(0, 64)}..`);
|
||||
else this.logger.debug(`Say:exec sending ${filename.substring(0, 64)}`);
|
||||
}
|
||||
|
||||
const onPlaybackStop = (evt) => {
|
||||
try {
|
||||
this.logger.debug({evt},
|
||||
`Say got playback-stop ${evt.variable_tts_playback_id ? evt.variable_tts_playback_id : ''}`);
|
||||
|
||||
/**
|
||||
* If we got a playback id on both the start and stop events, and they don't match,
|
||||
* then we must have received a playback-stop event for an earlier play request.
|
||||
*/
|
||||
const unmatchedResponse = (!!playbackId && !!evt.variable_tts_playback_id) &&
|
||||
evt.variable_tts_playback_id !== playbackId;
|
||||
if (unmatchedResponse) {
|
||||
this.logger.info({currentPlaybackId: playbackId, stopPPlaybackId: evt.variable_tts_playback_id},
|
||||
const playbackId = this.getPlaybackId(segment);
|
||||
const isMatch = isMatchingEvent(this.logger, filename, playbackId, evt);
|
||||
if (!isMatch) {
|
||||
this.logger.info({currentPlaybackId: playbackId, stopPlaybackId: evt.variable_tts_playback_id},
|
||||
'Say:exec discarding playback-stop for earlier play');
|
||||
ep.once('playback-stop', this._boundOnPlaybackStop);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
this.logger.debug({evt},
|
||||
`Say got playback-stop ${evt.variable_tts_playback_id ? evt.variable_tts_playback_id : ''}`);
|
||||
this.notifyStatus({event: 'stop-playback'});
|
||||
this.notifiedPlayBackStop = true;
|
||||
const tts_error = evt.variable_tts_error;
|
||||
@@ -331,6 +342,7 @@ class TaskSay extends TtsTask {
|
||||
!this.disableTtsCache
|
||||
) {
|
||||
const text = parseTextFromSayString(this.text[segment]);
|
||||
this.logger.debug({text, cacheFile: evt.variable_tts_cache_filename}, 'Say:exec cache tts');
|
||||
addFileToCache(evt.variable_tts_cache_filename, {
|
||||
account_sid,
|
||||
vendor,
|
||||
@@ -358,9 +370,17 @@ class TaskSay extends TtsTask {
|
||||
};
|
||||
this._boundOnPlaybackStop = onPlaybackStop.bind(this);
|
||||
|
||||
ep.once('playback-start', (evt) => {
|
||||
const onPlaybackStart = (evt) => {
|
||||
try {
|
||||
playbackId = evt.variable_tts_playback_id;
|
||||
const playbackId = this.getPlaybackId(segment);
|
||||
const isMatch = isMatchingEvent(this.logger, filename, playbackId, evt);
|
||||
if (!isMatch) {
|
||||
this.logger.info({currentPlaybackId: playbackId, startPlaybackId: evt.variable_tts_playback_id},
|
||||
'Say:exec playback-start - unmatched playback_id');
|
||||
ep.once('playback-start', this._boundOnPlaybackStart);
|
||||
return;
|
||||
}
|
||||
ep.once('playback-stop', this._boundOnPlaybackStop);
|
||||
this.logger.debug({evt},
|
||||
`Say got playback-start ${evt.variable_tts_playback_id ? evt.variable_tts_playback_id : ''}`);
|
||||
if (this.otelSpan) {
|
||||
@@ -374,16 +394,29 @@ class TaskSay extends TtsTask {
|
||||
} catch (err) {
|
||||
this.logger.info({err}, 'Error handling playback-start event');
|
||||
}
|
||||
});
|
||||
ep.once('playback-stop', this._boundOnPlaybackStop);
|
||||
};
|
||||
this._boundOnPlaybackStart = onPlaybackStart.bind(this);
|
||||
|
||||
ep.once('playback-start', this._boundOnPlaybackStart);
|
||||
|
||||
// wait for playback-stop event received to confirm if the playback is successful
|
||||
this._playPromise = new Promise((resolve, reject) => {
|
||||
this._playResolve = resolve;
|
||||
this._playReject = reject;
|
||||
});
|
||||
const r = await ep.play(filepath[segment]);
|
||||
this.logger.debug({r}, 'Say:exec play result');
|
||||
try {
|
||||
const r = await ep.play(filename);
|
||||
this.logger.debug({r}, 'Say:exec play result');
|
||||
if (r.playbackSeconds == null && r.playbackMilliseconds == null && r.playbackLastOffsetPos == null) {
|
||||
this._playReject(new Error('Playback failed to start'));
|
||||
}
|
||||
} catch (err) {
|
||||
if (NON_FANTAL_ERRORS.includes(err.message)) {
|
||||
throw new NonFatalTaskError(err.message);
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
|
||||
try {
|
||||
// wait for playback-stop event received to confirm if the playback is successful
|
||||
await this._playPromise;
|
||||
@@ -400,12 +433,12 @@ class TaskSay extends TtsTask {
|
||||
this._playResolve = null;
|
||||
this._playReject = null;
|
||||
}
|
||||
if (filepath[segment].startsWith('say:{')) {
|
||||
const arr = /^say:\{.*\}\s*(.*)$/.exec(filepath[segment]);
|
||||
if (filename.startsWith('say:{')) {
|
||||
const arr = /^say:\{.*\}\s*(.*)$/.exec(filename);
|
||||
if (arr) this.logger.debug(`Say:exec complete playing streaming tts request: ${arr[1].substring(0, 64)}..`);
|
||||
} else {
|
||||
// This log will print speech credentials in the say command for tts stream mode
|
||||
this.logger.debug(`Say:exec completed play file ${filepath[segment]}`);
|
||||
this.logger.debug(`Say:exec completed play file ${filename}`);
|
||||
}
|
||||
}
|
||||
segment++;
|
||||
@@ -421,8 +454,8 @@ class TaskSay extends TtsTask {
|
||||
const {memberId, confName} = cs;
|
||||
this.killPlayToConfMember(this.ep, memberId, confName);
|
||||
} else if (this.isStreamingTts) {
|
||||
this.logger.debug('TaskSay:kill - clearing TTS stream for streaming audio');
|
||||
cs.clearTtsStream();
|
||||
this.logger.debug('TaskSay:kill - stopping TTS stream for streaming audio');
|
||||
cs.stopTtsStream();
|
||||
} else {
|
||||
if (!this.notifiedPlayBackStop) {
|
||||
this.notifyStatus({event: 'stop-playback'});
|
||||
@@ -452,6 +485,7 @@ class TaskSay extends TtsTask {
|
||||
.replace('playht_', 'playht.')
|
||||
.replace('cartesia_', 'cartesia.')
|
||||
.replace('rimelabs_', 'rimelabs.')
|
||||
.replace('resemble_', 'resemble.')
|
||||
.replace('inworld_', 'inworld.')
|
||||
.replace('verbio_', 'verbio.')
|
||||
.replace('elevenlabs_', 'elevenlabs.');
|
||||
@@ -517,6 +551,9 @@ const spanMapping = {
|
||||
'rimelabs.name_lookup_time_ms': 'name_lookup_ms',
|
||||
'rimelabs.connect_time_ms': 'connect_ms',
|
||||
'rimelabs.final_response_time_ms': 'final_response_ms',
|
||||
// Resemble
|
||||
'resemble.connect_time_ms': 'connect_ms',
|
||||
'resemble.final_response_time_ms': 'final_response_ms',
|
||||
// inworld
|
||||
'inworld.name_lookup_time_ms': 'name_lookup_ms',
|
||||
'inworld.connect_time_ms': 'connect_ms',
|
||||
|
||||
@@ -171,7 +171,7 @@ class SttTask extends Task {
|
||||
try {
|
||||
this.sttCredentials = await this._initSpeechCredentials(this.cs, this.vendor, this.label);
|
||||
} catch (error) {
|
||||
if (this.canFallback) {
|
||||
if (this.canFallback()) {
|
||||
this.notifyError(
|
||||
{
|
||||
msg: 'ASR error', details:`Invalid vendor ${this.vendor}, Error: ${error}`,
|
||||
@@ -205,13 +205,64 @@ class SttTask extends Task {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
async createGladiaLiveSession() {
|
||||
const { api_key, region = 'us-west' } = this.sttCredentials;
|
||||
const model = this.data.recognizer.model || 'solaria-1';
|
||||
const options = this.data.recognizer.gladiaOptions || {};
|
||||
|
||||
const url = `https://api.gladia.io/v2/live?region=${region}`;
|
||||
const response = await fetch(url, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'x-gladia-key': api_key,
|
||||
'Content-Type': 'application/json'
|
||||
},
|
||||
body: JSON.stringify({
|
||||
encoding: 'wav/pcm',
|
||||
bit_depth: 16,
|
||||
sample_rate: 8000,
|
||||
channels: 1,
|
||||
model,
|
||||
...options,
|
||||
messages_config: {
|
||||
receive_final_transcripts: true,
|
||||
receive_speech_events: true,
|
||||
receive_errors: true,
|
||||
}
|
||||
})
|
||||
});
|
||||
if (!response.ok) {
|
||||
const error = await response.text();
|
||||
this.logger.error({url, status: response.status, error}, 'Error creating Gladia live session');
|
||||
throw new Error(`Error creating Gladia live session: ${response.status} ${error}`);
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
this.logger.debug({url: data.url}, 'Gladia Call registered');
|
||||
|
||||
const {host, pathname, search} = new URL(data.url);
|
||||
return {host, path: `${pathname}${search}`};
|
||||
}
|
||||
|
||||
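To illustrate the last step above, a sketch of how the session url returned by Gladia is split into host and path (the url shown is hypothetical):

const data = { url: 'wss://api.gladia.io/v2/live/0123-session?token=abc' };   // hypothetical response
const {host, pathname, search} = new URL(data.url);
console.log(host, `${pathname}${search}`);
// => api.gladia.io /v2/live/0123-session?token=abc
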
addCustomEventListener(ep, event, handler) {
|
||||
this.eventHandlers.push({ep, event, handler});
|
||||
ep.addCustomEventListener(event, handler);
|
||||
}
|
||||
|
||||
removeCustomEventListeners() {
|
||||
this.eventHandlers.forEach((h) => h.ep.removeCustomEventListener(h.event, h.handler));
|
||||
removeCustomEventListeners(ep) {
|
||||
if (ep) {
|
||||
// for specific endpoint
|
||||
this.eventHandlers.filter((h) => h.ep === ep).forEach((h) => {
|
||||
h.ep.removeCustomEventListener(h.event, h.handler);
|
||||
});
|
||||
this.eventHandlers = this.eventHandlers.filter((h) => h.ep !== ep);
|
||||
return;
|
||||
} else {
|
||||
// for all endpoints
|
||||
this.eventHandlers.forEach((h) => h.ep.removeCustomEventListener(h.event, h.handler));
|
||||
this.eventHandlers = [];
|
||||
}
|
||||
}
|
||||
|
||||
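An illustrative, self-contained sketch of the per-endpoint cleanup above; the mock endpoints and task registry below are stand-ins, not the real classes:

// minimal mock endpoints so the sketch runs standalone
const makeEp = (name) => ({name, handlers: [],
  addCustomEventListener(e, h) { this.handlers.push(h); },
  removeCustomEventListener(e, h) { this.handlers = this.handlers.filter((x) => x !== h); }});
const ep1 = makeEp('ep1'), ep2 = makeEp('ep2');

// the task keeps its own registry so it can clean up per endpoint or globally
const task = { eventHandlers: [],
  addCustomEventListener(ep, event, handler) {
    this.eventHandlers.push({ep, event, handler});
    ep.addCustomEventListener(event, handler);
  },
  removeCustomEventListeners(ep) {
    const targets = ep ? this.eventHandlers.filter((h) => h.ep === ep) : this.eventHandlers;
    targets.forEach((h) => h.ep.removeCustomEventListener(h.event, h.handler));
    this.eventHandlers = ep ? this.eventHandlers.filter((h) => h.ep !== ep) : [];
  }};

task.addCustomEventListener(ep1, 'vendor::transcription', () => {});
task.addCustomEventListener(ep2, 'vendor::transcription', () => {});
task.removeCustomEventListeners(ep1);                      // only ep1's handler is detached
console.log(ep1.handlers.length, ep2.handlers.length);     // => 0 1
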
async _initSpeechCredentials(cs, vendor, label) {
|
||||
@@ -225,6 +276,7 @@ class SttTask extends Task {
|
||||
account_sid: cs.accountSid,
|
||||
alert_type: AlertType.STT_NOT_PROVISIONED,
|
||||
vendor,
|
||||
label,
|
||||
target_sid: cs.callSid
|
||||
}).catch((err) => this.logger.info({err}, 'Error generating alert for no stt'));
|
||||
// the ASR might have fallback configuration, so we should not end the task here.
|
||||
@@ -279,11 +331,13 @@ class SttTask extends Task {
|
||||
return credentials;
|
||||
}
|
||||
|
||||
get canFallback() {
|
||||
canFallback() {
|
||||
return this.fallbackVendor && this.isHandledByPrimaryProvider && !this.cs.hasFallbackAsr;
|
||||
}
|
||||
|
||||
async _initFallback() {
|
||||
// ep is optional for gather or any verb that has a single ep,
|
||||
// but transcribe does need it as it might have 2 eps
|
||||
async _initFallback(ep) {
|
||||
assert(this.fallbackVendor, 'fallback failed without fallbackVendor configuration');
|
||||
this.logger.info(`Failed to use primary STT provider, fallback to ${this.fallbackVendor}`);
|
||||
this.isHandledByPrimaryProvider = false;
|
||||
@@ -296,7 +350,7 @@ class SttTask extends Task {
|
||||
this.data.recognizer.label = this.label;
|
||||
this.sttCredentials = await this._initSpeechCredentials(this.cs, this.vendor, this.label);
|
||||
// cleanup previous listener from previous vendor
|
||||
this.removeCustomEventListeners();
|
||||
this.removeCustomEventListeners(ep);
|
||||
}
|
||||
|
||||
async compileHintsForCobalt(ep, hostport, model, token, hints) {
|
||||
@@ -423,6 +477,7 @@ class SttTask extends Task {
|
||||
message: 'STT failure reported by vendor',
|
||||
detail: evt.error,
|
||||
vendor: this.vendor,
|
||||
label: this.label,
|
||||
target_sid: cs.callSid
|
||||
}).catch((err) => this.logger.info({err}, `Error generating alert for ${this.vendor} connection failure`));
|
||||
}
|
||||
@@ -436,6 +491,7 @@ class SttTask extends Task {
|
||||
alert_type: AlertType.STT_FAILURE,
|
||||
message: `Failed connecting to ${this.vendor} speech recognizer: ${reason}`,
|
||||
vendor: this.vendor,
|
||||
label: this.label,
|
||||
target_sid: cs.callSid
|
||||
}).catch((err) => this.logger.info({err}, `Error generating alert for ${this.vendor} connection failure`));
|
||||
}
|
||||
|
||||
@@ -6,7 +6,8 @@ const {
|
||||
AwsTranscriptionEvents,
|
||||
AzureTranscriptionEvents,
|
||||
DeepgramTranscriptionEvents,
|
||||
DeepgramRiverTranscriptionEvents,
|
||||
GladiaTranscriptionEvents,
|
||||
DeepgramfluxTranscriptionEvents,
|
||||
SonioxTranscriptionEvents,
|
||||
CobaltTranscriptionEvents,
|
||||
IbmTranscriptionEvents,
|
||||
@@ -14,6 +15,7 @@ const {
|
||||
JambonzTranscriptionEvents,
|
||||
TranscribeStatus,
|
||||
AssemblyAiTranscriptionEvents,
|
||||
HoundifyTranscriptionEvents,
|
||||
VoxistTranscriptionEvents,
|
||||
CartesiaTranscriptionEvents,
|
||||
OpenAITranscriptionEvents,
|
||||
@@ -68,6 +70,9 @@ class TaskTranscribe extends SttTask {
|
||||
this._bufferedTranscripts = [ [], [] ]; // for channel 1 and 2
|
||||
this.bugname_prefix = 'transcribe_';
|
||||
this.paused = false;
|
||||
// fallback flags
|
||||
this.isHandledByPrimaryProviderForEp1 = true;
|
||||
this.isHandledByPrimaryProviderForEp2 = true;
|
||||
}
|
||||
|
||||
get name() { return TaskName.Transcribe; }
|
||||
@@ -237,19 +242,35 @@ class TaskTranscribe extends SttTask {
|
||||
this._onVendorConnect.bind(this, cs, ep));
|
||||
this.addCustomEventListener(ep, DeepgramTranscriptionEvents.ConnectFailure,
|
||||
this._onVendorConnectFailure.bind(this, cs, ep, channel));
|
||||
this.addCustomEventListener(ep, DeepgramTranscriptionEvents.Error, this._onVendorError.bind(this, cs, ep));
|
||||
|
||||
|
||||
/* if app sets deepgramOptions.utteranceEndMs they essentially want continuous asr */
|
||||
//if (opts.DEEPGRAM_SPEECH_UTTERANCE_END_MS) this.isContinuousAsr = true;
|
||||
|
||||
break;
|
||||
case 'deepgramriver':
|
||||
this.bugname = `${this.bugname_prefix}deepgramriver_transcribe`;
|
||||
this.addCustomEventListener(ep, DeepgramRiverTranscriptionEvents.Transcription,
|
||||
case 'deepgramflux':
|
||||
this.bugname = `${this.bugname_prefix}deepgramflux_transcribe`;
|
||||
this.addCustomEventListener(ep, DeepgramfluxTranscriptionEvents.Transcription,
|
||||
this._onTranscription.bind(this, cs, ep, channel));
|
||||
this.addCustomEventListener(ep, DeepgramRiverTranscriptionEvents.Connect,
|
||||
this.addCustomEventListener(ep, DeepgramfluxTranscriptionEvents.Connect,
|
||||
this._onVendorConnect.bind(this, cs, ep));
|
||||
this.addCustomEventListener(ep, DeepgramRiverTranscriptionEvents.ConnectFailure,
|
||||
this.addCustomEventListener(ep, DeepgramfluxTranscriptionEvents.ConnectFailure,
|
||||
this._onVendorConnectFailure.bind(this, cs, ep, channel));
|
||||
this.addCustomEventListener(ep, DeepgramfluxTranscriptionEvents.Error, this._onVendorError.bind(this, cs, ep));
|
||||
|
||||
break;
|
||||
|
||||
case 'gladia':
|
||||
this.bugname = `${this.bugname_prefix}gladia_transcribe`;
|
||||
this.addCustomEventListener(ep, GladiaTranscriptionEvents.Transcription,
|
||||
this._onTranscription.bind(this, cs, ep, channel));
|
||||
this.addCustomEventListener(ep, GladiaTranscriptionEvents.Connect,
|
||||
this._onVendorConnect.bind(this, cs, ep));
|
||||
this.addCustomEventListener(ep, GladiaTranscriptionEvents.ConnectFailure,
|
||||
this._onVendorConnectFailure.bind(this, cs, ep, channel));
|
||||
this.addCustomEventListener(ep, GladiaTranscriptionEvents.Error, this._onVendorError.bind(this, cs, ep));
|
||||
|
||||
break;
|
||||
case 'soniox':
|
||||
this.bugname = `${this.bugname_prefix}soniox_transcribe`;
|
||||
@@ -320,6 +341,18 @@ class TaskTranscribe extends SttTask {
|
||||
this._onVendorConnectFailure.bind(this, cs, ep, channel));
|
||||
break;
|
||||
|
||||
case 'houndify':
|
||||
this.bugname = `${this.bugname_prefix}houndify_transcribe`;
|
||||
this.addCustomEventListener(ep, HoundifyTranscriptionEvents.Transcription,
|
||||
this._onTranscription.bind(this, cs, ep, channel));
|
||||
this.addCustomEventListener(ep, HoundifyTranscriptionEvents.Error,
|
||||
this._onVendorError.bind(this, cs, ep));
|
||||
this.addCustomEventListener(ep, HoundifyTranscriptionEvents.ConnectFailure,
|
||||
this._onVendorConnectFailure.bind(this, cs, ep, channel));
|
||||
this.addCustomEventListener(ep, HoundifyTranscriptionEvents.Connect,
|
||||
this._onVendorConnect.bind(this, cs, ep));
|
||||
break;
|
||||
|
||||
case 'voxist':
|
||||
this.bugname = `${this.bugname_prefix}voxist_transcribe`;
|
||||
this.addCustomEventListener(ep, VoxistTranscriptionEvents.Transcription,
|
||||
@@ -426,6 +459,14 @@ class TaskTranscribe extends SttTask {
|
||||
else if (this.data.recognizer?.hints?.length > 0) {
|
||||
prompt = this.data.recognizer?.hints.join(', ');
|
||||
}
|
||||
} else if (this.vendor === 'gladia') {
|
||||
// gladia requires a unique url for each session
|
||||
const {host, path} = await this.createGladiaLiveSession();
|
||||
await ep.set({
|
||||
GLADIA_SPEECH_HOST: host,
|
||||
GLADIA_SPEECH_PATH: path,
|
||||
})
|
||||
.catch((err) => this.logger.info(err, 'Error setting channel variables'));
|
||||
}
|
||||
|
||||
await ep.startTranscription({
|
||||
@@ -746,7 +787,7 @@ class TaskTranscribe extends SttTask {
|
||||
}
|
||||
|
||||
async _startFallback(cs, _ep, evt) {
|
||||
if (this.canFallback) {
|
||||
if (this.canFallback(_ep)) {
|
||||
_ep.stopTranscription({
|
||||
vendor: this.vendor,
|
||||
bugname: this.bugname,
|
||||
@@ -756,7 +797,7 @@ class TaskTranscribe extends SttTask {
|
||||
try {
|
||||
this.notifyError({ msg: 'ASR error',
|
||||
details:`STT Vendor ${this.vendor} error: ${evt.error || evt.reason}`, failover: 'in progress'});
|
||||
await this._initFallback();
|
||||
await this._initFallback(_ep);
|
||||
let channel = 1;
|
||||
if (this.ep !== _ep) {
|
||||
channel = 2;
|
||||
@@ -865,6 +906,41 @@ class TaskTranscribe extends SttTask {
|
||||
if (this._asrTimer) clearTimeout(this._asrTimer);
|
||||
this._asrTimer = null;
|
||||
}
|
||||
|
||||
// We need to keep track of whether a fallback has happened for each endpoint.
|
||||
// override the canFallback and _initFallback methods to make sure that
|
||||
// we only fall back once per endpoint
|
||||
// we want to track this at the task level instead of the endpoint level
|
||||
// because the endpoint instance is used across multiple tasks.
|
||||
canFallback(ep) {
|
||||
let isHandledByPrimaryProvider = this.isHandledByPrimaryProvider;
|
||||
if (ep === this.ep) {
|
||||
isHandledByPrimaryProvider = this.isHandledByPrimaryProviderForEp1;
|
||||
} else if (ep === this.ep2) {
|
||||
isHandledByPrimaryProvider = this.isHandledByPrimaryProviderForEp2;
|
||||
}
|
||||
|
||||
const isOneOfEndpointAlreadyFallenBack = !!this.ep && !!this.ep2 &&
|
||||
this.isHandledByPrimaryProviderForEp1 !== this.isHandledByPrimaryProviderForEp2;
|
||||
|
||||
// fallback is configured
|
||||
return this.fallbackVendor &&
|
||||
// has this endpoint already fallen back
|
||||
isHandledByPrimaryProvider &&
|
||||
// at the global level, has any fallback already happened?
|
||||
// one fallen-back endpoint will set cs.hasFallbackAsr to true,
|
||||
// so if one endpoint has already fallen back, the other endpoint can still fall back.
|
||||
(isOneOfEndpointAlreadyFallenBack || !this.cs.hasFallbackAsr);
|
||||
}
|
||||
|
||||
_initFallback(ep) {
|
||||
if (ep === this.ep) {
|
||||
this.isHandledByPrimaryProviderForEp1 = false;
|
||||
} else if (ep === this.ep2) {
|
||||
this.isHandledByPrimaryProviderForEp2 = false;
|
||||
}
|
||||
return super._initFallback(ep);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = TaskTranscribe;
|
||||
|
||||
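A compressed, hypothetical sketch of the per-endpoint fallback bookkeeping described above (not the actual class; the session-level cs.hasFallbackAsr handling is simplified into the tracker itself):

class FallbackTracker {
  constructor(fallbackConfigured) {
    this.fallbackConfigured = fallbackConfigured;
    this.primaryOk = { ep1: true, ep2: true };   // per-endpoint "still on primary vendor" flags
    this.hasFallbackAsr = false;                 // session-level flag, normally kept on the call session
  }
  canFallback(which) {
    const otherAlreadyFellBack = this.primaryOk.ep1 !== this.primaryOk.ep2;
    return this.fallbackConfigured && this.primaryOk[which] &&
      (otherAlreadyFellBack || !this.hasFallbackAsr);
  }
  markFallback(which) {
    this.primaryOk[which] = false;
    this.hasFallbackAsr = true;
  }
}

const t = new FallbackTracker(true);
console.log(t.canFallback('ep1'));   // true
t.markFallback('ep1');
console.log(t.canFallback('ep2'));   // still true: the other endpoint may fall back once as well
console.log(t.canFallback('ep1'));   // false: this endpoint has already fallen back
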
@@ -3,6 +3,16 @@ const { TaskPreconditions } = require('../utils/constants');
|
||||
const { SpeechCredentialError } = require('../utils/error');
|
||||
const dbUtils = require('../utils/db-utils');
|
||||
|
||||
const extractPlaybackId = (str) => {
|
||||
// Match say:{...} and capture the content inside braces
|
||||
const match = str.match(/say:\{([^}]*)\}/);
|
||||
if (!match) return null;
|
||||
|
||||
// Look for playback_id=value within the captured content
|
||||
const playbackMatch = match[1].match(/playback_id=([^,]*)/);
|
||||
return playbackMatch ? playbackMatch[1] : null;
|
||||
};
|
||||
|
||||
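Quick usage sketch for the helper above (inputs are hypothetical):

console.log(extractPlaybackId('say:{playback_id=ab12,voice=foo}Hello there'));   // => 'ab12'
console.log(extractPlaybackId('/tmp/cached-tts.wav'));                           // => null
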
class TtsTask extends Task {
|
||||
|
||||
constructor(logger, data, parentTask) {
|
||||
@@ -21,11 +31,21 @@ class TtsTask extends Task {
|
||||
this.synthesizer = this.data.synthesizer || {};
|
||||
this.disableTtsCache = this.data.disableTtsCache;
|
||||
this.options = this.synthesizer.options || {};
|
||||
this.instructions = this.data.instructions;
|
||||
this.instructions = this.data.instructions || this.options.instructions;
|
||||
this.playbackIds = [];
|
||||
this.useGeminiTts = this.options.useGeminiTts;
|
||||
}
|
||||
|
||||
getPlaybackId(offset) {
|
||||
return this.playbackIds[offset];
|
||||
}
|
||||
|
||||
async exec(cs) {
|
||||
super.exec(cs);
|
||||
// update disableTtsCache from call session if not set in task
|
||||
if (this.data.disableTtsCache == null) {
|
||||
this.disableTtsCache = cs.disableTtsCache;
|
||||
}
|
||||
if (cs.synthesizer) {
|
||||
this.options = {...cs.synthesizer.options, ...this.options};
|
||||
this.data.synthesizer = this.data.synthesizer || {};
|
||||
@@ -66,55 +86,67 @@ class TtsTask extends Task {
|
||||
}
|
||||
|
||||
async setTtsStreamingChannelVars(vendor, language, voice, credentials, ep) {
|
||||
const {api_key, model_id, custom_tts_streaming_url, auth_token} = credentials;
|
||||
let obj;
|
||||
const {api_key, model_id, api_uri, custom_tts_streaming_url, auth_token, options} = credentials;
|
||||
// api_key, model_id, api_uri, custom_tts_streaming_url, and auth_token are encoded in the credentials
|
||||
// allow them to be overridden via config, using options
|
||||
// give preference to options passed in via config
|
||||
const parsed_options = options ? JSON.parse(options) : {};
|
||||
const local_options = {...parsed_options, ...this.options};
|
||||
const local_voice_settings = {...(parsed_options.voice_settings || {}), ...(this.options.voice_settings || {})};
|
||||
const local_api_key = local_options.api_key ?? api_key;
|
||||
const local_model_id = local_options.model_id ?? model_id;
|
||||
const local_api_uri = local_options.api_uri ?? api_uri;
|
||||
const local_custom_tts_streaming_url = local_options.custom_tts_streaming_url ?? custom_tts_streaming_url;
|
||||
const local_auth_token = local_options.auth_token ?? auth_token;
|
||||
|
||||
this.logger.debug({credentials},
|
||||
`setTtsStreamingChannelVars: vendor: ${vendor}, language: ${language}, voice: ${voice}`);
|
||||
|
||||
let obj;
|
||||
|
||||
switch (vendor) {
|
||||
case 'deepgram':
|
||||
obj = {
|
||||
DEEPGRAM_API_KEY: api_key,
|
||||
DEEPGRAM_API_KEY: local_api_key,
|
||||
DEEPGRAM_TTS_STREAMING_MODEL: voice
|
||||
};
|
||||
break;
|
||||
case 'cartesia':
|
||||
obj = {
|
||||
CARTESIA_API_KEY: api_key,
|
||||
CARTESIA_TTS_STREAMING_MODEL_ID: model_id,
|
||||
CARTESIA_API_KEY: local_api_key,
|
||||
CARTESIA_TTS_STREAMING_MODEL_ID: local_model_id,
|
||||
CARTESIA_TTS_STREAMING_VOICE_ID: voice,
|
||||
CARTESIA_TTS_STREAMING_LANGUAGE: language || 'en',
|
||||
};
|
||||
break;
|
||||
case 'elevenlabs':
|
||||
const {stability, similarity_boost, use_speaker_boost, style, speed} = this.options.voice_settings || {};
|
||||
// eslint-disable-next-line max-len
|
||||
const {stability, similarity_boost, use_speaker_boost, style, speed} = local_voice_settings || {};
|
||||
obj = {
|
||||
ELEVENLABS_API_KEY: api_key,
|
||||
ELEVENLABS_TTS_STREAMING_MODEL_ID: model_id,
|
||||
ELEVENLABS_API_KEY: local_api_key,
|
||||
...(api_uri && {ELEVENLABS_API_URI: local_api_uri}),
|
||||
ELEVENLABS_TTS_STREAMING_MODEL_ID: local_model_id,
|
||||
ELEVENLABS_TTS_STREAMING_VOICE_ID: voice,
|
||||
// 20/12/2024 - only eleven_turbo_v2_5 support multiple language
|
||||
...(['eleven_turbo_v2_5'].includes(model_id) && {ELEVENLABS_TTS_STREAMING_LANGUAGE: language}),
|
||||
...(['eleven_turbo_v2_5'].includes(local_model_id) && {ELEVENLABS_TTS_STREAMING_LANGUAGE: language}),
|
||||
...(stability && {ELEVENLABS_TTS_STREAMING_VOICE_SETTINGS_STABILITY: stability}),
|
||||
...(similarity_boost && {ELEVENLABS_TTS_STREAMING_VOICE_SETTINGS_SIMILARITY_BOOST: similarity_boost}),
|
||||
...(use_speaker_boost && {ELEVENLABS_TTS_STREAMING_VOICE_SETTINGS_USE_SPEAKER_BOOST: use_speaker_boost}),
|
||||
...(style && {ELEVENLABS_TTS_STREAMING_VOICE_SETTINGS_STYLE: style}),
|
||||
// speed ranges from 0.7 to 1.2 (1.0 is default); make sure we send the value even if it's 0
|
||||
...(speed !== null && speed !== undefined && {ELEVENLABS_TTS_STREAMING_VOICE_SETTINGS_SPEED: `${speed}`}),
|
||||
...(this.options.pronunciation_dictionary_locators &&
|
||||
Array.isArray(this.options.pronunciation_dictionary_locators) && {
|
||||
...(local_options.pronunciation_dictionary_locators &&
|
||||
Array.isArray(local_options.pronunciation_dictionary_locators) && {
|
||||
ELEVENLABS_TTS_STREAMING_PRONUNCIATION_DICTIONARY_LOCATORS:
|
||||
JSON.stringify(this.options.pronunciation_dictionary_locators)
|
||||
JSON.stringify(local_options.pronunciation_dictionary_locators)
|
||||
}),
|
||||
};
|
||||
break;
|
||||
case 'rimelabs':
|
||||
const {
|
||||
pauseBetweenBrackets, phonemizeBetweenBrackets, inlineSpeedAlpha, speedAlpha, reduceLatency
|
||||
} = this.options;
|
||||
} = local_options;
|
||||
obj = {
|
||||
RIMELABS_API_KEY: api_key,
|
||||
RIMELABS_TTS_STREAMING_MODEL_ID: model_id,
|
||||
RIMELABS_API_KEY: local_api_key,
|
||||
RIMELABS_TTS_STREAMING_MODEL_ID: local_model_id,
|
||||
RIMELABS_TTS_STREAMING_VOICE_ID: voice,
|
||||
RIMELABS_TTS_STREAMING_LANGUAGE: language || 'en',
|
||||
...(pauseBetweenBrackets && {RIMELABS_TTS_STREAMING_PAUSE_BETWEEN_BRACKETS: pauseBetweenBrackets}),
|
||||
@@ -125,12 +157,19 @@ class TtsTask extends Task {
|
||||
...(reduceLatency && {RIMELABS_TTS_STREAMING_REDUCE_LATENCY: reduceLatency})
|
||||
};
|
||||
break;
|
||||
case 'google':
|
||||
obj = {
|
||||
GOOGLE_TTS_LANGUAGE_CODE: language,
|
||||
GOOGLE_TTS_VOICE_NAME: voice,
|
||||
GOOGLE_APPLICATION_CREDENTIALS: JSON.stringify(credentials.credentials)
|
||||
};
|
||||
break;
|
||||
default:
|
||||
if (vendor.startsWith('custom:')) {
|
||||
const use_tls = custom_tts_streaming_url.startsWith('wss://');
|
||||
obj = {
|
||||
CUSTOM_TTS_STREAMING_HOST: custom_tts_streaming_url.replace(/^(ws|wss):\/\//, ''),
|
||||
CUSTOM_TTS_STREAMING_API_KEY: auth_token,
|
||||
CUSTOM_TTS_STREAMING_HOST: local_custom_tts_streaming_url.replace(/^(ws|wss):\/\//, ''),
|
||||
CUSTOM_TTS_STREAMING_API_KEY: local_auth_token,
|
||||
CUSTOM_TTS_STREAMING_VOICE_ID: voice,
|
||||
CUSTOM_TTS_STREAMING_LANGUAGE: language || 'en',
|
||||
CUSTOM_TTS_STREAMING_USE_TLS: use_tls
|
||||
@@ -211,6 +250,8 @@ class TtsTask extends Task {
|
||||
}
|
||||
} else if (vendor === 'cartesia') {
|
||||
credentials.model_id = this.options.model_id || credentials.model_id;
|
||||
} else if (vendor === 'google') {
|
||||
this.model = this.options.model || credentials.credentials.model_id;
|
||||
}
|
||||
|
||||
this.model_id = credentials.model_id;
|
||||
@@ -243,15 +284,16 @@ class TtsTask extends Task {
|
||||
account_sid,
|
||||
alert_type: AlertType.TTS_NOT_PROVISIONED,
|
||||
vendor,
|
||||
label,
|
||||
target_sid: cs.callSid
|
||||
}).catch((err) => this.logger.info({err}, 'Error generating alert for no tts'));
|
||||
throw new SpeechCredentialError('no provisioned speech credentials for TTS');
|
||||
}
|
||||
|
||||
/* produce an audio segment from the provided text */
|
||||
const generateAudio = async(text) => {
|
||||
if (this.killed) return;
|
||||
if (text.startsWith('silence_stream://')) return text;
|
||||
const generateAudio = async(text, index) => {
|
||||
if (this.killed) return {index, filePath: null};
|
||||
if (text.startsWith('silence_stream://')) return {index, filePath: text};
|
||||
|
||||
/* otel: trace time for tts */
|
||||
if (!preCache && !this._disableTracing) {
|
||||
@@ -293,16 +335,35 @@ class TtsTask extends Task {
|
||||
vendor,
|
||||
language,
|
||||
characters: text.length,
|
||||
elapsedTime: rtt
|
||||
elapsedTime: rtt,
|
||||
servedFromCache,
|
||||
'id': this.id
|
||||
});
|
||||
}
|
||||
if (servedFromCache) {
|
||||
this.notifyStatus({
|
||||
event: 'synthesized-audio',
|
||||
vendor,
|
||||
language,
|
||||
servedFromCache,
|
||||
'id': this.id
|
||||
});
|
||||
}
|
||||
return {index, filePath, playbackId: null};
|
||||
}
|
||||
else {
|
||||
const playbackId = extractPlaybackId(filePath);
|
||||
this.logger.debug('Say: a streaming tts api will be used');
|
||||
const modifiedPath = filePath.replace('say:{', `say:{session-uuid=${ep.uuid},`);
|
||||
return modifiedPath;
|
||||
this.notifyStatus({
|
||||
event: 'synthesized-audio',
|
||||
vendor,
|
||||
language,
|
||||
servedFromCache,
|
||||
'id': this.id
|
||||
});
|
||||
return {index, filePath: modifiedPath, playbackId};
|
||||
}
|
||||
return filePath;
|
||||
} catch (err) {
|
||||
this.logger.info({err}, 'Error synthesizing tts');
|
||||
if (this.otelSpan) this.otelSpan.end();
|
||||
@@ -310,6 +371,7 @@ class TtsTask extends Task {
|
||||
account_sid: cs.accountSid,
|
||||
alert_type: AlertType.TTS_FAILURE,
|
||||
vendor,
|
||||
label,
|
||||
detail: err.message,
|
||||
target_sid: cs.callSid
|
||||
}).catch((err) => this.logger.info({err}, 'Error generating alert for tts failure'));
|
||||
@@ -317,8 +379,20 @@ class TtsTask extends Task {
|
||||
}
|
||||
};
|
||||
|
||||
const arr = this.text.map((t) => (this._validateURL(t) ? t : generateAudio(t)));
|
||||
return (await Promise.all(arr)).filter((fp) => fp && fp.length);
|
||||
// processing all text segments in parallel can cause ordering issues,
|
||||
// so we attach an index to each promise result and sort them later
|
||||
|
||||
const arr = this.text.map((t, index) => (this._validateURL(t) ?
|
||||
Promise.resolve({index, filePath: t, playbackId: null}) : generateAudio(t, index)));
|
||||
const results = await Promise.all(arr);
|
||||
const sorted = results.sort((a, b) => a.index - b.index);
|
||||
|
||||
return sorted
|
||||
.filter((fp) => fp.filePath && fp.filePath.length)
|
||||
.map((r) => {
|
||||
this.playbackIds.push(r.playbackId);
|
||||
return r.filePath;
|
||||
});
|
||||
} catch (err) {
|
||||
this.logger.info(err, 'TaskSay:exec error');
|
||||
throw err;
|
||||
|
||||
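A standalone sketch of the index-then-sort pattern used above to keep segment order stable while synthesizing in parallel; synthesize() is a hypothetical stand-in for the real per-segment TTS call:

const synthesize = async (text) => `/tmp/tts-${text.length}.wav`;   // hypothetical helper

(async () => {
  const texts = ['hello there', 'second segment'];
  const results = await Promise.all(
    texts.map((t, index) => synthesize(t).then((filePath) => ({index, filePath}))));
  const ordered = results.sort((a, b) => a.index - b.index).map((r) => r.filePath);
  console.log(ordered);   // file paths come back in the original segment order
})();
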
@@ -118,6 +118,13 @@ class ActionHookDelayProcessor extends Emitter {
|
||||
this.logger.debug('ActionHookDelayProcessor#_onNoResponseTimer');
|
||||
this._noResponseTimer = null;
|
||||
|
||||
/* check if endpoint is still available (call may have ended) */
|
||||
if (!this.ep) {
|
||||
this.logger.debug('ActionHookDelayProcessor#_onNoResponseTimer: endpoint is null, call may have ended');
|
||||
this._active = false;
|
||||
return;
|
||||
}
|
||||
|
||||
/* get the next play or say action */
|
||||
const verb = this.actions[this._retryCount % this.actions.length];
|
||||
|
||||
@@ -129,8 +136,8 @@ class ActionHookDelayProcessor extends Emitter {
|
||||
this._taskInProgress.exec(this.cs, {ep: this.ep}).catch((err) => {
|
||||
this.logger.info(`ActionHookDelayProcessor#_onNoResponseTimer: error playing file: ${err.message}`);
|
||||
this._taskInProgress = null;
|
||||
this.ep.removeAllListeners('playback-start');
|
||||
this.ep.removeAllListeners('playback-stop');
|
||||
this.ep?.removeAllListeners('playback-start');
|
||||
this.ep?.removeAllListeners('playback-stop');
|
||||
});
|
||||
} catch (err) {
|
||||
this.logger.info(err, 'ActionHookDelayProcessor#_onNoResponseTimer: error starting action');
|
||||
|
||||
@@ -281,13 +281,17 @@ module.exports = (logger) => {
|
||||
|
||||
/* set stt options */
|
||||
logger.info(`starting amd for vendor ${vendor} and language ${language}`);
|
||||
const sttOpts = amd.setChannelVarsForStt({name: TaskName.Gather}, sttCredentials, language, {
|
||||
vendor,
|
||||
hints,
|
||||
enhancedModel: true,
|
||||
altLanguages: opts.recognizer?.altLanguages || [],
|
||||
initialSpeechTimeoutMs: opts.resolveTimeoutMs,
|
||||
});
|
||||
/* if opts contains recognizer object use that config for stt, otherwise use defaults */
|
||||
const rOpts = opts.recognizer ?
|
||||
opts.recognizer :
|
||||
{
|
||||
vendor,
|
||||
hints,
|
||||
enhancedModel: true,
|
||||
altLanguages: opts.recognizer?.altLanguages || [],
|
||||
initialSpeechTimeoutMs: opts.resolveTimeoutMs,
|
||||
};
|
||||
const sttOpts = amd.setChannelVarsForStt({name: TaskName.Gather}, sttCredentials, language, rOpts);
|
||||
|
||||
await ep.set(sttOpts).catch((err) => logger.info(err, 'Error setting channel variables'));
|
||||
|
||||
@@ -401,19 +405,21 @@ module.exports = (logger) => {
|
||||
if (ep.amd) {
|
||||
vendor = ep.amd.vendor;
|
||||
ep.amd.stopAllTimers();
|
||||
|
||||
ep.removeListener(GoogleTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
|
||||
ep.removeListener(GoogleTranscriptionEvents.EndOfUtterance, ep.amd.EndOfUtteranceHandler);
|
||||
ep.removeListener(AwsTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
|
||||
ep.removeListener(AzureTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
|
||||
ep.removeListener(AzureTranscriptionEvents.NoSpeechDetected, ep.amd.noSpeechHandler);
|
||||
ep.removeListener(NuanceTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
|
||||
ep.removeListener(DeepgramTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
|
||||
ep.removeListener(SonioxTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
|
||||
ep.removeListener(IbmTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
|
||||
ep.removeListener(NvidiaTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
|
||||
ep.removeListener(JambonzTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
|
||||
|
||||
try {
|
||||
ep.removeListener(GoogleTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
|
||||
ep.removeListener(GoogleTranscriptionEvents.EndOfUtterance, ep.amd.EndOfUtteranceHandler);
|
||||
ep.removeListener(AwsTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
|
||||
ep.removeListener(AzureTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
|
||||
ep.removeListener(AzureTranscriptionEvents.NoSpeechDetected, ep.amd.noSpeechHandler);
|
||||
ep.removeListener(NuanceTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
|
||||
ep.removeListener(DeepgramTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
|
||||
ep.removeListener(SonioxTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
|
||||
ep.removeListener(IbmTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
|
||||
ep.removeListener(NvidiaTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
|
||||
ep.removeListener(JambonzTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
|
||||
} catch (error) {
|
||||
logger.error('Unable to Remove AMD Listener', error);
|
||||
}
|
||||
ep.amd = null;
|
||||
}
|
||||
|
||||
|
||||
@@ -65,7 +65,7 @@ class BackgroundTaskManager extends Emitter {
|
||||
this.logger.info(`stopping background task: ${type}`);
|
||||
task.removeAllListeners();
|
||||
task.span.end();
|
||||
task.kill();
|
||||
task.kill(this.cs);
|
||||
// Remove task from managed List
|
||||
this.tasks.delete(type);
|
||||
}
|
||||
@@ -135,26 +135,24 @@ class BackgroundTaskManager extends Emitter {
|
||||
|
||||
// Initiate Record
|
||||
async _initRecord() {
|
||||
if (this.cs.accountInfo.account.record_all_calls || this.cs.application.record_all_calls) {
|
||||
if (!JAMBONZ_RECORD_WS_BASE_URL || !this.cs.accountInfo.account.bucket_credential) {
|
||||
this.logger.error('_initRecord: invalid cfg - missing JAMBONZ_RECORD_WS_BASE_URL or bucket config');
|
||||
return undefined;
|
||||
}
|
||||
const listenOpts = {
|
||||
url: `${JAMBONZ_RECORD_WS_BASE_URL}/record/${this.cs.accountInfo.account.bucket_credential.vendor}`,
|
||||
disableBidirectionalAudio: true,
|
||||
mixType : 'stereo',
|
||||
passDtmf: true
|
||||
};
|
||||
if (JAMBONZ_RECORD_WS_USERNAME && JAMBONZ_RECORD_WS_PASSWORD) {
|
||||
listenOpts.wsAuth = {
|
||||
username: JAMBONZ_RECORD_WS_USERNAME,
|
||||
password: JAMBONZ_RECORD_WS_PASSWORD
|
||||
};
|
||||
}
|
||||
this.logger.debug({listenOpts}, '_initRecord: enabling listen');
|
||||
return await this._initListen({verb: 'listen', ...listenOpts}, 'jambonz-session-record', true, 'record');
|
||||
if (!JAMBONZ_RECORD_WS_BASE_URL || !this.cs.accountInfo.account.bucket_credential) {
|
||||
this.logger.error('_initRecord: invalid cfg - missing JAMBONZ_RECORD_WS_BASE_URL or bucket config');
|
||||
return undefined;
|
||||
}
|
||||
const listenOpts = {
|
||||
url: `${JAMBONZ_RECORD_WS_BASE_URL}/record/${this.cs.accountInfo.account.bucket_credential.vendor}`,
|
||||
disableBidirectionalAudio: true,
|
||||
mixType : 'stereo',
|
||||
passDtmf: true
|
||||
};
|
||||
if (JAMBONZ_RECORD_WS_USERNAME && JAMBONZ_RECORD_WS_PASSWORD) {
|
||||
listenOpts.wsAuth = {
|
||||
username: JAMBONZ_RECORD_WS_USERNAME,
|
||||
password: JAMBONZ_RECORD_WS_PASSWORD
|
||||
};
|
||||
}
|
||||
this.logger.debug({listenOpts}, '_initRecord: enabling listen');
|
||||
return await this._initListen({verb: 'listen', ...listenOpts}, 'jambonz-session-record', true, 'record');
|
||||
}
|
||||
|
||||
// Initiate Transcribe
|
||||
|
||||
@@ -94,12 +94,20 @@
|
||||
"DeepgramTranscriptionEvents": {
|
||||
"Transcription": "deepgram_transcribe::transcription",
|
||||
"ConnectFailure": "deepgram_transcribe::connect_failed",
|
||||
"Connect": "deepgram_transcribe::connect"
|
||||
"Connect": "deepgram_transcribe::connect",
|
||||
"Error": "deepgram_transcribe::error"
|
||||
},
|
||||
"DeepgramRiverTranscriptionEvents": {
|
||||
"Transcription": "deepgramriver_transcribe::transcription",
|
||||
"ConnectFailure": "deepgramriver_transcribe::connect_failed",
|
||||
"Connect": "deepgramriver_transcribe::connect"
|
||||
"DeepgramfluxTranscriptionEvents": {
|
||||
"Transcription": "deepgramflux_transcribe::transcription",
|
||||
"ConnectFailure": "deepgramflux_transcribe::connect_failed",
|
||||
"Connect": "deepgramflux_transcribe::connect",
|
||||
"Error": "deepgramflux_transcribe::error"
|
||||
},
|
||||
"GladiaTranscriptionEvents": {
|
||||
"Transcription": "gladia_transcribe::transcription",
|
||||
"ConnectFailure": "gladia_transcribe::connect_failed",
|
||||
"Connect": "gladia_transcribe::connect",
|
||||
"Error": "gladia_transcribe::error"
|
||||
},
|
||||
"SonioxTranscriptionEvents": {
|
||||
"Transcription": "soniox_transcribe::transcription",
|
||||
@@ -167,6 +175,12 @@
|
||||
"ConnectFailure": "assemblyai_transcribe::connect_failed",
|
||||
"Connect": "assemblyai_transcribe::connect"
|
||||
},
|
||||
"HoundifyTranscriptionEvents": {
|
||||
"Transcription": "houndify_transcribe::transcription",
|
||||
"Error": "houndify_transcribe::error",
|
||||
"ConnectFailure": "houndify_transcribe::connect_failed",
|
||||
"Connect": "houndify_transcribe::connect"
|
||||
},
|
||||
"VoxistTranscriptionEvents": {
|
||||
"Transcription": "voxist_transcribe::transcription",
|
||||
"Error": "voxist_transcribe::error",
|
||||
@@ -297,6 +311,11 @@
|
||||
"ConnectFailure": "deepgram_tts_streaming::connect_failed",
|
||||
"Connect": "deepgram_tts_streaming::connect"
|
||||
},
|
||||
"GoogleTtsStreamingEvents": {
|
||||
"Empty": "google_tts_streaming::empty",
|
||||
"ConnectFailure": "google_tts_streaming::connect_failed",
|
||||
"Connect": "google_tts_streaming::connect"
|
||||
},
|
||||
"CartesiaTtsStreamingEvents": {
|
||||
"Empty": "cartesia_tts_streaming::empty",
|
||||
"ConnectFailure": "cartesia_tts_streaming::connect_failed",
|
||||
@@ -321,7 +340,8 @@
|
||||
"Empty": "tts_streaming::empty",
|
||||
"Pause": "tts_streaming::pause",
|
||||
"Resume": "tts_streaming::resume",
|
||||
"ConnectFailure": "tts_streaming::connect_failed"
|
||||
"ConnectFailure": "tts_streaming::connect_failed",
|
||||
"Connected": "tts_streaming::connected"
|
||||
},
|
||||
"TtsStreamingConnectionStatus": {
|
||||
"NotConnected": "not_connected",
|
||||
@@ -341,5 +361,8 @@
|
||||
"WS_CLOSE_CODES": {
|
||||
"NormalClosure": 1000,
|
||||
"GoingAway": 1001
|
||||
}
|
||||
},
|
||||
"NON_FANTAL_ERRORS": [
|
||||
"File Not Found"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -81,7 +81,12 @@ const speechMapper = (cred) => {
|
||||
obj.deepgram_tts_uri = o.deepgram_tts_uri;
|
||||
obj.deepgram_stt_use_tls = o.deepgram_stt_use_tls;
|
||||
}
|
||||
else if ('deepgramriver' === obj.vendor) {
|
||||
else if ('gladia' === obj.vendor) {
|
||||
const o = JSON.parse(decrypt(credential));
|
||||
obj.api_key = o.api_key;
|
||||
obj.region = o.region;
|
||||
}
|
||||
else if ('deepgramflux' === obj.vendor) {
|
||||
const o = JSON.parse(decrypt(credential));
|
||||
obj.api_key = o.api_key;
|
||||
}
|
||||
@@ -101,6 +106,7 @@ const speechMapper = (cred) => {
|
||||
const o = JSON.parse(decrypt(credential));
|
||||
obj.api_key = o.api_key;
|
||||
obj.model_id = o.model_id;
|
||||
obj.api_uri = o.api_uri;
|
||||
obj.options = o.options;
|
||||
}
|
||||
else if ('playht' === obj.vendor) {
|
||||
@@ -124,6 +130,12 @@ const speechMapper = (cred) => {
|
||||
obj.model_id = o.model_id;
|
||||
obj.options = o.options;
|
||||
}
|
||||
else if ('resemble' === obj.vendor) {
|
||||
const o = JSON.parse(decrypt(credential));
|
||||
obj.api_key = o.api_key;
|
||||
obj.resemble_tts_use_tls = o.resemble_tts_use_tls;
|
||||
obj.resemble_tts_uri = o.resemble_tts_uri;
|
||||
}
|
||||
else if ('inworld' === obj.vendor) {
|
||||
const o = JSON.parse(decrypt(credential));
|
||||
obj.api_key = o.api_key;
|
||||
@@ -135,6 +147,13 @@ const speechMapper = (cred) => {
|
||||
obj.api_key = o.api_key;
|
||||
obj.service_version = o.service_version;
|
||||
}
|
||||
else if ('houndify' === obj.vendor) {
|
||||
const o = JSON.parse(decrypt(credential));
|
||||
obj.client_id = o.client_id;
|
||||
obj.client_key = o.client_key;
|
||||
obj.user_id = o.user_id;
|
||||
obj.houndify_server_uri = o.houndify_server_uri;
|
||||
}
|
||||
else if ('voxist' === obj.vendor) {
|
||||
const o = JSON.parse(decrypt(credential));
|
||||
obj.api_key = o.api_key;
|
||||
|
||||
@@ -41,7 +41,7 @@ class HttpRequestor extends BaseRequestor {
|
||||
constructor(logger, account_sid, hook, secret) {
|
||||
super(logger, account_sid, hook, secret);
|
||||
|
||||
this.method = hook.method || 'POST';
|
||||
this.method = hook.method?.toUpperCase() || 'POST';
|
||||
this.authHeader = basicAuth(hook.username, hook.password);
|
||||
this.backoffMs = 500;
|
||||
|
||||
@@ -111,7 +111,7 @@ class HttpRequestor extends BaseRequestor {
|
||||
|
||||
const payload = params ? snakeCaseKeys(params, ['customerData', 'sip', 'env_vars', 'args']) : null;
|
||||
const url = hook.url || hook;
|
||||
const method = hook.method || 'POST';
|
||||
const method = hook.method?.toUpperCase() || 'POST';
|
||||
let buf = '';
|
||||
httpHeaders = {
|
||||
...httpHeaders,
|
||||
@@ -119,7 +119,7 @@ class HttpRequestor extends BaseRequestor {
|
||||
};
|
||||
|
||||
assert.ok(url, 'HttpRequestor:request url was not provided');
|
||||
assert.ok, (['GET', 'POST'].includes(method), `HttpRequestor:request method must be 'GET' or 'POST' not ${method}`);
|
||||
assert.ok(['GET', 'POST'].includes(method), `HttpRequestor:request method must be 'GET' or 'POST' not ${method}`);
|
||||
const startAt = process.hrtime();
|
||||
|
||||
/* if we have an absolute url, and it is ws then do a websocket connection */
|
||||
@@ -191,7 +191,7 @@ class HttpRequestor extends BaseRequestor {
|
||||
method,
|
||||
headers: hdrs,
|
||||
...('POST' === method && {body: JSON.stringify(payload)}),
|
||||
timeout: HTTP_TIMEOUT,
|
||||
headersTimeout: HTTP_TIMEOUT,
|
||||
followRedirects: false
|
||||
};
|
||||
|
||||
|
||||
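The two hunks above normalize the hook method and repair a broken assertion (the old `assert.ok, (...)` form evaluated a comma expression and never actually asserted). A minimal sketch of the corrected behavior, using a hypothetical hook config:

const assert = require('assert');

const hook = { url: 'https://example.com/hook', method: 'post' };   // hypothetical app config
const method = hook.method?.toUpperCase() || 'POST';
assert.ok(['GET', 'POST'].includes(method), `method must be GET or POST, not ${method}`);
console.log(method);   // => POST
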
@@ -173,7 +173,8 @@ function installSrfLocals(srf, logger, {
|
||||
lookupAccountCapacitiesBySid,
|
||||
lookupSmppGateways,
|
||||
lookupClientByAccountAndUsername,
|
||||
lookupSystemInformation
|
||||
lookupSystemInformation,
|
||||
lookupLcrByAccount
|
||||
} = require('@jambonz/db-helpers')({
|
||||
host: JAMBONES_MYSQL_HOST,
|
||||
user: JAMBONES_MYSQL_USER,
|
||||
@@ -279,7 +280,8 @@ function installSrfLocals(srf, logger, {
|
||||
retrieveByPatternSortedSet,
|
||||
sortedSetLength,
|
||||
sortedSetPositionByPattern,
|
||||
getVerbioAccessToken
|
||||
getVerbioAccessToken,
|
||||
lookupLcrByAccount
|
||||
},
|
||||
parentLogger: logger,
|
||||
getSBC,
|
||||
|
||||
@@ -20,7 +20,7 @@ const { createMediaEndpoint } = require('./media-endpoint');

class SingleDialer extends Emitter {
constructor({logger, sbcAddress, target, opts, application, callInfo, accountInfo, rootSpan, startSpan, dialTask,
onHoldMusic}) {
onHoldMusic, tmpFiles}) {
super();
assert(target.type);

@@ -44,6 +44,7 @@ class SingleDialer extends Emitter {
this.callSid = crypto.randomUUID();
this.dialTask = dialTask;
this.onHoldMusic = onHoldMusic;
this.tmpFiles = tmpFiles;

this.on('callStatusChange', this._notifyCallStatusChange.bind(this));
}
@@ -328,7 +329,13 @@ class SingleDialer extends Emitter {
*/
async kill(Reason) {
this.killed = true;
if (this.inviteInProgress) await this.inviteInProgress.cancel();
if (this.inviteInProgress) {
try {
await this.inviteInProgress.cancel();
} catch (err) {
this.logger.error({err}, 'SingleDialer:kill error cancelling invite');
}
}
else if (this.dlg && this.dlg.connected) {
const duration = moment().diff(this.dlg.connectTime, 'seconds');
this.logger.debug('SingleDialer:kill hanging up called party');
@@ -401,7 +408,8 @@ class SingleDialer extends Emitter {
accountInfo: this.accountInfo,
tasks,
rootSpan: this.rootSpan,
req: this.req
req: this.req,
tmpFiles: this.tmpFiles,
});
await cs.exec();

@@ -535,12 +543,12 @@ class SingleDialer extends Emitter {

function placeOutdial({
logger, srf, ms, sbcAddress, target, opts, application, callInfo, accountInfo, rootSpan, startSpan, dialTask,
onHoldMusic
onHoldMusic, tmpFiles
}) {
const myOpts = deepcopy(opts);
const sd = new SingleDialer({
logger, sbcAddress, target, opts: myOpts, application, callInfo,
accountInfo, rootSpan, startSpan, dialTask, onHoldMusic
accountInfo, rootSpan, startSpan, dialTask, onHoldMusic, tmpFiles
});
sd.exec(srf, ms, myOpts);
return sd;

lib/utils/process-monitor.js | 91 (new file)
@@ -0,0 +1,91 @@
// lib/utils/process-monitor.js
const fs = require('fs');
const path = require('path');

class ProcessMonitor {
constructor(logger) {
this.logger = logger;
this.packageInfo = this.getPackageInfo();
this.processName = this.packageInfo.name || 'unknown-app';
}

getPackageInfo() {
try {
const packagePath = path.join(process.cwd(), 'package.json');
return JSON.parse(fs.readFileSync(packagePath, 'utf8'));
} catch (e) {
return { name: 'unknown', version: 'unknown' };
}
}

logStartup(additionalInfo = {}) {
const startupInfo = {
msg: `${this.processName} started`,
app_name: this.processName,
app_version: this.packageInfo.version,
pid: process.pid,
ppid: process.ppid,
pm2_instance_id: process.env.NODE_APP_INSTANCE || 'not_pm2',
pm2_id: process.env.pm_id,
is_pm2: !!process.env.PM2,
node_version: process.version,
uptime: process.uptime(),
timestamp: new Date().toISOString(),
...additionalInfo
};

this.logger.info(startupInfo);
return startupInfo;
}

setupSignalHandlers() {
// Log when we receive signals that would cause restart
process.on('SIGINT', () => {
this.logger.info({
msg: 'SIGINT received',
app_name: this.processName,
pid: process.pid,
ppid: process.ppid,
uptime: process.uptime(),
timestamp: new Date().toISOString()
});
process.exit(0);
});

process.on('SIGTERM', () => {
this.logger.info({
msg: 'SIGTERM received',
app_name: this.processName,
pid: process.pid,
ppid: process.ppid,
uptime: process.uptime(),
timestamp: new Date().toISOString()
});
process.exit(0);
});

process.on('uncaughtException', (error) => {
this.logger.error({
msg: 'Uncaught exception - process will restart',
app_name: this.processName,
error: error.message,
stack: error.stack,
pid: process.pid,
timestamp: new Date().toISOString()
});
process.exit(1);
});

process.on('unhandledRejection', (reason, promise) => {
this.logger.error({
msg: 'Unhandled rejection',
app_name: this.processName,
reason,
pid: process.pid,
timestamp: new Date().toISOString()
});
});
}
}

module.exports = ProcessMonitor;
@@ -100,6 +100,30 @@ module.exports = (logger) => {
else if (K8S) {
lifecycleEmitter.scaleIn = () => process.exit(0);
}
else {
process.on('SIGUSR1', () => {
logger.info('received SIGUSR1: begin drying up calls for scale-in');
dryUpCalls = true;

const {srf} = require('../..');
const {writeSystemAlerts} = srf.locals;
if (writeSystemAlerts) {
const {SystemState, FEATURE_SERVER} = require('./constants');
writeSystemAlerts({
system_component: FEATURE_SERVER,
state : SystemState.GracefulShutdownInProgress,
fields : {
detail: `feature-server with process_id ${process.pid} shutdown in progress`,
host: srf.locals?.ipv4
}
});
}
pingProxies(srf);

// Note: in response to SIGUSR1 we start drying up but do not exit when calls reach zero.
// This is to allow external scripts that sent the signal to manage the lifecycle.
});
}


async function pingProxies(srf) {

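As a quick illustration of the SIGUSR1 note above, an external drain script could look like the following sketch (hypothetical, not part of this repo; the pid argument and the 10-minute grace period are assumptions):

// drain.js - usage: node drain.js <pid>
const pid = Number(process.argv[2]);
process.kill(pid, 'SIGUSR1');     // feature-server starts drying up calls but keeps running
setTimeout(() => {
  process.kill(pid, 'SIGTERM');   // the external script decides when to actually stop it
}, 10 * 60 * 1000);
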
@@ -55,11 +55,28 @@ const extractSdpMedia = (sdp) => {
}
};

const getLeadingCodec = (sdp) => {
if (!sdp) {
return null;
}

const parsed = sdpTransform.parse(sdp);
const audio = parsed.media?.find((m) => m.type === 'audio');

if (!audio) {
return null;
}

return audio.rtp?.[0]?.codec || null;
};


module.exports = {
isOnhold,
mergeSdpMedia,
extractSdpMedia,
isOpusFirst,
makeOpusFirst,
removeVideoSdp
removeVideoSdp,
getLeadingCodec
};

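A minimal usage sketch of the new getLeadingCodec helper (the require path and the SDP below are illustrative assumptions):

const { getLeadingCodec } = require('./lib/utils/sdp-utils');
const sdp = [
  'v=0',
  'o=- 0 0 IN IP4 127.0.0.1',
  's=-',
  'c=IN IP4 127.0.0.1',
  't=0 0',
  'm=audio 4000 RTP/AVP 111 0',
  'a=rtpmap:111 opus/48000/2',
  'a=rtpmap:0 PCMU/8000'
].join('\r\n') + '\r\n';
console.log(getLeadingCodec(sdp)); // 'opus' - the first codec listed on the audio m-line
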
@@ -101,8 +101,6 @@ class SttLatencyCalculator extends Emitter {
}
this.isRunning = false;
this.logger.info('STT Latency Calculator stopped');
} else {
this.logger.warn('Latency calculator is not running, no VAD detection to stop');
}
}

@@ -116,7 +114,7 @@ class SttLatencyCalculator extends Emitter {
return;
}
this._startVad();
this.logger.info('STT Latency Calculator started');
this.logger.debug('STT Latency Calculator started');
}

stop() {
@@ -129,7 +127,6 @@ class SttLatencyCalculator extends Emitter {

calculateLatency() {
if (!this.isRunning) {
this.logger.debug('Latency calculator is not running, cannot calculate latency, returning default values');
return null;
}

@@ -131,6 +131,43 @@ const stickyVars = {
'OPENAI_TURN_DETECTION_PREFIX_PADDING_MS',
'OPENAI_TURN_DETECTION_SILENCE_DURATION_MS',
],
houndify: [
'HOUNDIFY_CLIENT_ID',
'HOUNDIFY_CLIENT_KEY',
'HOUNDIFY_USER_ID',
'HOUNDIFY_MAX_SILENCE_SECONDS',
'HOUNDIFY_MAX_SILENCE_AFTER_FULL_QUERY_SECONDS',
'HOUNDIFY_MAX_SILENCE_AFTER_PARTIAL_QUERY_SECONDS',
'HOUNDIFY_VAD_SENSITIVITY',
'HOUNDIFY_VAD_TIMEOUT',
'HOUNDIFY_VAD_MODE',
'HOUNDIFY_VAD_VOICE_MS',
'HOUNDIFY_VAD_SILENCE_MS',
'HOUNDIFY_VAD_DEBUG',
'HOUNDIFY_AUDIO_FORMAT',
'HOUNDIFY_ENABLE_NOISE_REDUCTION',
'HOUNDIFY_AUDIO_ENDPOINT',
'HOUNDIFY_ENABLE_PROFANITY_FILTER',
'HOUNDIFY_ENABLE_PUNCTUATION',
'HOUNDIFY_ENABLE_CAPITALIZATION',
'HOUNDIFY_CONFIDENCE_THRESHOLD',
'HOUNDIFY_ENABLE_DISFLUENCY_FILTER',
'HOUNDIFY_MAX_RESULTS',
'HOUNDIFY_ENABLE_WORD_TIMESTAMPS',
'HOUNDIFY_MAX_ALTERNATIVES',
'HOUNDIFY_PARTIAL_TRANSCRIPT_INTERVAL',
'HOUNDIFY_SESSION_TIMEOUT',
'HOUNDIFY_CONNECTION_TIMEOUT',
'HOUNDIFY_LATITUDE',
'HOUNDIFY_LONGITUDE',
'HOUNDIFY_CITY',
'HOUNDIFY_STATE',
'HOUNDIFY_COUNTRY',
'HOUNDIFY_TIMEZONE',
'HOUNDIFY_DOMAIN',
'HOUNDIFY_CUSTOM_VOCABULARY',
'HOUNDIFY_LANGUAGE_MODEL'
],
};

/**
@@ -339,20 +376,56 @@ const normalizeDeepgram = (evt, channel, language, shortUtterance) => {
};
};

const normalizeDeepgramRiver = (evt, channel, language, shortUtterance) => {
const normalizeGladia = (evt, channel, language, shortUtterance) => {
const copy = JSON.parse(JSON.stringify(evt));

// Handle Gladia transcript format
if (evt.type === 'transcript' && evt.data && evt.data.utterance) {
const utterance = evt.data.utterance;
const alternatives = [{
confidence: utterance.confidence || 0,
transcript: utterance.text || '',
}];

return {
language_code: utterance.language || language,
channel_tag: channel,
is_final: evt.data.is_final || false,
alternatives,
vendor: {
name: 'gladia',
evt: copy
}
};
}
};

const normalizeDeepgramFlux = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));

let turnTakingEvent;
if (['StartOfTurn', 'EagerEndOfTurn', 'TurnResumed', 'EndOfTurn'].includes(evt.event)) {
turnTakingEvent = evt.event;
}

/* calculate total confidence based on word-level confidence */
const realWords = (evt.words || [])
.filter((w) => ![',.!?;'].includes(w.word));
const confidence = realWords.length > 0 ? realWords.reduce((acc, w) => acc + w.confidence, 0) / realWords.length : 0;
return {
language_code: language,
channel_tag: channel,
is_final: evt.event === 'EndOfTurn',
alternatives: [
{
confidence: evt.end_of_turn_confidence,
confidence,
end_of_turn_confidence: evt.end_of_turn_confidence,
transcript: evt.transcript,
...(turnTakingEvent && {turn_taking_event: turnTakingEvent})
}
],
vendor: {
name: 'deepgramriver',
name: 'deepgramflux',
evt: copy
}
};
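To make the word-level averaging in normalizeDeepgramFlux concrete, here is an illustrative calculation (the word list and confidences are made up):

const words = [
  { word: 'hello', confidence: 0.8 },
  { word: 'world', confidence: 0.9 }
];
const confidence = words.reduce((acc, w) => acc + w.confidence, 0) / words.length;
console.log(confidence); // ~0.85 - reported as alternatives[0].confidence, while
                         // the raw end_of_turn_confidence is passed through unchanged
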
@@ -570,6 +643,30 @@ const normalizeAssemblyAi = (evt, channel, language) => {
};
};

const normalizeHoundify = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
const alternatives = [];
const is_final = evt.ResultsAreFinal && evt.ResultsAreFinal[0] === true;
if (evt.Disambiguation && evt.Disambiguation.ChoiceData && evt.Disambiguation.ChoiceData.length > 0) {
// Handle Houndify Voice Search Result format
const choiceData = evt.Disambiguation.ChoiceData[0];
alternatives.push({
confidence: choiceData.ConfidenceScore || choiceData.ASRConfidence || 0.0,
transcript: choiceData.FormattedTranscription || choiceData.Transcription || '',
});
}
return {
language_code: language,
channel_tag: channel,
is_final,
alternatives,
vendor: {
name: 'houndify',
evt: copy
}
};
};

const normalizeVoxist = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
return {
@@ -669,8 +766,10 @@ module.exports = (logger) => {
switch (vendor) {
case 'deepgram':
return normalizeDeepgram(evt, channel, language, shortUtterance);
case 'deepgramriver':
return normalizeDeepgramRiver(evt, channel, language, shortUtterance);
case 'gladia':
return normalizeGladia(evt, channel, language, shortUtterance);
case 'deepgramflux':
return normalizeDeepgramFlux(evt, channel, language, shortUtterance);
case 'microsoft':
return normalizeMicrosoft(evt, channel, language, punctuation);
case 'google':
@@ -689,6 +788,8 @@ module.exports = (logger) => {
return normalizeCobalt(evt, channel, language);
case 'assemblyai':
return normalizeAssemblyAi(evt, channel, language, shortUtterance);
case 'houndify':
return normalizeHoundify(evt, channel, language, shortUtterance);
case 'voxist':
return normalizeVoxist(evt, channel, language);
case 'cartesia':
@@ -785,12 +886,15 @@ module.exports = (logger) => {
AWS_ACCESS_KEY_ID: sttCredentials.accessKeyId,
AWS_SECRET_ACCESS_KEY: sttCredentials.secretAccessKey,
AWS_REGION: sttCredentials.region,
AWS_SECURITY_TOKEN: sttCredentials.securityToken
AWS_SECURITY_TOKEN: sttCredentials.securityToken,
AWS_SESSION_TOKEN: sttCredentials.sessionToken ? sttCredentials.sessionToken : sttCredentials.securityToken
}),
...(awsOptions.accessKey && {AWS_ACCESS_KEY_ID: awsOptions.accessKey}),
...(awsOptions.secretKey && {AWS_SECRET_ACCESS_KEY: awsOptions.secretKey}),
...(awsOptions.region && {AWS_REGION: awsOptions.region}),
...(awsOptions.securityToken && {AWS_SECURITY_TOKEN: awsOptions.securityToken}),
...(awsOptions.sessionToken && {AWS_SESSION_TOKEN: awsOptions.sessionToken ?
awsOptions.sessionToken : awsOptions.securityToken}),
...(awsOptions.languageModelName && {AWS_LANGUAGE_MODEL_NAME: awsOptions.languageModelName}),
...(awsOptions.piiEntityTypes?.length && {AWS_PII_ENTITY_TYPES: awsOptions.piiEntityTypes.join(',')}),
...(awsOptions.piiIdentifyEntities && {AWS_PII_IDENTIFY_ENTITIES: true}),
@@ -816,7 +920,7 @@ module.exports = (logger) => {
...(rOpts.initialSpeechTimeoutMs > 0 &&
{AZURE_INITIAL_SPEECH_TIMEOUT_MS: rOpts.initialSpeechTimeoutMs}),
...(rOpts.requestSnr && {AZURE_REQUEST_SNR: 1}),
...(rOpts.audioLogging && {AZURE_AUDIO_LOGGING: 1}),
...(azureOptions.audioLogging && {AZURE_AUDIO_LOGGING: 1}),
...{AZURE_USE_OUTPUT_FORMAT_DETAILED: 1},
...(azureOptions.speechSegmentationSilenceTimeoutMs &&
{AZURE_SPEECH_SEGMENTATION_SILENCE_TIMEOUT_MS: azureOptions.speechSegmentationSilenceTimeoutMs}),
@@ -891,6 +995,14 @@ module.exports = (logger) => {
const deepgramUri = deepgramOptions.deepgramSttUri || sttCredentials.deepgram_stt_uri;
const useTls = deepgramOptions.deepgramSttUseTls || sttCredentials.deepgram_stt_use_tls;

// DH (2025-08-11) entity_prompt is currently limited to 100 words
const entityPrompt = deepgramOptions.entityPrompt ?
deepgramOptions.entityPrompt
.split(/\s+/)
.slice(0, 100)
.join(' ')
: undefined;

/* default to a sensible model if not supplied */
if (!model) {
model = selectDefaultDeepgramModel(task, language);
@@ -949,22 +1061,28 @@ module.exports = (logger) => {
...(deepgramOptions.fillerWords) &&
{DEEPGRAM_SPEECH_FILLER_WORDS: deepgramOptions.fillerWords},
...((Array.isArray(deepgramOptions.keyterms) && deepgramOptions.keyterms.length > 0) &&
{DEEPGRAM_SPEECH_KEYTERMS: deepgramOptions.keyterms.join(',')})
{DEEPGRAM_SPEECH_KEYTERMS: deepgramOptions.keyterms.join(',')}),
...(deepgramOptions.mipOptOut && {DEEPGRAM_SPEECH_MIP_OPT_OUT: deepgramOptions.mipOptOut}),
...(entityPrompt && {DEEPGRAM_SPEECH_ENTITY_PROMPT: entityPrompt}),
};
}
else if ('deepgramriver' === vendor) {
else if ('deepgramflux' === vendor) {
const {
preflightThreshold,
eotThreshold,
eotTimeoutMs,
mipOptOut
mipOptOut,
model,
eagerEotThreshold,
keyterms
} = rOpts.deepgramOptions || {};
opts = {
DEEPGRAM_API_KEY: sttCredentials.api_key,
...(preflightThreshold && {DEEPGRAM_SPEECH_PRELIGHT_THRESHOLD: preflightThreshold}),
...(eotThreshold && {DEEPGRAM_SPEECH_EOT_THRESHOLD: eotThreshold}),
...(eotTimeoutMs && {DEEPGRAM_SPEECH_EOT_TIMEOUT_MS: eotTimeoutMs}),
...(mipOptOut && {DEEPGRAM_SPEECH_MIP_OPT_OUT: mipOptOut}),
DEEPGRAMFLUX_API_KEY: sttCredentials.api_key,
DEEPGRAMFLUX_SPEECH_MODEL: model || 'flux-general-en',
...(eotThreshold && {DEEPGRAMFLUX_SPEECH_EOT_THRESHOLD: eotThreshold}),
...(eotTimeoutMs && {DEEPGRAMFLUX_SPEECH_EOT_TIMEOUT_MS: eotTimeoutMs}),
...(mipOptOut && {DEEPGRAMFLUX_SPEECH_MIP_OPT_OUT: mipOptOut}),
...(eagerEotThreshold && {DEEPGRAMFLUX_SPEECH_EAGER_EOT_THRESHOLD: eagerEotThreshold}),
...(keyterms && keyterms.length > 0 && {DEEPGRAMFLUX_SPEECH_KEYTERMS: keyterms.join(',')}),
};
}
else if ('soniox' === vendor) {
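For reference, a recognizer fragment that would exercise the new deepgramflux branch might look like the sketch below (field names come from the destructuring above; the values are illustrative, not defaults):

const recognizer = {
  vendor: 'deepgramflux',
  language: 'en-US',
  deepgramOptions: {
    model: 'flux-general-en',
    eotThreshold: 0.7,
    eagerEotThreshold: 0.5,
    eotTimeoutMs: 5000,
    keyterms: ['jambonz', 'drachtio']
  }
};
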
@@ -1093,6 +1211,61 @@ module.exports = (logger) => {
{ASSEMBLYAI_WORD_BOOST: JSON.stringify(rOpts.hints)})
};
}
else if ('houndify' === vendor) {
const {
latitude, longitude, city, state, country, timeZone, domain, audioEndpoint,
maxSilenceSeconds, maxSilenceAfterFullQuerySeconds, maxSilenceAfterPartialQuerySeconds,
vadSensitivity, vadTimeout, vadMode, vadVoiceMs, vadSilenceMs, vadDebug,
audioFormat, enableNoiseReduction, enableProfanityFilter, enablePunctuation,
enableCapitalization, confidenceThreshold, enableDisfluencyFilter,
maxResults, enableWordTimestamps, maxAlternatives, partialTranscriptInterval,
sessionTimeout, connectionTimeout, customVocabulary, languageModel,
requestInfo, sampleRate
} = rOpts.houndifyOptions || {};
const audioEndpointUri = audioEndpoint || sttCredentials.houndify_server_uri;

opts = {
...opts,
HOUNDIFY_CLIENT_ID: sttCredentials.client_id,
HOUNDIFY_CLIENT_KEY: sttCredentials.client_key,
HOUNDIFY_USER_ID: sttCredentials.user_id,
HOUNDIFY_MAX_SILENCE_SECONDS: maxSilenceSeconds || 5,
HOUNDIFY_MAX_SILENCE_AFTER_FULL_QUERY_SECONDS: maxSilenceAfterFullQuerySeconds || 1,
HOUNDIFY_MAX_SILENCE_AFTER_PARTIAL_QUERY_SECONDS: maxSilenceAfterPartialQuerySeconds || 1.5,
...(vadSensitivity && {HOUNDIFY_VAD_SENSITIVITY: vadSensitivity}),
...(vadTimeout && {HOUNDIFY_VAD_TIMEOUT: vadTimeout}),
...(vadMode && {HOUNDIFY_VAD_MODE: vadMode}),
...(vadVoiceMs && {HOUNDIFY_VAD_VOICE_MS: vadVoiceMs}),
...(vadSilenceMs && {HOUNDIFY_VAD_SILENCE_MS: vadSilenceMs}),
...(vadDebug && {HOUNDIFY_VAD_DEBUG: vadDebug}),
...(audioFormat && {HOUNDIFY_AUDIO_FORMAT: audioFormat}),
...(enableNoiseReduction && {HOUNDIFY_ENABLE_NOISE_REDUCTION: enableNoiseReduction}),
...(enableProfanityFilter && {HOUNDIFY_ENABLE_PROFANITY_FILTER: enableProfanityFilter}),
...(enablePunctuation && {HOUNDIFY_ENABLE_PUNCTUATION: enablePunctuation}),
...(enableCapitalization && {HOUNDIFY_ENABLE_CAPITALIZATION: enableCapitalization}),
...(confidenceThreshold && {HOUNDIFY_CONFIDENCE_THRESHOLD: confidenceThreshold}),
...(enableDisfluencyFilter && {HOUNDIFY_ENABLE_DISFLUENCY_FILTER: enableDisfluencyFilter}),
...(maxResults && {HOUNDIFY_MAX_RESULTS: maxResults}),
...(enableWordTimestamps && {HOUNDIFY_ENABLE_WORD_TIMESTAMPS: enableWordTimestamps}),
...(maxAlternatives && {HOUNDIFY_MAX_ALTERNATIVES: maxAlternatives}),
...(partialTranscriptInterval && {HOUNDIFY_PARTIAL_TRANSCRIPT_INTERVAL: partialTranscriptInterval}),
...(sessionTimeout && {HOUNDIFY_SESSION_TIMEOUT: sessionTimeout}),
...(connectionTimeout && {HOUNDIFY_CONNECTION_TIMEOUT: connectionTimeout}),
...(latitude && {HOUNDIFY_LATITUDE: latitude}),
...(longitude && {HOUNDIFY_LONGITUDE: longitude}),
...(city && {HOUNDIFY_CITY: city}),
...(state && {HOUNDIFY_STATE: state}),
...(country && {HOUNDIFY_COUNTRY: country}),
...(timeZone && {HOUNDIFY_TIMEZONE: timeZone}),
...(domain && {HOUNDIFY_DOMAIN: domain}),
...(audioEndpointUri && {HOUNDIFY_AUDIO_ENDPOINT: audioEndpointUri}),
...(customVocabulary && {HOUNDIFY_CUSTOM_VOCABULARY:
Array.isArray(customVocabulary) ? customVocabulary.join(',') : customVocabulary}),
...(languageModel && {HOUNDIFY_LANGUAGE_MODEL: languageModel}),
...(requestInfo && {HOUNDIFY_REQUEST_INFO: JSON.stringify(requestInfo)}),
...(sampleRate && {HOUNDIFY_SAMPLING_RATE: sampleRate}),
};
}
else if ('voxist' === vendor) {
opts = {
...opts,
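Similarly, a short sketch of the houndifyOptions fields that feed the HOUNDIFY_* channel variables above (property names are taken from the destructuring; the values and the overall recognizer shape are illustrative assumptions):

const recognizer = {
  vendor: 'houndify',
  language: 'en-US',
  houndifyOptions: {
    maxSilenceSeconds: 3,
    confidenceThreshold: 0.6,
    enablePunctuation: true,
    city: 'Seattle',
    state: 'WA',
    country: 'US'
  }
};
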
@@ -1137,6 +1310,9 @@ module.exports = (logger) => {
...(openaiOptions.turn_detection.silence_duration_ms && {
OPENAI_TURN_DETECTION_SILENCE_DURATION_MS: openaiOptions.turn_detection.silence_duration_ms
}),
...(openaiOptions.turn_detection.eagerness && {
OPENAI_TURN_DETECTION_EAGERNESS: openaiOptions.turn_detection.eagerness
})
};
}
}
@@ -1202,7 +1378,9 @@ module.exports = (logger) => {
speechmaticsOptions.transcription_config.audio_filtering_config.volume_threshold}),
...(speechmaticsOptions.transcription_config?.transcript_filtering_config?.remove_disfluencies &&
{SPEECHMATICS_REMOVE_DISFLUENCIES:
speechmaticsOptions.transcription_config.transcript_filtering_config.remove_disfluencies})
speechmaticsOptions.transcription_config.transcript_filtering_config.remove_disfluencies}),
SPEECHMATICS_END_OF_UTTERANCE_SILENCE_TRIGGER:
speechmaticsOptions.transcription_config?.conversation_config?.end_of_utterance_silence_trigger || 0.5
};
}
else if (vendor.startsWith('custom:')) {

@@ -80,7 +80,7 @@ class TtsStreamingBuffer extends Emitter {
clearTimeout(this.timer);
this.removeCustomEventListeners();
if (this.ep) {
this._api(this.ep, [this.ep.uuid, 'close'])
this._api(this.ep, [this.ep.uuid, 'stop'])
.catch((err) =>
this.logger.info({ err }, 'TtsStreamingBuffer:stop Error closing TTS streaming')
);
@@ -163,7 +163,6 @@ class TtsStreamingBuffer extends Emitter {
}

clear() {
this.logger.debug('TtsStreamingBuffer:clear');
if (this._connectionStatus !== TtsStreamingConnectionStatus.Connected) return;
clearTimeout(this.timer);
this._api(this.ep, [this.ep.uuid, 'clear']).catch((err) =>
@@ -193,10 +192,7 @@ class TtsStreamingBuffer extends Emitter {
this.logger.debug('TtsStreamingBuffer:_feedQueue TTS stream is not open or no endpoint available');
return;
}
if (
this._connectionStatus === TtsStreamingConnectionStatus.NotConnected ||
this._connectionStatus === TtsStreamingConnectionStatus.Failed
) {
if (this._connectionStatus !== TtsStreamingConnectionStatus.Connected) {
this.logger.debug('TtsStreamingBuffer:_feedQueue TTS stream is not connected');
return;
}
@@ -278,6 +274,14 @@ class TtsStreamingBuffer extends Emitter {
}
const chunk = combinedText.slice(0, chunkEnd);

// Check if the chunk is only whitespace before processing the queue
// If so, wait for more meaningful text
if (isWhitespace(chunk)) {
this.logger.debug('TtsStreamingBuffer:_feedQueue chunk is only whitespace, waiting for more text');
this._setTimerIfNeeded();
return;
}

// Now we iterate over the queue items
// and deduct their lengths until we've accounted for chunkEnd characters.
let remaining = chunkEnd;
@@ -301,6 +305,14 @@ class TtsStreamingBuffer extends Emitter {
this.bufferedLength -= chunkEnd;

const modifiedChunk = chunk.replace(/\n\n/g, '\n \n');

if (isWhitespace(modifiedChunk)) {
this.logger.debug('TtsStreamingBuffer:_feedQueue modified chunk is only whitespace, restoring queue');
this.queue.unshift({ type: 'text', value: chunk });
this.bufferedLength += chunkEnd;
this._setTimerIfNeeded();
return;
}
this.logger.debug(`TtsStreamingBuffer:_feedQueue sending chunk to tts: ${modifiedChunk}`);

try {
@@ -349,6 +361,7 @@ class TtsStreamingBuffer extends Emitter {
if (this.queue.length > 0) {
await this._feedQueue();
}
this.emit(TtsStreamingEvents.Connected, { vendor });
}

_onConnectFailure(vendor) {
@@ -399,6 +412,7 @@ class TtsStreamingBuffer extends Emitter {

removeCustomEventListeners() {
this.eventHandlers.forEach((h) => h.ep.removeCustomEventListener(h.event, h.handler));
this.eventHandlers.length = 0;
}

_initHandlers(ep) {
@@ -407,6 +421,7 @@ class TtsStreamingBuffer extends Emitter {
'cartesia',
'elevenlabs',
'rimelabs',
'google',
'custom'
].forEach((vendor) => {
const eventClassName = `${vendor.charAt(0).toUpperCase() + vendor.slice(1)}TtsStreamingEvents`;
@@ -422,7 +437,15 @@ class TtsStreamingBuffer extends Emitter {

const findSentenceBoundary = (text, limit) => {
// Look for punctuation or double newline that signals sentence end.
const sentenceEndRegex = /[.!?](?=\s|$)|\n\n/g;
// Includes:
// - ASCII: . ! ?
// - Arabic: ؟ (question mark), ۔ (full stop)
// - Japanese: 。 (full stop), !, ? (full-width exclamation/question)
//
// For languages that use spaces between sentences, we still require
// whitespace or end-of-string after the mark. For Japanese (no spaces),
// we treat the punctuation itself as a boundary regardless of following char.
const sentenceEndRegex = /[.!?؟۔](?=\s|$)|[。!?]|\n\n/g;
let lastSentenceBoundary = -1;
let match;
while ((match = sentenceEndRegex.exec(text)) && match.index < limit) {

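A small self-check (illustrative, not part of the diff) of the widened boundary pattern: a full-width Japanese 。 counts as a boundary with no trailing space, while an ASCII period still needs whitespace or end-of-string after it.

const findLastBoundary = (text, limit) => {
  const sentenceEndRegex = /[.!?؟۔](?=\s|$)|[。!?]|\n\n/g;
  let last = -1;
  let match;
  while ((match = sentenceEndRegex.exec(text)) && match.index < limit) {
    last = match.index + match[0].length;
  }
  return last;
};
console.log(findLastBoundary('こんにちは。元気です', 50));    // 6 - boundary right after 。
console.log(findLastBoundary('Hello world. More text', 50)); // 12 - boundary after the '.'
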
@@ -293,7 +293,7 @@ class WsRequestor extends BaseRequestor {

/* send the message */
this.ws.send(JSON.stringify(obj), async() => {
this.logger.debug({obj}, `WsRequestor:request websocket: sent (${url})`);
if (obj.type !== 'llm:event') this.logger.debug({obj}, `WsRequestor:request websocket: sent (${url})`);
// If session:reconnect is waiting for ack, hold here until ack to send queuedMsgs
if (this._reconnectPromise) {
try {

package-lock.json | 5853 (generated; diff suppressed because it is too large)
package.json | 14
@@ -27,14 +27,14 @@
"dependencies": {
"@aws-sdk/client-auto-scaling": "^3.549.0",
"@aws-sdk/client-sns": "^3.549.0",
"@jambonz/db-helpers": "^0.9.12",
"@jambonz/db-helpers": "^0.9.18",
"@jambonz/http-health-check": "^0.0.1",
"@jambonz/mw-registrar": "^0.2.7",
"@jambonz/realtimedb-helpers": "^0.8.13",
"@jambonz/speech-utils": "^0.2.15",
"@jambonz/realtimedb-helpers": "^0.8.15",
"@jambonz/speech-utils": "^0.2.30",
"@jambonz/stats-collector": "^0.1.10",
"@jambonz/time-series": "^0.2.14",
"@jambonz/verb-specifications": "^0.0.111",
"@jambonz/time-series": "^0.2.15",
"@jambonz/verb-specifications": "^0.0.125",
"@modelcontextprotocol/sdk": "^1.9.0",
"@opentelemetry/api": "^1.8.0",
"@opentelemetry/exporter-jaeger": "^1.23.0",
@@ -49,12 +49,12 @@
"debug": "^4.3.4",
"deepcopy": "^2.1.0",
"drachtio-fsmrf": "^4.1.2",
"drachtio-srf": "^5.0.5",
"drachtio-srf": "^5.0.14",
"express": "^4.19.2",
"express-validator": "^7.0.1",
"moment": "^2.30.1",
"parse-url": "^9.2.0",
"pino": "^8.20.0",
"pino": "^10.1.0",
"polly-ssml-split": "^0.1.0",
"sdp-transform": "^2.15.0",
"short-uuid": "^5.1.0",

@@ -83,7 +83,8 @@ test('invalid jambonz json create alert tests', async(t) => {
{account_sid: 'bb845d4b-83a9-4cde-a6e9-50f3743bab3f', page: 1, page_size: 25, days: 7});
let checked = false;
for (let i = 0; i < data.total; i++) {
checked = data.data[i].message === 'malformed jambonz payload: must be array'
checked = data.data[i].message === 'malformed jambonz payload: must be array';
if (checked) break;
}
t.ok(checked, 'alert is raised as expected');
disconnect();