Compare commits

..

4 Commits

Author SHA1 Message Date
Dave Horton
119a49725a avoid a pre-caching attempt if synth settings change 2024-01-13 12:21:54 -05:00
Dave Horton
5b1c2d3052 more fixes 2024-01-12 16:22:23 -05:00
Dave Horton
21f03dd13a fix for establishing vendor etc 2024-01-12 16:19:59 -05:00
Dave Horton
c2eef50512 wip 2024-01-12 16:13:36 -05:00
42 changed files with 8932 additions and 7631 deletions

View File

@@ -6,10 +6,10 @@ jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 20
node-version: 18
- run: npm ci
- run: npm run jslint
- run: docker pull drachtio/sipp

1
.gitignore vendored
View File

@@ -42,4 +42,3 @@ ecosystem.config.js
test/credentials/*.json
run-tests.sh
run-coverage.sh
.vscode

17
.vscode/launch.json vendored Normal file
View File

@@ -0,0 +1,17 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "node",
"request": "launch",
"name": "Launch Program",
"program": "${workspaceFolder}/test/index.js",
"env": {
"NODE_ENV": "test"
}
}
]
}

View File

@@ -1,6 +1,6 @@
MIT License
Copyright (c) 2018-2024 FirstFive8, Inc.
Copyright (c) 2021 Drachtio Communications Services, LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

View File

@@ -9,112 +9,7 @@
"can't take your call",
"will get back to you",
"I'll get back to you",
"we are unable",
"Unable to take your call now",
"I'll reply soon",
"I'll call back",
"I'll reach out to you as soon as possible",
"Leave a message",
"Away from phone",
"Not available now",
"I'll return call",
"On another call",
"Currently on another call",
"I will return call later",
"Busy please leave message",
"Message will be returned promptly",
"Currently unavailable to answer",
"Planning to return your call soon",
"Apologies for missing your call",
"Not by the phone at the moment",
"Expecting to return your call",
"Currently not accessible",
"Intend to call back",
"Appreciate your patience!",
"Engaged in another conversation",
"I Will respond promptly",
"Kindly leave a message",
"Currently occupied leave a message",
"Unfortunately unable to answer right now",
"Occupied at the moment",
"Not present leave a message",
"Regrettably unavailable kindly leave a message",
"Will ensure a prompt response to your message",
"Currently engaged",
"Will return your call at the earliest opportunity",
"Your message will receive my prompt attention",
"I'll respond as soon as I can",
"Your message is important please leave it after the beep",
"Away from the phone at the moment",
"Unable to answer right now",
"Engaged in another task",
"Not by the phone presently",
"I'll respond at my earliest convenience",
"Away from the phone momentarily",
"I'll return your call shortly",
"Currently not able to answer",
"Your message is important please leave it after the tone",
"I'm unable to take your call right now",
"Please leave your message for me",
"I'll get back to you soon",
"Your call has been missed",
"Please leave a detailed message for me to respond to",
"Leave a message I'll make sure to respond",
"Feel free to leave a message",
"Your call is important to me",
"I'll get back to you shortly",
"Your message will be attended to promptly",
"Not available at the moment",
"I'll be sure to get back to you",
"I'll call you back soon",
"I'll ensure a prompt response",
"Sorry for the inconvenience",
"I'll return your call",
"I'll make sure to get back to you",
"I'll call you back shortly",
"I'll return your call as soon as possible",
"Apologies for the inconvenience leave your message",
"Your call is appreciated",
"I'm unavailable to answer",
"I'm currently away",
"I'll return your call as soon as I can",
"I'm away from the phone",
"I'm currently unavailable to take your call",
"Sorry for missing your call",
"I'll ensure it receives my immediate attention",
"I'm away from the phone momentarily",
"I'll reach out to you shortly",
"Apologies for the inconvenience",
"Currently occupied",
"Unable to answer your call at the moment",
"I'll make sure to follow up with you",
"Sorry for not being available",
"I'll reach out to you as soon as I can",
"I'm currently engaged",
"I'm currently busy",
"I'm currently unavailable",
"I'll respond to you at my earliest convenience",
"Your message is appreciated",
"I'll get back to you promptly",
"I'll get back to you without delay",
"Currently away from the phone",
"I'll return your call at my earliest opportunity",
"Sorry for the missed call",
"I'll make sure to address your concerns",
"Please provide your details for a callback",
"I'll make every effort to respond promptly",
"I'll ensure it's attended to promptly",
"Away from the phone temporarily",
"I'll get back to you as soon as I return",
"Currently not in a position to answer your call",
"Your call cannot be answered at the moment",
"I'll ensure to respond as soon as I'm able",
"Your call is important please leave a message",
"Unable to answer right now please leave your message",
"Currently not accessible intending to return your call",
"I'll respond promptly to your message",
"leave a memo",
"please leave a memo"
"we are unable"
],
"es-ES": [
"le pasamos la llamada",

View File

@@ -130,9 +130,7 @@ const JAMBONZ_RECORD_WS_PASSWORD = process.env.JAMBONZ_RECORD_WS_PASSWORD || pro
const JAMBONZ_DISABLE_DIAL_PAI_HEADER = process.env.JAMBONZ_DISABLE_DIAL_PAI_HEADER || false;
const JAMBONES_DISABLE_DIRECT_P2P_CALL = process.env.JAMBONES_DISABLE_DIRECT_P2P_CALL || false;
const JAMBONES_EAGERLY_PRE_CACHE_AUDIO = parseInt(process.env.JAMBONES_EAGERLY_PRE_CACHE_AUDIO, 10) || 0;
const JAMBONES_USE_FREESWITCH_TIMER_FD = process.env.JAMBONES_USE_FREESWITCH_TIMER_FD;
const JAMBONES_EAGERLY_PRE_CACHE_AUDIO = process.env.JAMBONES_EAGERLY_PRE_CACHE_AUDIO;
module.exports = {
JAMBONES_MYSQL_HOST,
@@ -215,6 +213,5 @@ module.exports = {
JAMBONZ_RECORD_WS_USERNAME,
JAMBONZ_RECORD_WS_PASSWORD,
JAMBONZ_DISABLE_DIAL_PAI_HEADER,
JAMBONES_DISABLE_DIRECT_P2P_CALL,
JAMBONES_USE_FREESWITCH_TIMER_FD
JAMBONES_DISABLE_DIRECT_P2P_CALL
};

View File

@@ -30,20 +30,6 @@ const appsMap = {
}
]
}]
},
conference: {
// Dummy hook to follow later feature server logic.
call_hook: {
url: 'https://jambonz.org',
method: 'GET'
},
account_sid: '',
app_json: [{
verb: 'conference',
name: '',
beep: false,
startConferenceOnEnter: true
}]
}
};
@@ -52,7 +38,6 @@ const createJambonzApp = (type, {account_sid, name, caller_id}) => {
app.account_sid = account_sid;
switch (type) {
case 'queue':
case 'conference':
app.app_json[0].name = name;
break;
case 'user':

View File

@@ -75,19 +75,13 @@ module.exports = function(srf, logger) {
req.locals.application_sid = application_sid;
}
// check for call to queue
else if (uri.user?.startsWith('queue-') && req.locals.originatingUser && clientDb?.allow_direct_queue_calling) {
if (uri.user?.startsWith('queue-') && req.locals.originatingUser && clientDb?.allow_direct_queue_calling) {
const queue_name = uri.user.match(/queue-(.*)/)[1];
logger.debug(`got Queue from Request URI header: ${queue_name}`);
req.locals.queue_name = queue_name;
}
// check for call to conference
else if (uri.user?.startsWith('conference-') && req.locals.originatingUser && clientDb?.allow_direct_app_calling) {
const conference_id = uri.user.match(/conference-(.*)/)[1];
logger.debug(`got Conference from Request URI header: ${conference_id}`);
req.locals.conference_id = conference_id;
}
// check for call to registered user
else if (!JAMBONES_DISABLE_DIRECT_P2P_CALL && req.locals.originatingUser && clientDb?.allow_direct_user_calling) {
if (!JAMBONES_DISABLE_DIRECT_P2P_CALL && req.locals.originatingUser && clientDb?.allow_direct_user_calling) {
const arr = /^(.*)@(.*)/.exec(req.locals.originatingUser);
if (arr) {
const sipRealm = arr[2];
@@ -103,7 +97,7 @@ module.exports = function(srf, logger) {
if (req.has('X-MS-Teams-Tenant-FQDN')) req.locals.msTeamsTenant = req.get('X-MS-Teams-Tenant-FQDN');
if (req.has('X-Cisco-Recording-Participant')) {
const ciscoParticipants = req.get('X-Cisco-Recording-Participant');
const regex = /sip:[a-zA-Z0-9]+@[a-zA-Z0-9.-_]+/g;
const regex = /sip:[\d]+@[\d]+\.[\d]+\.[\d]+\.[\d]+/g;
const sipURIs = ciscoParticipants.match(regex);
logger.info(`X-Cisco-Recording-Participant : ${sipURIs} `);
if (sipURIs && sipURIs.length > 0) {
@@ -243,9 +237,6 @@ module.exports = function(srf, logger) {
logger.debug(`calling to registered user ${req.locals.called_user}, generating dial app`);
app = createJambonzApp('user',
{account_sid, name: req.locals.called_user, caller_id: req.locals.callingNumber});
} else if (req.locals.conference_id) {
logger.debug(`calling to conference ${req.locals.conference_id}, generating conference app`);
app = createJambonzApp('conference', {account_sid, name: req.locals.conference_id});
} else if (req.locals.application_sid) {
app = await lookupAppBySid(req.locals.application_sid);
} else if (req.locals.originatingUser) {
@@ -352,15 +343,6 @@ module.exports = function(srf, logger) {
direction: CallDirection.Inbound,
traceId: rootSpan.traceId
});
// if transferred call contains callInfo, let update original data to newly created callInfo in this instance.
if (app.transferredCall && app.callInfo) {
req.locals.callInfo.callerName = app.callInfo.callerName;
req.locals.callInfo.from = app.callInfo.from;
req.locals.callInfo.to = app.callInfo.to;
req.locals.callInfo.originatingSipIp = app.callInfo.originatingSipIp;
req.locals.callInfo.originatingSipTrunkName = app.callInfo.originatingSipTrunkName;
delete app.callInfo;
}
next();
} catch (err) {
span.end();

View File

@@ -53,24 +53,16 @@ class AdultingCallSession extends CallSession {
}
_callerHungup() {
this._hangup('caller');
}
_jambonzHangup() {
this._hangup();
}
_hangup(terminatedBy = 'jambonz') {
if (this.dlg.connectTime) {
const duration = moment().diff(this.dlg.connectTime, 'seconds');
this.rootSpan.setAttributes({'call.termination': `hangup by ${terminatedBy}`});
this.callInfo.callTerminationBy = terminatedBy;
this.rootSpan.setAttributes({'call.termination': 'hangup by caller'});
this.callInfo.callTerminationBy = 'caller';
this.emit('callStatusChange', {
callStatus: CallStatus.Completed,
duration
});
}
this.logger.info(`InboundCallSession: ${terminatedBy} hung up`);
this.logger.info('InboundCallSession: caller hung up');
this._callReleased();
this.req.removeAllListeners('cancel');
}

View File

@@ -7,26 +7,21 @@ const {
TaskName,
KillReason,
RecordState,
AllowedSipRecVerbs,
AllowedConfirmSessionVerbs
AllowedSipRecVerbs
} = require('../utils/constants');
const moment = require('moment');
const assert = require('assert');
const sessionTracker = require('./session-tracker');
const makeTask = require('../tasks/make_task');
const parseDecibels = require('../utils/parse-decibels');
const { normalizeJambones } = require('@jambonz/verb-specifications');
const listTaskNames = require('../utils/summarize-tasks');
const HttpRequestor = require('../utils/http-requestor');
const WsRequestor = require('../utils/ws-requestor');
const ActionHookDelayProcessor = require('../utils/action-hook-delay');
const {
JAMBONES_INJECT_CONTENT,
JAMBONES_EAGERLY_PRE_CACHE_AUDIO,
AWS_REGION,
JAMBONES_USE_FREESWITCH_TIMER_FD
} = require('../config');
const bent = require('bent');
const BackgroundTaskManager = require('../utils/background-task-manager');
const BADPRECONDITIONS = 'preconditions not met';
const CALLER_CANCELLED_ERR_MSG = 'Response not sent due to unknown transaction';
@@ -113,19 +108,13 @@ class CallSession extends Emitter {
this.requestor.removeAllListeners();
this.application.requestor = newRequestor;
this.requestor.on('command', this._onCommand.bind(this));
this.logger.debug(`CallSession: ${this.callSid} listener count ${this.requestor.listenerCount('command')}`);
this.requestor.on('connection-dropped', this._onWsConnectionDropped.bind(this));
this.requestor.on('handover', handover.bind(this));
this.requestor.on('reconnect-error', this._onSessionReconnectError.bind(this));
};
if (!this.isConfirmCallSession) {
this.requestor.on('command', this._onCommand.bind(this));
this.logger.debug(`CallSession: ${this.callSid} listener count ${this.requestor.listenerCount('command')}`);
this.requestor.on('connection-dropped', this._onWsConnectionDropped.bind(this));
this.requestor.on('handover', handover.bind(this));
this.requestor.on('reconnect-error', this._onSessionReconnectError.bind(this));
}
this.requestor.on('command', this._onCommand.bind(this));
this.requestor.on('connection-dropped', this._onWsConnectionDropped.bind(this));
this.requestor.on('handover', handover.bind(this));
}
/**
@@ -196,24 +185,6 @@ class CallSession extends Emitter {
this._synthesizer = synth;
}
/**
* ASR TTS fallback
*/
get hasFallbackAsr() {
return this._hasFallbackAsr || false;
}
set hasFallbackAsr(i) {
this._hasFallbackAsr = i;
}
get hasFallbackTts() {
return this._hasFallbackTts || false;
}
set hasFallbackTts(i) {
this._hasFallbackTts = i;
}
/**
* default vendor to use for speech synthesis if not provided in the app
*/
@@ -339,17 +310,6 @@ class CallSession extends Emitter {
this.application.fallback_speech_recognizer_language = language;
}
/**
* Vad
*/
get vad() {
return this._vad;
}
set vad(v) {
this._vad = v;
}
/**
* indicates whether the call currently in progress
*/
@@ -487,113 +447,6 @@ class CallSession extends Emitter {
this._sipRequestWithinDialogHook = url;
}
// Bot Delay (actionHook delayed)
get actionHookDelayEnabled() {
return this._actionHookDelayEnabled;
}
set actionHookDelayEnabled(e) {
this._actionHookDelayEnabled = e;
}
get actionHookNoResponseTimeout() {
return this._actionHookNoResponseTimeout;
}
set actionHookNoResponseTimeout(e) {
this._actionHookNoResponseTimeout = e;
}
get actionHookNoResponseGiveUpTimeout() {
return this._actionHookNoResponseGiveUpTimeout;
}
set actionHookNoResponseGiveUpTimeout(e) {
this._actionHookNoResponseGiveUpTimeout = e;
}
get actionHookDelayRetries() {
return this._actionHookDelayRetries;
}
set actionHookDelayRetries(e) {
this._actionHookDelayRetries = e;
}
// Getter/setter for current tts vendor
get currentTtsVendor() {
return this._currentTtsVendor;
}
set currentTtsVendor(vendor) {
this._currentTtsVendor = vendor;
}
get actionHookDelayProcessor() {
return this._actionHookDelayProcessor;
}
set actionHookDelayProperties(opts) {
if (this._actionHookDelayProcessor) {
this._actionHookDelayProcessor.stop();
if (!this._actionHookDelayProcessor.init(opts)) {
this._actionHookDelayProcessor.removeAllListeners();
this._actionHookDelayProcessor = null;
}
}
else {
try {
this._actionHookDelayProcessor = new ActionHookDelayProcessor(this.logger, opts, this, this.ep);
this._actionHookDelayProcessor.on('giveup', () => {
this.logger.info('CallSession: ActionHookDelayProcessor: giveup event - hanging up call');
this._jambonzHangup();
if (this.wakeupResolver) {
this.logger.debug('CallSession: Giveup timer expired - waking up');
this.wakeupResolver({reason: 'noResponseGiveUp'});
this.wakeupResolver = null;
}
});
} catch (err) {
this.logger.error({err}, 'CallSession: Error creating ActionHookDelayProcessor');
}
}
}
async clearOrRestoreActionHookDelayProcessor() {
if (this._actionHookDelayProcessor) {
await this._actionHookDelayProcessor.stop();
if (!this.popActionHookDelayProperties()) {
//this.logger.debug('CallSession:clearOrRestoreActionHookDelayProcessor - ahd settings');
//await this.clearActionHookDelayProcessor();
}
else {
this.logger.debug('CallSession:clearOrRestoreActionHookDelayProcessor - restore ahd after gather override');
}
}
}
async clearActionHookDelayProcessor() {
if (this._actionHookDelayProcessor) {
await this._actionHookDelayProcessor.stop();
this._actionHookDelayProcessor.removeAllListeners();
this._actionHookDelayProcessor = null;
}
}
stashActionHookDelayProperties() {
this._storedActionHookDelayProperties = this._actionHookDelayProcessor.properties;
}
popActionHookDelayProperties() {
if (this._storedActionHookDelayProperties) {
this._actionHookDelayProcessor.init(this._storedActionHookDelayProperties);
this._storedActionHookDelayProperties = null;
return true;
}
return false;
}
hasGlobalSttPunctuation() {
return this._globalSttPunctuation !== undefined;
}
@@ -614,18 +467,6 @@ class CallSession extends Emitter {
this.speechSynthesisVoice = this._origSynthesizerSettings.voice;
}
enableFillerNoise(opts) {
this._fillerNoise = opts;
}
disableFillerNoise() {
this._fillerNoise = null;
}
get fillerNoise() {
return this._fillerNoise;
}
async notifyRecordOptions(opts) {
const {action} = opts;
this.logger.debug({opts}, 'CallSession:notifyRecordOptions');
@@ -737,10 +578,8 @@ class CallSession extends Emitter {
const res = await this.dlg.request({
method: 'INFO',
headers: {
'X-Reason': 'pauseCallRecording',
...(this.recordOptions.headers && {'Content-Type': 'application/json'})
},
...(this.recordOptions.headers && {body: JSON.stringify(this.recordOptions.headers) + '\n'})
'X-Reason': 'pauseCallRecording'
}
});
if (res.status === 200) {
this._recordState = RecordState.RecordingPaused;
@@ -761,10 +600,8 @@ class CallSession extends Emitter {
const res = await this.dlg.request({
method: 'INFO',
headers: {
'X-Reason': 'resumeCallRecording',
...(this.recordOptions.headers && {'Content-Type': 'application/json'})
},
...(this.recordOptions.headers && {body: JSON.stringify(this.recordOptions.headers) + '\n'})
'X-Reason': 'resumeCallRecording'
}
});
if (res.status === 200) {
this._recordState = RecordState.RecordingOn;
@@ -799,8 +636,7 @@ class CallSession extends Emitter {
}
task = await this.backgroundTaskManager.newTask('bargeIn', gather);
task.sticky = autoEnable;
// listen to the bargein-done from background manager
this.backgroundTaskManager.on('bargeIn-done', () => {
task.once('bargeIn-done', () => {
if (this.requestor instanceof WsRequestor) {
try {
this.kill(true);
@@ -813,8 +649,6 @@ class CallSession extends Emitter {
}
}
async disableBotMode() {
const task = this.backgroundTaskManager.getTask('bargeIn');
if (task) task.sticky = false;
this.backgroundTaskManager.stop('bargeIn');
}
@@ -855,7 +689,7 @@ class CallSession extends Emitter {
(type === 'stt' && credential.use_for_stt)
)) {
this.logger.info(
`${type}: ${credential.vendor} ${credential.label ? `, label: ${credential.label}` : ''} `);
`Speech vendor: ${credential.vendor} ${credential.label ? `, label: ${credential.label}` : ''} selected`);
if ('google' === vendor) {
if (type === 'tts' && !credential.tts_tested_ok ||
type === 'stt' && !credential.stt_tested_ok) {
@@ -882,7 +716,6 @@ class CallSession extends Emitter {
speech_credential_sid: credential.speech_credential_sid,
accessKeyId: credential.access_key_id,
secretAccessKey: credential.secret_access_key,
roleArn: credential.role_arn,
region: credential.aws_region || AWS_REGION
};
}
@@ -917,9 +750,7 @@ class CallSession extends Emitter {
else if ('deepgram' === vendor) {
return {
speech_credential_sid: credential.speech_credential_sid,
api_key: credential.api_key,
deepgram_stt_uri: credential.deepgram_stt_uri,
deepgram_stt_use_tls: credential.deepgram_stt_use_tls
api_key: credential.api_key
};
}
else if ('soniox' === vendor) {
@@ -954,19 +785,6 @@ class CallSession extends Emitter {
model_id: credential.model_id,
options: credential.options
};
} else if ('playht' === vendor) {
return {
api_key: credential.api_key,
user_id: credential.user_id,
voice_engine: credential.voice_engine,
options: credential.options
};
} else if ('rimelabs' === vendor) {
return {
api_key: credential.api_key,
model_id: credential.model_id,
options: credential.options
};
} else if ('assemblyai' === vendor) {
return {
speech_credential_sid: credential.speech_credential_sid,
@@ -977,12 +795,6 @@ class CallSession extends Emitter {
api_key: credential.api_key,
model_id: credential.model_id
};
} else if ('verbio' === vendor) {
return {
client_id: credential.client_id,
client_secret: credential.client_secret,
engine_version: credential.engine_version
};
} else if (vendor.startsWith('custom:')) {
return {
speech_credential_sid: credential.speech_credential_sid,
@@ -1016,10 +828,6 @@ class CallSession extends Emitter {
const task = this.tasks.shift();
this.logger.info(`CallSession:exec starting task #${stackNum}:${taskNum}: ${task.name}`);
this._notifyTaskStatus(task, {event: 'starting'});
// Register verbhook span wait for end
task.on('VerbHookSpanWaitForEnd', ({span}) => {
this.verbHookSpan = span;
});
try {
const resources = await this._evaluatePreconditions(task);
let skip = false;
@@ -1065,15 +873,6 @@ class CallSession extends Emitter {
) {
try {
await this._awaitCommandsOrHangup();
await this.clearOrRestoreActionHookDelayProcessor();
//TODO: remove filler noise code and simply create as action hook delay
if (this._isPlayingFillerNoise) {
this._isPlayingFillerNoise = false;
this.ep.api('uuid_break', this.ep.uuid)
.catch((err) => this.logger.info(err, 'Error killing filler noise'));
}
if (this.callGone) break;
} catch (err) {
this.logger.info(err, 'CallSession:exec - error waiting for new commands');
@@ -1085,6 +884,7 @@ class CallSession extends Emitter {
// all done - cleanup
this.logger.info('CallSession:exec all tasks complete');
this._stopping = true;
this.disableBotMode();
this._onTasksDone();
this._clearResources();
@@ -1170,64 +970,6 @@ class CallSession extends Emitter {
}
}
/**
* perform live call control - create rest:dial
* @param {obj} opts create call options
*/
async _lccCallDial(opts) {
try {
const restDialUrl = `${this.srf.locals.serviceUrl}/v1/createCall`;
await this.transformInputIfRequired(opts);
const resp = bent('POST', 'json', 201)(restDialUrl, opts);
this.logger.info(resp.body, 'successfully create outbound call');
return resp.body;
} catch (err) {
if (err.json) {
err.body = await err.json();
}
this.logger.error(err, 'failed to create outbound call from ' + this.callSid);
this._notifyTaskError(err.body);
}
}
async transformInputIfRequired(opts) {
const {
lookupAppBySid
} = this.srf.locals.dbHelpers;
opts.account_sid = this.accountSid;
if (opts.application_sid) {
this.logger.debug(`Callsession:_validateCreateCall retrieving application ${opts.application_sid}`);
const application = await lookupAppBySid(opts.application_sid);
Object.assign(opts, {
call_hook: application.call_hook,
app_json: application.app_json,
call_status_hook: application.call_status_hook,
speech_synthesis_vendor: application.speech_synthesis_vendor,
speech_synthesis_language: application.speech_synthesis_language,
speech_synthesis_voice: application.speech_synthesis_voice,
speech_recognizer_vendor: application.speech_recognizer_vendor,
speech_recognizer_language: application.speech_recognizer_language
});
this.logger.debug({opts, application}, 'Callsession:_validateCreateCall augmented with application settings');
}
if (typeof opts.call_hook === 'string') {
const url = opts.call_hook;
opts.call_hook = {
url,
method: 'POST'
};
}
if (typeof opts.call_status_hook === 'string') {
const url = opts.call_status_hook;
opts.call_status_hook = {
url,
method: 'POST'
};
}
}
/**
* perform live call control -- set a new call_hook
* @param {object} opts
@@ -1288,19 +1030,11 @@ class CallSession extends Emitter {
const t = normalizeJambones(this.logger, tasks).map((tdata) => makeTask(this.logger, tdata));
this.logger.info({tasks: listTaskNames(t)}, 'CallSession:_lccCallHook new task list');
this.replaceApplication(t);
if (this.wakeupResolver) {
//this.logger.debug({resolution}, 'CallSession:_onCommand - got commands, waking up..');
this.wakeupResolver({reason: 'lcc: new tasks'});
this.wakeupResolver = null;
}
}
else {
/* we started a new app on the child leg, but nothing given for parent so hang him up */
this.currentTask.kill(this);
}
this._endVerbHookSpan();
await this.clearOrRestoreActionHookDelayProcessor();
}
/**
@@ -1326,9 +1060,6 @@ class CallSession extends Emitter {
* @param {string} opts.transcribe_status - 'pause' or 'resume'
*/
async _lccTranscribeStatus(opts) {
if (this.backgroundTaskManager.isTaskRunning('transcribe')) {
this.backgroundTaskManager.getTask('transcribe').updateTranscribe(opts.transcribe_status);
}
const task = this.currentTask;
if (!task || ![TaskName.Dial, TaskName.Transcribe].includes(task.name)) {
return this.logger.info(`CallSession:_lccTranscribeStatus - invalid transcribe_status in task ${task.name}`);
@@ -1356,15 +1087,7 @@ class CallSession extends Emitter {
this.callInfo.customerData = tag;
}
async _lccConferenceParticipantAction(opts) {
const task = this.currentTask;
if (!task || TaskName.Conference !== task.name || !this.isInConference) {
return this.logger.info('CallSession:_lccConferenceParticipantAction - invalid cmd, call is not in conference');
}
task.doConferenceParticipantAction(this, opts);
}
async _lccMuteStatus(mute, callSid) {
async _lccMuteStatus(callSid, mute) {
// this whole thing requires us to be in a Dial or Conference verb
const task = this.currentTask;
if (!task || ![TaskName.Dial, TaskName.Conference].includes(task.name)) {
@@ -1496,78 +1219,6 @@ Duration=${duration} `
task.whisper(tasks, callSid).catch((err) => this.logger.error(err, 'CallSession:_lccWhisper'));
}
async _lccConfig(opts) {
this.logger.debug({opts}, 'CallSession:_lccConfig');
const t = normalizeJambones(this.logger, [
{
verb: 'config',
...opts
}
])
.map((tdata) => makeTask(this.logger, tdata));
const task = t[0];
const {span, ctx} = this.rootSpan.startChildSpan(`verb:${task.summary}`);
span.setAttributes({'verb.summary': task.summary});
task.span = span;
task.ctx = ctx;
try {
await task.exec(this, {ep: this.ep});
} catch (err) {
this.logger.error(err, 'CallSession:_lccConfig');
}
task.span.end();
}
async _lccDub(opts, callSid) {
this.logger.debug({opts}, `CallSession:_lccDub on call_sid ${callSid}`);
const t = normalizeJambones(this.logger, [
{
verb: 'dub',
...opts
}
])
.map((tdata) => makeTask(this.logger, tdata));
const task = t[0];
const ep = this.currentTask?.name === TaskName.Dial && callSid === this.currentTask?.callSid ?
this.currentTask.ep :
this.ep;
const {span, ctx} = this.rootSpan.startChildSpan(`verb:${task.summary}`);
span.setAttributes({'verb.summary': task.summary});
task.span = span;
task.ctx = ctx;
try {
await task.exec(this, {ep});
} catch (err) {
this.logger.error(err, 'CallSession:_lccDub');
}
task.span.end();
}
async _lccBoostAudioSignal(opts, callSid) {
const ep = this.currentTask?.name === TaskName.Dial && callSid === this.currentTask?.callSid ?
this.currentTask.ep :
this.ep;
const db = parseDecibels(opts);
this.logger.info(`_lccBoostAudioSignal: boosting audio signal by ${db} dB`);
const args = [ep.uuid, 'setGain', db];
const response = await ep.api('uuid_dub', args);
this.logger.info({response}, '_lccBoostAudioSignal: response from freeswitch');
}
/**
* perform call hangup by jambonz
*/
async hangup() {
return this._callerHungup();
}
/**
* perform live call control
@@ -1590,7 +1241,7 @@ Duration=${duration} `
await this._lccTranscribeStatus(opts);
}
else if (opts.mute_status) {
await this._lccMuteStatus(opts.mute_status === 'mute', callSid);
await this._lccMuteStatus(callSid, opts.mute_status === 'mute');
}
else if (opts.conf_hold_status) {
await this._lccConfHoldStatus(opts);
@@ -1610,15 +1261,6 @@ Duration=${duration} `
else if (opts.tag) {
return this._lccTag(opts);
}
else if (opts.conferenceParticipantAction) {
return this._lccConferenceParticipantAction(opts.conferenceParticipantAction);
}
else if (opts.dub) {
return this._lccDub(opts);
}
else if (opts.boostAudioSignal) {
return this._lccBoostAudioSignal(opts, callSid);
}
// whisper may be the only thing we are asked to do, or it may that
// we are doing a whisper after having muted, paused recording etc..
@@ -1650,19 +1292,6 @@ Duration=${duration} `
tasks = pruned;
}
}
else if (this.isConfirmCallSession) {
const pruned = tasks.filter((t) => AllowedConfirmSessionVerbs.includes(t.name));
if (0 === pruned.length) {
this.logger.info({tasks},
'CallSession:replaceApplication - filtering verbs allowed on an confirmSession call');
return;
}
if (pruned.length < tasks.length) {
this.logger.info(
'CallSession:replaceApplication - removing verbs that are not allowed for confirmSession call');
tasks = pruned;
}
}
this.tasks = tasks;
this.taskIdx = 0;
this.stackIdx++;
@@ -1672,17 +1301,11 @@ Duration=${duration} `
this.currentTask.kill(this, KillReason.Replaced);
this.currentTask = null;
}
else if (this.wakeupResolver) {
this.logger.debug('CallSession:replaceApplication - waking up');
this.wakeupResolver({reason: 'new tasks'});
this.wakeupResolver = null;
}
}
kill(onBackgroundGatherBargein = false) {
if (this.isConfirmCallSession) this.logger.debug('CallSession:kill (ConfirmSession)');
else this.logger.info('CallSession:kill');
this._endVerbHookSpan();
if (this.currentTask) {
this.currentTask.kill(this);
this.currentTask = null;
@@ -1701,8 +1324,7 @@ Duration=${duration} `
this.logger.info('CallSession:kill - found bargein disabled in the stack, clearing to that point');
break;
}
const rem = this.tasks.shift();
this.logger.debug(`CallSession:kill - clearing task ${rem.summary}`);
this.tasks.shift();
}
}
else this.tasks = [];
@@ -1710,23 +1332,7 @@ Duration=${duration} `
}
_preCacheAudio(newTasks) {
/**
* only precache audio for the a queued say if we have one or more non-Config verbs
* ahead of it in the queue. This is because the Config verb returns immediately
* and would not give us enough time to generate the audio. The point of precaching
* is to take advantage of getting the audio in advance of being needed, so we need
* to be confident we have some time before the say verb is executed, and the Config
* does not give us that confidence since it returns immediately.
*/
const haveQueuedNonConfig = this.tasks.findIndex((t) => t.name !== TaskName.Config) !== -1;
let tasks = haveQueuedNonConfig ? newTasks : [];
if (!haveQueuedNonConfig) {
const idxFirstNotConfig = newTasks.findIndex((t) => t.name !== TaskName.Config);
if (-1 === idxFirstNotConfig) return;
tasks = newTasks.slice(idxFirstNotConfig + 1);
}
for (const task of tasks) {
for (const task of newTasks) {
if (task.name === TaskName.Config && task.hasSynthesizer) {
/* if they change synthesizer settings don't try to precache */
break;
@@ -1798,29 +1404,12 @@ Duration=${duration} `
}, 'CallSession:_injectTasks - completed');
}
async _onSessionReconnectError(err) {
const {writeAlerts, AlertType} = this.srf.locals;
const sid = this.accountInfo.account.account_sid;
this.logger.info({err}, `_onSessionReconnectError for account ${sid}`);
try {
await writeAlerts({
alert_type: AlertType.WEBHOOK_CONNECTION_FAILURE,
account_sid: this.accountSid,
detail: `Session:reconnect error ${err}`
});
} catch (error) {
this.logger.error({error}, 'Error writing WEBHOOK_CONNECTION_FAILURE alert');
}
this._jambonzHangup();
}
async _onCommand({msgid, command, call_sid, queueCommand, data}) {
_onCommand({msgid, command, call_sid, queueCommand, data}) {
this.logger.info({msgid, command, queueCommand, data}, 'CallSession:_onCommand - received command');
let resolution;
const resolution = {reason: 'received command', queue: queueCommand, command};
switch (command) {
case 'redirect':
if (Array.isArray(data)) {
this._endVerbHookSpan();
const t = normalizeJambones(this.logger, data)
.map((tdata) => makeTask(this.logger, tdata));
if (!queueCommand) {
@@ -1837,11 +1426,7 @@ Duration=${duration} `
this.tasks.push(...t);
this.logger.info({tasks: listTaskNames(this.tasks)}, 'CallSession:_onCommand - updated task list');
}
resolution = {reason: 'received command, new tasks', queue: queueCommand, command};
resolution.command = listTaskNames(t);
// clear all delay action hook timeout if there is
await this.clearOrRestoreActionHookDelayProcessor();
}
else this._lccCallHook(data);
break;
@@ -1850,24 +1435,8 @@ Duration=${duration} `
this._lccCallStatus(data);
break;
case 'config':
this._lccConfig(data, call_sid);
break;
case 'dial':
this._lccCallDial(data);
break;
case 'dub':
this._lccDub(data, call_sid);
break;
case 'record':
this.notifyRecordOptions(data);
break;
case 'mute:status':
this._lccMuteStatus(data, call_sid);
this._lccMuteStatus(call_sid, data);
break;
case 'conf:mute-status':
@@ -1878,10 +1447,6 @@ Duration=${duration} `
this._lccConfHoldStatus(data);
break;
case 'conf:participant-action':
this._lccConferenceParticipantAction(data);
break;
case 'listen:status':
this._lccListenStatus(data);
break;
@@ -1908,17 +1473,10 @@ Duration=${duration} `
});
break;
case 'boostAudioSignal':
this._lccBoostAudioSignal(data, call_sid)
.catch((err) => {
this.logger.info({err, data}, 'CallSession:_onCommand - error boosting audio signal');
});
break;
default:
this.logger.info(`CallSession:_onCommand - invalid command ${command}`);
}
if (this.wakeupResolver && resolution) {
if (this.wakeupResolver) {
//this.logger.debug({resolution}, 'CallSession:_onCommand - got commands, waking up..');
this.wakeupResolver(resolution);
this.wakeupResolver = null;
@@ -1994,7 +1552,7 @@ Duration=${duration} `
});
//ep.cs = this;
this.ep = ep;
this.logger.info(`allocated endpoint ${ep.uuid}`);
this.logger.debug(`allocated endpoint ${ep.uuid}`);
this._configMsEndpoint();
@@ -2101,26 +1659,16 @@ Duration=${duration} `
this.rootSpan && this.rootSpan.end();
// close all background tasks
this.backgroundTaskManager.stopAll();
this.clearOrRestoreActionHookDelayProcessor().catch((err) => {});
}
/**
 * Called when the caller has hung up. Subclasses must override
 * in order to apply logic (CDR/duration, status events) at this point.
 */
_callerHungup() {
  assert(false, 'subclass responsibility to override this method');
}

/**
 * Called when jambonz has hung up the call. Subclasses must override
 * in order to apply logic at this point if needed.
 */
_jambonzHangup() {
  assert(false, 'subclass responsibility to override this method');
}
/**
* get a media server to use for this call
*/
@@ -2168,10 +1716,6 @@ Duration=${duration} `
}
this.logger.debug(`CallSession:propagateAnswer - answered callSid ${this.callSid}`);
}
else {
this.logger.debug('CallSession:propagateAnswer - call already answered - re-anchor media with a reinvite');
await this.dlg.modify(this.ep.local.sdp);
}
}
async _onRequestWithinDialog(req, res) {
@@ -2200,7 +1744,7 @@ Duration=${duration} `
res.send(200, {body: this.ep.local.sdp});
}
else {
if (this.currentTask.name === TaskName.Dial && this.currentTask.isOnHoldEnabled) {
if (this.currentTask.name === TaskName.Dial && this.currentTask.isOnHold) {
this.logger.info('onholdMusic reINVITE after media has been released');
await this.currentTask.handleReinviteAfterMediaReleased(req, res);
} else {
@@ -2476,12 +2020,8 @@ Duration=${duration} `
}
_configMsEndpoint() {
const opts = {
...(this.onHoldMusic && {holdMusic: `shout://${this.onHoldMusic.replace(/^https?:\/\//, '')}`}),
...(JAMBONES_USE_FREESWITCH_TIMER_FD && {timer_name: 'timerfd'})
};
if (Object.keys(opts).length > 0) {
this.ep.set(opts);
if (this.onHoldMusic) {
this.ep.set({hold_music: `shout://${this.onHoldMusic.replace(/^https?:\/\//, '')}`});
}
}
@@ -2509,30 +2049,6 @@ Duration=${duration} `
return new Promise((resolve, reject) => {
this.logger.info('_awaitCommandsOrHangup - waiting...');
this.wakeupResolver = resolve;
if (this._actionHookDelayProcessor) {
this._actionHookDelayProcessor.start();
}
/**
* TODO: filler noise can be handled as an ActionHookDelayProcessor -
* it's just one specific scenario for action hook delay -
* remove the code below and simply implement filler noise as an action hook delay
*/
/* start filler noise if configured while we wait for new commands */
if (this.fillerNoise?.url && this.ep?.connected && !this.ep2) {
this.logger.debug('CallSession:_awaitCommandsOrHangup - playing filler noise');
this._isPlayingFillerNoise = true;
this.ep.play(this.fillerNoise.url);
this.ep.once('playback-start', (evt) => {
if (evt.file === this.fillerNoise.url && !this._isPlayingFillerNoise) {
this.logger.info('CallSession:_awaitCommandsOrHangup - filler noise started');
this.ep.api('uuid_break', this.ep.uuid)
.catch((err) => this.logger.info(err, 'Error killing filler noise'));
}
});
}
});
}
@@ -2557,13 +2073,6 @@ Duration=${duration} `
/**
 * Stop a managed background task of the given type, if one is running.
 * @param {string} type - background task type (e.g. 'transcribe')
 */
stopBackgroundTask(type) {
  this.backgroundTaskManager.stop(type);
}
_endVerbHookSpan() {
if (this.verbHookSpan) {
this.verbHookSpan.end();
this.verbHookSpan = null;
}
}
}
module.exports = CallSession;

View File

@@ -34,9 +34,6 @@ class ConfirmCallSession extends CallSession {
_callerHungup() {
}
_jambonzHangup() {
}
}

View File

@@ -67,27 +67,15 @@ class InboundCallSession extends CallSession {
* This is invoked when the caller hangs up, in order to calculate the call duration.
*/
_callerHungup() {
this._hangup('caller');
}
_jambonzHangup() {
this.dlg?.destroy();
}
_hangup(terminatedBy = 'jambonz') {
if (this.dlg === null) {
this.logger.info('InboundCallSession:_hangup - race condition, dlg cleared by app hangup');
return;
}
this.logger.info(`InboundCallSession: ${terminatedBy} hung up`);
assert(this.dlg.connectTime);
const duration = moment().diff(this.dlg.connectTime, 'seconds');
this.rootSpan.setAttributes({'call.termination': `hangup by ${terminatedBy}`});
this.callInfo.callTerminationBy = terminatedBy;
this.rootSpan.setAttributes({'call.termination': 'hangup by caller'});
this.callInfo.callTerminationBy = 'caller';
this.emit('callStatusChange', {
callStatus: CallStatus.Completed,
duration
});
this.logger.info('InboundCallSession: caller hung up');
this._callReleased();
this.req.removeAllListeners('cancel');
}

View File

@@ -49,21 +49,13 @@ class RestCallSession extends CallSession {
* This is invoked when the called party hangs up, in order to calculate the call duration.
*/
_callerHungup() {
this._hangup('caller');
}
_jambonzHangup() {
this._hangup();
}
_hangup(terminatedBy = 'jambonz') {
if (this.restDialTask) {
this.restDialTask.turnOffAmd();
}
this.callInfo.callTerminationBy = terminatedBy;
this.callInfo.callTerminationBy = 'caller';
const duration = moment().diff(this.dlg.connectTime, 'seconds');
this.emit('callStatusChange', {callStatus: CallStatus.Completed, duration});
this.logger.debug(`RestCallSession: called party hung up by ${terminatedBy}`);
this.logger.debug('RestCallSession: called party hung up');
this._callReleased();
}

View File

@@ -1,22 +0,0 @@
const Task = require('./task');
const {TaskName, TaskPreconditions} = require('../utils/constants');

/**
 * Answer the call.
 * Note: This is rarely used, as the call is typically answered automatically when required by the app,
 * but it can be useful to force an answer before a pause in some cases
 */
class TaskAnswer extends Task {
  constructor(logger, opts) {
    super(logger, opts);
    // the Endpoint precondition is what forces media setup/answer;
    // the task body itself has no additional work to do
    this.preconditions = TaskPreconditions.Endpoint;
  }

  get name() { return TaskName.Answer; }

  async exec(cs) {
    // no-op beyond base-class bookkeeping
    super.exec(cs);
  }
}

module.exports = TaskAnswer;

View File

@@ -6,7 +6,6 @@ const { normalizeJambones } = require('@jambonz/verb-specifications');
const makeTask = require('./make_task');
const bent = require('bent');
const assert = require('assert');
const HttpRequestor = require('../utils/http-requestor');
const WAIT = 'wait';
const JOIN = 'join';
const START = 'start';
@@ -61,8 +60,6 @@ class Conference extends Task {
this.emitter = new Emitter();
this.results = {};
this.coaching = [];
this.speakOnlyTo = this.data.speakOnlyTo;
// transferred from another server in order to bridge to a local caller?
if (this.data._ && this.data._.connectTime) {
@@ -351,27 +348,16 @@ class Conference extends Task {
Object.assign(opts, {flags: {
...(this.endConferenceOnExit && {endconf: true}),
...(this.startConferenceOnEnter && {moderator: true}),
...((this.joinMuted || this.speakOnlyTo) && {joinMuted: true}),
...(this.joinMuted && {joinMuted: true}),
}});
/**
* Note on the above: if we are joining in "coaching" mode (ie only going to heard by a subset of participants)
* then we join muted temporarily, and then unmute ourselves once we have identified the subset of participants
* to whom we will be speaking.
*/
}
try {
const {memberId, confUuid} = await this.ep.join(this.confName, opts);
this.logger.debug({memberId, confUuid}, `Conference:_joinConference: successfully joined ${this.confName}`);
this.memberId = parseInt(memberId, 10);
this.memberId = memberId;
this.confUuid = confUuid;
// set a tag for this member, if provided
if (this.data.memberTag) {
this.setMemberTag(this.data.memberTag);
}
cs.setConferenceDetails(memberId, this.confName, confUuid);
const response = await this.ep.api('conference', [this.confName, 'get', 'count']);
if (response.body && /\d+/.test(response.body)) this.participantCount = parseInt(response.body);
@@ -398,9 +384,6 @@ class Conference extends Task {
.catch((err) => {});
}
if (this.speakOnlyTo) {
this.setCoachMode(this.speakOnlyTo);
}
} catch (err) {
this.logger.error(err, `Failed to join conference ${this.confName}`);
throw err;
@@ -445,15 +428,7 @@ class Conference extends Task {
}
}
doConferenceMute(cs, opts) {
assert (cs.isInConference);
const mute = opts.conf_mute_status === 'mute';
this.ep.api(`conference ${this.confName} ${mute ? 'mute' : 'unmute'} ${this.memberId}`)
.catch((err) => this.logger.info({err}, 'Error muting or unmuting participant'));
}
doConferenceHold(cs, opts) {
async doConferenceHold(cs, opts) {
assert (cs.isInConference);
const {conf_hold_status, wait_hook} = opts;
@@ -490,40 +465,6 @@ class Conference extends Task {
}
}
/**
 * Apply a live-call-control action to this conference participant.
 * @param {CallSession} cs - owning call session
 * @param {object} opts - {action, tag}; tag is used by 'tag' and 'coach'
 *
 * NOTE(review): tag/untag/coach/uncoach are awaited but hold/unhold/mute/unmute
 * are fire-and-forget - presumably intentional; confirm.
 */
async doConferenceParticipantAction(cs, opts) {
  const {action, tag} = opts;
  switch (action) {
    case 'tag':
      await this.setMemberTag(tag);
      break;
    case 'untag':
      await this.clearMemberTag();
      break;
    case 'coach':
      // tag here names the subset of participants that should hear us
      await this.setCoachMode(tag);
      break;
    case 'uncoach':
      await this.clearCoachMode();
      break;
    case 'hold':
      this.doConferenceHold(cs, {conf_hold_status: 'hold'});
      break;
    case 'unhold':
      this.doConferenceHold(cs, {conf_hold_status: 'unhold'});
      break;
    case 'mute':
      this.doConferenceMute(cs, {conf_mute_status: 'mute'});
      break;
    case 'unmute':
      this.doConferenceMute(cs, {conf_mute_status: 'unmute'});
      break;
    default:
      this.logger.info(`Conference:doConferenceParticipantAction - unhandled action ${action}`);
      break;
  }
}
async _doWaitHookWhileOnHold(cs, dlg, wait_hook) {
do {
try {
@@ -570,7 +511,7 @@ class Conference extends Task {
_normalizeHook(cs, hook) {
if (typeof hook === 'object') return hook;
const url = hook.startsWith('/') ?
`${cs.application.requestor instanceof HttpRequestor ? cs.application.requestor.baseUrl : ''}${hook}` :
`${cs.application.requestor.baseUrl}${hook}` :
hook;
return { url } ;
@@ -589,7 +530,7 @@ class Conference extends Task {
const response = await this.ep.api('conference', [this.confName, 'get', 'count']);
if (response.body && confNoMatch(response.body)) this.participantCount = 0;
else if (response.body && /^\d+$/.test(response.body)) this.participantCount = parseInt(response.body) - 1;
this.logger.debug(`Conference:_doFinalMemberCheck conference count ${this.participantCount}`);
this.logger.debug({response}, `Conference:_doFinalMemberCheck conference count ${this.participantCount}`);
} catch (err) {
this.logger.info({err}, 'Conference:_doFinalMemberCheck error retrieving count (we were probably kicked');
}
@@ -701,19 +642,11 @@ class Conference extends Task {
}
// conference event handlers
_onAddMember(logger, cs, evt) {
const memberId = parseInt(evt.getHeader('Member-ID')) ;
if (this.speakOnlyTo) {
logger.debug(`Conference:_onAddMember - member ${memberId} added to ${this.confName}, updating coaching mode`);
this.setCoachMode(this.speakOnlyTo).catch(() => {});
}
else logger.debug(`Conference:_onAddMember - member ${memberId} added to conference ${this.confName}`);
}
_onDelMember(logger, cs, evt) {
const memberId = parseInt(evt.getHeader('Member-ID')) ;
this.participantCount = parseInt(evt.getHeader('Conference-Size'));
if (memberId === this.memberId) {
logger.info(`Conference:_onDelMember - I was dropped from conference ${this.confName}, task is complete`);
this.logger.info(`Conference:_onDelMember - I was dropped from conference ${this.confName}, task is complete`);
this.replaceEndpointAndEnd(cs);
}
}
@@ -742,89 +675,6 @@ class Conference extends Task {
}
}
_onTag(logger, cs, evt) {
const memberId = parseInt(evt.getHeader('Member-ID')) ;
const tag = evt.getHeader('Tag') || '';
if (memberId !== this.memberId && this.speakOnlyTo) {
logger.info(`Conference:_onTag - member ${memberId} set tag to '${tag }'; updating coach mode accordingly`);
this.setCoachMode(this.speakOnlyTo).catch(() => {});
}
}
/**
 * Set the conference to "coaching" mode, where the audio of the participant is only heard
 * by a subset of the participants in the conference.
 * We do this by first getting all of the members who do *not* have this tag, and then
 * we configure this members audio to not be sent to them.
 * @param {string} speakOnlyTo - tag of the members who should receive our audio
 *
 * N.B.: this feature requires jambonz patches to freeswitch mod_conference
 */
async setCoachMode(speakOnlyTo) {
  // remember the tag even if we can't apply it yet (pre-join);
  // _onAddMember/_onTag will re-apply it as membership changes
  this.speakOnlyTo = speakOnlyTo;
  if (!this.memberId) {
    this.logger.info('Conference:_setCoachMode: no member id yet');
    return;
  }
  try {
    // everyone lacking the tag, excluding ourselves
    const members = (await this.ep.getNonMatchingConfParticipants(this.confName, speakOnlyTo))
      .filter((m) => m !== this.memberId);
    if (members.length === 0) {
      this.logger.info({members}, 'Conference:_setCoachMode: all participants have the tag, so all will hear me');
      // clear any previously-established nospeak relations
      if (this.coaching.length) {
        await this.ep.api('conference', [this.confName, 'relate', this.memberId, this.coaching.join(','), 'clear']);
        this.coaching = [];
      }
    }
    else {
      const memberList = members.join(',');
      this.logger.info(`Conference:_setCoachMode: my audio will NOT be sent to ${memberList}`);
      // 'relate ... nospeak' suppresses our audio toward the listed members
      await this.ep.api('conference', [this.confName, 'relate', this.memberId, memberList, 'nospeak']);
      this.coaching = members;
    }
  } catch (err) {
    this.logger.error({err, speakOnlyTo}, '_setCoachMode: Error');
  }
}
/**
 * Exit "coaching" mode: clear any nospeak relations so our audio is
 * once again sent to all participants, and reset coaching state.
 * Errors are logged, not thrown.
 */
async clearCoachMode() {
  if (!this.memberId) return;
  try {
    if (this.coaching.length === 0) {
      this.logger.info('Conference:_clearCoachMode: no coaching mode to clear');
    }
    else {
      const memberList = this.coaching.join(',');
      this.logger.info(`Conference:_clearCoachMode: now sending my audio to all, including ${memberList}`);
      // 'relate ... clear' removes the nospeak relations set in setCoachMode
      await this.ep.api('conference', [this.confName, 'relate', this.memberId, memberList, 'clear']);
    }
    this.speakOnlyTo = null;
    this.coaching = [];
  } catch (err) {
    this.logger.error({err}, '_clearCoachMode: Error');
  }
}
/**
 * Set (or replace) the conference tag for this member.
 * @param {string} tag - tag value to apply
 * Errors are logged, not thrown; this.memberTag is only updated on success.
 */
async setMemberTag(tag) {
  try {
    await this.ep.api('conference', [this.confName, 'tag', this.memberId, tag]);
    this.logger.info(`Conference:setMemberTag: set tag for ${this.memberId} to ${tag}`);
    this.memberTag = tag;
  } catch (err) {
    this.logger.error({err}, `Error setting tag for ${this.memberId} to ${tag}`);
  }
}
async clearMemberTag() {
try {
await this.ep.api('conference', [this.confName, 'tag', this.memberId]);
this.logger.info(`Conference:setMemberTag: clearing tag for ${this.memberId}`);
this.memberTag = null;
} catch (err) {
this.logger.error({err}, `Error clearing tag for ${this.memberId}`);
}
}
}
module.exports = Conference;

View File

@@ -1,22 +1,16 @@
const Task = require('./task');
const {TaskName, TaskPreconditions} = require('../utils/constants');
const parseDecibels = require('../utils/parse-decibels');
class TaskConfig extends Task {
constructor(logger, opts) {
super(logger, opts);
[
'synthesizer',
'recognizer',
'bargeIn',
'record',
'listen',
'transcribe',
'fillerNoise',
'actionHookDelayAction',
'boostAudioSignal',
'vad'
'transcribe'
].forEach((k) => this[k] = this.data[k] || {});
if ('notifyEvents' in this.data) {
@@ -55,7 +49,6 @@ class TaskConfig extends Task {
this.record?.action ||
this.listen?.url ||
this.data.amd ||
'boostAudioSignal' in this.data ||
this.transcribe?.enable) ?
TaskPreconditions.Endpoint :
TaskPreconditions.None;
@@ -70,9 +63,6 @@ class TaskConfig extends Task {
get hasRecording() { return Object.keys(this.record).length; }
get hasListen() { return Object.keys(this.listen).length; }
get hasTranscribe() { return Object.keys(this.transcribe).length; }
get hasDub() { return Object.keys(this.dub).length; }
get hasVad() { return Object.keys(this.vad).length; }
get hasFillerNoise() { return Object.keys(this.fillerNoise).length; }
get summary() {
const phrase = [];
@@ -98,11 +88,9 @@ class TaskConfig extends Task {
if (this.hasTranscribe) {
phrase.push(this.transcribe.enable ? `transcribe ${this.transcribe.transcriptionHook}` : 'stop transcribe');
}
if (this.hasFillerNoise) phrase.push(`fillerNoise ${this.fillerNoise.enable ? 'on' : 'off'}`);
if (this.data.amd) phrase.push('enable amd');
if (this.notifyEvents) phrase.push(`event notification ${this.notifyEvents ? 'on' : 'off'}`);
if (this.onHoldMusic) phrase.push(`onHoldMusic: ${this.onHoldMusic}`);
if ('boostAudioSignal' in this.data) phrase.push(`setGain ${this.data.boostAudioSignal}`);
return `${this.name}{${phrase.join(',')}}`;
}
@@ -141,8 +129,9 @@ class TaskConfig extends Task {
cs.speechSynthesisVendor = this.synthesizer.vendor !== 'default'
? this.synthesizer.vendor
: cs.speechSynthesisVendor;
cs.speechSynthesisLabel = this.synthesizer.label === 'default'
? cs.speechSynthesisLabel : this.synthesizer.label;
cs.speechSynthesisLabel = this.synthesizer.label !== 'default'
? this.synthesizer.label
: cs.speechSynthesisLabel;
cs.speechSynthesisLanguage = this.synthesizer.language !== 'default'
? this.synthesizer.language
: cs.speechSynthesisLanguage;
@@ -154,16 +143,15 @@ class TaskConfig extends Task {
cs.fallbackSpeechSynthesisVendor = this.synthesizer.fallbackVendor !== 'default'
? this.synthesizer.fallbackVendor
: cs.fallbackSpeechSynthesisVendor;
cs.fallbackSpeechSynthesisLabel = this.synthesizer.fallbackLabel === 'default'
? cs.fallbackSpeechSynthesisLabel : this.synthesizer.fallbackLabel;
cs.fallbackSpeechSynthesisLabel = this.synthesizer.fallbackLabel !== 'default'
? this.synthesizer.fallbackLabel
: cs.fallbackSpeechSynthesisLabel;
cs.fallbackSpeechSynthesisLanguage = this.synthesizer.fallbackLanguage !== 'default'
? this.synthesizer.fallbackLanguage
: cs.fallbackSpeechSynthesisLanguage;
cs.fallbackSpeechSynthesisVoice = this.synthesizer.fallbackVoice !== 'default'
? this.synthesizer.fallbackVoice
: cs.fallbackSpeechSynthesisVoice;
// new vendor is set, reset fallback vendor
cs.hasFallbackTts = false;
this.logger.info({synthesizer: this.synthesizer}, 'Config: updated synthesizer');
}
if (this.hasRecognizer) {
@@ -171,8 +159,9 @@ class TaskConfig extends Task {
cs.speechRecognizerVendor = this.recognizer.vendor !== 'default'
? this.recognizer.vendor
: cs.speechRecognizerVendor;
cs.speechRecognizerLabel = this.recognizer.label === 'default'
? cs.speechRecognizerLabel : this.recognizer.label;
cs.speechRecognizerLabel = this.recognizer.label !== 'default'
? this.recognizer.label
: cs.speechRecognizerLabel;
cs.speechRecognizerLanguage = this.recognizer.language !== 'default'
? this.recognizer.language
: cs.speechRecognizerLanguage;
@@ -181,9 +170,9 @@ class TaskConfig extends Task {
cs.fallbackSpeechRecognizerVendor = this.recognizer.fallbackVendor !== 'default'
? this.recognizer.fallbackVendor
: cs.fallbackSpeechRecognizerVendor;
cs.fallbackSpeechRecognizerLabel = this.recognizer.fallbackLabel === 'default' ?
cs.fallbackSpeechRecognizerLabel :
this.recognizer.fallbackLabel;
cs.fallbackSpeechRecognizerLabel = this.recognizer.fallbackLabel !== 'default'
? this.recognizer.fallbackLabel
: cs.fallbackSpeechRecognizerLabel;
cs.fallbackSpeechRecognizerLanguage = this.recognizer.fallbackLanguage !== 'default'
? this.recognizer.fallbackLanguage
: cs.fallbackSpeechRecognizerLanguage;
@@ -207,8 +196,6 @@ class TaskConfig extends Task {
if ('punctuation' in this.recognizer) {
cs.globalSttPunctuation = this.recognizer.punctuation;
}
// new vendor is set, reset fallback vendor
cs.hasFallbackAsr = false;
this.logger.info({
recognizer: this.recognizer,
isContinuousAsr: cs.isContinuousAsr
@@ -262,40 +249,9 @@ class TaskConfig extends Task {
cs.stopBackgroundTask('transcribe');
}
}
if (Object.keys(this.actionHookDelayAction).length !== 0) {
cs.actionHookDelayProperties = this.actionHookDelayAction;
}
if (this.data.sipRequestWithinDialogHook) {
cs.sipRequestWithinDialogHook = this.data.sipRequestWithinDialogHook;
}
if ('boostAudioSignal' in this.data) {
const db = parseDecibels(this.data.boostAudioSignal);
this.logger.info(`Config: boosting audio signal by ${db} dB`);
const args = [ep.uuid, 'setGain', db];
ep.api('uuid_dub', args).catch((err) => {
this.logger.error(err, 'Error boosting audio signal');
});
}
if (this.hasFillerNoise) {
const {enable, ...opts} = this.fillerNoise;
this.logger.info({fillerNoise: this.fillerNoise}, 'Config: fillerNoise');
if (!enable) cs.disableFillerNoise();
else {
cs.enableFillerNoise(opts);
}
}
if (this.hasVad) {
cs.vad = {
enable: this.vad.enable || false,
voiceMs: this.vad.voiceMs || 250,
silenceMs: this.vad.silenceMs || 150,
strategy: this.vad.strategy || 'one-shot',
mode: this.vad.mod || 2
};
}
}
async kill(cs) {

View File

@@ -73,8 +73,7 @@ class TaskDequeue extends Task {
try {
let url;
if (this.callSid) {
const r = await retrieveByPatternSortedSet(this.queueName, `*${this.callSid}`);
url = r[0];
url = await retrieveByPatternSortedSet(this.queueName, `*${this.callSid}`);
} else {
url = await retrieveFromSortedSet(this.queueName);
}

View File

@@ -14,7 +14,6 @@ const sessionTracker = require('../session/session-tracker');
const DtmfCollector = require('../utils/dtmf-collector');
const ConfirmCallSession = require('../session/confirm-call-session');
const dbUtils = require('../utils/db-utils');
const parseDecibels = require('../utils/parse-decibels');
const debug = require('debug')('jambonz:feature-server');
const {parseUri} = require('drachtio-srf');
const {ANCHOR_MEDIA_ALWAYS, JAMBONZ_DISABLE_DIAL_PAI_HEADER} = require('../config');
@@ -101,8 +100,6 @@ class TaskDial extends Task {
this.referHook = this.data.referHook;
this.dtmfHook = this.data.dtmfHook;
this.proxy = this.data.proxy;
this.tag = this.data.tag;
this.boostAudioSignal = this.data.boostAudioSignal;
if (this.dtmfHook) {
const {parentDtmfCollector, childDtmfCollector} = parseDtmfOptions(logger, this.data.dtmfCapture || {});
@@ -120,9 +117,6 @@ class TaskDial extends Task {
if (this.data.transcribe) {
this.transcribeTask = makeTask(logger, {'transcribe' : this.data.transcribe}, this);
}
if (this.data.dub && Array.isArray(this.data.dub) && this.data.dub.length > 0) {
this.dubTasks = this.data.dub.map((d) => makeTask(logger, {'dub': d}, this));
}
this.results = {};
this.bridged = false;
@@ -144,17 +138,15 @@ class TaskDial extends Task {
get name() { return TaskName.Dial; }
get isOnHoldEnabled() {
return !!this.data.onHoldHook;
get isOnHold() {
return this.isIncomingLegHold || this.isOutgoingLegHold;
}
get canReleaseMedia() {
const keepAnchor = this.data.anchorMedia ||
this.cs.isBackGroundListen ||
this.cs.onHoldMusic ||
ANCHOR_MEDIA_ALWAYS ||
this.listenTask ||
this.dubTasks ||
this.transcribeTask ||
this.startAmd;
@@ -557,9 +549,9 @@ class TaskDial extends Task {
const str = this.callerId || req.callingNumber || '';
const callingNumber = str.startsWith('+') ? str.substring(1) : str;
const voip_carrier_sid = await lookupCarrierByPhoneNumber(cs.accountSid, callingNumber);
this.logger.info(
`Dial:_attemptCalls: selected ${voip_carrier_sid} for requested phone number: ${callingNumber}`);
if (voip_carrier_sid) {
this.logger.info(
`Dial:_attemptCalls: selected voip_carrier_sid ${voip_carrier_sid} for callingNumber: ${callingNumber}`);
opts.headers['X-Requested-Carrier-Sid'] = voip_carrier_sid;
}
}
@@ -578,8 +570,7 @@ class TaskDial extends Task {
accountInfo: cs.accountInfo,
rootSpan: cs.rootSpan,
startSpan: this.startSpan.bind(this),
dialTask: this,
onHoldMusic: this.cs.onHoldMusic
dialTask: this
});
this.dials.set(sd.callSid, sd);
@@ -636,8 +627,6 @@ class TaskDial extends Task {
await this._connectSingleDial(cs, sd);
} catch (err) {
this.logger.info({err}, 'Dial:_attemptCalls - Error calling _connectSingleDial ');
sd.removeAllListeners();
this.kill(cs);
}
})
.on('decline', () => {
@@ -690,43 +679,22 @@ class TaskDial extends Task {
async _onReinvite(req, res) {
try {
let isHandled = false;
if (this.isOnHoldEnabled) {
if (isOnhold(req.body)) {
this.logger.debug('Dial: _onReinvite receive hold Request');
if (!this.epOther && !this.ep) {
this.logger.debug(`Dial: _onReinvite receive hold Request,
media already released, reconnect media server`);
// update caller leg for new SDP from callee.
await this.cs.handleReinviteAfterMediaReleased(req, res);
// Freeswitch media is released, reconnect
await this.reAnchorMedia(this.cs, this.sd);
this.isOutgoingLegHold = true;
} else {
this.logger.debug('Dial: _onReinvite receive hold Request, update SDP');
const newSdp = await this.ep.modify(req.body);
res.send(200, {body: newSdp});
}
if (this.cs.onHoldMusic) {
if (isOnhold(req.body) && !this.epOther && !this.ep) {
await this.cs.handleReinviteAfterMediaReleased(req, res);
// Onhold but media is already released
// reconnect A Leg and Response B leg
await this.reAnchorMedia(this.cs, this.sd);
this.isOutgoingLegHold = true;
isHandled = true;
// Media already connected, ask for onHoldHook
this._onHoldHook(req);
} else if (!isOnhold(req.body)) {
this.logger.debug('Dial: _onReinvite receive unhold Request');
if (this.epOther && this.ep && this.isOutgoingLegHold && this.canReleaseMedia) {
this.logger.debug('Dial: _onReinvite receive unhold Request, release media');
// Offhold, time to release media
const newSdp = await this.ep.modify(req.body);
await res.send(200, {body: newSdp});
await this._releaseMedia(this.cs, this.sd);
this.isOutgoingLegHold = false;
} else {
this.logger.debug('Dial: _onReinvite receive unhold Request, update media server');
const newSdp = await this.ep.modify(req.body);
res.send(200, {body: newSdp});
}
if (this._onHoldSession) {
this._onHoldSession.kill();
}
this._onHoldHook();
} else if (!isOnhold(req.body) && this.epOther && this.ep && this.isOutgoingLegHold && this.canReleaseMedia) {
// Offhold, time to release media
const newSdp = await this.ep.modify(req.body);
await res.send(200, {body: newSdp});
await this._releaseMedia(this.cs, this.sd);
isHandled = true;
this.isOutgoingLegHold = false;
}
}
if (!isHandled) {
@@ -787,17 +755,6 @@ class TaskDial extends Task {
dialCallSid: sd.callSid,
});
if (this.dubTasks) {
for (const dub of this.dubTasks) {
try {
await dub.exec(cs, {ep: sd.ep});
}
catch (err) {
this.logger.error({err}, 'Dial:_selectSingleDial - error executing dubTask');
}
}
}
if (this.parentDtmfCollector) this._installDtmfDetection(cs, cs.dlg);
if (this.childDtmfCollector) this._installDtmfDetection(cs, this.dlg);
if (cs.sipRequestWithinDialogHook) this._initSipIndialogRequestListener(cs, this.dlg);
@@ -812,18 +769,6 @@ class TaskDial extends Task {
}
}
/* boost audio signal if requested */
if (this.boostAudioSignal) {
try {
const db = parseDecibels(this.boostAudioSignal);
this.logger.info(`Dial: boosting audio signal by ${db} dB`);
const args = [this.ep.uuid, 'setGain', db];
await this.ep.api('uuid_dub', args);
} catch (err) {
this.logger.info({err}, 'Dial:_selectSingleDial - Error boosting audio signal');
}
}
/* if we can release the media back to the SBC, do so now */
if (this.canReleaseMedia) setTimeout(this._releaseMedia.bind(this, cs, sd), 200);
}
@@ -866,34 +811,23 @@ class TaskDial extends Task {
this.epOther = cs.ep;
}
// Handle RE-INVITE hold from caller leg.
async handleReinviteAfterMediaReleased(req, res) {
let isHandled = false;
if (this.isOnHoldEnabled) {
if (isOnhold(req.body)) {
if (!this.epOther && !this.ep) {
// update callee leg for new SDP from caller.
const sdp = await this.dlg.modify(req.body);
res.send(200, {body: sdp});
// Onhold but media is already released, reconnect
await this.reAnchorMedia(this.cs, this.sd);
isHandled = true;
this.isIncomingLegHold = true;
}
this._onHoldHook(req);
} else if (!isOnhold(req.body)) {
if (this.epOther && this.ep && this.isIncomingLegHold && this.canReleaseMedia) {
// Offhold, time to release media
const newSdp = await this.epOther.modify(req.body);
await res.send(200, {body: newSdp});
await this._releaseMedia(this.cs, this.sd);
isHandled = true;
}
this.isIncomingLegHold = false;
if (this._onHoldSession) {
this._onHoldSession.kill();
}
}
if (isOnhold(req.body) && !this.epOther && !this.ep) {
const sdp = await this.dlg.modify(req.body);
res.send(200, {body: sdp});
// Onhold but media is already released
await this.reAnchorMedia(this.cs, this.sd);
isHandled = true;
this.isIncomingLegHold = true;
this._onHoldHook();
} else if (!isOnhold(req.body) && this.epOther && this.ep && this.isIncomingLegHold && this.canReleaseMedia) {
// Offhold, time to release media
const newSdp = await this.epOther.modify(req.body);
await res.send(200, {body: newSdp});
await this._releaseMedia(this.cs, this.sd);
isHandled = true;
this.isIncomingLegHold = false;
}
if (!isHandled) {
@@ -912,7 +846,7 @@ class TaskDial extends Task {
});
}
async _onHoldHook(req, allowed = [TaskName.Play, TaskName.Say, TaskName.Pause]) {
async _onHoldHook(allowed = [TaskName.Play, TaskName.Say, TaskName.Pause]) {
if (this.data.onHoldHook) {
// send silence for keep Voice quality
await this.epOther.play('silence_stream://500');
@@ -922,13 +856,7 @@ class TaskDial extends Task {
const b3 = this.getTracingPropagation();
const httpHeaders = b3 && {b3};
const json = await this.cs.application.requestor.
request('verb:hook', this.data.onHoldHook, {
...this.cs.callInfo.toJSON(),
hold_detail: {
from: req.get('From'),
to: req.get('To')
}
}, httpHeaders);
request('verb:hook', this.data.onHoldHook, this.cs.callInfo.toJSON(), httpHeaders);
const tasks = normalizeJambones(this.logger, json).map((tdata) => makeTask(this.logger, tdata));
allowedTasks = tasks.filter((t) => allowed.includes(t.name));
if (tasks.length !== allowedTasks.length) {
@@ -937,7 +865,7 @@ class TaskDial extends Task {
}
this.logger.debug(`DialTask:_onHoldHook: executing ${tasks.length} tasks`);
if (tasks.length) {
this._onHoldSession = new ConfirmCallSession({
this._playSession = new ConfirmCallSession({
logger: this.logger,
application: this.cs.application,
dlg: this.isIncomingLegHold ? this.dlg : this.cs.dlg,
@@ -947,12 +875,12 @@ class TaskDial extends Task {
tasks,
rootSpan: this.cs.rootSpan
});
await this._onHoldSession.exec();
this._onHoldSession = null;
await this._playSession.exec();
this._playSession = null;
}
} catch (error) {
this.logger.info(error, 'DialTask:_onHoldHook: failed retrieving waitHook');
this._onHoldSession = null;
this._playSession = null;
break;
}
} while (allowedTasks && allowedTasks.length > 0 && !this.killed && this.isOnHold);

View File

@@ -1,144 +0,0 @@
const {TaskName} = require('../utils/constants');
const TtsTask = require('./tts-task');
const assert = require('assert');
const parseDecibels = require('../utils/parse-decibels');
/**
* Dub task: add or remove additional audio tracks into the call
*/
class TaskDub extends TtsTask {
/**
 * Create a dub task.
 * @param {object} logger - logger instance
 * @param {object} opts - verb properties; action and track are required
 * @param {Task} [parentTask] - enclosing task, when nested (e.g. under dial)
 * @throws {AssertionError} if action or track is missing
 */
constructor(logger, opts, parentTask) {
  super(logger, opts, parentTask);
  this.logger.debug({opts: this.data}, 'TaskDub constructor');
  // lift the supported verb properties onto the task instance
  ['action', 'track', 'play', 'say', 'loop'].forEach((prop) => {
    this[prop] = this.data[prop];
  });
  // normalize gain via parseDecibels (see ../utils/parse-decibels)
  this.gain = parseDecibels(this.data.gain);
  assert.ok(this.action, 'TaskDub: action is required');
  assert.ok(this.track, 'TaskDub: track is required');
}
get name() { return TaskName.Dub; }
/**
 * Execute the requested dub action against the given endpoint.
 * Errors are logged, not rethrown, so a failed dub does not kill the session.
 * @param {CallSession} cs - owning call session
 * @param {object} opts - destructured; ep is the media endpoint to act on
 */
async exec(cs, {ep}) {
  super.exec(cs);
  try {
    switch (this.action) {
      case 'addTrack':
        await this._addTrack(cs, ep);
        break;
      case 'removeTrack':
        await this._removeTrack(cs, ep);
        break;
      case 'silenceTrack':
        await this._silenceTrack(cs, ep);
        break;
      case 'playOnTrack':
        await this._playOnTrack(cs, ep);
        break;
      case 'sayOnTrack':
        await this._sayOnTrack(cs, ep);
        break;
      default:
        // caught below and logged; invalid actions are non-fatal
        throw new Error(`TaskDub: unsupported action ${this.action}`);
    }
  } catch (err) {
    this.logger.error(err, 'Error executing dub task');
  }
}
async _addTrack(cs, ep) {
this.logger.info(`adding track: ${this.track}`);
await ep.dub({
action: 'addTrack',
track: this.track
});
if (this.play) await this._playOnTrack(cs, ep);
else if (this.say) await this._sayOnTrack(cs, ep);
}
async _removeTrack(_cs, ep) {
this.logger.info(`removing track: ${this.track}`);
await ep.dub({
action: 'removeTrack',
track: this.track
});
}
async _silenceTrack(_cs, ep) {
this.logger.info(`silencing track: ${this.track}`);
await ep.dub({
action: 'silenceTrack',
track: this.track
});
}
async _playOnTrack(_cs, ep) {
this.logger.info(`playing on track: ${this.track}`);
await ep.dub({
action: 'playOnTrack',
track: this.track,
play: this.play,
loop: this.loop ? 'loop' : 'once',
gain: this.gain
});
}
async _sayOnTrack(cs, ep) {
const text = this.say.text || this.say;
this.synthesizer = this.say.synthesizer || {};
if (Object.keys(this.synthesizer).length) {
this.logger.info({synthesizer: this.synthesizer},
`saying on track ${this.track}: ${text} with synthesizer options`);
}
else {
this.logger.info(`saying on track ${this.track}: ${text}`);
}
this.synthesizer = this.synthesizer || {};
this.text = [text];
const vendor = this.synthesizer.vendor && this.synthesizer.vendor !== 'default' ?
this.synthesizer.vendor :
cs.speechSynthesisVendor;
const language = this.synthesizer.language && this.synthesizer.language !== 'default' ?
this.synthesizer.language :
cs.speechSynthesisLanguage ;
const voice = this.synthesizer.voice && this.synthesizer.voice !== 'default' ?
this.synthesizer.voice :
cs.speechSynthesisVoice;
const label = this.synthesizer.label && this.synthesizer.label !== 'default' ?
this.synthesizer.label :
cs.speechSynthesisLabel;
const disableTtsStreaming = false;
const filepath = await this._synthesizeWithSpecificVendor(cs, ep, {
vendor, language, voice, label, disableTtsStreaming
});
assert.ok(filepath.length === 1, 'TaskDub: no filepath returned from synthesizer');
const path = filepath[0];
if (!path.startsWith('say:{')) {
/* we have a local file of mp3 or r8 of synthesized speech audio to play */
this.logger.info(`playing synthesized speech from file on track ${this.track}: ${path}`);
this.play = path;
await this._playOnTrack(cs, ep);
}
else {
this.logger.info(`doing actual text to speech file on track ${this.track}: ${path}`);
await ep.dub({
action: 'sayOnTrack',
track: this.track,
say: path,
gain: this.gain
});
}
}
}
module.exports = TaskDub;

View File

@@ -338,7 +338,6 @@ class TaskEnqueue extends Task {
this.logger.error({err}, `TaskEnqueue:_playHook error retrieving list info for queue ${this.queueName}`);
}
const json = await cs.application.requestor.request('verb:hook', hook, params, httpHeaders);
this.logger.debug({json}, 'TaskEnqueue:_playHook: received response from waitHook');
const tasks = normalizeJambones(this.logger, json).map((tdata) => makeTask(this.logger, tdata));
const allowedTasks = tasks.filter((t) => allowed.includes(t.name));

View File

@@ -10,9 +10,7 @@ const {
IbmTranscriptionEvents,
NvidiaTranscriptionEvents,
JambonzTranscriptionEvents,
AssemblyAiTranscriptionEvents,
VadDetection,
VerbioTranscriptionEvents
AssemblyAiTranscriptionEvents
} = require('../utils/constants.json');
const {
JAMBONES_GATHER_EARLY_HINTS_MATCH,
@@ -29,13 +27,9 @@ class TaskGather extends SttTask {
[
'finishOnKey', 'input', 'numDigits', 'minDigits', 'maxDigits',
'interDigitTimeout', 'partialResultHook', 'bargein', 'dtmfBargein',
'speechTimeout', 'timeout', 'say', 'play', 'actionHookDelayAction', 'fillerNoise', 'vad'
'speechTimeout', 'timeout', 'say', 'play'
].forEach((k) => this[k] = this.data[k]);
// gather default input is digits
if (!this.input) {
this.input = ['digits'];
}
/* when collecting dtmf, bargein on dtmf is true unless explicitly set to false */
if (this.dtmfBargein !== false && this.input.includes('digits')) this.dtmfBargein = true;
@@ -43,8 +37,7 @@ class TaskGather extends SttTask {
this.timeout = this.timeout === 0 ? 0 : (this.timeout || 15) * 1000;
this.interim = !!this.partialResultHook || this.bargein || (this.timeout > 0);
this.listenDuringPrompt = this.data.listenDuringPrompt === false ? false : true;
this.minBargeinWordCount = this.data.minBargeinWordCount !== undefined ? this.data.minBargeinWordCount : 1;
this._vadEnabled = this.minBargeinWordCount === 0;
this.minBargeinWordCount = this.data.minBargeinWordCount || 1;
if (this.data.recognizer) {
/* continuous ASR (i.e. compile transcripts until a special timeout or dtmf key) */
this.asrTimeout = typeof this.data.recognizer.asrTimeout === 'number' ?
@@ -94,18 +87,6 @@ class TaskGather extends SttTask {
(this.playTask && this.playTask.earlyMedia);
}
get hasFillerNoise() {
return Object.keys(this.fillerNoise).length > 0 && this.fillerNoise.enabled !== false;
}
get fillerNoiseUrl() {
return this.fillerNoise.url;
}
get fillerNoiseStartDelaySecs() {
return this.fillerNoise.startDelaySecs;
}
get summary() {
let s = `${this.name}{`;
if (this.input.length === 2) s += 'inputs=[speech,digits],';
@@ -117,7 +98,6 @@ class TaskGather extends SttTask {
}
if (this.sayTask) s += ',with nested say task';
if (this.playTask) s += ',with nested play task';
if (this.actionHookDelayAction) s += ',with actionHookDelayAction';
s += '}';
return s;
}
@@ -127,16 +107,6 @@ class TaskGather extends SttTask {
await super.exec(cs, {ep});
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
this.fillerNoise = {
...(cs.fillerNoise || {}),
...(this.fillerNoise || {})
};
this.vad = {
...(cs.vad || {}),
...(this.vad || {})
};
if (cs.hasGlobalSttHints && !this.maskGlobalSttHints) {
const {hints, hintsBoost} = cs.globalSttHints;
const setOfHints = new Set((this.data.recognizer.hints || [])
@@ -164,24 +134,6 @@ class TaskGather extends SttTask {
this.interim = true;
this.logger.debug('Gather:exec - early hints match enabled');
}
// if we have actionHook delay, and the session does as well, stash the session config
if (this.actionHookDelayAction) {
if (cs.actionHookDelayProcessor) {
this.logger.debug('Gather:exec - stashing session-level ahd proprerties');
cs.stashActionHookDelayProperties();
}
cs.actionHookDelayProperties = this.actionHookDelayAction;
}
this._startVad();
const startDtmfListener = () => {
if (this.input.includes('digits') || this.dtmfBargein || this.asrDtmfTerminationDigit) {
ep.on('dtmf', this._onDtmf.bind(this, cs, ep));
}
};
const startListening = async(cs, ep) => {
this._startTimer();
if (this.isContinuousAsr && 0 === this.timeout) this._startAsrTimer();
@@ -195,7 +147,12 @@ class TaskGather extends SttTask {
this._startTranscribing(ep);
return updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid);
} catch (e) {
await this._startFallback(cs, ep, {error: e});
if (this.fallbackVendor && this.isHandledByPrimaryProvider) {
await this._fallback();
startListening(cs, ep);
} else {
this.logger.error({error: e}, 'error in initSpeech');
}
}
}
};
@@ -203,12 +160,13 @@ class TaskGather extends SttTask {
try {
if (this.sayTask) {
const {span, ctx} = this.startChildSpan(`nested:${this.sayTask.summary}`);
const process = () => {
this.sayTask.span = span;
this.sayTask.ctx = ctx;
this.sayTask.exec(cs, {ep}); // kicked off, _not_ waiting for it to complete
this.sayTask.on('playDone', (err) => {
span.end();
if (err) this.logger.error({err}, 'Gather:exec Error playing tts');
this.logger.debug('Gather: nested say task completed');
if (!this.listenDuringPrompt) {
startDtmfListener();
}
this._stopVad();
if (!this.killed) {
startListening(cs, ep);
if (this.input.includes('speech') && this.vendor === 'nuance' && this.listenDuringPrompt) {
@@ -218,27 +176,17 @@ class TaskGather extends SttTask {
});
}
}
};
this.sayTask.span = span;
this.sayTask.ctx = ctx;
this.sayTask.exec(cs, {ep}) // kicked off, _not_ waiting for it to complete
.catch((err) => {
process();
});
this.sayTask.on('playDone', (err) => {
span.end();
if (err) this.logger.error({err}, 'Gather:exec Error playing tts');
process();
});
}
else if (this.playTask) {
const {span, ctx} = this.startChildSpan(`nested:${this.playTask.summary}`);
const process = () => {
this.playTask.span = span;
this.playTask.ctx = ctx;
this.playTask.exec(cs, {ep}); // kicked off, _not_ waiting for it to complete
this.playTask.on('playDone', (err) => {
span.end();
if (err) this.logger.error({err}, 'Gather:exec Error playing url');
this.logger.debug('Gather: nested play task completed');
if (!this.listenDuringPrompt) {
startDtmfListener();
}
this._stopVad();
if (!this.killed) {
startListening(cs, ep);
if (this.input.includes('speech') && this.vendor === 'nuance' && this.listenDuringPrompt) {
@@ -248,17 +196,6 @@ class TaskGather extends SttTask {
});
}
}
};
this.playTask.span = span;
this.playTask.ctx = ctx;
this.playTask.exec(cs, {ep}) // kicked off, _not_ waiting for it to complete
.catch((err) => {
process();
});
this.playTask.on('playDone', (err) => {
span.end();
if (err) this.logger.error({err}, 'Gather:exec Error playing url');
process();
});
}
else {
@@ -271,22 +208,16 @@ class TaskGather extends SttTask {
if (this.input.includes('speech') && this.listenDuringPrompt) {
await this._setSpeechHandlers(cs, ep);
if (!this.resolved && !this.killed) {
this._startTranscribing(ep);
updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid)
.catch(() => {/*already logged error */});
}
else {
this.logger.info('Gather:exec - task was killed or resolved quickly, not starting transcription');
}
this._startTranscribing(ep);
updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid)
.catch(() => {/*already logged error */});
}
if (this.listenDuringPrompt) {
startDtmfListener();
if (this.input.includes('digits') || this.dtmfBargein || this.asrDtmfTerminationDigit) {
ep.on('dtmf', this._onDtmf.bind(this, cs, ep));
}
await this.awaitTaskDone();
this._killAudio(cs);
} catch (err) {
this.logger.error(err, 'TaskGather:exec error');
}
@@ -296,13 +227,11 @@ class TaskGather extends SttTask {
kill(cs) {
super.kill(cs);
this._killAudio(cs);
this._clearFillerNoiseTimer();
this.ep.removeAllListeners('dtmf');
clearTimeout(this.interDigitTimer);
this._clearAsrTimer();
this.playTask?.span.end();
this.sayTask?.span.end();
this._stopVad();
this._resolve('killed');
}
@@ -372,7 +301,7 @@ class TaskGather extends SttTask {
if (this.data.recognizer?.deepgramOptions?.shortUtterance) this.shortUtterance = true;
}
const opts = this.setChannelVarsForStt(this, this.sttCredentials, this.language, this.data.recognizer);
const opts = this.setChannelVarsForStt(this, this.sttCredentials, this.data.recognizer);
switch (this.vendor) {
case 'google':
this.bugname = `${this.bugname_prefix}google_transcribe`;
@@ -380,19 +309,23 @@ class TaskGather extends SttTask {
ep, GoogleTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
this.addCustomEventListener(
ep, GoogleTranscriptionEvents.EndOfUtterance, this._onEndOfUtterance.bind(this, cs, ep));
this.addCustomEventListener(
ep, GoogleTranscriptionEvents.VadDetected, this._onVadDetected.bind(this, cs, ep));
break;
case 'aws':
case 'polly':
this.bugname = `${this.bugname_prefix}aws_transcribe`;
this.addCustomEventListener(ep, AwsTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
this.addCustomEventListener(ep, AwsTranscriptionEvents.VadDetected, this._onVadDetected.bind(this, cs, ep));
break;
case 'microsoft':
this.bugname = `${this.bugname_prefix}azure_transcribe`;
this.addCustomEventListener(
ep, AzureTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
//this.addCustomEventListener(ep, AzureTranscriptionEvents.NoSpeechDetected,
//this._onNoSpeechDetected.bind(this, cs, ep));
this.addCustomEventListener(ep, AzureTranscriptionEvents.NoSpeechDetected,
this._onNoSpeechDetected.bind(this, cs, ep));
this.addCustomEventListener(ep, AzureTranscriptionEvents.VadDetected, this._onVadDetected.bind(this, cs, ep));
break;
case 'nuance':
this.bugname = `${this.bugname_prefix}nuance_transcribe`;
@@ -402,6 +335,8 @@ class TaskGather extends SttTask {
this._onStartOfSpeech.bind(this, cs, ep));
this.addCustomEventListener(ep, NuanceTranscriptionEvents.TranscriptionComplete,
this._onTranscriptionComplete.bind(this, cs, ep));
this.addCustomEventListener(ep, NuanceTranscriptionEvents.VadDetected,
this._onVadDetected.bind(this, cs, ep));
/* stall timers until prompt finishes playing */
if ((this.sayTask || this.playTask) && this.listenDuringPrompt) {
@@ -416,6 +351,9 @@ class TaskGather extends SttTask {
this.addCustomEventListener(ep, DeepgramTranscriptionEvents.Connect, this._onVendorConnect.bind(this, cs, ep));
this.addCustomEventListener(ep, DeepgramTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep));
/* if app sets deepgramOptions.utteranceEndMs they essentially want continuous asr */
if (opts.DEEPGRAM_SPEECH_UTTERANCE_END_MS) this.isContinuousAsr = true;
break;
case 'soniox':
@@ -424,12 +362,6 @@ class TaskGather extends SttTask {
ep, SonioxTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
break;
case 'verbio':
this.bugname = `${this.bugname_prefix}verbio_transcribe`;
this.addCustomEventListener(
ep, VerbioTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
break;
case 'cobalt':
this.bugname = `${this.bugname_prefix}cobalt_transcribe`;
this.addCustomEventListener(
@@ -477,6 +409,8 @@ class TaskGather extends SttTask {
this._onStartOfSpeech.bind(this, cs, ep));
this.addCustomEventListener(ep, NvidiaTranscriptionEvents.TranscriptionComplete,
this._onTranscriptionComplete.bind(this, cs, ep));
this.addCustomEventListener(ep, NvidiaTranscriptionEvents.VadDetected,
this._onVadDetected.bind(this, cs, ep));
/* I think nvidia has this (??) - stall timers until prompt finishes playing */
if ((this.sayTask || this.playTask) && this.listenDuringPrompt) {
@@ -553,8 +487,9 @@ class TaskGather extends SttTask {
this._clearTimer();
this._timeoutTimer = setTimeout(() => {
if (this.isContinuousAsr) this._startAsrTimer();
if (this.interDigitTimer) return; // let the inter-digit timer complete
else {
else if (this.interDigitTimeout <= 0 ||
this.digitBuffer.length < this.minDigits ||
this.needsStt && this.digitBuffer.length === 0) {
this._resolve(this.digitBuffer.length >= this.minDigits ? 'dtmf-num-digits' : 'timeout');
}
}, this.timeout);
@@ -564,9 +499,7 @@ class TaskGather extends SttTask {
if (this._timeoutTimer) {
clearTimeout(this._timeoutTimer);
this._timeoutTimer = null;
return true;
}
return false;
}
_startAsrTimer() {
@@ -575,25 +508,17 @@ class TaskGather extends SttTask {
this._clearAsrTimer();
this._asrTimer = setTimeout(() => {
this.logger.debug('_startAsrTimer - asr timer went off');
const evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language, this.vendor);
const evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language);
this._resolve(this._bufferedTranscripts.length > 0 ? 'speech' : 'timeout', evt);
}, this.asrTimeout);
this.logger.debug(`_startAsrTimer: set for ${this.asrTimeout}ms`);
}
_clearAsrTimer() {
if (this._asrTimer) {
this.logger.debug('_clearAsrTimer: asrTimer cleared');
clearTimeout(this._asrTimer);
}
if (this._asrTimer) clearTimeout(this._asrTimer);
this._asrTimer = null;
}
_hangupCall() {
this.logger.debug('_hangupCall');
this.cs.hangup();
}
_startFastRecognitionTimer(evt) {
assert(this.fastRecognitionTimeout > 0);
this._clearFastRecognitionTimer();
@@ -614,7 +539,7 @@ class TaskGather extends SttTask {
this._clearFinalAsrTimer();
this._finalAsrTimer = setTimeout(() => {
this.logger.debug('_startFinalAsrTimer - final asr timer went off');
const evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language, this.vendor);
const evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language);
this._resolve(this._bufferedTranscripts.length > 0 ? 'speech' : 'timeout', evt);
}, 1000);
this.logger.debug('_startFinalAsrTimer: set for 1 second');
@@ -625,65 +550,11 @@ class TaskGather extends SttTask {
this._finalAsrTimer = null;
}
_startVad() {
if (!this._vadStarted && this._vadEnabled) {
this.logger.debug('_startVad');
this.addCustomEventListener(this.ep, VadDetection.Detection, this._onVadDetected.bind(this, this.cs, this.ep));
this.ep?.startVadDetection(this.vad);
this._vadStarted = true;
}
}
_stopVad() {
if (this._vadStarted) {
this.logger.debug('_stopVad');
this.ep?.stopVadDetection(this.vad);
this.ep?.removeCustomEventListener(VadDetection.Detection, this._onVadDetected);
this._vadStarted = false;
}
}
_startFillerNoise() {
this.logger.debug('Gather:_startFillerNoise - playing filler noise');
this.ep?.play(this.fillerNoise.url);
this._fillerNoiseOn = true;
this.ep.once('playback-start', (evt) => {
if (evt.file === this.fillerNoise.url && !this._fillerNoiseOn) {
this.logger.info({evt}, 'Gather:_startFillerNoise - race condition - kill filler noise here');
this.ep.api('uuid_break', this.ep.uuid)
.catch((err) => this.logger.info(err, 'Error killing filler noise'));
return;
} else this.logger.debug({evt}, 'Gather:_startFillerNoise - playback started');
});
}
_startFillerNoiseTimer() {
this._clearFillerNoiseTimer();
this._fillerNoiseTimer = setTimeout(() => {
this.logger.debug('Gather:_startFillerNoiseTimer - playing filler noise');
this._startFillerNoise();
}, this.fillerNoise.startDelaySecs * 1000);
}
_clearFillerNoiseTimer() {
if (this._fillerNoiseTimer) clearTimeout(this._fillerNoiseTimer);
this._fillerNoiseTimer = null;
}
_killFillerNoise() {
if (this._fillerNoiseTimer) {
this.logger.debug('Gather:_killFillerNoise');
this.ep?.api('uuid_break', this.ep.uuid);
}
}
_killAudio(cs) {
if (this.hasFillerNoise || (!this.sayTask && !this.playTask && this.bargein)) {
if (this.ep?.connected && (!this.playComplete || this.hasFillerNoise)) {
if (!this.sayTask && !this.playTask && this.bargein) {
if (this.ep?.connected && !this.playComplete) {
this.logger.debug('Gather:_killAudio: killing playback of any audio');
this.playComplete = true;
this._fillerNoiseOn = false; // in a race, if we just started audio it may sneak through here
this.ep.api('uuid_break', this.ep.uuid)
.catch((err) => this.logger.info(err, 'Error killing audio'));
}
@@ -705,11 +576,11 @@ class TaskGather extends SttTask {
// make sure this is not a transcript from answering machine detection
const bugname = fsEvent.getHeader('media-bugname');
const finished = fsEvent.getHeader('transcription-session-finished');
this.logger.debug({evt, bugname, finished, vendor: this.vendor}, 'Gather:_onTranscription raw transcript');
this.logger.debug({evt, bugname, finished}, `Gather:_onTranscription for vendor ${this.vendor}`);
if (bugname && this.bugname !== bugname) return;
if (finished === 'true') return;
if (this.vendor === 'ibm' && evt?.state === 'listening') return;
if (this.vendor === 'deepgram' && evt.type === 'UtteranceEnd') {
/* we will only get this when we have set utterance_end_ms */
if (this._bufferedTranscripts.length === 0) {
@@ -717,21 +588,15 @@ class TaskGather extends SttTask {
}
else {
this.logger.debug('Gather:_onTranscription - got UtteranceEnd event from deepgram, return buffered transcript');
evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language, this.vendor);
evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language);
this._bufferedTranscripts = [];
this._resolve('speech', evt);
}
return;
}
if (this.vendor === 'deepgram' && evt.type === 'Metadata') {
this.logger.debug('Gather:_onTranscription - discarding Metadata event from deepgram');
return;
}
evt = this.normalizeTranscription(evt, this.vendor, 1, this.language,
this.shortUtterance, this.data.recognizer.punctuation);
//this.logger.debug({evt, bugname, finished, vendor: this.vendor}, 'Gather:_onTranscription normalized transcript');
if (evt.alternatives.length === 0) {
this.logger.info({evt}, 'TaskGather:_onTranscription - got empty transcript, continue listening');
return;
@@ -757,9 +622,7 @@ class TaskGather extends SttTask {
if (evt.is_final) {
if (evt.alternatives[0].transcript === '' && !this.callSession.callGone && !this.killed) {
emptyTranscript = true;
if (finished === 'true' &&
['microsoft', 'deepgram'].includes(this.vendor) &&
this._bufferedTranscripts.length === 0) {
if (finished === 'true' && ['microsoft', 'deepgram'].includes(this.vendor)) {
this.logger.debug({evt}, 'TaskGather:_onTranscription - got empty transcript from old gather, disregarding');
return;
}
@@ -793,7 +656,7 @@ class TaskGather extends SttTask {
this._clearTimer();
if (this._finalAsrTimer) {
this._clearFinalAsrTimer();
const evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language, this.vendor);
const evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language);
return this._resolve(this._bufferedTranscripts.length > 0 ? 'speech' : 'timeout', evt);
}
this._startAsrTimer();
@@ -802,59 +665,47 @@ class TaskGather extends SttTask {
if (!['soniox', 'aws', 'microsoft', 'deepgram'].includes(this.vendor)) this._startTranscribing(ep);
}
else {
/* this was removed to fix https://github.com/jambonz/jambonz-feature-server/issues/783 */
/*
if (this.bargein && (words + bufferedWords) < this.minBargeinWordCount) {
this.logger.debug({evt, words, bufferedWords},
'TaskGather:_onTranscription - final transcript but < min barge words');
if (!emptyTranscript) this._bufferedTranscripts.push(evt);
if (!['soniox', 'aws', 'microsoft', 'deepgram'].includes(this.vendor)) this._startTranscribing(ep);
this._bufferedTranscripts.push(evt);
this._startTranscribing(ep);
return;
}
else {
*/
if (this.vendor === 'soniox') {
/* compile transcripts into one */
this._sonioxTranscripts.push(evt.vendor.finalWords);
evt = this.compileSonioxTranscripts(this._sonioxTranscripts, 1, this.language);
this._sonioxTranscripts = [];
}
else if (this.vendor === 'deepgram') {
/* compile transcripts into one */
if (!emptyTranscript) this._bufferedTranscripts.push(evt);
if (this.vendor === 'soniox') {
/* compile transcripts into one */
this._sonioxTranscripts.push(evt.vendor.finalWords);
evt = this.compileSonioxTranscripts(this._sonioxTranscripts, 1, this.language);
this._sonioxTranscripts = [];
}
else if (this.vendor === 'deepgram') {
/* compile transcripts into one */
if (!emptyTranscript) this._bufferedTranscripts.push(evt);
if (this.data.recognizer?.deepgramOptions?.utteranceEndMs) {
this.logger.debug('TaskGather:_onTranscription - got speech_final waiting for UtteranceEnd event');
return;
}
this.logger.debug({evt}, 'TaskGather:_onTranscription - compiling deepgram transcripts');
evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language);
this._bufferedTranscripts = [];
this.logger.debug({evt}, 'TaskGather:_onTranscription - compiled deepgram transcripts');
}
/* deepgram can send an empty and final transcript; only if we have any buffered should we resolve */
if (this._bufferedTranscripts.length === 0) return;
evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language, this.vendor);
this._bufferedTranscripts = [];
/* here is where we return a final transcript */
this._resolve('speech', evt);
}
/* here is where we return a final transcript */
this._resolve('speech', evt);
/*}*/
}
}
else {
/* deepgram can send a non-final transcript but with words that are final, so we need to buffer */
let emptyTranscript = false;
if (this.vendor === 'deepgram') {
const originalEvent = evt.vendor.evt;
if (originalEvent.is_final && evt.alternatives[0].transcript !== '') {
this.logger.debug({evt}, 'Gather:_onTranscription - buffering a completed (partial) deepgram transcript');
this._bufferedTranscripts.push(evt);
}
if (evt.alternatives[0].transcript === '') emptyTranscript = true;
}
if (!emptyTranscript) {
if (this._clearTimer()) this._startTimer();
if (this.bargein && (words + bufferedWords) >= this.minBargeinWordCount) {
if (!this.playComplete) {
this.logger.debug({transcript: evt.alternatives[0].transcript}, 'killing audio due to speech');
this.emit('vad');
}
this._killAudio(cs);
this._clearTimer();
this._startTimer();
if (this.bargein && (words + bufferedWords) >= this.minBargeinWordCount) {
if (!this.playComplete) {
this.logger.debug({transcript: evt.alternatives[0].transcript}, 'killing audio due to speech');
this.emit('vad');
}
this._killAudio(cs);
}
if (this.fastRecognitionTimeout) {
this._startFastRecognitionTimer(evt);
@@ -872,9 +723,14 @@ class TaskGather extends SttTask {
this._sonioxTranscripts.push(evt.vendor.finalWords);
}
}
/* restart asr timer if we get a partial transcript */
if (this.isContinuousAsr) this._startAsrTimer();
/* deepgram can send a non-final transcript but with words that are final, so we need to buffer */
if (this.vendor === 'deepgram') {
const originalEvent = evt.vendor.evt;
if (originalEvent.is_final && evt.alternatives[0].transcript !== '') {
this.logger.debug({evt}, 'Gather:_onTranscription - buffering a completed (partial) deepgram transcript');
this._bufferedTranscripts.push(evt);
}
}
}
}
_onEndOfUtterance(cs, ep) {
@@ -906,9 +762,9 @@ class TaskGather extends SttTask {
_onTranscriptionComplete(cs, ep) {
this.logger.debug('TaskGather:_onTranscriptionComplete');
}
async _startFallback(cs, ep, evt) {
if (this.canFallback) {
async _onJambonzError(cs, ep, evt) {
this.logger.info({evt}, 'TaskGather:_onJambonzError');
if (this.isHandledByPrimaryProvider && this.fallbackVendor) {
ep.stopTranscription({
vendor: this.vendor,
bugname: this.bugname
@@ -916,35 +772,17 @@ class TaskGather extends SttTask {
.catch((err) => this.logger.error({err}, `Error stopping transcription for primary vendor ${this.vendor}`));
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
try {
this.logger.debug('gather:_startFallback');
this.notifyError({ msg: 'ASR error',
details:`STT Vendor ${this.vendor} error: ${evt.error || evt.reason}`, failover: 'in progress'});
await this._initFallback();
this._speechHandlersSet = false;
await this._setSpeechHandlers(cs, ep);
await this._fallback();
await this._initSpeech(cs, ep);
this._startTranscribing(ep);
updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid);
return true;
return;
} catch (error) {
this.logger.info({error}, `There is error while falling back to ${this.fallbackVendor}`);
this.notifyError({ msg: 'ASR error',
details:`STT Vendor ${this.vendor} error: ${evt.error || evt.reason}`, failover: 'not available'});
}
} else {
this.logger.debug('gather:_startFallback no condition for falling back');
this.notifyError({ msg: 'ASR error',
details:`STT Vendor ${this.vendor} error: ${evt.error || evt.reason}`, failover: 'not available'});
}
return false;
}
async _onJambonzError(cs, ep, evt) {
if (this.vendor === 'google' && evt.error_code === 0) {
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError - ignoring google error code 0');
return;
}
this.logger.info({evt}, 'TaskGather:_onJambonzError');
const {writeAlerts, AlertType} = cs.srf.locals;
if (this.vendor === 'nuance') {
const {code, error} = evt;
if (code === 404 && error === 'No speech') return this._resolve('timeout');
@@ -957,23 +795,17 @@ class TaskGather extends SttTask {
message: `Custom speech vendor ${this.vendor} error: ${evt.error}`,
vendor: this.vendor,
}).catch((err) => this.logger.info({err}, 'Error generating alert for jambonz custom connection failure'));
if (!(await this._startFallback(cs, ep, evt))) {
this.notifyTaskDone();
}
this.notifyError({msg: 'ASR error', details:`Custom speech vendor ${this.vendor} error: ${evt.error}`});
}
async _onVendorConnectFailure(cs, _ep, evt) {
_onVendorConnectFailure(cs, _ep, evt) {
super._onVendorConnectFailure(cs, _ep, evt);
if (!(await this._startFallback(cs, _ep, evt))) {
this.notifyTaskDone();
}
this.notifyTaskDone();
}
async _onVendorError(cs, _ep, evt) {
_onVendorError(cs, _ep, evt) {
super._onVendorError(cs, _ep, evt);
if (!(await this._startFallback(cs, _ep, evt))) {
this._resolve('stt-error', evt);
}
this._resolve('stt-error', evt);
}
_onVadDetected(cs, ep) {
@@ -982,10 +814,6 @@ class TaskGather extends SttTask {
this._killAudio(cs);
this.emit('vad');
}
if (this.vad?.strategy === 'one-shot') {
this.ep?.removeCustomEventListener(VadDetection.Detection, this._onVadDetected);
this._vadStarted = false;
}
}
_onNoSpeechDetected(cs, ep, evt, fsEvent) {
@@ -1004,38 +832,29 @@ class TaskGather extends SttTask {
async _resolve(reason, evt) {
this.logger.debug(`TaskGather:resolve with reason ${reason}`);
if (this.needsStt && this.ep && this.ep.connected) {
this.ep.stopTranscription({
vendor: this.vendor,
bugname: this.bugname
})
.catch((err) => {
if (this.resolved) return;
this.logger.error({err}, 'Error stopping transcription');
});
}
if (this.resolved) {
this.logger.debug('TaskGather:_resolve - already resolved');
return;
}
if (this.resolved) return;
this.resolved = true;
// If bargin is false and ws application return ack to verb:hook
// the gather should not play any audio
this._killAudio(this.cs);
// Clear dtmf events, to avoid any case can leak the listener, just clean it
this.ep.removeAllListeners('dtmf');
// Clear dtmf event
if (this.dtmfBargein) {
this.ep.removeAllListeners('dtmf');
}
clearTimeout(this.interDigitTimer);
this._clearTimer();
this._clearFastRecognitionTimer();
this._clearAsrTimer();
this._clearFinalAsrTimer();
this.span.setAttributes({
channel: 1,
'stt.resolve': reason,
'stt.result': JSON.stringify(evt)
});
if (this.needsStt && this.ep && this.ep.connected) {
this.ep.stopTranscription({
vendor: this.vendor,
bugname: this.bugname
})
.catch((err) => this.logger.error({err}, 'Error stopping transcription'));
}
if (this.callSession && this.callSession.callGone) {
this.logger.debug('TaskGather:_resolve - call is gone, not invoking web callback');
@@ -1043,76 +862,36 @@ class TaskGather extends SttTask {
return;
}
// action hook delay
if (this.cs.actionHookDelayProcessor) {
this.logger.debug('TaskGather:_resolve - actionHookDelayProcessor exists - starting it');
this.cs.actionHookDelayProcessor.start();
}
// TODO: remove and implement as actionHookDelay
if (this.hasFillerNoise && (reason.startsWith('dtmf') || reason.startsWith('speech'))) {
if (this.fillerNoiseStartDelaySecs > 0) {
this._startFillerNoiseTimer();
}
else {
this.logger.debug(`TaskGather:_resolve - playing filler noise: ${this.fillerNoiseUrl}`);
this._startFillerNoise();
}
}
let returnedVerbs = false;
try {
if (reason.startsWith('dtmf')) {
if (this.parentTask) this.parentTask.emit('dtmf', evt);
else {
this.emit('dtmf', evt);
returnedVerbs = await this.performAction({digits: this.digitBuffer, reason: 'dtmfDetected'});
await this.performAction({digits: this.digitBuffer, reason: 'dtmfDetected'});
}
}
else if (reason.startsWith('speech')) {
if (this.parentTask) this.parentTask.emit('transcription', evt);
else {
this.emit('transcription', evt);
this.logger.debug('TaskGather:_resolve - invoking performAction');
returnedVerbs = await this.performAction({speech: evt, reason: 'speechDetected'});
this.logger.debug({returnedVerbs}, 'TaskGather:_resolve - back from performAction');
await this.performAction({speech: evt, reason: 'speechDetected'});
}
}
else if (reason.startsWith('timeout')) {
if (this.parentTask) this.parentTask.emit('timeout', evt);
else {
this.emit('timeout', evt);
returnedVerbs = await this.performAction({reason: 'timeout'});
await this.performAction({reason: 'timeout'});
}
}
else if (reason.startsWith('stt-error')) {
if (this.parentTask) this.parentTask.emit('stt-error', evt);
else {
this.emit('stt-error', evt);
returnedVerbs = await this.performAction({reason: 'error', details: evt.error});
await this.performAction({reason: 'error', details: evt.error});
}
}
} catch (err) { /*already logged error*/ }
// Gather got response from hook, cancel actionHookDelay processing
this.logger.debug('TaskGather:_resolve - checking ahd');
if (this.cs.actionHookDelayProcessor) {
if (returnedVerbs) {
this.logger.debug('TaskGather:_resolve - got response from action hook, cancelling actionHookDelay');
await this.cs.actionHookDelayProcessor.stop();
if (this.actionHookDelayAction && !this.cs.popActionHookDelayProperties()) {
// no session level ahd was running when this task started, so clear it
this.cs.clearActionHookDelayProcessor();
this.logger.debug('TaskGather:_resolve - clear ahd');
}
}
else {
this.logger.debug('TaskGather:_resolve - no response from action hook, continue actionHookDelay');
}
}
this._clearFillerNoiseTimer();
this.notifyTaskDone();
}
}

View File

@@ -8,10 +8,6 @@ const DTMF_SPAN_NAME = 'dtmf';
class TaskListen extends Task {
constructor(logger, opts, parentTask) {
super(logger, opts);
/**
* @deprecated
* use bidirectionalAudio.enabled
*/
this.disableBidirectionalAudio = opts.disableBidirectionalAudio;
this.preconditions = TaskPreconditions.Endpoint;
@@ -29,15 +25,6 @@ class TaskListen extends Task {
this.results = {};
this.playAudioQueue = [];
this.isPlayingAudioFromQueue = false;
this.bidirectionalAudio = {
enabled: this.disableBidirectionalAudio === true ? false : true,
...(this.data['bidirectionalAudio']),
};
// From drachtio-version 3.0.40, forkAudioStart will send empty bugname, metadata together with
// bidirectionalAudio params that cause old version of freeswitch missunderstand between bugname and
// bidirectionalAudio params
this._bugname = 'audio_fork';
if (this.transcribe) this.transcribeTask = makeTask(logger, {'transcribe': opts.transcribe}, this);
}
@@ -136,6 +123,8 @@ class TaskListen extends Task {
ci,
this.metadata);
if (this.hook.auth) {
this.logger.debug({username: this.hook.auth.username, password: this.hook.auth.password},
'TaskListen:_startListening basic auth');
await this.ep.set({
'MOD_AUDIO_BASIC_AUTH_USERNAME': this.hook.auth.username,
'MOD_AUDIO_BASIC_AUTH_PASSWORD': this.hook.auth.password
@@ -146,8 +135,7 @@ class TaskListen extends Task {
mixType: this.mixType,
sampling: this.sampleRate,
...(this._bugname && {bugname: this._bugname}),
metadata,
bidirectionalAudio: this.bidirectionalAudio || {}
metadata
});
this.recordStartTime = moment();
if (this.maxLength) {
@@ -167,7 +155,7 @@ class TaskListen extends Task {
}
/* support bi-directional audio */
if (this.bidirectionalAudio.enabled) {
if (!this.disableBidirectionalAudio) {
ep.addCustomEventListener(ListenEvents.PlayAudio, this._onPlayAudio.bind(this, ep));
}
ep.addCustomEventListener(ListenEvents.KillAudio, this._onKillAudio.bind(this, ep));

View File

@@ -14,9 +14,6 @@ function makeTask(logger, obj, parent) {
}
validateVerb(name, data, logger);
switch (name) {
case TaskName.Answer:
const TaskAnswer = require('./answer');
return new TaskAnswer(logger, data, parent);
case TaskName.SipDecline:
const TaskSipDecline = require('./sip_decline');
return new TaskSipDecline(logger, data, parent);
@@ -44,9 +41,6 @@ function makeTask(logger, obj, parent) {
case TaskName.Dtmf:
const TaskDtmf = require('./dtmf');
return new TaskDtmf(logger, data, parent);
case TaskName.Dub:
const TaskDub = require('./dub');
return new TaskDub(logger, data, parent);
case TaskName.Enqueue:
const TaskEnqueue = require('./enqueue');
return new TaskEnqueue(logger, data, parent);

View File

@@ -1,4 +1,4 @@
const TtsTask = require('./tts-task');
const Task = require('./task');
const {TaskName, TaskPreconditions} = require('../utils/constants');
const pollySSMLSplit = require('polly-ssml-split');
@@ -23,15 +23,9 @@ const breakLengthyTextIfNeeded = (logger, text) => {
}
};
const parseTextFromSayString = (text) => {
const closingBraceIndex = text.indexOf('}');
if (closingBraceIndex === -1) return text;
return text.slice(closingBraceIndex + 1);
};
class TaskSay extends TtsTask {
class TaskSay extends Task {
constructor(logger, opts, parentTask) {
super(logger, opts, parentTask);
super(logger, opts);
this.preconditions = TaskPreconditions.Endpoint;
this.text = (Array.isArray(this.data.text) ? this.data.text : [this.data.text])
@@ -39,6 +33,10 @@ class TaskSay extends TtsTask {
.flat();
this.loop = this.data.loop || 1;
this.earlyMedia = this.data.earlyMedia === true || (parentTask && parentTask.earlyMedia);
this.synthesizer = this.data.synthesizer || {};
this.disableTtsCache = this.data.disableTtsCache;
this.options = this.synthesizer.options || {};
this.isHandledByPrimaryProvider = true;
}
@@ -62,11 +60,11 @@ class TaskSay extends TtsTask {
}
async _synthesizeWithSpecificVendor(cs, ep, {vendor, language, voice, label, preCache = false}) {
const {srf, accountSid:account_sid} = cs;
const {srf} = cs;
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, srf);
const {writeAlerts, AlertType, stats} = srf.locals;
const {synthAudio} = srf.locals.dbHelpers;
const engine = this.synthesizer.engine || cs.synthesizer?.engine || 'neural';
const engine = this.synthesizer.engine || 'standard';
const salt = cs.callSid;
let credentials = cs.getSpeechCredentials(vendor, 'tts', label);
@@ -99,23 +97,18 @@ class TaskSay extends TtsTask {
voice = this.options.voice_id || voice;
}
ep.set({
tts_engine: vendor.startsWith('custom:') ? 'custom' : vendor,
tts_voice: voice,
cache_speech_handles: !cs.currentTtsVendor || cs.currentTtsVendor === vendor ? 1 : 0,
}).catch((err) => this.logger.info({err}, 'Error setting tts_engine on endpoint'));
// set the current vendor on the call session
// If vendor is changed from the previous one, then reset the cache_speech_handles flag
cs.currentTtsVendor = vendor;
if (!preCache && !this._disableTracing) this.logger.info({vendor, language, voice, model}, 'TaskSay:exec');
if (!preCache) this.logger.info({vendor, language, voice, model}, 'TaskSay:exec');
try {
if (!credentials) {
writeAlerts({
account_sid,
account_sid: cs.accountSid,
alert_type: AlertType.TTS_NOT_PROVISIONED,
vendor
}).catch((err) => this.logger.info({err}, 'Error generating alert for no tts'));
this.notifyError({
msg: 'TTS error',
details:`No speech credentials provisioned for selected vendor ${vendor}`
});
throw new Error('no provisioned speech credentials for TTS');
}
// synthesize all of the text elements
@@ -127,17 +120,18 @@ class TaskSay extends TtsTask {
if (text.startsWith('silence_stream://')) return text;
/* otel: trace time for tts */
if (!preCache && !this._disableTracing) {
let otelSpan;
if (!preCache) {
const {span} = this.startChildSpan('tts-generation', {
'tts.vendor': vendor,
'tts.language': language,
'tts.voice': voice
});
this.otelSpan = span;
otelSpan = span;
}
try {
const {filePath, servedFromCache, rtt} = await synthAudio(stats, {
account_sid,
account_sid: cs.accountSid,
text,
vendor,
language,
@@ -147,46 +141,37 @@ class TaskSay extends TtsTask {
salt,
credentials,
options: this.options,
disableTtsCache : this.disableTtsCache,
preCache
disableTtsCache : this.disableTtsCache
});
if (!filePath.startsWith('say:')) {
this.logger.debug(`file ${filePath}, served from cache ${servedFromCache}`);
if (filePath) cs.trackTmpFile(filePath);
if (this.otelSpan) {
this.otelSpan.setAttributes({'tts.cached': servedFromCache});
this.otelSpan.end();
this.otelSpan = null;
}
if (!servedFromCache && !lastUpdated) {
lastUpdated = true;
updateSpeechCredentialLastUsed(credentials.speech_credential_sid).catch(() => {/* logged error */});
}
if (!servedFromCache && rtt && !preCache && !this._disableTracing) {
this.notifyStatus({
event: 'synthesized-audio',
vendor,
language,
characters: text.length,
elapsedTime: rtt
});
}
this.logger.debug(`file ${filePath}, served from cache ${servedFromCache}`);
if (filePath) cs.trackTmpFile(filePath);
if (!servedFromCache && !lastUpdated) {
lastUpdated = true;
updateSpeechCredentialLastUsed(credentials.speech_credential_sid)
.catch(() => {/*already logged error */});
}
else {
this.logger.debug('a streaming tts api will be used');
const modifiedPath = filePath.replace('say:{', `say:{session-uuid=${ep.uuid},`);
return modifiedPath;
if (otelSpan) otelSpan.setAttributes({'tts.cached': servedFromCache});
if (otelSpan) otelSpan.end();
if (!servedFromCache && rtt && !preCache) {
this.notifyStatus({
event: 'synthesized-audio',
vendor,
language,
characters: text.length,
elapsedTime: rtt
});
}
return filePath;
} catch (err) {
this.logger.info({err}, 'Error synthesizing tts');
if (this.otelSpan) this.otelSpan.end();
if (otelSpan) otelSpan.end();
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.TTS_FAILURE,
vendor,
detail: err.message
}).catch((err) => this.logger.info({err}, 'Error generating alert for tts failure'));
this.notifyError({msg: 'TTS error', details: err.message || err});
throw err;
}
};
@@ -201,27 +186,21 @@ class TaskSay extends TtsTask {
}
async exec(cs, {ep}) {
const {srf, accountSid:account_sid} = cs;
const {writeAlerts, AlertType} = srf.locals;
const {addFileToCache} = srf.locals.dbHelpers;
const engine = this.synthesizer.engine || cs.synthesizer?.engine || 'neural';
await super.exec(cs);
this.ep = ep;
let vendor = this.synthesizer.vendor && this.synthesizer.vendor !== 'default' ?
const vendor = this.synthesizer.vendor && this.synthesizer.vendor !== 'default' ?
this.synthesizer.vendor :
cs.speechSynthesisVendor;
let language = this.synthesizer.language && this.synthesizer.language !== 'default' ?
const language = this.synthesizer.language && this.synthesizer.language !== 'default' ?
this.synthesizer.language :
cs.speechSynthesisLanguage ;
let voice = this.synthesizer.voice && this.synthesizer.voice !== 'default' ?
const voice = this.synthesizer.voice && this.synthesizer.voice !== 'default' ?
this.synthesizer.voice :
cs.speechSynthesisVoice;
// label can be null/empty in synthesizer config, just use application level label if it's default
let label = this.synthesizer.label === 'default' ?
cs.speechSynthesisLabel :
this.synthesizer.label;
const label = this.synthesizer.label && this.synthesizer.label !== 'default' ?
this.synthesizer.label :
cs.speechSynthesisLabel;
const fallbackVendor = this.synthesizer.fallbackVendor && this.synthesizer.fallbackVendor !== 'default' ?
this.synthesizer.fallbackVendor :
@@ -232,24 +211,16 @@ class TaskSay extends TtsTask {
const fallbackVoice = this.synthesizer.fallbackVoice && this.synthesizer.fallbackVoice !== 'default' ?
this.synthesizer.fallbackVoice :
cs.fallbackSpeechSynthesisVoice;
// label can be null/empty in synthesizer config, just use application level label if it's default
const fallbackLabel = this.synthesizer.fallbackLabel === 'default' ?
cs.fallbackSpeechSynthesisLabel :
this.synthesizer.fallbackLabel;
const fallbackLabel = this.synthesizer.fallbackLabel && this.synthesizer.fallbackLabel !== 'default' ?
this.synthesizer.fallbackLabel :
cs.fallbackSpeechSynthesisLabel;
if (cs.hasFallbackTts) {
vendor = fallbackVendor;
language = fallbackLanguage;
voice = fallbackVoice;
label = fallbackLabel;
}
const startFallback = async(error) => {
if (fallbackVendor && this.isHandledByPrimaryProvider && !cs.hasFallbackTts) {
this.notifyError(
{ msg: 'TTS error', details:`TTS vendor ${vendor} error: ${error}`, failover: 'in progress'});
let filepath;
try {
filepath = await this._synthesizeWithSpecificVendor(cs, ep, {vendor, language, voice, label});
} catch (error) {
if (fallbackVendor && this.isHandledByPrimaryProvider) {
this.isHandledByPrimaryProvider = false;
cs.hasFallbackTts = true;
this.logger.info(`Synthesize error, fallback to ${fallbackVendor}`);
filepath = await this._synthesizeWithSpecificVendor(cs, ep,
{
@@ -259,94 +230,22 @@ class TaskSay extends TtsTask {
label: fallbackLabel
});
} else {
this.notifyError(
{ msg: 'TTS error', details:`TTS vendor ${vendor} error: ${error}`, failover: 'not available'});
throw error;
}
};
let filepath;
try {
filepath = await this._synthesizeWithSpecificVendor(cs, ep, {vendor, language, voice, label});
} catch (error) {
await startFallback(error);
}
this.notifyStatus({event: 'start-playback'});
while (!this.killed && (this.loop === 'forever' || this.loop--) && ep?.connected) {
while (!this.killed && (this.loop === 'forever' || this.loop--) && this.ep?.connected) {
let segment = 0;
while (!this.killed && segment < filepath.length) {
if (cs.isInConference) {
const {memberId, confName, confUuid} = cs;
await this.playToConfMember(ep, memberId, confName, confUuid, filepath[segment]);
await this.playToConfMember(this.ep, memberId, confName, confUuid, filepath[segment]);
}
else {
if (filepath[segment].startsWith('say:{')) {
const arr = /^say:\{.*\}\s*(.*)$/.exec(filepath[segment]);
if (arr) this.logger.debug(`Say:exec sending streaming tts request: ${arr[1].substring(0, 64)}..`);
}
else this.logger.debug(`Say:exec sending ${filepath[segment].substring(0, 64)}`);
ep.once('playback-start', (evt) => {
this.logger.debug({evt}, 'got playback-start');
if (this.otelSpan) {
this._addStreamingTtsAttributes(this.otelSpan, evt);
this.otelSpan.end();
this.otelSpan = null;
if (evt.variable_tts_cache_filename) cs.trackTmpFile(evt.variable_tts_cache_filename);
}
});
ep.once('playback-stop', (evt) => {
this.logger.debug({evt}, 'got playback-stop');
if (evt.variable_tts_error) {
writeAlerts({
account_sid,
alert_type: AlertType.TTS_FAILURE,
vendor,
detail: evt.variable_tts_error
}).catch((err) => this.logger.info({err}, 'Error generating alert for no tts'));
}
if (evt.variable_tts_cache_filename) {
const text = parseTextFromSayString(this.text[segment]);
addFileToCache(evt.variable_tts_cache_filename, {
account_sid,
vendor,
language,
voice,
engine,
text
}).catch((err) => this.logger.info({err}, 'Error adding file to cache'));
}
if (this._playResolve) {
evt.variable_tts_error ? this._playReject(new Error(evt.variable_tts_error)) : this._playResolve();
}
});
// wait for playback-stop event received to confirm if the playback is successful
this._playPromise = new Promise((resolve, reject) => {
this._playResolve = resolve;
this._playReject = reject;
});
this.logger.debug(`Say:exec sending command to play file ${filepath[segment]}`);
await ep.play(filepath[segment]);
try {
// wait for playback-stop event received to confirm if the playback is successful
await this._playPromise;
} catch (err) {
try {
await startFallback(err);
continue;
} catch (err) {
this.logger.info({err}, 'Error waiting for playback-stop event');
}
} finally {
this._playPromise = null;
this._playResolve = null;
this._playReject = null;
}
if (filepath[segment].startsWith('say:{')) {
const arr = /^say:\{.*\}\s*(.*)$/.exec(filepath[segment]);
if (arr) this.logger.debug(`Say:exec complete playing streaming tts request: ${arr[1].substring(0, 64)}..`);
} else {
// This log will print spech credentials in say command for tts stream mode
this.logger.debug(`Say:exec completed play file ${filepath[segment]}`);
}
this.logger.debug(`Say:exec completed play file ${filepath[segment]}`);
}
segment++;
}
@@ -356,7 +255,7 @@ class TaskSay extends TtsTask {
async kill(cs) {
super.kill(cs);
if (this.ep?.connected) {
if (this.ep.connected) {
this.logger.debug('TaskSay:kill - killing audio');
if (cs.isInConference) {
const {memberId, confName} = cs;
@@ -366,75 +265,8 @@ class TaskSay extends TtsTask {
this.notifyStatus({event: 'kill-playback'});
this.ep.api('uuid_break', this.ep.uuid);
}
this.ep.removeAllListeners('playback-start');
this.ep.removeAllListeners('playback-stop');
// if we are waiting for playback-stop event, resolve the promise
if (this._playResolve) this._playResolve();
}
}
_addStreamingTtsAttributes(span, evt) {
const attrs = {'tts.cached': false};
for (const [key, value] of Object.entries(evt)) {
if (key.startsWith('variable_tts_')) {
let newKey = key.substring('variable_tts_'.length)
.replace('whisper_', 'whisper.')
.replace('deepgram_', 'deepgram.')
.replace('playht_', 'playht.')
.replace('rimelabs_', 'rimelabs.')
.replace('verbio_', 'verbio.')
.replace('elevenlabs_', 'elevenlabs.');
if (spanMapping[newKey]) newKey = spanMapping[newKey];
attrs[newKey] = value;
}
}
delete attrs['cache_filename']; //no value in adding this to the span
span.setAttributes(attrs);
}
}
const spanMapping = {
// IMPORTANT!!! JAMBONZ WEBAPP WILL SHOW TEXT PERFECTLY IF THE SPAN NAME IS SMALLER OR EQUAL 25 CHARACTERS.
// EX: whisper.ratelim_reqs has length 20 <= 25 which is perfect
// Elevenlabs
'elevenlabs.reported_latency_ms': 'elevenlabs.latency_ms',
'elevenlabs.request_id': 'elevenlabs.req_id',
'elevenlabs.history_item_id': 'elevenlabs.item_id',
'elevenlabs.optimize_streaming_latency': 'elevenlabs.optimization',
'elevenlabs.name_lookup_time_ms': 'name_lookup_ms',
'elevenlabs.connect_time_ms': 'connect_ms',
'elevenlabs.final_response_time_ms': 'final_response_ms',
// Whisper
'whisper.reported_latency_ms': 'whisper.latency_ms',
'whisper.request_id': 'whisper.req_id',
'whisper.reported_organization': 'whisper.organization',
'whisper.reported_ratelimit_requests': 'whisper.ratelimit',
'whisper.reported_ratelimit_remaining_requests': 'whisper.ratelimit_remain',
'whisper.reported_ratelimit_reset_requests': 'whisper.ratelimit_reset',
'whisper.name_lookup_time_ms': 'name_lookup_ms',
'whisper.connect_time_ms': 'connect_ms',
'whisper.final_response_time_ms': 'final_response_ms',
// Deepgram
'deepgram.request_id': 'deepgram.req_id',
'deepgram.reported_model_name': 'deepgram.model_name',
'deepgram.reported_model_uuid': 'deepgram.model_uuid',
'deepgram.reported_char_count': 'deepgram.char_count',
'deepgram.name_lookup_time_ms': 'name_lookup_ms',
'deepgram.connect_time_ms': 'connect_ms',
'deepgram.final_response_time_ms': 'final_response_ms',
// Playht
'playht.request_id': 'playht.req_id',
'playht.name_lookup_time_ms': 'name_lookup_ms',
'playht.connect_time_ms': 'connect_ms',
'playht.final_response_time_ms': 'final_response_ms',
// Rimelabs
'rimelabs.name_lookup_time_ms': 'name_lookup_ms',
'rimelabs.connect_time_ms': 'connect_ms',
'rimelabs.final_response_time_ms': 'final_response_ms',
// verbio
'verbio.name_lookup_time_ms': 'name_lookup_ms',
'verbio.connect_time_ms': 'connect_ms',
'verbio.final_response_time_ms': 'final_response_ms',
};
module.exports = TaskSay;

View File

@@ -33,8 +33,7 @@ class SttTask extends Task {
//fallback
this.fallbackVendor = recognizer.fallbackVendor || 'default';
this.fallbackLanguage = recognizer.fallbackLanguage || 'default';
// label can be empty and should not have default value.
this.fallbackLabel = recognizer.fallbackLabel;
this.fallbackLabel = recognizer.fallbackLabel || 'default';
/* let credentials be supplied in the recognizer object at runtime */
this.sttCredentials = setSpeechCredentialsAtRuntime(recognizer);
@@ -57,20 +56,24 @@ class SttTask extends Task {
super.exec(cs);
this.ep = ep;
this.ep2 = ep2;
// use session preferences if we don't have specific verb-level settings.
// copy all value from config verb to this object.
if (cs.recognizer) {
for (const k in cs.recognizer) {
const newValue = this.data.recognizer && this.data.recognizer[k] !== undefined ?
this.data.recognizer[k] :
cs.recognizer[k];
if (Array.isArray(newValue)) {
this.data.recognizer[k] = [...(this.data.recognizer[k] || []), ...cs.recognizer[k]];
} else if (typeof newValue === 'object' && newValue !== null) {
this.data.recognizer[k] = { ...(this.data.recognizer[k] || {}), ...cs.recognizer[k] };
if (Array.isArray(this.data.recognizer[k]) ||
Array.isArray(cs.recognizer[k])) {
this.data.recognizer[k] = [
...this.data.recognizer[k],
...cs.recognizer[k]
];
} else if (typeof this.data.recognizer[k] === 'object' ||
typeof cs.recognizer[k] === 'object'
) {
this.data.recognizer[k] = {
...this.data.recognizer[k],
...cs.recognizer[k]
};
} else {
this.data.recognizer[k] = newValue;
this.data.recognizer[k] = cs.recognizer[k] || this.data.recognizer[k];
}
}
}
@@ -82,8 +85,7 @@ class SttTask extends Task {
this.language = cs.speechRecognizerLanguage;
if (this.data.recognizer) this.data.recognizer.language = this.language;
}
// label can be empty, should not assign application level label
if ('default' === this.label) {
if ('default' === this.label || !this.label) {
this.label = cs.speechRecognizerLabel;
if (this.data.recognizer) this.data.recognizer.label = this.label;
}
@@ -96,18 +98,10 @@ class SttTask extends Task {
this.fallbackLanguage = cs.fallbackSpeechRecognizerLanguage;
if (this.data.recognizer) this.data.recognizer.fallbackLanguage = this.fallbackLanguage;
}
// label can be empty, should not assign application level label
if ('default' === this.fallbackLabel) {
if ('default' === this.fallbackLabel || !this.fallbackLabel) {
this.fallbackLabel = cs.fallbackSpeechRecognizerLabel;
if (this.data.recognizer) this.data.recognizer.fallbackLabel = this.fallbackLabel;
}
// If call is already fallback to 2nd ASR vendor
// use that.
if (cs.hasFallbackAsr) {
this.vendor = this.fallbackVendor;
this.language = this.fallbackLanguage;
this.label = this.fallbackLabel;
}
if (!this.data.recognizer.vendor) {
this.data.recognizer.vendor = this.vendor;
}
@@ -116,28 +110,13 @@ class SttTask extends Task {
this.data.recognizer.model = cs.speechRecognizerLanguage;
}
if (
// not gather task, such as transcribe
(!this.input ||
// gather task with speech
this.input.includes('speech')) &&
!this.sttCredentials) {
if (!this.sttCredentials) {
try {
this.sttCredentials = await this._initSpeechCredentials(this.cs, this.vendor, this.label);
} catch (error) {
if (this.canFallback) {
this.notifyError(
{
msg: 'ASR error', details:`Invalid vendor ${this.vendor}, Error: ${error}`,
failover: 'in progress'
});
await this._initFallback();
if (this.fallbackVendor && this.isHandledByPrimaryProvider) {
await this._fallback();
} else {
this.notifyError(
{
msg: 'ASR error', details:`Invalid vendor ${this.vendor}, Error: ${error}`,
failover: 'not available'
});
throw error;
}
}
@@ -169,7 +148,7 @@ class SttTask extends Task {
}
async _initSpeechCredentials(cs, vendor, label) {
const {getNuanceAccessToken, getIbmAccessToken, getAwsAuthToken, getVerbioAccessToken} = cs.srf.locals.dbHelpers;
const {getNuanceAccessToken, getIbmAccessToken} = cs.srf.locals.dbHelpers;
let credentials = cs.getSpeechCredentials(vendor, 'stt', label);
if (!credentials) {
@@ -180,6 +159,11 @@ class SttTask extends Task {
alert_type: AlertType.STT_NOT_PROVISIONED,
vendor
}).catch((err) => this.logger.info({err}, 'Error generating alert for no stt'));
// Notify application that STT vender is wrong.
this.notifyError({
msg: 'ASR error',
details: `No speech-to-text service credentials for ${vendor} have been configured`
});
this.notifyTaskDone();
throw new Error(`No speech-to-text service credentials for ${vendor} have been configured`);
}
@@ -197,33 +181,13 @@ class SttTask extends Task {
const {access_token, servedFromCache} = await getIbmAccessToken(stt_api_key);
this.logger.debug({stt_api_key}, `got ibm access token ${servedFromCache ? 'from cache' : ''}`);
credentials = {...credentials, access_token, stt_region};
} else if (['aws', 'polly'].includes(vendor) && credentials.roleArn) {
/* get aws access token */
const {roleArn, region} = credentials;
const {accessKeyId, secretAccessKey, sessionToken, servedFromCache} =
await getAwsAuthToken({
region,
roleArn
});
this.logger.debug({roleArn}, `got aws access token ${servedFromCache ? 'from cache' : ''}`);
credentials = {...credentials, accessKeyId, secretAccessKey, sessionToken};
} else if (vendor === 'verbio' && credentials.client_id && credentials.client_secret) {
const {access_token, servedFromCache} = await getVerbioAccessToken(credentials);
this.logger.debug({client_id: credentials.client_id},
`got verbio access token ${servedFromCache ? 'from cache' : ''}`);
credentials.access_token = access_token;
}
return credentials;
}
get canFallback() {
return this.fallbackVendor && this.isHandledByPrimaryProvider && !this.cs.hasFallbackAsr;
}
async _initFallback() {
async _fallback() {
assert(this.fallbackVendor, 'fallback failed without fallbackVendor configuration');
this.isHandledByPrimaryProvider = false;
this.cs.hasFallbackAsr = true;
this.logger.info(`Failed to use primary STT provider, fallback to ${this.fallbackVendor}`);
this.vendor = this.fallbackVendor;
this.language = this.fallbackLanguage;
@@ -232,8 +196,6 @@ class SttTask extends Task {
this.data.recognizer.language = this.language;
this.data.recognizer.label = this.label;
this.sttCredentials = await this._initSpeechCredentials(this.cs, this.vendor, this.label);
// cleanup previous listener from previous vendor
this.removeCustomEventListeners();
}
async compileHintsForCobalt(ep, hostport, model, token, hints) {
@@ -277,20 +239,6 @@ class SttTask extends Task {
_doContinuousAsrWithDeepgram(asrTimeout) {
/* deepgram has an utterance_end_ms property that simplifies things */
assert(this.vendor === 'deepgram');
if (asrTimeout < 1000) {
this.notifyError({
msg: 'ASR error',
details:`asrTimeout ${asrTimeout} is too short for deepgram; setting it to 1000ms`
});
asrTimeout = 1000;
}
else if (asrTimeout > 5000) {
this.notifyError({
msg: 'ASR error',
details:`asrTimeout ${asrTimeout} is too long for deepgram; setting it to 5000ms`
});
asrTimeout = 5000;
}
this.logger.debug(`_doContinuousAsrWithDeepgram - setting utterance_end_ms to ${asrTimeout}`);
const dgOptions = this.data.recognizer.deepgramOptions = this.data.recognizer.deepgramOptions || {};
dgOptions.utteranceEndMs = dgOptions.utteranceEndMs || asrTimeout;
@@ -310,6 +258,7 @@ class SttTask extends Task {
detail: evt.error,
vendor: this.vendor,
}).catch((err) => this.logger.info({err}, `Error generating alert for ${this.vendor} connection failure`));
this.notifyError({msg: 'ASR error', details:`Failed connecting to speech vendor ${this.vendor}: ${evt.error}`});
}
_onVendorConnectFailure(cs, _ep, evt) {
@@ -322,6 +271,7 @@ class SttTask extends Task {
message: `Failed connecting to ${this.vendor} speech recognizer: ${reason}`,
vendor: this.vendor,
}).catch((err) => this.logger.info({err}, `Error generating alert for ${this.vendor} connection failure`));
this.notifyError({msg: 'ASR error', details:`Failed connecting to speech vendor ${this.vendor}: ${reason}`});
}
}

View File

@@ -45,10 +45,6 @@ class Task extends Emitter {
return this.name;
}
set disableTracing(val) {
this._disableTracing = val;
}
toJSON() {
return this.data;
}
@@ -164,33 +160,15 @@ class Task extends Emitter {
const httpHeaders = b3 && {b3};
span.setAttributes({'http.body': JSON.stringify(params)});
try {
if (this.id) params.verb_id = this.id;
const json = await this.cs.requestor.request(type, this.actionHook, params, httpHeaders);
span.setAttributes({'http.statusCode': 200});
const isWsConnection = this.cs.requestor instanceof WsRequestor;
if (!isWsConnection || (expectResponse && json && Array.isArray(json) && json.length)) {
span.end();
} else {
/** we use this span to measure application response latency,
* and with websocket connections we generally get the application's response
* in a subsequent message from the far end, so we terminate the span when the
* first new set of verbs arrive after sending a transcript
* */
this.emit('VerbHookSpanWaitForEnd', {span});
// If actionHook delay action is configured, and ws application have not responded yet any verb for actionHook
// We have to transfer the task to call-session to await on next ws command verbs, and also run action Hook
// delay actions
//if (this.hookDelayActionOpts) {
// this.emit('ActionHookDelayActionOptions', this.hookDelayActionOpts);
//}
}
span.end();
if (expectResponse && json && Array.isArray(json)) {
const makeTask = require('./make_task');
const tasks = normalizeJambones(this.logger, json).map((tdata) => makeTask(this.logger, tdata));
if (tasks && tasks.length > 0) {
this.logger.info({tasks: tasks}, `${this.name} replacing application with ${tasks.length} tasks`);
this.callSession.replaceApplication(tasks);
return true;
}
}
} catch (err) {
@@ -198,7 +176,6 @@ class Task extends Emitter {
span.end();
throw err;
}
return false;
}
}
@@ -278,7 +255,6 @@ class Task extends Emitter {
delete obj.requestor;
delete obj.notifier;
obj.tasks = cs.getRemainingTaskData();
obj.callInfo = cs.callInfo.toJSON();
if (opts && obj.tasks.length > 0) {
const key = Object.keys(obj.tasks[0])[0];
Object.assign(obj.tasks[0][key], {_: opts});

View File

@@ -31,25 +31,6 @@ class TaskTranscribe extends SttTask {
this.separateRecognitionPerChannel = this.data.recognizer.separateRecognitionPerChannel;
}
/* for nested transcribe in dial, unless the app explicitly says so we want to transcribe both legs */
if (this.parentTask?.name === TaskName.Dial) {
if (this.data.channel === 1 || this.data.channel === 2) {
/* transcribe only the channel specified */
this.separateRecognitionPerChannel = false;
this.channel = this.data.channel;
logger.debug(`TaskTranscribe: transcribing only channel ${this.channel} in the Dial verb`);
}
else if (this.separateRecognitionPerChannel !== false) {
this.separateRecognitionPerChannel = true;
}
else {
this.channel = 1;
}
}
else {
this.channel = 1;
}
this.childSpan = [null, null];
// Continuous asr timeout
@@ -58,21 +39,12 @@ class TaskTranscribe extends SttTask {
this.isContinuousAsr = true;
}
/* buffer speech for continuous asr */
this._bufferedTranscripts = [ [], [] ]; // for channel 1 and 2
this._bufferedTranscripts = [];
this.bugname_prefix = 'transcribe_';
this.paused = false;
}
get name() { return TaskName.Transcribe; }
get transcribing1() {
return this.channel === 1 || this.separateRecognitionPerChannel;
}
get transcribing2() {
return this.channel === 2 || this.separateRecognitionPerChannel && this.ep2;
}
async exec(cs, {ep, ep2}) {
await super.exec(cs, {ep, ep2});
@@ -95,30 +67,25 @@ class TaskTranscribe extends SttTask {
}
try {
if (this.transcribing1) {
await this._startTranscribing(cs, ep, 1);
}
if (this.transcribing2) {
await this._startTranscribing(cs, ep, 1);
if (this.separateRecognitionPerChannel && ep2) {
await this._startTranscribing(cs, ep2, 2);
}
updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid)
.catch(() => {/*already logged error */});
await this.awaitTaskDone();
} catch (err) {
if (!(await this._startFallback(cs, ep, {error: err}))) {
this.logger.info(err, 'TaskTranscribe:exec - error');
this.parentTask && this.parentTask.emit('error', err);
this.removeCustomEventListeners();
return;
}
this.logger.info(err, 'TaskTranscribe:exec - error');
this.parentTask && this.parentTask.emit('error', err);
}
await this.awaitTaskDone();
this.removeCustomEventListeners();
}
async _stopTranscription() {
let stopTranscription = false;
if (this.transcribing1 && this.ep?.connected) {
if (this.ep?.connected) {
stopTranscription = true;
this.ep.stopTranscription({
vendor: this.vendor,
@@ -126,9 +93,9 @@ class TaskTranscribe extends SttTask {
})
.catch((err) => this.logger.info(err, 'Error TaskTranscribe:kill'));
}
if (this.transcribing2 && this.ep2?.connected) {
if (this.separateRecognitionPerChannel && this.ep2 && this.ep2.connected) {
stopTranscription = true;
this.ep2.stopTranscription({vendor: this.vendor, bugname: this.bugname})
this.ep2.stopTranscription({vendor: this.vendor})
.catch((err) => this.logger.info(err, 'Error TaskTranscribe:kill'));
}
@@ -150,13 +117,13 @@ class TaskTranscribe extends SttTask {
this.logger.info(`TaskTranscribe:updateTranscribe status ${status}`);
switch (status) {
case TranscribeStatus.Pause:
this.paused = true;
await this._stopTranscription();
break;
case TranscribeStatus.Resume:
this.paused = false;
if (this.transcribing1) await this._startTranscribing(this.cs, this.ep, 1);
if (this.transcribing2) await this._startTranscribing(this.cs, this.ep2, 2);
await this._startTranscribing(this.cs, this.ep, 1);
if (this.separateRecognitionPerChannel && this.ep2) {
await this._startTranscribing(this.cs, this.ep2, 2);
}
break;
}
}
@@ -171,7 +138,7 @@ class TaskTranscribe extends SttTask {
if (this.isContinuousAsr) this._doContinuousAsrWithDeepgram(this.asrTimeout);
}
const opts = this.setChannelVarsForStt(this, this.sttCredentials, this.language, this.data.recognizer);
const opts = this.setChannelVarsForStt(this, this.sttCredentials, this.data.recognizer);
switch (this.vendor) {
case 'google':
this.bugname = `${this.bugname_prefix}google_transcribe`;
@@ -197,8 +164,8 @@ class TaskTranscribe extends SttTask {
this.bugname = `${this.bugname_prefix}azure_transcribe`;
this.addCustomEventListener(ep, AzureTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
//this.addCustomEventListener(ep, AzureTranscriptionEvents.NoSpeechDetected,
// this._onNoAudio.bind(this, cs, ep, channel));
this.addCustomEventListener(ep, AzureTranscriptionEvents.NoSpeechDetected,
this._onNoAudio.bind(this, cs, ep, channel));
break;
case 'nuance':
this.bugname = `${this.bugname_prefix}nuance_transcribe`;
@@ -215,7 +182,7 @@ class TaskTranscribe extends SttTask {
this._onVendorConnectFailure.bind(this, cs, ep, channel));
/* if app sets deepgramOptions.utteranceEndMs they essentially want continuous asr */
//if (opts.DEEPGRAM_SPEECH_UTTERANCE_END_MS) this.isContinuousAsr = true;
if (opts.DEEPGRAM_SPEECH_UTTERANCE_END_MS) this.isContinuousAsr = true;
break;
case 'soniox':
@@ -319,7 +286,7 @@ class TaskTranscribe extends SttTask {
vendor: this.vendor,
interim: this.interim ? true : false,
locale: this.language,
channels: 1,
channels: /*this.separateRecognitionPerChannel ? 2 : */ 1,
bugname: this.bugname,
hostport: this.hostport
});
@@ -328,32 +295,20 @@ class TaskTranscribe extends SttTask {
async _onTranscription(cs, ep, channel, evt, fsEvent) {
// make sure this is not a transcript from answering machine detection
const bugname = fsEvent.getHeader('media-bugname');
const finished = fsEvent.getHeader('transcription-session-finished');
const bufferedTranscripts = this._bufferedTranscripts[channel - 1];
if (bugname && this.bugname !== bugname) return;
if (this.paused) {
this.logger.debug({evt}, 'TaskTranscribe:_onTranscription - paused, ignoring transcript');
}
if (this.vendor === 'ibm' && evt?.state === 'listening') return;
if (this.vendor === 'deepgram' && evt.type === 'UtteranceEnd') {
/* we will only get this when we have set utterance_end_ms */
/* DH: send a speech event when we get UtteranceEnd if they want interim events */
if (this.interim) {
this.logger.debug('Gather:_onTranscription - got UtteranceEnd event from deepgram, sending speech event');
this._resolve(channel, evt);
}
if (bufferedTranscripts.length === 0) {
if (this._bufferedTranscripts.length === 0) {
this.logger.debug('Gather:_onTranscription - got UtteranceEnd event from deepgram but no buffered transcripts');
}
else {
this.logger.debug('Gather:_onTranscription - got UtteranceEnd event from deepgram, return buffered transcript');
evt = this.consolidateTranscripts(bufferedTranscripts, channel, this.language, this.vendor);
evt.is_final = true;
this._bufferedTranscripts[channel - 1] = [];
this._resolve(channel, evt);
evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language);
this._bufferedTranscripts = [];
this._resolve('speech', evt);
}
return;
}
@@ -367,122 +322,55 @@ class TaskTranscribe extends SttTask {
return;
}
let emptyTranscript = false;
if (evt.is_final) {
if (evt.alternatives.length === 0 || evt.alternatives[0].transcript === '' && !cs.callGone && !this.killed) {
emptyTranscript = true;
if (finished === 'true' &&
['microsoft', 'deepgram'].includes(this.vendor) &&
bufferedTranscripts.length === 0) {
this.logger.debug({evt}, 'TaskGather:_onTranscription - got empty transcript from old gather, disregarding');
return;
}
else if (this.vendor !== 'deepgram') {
this.logger.info({evt}, 'TaskGather:_onTranscription - got empty transcript, continue listening');
return;
}
else if (this.isContinuousAsr) {
this.logger.info({evt},
'TaskGather:_onTranscription - got empty deepgram transcript during continous asr, continue listening');
return;
}
else if (this.vendor === 'deepgram' && bufferedTranscripts.length > 0) {
this.logger.info({evt},
'TaskGather:_onTranscription - got empty transcript from deepgram, return the buffered transcripts');
}
}
if (this.isContinuousAsr) {
/* append the transcript and start listening again for asrTimeout */
const t = evt.alternatives[0].transcript;
if (t) {
/* remove trailing punctuation */
if (/[,;:\.!\?]$/.test(t)) {
this.logger.debug('TaskGather:_onTranscription - removing trailing punctuation');
evt.alternatives[0].transcript = t.slice(0, -1);
}
}
this.logger.info({evt}, 'TaskGather:_onTranscription - got transcript during continous asr');
bufferedTranscripts.push(evt);
this._startAsrTimer(channel);
/* some STT engines will keep listening after a final response, so no need to restart */
if (!['soniox', 'aws', 'microsoft', 'deepgram', 'google']
.includes(this.vendor)) this._startTranscribing(cs, ep, channel);
if (evt.alternatives[0]?.transcript === '' && !cs.callGone && !this.killed) {
if (['microsoft', 'deepgram'].includes(this.vendor)) {
this.logger.info({evt}, 'TaskTranscribe:_onTranscription - got empty transcript, continue listening');
}
else {
if (this.vendor === 'soniox') {
/* compile transcripts into one */
this._sonioxTranscripts.push(evt.vendor.finalWords);
evt = this.compileSonioxTranscripts(this._sonioxTranscripts, 1, this.language);
this._sonioxTranscripts = [];
}
else if (this.vendor === 'deepgram') {
/* compile transcripts into one */
if (!emptyTranscript) bufferedTranscripts.push(evt);
this.logger.info({evt}, 'TaskTranscribe:_onTranscription - got empty transcript, listen again');
this._transcribe(ep);
}
return;
}
/* deepgram can send an empty and final transcript; only if we have any buffered should we resolve */
if (bufferedTranscripts.length === 0) return;
evt = this.consolidateTranscripts(bufferedTranscripts, channel, this.language);
this._bufferedTranscripts[channel - 1] = [];
}
/* here is where we return a final transcript */
this.logger.debug({evt}, 'TaskTranscribe:_onTranscription - sending final transcript');
this._resolve(channel, evt);
/* some STT engines will keep listening after a final response, so no need to restart */
if (!['soniox', 'aws', 'microsoft', 'deepgram', 'google'].includes(this.vendor) &&
!this.vendor.startsWith('custom:')) {
this.logger.debug('TaskTranscribe:_onTranscription - restarting transcribe');
this._startTranscribing(cs, ep, channel);
}
if (this.vendor === 'soniox') {
/* compile transcripts into one */
this._sonioxTranscripts.push(evt.vendor.finalWords);
if (evt.is_final) {
evt = this.compileSonioxTranscripts(this._sonioxTranscripts, 1, this.language);
this._sonioxTranscripts = [];
}
}
else {
/* interim transcript */
/* deepgram can send a non-final transcript but with words that are final, so we need to buffer */
if (this.vendor === 'deepgram') {
const originalEvent = evt.vendor.evt;
if (originalEvent.is_final && evt.alternatives[0].transcript !== '') {
this.logger.debug({evt}, 'Gather:_onTranscription - buffering a completed (partial) deepgram transcript');
bufferedTranscripts.push(evt);
}
}
if (this.interim) {
this.logger.debug({evt}, 'TaskTranscribe:_onTranscription - sending interim transcript');
this._resolve(channel, evt);
}
if (this.isContinuousAsr && evt.is_final) {
this._bufferedTranscripts.push(evt);
this._startAsrTimer(channel);
} else {
await this._resolve(channel, evt);
}
}
async _resolve(channel, evt) {
if (evt.is_final) {
/* we've got a final transcript, so end the otel child span for this channel */
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
this.childSpan[channel - 1].span.setAttributes({
channel,
'stt.resolve': 'transcript',
'stt.result': JSON.stringify(evt)
});
this.childSpan[channel - 1].span.end();
}
/* we've got a transcript, so end the otel child span for this channel */
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
this.childSpan[channel - 1].span.setAttributes({
channel,
'stt.resolve': 'transcript',
'stt.result': JSON.stringify(evt)
});
this.childSpan[channel - 1].span.end();
}
if (this.transcriptionHook) {
const b3 = this.getTracingPropagation();
const httpHeaders = b3 && {b3};
const payload = {
...this.cs.callInfo,
...httpHeaders,
...(evt.alternatives && {speech: evt}),
...(evt.type && {speechEvent: evt})
};
try {
this.logger.debug({payload}, 'sending transcriptionHook');
const json = await this.cs.requestor.request('verb:hook', this.transcriptionHook, payload);
this.logger.info({json}, 'completed transcriptionHook');
const json = await this.cs.requestor.request('verb:hook', this.transcriptionHook, {
...this.cs.callInfo,
...httpHeaders,
speech: evt
});
this.logger.info({json}, 'sent transcriptionHook');
if (json && Array.isArray(json) && !this.parentTask) {
const makeTask = require('./make_task');
const tasks = normalizeJambones(this.logger, json).map((tdata) => makeTask(this.logger, tdata));
@@ -503,7 +391,7 @@ class TaskTranscribe extends SttTask {
this._clearTimer();
this.notifyTaskDone();
}
else if (evt.is_final) {
else {
/* start another child span for this channel */
const {span, ctx} = this.startChildSpan(`${STT_LISTEN_SPAN_NAME}:${channel}`);
this.childSpan[channel - 1] = {span, ctx};
@@ -511,8 +399,7 @@ class TaskTranscribe extends SttTask {
}
_onNoAudio(cs, ep, channel) {
this.logger.debug(`TaskTranscribe:_onNoAudio on channel ${channel}`);
if (this.paused) return;
this.logger.debug(`TaskTranscribe:_onNoAudio restarting transcription on channel ${channel}`);
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
this.childSpan[channel - 1].span.setAttributes({
channel,
@@ -528,8 +415,7 @@ class TaskTranscribe extends SttTask {
}
_onMaxDurationExceeded(cs, ep, channel) {
this.logger.debug(`TaskTranscribe:_onMaxDurationExceeded on channel ${channel}`);
if (this.paused) return;
this.logger.debug(`TaskTranscribe:_onMaxDurationExceeded restarting transcription on channel ${channel}`);
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
this.childSpan[channel - 1].span.setAttributes({
channel,
@@ -552,8 +438,9 @@ class TaskTranscribe extends SttTask {
}
}
async _startFallback(cs, _ep, evt) {
if (this.canFallback) {
async _onJambonzError(cs, _ep, evt) {
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError');
if (this.isHandledByPrimaryProvider && this.fallbackVendor) {
_ep.stopTranscription({
vendor: this.vendor,
bugname: this.bugname
@@ -561,57 +448,37 @@ class TaskTranscribe extends SttTask {
.catch((err) => this.logger.error({err}, `Error stopping transcription for primary vendor ${this.vendor}`));
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
try {
this.notifyError({ msg: 'ASR error',
details:`STT Vendor ${this.vendor} error: ${evt.error || evt.reason}`, failover: 'in progress'});
await this._initFallback();
await this._fallback();
let channel = 1;
if (this.ep !== _ep) {
channel = 2;
}
this[`_speechHandlersSet_${channel}`] = false;
this._startTranscribing(cs, _ep, channel);
updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid);
return true;
return;
} catch (error) {
this.notifyError({ msg: 'ASR error',
details:`STT Vendor ${this.vendor} error: ${evt.error || evt.reason}`, failover: 'not available'});
this.logger.info({error}, `There is error while falling back to ${this.fallbackVendor}`);
}
} else {
this.logger.debug('transcribe:_startFallback no condition for falling back');
this.notifyError({ msg: 'ASR error',
details:`STT Vendor ${this.vendor} error: ${evt.error || evt.reason}`, failover: 'not available'});
}
return false;
}
const {writeAlerts, AlertType} = cs.srf.locals;
async _onJambonzError(cs, _ep, evt) {
if (this.vendor === 'google' && evt.error_code === 0) {
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError - ignoring google error code 0');
return;
}
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError');
if (this.paused) return;
const {writeAlerts, AlertType} = cs.srf.locals;
if (this.vendor === 'nuance') {
const {code, error} = evt;
if (code === 404 && error === 'No speech') return this._resolve('timeout');
if (code === 413 && error === 'Too much speech') return this._resolve('timeout');
}
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Custom speech vendor ${this.vendor} error: ${evt.error}`,
vendor: this.vendor,
}).catch((err) => this.logger.info({err}, 'Error generating alert for jambonz custom connection failure'));
if (!(await this._startFallback(cs, _ep, evt))) {
this.notifyTaskDone();
if (this.vendor === 'nuance') {
const {code, error} = evt;
if (code === 404 && error === 'No speech') return this._resolve('timeout');
if (code === 413 && error === 'Too much speech') return this._resolve('timeout');
}
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Custom speech vendor ${this.vendor} error: ${evt.error}`,
vendor: this.vendor,
}).catch((err) => this.logger.info({err}, 'Error generating alert for jambonz custom connection failure'));
this.notifyError({msg: 'ASR error', details:`Custom speech vendor ${this.vendor} error: ${evt.error}`});
}
}
async _onVendorConnectFailure(cs, _ep, channel, evt) {
_onVendorConnectFailure(cs, _ep, channel, evt) {
super._onVendorConnectFailure(cs, _ep, evt);
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
this.childSpan[channel - 1].span.setAttributes({
@@ -620,9 +487,7 @@ class TaskTranscribe extends SttTask {
});
this.childSpan[channel - 1].span.end();
}
if (!(await this._startFallback(cs, _ep, evt))) {
this.notifyTaskDone();
}
this.notifyTaskDone();
}
_startAsrTimer(channel) {
@@ -631,9 +496,8 @@ class TaskTranscribe extends SttTask {
this._clearAsrTimer(channel);
this._asrTimer = setTimeout(() => {
this.logger.debug(`TaskTranscribe:_startAsrTimer - asr timer went off for channel: ${channel}`);
const evt = this.consolidateTranscripts(
this._bufferedTranscripts[channel - 1], channel, this.language, this.vendor);
this._bufferedTranscripts[channel - 1] = [];
const evt = this.consolidateTranscripts(this._bufferedTranscripts, channel, this.language);
this._bufferedTranscripts = [];
this._resolve(channel, evt);
}, this.asrTimeout);
this.logger.debug(`TaskTranscribe:_startAsrTimer: set for ${this.asrTimeout}ms for channel ${channel}`);

View File

@@ -1,180 +0,0 @@
const Task = require('./task');
const { TaskPreconditions } = require('../utils/constants');
/**
 * TtsTask
 * Base class for verbs that perform text-to-speech (e.g. say).
 * Encapsulates vendor/voice/option resolution, cache handling, failure
 * alerting, and otel tracing around audio synthesis.
 */
class TtsTask extends Task {
  /**
   * @param {Object} logger - logger instance
   * @param {Object} data - verb properties supplied by the application
   * @param {Task} [parentTask] - enclosing task, when nested inside another verb
   */
  constructor(logger, data, parentTask) {
    super(logger, data);
    this.parentTask = parentTask;
    // tts requires a connected media endpoint before exec
    this.preconditions = TaskPreconditions.Endpoint;
    // inherit earlyMedia from the enclosing task when nested
    this.earlyMedia = this.data.earlyMedia === true || (parentTask && parentTask.earlyMedia);
    this.synthesizer = this.data.synthesizer || {};
    this.disableTtsCache = this.data.disableTtsCache;
    this.options = this.synthesizer.options || {};
  }
  async exec(cs) {
    super.exec(cs);
  }
  /**
   * Synthesize this.text with the given vendor/voice, returning an array of
   * playable results: tmp file paths for file-based synthesis, or "say:" uris
   * when a streaming tts api is used.  Text entries that are already URLs
   * (or silence_stream:// uris) are passed through unsynthesized.
   *
   * @param {Object} cs - call session
   * @param {Object} ep - freeswitch endpoint the audio will play on
   * @param {Object} opts
   * @param {string} opts.vendor - tts vendor (google, microsoft, elevenlabs, ...)
   * @param {string} opts.language - language code
   * @param {string} opts.voice - voice name (may be rewritten per-vendor below)
   * @param {string} [opts.label] - credential label to select among multiple creds
   * @param {boolean} [opts.disableTtsStreaming] - force file-based synthesis
   * @param {boolean} [opts.preCache] - true when warming the cache (suppresses logging/stats)
   * @returns {Promise<string[]>} playable file paths / uris (empty entries filtered out)
   * @throws when no credentials are provisioned or synthesis fails
   */
  async _synthesizeWithSpecificVendor(cs, ep, {
    vendor,
    language,
    voice,
    label,
    disableTtsStreaming,
    preCache
  }) {
    const {srf, accountSid:account_sid} = cs;
    const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, srf);
    const {writeAlerts, AlertType, stats} = srf.locals;
    const {synthAudio} = srf.locals.dbHelpers;
    // engine preference: verb-level, then session-level, default 'neural'
    const engine = this.synthesizer.engine || cs.synthesizer?.engine || 'neural';
    // callSid salts the tts cache key
    const salt = cs.callSid;
    let credentials = cs.getSpeechCredentials(vendor, 'tts', label);
    /* parse Nuance voices into name and model */
    let model;
    if (vendor === 'nuance' && voice) {
      // e.g. "Evan - enhanced" -> voice "Evan", model "enhanced"
      const arr = /([A-Za-z-]*)\s+-\s+(enhanced|standard)/.exec(voice);
      if (arr) {
        voice = arr[1];
        model = arr[2];
      }
    } else if (vendor === 'deepgram') {
      // deepgram: the selected voice is passed through as the model
      model = voice;
    }
    /* allow for microsoft custom region voice and api_key to be specified as an override */
    if (vendor === 'microsoft' && this.options.deploymentId) {
      credentials = credentials || {};
      credentials.use_custom_tts = true;
      credentials.custom_tts_endpoint = this.options.deploymentId;
      credentials.api_key = this.options.apiKey || credentials.apiKey;
      credentials.region = this.options.region || credentials.region;
      voice = this.options.voice || voice;
    } else if (vendor === 'elevenlabs') {
      // elevenlabs: verb-level options override stored credential settings
      credentials = credentials || {};
      credentials.model_id = this.options.model_id || credentials.model_id;
      credentials.voice_settings = this.options.voice_settings || {};
      credentials.optimize_streaming_latency = this.options.optimize_streaming_latency
        || credentials.optimize_streaming_latency;
      voice = this.options.voice_id || voice;
    }
    // prime the endpoint so freeswitch can reuse cached speech handles
    ep.set({
      tts_engine: vendor,
      tts_voice: voice,
      cache_speech_handles: 1,
    }).catch((err) => this.logger.info({err}, `${this.name}: Error setting tts_engine on endpoint`));
    if (!preCache) this.logger.info({vendor, language, voice, model}, `${this.name}:exec`);
    try {
      if (!credentials) {
        // alert the account and bail: nothing provisioned for this vendor
        writeAlerts({
          account_sid,
          alert_type: AlertType.TTS_NOT_PROVISIONED,
          vendor
        }).catch((err) => this.logger.info({err}, 'Error generating alert for no tts'));
        this.notifyError({
          msg: 'TTS error',
          details:`No speech credentials provisioned for selected vendor ${vendor}`
        });
        throw new Error('no provisioned speech credentials for TTS');
      }
      // synthesize all of the text elements
      let lastUpdated = false;
      /* produce an audio segment from the provided text */
      const generateAudio = async(text) => {
        if (this.killed) return;
        // silence uris are played as-is, no synthesis needed
        if (text.startsWith('silence_stream://')) return text;
        /* otel: trace time for tts */
        if (!preCache && !this.parentTask) {
          const {span} = this.startChildSpan('tts-generation', {
            'tts.vendor': vendor,
            'tts.language': language,
            'tts.voice': voice
          });
          this.otelSpan = span;
        }
        try {
          const {filePath, servedFromCache, rtt} = await synthAudio(stats, {
            account_sid,
            text,
            vendor,
            language,
            voice,
            engine,
            model,
            salt,
            credentials,
            options: this.options,
            disableTtsCache : this.disableTtsCache,
            disableTtsStreaming,
            preCache
          });
          if (!filePath.startsWith('say:')) {
            // file-based result: track tmp file for cleanup, close span, report stats
            this.logger.debug(`file ${filePath}, served from cache ${servedFromCache}`);
            if (filePath) cs.trackTmpFile(filePath);
            if (this.otelSpan) {
              this.otelSpan.setAttributes({'tts.cached': servedFromCache});
              this.otelSpan.end();
              this.otelSpan = null;
            }
            // touch the credential's last-used timestamp at most once per exec
            if (!servedFromCache && !lastUpdated) {
              lastUpdated = true;
              updateSpeechCredentialLastUsed(credentials.speech_credential_sid).catch(() => {/* logged error */});
            }
            // report synthesis latency only for freshly-generated (non-cached) audio
            if (!servedFromCache && rtt && !preCache) {
              this.notifyStatus({
                event: 'synthesized-audio',
                vendor,
                language,
                characters: text.length,
                elapsedTime: rtt
              });
            }
          }
          else {
            this.logger.debug('a streaming tts api will be used');
            // bind the streaming playback to this endpoint's session
            const modifiedPath = filePath.replace('say:{', `say:{session-uuid=${ep.uuid},`);
            return modifiedPath;
          }
          return filePath;
        } catch (err) {
          this.logger.info({err}, 'Error synthesizing tts');
          if (this.otelSpan) this.otelSpan.end();
          writeAlerts({
            account_sid: cs.accountSid,
            alert_type: AlertType.TTS_FAILURE,
            vendor,
            detail: err.message
          }).catch((err) => this.logger.info({err}, 'Error generating alert for tts failure'));
          this.notifyError({msg: 'TTS error', details: err.message || err});
          throw err;
        }
      };
      // entries that are already valid URLs pass through; everything else is synthesized
      const arr = this.text.map((t) => (this._validateURL(t) ? t : generateAudio(t)));
      return (await Promise.all(arr)).filter((fp) => fp && fp.length);
    } catch (err) {
      this.logger.info(err, 'TaskSay:exec error');
      throw err;
    }
  }
  // returns true if the string parses as a URL (any scheme accepted by WHATWG URL)
  _validateURL(urlString) {
    try {
      new URL(urlString);
      return true;
    } catch (e) {
      return false;
    }
  }
}
module.exports = TtsTask;

View File

@@ -1,175 +0,0 @@
const makeTask = require('../tasks/make_task');
const Emitter = require('events');
const { normalizeJambones } = require('@jambonz/verb-specifications');
const {TaskName} = require('../utils/constants');
const assert = require('assert');
/**
* ActionHookDelayProcessor
* @extends Emitter
*
* @param {Object} logger - logger instance
* @param {Object} opts - options
* @param {Object} cs - call session
* @param {Object} ep - endpoint
*
* @emits {Event} 'giveup' - when associated giveup timer expires
*
* Ref:https://www.jambonz.org/docs/supporting-articles/handling-action-hook-delays/
*/
class ActionHookDelayProcessor extends Emitter {
  /**
   * @param {Object} logger - logger instance
   * @param {Object} opts - actionHookDelayAction properties (actions, retries, timeouts)
   * @param {Object} cs - call session owning this processor
   * @throws {Error} when enabled but no valid say/play actions are supplied
   */
  constructor(logger, opts, cs) {
    super();
    this.logger = logger;
    this.cs = cs;
    this._active = false;
    const enabled = this.init(opts);
    /* when enabled we must have at least one action, and only say/play verbs are allowed */
    if (enabled && (!this.actions || !Array.isArray(this.actions) || this.actions.length === 0)) {
      throw new Error('ActionHookDelayProcessor: no actions specified');
    }
    else if (enabled && this.actions.some((a) => !a.verb || ![TaskName.Say, TaskName.Play].includes(a.verb))) {
      throw new Error(`ActionHookDelayProcessor: invalid actions specified: ${JSON.stringify(this.actions)}`);
    }
  }
  /** current configuration, useful for logging and for re-creating the processor */
  get properties() {
    return {
      actions: this.actions,
      retries: this.retries,
      noResponseTimeout: this.noResponseTimeout,
      noResponseGiveUpTimeout: this.noResponseGiveUpTimeout
    };
  }
  get ep() {
    return this.cs.ep;
  }
  /**
   * Apply options to this instance.
   * @returns {boolean} false when the supplied options explicitly disable the processor
   */
  init(opts) {
    this.logger.debug({opts}, 'ActionHookDelayProcessor#init');
    this.actions = opts.actions;
    this.retries = opts.retries || 0;
    this.noResponseTimeout = opts.noResponseTimeout || 0;
    this.noResponseGiveUpTimeout = opts.noResponseGiveUpTimeout;
    // return false if these options actually disable the ahdp
    return ('enable' in opts && opts.enable === true) ||
      ('enabled' in opts && opts.enabled === true) ||
      (!('enable' in opts) && !('enabled' in opts));
  }
  /** arm the no-response (and optional give-up) timers; no-op if already running */
  start() {
    this.logger.debug('ActionHookDelayProcessor#start');
    if (this._active) {
      this.logger.debug('ActionHookDelayProcessor#start: already started due to prior gather which is continuing');
      return;
    }
    assert(!this._noResponseTimer);
    this._active = true;
    this._retryCount = 0;
    const timeoutMs = this.noResponseTimeout === 0 ? 1 : this.noResponseTimeout * 1000;
    this._noResponseTimer = setTimeout(this._onNoResponseTimer.bind(this), timeoutMs);
    if (this.noResponseGiveUpTimeout > 0) {
      const timeoutMs = this.noResponseGiveUpTimeout * 1000;
      this._noResponseGiveUpTimer = setTimeout(this._onNoResponseGiveUpTimer.bind(this), timeoutMs);
    }
  }
  /**
   * Cancel timers and tear down any filler media in progress.
   * A play is killed immediately; a say is allowed to finish first, in which
   * case a promise is returned that resolves on playback-stop.
   */
  async stop() {
    this.logger.debug('ActionHookDelayProcessor#stop');
    this._active = false;
    if (this._noResponseTimer) {
      clearTimeout(this._noResponseTimer);
      this._noResponseTimer = null;
    }
    if (this._noResponseGiveUpTimer) {
      clearTimeout(this._noResponseGiveUpTimer);
      this._noResponseGiveUpTimer = null;
    }
    if (this._taskInProgress) {
      this.logger.debug(`ActionHookDelayProcessor#stop: killing task in progress: ${this._taskInProgress.name}`);
      /** if we are doing a play, kill it immediately
       * if we are doing a say, wait for it to finish
       */
      if (TaskName.Say === this._taskInProgress.name) {
        this.logger.debug('ActionHookDelayProcessor#stop returning promise');
        /* bugfix: the cleanup closure previously assigned to _sayResolver was
         * immediately overwritten by the promise resolver, so the say task was
         * never killed; combine cleanup and resolution in a single resolver */
        return new Promise((resolve) => {
          this._sayResolver = () => {
            this.logger.debug('ActionHookDelayProcessor#stop: say is done, continue on..');
            if (this._taskInProgress) {
              this._taskInProgress.kill(this.cs);
              this._taskInProgress = null;
            }
            resolve();
          };
        });
      }
      else {
        /* play */
        this._taskInProgress.kill(this.cs);
        this._taskInProgress = null;
      }
    }
    this.logger.debug('ActionHookDelayProcessor#stop returning');
  }
  /** the application has not responded in time - start the next filler play/say action */
  _onNoResponseTimer() {
    this.logger.debug('ActionHookDelayProcessor#_onNoResponseTimer');
    this._noResponseTimer = null;
    /* get the next play or say action */
    const verb = this.actions[this._retryCount % this.actions.length];
    const t = normalizeJambones(this.logger, [verb]);
    this.logger.debug({verb}, 'ActionHookDelayProcessor#_onNoResponseTimer: starting action');
    try {
      this._taskInProgress = makeTask(this.logger, t[0]);
      this._taskInProgress.disableTracing = true;
      /* exec is intentionally not awaited - completion is tracked via playback events;
       * but do not leave the promise floating, log any async rejection */
      Promise.resolve(this._taskInProgress.exec(this.cs, {ep: this.ep}))
        .catch((err) => this.logger.info(err,
          'ActionHookDelayProcessor#_onNoResponseTimer: error executing action'));
    } catch (err) {
      this.logger.info(err, 'ActionHookDelayProcessor#_onNoResponseTimer: error starting action');
      this._taskInProgress = null;
      return;
    }
    this.ep.once('playback-start', (evt) => {
      this.logger.debug({evt}, 'got playback-start');
      if (!this._active) {
        /* stop() ran while the playback was spinning up - cut the audio now */
        this.logger.info({evt}, 'ActionHookDelayProcessor#_onNoResponseTimer: killing audio immediately');
        this.ep.api('uuid_break', this.ep.uuid)
          .catch((err) => this.logger.info(err,
            'ActionHookDelayProcessor#_onNoResponseTimer Error killing audio'));
      }
    });
    this.ep.once('playback-stop', (evt) => {
      this._taskInProgress = null;
      if (this._sayResolver) {
        /* we were waiting for the play to finish before continuing to next task */
        this.logger.debug({evt}, 'got playback-stop');
        this._sayResolver();
        this._sayResolver = null;
      }
      else {
        /* possibly start the no response timer again */
        if (this._active && this.retries > 0 && this._retryCount < this.retries && this.noResponseTimeout > 0) {
          this.logger.debug({evt}, 'ActionHookDelayProcessor#_onNoResponseTimer: playback-stop on play/say action');
          const timeoutMs = this.noResponseTimeout * 1000;
          this._noResponseTimer = setTimeout(this._onNoResponseTimer.bind(this), timeoutMs);
        }
      }
    });
    this._retryCount++;
  }
  /**
   * The application never responded within noResponseGiveUpTimeout.
   * @emits giveup
   */
  _onNoResponseGiveUpTimer() {
    this._active = false;
    this.logger.info('ActionHookDelayProcessor#_onNoResponseGiveUpTimer');
    /* bugfix: log (rather than silently swallow) any error from stop() */
    this.stop().catch((err) => this.logger.info(err,
      'ActionHookDelayProcessor#_onNoResponseGiveUpTimer: error stopping'));
    this.emit('giveup');
  }
}
module.exports = ActionHookDelayProcessor;

View File

@@ -266,7 +266,7 @@ module.exports = (logger) => {
/* set stt options */
logger.info(`starting amd for vendor ${vendor} and language ${language}`);
const sttOpts = amd.setChannelVarsForStt({name: TaskName.Gather}, sttCredentials, language, {
const sttOpts = amd.setChannelVarsForStt({name: TaskName.Gather}, sttCredentials, {
vendor,
hints,
enhancedModel: true,

View File

@@ -26,25 +26,25 @@ class BackgroundTaskManager extends Emitter {
return this.tasks.size;
}
async newTask(type, opts, sticky = false) {
this.logger.info({opts}, `initiating Background task ${type}`);
async newTask(type, taskOpts) {
this.logger.info({taskOpts}, `initiating Background task ${type}`);
if (this.tasks.has(type)) {
this.logger.info(`Background task ${type} is running, skipped`);
this.logger.info(`Background task ${type} is running, skiped`);
return;
}
let task;
switch (type) {
case 'listen':
task = await this._initListen(opts);
task = await this._initListen(taskOpts);
break;
case 'bargeIn':
task = await this._initBargeIn(opts);
task = await this._initBargeIn(taskOpts);
break;
case 'record':
task = await this._initRecord();
break;
case 'transcribe':
task = await this._initTranscribe(opts);
task = await this._initTranscribe(taskOpts);
break;
default:
break;
@@ -52,7 +52,6 @@ class BackgroundTaskManager extends Emitter {
if (task) {
this.tasks.set(type, task);
}
if (task && sticky) task.sticky = true;
return task;
}
@@ -65,16 +64,19 @@ class BackgroundTaskManager extends Emitter {
task.kill();
// Remove task from managed List
this.tasks.delete(type);
} else {
this.logger.info(`stopping background task, ${type} is not running, skipped`);
}
}
stopAll() {
this.logger.debug('BackgroundTaskManager:stopAll');
this.logger.info('BackgroundTaskManager:stopAll');
for (const key of this.tasks.keys()) {
this.stop(key);
}
}
// Initiate Task
// Initiate Listen
async _initListen(opts, bugname = 'jambonz-background-listen', ignoreCustomerData = false, type = 'listen') {
let task;
@@ -117,8 +119,7 @@ class BackgroundTaskManager extends Emitter {
this._taskCompleted('bargeIn', task);
if (task.sticky && !this.cs.callGone && !this.cs._stopping) {
this.logger.info('BackgroundTaskManager:_initBargeIn: restarting background bargeIn');
this._bargeInHandled = false;
this.newTask('bargeIn', opts, true);
this.newTask('bargeIn', opts);
}
return;
})
@@ -133,7 +134,8 @@ class BackgroundTaskManager extends Emitter {
async _initRecord() {
if (this.cs.accountInfo.account.record_all_calls || this.cs.application.record_all_calls) {
if (!JAMBONZ_RECORD_WS_BASE_URL || !this.cs.accountInfo.account.bucket_credential) {
this.logger.error('_initRecord: invalid cfg - missing JAMBONZ_RECORD_WS_BASE_URL or bucket config');
this.logger.error(`_initRecord: invalid configuration,
missing JAMBONZ_RECORD_WS_BASE_URL or bucket configuration`);
return undefined;
}
const listenOpts = {
@@ -174,7 +176,7 @@ class BackgroundTaskManager extends Emitter {
}
_taskCompleted(type, task) {
this.logger.debug({type, task}, `BackgroundTaskManager:_taskCompleted: task completed, sticky: ${task.sticky}`);
this.logger.info({type, task}, 'BackgroundTaskManager:_taskCompleted: task completed');
task.removeAllListeners();
task.span.end();
this.tasks.delete(type);
@@ -187,10 +189,7 @@ class BackgroundTaskManager extends Emitter {
}
_bargeInTaskCompleted(evt) {
if (this._bargeInHandled) return;
this._bargeInHandled = true;
this.logger.debug({evt},
'BackgroundTaskManager:_bargeInTaskCompleted on event from background bargeIn, emitting bargein-done event');
this.logger.info({evt}, 'BackgroundTaskManager:_bargeInTaskCompleted on event from background bargeIn');
this.emit('bargeIn-done', evt);
}
}

View File

@@ -1,13 +1,12 @@
{
"TaskName": {
"Answer": "answer",
"Cognigy": "cognigy",
"Conference": "conference",
"Config": "config",
"Dequeue": "dequeue",
"Dial": "dial",
"Dialogflow": "dialogflow",
"Dtmf": "dtmf",
"Dub": "dub",
"Enqueue": "enqueue",
"Gather": "gather",
"Hangup": "hangup",
@@ -30,8 +29,7 @@
"Tag": "tag",
"Transcribe": "transcribe"
},
"AllowedSipRecVerbs": ["answer", "config", "gather", "transcribe", "listen", "tag"],
"AllowedConfirmSessionVerbs": ["config", "gather", "plays", "say", "tag"],
"AllowedSipRecVerbs": ["config", "gather", "transcribe", "listen", "tag"],
"CallStatus": {
"Trying": "trying",
"Ringing": "ringing",
@@ -97,10 +95,6 @@
"Transcription": "soniox_transcribe::transcription",
"Error": "soniox_transcribe::error"
},
"VerbioTranscriptionEvents": {
"Transcription": "verbio_transcribe::transcription",
"Error": "verbio_transcribe::error"
},
"CobaltTranscriptionEvents": {
"Transcription": "cobalt_speech::transcription",
"CompileContext": "cobalt_speech::compile_context_response",
@@ -138,9 +132,6 @@
"ConnectFailure": "assemblyai_transcribe::connect_failed",
"Connect": "assemblyai_transcribe::connect"
},
"VadDetection": {
"Detection": "vad_detect:detection"
},
"ListenEvents": {
"Connect": "mod_audio_fork::connect",
"ConnectFailure": "mod_audio_fork::connect_failed",
@@ -178,7 +169,6 @@
"session:new",
"session:reconnect",
"session:redirect",
"session:adulting",
"call:status",
"queue:status",
"dial:confirm",

View File

@@ -41,7 +41,6 @@ const speechMapper = (cred) => {
const o = JSON.parse(decrypt(credential));
obj.access_key_id = o.access_key_id;
obj.secret_access_key = o.secret_access_key;
obj.role_arn = o.role_arn;
obj.aws_region = o.aws_region;
}
else if ('microsoft' === obj.vendor) {
@@ -76,8 +75,6 @@ const speechMapper = (cred) => {
else if ('deepgram' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
obj.deepgram_stt_uri = o.deepgram_stt_uri;
obj.deepgram_stt_use_tls = o.deepgram_stt_use_tls;
}
else if ('soniox' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
@@ -95,17 +92,6 @@ const speechMapper = (cred) => {
obj.api_key = o.api_key;
obj.model_id = o.model_id;
obj.options = o.options;
} else if ('playht' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
obj.user_id = o.user_id;
obj.voice_engine = o.voice_engine;
obj.options = o.options;
} else if ('rimelabs' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
obj.model_id = o.model_id;
obj.options = o.options;
} else if ('assemblyai' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
@@ -113,11 +99,6 @@ const speechMapper = (cred) => {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
obj.model_id = o.model_id;
} else if ('verbio' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.client_id = o.client_id;
obj.client_secret = o.client_secret;
obj.engine_version = o.engine_version;
} else if (obj.vendor.startsWith('custom:')) {
const o = JSON.parse(decrypt(credential));
obj.auth_token = o.auth_token;

View File

@@ -171,16 +171,13 @@ function installSrfLocals(srf, logger) {
retrieveFromSortedSet,
retrieveByPatternSortedSet,
sortedSetLength,
sortedSetPositionByPattern,
sortedSetPositionByPattern
} = require('@jambonz/realtimedb-helpers')({}, logger, tracer);
const registrar = new Registrar(logger, client);
const {
synthAudio,
addFileToCache,
getNuanceAccessToken,
getIbmAccessToken,
getAwsAuthToken,
getVerbioAccessToken
} = require('@jambonz/speech-utils')({}, logger);
const {
writeAlerts,
@@ -218,8 +215,6 @@ function installSrfLocals(srf, logger) {
listCalls,
deleteCall,
synthAudio,
getAwsAuthToken,
addFileToCache,
createHash,
retrieveHash,
deleteKey,
@@ -240,8 +235,7 @@ function installSrfLocals(srf, logger) {
retrieveFromSortedSet,
retrieveByPatternSortedSet,
sortedSetLength,
sortedSetPositionByPattern,
getVerbioAccessToken
sortedSetPositionByPattern
},
parentLogger: logger,
getSBC,

View File

@@ -1,18 +0,0 @@
/**
 * Coerce a decibel specification into an integer number of dB.
 * Accepts a number (returned as-is), or a string like "-3 dB" / "2.7db"
 * (case-insensitive, optional sign and decimals; truncated toward zero).
 * Anything else — including null/undefined/empty string — yields 0.
 *
 * @param {number|string} db - decibel value or textual spec
 * @returns {number} integer decibels (0 when unparseable)
 */
const parseDecibels = (db) => {
  if (!db) return 0;
  if (typeof db === 'number') return db;
  if (typeof db !== 'string') return 0;
  const m = /([+-]?\d+(\.\d+)?)\s*db/i.exec(db);
  return m ? Math.trunc(parseFloat(m[1])) : 0;
};
module.exports = parseDecibels;

View File

@@ -16,13 +16,9 @@ const uuidv4 = require('uuid-random');
const HttpRequestor = require('./http-requestor');
const WsRequestor = require('./ws-requestor');
const {makeOpusFirst} = require('./sdp-utils');
const {
JAMBONES_USE_FREESWITCH_TIMER_FD
} = require('../config');
class SingleDialer extends Emitter {
constructor({logger, sbcAddress, target, opts, application, callInfo, accountInfo, rootSpan, startSpan, dialTask,
onHoldMusic}) {
constructor({logger, sbcAddress, target, opts, application, callInfo, accountInfo, rootSpan, startSpan, dialTask}) {
super();
assert(target.type);
@@ -45,7 +41,6 @@ class SingleDialer extends Emitter {
this.callSid = uuidv4();
this.dialTask = dialTask;
this.onHoldMusic = onHoldMusic;
this.on('callStatusChange', this._notifyCallStatusChange.bind(this));
}
@@ -136,7 +131,6 @@ class SingleDialer extends Emitter {
this.serviceUrl = srf.locals.serviceUrl;
this.ep = await ms.createEndpoint();
this._configMsEndpoint();
this.logger.debug(`SingleDialer:exec - created endpoint ${this.ep.uuid}`);
/**
@@ -195,10 +189,6 @@ class SingleDialer extends Emitter {
callSid: this.callSid,
traceId: this.rootSpan.traceId
});
if (this.dialTask && this.dialTask.tag !== null &&
typeof this.dialTask.tag === 'object' && !Array.isArray(this.dialTask.tag)) {
this.callInfo.customerData = this.dialTask.tag;
}
this.logger = srf.locals.parentLogger.child({
callSid: this.callSid,
parentCallSid: this.parentCallInfo.callSid,
@@ -263,7 +253,7 @@ class SingleDialer extends Emitter {
.on('modify', async(req, res) => {
try {
if (this.ep) {
if (this.dialTask && this.dialTask.isOnHoldEnabled) {
if (this.dialTask && this.dialTask.isOnHold) {
this.logger.info('dial is onhold, emit event');
this.emit('reinvite', req, res);
} else {
@@ -295,17 +285,17 @@ class SingleDialer extends Emitter {
if (err.status === 487) status.callStatus = CallStatus.NoAnswer;
else if ([486, 600].includes(err.status)) status.callStatus = CallStatus.Busy;
this.logger.info(`SingleDialer:exec outdial failure ${err.status}`);
inviteSpan?.setAttributes({'invite.status_code': err.status});
inviteSpan?.end();
inviteSpan.setAttributes({'invite.status_code': err.status});
inviteSpan.end();
}
else {
this.logger.error(err, 'SingleDialer:exec');
status.sipStatus = 500;
inviteSpan?.setAttributes({
inviteSpan.setAttributes({
'invite.status_code': 500,
'invite.err': err.message
});
inviteSpan?.end();
inviteSpan.end();
}
this.emit('callStatusChange', status);
if (this.ep) this.ep.destroy();
@@ -330,16 +320,6 @@ class SingleDialer extends Emitter {
}
}
_configMsEndpoint() {
const opts = {
...(this.onHoldMusic && {holdMusic: `shout://${this.onHoldMusic.replace(/^https?:\/\//, '')}`}),
...(JAMBONES_USE_FREESWITCH_TIMER_FD && {timer_name: 'timerfd'})
};
if (Object.keys(opts).length > 0) {
this.ep.set(opts);
}
}
/**
* Run an application on the call after answer, e.g. call screening.
* Once the application completes in some fashion, emit an 'accepted' event
@@ -353,7 +333,6 @@ class SingleDialer extends Emitter {
const json = await this.requestor.request('dial:confirm', confirmHook, this.callInfo.toJSON());
if (!json || (Array.isArray(json) && json.length === 0)) {
this.logger.info('SingleDialer:_executeApp: no tasks returned from confirm hook');
this.emit('accept');
return;
}
const tasks = normalizeJambones(this.logger, json).map((tdata) => makeTask(this.logger, tdata));
@@ -413,7 +392,6 @@ class SingleDialer extends Emitter {
const app = {...application};
if ('WS' === app.call_hook?.method ||
app.call_hook?.url.startsWith('ws://') || app.call_hook?.url.startsWith('wss://')) {
if (app.call_hook?.url) app.call_hook.url += '/adulting';
const requestor = new WsRequestor(logger, this.accountInfo.account.account_sid,
app.call_hook, this.accountInfo.account.webhook_secret);
app.requestor = requestor;
@@ -428,8 +406,6 @@ class SingleDialer extends Emitter {
this.accountInfo.account.webhook_secret);
else app.notifier = {request: () => {}, close: () => {}};
}
// Replace old application with new application.
this.application = app;
const cs = new AdultingCallSession({
logger: newLogger,
singleDialer: this,
@@ -439,13 +415,6 @@ class SingleDialer extends Emitter {
tasks,
rootSpan
});
app.requestor.request('session:adulting', '/adulting', {
...cs.callInfo.toJSON(),
parentCallInfo: this.parentCallInfo
}).catch((err) => {
newLogger.error({err}, 'doAdulting: error sending adulting request');
});
cs.req = this.req;
cs.exec().catch((err) => newLogger.error({err}, 'doAdulting: error executing session'));
return cs;
@@ -467,7 +436,6 @@ class SingleDialer extends Emitter {
async reAnchorMedia() {
assert(this.dlg && this.dlg.connected && !this.ep);
this.ep = await this.ms.createEndpoint({remoteSdp: this.dlg.remote.sdp});
this._configMsEndpoint();
await this.dlg.modify(this.ep.local.sdp, {
headers: {
'X-Reason': 'anchor-media'
@@ -498,12 +466,11 @@ class SingleDialer extends Emitter {
}
function placeOutdial({
logger, srf, ms, sbcAddress, target, opts, application, callInfo, accountInfo, rootSpan, startSpan, dialTask,
onHoldMusic
logger, srf, ms, sbcAddress, target, opts, application, callInfo, accountInfo, rootSpan, startSpan, dialTask
}) {
const myOpts = deepcopy(opts);
const sd = new SingleDialer({
logger, sbcAddress, target, myOpts, application, callInfo, accountInfo, rootSpan, startSpan, dialTask, onHoldMusic
logger, sbcAddress, target, myOpts, application, callInfo, accountInfo, rootSpan, startSpan, dialTask
});
sd.exec(srf, ms, myOpts);
return sd;

View File

@@ -102,55 +102,7 @@ const stickyVars = {
]
};
/**
* @see https://developers.deepgram.com/docs/models-languages-overview
*/
// Preferred Deepgram speech model per language code.
// Each value is a pair: [model for Gather (short utterances), model for Transcribe]
// — see the destructuring in selectDefaultDeepgramModel below.
const optimalDeepramModels = {
zh: ['base', 'base'],
'zh-CN':['base', 'base'],
'zh-TW': ['base', 'base'],
da: ['enhanced', 'enhanced'],
en: ['nova-2-phonecall', 'nova-2'],
'en-US': ['nova-2-phonecall', 'nova-2'],
'en-AU': ['nova-2', 'nova-2'],
'en-GB': ['nova-2', 'nova-2'],
'en-IN': ['nova-2', 'nova-2'],
'en-NZ': ['nova-2', 'nova-2'],
nl: ['nova-2', 'nova-2'],
fr: ['nova-2', 'nova-2'],
'fr-CA': ['nova-2', 'nova-2'],
de: ['nova-2', 'nova-2'],
hi: ['nova-2', 'nova-2'],
'hi-Latn': ['nova-2', 'nova-2'],
id: ['base', 'base'],
it: ['nova-2', 'nova-2'],
ja: ['enhanced', 'enhanced'],
ko: ['nova-2', 'nova-2'],
no: ['nova-2', 'nova-2'],
pl: ['nova-2', 'nova-2'],
pt: ['nova-2', 'nova-2'],
'pt-BR': ['nova-2', 'nova-2'],
'pt-PT': ['nova-2', 'nova-2'],
ru: ['nova-2', 'nova-2'],
es: ['nova-2', 'nova-2'],
'es-419': ['nova-2', 'nova-2'],
'es-LATAM': ['enhanced', 'enhanced'],
sv: ['nova-2', 'nova-2'],
ta: ['enhanced', 'enhanced'],
taq: ['enhanced', 'enhanced'],
tr: ['nova-2', 'nova-2'],
uk: ['nova-2', 'nova-2']
};
/**
 * Pick a sensible default Deepgram model for a task/language combination.
 * Gather tasks get the short-utterance model, other tasks the transcription
 * model from optimalDeepramModels; unknown languages fall back to 'base'.
 *
 * @param {object} task - task whose .name distinguishes Gather from others
 * @param {string} language - BCP-47-ish language code
 * @returns {string} Deepgram model name
 */
const selectDefaultDeepgramModel = (task, language) => {
  if (!(language in optimalDeepramModels)) return 'base';
  const [gatherModel, transcribeModel] = optimalDeepramModels[language];
  return TaskName.Gather === task.name ? gatherModel : transcribeModel;
};
const consolidateTranscripts = (bufferedTranscripts, channel, language, vendor) => {
const consolidateTranscripts = (bufferedTranscripts, channel, language) => {
if (bufferedTranscripts.length === 1) return bufferedTranscripts[0];
let totalConfidence = 0;
const finalTranscript = bufferedTranscripts.reduce((acc, evt) => {
@@ -191,7 +143,7 @@ const consolidateTranscripts = (bufferedTranscripts, channel, language, vendor)
totalConfidence / bufferedTranscripts.length;
finalTranscript.alternatives[0].transcript = finalTranscript.alternatives[0].transcript.trim();
finalTranscript.vendor = {
name: vendor,
name: 'deepgram',
evt: bufferedTranscripts
};
return finalTranscript;
@@ -270,7 +222,7 @@ const normalizeDeepgram = (evt, channel, language, shortUtterance) => {
language_code: language,
channel_tag: channel,
is_final: shortUtterance ? evt.is_final : evt.speech_final,
alternatives: alternatives.length ? [alternatives[0]] : [],
alternatives: [alternatives[0]],
vendor: {
name: 'deepgram',
evt: copy
@@ -376,20 +328,6 @@ const normalizeNuance = (evt, channel, language) => {
};
};
/**
 * Normalize a raw Verbio STT event into the common transcript shape used
 * across vendors. A deep copy of the raw event is stashed under vendor.evt
 * so downstream consumers cannot mutate the original payload.
 *
 * @param {object} evt - raw Verbio event (must be JSON-serializable)
 * @param {number|string} channel - channel tag for the transcript
 * @param {string} language - language code to report
 * @returns {object} normalized transcript event
 */
const normalizeVerbio = (evt, channel, language) => {
  const snapshot = JSON.parse(JSON.stringify(evt));
  const {is_final, alternatives} = evt;
  return {
    language_code: language,
    channel_tag: channel,
    is_final,
    alternatives,
    vendor: {
      name: 'verbio',
      evt: snapshot
    }
  };
};
const normalizeMicrosoft = (evt, channel, language, punctuation = true) => {
const copy = JSON.parse(JSON.stringify(evt));
const nbest = evt.NBest;
@@ -477,8 +415,6 @@ module.exports = (logger) => {
return normalizeCobalt(evt, channel, language);
case 'assemblyai':
return normalizeAssemblyAi(evt, channel, language, shortUtterance);
case 'verbio':
return normalizeVerbio(evt, channel, language);
default:
if (vendor.startsWith('custom:')) {
return normalizeCustom(evt, channel, language, vendor);
@@ -488,15 +424,22 @@ module.exports = (logger) => {
}
};
const setChannelVarsForStt = (task, sttCredentials, language, rOpts = {}) => {
const setChannelVarsForStt = (task, sttCredentials, rOpts = {}) => {
let opts = {};
const {enable, voiceMs = 0, mode = -1} = rOpts.vad || {};
const vad = {enable, voiceMs, mode};
const vendor = rOpts.vendor;
/* voice activity detection works across vendors */
opts = {
...opts,
...(vad.enable && {START_RECOGNIZING_ON_VAD: 1}),
...(vad.enable && vad.voiceMs && {RECOGNIZER_VAD_VOICE_MS: vad.voiceMs}),
...(vad.enable && typeof vad.mode === 'number' && {RECOGNIZER_VAD_MODE: vad.mode}),
};
if ('google' === vendor) {
const useV2 = rOpts.googleOptions?.serviceVersion === 'v2';
const model = task.name === TaskName.Gather ?
(useV2 ? 'telephony_short' : 'command_and_search') :
(useV2 ? 'long' : 'latest_long');
const model = task.name === TaskName.Gather ? 'command_and_search' : 'latest_long';
opts = {
...opts,
...(sttCredentials && {GOOGLE_APPLICATION_CREDENTIALS: JSON.stringify(sttCredentials.credentials)}),
@@ -529,26 +472,6 @@ module.exports = (logger) => {
...{GOOGLE_SPEECH_MODEL: rOpts.model || model},
...(rOpts.naicsCode > 0 && {GOOGLE_SPEECH_METADATA_INDUSTRY_NAICS_CODE: rOpts.naicsCode}),
GOOGLE_SPEECH_METADATA_RECORDING_DEVICE_TYPE: 'phone_line',
...(useV2 && {
GOOGLE_SPEECH_RECOGNIZER_PARENT: `projects/${sttCredentials.credentials.project_id}/locations/global`,
GOOGLE_SPEECH_CLOUD_SERVICES_VERSION: 'v2',
...(rOpts.googleOptions?.speechStartTimeoutMs && {
GOOGLE_SPEECH_START_TIMEOUT_MS: rOpts.googleOptions.speechStartTimeoutMs
}),
...(rOpts.googleOptions?.speechEndTimeoutMs && {
GOOGLE_SPEECH_END_TIMEOUT_MS: rOpts.googleOptions.speechEndTimeoutMs
}),
...(rOpts.googleOptions?.transcriptNormalization && {
GOOGLE_SPEECH_TRANSCRIPTION_NORMALIZATION: JSON.stringify(rOpts.googleOptions.transcriptNormalization)
}),
...(rOpts.googleOptions?.enableVoiceActivityEvents && {
GOOGLE_SPEECH_ENABLE_VOICE_ACTIVITY_EVENTS: rOpts.googleOptions.enableVoiceActivityEvents
}),
...(rOpts.sgoogleOptions?.recognizerId) && {GOOGLE_SPEECH_RECOGNIZER_ID: rOpts.googleOptions.recognizerId},
...(rOpts.googleOptions?.enableVoiceActivityEvents && {
GOOGLE_SPEECH_ENABLE_VOICE_ACTIVITY_EVENTS: rOpts.googleOptions.enableVoiceActivityEvents
}),
}),
};
}
else if (['aws', 'polly'].includes(vendor)) {
@@ -558,10 +481,9 @@ module.exports = (logger) => {
...(rOpts.vocabularyFilterName && {AWS_VOCABULARY_FILTER_NAME: rOpts.vocabularyFilterName}),
...(rOpts.filterMethod && {AWS_VOCABULARY_FILTER_METHOD: rOpts.filterMethod}),
...(sttCredentials && {
...(sttCredentials.accessKeyId && {AWS_ACCESS_KEY_ID: sttCredentials.accessKeyId}),
...(sttCredentials.secretAccessKey && {AWS_SECRET_ACCESS_KEY: sttCredentials.secretAccessKey}),
AWS_REGION: sttCredentials.region,
...(sttCredentials.sessionToken && {AWS_SESSION_TOKEN: sttCredentials.sessionToken}),
AWS_ACCESS_KEY_ID: sttCredentials.accessKeyId,
AWS_SECRET_ACCESS_KEY: sttCredentials.secretAccessKey,
AWS_REGION: sttCredentials.region
}),
};
}
@@ -588,17 +510,12 @@ module.exports = (logger) => {
...{AZURE_USE_OUTPUT_FORMAT_DETAILED: 1},
...(azureOptions.speechSegmentationSilenceTimeoutMs &&
{AZURE_SPEECH_SEGMENTATION_SILENCE_TIMEOUT_MS: azureOptions.speechSegmentationSilenceTimeoutMs}),
...(azureOptions.languageIdMode &&
{AZURE_LANGUAGE_ID_MODE: azureOptions.languageIdMode}),
...(sttCredentials && {
...(sttCredentials.api_key && {AZURE_SUBSCRIPTION_KEY: sttCredentials.api_key}),
...(sttCredentials.region && {AZURE_REGION: sttCredentials.region}),
}),
...(sttCredentials.use_custom_stt && sttCredentials.custom_stt_endpoint &&
{AZURE_SERVICE_ENDPOINT_ID: sttCredentials.custom_stt_endpoint}),
//azureSttEndpointId overrides sttCredentials.custom_stt_endpoint
...(rOpts.azureSttEndpointId &&
{AZURE_SERVICE_ENDPOINT_ID: rOpts.azureSttEndpointId}),
};
}
else if ('nuance' === vendor) {
@@ -650,24 +567,15 @@ module.exports = (logger) => {
};
}
else if ('deepgram' === vendor) {
let {model} = rOpts;
const {deepgramOptions = {}} = rOpts;
const deepgramUri = deepgramOptions.deepgramSttUri || sttCredentials.deepgram_stt_uri;
const useTls = deepgramOptions.deepgramSttUseTls || sttCredentials.deepgram_stt_use_tls;
/* default to a sensible model if not supplied */
if (!model) {
model = selectDefaultDeepgramModel(task, language);
}
opts = {
...opts,
DEEPGRAM_SPEECH_MODEL: model,
...(deepgramUri && {DEEPGRAM_URI: deepgramUri}),
...(deepgramUri && useTls && {DEEPGRAM_USE_TLS: 1}),
...(sttCredentials.api_key) &&
{DEEPGRAM_API_KEY: sttCredentials.api_key},
...(deepgramOptions.tier) &&
{DEEPGRAM_SPEECH_TIER: deepgramOptions.tier},
...(deepgramOptions.model) &&
{DEEPGRAM_SPEECH_MODEL: deepgramOptions.model},
...(deepgramOptions.punctuate) &&
{DEEPGRAM_SPEECH_ENABLE_AUTOMATIC_PUNCTUATION: 1},
...(deepgramOptions.smartFormatting) &&
@@ -697,9 +605,7 @@ module.exports = (logger) => {
...(deepgramOptions.keywords) &&
{DEEPGRAM_SPEECH_KEYWORDS: deepgramOptions.keywords.join(',')},
...('endpointing' in deepgramOptions) &&
{DEEPGRAM_SPEECH_ENDPOINTING: deepgramOptions.endpointing === false ? 'false' : deepgramOptions.endpointing,
// default DEEPGRAM_SPEECH_UTTERANCE_END_MS is 1000, will be override by user settings later if there is.
DEEPGRAM_SPEECH_UTTERANCE_END_MS: 1000},
{DEEPGRAM_SPEECH_ENDPOINTING: deepgramOptions.endpointing === false ? 'false' : deepgramOptions.endpointing},
...(deepgramOptions.utteranceEndMs) &&
{DEEPGRAM_SPEECH_UTTERANCE_END_MS: deepgramOptions.utteranceEndMs},
...(deepgramOptions.vadTurnoff) &&
@@ -812,26 +718,8 @@ module.exports = (logger) => {
...(rOpts.hints?.length > 0 &&
{ASSEMBLYAI_WORD_BOOST: JSON.stringify(rOpts.hints)})
};
} else if ('verbio' === vendor) {
const {verbioOptions = {}} = rOpts;
opts = {
...opts,
...(sttCredentials.access_token && { VERBIO_ACCESS_TOKEN: sttCredentials.access_token}),
...(sttCredentials.engine_version && {VERBIO_ENGINE_VERSION: sttCredentials.engine_version}),
...(language && {VERBIO_LANGUAGE: language}),
...(verbioOptions.enable_formatting && {VERBIO_ENABLE_FORMATTING: verbioOptions.enable_formatting}),
...(verbioOptions.enable_diarization && {VERBIO_ENABLE_DIARIZATION: verbioOptions.enable_diarization}),
...(verbioOptions.topic && {VERBIO_TOPIC: verbioOptions.topic}),
...(verbioOptions.inline_grammar && {VERBIO_INLINE_GRAMMAR: verbioOptions.inline_grammar}),
...(verbioOptions.grammar_uri && {VERBIO_GRAMMAR_URI: verbioOptions.grammar_uri}),
...(verbioOptions.label && {VERBIO_LABEL: verbioOptions.label}),
...(verbioOptions.recognition_timeout && {VERBIO_RECOGNITION_TIMEOUT: verbioOptions.recognition_timeout}),
...(verbioOptions.speech_complete_timeout &&
{VERBIO_SPEECH_COMPLETE_TIMEOUT: verbioOptions.speech_complete_timeout}),
...(verbioOptions.speech_incomplete_timeout &&
{VERBIO_SPEECH_INCOMPLETE_TIMEOUT: verbioOptions.speech_incomplete_timeout}),
};
} else if (vendor.startsWith('custom:')) {
}
else if (vendor.startsWith('custom:')) {
let {options = {}} = rOpts;
const {auth_token, custom_stt_url} = sttCredentials;
options = {

View File

@@ -56,12 +56,6 @@ class WsRequestor extends BaseRequestor {
}
if (type === 'session:new') this.call_sid = params.callSid;
if (type === 'session:reconnect') {
this._reconnectPromise = new Promise((resolve, reject) => {
this._reconnectResolve = resolve;
this._reconnectReject = reject;
});
}
/* if we have an absolute url, and it is http then do a standard webhook */
if (this._isAbsoluteUrl(url) && url.startsWith('http')) {
@@ -77,23 +71,20 @@ class WsRequestor extends BaseRequestor {
}
/* connect if necessary */
// Buffer an outbound message while the websocket connection is still being
// established. When the caller wants an ack, return a Promise whose
// resolve/reject handles are queued alongside the message so it can be
// settled once the queue is flushed; otherwise queue fire-and-forget.
const queueMsg = () => {
  this.logger.debug(
    `WsRequestor:request(${this.id}) - queueing ${type} message since we are connecting`);
  if (!wantsAck) {
    this.queuedMsg.push({type, hook, params, httpHeaders});
    return;
  }
  return new Promise((resolve, reject) => {
    this.queuedMsg.push({type, hook, params, httpHeaders, promise: {resolve, reject}});
  });
};
if (!this.ws) {
if (this.connectInProgress) {
return queueMsg();
this.logger.debug(
`WsRequestor:request(${this.id}) - queueing ${type} message since we are connecting`);
if (wantsAck) {
const p = new Promise((resolve, reject) => {
this.queuedMsg.push({type, hook, params, httpHeaders, promise: {resolve, reject}});
});
return p;
}
else {
this.queuedMsg.push({type, hook, params, httpHeaders});
}
return;
}
this.connectInProgress = true;
this.logger.debug(`WsRequestor:request(${this.id}) - connecting since we do not have a connection for ${type}`);
@@ -111,10 +102,6 @@ class WsRequestor extends BaseRequestor {
return Promise.reject(err);
}
}
// If jambonz wait for ack from reconnect, queue the msg until reconnect is acked
if (type !== 'session:reconnect' && this._reconnectPromise) {
return queueMsg();
}
assert(this.ws);
/* prepare and send message */
@@ -132,7 +119,7 @@ class WsRequestor extends BaseRequestor {
type,
msgid,
call_sid: this.call_sid,
hook: ['verb:hook', 'session:redirect'].includes(type) ? url : undefined,
hook: type === 'verb:hook' ? url : undefined,
data: {...payload},
...b3
};
@@ -152,18 +139,6 @@ class WsRequestor extends BaseRequestor {
}
};
// Fail every message buffered during (re)connection: reject any queued
// ack promises with the given error, then clear the queue in place.
const rejectQueuedMsgs = (err) => {
  if (this.queuedMsg.length === 0) return;
  for (const {promise} of this.queuedMsg) {
    this.logger.debug(`WsRequestor:request - preparing queued ${type} for rejectQueuedMsgs`);
    if (promise) promise.reject(err);
  }
  this.queuedMsg.length = 0;
};
//this.logger.debug({obj}, `websocket: sending (${url})`);
/* special case: reconnecting before we received ack to session:new */
@@ -204,37 +179,16 @@ class WsRequestor extends BaseRequestor {
this.logger.debug({response}, `WsRequestor:request ${url} succeeded in ${rtt}ms`);
this.stats.histogram('app.hook.ws_response_time', rtt, ['hook_type:app']);
resolve(response);
if (this._reconnectResolve) {
this._reconnectResolve();
}
},
failure: (err) => {
if (this._reconnectReject) {
this._reconnectReject(err);
}
clearTimeout(timer);
reject(err);
}
});
/* send the message */
this.ws.send(JSON.stringify(obj), async() => {
this.ws.send(JSON.stringify(obj), () => {
this.logger.debug({obj}, `WsRequestor:request websocket: sent (${url})`);
// If session:reconnect is waiting for ack, hold here until ack to send queuedMsgs
if (this._reconnectPromise) {
try {
await this._reconnectPromise;
} catch (err) {
// bad thing happened to session:recconnect
rejectQueuedMsgs(err);
this.emit('reconnect-error');
return;
} finally {
this._reconnectPromise = null;
this._reconnectResolve = null;
this._reconnectReject = null;
}
}
sendQueuedMsgs();
});
});
@@ -372,9 +326,7 @@ class WsRequestor extends BaseRequestor {
'WsRequestor:_onSocketClosed time to reconnect');
if (!this.ws && !this.connectInProgress) {
this.connectInProgress = true;
return this._connect()
.catch((err) => this.logger.error('WsRequestor:_onSocketClosed There is error while reconnect', err))
.finally(() => this.connectInProgress = false);
this._connect().catch((err) => this.connectInProgress = false);
}
}, this.backoffMs);
this.backoffMs = this.backoffMs < 2000 ? this.backoffMs * 2 : (this.backoffMs + 2000);
@@ -392,9 +344,7 @@ class WsRequestor extends BaseRequestor {
/* messages must be JSON format */
try {
const obj = JSON.parse(content);
//const {type, msgid, command, call_sid = this.call_sid, queueCommand = false, data} = obj;
const {type, msgid, command, queueCommand = false, data} = obj;
const call_sid = obj.callSid || this.call_sid;
const {type, msgid, command, call_sid = this.call_sid, queueCommand = false, data} = obj;
//this.logger.debug({obj}, 'WsRequestor:request websocket: received');
assert.ok(type, 'type property not supplied');

13221
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,9 +1,9 @@
{
"name": "jambonz-feature-server",
"version": "0.9.0",
"version": "0.8.5",
"main": "app.js",
"engines": {
"node": ">= 18.x"
"node": ">= 10.16.0"
},
"keywords": [
"sip",
@@ -25,57 +25,57 @@
"jslint:fix": "eslint app.js tracer.js lib --fix"
},
"dependencies": {
"@aws-sdk/client-auto-scaling": "^3.549.0",
"@aws-sdk/client-sns": "^3.549.0",
"@jambonz/db-helpers": "^0.9.3",
"@aws-sdk/client-auto-scaling": "^3.360.0",
"@aws-sdk/client-sns": "^3.360.0",
"@jambonz/db-helpers": "^0.9.1",
"@jambonz/http-health-check": "^0.0.1",
"@jambonz/mw-registrar": "^0.2.7",
"@jambonz/realtimedb-helpers": "^0.8.8",
"@jambonz/speech-utils": "^0.1.11",
"@jambonz/stats-collector": "^0.1.10",
"@jambonz/mw-registrar": "^0.2.4",
"@jambonz/realtimedb-helpers": "^0.8.7",
"@jambonz/speech-utils": "^0.0.33",
"@jambonz/stats-collector": "^0.1.9",
"@jambonz/time-series": "^0.2.8",
"@jambonz/verb-specifications": "^0.0.72",
"@opentelemetry/api": "^1.8.0",
"@opentelemetry/exporter-jaeger": "^1.23.0",
"@opentelemetry/exporter-trace-otlp-http": "^0.50.0",
"@opentelemetry/exporter-zipkin": "^1.23.0",
"@opentelemetry/instrumentation": "^0.50.0",
"@opentelemetry/resources": "^1.23.0",
"@opentelemetry/sdk-trace-base": "^1.23.0",
"@opentelemetry/sdk-trace-node": "^1.23.0",
"@opentelemetry/semantic-conventions": "^1.23.0",
"@jambonz/verb-specifications": "^0.0.50",
"@opentelemetry/api": "^1.4.0",
"@opentelemetry/exporter-jaeger": "^1.9.0",
"@opentelemetry/exporter-trace-otlp-http": "^0.35.0",
"@opentelemetry/exporter-zipkin": "^1.9.0",
"@opentelemetry/instrumentation": "^0.35.0",
"@opentelemetry/resources": "^1.9.0",
"@opentelemetry/sdk-trace-base": "^1.9.0",
"@opentelemetry/sdk-trace-node": "^1.9.0",
"@opentelemetry/semantic-conventions": "^1.9.0",
"bent": "^7.3.12",
"debug": "^4.3.4",
"deepcopy": "^2.1.0",
"drachtio-fsmrf": "^3.0.43",
"drachtio-srf": "^4.5.35",
"express": "^4.19.2",
"drachtio-fsmrf": "^3.0.33",
"drachtio-srf": "^4.5.31",
"express": "^4.18.2",
"express-validator": "^7.0.1",
"ip": "^2.0.1",
"moment": "^2.30.1",
"parse-url": "^9.2.0",
"pino": "^8.20.0",
"ip": "^1.1.8",
"moment": "^2.29.4",
"parse-url": "^8.1.0",
"pino": "^8.8.0",
"polly-ssml-split": "^0.1.0",
"proxyquire": "^2.1.3",
"sdp-transform": "^2.14.2",
"short-uuid": "^5.1.0",
"sinon": "^17.0.1",
"sdp-transform": "^2.14.1",
"short-uuid": "^4.2.2",
"sinon": "^15.0.1",
"to-snake-case": "^1.0.0",
"undici": "^6.19.2",
"undici": "^5.26.2",
"uuid-random": "^1.3.2",
"verify-aws-sns-signature": "^0.1.0",
"ws": "^8.17.1",
"ws": "^8.9.0",
"xml2js": "^0.6.2"
},
"devDependencies": {
"clear-module": "^4.1.2",
"eslint": "7.32.0",
"eslint-plugin-promise": "^6.1.1",
"eslint": "^7.32.0",
"eslint-plugin-promise": "^4.3.1",
"nyc": "^15.1.0",
"tape": "^5.7.5"
"tape": "^5.6.1"
},
"optionalDependencies": {
"bufferutil": "^4.0.8",
"utf-8-validate": "^6.0.3"
"bufferutil": "^4.0.6",
"utf-8-validate": "^5.0.8"
}
}

View File

@@ -42,7 +42,7 @@ services:
ipv4_address: 172.38.0.7
drachtio:
image: drachtio/drachtio-server:0.8.25-rc8
image: drachtio/drachtio-server:0.8.24
restart: always
command: drachtio --contact "sip:*;transport=udp" --mtu 4096 --address 0.0.0.0 --port 9022
ports:
@@ -57,7 +57,7 @@ services:
condition: service_healthy
freeswitch:
image: drachtio/drachtio-freeswitch-mrf:0.7.3
image: drachtio/drachtio-freeswitch-mrf:0.6.1
restart: always
command: freeswitch --rtp-range-start 20000 --rtp-range-end 20100
environment:

View File

@@ -347,7 +347,8 @@ test('\'transcribe\' test - deepgram config options altLanguages', async(t) => {
"en-US"
],
"deepgramOptions": {
"model": "nova-2",
"model": "2-ea",
"tier": "nova",
"numerals": true,
"ner": true,
"vadTurnoff": 10,
@@ -407,7 +408,8 @@ test('\'transcribe\' test - deepgram config options altLanguages', async(t) => {
"en-US"
],
"deepgramOptions": {
"model": "nova-2",
"model": "2-ea",
"tier": "nova",
"numerals": true,
"ner": true,
"vadTurnoff": 10,