Compare commits


1 Commit

Author: Dave Horton | SHA1: 3df8b66a09 | Message: add X-Application-Sid in outdials so it ends up in cdr | Date: 2023-06-09 12:46:48 -04:00
51 changed files with 2774 additions and 8287 deletions

View File

@@ -9,7 +9,7 @@ jobs:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 18
node-version: 16
- run: npm ci
- run: npm run jslint
- run: docker pull drachtio/sipp

View File

@@ -1,4 +1,4 @@
# jambonz-feature-server ![Build Status](https://github.com/jambonz/jambonz-feature-server/workflows/CI/badge.svg)
# jambones-feature-server ![Build Status](https://github.com/jambonz/jambonz-feature-server/workflows/CI/badge.svg)
This application implements the core feature server of the jambones platform.
@@ -37,9 +37,6 @@ Configuration is provided via environment variables:
|STATS_PORT| listening port for metrics host|no|
|STATS_PROTOCOL| 'tcp' or 'udp'|no|
|STATS_TELEGRAF| if 1, metrics will be generated in telegraf format|no|
|JAMBONZ_RECORD_WS_BASE_URL| recording websocket URL to send the recording audio|no|
|JAMBONZ_RECORD_WS_USERNAME| recording websocket username|no|
|JAMBONZ_RECORD_WS_PASSWORD| recording websocket password|no|
### running under pm2
Typically, this application runs under [pm2](https://pm2.io) using an [ecosystem.config.js](https://pm2.keymetrics.io/docs/usage/application-declaration/) file similar to this:
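A minimal illustrative sketch of such an ecosystem.config.js (the README's actual example is not included in this hunk; names and values below are placeholders, using environment variables that appear elsewhere in this diff):

```js
module.exports = {
  apps: [{
    name: 'jambonz-feature-server',
    script: 'app.js',
    env: {
      NODE_ENV: 'production',
      JAMBONES_LOGLEVEL: 'info',          // log level consumed by the pino logger in app.js
      JAMBONES_MYSQL_HOST: '127.0.0.1',   // placeholder database settings read by the config module
      JAMBONES_MYSQL_USER: 'jambones',
      JAMBONES_MYSQL_PASSWORD: 'jambones',
      JAMBONES_MYSQL_DATABASE: 'jambones',
      STATS_PROTOCOL: 'udp',              // metrics transport, per the table above
      STATS_TELEGRAF: 1                   // emit metrics in telegraf format
    }
  }]
};
```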

app.js
View File

@@ -20,9 +20,7 @@ const tracer = require('./tracer')(JAMBONES_OTEL_SERVICE_NAME);
const api = require('@opentelemetry/api');
srf.locals = {...srf.locals, otel: {tracer, api}};
const opts = {
level: JAMBONES_LOGLEVEL
};
const opts = {level: JAMBONES_LOGLEVEL};
const pino = require('pino');
const logger = pino(opts, pino.destination({sync: false}));
const {LifeCycleEvents, FS_UUID_SET_NAME} = require('./lib/utils/constants');
@@ -120,15 +118,10 @@ function handle(signal) {
srf.locals.disabled = true;
logger.info(`got signal ${signal}`);
const setName = `${(JAMBONES_CLUSTER_ID || 'default')}:active-fs`;
const fsServiceUrlSetName = `${(JAMBONES_CLUSTER_ID || 'default')}:fs-service-url`;
if (setName && srf.locals.localSipAddress) {
logger.info(`got signal ${signal}, removing ${srf.locals.localSipAddress} from set ${setName}`);
removeFromSet(setName, srf.locals.localSipAddress);
}
if (fsServiceUrlSetName && srf.locals.serviceUrl) {
logger.info(`got signal ${signal}, removing ${srf.locals.serviceUrl} from set ${fsServiceUrlSetName}`);
removeFromSet(fsServiceUrlSetName, srf.locals.serviceUrl);
}
removeFromSet(FS_UUID_SET_NAME, srf.locals.fsUUID);
if (K8S) {
srf.locals.lifecycleEmitter.operationalState = LifeCycleEvents.ScaleIn;

View File

@@ -25,7 +25,7 @@ const JAMBONES_MYSQL_USER = process.env.JAMBONES_MYSQL_USER;
const JAMBONES_MYSQL_PASSWORD = process.env.JAMBONES_MYSQL_PASSWORD;
const JAMBONES_MYSQL_DATABASE = process.env.JAMBONES_MYSQL_DATABASE;
const JAMBONES_MYSQL_PORT = parseInt(process.env.JAMBONES_MYSQL_PORT, 10) || 3306;
const JAMBONES_MYSQL_REFRESH_TTL = parseInt(process.env.JAMBONES_MYSQL_REFRESH_TTL, 10) || 0;
const JAMBONES_MYSQL_REFRESH_TTL = process.env.JAMBONES_MYSQL_REFRESH_TTL;
const JAMBONES_MYSQL_CONNECTION_LIMIT = parseInt(process.env.JAMBONES_MYSQL_CONNECTION_LIMIT, 10) || 10;
/* redis */
@@ -121,9 +121,6 @@ const HTTP_POOL = process.env.HTTP_POOL && parseInt(process.env.HTTP_POOL);
const HTTP_POOLSIZE = parseInt(process.env.HTTP_POOLSIZE, 10) || 10;
const HTTP_PIPELINING = parseInt(process.env.HTTP_PIPELINING, 10) || 1;
const HTTP_TIMEOUT = 10000;
const HTTP_PROXY_IP = process.env.JAMBONES_HTTP_PROXY_IP;
const HTTP_PROXY_PORT = process.env.JAMBONES_HTTP_PROXY_PORT;
const HTTP_PROXY_PROTOCOL = process.env.JAMBONES_HTTP_PROXY_PROTOCOL || 'http';
const OPTIONS_PING_INTERVAL = parseInt(process.env.OPTIONS_PING_INTERVAL, 10) || 30000;
@@ -147,11 +144,6 @@ const JAMBONES_REDIS_SENTINELS = process.env.JAMBONES_REDIS_SENTINELS ? {
username: process.env.JAMBONES_REDIS_SENTINEL_USERNAME
})
} : null;
const JAMBONZ_RECORD_WS_BASE_URL = process.env.JAMBONZ_RECORD_WS_BASE_URL || process.env.JAMBONES_RECORD_WS_BASE_URL;
const JAMBONZ_RECORD_WS_USERNAME = process.env.JAMBONZ_RECORD_WS_USERNAME || process.env.JAMBONES_RECORD_WS_USERNAME;
const JAMBONZ_RECORD_WS_PASSWORD = process.env.JAMBONZ_RECORD_WS_PASSWORD || process.env.JAMBONES_RECORD_WS_PASSWORD;
const JAMBONZ_DISABLE_DIAL_PAI_HEADER = process.env.JAMBONZ_DISABLE_DIAL_PAI_HEADER || false;
const JAMBONES_DISABLE_DIRECT_P2P_CALL = process.env.JAMBONES_DISABLE_DIRECT_P2P_CALL || false;
module.exports = {
JAMBONES_MYSQL_HOST,
@@ -217,9 +209,6 @@ module.exports = {
HTTP_POOLSIZE,
HTTP_PIPELINING,
HTTP_TIMEOUT,
HTTP_PROXY_IP,
HTTP_PROXY_PORT,
HTTP_PROXY_PROTOCOL,
OPTIONS_PING_INTERVAL,
RESPONSE_TIMEOUT_MS,
JAMBONES_WS_HANDSHAKE_TIMEOUT_MS,
@@ -230,10 +219,5 @@ module.exports = {
MICROSOFT_REGION,
MICROSOFT_API_KEY,
SONIOX_API_KEY,
DEEPGRAM_API_KEY,
JAMBONZ_RECORD_WS_BASE_URL,
JAMBONZ_RECORD_WS_USERNAME,
JAMBONZ_RECORD_WS_PASSWORD,
JAMBONZ_DISABLE_DIAL_PAI_HEADER,
JAMBONES_DISABLE_DIRECT_P2P_CALL
DEEPGRAM_API_KEY
};

View File

@@ -1,54 +0,0 @@
const appsMap = {
queue: {
// Dummy hook to follow later feature server logic.
call_hook: {
url: 'https://jambonz.org',
method: 'GET'
},
account_sid: '',
app_json: [{
verb: 'dequeue',
name: '',
timeout: 5
}]
},
user: {
// Dummy hook to follow later feature server logic.
call_hook: {
url: 'https://jambonz.org',
method: 'GET'
},
account_sid: '',
app_json: [{
verb: 'dial',
callerId: '',
answerOnBridge: true,
target: [
{
type: 'user',
name: ''
}
]
}]
}
};
const createJambonzApp = (type, {account_sid, name, caller_id}) => {
const app = {...appsMap[type]};
app.account_sid = account_sid;
switch (type) {
case 'queue':
app.app_json[0].name = name;
break;
case 'user':
app.app_json[0].callerId = caller_id;
app.app_json[0].target[0].name = name;
break;
}
app.app_json = JSON.stringify(app.app_json);
return app;
};
module.exports = {
createJambonzApp
};
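For illustration, a brief usage sketch of createJambonzApp as defined above (the account sid, queue name, and user address are placeholders):

```js
const { createJambonzApp } = require('./dynamic-apps');

// generate an ad-hoc application that dequeues callers from the 'support' queue
const queueApp = createJambonzApp('queue', {
  account_sid: 'a1b2c3d4-0000-0000-0000-000000000000',   // placeholder account sid
  name: 'support'
});
// queueApp.app_json is a JSON string: [{"verb":"dequeue","name":"support","timeout":5}]

// generate an ad-hoc application that dials a registered user
const userApp = createJambonzApp('user', {
  account_sid: 'a1b2c3d4-0000-0000-0000-000000000000',
  name: 'alice@sip.example.com',
  caller_id: '15551234567'
});
// userApp.app_json contains a dial verb targeting {type: 'user', name: 'alice@sip.example.com'}
```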

View File

@@ -5,331 +5,268 @@ const CallInfo = require('../../session/call-info');
const {CallDirection, CallStatus} = require('../../utils/constants');
const uuidv4 = require('uuid-random');
const SipError = require('drachtio-srf').SipError;
const { validationResult } = require('express-validator');
const { validate } = require('@jambonz/verb-specifications');
const sysError = require('./error');
const HttpRequestor = require('../../utils/http-requestor');
const WsRequestor = require('../../utils/ws-requestor');
const RootSpan = require('../../utils/call-tracer');
const dbUtils = require('../../utils/db-utils');
const { mergeSdpMedia, extractSdpMedia } = require('../../utils/sdp-utils');
const { createCallSchema } = require('../schemas/create-call');
const removeNullProperties = (obj) => (Object.keys(obj).forEach((key) => obj[key] === null && delete obj[key]), obj);
const removeNulls = (req, res, next) => {
req.body = removeNullProperties(req.body);
next();
};
router.post('/', async(req, res) => {
const {logger} = req.app.locals;
const accountSid = req.body.account_sid;
const {srf} = require('../../..');
router.post('/',
removeNulls,
createCallSchema,
async(req, res) => {
const {logger} = req.app.locals;
const errors = validationResult(req);
if (!errors.isEmpty()) {
logger.info({errors: errors.array()}, 'POST /Calls: validation errors');
return res.status(400).json({ errors: errors.array() });
}
const accountSid = req.body.account_sid;
const {srf} = require('../../..');
const app_json = req.body['app_json'];
try {
// app_json is created only by api-server.
if (app_json) {
// if available, delete from req before creating task
delete req.body.app_json;
// validate possible app_json via verb-specifications
validate(logger, JSON.parse(app_json));
}
} catch (err) {
logger.debug({ err }, `invalid app_json: ${err.message}`);
}
logger.debug({body: req.body}, 'got createCall request');
try {
let uri, cs, to;
const restDial = makeTask(logger, { 'rest:dial': req.body });
restDial.appJson = app_json;
const {lookupAccountDetails, lookupCarrierByPhoneNumber, lookupCarrier} = dbUtils(logger, srf);
const {
lookupAppBySid
} = srf.locals.dbHelpers;
const {getSBC, getFreeswitch} = srf.locals;
const sbcAddress = getSBC();
if (!sbcAddress) throw new Error('no available SBCs for outbound call creation');
const target = restDial.to;
const opts = {
callingNumber: restDial.from,
...(restDial.callerName && {callingName: restDial.callerName}),
headers: req.body.headers || {}
};
logger.debug({body: req.body}, 'got createCall request');
try {
let uri, cs, to;
// app_json is created only by api-server.
// if it's available, take it and delete it before creating the task
const app_json = req.body.app_json;
delete req.body.app_json;
const restDial = makeTask(logger, {'rest:dial': req.body});
restDial.appJson = app_json;
const {lookupAccountDetails, lookupCarrierByPhoneNumber, lookupCarrier} = dbUtils(logger, srf);
const {getSBC, getFreeswitch} = srf.locals;
const sbcAddress = getSBC();
if (!sbcAddress) throw new Error('no available SBCs for outbound call creation');
const target = restDial.to;
const opts = {
callingNumber: restDial.from,
...(restDial.callerName && {callingName: restDial.callerName}),
headers: req.body.headers || {}
};
const {lookupTeamsByAccount, lookupAccountBySid} = srf.locals.dbHelpers;
const account = await lookupAccountBySid(req.body.account_sid);
const accountInfo = await lookupAccountDetails(req.body.account_sid);
const callSid = uuidv4();
const application = req.body.application_sid ? await lookupAppBySid(req.body.application_sid) : null;
const record_all_calls = account.record_all_calls || (application && application.record_all_calls);
const recordOutputFormat = account.record_format || 'mp3';
const rootSpan = new RootSpan('rest-call', {
callSid,
accountSid,
...(req.body?.application_sid && {'X-Application-Sid': req.body.application_sid})
});
const {lookupTeamsByAccount, lookupAccountBySid} = srf.locals.dbHelpers;
const account = await lookupAccountBySid(req.body.account_sid);
const accountInfo = await lookupAccountDetails(req.body.account_sid);
const callSid = uuidv4();
opts.headers = {
...opts.headers,
'X-Jambonz-Routing': target.type,
'X-Jambonz-FS-UUID': srf.locals.fsUUID,
'X-Call-Sid': callSid,
'X-Account-Sid': accountSid,
'X-Trace-ID': rootSpan.traceId,
...(req.body?.application_sid && {'X-Application-Sid': req.body.application_sid}),
...(restDial.fromHost && {'X-Preferred-From-Host': restDial.fromHost}),
...(record_all_calls && {'X-Record-All-Calls': recordOutputFormat})
};
opts.headers = {
...opts.headers,
'X-Jambonz-Routing': target.type,
'X-Jambonz-FS-UUID': srf.locals.fsUUID,
'X-Call-Sid': callSid,
'X-Account-Sid': accountSid,
...(req.body?.application_sid && {'X-Application-Sid': req.body.application_sid}),
...(restDial.fromHost && {'X-Preferred-From-Host': restDial.fromHost})
};
switch (target.type) {
case 'phone':
case 'teams':
uri = `sip:${target.number}@${sbcAddress}`;
to = target.number;
if ('teams' === target.type) {
const obj = await lookupTeamsByAccount(accountSid);
if (!obj) throw new Error('dial to ms teams not allowed; account must first be configured with teams info');
Object.assign(opts.headers, {
'X-MS-Teams-FQDN': obj.ms_teams_fqdn,
'X-MS-Teams-Tenant-FQDN': target.tenant || obj.tenant_fqdn
});
if (target.vmail === true) uri = `${uri};opaque=app:voicemail`;
}
break;
case 'user':
uri = `sip:${target.name}`;
to = target.name;
if (target.overrideTo) {
Object.assign(opts.headers, {
'X-Override-To': target.overrideTo
});
}
break;
case 'sip':
uri = target.sipUri;
to = uri;
break;
}
if (target.type === 'phone' && target.trunk) {
const voip_carrier_sid = await lookupCarrier(req.body.account_sid, target.trunk);
logger.info(
`createCall: selected ${voip_carrier_sid} for requested carrier: ${target.trunk || 'unspecified'})`);
if (voip_carrier_sid) {
opts.headers['X-Requested-Carrier-Sid'] = voip_carrier_sid;
switch (target.type) {
case 'phone':
case 'teams':
uri = `sip:${target.number}@${sbcAddress}`;
to = target.number;
if ('teams' === target.type) {
const obj = await lookupTeamsByAccount(accountSid);
if (!obj) throw new Error('dial to ms teams not allowed; account must first be configured with teams info');
Object.assign(opts.headers, {
'X-MS-Teams-FQDN': obj.ms_teams_fqdn,
'X-MS-Teams-Tenant-FQDN': target.tenant || obj.tenant_fqdn
});
if (target.vmail === true) uri = `${uri};opaque=app:voicemail`;
}
}
break;
case 'user':
uri = `sip:${target.name}`;
to = target.name;
if (target.overrideTo) {
Object.assign(opts.headers, {
'X-Override-To': target.overrideTo
});
}
break;
case 'sip':
uri = target.sipUri;
to = uri;
break;
}
/**
if (target.type === 'phone' && target.trunk) {
const voip_carrier_sid = await lookupCarrier(req.body.account_sid, target.trunk);
logger.info(
`createCall: selected ${voip_carrier_sid} for requested carrier: ${target.trunk || 'unspecified'})`);
if (voip_carrier_sid) {
opts.headers['X-Requested-Carrier-Sid'] = voip_carrier_sid;
}
}
/**
* trunk isn't specified,
* check if from-number matches any existing numbers on Jambonz
* */
if (target.type === 'phone' && !target.trunk) {
const str = restDial.from || '';
const callingNumber = str.startsWith('+') ? str.substring(1) : str;
const voip_carrier_sid = await lookupCarrierByPhoneNumber(req.body.account_sid, callingNumber);
logger.info(
`createCall: selected ${voip_carrier_sid} for requested phone number: ${callingNumber || 'unspecified'})`);
if (voip_carrier_sid) {
opts.headers['X-Requested-Carrier-Sid'] = voip_carrier_sid;
}
if (target.type === 'phone' && !target.trunk) {
const str = restDial.from || '';
const callingNumber = str.startsWith('+') ? str.substring(1) : str;
const voip_carrier_sid = await lookupCarrierByPhoneNumber(req.body.account_sid, callingNumber);
logger.info(
`createCall: selected ${voip_carrier_sid} for requested phone number: ${callingNumber || 'unspecified'})`);
if (voip_carrier_sid) {
opts.headers['X-Requested-Carrier-Sid'] = voip_carrier_sid;
}
}
/* create endpoint for outdial */
const ms = getFreeswitch();
if (!ms) throw new Error('no available Freeswitch for outbound call creation');
const ep = await ms.createEndpoint();
logger.debug(`createCall: successfully allocated endpoint, sending INVITE to ${sbcAddress}`);
/* create endpoint for outdial */
const ms = getFreeswitch();
if (!ms) throw new Error('no available Freeswitch for outbound call creation');
const ep = await ms.createEndpoint();
logger.debug(`createCall: successfully allocated endpoint, sending INVITE to ${sbcAddress}`);
/* launch outdial */
let sdp, sipLogger;
let dualEp;
let localSdp = ep.local.sdp;
if (req.body.dual_streams) {
dualEp = await ms.createEndpoint();
localSdp = mergeSdpMedia(localSdp, dualEp.local.sdp);
/* launch outdial */
let sdp, sipLogger;
const connectStream = async(remoteSdp) => {
if (remoteSdp !== sdp) {
ep.modify(sdp = remoteSdp);
return true;
}
const connectStream = async(remoteSdp) => {
if (remoteSdp !== sdp) {
sdp = remoteSdp;
if (req.body.dual_streams) {
const [sdpLegA, sdpLegB] = extractSdpMedia(remoteSdp);
await ep.modify(sdpLegA);
await dualEp.modify(sdpLegB);
await ep.bridge(dualEp);
} else {
ep.modify(sdp);
}
return true;
}
return false;
};
Object.assign(opts, {
proxy: `sip:${sbcAddress}`,
localSdp
});
if (target.auth) opts.auth = target.auth;
return false;
};
Object.assign(opts, {
proxy: `sip:${sbcAddress}`,
localSdp: ep.local.sdp
});
if (target.auth) opts.auth = target.auth;
/**
/**
* create our application object -
* not from the database as per an inbound call,
* but from the provided params in the request
*/
const app = req.body;
const app = req.body;
/**
/**
* attach our requestor and notifier objects
* these will be used for all http requests we make during this call
*/
if ('WS' === app.call_hook?.method || /^wss?:/.test(app.call_hook.url)) {
logger.debug({call_hook: app.call_hook}, 'creating websocket for call hook');
app.requestor = new WsRequestor(logger, account.account_sid, app.call_hook, account.webhook_secret) ;
if (app.call_hook.url === app.call_status_hook.url || !app.call_status_hook?.url) {
logger.debug('reusing websocket for call status hook');
app.notifier = app.requestor;
}
}
else {
logger.debug({call_hook: app.call_hook}, 'creating http client for call hook');
app.requestor = new HttpRequestor(logger, account.account_sid, app.call_hook, account.webhook_secret);
}
if (!app.notifier && app.call_status_hook) {
app.notifier = new HttpRequestor(logger, account.account_sid, app.call_status_hook, account.webhook_secret);
logger.debug({call_hook: app.call_hook}, 'creating http client for call status hook');
}
else if (!app.notifier) {
logger.debug('creating null call status hook');
app.notifier = {request: () => {}, close: () => {}};
if ('WS' === app.call_hook?.method || /^wss?:/.test(app.call_hook.url)) {
logger.debug({call_hook: app.call_hook}, 'creating websocket for call hook');
app.requestor = new WsRequestor(logger, account.account_sid, app.call_hook, account.webhook_secret) ;
if (app.call_hook.url === app.call_status_hook.url || !app.call_status_hook?.url) {
logger.debug('reusing websocket for call status hook');
app.notifier = app.requestor;
}
}
else {
logger.debug({call_hook: app.call_hook}, 'creating http client for call hook');
app.requestor = new HttpRequestor(logger, account.account_sid, app.call_hook, account.webhook_secret);
}
if (!app.notifier && app.call_status_hook) {
app.notifier = new HttpRequestor(logger, account.account_sid, app.call_status_hook, account.webhook_secret);
logger.debug({call_hook: app.call_hook}, 'creating http client for call status hook');
}
else if (!app.notifier) {
logger.debug('creating null call status hook');
app.notifier = {request: () => {}, close: () => {}};
}
/* now launch the outdial */
try {
const dlg = await srf.createUAC(uri, {...opts, followRedirects: true, keepUriOnRedirect: true}, {
cbRequest: (err, inviteReq) => {
/* now launch the outdial */
try {
const dlg = await srf.createUAC(uri, {...opts, followRedirects: true, keepUriOnRedirect: true}, {
cbRequest: (err, inviteReq) => {
/* in case of 302 redirect, this gets called twice, ignore the second
except to update the req so that it can later be canceled if need be
*/
if (res.headersSent) {
logger.info(`create-call: got redirect, updating request to new call-id ${req.get('Call-ID')}`);
if (cs) cs.req = inviteReq;
return;
}
if (err) {
logger.error(err, 'createCall Error creating call');
res.status(500).send('Call Failure');
return;
}
inviteReq.srf = srf;
inviteReq.locals = {
...(inviteReq || {}),
callSid,
application_sid: app.application_sid
};
/* ok our outbound INVITE is in flight */
const tasks = [restDial];
sipLogger = logger.child({
callSid,
callId: inviteReq.get('Call-ID'),
accountSid,
traceId: rootSpan.traceId
});
app.requestor.logger = app.notifier.logger = sipLogger;
const callInfo = new CallInfo({
direction: CallDirection.Outbound,
req: inviteReq,
to,
tag: app.tag,
callSid,
accountSid: req.body.account_sid,
applicationSid: app.application_sid,
traceId: rootSpan.traceId
});
cs = new RestCallSession({
logger: sipLogger,
application: app,
srf,
req: inviteReq,
ep,
ep2: dualEp,
tasks,
callInfo,
accountInfo,
rootSpan
});
cs.exec(req);
res.status(201).json({sid: cs.callSid, callId: inviteReq.get('Call-ID')});
sipLogger.info({sid: cs.callSid, callId: inviteReq.get('Call-ID')},
`outbound REST call attempt to ${JSON.stringify(target)} has been sent`);
},
cbProvisional: (prov) => {
const callStatus = prov.body ? CallStatus.EarlyMedia : CallStatus.Ringing;
if ([180, 183].includes(prov.status) && prov.body) connectStream(prov.body);
restDial.emit('callStatus', prov.status, !!prov.body);
cs.emit('callStatusChange', {callStatus, sipStatus: prov.status});
if (res.headersSent) {
logger.info(`create-call: got redirect, updating request to new call-id ${req.get('Call-ID')}`);
if (cs) cs.req = inviteReq;
return;
}
});
connectStream(dlg.remote.sdp);
cs.emit('callStatusChange', {
callStatus: CallStatus.InProgress,
sipStatus: 200,
sipReason: 'OK'
});
restDial.emit('callStatus', 200);
restDial.emit('connect', dlg);
}
catch (err) {
let callStatus = CallStatus.Failed;
if (err instanceof SipError) {
if ([486, 603].includes(err.status)) callStatus = CallStatus.Busy;
else if (487 === err.status) callStatus = CallStatus.NoAnswer;
if (sipLogger) sipLogger.info(`REST outdial failed with ${err.status}`);
else console.log(`REST outdial failed with ${err.status}`);
if (cs) cs.emit('callStatusChange', {
callStatus,
sipStatus: err.status,
sipReason: err.reason
if (err) {
logger.error(err, 'createCall Error creating call');
res.status(500).send('Call Failure');
return;
}
inviteReq.srf = srf;
inviteReq.locals = {
...(inviteReq || {}),
callSid,
application_sid: app.application_sid
};
/* ok our outbound INVITE is in flight */
const tasks = [restDial];
const rootSpan = new RootSpan('rest-call', inviteReq);
sipLogger = logger.child({
callSid,
callId: inviteReq.get('Call-ID'),
accountSid,
traceId: rootSpan.traceId
});
cs.callGone = true;
}
else {
if (cs) cs.emit('callStatusChange', {
callStatus,
sipStatus: 500,
sipReason: 'Internal Server Error'
app.requestor.logger = app.notifier.logger = sipLogger;
const callInfo = new CallInfo({
direction: CallDirection.Outbound,
req: inviteReq,
to,
tag: app.tag,
callSid,
accountSid: req.body.account_sid,
applicationSid: app.application_sid,
traceId: rootSpan.traceId
});
if (sipLogger) sipLogger.error({err}, 'REST outdial failed');
else console.error(err);
cs = new RestCallSession({
logger: sipLogger,
application: app,
srf,
req: inviteReq,
ep,
tasks,
callInfo,
accountInfo,
rootSpan
});
cs.exec(req);
res.status(201).json({sid: cs.callSid, callId: inviteReq.get('Call-ID')});
sipLogger.info({sid: cs.callSid, callId: inviteReq.get('Call-ID')},
`outbound REST call attempt to ${JSON.stringify(target)} has been sent`);
},
cbProvisional: (prov) => {
const callStatus = prov.body ? CallStatus.EarlyMedia : CallStatus.Ringing;
if ([180, 183].includes(prov.status) && prov.body) connectStream(prov.body);
restDial.emit('callStatus', prov.status, !!prov.body);
cs.emit('callStatusChange', {callStatus, sipStatus: prov.status});
}
ep.destroy();
if (dualEp) {
dualEp.destroy();
}
setTimeout(restDial.kill.bind(restDial, cs), 5000);
}
} catch (err) {
sysError(logger, res, err);
});
connectStream(dlg.remote.sdp);
cs.emit('callStatusChange', {
callStatus: CallStatus.InProgress,
sipStatus: 200,
sipReason: 'OK'
});
restDial.emit('callStatus', 200);
restDial.emit('connect', dlg);
}
});
catch (err) {
let callStatus = CallStatus.Failed;
if (err instanceof SipError) {
if ([486, 603].includes(err.status)) callStatus = CallStatus.Busy;
else if (487 === err.status) callStatus = CallStatus.NoAnswer;
if (sipLogger) sipLogger.info(`REST outdial failed with ${err.status}`);
else console.log(`REST outdial failed with ${err.status}`);
if (cs) cs.emit('callStatusChange', {
callStatus,
sipStatus: err.status,
sipReason: err.reason
});
}
else {
if (cs) cs.emit('callStatusChange', {
callStatus,
sipStatus: 500,
sipReason: 'Internal Server Error'
});
if (sipLogger) sipLogger.error({err}, 'REST outdial failed');
else console.error(err);
}
ep.destroy();
setTimeout(restDial.kill.bind(restDial), 5000);
}
} catch (err) {
sysError(logger, res, err);
}
});
module.exports = router;

View File

@@ -9,29 +9,25 @@ const {CallStatus, CallDirection} = require('../../utils/constants');
*/
function retrieveCallSession(callSid, opts) {
if (opts.call_status_hook && !opts.call_hook) {
throw new DbErrorBadRequest(
`call_status_hook can be updated only when call_hook is also being updated for call_sid ${callSid}`);
throw new DbErrorBadRequest('call_status_hook can be updated only when call_hook is also being updated');
}
const cs = sessionTracker.get(callSid);
if (!cs) {
throw new DbErrorUnprocessableRequest(`call session is gone for call_sid ${callSid}`);
throw new DbErrorUnprocessableRequest('call session is gone');
}
if (opts.call_status === CallStatus.Completed && !cs.hasStableDialog) {
throw new DbErrorUnprocessableRequest(
`current call state is incompatible with requested action for call_sid ${callSid}`);
throw new DbErrorUnprocessableRequest('current call state is incompatible with requested action');
}
else if (opts.call_status === CallStatus.NoAnswer) {
if (cs.direction === CallDirection.Outbound) {
if (!cs.isOutboundCallRinging) {
throw new DbErrorUnprocessableRequest(
`current call state is incompatible with requested action for call_sid ${callSid}`);
throw new DbErrorUnprocessableRequest('current call state is incompatible with requested action');
}
}
else {
if (cs.isInboundCallAnswered) {
throw new DbErrorUnprocessableRequest(
`current call state is incompatible with requested action for call_sid ${callSid}`);
throw new DbErrorUnprocessableRequest('current call state is incompatible with requested action');
}
}
}

View File

@@ -1,114 +0,0 @@
const { checkSchema } = require('express-validator');
/**
* @path api-server {{base_url}}/v1/Accounts/:account_sid/Calls
* @see https://api.jambonz.org/#243a2edd-7999-41db-bd0d-08082bbab401
*/
const createCallSchema = checkSchema({
application_sid: {
isString: true,
optional: true,
isLength: { options: { min: 36, max: 36 } },
errorMessage: 'Invalid application_sid',
},
answerOnBridge: {
isBoolean: true,
optional: true,
errorMessage: 'Invalid answerOnBridge',
},
from: {
errorMessage: 'Invalid from',
isString: true,
isLength: {
options: { min: 1, max: 256 },
},
},
fromHost: {
isString: true,
optional: true,
errorMessage: 'Invalid fromHost',
},
to: {
errorMessage: 'Invalid to',
isObject: true,
},
callerName: {
isString: true,
optional: true,
errorMessage: 'Invalid callerName',
},
amd: {
isObject: true,
optional: true,
},
tag: {
isObject: true,
optional: true,
errorMessage: 'Invalid tag',
},
'tag.*': {
trim: true,
escape: true,
stripLow: true,
},
app_json: {
isString: true,
optional: true,
errorMessage: 'Invalid app_json',
},
account_sid: {
isString: true,
optional: true,
errorMessage: 'Invalid account_sid',
isLength: { options: { min: 36, max: 36 } },
},
timeout: {
isInt: true,
optional: true,
errorMessage: 'Invalid timeout',
},
timeLimit: {
isInt: true,
optional: true,
errorMessage: 'Invalid timeLimit',
},
call_hook: {
isObject: true,
optional: true,
errorMessage: 'Invalid call_hook',
},
call_status_hook: {
isObject: true,
optional: true,
errorMessage: 'Invalid call_status_hook',
},
speech_synthesis_vendor: {
isString: true,
optional: true,
errorMessage: 'Invalid speech_synthesis_vendor',
},
speech_synthesis_language: {
isString: true,
optional: true,
errorMessage: 'Invalid speech_synthesis_language',
},
speech_synthesis_voice: {
isString: true,
optional: true,
errorMessage: 'Invalid speech_synthesis_voice',
},
speech_recognizer_vendor: {
isString: true,
optional: true,
errorMessage: 'Invalid speech_recognizer_vendor',
},
speech_recognizer_language: {
isString: true,
optional: true,
errorMessage: 'Invalid speech_recognizer_language',
}
}, ['body']);
module.exports = {
createCallSchema
};
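For illustration, a request body that would satisfy this schema (all values are placeholders; the shape of `to` follows the phone/user/sip targets handled by the create-call route shown earlier):

```js
// POST {{base_url}}/v1/Accounts/:account_sid/Calls
const body = {
  application_sid: '0e0681b0-d49f-4fb8-b973-b5a3c6758de1',   // placeholder 36-character sid
  from: '15551234567',
  to: { type: 'phone', number: '15557654321' },              // or {type: 'user', name: ...} / {type: 'sip', sipUri: ...}
  callerName: 'Example Caller',
  timeout: 30,
  call_hook: { url: 'https://example.com/call-hook', method: 'POST' },
  call_status_hook: { url: 'https://example.com/call-status', method: 'POST' },
  tag: { campaign: 'demo' }
};
```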

View File

@@ -12,9 +12,7 @@ const RootSpan = require('./utils/call-tracer');
const listTaskNames = require('./utils/summarize-tasks');
const {
JAMBONES_MYSQL_REFRESH_TTL,
JAMBONES_DISABLE_DIRECT_P2P_CALL
} = require('./config');
const { createJambonzApp } = require('./dynamic-apps');
module.exports = function(srf, logger) {
const {
@@ -22,20 +20,17 @@ module.exports = function(srf, logger) {
lookupAppByRegex,
lookupAppBySid,
lookupAppByRealm,
lookupAppByTeamsTenant,
registrar
lookupAppByTeamsTenant
} = srf.locals.dbHelpers;
const {
writeAlerts,
AlertType
} = srf.locals;
const {lookupAccountDetails, lookupGoogleCustomVoice} = dbUtils(logger, srf);
const {lookupAccountDetails} = dbUtils(logger, srf);
async function initLocals(req, res, next) {
function initLocals(req, res, next) {
const callId = req.get('Call-ID');
const uri = parseUri(req.uri);
logger.info({
uri,
callId,
callingNumber: req.callingNumber,
calledNumber: req.calledNumber
@@ -47,39 +42,12 @@ module.exports = function(srf, logger) {
const callSid = req.has('X-Retain-Call-Sid') ? req.get('X-Retain-Call-Sid') : uuidv4();
const account_sid = req.get('X-Account-Sid');
req.locals = {callSid, account_sid, callId};
if (req.has('X-Authenticated-User')) req.locals.originatingUser = req.get('X-Authenticated-User');
// check for call to application
if (uri.user?.startsWith('app-') && req.locals.originatingUser) {
const application_sid = uri.user.match(/app-(.*)/)[1];
logger.debug(`got application from Request URI header: ${application_sid}`);
req.locals.application_sid = application_sid;
} else if (req.has('X-Application-Sid')) {
if (req.has('X-Application-Sid')) {
const application_sid = req.get('X-Application-Sid');
logger.debug(`got application from X-Application-Sid header: ${application_sid}`);
req.locals.application_sid = application_sid;
}
// check for call to queue
if (uri.user?.startsWith('queue-') && req.locals.originatingUser) {
const queue_name = uri.user.match(/queue-(.*)/)[1];
logger.debug(`got Queue from Request URI header: ${queue_name}`);
req.locals.queue_name = queue_name;
}
// check for call to registered user
if (!JAMBONES_DISABLE_DIRECT_P2P_CALL && req.locals.originatingUser) {
const arr = /^(.*)@(.*)/.exec(req.locals.originatingUser);
if (arr) {
const sipRealm = arr[2];
const called_user = `${req.calledNumber}@${sipRealm}`;
const reg = await registrar.query(called_user);
if (reg) {
logger.debug(`got called Number is a registered user: ${called_user}`);
req.locals.called_user = called_user;
}
}
}
if (req.has('X-Authenticated-User')) req.locals.originatingUser = req.get('X-Authenticated-User');
if (req.has('X-MS-Teams-Tenant-FQDN')) req.locals.msTeamsTenant = req.get('X-MS-Teams-Tenant-FQDN');
if (req.has('X-Cisco-Recording-Participant')) {
const ciscoParticipants = req.get('X-Cisco-Recording-Participant');
@@ -216,24 +184,15 @@ module.exports = function(srf, logger) {
const {span} = rootSpan.startChildSpan('lookupApplication');
try {
let app;
if (req.locals.queue_name) {
logger.debug(`calling to queue ${req.locals.queue_name}, generating queue app`);
app = createJambonzApp('queue', {account_sid, name: req.locals.queue_name});
} else if (req.locals.called_user) {
logger.debug(`calling to registered user ${req.locals.called_user}, generating dial app`);
app = createJambonzApp('user',
{account_sid, name: req.locals.called_user, caller_id: req.locals.callingNumber});
} else if (req.locals.application_sid) {
app = await lookupAppBySid(req.locals.application_sid);
} else if (req.locals.originatingUser) {
if (req.locals.application_sid) app = await lookupAppBySid(req.locals.application_sid);
else if (req.locals.originatingUser) {
const arr = /^(.*)@(.*)/.exec(req.locals.originatingUser);
if (arr) {
const sipRealm = arr[2];
logger.debug(`looking for device calling app for realm ${sipRealm}`);
app = await lookupAppByRealm(sipRealm);
if (app) {
logger.debug({app}, `retrieved device calling app for realm ${sipRealm}`);
}
if (app) logger.debug({app}, `retrieved device calling app for realm ${sipRealm}`);
}
}
else if (req.locals.msTeamsTenant) {
@@ -298,22 +257,7 @@ module.exports = function(srf, logger) {
app2.requestor = new HttpRequestor(logger, account_sid, app.call_hook, accountInfo.account.webhook_secret);
if (app.call_status_hook) app2.notifier = new HttpRequestor(logger, account_sid, app.call_status_hook,
accountInfo.account.webhook_secret);
else app2.notifier = {request: () => {}, close: () => {}};
}
// Resolve application.speech_synthesis_voice if it's a custom voice
if (app2.speech_synthesis_vendor === 'google' && app2.speech_synthesis_voice.startsWith('custom_')) {
const arr = /custom_(.*)/.exec(app2.speech_synthesis_voice);
if (arr) {
const google_custom_voice_sid = arr[1];
const [custom_voice] = await lookupGoogleCustomVoice(google_custom_voice_sid);
if (custom_voice) {
app2.speech_synthesis_voice = {
reportedUsage: custom_voice.reported_usage,
model: custom_voice.model
};
}
}
else app2.notifier = {request: () => {}};
}
req.locals.application = app2;
@@ -355,30 +299,22 @@ module.exports = function(srf, logger) {
if (app.app_json) {
json = JSON.parse(app.app_json);
} else {
const defaults = {
synthesizer: {
vendor: app.speech_synthesis_vendor,
...(app.speech_synthesis_label && {label: app.speech_synthesis_label}),
language: app.speech_synthesis_language,
voice: app.speech_synthesis_voice,
...(app.fallback_speech_synthesis_vendor && {fallback_vendor: app.fallback_speech_synthesis_vendor}),
...(app.fallback_speech_synthesis_label && {fallback_label: app.fallback_speech_synthesis_label}),
...(app.fallback_speech_synthesis_language && {fallback_language: app.fallback_speech_synthesis_language}),
...(app.fallback_speech_synthesis_voice && {fallback_voice: app.fallback_speech_synthesis_voice})
},
recognizer: {
vendor: app.speech_recognizer_vendor,
...(app.speech_synthesis_label && {label: app.speech_synthesis_label}),
language: app.speech_recognizer_language,
...(app.fallback_speech_recognizer_vendor && {fallback_vendor: app.fallback_speech_recognizer_vendor}),
...(app.fallback_speech_recognizer_label && {fallback_label: app.fallback_speech_recognizer_label}),
...(app.fallback_speech_recognizer_language && {fallback_language: app.fallback_speech_recognizer_language})
}
};
const params = Object.assign(['POST', 'WS'].includes(app.call_hook.method) ? { sip: req.msg } : {},
req.locals.callInfo,
{ service_provider_sid: req.locals.service_provider_sid },
{ defaults });
{
defaults: {
synthesizer: {
vendor: app.speech_synthesis_vendor,
language: app.speech_synthesis_language,
voice: app.speech_synthesis_voice
},
recognizer: {
vendor: app.speech_recognizer_vendor,
language: app.speech_recognizer_language
}
}
});
logger.debug({ params }, 'sending initial webhook');
const obj = rootSpan.startChildSpan('performAppWebhook');
span = obj.span;
@@ -386,7 +322,6 @@ module.exports = function(srf, logger) {
const httpHeaders = b3 && { b3 };
json = await app.requestor.request('session:new', app.call_hook, params, httpHeaders);
}
app.tasks = normalizeJambones(logger, json).map((tdata) => makeTask(logger, tdata));
span?.setAttributes({
'http.statusCode': 200,

View File

@@ -19,10 +19,7 @@ const HttpRequestor = require('../utils/http-requestor');
const WsRequestor = require('../utils/ws-requestor');
const {
JAMBONES_INJECT_CONTENT,
AWS_REGION,
JAMBONZ_RECORD_WS_BASE_URL,
JAMBONZ_RECORD_WS_USERNAME,
JAMBONZ_RECORD_WS_PASSWORD,
AWS_REGION
} = require('../config');
const BADPRECONDITIONS = 'preconditions not met';
const CALLER_CANCELLED_ERR_MSG = 'Response not sent due to unknown transaction';
@@ -135,11 +132,6 @@ class CallSession extends Emitter {
return this.callInfo.callStatus;
}
get isBackGroundListen() {
return !(this.backgroundListenTask === null ||
this.backgroundListenTask === undefined);
}
/**
* SIP call-id for the call
*/
@@ -179,30 +171,6 @@ class CallSession extends Emitter {
set speechSynthesisVendor(vendor) {
this.application.speech_synthesis_vendor = vendor;
}
get fallbackSpeechSynthesisVendor() {
return this.application.fallback_speech_synthesis_vendor;
}
set fallbackSpeechSynthesisVendor(vendor) {
this.application.fallback_speech_synthesis_vendor = vendor;
}
/**
* default label to use for speech synthesis if not provided in the app
*/
get speechSynthesisLabel() {
return this.application.speech_synthesis_label;
}
set speechSynthesisLabel(label) {
this.application.speech_synthesis_label = label;
}
get fallbackSpeechSynthesisLabel() {
return this.application.fallback_speech_synthesis_label;
}
set fallbackSpeechSynthesisLabel(label) {
this.application.fallback_speech_synthesis_label = label;
}
/**
* default voice to use for speech synthesis if not provided in the app
*/
@@ -212,13 +180,6 @@ class CallSession extends Emitter {
set speechSynthesisVoice(voice) {
this.application.speech_synthesis_voice = voice;
}
get fallbackSpeechSynthesisVoice() {
return this.application.fallback_speech_synthesis_voice;
}
set fallbackSpeechSynthesisVoice(voice) {
this.application.fallback_speech_synthesis_voice = voice;
}
/**
* default language to use for speech synthesis if not provided in the app
*/
@@ -229,13 +190,6 @@ class CallSession extends Emitter {
this.application.speech_synthesis_language = language;
}
get fallbackSpeechSynthesisLanguage() {
return this.application.fallback_speech_synthesis_language;
}
set fallbackSpeechSynthesisLanguage(language) {
this.application.fallback_speech_synthesis_language = language;
}
/**
* default vendor to use for speech recognition if not provided in the app
*/
@@ -245,29 +199,6 @@ class CallSession extends Emitter {
set speechRecognizerVendor(vendor) {
this.application.speech_recognizer_vendor = vendor;
}
get fallbackSpeechRecognizerVendor() {
return this.application.fallback_speech_recognizer_vendor;
}
set fallbackSpeechRecognizerVendor(vendor) {
this.application.fallback_speech_recognizer_vendor = vendor;
}
/**
* default label to use for speech recognition if not provided in the app
*/
get speechRecognizerLabel() {
return this.application.speech_recognizer_label;
}
set speechRecognizerLabel(label) {
this.application.speech_recognizer_label = label;
}
get fallbackSpeechRecognizerLabel() {
return this.application.fallback_speech_recognizer_label;
}
set fallbackSpeechRecognizerLabel(label) {
this.application.fallback_speech_recognizer_label = label;
}
/**
* default language to use for speech recognition if not provided in the app
*/
@@ -278,13 +209,6 @@ class CallSession extends Emitter {
this.application.speech_recognizer_language = language;
}
get fallbackSpeechRecognizerLanguage() {
return this.application.fallback_speech_recognizer_language;
}
set fallbackSpeechRecognizerLanguage(language) {
this.application.fallback_speech_recognizer_language = language;
}
/**
* indicates whether the call currently in progress
*/
@@ -406,22 +330,6 @@ class CallSession extends Emitter {
return this._globalSttPunctuation;
}
get onHoldMusic() {
return this._onHoldMusic;
}
set onHoldMusic(url) {
this._onHoldMusic = url;
}
get sipRequestWithinDialogHook() {
return this._sipRequestWithinDialogHook;
}
set sipRequestWithinDialogHook(url) {
this._sipRequestWithinDialogHook = url;
}
hasGlobalSttPunctuation() {
return this._globalSttPunctuation !== undefined;
}
@@ -507,10 +415,7 @@ class CallSession extends Emitter {
'X-Call-Sid': this.callSid,
'X-Account-Sid': this.accountSid,
'X-Application-Sid': this.applicationSid,
...(this.recordOptions.headers && {'Content-Type': 'application/json'})
},
// SIPREC client is initiated from startCallRecording, so we just need to pass custom headers in startRecording
...(this.recordOptions.headers && {body: JSON.stringify(this.recordOptions.headers) + '\n'})
}
});
if (res.status === 200) {
this._recordState = RecordState.RecordingOn;
@@ -531,7 +436,7 @@ class CallSession extends Emitter {
const res = await this.dlg.request({
method: 'INFO',
headers: {
'X-Reason': 'stopCallRecording'
'X-Reason': 'stopCallRecording',
}
});
if (res.status === 200) {
@@ -553,7 +458,7 @@ class CallSession extends Emitter {
const res = await this.dlg.request({
method: 'INFO',
headers: {
'X-Reason': 'pauseCallRecording'
'X-Reason': 'pauseCallRecording',
}
});
if (res.status === 200) {
@@ -575,7 +480,7 @@ class CallSession extends Emitter {
const res = await this.dlg.request({
method: 'INFO',
headers: {
'X-Reason': 'resumeCallRecording'
'X-Reason': 'resumeCallRecording',
}
});
if (res.status === 200) {
@@ -590,7 +495,7 @@ class CallSession extends Emitter {
}
}
async startBackgroundListen(opts, bugname) {
async startBackgroundListen(opts) {
if (this.isListenEnabled) {
this.logger.info('CallSession:startBackgroundListen - listen is already enabled, ignoring request');
return;
@@ -599,11 +504,8 @@ class CallSession extends Emitter {
this.logger.debug({opts}, 'CallSession:startBackgroundListen');
const t = normalizeJambones(this.logger, [opts]);
this.backgroundListenTask = makeTask(this.logger, t[0]);
this.backgroundListenTask.bugname = bugname;
// Remove unneeded customer data to be sent to api server.
this.backgroundListenTask.ignoreCustomerData = true;
const resources = await this._evaluatePreconditions(this.backgroundListenTask);
const {span, ctx} = this.rootSpan.startChildSpan(`background-listen:${this.backgroundListenTask.summary}`);
const {span, ctx} = this.rootSpan.startChildSpan(`background-gather:${this.backgroundListenTask.summary}`);
this.backgroundListenTask.span = span;
this.backgroundListenTask.ctx = ctx;
this.backgroundListenTask.exec(this, resources)
@@ -626,7 +528,6 @@ class CallSession extends Emitter {
}
async stopBackgroundListen() {
this.logger.debug('CallSession:stopBackgroundListen');
try {
if (this.backgroundListenTask) {
this.backgroundListenTask.removeAllListeners();
@@ -635,6 +536,7 @@ class CallSession extends Emitter {
} catch (err) {
this.logger.info({err}, 'CallSession:stopBackgroundListen - Error stopping listen task');
}
this.backgroundListenTask = null;
}
async enableBotMode(gather, autoEnable) {
@@ -724,28 +626,15 @@ class CallSession extends Emitter {
* Check for speech credentials for the specified vendor
* @param {*} vendor - google or aws
*/
getSpeechCredentials(vendor, type, label = null) {
getSpeechCredentials(vendor, type) {
const {writeAlerts, AlertType} = this.srf.locals;
if (this.accountInfo.speech && this.accountInfo.speech.length > 0) {
// firstly check if account level has expected credential
let credential = this.accountInfo.speech.find((s) => s.vendor === vendor &&
s.label === label && s.account_sid);
if (!credential) {
// check if SP level has expected credential
credential = this.accountInfo.speech.find((s) => s.vendor === vendor &&
s.label === label && !s.account_sid);
}
const credential = this.accountInfo.speech.find((s) => s.vendor === vendor);
if (credential && (
(type === 'tts' && credential.use_for_tts) ||
(type === 'stt' && credential.use_for_stt)
)) {
this.logger.info(
`Speech vendor: ${credential.vendor} ${credential.label ? `, label: ${credential.label}` : ''} selected`);
if ('google' === vendor) {
if (type === 'tts' && !credential.tts_tested_ok ||
type === 'stt' && !credential.stt_tested_ok) {
return;
}
try {
const cred = JSON.parse(credential.service_key.replace(/\n/g, '\\n'));
return {
@@ -777,10 +666,8 @@ class CallSession extends Emitter {
region: credential.region,
use_custom_stt: credential.use_custom_stt,
custom_stt_endpoint: credential.custom_stt_endpoint,
custom_stt_endpoint_url: credential.custom_stt_endpoint_url,
use_custom_tts: credential.use_custom_tts,
custom_tts_endpoint: credential.custom_tts_endpoint,
custom_tts_endpoint_url: credential.custom_tts_endpoint_url
custom_tts_endpoint: credential.custom_tts_endpoint
};
}
else if ('wellsaid' === vendor) {
@@ -825,22 +712,7 @@ class CallSession extends Emitter {
riva_server_uri: credential.riva_server_uri
};
}
else if ('cobalt' === vendor) {
return {
speech_credential_sid: credential.speech_credential_sid,
cobalt_server_uri: credential.cobalt_server_uri
};
} else if ('elevenlabs' === vendor) {
return {
api_key: credential.api_key,
model_id: credential.model_id
};
} else if ('assemblyai' === vendor) {
return {
speech_credential_sid: credential.speech_credential_sid,
api_key: credential.api_key
};
} else if (vendor.startsWith('custom:')) {
else if (vendor.startsWith('custom:')) {
return {
speech_credential_sid: credential.speech_credential_sid,
auth_token: credential.auth_token,
@@ -910,15 +782,23 @@ class CallSession extends Emitter {
}
}
if (0 === this.tasks.length &&
this.requestor instanceof WsRequestor &&
!this.requestor.closedGracefully &&
!this.callGone
) {
if (0 === this.tasks.length && this.requestor instanceof WsRequestor && !this.callGone) {
//let span;
try {
//const {span} = this.rootSpan.startChildSpan('waiting for commands');
//const {reason, queue, command} = await this._awaitCommandsOrHangup();
/*
span.setAttributes({
'completion.reason': reason,
'async.request.queue': queue,
'async.request.command': command
});
span.end();
*/
await this._awaitCommandsOrHangup();
if (this.callGone) break;
} catch (err) {
//span.end();
this.logger.info(err, 'CallSession:exec - error waiting for new commands');
break;
}
@@ -1098,24 +978,6 @@ class CallSession extends Emitter {
listenTask.updateListen(opts.listen_status);
}
/**
* perform live call control -- change Transcribe status
* @param {object} opts
* @param {string} opts.transcribe_status - 'pause' or 'resume'
*/
async _lccTranscribeStatus(opts) {
const task = this.currentTask;
if (!task || ![TaskName.Dial, TaskName.Transcribe].includes(task.name)) {
return this.logger.info(`CallSession:_lccTranscribeStatus - invalid transcribe_status in task ${task.name}`);
}
const transcribeTask = task.name === TaskName.Transcribe ? task : task.transcribeTask;
if (!transcribeTask) {
return this.logger
.info('CallSession:_lccTranscribeStatus - invalid transcribe_status: Dial does not have a Transcribe');
}
transcribeTask.updateTranscribe(opts.transcribe_status);
}
async _lccMuteStatus(callSid, mute) {
// this whole thing requires us to be in a Dial or Conference verb
const task = this.currentTask;
@@ -1234,9 +1096,6 @@ class CallSession extends Emitter {
if (opts.listen_status) {
await this._lccListenStatus(opts);
}
if (opts.transcribe_status) {
await this._lccTranscribeStatus(opts);
}
else if (opts.mute_status) {
await this._lccMuteStatus(callSid, opts.mute_status === 'mute');
}
@@ -1413,10 +1272,6 @@ class CallSession extends Emitter {
this._lccListenStatus(data);
break;
case 'transcribe:status':
this._lccTranscribeStatus(data);
break;
case 'whisper':
this._lccWhisper(data, call_sid);
break;
@@ -1486,10 +1341,7 @@ class CallSession extends Emitter {
}
// we are going from an early media connection to answer
if (this.direction === CallDirection.Inbound) {
// only do this for inbound call.
await this.propagateAnswer();
}
await this.propagateAnswer();
return {
...resources,
...(this.isSipRecCallSession && {ep2: this.ep2})
@@ -1509,8 +1361,6 @@ class CallSession extends Emitter {
this.ep = ep;
this.logger.debug(`allocated endpoint ${ep.uuid}`);
this._configMsEndpoint();
this.ep.on('destroy', () => {
this.logger.debug(`endpoint was destroyed!! ${this.ep.uuid}`);
});
@@ -1581,7 +1431,6 @@ class CallSession extends Emitter {
return;
}
this.ep = await this.ms.createEndpoint({remoteSdp: this.dlg.remote.sdp});
this._configMsEndpoint();
await this.dlg.modify(this.ep.local.sdp);
this.logger.debug('CallSession:replaceEndpoint completed');
@@ -1663,32 +1512,11 @@ class CallSession extends Emitter {
}
this.dlg.on('modify', this._onReinvite.bind(this));
this.dlg.on('refer', this._onRefer.bind(this));
if (this.sipRequestWithinDialogHook) {
this.dlg.on('info', this._onRequestWithinDialog.bind(this));
this.dlg.on('message', this._onRequestWithinDialog.bind(this));
}
this.logger.debug(`CallSession:propagateAnswer - answered callSid ${this.callSid}`);
}
}
async _onRequestWithinDialog(req, res) {
if (!this.sipRequestWithinDialogHook) {
return;
}
const sip_method = req.method;
if (sip_method === 'INFO') {
res.send(200);
} else if (sip_method === 'MESSAGE') {
res.send(202);
} else {
this.logger.info(`CallSession:_onRequestWithinDialog unsupported method: ${req.method}`);
res.send(501);
return;
}
const params = {sip_method, sip_body: req.body, sip_headers: req.headers};
this.currentTask.performHook(this, this.sipRequestWithinDialogHook, params);
}
async _onReinvite(req, res) {
try {
if (this.ep) {
@@ -1697,14 +1525,9 @@ class CallSession extends Emitter {
res.send(200, {body: this.ep.local.sdp});
}
else {
if (this.currentTask.name === TaskName.Dial && this.currentTask.isOnHold) {
this.logger.info('onholdMusic reINVITE after media has been released');
await this.currentTask.handleReinviteAfterMediaReleased(req, res);
} else {
const newSdp = await this.ep.modify(req.body);
res.send(200, {body: newSdp});
this.logger.info({offer: req.body, answer: newSdp}, 'handling reINVITE');
}
const newSdp = await this.ep.modify(req.body);
res.send(200, {body: newSdp});
this.logger.info({offer: req.body, answer: newSdp}, 'handling reINVITE');
}
}
else if (this.currentTask && this.currentTask.name === TaskName.Dial) {
@@ -1751,7 +1574,6 @@ class CallSession extends Emitter {
}
if (!this.ep) {
this.ep = await this.ms.createEndpoint({remoteSdp: this.req.body});
this._configMsEndpoint();
}
return {ms: this.ms, ep: this.ep};
}
@@ -1906,7 +1728,6 @@ class CallSession extends Emitter {
'X-Reason': 'anchor-media'
}
});
this._configMsEndpoint();
}
async handleReinviteAfterMediaReleased(req, res) {
@@ -1928,14 +1749,6 @@ class CallSession extends Emitter {
async _notifyCallStatusChange({callStatus, sipStatus, sipReason, duration}) {
if (this.callMoved) return;
if (callStatus === CallStatus.InProgress) {
// nice, call is in progress, good time to enable record
await this.enableRecordAllCall();
} else if (callStatus == CallStatus.Completed && this.isBackGroundListen) {
this.stopBackgroundListen().catch((err) => this.logger.error(
{err}, 'CallSession:_notifyCallStatusChange - error stopping background listen'));
}
/* race condition: we hang up at the same time as the caller */
if (callStatus === CallStatus.Completed) {
if (this.notifiedComplete) return;
@@ -1948,15 +1761,6 @@ class CallSession extends Emitter {
this.callInfo.updateCallStatus(callStatus, sipStatus, sipReason);
if (typeof duration === 'number') this.callInfo.duration = duration;
this.executeStatusCallback(callStatus, sipStatus);
// update calls db
//this.logger.debug(`updating redis with ${JSON.stringify(this.callInfo)}`);
this.updateCallStatus(Object.assign({}, this.callInfo.toJSON()), this.serviceUrl)
.catch((err) => this.logger.error(err, 'redis error'));
}
async executeStatusCallback(callStatus, sipStatus) {
const {span} = this.rootSpan.startChildSpan(`call-status:${this.callInfo.callStatus}`);
span.setAttributes(this.callInfo.toJSON());
try {
@@ -1968,35 +1772,11 @@ class CallSession extends Emitter {
span.end();
this.logger.info(err, `CallSession:_notifyCallStatusChange error sending ${callStatus} ${sipStatus}`);
}
}
async enableRecordAllCall() {
if (this.accountInfo.account.record_all_calls || this.application.record_all_calls) {
if (!JAMBONZ_RECORD_WS_BASE_URL || !this.accountInfo.account.bucket_credential) {
this.logger.error('Record all calls: invalid configuration');
return;
}
const listenOpts = {
url: `${JAMBONZ_RECORD_WS_BASE_URL}/record/${this.accountInfo.account.bucket_credential.vendor}`,
disableBidirectionalAudio: true,
mixType : 'stereo',
passDtmf: true
};
if (JAMBONZ_RECORD_WS_USERNAME && JAMBONZ_RECORD_WS_PASSWORD) {
listenOpts.wsAuth = {
username: JAMBONZ_RECORD_WS_USERNAME,
password: JAMBONZ_RECORD_WS_PASSWORD
};
}
this.logger.debug({listenOpts}, 'Record all calls: enabling listen');
await this.startBackgroundListen({verb: 'listen', ...listenOpts}, 'jambonz-session-record');
}
}
_configMsEndpoint() {
if (this.onHoldMusic) {
this.ep.set({hold_music: `shout://${this.onHoldMusic.replace(/^https?:\/\//, '')}`});
}
// update calls db
//this.logger.debug(`updating redis with ${JSON.stringify(this.callInfo)}`);
this.updateCallStatus(Object.assign({}, this.callInfo.toJSON()), this.serviceUrl)
.catch((err) => this.logger.error(err, 'redis error'));
}
/**

View File

@@ -8,7 +8,7 @@ const moment = require('moment');
* @extends CallSession
*/
class RestCallSession extends CallSession {
constructor({logger, application, srf, req, ep, ep2, tasks, callInfo, accountInfo, rootSpan}) {
constructor({logger, application, srf, req, ep, tasks, callInfo, accountInfo, rootSpan}) {
super({
logger,
application,
@@ -21,7 +21,6 @@ class RestCallSession extends CallSession {
});
this.req = req;
this.ep = ep;
this.ep2 = ep2;
// keep restDialTask reference for closing AMD
if (tasks.length) {
this.restDialTask = tasks[0];
@@ -50,6 +49,7 @@ class RestCallSession extends CallSession {
*/
_callerHungup() {
if (this.restDialTask) {
this.logger.info('RestCallSession: releasing AMD');
this.restDialTask.turnOffAmd();
}
this.callInfo.callTerminationBy = 'caller';

View File

@@ -48,7 +48,7 @@ class Conference extends Task {
this.confName = this.data.name;
[
'beep', 'startConferenceOnEnter', 'endConferenceOnExit', 'joinMuted',
'maxParticipants', 'waitHook', 'statusHook', 'endHook', 'enterHook', 'endConferenceDuration'
'maxParticipants', 'waitHook', 'statusHook', 'endHook', 'enterHook'
].forEach((attr) => this[attr] = this.data[attr]);
this.record = this.data.record || {};
this.statusEvents = [];
@@ -114,12 +114,7 @@ class Conference extends Task {
}
this.emitter.emit('kill');
await this._doFinalMemberCheck(cs);
if (this.ep && this.ep.connected) {
this.ep.conn.removeAllListeners('esl::event::CUSTOM::*');
this.ep.api(`conference ${this.confName} kick ${this.memberId}`)
.catch((err) => this.logger.info({err}, 'Error kicking participant'));
}
cs.clearConferenceDetails();
if (this.ep && this.ep.connected) this.ep.conn.removeAllListeners('esl::event::CUSTOM::*') ;
this.notifyTaskDone();
}
@@ -344,13 +339,9 @@ class Conference extends Task {
}
const opts = {};
if (this.endConferenceOnExit || this.startConferenceOnEnter || this.joinMuted) {
Object.assign(opts, {flags: {
...(this.endConferenceOnExit && {endconf: true}),
...(this.startConferenceOnEnter && {moderator: true}),
...(this.joinMuted && {joinMuted: true}),
}});
}
if (this.endConferenceOnExit) Object.assign(opts, {flags: {endconf: true}});
if (this.startConferenceOnEnter) Object.assign(opts, {flags: {moderator: true}});
if (this.joinMuted) Object.assign(opts, {flags: {mute: true}});
try {
const {memberId, confUuid} = await this.ep.join(this.confName, opts);
@@ -393,11 +384,6 @@ class Conference extends Task {
this.ep.api('conference', `${this.confName} set max_members ${this.maxParticipants}`)
.catch((err) => this.logger.error(err, `Error setting max participants to ${this.maxParticipants}`));
}
if (typeof this.endConferenceDuration === 'number' && this.endConferenceDuration >= 0) {
this.ep.api('conference', `${this.confName} set endconference_grace_time ${this.endConferenceDuration}`)
.catch((err) => this.logger.error(err, `Error setting end conference time to ${this.endConferenceDuration}`));
}
}
/**

View File

@@ -40,8 +40,6 @@ class TaskConfig extends Task {
this.preconditions = (this.bargeIn.enable || this.record?.action || this.listen?.url || this.data.amd) ?
TaskPreconditions.Endpoint :
TaskPreconditions.None;
this.onHoldMusic = this.data.onHoldMusic;
}
get name() { return TaskName.Config; }
@@ -74,7 +72,6 @@ class TaskConfig extends Task {
}
if (this.data.amd) phrase.push('enable amd');
if (this.notifyEvents) phrase.push(`event notification ${this.notifyEvents ? 'on' : 'off'}`);
if (this.onHoldMusic) phrase.push(`onHoldMusic: ${this.onHoldMusic}`);
return `${this.name}{${phrase.join(',')}}`;
}
@@ -86,10 +83,6 @@ class TaskConfig extends Task {
cs.notifyEvents = !!this.data.notifyEvents;
}
if (this.onHoldMusic) {
cs.onHoldMusic = this.onHoldMusic;
}
if (this.data.amd) {
this.startAmd = cs.startAmd;
this.stopAmd = cs.stopAmd;
@@ -112,53 +105,21 @@ class TaskConfig extends Task {
cs.speechSynthesisVendor = this.synthesizer.vendor !== 'default'
? this.synthesizer.vendor
: cs.speechSynthesisVendor;
cs.speechSynthesisLabel = this.synthesizer.label !== 'default'
? this.synthesizer.label
: cs.speechSynthesisLabel;
cs.speechSynthesisLanguage = this.synthesizer.language !== 'default'
? this.synthesizer.language
: cs.speechSynthesisLanguage;
cs.speechSynthesisVoice = this.synthesizer.voice !== 'default'
? this.synthesizer.voice
: cs.speechSynthesisVoice;
// fallback vendor
cs.fallbackSpeechSynthesisVendor = this.synthesizer.fallbackVendor !== 'default'
? this.synthesizer.fallbackVendor
: cs.fallbackSpeechSynthesisVendor;
cs.fallbackSpeechSynthesisLabel = this.synthesizer.fallbackLabel !== 'default'
? this.synthesizer.fallbackLabel
: cs.fallbackSpeechSynthesisLabel;
cs.fallbackSpeechSynthesisLanguage = this.synthesizer.fallbackLanguage !== 'default'
? this.synthesizer.fallbackLanguage
: cs.fallbackSpeechSynthesisLanguage;
cs.fallbackSpeechSynthesisVoice = this.synthesizer.fallbackVoice !== 'default'
? this.synthesizer.fallbackVoice
: cs.fallbackSpeechSynthesisVoice;
this.logger.info({synthesizer: this.synthesizer}, 'Config: updated synthesizer');
}
if (this.hasRecognizer) {
cs.speechRecognizerVendor = this.recognizer.vendor !== 'default'
? this.recognizer.vendor
: cs.speechRecognizerVendor;
cs.speechRecognizerLabel = this.recognizer.label !== 'default'
? this.recognizer.label
: cs.speechRecognizerLabel;
cs.speechRecognizerLanguage = this.recognizer.language !== 'default'
? this.recognizer.language
: cs.speechRecognizerLanguage;
//fallback
cs.fallbackSpeechRecognizerVendor = this.recognizer.fallbackVendor !== 'default'
? this.recognizer.fallbackVendor
: cs.fallbackSpeechRecognizerVendor;
cs.fallbackSpeechRecognizerLabel = this.recognizer.fallbackLabel !== 'default'
? this.recognizer.fallbackLabel
: cs.fallbackSpeechRecognizerLabel;
cs.fallbackSpeechRecognizerLanguage = this.recognizer.fallbackLanguage !== 'default'
? this.recognizer.fallbackLanguage
: cs.fallbackSpeechRecognizerLanguage;
cs.isContinuousAsr = typeof this.recognizer.asrTimeout === 'number' ? true : false;
if (cs.isContinuousAsr) {
cs.asrTimeout = this.recognizer.asrTimeout;
@@ -216,9 +177,6 @@ class TaskConfig extends Task {
cs.stopBackgroundListen();
}
}
if (this.data.sipRequestWithinDialogHook) {
cs.sipRequestWithinDialogHook = this.data.sipRequestWithinDialogHook;
}
}
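
// A minimal sketch of the 'default' sentinel pattern applied above: a verb-level
// synthesizer/recognizer setting only overrides the session-level value when it is
// something other than 'default'. applySetting is a hypothetical helper, not part
// of the codebase.
const applySetting = (verbValue, sessionValue) =>
  verbValue && verbValue !== 'default' ? verbValue : sessionValue;
// applySetting('en-GB', 'en-US')   => 'en-GB'  (explicit override)
// applySetting('default', 'en-US') => 'en-US'  (keep the session default)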
async kill(cs) {

View File

@@ -12,13 +12,10 @@ const assert = require('assert');
const placeCall = require('../utils/place-outdial');
const sessionTracker = require('../session/session-tracker');
const DtmfCollector = require('../utils/dtmf-collector');
const ConfirmCallSession = require('../session/confirm-call-session');
const dbUtils = require('../utils/db-utils');
const debug = require('debug')('jambonz:feature-server');
const {parseUri} = require('drachtio-srf');
const {ANCHOR_MEDIA_ALWAYS, JAMBONZ_DISABLE_DIAL_PAI_HEADER} = require('../config');
const { isOnhold } = require('../utils/sdp-utils');
const { normalizeJambones } = require('@jambonz/verb-specifications');
const {ANCHOR_MEDIA_ALWAYS} = require('../config');
function parseDtmfOptions(logger, dtmfCapture) {
let parentDtmfCollector, childDtmfCollector;
@@ -138,13 +135,8 @@ class TaskDial extends Task {
get name() { return TaskName.Dial; }
get isOnHold() {
return this.isIncomingLegHold || this.isOutgoingLegHold;
}
get canReleaseMedia() {
const keepAnchor = this.data.anchorMedia ||
this.cs.isBackGroundListen ||
ANCHOR_MEDIA_ALWAYS ||
this.listenTask ||
this.transcribeTask ||
@@ -204,7 +196,6 @@ class TaskDial extends Task {
await this.performAction(this.results, this.killReason !== KillReason.Replaced);
this._removeDtmfDetection(cs.dlg);
this._removeDtmfDetection(this.dlg);
this._removeSipIndialogRequestListener(this.dlg);
} catch (err) {
this.logger.error({err}, 'TaskDial:exec terminating with error');
this.kill(cs);
@@ -233,7 +224,7 @@ class TaskDial extends Task {
}
this._removeDtmfDetection(cs.dlg);
this._removeDtmfDetection(this.dlg);
await this._killOutdials();
this._killOutdials();
if (this.sd) {
this.sd.kill();
this.sd.removeAllListeners();
@@ -322,41 +313,18 @@ class TaskDial extends Task {
const to = parseUri(req.getParsedHeader('Refer-To').uri);
const by = parseUri(req.getParsedHeader('Referred-By').uri);
this.logger.info({to}, 'refer to parsed');
const json = await cs.requestor.request('verb:hook', this.referHook, {
await cs.requestor.request('verb:hook', this.referHook, {
...callInfo,
refer_details: {
sip_refer_to: req.get('Refer-To'),
sip_referred_by: req.get('Referred-By'),
sip_user_agent: req.get('User-Agent'),
refer_to_user: to.scheme === 'tel' ? to.number : to.user,
referred_by_user: by.scheme === 'tel' ? by.number : by.user,
refer_to_user: to.user,
referred_by_user: by.user,
referring_call_sid,
referred_call_sid
}
}, httpHeaders);
if (json && Array.isArray(json)) {
try {
const logger = isChild ? this.logger : this.sd.logger;
const tasks = normalizeJambones(logger, json).map((tdata) => makeTask(this.logger, tdata));
if (tasks && tasks.length > 0) {
const legs = isChild ? ['child', 'parent'] : ['parent', 'child'];
logger.info(`Dial:handleRefer received REFER on ${legs[0]} leg, setting new app on ${legs[1]} leg`);
if (isChild) this.redirect(cs, tasks);
else {
logger.info({tasks: json}, 'Dial:handleRefer - new application for child leg');
const adultingSession = await this.sd.doAdulting({
logger,
application: cs.application,
tasks
});
/* need to update the callSid of the child with its own (new) AdultingCallSession */
sessionTracker.add(adultingSession.callSid, adultingSession);
}
}
} catch (err) {
this.logger.info(err, 'Dial:handleRefer - error setting new application after receiving REFER');
}
}
res.send(202);
this.logger.info('DialTask:handleRefer - sent 202 Accepted');
} catch (err) {
@@ -377,16 +345,11 @@ class TaskDial extends Task {
sd.removeAllListeners('callCreateFail');
}
async _killOutdials() {
_killOutdials() {
for (const [callSid, sd] of Array.from(this.dials)) {
this.logger.debug(`Dial:_killOutdials killing callSid ${callSid}`);
try {
await sd.kill();
} catch (err) {
this.logger.info(err, `Dial:_killOutdials Error killing ${callSid}`);
}
sd.kill().catch((err) => this.logger.info(err, `Dial:_killOutdials Error killing ${callSid}`));
this._removeHandlers(sd);
this.logger.debug(`Dial:_killOutdials killed callSid ${callSid}`);
}
this.dials.clear();
}
@@ -399,14 +362,8 @@ class TaskDial extends Task {
}
_onInfo(cs, dlg, req, res) {
// SIP Indialog will be handled by another handler
if (cs.sipRequestWithinDialogHook) {
return;
}
res.send(200);
if (req.get('Content-Type') !== 'application/dtmf-relay') {
return;
}
if (req.get('Content-Type') !== 'application/dtmf-relay') return;
const dtmfDetector = dlg === cs.dlg ? this.parentDtmfCollector : this.childDtmfCollector;
if (!dtmfDetector) return;
@@ -435,20 +392,6 @@ class TaskDial extends Task {
}
}
_initSipIndialogRequestListener(cs, dlg) {
dlg.on('info', this._onRequestWithinDialog.bind(this, cs));
dlg.on('message', this._onRequestWithinDialog.bind(this, cs));
}
_removeSipIndialogRequestListener(dlg) {
dlg && dlg.removeAllListeners('message');
dlg && dlg.removeAllListeners('info');
}
async _onRequestWithinDialog(cs, req, res) {
cs._onRequestWithinDialog(req, res);
}
async _initializeInbound(cs) {
const {ep} = await cs._evalEndpointPrecondition(this);
this.epOther = ep;
@@ -477,8 +420,7 @@ class TaskDial extends Task {
this.headers = {
'X-Account-Sid': cs.accountSid,
...(req && req.has('X-CID') && {'X-CID': req.get('X-CID')}),
...(req && req.has('P-Asserted-Identity') && !JAMBONZ_DISABLE_DIAL_PAI_HEADER &&
{'P-Asserted-Identity': req.get('P-Asserted-Identity')}),
...(req && req.has('P-Asserted-Identity') && {'P-Asserted-Identity': req.get('P-Asserted-Identity')}),
...(req && req.has('X-Voip-Carrier-Sid') && {'X-Voip-Carrier-Sid': req.get('X-Voip-Carrier-Sid')}),
// Put headers at the end to make sure opt.headers override all default behavior.
...this.headers
@@ -499,14 +441,10 @@ class TaskDial extends Task {
}
const ms = await cs.getMS();
this.timerRing = setTimeout(async() => {
this.timerRing = setTimeout(() => {
this.logger.info(`Dial:_attemptCall: ring no answer timer ${this.timeout}s exceeded`);
this.timerRing = null;
try {
await this._killOutdials();
} catch (err) {
this.logger.info(err, 'Dial:_attemptCall - error killing outdials');
}
this._killOutdials();
this.result = {
dialCallStatus: CallStatus.NoAnswer,
dialSipStatus: 487
@@ -568,8 +506,7 @@ class TaskDial extends Task {
callInfo: cs.callInfo,
accountInfo: cs.accountInfo,
rootSpan: cs.rootSpan,
startSpan: this.startSpan.bind(this),
dialTask: this
startSpan: this.startSpan.bind(this)
});
this.dials.set(sd.callSid, sd);
@@ -585,8 +522,7 @@ class TaskDial extends Task {
}
})
.on('callStatusChange', (obj) => {
if (this.results.dialCallStatus !== CallStatus.Completed &&
this.results.dialCallStatus !== CallStatus.NoAnswer) {
if (this.results.dialCallStatus !== CallStatus.Completed) {
Object.assign(this.results, {
dialCallStatus: obj.callStatus,
dialSipStatus: obj.sipStatus,
@@ -639,7 +575,11 @@ class TaskDial extends Task {
}
})
.on('reinvite', (req, res) => {
this._onReinvite(req, res);
try {
cs.handleReinviteAfterMediaReleased(req, res);
} catch (err) {
this.logger.error(err, 'Error in dial reinvite from B leg');
}
})
.on('refer', (callInfo, req, res) => {
@@ -675,35 +615,6 @@ class TaskDial extends Task {
this._killOutdials(); // NB: order is important
}
async _onReinvite(req, res) {
try {
let isHandled = false;
if (this.cs.onHoldMusic) {
if (isOnhold(req.body) && !this.epOther && !this.ep) {
await this.cs.handleReinviteAfterMediaReleased(req, res);
// Onhold but media is already released
// reconnect A Leg and respond to B leg
await this.reAnchorMedia(this.cs, this.sd);
this.isOutgoingLegHold = true;
isHandled = true;
this._onHoldHook();
} else if (!isOnhold(req.body) && this.epOther && this.ep && this.isOutgoingLegHold && this.canReleaseMedia) {
// Offhold, time to release media
const newSdp = await this.ep.modify(req.body);
await res.send(200, {body: newSdp});
await this._releaseMedia(this.cs, this.sd);
isHandled = true;
this.isOutgoingLegHold = false;
}
}
if (!isHandled) {
this.cs.handleReinviteAfterMediaReleased(req, res);
}
} catch (err) {
this.logger.error(err, 'Error in dial reinvite from B leg');
}
}
_onMaxCallDuration(cs) {
this.logger.info(`Dial:_onMaxCallDuration tearing down call as it has reached ${this.timeLimit}s`);
this.ep && this.ep.unbridge();
@@ -756,7 +667,6 @@ class TaskDial extends Task {
if (this.parentDtmfCollector) this._installDtmfDetection(cs, cs.dlg);
if (this.childDtmfCollector) this._installDtmfDetection(cs, this.dlg);
if (cs.sipRequestWithinDialogHook) this._initSipIndialogRequestListener(cs, this.dlg);
if (this.transcribeTask) this.transcribeTask.exec(cs, {ep: this.epOther, ep2:this.ep});
if (this.listenTask) this.listenTask.exec(cs, {ep: this.epOther});
@@ -809,29 +719,9 @@ class TaskDial extends Task {
}
async handleReinviteAfterMediaReleased(req, res) {
let isHandled = false;
if (isOnhold(req.body) && !this.epOther && !this.ep) {
const sdp = await this.dlg.modify(req.body);
res.send(200, {body: sdp});
// Onhold but media is already released
await this.reAnchorMedia(this.cs, this.sd);
isHandled = true;
this.isIncomingLegHold = true;
this._onHoldHook();
} else if (!isOnhold(req.body) && this.epOther && this.ep && this.isIncomingLegHold && this.canReleaseMedia) {
// Offhold, time to release media
const newSdp = await this.epOther.modify(req.body);
await res.send(200, {body: newSdp});
await this._releaseMedia(this.cs, this.sd);
isHandled = true;
this.isIncomingLegHold = false;
}
if (!isHandled) {
const sdp = await this.dlg.modify(req.body);
this.logger.info({sdp}, 'Dial:handleReinviteAfterMediaReleased - sent reinvite to B leg');
res.send(200, {body: sdp});
}
const sdp = await this.dlg.modify(req.body);
this.logger.info({sdp}, 'Dial:handleReinviteAfterMediaReleased - sent reinvite to B leg');
res.send(200, {body: sdp});
}
_onAmdEvent(cs, evt) {
@@ -842,48 +732,6 @@ class TaskDial extends Task {
this.logger.error({err}, 'Dial:_onAmdEvent - error calling actionHook');
});
}
async _onHoldHook(allowed = [TaskName.Play, TaskName.Say, TaskName.Pause]) {
if (this.data.onHoldHook) {
// send silence to keep voice quality
await this.epOther.play('silence_stream://500');
let allowedTasks;
do {
try {
const b3 = this.getTracingPropagation();
const httpHeaders = b3 && {b3};
const json = await this.cs.application.requestor.
request('verb:hook', this.data.onHoldHook, this.cs.callInfo.toJSON(), httpHeaders);
const tasks = normalizeJambones(this.logger, json).map((tdata) => makeTask(this.logger, tdata));
allowedTasks = tasks.filter((t) => allowed.includes(t.name));
if (tasks.length !== allowedTasks.length) {
this.logger.debug({tasks, allowedTasks}, 'unsupported task');
throw new Error(`unsupported verb in dial onHoldHook: only ${JSON.stringify(allowed)}`);
}
this.logger.debug(`DialTask:_onHoldHook: executing ${tasks.length} tasks`);
if (tasks.length) {
this._playSession = new ConfirmCallSession({
logger: this.logger,
application: this.cs.application,
dlg: this.isIncomingLegHold ? this.dlg : this.cs.dlg,
ep: this.isIncomingLegHold ? this.ep : this.cs.ep,
callInfo: this.cs.callInfo,
accountInfo: this.cs.accountInfo,
tasks,
rootSpan: this.cs.rootSpan
});
await this._playSession.exec();
this._playSession = null;
}
} catch (error) {
this.logger.info(error, 'DialTask:_onHoldHook: failed retrieving waitHook');
this._playSession = null;
break;
}
} while (allowedTasks && allowedTasks.length > 0 && !this.killed && this.isOnHold);
this.logger.info('Finish onHoldHook');
}
}
}
module.exports = TaskDial;
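
The hold and unhold handling above keys off isOnhold(req.body) from ../utils/sdp-utils. A minimal sketch of such a check, assuming the conventional SIP hold markers (an a=sendonly or a=inactive attribute, or a zeroed connection address), could look like this; the real helper may differ:

const isOnhold = (sdp = '') =>
  /a=(sendonly|inactive)/.test(sdp) || /c=IN IP4 0\.0\.0\.0/.test(sdp);

// isOnhold('v=0\r\n...\r\na=sendonly\r\n') => true
// isOnhold('v=0\r\n...\r\na=sendrecv\r\n') => false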

View File

@@ -58,13 +58,6 @@ class Dialogflow extends Task {
this.vendor = this.data.tts.vendor || 'default';
this.language = this.data.tts.language || 'default';
this.voice = this.data.tts.voice || 'default';
this.speechSynthesisLabel = this.data.tts.label || 'default';
// fallback tts
this.fallbackVendor = this.data.tts.fallbackVendor || 'default';
this.fallbackLanguage = this.data.tts.fallbackLanguage || 'default';
this.fallbackVoice = this.data.tts.fallbackLanguage || 'default';
this.fallbackLabel = this.data.tts.fallbackLabel || 'default';
}
this.bargein = this.data.bargein;
}
@@ -125,15 +118,8 @@ class Dialogflow extends Task {
this.vendor = cs.speechSynthesisVendor;
this.language = cs.speechSynthesisLanguage;
this.voice = cs.speechSynthesisVoice;
this.speechSynthesisLabel = cs.speechSynthesisLabel;
}
if (this.fallbackVendor === 'default') {
this.fallbackVendor = cs.fallbackSpeechSynthesisVendor;
this.fallbackLanguage = cs.fallbackSpeechSynthesisLanguage;
this.fallbackVoice = cs.fallbackSpeechSynthesisVoice;
this.fallbackLabel = cs.fallbackSpeechSynthesisLabel;
}
this.ttsCredentials = cs.getSpeechCredentials(this.vendor, 'tts', this.speechSynthesisLabel);
this.ttsCredentials = cs.getSpeechCredentials(this.vendor, 'tts');
this.ep.addCustomEventListener('dialogflow::intent', this._onIntent.bind(this, ep, cs));
this.ep.addCustomEventListener('dialogflow::transcription', this._onTranscription.bind(this, ep, cs));
@@ -235,8 +221,19 @@ class Dialogflow extends Task {
}
try {
const {filePath} = await this._fallbackSynthAudio(cs, intent, stats, synthAudio);
const obj = {
account_sid: cs.accountSid,
text: intent.fulfillmentText,
vendor: this.vendor,
language: this.language,
voice: this.voice,
salt: cs.callSid,
credentials: this.ttsCredentials
};
this.logger.debug({obj}, 'Dialogflow:_onIntent - playing message via tts');
const {filePath, servedFromCache} = await synthAudio(stats, obj);
if (filePath) cs.trackTmpFile(filePath);
if (!this.ttsCredentials && !servedFromCache) cs.billForTts(intent.fulfillmentText.length);
if (this.playInProgress) {
await ep.api('uuid_break', ep.uuid).catch((err) => this.logger.info(err, 'Error killing audio'));
@@ -280,46 +277,6 @@ class Dialogflow extends Task {
}
}
async _fallbackSynthAudio(cs, intent, stats, synthAudio) {
try {
const obj = {
account_sid: cs.accountSid,
text: intent.fulfillmentText,
vendor: this.vendor,
language: this.language,
voice: this.voice,
salt: cs.callSid,
credentials: this.ttsCredentials
};
this.logger.debug({obj}, 'Dialogflow:_onIntent - playing message via tts');
return await synthAudio(stats, obj);
} catch (error) {
this.logger.info({error}, 'Failed to synthesize audio from primary vendor');
try {
if (this.fallbackVendor) {
const credentials = cs.getSpeechCredentials(this.fallbackVendor, 'tts', this.fallbackLabel);
const obj = {
account_sid: cs.accountSid,
text: intent.fulfillmentText,
vendor: this.fallbackVendor,
language: this.fallbackLanguage,
voice: this.fallbackVoice,
salt: cs.callSid,
credentials
};
this.logger.debug({obj}, 'Dialogflow:_onIntent - playing message via fallback tts');
return await synthAudio(stats, obj);
}
} catch (err) {
this.logger.info({err}, 'Failed to synthesize audio from fallback vendor');
throw err;
}
throw error;
}
}
/**
* A transcription - either interim or final - has been returned.
* If we are doing barge-in based on hotword detection, check for the hotword or phrase.

View File

@@ -311,8 +311,7 @@ class TaskEnqueue extends Task {
}
}
async _playHook(cs, dlg, hook,
allowed = [TaskName.Play, TaskName.Say, TaskName.Pause, TaskName.Leave, TaskName.Tag]) {
async _playHook(cs, dlg, hook, allowed = [TaskName.Play, TaskName.Say, TaskName.Pause, TaskName.Leave]) {
const {sortedSetLength, sortedSetPositionByPattern} = cs.srf.locals.dbHelpers;
const b3 = this.getTracingPropagation();
const httpHeaders = b3 && {b3};
@@ -332,7 +331,6 @@ class TaskEnqueue extends Task {
queuePosition: queuePosition.length ? queuePosition[0] : 0,
callSid: this.cs.callSid,
callId: this.cs.callId,
customerData: this.cs.callInfo.customerData
});
} catch (err) {
this.logger.error({err}, `TaskEnqueue:_playHook error retrieving list info for queue ${this.queueName}`);

View File

@@ -1,17 +1,17 @@
const Task = require('./task');
const {
TaskName,
TaskPreconditions,
GoogleTranscriptionEvents,
NuanceTranscriptionEvents,
AwsTranscriptionEvents,
AzureTranscriptionEvents,
DeepgramTranscriptionEvents,
SonioxTranscriptionEvents,
CobaltTranscriptionEvents,
IbmTranscriptionEvents,
NvidiaTranscriptionEvents,
JambonzTranscriptionEvents,
AssemblyAiTranscriptionEvents
} = require('../utils/constants.json');
JambonzTranscriptionEvents
} = require('../utils/constants');
const {
JAMBONES_GATHER_EARLY_HINTS_MATCH,
JAMBONZ_GATHER_EARLY_HINTS_MATCH,
@@ -19,11 +19,34 @@ const {
} = require('../config');
const makeTask = require('./make_task');
const assert = require('assert');
const SttTask = require('./stt-task');
class TaskGather extends SttTask {
const compileTranscripts = (logger, evt, arr) => {
if (!Array.isArray(arr) || arr.length === 0) return;
let t = '';
for (const a of arr) {
t += ` ${a.alternatives[0].transcript}`;
}
t += ` ${evt.alternatives[0].transcript}`;
evt.alternatives[0].transcript = t.trim();
};
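
// A hypothetical usage of compileTranscripts above (the events are made up):
// previously buffered transcripts are prepended to the final event's first alternative.
const buffered = [
  {alternatives: [{transcript: 'please transfer me'}]},
  {alternatives: [{transcript: 'to the billing'}]}
];
const finalEvt = {alternatives: [{transcript: 'department'}]};
compileTranscripts(console, finalEvt, buffered);
// finalEvt.alternatives[0].transcript === 'please transfer me to the billing department'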
class TaskGather extends Task {
constructor(logger, opts, parentTask) {
super(logger, opts, parentTask);
super(logger, opts);
this.preconditions = TaskPreconditions.Endpoint;
const {
setChannelVarsForStt,
normalizeTranscription,
removeSpeechListeners,
setSpeechCredentialsAtRuntime,
compileSonioxTranscripts
} = require('../utils/transcription-utils')(logger);
this.setChannelVarsForStt = setChannelVarsForStt;
this.normalizeTranscription = normalizeTranscription;
this.removeSpeechListeners = removeSpeechListeners;
this.compileSonioxTranscripts = compileSonioxTranscripts;
[
'finishOnKey', 'input', 'numDigits', 'minDigits', 'maxDigits',
'interDigitTimeout', 'partialResultHook', 'bargein', 'dtmfBargein',
@@ -39,23 +62,27 @@ class TaskGather extends SttTask {
this.listenDuringPrompt = this.data.listenDuringPrompt === false ? false : true;
this.minBargeinWordCount = this.data.minBargeinWordCount || 1;
if (this.data.recognizer) {
const recognizer = this.data.recognizer;
this.vendor = recognizer.vendor;
this.language = recognizer.language;
/* let credentials be supplied in the recognizer object at runtime */
this.sttCredentials = setSpeechCredentialsAtRuntime(recognizer);
/* continuous ASR (i.e. compile transcripts until a special timeout or dtmf key) */
this.asrTimeout = typeof this.data.recognizer.asrTimeout === 'number' ?
this.data.recognizer.asrTimeout * 1000 : 0;
if (this.asrTimeout > 0) {
this.isContinuousAsr = true;
this.asrDtmfTerminationDigit = this.data.recognizer.asrDtmfTerminationDigit;
}
this.asrTimeout = typeof recognizer.asrTimeout === 'number' ? recognizer.asrTimeout * 1000 : 0;
if (this.asrTimeout > 0) this.asrDtmfTerminationDigit = recognizer.asrDtmfTerminationDigit;
this.isContinuousAsr = this.asrTimeout > 0;
if (Array.isArray(this.data.recognizer.hints) &&
0 == this.data.recognizer.hints.length && JAMBONES_GATHER_CLEAR_GLOBAL_HINTS_ON_EMPTY_HINTS) {
logger.debug('Gather: an empty hints array was supplied, so we will mask global hints');
this.maskGlobalSttHints = true;
}
// fast recognition: fire the event a specified time after the last hypothesis.
this.fastRecognitionTimeout = typeof this.data.recognizer.fastRecognitionTimeout === 'number' ?
this.data.recognizer.fastRecognitionTimeout * 1000 : 0;
this.data.recognizer.hints = this.data.recognizer.hints || [];
this.data.recognizer.altLanguages = this.data.recognizer.altLanguages || [];
}
else this.data.recognizer = {hints: [], altLanguages: []};
this.digitBuffer = '';
this._earlyMedia = this.data.earlyMedia === true;
@@ -70,6 +97,11 @@ class TaskGather extends SttTask {
/* buffer speech for continuous asr */
this._bufferedTranscripts = [];
/* buffer for soniox transcripts */
this._sonioxTranscripts = [];
this.parentTask = parentTask;
this.partialTranscriptsCount = 0;
}
@@ -105,10 +137,11 @@ class TaskGather extends SttTask {
this.logger.debug({options: this.data}, 'Gather:exec');
await super.exec(cs);
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
const {getNuanceAccessToken, getIbmAccessToken} = cs.srf.locals.dbHelpers;
if (cs.hasGlobalSttHints && !this.maskGlobalSttHints) {
const {hints, hintsBoost} = cs.globalSttHints;
const setOfHints = new Set((this.data.recognizer.hints || [])
const setOfHints = new Set(this.data.recognizer.hints
.concat(hints)
.filter((h) => typeof h === 'string' && h.length > 0));
this.data.recognizer.hints = [...setOfHints];
@@ -118,7 +151,7 @@ class TaskGather extends SttTask {
}
if (cs.hasAltLanguages) {
this.data.recognizer.altLanguages = this.data.recognizer.altLanguages.concat(cs.altLanguages);
this.logger.debug({altLanguages: this.data.recognizer?.altLanguages},
this.logger.debug({altLanguages: this.altLanguages},
'Gather:exec - applying altLanguages');
}
if (cs.hasGlobalSttPunctuation && !this.data.recognizer.punctuation) {
@@ -151,69 +184,57 @@ class TaskGather extends SttTask {
this.language = cs.speechRecognizerLanguage;
if (this.data.recognizer) this.data.recognizer.language = this.language;
}
if ('default' === this.label || !this.label) {
this.label = cs.speechRecognizerLabel;
if (this.data.recognizer) this.data.recognizer.label = this.label;
}
// Fallback options
if ('default' === this.fallbackVendor || !this.fallbackVendor) {
this.fallbackVendor = cs.fallbackSpeechRecognizerVendor;
if (this.data.recognizer) this.data.recognizer.fallbackVendor = this.fallbackVendor;
}
if ('default' === this.fallbackLanguage || !this.fallbackLanguage) {
this.fallbackLanguage = cs.fallbackSpeechRecognizerLanguage;
if (this.data.recognizer) this.data.recognizer.fallbackLanguage = this.fallbackLanguage;
}
if ('default' === this.fallbackLabel || !this.fallbackLabel) {
this.fallbackLabel = cs.fallbackSpeechRecognizerLabel;
if (this.data.recognizer) this.data.recognizer.fallbackLabel = this.fallbackLabel;
}
if (!this.data.recognizer.vendor) {
this.data.recognizer.vendor = this.vendor;
}
if (this.vendor === 'cobalt' && !this.data.recognizer.model) {
// By default, the application stores the cobalt model in the language setting
this.data.recognizer.model = cs.speechRecognizerLanguage;
}
if (this.needsStt && !this.sttCredentials) this.sttCredentials = cs.getSpeechCredentials(this.vendor, 'stt');
if (this.needsStt && !this.sttCredentials) {
try {
this.sttCredentials = await this._initSpeechCredentials(cs, this.vendor, this.label);
} catch (error) {
if (this.fallbackVendor && this.isHandledByPrimaryProvider) {
await this._fallback();
} else {
throw error;
}
}
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info(`TaskGather:exec - ERROR stt using ${this.vendor} requested but creds not supplied`);
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_NOT_PROVISIONED,
vendor: this.vendor
}).catch((err) => this.logger.info({err}, 'Error generating alert for no stt'));
// Notify the application that no credentials are configured for this STT vendor.
this.notifyError({
msg: 'ASR error',
details: `No speech-to-text service credentials for ${this.vendor} have been configured`
});
this.notifyTaskDone();
throw new Error(`No speech-to-text service credentials for ${this.vendor} have been configured`);
}
/* when using cobalt model is required */
if (this.vendor === 'cobalt' && !this.data.recognizer.model) {
this.notifyError({ msg: 'ASR error', details:'Cobalt requires a model to be specified'});
throw new Error('Cobalt requires a model to be specified');
if (this.vendor === 'nuance' && this.sttCredentials.client_id) {
/* get nuance access token */
const {client_id, secret} = this.sttCredentials;
const {access_token, servedFromCache} = await getNuanceAccessToken(client_id, secret, 'asr tts');
this.logger.debug({client_id}, `Gather:exec - got nuance access token ${servedFromCache ? 'from cache' : ''}`);
this.sttCredentials = {...this.sttCredentials, access_token};
}
const startListening = async(cs, ep) => {
else if (this.vendor == 'ibm' && this.sttCredentials.stt_api_key) {
/* get ibm access token */
const {stt_api_key, stt_region} = this.sttCredentials;
const {access_token, servedFromCache} = await getIbmAccessToken(stt_api_key);
this.logger.debug({stt_api_key}, `Gather:exec - got ibm access token ${servedFromCache ? 'from cache' : ''}`);
this.sttCredentials = {...this.sttCredentials, access_token, stt_region};
}
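
// A minimal sketch of the access-token handling above: Nuance and IBM require a
// short-lived token minted from the stored credentials, and the db helpers report
// whether it was served from cache. fetchToken is a hypothetical stand-in for
// getNuanceAccessToken / getIbmAccessToken.
const withAccessToken = async(credentials, fetchToken, logger) => {
  const {access_token, servedFromCache} = await fetchToken(credentials);
  logger.debug(`got access token ${servedFromCache ? 'from cache' : 'freshly minted'}`);
  return {...credentials, access_token};
};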
const startListening = (cs, ep) => {
this._startTimer();
if (this.isContinuousAsr && 0 === this.timeout) this._startAsrTimer();
if (this.input.includes('speech') && !this.listenDuringPrompt) {
try {
await this._setSpeechHandlers(cs, ep);
if (this.killed) {
this.logger.info('Gather:exec - task was quickly killed so do not transcribe');
return;
}
this._startTranscribing(ep);
return updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid);
} catch (e) {
if (this.fallbackVendor && this.isHandledByPrimaryProvider) {
await this._fallback();
startListening(cs, ep);
} else {
this.logger.error({error: e}, 'error in initSpeech');
}
}
this._initSpeech(cs, ep)
.then(() => {
if (this.killed) {
this.logger.info('Gather:exec - task was quickly killed so do not transcribe');
return;
}
this._startTranscribing(ep);
return updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid);
})
.catch((err) => {
this.logger.error({err}, 'error in initSpeech');
});
}
};
@@ -267,7 +288,7 @@ class TaskGather extends SttTask {
}
if (this.input.includes('speech') && this.listenDuringPrompt) {
await this._setSpeechHandlers(cs, ep);
await this._initSpeech(cs, ep);
this._startTranscribing(ep);
updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid)
.catch(() => {/*already logged error */});
@@ -341,16 +362,7 @@ class TaskGather extends SttTask {
}
}
async _setSpeechHandlers(cs, ep) {
if (this._speechHandlersSet) return;
this._speechHandlersSet = true;
/* some special deepgram logic */
if (this.vendor === 'deepgram') {
if (this.isContinuousAsr) this._doContinuousAsrWithDeepgram(this.asrTimeout);
if (this.data.recognizer?.deepgramOptions?.shortUtterance) this.shortUtterance = true;
}
async _initSpeech(cs, ep) {
const opts = this.setChannelVarsForStt(this, this.sttCredentials, this.data.recognizer);
switch (this.vendor) {
case 'google':
@@ -393,12 +405,9 @@ class TaskGather extends SttTask {
case 'deepgram':
this.bugname = 'deepgram_transcribe';
ep.addCustomEventListener(DeepgramTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(DeepgramTranscriptionEvents.Connect, this._onVendorConnect.bind(this, cs, ep));
ep.addCustomEventListener(DeepgramTranscriptionEvents.Connect, this._onDeepgramConnect.bind(this, cs, ep));
ep.addCustomEventListener(DeepgramTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep));
/* if app sets deepgramOptions.utteranceEndMs they essentially want continuous asr */
if (opts.DEEPGRAM_SPEECH_UTTERANCE_END_MS) this.isContinuousAsr = true;
this._onDeepGramConnectFailure.bind(this, cs, ep));
break;
case 'soniox':
@@ -406,42 +415,12 @@ class TaskGather extends SttTask {
ep.addCustomEventListener(SonioxTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
break;
case 'cobalt':
this.bugname = 'cobalt_transcribe';
ep.addCustomEventListener(CobaltTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
/* cobalt doesn't have a language, it has a model, which is required */
if (!this.data.recognizer.model) {
throw new Error('Cobalt requires a model to be specified');
}
this.language = this.data.recognizer.model;
/* special case: if using hints with cobalt we need to compile them */
this.hostport = opts.COBALT_SERVER_URI;
if (this.vendor === 'cobalt' && opts.COBALT_SPEECH_HINTS) {
try {
const context = await this.compileHintsForCobalt(
ep,
this.hostport,
this.data.recognizer.model,
opts.COBALT_CONTEXT_TOKEN,
opts.COBALT_SPEECH_HINTS
);
if (context) opts.COBALT_COMPILED_CONTEXT_DATA = context;
delete opts.COBALT_SPEECH_HINTS;
} catch (err) {
this.logger.error({err}, 'Error compiling hints for cobalt');
}
}
delete opts.COBALT_SERVER_URI;
break;
case 'ibm':
this.bugname = 'ibm_transcribe';
ep.addCustomEventListener(IbmTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(IbmTranscriptionEvents.Connect, this._onVendorConnect.bind(this, cs, ep));
ep.addCustomEventListener(IbmTranscriptionEvents.Connect, this._onIbmConnect.bind(this, cs, ep));
ep.addCustomEventListener(IbmTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep));
this._onIbmConnectFailure.bind(this, cs, ep));
break;
case 'nvidia':
@@ -461,22 +440,13 @@ class TaskGather extends SttTask {
}
break;
case 'assemblyai':
this.bugname = 'assemblyai_transcribe';
ep.addCustomEventListener(AssemblyAiTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(AssemblyAiTranscriptionEvents.Connect, this._onVendorConnect.bind(this, cs, ep));
ep.addCustomEventListener(AssemblyAiTranscriptionEvents.Error, this._onVendorError.bind(this, cs, ep));
ep.addCustomEventListener(AssemblyAiTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep));
break;
default:
if (this.vendor.startsWith('custom:')) {
this.bugname = `${this.vendor}_transcribe`;
ep.addCustomEventListener(JambonzTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(JambonzTranscriptionEvents.Connect, this._onVendorConnect.bind(this, cs, ep));
ep.addCustomEventListener(JambonzTranscriptionEvents.Connect, this._onJambonzConnect.bind(this, cs, ep));
ep.addCustomEventListener(JambonzTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep));
this._onJambonzConnectFailure.bind(this, cs, ep));
break;
}
else {
@@ -499,18 +469,11 @@ class TaskGather extends SttTask {
interim: this.interim,
bugname: this.bugname
}, 'Gather:_startTranscribing');
/**
* Note: we don't need to ask deepgram for interim results, because they
* already send us words as they are finalized (is_final=true) even before
* the utterance is finalized (speech_final=true)
*/
ep.startTranscription({
vendor: this.vendor,
locale: this.language,
interim: this.interim,
bugname: this.bugname,
hostport: this.hostport,
}).catch((err) => {
const {writeAlerts, AlertType} = this.cs.srf.locals;
this.logger.error(err, 'TaskGather:_startTranscribing error');
@@ -540,13 +503,11 @@ class TaskGather extends SttTask {
}
_startAsrTimer() {
if (this.vendor === 'deepgram') return; // no need
assert(this.isContinuousAsr);
this._clearAsrTimer();
this._asrTimer = setTimeout(() => {
this.logger.debug('_startAsrTimer - asr timer went off');
const evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language);
this._resolve(this._bufferedTranscripts.length > 0 ? 'speech' : 'timeout', evt);
this._resolve(this._bufferedTranscripts.length > 0 ? 'speech' : 'timeout');
}, this.asrTimeout);
this.logger.debug(`_startAsrTimer: set for ${this.asrTimeout}ms`);
}
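
// A standalone sketch of the continuous-ASR debounce implemented by _startAsrTimer
// above: each transcript restarts the timer, and when the caller stays silent for
// asrTimeout ms the buffered pieces are resolved as one result. AsrDebouncer is
// hypothetical, not part of the task.
class AsrDebouncer {
  constructor(asrTimeoutMs, resolve) {
    this.asrTimeoutMs = asrTimeoutMs;
    this.resolve = resolve;      // e.g. (reason, transcripts) => { ... }
    this.buffered = [];
    this.timer = null;
  }
  collect(evt) {
    this.buffered.push(evt);
    clearTimeout(this.timer);
    this.timer = setTimeout(() => {
      const pieces = this.buffered.splice(0);
      this.resolve(pieces.length > 0 ? 'speech' : 'timeout', pieces);
    }, this.asrTimeoutMs);
  }
}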
@@ -556,28 +517,11 @@ class TaskGather extends SttTask {
this._asrTimer = null;
}
_startFastRecognitionTimer(evt) {
assert(this.fastRecognitionTimeout > 0);
this._clearFastRecognitionTimer();
this._fastRecognitionTimer = setTimeout(() => {
evt.is_final = true;
this._resolve('speech', evt);
}, this.fastRecognitionTimeout);
}
_clearFastRecognitionTimer() {
if (this._fastRecognitionTimer) {
clearTimeout(this._fastRecognitionTimer);
}
this._fastRecognitionTimer = null;
}
_startFinalAsrTimer() {
this._clearFinalAsrTimer();
this._finalAsrTimer = setTimeout(() => {
this.logger.debug('_startFinalAsrTimer - final asr timer went off');
const evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language);
this._resolve(this._bufferedTranscripts.length > 0 ? 'speech' : 'timeout', evt);
this._resolve(this._bufferedTranscripts.length > 0 ? 'speech' : 'timeout');
}, 1000);
this.logger.debug('_startFinalAsrTimer: set for 1 second');
}
@@ -616,23 +560,11 @@ class TaskGather extends SttTask {
this.logger.debug({evt, bugname, finished}, `Gather:_onTranscription for vendor ${this.vendor}`);
if (bugname && this.bugname !== bugname) return;
if (this.vendor === 'ibm' && evt?.state === 'listening') return;
if (this.vendor === 'deepgram' && evt.type === 'UtteranceEnd') {
/* we will only get this when we have set utterance_end_ms */
if (this._bufferedTranscripts.length === 0) {
this.logger.debug('Gather:_onTranscription - got UtteranceEnd event from deepgram but no buffered transcripts');
}
else {
this.logger.debug('Gather:_onTranscription - got UtteranceEnd event from deepgram, return buffered transcript');
evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language);
this._bufferedTranscripts = [];
this._resolve('speech', evt);
}
return;
if (this.vendor === 'ibm') {
if (evt?.state === 'listening') return;
}
evt = this.normalizeTranscription(evt, this.vendor, 1, this.language, this.shortUtterance);
evt = this.normalizeTranscription(evt, this.vendor, 1, this.language);
if (evt.alternatives.length === 0) {
this.logger.info({evt}, 'TaskGather:_onTranscription - got empty transcript, continue listening');
return;
@@ -654,27 +586,15 @@ class TaskGather extends SttTask {
const bufferedWords = this._sonioxTranscripts.length +
this._bufferedTranscripts.reduce((count, e) => count + e.alternatives[0]?.transcript.split(' ').length, 0);
let emptyTranscript = false;
if (evt.is_final) {
if (evt.alternatives[0].transcript === '' && !this.callSession.callGone && !this.killed) {
emptyTranscript = true;
if (finished === 'true' && ['microsoft', 'deepgram'].includes(this.vendor)) {
this.logger.debug({evt}, 'TaskGather:_onTranscription - got empty transcript from old gather, disregarding');
return;
}
else if (this.vendor !== 'deepgram') {
else {
this.logger.info({evt}, 'TaskGather:_onTranscription - got empty transcript, continue listening');
return;
}
else if (this.isContinuousAsr) {
this.logger.info({evt},
'TaskGather:_onTranscription - got empty deepgram transcript during continuous asr, continue listening');
return;
}
else if (this.vendor === 'deepgram' && this._bufferedTranscripts.length > 0) {
this.logger.info({evt},
'TaskGather:_onTranscription - got empty transcript from deepgram, return the buffered transcripts');
}
return;
}
if (this.isContinuousAsr) {
@@ -686,14 +606,14 @@ class TaskGather extends SttTask {
this.logger.debug('TaskGather:_onTranscription - removing trailing punctuation');
evt.alternatives[0].transcript = t.slice(0, -1);
}
else this.logger.debug({t}, 'TaskGather:_onTranscription - no trailing punctuation');
}
this.logger.info({evt}, 'TaskGather:_onTranscription - got transcript during continuous asr');
this._bufferedTranscripts.push(evt);
this._clearTimer();
if (this._finalAsrTimer) {
this._clearFinalAsrTimer();
const evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language);
return this._resolve(this._bufferedTranscripts.length > 0 ? 'speech' : 'timeout', evt);
return this._resolve(this._bufferedTranscripts.length > 0 ? 'speech' : 'timeout');
}
this._startAsrTimer();
@@ -715,25 +635,16 @@ class TaskGather extends SttTask {
evt = this.compileSonioxTranscripts(this._sonioxTranscripts, 1, this.language);
this._sonioxTranscripts = [];
}
else if (this.vendor === 'deepgram') {
/* compile transcripts into one */
if (!emptyTranscript) this._bufferedTranscripts.push(evt);
if (this.data.recognizer?.deepgramOptions?.utteranceEndMs) {
this.logger.debug('TaskGather:_onTranscription - got speech_final waiting for UtteranceEnd event');
return;
}
this.logger.debug({evt}, 'TaskGather:_onTranscription - compiling deepgram transcripts');
evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language);
this._bufferedTranscripts = [];
this.logger.debug({evt}, 'TaskGather:_onTranscription - compiled deepgram transcripts');
}
/* here is where we return a final transcript */
this._resolve('speech', evt);
}
}
}
else {
/* google has a measure of stability:
https://cloud.google.com/speech-to-text/docs/basics#streaming_responses
others do not.
*/
//const isStableEnough = typeof evt.stability === 'undefined' || evt.stability > GATHER_STABILITY_THRESHOLD;
this._clearTimer();
this._startTimer();
if (this.bargein && (words + bufferedWords) >= this.minBargeinWordCount) {
@@ -743,9 +654,6 @@ class TaskGather extends SttTask {
}
this._killAudio(cs);
}
if (this.fastRecognitionTimeout) {
this._startFastRecognitionTimer(evt);
}
if (this.partialResultHook) {
const b3 = this.getTracingPropagation();
const httpHeaders = b3 && {b3};
@@ -759,14 +667,6 @@ class TaskGather extends SttTask {
this._sonioxTranscripts.push(evt.vendor.finalWords);
}
}
/* deepgram can send a non-final transcript but with words that are final, so we need to buffer */
if (this.vendor === 'deepgram') {
const originalEvent = evt.vendor.evt;
if (originalEvent.is_final && evt.alternatives[0].transcript !== '') {
this.logger.debug({evt}, 'Gather:_onTranscription - buffering a completed (partial) deepgram transcript');
this._bufferedTranscripts.push(evt);
}
}
}
}
_onEndOfUtterance(cs, ep) {
@@ -781,7 +681,7 @@ class TaskGather extends SttTask {
* getting a transcription. This can happen if someone coughs or mumbles.
* For that reason don't ask for a single utterance and we'll terminate the transcribe operation
* once we get a final transcript.
* However, if the user has specified a singleUtterance, then we need to restart here
* However, if the usr has specified a singleUtterance, then we need to restart here
since we don't have a final transcript yet.
*/
if (!this.resolved && !this.killed && !this._bufferedTranscripts.length && this.wantsSingleUtterance) {
@@ -798,22 +698,14 @@ class TaskGather extends SttTask {
_onTranscriptionComplete(cs, ep) {
this.logger.debug('TaskGather:_onTranscriptionComplete');
}
async _onJambonzError(cs, ep, evt) {
_onDeepgramConnect(_cs, _ep) {
this.logger.debug('TaskGather:_onDeepgramConnect');
}
_onJambonzConnect(_cs, _ep) {
this.logger.debug('TaskGather:_onJambonzConnect');
}
_onJambonzError(cs, _ep, evt) {
this.logger.info({evt}, 'TaskGather:_onJambonzError');
if (this.isHandledByPrimaryProvider && this.fallbackVendor) {
ep.stopTranscription({vendor: this.vendor})
.catch((err) => this.logger.error({err}, `Error stopping transcription for primary vendor ${this.vendor}`));
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
try {
await this._fallback();
await this._initSpeech(cs, ep);
this._startTranscribing(ep);
updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid);
return;
} catch (error) {
this.logger.info({error}, `There is error while falling back to ${this.fallbackVendor}`);
}
}
const {writeAlerts, AlertType} = cs.srf.locals;
if (this.vendor === 'nuance') {
@@ -831,16 +723,54 @@ class TaskGather extends SttTask {
this.notifyError({msg: 'ASR error', details:`Custom speech vendor ${this.vendor} error: ${evt.error}`});
}
_onVendorConnectFailure(cs, _ep, evt) {
super._onVendorConnectFailure(cs, _ep, evt);
_onDeepGramConnectFailure(cs, _ep, evt) {
const {reason} = evt;
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info({evt}, 'TaskGather:_onDeepgramConnectFailure');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Failed connecting to Deepgram speech recognizer: ${reason}`,
vendor: 'deepgram',
}).catch((err) => this.logger.info({err}, 'Error generating alert for deepgram connection failure'));
this.notifyError({msg: 'ASR error', details:`Failed connecting to speech vendor deepgram: ${reason}`});
this.notifyTaskDone();
}
_onJambonzConnectFailure(cs, _ep, evt) {
const {reason} = evt;
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info({evt}, 'TaskGather:_onJambonzConnectFailure');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Failed connecting to ${this.vendor} speech recognizer: ${reason}`,
vendor: this.vendor,
}).catch((err) => this.logger.info({err}, 'Error generating alert for jambonz custom connection failure'));
this.notifyError({msg: 'ASR error', details:`Failed connecting to speech vendor ${this.vendor}: ${reason}`});
this.notifyTaskDone();
}
_onVendorError(cs, _ep, evt) {
super._onVendorError(cs, _ep, evt);
this._resolve('stt-error', evt);
_onIbmConnect(_cs, _ep) {
this.logger.debug('TaskGather:_onIbmConnect');
}
_onIbmConnectFailure(cs, _ep, evt) {
const {reason} = evt;
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info({evt}, 'TaskGather:_onIbmConnectFailure');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Failed connecting to IBM watson speech recognizer: ${reason}`,
vendor: 'ibm',
}).catch((err) => this.logger.info({err}, 'Error generating alert for IBM connection failure'));
this.notifyError({msg: 'ASR error', details:`Failed connecting to speech vendor IBM: ${reason}`});
this.notifyTaskDone();
}
_onIbmError(cs, _ep, evt) {
this.logger.info({evt}, 'TaskGather:_onIbmError'); }
_onVadDetected(cs, ep) {
if (this.bargein && this.minBargeinWordCount === 0) {
this.logger.debug('TaskGather:_onVadDetected');
@@ -874,13 +804,20 @@ class TaskGather extends SttTask {
}
clearTimeout(this.interDigitTimer);
this._clearTimer();
this._clearFastRecognitionTimer();
this.span.setAttributes({
channel: 1,
'stt.resolve': reason,
'stt.result': JSON.stringify(evt)
});
if (this.isContinuousAsr && reason.startsWith('speech')) {
evt = {
is_final: true,
transcripts: this._bufferedTranscripts
};
this.logger.debug({evt}, 'TaskGather:resolve continuous asr');
}
else if (!this.isContinuousAsr && reason.startsWith('speech') && this._bufferedTranscripts.length) {
compileTranscripts(this.logger, evt, this._bufferedTranscripts);
this.logger.debug({evt}, 'TaskGather:resolve buffered results');
}
this.span.setAttributes({'stt.resolve': reason, 'stt.result': JSON.stringify(evt)});
if (this.needsStt && this.ep && this.ep.connected) {
this.ep.stopTranscription({vendor: this.vendor})
.catch((err) => this.logger.error({err}, 'Error stopping transcription'));
@@ -914,13 +851,6 @@ class TaskGather extends SttTask {
await this.performAction({reason: 'timeout'});
}
}
else if (reason.startsWith('stt-error')) {
if (this.parentTask) this.parentTask.emit('stt-error', evt);
else {
this.emit('stt-error', evt);
await this.performAction({reason: 'error', details: evt.error});
}
}
} catch (err) { /*already logged error*/ }
this.notifyTaskDone();
}

View File

@@ -25,13 +25,6 @@ class Lex extends Task {
this.vendor = this.data.tts.vendor || 'default';
this.language = this.data.tts.language || 'default';
this.voice = this.data.tts.voice || 'default';
this.speechCredentialLabel = this.data.tts.label || 'default';
// fallback tts
this.fallbackVendor = this.data.tts.fallbackVendor || 'default';
this.fallbackLanguage = this.data.tts.fallbackLanguage || 'default';
this.fallbackVoice = this.data.tts.fallbackLanguage || 'default';
this.fallbackLabel = this.data.tts.fallbackLabel || 'default';
}
this.botName = `${this.bot}:${this.alias}:${this.region}`;
@@ -109,16 +102,8 @@ class Lex extends Task {
this.vendor = cs.speechSynthesisVendor;
this.language = cs.speechSynthesisLanguage;
this.voice = cs.speechSynthesisVoice;
this.speechCredentialLabel = cs.speechSynthesisLabel;
}
if (this.fallbackVendor === 'default') {
this.fallbackVendor = cs.fallbackSpeechSynthesisVendor;
this.fallbackLanguage = cs.fallbackSpeechSynthesisLanguage;
this.fallbackVoice = cs.fallbackSpeechSynthesisVoice;
this.fallbackLabel = cs.fallbackSpeechSynthesisLabel;
}
this.ttsCredentials = cs.getSpeechCredentials(this.vendor, 'tts', this.speechCredentialLabel);
this.ttsCredentials = cs.getSpeechCredentials(this.vendor, 'tts');
this.ep.addCustomEventListener('lex::intent', this._onIntent.bind(this, ep, cs));
this.ep.addCustomEventListener('lex::transcription', this._onTranscription.bind(this, ep, cs));
@@ -183,41 +168,6 @@ class Lex extends Task {
}
}
async _fallbackSynthAudio(cs, msg, stats, synthAudio) {
try {
const {filePath} = await synthAudio(stats, {
account_sid: cs.accountSid,
text: msg,
vendor: this.vendor,
language: this.language,
voice: this.voice,
salt: cs.callSid,
credentials: this.ttsCredentials
});
return filePath;
} catch (error) {
this.logger.info({error}, 'failed to synth audio from primary vendor');
if (this.fallbackVendor) {
try {
const credential = cs.getSpeechCredentials(this.fallbackVendor, 'tts', this.fallbackLabel);
const {filePath} = await synthAudio(stats, {
account_sid: cs.accountSid,
text: msg,
vendor: this.fallbackVendor,
language: this.fallbackLanguage,
voice: this.fallbackVoice,
salt: cs.callSid,
credentials: credential
});
return filePath;
} catch (err) {
this.logger.info({err}, 'failed to synth audio from fallback vendor');
}
}
}
}
/**
* @param {*} evt - event data
*/
@@ -237,7 +187,16 @@ class Lex extends Task {
try {
this.logger.debug(`tts with ${this.vendor} ${this.voice}`);
const filePath = await this._fallbackSynthAudio(cs, msg, stats, synthAudio);
// eslint-disable-next-line no-unused-vars
const {filePath, servedFromCache} = await synthAudio(stats, {
account_sid: cs.accountSid,
text: msg,
vendor: this.vendor,
language: this.language,
voice: this.voice,
salt: cs.callSid,
credentials: this.ttsCredentials
});
if (filePath) cs.trackTmpFile(filePath);
if (this.events.includes('start-play')) {

View File

@@ -3,12 +3,10 @@ const {TaskName, TaskPreconditions, ListenEvents, ListenStatus} = require('../ut
const makeTask = require('./make_task');
const moment = require('moment');
const MAX_PLAY_AUDIO_QUEUE_SIZE = 10;
const DTMF_SPAN_NAME = 'dtmf';
class TaskListen extends Task {
constructor(logger, opts, parentTask) {
super(logger, opts);
this.disableBidirectionalAudio = opts.disableBidirectionalAudio;
this.preconditions = TaskPreconditions.Endpoint;
[
@@ -31,10 +29,6 @@ class TaskListen extends Task {
get name() { return TaskName.Listen; }
set bugname(name) { this._bugname = name; }
set ignoreCustomerData(val) { this._ignoreCustomerData = val; }
async exec(cs, {ep}) {
await super.exec(cs);
this.ep = ep;
@@ -71,8 +65,7 @@ class TaskListen extends Task {
if (this.ep && this.ep.connected) {
this.logger.debug('TaskListen:kill closing websocket');
try {
const args = this._bugname ? [this._bugname] : [];
await this.ep.forkAudioStop(...args);
await this.ep.forkAudioStop();
this.logger.debug('TaskListen:kill successfully closed websocket');
} catch (err) {
this.logger.info(err, 'TaskListen:kill');
@@ -92,16 +85,13 @@ class TaskListen extends Task {
async updateListen(status) {
if (!this.killed && this.ep && this.ep.connected) {
const args = this._bugname ? [this._bugname] : [];
this.logger.info(`TaskListen:updateListen status ${status}`);
switch (status) {
case ListenStatus.Pause:
await this.ep.forkAudioPause(...args)
.catch((err) => this.logger.info(err, 'TaskListen: error pausing audio'));
await this.ep.forkAudioPause().catch((err) => this.logger.info(err, 'TaskListen: error pausing audio'));
break;
case ListenStatus.Resume:
await this.ep.forkAudioResume(...args)
.catch((err) => this.logger.info(err, 'TaskListen: error resuming audio'));
await this.ep.forkAudioResume().catch((err) => this.logger.info(err, 'TaskListen: error resuming audio'));
break;
}
}
@@ -114,13 +104,9 @@ class TaskListen extends Task {
async _startListening(cs, ep) {
this._initListeners(ep);
const ci = this.nested ? this.parentTask.sd.callInfo : cs.callInfo.toJSON();
if (this._ignoreCustomerData) {
delete ci.customerData;
}
const metadata = Object.assign(
{sampleRate: this.sampleRate, mixType: this.mixType},
ci,
this.nested ? this.parentTask.sd.callInfo : cs.callInfo.toJSON(),
this.metadata);
if (this.hook.auth) {
this.logger.debug({username: this.hook.auth.username, password: this.hook.auth.password},
@@ -134,7 +120,6 @@ class TaskListen extends Task {
wsUrl: this.hook.url,
mixType: this.mixType,
sampling: this.sampleRate,
...(this._bugname && {bugname: this._bugname}),
metadata
});
this.recordStartTime = moment();
@@ -155,7 +140,7 @@ class TaskListen extends Task {
}
/* support bi-directional audio */
if (!this.disableBidirectionalAudio) {
if (!this.disableBiDirectionalAudio) {
ep.addCustomEventListener(ListenEvents.PlayAudio, this._onPlayAudio.bind(this, ep));
}
ep.addCustomEventListener(ListenEvents.KillAudio, this._onKillAudio.bind(this, ep));
@@ -176,25 +161,12 @@ class TaskListen extends Task {
}
_onDtmf(ep, evt) {
const {dtmf, duration} = evt;
this.logger.debug({evt}, `TaskListen:_onDtmf received dtmf ${dtmf}`);
this.logger.debug({evt}, `TaskListen:_onDtmf received dtmf ${evt.dtmf}`);
if (this.passDtmf && this.ep?.connected) {
const obj = {event: 'dtmf', dtmf, duration};
const args = this._bugname ? [this._bugname, obj] : [obj];
this.ep.forkAudioSendText(...args)
const obj = {event: 'dtmf', dtmf: evt.dtmf, duration: evt.duration};
this.ep.forkAudioSendText(obj)
.catch((err) => this.logger.info({err}, 'TaskListen:_onDtmf error sending dtmf'));
}
/* add a child span for the dtmf event */
const msDuration = Math.floor((duration / 8000) * 1000);
const {span} = this.startChildSpan(`${DTMF_SPAN_NAME}:${dtmf}`);
span.setAttributes({
channel: 1,
dtmf,
duration: `${msDuration}ms`
});
span.end();
if (evt.dtmf === this.finishOnKey) {
this.logger.info(`TaskListen:_onDtmf terminating task due to dtmf ${evt.dtmf}`);
this.results.digits = evt.dtmf;
@@ -220,15 +192,7 @@ class TaskListen extends Task {
try {
const results = await ep.play(evt.file);
logger.debug(`Finished playing file, result: ${JSON.stringify(results)}`);
const obj = {
type: 'playDone',
data: {
id: evt.id,
...results
}
};
const args = this._bugname ? [this._bugname, obj] : [obj];
ep.forkAudioSendText(...args);
ep.forkAudioSendText({type: 'playDone', data: Object.assign({id: evt.id}, results)});
} catch (err) {
logger.error({err}, 'Error playing file');
}
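
The _onDtmf handler above treats the duration reported on the DTMF event as a sample count at the 8 kHz narrowband rate, hence the division by 8000 before converting to milliseconds. A minimal sketch of that conversion (dtmfSamplesToMs is a hypothetical helper):

const dtmfSamplesToMs = (samples, sampleRate = 8000) =>
  Math.floor((samples / sampleRate) * 1000);

// dtmfSamplesToMs(2000) => 250   (a 250 ms keypress)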

View File

@@ -63,13 +63,12 @@ class TaskRestDial extends Task {
this.canCancel = false;
const cs = this.callSession;
cs.setDialog(dlg);
this.logger.debug('TaskRestDial:_onConnect - call connected');
try {
const b3 = this.getTracingPropagation();
const httpHeaders = b3 && {b3};
const params = {
...(cs.callInfo.toJSON()),
...cs.callInfo,
defaults: {
synthesizer: {
vendor: cs.speechSynthesisVendor,
@@ -91,10 +90,8 @@ class TaskRestDial extends Task {
}
let tasks;
if (this.app_json) {
this.logger.debug('TaskRestDial: using app_json from task data');
tasks = JSON.parse(this.app_json);
} else {
this.logger.debug({call_hook: this.call_hook}, 'TaskRestDial: retrieving application');
tasks = await cs.requestor.request('session:new', this.call_hook, params, httpHeaders);
}
if (tasks && Array.isArray(tasks)) {
@@ -128,10 +125,7 @@ class TaskRestDial extends Task {
_onCallTimeout() {
this.logger.debug('TaskRestDial: timeout expired without answer, killing task');
this.timer = null;
if (this.canCancel) {
this.canCancel = false;
this.cs?.req?.cancel();
}
this.kill(this.cs);
}
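
// A minimal sketch of the application-resolution branch above: an outbound REST
// dial either carries its application inline as app_json or fetches it from the
// configured call_hook. resolveApplication is hypothetical; the task does this
// inline in _onConnect.
const resolveApplication = async({app_json, call_hook, requestor, params, httpHeaders}) => {
  if (app_json) return JSON.parse(app_json);                               // inline application JSON
  return requestor.request('session:new', call_hook, params, httpHeaders); // fetch from webhook
};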
_onAmdEvent(cs, evt) {

View File

@@ -37,7 +37,6 @@ class TaskSay extends Task {
this.synthesizer = this.data.synthesizer || {};
this.disableTtsCache = this.data.disableTtsCache;
this.options = this.synthesizer.options || {};
this.isHandledByPrimaryProvider = true;
}
get name() { return TaskName.Say; }
@@ -50,24 +49,26 @@ class TaskSay extends Task {
return `${this.name}{${this.text[0]}}`;
}
_validateURL(urlString) {
try {
new URL(urlString);
return true;
} catch (e) {
return false;
}
}
async exec(cs, {ep}) {
await super.exec(cs);
async _synthesizeWithSpecificVendor(cs, ep, {vendor, language, voice, label}) {
const {srf} = cs;
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, srf);
const {writeAlerts, AlertType, stats} = srf.locals;
const {synthAudio} = srf.locals.dbHelpers;
const vendor = this.synthesizer.vendor && this.synthesizer.vendor !== 'default' ?
this.synthesizer.vendor :
cs.speechSynthesisVendor;
const language = this.synthesizer.language && this.synthesizer.language !== 'default' ?
this.synthesizer.language :
cs.speechSynthesisLanguage ;
let voice = this.synthesizer.voice && this.synthesizer.voice !== 'default' ?
this.synthesizer.voice :
cs.speechSynthesisVoice;
const engine = this.synthesizer.engine || 'standard';
const salt = cs.callSid;
let credentials = cs.getSpeechCredentials(vendor, 'tts');
let credentials = cs.getSpeechCredentials(vendor, 'tts', label);
/* parse Nuance voices into name and model */
let model;
if (vendor === 'nuance' && voice) {
@@ -89,6 +90,7 @@ class TaskSay extends Task {
}
this.logger.info({vendor, language, voice, model}, 'TaskSay:exec');
this.ep = ep;
try {
if (!credentials) {
writeAlerts({
@@ -158,83 +160,31 @@ class TaskSay extends Task {
detail: err.message
}).catch((err) => this.logger.info({err}, 'Error generating alert for tts failure'));
this.notifyError({msg: 'TTS error', details: err.message || err});
throw err;
return;
}
};
const arr = this.text.map((t) => (this._validateURL(t) ? t : generateAudio(t)));
return (await Promise.all(arr)).filter((fp) => fp && fp.length);
const arr = this.text.map((t) => generateAudio(t));
const filepath = (await Promise.all(arr)).filter((fp) => fp && fp.length);
this.notifyStatus({event: 'start-playback'});
while (!this.killed && (this.loop === 'forever' || this.loop--) && this.ep?.connected) {
let segment = 0;
while (!this.killed && segment < filepath.length) {
if (cs.isInConference) {
const {memberId, confName, confUuid} = cs;
await this.playToConfMember(this.ep, memberId, confName, confUuid, filepath[segment]);
}
else {
this.logger.debug(`Say:exec sending command to play file ${filepath[segment]}`);
await ep.play(filepath[segment]);
this.logger.debug(`Say:exec completed play file ${filepath[segment]}`);
}
segment++;
}
}
} catch (err) {
this.logger.info(err, 'TaskSay:exec error');
throw err;
}
}
async exec(cs, {ep}) {
await super.exec(cs);
this.ep = ep;
const vendor = this.synthesizer.vendor && this.synthesizer.vendor !== 'default' ?
this.synthesizer.vendor :
cs.speechSynthesisVendor;
const language = this.synthesizer.language && this.synthesizer.language !== 'default' ?
this.synthesizer.language :
cs.speechSynthesisLanguage ;
const voice = this.synthesizer.voice && this.synthesizer.voice !== 'default' ?
this.synthesizer.voice :
cs.speechSynthesisVoice;
const label = this.synthesizer.label && this.synthesizer.label !== 'default' ?
this.synthesizer.label :
cs.speechSynthesisLabel;
const fallbackVendor = this.synthesizer.fallbackVendor && this.synthesizer.fallbackVendor !== 'default' ?
this.synthesizer.fallbackVendor :
cs.fallbackSpeechSynthesisVendor;
const fallbackLanguage = this.synthesizer.fallbackLanguage && this.synthesizer.fallbackLanguage !== 'default' ?
this.synthesizer.fallbackLanguage :
cs.fallbackSpeechSynthesisLanguage ;
const fallbackVoice = this.synthesizer.fallbackVoice && this.synthesizer.fallbackVoice !== 'default' ?
this.synthesizer.fallbackVoice :
cs.fallbackSpeechSynthesisVoice;
const fallbackLabel = this.synthesizer.fallbackLabel && this.synthesizer.fallbackLabel !== 'default' ?
this.synthesizer.fallbackLabel :
cs.fallbackSpeechSynthesisLabel;
let filepath;
try {
filepath = await this._synthesizeWithSpecificVendor(cs, ep, {vendor, language, voice, label});
} catch (error) {
if (fallbackVendor && this.isHandledByPrimaryProvider) {
this.isHandledByPrimaryProvider = false;
this.logger.info(`Synthesize error, fallback to ${fallbackVendor}`);
filepath = await this._synthesizeWithSpecificVendor(cs, ep,
{
vendor: fallbackVendor,
language: fallbackLanguage,
voice: fallbackVoice,
label: fallbackLabel
});
} else {
throw error;
}
}
this.notifyStatus({event: 'start-playback'});
while (!this.killed && (this.loop === 'forever' || this.loop--) && this.ep?.connected) {
let segment = 0;
while (!this.killed && segment < filepath.length) {
if (cs.isInConference) {
const {memberId, confName, confUuid} = cs;
await this.playToConfMember(this.ep, memberId, confName, confUuid, filepath[segment]);
}
else {
this.logger.debug(`Say:exec sending command to play file ${filepath[segment]}`);
await ep.play(filepath[segment]);
this.logger.debug(`Say:exec completed play file ${filepath[segment]}`);
}
segment++;
}
}
this.emit('playDone');
}
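
Each entry in this.text can be either a URL to play as-is or text to synthesize; the _validateURL branch above makes that split. A minimal sketch, where synthesize stands in for the generateAudio closure:

const isUrl = (s) => { try { new URL(s); return true; } catch (err) { return false; } };
const toPlayable = (text, synthesize) =>
  Promise.all(text.map((t) => (isUrl(t) ? t : synthesize(t))));

// toPlayable(['https://example.com/prompt.mp3', 'Hello there'], synthesize)
//   => ['https://example.com/prompt.mp3', '/tmp/<generated>.wav']  (path shape depends on the TTS helper)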

View File

@@ -1,183 +0,0 @@
const Task = require('./task');
const assert = require('assert');
const crypto = require('crypto');
const { TaskPreconditions, CobaltTranscriptionEvents } = require('../utils/constants');
class SttTask extends Task {
constructor(logger, data, parentTask) {
super(logger, data);
this.parentTask = parentTask;
this.preconditions = TaskPreconditions.Endpoint;
const {
setChannelVarsForStt,
normalizeTranscription,
removeSpeechListeners,
setSpeechCredentialsAtRuntime,
compileSonioxTranscripts,
consolidateTranscripts
} = require('../utils/transcription-utils')(logger);
this.setChannelVarsForStt = setChannelVarsForStt;
this.normalizeTranscription = normalizeTranscription;
this.removeSpeechListeners = removeSpeechListeners;
this.compileSonioxTranscripts = compileSonioxTranscripts;
this.consolidateTranscripts = consolidateTranscripts;
this.isHandledByPrimaryProvider = true;
if (this.data.recognizer) {
const recognizer = this.data.recognizer;
this.vendor = recognizer.vendor;
this.language = recognizer.language;
this.label = recognizer.label;
//fallback
this.fallbackVendor = recognizer.fallbackVendor || 'default';
this.fallbackLanguage = recognizer.fallbackLanguage || 'default';
this.fallbackLabel = recognizer.fallbackLabel || 'default';
/* let credentials be supplied in the recognizer object at runtime */
this.sttCredentials = setSpeechCredentialsAtRuntime(recognizer);
if (!Array.isArray(this.data.recognizer.altLanguages)) {
this.data.recognizer.altLanguages = [];
}
} else {
this.data.recognizer = {hints: [], altLanguages: []};
}
/* buffer for soniox transcripts */
this._sonioxTranscripts = [];
}
async _initSpeechCredentials(cs, vendor, label) {
const {getNuanceAccessToken, getIbmAccessToken} = this.cs.srf.locals.dbHelpers;
let credentials = cs.getSpeechCredentials(vendor, 'stt', label);
if (!credentials) {
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info(`ERROR stt using ${vendor} requested but creds not supplied`);
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_NOT_PROVISIONED,
vendor
}).catch((err) => this.logger.info({err}, 'Error generating alert for no stt'));
// Notify the application that the STT vendor is misconfigured.
this.notifyError({
msg: 'ASR error',
details: `No speech-to-text service credentials for ${vendor} have been configured`
});
this.notifyTaskDone();
throw new Error(`No speech-to-text service credentials for ${vendor} have been configured`);
}
if (vendor === 'nuance' && credentials.client_id) {
/* get nuance access token */
const {client_id, secret} = credentials;
const {access_token, servedFromCache} = await getNuanceAccessToken(client_id, secret, 'asr tts');
this.logger.debug({client_id}, `got nuance access token ${servedFromCache ? 'from cache' : ''}`);
credentials = {...credentials, access_token};
}
else if (vendor == 'ibm' && credentials.stt_api_key) {
/* get ibm access token */
const {stt_api_key, stt_region} = credentials;
const {access_token, servedFromCache} = await getIbmAccessToken(stt_api_key);
this.logger.debug({stt_api_key}, `got ibm access token ${servedFromCache ? 'from cache' : ''}`);
credentials = {...credentials, access_token, stt_region};
}
return credentials;
}
async _fallback() {
assert(this.fallbackVendor, 'fallback failed without fallbackVendor configuration');
this.isHandledByPrimaryProvider = false;
this.logger.info(`Failed to use primary STT provider, fallback to ${this.fallbackVendor}`);
this.vendor = this.fallbackVendor;
this.language = this.fallbackLanguage;
this.label = this.fallbackLabel;
this.data.recognizer.vendor = this.vendor;
this.data.recognizer.language = this.language;
this.data.recognizer.label = this.label;
this.sttCredentials = await this._initSpeechCredentials(this.cs, this.vendor, this.label);
}
async compileHintsForCobalt(ep, hostport, model, token, hints) {
const {retrieveKey} = this.cs.srf.locals.dbHelpers;
const hash = crypto.createHash('sha1');
hash.update(`${model}:${hints}`);
const key = `cobalt:${hash.digest('hex')}`;
this.context = await retrieveKey(key);
if (this.context) {
this.logger.debug({model, hints}, 'found cached cobalt context for supplied hints');
return this.context;
}
this.logger.debug({model, hints}, 'compiling cobalt context for supplied hints');
return new Promise((resolve, reject) => {
this.cobaltCompileResolver = resolve;
ep.addCustomEventListener(CobaltTranscriptionEvents.CompileContext, this._onCompileContext.bind(this, ep, key));
ep.api('uuid_cobalt_compile_context', [ep.uuid, hostport, model, token, hints], (err, evt) => {
if (err || 0 !== evt.getBody().indexOf('+OK')) {
ep.removeCustomEventListener(CobaltTranscriptionEvents.CompileContext);
return reject(err);
}
});
});
}
_onCompileContext(ep, key, evt) {
const {addKey} = this.cs.srf.locals.dbHelpers;
this.logger.debug({evt}, `received cobalt compile context event, will cache under ${key}`);
this.cobaltCompileResolver(evt.compiled_context);
ep.removeCustomEventListener(CobaltTranscriptionEvents.CompileContext);
this.cobaltCompileResolver = null;
//cache the compiled context
addKey(key, evt.compiled_context, 3600 * 12)
.catch((err) => this.logger.info({err}, `Error caching cobalt context for ${key}`));
}
_doContinuousAsrWithDeepgram(asrTimeout) {
/* deepgram has an utterance_end_ms property that simplifies things */
assert(this.vendor === 'deepgram');
this.logger.debug(`_doContinuousAsrWithDeepgram - setting utterance_end_ms to ${asrTimeout}`);
const dgOptions = this.data.recognizer.deepgramOptions = this.data.recognizer.deepgramOptions || {};
dgOptions.utteranceEndMs = dgOptions.utteranceEndMs || asrTimeout;
}
_onVendorConnect(_cs, _ep) {
this.logger.debug(`${this.name}:_on${this.vendor}Connect`);
}
_onVendorError(cs, _ep, evt) {
this.logger.info({evt}, `${this.name}:_on${this.vendor}Error`);
const {writeAlerts, AlertType} = cs.srf.locals;
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: 'STT failure reported by vendor',
detail: evt.error,
vendor: this.vendor,
}).catch((err) => this.logger.info({err}, `Error generating alert for ${this.vendor} connection failure`));
this.notifyError({msg: 'ASR error', details:`Failed connecting to speech vendor ${this.vendor}: ${evt.error}`});
}
_onVendorConnectFailure(cs, _ep, evt) {
const {reason} = evt;
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info({evt}, `${this.name}:_on${this.vendor}ConnectFailure`);
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Failed connecting to ${this.vendor} speech recognizer: ${reason}`,
vendor: this.vendor,
}).catch((err) => this.logger.info({err}, `Error generating alert for ${this.vendor} connection failure`));
this.notifyError({msg: 'ASR error', details:`Failed connecting to speech vendor ${this.vendor}: ${reason}`});
}
}
module.exports = SttTask;
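compileHintsForCobalt above caches compiled contexts under a SHA-1 of the model and hints, with a 12-hour TTL. A small sketch of that keying scheme; the in-memory Map stands in for the addKey/retrieveKey helpers:

const crypto = require('crypto');

// Build the cache key the same way compileHintsForCobalt does: sha1 of "model:hints".
function cobaltContextKey(model, hints) {
  const hash = crypto.createHash('sha1');
  hash.update(`${model}:${hints}`);
  return `cobalt:${hash.digest('hex')}`;
}

// Hypothetical usage; compile() is a placeholder for the uuid_cobalt_compile_context call.
const cache = new Map();
async function getOrCompileContext(model, hints, compile) {
  const key = cobaltContextKey(model, hints);
  if (cache.has(key)) return cache.get(key);   // cache hit: reuse the compiled context
  const context = await compile(model, hints);
  cache.set(key, context);                     // the real code caches via addKey for 3600 * 12 seconds
  return context;
}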

View File

@@ -1,45 +1,61 @@
const assert = require('assert');
const Task = require('./task');
const {
TaskName,
TaskPreconditions,
GoogleTranscriptionEvents,
NuanceTranscriptionEvents,
AwsTranscriptionEvents,
AzureTranscriptionEvents,
DeepgramTranscriptionEvents,
SonioxTranscriptionEvents,
CobaltTranscriptionEvents,
IbmTranscriptionEvents,
NvidiaTranscriptionEvents,
JambonzTranscriptionEvents,
TranscribeStatus,
AssemblyAiTranscriptionEvents
} = require('../utils/constants.json');
JambonzTranscriptionEvents
} = require('../utils/constants');
const { normalizeJambones } = require('@jambonz/verb-specifications');
const SttTask = require('./stt-task');
const STT_LISTEN_SPAN_NAME = 'stt-listen';
class TaskTranscribe extends SttTask {
class TaskTranscribe extends Task {
constructor(logger, opts, parentTask) {
super(logger, opts, parentTask);
super(logger, opts);
this.preconditions = TaskPreconditions.Endpoint;
this.parentTask = parentTask;
const {
setChannelVarsForStt,
normalizeTranscription,
removeSpeechListeners,
setSpeechCredentialsAtRuntime,
compileSonioxTranscripts
} = require('../utils/transcription-utils')(logger);
this.setChannelVarsForStt = setChannelVarsForStt;
this.normalizeTranscription = normalizeTranscription;
this.removeSpeechListeners = removeSpeechListeners;
this.compileSonioxTranscripts = compileSonioxTranscripts;
this.transcriptionHook = this.data.transcriptionHook;
this.earlyMedia = this.data.earlyMedia === true || (parentTask && parentTask.earlyMedia);
if (this.data.recognizer) {
this.interim = !!this.data.recognizer.interim;
this.separateRecognitionPerChannel = this.data.recognizer.separateRecognitionPerChannel;
const recognizer = this.data.recognizer;
this.vendor = recognizer.vendor;
this.language = recognizer.language;
/* let credentials be supplied in the recognizer object at runtime */
this.sttCredentials = setSpeechCredentialsAtRuntime(recognizer);
this.interim = !!recognizer.interim;
this.separateRecognitionPerChannel = recognizer.separateRecognitionPerChannel;
this.data.recognizer.hints = this.data.recognizer.hints || [];
this.data.recognizer.altLanguages = this.data.recognizer.altLanguages || [];
}
else this.data.recognizer = {hints: [], altLanguages: []};
/* buffer for soniox transcripts */
this._sonioxTranscripts = [];
this.childSpan = [null, null];
// Continuous asr timeout
this.asrTimeout = typeof this.data.recognizer.asrTimeout === 'number' ? this.data.recognizer.asrTimeout * 1000 : 0;
if (this.asrTimeout > 0) {
this.isContinuousAsr = true;
}
/* buffer speech for continuous asr */
this._bufferedTranscripts = [];
}
get name() { return TaskName.Transcribe; }
@@ -47,6 +63,7 @@ class TaskTranscribe extends SttTask {
async exec(cs, {ep, ep2}) {
super.exec(cs);
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
const {getNuanceAccessToken, getIbmAccessToken} = cs.srf.locals.dbHelpers;
if (cs.hasGlobalSttHints) {
const {hints, hintsBoost} = cs.globalSttHints;
@@ -74,50 +91,38 @@ class TaskTranscribe extends SttTask {
this.language = cs.speechRecognizerLanguage;
if (this.data.recognizer) this.data.recognizer.language = this.language;
}
if ('default' === this.label || !this.label) {
this.label = cs.speechRecognizerLabel;
if (this.data.recognizer) this.data.recognizer.label = this.label;
}
// Fallback options
if ('default' === this.fallbackVendor || !this.fallbackVendor) {
this.fallbackVendor = cs.fallbackSpeechRecognizerVendor;
if (this.data.recognizer) this.data.recognizer.fallbackVendor = this.fallbackVendor;
}
if ('default' === this.fallbackLanguage || !this.fallbackLanguage) {
this.fallbackLanguage = cs.fallbackSpeechRecognizerLanguage;
if (this.data.recognizer) this.data.recognizer.fallbackLanguage = this.fallbackLanguage;
}
if ('default' === this.fallbackLabel || !this.fallbackLabel) {
this.fallbackLabel = cs.fallbackSpeechRecognizerLabel;
if (this.data.recognizer) this.data.recognizer.fallbackLabel = this.fallbackLabel;
}
if (!this.data.recognizer.vendor) {
this.data.recognizer.vendor = this.vendor;
}
if (this.vendor === 'cobalt' && !this.data.recognizer.model) {
// By default, the application stores the cobalt model in the language field
this.data.recognizer.model = cs.speechRecognizerLanguage;
}
if (!this.sttCredentials) {
try {
this.sttCredentials = await this._initSpeechCredentials(cs, this.vendor, this.label);
} catch (error) {
if (this.fallbackVendor && this.isHandledByPrimaryProvider) {
await this._fallback();
} else {
throw error;
}
}
}
/* when using cobalt model is required */
if (this.vendor === 'cobalt' && !this.data.recognizer.model) {
this.notifyError({ msg: 'ASR error', details:'Cobalt requires a model to be specified'});
throw new Error('Cobalt requires a model to be specified');
}
if (!this.sttCredentials) this.sttCredentials = cs.getSpeechCredentials(this.vendor, 'stt');
try {
if (!this.sttCredentials) {
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info(`TaskTranscribe:exec - ERROR stt using ${this.vendor} requested but creds not supplied`);
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_NOT_PROVISIONED,
vendor: this.vendor
}).catch((err) => this.logger.info({err}, 'Error generating alert for no stt'));
throw new Error('no provisioned speech credentials for STT');
}
if (this.vendor === 'nuance' && this.sttCredentials.client_id) {
/* get nuance access token */
const {client_id, secret} = this.sttCredentials;
const {access_token, servedFromCache} = await getNuanceAccessToken(client_id, secret, 'asr tts');
this.logger.debug({client_id},
`Transcribe:exec - got nuance access token ${servedFromCache ? 'from cache' : ''}`);
this.sttCredentials = {...this.sttCredentials, access_token};
}
else if (this.vendor == 'ibm' && this.sttCredentials.stt_api_key) {
/* get ibm access token */
const {stt_api_key, stt_region} = this.sttCredentials;
const {access_token, servedFromCache} = await getIbmAccessToken(stt_api_key);
this.logger.debug({stt_api_key}, `Transcribe:exec - got ibm access token ${servedFromCache ? 'from cache' : ''}`);
this.sttCredentials = {...this.sttCredentials, access_token, stt_region};
}
await this._startTranscribing(cs, ep, 1);
if (this.separateRecognitionPerChannel && ep2) {
await this._startTranscribing(cs, ep2, 2);
@@ -134,7 +139,8 @@ class TaskTranscribe extends SttTask {
this.removeSpeechListeners(ep);
}
async _stopTranscription() {
async kill(cs) {
super.kill(cs);
let stopTranscription = false;
if (this.ep?.connected) {
stopTranscription = true;
@@ -146,13 +152,6 @@ class TaskTranscribe extends SttTask {
this.ep2.stopTranscription({vendor: this.vendor})
.catch((err) => this.logger.info(err, 'Error TaskTranscribe:kill'));
}
return stopTranscription;
}
async kill(cs) {
super.kill(cs);
const stopTranscription = this._stopTranscription();
// hang up after 1.5 sec if we don't get a final transcription
if (stopTranscription) this._timer = setTimeout(() => this.notifyTaskDone(), 1500);
else this.notifyTaskDone();
@@ -160,32 +159,7 @@ class TaskTranscribe extends SttTask {
await this.awaitTaskDone();
}
async updateTranscribe(status) {
if (!this.killed && this.ep && this.ep.connected) {
this.logger.info(`TaskTranscribe:updateTranscribe status ${status}`);
switch (status) {
case TranscribeStatus.Pause:
await this._stopTranscription();
break;
case TranscribeStatus.Resume:
await this._startTranscribing(this.cs, this.ep, 1);
if (this.separateRecognitionPerChannel && this.ep2) {
await this._startTranscribing(this.cs, this.ep2, 2);
}
break;
}
}
}
async _setSpeechHandlers(cs, ep, channel) {
if (this[`_speechHandlersSet_${channel}`]) return;
this[`_speechHandlersSet_${channel}`] = true;
/* some special deepgram logic */
if (this.vendor === 'deepgram') {
if (this.isContinuousAsr) this._doContinuousAsrWithDeepgram(this.asrTimeout);
}
async _startTranscribing(cs, ep, channel) {
const opts = this.setChannelVarsForStt(this, this.sttCredentials, this.data.recognizer);
switch (this.vendor) {
case 'google':
@@ -229,57 +203,23 @@ class TaskTranscribe extends SttTask {
ep.addCustomEventListener(DeepgramTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(DeepgramTranscriptionEvents.Connect,
this._onVendorConnect.bind(this, cs, ep));
this._onDeepgramConnect.bind(this, cs, ep, channel));
ep.addCustomEventListener(DeepgramTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep, channel));
/* if app sets deepgramOptions.utteranceEndMs they essentially want continuous asr */
if (opts.DEEPGRAM_SPEECH_UTTERANCE_END_MS) this.isContinuousAsr = true;
this._onDeepGramConnectFailure.bind(this, cs, ep, channel));
break;
case 'soniox':
this.bugname = 'soniox_transcribe';
ep.addCustomEventListener(SonioxTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
break;
case 'cobalt':
this.bugname = 'cobalt_transcribe';
ep.addCustomEventListener(CobaltTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
/* cobalt doesn't have a language, it has a model, which is required */
if (!this.data.recognizer.model) {
throw new Error('Cobalt requires a model to be specified');
}
this.language = this.data.recognizer.model;
/* special case: if using hints with cobalt we need to compile them */
this.hostport = opts.COBALT_SERVER_URI;
if (this.vendor === 'cobalt' && opts.COBALT_SPEECH_HINTS) {
try {
const context = await this.compileHintsForCobalt(
ep,
opts.COBALT_SERVER_URI,
this.data.recognizer.model,
opts.COBALT_CONTEXT_TOKEN,
opts.COBALT_SPEECH_HINTS
);
if (context) opts.COBALT_COMPILED_CONTEXT_DATA = context;
delete opts.COBALT_SPEECH_HINTS;
} catch (err) {
this.logger.error({err}, 'Error compiling hints for cobalt');
}
}
break;
case 'ibm':
this.bugname = 'ibm_transcribe';
ep.addCustomEventListener(IbmTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(IbmTranscriptionEvents.Connect,
this._onVendorConnect.bind(this, cs, ep));
this._onIbmConnect.bind(this, cs, ep, channel));
ep.addCustomEventListener(IbmTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep, channel));
this._onIbmConnectFailure.bind(this, cs, ep, channel));
break;
case 'nvidia':
@@ -293,42 +233,15 @@ class TaskTranscribe extends SttTask {
ep.addCustomEventListener(NvidiaTranscriptionEvents.VadDetected,
this._onVadDetected.bind(this, cs, ep));
break;
case 'assemblyai':
this.bugname = 'assemblyai_transcribe';
ep.addCustomEventListener(AssemblyAiTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(AssemblyAiTranscriptionEvents.Connect, this._onVendorConnect.bind(this, cs, ep));
ep.addCustomEventListener(AssemblyAiTranscriptionEvents.Error, this._onVendorError.bind(this, cs, ep));
ep.addCustomEventListener(AssemblyAiTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep, channel));
break;
default:
if (this.vendor.startsWith('custom:')) {
this.bugname = `${this.vendor}_transcribe`;
ep.addCustomEventListener(JambonzTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(JambonzTranscriptionEvents.Connect, this._onJambonzConnect.bind(this, cs, ep));
ep.addCustomEventListener(JambonzTranscriptionEvents.ConnectFailure,
this._onJambonzConnectFailure.bind(this, cs, ep));
break;
}
else {
this.notifyError({ msg: 'ASR error', details:`Invalid vendor ${this.vendor}`});
this.notifyTaskDone();
throw new Error(`Invalid vendor ${this.vendor}`);
}
throw new Error(`Invalid vendor ${this.vendor}`);
}
/* common handler for all stt engine errors */
ep.addCustomEventListener(JambonzTranscriptionEvents.Error, this._onJambonzError.bind(this, cs, ep));
await ep.set(opts)
.catch((err) => this.logger.info(err, 'Error setting channel variables'));
}
async _startTranscribing(cs, ep, channel) {
await this._setSpeechHandlers(cs, ep, channel);
await this._transcribe(ep);
/* start child span for this channel */
@@ -342,8 +255,7 @@ class TaskTranscribe extends SttTask {
interim: this.interim ? true : false,
locale: this.language,
channels: /*this.separateRecognitionPerChannel ? 2 : */ 1,
bugname: this.bugname,
hostport: this.hostport
bugname: this.bugname
});
}
@@ -352,20 +264,8 @@ class TaskTranscribe extends SttTask {
const bugname = fsEvent.getHeader('media-bugname');
if (bugname && this.bugname !== bugname) return;
if (this.vendor === 'ibm' && evt?.state === 'listening') return;
if (this.vendor === 'deepgram' && evt.type === 'UtteranceEnd') {
/* we will only get this when we have set utterance_end_ms */
if (this._bufferedTranscripts.length === 0) {
this.logger.debug('TaskTranscribe:_onTranscription - got UtteranceEnd event from deepgram but no buffered transcripts');
}
else {
this.logger.debug('TaskTranscribe:_onTranscription - got UtteranceEnd event from deepgram, return buffered transcript');
evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language);
this._bufferedTranscripts = [];
this._resolve('speech', evt);
}
return;
if (this.vendor === 'ibm') {
if (evt?.state === 'listening') return;
}
this.logger.debug({evt}, 'TaskTranscribe:_onTranscription - before normalization');
@@ -396,22 +296,9 @@ class TaskTranscribe extends SttTask {
}
}
if (this.isContinuousAsr && evt.is_final) {
this._bufferedTranscripts.push(evt);
this._startAsrTimer(channel);
} else {
await this._resolve(channel, evt);
}
}
async _resolve(channel, evt) {
/* we've got a transcript, so end the otel child span for this channel */
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
this.childSpan[channel - 1].span.setAttributes({
channel,
'stt.resolve': 'transcript',
'stt.result': JSON.stringify(evt)
});
this.childSpan[channel - 1].span.setAttributes({'stt.resolve': 'transcript', 'stt.result': JSON.stringify(evt)});
this.childSpan[channel - 1].span.end();
}
@@ -455,10 +342,7 @@ class TaskTranscribe extends SttTask {
_onNoAudio(cs, ep, channel) {
this.logger.debug(`TaskTranscribe:_onNoAudio restarting transcription on channel ${channel}`);
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
this.childSpan[channel - 1].span.setAttributes({
channel,
'stt.resolve': 'timeout'
});
this.childSpan[channel - 1].span.setAttributes({'stt.resolve': 'timeout'});
this.childSpan[channel - 1].span.end();
}
this._transcribe(ep);
@@ -471,10 +355,7 @@ class TaskTranscribe extends SttTask {
_onMaxDurationExceeded(cs, ep, channel) {
this.logger.debug(`TaskTranscribe:_onMaxDurationExceeded restarting transcription on channel ${channel}`);
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
this.childSpan[channel - 1].span.setAttributes({
channel,
'stt.resolve': 'max duration exceeded'
});
this.childSpan[channel - 1].span.setAttributes({'stt.resolve': 'max duration exceeded'});
this.childSpan[channel - 1].span.end();
}
@@ -491,73 +372,74 @@ class TaskTranscribe extends SttTask {
this._timer = null;
}
}
async _onJambonzError(cs, _ep, evt) {
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError');
if (this.isHandledByPrimaryProvider && this.fallbackVendor) {
_ep.stopTranscription({vendor: this.vendor})
.catch((err) => this.logger.error({err}, `Error stopping transcription for primary vendor ${this.vendor}`));
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
try {
await this._fallback();
let channel = 1;
if (this.ep !== _ep) {
channel = 2;
}
this._startTranscribing(cs, _ep, channel);
updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid);
return;
} catch (error) {
this.logger.info({error}, `Error while falling back to ${this.fallbackVendor}`);
}
} else {
const {writeAlerts, AlertType} = cs.srf.locals;
if (this.vendor === 'nuance') {
const {code, error} = evt;
if (code === 404 && error === 'No speech') return this._resolve('timeout');
if (code === 413 && error === 'Too much speech') return this._resolve('timeout');
}
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Custom speech vendor ${this.vendor} error: ${evt.error}`,
vendor: this.vendor,
}).catch((err) => this.logger.info({err}, 'Error generating alert for jambonz custom connection failure'));
this.notifyError({msg: 'ASR error', details:`Custom speech vendor ${this.vendor} error: ${evt.error}`});
}
_onDeepgramConnect(_cs, _ep) {
this.logger.debug('TaskTranscribe:_onDeepgramConnect');
}
_onVendorConnectFailure(cs, _ep, channel, evt) {
super._onVendorConnectFailure(cs, _ep, evt);
_onDeepGramConnectFailure(cs, _ep, channel, evt) {
const {reason} = evt;
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info({evt}, 'TaskTranscribe:_onDeepgramConnectFailure');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Failed connecting to Deepgram speech recognizer: ${reason}`,
vendor: 'deepgram',
}).catch((err) => this.logger.info({err}, 'Error generating alert for deepgram connection failure'));
this.notifyError(`Failed connecting to speech vendor deepgram: ${reason}`);
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
this.childSpan[channel - 1].span.setAttributes({
channel,
'stt.resolve': 'connection failure'
});
this.childSpan[channel - 1].span.setAttributes({'stt.resolve': 'connection failure'});
this.childSpan[channel - 1].span.end();
}
this.notifyTaskDone();
}
_startAsrTimer(channel) {
if (this.vendor === 'deepgram') return; // no need
assert(this.isContinuousAsr);
this._clearAsrTimer(channel);
this._asrTimer = setTimeout(() => {
this.logger.debug(`TaskTranscribe:_startAsrTimer - asr timer went off for channel: ${channel}`);
const evt = this.consolidateTranscripts(this._bufferedTranscripts, channel, this.language);
this._bufferedTranscripts = [];
this._resolve(channel, evt);
}, this.asrTimeout);
this.logger.debug(`TaskTranscribe:_startAsrTimer: set for ${this.asrTimeout}ms for channel ${channel}`);
_onIbmConnect(_cs, _ep) {
this.logger.debug('TaskTranscribe:_onIbmConnect');
}
_clearAsrTimer(channel) {
if (this._asrTimer) clearTimeout(this._asrTimer);
this._asrTimer = null;
_onIbmConnectFailure(cs, _ep, channel, evt) {
const {reason} = evt;
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info({evt}, 'TaskTranscribe:_onIbmConnectFailure');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Failed connecting to IBM Watson speech recognizer: ${reason}`,
vendor: 'ibm',
}).catch((err) => this.logger.info({err}, 'Error generating alert for IBM connection failure'));
this.notifyError(`Failed connecting to speech vendor IBM: ${reason}`);
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
this.childSpan[channel - 1].span.setAttributes({'stt.resolve': 'connection failure'});
this.childSpan[channel - 1].span.end();
}
this.notifyTaskDone();
}
_onIbmError(cs, _ep, _channel, evt) {
this.logger.info({evt}, 'TaskTranscribe:_onIbmError');
}
_onJambonzError(cs, _ep, evt) {
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError');
const {writeAlerts, AlertType} = cs.srf.locals;
if (this.vendor === 'nuance') {
const {code, error} = evt;
if (code === 404 && error === 'No speech') return this._resolve('timeout');
if (code === 413 && error === 'Too much speech') return this._resolve('timeout');
}
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Custom speech vendor ${this.vendor} error: ${evt.error}`,
vendor: this.vendor,
}).catch((err) => this.logger.info({err}, 'Error generating alert for jambonz custom connection failure'));
this.notifyError({msg: 'ASR error', details:`Custom speech vendor ${this.vendor} error: ${evt.error}`});
}
}
module.exports = TaskTranscribe;
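The continuous-ASR additions above buffer final transcripts and only resolve after an inactivity timer fires (Deepgram skips the timer because utterance_end_ms does the equivalent server-side). A rough standalone sketch of that buffering pattern; onUtterance() stands in for _resolve():

// Sketch: collect final transcripts and flush them after asrTimeoutMs of inactivity.
function makeAsrBuffer(asrTimeoutMs, onUtterance) {
  let buffered = [];
  let timer = null;
  return function onFinalTranscript(evt) {
    buffered.push(evt);
    if (timer) clearTimeout(timer);   // each new final restarts the inactivity timer
    timer = setTimeout(() => {
      onUtterance(buffered);          // flush everything gathered during the utterance
      buffered = [];
      timer = null;
    }, asrTimeoutMs);
  };
}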

View File

@@ -9,7 +9,6 @@ const {
NvidiaTranscriptionEvents,
IbmTranscriptionEvents,
SonioxTranscriptionEvents,
CobaltTranscriptionEvents,
DeepgramTranscriptionEvents,
JambonzTranscriptionEvents,
AmdEvents,
@@ -55,8 +54,7 @@ class Amd extends Emitter {
this.language = opts.recognizer?.language || cs.speechRecognizerLanguage;
if ('default' === this.language) this.language = cs.speechRecognizerLanguage;
this.sttCredentials = cs.getSpeechCredentials(this.vendor, 'stt',
opts.recognizer?.label || cs.speechRecognizerLabel);
this.sttCredentials = cs.getSpeechCredentials(this.vendor, 'stt');
if (!this.sttCredentials) throw new Error(`No speech credentials found for vendor ${this.vendor}`);
@@ -315,10 +313,6 @@ module.exports = (logger) => {
ep.addCustomEventListener(NvidiaTranscriptionEvents.Transcription, amd.transcriptionHandler);
break;
case 'cobalt':
ep.addCustomEventListener(CobaltTranscriptionEvents.Transcription, amd.transcriptionHandler);
break;
default:
if (vendor.startsWith('custom:')) {
ep.addCustomEventListener(JambonzTranscriptionEvents.Transcription, amd.transcriptionHandler);

View File

@@ -11,20 +11,15 @@ const {LifeCycleEvents} = require('./constants');
const express = require('express');
const app = express();
const getString = bent('string');
const {
SNSClient,
SubscribeCommand,
UnsubscribeCommand } = require('@aws-sdk/client-sns');
const snsClient = new SNSClient({ region: AWS_REGION, apiVersion: '2010-03-31' });
const {
AutoScalingClient,
DescribeAutoScalingGroupsCommand,
CompleteLifecycleActionCommand } = require('@aws-sdk/client-auto-scaling');
const autoScalingClient = new AutoScalingClient({ region: AWS_REGION, apiVersion: '2011-01-01' });
const AWS = require('aws-sdk');
const sns = new AWS.SNS({apiVersion: '2010-03-31'});
const autoscaling = new AWS.AutoScaling({apiVersion: '2011-01-01'});
const {Parser} = require('xml2js');
const parser = new Parser();
const {validatePayload} = require('verify-aws-sns-signature');
AWS.config.update({region: AWS_REGION});
class SnsNotifier extends Emitter {
constructor(logger) {
super();
@@ -74,7 +69,7 @@ class SnsNotifier extends Emitter {
subscriptionRequestId: this.subscriptionRequestId
}, 'response from SNS SubscribeURL');
const data = await this.describeInstance();
this.lifecycleState = data.AutoScalingGroups[0].Instances[0].LifecycleState;
this.lifecycleState = data.AutoScalingInstances[0].LifecycleState;
this.emit('SubscriptionConfirmation', {publicIp: this.publicIp});
break;
@@ -140,12 +135,11 @@ class SnsNotifier extends Emitter {
async subscribe() {
try {
const params = {
const response = await sns.subscribe({
Protocol: 'http',
TopicArn: AWS_SNS_TOPIC_ARM,
Endpoint: this.snsEndpoint
};
const response = await snsClient.send(new SubscribeCommand(params));
}).promise();
this.logger.info({response}, `response to SNS subscribe to ${AWS_SNS_TOPIC_ARM}`);
} catch (err) {
this.logger.error({err}, `Error subscribing to SNS topic arn ${AWS_SNS_TOPIC_ARM}`);
@@ -155,10 +149,9 @@ class SnsNotifier extends Emitter {
async unsubscribe() {
if (!this.subscriptionArn) throw new Error('SnsNotifier#unsubscribe called without an active subscription');
try {
const params = {
const response = await sns.unsubscribe({
SubscriptionArn: this.subscriptionArn
};
const response = await snsClient.send(new UnsubscribeCommand(params));
}).promise();
this.logger.info({response}, `response to SNS unsubscribe from ${AWS_SNS_TOPIC_ARM}`);
} catch (err) {
this.logger.error({err}, `Error unsubscribing from SNS topic arn ${AWS_SNS_TOPIC_ARM}`);
@@ -167,29 +160,26 @@ class SnsNotifier extends Emitter {
completeScaleIn() {
assert(this.scaleInParams);
autoScalingClient.send(new CompleteLifecycleActionCommand(this.scaleInParams))
.then((data) => {
return this.logger.info({data}, 'Successfully completed scale-in action');
})
.catch((err) => {
this.logger.error({err}, 'Error completing scale-in');
});
autoscaling.completeLifecycleAction(this.scaleInParams, (err, response) => {
if (err) return this.logger.error({err}, 'Error completing scale-in');
this.logger.info({response}, 'Successfully completed scale-in action');
});
}
describeInstance() {
return new Promise((resolve, reject) => {
if (!this.instanceId) return reject('instance-id unknown');
autoScalingClient.send(new DescribeAutoScalingGroupsCommand({
autoscaling.describeAutoScalingInstances({
InstanceIds: [this.instanceId]
}))
.then((data) => {
this.logger.info({data}, 'SnsNotifier: describeInstance');
return resolve(data);
})
.catch((err) => {
}, (err, data) => {
if (err) {
this.logger.error({err}, 'Error describing instances');
reject(err);
});
} else {
this.logger.info({data}, 'SnsNotifier: describeInstance');
resolve(data);
}
});
});
}
@@ -203,7 +193,7 @@ module.exports = async function(logger) {
process.on('SIGHUP', async() => {
try {
const data = await notifier.describeInstance();
const state = data.AutoScalingGroups[0].Instances[0].LifecycleState;
const state = data.AutoScalingInstances[0].LifecycleState;
if (state !== notifier.lifecycleState) {
notifier.lifecycleState = state;
switch (state) {
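The diff above moves between the monolithic aws-sdk v2 client and the modular v3 clients. A minimal sketch of the v3 subscribe/unsubscribe flow, assuming only that the topic ARN and endpoint are supplied by the caller (the region is illustrative):

const {SNSClient, SubscribeCommand, UnsubscribeCommand} = require('@aws-sdk/client-sns');

const snsClient = new SNSClient({region: 'us-east-1'});   // region is an example value

// Start an HTTP subscription; SNS will POST a SubscriptionConfirmation to the endpoint.
async function subscribe(topicArn, endpoint) {
  const response = await snsClient.send(new SubscribeCommand({
    Protocol: 'http',
    TopicArn: topicArn,
    Endpoint: endpoint
  }));
  return response.SubscriptionArn;   // may read 'pending confirmation' until confirmed
}

async function unsubscribe(subscriptionArn) {
  return snsClient.send(new UnsubscribeCommand({SubscriptionArn: subscriptionArn}));
}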

View File

@@ -2,24 +2,17 @@ const {context, trace} = require('@opentelemetry/api');
const {Dialog} = require('drachtio-srf');
class RootSpan {
constructor(callType, req) {
const {srf} = require('../../');
const tracer = srf.locals.otel.tracer;
let callSid, accountSid, applicationSid, linkedSpanId;
let tracer, callSid, linkedSpanId;
if (req instanceof Dialog) {
const dlg = req;
tracer = dlg.srf.locals.otel.tracer;
callSid = dlg.callSid;
linkedSpanId = dlg.linkedSpanId;
}
else if (req.srf) {
callSid = req.locals.callSid;
accountSid = req.get('X-Account-Sid'),
applicationSid = req.locals.application_sid;
}
else {
callSid = req.callSid;
accountSid = req.accountSid;
applicationSid = req.applicationSid;
tracer = req.srf.locals.otel.tracer;
callSid = req.locals.callSid;
}
this._span = tracer.startSpan(callType || 'incoming-call');
if (req instanceof Dialog) {
@@ -29,20 +22,13 @@ class RootSpan {
callId: dlg.sip.callId
});
}
else if (req.srf) {
this._span.setAttributes({
callSid,
accountSid,
applicationSid,
callId: req.get('Call-ID'),
externalCallId: req.get('X-CID')
});
}
else {
this._span.setAttributes({
callSid,
accountSid,
applicationSid
accountSid: req.get('X-Account-Sid'),
applicationSid: req.locals.application_sid,
callId: req.get('Call-ID'),
externalCallId: req.get('X-CID')
});
}
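RootSpan wraps the OpenTelemetry tracer so every call gets a span tagged with its identifying SIDs. A bare-bones sketch of the same idea using the public @opentelemetry/api surface; the attribute values are placeholders:

const {trace} = require('@opentelemetry/api');

const tracer = trace.getTracer('jambonz-feature-server');
const span = tracer.startSpan('incoming-call');
span.setAttributes({
  callSid: 'CA1234',        // placeholder values, not real SIDs
  accountSid: 'AC5678',
  callId: 'abc123@host'
});
// ... handle the call ...
span.end();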

View File

@@ -29,7 +29,7 @@
"Tag": "tag",
"Transcribe": "transcribe"
},
"AllowedSipRecVerbs": ["config", "gather", "transcribe", "listen", "tag"],
"AllowedSipRecVerbs": ["config", "gather", "transcribe", "listen"],
"CallStatus": {
"Trying": "trying",
"Ringing": "ringing",
@@ -51,11 +51,6 @@
"Silence": "silence",
"Resume": "resume"
},
"TranscribeStatus": {
"Pause": "pause",
"Silence": "silence",
"Resume": "resume"
},
"TaskPreconditions": {
"None": "none",
"Endpoint": "endpoint",
@@ -95,11 +90,6 @@
"Transcription": "soniox_transcribe::transcription",
"Error": "soniox_transcribe::error"
},
"CobaltTranscriptionEvents": {
"Transcription": "cobalt_speech::transcription",
"CompileContext": "cobalt_speech::compile_context_response",
"Error": "cobalt_speech::error"
},
"IbmTranscriptionEvents": {
"Transcription": "ibm_transcribe::transcription",
"ConnectFailure": "ibm_transcribe::connect_failed",
@@ -126,12 +116,6 @@
"Connect": "jambonz_transcribe::connect",
"Error": "jambonz_transcribe::error"
},
"AssemblyAiTranscriptionEvents": {
"Transcription": "assemblyai_transcribe::transcription",
"Error": "assemblyai_transcribe::error",
"ConnectFailure": "assemblyai_transcribe::connect_failed",
"Connect": "assemblyai_transcribe::connect"
},
"ListenEvents": {
"Connect": "mod_audio_fork::connect",
"ConnectFailure": "mod_audio_fork::connect_failed",

View File

@@ -3,10 +3,13 @@ const {decrypt} = require('./encrypt-decrypt');
const sqlAccountDetails = `SELECT *
FROM accounts account
WHERE account.account_sid = ?`;
const sqlSpeechCredentialsForAccount = `SELECT *
const sqlSpeechCredentials = `SELECT *
FROM speech_credentials
WHERE account_sid = ? OR (account_sid is NULL AND service_provider_sid =
(SELECT service_provider_sid from accounts where account_sid = ?))`;
WHERE account_sid = ? `;
const sqlSpeechCredentialsForSP = `SELECT *
FROM speech_credentials
WHERE service_provider_sid =
(SELECT service_provider_sid from accounts where account_sid = ?)`;
const sqlQueryAccountCarrierByName = `SELECT voip_carrier_sid
FROM voip_carriers vc
WHERE vc.account_sid = ?
@@ -27,9 +30,6 @@ WHERE pn.account_sid IS NULL
AND pn.service_provider_sid =
(SELECT service_provider_sid from accounts where account_sid = ?)
AND pn.number = ?`;
const sqlQueryGoogleCustomVoices = `SELECT *
FROM google_custom_voices
WHERE google_custom_voice_sid = ?`;
const speechMapper = (cred) => {
const {credential, ...obj} = cred;
@@ -49,10 +49,8 @@ const speechMapper = (cred) => {
obj.region = o.region;
obj.use_custom_stt = o.use_custom_stt;
obj.custom_stt_endpoint = o.custom_stt_endpoint;
obj.custom_stt_endpoint_url = o.custom_stt_endpoint_url;
obj.use_custom_tts = o.use_custom_tts;
obj.custom_tts_endpoint = o.custom_tts_endpoint;
obj.custom_tts_endpoint_url = o.custom_tts_endpoint_url;
}
else if ('wellsaid' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
@@ -84,17 +82,7 @@ const speechMapper = (cred) => {
const o = JSON.parse(decrypt(credential));
obj.riva_server_uri = o.riva_server_uri;
}
else if ('cobalt' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.cobalt_server_uri = o.cobalt_server_uri;
} else if ('elevenlabs' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
obj.model_id = o.model_id;
} else if ('assemblyai' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
} else if (obj.vendor.startsWith('custom:')) {
else if (obj.vendor.startsWith('custom:')) {
const o = JSON.parse(decrypt(credential));
obj.auth_token = o.auth_token;
obj.custom_stt_url = o.custom_stt_url;
@@ -106,12 +94,6 @@ const speechMapper = (cred) => {
return obj;
};
const bucketCredentialDecrypt = (account) => {
const { bucket_credential } = account.account;
if (!bucket_credential || bucket_credential.vendor) return;
account.account.bucket_credential = JSON.parse(decrypt(bucket_credential));
};
module.exports = (logger, srf) => {
const {pool} = srf.locals.dbHelpers;
const pp = pool.promise();
@@ -120,14 +102,19 @@ module.exports = (logger, srf) => {
const [r] = await pp.query({sql: sqlAccountDetails, nestTables: true}, [account_sid]);
if (0 === r.length) throw new Error(`invalid accountSid: ${account_sid}`);
const [r2] = await pp.query(sqlSpeechCredentialsForAccount, [account_sid, account_sid]);
const [r2] = await pp.query(sqlSpeechCredentials, [account_sid]);
const speech = r2.map(speechMapper);
const account = r[0];
bucketCredentialDecrypt(account);
/* add service provider creds unless we have that vendor at the account level */
const [r3] = await pp.query(sqlSpeechCredentialsForSP, [account_sid]);
r3.forEach((s) => {
if (!speech.find((s2) => s2.vendor === s.vendor)) {
speech.push(speechMapper(s));
}
});
return {
...account,
...r[0],
speech
};
};
@@ -167,22 +154,10 @@ module.exports = (logger, srf) => {
}
};
const lookupGoogleCustomVoice = async(google_custom_voice_sid) => {
const pp = pool.promise();
try {
const [r] = await pp.query(sqlQueryGoogleCustomVoices, [google_custom_voice_sid]);
return r;
} catch (err) {
logger.error({err}, `lookupGoogleCustomVoices: Error ${google_custom_voice_sid}`);
}
};
return {
lookupAccountDetails,
updateSpeechCredentialLastUsed,
lookupCarrier,
lookupCarrierByPhoneNumber,
lookupGoogleCustomVoice
lookupCarrierByPhoneNumber
};
};

View File

@@ -6,8 +6,7 @@ const {PORT, HTTP_PORT_MAX} = require('../config');
const doListen = (logger, app, port, resolve) => {
const server = app.listen(port, () => {
const {srf} = app.locals;
srf.locals.serviceUrl = `http://${srf.locals.ipv4}:${port}`;
logger.info(`listening for HTTP requests on port ${port}, serviceUrl is ${srf.locals.serviceUrl}`);
logger.info(`listening for HTTP requests on port ${PORT}, serviceUrl is ${srf.locals.serviceUrl}`);
resolve({server, app});
});
return server;

View File

@@ -1,4 +1,4 @@
const {request, getGlobalDispatcher, setGlobalDispatcher, Dispatcher, ProxyAgent, Client, Pool} = require('undici');
const {Client, Pool} = require('undici');
const parseUrl = require('parse-url');
const assert = require('assert');
const BaseRequestor = require('./base-requestor');
@@ -10,10 +10,6 @@ const {
HTTP_POOLSIZE,
HTTP_PIPELINING,
HTTP_TIMEOUT,
HTTP_PROXY_IP,
HTTP_PROXY_PORT,
HTTP_PROXY_PROTOCOL,
NODE_ENV,
} = require('../config');
const toBase64 = (str) => Buffer.from(str || '', 'utf8').toString('base64');
@@ -25,15 +21,6 @@ function basicAuth(username, password) {
return {Authorization: header};
}
const defaultDispatcher = HTTP_PROXY_IP ?
new ProxyAgent(`${HTTP_PROXY_PROTOCOL}://${HTTP_PROXY_IP}${HTTP_PROXY_PORT ? `:${HTTP_PROXY_PORT}` : ''}`) :
getGlobalDispatcher();
setGlobalDispatcher(new class extends Dispatcher {
dispatch(options, handler) {
return defaultDispatcher.dispatch(options, handler);
}
}());
class HttpRequestor extends BaseRequestor {
constructor(logger, account_sid, hook, secret) {
@@ -73,18 +60,6 @@ class HttpRequestor extends BaseRequestor {
if (u.port) this.client = new Client(`${u.protocol}://${u.resource}:${u.port}`);
else this.client = new Client(`${u.protocol}://${u.resource}`);
}
if (NODE_ENV == 'test' && process.env.JAMBONES_HTTP_PROXY_IP) {
const defDispatcher =
new ProxyAgent(`${process.env.JAMBONES_HTTP_PROXY_PROTOCOL}://${process.env.JAMBONES_HTTP_PROXY_IP}${
process.env.JAMBONES_HTTP_PROXY_PORT ? `:${process.env.JAMBONES_HTTP_PROXY_PORT}` : ''}`);
setGlobalDispatcher(new class extends Dispatcher {
dispatch(options, handler) {
return defDispatcher.dispatch(options, handler);
}
}());
}
}
get baseUrl() {
@@ -164,18 +139,7 @@ class HttpRequestor extends BaseRequestor {
};
const absUrl = this._isRelativeUrl(url) ? `${this.baseUrl}${url}` : url;
this.logger.debug({url, absUrl, hdrs}, 'send webhook');
const {statusCode, headers, body} = HTTP_PROXY_IP ? await request(
this.baseUrl,
{
path,
query,
method,
headers: hdrs,
...('POST' === method && {body: JSON.stringify(payload)}),
timeout: HTTP_TIMEOUT,
followRedirects: false
}
) : await client.request({
const {statusCode, headers, body} = await client.request({
path,
query,
method,
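The removed block wires an undici ProxyAgent in as the global dispatcher so outbound webhooks can be routed through a forward proxy. A minimal sketch of that pattern; the proxy URL is illustrative:

const {ProxyAgent, setGlobalDispatcher, request} = require('undici');

// Route all undici requests through a forward proxy (example address only).
setGlobalDispatcher(new ProxyAgent('http://10.0.0.5:3128'));

// Subsequent request() calls now transit the proxy transparently, e.g.:
// await request('https://example.com/hook', {method: 'POST', body: JSON.stringify({})});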

View File

@@ -17,7 +17,6 @@ const {
PORT,
NODE_ENV,
} = require('../config');
const Registrar = require('@jambonz/mw-registrar');
const assert = require('assert');
function initMS(logger, wrapper, ms) {
@@ -178,7 +177,6 @@ function installSrfLocals(srf, logger) {
host: JAMBONES_REDIS_HOST,
port: JAMBONES_REDIS_PORT || 6379
}, logger, tracer);
const registrar = new Registrar(logger, client);
const {
synthAudio,
getNuanceAccessToken,
@@ -206,7 +204,6 @@ function installSrfLocals(srf, logger) {
srf.locals = {...srf.locals,
dbHelpers: {
client,
registrar,
pool,
lookupAppByPhoneNumber,
lookupAppByRegex,

View File

@@ -15,7 +15,7 @@ const RootSpan = require('./call-tracer');
const uuidv4 = require('uuid-random');
class SingleDialer extends Emitter {
constructor({logger, sbcAddress, target, opts, application, callInfo, accountInfo, rootSpan, startSpan, dialTask}) {
constructor({logger, sbcAddress, target, opts, application, callInfo, accountInfo, rootSpan, startSpan}) {
super();
assert(target.type);
@@ -37,7 +37,6 @@ class SingleDialer extends Emitter {
this.callGone = false;
this.callSid = uuidv4();
this.dialTask = dialTask;
this.on('callStatusChange', this._notifyCallStatusChange.bind(this));
}
@@ -248,14 +247,9 @@ class SingleDialer extends Emitter {
.on('modify', async(req, res) => {
try {
if (this.ep) {
if (this.dialTask && this.dialTask.isOnHold) {
this.logger.info('dial is onhold, emit event');
this.emit('reinvite', req, res);
} else {
const newSdp = await this.ep.modify(req.body);
res.send(200, {body: newSdp});
this.logger.info({offer: req.body, answer: newSdp}, 'SingleDialer:exec: handling reINVITE');
}
const newSdp = await this.ep.modify(req.body);
res.send(200, {body: newSdp});
this.logger.info({offer: req.body, answer: newSdp}, 'SingleDialer:exec: handling reINVITE');
}
else {
this.logger.info('SingleDialer:exec: handling reINVITE with released media, emit event');
@@ -326,10 +320,6 @@ class SingleDialer extends Emitter {
try {
// retrieve set of tasks
const json = await this.requestor.request('dial:confirm', confirmHook, this.callInfo.toJSON());
if (!json || (Array.isArray(json) && json.length === 0)) {
this.logger.info('SingleDialer:_executeApp: no tasks returned from confirm hook');
return;
}
const tasks = normalizeJambones(this.logger, json).map((tdata) => makeTask(this.logger, tdata));
// verify it contains only allowed verbs
const allowedTasks = tasks.filter((task) => {
@@ -440,11 +430,11 @@ class SingleDialer extends Emitter {
}
function placeOutdial({
logger, srf, ms, sbcAddress, target, opts, application, callInfo, accountInfo, rootSpan, startSpan, dialTask
logger, srf, ms, sbcAddress, target, opts, application, callInfo, accountInfo, rootSpan, startSpan
}) {
const myOpts = deepcopy(opts);
const sd = new SingleDialer({
logger, sbcAddress, target, myOpts, application, callInfo, accountInfo, rootSpan, startSpan, dialTask
logger, sbcAddress, target, myOpts, application, callInfo, accountInfo, rootSpan, startSpan
});
sd.exec(srf, ms, myOpts);
return sd;

View File

@@ -101,8 +101,7 @@ module.exports = (logger) => {
method: 'OPTIONS',
headers: {
'X-FS-Status': ms && !dryUpCalls ? 'open' : 'closed',
'X-FS-Calls': srf.locals.sessionTracker.count,
'X-FS-ServiceUrl': srf.locals.serviceUrl
'X-FS-Calls': srf.locals.sessionTracker.count
}
});
req.on('response', (res) => {

View File

@@ -1,32 +0,0 @@
const sdpTransform = require('sdp-transform');
const isOnhold = (sdp) => {
return sdp && (sdp.includes('a=sendonly') || sdp.includes('a=inactive'));
};
const mergeSdpMedia = (sdp1, sdp2) => {
const parsedSdp1 = sdpTransform.parse(sdp1);
const parsedSdp2 = sdpTransform.parse(sdp2);
parsedSdp1.media.push(...parsedSdp2.media);
return sdpTransform.write(parsedSdp1);
};
const extractSdpMedia = (sdp) => {
const parsedSdp1 = sdpTransform.parse(sdp);
if (parsedSdp1.media.length > 1) {
parsedSdp1.media = [parsedSdp1.media[0]];
const parsedSdp2 = sdpTransform.parse(sdp);
parsedSdp2.media = [parsedSdp2.media[1]];
return [sdpTransform.write(parsedSdp1), sdpTransform.write(parsedSdp2)];
} else {
return [sdp, sdp];
}
};
module.exports = {
isOnhold,
mergeSdpMedia,
extractSdpMedia
};
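The removed sdp helpers above detect hold and merge/split media sections. A short usage sketch; the SDP string is a heavily truncated placeholder:

// Hypothetical usage of the helpers above.
const offerSdp = 'v=0\r\n...\r\na=sendonly\r\n';   // truncated placeholder SDP
if (isOnhold(offerSdp)) {
  // the far end set sendonly/inactive, i.e. placed the call on hold
}

// mergeSdpMedia(legA, legB) returns one SDP containing both m= sections;
// extractSdpMedia(combined) splits it back into [firstLeg, secondLeg].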

View File

@@ -7,10 +7,8 @@ const {
DeepgramTranscriptionEvents,
SonioxTranscriptionEvents,
NvidiaTranscriptionEvents,
CobaltTranscriptionEvents,
JambonzTranscriptionEvents,
AssemblyAiTranscriptionEvents
} = require('./constants.json');
JambonzTranscriptionEvents
} = require('./constants');
const stickyVars = {
google: [
@@ -35,14 +33,12 @@ const stickyVars = {
'AZURE_SERVICE_ENDPOINT',
'AZURE_INITIAL_SPEECH_TIMEOUT_MS',
'AZURE_USE_OUTPUT_FORMAT_DETAILED',
'AZURE_SPEECH_SEGMENTATION_SILENCE_TIMEOUT_MS'
],
deepgram: [
'DEEPGRAM_SPEECH_KEYWORDS',
'DEEPGRAM_API_KEY',
'DEEPGRAM_SPEECH_TIER',
'DEEPGRAM_SPEECH_MODEL',
'DEEPGRAM_SPEECH_ENABLE_SMART_FORMAT',
'DEEPGRAM_SPEECH_ENABLE_AUTOMATIC_PUNCTUATION',
'DEEPGRAM_SPEECH_PROFANITY_FILTER',
'DEEPGRAM_SPEECH_REDACT',
@@ -53,7 +49,6 @@ const stickyVars = {
'DEEPGRAM_SPEECH_SEARCH',
'DEEPGRAM_SPEECH_REPLACE',
'DEEPGRAM_SPEECH_ENDPOINTING',
'DEEPGRAM_SPEECH_UTTERANCE_END_MS',
'DEEPGRAM_SPEECH_VAD_TURNOFF',
'DEEPGRAM_SPEECH_TAG'
],
@@ -97,68 +92,12 @@ const stickyVars = {
nvidia: [
'NVIDIA_HINTS'
],
cobalt: [
'COBALT_SPEECH_HINTS',
'COBALT_COMPILED_CONTEXT_DATA',
'COBALT_METADATA'
],
soniox: [
'SONIOX_PROFANITY_FILTER',
'SONIOX_MODEL'
],
assemblyai: [
'ASSEMBLYAI_API_KEY',
'ASSEMBLYAI_WORD_BOOST'
]
};
const consolidateTranscripts = (bufferedTranscripts, channel, language) => {
if (bufferedTranscripts.length === 1) return bufferedTranscripts[0];
let totalConfidence = 0;
const finalTranscript = bufferedTranscripts.reduce((acc, evt) => {
totalConfidence += evt.alternatives[0].confidence;
let newTranscript = evt.alternatives[0].transcript;
// If new transcript consists only of digits, spaces, and a trailing comma or period
if (newTranscript.match(/^[\d\s]+[,.]?$/)) {
newTranscript = newTranscript.replace(/\s/g, ''); // Remove all spaces
if (newTranscript.endsWith(',')) {
newTranscript = newTranscript.slice(0, -1); // Remove the trailing comma
} else if (newTranscript.endsWith('.')) {
newTranscript = newTranscript.slice(0, -1); // Remove the trailing period
}
}
const lastChar = acc.alternatives[0].transcript.slice(-1);
const firstChar = newTranscript.charAt(0);
if (lastChar.match(/\d/) && firstChar.match(/\d/)) {
acc.alternatives[0].transcript += newTranscript;
} else {
acc.alternatives[0].transcript += ` ${newTranscript}`;
}
return acc;
}, {
language_code: language,
channel_tag: channel,
is_final: true,
alternatives: [{
transcript: ''
}]
});
finalTranscript.alternatives[0].confidence = bufferedTranscripts.length === 1 ?
bufferedTranscripts[0].alternatives[0].confidence :
totalConfidence / bufferedTranscripts.length;
finalTranscript.alternatives[0].transcript = finalTranscript.alternatives[0].transcript.trim();
finalTranscript.vendor = {
name: 'deepgram',
evt: bufferedTranscripts
};
return finalTranscript;
};
const compileSonioxTranscripts = (finalWordChunks, channel, language) => {
const words = finalWordChunks.flat();
const transcript = words.reduce((acc, word) => {
@@ -216,7 +155,7 @@ const normalizeSoniox = (evt, channel, language) => {
};
};
const normalizeDeepgram = (evt, channel, language, shortUtterance) => {
const normalizeDeepgram = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
const alternatives = (evt.channel?.alternatives || [])
.map((alt) => ({
@@ -224,14 +163,10 @@ const normalizeDeepgram = (evt, channel, language, shortUtterance) => {
transcript: alt.transcript,
}));
/**
* note difference between is_final and speech_final in Deepgram:
* https://developers.deepgram.com/docs/understand-endpointing-interim-results
*/
return {
language_code: language,
channel_tag: channel,
is_final: shortUtterance ? evt.is_final : evt.speech_final,
is_final: evt.is_final,
alternatives: [alternatives[0]],
vendor: {
name: 'deepgram',
@@ -290,37 +225,12 @@ const normalizeGoogle = (evt, channel, language) => {
};
};
const normalizeCobalt = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
const alternatives = (evt.alternatives || [])
.map((alt) => ({
confidence: alt.confidence,
transcript: alt.transcript_formatted,
}));
const normalizeCustom = (evt, channel, language) => {
return {
language_code: language,
channel_tag: channel,
is_final: evt.is_final,
alternatives,
vendor: {
name: 'cobalt',
evt: copy
}
};
};
const normalizeCustom = (evt, channel, language, vendor) => {
const copy = JSON.parse(JSON.stringify(evt));
return {
language_code: language,
channel_tag: channel,
is_final: evt.is_final,
alternatives: [evt.alternatives[0]],
vendor: {
name: vendor,
evt: copy
}
alternatives: [evt.alternatives[0]]
};
};
@@ -380,32 +290,14 @@ const normalizeAws = (evt, channel, language) => {
};
};
const normalizeAssemblyAi = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
return {
language_code: language,
channel_tag: channel,
is_final: evt.message_type === 'FinalTranscript',
alternatives: [
{
confidence: evt.confidence,
transcript: evt.text,
}
],
vendor: {
name: 'ASSEMBLYAI',
evt: copy
}
};
};
module.exports = (logger) => {
const normalizeTranscription = (evt, vendor, channel, language, shortUtterance) => {
const normalizeTranscription = (evt, vendor, channel, language) => {
//logger.debug({ evt, vendor, channel, language }, 'normalizeTranscription');
switch (vendor) {
case 'deepgram':
return normalizeDeepgram(evt, channel, language, shortUtterance);
return normalizeDeepgram(evt, channel, language);
case 'microsoft':
return normalizeMicrosoft(evt, channel, language);
case 'google':
@@ -420,13 +312,9 @@ module.exports = (logger) => {
return normalizeNvidia(evt, channel, language);
case 'soniox':
return normalizeSoniox(evt, channel, language);
case 'cobalt':
return normalizeCobalt(evt, channel, language);
case 'assemblyai':
return normalizeAssemblyAi(evt, channel, language, shortUtterance);
default:
if (vendor.startsWith('custom:')) {
return normalizeCustom(evt, channel, language, vendor);
return normalizeCustom(evt, channel, language);
}
logger.error(`Unknown vendor ${vendor}`);
return evt;
@@ -468,12 +356,12 @@ module.exports = (logger) => {
...(rOpts.punctuation === false && {GOOGLE_SPEECH_ENABLE_AUTOMATIC_PUNCTUATION: 0}),
...(rOpts.words == false && {GOOGLE_SPEECH_ENABLE_WORD_TIME_OFFSETS: 0}),
...(rOpts.diarization === false && {GOOGLE_SPEECH_SPEAKER_DIARIZATION: 0}),
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'string' &&
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
{GOOGLE_SPEECH_HINTS: rOpts.hints.join(',')}),
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'object' &&
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
{GOOGLE_SPEECH_HINTS: JSON.stringify(rOpts.hints)}),
...(typeof rOpts.hintsBoost === 'number' && {GOOGLE_SPEECH_HINTS_BOOST: rOpts.hintsBoost}),
...(rOpts.altLanguages?.length > 0 &&
...(rOpts.altLanguages.length > 0 &&
{GOOGLE_SPEECH_ALTERNATIVE_LANGUAGE_CODES: [...new Set(rOpts.altLanguages)].join(',')}),
...(rOpts.interactionType &&
{GOOGLE_SPEECH_METADATA_INTERACTION_TYPE: rOpts.interactionType}),
@@ -496,33 +384,28 @@ module.exports = (logger) => {
};
}
else if ('microsoft' === vendor) {
const {azureOptions = {}} = rOpts;
opts = {
...opts,
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'string' &&
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
{AZURE_SPEECH_HINTS: rOpts.hints.map((h) => h.trim()).join(',')}),
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'object' &&
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
{AZURE_SPEECH_HINTS: rOpts.hints.map((h) => h.phrase).join(',')}),
...(rOpts.altLanguages && rOpts.altLanguages.length > 0 &&
{AZURE_SPEECH_ALTERNATIVE_LANGUAGE_CODES: [...new Set(rOpts.altLanguages)].join(',')}),
...(rOpts.requestSnr && {AZURE_REQUEST_SNR: 1}),
...(rOpts.profanityOption && {AZURE_PROFANITY_OPTION: rOpts.profanityOption}),
...(sttCredentials.use_custom_stt && sttCredentials.custom_stt_endpoint_url &&
{AZURE_SERVICE_ENDPOINT: sttCredentials.custom_stt_endpoint_url}),
...(rOpts.azureServiceEndpoint && {AZURE_SERVICE_ENDPOINT: rOpts.azureServiceEndpoint}),
...(rOpts.initialSpeechTimeoutMs > 0 &&
{AZURE_INITIAL_SPEECH_TIMEOUT_MS: rOpts.initialSpeechTimeoutMs}),
...(rOpts.requestSnr && {AZURE_REQUEST_SNR: 1}),
...(rOpts.audioLogging && {AZURE_AUDIO_LOGGING: 1}),
...{AZURE_USE_OUTPUT_FORMAT_DETAILED: 1},
...(azureOptions.speechSegmentationSilenceTimeoutMs &&
{AZURE_SPEECH_SEGMENTATION_SILENCE_TIMEOUT_MS: azureOptions.speechSegmentationSilenceTimeoutMs}),
...(sttCredentials && {
...(sttCredentials.api_key && {AZURE_SUBSCRIPTION_KEY: sttCredentials.api_key}),
...(sttCredentials.region && {AZURE_REGION: sttCredentials.region}),
AZURE_SUBSCRIPTION_KEY: sttCredentials.api_key,
AZURE_REGION: sttCredentials.region,
}),
...(sttCredentials.use_custom_stt && sttCredentials.custom_stt_endpoint &&
{AZURE_SERVICE_ENDPOINT_ID: sttCredentials.custom_stt_endpoint}),
{AZURE_SERVICE_ENDPOINT_ID: sttCredentials.custom_stt_endpoint})
};
}
else if ('nuance' === vendor) {
@@ -585,12 +468,10 @@ module.exports = (logger) => {
{DEEPGRAM_SPEECH_MODEL: deepgramOptions.model},
...(deepgramOptions.punctuate) &&
{DEEPGRAM_SPEECH_ENABLE_AUTOMATIC_PUNCTUATION: 1},
...(deepgramOptions.smartFormatting) &&
{DEEPGRAM_SPEECH_ENABLE_SMART_FORMAT: 1},
...(deepgramOptions.profanityFilter) &&
{DEEPGRAM_SPEECH_PROFANITY_FILTER: 1},
...(deepgramOptions.redact) &&
{DEEPGRAM_SPEECH_REDACT: deepgramOptions.redact},
{DEEPGRAM_SPEECH_REDACT: 1},
...(deepgramOptions.diarize) &&
{DEEPGRAM_SPEECH_DIARIZE: 1},
...(deepgramOptions.diarizeVersion) &&
@@ -605,16 +486,14 @@ module.exports = (logger) => {
{DEEPGRAM_SPEECH_SEARCH: deepgramOptions.search.join(',')},
...(deepgramOptions.replace) &&
{DEEPGRAM_SPEECH_REPLACE: deepgramOptions.replace.join(',')},
...(rOpts.hints && rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
{DEEPGRAM_SPEECH_KEYWORDS: rOpts.hints.map((h) => h.trim()).join(',')}),
...(rOpts.hints && rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
{DEEPGRAM_SPEECH_KEYWORDS: rOpts.hints.map((h) => h.phrase).join(',')}),
...(deepgramOptions.keywords) &&
{DEEPGRAM_SPEECH_KEYWORDS: deepgramOptions.keywords.join(',')},
...('endpointing' in deepgramOptions) &&
{DEEPGRAM_SPEECH_ENDPOINTING: deepgramOptions.endpointing},
...(deepgramOptions.utteranceEndMs) &&
{DEEPGRAM_SPEECH_UTTERANCE_END_MS: deepgramOptions.utteranceEndMs},
...(deepgramOptions.vadTurnoff) &&
{DEEPGRAM_SPEECH_VAD_TURNOFF: deepgramOptions.vadTurnoff},
...(deepgramOptions.tag) &&
@@ -628,9 +507,9 @@ module.exports = (logger) => {
...opts,
...(sttCredentials.api_key) &&
{SONIOX_API_KEY: sttCredentials.api_key},
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'string' &&
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
{SONIOX_HINTS: rOpts.hints.join(',')}),
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'object' &&
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
{SONIOX_HINTS: JSON.stringify(rOpts.hints)}),
...(typeof rOpts.hintsBoost === 'number' &&
{SONIOX_HINTS_BOOST: rOpts.hintsBoost}),
@@ -688,9 +567,9 @@ module.exports = (logger) => {
...(rOpts.diarization && rOpts.diarizationMaxSpeakers > 0 &&
{NVIDIA_DIARIZATION_SPEAKER_COUNT: rOpts.diarizationMaxSpeakers}),
...(rOpts.separateRecognitionPerChannel && {NVIDIA_SEPARATE_RECOGNITION_PER_CHANNEL: 1}),
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'string' &&
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
{NVIDIA_HINTS: rOpts.hints.join(',')}),
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'object' &&
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
{NVIDIA_HINTS: JSON.stringify(rOpts.hints)}),
...(typeof rOpts.hintsBoost === 'number' &&
{NVIDIA_HINTS_BOOST: rOpts.hintsBoost}),
@@ -698,42 +577,14 @@ module.exports = (logger) => {
{NVIDIA_CUSTOM_CONFIGURATION: JSON.stringify(nvidiaOptions.customConfiguration)}),
};
}
else if ('cobalt' === vendor) {
const {cobaltOptions = {}} = rOpts;
const cobaltUri = cobaltOptions.serverUri || sttCredentials.cobalt_server_uri;
opts = {
...opts,
...(rOpts.words && {COBALT_WORD_TIME_OFFSETS: 1}),
...(!rOpts.words && {COBALT_WORD_TIME_OFFSETS: 0}),
...(rOpts.model && {COBALT_MODEL: rOpts.model}),
...(cobaltUri && {COBALT_SERVER_URI: cobaltUri}),
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'string' &&
{COBALT_SPEECH_HINTS: rOpts.hints.join(',')}),
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'object' &&
{COBALT_SPEECH_HINTS: JSON.stringify(rOpts.hints)}),
...(rOpts.hints?.length > 0 &&
{COBALT_CONTEXT_TOKEN: cobaltOptions.contextToken || 'unk:default'}),
...(cobaltOptions.metadata && {COBALT_METADATA: cobaltOptions.metadata}),
...(cobaltOptions.enableConfusionNetwork && {COBALT_ENABLE_CONFUSION_NETWORK: 1}),
...(cobaltOptions.compiledContextData && {COBALT_COMPILED_CONTEXT_DATA: cobaltOptions.compiledContextData}),
};
} else if ('assemblyai' === vendor) {
opts = {
...opts,
...(sttCredentials.api_key) &&
{ASSEMBLYAI_API_KEY: sttCredentials.api_key},
...(rOpts.hints?.length > 0 &&
{ASSEMBLYAI_WORD_BOOST: JSON.stringify(rOpts.hints)})
};
}
else if (vendor.startsWith('custom:')) {
let {options = {}} = rOpts;
const {auth_token, custom_stt_url} = sttCredentials;
options = {
...options,
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'string' &&
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
{hints: rOpts.hints}),
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'object' &&
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
{hints: JSON.stringify(rOpts.hints)}),
...(typeof rOpts.hintsBoost === 'number' && {hintsBoost: rOpts.hintsBoost})
};
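The hunks above build per-vendor speech channel variables with a conditional object-spread idiom. As a minimal standalone sketch (the MY_VENDOR_* keys below are illustrative only, not real jambonz channel variables): spreading a falsy value into an object literal is a no-op, so each key is added only when its condition holds, and the optional-chaining guard on hints avoids a TypeError when no hints were supplied.

// Minimal sketch of the conditional-spread idiom; MY_VENDOR_* names are illustrative only
const buildChannelVars = (rOpts = {}) => ({
  // spreading false/undefined into an object literal is a no-op,
  // so the key appears only when the condition is truthy
  ...(rOpts.punctuate && {MY_VENDOR_PUNCTUATE: 1}),
  // hints?.length guards against hints being undefined before indexing it
  ...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'string' &&
    {MY_VENDOR_HINTS: rOpts.hints.map((h) => h.trim()).join(',')})
});
// buildChannelVars({punctuate: true})          => { MY_VENDOR_PUNCTUATE: 1 }
// buildChannelVars({hints: ['sales', ' HR ']}) => { MY_VENDOR_HINTS: 'sales,HR' }
// buildChannelVars({})                         => {}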
@@ -775,9 +626,6 @@ module.exports = (logger) => {
ep.removeCustomEventListener(SonioxTranscriptionEvents.Transcription);
ep.removeCustomEventListener(CobaltTranscriptionEvents.Transcription);
ep.removeCustomEventListener(CobaltTranscriptionEvents.CompileContext);
ep.removeCustomEventListener(NvidiaTranscriptionEvents.Transcription);
ep.removeCustomEventListener(NvidiaTranscriptionEvents.TranscriptionComplete);
ep.removeCustomEventListener(NvidiaTranscriptionEvents.StartOfSpeech);
@@ -788,10 +636,6 @@ module.exports = (logger) => {
ep.removeCustomEventListener(JambonzTranscriptionEvents.ConnectFailure);
ep.removeCustomEventListener(JambonzTranscriptionEvents.Error);
ep.removeCustomEventListener(AssemblyAiTranscriptionEvents.Transcription);
ep.removeCustomEventListener(AssemblyAiTranscriptionEvents.Connect);
ep.removeCustomEventListener(AssemblyAiTranscriptionEvents.ConnectFailure);
};
const setSpeechCredentialsAtRuntime = (recognizer) => {
@@ -813,10 +657,6 @@ module.exports = (logger) => {
const {apiKey} = recognizer.sonioxOptions || {};
if (apiKey) return {api_key: apiKey};
}
else if (recognizer.vendor === 'cobalt') {
const {serverUri} = recognizer.cobaltOptions || {};
if (serverUri) return {cobalt_server_uri: serverUri};
}
else if (recognizer.vendor === 'ibm') {
const {ttsApiKey, ttsRegion, sttApiKey, sttRegion, instanceId} = recognizer.ibmOptions || {};
if (ttsApiKey || sttApiKey) return {
@@ -834,7 +674,6 @@ module.exports = (logger) => {
setChannelVarsForStt,
removeSpeechListeners,
setSpeechCredentialsAtRuntime,
compileSonioxTranscripts,
consolidateTranscripts
compileSonioxTranscripts
};
};

View File

@@ -43,7 +43,6 @@ class WsRequestor extends BaseRequestor {
async request(type, hook, params, httpHeaders = {}) {
assert(HookMsgTypes.includes(type));
const url = hook.url || hook;
const wantsAck = !['call:status', 'verb:status', 'jambonz:error'].includes(type);
if (this.maliciousClient) {
this.logger.info({url: this.url}, 'WsRequestor:request - discarding msg to malicious client');
@@ -74,19 +73,11 @@ class WsRequestor extends BaseRequestor {
if (this.connectInProgress) {
this.logger.debug(
`WsRequestor:request(${this.id}) - queueing ${type} message since we are connecting`);
if (wantsAck) {
const p = new Promise((resolve, reject) => {
this.queuedMsg.push({type, hook, params, httpHeaders, promise: {resolve, reject}});
});
return p;
}
else {
this.queuedMsg.push({type, hook, params, httpHeaders});
}
this.queuedMsg.push({type, hook, params, httpHeaders});
return;
}
this.connectInProgress = true;
this.logger.debug(`WsRequestor:request(${this.id}) - connecting since we do not have a connection for ${type}`);
this.logger.debug(`WsRequestor:request(${this.id}) - connecting since we do not have a connection`);
if (this.connections >= MAX_RECONNECTS) {
return Promise.reject(`max attempts connecting to ${this.url}`);
}
@@ -125,14 +116,9 @@ class WsRequestor extends BaseRequestor {
const sendQueuedMsgs = () => {
if (this.queuedMsg.length > 0) {
for (const {type, hook, params, httpHeaders, promise} of this.queuedMsg) {
for (const {type, hook, params, httpHeaders} of this.queuedMsg) {
this.logger.debug(`WsRequestor:request - preparing queued ${type} for sending`);
if (promise) {
this.request(type, hook, params, httpHeaders)
.then((res) => promise.resolve(res))
.catch((err) => promise.reject(err));
}
else setImmediate(this.request.bind(this, type, hook, params, httpHeaders));
setImmediate(this.request.bind(this, type, hook, params, httpHeaders));
}
this.queuedMsg.length = 0;
}
@@ -151,8 +137,8 @@ class WsRequestor extends BaseRequestor {
}
/* simple notifications */
if (!wantsAck || reconnectingWithoutAck) {
this.ws?.send(JSON.stringify(obj), () => {
if (['call:status', 'verb:status', 'jambonz:error'].includes(type) || reconnectingWithoutAck) {
this.ws.send(JSON.stringify(obj), () => {
this.logger.debug({obj}, `WsRequestor:request websocket: sent (${url})`);
sendQueuedMsgs();
});
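In the hunks above, a message that expects an acknowledgment but arrives while the websocket is still connecting is queued together with a promise, and sendQueuedMsgs settles that promise once the queued request is finally re-issued. A minimal standalone sketch of that pattern (enqueue, flush and sendNow are simplified stand-ins, not WsRequestor's actual API):

// Sketch of queueing requests during connect and settling the caller's promise later;
// the names here are simplified stand-ins for illustration only.
class MsgQueue {
  constructor(sendNow) {
    this.sendNow = sendNow;  // async (type, payload) => response, usable once connected
    this.queued = [];
  }
  enqueue(type, payload, wantsAck) {
    if (wantsAck) {
      // caller awaits this promise; it settles when the queued msg is actually sent
      return new Promise((resolve, reject) => {
        this.queued.push({type, payload, promise: {resolve, reject}});
      });
    }
    this.queued.push({type, payload});  // simple notification, no ack expected
  }
  flush() {
    for (const {type, payload, promise} of this.queued) {
      if (promise) {
        this.sendNow(type, payload)
          .then((res) => promise.resolve(res))
          .catch((err) => promise.reject(err));
      }
      else setImmediate(() => this.sendNow(type, payload));
    }
    this.queued.length = 0;
  }
}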

package-lock.json (generated): diff suppressed because it is too large (7332 lines)

View File

@@ -1,6 +1,6 @@
{
"name": "jambonz-feature-server",
"version": "0.8.4",
"version": "0.8.3",
"main": "app.js",
"engines": {
"node": ">= 10.16.0"
@@ -19,22 +19,19 @@
"bugs": {},
"scripts": {
"start": "node app",
"test": "NODE_ENV=test JAMBONES_HOSTING=1 HTTP_POOL=1 JAMBONES_TTS_TRIM_SILENCE=1 ENCRYPTION_SECRET=foobar DRACHTIO_HOST=127.0.0.1 DRACHTIO_PORT=9060 DRACHTIO_SECRET=cymru JAMBONES_MYSQL_HOST=127.0.0.1 JAMBONES_MYSQL_PORT=3360 JAMBONES_MYSQL_USER=jambones_test JAMBONES_MYSQL_PASSWORD=jambones_test JAMBONES_MYSQL_DATABASE=jambones_test JAMBONES_REDIS_HOST=127.0.0.1 JAMBONES_REDIS_PORT=16379 JAMBONES_LOGLEVEL=error ENABLE_METRICS=0 HTTP_PORT=3000 JAMBONES_SBCS=172.38.0.10 JAMBONES_FREESWITCH=127.0.0.1:8022:JambonzR0ck$:docker-host JAMBONES_TIME_SERIES_HOST=127.0.0.1 JAMBONES_NETWORK_CIDR=172.38.0.0/16 node test/ ",
"test": "NODE_ENV=test JAMBONES_HOSTING=1 HTTP_POOL=1 ENCRYPTION_SECRET=foobar DRACHTIO_HOST=127.0.0.1 DRACHTIO_PORT=9060 DRACHTIO_SECRET=cymru JAMBONES_MYSQL_HOST=127.0.0.1 JAMBONES_MYSQL_PORT=3360 JAMBONES_MYSQL_USER=jambones_test JAMBONES_MYSQL_PASSWORD=jambones_test JAMBONES_MYSQL_DATABASE=jambones_test JAMBONES_REDIS_HOST=127.0.0.1 JAMBONES_REDIS_PORT=16379 JAMBONES_LOGLEVEL=error ENABLE_METRICS=0 HTTP_PORT=3000 JAMBONES_SBCS=172.38.0.10 JAMBONES_FREESWITCH=127.0.0.1:8022:JambonzR0ck$:docker-host JAMBONES_TIME_SERIES_HOST=127.0.0.1 JAMBONES_NETWORK_CIDR=172.38.0.0/16 node test/ ",
"coverage": "./node_modules/.bin/nyc --reporter html --report-dir ./coverage npm run test",
"jslint": "eslint app.js tracer.js lib",
"jslint:fix": "eslint app.js tracer.js lib --fix"
},
"dependencies": {
"@aws-sdk/client-auto-scaling": "^3.360.0",
"@aws-sdk/client-sns": "^3.360.0",
"@jambonz/db-helpers": "^0.9.1",
"@jambonz/db-helpers": "^0.9.0",
"@jambonz/http-health-check": "^0.0.1",
"@jambonz/mw-registrar": "^0.2.4",
"@jambonz/realtimedb-helpers": "^0.8.6",
"@jambonz/speech-utils": "^0.0.24",
"@jambonz/stats-collector": "^0.1.9",
"@jambonz/time-series": "^0.2.8",
"@jambonz/verb-specifications": "^0.0.45",
"@jambonz/speech-utils": "^0.0.15",
"@jambonz/stats-collector": "^0.1.8",
"@jambonz/time-series": "^0.2.5",
"@jambonz/verb-specifications": "^0.0.24",
"@opentelemetry/api": "^1.4.0",
"@opentelemetry/exporter-jaeger": "^1.9.0",
"@opentelemetry/exporter-trace-otlp-http": "^0.35.0",
@@ -44,13 +41,13 @@
"@opentelemetry/sdk-trace-base": "^1.9.0",
"@opentelemetry/sdk-trace-node": "^1.9.0",
"@opentelemetry/semantic-conventions": "^1.9.0",
"aws-sdk": "^2.1313.0",
"bent": "^7.3.12",
"debug": "^4.3.4",
"deepcopy": "^2.1.0",
"drachtio-fsmrf": "^3.0.27",
"drachtio-srf": "^4.5.29",
"drachtio-fsmrf": "^3.0.21",
"drachtio-srf": "^4.5.23",
"express": "^4.18.2",
"express-validator": "^7.0.1",
"ip": "^1.1.8",
"moment": "^2.29.4",
"parse-url": "^8.1.0",
@@ -61,11 +58,11 @@
"short-uuid": "^4.2.2",
"sinon": "^15.0.1",
"to-snake-case": "^1.0.0",
"undici": "^5.26.2",
"undici": "^5.19.1",
"uuid-random": "^1.3.2",
"verify-aws-sns-signature": "^0.1.0",
"ws": "^8.9.0",
"xml2js": "^0.6.2"
"xml2js": "^0.5.0"
},
"devDependencies": {
"clear-module": "^4.1.2",

View File

@@ -1,86 +0,0 @@
#
# Recommended minimum configuration:
#
# Example rule allowing access from your local networks.
# Adapt to list your (internal) IP networks from where browsing
# should be allowed
acl localnet src 0.0.0.1-0.255.255.255 # RFC 1122 "this" network (LAN)
acl localnet src 10.0.0.0/8 # RFC 1918 local private network (LAN)
acl localnet src 100.64.0.0/10 # RFC 6598 shared address space (CGN)
acl localnet src 169.254.0.0/16 # RFC 3927 link-local (directly plugged) machines
acl localnet src 172.16.0.0/12 # RFC 1918 local private network (LAN)
acl localnet src 172.38.0.0/12 # RFC 1918 local private network (LAN)
acl localnet src 192.168.0.0/16 # RFC 1918 local private network (LAN)
acl localnet src fc00::/7 # RFC 4193 local private network range
acl localnet src fe80::/10 # RFC 4291 link-local (directly plugged) machines
acl SSL_ports port 443
acl Safe_ports port 80 # http
acl Safe_ports port 21 # ftp
acl Safe_ports port 443 # https
acl Safe_ports port 70 # gopher
acl Safe_ports port 210 # wais
acl Safe_ports port 1025-65535 # unregistered ports
acl Safe_ports port 280 # http-mgmt
acl Safe_ports port 488 # gss-http
acl Safe_ports port 591 # filemaker
acl Safe_ports port 777 # multiling http
#
# Recommended minimum Access Permission configuration:
#
# Deny requests to certain unsafe ports
http_access allow !Safe_ports
# Deny CONNECT to other than secure SSL ports
http_access allow CONNECT !SSL_ports
# Only allow cachemgr access from localhost
http_access allow localhost manager
http_access allow manager
# This default configuration only allows localhost requests because a more
# permissive Squid installation could introduce new attack vectors into the
# network by proxying external TCP connections to unprotected services.
http_access allow localhost
# The two deny rules below are unnecessary in this default configuration
# because they are followed by a "deny all" rule. However, they may become
# critically important when you start allowing external requests below them.
# Protect web applications running on the same server as Squid. They often
# assume that only local users can access them at "localhost" ports.
http_access allow to_localhost
# Protect cloud servers that provide local users with sensitive info about
# their server via certain well-known link-local (a.k.a. APIPA) addresses.
# http_access deny to_linklocal
#
# INSERT YOUR OWN RULE(S) HERE TO ALLOW ACCESS FROM YOUR CLIENTS
#
# For example, to allow access from your local networks, you may uncomment the
# following rule (and/or add rules that match your definition of "local"):
# http_access allow localnet
# And finally deny all other access to this proxy
http_access allow all
# Squid normally listens to port 3128
http_port 3128
# Uncomment and adjust the following to add a disk cache directory.
#cache_dir ufs /usr/local/var/cache/squid 100 16 256
# Leave coredumps in the first cache dir
coredump_dir /usr/local/var/cache/squid
#
# Add any of your own refresh_pattern entries above these.
#
refresh_pattern ^ftp: 1440 20% 10080
refresh_pattern ^gopher: 1440 0% 1440
refresh_pattern -i (/cgi-bin/|\?) 0 0% 0
refresh_pattern . 0 20% 4320

View File

@@ -37,7 +37,7 @@ test('test create-call timeout', async(t) => {
'account_sid':account_sid,
'timeout': 1,
"call_hook": {
"url": "https://public-apps.jambonz.cloud/hello-world",
"url": "https://public-apps.jambonz.us/hello-world",
"method": "POST"
},
"from": "15083718299",
@@ -88,8 +88,8 @@ test('test create-call call-hook basic authentication', async(t) => {
let verbs = [
{
"verb": "pause",
"length": 1
"verb": "say",
"text": "hello"
}
];
await provisionCallHook(from, verbs);

View File

@@ -330,7 +330,7 @@ CREATE TABLE `applications` (
LOCK TABLES `applications` WRITE;
/*!40000 ALTER TABLE `applications` DISABLE KEYS */;
INSERT INTO `applications` VALUES ('0dddaabf-0a30-43e3-84e8-426873b1a78b','decline call',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','c71e79db-24f2-4866-a3ee-febb0f97b341','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('0dddaabf-0a30-43e3-84e8-426873b1a78c','app json',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','c71e79db-24f2-4866-a3ee-febb0f97b341','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,'[{\"verb\": \"play\",\"url\": \"silence_stream://5000\"}]','google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('17461c69-56b5-4dab-ad83-1c43a0f93a3d','gather',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','10692465-a511-4277-9807-b7157e4f81e1','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('195d9507-6a42-46a8-825f-f009e729d023','sip info',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','c9113e7a-741f-48b9-96c1-f2f78176eeb3','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('24d0f6af-e976-44dd-a2e8-41c7b55abe33','say account 2',NULL,'622f62e4-303a-49f2-bbe0-eb1e1714e37a','54ab0976-a6c0-45d8-89a4-d90d45bf9d96','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('308b4f41-1a18-4052-b89a-c054e75ce242','say',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','54ab0976-a6c0-45d8-89a4-d90d45bf9d96','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('ae026ab5-3029-47b4-9d7c-236e3a4b4ebe','transcribe account 2',NULL,'622f62e4-303a-49f2-bbe0-eb1e1714e37a','ecb67a8f-f7ce-4919-abf0-bbc69c1001e5','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('baf9213b-5556-4c20-870c-586392ed246f','transcribe',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','ecb67a8f-f7ce-4919-abf0-bbc69c1001e5','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('0dddaabf-0a30-43e3-84e8-426873b1a7ac','http proxy app',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','c71e79db-24f2-4866-a3ee-febb0f97b3ac','293904c1-351b-4bca-8d58-1a29b853c7ac',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48');
INSERT INTO `applications` VALUES ('0dddaabf-0a30-43e3-84e8-426873b1a78b','decline call',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','c71e79db-24f2-4866-a3ee-febb0f97b341','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('0dddaabf-0a30-43e3-84e8-426873b1a78c','app json',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','c71e79db-24f2-4866-a3ee-febb0f97b341','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,'[{\"verb\": \"play\",\"url\": \"silence_stream://5000\"}]','google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('17461c69-56b5-4dab-ad83-1c43a0f93a3d','gather',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','10692465-a511-4277-9807-b7157e4f81e1','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('195d9507-6a42-46a8-825f-f009e729d023','sip info',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','c9113e7a-741f-48b9-96c1-f2f78176eeb3','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('24d0f6af-e976-44dd-a2e8-41c7b55abe33','say account 2',NULL,'622f62e4-303a-49f2-bbe0-eb1e1714e37a','54ab0976-a6c0-45d8-89a4-d90d45bf9d96','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('308b4f41-1a18-4052-b89a-c054e75ce242','say',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','54ab0976-a6c0-45d8-89a4-d90d45bf9d96','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('ae026ab5-3029-47b4-9d7c-236e3a4b4ebe','transcribe account 2',NULL,'622f62e4-303a-49f2-bbe0-eb1e1714e37a','ecb67a8f-f7ce-4919-abf0-bbc69c1001e5','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('baf9213b-5556-4c20-870c-586392ed246f','transcribe',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','ecb67a8f-f7ce-4919-abf0-bbc69c1001e5','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48');
/*!40000 ALTER TABLE `applications` ENABLE KEYS */;
UNLOCK TABLES;
@@ -646,7 +646,7 @@ CREATE TABLE `phone_numbers` (
LOCK TABLES `phone_numbers` WRITE;
/*!40000 ALTER TABLE `phone_numbers` DISABLE KEYS */;
INSERT INTO `phone_numbers` VALUES ('05eeed62-b29b-4679-bf38-d7a4e318be44','16174000003','5145b436-2f38-4029-8d4c-fd8c67831c7a','bb845d4b-83a9-4cde-a6e9-50f3743bab3f','17461c69-56b5-4dab-ad83-1c43a0f93a3d',NULL),('4b439355-debc-40c7-9cfa-5be58c2bed6b','16174000000','5145b436-2f38-4029-8d4c-fd8c67831c7a','bb845d4b-83a9-4cde-a6e9-50f3743bab3f','0dddaabf-0a30-43e3-84e8-426873b1a78b',NULL),('964d0581-9627-44cb-be20-8118050406b2','16174000006','5145b436-2f38-4029-8d4c-fd8c67831c7a','bb845d4b-83a9-4cde-a6e9-50f3743bab3f','195d9507-6a42-46a8-825f-f009e729d023',NULL),('964d0581-9627-44cb-be20-8118050406b3','16174000007','5145b436-2f38-4029-8d4c-fd8c67831c7a','bb845d4b-83a9-4cde-a6e9-50f3743bab3f','0dddaabf-0a30-43e3-84e8-426873b1a78c',NULL),('9cc9e7fc-b7b0-4101-8f3c-9fe13ce5df0a','16174000001','5145b436-2f38-4029-8d4c-fd8c67831c7a','bb845d4b-83a9-4cde-a6e9-50f3743bab3f','308b4f41-1a18-4052-b89a-c054e75ce242',NULL),('e686a320-0725-418f-be65-532159bdc3ed','16174000002','5145b436-2f38-4029-8d4c-fd8c67831c7a','622f62e4-303a-49f2-bbe0-eb1e1714e37a','24d0f6af-e976-44dd-a2e8-41c7b55abe33',NULL),('f3c53863-b629-4cf6-9dcb-c7fb7072314b','16174000004','5145b436-2f38-4029-8d4c-fd8c67831c7a','bb845d4b-83a9-4cde-a6e9-50f3743bab3f','baf9213b-5556-4c20-870c-586392ed246f',NULL),('f6416c17-829a-4f11-9c32-f0d00e4a9ae9','16174000005','5145b436-2f38-4029-8d4c-fd8c67831c7a','622f62e4-303a-49f2-bbe0-eb1e1714e37a','ae026ab5-3029-47b4-9d7c-236e3a4b4ebe',NULL),('4b439355-debc-40c7-9cfa-5be58c2bedac','16174000015','5145b436-2f38-4029-8d4c-fd8c67831c7a','bb845d4b-83a9-4cde-a6e9-50f3743bab3f','0dddaabf-0a30-43e3-84e8-426873b1a7ac',NULL);
INSERT INTO `phone_numbers` VALUES ('05eeed62-b29b-4679-bf38-d7a4e318be44','16174000003','5145b436-2f38-4029-8d4c-fd8c67831c7a','bb845d4b-83a9-4cde-a6e9-50f3743bab3f','17461c69-56b5-4dab-ad83-1c43a0f93a3d',NULL),('4b439355-debc-40c7-9cfa-5be58c2bed6b','16174000000','5145b436-2f38-4029-8d4c-fd8c67831c7a','bb845d4b-83a9-4cde-a6e9-50f3743bab3f','0dddaabf-0a30-43e3-84e8-426873b1a78b',NULL),('964d0581-9627-44cb-be20-8118050406b2','16174000006','5145b436-2f38-4029-8d4c-fd8c67831c7a','bb845d4b-83a9-4cde-a6e9-50f3743bab3f','195d9507-6a42-46a8-825f-f009e729d023',NULL),('964d0581-9627-44cb-be20-8118050406b3','16174000007','5145b436-2f38-4029-8d4c-fd8c67831c7a','bb845d4b-83a9-4cde-a6e9-50f3743bab3f','0dddaabf-0a30-43e3-84e8-426873b1a78c',NULL),('9cc9e7fc-b7b0-4101-8f3c-9fe13ce5df0a','16174000001','5145b436-2f38-4029-8d4c-fd8c67831c7a','bb845d4b-83a9-4cde-a6e9-50f3743bab3f','308b4f41-1a18-4052-b89a-c054e75ce242',NULL),('e686a320-0725-418f-be65-532159bdc3ed','16174000002','5145b436-2f38-4029-8d4c-fd8c67831c7a','622f62e4-303a-49f2-bbe0-eb1e1714e37a','24d0f6af-e976-44dd-a2e8-41c7b55abe33',NULL),('f3c53863-b629-4cf6-9dcb-c7fb7072314b','16174000004','5145b436-2f38-4029-8d4c-fd8c67831c7a','bb845d4b-83a9-4cde-a6e9-50f3743bab3f','baf9213b-5556-4c20-870c-586392ed246f',NULL),('f6416c17-829a-4f11-9c32-f0d00e4a9ae9','16174000005','5145b436-2f38-4029-8d4c-fd8c67831c7a','622f62e4-303a-49f2-bbe0-eb1e1714e37a','ae026ab5-3029-47b4-9d7c-236e3a4b4ebe',NULL);
/*!40000 ALTER TABLE `phone_numbers` ENABLE KEYS */;
UNLOCK TABLES;
@@ -896,7 +896,7 @@ CREATE TABLE `service_providers` (
LOCK TABLES `service_providers` WRITE;
/*!40000 ALTER TABLE `service_providers` DISABLE KEYS */;
INSERT INTO `service_providers` VALUES ('2708b1b3-2736-40ea-b502-c53d8396247f','jambonz.cloud','jambonz.cloud service provider','yakeeda.com',NULL,NULL);
INSERT INTO `service_providers` VALUES ('2708b1b3-2736-40ea-b502-c53d8396247f','jambonz.us','jambonz.us service provider','yakeeda.com',NULL,NULL);
/*!40000 ALTER TABLE `service_providers` ENABLE KEYS */;
UNLOCK TABLES;
@@ -1045,7 +1045,6 @@ CREATE TABLE `speech_credentials` (
`tts_tested_ok` tinyint(1) DEFAULT NULL,
`stt_tested_ok` tinyint(1) DEFAULT NULL,
`created_at` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
`label` VARCHAR(64),
PRIMARY KEY (`speech_credential_sid`),
UNIQUE KEY `speech_credential_sid` (`speech_credential_sid`),
UNIQUE KEY `speech_credentials_idx_1` (`vendor`,`account_sid`),
@@ -1064,7 +1063,7 @@ CREATE TABLE `speech_credentials` (
LOCK TABLES `speech_credentials` WRITE;
/*!40000 ALTER TABLE `speech_credentials` DISABLE KEYS */;
INSERT INTO `speech_credentials` VALUES ('2add163c-34f2-45c6-a016-f955d218ffb6',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','google','credential-goes-here',1,1,NULL,'2021-04-03 15:42:10',1,1,'2023-05-31 03:44:21', NULL),('2add347f-34f2-45c6-a016-f955d218ffb6',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','microsoft','credential-goes-here',1,1,NULL,'2021-04-03 15:42:10',1,1,'2023-05-31 03:44:21', NULL),('84154212-5c99-4c94-8993-bc2a46288daa',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','aws','credential-goes-here',1,1,NULL,NULL,1,1,'2023-05-31 03:44:21', NULL);
INSERT INTO `speech_credentials` VALUES ('2add163c-34f2-45c6-a016-f955d218ffb6',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','google','credential-goes-here',1,1,NULL,'2021-04-03 15:42:10',1,1,'2023-05-31 03:44:21'),('2add347f-34f2-45c6-a016-f955d218ffb6',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','microsoft','credential-goes-here',1,1,NULL,'2021-04-03 15:42:10',1,1,'2023-05-31 03:44:21'),('84154212-5c99-4c94-8993-bc2a46288daa',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','aws','credential-goes-here',1,1,NULL,NULL,NULL,NULL,'2023-05-31 03:44:21');
/*!40000 ALTER TABLE `speech_credentials` ENABLE KEYS */;
UNLOCK TABLES;
@@ -1254,7 +1253,7 @@ CREATE TABLE `webhooks` (
LOCK TABLES `webhooks` WRITE;
/*!40000 ALTER TABLE `webhooks` DISABLE KEYS */;
INSERT INTO `webhooks` VALUES ('10692465-a511-4277-9807-b7157e4f81e1','http://127.0.0.1:3102/','POST',NULL,NULL),('293904c1-351b-4bca-8d58-1a29b853c7db','http://127.0.0.1:3100/callStatus','POST',NULL,NULL),('54ab0976-a6c0-45d8-89a4-d90d45bf9d96','http://127.0.0.1:3101/','POST',NULL,NULL),('6ac36aeb-6bd0-428a-80a1-aed95640a296','https://flows.jambonz.cloud/callStatus','POST',NULL,NULL),('c71e79db-24f2-4866-a3ee-febb0f97b341','http://127.0.0.1:3100/','POST',NULL,NULL),('c9113e7a-741f-48b9-96c1-f2f78176eeb3','http://127.0.0.1:3104/','POST',NULL,NULL),('d9c205c6-a129-443e-a9c0-d1bb437d4bb7','https://flows.jambonz.cloud/testCall','POST',NULL,NULL),('ecb67a8f-f7ce-4919-abf0-bbc69c1001e5','http://127.0.0.1:3103/','POST',NULL,NULL),('293904c1-351b-4bca-8d58-1a29b853c7ac','http://172.38.0.60:3000/callStatus','POST',NULL,NULL),('c71e79db-24f2-4866-a3ee-febb0f97b3ac','http://172.38.0.60:3000/','POST',NULL,NULL);
INSERT INTO `webhooks` VALUES ('10692465-a511-4277-9807-b7157e4f81e1','http://127.0.0.1:3102/','POST',NULL,NULL),('293904c1-351b-4bca-8d58-1a29b853c7db','http://127.0.0.1:3100/callStatus','POST',NULL,NULL),('54ab0976-a6c0-45d8-89a4-d90d45bf9d96','http://127.0.0.1:3101/','POST',NULL,NULL),('6ac36aeb-6bd0-428a-80a1-aed95640a296','https://flows.jambonz.us/callStatus','POST',NULL,NULL),('c71e79db-24f2-4866-a3ee-febb0f97b341','http://127.0.0.1:3100/','POST',NULL,NULL),('c9113e7a-741f-48b9-96c1-f2f78176eeb3','http://127.0.0.1:3104/','POST',NULL,NULL),('d9c205c6-a129-443e-a9c0-d1bb437d4bb7','https://flows.jambonz.us/testCall','POST',NULL,NULL),('ecb67a8f-f7ce-4919-abf0-bbc69c1001e5','http://127.0.0.1:3103/','POST',NULL,NULL);
/*!40000 ALTER TABLE `webhooks` ENABLE KEYS */;
UNLOCK TABLES;
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;

View File

@@ -13,8 +13,6 @@ DROP TABLE IF EXISTS beta_invite_codes;
DROP TABLE IF EXISTS call_routes;
DROP TABLE IF EXISTS clients;
DROP TABLE IF EXISTS dns_records;
DROP TABLE IF EXISTS lcr;
@@ -129,16 +127,6 @@ application_sid CHAR(36) NOT NULL,
PRIMARY KEY (call_route_sid)
) COMMENT='a regex-based pattern match for call routing';
CREATE TABLE clients
(
client_sid CHAR(36) NOT NULL UNIQUE ,
account_sid CHAR(36) NOT NULL,
is_active BOOLEAN NOT NULL DEFAULT 1,
username VARCHAR(64),
password VARCHAR(1024),
PRIMARY KEY (client_sid)
);
CREATE TABLE dns_records
(
dns_record_sid CHAR(36) NOT NULL UNIQUE ,
@@ -334,7 +322,6 @@ last_tested DATETIME,
tts_tested_ok BOOLEAN,
stt_tested_ok BOOLEAN,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
label VARCHAR(64),
PRIMARY KEY (speech_credential_sid)
);
@@ -424,7 +411,7 @@ PRIMARY KEY (smpp_gateway_sid)
CREATE TABLE phone_numbers
(
phone_number_sid CHAR(36) UNIQUE ,
number VARCHAR(132) NOT NULL,
number VARCHAR(132) NOT NULL UNIQUE ,
voip_carrier_sid CHAR(36),
account_sid CHAR(36),
application_sid CHAR(36),
@@ -479,12 +466,9 @@ app_json TEXT,
speech_synthesis_vendor VARCHAR(64) NOT NULL DEFAULT 'google',
speech_synthesis_language VARCHAR(12) NOT NULL DEFAULT 'en-US',
speech_synthesis_voice VARCHAR(64),
speech_synthesis_label VARCHAR(64),
speech_recognizer_vendor VARCHAR(64) NOT NULL DEFAULT 'google',
speech_recognizer_language VARCHAR(64) NOT NULL DEFAULT 'en-US',
speech_recognizer_label VARCHAR(64),
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
record_all_calls BOOLEAN NOT NULL DEFAULT false,
PRIMARY KEY (application_sid)
) COMMENT='A defined set of behaviors to be applied to phone calls ';
@@ -522,9 +506,6 @@ subspace_client_secret VARCHAR(255),
subspace_sip_teleport_id VARCHAR(255),
subspace_sip_teleport_destinations VARCHAR(255),
siprec_hook_sid CHAR(36),
record_all_calls BOOLEAN NOT NULL DEFAULT false,
record_format VARCHAR(16) NOT NULL DEFAULT 'mp3',
bucket_credential VARCHAR(8192) COMMENT 'credential used to authenticate with storage service',
PRIMARY KEY (account_sid)
) COMMENT='An enterprise that uses the platform for comm services';
@@ -545,9 +526,6 @@ ALTER TABLE call_routes ADD FOREIGN KEY account_sid_idxfk_3 (account_sid) REFERE
ALTER TABLE call_routes ADD FOREIGN KEY application_sid_idxfk (application_sid) REFERENCES applications (application_sid);
CREATE INDEX client_sid_idx ON clients (client_sid);
ALTER TABLE clients ADD CONSTRAINT account_sid_idxfk_13 FOREIGN KEY account_sid_idxfk_13 (account_sid) REFERENCES accounts (account_sid);
CREATE INDEX dns_record_sid_idx ON dns_records (dns_record_sid);
ALTER TABLE dns_records ADD FOREIGN KEY account_sid_idxfk_4 (account_sid) REFERENCES accounts (account_sid);
@@ -612,6 +590,8 @@ CREATE INDEX smpp_address_sid_idx ON smpp_addresses (smpp_address_sid);
CREATE INDEX service_provider_sid_idx ON smpp_addresses (service_provider_sid);
ALTER TABLE smpp_addresses ADD FOREIGN KEY service_provider_sid_idxfk_4 (service_provider_sid) REFERENCES service_providers (service_provider_sid);
CREATE UNIQUE INDEX speech_credentials_idx_1 ON speech_credentials (vendor,account_sid);
CREATE INDEX speech_credential_sid_idx ON speech_credentials (speech_credential_sid);
CREATE INDEX service_provider_sid_idx ON speech_credentials (service_provider_sid);
ALTER TABLE speech_credentials ADD FOREIGN KEY service_provider_sid_idxfk_5 (service_provider_sid) REFERENCES service_providers (service_provider_sid);
@@ -648,8 +628,6 @@ CREATE INDEX smpp_gateway_sid_idx ON smpp_gateways (smpp_gateway_sid);
CREATE INDEX voip_carrier_sid_idx ON smpp_gateways (voip_carrier_sid);
ALTER TABLE smpp_gateways ADD FOREIGN KEY voip_carrier_sid_idxfk (voip_carrier_sid) REFERENCES voip_carriers (voip_carrier_sid);
CREATE UNIQUE INDEX phone_numbers_unique_idx_voip_carrier_number ON phone_numbers (number,voip_carrier_sid);
CREATE INDEX phone_number_sid_idx ON phone_numbers (phone_number_sid);
CREATE INDEX number_idx ON phone_numbers (number);
CREATE INDEX voip_carrier_sid_idx ON phone_numbers (voip_carrier_sid);
@@ -704,4 +682,5 @@ ALTER TABLE accounts ADD FOREIGN KEY queue_event_hook_sid_idxfk (queue_event_hoo
ALTER TABLE accounts ADD FOREIGN KEY device_calling_application_sid_idxfk (device_calling_application_sid) REFERENCES applications (application_sid);
ALTER TABLE accounts ADD FOREIGN KEY siprec_hook_sid_idxfk (siprec_hook_sid) REFERENCES applications (application_sid);
SET FOREIGN_KEY_CHECKS=1;
SET FOREIGN_KEY_CHECKS=1;

View File

@@ -5,8 +5,6 @@ const getJSON = bent('json')
const clearModule = require('clear-module');
const {provisionCallHook} = require('./utils')
const sleepFor = (ms) => new Promise((r) => setTimeout(r, ms));
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
});
@@ -49,7 +47,6 @@ test('\'dial-phone\'', async(t) => {
// THEN
const p = sippUac('uas-dial.xml', '172.38.0.10', undefined, undefined, 2);
await sleepFor(1000);
let account_sid = '622f62e4-303a-49f2-bbe0-eb1e1714e37a';
let post = bent('http://127.0.0.1:3000/', 'POST', 'json', 201);
@@ -87,7 +84,7 @@ test('\'dial-sip\'', async(t) => {
try {
await connect(srf);
// wait for fs connected to drachtio server.
await sleepFor(1000);
await new Promise(r => setTimeout(r, 1000));
// GIVEN
const from = "dial_sip";
let verbs = [

View File

@@ -42,7 +42,7 @@ services:
ipv4_address: 172.38.0.7
drachtio:
image: drachtio/drachtio-server:0.8.22
image: drachtio/drachtio-server:latest
restart: always
command: drachtio --contact "sip:*;transport=udp" --mtu 4096 --address 0.0.0.0 --port 9022
ports:
@@ -57,7 +57,7 @@ services:
condition: service_healthy
freeswitch:
image: drachtio/drachtio-freeswitch-mrf:0.4.33
image: drachtio/drachtio-freeswitch-mrf:0.4.18
restart: always
command: freeswitch --rtp-range-start 20000 --rtp-range-end 20100
environment:
@@ -92,13 +92,3 @@ services:
networks:
fs:
ipv4_address: 172.38.0.90
squid:
image: ubuntu/squid:edge
ports:
- "3128:3128"
volumes:
- ./configuration/squid.conf:/etc/squid/squid.conf
networks:
fs:
ipv4_address: 172.38.0.91

View File

@@ -1,72 +0,0 @@
const test = require('tape');
const { sippUac } = require('./sipp')('test_fs');
const clearModule = require('clear-module');
const {provisionCallHook, provisionCustomHook} = require('./utils')
const bent = require('bent');
const getJSON = bent('json')
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
});
function connect(connectable) {
return new Promise((resolve, reject) => {
connectable.on('connect', () => {
return resolve();
});
});
}
test('\'HTTP proxy\' test Info', async(t) => {
clearModule.all();
process.env.JAMBONES_HTTP_PROXY_IP = "127.0.0.1";
process.env.JAMBONES_HTTP_PROXY_PROTOCOL = "http";
process.env.JAMBONES_HTTP_PROXY_PORT = 3128;
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
const verbs = [
{
verb: 'config',
sipRequestWithinDialogHook: '/customHook'
},
{
verb: 'play',
url: 'silence_stream://5000',
}
];
const waitHookVerbs = [
{
verb: 'hangup'
}
];
const from = 'http_proxy_info';
await provisionCustomHook(from, waitHookVerbs)
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-success-info-received-bye.xml', '172.38.0.10', from, "16174000015");
t.pass('sip Info: success send Info');
// Make sure that sipRequestWithinDialogHook is called and success
const json = await getJSON(`http:127.0.0.1:3100/lastRequest/${from}_customHook`)
t.pass(json.body.sip_method === 'INFO', 'sipRequestWithinDialogHook contains sip_method')
t.pass(json.body.sip_body === 'hello jambonz\r\n', 'sipRequestWithinDialogHook contains sip_method')
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
} finally {
process.env.JAMBONES_HTTP_PROXY_IP = null;
process.env.JAMBONES_HTTP_PROXY_PROTOCOL = null;
process.env.JAMBONES_HTTP_PROXY_PORT = null;
}
});
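The removed test above drives outbound webhooks through a local squid instance by setting JAMBONES_HTTP_PROXY_IP, JAMBONES_HTTP_PROXY_PROTOCOL and JAMBONES_HTTP_PROXY_PORT before loading the app. As a rough, assumption-laden sketch only (not the feature server's actual wiring), such env vars could be mapped to an undici ProxyAgent like so:

// Illustrative only: route outbound HTTP through a proxy when the env vars are set.
const {ProxyAgent, request} = require('undici');

const {JAMBONES_HTTP_PROXY_IP, JAMBONES_HTTP_PROXY_PROTOCOL, JAMBONES_HTTP_PROXY_PORT} = process.env;
const dispatcher = JAMBONES_HTTP_PROXY_IP ?
  new ProxyAgent(
    `${JAMBONES_HTTP_PROXY_PROTOCOL || 'http'}://${JAMBONES_HTTP_PROXY_IP}:${JAMBONES_HTTP_PROXY_PORT || 3128}`) :
  undefined;

// hypothetical helper: POST a webhook, via the proxy when one is configured
const postHook = (url, body) => request(url, {
  method: 'POST',
  headers: {'content-type': 'application/json'},
  body: JSON.stringify(body),
  ...(dispatcher && {dispatcher})
});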

View File

@@ -1,65 +0,0 @@
const test = require('tape');
const { sippUac } = require('./sipp')('test_fs');
const clearModule = require('clear-module');
const {provisionCallHook, provisionCustomHook} = require('./utils')
const bent = require('bent');
const getJSON = bent('json')
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
});
function connect(connectable) {
return new Promise((resolve, reject) => {
connectable.on('connect', () => {
return resolve();
});
});
}
test('\'sip Indialog\' test Info', async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
const verbs = [
{
verb: 'config',
sipRequestWithinDialogHook: '/customHook'
},
{
verb: 'play',
url: 'silence_stream://5000',
}
];
const waitHookVerbs = [
{
verb: 'hangup'
}
];
const from = 'sip_indialog_info';
await provisionCustomHook(from, waitHookVerbs)
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-success-info-received-bye.xml', '172.38.0.10', from);
t.pass('sip Info: success send Info');
// Make sure that sipRequestWithinDialogHook is called and success
const json = await getJSON(`http:127.0.0.1:3100/lastRequest/${from}_customHook`)
t.pass(json.body.sip_method === 'INFO', 'sipRequestWithinDialogHook contains sip_method')
t.pass(json.body.sip_body === 'hello jambonz\r\n', 'sipRequestWithinDialogHook contains sip_method')
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});

View File

@@ -15,7 +15,5 @@ require('./sip-refer-tests');
require('./listen-tests');
require('./config-test');
require('./queue-test');
require('./in-dialog-test');
require('./http-proxy-test');
require('./remove-test-db');
require('./docker_stop');

View File

@@ -84,46 +84,6 @@ test('\'config\' reset synthesizer tests', async(t) => {
}
});
test('Say verb array test', async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
const verbs = [
{
"verb": "config",
"synthesizer": {
"vendor": "microsft",
"voice": "foobar"
},
},
{
"verb": "config",
"reset": 'synthesizer',
},
{
verb: 'say',
text: ['hello', 'https://samplelib.com/lib/preview/mp3/sample-3s.mp3']
}
];
const from = 'say_test_success';
await provisionCallHook(from, verbs)
// THEN
await sippUac('uac-success-received-bye.xml', '172.38.0.10', from);
t.pass('say: succeeds when using using account credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
const {MICROSOFT_CUSTOM_API_KEY, MICROSOFT_DEPLOYMENT_ID, MICROSOFT_CUSTOM_REGION, MICROSOFT_CUSTOM_VOICE} = process.env;
if (MICROSOFT_CUSTOM_API_KEY && MICROSOFT_DEPLOYMENT_ID && MICROSOFT_CUSTOM_REGION && MICROSOFT_CUSTOM_VOICE) {
test('\'say\' tests - microsoft custom voice', async(t) => {

View File

@@ -1,114 +0,0 @@
<?xml version="1.0" encoding="ISO-8859-1" ?>
<!DOCTYPE scenario SYSTEM "sipp.dtd">
<scenario name="Basic Sipstone UAC">
<!-- In client mode (sipp placing calls), the Call-ID MUST be -->
<!-- generated by sipp. To do so, use [call_id] keyword. -->
<send retrans="500">
<![CDATA[
INVITE sip:[to]@[remote_ip]:[remote_port] SIP/2.0
Via: SIP/2.0/[transport] [local_ip]:[local_port];branch=[branch]
From: [from] <sip:[from]@[local_ip]:[local_port]>;tag=[pid]SIPpTag00[call_number]
To: <sip:[to]@[remote_ip]:[remote_port]>
Call-ID: [call_id]
CSeq: 1 INVITE
Contact: sip:[from]@[local_ip]:[local_port]
Max-Forwards: 70
X-Account-Sid: bb845d4b-83a9-4cde-a6e9-50f3743bab3f
Subject: uac-say
Content-Type: application/sdp
Content-Length: [len]
v=0
o=user1 53655765 2353687637 IN IP[local_ip_type] [local_ip]
s=-
c=IN IP[media_ip_type] [media_ip]
t=0 0
m=audio [media_port] RTP/AVP 0
a=rtpmap:0 PCMU/8000
]]>
</send>
<recv response="100"
optional="true">
</recv>
<recv response="180" optional="true">
</recv>
<recv response="183" optional="true">
</recv>
<!-- By adding rrs="true" (Record Route Sets), the route sets -->
<!-- are saved and used for following messages sent. Useful to test -->
<!-- against stateful SIP proxies/B2BUAs. -->
<recv response="200" rtd="true">
</recv>
<!-- Packet lost can be simulated in any send/recv message by -->
<!-- by adding the 'lost = "10"'. Value can be [1-100] percent. -->
<send>
<![CDATA[
ACK sip:[to]@[remote_ip]:[remote_port] SIP/2.0
Via: SIP/2.0/[transport] [local_ip]:[local_port];branch=[branch]
From: [from] <sip:[from]@[local_ip]:[local_port]>;tag=[pid]SIPpTag00[call_number]
To: [to] <sip:[to]@[remote_ip]:[remote_port]>[peer_tag_param]
Call-ID: [call_id]
CSeq: 1 ACK
Contact: sip:[from]@[local_ip]:[local_port]
Max-Forwards: 70
Subject: uac-say
Content-Length: 0
]]>
</send>
<pause milliseconds="2000"/>
<!-- Send an INFO message -->
<send>
<![CDATA[
INFO sip:[service]@[remote_ip]:[remote_port] SIP/2.0
Via: SIP/2.0/[transport] [local_ip]:[local_port];branch=[branch]
From: [from] <sip:[from]@[local_ip]:[local_port]>;tag=[pid]SIPpTag00[call_number]
To: [to] <sip:[to]@[remote_ip]:[remote_port]>[peer_tag_param]
Call-ID: [call_id]
CSeq: 2 INFO
Contact: sip:[from]@[local_ip]:[local_port]
Max-Forwards: 70
Subject: Performance Test
Content-Type: text/plain
Content-Length: [len]
hello jambonz
]]>
</send>
<!-- Receive 200 OK -->
<recv response="200">
</recv>
<recv request="BYE">
</recv>
<send>
<![CDATA[
SIP/2.0 200 OK
[last_Via:]
[last_From:]
[last_To:]
[last_Call-ID:]
[last_CSeq:]
Contact: <sip:[local_ip]:[local_port];transport=[transport]>
Content-Length: 0
]]>
</send>
</scenario>

View File

@@ -36,7 +36,7 @@ obj.sippUac = (file, bindAddress, from='sipp', to='16174000000', loop=1) => {
'-cid_str', `%u-%p@%s-${idx++}`,
'172.38.0.50',
'-key','from', from,
'-key','to', to, '-trace_msg', '-trace_err'
'-key','to', to, '-trace_msg'
];
if (bindAddress) args.splice(5, 0, '--ip', bindAddress);

View File

@@ -210,44 +210,6 @@ test('\'transcribe\' test - soniox', async(t) => {
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'transcribe: succeeds when using soniox credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'transcribe\' test - google with asrTimeout', async(t) => {
if (!GCP_JSON_KEY) {
t.pass('skipping google tests');
return t.end();
}
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let verbs = [
{
"verb": "transcribe",
"recognizer": {
"vendor": "google",
"hints": ["customer support", "sales", "human resources", "HR"],
"asrTimeout": 4
},
"transcriptionHook": "/transcriptionHook"
}
];
let from = "gather_success";
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'transcribe: succeeds when using google credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);