Compare commits


1 Commit

Author: Dave Horton
SHA1: 71bb4707c1
Message: fix #445
Date: 2023-08-31 07:32:59 -04:00
66 changed files with 7798 additions and 11199 deletions


@@ -6,10 +6,10 @@ jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 20
node-version: 16
- run: npm ci
- run: npm run jslint
- run: docker pull drachtio/sipp

1
.gitignore vendored

@@ -42,4 +42,3 @@ ecosystem.config.js
test/credentials/*.json
run-tests.sh
run-coverage.sh
.vscode

17
.vscode/launch.json vendored Normal file

@@ -0,0 +1,17 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "node",
"request": "launch",
"name": "Launch Program",
"program": "${workspaceFolder}/test/index.js",
"env": {
"NODE_ENV": "test"
}
}
]
}


@@ -1,6 +1,6 @@
MIT License
Copyright (c) 2018-2024 FirstFive8, Inc.
Copyright (c) 2021 Drachtio Communications Services, LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal


@@ -1,4 +1,4 @@
# jambonz-feature-server [![CI](https://github.com/jambonz/jambonz-feature-server/actions/workflows/build.yml/badge.svg)](https://github.com/jambonz/jambonz-feature-server/actions/workflows/build.yml)
# jambones-feature-server ![Build Status](https://github.com/jambonz/jambonz-feature-server/workflows/CI/badge.svg)
This application implements the core feature server of the jambones platform.
@@ -21,7 +21,6 @@ Configuration is provided via environment variables:
|ENCRYPTION_SECRET| secret for credential encryption(JWT_SECRET is deprecated) |yes|
|GOOGLE_APPLICATION_CREDENTIALS| path to gcp service key file|yes|
|HTTP_PORT| tcp port to listen on for API requests from jambonz-api-server|yes|
|HTTP_IP| IP Address for API requests from jambonz-api-server |no|
|JAMBONES_GATHER_EARLY_HINTS_MATCH| if true and hints are provided, gather will opportunistically review interim transcripts if possible to reduce ASR latency |no|
|JAMBONES_FREESWITCH| IP:port:secret for Freeswitch server (e.g. '127.0.0.1:8021:JambonzR0ck$'|yes|
|JAMBONES_LOGLEVEL| log level for application, 'info' or 'debug'|no|
@@ -38,11 +37,6 @@ Configuration is provided via environment variables:
|STATS_PORT| listening port for metrics host|no|
|STATS_PROTOCOL| 'tcp' or 'udp'|no|
|STATS_TELEGRAF| if 1, metrics will be generated in telegraf format|no|
|JAMBONZ_RECORD_WS_BASE_URL| recording websocket URL to send the recording audio|no|
|JAMBONZ_RECORD_WS_USERNAME| recording websocket username|no|
|JAMBONZ_RECORD_WS_PASSWORD| recording websocket password|no|
|ANCHOR_MEDIA_ALWAYS| keep media on media server|no|
|JAMBONZ_DISABLE_DIAL_PAI_HEADER| control P-Asserted-Identity header on B-Leg|no|
### running under pm2
Typically, this application runs under [pm2](https://pm2.io) using an [ecosystem.config.js](https://pm2.keymetrics.io/docs/usage/application-declaration/) file similar to this:
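The sample file itself falls outside this diff hunk. Purely as a hedged illustration (not the project's actual ecosystem.config.js), a minimal pm2 ecosystem file for a Node app like this one could look as follows, with configuration passed through the env block using variables from the table above (all values invented):

```js
// ecosystem.config.js -- hypothetical sketch, not the file shipped with the project
module.exports = {
  apps: [
    {
      name: 'jambonz-feature-server',
      script: 'app.js',
      instances: 1,
      env: {
        NODE_ENV: 'production',
        HTTP_PORT: 3000,
        JAMBONES_LOGLEVEL: 'info',
        JAMBONES_FREESWITCH: '127.0.0.1:8021:JambonzR0ck$',
        JAMBONES_MYSQL_HOST: '127.0.0.1',
        JAMBONES_REDIS_HOST: '127.0.0.1'
      }
    }
  ]
};
```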

2
app.js

@@ -109,7 +109,7 @@ const disconnect = () => {
httpServer?.on('close', resolve);
httpServer?.close();
srf.disconnect();
srf.locals.mediaservers?.forEach((ms) => ms.disconnect());
srf.locals.mediaservers.forEach((ms) => ms.disconnect());
});
};

0
bin/k8s-pre-stop-hook.js Normal file → Executable file


@@ -9,112 +9,7 @@
"can't take your call",
"will get back to you",
"I'll get back to you",
"we are unable",
"Unable to take your call now",
"I'll reply soon",
"I'll call back",
"I'll reach out to you as soon as possible",
"Leave a message",
"Away from phone",
"Not available now",
"I'll return call",
"On another call",
"Currently on another call",
"I will return call later",
"Busy please leave message",
"Message will be returned promptly",
"Currently unavailable to answer",
"Planning to return your call soon",
"Apologies for missing your call",
"Not by the phone at the moment",
"Expecting to return your call",
"Currently not accessible",
"Intend to call back",
"Appreciate your patience!",
"Engaged in another conversation",
"I Will respond promptly",
"Kindly leave a message",
"Currently occupied leave a message",
"Unfortunately unable to answer right now",
"Occupied at the moment",
"Not present leave a message",
"Regrettably unavailable kindly leave a message",
"Will ensure a prompt response to your message",
"Currently engaged",
"Will return your call at the earliest opportunity",
"Your message will receive my prompt attention",
"I'll respond as soon as I can",
"Your message is important please leave it after the beep",
"Away from the phone at the moment",
"Unable to answer right now",
"Engaged in another task",
"Not by the phone presently",
"I'll respond at my earliest convenience",
"Away from the phone momentarily",
"I'll return your call shortly",
"Currently not able to answer",
"Your message is important please leave it after the tone",
"I'm unable to take your call right now",
"Please leave your message for me",
"I'll get back to you soon",
"Your call has been missed",
"Please leave a detailed message for me to respond to",
"Leave a message I'll make sure to respond",
"Feel free to leave a message",
"Your call is important to me",
"I'll get back to you shortly",
"Your message will be attended to promptly",
"Not available at the moment",
"I'll be sure to get back to you",
"I'll call you back soon",
"I'll ensure a prompt response",
"Sorry for the inconvenience",
"I'll return your call",
"I'll make sure to get back to you",
"I'll call you back shortly",
"I'll return your call as soon as possible",
"Apologies for the inconvenience leave your message",
"Your call is appreciated",
"I'm unavailable to answer",
"I'm currently away",
"I'll return your call as soon as I can",
"I'm away from the phone",
"I'm currently unavailable to take your call",
"Sorry for missing your call",
"I'll ensure it receives my immediate attention",
"I'm away from the phone momentarily",
"I'll reach out to you shortly",
"Apologies for the inconvenience",
"Currently occupied",
"Unable to answer your call at the moment",
"I'll make sure to follow up with you",
"Sorry for not being available",
"I'll reach out to you as soon as I can",
"I'm currently engaged",
"I'm currently busy",
"I'm currently unavailable",
"I'll respond to you at my earliest convenience",
"Your message is appreciated",
"I'll get back to you promptly",
"I'll get back to you without delay",
"Currently away from the phone",
"I'll return your call at my earliest opportunity",
"Sorry for the missed call",
"I'll make sure to address your concerns",
"Please provide your details for a callback",
"I'll make every effort to respond promptly",
"I'll ensure it's attended to promptly",
"Away from the phone temporarily",
"I'll get back to you as soon as I return",
"Currently not in a position to answer your call",
"Your call cannot be answered at the moment",
"I'll ensure to respond as soon as I'm able",
"Your call is important please leave a message",
"Unable to answer right now please leave your message",
"Currently not accessible intending to return your call",
"I'll respond promptly to your message",
"leave a memo",
"please leave a memo"
"we are unable"
],
"es-ES": [
"le pasamos la llamada",


@@ -45,7 +45,7 @@ The GCP credential is the JSON service key in stringified format.
#### Install Docker
The test suite also requires [Docker](https://www.docker.com/) and docker-compose to be installed on your laptop. Docker is used to set up a network with all of the elements required to test the jambonz-feature-server in a black-box type of fashion.
The test suite ralso equires [Docker](https://www.docker.com/) and docker-compose to be installed on your laptop. Docker is used to set up a network with all of the elements required to test the jambonz-feature-server in a black-box type of fashion.
Once you have docker installed, you can optionally make sure everything Docker-wise is working properly by running this command from the project folder:


@@ -28,6 +28,10 @@ const JAMBONES_MYSQL_PORT = parseInt(process.env.JAMBONES_MYSQL_PORT, 10) || 330
const JAMBONES_MYSQL_REFRESH_TTL = parseInt(process.env.JAMBONES_MYSQL_REFRESH_TTL, 10) || 0;
const JAMBONES_MYSQL_CONNECTION_LIMIT = parseInt(process.env.JAMBONES_MYSQL_CONNECTION_LIMIT, 10) || 10;
/* redis */
const JAMBONES_REDIS_HOST = process.env.JAMBONES_REDIS_HOST;
const JAMBONES_REDIS_PORT = parseInt(process.env.JAMBONES_REDIS_PORT, 10) || 6379;
/* gather and hints */
const JAMBONES_GATHER_EARLY_HINTS_MATCH = process.env.JAMBONES_GATHER_EARLY_HINTS_MATCH;
const JAMBONZ_GATHER_EARLY_HINTS_MATCH = process.env.JAMBONZ_GATHER_EARLY_HINTS_MATCH;
@@ -73,7 +77,6 @@ const JAMBONES_LOGLEVEL = process.env.JAMBONES_LOGLEVEL || 'info';
const JAMBONES_INJECT_CONTENT = process.env.JAMBONES_INJECT_CONTENT;
const PORT = parseInt(process.env.HTTP_PORT, 10) || 3000;
const HTTP_IP = process.env.HTTP_IP;
const HTTP_PORT_MAX = parseInt(process.env.HTTP_PORT_MAX, 10);
const K8S = process.env.K8S;
@@ -118,22 +121,32 @@ const HTTP_POOL = process.env.HTTP_POOL && parseInt(process.env.HTTP_POOL);
const HTTP_POOLSIZE = parseInt(process.env.HTTP_POOLSIZE, 10) || 10;
const HTTP_PIPELINING = parseInt(process.env.HTTP_PIPELINING, 10) || 1;
const HTTP_TIMEOUT = 10000;
const HTTP_PROXY_IP = process.env.JAMBONES_HTTP_PROXY_IP;
const HTTP_PROXY_PORT = process.env.JAMBONES_HTTP_PROXY_PORT;
const HTTP_PROXY_PROTOCOL = process.env.JAMBONES_HTTP_PROXY_PROTOCOL || 'http';
const HTTP_USER_AGENT_HEADER = process.env.JAMBONES_HTTP_USER_AGENT_HEADER || 'jambonz';
const OPTIONS_PING_INTERVAL = parseInt(process.env.OPTIONS_PING_INTERVAL, 10) || 30000;
const JAMBONZ_RECORD_WS_BASE_URL = process.env.JAMBONZ_RECORD_WS_BASE_URL || process.env.JAMBONES_RECORD_WS_BASE_URL;
const JAMBONZ_RECORD_WS_USERNAME = process.env.JAMBONZ_RECORD_WS_USERNAME || process.env.JAMBONES_RECORD_WS_USERNAME;
const JAMBONZ_RECORD_WS_PASSWORD = process.env.JAMBONZ_RECORD_WS_PASSWORD || process.env.JAMBONES_RECORD_WS_PASSWORD;
const JAMBONZ_DISABLE_DIAL_PAI_HEADER = process.env.JAMBONZ_DISABLE_DIAL_PAI_HEADER || false;
const JAMBONES_DISABLE_DIRECT_P2P_CALL = process.env.JAMBONES_DISABLE_DIRECT_P2P_CALL || false;
const JAMBONES_EAGERLY_PRE_CACHE_AUDIO = parseInt(process.env.JAMBONES_EAGERLY_PRE_CACHE_AUDIO, 10) || 0;
const JAMBONES_USE_FREESWITCH_TIMER_FD = process.env.JAMBONES_USE_FREESWITCH_TIMER_FD;
const JAMBONES_REDIS_SENTINELS = process.env.JAMBONES_REDIS_SENTINELS ? {
sentinels: process.env.JAMBONES_REDIS_SENTINELS.split(',').map((sentinel) => {
let host, port = 26379;
if (sentinel.includes(':')) {
const arr = sentinel.split(':');
host = arr[0];
port = parseInt(arr[1], 10);
} else {
host = sentinel;
}
return {host, port};
}),
name: process.env.JAMBONES_REDIS_SENTINEL_MASTER_NAME,
...(process.env.JAMBONES_REDIS_SENTINEL_PASSWORD && {
password: process.env.JAMBONES_REDIS_SENTINEL_PASSWORD
}),
...(process.env.JAMBONES_REDIS_SENTINEL_USERNAME && {
username: process.env.JAMBONES_REDIS_SENTINEL_USERNAME
})
} : null;
const JAMBONZ_RECORD_WS_BASE_URL = process.env.JAMBONZ_RECORD_WS_BASE_URL;
const JAMBONZ_RECORD_WS_USERNAME = process.env.JAMBONZ_RECORD_WS_USERNAME;
const JAMBONZ_RECORD_WS_PASSWORD = process.env.JAMBONZ_RECORD_WS_PASSWORD;
module.exports = {
JAMBONES_MYSQL_HOST,
@@ -152,12 +165,14 @@ module.exports = {
JAMBONZ_GATHER_EARLY_HINTS_MATCH,
JAMBONES_GATHER_CLEAR_GLOBAL_HINTS_ON_EMPTY_HINTS,
JAMBONES_FREESWITCH,
JAMBONES_REDIS_HOST,
JAMBONES_REDIS_PORT,
JAMBONES_REDIS_SENTINELS,
SMPP_URL,
JAMBONES_NETWORK_CIDR,
JAMBONES_API_BASE_URL,
JAMBONES_TIME_SERIES_HOST,
JAMBONES_INJECT_CONTENT,
JAMBONES_EAGERLY_PRE_CACHE_AUDIO,
JAMBONES_ESL_LISTEN_ADDRESS,
JAMBONES_SBCS,
JAMBONES_OTEL_ENABLED,
@@ -171,7 +186,6 @@ module.exports = {
JAMBONES_CLUSTER_ID,
PORT,
HTTP_PORT_MAX,
HTTP_IP,
K8S,
K8S_SBC_SIP_SERVICE_NAME,
JAMBONES_SUBNET,
@@ -198,10 +212,6 @@ module.exports = {
HTTP_POOLSIZE,
HTTP_PIPELINING,
HTTP_TIMEOUT,
HTTP_PROXY_IP,
HTTP_PROXY_PORT,
HTTP_PROXY_PROTOCOL,
HTTP_USER_AGENT_HEADER,
OPTIONS_PING_INTERVAL,
RESPONSE_TIMEOUT_MS,
JAMBONES_WS_HANDSHAKE_TIMEOUT_MS,
@@ -215,8 +225,5 @@ module.exports = {
DEEPGRAM_API_KEY,
JAMBONZ_RECORD_WS_BASE_URL,
JAMBONZ_RECORD_WS_USERNAME,
JAMBONZ_RECORD_WS_PASSWORD,
JAMBONZ_DISABLE_DIAL_PAI_HEADER,
JAMBONES_DISABLE_DIRECT_P2P_CALL,
JAMBONES_USE_FREESWITCH_TIMER_FD
JAMBONZ_RECORD_WS_PASSWORD
};
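As a quick, self-contained illustration of the JAMBONES_REDIS_SENTINELS parsing added above (input values are made up): the comma-separated string is split into {host, port} pairs, with 26379 used whenever no port is given.

```js
// Standalone sketch of the sentinel parsing shown above (hypothetical env value)
const raw = '10.0.0.1:26380,10.0.0.2';   // e.g. JAMBONES_REDIS_SENTINELS
const sentinels = raw.split(',').map((sentinel) => {
  let host, port = 26379;                // default sentinel port when none supplied
  if (sentinel.includes(':')) {
    const arr = sentinel.split(':');
    host = arr[0];
    port = parseInt(arr[1], 10);
  } else {
    host = sentinel;
  }
  return {host, port};
});
console.log(sentinels);
// [ { host: '10.0.0.1', port: 26380 }, { host: '10.0.0.2', port: 26379 } ]
```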


@@ -1,69 +0,0 @@
const appsMap = {
queue: {
// Dummy hook to follow later feature server logic.
call_hook: {
url: 'https://jambonz.org',
method: 'GET'
},
account_sid: '',
app_json: [{
verb: 'dequeue',
name: '',
timeout: 5
}]
},
user: {
// Dummy hook to follow later feature server logic.
call_hook: {
url: 'https://jambonz.org',
method: 'GET'
},
account_sid: '',
app_json: [{
verb: 'dial',
callerId: '',
answerOnBridge: true,
target: [
{
type: 'user',
name: ''
}
]
}]
},
conference: {
// Dummy hook to follow later feature server logic.
call_hook: {
url: 'https://jambonz.org',
method: 'GET'
},
account_sid: '',
app_json: [{
verb: 'conference',
name: '',
beep: false,
startConferenceOnEnter: true
}]
}
};
const createJambonzApp = (type, {account_sid, name, caller_id}) => {
const app = {...appsMap[type]};
app.account_sid = account_sid;
switch (type) {
case 'queue':
case 'conference':
app.app_json[0].name = name;
break;
case 'user':
app.app_json[0].callerId = caller_id;
app.app_json[0].target[0].name = name;
break;
}
app.app_json = JSON.stringify(app.app_json);
return app;
};
module.exports = {
createJambonzApp
};
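For reference, a hedged sketch of how the helper above behaves for a queue application (the account sid and queue name are invented):

```js
// Hypothetical usage of createJambonzApp as defined above
const {createJambonzApp} = require('./dynamic-apps');

const app = createJambonzApp('queue', {
  account_sid: 'aaaaaaaa-1111-2222-3333-444444444444',  // made-up account sid
  name: 'support'
});
// app.call_hook -> {url: 'https://jambonz.org', method: 'GET'} (the dummy hook)
// app.app_json  -> '[{"verb":"dequeue","name":"support","timeout":5}]'
```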


@@ -5,337 +5,281 @@ const CallInfo = require('../../session/call-info');
const {CallDirection, CallStatus} = require('../../utils/constants');
const uuidv4 = require('uuid-random');
const SipError = require('drachtio-srf').SipError;
const { validationResult, body } = require('express-validator');
const { validate } = require('@jambonz/verb-specifications');
const sysError = require('./error');
const HttpRequestor = require('../../utils/http-requestor');
const WsRequestor = require('../../utils/ws-requestor');
const RootSpan = require('../../utils/call-tracer');
const dbUtils = require('../../utils/db-utils');
const { mergeSdpMedia, extractSdpMedia } = require('../../utils/sdp-utils');
const { createCallSchema, customSanitizeFunction } = require('../schemas/create-call');
const removeNullProperties = (obj) => (Object.keys(obj).forEach((key) => obj[key] === null && delete obj[key]), obj);
const removeNulls = (req, res, next) => {
req.body = removeNullProperties(req.body);
next();
};
router.post('/', async(req, res) => {
const {logger} = req.app.locals;
const accountSid = req.body.account_sid;
const {srf} = require('../../..');
router.post('/',
removeNulls,
createCallSchema,
body('tag').custom((value) => {
if (value) {
customSanitizeFunction(value);
}
return true;
}),
async(req, res) => {
const {logger} = req.app.locals;
const errors = validationResult(req);
if (!errors.isEmpty()) {
logger.info({errors: errors.array()}, 'POST /Calls: validation errors');
return res.status(400).json({ errors: errors.array() });
}
const accountSid = req.body.account_sid;
const {srf} = require('../../..');
const app_json = req.body['app_json'];
try {
// app_json is created only by api-server.
if (app_json) {
// if available, delete from req before creating task
delete req.body.app_json;
// validate possible app_json via verb-specifications
validate(logger, JSON.parse(app_json));
}
} catch (err) {
logger.debug({ err }, `invalid app_json: ${err.message}`);
}
logger.debug({body: req.body}, 'got createCall request');
try {
let uri, cs, to;
const restDial = makeTask(logger, { 'rest:dial': req.body });
restDial.appJson = app_json;
const {lookupAccountDetails, lookupCarrierByPhoneNumber, lookupCarrier} = dbUtils(logger, srf);
const {
lookupAppBySid
} = srf.locals.dbHelpers;
const {getSBC, getFreeswitch} = srf.locals;
const sbcAddress = getSBC();
if (!sbcAddress) throw new Error('no available SBCs for outbound call creation');
const target = restDial.to;
const opts = {
callingNumber: restDial.from,
...(restDial.callerName && {callingName: restDial.callerName}),
headers: req.body.headers || {}
};
logger.debug({body: req.body}, 'got createCall request');
try {
let uri, cs, to;
// app_json is creaeted by only api-server.
// if it available, take it and delete before creating task
const app_json = req.body.app_json;
delete req.body.app_json;
const restDial = makeTask(logger, {'rest:dial': req.body});
restDial.appJson = app_json;
const {lookupAccountDetails, lookupCarrierByPhoneNumber, lookupCarrier} = dbUtils(logger, srf);
const {
lookupAppBySid
} = srf.locals.dbHelpers;
const {getSBC, getFreeswitch} = srf.locals;
const sbcAddress = getSBC();
if (!sbcAddress) throw new Error('no available SBCs for outbound call creation');
const target = restDial.to;
const opts = {
callingNumber: restDial.from,
...(restDial.callerName && {callingName: restDial.callerName}),
headers: req.body.headers || {}
};
const {lookupTeamsByAccount, lookupAccountBySid} = srf.locals.dbHelpers;
const account = await lookupAccountBySid(req.body.account_sid);
const accountInfo = await lookupAccountDetails(req.body.account_sid);
const callSid = uuidv4();
const application = req.body.application_sid ? await lookupAppBySid(req.body.application_sid) : null;
const record_all_calls = account.record_all_calls || (application && application.record_all_calls);
const recordOutputFormat = account.record_format || 'mp3';
const rootSpan = new RootSpan('rest-call', {
callSid,
accountSid,
...(req.body?.application_sid && {'X-Application-Sid': req.body.application_sid})
});
const {lookupTeamsByAccount, lookupAccountBySid} = srf.locals.dbHelpers;
const account = await lookupAccountBySid(req.body.account_sid);
const accountInfo = await lookupAccountDetails(req.body.account_sid);
const callSid = uuidv4();
const application = req.body.application_sid ? await lookupAppBySid(req.body.application_sid) : null;
const record_all_calls = account.record_all_calls || (application && application.record_all_calls);
const recordOutputFormat = account.record_format || 'mp3';
const rootSpan = new RootSpan('rest-call', {
callSid,
accountSid,
...(req.body?.application_sid && {'X-Application-Sid': req.body.application_sid})
});
opts.headers = {
...opts.headers,
'X-Jambonz-Routing': target.type,
'X-Jambonz-FS-UUID': srf.locals.fsUUID,
'X-Call-Sid': callSid,
'X-Account-Sid': accountSid,
'X-Trace-ID': rootSpan.traceId,
...(req.body?.application_sid && {'X-Application-Sid': req.body.application_sid}),
...(restDial.fromHost && {'X-Preferred-From-Host': restDial.fromHost}),
...(record_all_calls && {'X-Record-All-Calls': recordOutputFormat})
};
opts.headers = {
...opts.headers,
'X-Jambonz-Routing': target.type,
'X-Jambonz-FS-UUID': srf.locals.fsUUID,
'X-Call-Sid': callSid,
'X-Account-Sid': accountSid,
'X-Trace-ID': rootSpan.traceId,
...(req.body?.application_sid && {'X-Application-Sid': req.body.application_sid}),
...(restDial.fromHost && {'X-Preferred-From-Host': restDial.fromHost}),
...(record_all_calls && {'X-Record-All-Calls': recordOutputFormat})
};
switch (target.type) {
case 'phone':
case 'teams':
uri = `sip:${target.number}@${sbcAddress}`;
to = target.number;
if ('teams' === target.type) {
const obj = await lookupTeamsByAccount(accountSid);
if (!obj) throw new Error('dial to ms teams not allowed; account must first be configured with teams info');
Object.assign(opts.headers, {
'X-MS-Teams-FQDN': obj.ms_teams_fqdn,
'X-MS-Teams-Tenant-FQDN': target.tenant || obj.tenant_fqdn
});
if (target.vmail === true) uri = `${uri};opaque=app:voicemail`;
}
break;
case 'user':
uri = `sip:${target.name}`;
to = target.name;
if (target.overrideTo) {
Object.assign(opts.headers, {
'X-Override-To': target.overrideTo
});
}
break;
case 'sip':
uri = target.sipUri;
to = uri;
break;
}
if (target.type === 'phone' && target.trunk) {
const voip_carrier_sid = await lookupCarrier(req.body.account_sid, target.trunk);
logger.info(
`createCall: selected ${voip_carrier_sid} for requested carrier: ${target.trunk || 'unspecified'})`);
if (voip_carrier_sid) {
opts.headers['X-Requested-Carrier-Sid'] = voip_carrier_sid;
switch (target.type) {
case 'phone':
case 'teams':
uri = `sip:${target.number}@${sbcAddress}`;
to = target.number;
if ('teams' === target.type) {
const obj = await lookupTeamsByAccount(accountSid);
if (!obj) throw new Error('dial to ms teams not allowed; account must first be configured with teams info');
Object.assign(opts.headers, {
'X-MS-Teams-FQDN': obj.ms_teams_fqdn,
'X-MS-Teams-Tenant-FQDN': target.tenant || obj.tenant_fqdn
});
if (target.vmail === true) uri = `${uri};opaque=app:voicemail`;
}
}
break;
case 'user':
uri = `sip:${target.name}`;
to = target.name;
if (target.overrideTo) {
Object.assign(opts.headers, {
'X-Override-To': target.overrideTo
});
}
break;
case 'sip':
uri = target.sipUri;
to = uri;
break;
}
/**
if (target.type === 'phone' && target.trunk) {
const voip_carrier_sid = await lookupCarrier(req.body.account_sid, target.trunk);
logger.info(
`createCall: selected ${voip_carrier_sid} for requested carrier: ${target.trunk || 'unspecified'})`);
if (voip_carrier_sid) {
opts.headers['X-Requested-Carrier-Sid'] = voip_carrier_sid;
}
}
/**
* trunk isn't specified,
* check if from-number matches any existing numbers on Jambonz
* */
if (target.type === 'phone' && !target.trunk) {
const str = restDial.from || '';
const callingNumber = str.startsWith('+') ? str.substring(1) : str;
const voip_carrier_sid = await lookupCarrierByPhoneNumber(req.body.account_sid, callingNumber);
logger.info(
`createCall: selected ${voip_carrier_sid} for requested phone number: ${callingNumber || 'unspecified'})`);
if (voip_carrier_sid) {
opts.headers['X-Requested-Carrier-Sid'] = voip_carrier_sid;
}
if (target.type === 'phone' && !target.trunk) {
const str = restDial.from || '';
const callingNumber = str.startsWith('+') ? str.substring(1) : str;
const voip_carrier_sid = await lookupCarrierByPhoneNumber(req.body.account_sid, callingNumber);
logger.info(
`createCall: selected ${voip_carrier_sid} for requested phone number: ${callingNumber || 'unspecified'})`);
if (voip_carrier_sid) {
opts.headers['X-Requested-Carrier-Sid'] = voip_carrier_sid;
}
}
/* create endpoint for outdial */
const ms = getFreeswitch();
if (!ms) throw new Error('no available Freeswitch for outbound call creation');
const ep = await ms.createEndpoint();
logger.debug(`createCall: successfully allocated endpoint, sending INVITE to ${sbcAddress}`);
/* create endpoint for outdial */
const ms = getFreeswitch();
if (!ms) throw new Error('no available Freeswitch for outbound call creation');
const ep = await ms.createEndpoint();
logger.debug(`createCall: successfully allocated endpoint, sending INVITE to ${sbcAddress}`);
/* launch outdial */
let sdp, sipLogger;
let dualEp;
let localSdp = ep.local.sdp;
if (req.body.dual_streams) {
dualEp = await ms.createEndpoint();
localSdp = mergeSdpMedia(localSdp, dualEp.local.sdp);
/* launch outdial */
let sdp, sipLogger;
const connectStream = async(remoteSdp) => {
if (remoteSdp !== sdp) {
ep.modify(sdp = remoteSdp);
return true;
}
const connectStream = async(remoteSdp) => {
if (remoteSdp !== sdp) {
sdp = remoteSdp;
if (req.body.dual_streams) {
const [sdpLegA, sdpLebB] = extractSdpMedia(remoteSdp);
await ep.modify(sdpLegA);
await dualEp.modify(sdpLebB);
await ep.bridge(dualEp);
} else {
ep.modify(sdp);
}
return true;
}
return false;
};
Object.assign(opts, {
proxy: `sip:${sbcAddress}`,
localSdp
});
if (target.auth) opts.auth = target.auth;
return false;
};
Object.assign(opts, {
proxy: `sip:${sbcAddress}`,
localSdp: ep.local.sdp
});
if (target.auth) opts.auth = target.auth;
/**
/**
* create our application object -
* not from the database as per an inbound call,
* but from the provided params in the request
*/
const app = req.body;
const app = req.body;
/**
/**
* attach our requestor and notifier objects
* these will be used for all http requests we make during this call
*/
if ('WS' === app.call_hook?.method || /^wss?:/.test(app.call_hook.url)) {
logger.debug({call_hook: app.call_hook}, 'creating websocket for call hook');
app.requestor = new WsRequestor(logger, account.account_sid, app.call_hook, account.webhook_secret) ;
if (app.call_hook.url === app.call_status_hook.url || !app.call_status_hook?.url) {
logger.debug('reusing websocket for call status hook');
app.notifier = app.requestor;
}
}
else {
logger.debug({call_hook: app.call_hook}, 'creating http client for call hook');
app.requestor = new HttpRequestor(logger, account.account_sid, app.call_hook, account.webhook_secret);
}
if (!app.notifier && app.call_status_hook) {
app.notifier = new HttpRequestor(logger, account.account_sid, app.call_status_hook, account.webhook_secret);
logger.debug({call_hook: app.call_hook}, 'creating http client for call status hook');
}
else if (!app.notifier) {
logger.debug('creating null call status hook');
app.notifier = {request: () => {}, close: () => {}};
if ('WS' === app.call_hook?.method || /^wss?:/.test(app.call_hook.url)) {
logger.debug({call_hook: app.call_hook}, 'creating websocket for call hook');
app.requestor = new WsRequestor(logger, account.account_sid, app.call_hook, account.webhook_secret) ;
if (app.call_hook.url === app.call_status_hook.url || !app.call_status_hook?.url) {
logger.debug('reusing websocket for call status hook');
app.notifier = app.requestor;
}
}
else {
logger.debug({call_hook: app.call_hook}, 'creating http client for call hook');
app.requestor = new HttpRequestor(logger, account.account_sid, app.call_hook, account.webhook_secret);
}
if (!app.notifier && app.call_status_hook) {
app.notifier = new HttpRequestor(logger, account.account_sid, app.call_status_hook, account.webhook_secret);
logger.debug({call_hook: app.call_hook}, 'creating http client for call status hook');
}
else if (!app.notifier) {
logger.debug('creating null call status hook');
app.notifier = {request: () => {}, close: () => {}};
}
/* now launch the outdial */
try {
const dlg = await srf.createUAC(uri, {...opts, followRedirects: true, keepUriOnRedirect: true}, {
cbRequest: (err, inviteReq) => {
/* now launch the outdial */
try {
const dlg = await srf.createUAC(uri, {...opts, followRedirects: true, keepUriOnRedirect: true}, {
cbRequest: (err, inviteReq) => {
/* in case of 302 redirect, this gets called twice, ignore the second
except to update the req so that it can later be canceled if need be
*/
if (res.headersSent) {
logger.info(`create-call: got redirect, updating request to new call-id ${req.get('Call-ID')}`);
if (cs) cs.req = inviteReq;
return;
}
if (err) {
logger.error(err, 'createCall Error creating call');
res.status(500).send('Call Failure');
return;
}
inviteReq.srf = srf;
inviteReq.locals = {
...(inviteReq || {}),
callSid,
application_sid: app.application_sid
};
/* ok our outbound INVITE is in flight */
const tasks = [restDial];
sipLogger = logger.child({
callSid,
callId: inviteReq.get('Call-ID'),
accountSid,
traceId: rootSpan.traceId
});
app.requestor.logger = app.notifier.logger = sipLogger;
const callInfo = new CallInfo({
direction: CallDirection.Outbound,
req: inviteReq,
to,
tag: app.tag,
callSid,
accountSid: req.body.account_sid,
applicationSid: app.application_sid,
traceId: rootSpan.traceId
});
cs = new RestCallSession({
logger: sipLogger,
application: app,
srf,
req: inviteReq,
ep,
ep2: dualEp,
tasks,
callInfo,
accountInfo,
rootSpan
});
cs.exec(req);
res.status(201).json({sid: cs.callSid, callId: inviteReq.get('Call-ID')});
sipLogger.info({sid: cs.callSid, callId: inviteReq.get('Call-ID')},
`outbound REST call attempt to ${JSON.stringify(target)} has been sent`);
},
cbProvisional: (prov) => {
const callStatus = prov.body ? CallStatus.EarlyMedia : CallStatus.Ringing;
if ([180, 183].includes(prov.status) && prov.body) connectStream(prov.body);
restDial.emit('callStatus', prov.status, !!prov.body);
cs.emit('callStatusChange', {callStatus, sipStatus: prov.status});
if (res.headersSent) {
logger.info(`create-call: got redirect, updating request to new call-id ${req.get('Call-ID')}`);
if (cs) cs.req = inviteReq;
return;
}
});
connectStream(dlg.remote.sdp);
cs.emit('callStatusChange', {
callStatus: CallStatus.InProgress,
sipStatus: 200,
sipReason: 'OK'
});
restDial.emit('callStatus', 200);
restDial.emit('connect', dlg);
}
catch (err) {
let callStatus = CallStatus.Failed;
if (err instanceof SipError) {
if ([486, 603].includes(err.status)) callStatus = CallStatus.Busy;
else if (487 === err.status) callStatus = CallStatus.NoAnswer;
if (sipLogger) sipLogger.info(`REST outdial failed with ${err.status}`);
else console.log(`REST outdial failed with ${err.status}`);
if (cs) cs.emit('callStatusChange', {
callStatus,
sipStatus: err.status,
sipReason: err.reason
if (err) {
logger.error(err, 'createCall Error creating call');
res.status(500).send('Call Failure');
return;
}
inviteReq.srf = srf;
inviteReq.locals = {
...(inviteReq || {}),
callSid,
application_sid: app.application_sid
};
/* ok our outbound INVITE is in flight */
const tasks = [restDial];
sipLogger = logger.child({
callSid,
callId: inviteReq.get('Call-ID'),
accountSid,
traceId: rootSpan.traceId
});
cs.callGone = true;
}
else {
if (cs) cs.emit('callStatusChange', {
callStatus,
sipStatus: 500,
sipReason: 'Internal Server Error'
app.requestor.logger = app.notifier.logger = sipLogger;
const callInfo = new CallInfo({
direction: CallDirection.Outbound,
req: inviteReq,
to,
tag: app.tag,
callSid,
accountSid: req.body.account_sid,
applicationSid: app.application_sid,
traceId: rootSpan.traceId
});
if (sipLogger) sipLogger.error({err}, 'REST outdial failed');
else console.error(err);
cs = new RestCallSession({
logger: sipLogger,
application: app,
srf,
req: inviteReq,
ep,
tasks,
callInfo,
accountInfo,
rootSpan
});
cs.exec(req);
res.status(201).json({sid: cs.callSid, callId: inviteReq.get('Call-ID')});
sipLogger.info({sid: cs.callSid, callId: inviteReq.get('Call-ID')},
`outbound REST call attempt to ${JSON.stringify(target)} has been sent`);
},
cbProvisional: (prov) => {
const callStatus = prov.body ? CallStatus.EarlyMedia : CallStatus.Ringing;
if ([180, 183].includes(prov.status) && prov.body) connectStream(prov.body);
restDial.emit('callStatus', prov.status, !!prov.body);
cs.emit('callStatusChange', {callStatus, sipStatus: prov.status});
}
ep.destroy();
if (dualEp) {
dualEp.destroy();
}
setTimeout(restDial.kill.bind(restDial, cs), 5000);
}
} catch (err) {
sysError(logger, res, err);
});
connectStream(dlg.remote.sdp);
cs.emit('callStatusChange', {
callStatus: CallStatus.InProgress,
sipStatus: 200,
sipReason: 'OK'
});
restDial.emit('callStatus', 200);
restDial.emit('connect', dlg);
}
});
catch (err) {
let callStatus = CallStatus.Failed;
if (err instanceof SipError) {
if ([486, 603].includes(err.status)) callStatus = CallStatus.Busy;
else if (487 === err.status) callStatus = CallStatus.NoAnswer;
if (sipLogger) sipLogger.info(`REST outdial failed with ${err.status}`);
else console.log(`REST outdial failed with ${err.status}`);
if (cs) cs.emit('callStatusChange', {
callStatus,
sipStatus: err.status,
sipReason: err.reason
});
cs.callGone = true;
}
else {
if (cs) cs.emit('callStatusChange', {
callStatus,
sipStatus: 500,
sipReason: 'Internal Server Error'
});
if (sipLogger) sipLogger.error({err}, 'REST outdial failed');
else console.error(err);
}
ep.destroy();
setTimeout(restDial.kill.bind(restDial), 5000);
}
} catch (err) {
sysError(logger, res, err);
}
});
module.exports = router;
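To make the shape of this endpoint concrete, here is a hedged sketch of a request body that would satisfy the createCallSchema validation used above and drive the rest:dial task (sids, numbers, and URLs are placeholders):

```js
// Hypothetical POST body for this route; field names come from createCallSchema / rest:dial
const body = {
  account_sid: 'bbbbbbbb-1111-2222-3333-444444444444',       // 36-character sid, made up
  from: '15083084809',
  to: {type: 'phone', number: '15083084810'},                // 'phone' | 'user' | 'sip' | 'teams'
  call_hook: {url: 'https://example.com/call-hook', method: 'POST'},
  call_status_hook: {url: 'https://example.com/call-status', method: 'POST'},
  timeout: 30,
  tag: {campaign: 'demo'}
};
```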


@@ -45,7 +45,7 @@ function retrieveCallSession(callSid, opts) {
router.post('/:callSid', async(req, res) => {
const logger = req.app.locals.logger;
const callSid = req.params.callSid;
logger.debug({body: req.body}, 'got updateCall request');
logger.debug({body: req.body}, 'got upateCall request');
try {
const cs = retrieveCallSession(callSid, req.body);
if (!cs) {


@@ -1,134 +0,0 @@
const { checkSchema } = require('express-validator');
/**
* @path api-server {{base_url}}/v1/Accounts/:account_sid/Calls
* @see https://api.jambonz.org/#243a2edd-7999-41db-bd0d-08082bbab401
*/
const createCallSchema = checkSchema({
application_sid: {
isString: true,
optional: true,
isLength: { options: { min: 36, max: 36 } },
errorMessage: 'Invalid application_sid',
},
answerOnBridge: {
isBoolean: true,
optional: true,
errorMessage: 'Invalid answerOnBridge',
},
from: {
errorMessage: 'Invalid from',
isString: true,
isLength: {
options: { min: 1, max: 256 },
},
},
fromHost: {
isString: true,
optional: true,
errorMessage: 'Invalid fromHost',
},
to: {
errorMessage: 'Invalid to',
isObject: true,
},
callerName: {
isString: true,
optional: true,
errorMessage: 'Invalid callerName',
},
amd: {
isObject: true,
optional: true,
},
tag: {
isObject: true,
optional: true,
errorMessage: 'Invalid tag',
},
app_json: {
isString: true,
optional: true,
errorMessage: 'Invalid app_json',
},
account_sid: {
isString: true,
optional: true,
errorMessage: 'Invalid account_sid',
isLength: { options: { min: 36, max: 36 } },
},
timeout: {
isInt: true,
optional: true,
errorMessage: 'Invalid timeout',
},
timeLimit: {
isInt: true,
optional: true,
errorMessage: 'Invalid timeLimit',
},
call_hook: {
isObject: true,
optional: true,
errorMessage: 'Invalid call_hook',
},
call_status_hook: {
isObject: true,
optional: true,
errorMessage: 'Invalid call_status_hook',
},
speech_synthesis_vendor: {
isString: true,
optional: true,
errorMessage: 'Invalid speech_synthesis_vendor',
},
speech_synthesis_language: {
isString: true,
optional: true,
errorMessage: 'Invalid speech_synthesis_language',
},
speech_synthesis_voice: {
isString: true,
optional: true,
errorMessage: 'Invalid speech_synthesis_voice',
},
speech_recognizer_vendor: {
isString: true,
optional: true,
errorMessage: 'Invalid speech_recognizer_vendor',
},
speech_recognizer_language: {
isString: true,
optional: true,
errorMessage: 'Invalid speech_recognizer_language',
}
}, ['body']);
const customSanitizeFunction = (value) => {
try {
if (Array.isArray(value)) {
value = value.map((item) => customSanitizeFunction(item));
} else if (typeof value === 'object') {
Object.keys(value).forEach((key) => {
value[key] = customSanitizeFunction(value[key]);
});
} else if (typeof value === 'string') {
/* trims characters at the beginning and at the end of a string */
value = value.trim();
/* Verify strings including 'http' via new URL */
if (value.includes('http')) {
value = new URL(value).toString();
}
}
} catch (error) {
value = `Error: ${error.message}`;
}
return value;
};
module.exports = {
createCallSchema,
customSanitizeFunction
};
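A small illustration of what the sanitizer above does to a nested value (sample data only, and assuming customSanitizeFunction is in scope): strings are trimmed, any string containing 'http' must round-trip through new URL, and a failure is replaced by an error string.

```js
// Hypothetical input/output for customSanitizeFunction as exported above
console.log(customSanitizeFunction({
  note: '  hello  ',                 // -> 'hello' (trimmed)
  link: ' https://example.com/x ',   // -> 'https://example.com/x' (validated via new URL)
  bad:  'http//missing-colon'        // -> 'Error: Invalid URL...' (new URL throws; caught and replaced)
}));
```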


@@ -11,10 +11,8 @@ const dbUtils = require('./utils/db-utils');
const RootSpan = require('./utils/call-tracer');
const listTaskNames = require('./utils/summarize-tasks');
const {
JAMBONES_MYSQL_REFRESH_TTL,
JAMBONES_DISABLE_DIRECT_P2P_CALL
JAMBONES_MYSQL_REFRESH_TTL
} = require('./config');
const { createJambonzApp } = require('./dynamic-apps');
module.exports = function(srf, logger) {
const {
@@ -22,21 +20,17 @@ module.exports = function(srf, logger) {
lookupAppByRegex,
lookupAppBySid,
lookupAppByRealm,
lookupAppByTeamsTenant,
registrar,
lookupClientByAccountAndUsername
lookupAppByTeamsTenant
} = srf.locals.dbHelpers;
const {
writeAlerts,
AlertType
} = srf.locals;
const {lookupAccountDetails, lookupGoogleCustomVoice} = dbUtils(logger, srf);
const {lookupAccountDetails} = dbUtils(logger, srf);
async function initLocals(req, res, next) {
function initLocals(req, res, next) {
const callId = req.get('Call-ID');
const uri = parseUri(req.uri);
logger.info({
uri,
callId,
callingNumber: req.callingNumber,
calledNumber: req.calledNumber
@@ -48,62 +42,16 @@ module.exports = function(srf, logger) {
const callSid = req.has('X-Retain-Call-Sid') ? req.get('X-Retain-Call-Sid') : uuidv4();
const account_sid = req.get('X-Account-Sid');
req.locals = {callSid, account_sid, callId};
let clientDb = null;
if (req.has('X-Authenticated-User')) {
req.locals.originatingUser = req.get('X-Authenticated-User');
let clientSettings;
const arr = /^(.*)@(.*)/.exec(req.locals.originatingUser);
if (arr) {
[clientSettings] = await lookupClientByAccountAndUsername(account_sid, arr[1]);
}
clientDb = await registrar.query(req.locals.originatingUser);
clientDb = {
...clientDb,
...clientSettings,
};
}
// check for call to application
if (uri.user?.startsWith('app-') && req.locals.originatingUser && clientDb?.allow_direct_app_calling) {
const application_sid = uri.user.match(/app-(.*)/)[1];
logger.debug(`got application from Request URI header: ${application_sid}`);
req.locals.application_sid = application_sid;
} else if (req.has('X-Application-Sid')) {
if (req.has('X-Application-Sid')) {
const application_sid = req.get('X-Application-Sid');
logger.debug(`got application from X-Application-Sid header: ${application_sid}`);
req.locals.application_sid = application_sid;
}
// check for call to queue
else if (uri.user?.startsWith('queue-') && req.locals.originatingUser && clientDb?.allow_direct_queue_calling) {
const queue_name = uri.user.match(/queue-(.*)/)[1];
logger.debug(`got Queue from Request URI header: ${queue_name}`);
req.locals.queue_name = queue_name;
}
// check for call to conference
else if (uri.user?.startsWith('conference-') && req.locals.originatingUser && clientDb?.allow_direct_app_calling) {
const conference_id = uri.user.match(/conference-(.*)/)[1];
logger.debug(`got Conference from Request URI header: ${conference_id}`);
req.locals.conference_id = conference_id;
}
// check for call to registered user
else if (!JAMBONES_DISABLE_DIRECT_P2P_CALL && req.locals.originatingUser && clientDb?.allow_direct_user_calling) {
const arr = /^(.*)@(.*)/.exec(req.locals.originatingUser);
if (arr) {
const sipRealm = arr[2];
const called_user = `${req.calledNumber}@${sipRealm}`;
const reg = await registrar.query(called_user);
if (reg) {
logger.debug(`got called Number is a registered user: ${called_user}`);
req.locals.called_user = called_user;
}
}
}
if (req.has('X-Authenticated-User')) req.locals.originatingUser = req.get('X-Authenticated-User');
if (req.has('X-MS-Teams-Tenant-FQDN')) req.locals.msTeamsTenant = req.get('X-MS-Teams-Tenant-FQDN');
if (req.has('X-Cisco-Recording-Participant')) {
const ciscoParticipants = req.get('X-Cisco-Recording-Participant');
const regex = /sip:[a-zA-Z0-9]+@[a-zA-Z0-9.-_]+/g;
const regex = /sip:[\d]+@[\d]+\.[\d]+\.[\d]+\.[\d]+/g;
const sipURIs = ciscoParticipants.match(regex);
logger.info(`X-Cisco-Recording-Participant : ${sipURIs} `);
if (sipURIs && sipURIs.length > 0) {
@@ -172,7 +120,7 @@ module.exports = function(srf, logger) {
};
logger.info({callId, metadata, sdp}, 'successfully parsed SIPREC payload');
} catch (err) {
logger.info({err, callId}, 'Error parsing multipart payload');
logger.info({callId}, 'Error parsing multipart payload');
return res.send(503);
}
}
@@ -236,27 +184,15 @@ module.exports = function(srf, logger) {
const {span} = rootSpan.startChildSpan('lookupApplication');
try {
let app;
if (req.locals.queue_name) {
logger.debug(`calling to queue ${req.locals.queue_name}, generating queue app`);
app = createJambonzApp('queue', {account_sid, name: req.locals.queue_name});
} else if (req.locals.called_user) {
logger.debug(`calling to registered user ${req.locals.called_user}, generating dial app`);
app = createJambonzApp('user',
{account_sid, name: req.locals.called_user, caller_id: req.locals.callingNumber});
} else if (req.locals.conference_id) {
logger.debug(`calling to conference ${req.locals.conference_id}, generating conference app`);
app = createJambonzApp('conference', {account_sid, name: req.locals.conference_id});
} else if (req.locals.application_sid) {
app = await lookupAppBySid(req.locals.application_sid);
} else if (req.locals.originatingUser) {
if (req.locals.application_sid) app = await lookupAppBySid(req.locals.application_sid);
else if (req.locals.originatingUser) {
const arr = /^(.*)@(.*)/.exec(req.locals.originatingUser);
if (arr) {
const sipRealm = arr[2];
logger.debug(`looking for device calling app for realm ${sipRealm}`);
app = await lookupAppByRealm(sipRealm);
if (app) {
logger.debug({app}, `retrieved device calling app for realm ${sipRealm}`);
}
if (app) logger.debug({app}, `retrieved device calling app for realm ${sipRealm}`);
}
}
else if (req.locals.msTeamsTenant) {
@@ -321,22 +257,7 @@ module.exports = function(srf, logger) {
app2.requestor = new HttpRequestor(logger, account_sid, app.call_hook, accountInfo.account.webhook_secret);
if (app.call_status_hook) app2.notifier = new HttpRequestor(logger, account_sid, app.call_status_hook,
accountInfo.account.webhook_secret);
else app2.notifier = {request: () => {}, close: () => {}};
}
// Resolve application.speech_synthesis_voice if it's custom voice
if (app2.speech_synthesis_vendor === 'google' && app2.speech_synthesis_voice.startsWith('custom_')) {
const arr = /custom_(.*)/.exec(app2.speech_synthesis_voice);
if (arr) {
const google_custom_voice_sid = arr[1];
const [custom_voice] = await lookupGoogleCustomVoice(google_custom_voice_sid);
if (custom_voice) {
app2.speech_synthesis_voice = {
reportedUsage: custom_voice.reported_usage,
model: custom_voice.model
};
}
}
else app2.notifier = {request: () => {}};
}
req.locals.application = app2;
@@ -352,15 +273,6 @@ module.exports = function(srf, logger) {
direction: CallDirection.Inbound,
traceId: rootSpan.traceId
});
// if transferred call contains callInfo, let update original data to newly created callInfo in this instance.
if (app.transferredCall && app.callInfo) {
req.locals.callInfo.callerName = app.callInfo.callerName;
req.locals.callInfo.from = app.callInfo.from;
req.locals.callInfo.to = app.callInfo.to;
req.locals.callInfo.originatingSipIp = app.callInfo.originatingSipIp;
req.locals.callInfo.originatingSipTrunkName = app.callInfo.originatingSipTrunkName;
delete app.callInfo;
}
next();
} catch (err) {
span.end();
@@ -387,30 +299,22 @@ module.exports = function(srf, logger) {
if (app.app_json) {
json = JSON.parse(app.app_json);
} else {
const defaults = {
synthesizer: {
vendor: app.speech_synthesis_vendor,
...(app.speech_synthesis_label && {label: app.speech_synthesis_label}),
language: app.speech_synthesis_language,
voice: app.speech_synthesis_voice,
...(app.fallback_speech_synthesis_vendor && {fallback_vendor: app.fallback_speech_synthesis_vendor}),
...(app.fallback_speech_synthesis_label && {fallback_label: app.fallback_speech_synthesis_label}),
...(app.fallback_speech_synthesis_language && {fallback_language: app.fallback_speech_synthesis_language}),
...(app.fallback_speech_synthesis_voice && {fallback_voice: app.fallback_speech_synthesis_voice})
},
recognizer: {
vendor: app.speech_recognizer_vendor,
...(app.speech_recognizer_label && {label: app.speech_recognizer_label}),
language: app.speech_recognizer_language,
...(app.fallback_speech_recognizer_vendor && {fallback_vendor: app.fallback_speech_recognizer_vendor}),
...(app.fallback_speech_recognizer_label && {fallback_label: app.fallback_speech_recognizer_label}),
...(app.fallback_speech_recognizer_language && {fallback_language: app.fallback_speech_recognizer_language})
}
};
const params = Object.assign(['POST', 'WS'].includes(app.call_hook.method) ? { sip: req.msg } : {},
req.locals.callInfo,
{ service_provider_sid: req.locals.service_provider_sid },
{ defaults });
{
defaults: {
synthesizer: {
vendor: app.speech_synthesis_vendor,
language: app.speech_synthesis_language,
voice: app.speech_synthesis_voice
},
recognizer: {
vendor: app.speech_recognizer_vendor,
language: app.speech_recognizer_language
}
}
});
logger.debug({ params }, 'sending initial webhook');
const obj = rootSpan.startChildSpan('performAppWebhook');
span = obj.span;


@@ -1,6 +1,4 @@
const CallSession = require('./call-session');
const {CallStatus} = require('../utils/constants');
const moment = require('moment');
/**
* @classdesc Subclass of CallSession. Represents a CallSession
@@ -21,14 +19,12 @@ class AdultingCallSession extends CallSession {
rootSpan
});
this.sd = singleDialer;
this.req = callInfo.req;
this.sd.dlg.on('destroy', () => {
this.logger.info('AdultingCallSession: called party hung up');
this._callReleased();
});
this.sd.emit('adulting');
this.on('callStatusChange', this._notifyCallStatusChange.bind(this));
}
get dlg() {
@@ -53,26 +49,6 @@ class AdultingCallSession extends CallSession {
}
_callerHungup() {
this._hangup('caller');
}
_jambonzHangup() {
this._hangup();
}
_hangup(terminatedBy = 'jambonz') {
if (this.dlg.connectTime) {
const duration = moment().diff(this.dlg.connectTime, 'seconds');
this.rootSpan.setAttributes({'call.termination': `hangup by ${terminatedBy}`});
this.callInfo.callTerminationBy = terminatedBy;
this.emit('callStatusChange', {
callStatus: CallStatus.Completed,
duration
});
}
this.logger.info(`InboundCallSession: ${terminatedBy} hung up`);
this._callReleased();
this.req.removeAllListeners('cancel');
}
}

File diff suppressed because it is too large.


@@ -34,9 +34,6 @@ class ConfirmCallSession extends CallSession {
_callerHungup() {
}
_jambonzHangup() {
}
}


@@ -67,27 +67,15 @@ class InboundCallSession extends CallSession {
* This is invoked when the caller hangs up, in order to calculate the call duration.
*/
_callerHungup() {
this._hangup('caller');
}
_jambonzHangup() {
this.dlg?.destroy();
}
_hangup(terminatedBy = 'jambonz') {
if (this.dlg === null) {
this.logger.info('InboundCallSession:_hangup - race condition, dlg cleared by app hangup');
return;
}
this.logger.info(`InboundCallSession: ${terminatedBy} hung up`);
assert(this.dlg.connectTime);
const duration = moment().diff(this.dlg.connectTime, 'seconds');
this.rootSpan.setAttributes({'call.termination': `hangup by ${terminatedBy}`});
this.callInfo.callTerminationBy = terminatedBy;
this.rootSpan.setAttributes({'call.termination': 'hangup by caller'});
this.callInfo.callTerminationBy = 'caller';
this.emit('callStatusChange', {
callStatus: CallStatus.Completed,
duration
});
this.logger.info('InboundCallSession: caller hung up');
this._callReleased();
this.req.removeAllListeners('cancel');
}


@@ -1,9 +1,6 @@
const CallSession = require('./call-session');
const {CallStatus} = require('../utils/constants');
const moment = require('moment');
const {parseUri} = require('drachtio-srf');
const { normalizeJambones } = require('@jambonz/verb-specifications');
const makeTask = require('../tasks/make_task');
/**
* @classdesc Subclass of CallSession. This represents a CallSession that is
@@ -11,7 +8,7 @@ const makeTask = require('../tasks/make_task');
* @extends CallSession
*/
class RestCallSession extends CallSession {
constructor({logger, application, srf, req, ep, ep2, tasks, callInfo, accountInfo, rootSpan}) {
constructor({logger, application, srf, req, ep, tasks, callInfo, accountInfo, rootSpan}) {
super({
logger,
application,
@@ -24,7 +21,6 @@ class RestCallSession extends CallSession {
});
this.req = req;
this.ep = ep;
this.ep2 = ep2;
// keep restDialTask reference for closing AMD
if (tasks.length) {
this.restDialTask = tasks[0];
@@ -45,81 +41,20 @@ class RestCallSession extends CallSession {
setDialog(dlg) {
this.dlg = dlg;
dlg.on('destroy', this._callerHungup.bind(this));
dlg.on('refer', this._onRefer.bind(this));
this.wrapDialog(dlg);
}
/**
* global referHook
*/
set referHook(hook) {
this._referHook = hook;
}
/**
* This is invoked when the called party sends REFER to Jambonz.
*/
async _onRefer(req, res) {
if (this._referHook) {
try {
const to = parseUri(req.getParsedHeader('Refer-To').uri);
const by = parseUri(req.getParsedHeader('Referred-By').uri);
const b3 = this.b3;
const httpHeaders = b3 && {b3};
const json = await this.requestor.request('verb:hook', this._referHook, {
...(this.callInfo.toJSON()),
refer_details: {
sip_refer_to: req.get('Refer-To'),
sip_referred_by: req.get('Referred-By'),
sip_user_agent: req.get('User-Agent'),
refer_to_user: to.scheme === 'tel' ? to.number : to.user,
referred_by_user: by.scheme === 'tel' ? by.number : by.user,
referring_call_sid: this.callSid,
referred_call_sid: null,
}
}, httpHeaders);
if (json && Array.isArray(json)) {
const tasks = normalizeJambones(this.logger, json).map((tdata) => makeTask(this.logger, tdata));
if (tasks && tasks.length > 0) {
this.logger.info('RestCallSession:handleRefer received REFER, get new tasks');
this.replaceApplication(tasks);
if (this.wakeupResolver) {
this.wakeupResolver({reason: 'RestCallSession: referHook new taks'});
this.wakeupResolver = null;
}
}
}
res.send(202);
this.logger.info('RestCallSession:handleRefer - sent 202 Accepted');
} catch (err) {
this.logger.error({err}, 'RestCallSession:handleRefer - error while asking referHook');
res.send(err.statusCode || 501);
}
} else {
res.send(501);
}
}
/**
* This is invoked when the called party hangs up, in order to calculate the call duration.
*/
_callerHungup() {
this._hangup('caller');
}
_jambonzHangup() {
this._hangup();
}
_hangup(terminatedBy = 'jambonz') {
if (this.restDialTask) {
this.restDialTask.turnOffAmd();
}
this.callInfo.callTerminationBy = terminatedBy;
this.callInfo.callTerminationBy = 'caller';
const duration = moment().diff(this.dlg.connectTime, 'seconds');
this.emit('callStatusChange', {callStatus: CallStatus.Completed, duration});
this.logger.debug(`RestCallSession: called party hung up by ${terminatedBy}`);
this.logger.debug('RestCallSession: called party hung up');
this._callReleased();
}


@@ -1,22 +0,0 @@
const Task = require('./task');
const {TaskName, TaskPreconditions} = require('../utils/constants');
/**
* Answer the call.
* Note: This is rarely used, as the call is typically answered automatically when required by the app,
* but it can be useful to force an answer before a pause in some cases
*/
class TaskAnswer extends Task {
constructor(logger, opts) {
super(logger, opts);
this.preconditions = TaskPreconditions.Endpoint;
}
get name() { return TaskName.Answer; }
async exec(cs) {
super.exec(cs);
}
}
module.exports = TaskAnswer;


@@ -6,7 +6,6 @@ const { normalizeJambones } = require('@jambonz/verb-specifications');
const makeTask = require('./make_task');
const bent = require('bent');
const assert = require('assert');
const HttpRequestor = require('../utils/http-requestor');
const WAIT = 'wait';
const JOIN = 'join';
const START = 'start';
@@ -61,8 +60,6 @@ class Conference extends Task {
this.emitter = new Emitter();
this.results = {};
this.coaching = [];
this.speakOnlyTo = this.data.speakOnlyTo;
// transferred from another server in order to bridge to a local caller?
if (this.data._ && this.data._.connectTime) {
@@ -351,29 +348,16 @@ class Conference extends Task {
Object.assign(opts, {flags: {
...(this.endConferenceOnExit && {endconf: true}),
...(this.startConferenceOnEnter && {moderator: true}),
//https://developer.signalwire.com/freeswitch/FreeSWITCH-Explained/Modules/mod_conference_3965534/
// mute | Enter conference muted
...((this.joinMuted || this.speakOnlyTo) && {mute: true}),
...(this.joinMuted && {joinMuted: true}),
}});
/**
* Note on the above: if we are joining in "coaching" mode (ie only going to heard by a subset of participants)
* then we join muted temporarily, and then unmute ourselves once we have identified the subset of participants
* to whom we will be speaking.
*/
}
try {
const {memberId, confUuid} = await this.ep.join(this.confName, opts);
this.logger.debug({memberId, confUuid}, `Conference:_joinConference: successfully joined ${this.confName}`);
this.memberId = parseInt(memberId, 10);
this.memberId = memberId;
this.confUuid = confUuid;
// set a tag for this member, if provided
if (this.data.memberTag) {
this.setMemberTag(this.data.memberTag);
}
cs.setConferenceDetails(memberId, this.confName, confUuid);
const response = await this.ep.api('conference', [this.confName, 'get', 'count']);
if (response.body && /\d+/.test(response.body)) this.participantCount = parseInt(response.body);
@@ -400,9 +384,6 @@ class Conference extends Task {
.catch((err) => {});
}
if (this.speakOnlyTo) {
this.setCoachMode(this.speakOnlyTo);
}
} catch (err) {
this.logger.error(err, `Failed to join conference ${this.confName}`);
throw err;
@@ -447,15 +428,7 @@ class Conference extends Task {
}
}
doConferenceMute(cs, opts) {
assert (cs.isInConference);
const mute = opts.conf_mute_status === 'mute';
this.ep.api(`conference ${this.confName} ${mute ? 'mute' : 'unmute'} ${this.memberId}`)
.catch((err) => this.logger.info({err}, 'Error muting or unmuting participant'));
}
doConferenceHold(cs, opts) {
async doConferenceHold(cs, opts) {
assert (cs.isInConference);
const {conf_hold_status, wait_hook} = opts;
@@ -492,43 +465,6 @@ class Conference extends Task {
}
}
async doConferenceParticipantAction(cs, opts) {
const {action, tag} = opts;
switch (action) {
case 'tag':
await this.setMemberTag(tag);
break;
case 'untag':
await this.clearMemberTag();
break;
case 'coach':
await this.setCoachMode(tag);
break;
case 'uncoach':
await this.clearCoachMode();
break;
case 'hold':
this.doConferenceHold(cs, {conf_hold_status: 'hold'});
break;
case 'unhold':
this.doConferenceHold(cs, {conf_hold_status: 'unhold'});
break;
case 'mute':
this.doConferenceMute(cs, {conf_mute_status: 'mute'});
break;
case 'unmute':
this.doConferenceMute(cs, {conf_mute_status: 'unmute'});
break;
case 'kick':
this.kickMember(cs);
break;
default:
this.logger.info(`Conference:doConferenceParticipantAction - unhandled action ${action}`);
break;
}
}
async _doWaitHookWhileOnHold(cs, dlg, wait_hook) {
do {
try {
@@ -575,7 +511,7 @@ class Conference extends Task {
_normalizeHook(cs, hook) {
if (typeof hook === 'object') return hook;
const url = hook.startsWith('/') ?
`${cs.application.requestor instanceof HttpRequestor ? cs.application.requestor.baseUrl : ''}${hook}` :
`${cs.application.requestor.baseUrl}${hook}` :
hook;
return { url } ;
@@ -594,7 +530,7 @@ class Conference extends Task {
const response = await this.ep.api('conference', [this.confName, 'get', 'count']);
if (response.body && confNoMatch(response.body)) this.participantCount = 0;
else if (response.body && /^\d+$/.test(response.body)) this.participantCount = parseInt(response.body) - 1;
this.logger.debug(`Conference:_doFinalMemberCheck conference count ${this.participantCount}`);
this.logger.debug({response}, `Conference:_doFinalMemberCheck conference count ${this.participantCount}`);
} catch (err) {
this.logger.info({err}, 'Conference:_doFinalMemberCheck error retrieving count (we were probably kicked');
}
@@ -706,19 +642,11 @@ class Conference extends Task {
}
// conference event handlers
_onAddMember(logger, cs, evt) {
const memberId = parseInt(evt.getHeader('Member-ID')) ;
if (this.speakOnlyTo) {
logger.debug(`Conference:_onAddMember - member ${memberId} added to ${this.confName}, updating coaching mode`);
this.setCoachMode(this.speakOnlyTo).catch(() => {});
}
else logger.debug(`Conference:_onAddMember - member ${memberId} added to conference ${this.confName}`);
}
_onDelMember(logger, cs, evt) {
const memberId = parseInt(evt.getHeader('Member-ID')) ;
this.participantCount = parseInt(evt.getHeader('Conference-Size'));
if (memberId === this.memberId) {
logger.info(`Conference:_onDelMember - I was dropped from conference ${this.confName}, task is complete`);
this.logger.info(`Conference:_onDelMember - I was dropped from conference ${this.confName}, task is complete`);
this.replaceEndpointAndEnd(cs);
}
}
@@ -747,99 +675,6 @@ class Conference extends Task {
}
}
_onTag(logger, cs, evt) {
const memberId = parseInt(evt.getHeader('Member-ID')) ;
const tag = evt.getHeader('Tag') || '';
if (memberId !== this.memberId && this.speakOnlyTo) {
logger.info(`Conference:_onTag - member ${memberId} set tag to '${tag }'; updating coach mode accordingly`);
this.setCoachMode(this.speakOnlyTo).catch(() => {});
}
}
/**
* Set the conference to "coaching" mode, where the audio of the participant is only heard
* by a subset of the participants in the conference.
* We do this by first getting all of the members who do *not* have this tag, and then
* we configure this members audio to not be sent to them.
* @param {string} speakOnlyTo - tag of the members who should receive our audio
*
* N.B.: this feature requires jambonz patches to freeswitch mod_conference
*/
async setCoachMode(speakOnlyTo) {
this.speakOnlyTo = speakOnlyTo;
if (!this.memberId) {
this.logger.info('Conference:_setCoachMode: no member id yet');
return;
}
try {
const members = (await this.ep.getNonMatchingConfParticipants(this.confName, speakOnlyTo))
.filter((m) => m !== this.memberId);
if (members.length === 0) {
this.logger.info({members}, 'Conference:_setCoachMode: all participants have the tag, so all will hear me');
if (this.coaching.length) {
await this.ep.api('conference', [this.confName, 'relate', this.memberId, this.coaching.join(','), 'clear']);
this.coaching = [];
}
}
else {
const memberList = members.join(',');
this.logger.info(`Conference:_setCoachMode: my audio will NOT be sent to ${memberList}`);
await this.ep.api('conference', [this.confName, 'relate', this.memberId, memberList, 'nospeak']);
this.coaching = members;
}
} catch (err) {
this.logger.error({err, speakOnlyTo}, '_setCoachMode: Error');
}
}
async clearCoachMode() {
if (!this.memberId) return;
try {
if (this.coaching.length === 0) {
this.logger.info('Conference:_clearCoachMode: no coaching mode to clear');
}
else {
const memberList = this.coaching.join(',');
this.logger.info(`Conference:_clearCoachMode: now sending my audio to all, including ${memberList}`);
await this.ep.api('conference', [this.confName, 'relate', this.memberId, memberList, 'clear']);
}
this.speakOnlyTo = null;
this.coaching = [];
} catch (err) {
this.logger.error({err}, '_clearCoachMode: Error');
}
}
async setMemberTag(tag) {
try {
await this.ep.api('conference', [this.confName, 'tag', this.memberId, tag]);
this.logger.info(`Conference:setMemberTag: set tag for ${this.memberId} to ${tag}`);
this.memberTag = tag;
} catch (err) {
this.logger.error({err}, `Error setting tag for ${this.memberId} to ${tag}`);
}
}
async clearMemberTag() {
try {
await this.ep.api('conference', [this.confName, 'tag', this.memberId]);
this.logger.info(`Conference:setMemberTag: clearing tag for ${this.memberId}`);
this.memberTag = null;
} catch (err) {
this.logger.error({err}, `Error clearing tag for ${this.memberId}`);
}
}
async kickMember(cs) {
assert(cs.isInConference);
try {
await this.ep.api('conference', [this.confName, 'kick', this.memberId]);
this.logger.info(`Conference:kickMember: kick ${this.memberId} out of conference ${this.confName}`);
} catch (err) {
this.logger.error({err}, `Error kicking member out of conference for ${this.memberId}`);
}
}
}
module.exports = Conference;
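For orientation, here is a minimal sketch of the underlying FreeSWITCH conference API calls that setCoachMode/clearCoachMode issue above; the endpoint, conference name and member ids are hypothetical placeholders, and the api() signature mirrors the calls shown in the diff.
// Illustrative only, not part of the source
async function sketchCoachMode(ep, confName, myMemberId, othersWhoShouldNotHearMe) {
  // stop sending my audio to the listed members ("coaching" mode)
  await ep.api('conference', [confName, 'relate', myMemberId, othersWhoShouldNotHearMe.join(','), 'nospeak']);
  // later: restore normal audio routing to those members
  await ep.api('conference', [confName, 'relate', myMemberId, othersWhoShouldNotHearMe.join(','), 'clear']);
}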

View File

@@ -1,22 +1,15 @@
const Task = require('./task');
const {TaskName, TaskPreconditions} = require('../utils/constants');
const parseDecibels = require('../utils/parse-decibels');
class TaskConfig extends Task {
constructor(logger, opts) {
super(logger, opts);
[
'synthesizer',
'recognizer',
'bargeIn',
'record',
'listen',
'transcribe',
'fillerNoise',
'actionHookDelayAction',
'boostAudioSignal',
'vad'
'listen'
].forEach((k) => this[k] = this.data[k] || {});
if ('notifyEvents' in this.data) {
@@ -37,13 +30,6 @@ class TaskConfig extends Task {
if (this.bargeIn[k]) this.gatherOpts[k] = this.bargeIn[k];
});
}
if (this.transcribe?.enable) {
this.transcribeOpts = {
verb: 'transcribe',
...this.transcribe
};
delete this.transcribeOpts.enable;
}
if (this.data.reset) {
if (typeof this.data.reset === 'string') this.data.reset = [this.data.reset];
@@ -51,12 +37,7 @@ class TaskConfig extends Task {
else this.data.reset = [];
if (this.bargeIn.sticky) this.autoEnable = true;
this.preconditions = (this.bargeIn.enable ||
this.record?.action ||
this.listen?.url ||
this.data.amd ||
'boostAudioSignal' in this.data ||
this.transcribe?.enable) ?
this.preconditions = (this.bargeIn.enable || this.record?.action || this.listen?.url || this.data.amd) ?
TaskPreconditions.Endpoint :
TaskPreconditions.None;
@@ -69,10 +50,6 @@ class TaskConfig extends Task {
get hasRecognizer() { return Object.keys(this.recognizer).length; }
get hasRecording() { return Object.keys(this.record).length; }
get hasListen() { return Object.keys(this.listen).length; }
get hasTranscribe() { return Object.keys(this.transcribe).length; }
get hasDub() { return Object.keys(this.dub).length; }
get hasVad() { return Object.keys(this.vad).length; }
get hasFillerNoise() { return Object.keys(this.fillerNoise).length; }
get summary() {
const phrase = [];
@@ -95,14 +72,9 @@ class TaskConfig extends Task {
if (this.hasListen) {
phrase.push(this.listen.enable ? `listen ${this.listen.url}` : 'stop listen');
}
if (this.hasTranscribe) {
phrase.push(this.transcribe.enable ? `transcribe ${this.transcribe.transcriptionHook}` : 'stop transcribe');
}
if (this.hasFillerNoise) phrase.push(`fillerNoise ${this.fillerNoise.enable ? 'on' : 'off'}`);
if (this.data.amd) phrase.push('enable amd');
if (this.notifyEvents) phrase.push(`event notification ${this.notifyEvents ? 'on' : 'off'}`);
if (this.onHoldMusic) phrase.push(`onHoldMusic: ${this.onHoldMusic}`);
if ('boostAudioSignal' in this.data) phrase.push(`setGain ${this.data.boostAudioSignal}`);
return `${this.name}{${phrase.join(',')}}`;
}
@@ -137,12 +109,12 @@ class TaskConfig extends Task {
});
if (this.hasSynthesizer) {
cs.synthesizer = this.synthesizer;
cs.speechSynthesisVendor = this.synthesizer.vendor !== 'default'
? this.synthesizer.vendor
: cs.speechSynthesisVendor;
cs.speechSynthesisLabel = this.synthesizer.label === 'default'
? cs.speechSynthesisLabel : this.synthesizer.label;
cs.speechSynthesisLabel = this.synthesizer.label !== 'default'
? this.synthesizer.label
: cs.speechSynthesisLabel;
cs.speechSynthesisLanguage = this.synthesizer.language !== 'default'
? this.synthesizer.language
: cs.speechSynthesisLanguage;
@@ -154,25 +126,24 @@ class TaskConfig extends Task {
cs.fallbackSpeechSynthesisVendor = this.synthesizer.fallbackVendor !== 'default'
? this.synthesizer.fallbackVendor
: cs.fallbackSpeechSynthesisVendor;
cs.fallbackSpeechSynthesisLabel = this.synthesizer.fallbackLabel === 'default'
? cs.fallbackSpeechSynthesisLabel : this.synthesizer.fallbackLabel;
cs.fallbackSpeechSynthesisLabel = this.synthesizer.fallbackLabel !== 'default'
? this.synthesizer.fallbackLabel
: cs.fallbackSpeechSynthesisLabel;
cs.fallbackSpeechSynthesisLanguage = this.synthesizer.fallbackLanguage !== 'default'
? this.synthesizer.fallbackLanguage
: cs.fallbackSpeechSynthesisLanguage;
cs.fallbackSpeechSynthesisVoice = this.synthesizer.fallbackVoice !== 'default'
? this.synthesizer.fallbackVoice
: cs.fallbackSpeechSynthesisVoice;
// new vendor is set, reset fallback vendor
cs.hasFallbackTts = false;
this.logger.info({synthesizer: this.synthesizer}, 'Config: updated synthesizer');
}
if (this.hasRecognizer) {
cs.recognizer = this.recognizer;
cs.speechRecognizerVendor = this.recognizer.vendor !== 'default'
? this.recognizer.vendor
: cs.speechRecognizerVendor;
cs.speechRecognizerLabel = this.recognizer.label === 'default'
? cs.speechRecognizerLabel : this.recognizer.label;
cs.speechRecognizerLabel = this.recognizer.label !== 'default'
? this.recognizer.label
: cs.speechRecognizerLabel;
cs.speechRecognizerLanguage = this.recognizer.language !== 'default'
? this.recognizer.language
: cs.speechRecognizerLanguage;
@@ -181,9 +152,9 @@ class TaskConfig extends Task {
cs.fallbackSpeechRecognizerVendor = this.recognizer.fallbackVendor !== 'default'
? this.recognizer.fallbackVendor
: cs.fallbackSpeechRecognizerVendor;
cs.fallbackSpeechRecognizerLabel = this.recognizer.fallbackLabel === 'default' ?
cs.fallbackSpeechRecognizerLabel :
this.recognizer.fallbackLabel;
cs.fallbackSpeechRecognizerLabel = this.recognizer.fallbackLabel !== 'default'
? this.recognizer.fallbackLabel
: cs.fallbackSpeechRecognizerLabel;
cs.fallbackSpeechRecognizerLanguage = this.recognizer.fallbackLanguage !== 'default'
? this.recognizer.fallbackLanguage
: cs.fallbackSpeechRecognizerLanguage;
@@ -207,8 +178,6 @@ class TaskConfig extends Task {
if ('punctuation' in this.recognizer) {
cs.globalSttPunctuation = this.recognizer.punctuation;
}
// new vendor is set, reset fallback vendor
cs.hasFallbackAsr = false;
this.logger.info({
recognizer: this.recognizer,
isContinuousAsr: cs.isContinuousAsr
@@ -241,63 +210,12 @@ class TaskConfig extends Task {
const {enable, ...opts} = this.listen;
if (enable) {
this.logger.debug({opts}, 'Config: enabling listen');
cs.startBackgroundTask('listen', {verb: 'listen', ...opts});
cs.startBackgroundListen({verb: 'listen', ...opts});
} else {
this.logger.info('Config: disabling listen');
cs.stopBackgroundTask('listen');
cs.stopBackgroundListen();
}
}
if (this.hasTranscribe) {
if (this.transcribe.enable) {
if (!this.transcribeOpts.recognizer) {
this.transcribeOpts.recognizer = this.hasRecognizer ?
this.recognizer :
{
vendor: cs.speechRecognizerVendor,
language: cs.speechRecognizerLanguage
};
}
this.logger.debug(this.transcribeOpts, 'Config: enabling transcribe');
cs.startBackgroundTask('transcribe', this.transcribeOpts);
} else {
this.logger.info('Config: disabling transcribe');
cs.stopBackgroundTask('transcribe');
}
}
if (Object.keys(this.actionHookDelayAction).length !== 0) {
cs.actionHookDelayProperties = this.actionHookDelayAction;
}
if (this.data.sipRequestWithinDialogHook) {
cs.sipRequestWithinDialogHook = this.data.sipRequestWithinDialogHook;
}
if ('boostAudioSignal' in this.data) {
const db = parseDecibels(this.data.boostAudioSignal);
this.logger.info(`Config: boosting audio signal by ${db} dB`);
const args = [ep.uuid, 'setGain', db];
ep.api('uuid_dub', args).catch((err) => {
this.logger.error(err, 'Error boosting audio signal');
});
}
if (this.hasFillerNoise) {
const {enable, ...opts} = this.fillerNoise;
this.logger.info({fillerNoise: this.fillerNoise}, 'Config: fillerNoise');
if (!enable) cs.disableFillerNoise();
else {
cs.enableFillerNoise(opts);
}
}
if (this.hasVad) {
cs.vad = {
enable: this.vad.enable || false,
voiceMs: this.vad.voiceMs || 250,
silenceMs: this.vad.silenceMs || 150,
strategy: this.vad.strategy || 'one-shot',
mode: this.vad.mode || 2
};
}
}
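For reference, a hypothetical config verb payload that would exercise several of the branches above; the verb name and all field values are illustrative assumptions, not taken from the source.
// Illustrative only
const configVerb = {
  verb: 'config',                                   // assumed verb name for TaskConfig
  recognizer:  { vendor: 'default', language: 'en-US' },
  synthesizer: { vendor: 'default', language: 'en-US', voice: 'default' },
  vad: { enable: true, voiceMs: 250, silenceMs: 150, strategy: 'one-shot', mode: 2 },
  boostAudioSignal: 6                               // passed through parseDecibels above
};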
async kill(cs) {

View File

@@ -73,8 +73,7 @@ class TaskDequeue extends Task {
try {
let url;
if (this.callSid) {
const r = await retrieveByPatternSortedSet(this.queueName, `*${this.callSid}`);
url = r[0];
url = await retrieveByPatternSortedSet(this.queueName, `*${this.callSid}`);
} else {
url = await retrieveFromSortedSet(this.queueName);
}
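A hedged sketch of the two lookup paths above, with an invented queue name and callSid pattern; in the variant that uses retrieveByPatternSortedSet, the first match is taken as the waiting call's url.
// Illustrative only; assumes the db helpers destructured earlier in this file are in scope
async function sketchDequeueLookup() {
  const matches = await retrieveByPatternSortedSet('support', '*CA1234'); // a specific call by callSid pattern
  const urlForCall = matches[0];
  const urlOldest = await retrieveFromSortedSet('support');               // oldest waiting call
  return urlForCall || urlOldest;
}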

View File

@@ -14,11 +14,10 @@ const sessionTracker = require('../session/session-tracker');
const DtmfCollector = require('../utils/dtmf-collector');
const ConfirmCallSession = require('../session/confirm-call-session');
const dbUtils = require('../utils/db-utils');
const parseDecibels = require('../utils/parse-decibels');
const debug = require('debug')('jambonz:feature-server');
const {parseUri} = require('drachtio-srf');
const {ANCHOR_MEDIA_ALWAYS, JAMBONZ_DISABLE_DIAL_PAI_HEADER} = require('../config');
const { isOnhold, isOpusFirst } = require('../utils/sdp-utils');
const {ANCHOR_MEDIA_ALWAYS} = require('../config');
const { isOnhold } = require('../utils/sdp-utils');
const { normalizeJambones } = require('@jambonz/verb-specifications');
function parseDtmfOptions(logger, dtmfCapture) {
@@ -101,8 +100,6 @@ class TaskDial extends Task {
this.referHook = this.data.referHook;
this.dtmfHook = this.data.dtmfHook;
this.proxy = this.data.proxy;
this.tag = this.data.tag;
this.boostAudioSignal = this.data.boostAudioSignal;
if (this.dtmfHook) {
const {parentDtmfCollector, childDtmfCollector} = parseDtmfOptions(logger, this.data.dtmfCapture || {});
@@ -120,9 +117,6 @@ class TaskDial extends Task {
if (this.data.transcribe) {
this.transcribeTask = makeTask(logger, {'transcribe' : this.data.transcribe}, this);
}
if (this.data.dub && Array.isArray(this.data.dub) && this.data.dub.length > 0) {
this.dubTasks = this.data.dub.map((d) => makeTask(logger, {'dub': d}, this));
}
this.results = {};
this.bridged = false;
@@ -144,17 +138,15 @@ class TaskDial extends Task {
get name() { return TaskName.Dial; }
get isOnHoldEnabled() {
return !!this.data.onHoldHook;
get isOnHold() {
return this.isIncomingLegHold || this.isOutgoingLegHold;
}
get canReleaseMedia() {
const keepAnchor = this.data.anchorMedia ||
this.cs.isBackGroundListen ||
this.cs.onHoldMusic ||
ANCHOR_MEDIA_ALWAYS ||
this.listenTask ||
this.dubTasks ||
this.transcribeTask ||
this.startAmd;
@@ -212,7 +204,6 @@ class TaskDial extends Task {
await this.performAction(this.results, this.killReason !== KillReason.Replaced);
this._removeDtmfDetection(cs.dlg);
this._removeDtmfDetection(this.dlg);
this._removeSipIndialogRequestListener(this.dlg);
} catch (err) {
this.logger.error({err}, 'TaskDial:exec terminating with error');
this.kill(cs);
@@ -330,41 +321,18 @@ class TaskDial extends Task {
const to = parseUri(req.getParsedHeader('Refer-To').uri);
const by = parseUri(req.getParsedHeader('Referred-By').uri);
this.logger.info({to}, 'refer to parsed');
const json = await cs.requestor.request('verb:hook', this.referHook, {
...(callInfo.toJSON()),
await cs.requestor.request('verb:hook', this.referHook, {
...callInfo,
refer_details: {
sip_refer_to: req.get('Refer-To'),
sip_referred_by: req.get('Referred-By'),
sip_user_agent: req.get('User-Agent'),
refer_to_user: to.scheme === 'tel' ? to.number : to.user,
referred_by_user: by.scheme === 'tel' ? by.number : by.user,
refer_to_user: to.user,
referred_by_user: by.user,
referring_call_sid,
referred_call_sid
}
}, httpHeaders);
if (json && Array.isArray(json)) {
try {
const logger = isChild ? this.logger : this.sd.logger;
const tasks = normalizeJambones(logger, json).map((tdata) => makeTask(this.logger, tdata));
if (tasks && tasks.length > 0) {
const legs = isChild ? ['child', 'parent'] : ['parent', 'child'];
logger.info(`Dial:handleRefer received REFER on ${legs[0]} leg, setting new app on ${legs[1]} leg`);
if (isChild) this.redirect(cs, tasks);
else {
logger.info({tasks: json}, 'Dial:handleRefer - new application for child leg');
const adultingSession = await this.sd.doAdulting({
logger,
application: cs.application,
tasks
});
/* need to update the callSid of the child with its own (new) AdultingCallSession */
sessionTracker.add(adultingSession.callSid, adultingSession);
}
}
} catch (err) {
this.logger.info(err, 'Dial:handleRefer - error setting new application after receiving REFER');
}
}
res.send(202);
this.logger.info('DialTask:handleRefer - sent 202 Accepted');
} catch (err) {
@@ -407,14 +375,8 @@ class TaskDial extends Task {
}
_onInfo(cs, dlg, req, res) {
// SIP Indialog will be handled by another handler
if (cs.sipRequestWithinDialogHook) {
return;
}
res.send(200);
if (req.get('Content-Type') !== 'application/dtmf-relay') {
return;
}
if (req.get('Content-Type') !== 'application/dtmf-relay') return;
const dtmfDetector = dlg === cs.dlg ? this.parentDtmfCollector : this.childDtmfCollector;
if (!dtmfDetector) return;
@@ -443,20 +405,6 @@ class TaskDial extends Task {
}
}
_initSipIndialogRequestListener(cs, dlg) {
dlg.on('info', this._onRequestWithinDialog.bind(this, cs));
dlg.on('message', this._onRequestWithinDialog.bind(this, cs));
}
_removeSipIndialogRequestListener(dlg) {
dlg && dlg.removeAllListeners('message');
dlg && dlg.removeAllListeners('info');
}
async _onRequestWithinDialog(cs, req, res) {
cs._onRequestWithinDialog(req, res);
}
async _initializeInbound(cs) {
const {ep} = await cs._evalEndpointPrecondition(this);
this.epOther = ep;
@@ -485,8 +433,7 @@ class TaskDial extends Task {
this.headers = {
'X-Account-Sid': cs.accountSid,
...(req && req.has('X-CID') && {'X-CID': req.get('X-CID')}),
...(req && req.has('P-Asserted-Identity') && !JAMBONZ_DISABLE_DIAL_PAI_HEADER &&
{'P-Asserted-Identity': req.get('P-Asserted-Identity')}),
...(req && req.has('P-Asserted-Identity') && {'P-Asserted-Identity': req.get('P-Asserted-Identity')}),
...(req && req.has('X-Voip-Carrier-Sid') && {'X-Voip-Carrier-Sid': req.get('X-Voip-Carrier-Sid')}),
// Put headers at the end to make sure opt.headers override all default behavior.
...this.headers
@@ -496,8 +443,7 @@ class TaskDial extends Task {
headers: this.headers,
proxy: `sip:${sbcAddress}`,
callingNumber: this.callerId || req.callingNumber,
...(this.callerName && {callingName: this.callerName}),
opusFirst: isOpusFirst(this.cs.ep.remote.sdp)
...(this.callerName && {callingName: this.callerName})
};
const t = this.target.find((t) => t.type === 'teams');
@@ -557,9 +503,9 @@ class TaskDial extends Task {
const str = this.callerId || req.callingNumber || '';
const callingNumber = str.startsWith('+') ? str.substring(1) : str;
const voip_carrier_sid = await lookupCarrierByPhoneNumber(cs.accountSid, callingNumber);
this.logger.info(
`Dial:_attemptCalls: selected ${voip_carrier_sid} for requested phone number: ${callingNumber}`);
if (voip_carrier_sid) {
this.logger.info(
`Dial:_attemptCalls: selected voip_carrier_sid ${voip_carrier_sid} for callingNumber: ${callingNumber}`);
opts.headers['X-Requested-Carrier-Sid'] = voip_carrier_sid;
}
}
@@ -578,8 +524,7 @@ class TaskDial extends Task {
accountInfo: cs.accountInfo,
rootSpan: cs.rootSpan,
startSpan: this.startSpan.bind(this),
dialTask: this,
onHoldMusic: this.cs.onHoldMusic
dialTask: this
});
this.dials.set(sd.callSid, sd);
@@ -636,8 +581,6 @@ class TaskDial extends Task {
await this._connectSingleDial(cs, sd);
} catch (err) {
this.logger.info({err}, 'Dial:_attemptCalls - Error calling _connectSingleDial ');
sd.removeAllListeners();
this.kill(cs);
}
})
.on('decline', () => {
@@ -690,43 +633,22 @@ class TaskDial extends Task {
async _onReinvite(req, res) {
try {
let isHandled = false;
if (this.isOnHoldEnabled) {
if (isOnhold(req.body)) {
this.logger.debug('Dial: _onReinvite receive hold Request');
if (!this.epOther && !this.ep) {
this.logger.debug(`Dial: _onReinvite receive hold Request,
media already released, reconnect media server`);
// update caller leg for new SDP from callee.
await this.cs.handleReinviteAfterMediaReleased(req, res);
// Freeswitch media is released, reconnect
await this.reAnchorMedia(this.cs, this.sd);
this.isOutgoingLegHold = true;
} else {
this.logger.debug('Dial: _onReinvite receive hold Request, update SDP');
const newSdp = await this.ep.modify(req.body);
res.send(200, {body: newSdp});
}
if (this.cs.onHoldMusic) {
if (isOnhold(req.body) && !this.epOther && !this.ep) {
await this.cs.handleReinviteAfterMediaReleased(req, res);
// On hold, but media is already released;
// reconnect the A leg and respond to the B leg
await this.reAnchorMedia(this.cs, this.sd);
this.isOutgoingLegHold = true;
isHandled = true;
// Media already connected, ask for onHoldHook
this._onHoldHook(req);
} else if (!isOnhold(req.body)) {
this.logger.debug('Dial: _onReinvite receive unhold Request');
if (this.epOther && this.ep && this.isOutgoingLegHold && this.canReleaseMedia) {
this.logger.debug('Dial: _onReinvite receive unhold Request, release media');
// Offhold, time to release media
const newSdp = await this.ep.modify(req.body);
await res.send(200, {body: newSdp});
await this._releaseMedia(this.cs, this.sd);
this.isOutgoingLegHold = false;
} else {
this.logger.debug('Dial: _onReinvite receive unhold Request, update media server');
const newSdp = await this.ep.modify(req.body);
res.send(200, {body: newSdp});
}
if (this._onHoldSession) {
this._onHoldSession.kill();
}
this._onHoldHook();
} else if (!isOnhold(req.body) && this.epOther && this.ep && this.isOutgoingLegHold && this.canReleaseMedia) {
// Offhold, time to release media
const newSdp = await this.ep.modify(req.body);
await res.send(200, {body: newSdp});
await this._releaseMedia(this.cs, this.sd);
isHandled = true;
this.isOutgoingLegHold = false;
}
}
if (!isHandled) {
@@ -787,20 +709,8 @@ class TaskDial extends Task {
dialCallSid: sd.callSid,
});
if (this.dubTasks) {
for (const dub of this.dubTasks) {
try {
await dub.exec(cs, {ep: sd.ep});
}
catch (err) {
this.logger.error({err}, 'Dial:_selectSingleDial - error executing dubTask');
}
}
}
if (this.parentDtmfCollector) this._installDtmfDetection(cs, cs.dlg);
if (this.childDtmfCollector) this._installDtmfDetection(cs, this.dlg);
if (cs.sipRequestWithinDialogHook) this._initSipIndialogRequestListener(cs, this.dlg);
if (this.transcribeTask) this.transcribeTask.exec(cs, {ep: this.epOther, ep2:this.ep});
if (this.listenTask) this.listenTask.exec(cs, {ep: this.epOther});
@@ -812,18 +722,6 @@ class TaskDial extends Task {
}
}
/* boost audio signal if requested */
if (this.boostAudioSignal) {
try {
const db = parseDecibels(this.boostAudioSignal);
this.logger.info(`Dial: boosting audio signal by ${db} dB`);
const args = [this.ep.uuid, 'setGain', db];
await this.ep.api('uuid_dub', args);
} catch (err) {
this.logger.info({err}, 'Dial:_selectSingleDial - Error boosting audio signal');
}
}
/* if we can release the media back to the SBC, do so now */
if (this.canReleaseMedia) setTimeout(this._releaseMedia.bind(this, cs, sd), 200);
}
@@ -846,11 +744,9 @@ class TaskDial extends Task {
assert(cs.ep && sd.ep);
try {
// Wait until we have the new SDP from the B leg to offer to the A leg
const aLegSdp = cs.ep.remote.sdp;
await sd.releaseMediaToSBC(aLegSdp, cs.ep.local.sdp);
const bLegSdp = sd.dlg.remote.sdp;
await cs.releaseMediaToSBC(bLegSdp);
await Promise.all[sd.releaseMediaToSBC(aLegSdp, cs.ep.local.sdp), cs.releaseMediaToSBC(bLegSdp)];
this.epOther = null;
this.logger.info('Dial:_releaseMedia - successfully released media from freeswitch');
} catch (err) {
@@ -866,34 +762,23 @@ class TaskDial extends Task {
this.epOther = cs.ep;
}
// Handle RE-INVITE hold from caller leg.
async handleReinviteAfterMediaReleased(req, res) {
let isHandled = false;
if (this.isOnHoldEnabled) {
if (isOnhold(req.body)) {
if (!this.epOther && !this.ep) {
// update callee leg for new SDP from caller.
const sdp = await this.dlg.modify(req.body);
res.send(200, {body: sdp});
// Onhold but media is already released, reconnect
await this.reAnchorMedia(this.cs, this.sd);
isHandled = true;
this.isIncomingLegHold = true;
}
this._onHoldHook(req);
} else if (!isOnhold(req.body)) {
if (this.epOther && this.ep && this.isIncomingLegHold && this.canReleaseMedia) {
// Offhold, time to release media
const newSdp = await this.epOther.modify(req.body);
await res.send(200, {body: newSdp});
await this._releaseMedia(this.cs, this.sd);
isHandled = true;
}
this.isIncomingLegHold = false;
if (this._onHoldSession) {
this._onHoldSession.kill();
}
}
if (isOnhold(req.body) && !this.epOther && !this.ep) {
const sdp = await this.dlg.modify(req.body);
res.send(200, {body: sdp});
// Onhold but media is already released
await this.reAnchorMedia(this.cs, this.sd);
isHandled = true;
this.isIncomingLegHold = true;
this._onHoldHook();
} else if (!isOnhold(req.body) && this.epOther && this.ep && this.isIncomingLegHold && this.canReleaseMedia) {
// Offhold, time to release media
const newSdp = await this.epOther.modify(req.body);
await res.send(200, {body: newSdp});
await this._releaseMedia(this.cs, this.sd);
isHandled = true;
this.isIncomingLegHold = false;
}
if (!isHandled) {
@@ -912,7 +797,7 @@ class TaskDial extends Task {
});
}
async _onHoldHook(req, allowed = [TaskName.Play, TaskName.Say, TaskName.Pause]) {
async _onHoldHook(allowed = [TaskName.Play, TaskName.Say, TaskName.Pause]) {
if (this.data.onHoldHook) {
// send silence to preserve voice quality
await this.epOther.play('silence_stream://500');
@@ -922,13 +807,7 @@ class TaskDial extends Task {
const b3 = this.getTracingPropagation();
const httpHeaders = b3 && {b3};
const json = await this.cs.application.requestor.
request('verb:hook', this.data.onHoldHook, {
...this.cs.callInfo.toJSON(),
hold_detail: {
from: req.get('From'),
to: req.get('To')
}
}, httpHeaders);
request('verb:hook', this.data.onHoldHook, this.cs.callInfo.toJSON(), httpHeaders);
const tasks = normalizeJambones(this.logger, json).map((tdata) => makeTask(this.logger, tdata));
allowedTasks = tasks.filter((t) => allowed.includes(t.name));
if (tasks.length !== allowedTasks.length) {
@@ -937,7 +816,7 @@ class TaskDial extends Task {
}
this.logger.debug(`DialTask:_onHoldHook: executing ${tasks.length} tasks`);
if (tasks.length) {
this._onHoldSession = new ConfirmCallSession({
this._playSession = new ConfirmCallSession({
logger: this.logger,
application: this.cs.application,
dlg: this.isIncomingLegHold ? this.dlg : this.cs.dlg,
@@ -947,12 +826,12 @@ class TaskDial extends Task {
tasks,
rootSpan: this.cs.rootSpan
});
await this._onHoldSession.exec();
this._onHoldSession = null;
await this._playSession.exec();
this._playSession = null;
}
} catch (error) {
this.logger.info(error, 'DialTask:_onHoldHook: failed retrieving waitHook');
this._onHoldSession = null;
this._playSession = null;
break;
}
} while (allowedTasks && allowedTasks.length > 0 && !this.killed && this.isOnHold);
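To make the onHold flow above concrete, a hypothetical onHoldHook response as _onHoldHook would consume it; only play, say and pause tasks are executed while the leg stays on hold, anything else is filtered out. The text and url values are placeholders.
// Illustrative only
const onHoldHookResponse = [
  { verb: 'say', text: 'Please hold while the other party is away.' },
  { verb: 'play', url: 'https://example.com/hold-music.mp3' },
  { verb: 'pause', length: 5 }
];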

View File

@@ -58,13 +58,13 @@ class Dialogflow extends Task {
this.vendor = this.data.tts.vendor || 'default';
this.language = this.data.tts.language || 'default';
this.voice = this.data.tts.voice || 'default';
this.speechSynthesisLabel = this.data.tts.label;
this.speechSynthesisLabel = this.data.tts.label || 'default';
// fallback tts
this.fallbackVendor = this.data.tts.fallbackVendor || 'default';
this.fallbackLanguage = this.data.tts.fallbackLanguage || 'default';
this.fallbackVoice = this.data.tts.fallbackLanguage || 'default';
this.fallbackLabel = this.data.tts.fallbackLabel;
this.fallbackLabel = this.data.tts.fallbackLabel || 'default';
}
this.bargein = this.data.bargein;
}
@@ -235,8 +235,9 @@ class Dialogflow extends Task {
}
try {
const {filePath} = await this._fallbackSynthAudio(cs, intent, stats, synthAudio);
const {filePath, servedFromCache} = await this._fallbackSynthAudio(cs, intent, stats, synthAudio);
if (filePath) cs.trackTmpFile(filePath);
if (!this.ttsCredentials && !servedFromCache) cs.billForTts(intent.fulfillmentText.length);
if (this.playInProgress) {
await ep.api('uuid_break', ep.uuid).catch((err) => this.logger.info(err, 'Error killing audio'));

View File

@@ -1,144 +0,0 @@
const {TaskName} = require('../utils/constants');
const TtsTask = require('./tts-task');
const assert = require('assert');
const parseDecibels = require('../utils/parse-decibels');
/**
* Dub task: add or remove additional audio tracks into the call
*/
class TaskDub extends TtsTask {
constructor(logger, opts, parentTask) {
super(logger, opts, parentTask);
this.logger.debug({opts: this.data}, 'TaskDub constructor');
['action', 'track', 'play', 'say', 'loop'].forEach((prop) => {
this[prop] = this.data[prop];
});
this.gain = parseDecibels(this.data.gain);
assert.ok(this.action, 'TaskDub: action is required');
assert.ok(this.track, 'TaskDub: track is required');
}
get name() { return TaskName.Dub; }
async exec(cs, {ep}) {
super.exec(cs);
try {
switch (this.action) {
case 'addTrack':
await this._addTrack(cs, ep);
break;
case 'removeTrack':
await this._removeTrack(cs, ep);
break;
case 'silenceTrack':
await this._silenceTrack(cs, ep);
break;
case 'playOnTrack':
await this._playOnTrack(cs, ep);
break;
case 'sayOnTrack':
await this._sayOnTrack(cs, ep);
break;
default:
throw new Error(`TaskDub: unsupported action ${this.action}`);
}
} catch (err) {
this.logger.error(err, 'Error executing dub task');
}
}
async _addTrack(cs, ep) {
this.logger.info(`adding track: ${this.track}`);
await ep.dub({
action: 'addTrack',
track: this.track
});
if (this.play) await this._playOnTrack(cs, ep);
else if (this.say) await this._sayOnTrack(cs, ep);
}
async _removeTrack(_cs, ep) {
this.logger.info(`removing track: ${this.track}`);
await ep.dub({
action: 'removeTrack',
track: this.track
});
}
async _silenceTrack(_cs, ep) {
this.logger.info(`silencing track: ${this.track}`);
await ep.dub({
action: 'silenceTrack',
track: this.track
});
}
async _playOnTrack(_cs, ep) {
this.logger.info(`playing on track: ${this.track}`);
await ep.dub({
action: 'playOnTrack',
track: this.track,
play: this.play,
loop: this.loop ? 'loop' : 'once',
gain: this.gain
});
}
async _sayOnTrack(cs, ep) {
const text = this.say.text || this.say;
this.synthesizer = this.say.synthesizer || {};
if (Object.keys(this.synthesizer).length) {
this.logger.info({synthesizer: this.synthesizer},
`saying on track ${this.track}: ${text} with synthesizer options`);
}
else {
this.logger.info(`saying on track ${this.track}: ${text}`);
}
this.synthesizer = this.synthesizer || {};
this.text = [text];
const vendor = this.synthesizer.vendor && this.synthesizer.vendor !== 'default' ?
this.synthesizer.vendor :
cs.speechSynthesisVendor;
const language = this.synthesizer.language && this.synthesizer.language !== 'default' ?
this.synthesizer.language :
cs.speechSynthesisLanguage ;
const voice = this.synthesizer.voice && this.synthesizer.voice !== 'default' ?
this.synthesizer.voice :
cs.speechSynthesisVoice;
const label = this.synthesizer.label && this.synthesizer.label !== 'default' ?
this.synthesizer.label :
cs.speechSynthesisLabel;
const disableTtsStreaming = false;
const filepath = await this._synthesizeWithSpecificVendor(cs, ep, {
vendor, language, voice, label, disableTtsStreaming
});
assert.ok(filepath.length === 1, 'TaskDub: no filepath returned from synthesizer');
const path = filepath[0];
if (!path.startsWith('say:{')) {
/* we have a local file of mp3 or r8 of synthesized speech audio to play */
this.logger.info(`playing synthesized speech from file on track ${this.track}: ${path}`);
this.play = path;
await this._playOnTrack(cs, ep);
}
else {
this.logger.info(`doing actual text to speech file on track ${this.track}: ${path}`);
await ep.dub({
action: 'sayOnTrack',
track: this.track,
say: path,
gain: this.gain
});
}
}
}
module.exports = TaskDub;
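A hedged sketch of dub verb payloads matching the actions handled above; property names follow the constructor (action, track, play, say, loop, gain), while the track names, urls and text are invented.
// Illustrative only
const dubVerbs = [
  { verb: 'dub', action: 'addTrack', track: 'ambience' },
  { verb: 'dub', action: 'playOnTrack', track: 'ambience', play: 'https://example.com/rain.mp3', loop: true },
  { verb: 'dub', action: 'sayOnTrack', track: 'coach', say: { text: 'Remember to confirm the order number.' } },
  { verb: 'dub', action: 'silenceTrack', track: 'ambience' },
  { verb: 'dub', action: 'removeTrack', track: 'ambience' }
];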

View File

@@ -311,8 +311,7 @@ class TaskEnqueue extends Task {
}
}
async _playHook(cs, dlg, hook,
allowed = [TaskName.Play, TaskName.Say, TaskName.Pause, TaskName.Leave, TaskName.Tag]) {
async _playHook(cs, dlg, hook, allowed = [TaskName.Play, TaskName.Say, TaskName.Pause, TaskName.Leave]) {
const {sortedSetLength, sortedSetPositionByPattern} = cs.srf.locals.dbHelpers;
const b3 = this.getTracingPropagation();
const httpHeaders = b3 && {b3};
@@ -332,13 +331,11 @@ class TaskEnqueue extends Task {
queuePosition: queuePosition.length ? queuePosition[0] : 0,
callSid: this.cs.callSid,
callId: this.cs.callId,
customerData: this.cs.callInfo.customerData
});
} catch (err) {
this.logger.error({err}, `TaskEnqueue:_playHook error retrieving list info for queue ${this.queueName}`);
}
const json = await cs.application.requestor.request('verb:hook', hook, params, httpHeaders);
this.logger.debug({json}, 'TaskEnqueue:_playHook: received response from waitHook');
const tasks = normalizeJambones(this.logger, json).map((tdata) => makeTask(this.logger, tdata));
const allowedTasks = tasks.filter((t) => allowed.includes(t.name));
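For orientation, a hypothetical waitHook exchange built from the parameters assembled above; the request reports queue position and call identifiers, and the response may mix play/say/pause with leave to exit the queue. All values are placeholders.
// Illustrative request body sent to the waitHook
const waitHookParams = {
  queuePosition: 3,
  callSid: 'some-call-sid',
  callId: 'abc123@198.51.100.10',
  customerData: { accountNumber: '12345' }
};
// Illustrative response: keep the caller entertained, or let them leave the queue
const waitHookResponse = [
  { verb: 'play', url: 'https://example.com/queue-music.mp3' },
  { verb: 'leave' }
];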

File diff suppressed because it is too large

View File

@@ -8,10 +8,6 @@ const DTMF_SPAN_NAME = 'dtmf';
class TaskListen extends Task {
constructor(logger, opts, parentTask) {
super(logger, opts);
/**
* @deprecated
* use bidirectionalAudio.enabled
*/
this.disableBidirectionalAudio = opts.disableBidirectionalAudio;
this.preconditions = TaskPreconditions.Endpoint;
@@ -29,15 +25,6 @@ class TaskListen extends Task {
this.results = {};
this.playAudioQueue = [];
this.isPlayingAudioFromQueue = false;
this.bidirectionalAudio = {
enabled: this.disableBidirectionalAudio === true ? false : true,
...(this.data['bidirectionalAudio']),
};
// From drachtio version 3.0.40, forkAudioStart sends an empty bugname and metadata together with the
// bidirectionalAudio params, which causes older freeswitch versions to confuse the bugname with the
// bidirectionalAudio params
this._bugname = 'audio_fork';
if (this.transcribe) this.transcribeTask = makeTask(logger, {'transcribe': opts.transcribe}, this);
}
@@ -136,6 +123,8 @@ class TaskListen extends Task {
ci,
this.metadata);
if (this.hook.auth) {
this.logger.debug({username: this.hook.auth.username, password: this.hook.auth.password},
'TaskListen:_startListening basic auth');
await this.ep.set({
'MOD_AUDIO_BASIC_AUTH_USERNAME': this.hook.auth.username,
'MOD_AUDIO_BASIC_AUTH_PASSWORD': this.hook.auth.password
@@ -146,8 +135,7 @@ class TaskListen extends Task {
mixType: this.mixType,
sampling: this.sampleRate,
...(this._bugname && {bugname: this._bugname}),
metadata,
bidirectionalAudio: this.bidirectionalAudio || {}
metadata
});
this.recordStartTime = moment();
if (this.maxLength) {
@@ -167,7 +155,7 @@ class TaskListen extends Task {
}
/* support bi-directional audio */
if (this.bidirectionalAudio.enabled) {
if (!this.disableBidirectionalAudio) {
ep.addCustomEventListener(ListenEvents.PlayAudio, this._onPlayAudio.bind(this, ep));
}
ep.addCustomEventListener(ListenEvents.KillAudio, this._onKillAudio.bind(this, ep));
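A minimal listen verb sketch that exercises the bidirectional-audio handling above; the url, mixType and sampleRate values are placeholders rather than values from the source.
// Illustrative only
const listenVerb = {
  verb: 'listen',
  url: 'wss://example.com/audio-stream',
  mixType: 'stereo',
  sampleRate: 8000,
  bidirectionalAudio: { enabled: true }   // supersedes the deprecated disableBidirectionalAudio flag
};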

View File

@@ -14,9 +14,6 @@ function makeTask(logger, obj, parent) {
}
validateVerb(name, data, logger);
switch (name) {
case TaskName.Answer:
const TaskAnswer = require('./answer');
return new TaskAnswer(logger, data, parent);
case TaskName.SipDecline:
const TaskSipDecline = require('./sip_decline');
return new TaskSipDecline(logger, data, parent);
@@ -44,9 +41,6 @@ function makeTask(logger, obj, parent) {
case TaskName.Dtmf:
const TaskDtmf = require('./dtmf');
return new TaskDtmf(logger, data, parent);
case TaskName.Dub:
const TaskDub = require('./dub');
return new TaskDub(logger, data, parent);
case TaskName.Enqueue:
const TaskEnqueue = require('./enqueue');
return new TaskEnqueue(logger, data, parent);

View File

@@ -16,8 +16,6 @@ class TaskRestDial extends Task {
this.to = this.data.to;
this.call_hook = this.data.call_hook;
this.timeout = this.data.timeout || 60;
this.sipRequestWithinDialogHook = this.data.sipRequestWithinDialogHook;
this.referHook = this.data.referHook;
this.on('connect', this._onConnect.bind(this));
this.on('callStatus', this._onCallStatus.bind(this));
@@ -65,9 +63,8 @@ class TaskRestDial extends Task {
this.canCancel = false;
const cs = this.callSession;
cs.setDialog(dlg);
cs.referHook = this.referHook;
this.logger.debug('TaskRestDial:_onConnect - call connected');
if (this.sipRequestWithinDialogHook) this._initSipRequestWithinDialogHandler(cs, dlg);
try {
const b3 = this.getTracingPropagation();
const httpHeaders = b3 && {b3};
@@ -145,16 +142,6 @@ class TaskRestDial extends Task {
this.logger.error({err}, 'Rest:dial:_onAmdEvent - error calling actionHook');
});
}
_initSipRequestWithinDialogHandler(cs, dlg) {
cs.sipRequestWithinDialogHook = this.sipRequestWithinDialogHook;
dlg.on('info', this._onRequestWithinDialog.bind(this, cs));
dlg.on('message', this._onRequestWithinDialog.bind(this, cs));
}
async _onRequestWithinDialog(cs, req, res) {
cs._onRequestWithinDialog(req, res);
}
}
module.exports = TaskRestDial;
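As a rough illustration, a REST dial payload using the properties read in the constructor above; the target shape, hook URLs and number are placeholder assumptions.
// Illustrative only
const restDialPayload = {
  to: { type: 'phone', number: '+15551234567' },
  call_hook: 'https://example.com/outbound-app',
  timeout: 60,
  referHook: 'https://example.com/refer-handler',
  sipRequestWithinDialogHook: 'https://example.com/indialog-handler'
};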

View File

@@ -1,4 +1,4 @@
const TtsTask = require('./tts-task');
const Task = require('./task');
const {TaskName, TaskPreconditions} = require('../utils/constants');
const pollySSMLSplit = require('polly-ssml-split');
@@ -23,15 +23,9 @@ const breakLengthyTextIfNeeded = (logger, text) => {
}
};
const parseTextFromSayString = (text) => {
const closingBraceIndex = text.indexOf('}');
if (closingBraceIndex === -1) return text;
return text.slice(closingBraceIndex + 1);
};
class TaskSay extends TtsTask {
class TaskSay extends Task {
constructor(logger, opts, parentTask) {
super(logger, opts, parentTask);
super(logger, opts);
this.preconditions = TaskPreconditions.Endpoint;
this.text = (Array.isArray(this.data.text) ? this.data.text : [this.data.text])
@@ -39,6 +33,10 @@ class TaskSay extends TtsTask {
.flat();
this.loop = this.data.loop || 1;
this.earlyMedia = this.data.earlyMedia === true || (parentTask && parentTask.earlyMedia);
this.synthesizer = this.data.synthesizer || {};
this.disableTtsCache = this.data.disableTtsCache;
this.options = this.synthesizer.options || {};
this.isHandledByPrimaryProvider = true;
}
@@ -52,21 +50,12 @@ class TaskSay extends TtsTask {
return `${this.name}{${this.text[0]}}`;
}
_validateURL(urlString) {
try {
new URL(urlString);
return true;
} catch (e) {
return false;
}
}
async _synthesizeWithSpecificVendor(cs, ep, {vendor, language, voice, label, preCache = false}) {
const {srf, accountSid:account_sid} = cs;
async _synthesizeWithSpecificVendor(cs, ep, {vendor, language, voice, label}) {
const {srf} = cs;
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, srf);
const {writeAlerts, AlertType, stats} = srf.locals;
const {synthAudio} = srf.locals.dbHelpers;
const engine = this.synthesizer.engine || cs.synthesizer?.engine || 'neural';
const engine = this.synthesizer.engine || 'standard';
const salt = cs.callSid;
let credentials = cs.getSpeechCredentials(vendor, 'tts', label);
@@ -78,8 +67,6 @@ class TaskSay extends TtsTask {
voice = arr[1];
model = arr[2];
}
} else if (vendor === 'deepgram') {
model = voice;
}
/* allow for microsoft custom region voice and api_key to be specified as an override */
@@ -90,32 +77,20 @@ class TaskSay extends TtsTask {
credentials.api_key = this.options.apiKey || credentials.apiKey;
credentials.region = this.options.region || credentials.region;
voice = this.options.voice || voice;
} else if (vendor === 'elevenlabs') {
credentials = credentials || {};
credentials.model_id = this.options.model_id || credentials.model_id;
credentials.voice_settings = this.options.voice_settings || {};
credentials.optimize_streaming_latency = this.options.optimize_streaming_latency
|| credentials.optimize_streaming_latency;
voice = this.options.voice_id || voice;
}
ep.set({
tts_engine: vendor.startsWith('custom:') ? 'custom' : vendor,
tts_voice: voice,
cache_speech_handles: !cs.currentTtsVendor || cs.currentTtsVendor === vendor ? 1 : 0,
}).catch((err) => this.logger.info({err}, 'Error setting tts_engine on endpoint'));
// set the current vendor on the call session
// If vendor is changed from the previous one, then reset the cache_speech_handles flag
cs.currentTtsVendor = vendor;
if (!preCache && !this._disableTracing) this.logger.info({vendor, language, voice, model}, 'TaskSay:exec');
this.logger.info({vendor, language, voice, model}, 'TaskSay:exec');
try {
if (!credentials) {
writeAlerts({
account_sid,
account_sid: cs.accountSid,
alert_type: AlertType.TTS_NOT_PROVISIONED,
vendor
}).catch((err) => this.logger.info({err}, 'Error generating alert for no tts'));
this.notifyError({
msg: 'TTS error',
details:`No speech credentials provisioned for selected vendor ${vendor}`
});
throw new Error('no provisioned speech credentials for TTS');
}
// synthesize all of the text elements
@@ -127,17 +102,14 @@ class TaskSay extends TtsTask {
if (text.startsWith('silence_stream://')) return text;
/* otel: trace time for tts */
if (!preCache && !this._disableTracing) {
const {span} = this.startChildSpan('tts-generation', {
'tts.vendor': vendor,
'tts.language': language,
'tts.voice': voice
});
this.otelSpan = span;
}
const {span} = this.startChildSpan('tts-generation', {
'tts.vendor': vendor,
'tts.language': language,
'tts.voice': voice
});
try {
const {filePath, servedFromCache, rtt} = await synthAudio(stats, {
account_sid,
account_sid: cs.accountSid,
text,
vendor,
language,
@@ -146,52 +118,42 @@ class TaskSay extends TtsTask {
model,
salt,
credentials,
options: this.options,
disableTtsCache : this.disableTtsCache,
preCache
disableTtsCache : this.disableTtsCache
});
if (!filePath.startsWith('say:')) {
this.logger.debug(`file ${filePath}, served from cache ${servedFromCache}`);
if (filePath) cs.trackTmpFile(filePath);
if (this.otelSpan) {
this.otelSpan.setAttributes({'tts.cached': servedFromCache});
this.otelSpan.end();
this.otelSpan = null;
}
if (!servedFromCache && !lastUpdated) {
lastUpdated = true;
updateSpeechCredentialLastUsed(credentials.speech_credential_sid).catch(() => {/* logged error */});
}
if (!servedFromCache && rtt && !preCache && !this._disableTracing) {
this.notifyStatus({
event: 'synthesized-audio',
vendor,
language,
characters: text.length,
elapsedTime: rtt
});
}
this.logger.debug(`file ${filePath}, served from cache ${servedFromCache}`);
if (filePath) cs.trackTmpFile(filePath);
if (!servedFromCache && !lastUpdated) {
lastUpdated = true;
updateSpeechCredentialLastUsed(credentials.speech_credential_sid)
.catch(() => {/*already logged error */});
}
else {
this.logger.debug('a streaming tts api will be used');
const modifiedPath = filePath.replace('say:{', `say:{session-uuid=${ep.uuid},`);
return modifiedPath;
span.setAttributes({'tts.cached': servedFromCache});
span.end();
if (!servedFromCache && rtt) {
this.notifyStatus({
event: 'synthesized-audio',
vendor,
language,
characters: text.length,
elapsedTime: rtt
});
}
return filePath;
} catch (err) {
this.logger.info({err}, 'Error synthesizing tts');
if (this.otelSpan) this.otelSpan.end();
span.end();
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.TTS_FAILURE,
vendor,
detail: err.message
}).catch((err) => this.logger.info({err}, 'Error generating alert for tts failure'));
this.notifyError({msg: 'TTS error', details: err.message || err});
throw err;
}
};
const arr = this.text.map((t) => (this._validateURL(t) ? t : generateAudio(t)));
const arr = this.text.map((t) => generateAudio(t));
return (await Promise.all(arr)).filter((fp) => fp && fp.length);
} catch (err) {
this.logger.info(err, 'TaskSay:exec error');
@@ -201,27 +163,21 @@ class TaskSay extends TtsTask {
}
async exec(cs, {ep}) {
const {srf, accountSid:account_sid} = cs;
const {writeAlerts, AlertType} = srf.locals;
const {addFileToCache} = srf.locals.dbHelpers;
const engine = this.synthesizer.engine || cs.synthesizer?.engine || 'neural';
await super.exec(cs);
this.ep = ep;
let vendor = this.synthesizer.vendor && this.synthesizer.vendor !== 'default' ?
const vendor = this.synthesizer.vendor && this.synthesizer.vendor !== 'default' ?
this.synthesizer.vendor :
cs.speechSynthesisVendor;
let language = this.synthesizer.language && this.synthesizer.language !== 'default' ?
const language = this.synthesizer.language && this.synthesizer.language !== 'default' ?
this.synthesizer.language :
cs.speechSynthesisLanguage ;
let voice = this.synthesizer.voice && this.synthesizer.voice !== 'default' ?
const voice = this.synthesizer.voice && this.synthesizer.voice !== 'default' ?
this.synthesizer.voice :
cs.speechSynthesisVoice;
// label can be null/empty in synthesizer config, just use application level label if it's default
let label = this.synthesizer.label === 'default' ?
cs.speechSynthesisLabel :
this.synthesizer.label;
const label = this.synthesizer.label && this.synthesizer.label !== 'default' ?
this.synthesizer.label :
cs.speechSynthesisLabel;
const fallbackVendor = this.synthesizer.fallbackVendor && this.synthesizer.fallbackVendor !== 'default' ?
this.synthesizer.fallbackVendor :
@@ -232,24 +188,16 @@ class TaskSay extends TtsTask {
const fallbackVoice = this.synthesizer.fallbackVoice && this.synthesizer.fallbackVoice !== 'default' ?
this.synthesizer.fallbackVoice :
cs.fallbackSpeechSynthesisVoice;
// label can be null/empty in synthesizer config, just use application level label if it's default
const fallbackLabel = this.synthesizer.fallbackLabel === 'default' ?
cs.fallbackSpeechSynthesisLabel :
this.synthesizer.fallbackLabel;
const fallbackLabel = this.synthesizer.fallbackLabel && this.synthesizer.fallbackLabel !== 'default' ?
this.synthesizer.fallbackLabel :
cs.fallbackSpeechSynthesisLabel;
if (cs.hasFallbackTts) {
vendor = fallbackVendor;
language = fallbackLanguage;
voice = fallbackVoice;
label = fallbackLabel;
}
const startFallback = async(error) => {
if (fallbackVendor && this.isHandledByPrimaryProvider && !cs.hasFallbackTts) {
this.notifyError(
{ msg: 'TTS error', details:`TTS vendor ${vendor} error: ${error}`, failover: 'in progress'});
let filepath;
try {
filepath = await this._synthesizeWithSpecificVendor(cs, ep, {vendor, language, voice, label});
} catch (error) {
if (fallbackVendor && this.isHandledByPrimaryProvider) {
this.isHandledByPrimaryProvider = false;
cs.hasFallbackTts = true;
this.logger.info(`Synthesize error, fallback to ${fallbackVendor}`);
filepath = await this._synthesizeWithSpecificVendor(cs, ep,
{
@@ -259,94 +207,22 @@ class TaskSay extends TtsTask {
label: fallbackLabel
});
} else {
this.notifyError(
{ msg: 'TTS error', details:`TTS vendor ${vendor} error: ${error}`, failover: 'not available'});
throw error;
}
};
let filepath;
try {
filepath = await this._synthesizeWithSpecificVendor(cs, ep, {vendor, language, voice, label});
} catch (error) {
await startFallback(error);
}
this.notifyStatus({event: 'start-playback'});
while (!this.killed && (this.loop === 'forever' || this.loop--) && ep?.connected) {
while (!this.killed && (this.loop === 'forever' || this.loop--) && this.ep?.connected) {
let segment = 0;
while (!this.killed && segment < filepath.length) {
if (cs.isInConference) {
const {memberId, confName, confUuid} = cs;
await this.playToConfMember(ep, memberId, confName, confUuid, filepath[segment]);
await this.playToConfMember(this.ep, memberId, confName, confUuid, filepath[segment]);
}
else {
if (filepath[segment].startsWith('say:{')) {
const arr = /^say:\{.*\}\s*(.*)$/.exec(filepath[segment]);
if (arr) this.logger.debug(`Say:exec sending streaming tts request: ${arr[1].substring(0, 64)}..`);
}
else this.logger.debug(`Say:exec sending ${filepath[segment].substring(0, 64)}`);
ep.once('playback-start', (evt) => {
this.logger.debug({evt}, 'got playback-start');
if (this.otelSpan) {
this._addStreamingTtsAttributes(this.otelSpan, evt);
this.otelSpan.end();
this.otelSpan = null;
if (evt.variable_tts_cache_filename) cs.trackTmpFile(evt.variable_tts_cache_filename);
}
});
ep.once('playback-stop', (evt) => {
this.logger.debug({evt}, 'got playback-stop');
if (evt.variable_tts_error) {
writeAlerts({
account_sid,
alert_type: AlertType.TTS_FAILURE,
vendor,
detail: evt.variable_tts_error
}).catch((err) => this.logger.info({err}, 'Error generating alert for no tts'));
}
if (evt.variable_tts_cache_filename) {
const text = parseTextFromSayString(this.text[segment]);
addFileToCache(evt.variable_tts_cache_filename, {
account_sid,
vendor,
language,
voice,
engine,
text
}).catch((err) => this.logger.info({err}, 'Error adding file to cache'));
}
if (this._playResolve) {
evt.variable_tts_error ? this._playReject(new Error(evt.variable_tts_error)) : this._playResolve();
}
});
// wait for the playback-stop event to confirm whether the playback succeeded
this._playPromise = new Promise((resolve, reject) => {
this._playResolve = resolve;
this._playReject = reject;
});
this.logger.debug(`Say:exec sending command to play file ${filepath[segment]}`);
await ep.play(filepath[segment]);
try {
// wait for the playback-stop event to confirm whether the playback succeeded
await this._playPromise;
} catch (err) {
try {
await startFallback(err);
continue;
} catch (err) {
this.logger.info({err}, 'Error waiting for playback-stop event');
}
} finally {
this._playPromise = null;
this._playResolve = null;
this._playReject = null;
}
if (filepath[segment].startsWith('say:{')) {
const arr = /^say:\{.*\}\s*(.*)$/.exec(filepath[segment]);
if (arr) this.logger.debug(`Say:exec complete playing streaming tts request: ${arr[1].substring(0, 64)}..`);
} else {
// This log will print speech credentials in the say command for tts streaming mode
this.logger.debug(`Say:exec completed play file ${filepath[segment]}`);
}
this.logger.debug(`Say:exec completed play file ${filepath[segment]}`);
}
segment++;
}
@@ -356,7 +232,7 @@ class TaskSay extends TtsTask {
async kill(cs) {
super.kill(cs);
if (this.ep?.connected) {
if (this.ep.connected) {
this.logger.debug('TaskSay:kill - killing audio');
if (cs.isInConference) {
const {memberId, confName} = cs;
@@ -366,75 +242,8 @@ class TaskSay extends TtsTask {
this.notifyStatus({event: 'kill-playback'});
this.ep.api('uuid_break', this.ep.uuid);
}
this.ep.removeAllListeners('playback-start');
this.ep.removeAllListeners('playback-stop');
// if we are waiting for playback-stop event, resolve the promise
if (this._playResolve) this._playResolve();
}
}
_addStreamingTtsAttributes(span, evt) {
const attrs = {'tts.cached': false};
for (const [key, value] of Object.entries(evt)) {
if (key.startsWith('variable_tts_')) {
let newKey = key.substring('variable_tts_'.length)
.replace('whisper_', 'whisper.')
.replace('deepgram_', 'deepgram.')
.replace('playht_', 'playht.')
.replace('rimelabs_', 'rimelabs.')
.replace('verbio_', 'verbio.')
.replace('elevenlabs_', 'elevenlabs.');
if (spanMapping[newKey]) newKey = spanMapping[newKey];
attrs[newKey] = value;
}
}
delete attrs['cache_filename']; //no value in adding this to the span
span.setAttributes(attrs);
}
}
const spanMapping = {
// IMPORTANT!!! THE JAMBONZ WEBAPP DISPLAYS SPAN NAMES CORRECTLY ONLY IF THEY ARE 25 CHARACTERS OR FEWER.
// EX: whisper.ratelim_reqs has length 20 <= 25, which is fine
// Elevenlabs
'elevenlabs.reported_latency_ms': 'elevenlabs.latency_ms',
'elevenlabs.request_id': 'elevenlabs.req_id',
'elevenlabs.history_item_id': 'elevenlabs.item_id',
'elevenlabs.optimize_streaming_latency': 'elevenlabs.optimization',
'elevenlabs.name_lookup_time_ms': 'name_lookup_ms',
'elevenlabs.connect_time_ms': 'connect_ms',
'elevenlabs.final_response_time_ms': 'final_response_ms',
// Whisper
'whisper.reported_latency_ms': 'whisper.latency_ms',
'whisper.request_id': 'whisper.req_id',
'whisper.reported_organization': 'whisper.organization',
'whisper.reported_ratelimit_requests': 'whisper.ratelimit',
'whisper.reported_ratelimit_remaining_requests': 'whisper.ratelimit_remain',
'whisper.reported_ratelimit_reset_requests': 'whisper.ratelimit_reset',
'whisper.name_lookup_time_ms': 'name_lookup_ms',
'whisper.connect_time_ms': 'connect_ms',
'whisper.final_response_time_ms': 'final_response_ms',
// Deepgram
'deepgram.request_id': 'deepgram.req_id',
'deepgram.reported_model_name': 'deepgram.model_name',
'deepgram.reported_model_uuid': 'deepgram.model_uuid',
'deepgram.reported_char_count': 'deepgram.char_count',
'deepgram.name_lookup_time_ms': 'name_lookup_ms',
'deepgram.connect_time_ms': 'connect_ms',
'deepgram.final_response_time_ms': 'final_response_ms',
// Playht
'playht.request_id': 'playht.req_id',
'playht.name_lookup_time_ms': 'name_lookup_ms',
'playht.connect_time_ms': 'connect_ms',
'playht.final_response_time_ms': 'final_response_ms',
// Rimelabs
'rimelabs.name_lookup_time_ms': 'name_lookup_ms',
'rimelabs.connect_time_ms': 'connect_ms',
'rimelabs.final_response_time_ms': 'final_response_ms',
// verbio
'verbio.name_lookup_time_ms': 'name_lookup_ms',
'verbio.connect_time_ms': 'connect_ms',
'verbio.final_response_time_ms': 'final_response_ms',
};
module.exports = TaskSay;
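Finally, a hedged example of a say verb with primary and fallback synthesizer settings matching the fields consulted above; the vendor, language and voice values are illustrative only.
// Illustrative only
const sayVerb = {
  verb: 'say',
  text: 'Your order has shipped.',
  loop: 1,
  synthesizer: {
    vendor: 'default',
    language: 'default',
    voice: 'default',
    label: 'default',
    fallbackVendor: 'elevenlabs',
    fallbackLanguage: 'en-US',
    fallbackVoice: 'example-voice-id',
    fallbackLabel: 'default'
  }
};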

View File

@@ -1,7 +1,6 @@
const Task = require('./task');
const assert = require('assert');
const crypto = require('crypto');
const { TaskPreconditions, CobaltTranscriptionEvents } = require('../utils/constants');
const { TaskPreconditions } = require('../utils/constants');
class SttTask extends Task {
@@ -14,15 +13,15 @@ class SttTask extends Task {
const {
setChannelVarsForStt,
normalizeTranscription,
removeSpeechListeners,
setSpeechCredentialsAtRuntime,
compileSonioxTranscripts,
consolidateTranscripts
compileSonioxTranscripts
} = require('../utils/transcription-utils')(logger);
this.setChannelVarsForStt = setChannelVarsForStt;
this.normalizeTranscription = normalizeTranscription;
this.removeSpeechListeners = removeSpeechListeners;
this.compileSonioxTranscripts = compileSonioxTranscripts;
this.consolidateTranscripts = consolidateTranscripts;
this.eventHandlers = [];
this.isHandledByPrimaryProvider = true;
if (this.data.recognizer) {
const recognizer = this.data.recognizer;
@@ -33,143 +32,21 @@ class SttTask extends Task {
//fallback
this.fallbackVendor = recognizer.fallbackVendor || 'default';
this.fallbackLanguage = recognizer.fallbackLanguage || 'default';
// label can be empty and should not have default value.
this.fallbackLabel = recognizer.fallbackLabel;
this.fallbackLabel = recognizer.fallbackLabel || 'default';
/* let credentials be supplied in the recognizer object at runtime */
this.sttCredentials = setSpeechCredentialsAtRuntime(recognizer);
if (!Array.isArray(this.data.recognizer.altLanguages)) {
this.data.recognizer.altLanguages = [];
}
} else {
this.data.recognizer = {hints: [], altLanguages: []};
}
/* buffer for soniox transcripts */
this._sonioxTranscripts = [];
/*bug name prefix */
this.bugname_prefix = '';
}
async exec(cs, {ep, ep2}) {
super.exec(cs);
this.ep = ep;
this.ep2 = ep2;
// use session preferences if we don't have specific verb-level settings.
if (cs.recognizer) {
for (const k in cs.recognizer) {
const newValue = this.data.recognizer && this.data.recognizer[k] !== undefined ?
this.data.recognizer[k] :
cs.recognizer[k];
if (Array.isArray(newValue)) {
this.data.recognizer[k] = [...(this.data.recognizer[k] || []), ...cs.recognizer[k]];
} else if (typeof newValue === 'object' && newValue !== null) {
this.data.recognizer[k] = { ...(this.data.recognizer[k] || {}), ...cs.recognizer[k] };
} else {
this.data.recognizer[k] = newValue;
}
}
}
if ('default' === this.vendor || !this.vendor) {
this.vendor = cs.speechRecognizerVendor;
if (this.data.recognizer) this.data.recognizer.vendor = this.vendor;
}
if ('default' === this.language || !this.language) {
this.language = cs.speechRecognizerLanguage;
if (this.data.recognizer) this.data.recognizer.language = this.language;
}
// label can be empty, should not assign application level label
if ('default' === this.label) {
this.label = cs.speechRecognizerLabel;
if (this.data.recognizer) this.data.recognizer.label = this.label;
}
// Fallback options
if ('default' === this.fallbackVendor || !this.fallbackVendor) {
this.fallbackVendor = cs.fallbackSpeechRecognizerVendor;
if (this.data.recognizer) this.data.recognizer.fallbackVendor = this.fallbackVendor;
}
if ('default' === this.fallbackLanguage || !this.fallbackLanguage) {
this.fallbackLanguage = cs.fallbackSpeechRecognizerLanguage;
if (this.data.recognizer) this.data.recognizer.fallbackLanguage = this.fallbackLanguage;
}
// label can be empty, should not assign application level label
if ('default' === this.fallbackLabel) {
this.fallbackLabel = cs.fallbackSpeechRecognizerLabel;
if (this.data.recognizer) this.data.recognizer.fallbackLabel = this.fallbackLabel;
}
// If call is already fallback to 2nd ASR vendor
// use that.
if (cs.hasFallbackAsr) {
this.vendor = this.fallbackVendor;
this.language = this.fallbackLanguage;
this.label = this.fallbackLabel;
}
if (!this.data.recognizer.vendor) {
this.data.recognizer.vendor = this.vendor;
}
if (this.vendor === 'cobalt' && !this.data.recognizer.model) {
// By default, the application stores the cobalt model in the language field
this.data.recognizer.model = cs.speechRecognizerLanguage;
}
if (
// not gather task, such as transcribe
(!this.input ||
// gather task with speech
this.input.includes('speech')) &&
!this.sttCredentials) {
try {
this.sttCredentials = await this._initSpeechCredentials(this.cs, this.vendor, this.label);
} catch (error) {
if (this.canFallback) {
this.notifyError(
{
msg: 'ASR error', details:`Invalid vendor ${this.vendor}, Error: ${error}`,
failover: 'in progress'
});
await this._initFallback();
} else {
this.notifyError(
{
msg: 'ASR error', details:`Invalid vendor ${this.vendor}, Error: ${error}`,
failover: 'not available'
});
throw error;
}
}
}
/* when using cobalt model is required */
if (this.vendor === 'cobalt' && !this.data.recognizer.model) {
this.notifyError({ msg: 'ASR error', details:'Cobalt requires a model to be specified'});
throw new Error('Cobalt requires a model to be specified');
}
if (cs.hasAltLanguages) {
this.data.recognizer.altLanguages = this.data.recognizer.altLanguages.concat(cs.altLanguages);
this.logger.debug({altLanguages: this.altLanguages},
'STT:exec - applying altLanguages');
}
if (cs.hasGlobalSttPunctuation && !this.data.recognizer.punctuation) {
this.data.recognizer.punctuation = cs.globalSttPunctuation;
}
}
addCustomEventListener(ep, event, handler) {
this.eventHandlers.push({ep, event, handler});
ep.addCustomEventListener(event, handler);
}
removeCustomEventListeners() {
this.eventHandlers.forEach((h) => h.ep.removeCustomEventListener(h.event, h.handler));
}
async _initSpeechCredentials(cs, vendor, label) {
const {getNuanceAccessToken, getIbmAccessToken, getAwsAuthToken, getVerbioAccessToken} = cs.srf.locals.dbHelpers;
const {getNuanceAccessToken, getIbmAccessToken} = this.cs.srf.locals.dbHelpers;
let credentials = cs.getSpeechCredentials(vendor, 'stt', label);
if (!credentials) {
@@ -180,6 +57,11 @@ class SttTask extends Task {
alert_type: AlertType.STT_NOT_PROVISIONED,
vendor
}).catch((err) => this.logger.info({err}, 'Error generating alert for no stt'));
// Notify the application that the STT vendor is misconfigured.
this.notifyError({
msg: 'ASR error',
details: `No speech-to-text service credentials for ${vendor} have been configured`
});
this.notifyTaskDone();
throw new Error(`No speech-to-text service credentials for ${vendor} have been configured`);
}
@@ -197,33 +79,13 @@ class SttTask extends Task {
const {access_token, servedFromCache} = await getIbmAccessToken(stt_api_key);
this.logger.debug({stt_api_key}, `got ibm access token ${servedFromCache ? 'from cache' : ''}`);
credentials = {...credentials, access_token, stt_region};
} else if (['aws', 'polly'].includes(vendor) && credentials.roleArn) {
/* get aws access token */
const {roleArn, region} = credentials;
const {accessKeyId, secretAccessKey, sessionToken, servedFromCache} =
await getAwsAuthToken({
region,
roleArn
});
this.logger.debug({roleArn}, `got aws access token ${servedFromCache ? 'from cache' : ''}`);
credentials = {...credentials, accessKeyId, secretAccessKey, sessionToken};
} else if (vendor === 'verbio' && credentials.client_id && credentials.client_secret) {
const {access_token, servedFromCache} = await getVerbioAccessToken(credentials);
this.logger.debug({client_id: credentials.client_id},
`got verbio access token ${servedFromCache ? 'from cache' : ''}`);
credentials.access_token = access_token;
}
return credentials;
}
get canFallback() {
return this.fallbackVendor && this.isHandledByPrimaryProvider && !this.cs.hasFallbackAsr;
}
async _initFallback() {
async _fallback() {
assert(this.fallbackVendor, 'fallback failed without fallbackVendor configuration');
this.isHandledByPrimaryProvider = false;
this.cs.hasFallbackAsr = true;
this.logger.info(`Failed to use primary STT provider, falling back to ${this.fallbackVendor}`);
this.vendor = this.fallbackVendor;
this.language = this.fallbackLanguage;
@@ -232,96 +94,6 @@ class SttTask extends Task {
this.data.recognizer.language = this.language;
this.data.recognizer.label = this.label;
this.sttCredentials = await this._initSpeechCredentials(this.cs, this.vendor, this.label);
// cleanup previous listener from previous vendor
this.removeCustomEventListeners();
}
async compileHintsForCobalt(ep, hostport, model, token, hints) {
const {retrieveKey} = this.cs.srf.locals.dbHelpers;
const hash = crypto.createHash('sha1');
hash.update(`${model}:${hints}`);
const key = `cobalt:${hash.digest('hex')}`;
this.context = await retrieveKey(key);
if (this.context) {
this.logger.debug({model, hints}, 'found cached cobalt context for supplied hints');
return this.context;
}
this.logger.debug({model, hints}, 'compiling cobalt context for supplied hints');
return new Promise((resolve, reject) => {
this.cobaltCompileResolver = resolve;
ep.addCustomEventListener(CobaltTranscriptionEvents.CompileContext, this._onCompileContext.bind(this, ep, key));
ep.api('uuid_cobalt_compile_context', [ep.uuid, hostport, model, token, hints], (err, evt) => {
if (err || 0 !== evt.getBody().indexOf('+OK')) {
ep.removeCustomEventListener(CobaltTranscriptionEvents.CompileContext);
return reject(err);
}
});
});
}
_onCompileContext(ep, key, evt) {
const {addKey} = this.cs.srf.locals.dbHelpers;
this.logger.debug({evt}, `received cobalt compile context event, will cache under ${key}`);
this.cobaltCompileResolver(evt.compiled_context);
ep.removeCustomEventListener(CobaltTranscriptionEvents.CompileContext);
this.cobaltCompileResolver = null;
//cache the compiled context
addKey(key, evt.compiled_context, 3600 * 12)
.catch((err) => this.logger.info({err}, `Error caching cobalt context for ${key}`));
}
_doContinuousAsrWithDeepgram(asrTimeout) {
/* deepgram has an utterance_end_ms property that simplifies things */
assert(this.vendor === 'deepgram');
if (asrTimeout < 1000) {
this.notifyError({
msg: 'ASR error',
details:`asrTimeout ${asrTimeout} is too short for deepgram; setting it to 1000ms`
});
asrTimeout = 1000;
}
else if (asrTimeout > 5000) {
this.notifyError({
msg: 'ASR error',
details:`asrTimeout ${asrTimeout} is too long for deepgram; setting it to 5000ms`
});
asrTimeout = 5000;
}
this.logger.debug(`_doContinuousAsrWithDeepgram - setting utterance_end_ms to ${asrTimeout}`);
const dgOptions = this.data.recognizer.deepgramOptions = this.data.recognizer.deepgramOptions || {};
dgOptions.utteranceEndMs = dgOptions.utteranceEndMs || asrTimeout;
}
_onVendorConnect(_cs, _ep) {
this.logger.debug(`TaskGather:_on${this.vendor}Connect`);
}
_onVendorError(cs, _ep, evt) {
this.logger.info({evt}, `${this.name}:_on${this.vendor}Error`);
const {writeAlerts, AlertType} = cs.srf.locals;
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: 'STT failure reported by vendor',
detail: evt.error,
vendor: this.vendor,
}).catch((err) => this.logger.info({err}, `Error generating alert for ${this.vendor} connection failure`));
}
_onVendorConnectFailure(cs, _ep, evt) {
const {reason} = evt;
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info({evt}, `${this.name}:_on${this.vendor}ConnectFailure`);
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Failed connecting to ${this.vendor} speech recognizer: ${reason}`,
vendor: this.vendor,
}).catch((err) => this.logger.info({err}, `Error generating alert for ${this.vendor} connection failure`));
}
}
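For context on the deepgram-specific continuous ASR handling in _doContinuousAsrWithDeepgram above, the logic amounts to bounding the caller-supplied asrTimeout before mapping it onto deepgram's utterance_end_ms. A minimal standalone sketch (names here are illustrative, not part of this module's API):

// Illustrative only: clamp an asrTimeout (ms) into the range accepted for
// deepgram's utterance_end_ms, mirroring the bounds used above.
function clampUtteranceEndMs(asrTimeout) {
  if (asrTimeout < 1000) return 1000;   // too short for deepgram
  if (asrTimeout > 5000) return 5000;   // too long for deepgram
  return asrTimeout;
}

// e.g. clampUtteranceEndMs(250) === 1000, clampUtteranceEndMs(3000) === 3000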

View File

@@ -12,7 +12,7 @@ class TaskTag extends Task {
async exec(cs) {
super.exec(cs);
cs.callInfo.customerData = this.data;
this.logger.debug({customerData: cs.callInfo.customerData}, 'TaskTag:exec set customer data in callInfo');
//this.logger.debug({callInfo: cs.callInfo.toJSON()}, 'TaskTag:exec set customer data in callInfo');
}
}

View File

@@ -45,10 +45,6 @@ class Task extends Emitter {
return this.name;
}
set disableTracing(val) {
this._disableTracing = val;
}
toJSON() {
return this.data;
}
@@ -164,33 +160,15 @@ class Task extends Emitter {
const httpHeaders = b3 && {b3};
span.setAttributes({'http.body': JSON.stringify(params)});
try {
if (this.id) params.verb_id = this.id;
const json = await this.cs.requestor.request(type, this.actionHook, params, httpHeaders);
span.setAttributes({'http.statusCode': 200});
const isWsConnection = this.cs.requestor instanceof WsRequestor;
if (!isWsConnection || (expectResponse && json && Array.isArray(json) && json.length)) {
span.end();
} else {
/** we use this span to measure application response latency,
* and with websocket connections we generally get the application's response
* in a subsequent message from the far end, so we terminate the span when the
* first new set of verbs arrives after sending a transcript
* */
this.emit('VerbHookSpanWaitForEnd', {span});
// If an actionHook delay action is configured and the ws application has not yet responded
// with any verbs for the actionHook, we hand the task off to the call-session to await the
// next ws command verbs and also run the actionHook delay actions
//if (this.hookDelayActionOpts) {
// this.emit('ActionHookDelayActionOptions', this.hookDelayActionOpts);
//}
}
span.end();
if (expectResponse && json && Array.isArray(json)) {
const makeTask = require('./make_task');
const tasks = normalizeJambones(this.logger, json).map((tdata) => makeTask(this.logger, tdata));
if (tasks && tasks.length > 0) {
this.logger.info({tasks: tasks}, `${this.name} replacing application with ${tasks.length} tasks`);
this.callSession.replaceApplication(tasks);
return true;
}
}
} catch (err) {
@@ -198,7 +176,6 @@ class Task extends Emitter {
span.end();
throw err;
}
return false;
}
}
@@ -278,7 +255,6 @@ class Task extends Emitter {
delete obj.requestor;
delete obj.notifier;
obj.tasks = cs.getRemainingTaskData();
obj.callInfo = cs.callInfo.toJSON();
if (opts && obj.tasks.length > 0) {
const key = Object.keys(obj.tasks[0])[0];
Object.assign(obj.tasks[0][key], {_: opts});

View File

@@ -7,13 +7,11 @@ const {
AzureTranscriptionEvents,
DeepgramTranscriptionEvents,
SonioxTranscriptionEvents,
CobaltTranscriptionEvents,
IbmTranscriptionEvents,
NvidiaTranscriptionEvents,
JambonzTranscriptionEvents,
TranscribeStatus,
AssemblyAiTranscriptionEvents
} = require('../utils/constants.json');
TranscribeStatus
} = require('../utils/constants');
const { normalizeJambones } = require('@jambonz/verb-specifications');
const SttTask = require('./stt-task');
@@ -31,59 +29,19 @@ class TaskTranscribe extends SttTask {
this.separateRecognitionPerChannel = this.data.recognizer.separateRecognitionPerChannel;
}
/* for nested transcribe in dial, unless the app explicitly says so we want to transcribe both legs */
if (this.parentTask?.name === TaskName.Dial) {
if (this.data.channel === 1 || this.data.channel === 2) {
/* transcribe only the channel specified */
this.separateRecognitionPerChannel = false;
this.channel = this.data.channel;
logger.debug(`TaskTranscribe: transcribing only channel ${this.channel} in the Dial verb`);
}
else if (this.separateRecognitionPerChannel !== false) {
this.separateRecognitionPerChannel = true;
}
else {
this.channel = 1;
}
}
else {
this.channel = 1;
}
this.childSpan = [null, null];
// Continuous asr timeout
// Continuous asr timeout
this.asrTimeout = typeof this.data.recognizer.asrTimeout === 'number' ? this.data.recognizer.asrTimeout * 1000 : 0;
if (this.asrTimeout > 0) {
this.isContinuousAsr = true;
}
this.isContinuousAsr = this.asrTimeout > 0;
/* buffer speech for continuous asr */
this._bufferedTranscripts = [ [], [] ]; // for channel 1 and 2
this.bugname_prefix = 'transcribe_';
this.paused = false;
this._bufferedTranscripts = [];
}
get name() { return TaskName.Transcribe; }
get transcribing1() {
return this.channel === 1 || this.separateRecognitionPerChannel;
}
get transcribing2() {
return this.channel === 2 || this.separateRecognitionPerChannel && this.ep2;
}
async exec(cs, {ep, ep2}) {
await super.exec(cs, {ep, ep2});
if (this.data.recognizer.vendor === 'nuance') {
this.data.recognizer.nuanceOptions = {
// by default, nuance STT will recognize only the first utterance;
// setting 'multiple' allows nuance to detect all utterances
utteranceDetectionMode: 'multiple',
...this.data.recognizer.nuanceOptions
};
}
super.exec(cs);
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
if (cs.hasGlobalSttHints) {
@@ -93,42 +51,85 @@ class TaskTranscribe extends SttTask {
this.logger.debug({hints: this.data.recognizer.hints, hintsBoost: this.data.recognizer.hintsBoost},
'Transcribe:exec - applying global sttHints');
}
if (cs.hasAltLanguages) {
this.data.recognizer.altLanguages = this.data.recognizer.altLanguages.concat(cs.altLanguages);
this.logger.debug({altLanguages: this.altLanguages},
'Transcribe:exec - applying altLanguages');
}
if (cs.hasGlobalSttPunctuation && !this.data.recognizer.punctuation) {
this.data.recognizer.punctuation = cs.globalSttPunctuation;
}
this.ep = ep;
this.ep2 = ep2;
if ('default' === this.vendor || !this.vendor) {
this.vendor = cs.speechRecognizerVendor;
if (this.data.recognizer) this.data.recognizer.vendor = this.vendor;
}
if ('default' === this.language || !this.language) {
this.language = cs.speechRecognizerLanguage;
if (this.data.recognizer) this.data.recognizer.language = this.language;
}
if ('default' === this.label || !this.label) {
this.label = cs.speechRecognizerLabel;
if (this.data.recognizer) this.data.recognizer.label = this.label;
}
// Fallback options
if ('default' === this.fallbackVendor || !this.fallbackVendor) {
this.fallbackVendor = cs.fallbackSpeechRecognizerVendor;
if (this.data.recognizer) this.data.recognizer.fallbackVendor = this.fallbackVendor;
}
if ('default' === this.fallbackLanguage || !this.fallbackLanguage) {
this.fallbackLanguage = cs.fallbackSpeechRecognizerLanguage;
if (this.data.recognizer) this.data.recognizer.fallbackLanguage = this.fallbackLanguage;
}
if ('default' === this.fallbackLabel || !this.fallbackLabel) {
this.fallbackLabel = cs.fallbackSpeechRecognizerLabel;
if (this.data.recognizer) this.data.recognizer.fallbackLabel = this.fallbackLabel;
}
if (!this.data.recognizer.vendor) {
this.data.recognizer.vendor = this.vendor;
}
if (!this.sttCredentials) {
try {
this.sttCredentials = await this._initSpeechCredentials(cs, this.vendor, this.label);
} catch (error) {
if (this.fallbackVendor && this.isHandledByPrimaryProvider) {
await this._fallback();
} else {
throw error;
}
}
}
try {
if (this.transcribing1) {
await this._startTranscribing(cs, ep, 1);
}
if (this.transcribing2) {
await this._startTranscribing(cs, ep, 1);
if (this.separateRecognitionPerChannel && ep2) {
await this._startTranscribing(cs, ep2, 2);
}
updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid)
.catch(() => {/*already logged error */});
await this.awaitTaskDone();
} catch (err) {
if (!(await this._startFallback(cs, ep, {error: err}))) {
this.logger.info(err, 'TaskTranscribe:exec - error');
this.parentTask && this.parentTask.emit('error', err);
this.removeCustomEventListeners();
return;
}
this.logger.info(err, 'TaskTranscribe:exec - error');
this.parentTask && this.parentTask.emit('error', err);
}
await this.awaitTaskDone();
this.removeCustomEventListeners();
this.removeSpeechListeners(ep);
}
async _stopTranscription() {
let stopTranscription = false;
if (this.transcribing1 && this.ep?.connected) {
if (this.ep?.connected) {
stopTranscription = true;
this.ep.stopTranscription({
vendor: this.vendor,
bugname: this.bugname
})
this.ep.stopTranscription({vendor: this.vendor})
.catch((err) => this.logger.info(err, 'Error TaskTranscribe:kill'));
}
if (this.transcribing2 && this.ep2?.connected) {
if (this.separateRecognitionPerChannel && this.ep2 && this.ep2.connected) {
stopTranscription = true;
this.ep2.stopTranscription({vendor: this.vendor, bugname: this.bugname})
this.ep2.stopTranscription({vendor: this.vendor})
.catch((err) => this.logger.info(err, 'Error TaskTranscribe:kill'));
}
@@ -150,144 +151,99 @@ class TaskTranscribe extends SttTask {
this.logger.info(`TaskTranscribe:updateTranscribe status ${status}`);
switch (status) {
case TranscribeStatus.Pause:
this.paused = true;
await this._stopTranscription();
break;
case TranscribeStatus.Resume:
this.paused = false;
if (this.transcribing1) await this._startTranscribing(this.cs, this.ep, 1);
if (this.transcribing2) await this._startTranscribing(this.cs, this.ep2, 2);
await this._startTranscribing(this.cs, this.ep, 1);
if (this.separateRecognitionPerChannel && this.ep2) {
await this._startTranscribing(this.cs, this.ep2, 2);
}
break;
}
}
}
async _setSpeechHandlers(cs, ep, channel) {
if (this[`_speechHandlersSet_${channel}`]) return;
this[`_speechHandlersSet_${channel}`] = true;
/* some special deepgram logic */
if (this.vendor === 'deepgram') {
if (this.isContinuousAsr) this._doContinuousAsrWithDeepgram(this.asrTimeout);
}
const opts = this.setChannelVarsForStt(this, this.sttCredentials, this.language, this.data.recognizer);
async _startTranscribing(cs, ep, channel) {
const opts = this.setChannelVarsForStt(this, this.sttCredentials, this.data.recognizer);
switch (this.vendor) {
case 'google':
this.bugname = `${this.bugname_prefix}google_transcribe`;
this.addCustomEventListener(ep, GoogleTranscriptionEvents.Transcription,
this.bugname = 'google_transcribe';
ep.addCustomEventListener(GoogleTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
this.addCustomEventListener(ep, GoogleTranscriptionEvents.NoAudioDetected,
ep.addCustomEventListener(GoogleTranscriptionEvents.NoAudioDetected,
this._onNoAudio.bind(this, cs, ep, channel));
this.addCustomEventListener(ep, GoogleTranscriptionEvents.MaxDurationExceeded,
ep.addCustomEventListener(GoogleTranscriptionEvents.MaxDurationExceeded,
this._onMaxDurationExceeded.bind(this, cs, ep, channel));
break;
case 'aws':
case 'polly':
this.bugname = `${this.bugname_prefix}aws_transcribe`;
this.addCustomEventListener(ep, AwsTranscriptionEvents.Transcription,
this.bugname = 'aws_transcribe';
ep.addCustomEventListener(AwsTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
this.addCustomEventListener(ep, AwsTranscriptionEvents.NoAudioDetected,
ep.addCustomEventListener(AwsTranscriptionEvents.NoAudioDetected,
this._onNoAudio.bind(this, cs, ep, channel));
this.addCustomEventListener(ep, AwsTranscriptionEvents.MaxDurationExceeded,
ep.addCustomEventListener(AwsTranscriptionEvents.MaxDurationExceeded,
this._onMaxDurationExceeded.bind(this, cs, ep, channel));
break;
case 'microsoft':
this.bugname = `${this.bugname_prefix}azure_transcribe`;
this.addCustomEventListener(ep, AzureTranscriptionEvents.Transcription,
this.bugname = 'azure_transcribe';
ep.addCustomEventListener(AzureTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
//this.addCustomEventListener(ep, AzureTranscriptionEvents.NoSpeechDetected,
// this._onNoAudio.bind(this, cs, ep, channel));
ep.addCustomEventListener(AzureTranscriptionEvents.NoSpeechDetected,
this._onNoAudio.bind(this, cs, ep, channel));
break;
case 'nuance':
this.bugname = `${this.bugname_prefix}nuance_transcribe`;
this.addCustomEventListener(ep, NuanceTranscriptionEvents.Transcription,
this.bugname = 'nuance_transcribe';
ep.addCustomEventListener(NuanceTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(NuanceTranscriptionEvents.StartOfSpeech,
this._onStartOfSpeech.bind(this, cs, ep, channel));
ep.addCustomEventListener(NuanceTranscriptionEvents.TranscriptionComplete,
this._onTranscriptionComplete.bind(this, cs, ep, channel));
break;
case 'deepgram':
this.bugname = `${this.bugname_prefix}deepgram_transcribe`;
this.addCustomEventListener(ep, DeepgramTranscriptionEvents.Transcription,
this.bugname = 'deepgram_transcribe';
ep.addCustomEventListener(DeepgramTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
this.addCustomEventListener(ep, DeepgramTranscriptionEvents.Connect,
this._onVendorConnect.bind(this, cs, ep));
this.addCustomEventListener(ep, DeepgramTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep, channel));
/* if app sets deepgramOptions.utteranceEndMs they essentially want continuous asr */
//if (opts.DEEPGRAM_SPEECH_UTTERANCE_END_MS) this.isContinuousAsr = true;
ep.addCustomEventListener(DeepgramTranscriptionEvents.Connect,
this._onDeepgramConnect.bind(this, cs, ep, channel));
ep.addCustomEventListener(DeepgramTranscriptionEvents.ConnectFailure,
this._onDeepGramConnectFailure.bind(this, cs, ep, channel));
break;
case 'soniox':
this.bugname = `${this.bugname_prefix}soniox_transcribe`;
this.addCustomEventListener(ep, SonioxTranscriptionEvents.Transcription,
this.bugname = 'soniox_transcribe';
ep.addCustomEventListener(SonioxTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
break;
case 'cobalt':
this.bugname = `${this.bugname_prefix}cobalt_transcribe`;
this.addCustomEventListener(ep, CobaltTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
/* cobalt doesn't have language, it has model, which is required */
if (!this.data.recognizer.model) {
throw new Error('Cobalt requires a model to be specified');
}
this.language = this.data.recognizer.model;
/* special case: if using hints with cobalt we need to compile them */
this.hostport = opts.COBALT_SERVER_URI;
if (this.vendor === 'cobalt' && opts.COBALT_SPEECH_HINTS) {
try {
const context = await this.compileHintsForCobalt(
ep,
opts.COBALT_SERVER_URI,
this.data.recognizer.model,
opts.COBALT_CONTEXT_TOKEN,
opts.COBALT_SPEECH_HINTS
);
if (context) opts.COBALT_COMPILED_CONTEXT_DATA = context;
delete opts.COBALT_SPEECH_HINTS;
} catch (err) {
this.logger.error({err}, 'Error compiling hints for cobalt');
}
}
break;
case 'ibm':
this.bugname = `${this.bugname_prefix}ibm_transcribe`;
this.addCustomEventListener(ep, IbmTranscriptionEvents.Transcription,
this.bugname = 'ibm_transcribe';
ep.addCustomEventListener(IbmTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
this.addCustomEventListener(ep, IbmTranscriptionEvents.Connect,
this._onVendorConnect.bind(this, cs, ep));
this.addCustomEventListener(ep, IbmTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep, channel));
ep.addCustomEventListener(IbmTranscriptionEvents.Connect,
this._onIbmConnect.bind(this, cs, ep, channel));
ep.addCustomEventListener(IbmTranscriptionEvents.ConnectFailure,
this._onIbmConnectFailure.bind(this, cs, ep, channel));
break;
case 'nvidia':
this.bugname = `${this.bugname_prefix}nvidia_transcribe`;
this.addCustomEventListener(ep, NvidiaTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
this.bugname = 'nvidia_transcribe';
ep.addCustomEventListener(NvidiaTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(NvidiaTranscriptionEvents.StartOfSpeech,
this._onStartOfSpeech.bind(this, cs, ep));
ep.addCustomEventListener(NvidiaTranscriptionEvents.TranscriptionComplete,
this._onTranscriptionComplete.bind(this, cs, ep));
ep.addCustomEventListener(NvidiaTranscriptionEvents.VadDetected,
this._onVadDetected.bind(this, cs, ep));
break;
case 'assemblyai':
this.bugname = `${this.bugname_prefix}assemblyai_transcribe`;
this.addCustomEventListener(ep, AssemblyAiTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
this.addCustomEventListener(ep,
AssemblyAiTranscriptionEvents.Connect, this._onVendorConnect.bind(this, cs, ep));
this.addCustomEventListener(ep, AssemblyAiTranscriptionEvents.Error, this._onVendorError.bind(this, cs, ep));
this.addCustomEventListener(ep, AssemblyAiTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep, channel));
break;
default:
if (this.vendor.startsWith('custom:')) {
this.bugname = `${this.bugname_prefix}${this.vendor}_transcribe`;
this.addCustomEventListener(ep, JambonzTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
this.addCustomEventListener(ep, JambonzTranscriptionEvents.Connect, this._onVendorConnect.bind(this, cs, ep));
this.addCustomEventListener(ep, JambonzTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep));
this.bugname = `${this.vendor}_transcribe`;
ep.addCustomEventListener(JambonzTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(JambonzTranscriptionEvents.Connect, this._onJambonzConnect.bind(this, cs, ep));
ep.addCustomEventListener(JambonzTranscriptionEvents.ConnectFailure,
this._onJambonzConnectFailure.bind(this, cs, ep));
break;
}
else {
@@ -298,13 +254,10 @@ class TaskTranscribe extends SttTask {
}
/* common handler for all stt engine errors */
this.addCustomEventListener(ep, JambonzTranscriptionEvents.Error, this._onJambonzError.bind(this, cs, ep));
ep.addCustomEventListener(JambonzTranscriptionEvents.Error, this._onJambonzError.bind(this, cs, ep));
await ep.set(opts)
.catch((err) => this.logger.info(err, 'Error setting channel variables'));
}
async _startTranscribing(cs, ep, channel) {
await this._setSpeechHandlers(cs, ep, channel);
await this._transcribe(ep);
/* start child span for this channel */
@@ -313,176 +266,92 @@ class TaskTranscribe extends SttTask {
}
async _transcribe(ep) {
this.logger.debug(
`TaskTranscribe:_transcribe - starting transcription vendor ${this.vendor} bugname ${this.bugname}`);
await ep.startTranscription({
vendor: this.vendor,
interim: this.interim ? true : false,
locale: this.language,
channels: 1,
bugname: this.bugname,
hostport: this.hostport
channels: /*this.separateRecognitionPerChannel ? 2 : */ 1,
bugname: this.bugname
});
}
async _onTranscription(cs, ep, channel, evt, fsEvent) {
// make sure this is not a transcript from answering machine detection
const bugname = fsEvent.getHeader('media-bugname');
const finished = fsEvent.getHeader('transcription-session-finished');
const bufferedTranscripts = this._bufferedTranscripts[channel - 1];
if (bugname && this.bugname !== bugname) return;
if (this.paused) {
this.logger.debug({evt}, 'TaskTranscribe:_onTranscription - paused, ignoring transcript');
}
if (this.vendor === 'ibm' && evt?.state === 'listening') return;
if (this.vendor === 'deepgram' && evt.type === 'UtteranceEnd') {
/* we will only get this when we have set utterance_end_ms */
/* DH: send a speech event when we get UtteranceEnd if they want interim events */
if (this.interim) {
this.logger.debug('Gather:_onTranscription - got UtteranceEnd event from deepgram, sending speech event');
this._resolve(channel, evt);
}
if (bufferedTranscripts.length === 0) {
this.logger.debug('Gather:_onTranscription - got UtteranceEnd event from deepgram but no buffered transcripts');
}
else {
this.logger.debug('Gather:_onTranscription - got UtteranceEnd event from deepgram, return buffered transcript');
evt = this.consolidateTranscripts(bufferedTranscripts, channel, this.language, this.vendor);
evt.is_final = true;
this._bufferedTranscripts[channel - 1] = [];
this._resolve(channel, evt);
}
return;
if (this.vendor === 'ibm') {
if (evt?.state === 'listening') return;
}
this.logger.debug({evt}, 'TaskTranscribe:_onTranscription - before normalization');
evt = this.normalizeTranscription(evt, this.vendor, channel, this.language, undefined,
this.data.recognizer.punctuation);
evt = this.normalizeTranscription(evt, this.vendor, channel, this.language);
this.logger.debug({evt}, 'TaskTranscribe:_onTranscription');
if (evt.alternatives.length === 0) {
this.logger.info({evt}, 'TaskTranscribe:_onTranscription - got empty transcript, continue listening');
return;
}
let emptyTranscript = false;
if (evt.is_final) {
if (evt.alternatives.length === 0 || evt.alternatives[0].transcript === '' && !cs.callGone && !this.killed) {
emptyTranscript = true;
if (finished === 'true' &&
['microsoft', 'deepgram'].includes(this.vendor) &&
bufferedTranscripts.length === 0) {
this.logger.debug({evt}, 'TaskGather:_onTranscription - got empty transcript from old gather, disregarding');
return;
}
else if (this.vendor !== 'deepgram') {
this.logger.info({evt}, 'TaskGather:_onTranscription - got empty transcript, continue listening');
return;
}
else if (this.isContinuousAsr) {
this.logger.info({evt},
'TaskGather:_onTranscription - got empty deepgram transcript during continuous asr, continue listening');
return;
}
else if (this.vendor === 'deepgram' && bufferedTranscripts.length > 0) {
this.logger.info({evt},
'TaskGather:_onTranscription - got empty transcript from deepgram, return the buffered transcripts');
}
}
if (this.isContinuousAsr) {
/* append the transcript and start listening again for asrTimeout */
const t = evt.alternatives[0].transcript;
if (t) {
/* remove trailing punctuation */
if (/[,;:\.!\?]$/.test(t)) {
this.logger.debug('TaskGather:_onTranscription - removing trailing punctuation');
evt.alternatives[0].transcript = t.slice(0, -1);
}
}
this.logger.info({evt}, 'TaskGather:_onTranscription - got transcript during continuous asr');
bufferedTranscripts.push(evt);
this._startAsrTimer(channel);
/* some STT engines will keep listening after a final response, so no need to restart */
if (!['soniox', 'aws', 'microsoft', 'deepgram', 'google']
.includes(this.vendor)) this._startTranscribing(cs, ep, channel);
if (evt.alternatives[0]?.transcript === '' && !cs.callGone && !this.killed) {
if (['microsoft', 'deepgram'].includes(this.vendor)) {
this.logger.info({evt}, 'TaskTranscribe:_onTranscription - got empty transcript, continue listening');
}
else {
if (this.vendor === 'soniox') {
/* compile transcripts into one */
this._sonioxTranscripts.push(evt.vendor.finalWords);
evt = this.compileSonioxTranscripts(this._sonioxTranscripts, 1, this.language);
this._sonioxTranscripts = [];
}
else if (this.vendor === 'deepgram') {
/* compile transcripts into one */
if (!emptyTranscript) bufferedTranscripts.push(evt);
this.logger.info({evt}, 'TaskTranscribe:_onTranscription - got empty transcript, listen again');
this._transcribe(ep);
}
return;
}
/* deepgram can send an empty and final transcript; only if we have any buffered should we resolve */
if (bufferedTranscripts.length === 0) return;
evt = this.consolidateTranscripts(bufferedTranscripts, channel, this.language);
this._bufferedTranscripts[channel - 1] = [];
}
/* here is where we return a final transcript */
this.logger.debug({evt}, 'TaskTranscribe:_onTranscription - sending final transcript');
this._resolve(channel, evt);
/* some STT engines will keep listening after a final response, so no need to restart */
if (!['soniox', 'aws', 'microsoft', 'deepgram', 'google'].includes(this.vendor) &&
!this.vendor.startsWith('custom:')) {
this.logger.debug('TaskTranscribe:_onTranscription - restarting transcribe');
this._startTranscribing(cs, ep, channel);
}
if (this.vendor === 'soniox') {
/* compile transcripts into one */
this._sonioxTranscripts.push(evt.vendor.finalWords);
if (evt.is_final) {
evt = this.compileSonioxTranscripts(this._sonioxTranscripts, 1, this.language);
this._sonioxTranscripts = [];
}
}
else {
/* interim transcript */
/* deepgram can send a non-final transcript but with words that are final, so we need to buffer */
if (this.vendor === 'deepgram') {
const originalEvent = evt.vendor.evt;
if (originalEvent.is_final && evt.alternatives[0].transcript !== '') {
this.logger.debug({evt}, 'Gather:_onTranscription - buffering a completed (partial) deepgram transcript');
bufferedTranscripts.push(evt);
}
}
if (this.interim) {
this.logger.debug({evt}, 'TaskTranscribe:_onTranscription - sending interim transcript');
this._resolve(channel, evt);
}
if (this.isContinuousAsr && evt.is_final) {
this._bufferedTranscripts.push(evt);
this._startAsrTimer(channel);
} else {
await this._resolve(channel, evt);
}
}
_compileTranscripts() {
assert(this._bufferedTranscripts.length);
const evt = this._bufferedTranscripts[0];
let t = '';
for (const a of this._bufferedTranscripts) {
t += ` ${a.alternatives[0].transcript}`;
}
evt.alternatives[0].transcript = t.trim();
return evt;
}
async _resolve(channel, evt) {
if (evt.is_final) {
/* we've got a final transcript, so end the otel child span for this channel */
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
this.childSpan[channel - 1].span.setAttributes({
channel,
'stt.resolve': 'transcript',
'stt.result': JSON.stringify(evt)
});
this.childSpan[channel - 1].span.end();
}
/* we've got a transcript, so end the otel child span for this channel */
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
this.childSpan[channel - 1].span.setAttributes({
channel,
'stt.resolve': 'transcript',
'stt.result': JSON.stringify(evt)
});
this.childSpan[channel - 1].span.end();
}
if (this.transcriptionHook) {
const b3 = this.getTracingPropagation();
const httpHeaders = b3 && {b3};
const payload = {
...this.cs.callInfo,
...httpHeaders,
...(evt.alternatives && {speech: evt}),
...(evt.type && {speechEvent: evt})
};
try {
this.logger.debug({payload}, 'sending transcriptionHook');
const json = await this.cs.requestor.request('verb:hook', this.transcriptionHook, payload);
this.logger.info({json}, 'completed transcriptionHook');
const json = await this.cs.requestor.request('verb:hook', this.transcriptionHook, {
...this.cs.callInfo,
...httpHeaders,
speech: evt
});
this.logger.info({json}, 'sent transcriptionHook');
if (json && Array.isArray(json) && !this.parentTask) {
const makeTask = require('./make_task');
const tasks = normalizeJambones(this.logger, json).map((tdata) => makeTask(this.logger, tdata));
@@ -503,7 +372,7 @@ class TaskTranscribe extends SttTask {
this._clearTimer();
this.notifyTaskDone();
}
else if (evt.is_final) {
else {
/* start another child span for this channel */
const {span, ctx} = this.startChildSpan(`${STT_LISTEN_SPAN_NAME}:${channel}`);
this.childSpan[channel - 1] = {span, ctx};
@@ -511,8 +380,7 @@ class TaskTranscribe extends SttTask {
}
_onNoAudio(cs, ep, channel) {
this.logger.debug(`TaskTranscribe:_onNoAudio on channel ${channel}`);
if (this.paused) return;
this.logger.debug(`TaskTranscribe:_onNoAudio restarting transcription on channel ${channel}`);
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
this.childSpan[channel - 1].span.setAttributes({
channel,
@@ -528,8 +396,7 @@ class TaskTranscribe extends SttTask {
}
_onMaxDurationExceeded(cs, ep, channel) {
this.logger.debug(`TaskTranscribe:_onMaxDurationExceeded on channel ${channel}`);
if (this.paused) return;
this.logger.debug(`TaskTranscribe:_onMaxDurationExceeded restarting transcription on channel ${channel}`);
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
this.childSpan[channel - 1].span.setAttributes({
channel,
@@ -551,68 +418,22 @@ class TaskTranscribe extends SttTask {
this._timer = null;
}
}
async _startFallback(cs, _ep, evt) {
if (this.canFallback) {
_ep.stopTranscription({
vendor: this.vendor,
bugname: this.bugname
})
.catch((err) => this.logger.error({err}, `Error stopping transcription for primary vendor ${this.vendor}`));
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
try {
this.notifyError({ msg: 'ASR error',
details:`STT Vendor ${this.vendor} error: ${evt.error || evt.reason}`, failover: 'in progress'});
await this._initFallback();
let channel = 1;
if (this.ep !== _ep) {
channel = 2;
}
this[`_speechHandlersSet_${channel}`] = false;
this._startTranscribing(cs, _ep, channel);
updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid);
return true;
} catch (error) {
this.notifyError({ msg: 'ASR error',
details:`STT Vendor ${this.vendor} error: ${evt.error || evt.reason}`, failover: 'not available'});
this.logger.info({error}, `Error while falling back to ${this.fallbackVendor}`);
}
} else {
this.logger.debug('transcribe:_startFallback no condition for falling back');
this.notifyError({ msg: 'ASR error',
details:`STT Vendor ${this.vendor} error: ${evt.error || evt.reason}`, failover: 'not available'});
}
return false;
_onDeepgramConnect(_cs, _ep) {
this.logger.debug('TaskTranscribe:_onDeepgramConnect');
}
async _onJambonzError(cs, _ep, evt) {
if (this.vendor === 'google' && evt.error_code === 0) {
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError - ignoring google error code 0');
return;
}
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError');
if (this.paused) return;
_onDeepGramConnectFailure(cs, _ep, channel, evt) {
const {reason} = evt;
const {writeAlerts, AlertType} = cs.srf.locals;
if (this.vendor === 'nuance') {
const {code, error} = evt;
if (code === 404 && error === 'No speech') return this._resolve('timeout');
if (code === 413 && error === 'Too much speech') return this._resolve('timeout');
}
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError');
this.logger.info({evt}, 'TaskTranscribe:_onDeepgramConnectFailure');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Custom speech vendor ${this.vendor} error: ${evt.error}`,
vendor: this.vendor,
}).catch((err) => this.logger.info({err}, 'Error generating alert for jambonz custom connection failure'));
if (!(await this._startFallback(cs, _ep, evt))) {
this.notifyTaskDone();
}
}
message: `Failed connecting to Deepgram speech recognizer: ${reason}`,
vendor: 'deepgram',
}).catch((err) => this.logger.info({err}, 'Error generating alert for deepgram connection failure'));
this.notifyError(`Failed connecting to speech vendor deepgram: ${reason}`);
async _onVendorConnectFailure(cs, _ep, channel, evt) {
super._onVendorConnectFailure(cs, _ep, evt);
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
this.childSpan[channel - 1].span.setAttributes({
channel,
@@ -620,20 +441,99 @@ class TaskTranscribe extends SttTask {
});
this.childSpan[channel - 1].span.end();
}
if (!(await this._startFallback(cs, _ep, evt))) {
this.notifyTaskDone();
this.notifyTaskDone();
}
_onJambonzConnect(_cs, _ep) {
this.logger.debug('TaskTranscribe:_onJambonzConnect');
}
_onJambonzConnectFailure(cs, _ep, evt) {
const {reason} = evt;
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info({evt}, 'TaskTranscribe:_onJambonzConnectFailure');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Failed connecting to ${this.vendor} speech recognizer: ${reason}`,
vendor: this.vendor,
}).catch((err) => this.logger.info({err}, 'Error generating alert for jambonz custom connection failure'));
this.notifyError({msg: 'ASR error', details:`Failed connecting to speech vendor ${this.vendor}: ${reason}`});
this.notifyTaskDone();
}
_onIbmConnect(_cs, _ep) {
this.logger.debug('TaskTranscribe:_onIbmConnect');
}
_onIbmConnectFailure(cs, _ep, channel, evt) {
const {reason} = evt;
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info({evt}, 'TaskTranscribe:_onIbmConnectFailure');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Failed connecting to IBM watson speech recognizer: ${reason}`,
vendor: 'ibm',
}).catch((err) => this.logger.info({err}, 'Error generating alert for IBM connection failure'));
this.notifyError(`Failed connecting to speech vendor IBM: ${reason}`);
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
this.childSpan[channel - 1].span.setAttributes({
channel,
'stt.resolve': 'connection failure'
});
this.childSpan[channel - 1].span.end();
}
this.notifyTaskDone();
}
_onIbmError(cs, _ep, _channel, evt) {
this.logger.info({evt}, 'TaskTranscribe:_onIbmError');
}
async _onJambonzError(cs, _ep, evt) {
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError');
if (this.isHandledByPrimaryProvider && this.fallbackVendor) {
_ep.stopTranscription({vendor: this.vendor})
.catch((err) => this.logger.error({err}, `Error stopping transcription for primary vendor ${this.vendor}`));
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
try {
await this._fallback();
let channel = 1;
if (this.ep !== _ep) {
channel = 2;
}
this._startTranscribing(cs, _ep, channel);
updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid);
return;
} catch (error) {
this.logger.info({error}, `Error while falling back to ${this.fallbackVendor}`);
}
} else {
const {writeAlerts, AlertType} = cs.srf.locals;
if (this.vendor === 'nuance') {
const {code, error} = evt;
if (code === 404 && error === 'No speech') return this._resolve('timeout');
if (code === 413 && error === 'Too much speech') return this._resolve('timeout');
}
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Custom speech vendor ${this.vendor} error: ${evt.error}`,
vendor: this.vendor,
}).catch((err) => this.logger.info({err}, 'Error generating alert for jambonz custom connection failure'));
this.notifyError({msg: 'ASR error', details:`Custom speech vendor ${this.vendor} error: ${evt.error}`});
}
}
_startAsrTimer(channel) {
if (this.vendor === 'deepgram') return; // no need
assert(this.isContinuousAsr);
this._clearAsrTimer(channel);
this._asrTimer = setTimeout(() => {
this.logger.debug(`TaskTranscribe:_startAsrTimer - asr timer went off for channel: ${channel}`);
const evt = this.consolidateTranscripts(
this._bufferedTranscripts[channel - 1], channel, this.language, this.vendor);
this._bufferedTranscripts[channel - 1] = [];
const evt = this._compileTranscripts();
this._bufferedTranscripts = [];
this._resolve(channel, evt);
}, this.asrTimeout);
this.logger.debug(`TaskTranscribe:_startAsrTimer: set for ${this.asrTimeout}ms for channel ${channel}`);
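As a rough illustration of the recognizer options this task reads (an assumed example, not an excerpt from the repo's docs or tests), a transcribe verb exercising fallback and continuous ASR might be shaped like this; the hook URL is a placeholder:

// Hypothetical application payload; property names follow what TaskTranscribe
// reads from this.data and this.data.recognizer above.
const transcribeVerb = {
  verb: 'transcribe',
  transcriptionHook: 'https://example.com/transcripts',  // placeholder URL
  recognizer: {
    vendor: 'deepgram',
    language: 'en-US',
    fallbackVendor: 'google',          // used when the primary vendor errors
    fallbackLanguage: 'en-US',
    asrTimeout: 2,                     // seconds; > 0 enables continuous ASR buffering
    separateRecognitionPerChannel: true
  }
};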

View File

@@ -1,180 +0,0 @@
const Task = require('./task');
const { TaskPreconditions } = require('../utils/constants');
class TtsTask extends Task {
constructor(logger, data, parentTask) {
super(logger, data);
this.parentTask = parentTask;
this.preconditions = TaskPreconditions.Endpoint;
this.earlyMedia = this.data.earlyMedia === true || (parentTask && parentTask.earlyMedia);
this.synthesizer = this.data.synthesizer || {};
this.disableTtsCache = this.data.disableTtsCache;
this.options = this.synthesizer.options || {};
}
async exec(cs) {
super.exec(cs);
}
async _synthesizeWithSpecificVendor(cs, ep, {
vendor,
language,
voice,
label,
disableTtsStreaming,
preCache
}) {
const {srf, accountSid:account_sid} = cs;
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, srf);
const {writeAlerts, AlertType, stats} = srf.locals;
const {synthAudio} = srf.locals.dbHelpers;
const engine = this.synthesizer.engine || cs.synthesizer?.engine || 'neural';
const salt = cs.callSid;
let credentials = cs.getSpeechCredentials(vendor, 'tts', label);
/* parse Nuance voices into name and model */
let model;
if (vendor === 'nuance' && voice) {
const arr = /([A-Za-z-]*)\s+-\s+(enhanced|standard)/.exec(voice);
if (arr) {
voice = arr[1];
model = arr[2];
}
} else if (vendor === 'deepgram') {
model = voice;
}
/* allow for microsoft custom region voice and api_key to be specified as an override */
if (vendor === 'microsoft' && this.options.deploymentId) {
credentials = credentials || {};
credentials.use_custom_tts = true;
credentials.custom_tts_endpoint = this.options.deploymentId;
credentials.api_key = this.options.apiKey || credentials.apiKey;
credentials.region = this.options.region || credentials.region;
voice = this.options.voice || voice;
} else if (vendor === 'elevenlabs') {
credentials = credentials || {};
credentials.model_id = this.options.model_id || credentials.model_id;
credentials.voice_settings = this.options.voice_settings || {};
credentials.optimize_streaming_latency = this.options.optimize_streaming_latency
|| credentials.optimize_streaming_latency;
voice = this.options.voice_id || voice;
}
ep.set({
tts_engine: vendor,
tts_voice: voice,
cache_speech_handles: 1,
}).catch((err) => this.logger.info({err}, `${this.name}: Error setting tts_engine on endpoint`));
if (!preCache) this.logger.info({vendor, language, voice, model}, `${this.name}:exec`);
try {
if (!credentials) {
writeAlerts({
account_sid,
alert_type: AlertType.TTS_NOT_PROVISIONED,
vendor
}).catch((err) => this.logger.info({err}, 'Error generating alert for no tts'));
this.notifyError({
msg: 'TTS error',
details:`No speech credentials provisioned for selected vendor ${vendor}`
});
throw new Error('no provisioned speech credentials for TTS');
}
// synthesize all of the text elements
let lastUpdated = false;
/* produce an audio segment from the provided text */
const generateAudio = async(text) => {
if (this.killed) return;
if (text.startsWith('silence_stream://')) return text;
/* otel: trace time for tts */
if (!preCache && !this.parentTask) {
const {span} = this.startChildSpan('tts-generation', {
'tts.vendor': vendor,
'tts.language': language,
'tts.voice': voice
});
this.otelSpan = span;
}
try {
const {filePath, servedFromCache, rtt} = await synthAudio(stats, {
account_sid,
text,
vendor,
language,
voice,
engine,
model,
salt,
credentials,
options: this.options,
disableTtsCache : this.disableTtsCache,
disableTtsStreaming,
preCache
});
if (!filePath.startsWith('say:')) {
this.logger.debug(`file ${filePath}, served from cache ${servedFromCache}`);
if (filePath) cs.trackTmpFile(filePath);
if (this.otelSpan) {
this.otelSpan.setAttributes({'tts.cached': servedFromCache});
this.otelSpan.end();
this.otelSpan = null;
}
if (!servedFromCache && !lastUpdated) {
lastUpdated = true;
updateSpeechCredentialLastUsed(credentials.speech_credential_sid).catch(() => {/* logged error */});
}
if (!servedFromCache && rtt && !preCache) {
this.notifyStatus({
event: 'synthesized-audio',
vendor,
language,
characters: text.length,
elapsedTime: rtt
});
}
}
else {
this.logger.debug('a streaming tts api will be used');
const modifiedPath = filePath.replace('say:{', `say:{session-uuid=${ep.uuid},`);
return modifiedPath;
}
return filePath;
} catch (err) {
this.logger.info({err}, 'Error synthesizing tts');
if (this.otelSpan) this.otelSpan.end();
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.TTS_FAILURE,
vendor,
detail: err.message
}).catch((err) => this.logger.info({err}, 'Error generating alert for tts failure'));
this.notifyError({msg: 'TTS error', details: err.message || err});
throw err;
}
};
const arr = this.text.map((t) => (this._validateURL(t) ? t : generateAudio(t)));
return (await Promise.all(arr)).filter((fp) => fp && fp.length);
} catch (err) {
this.logger.info(err, 'TaskSay:exec error');
throw err;
}
}
_validateURL(urlString) {
try {
new URL(urlString);
return true;
} catch (e) {
return false;
}
}
}
module.exports = TtsTask;
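To illustrate the per-vendor overrides read from synthesizer.options in _synthesizeWithSpecificVendor above (an assumed example, not taken from project docs), a say verb overriding elevenlabs settings could look roughly like this:

// Hypothetical say verb; option names mirror what the code above reads from
// this.options for the elevenlabs vendor; ids and values are placeholders.
const sayVerb = {
  verb: 'say',
  text: 'Thanks for calling, how can I help?',
  synthesizer: {
    vendor: 'elevenlabs',
    language: 'en-US',
    voice: 'default',                  // overridden by options.voice_id below
    options: {
      voice_id: 'some-voice-id',
      model_id: 'some-model-id',
      optimize_streaming_latency: 2,
      voice_settings: {stability: 0.5, similarity_boost: 0.75}
    }
  }
};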

View File

@@ -1,175 +0,0 @@
const makeTask = require('../tasks/make_task');
const Emitter = require('events');
const { normalizeJambones } = require('@jambonz/verb-specifications');
const {TaskName} = require('../utils/constants');
const assert = require('assert');
/**
* ActionHookDelayProcessor
* @extends Emitter
*
* @param {Object} logger - logger instance
* @param {Object} opts - options
* @param {Object} cs - call session
* @param {Object} ep - endpoint
*
* @emits {Event} 'giveup' - when associated giveup timer expires
*
* Ref:https://www.jambonz.org/docs/supporting-articles/handling-action-hook-delays/
*/
class ActionHookDelayProcessor extends Emitter {
constructor(logger, opts, cs) {
super();
this.logger = logger;
this.cs = cs;
this._active = false;
const enabled = this.init(opts);
if (enabled && (!this.actions || !Array.isArray(this.actions) || this.actions.length === 0)) {
throw new Error('ActionHookDelayProcessor: no actions specified');
}
else if (enabled && this.actions.some((a) => !a.verb || ![TaskName.Say, TaskName.Play].includes(a.verb))) {
throw new Error(`ActionHookDelayProcessor: invalid actions specified: ${JSON.stringify(this.actions)}`);
}
}
get properties() {
return {
actions: this.actions,
retries: this.retries,
noResponseTimeout: this.noResponseTimeout,
noResponseGiveUpTimeout: this.noResponseGiveUpTimeout
};
}
get ep() {
return this.cs.ep;
}
init(opts) {
this.logger.debug({opts}, 'ActionHookDelayProcessor#init');
this.actions = opts.actions;
this.retries = opts.retries || 0;
this.noResponseTimeout = opts.noResponseTimeout || 0;
this.noResponseGiveUpTimeout = opts.noResponseGiveUpTimeout;
// return false if these options actually disable the ahdp
return ('enable' in opts && opts.enable === true) ||
('enabled' in opts && opts.enabled === true) ||
(!('enable' in opts) && !('enabled' in opts));
}
start() {
this.logger.debug('ActionHookDelayProcessor#start');
if (this._active) {
this.logger.debug('ActionHookDelayProcessor#start: already started due to prior gather which is continuing');
return;
}
assert(!this._noResponseTimer);
this._active = true;
this._retryCount = 0;
const timeoutMs = this.noResponseTimeout === 0 ? 1 : this.noResponseTimeout * 1000;
this._noResponseTimer = setTimeout(this._onNoResponseTimer.bind(this), timeoutMs);
if (this.noResponseGiveUpTimeout > 0) {
const timeoutMs = this.noResponseGiveUpTimeout * 1000;
this._noResponseGiveUpTimer = setTimeout(this._onNoResponseGiveUpTimer.bind(this), timeoutMs);
}
}
async stop() {
this.logger.debug('ActionHookDelayProcessor#stop');
this._active = false;
if (this._noResponseTimer) {
clearTimeout(this._noResponseTimer);
this._noResponseTimer = null;
}
if (this._noResponseGiveUpTimer) {
clearTimeout(this._noResponseGiveUpTimer);
this._noResponseGiveUpTimer = null;
}
if (this._taskInProgress) {
this.logger.debug(`ActionHookDelayProcessor#stop: killing task in progress: ${this._taskInProgress.name}`);
/** if we are doing a play, kill it immediately
* if we are doing a say, wait for it to finish
*/
if (TaskName.Say === this._taskInProgress.name) {
this._sayResolver = () => {
this.logger.debug('ActionHookDelayProcessor#stop: say is done, continue on..');
this._taskInProgress.kill(this.cs);
this._taskInProgress = null;
};
this.logger.debug('ActionHookDelayProcessor#stop returning promise');
return new Promise((resolve) => this._sayResolver = resolve);
}
else {
/* play */
this._taskInProgress.kill(this.cs);
this._taskInProgress = null;
}
}
this.logger.debug('ActionHookDelayProcessor#stop returning');
}
_onNoResponseTimer() {
this.logger.debug('ActionHookDelayProcessor#_onNoResponseTimer');
this._noResponseTimer = null;
/* get the next play or say action */
const verb = this.actions[this._retryCount % this.actions.length];
const t = normalizeJambones(this.logger, [verb]);
this.logger.debug({verb}, 'ActionHookDelayProcessor#_onNoResponseTimer: starting action');
try {
this._taskInProgress = makeTask(this.logger, t[0]);
this._taskInProgress.disableTracing = true;
this._taskInProgress.exec(this.cs, {ep: this.ep});
} catch (err) {
this.logger.info(err, 'ActionHookDelayProcessor#_onNoResponseTimer: error starting action');
this._taskInProgress = null;
return;
}
this.ep.once('playback-start', (evt) => {
this.logger.debug({evt}, 'got playback-start');
if (!this._active) {
this.logger.info({evt}, 'ActionHookDelayProcessor#_onNoResponseTimer: killing audio immediately');
this.ep.api('uuid_break', this.ep.uuid)
.catch((err) => this.logger.info(err,
'ActionHookDelayProcessor#_onNoResponseTimer Error killing audio'));
}
});
this.ep.once('playback-stop', (evt) => {
this._taskInProgress = null;
if (this._sayResolver) {
/* we were waiting for the play to finish before continuing to next task */
this.logger.debug({evt}, 'got playback-stop');
this._sayResolver();
this._sayResolver = null;
}
else {
/* possibly start the no response timer again */
if (this._active && this.retries > 0 && this._retryCount < this.retries && this.noResponseTimeout > 0) {
this.logger.debug({evt}, 'ActionHookDelayProcessor#_onNoResponseTimer: playback-stop on play/say action');
const timeoutMs = this.noResponseTimeout * 1000;
this._noResponseTimer = setTimeout(this._onNoResponseTimer.bind(this), timeoutMs);
}
}
});
this._retryCount++;
}
_onNoResponseGiveUpTimer() {
this._active = false;
this.logger.info('ActionHookDelayProcessor#_onNoResponseGiveUpTimer');
this.stop().catch((err) => {});
this.emit('giveup');
}
}
module.exports = ActionHookDelayProcessor;
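The options parsed by init() above map onto an actionHook delay configuration roughly like the following sketch (illustrative values; only say and play verbs are accepted as actions):

// Hypothetical options object for ActionHookDelayProcessor; keys follow init().
const ahdOpts = {
  enable: true,
  noResponseTimeout: 3,          // seconds before playing a filler action
  noResponseGiveUpTimeout: 20,   // seconds before emitting 'giveup'
  retries: 2,
  actions: [
    {verb: 'say', text: 'Please hold while we look that up.'},
    {verb: 'play', url: 'https://example.com/hold-music.mp3'}  // placeholder URL
  ]
};
// const ahdp = new ActionHookDelayProcessor(logger, ahdOpts, cs);
// ahdp.on('giveup', () => { /* e.g. end the call or redirect */ });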

View File

@@ -9,7 +9,6 @@ const {
NvidiaTranscriptionEvents,
IbmTranscriptionEvents,
SonioxTranscriptionEvents,
CobaltTranscriptionEvents,
DeepgramTranscriptionEvents,
JambonzTranscriptionEvents,
AmdEvents,
@@ -266,7 +265,7 @@ module.exports = (logger) => {
/* set stt options */
logger.info(`starting amd for vendor ${vendor} and language ${language}`);
const sttOpts = amd.setChannelVarsForStt({name: TaskName.Gather}, sttCredentials, language, {
const sttOpts = amd.setChannelVarsForStt({name: TaskName.Gather}, sttCredentials, {
vendor,
hints,
enhancedModel: true,
@@ -315,10 +314,6 @@ module.exports = (logger) => {
ep.addCustomEventListener(NvidiaTranscriptionEvents.Transcription, amd.transcriptionHandler);
break;
case 'cobalt':
ep.addCustomEventListener(CobaltTranscriptionEvents.Transcription, amd.transcriptionHandler);
break;
default:
if (vendor.startsWith('custom:')) {
ep.addCustomEventListener(JambonzTranscriptionEvents.Transcription, amd.transcriptionHandler);

View File

@@ -1,198 +0,0 @@
const { normalizeJambones } = require('@jambonz/verb-specifications');
const makeTask = require('../tasks/make_task');
const { JAMBONZ_RECORD_WS_BASE_URL, JAMBONZ_RECORD_WS_USERNAME, JAMBONZ_RECORD_WS_PASSWORD } = require('../config');
const Emitter = require('events');
class BackgroundTaskManager extends Emitter {
constructor({cs, logger, rootSpan}) {
super();
this.tasks = new Map();
this.cs = cs;
this.logger = logger;
this.rootSpan = rootSpan;
}
isTaskRunning(type) {
return this.tasks.has(type);
}
getTask(type) {
if (this.tasks.has(type)) {
return this.tasks.get(type);
}
}
count() {
return this.tasks.size;
}
async newTask(type, opts, sticky = false) {
this.logger.info({opts}, `initiating Background task ${type}`);
if (this.tasks.has(type)) {
this.logger.info(`Background task ${type} is running, skipped`);
return;
}
let task;
switch (type) {
case 'listen':
task = await this._initListen(opts);
break;
case 'bargeIn':
task = await this._initBargeIn(opts);
break;
case 'record':
task = await this._initRecord();
break;
case 'transcribe':
task = await this._initTranscribe(opts);
break;
default:
break;
}
if (task) {
this.tasks.set(type, task);
}
if (task && sticky) task.sticky = true;
return task;
}
stop(type) {
const task = this.getTask(type);
if (task) {
this.logger.info(`stopping background task: ${type}`);
task.removeAllListeners();
task.span.end();
task.kill();
// Remove task from managed List
this.tasks.delete(type);
}
}
stopAll() {
this.logger.debug('BackgroundTaskManager:stopAll');
for (const key of this.tasks.keys()) {
this.stop(key);
}
}
// Initiate Listen
async _initListen(opts, bugname = 'jambonz-background-listen', ignoreCustomerData = false, type = 'listen') {
let task;
try {
const t = normalizeJambones(this.logger, [opts]);
task = makeTask(this.logger, t[0]);
task.bugname = bugname;
task.ignoreCustomerData = ignoreCustomerData;
const resources = await this.cs._evaluatePreconditions(task);
const {span, ctx} = this.rootSpan.startChildSpan(`background-${type}:${task.summary}`);
task.span = span;
task.ctx = ctx;
task.exec(this.cs, resources)
.then(this._taskCompleted.bind(this, type, task))
.catch(this._taskError.bind(this, type, task));
} catch (err) {
this.logger.info({err, opts}, `BackgroundTaskManager:_initListen - Error creating ${bugname} task`);
}
return task;
}
// Initiate Gather
async _initBargeIn(opts) {
let task;
try {
const t = normalizeJambones(this.logger, [opts]);
task = makeTask(this.logger, t[0]);
task
.once('dtmf', this._bargeInTaskCompleted.bind(this))
.once('vad', this._bargeInTaskCompleted.bind(this))
.once('transcription', this._bargeInTaskCompleted.bind(this))
.once('timeout', this._bargeInTaskCompleted.bind(this));
const resources = await this.cs._evaluatePreconditions(task);
const {span, ctx} = this.rootSpan.startChildSpan(`background-bargeIn:${task.summary}`);
task.span = span;
task.ctx = ctx;
task.bugname_prefix = 'background_bargeIn_';
task.exec(this.cs, resources)
.then(() => {
this._taskCompleted('bargeIn', task);
if (task.sticky && !this.cs.callGone && !this.cs._stopping) {
this.logger.info('BackgroundTaskManager:_initBargeIn: restarting background bargeIn');
this._bargeInHandled = false;
this.newTask('bargeIn', opts, true);
}
return;
})
.catch(this._taskError.bind(this, 'bargeIn', task));
} catch (err) {
this.logger.info(err, 'BackgroundTaskManager:_initBargeIn - Error creating bargeIn task');
}
return task;
}
// Initiate Record
async _initRecord() {
if (this.cs.accountInfo.account.record_all_calls || this.cs.application.record_all_calls) {
if (!JAMBONZ_RECORD_WS_BASE_URL || !this.cs.accountInfo.account.bucket_credential) {
this.logger.error('_initRecord: invalid cfg - missing JAMBONZ_RECORD_WS_BASE_URL or bucket config');
return undefined;
}
const listenOpts = {
url: `${JAMBONZ_RECORD_WS_BASE_URL}/record/${this.cs.accountInfo.account.bucket_credential.vendor}`,
disableBidirectionalAudio: true,
mixType : 'stereo',
passDtmf: true
};
if (JAMBONZ_RECORD_WS_USERNAME && JAMBONZ_RECORD_WS_PASSWORD) {
listenOpts.wsAuth = {
username: JAMBONZ_RECORD_WS_USERNAME,
password: JAMBONZ_RECORD_WS_PASSWORD
};
}
this.logger.debug({listenOpts}, '_initRecord: enabling listen');
return await this._initListen({verb: 'listen', ...listenOpts}, 'jambonz-session-record', true, 'record');
}
}
// Initiate Transcribe
async _initTranscribe(opts) {
let task;
try {
const t = normalizeJambones(this.logger, [opts]);
task = makeTask(this.logger, t[0]);
const resources = await this.cs._evaluatePreconditions(task);
const {span, ctx} = this.rootSpan.startChildSpan(`background-transcribe:${task.summary}`);
task.span = span;
task.ctx = ctx;
task.bugname_prefix = 'background_transcribe_';
task.exec(this.cs, resources)
.then(this._taskCompleted.bind(this, 'transcribe', task))
.catch(this._taskError.bind(this, 'transcribe', task));
} catch (err) {
this.logger.info(err, 'BackgroundTaskManager:_initTranscribe - Error creating transcribe task');
}
return task;
}
_taskCompleted(type, task) {
this.logger.debug({type, task}, `BackgroundTaskManager:_taskCompleted: task completed, sticky: ${task.sticky}`);
task.removeAllListeners();
task.span.end();
this.tasks.delete(type);
}
_taskError(type, task, error) {
this.logger.info({type, task, error}, 'BackgroundTaskManager:_taskError: task Error');
task.removeAllListeners();
task.span.end();
this.tasks.delete(type);
}
_bargeInTaskCompleted(evt) {
if (this._bargeInHandled) return;
this._bargeInHandled = true;
this.logger.debug({evt},
'BackgroundTaskManager:_bargeInTaskCompleted on event from background bargeIn, emitting bargein-done event');
this.emit('bargeIn-done', evt);
}
}
module.exports = BackgroundTaskManager;
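A rough usage sketch for the manager above (assumed call-site shape; the gather payload and URL are illustrative):

// Hypothetical usage; the manager is constructed with a call session, logger,
// and root otel span, then background tasks are keyed by type.
const mgr = new BackgroundTaskManager({cs, logger, rootSpan});

// start a sticky background bargeIn (restarted after each completion)
await mgr.newTask('bargeIn', {
  verb: 'gather',
  input: ['speech'],
  actionHook: 'https://example.com/barge-in'   // placeholder URL
}, true);

mgr.on('bargeIn-done', (evt) => logger.info({evt}, 'caller barged in'));

// later, e.g. at session teardown
mgr.stopAll();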

View File

@@ -1,13 +1,12 @@
{
"TaskName": {
"Answer": "answer",
"Cognigy": "cognigy",
"Conference": "conference",
"Config": "config",
"Dequeue": "dequeue",
"Dial": "dial",
"Dialogflow": "dialogflow",
"Dtmf": "dtmf",
"Dub": "dub",
"Enqueue": "enqueue",
"Gather": "gather",
"Hangup": "hangup",
@@ -30,8 +29,7 @@
"Tag": "tag",
"Transcribe": "transcribe"
},
"AllowedSipRecVerbs": ["answer", "config", "gather", "transcribe", "listen", "tag"],
"AllowedConfirmSessionVerbs": ["config", "gather", "plays", "say", "tag"],
"AllowedSipRecVerbs": ["config", "gather", "transcribe", "listen"],
"CallStatus": {
"Trying": "trying",
"Ringing": "ringing",
@@ -97,15 +95,6 @@
"Transcription": "soniox_transcribe::transcription",
"Error": "soniox_transcribe::error"
},
"VerbioTranscriptionEvents": {
"Transcription": "verbio_transcribe::transcription",
"Error": "verbio_transcribe::error"
},
"CobaltTranscriptionEvents": {
"Transcription": "cobalt_speech::transcription",
"CompileContext": "cobalt_speech::compile_context_response",
"Error": "cobalt_speech::error"
},
"IbmTranscriptionEvents": {
"Transcription": "ibm_transcribe::transcription",
"ConnectFailure": "ibm_transcribe::connect_failed",
@@ -132,15 +121,6 @@
"Connect": "jambonz_transcribe::connect",
"Error": "jambonz_transcribe::error"
},
"AssemblyAiTranscriptionEvents": {
"Transcription": "assemblyai_transcribe::transcription",
"Error": "assemblyai_transcribe::error",
"ConnectFailure": "assemblyai_transcribe::connect_failed",
"Connect": "assemblyai_transcribe::connect"
},
"VadDetection": {
"Detection": "vad_detect:detection"
},
"ListenEvents": {
"Connect": "mod_audio_fork::connect",
"ConnectFailure": "mod_audio_fork::connect_failed",
@@ -178,7 +158,6 @@
"session:new",
"session:reconnect",
"session:redirect",
"session:adulting",
"call:status",
"queue:status",
"dial:confirm",

View File

@@ -27,9 +27,6 @@ WHERE pn.account_sid IS NULL
AND pn.service_provider_sid =
(SELECT service_provider_sid from accounts where account_sid = ?)
AND pn.number = ?`;
const sqlQueryGoogleCustomVoices = `SELECT *
FROM google_custom_voices
WHERE google_custom_voice_sid = ?`;
const speechMapper = (cred) => {
const {credential, ...obj} = cred;
@@ -41,7 +38,6 @@ const speechMapper = (cred) => {
const o = JSON.parse(decrypt(credential));
obj.access_key_id = o.access_key_id;
obj.secret_access_key = o.secret_access_key;
obj.role_arn = o.role_arn;
obj.aws_region = o.aws_region;
}
else if ('microsoft' === obj.vendor) {
@@ -76,8 +72,6 @@ const speechMapper = (cred) => {
else if ('deepgram' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
obj.deepgram_stt_uri = o.deepgram_stt_uri;
obj.deepgram_stt_use_tls = o.deepgram_stt_use_tls;
}
else if ('soniox' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
@@ -87,38 +81,7 @@ const speechMapper = (cred) => {
const o = JSON.parse(decrypt(credential));
obj.riva_server_uri = o.riva_server_uri;
}
else if ('cobalt' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.cobalt_server_uri = o.cobalt_server_uri;
} else if ('elevenlabs' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
obj.model_id = o.model_id;
obj.options = o.options;
} else if ('playht' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
obj.user_id = o.user_id;
obj.voice_engine = o.voice_engine;
obj.options = o.options;
} else if ('rimelabs' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
obj.model_id = o.model_id;
obj.options = o.options;
} else if ('assemblyai' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
} else if ('whisper' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
obj.model_id = o.model_id;
} else if ('verbio' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.client_id = o.client_id;
obj.client_secret = o.client_secret;
obj.engine_version = o.engine_version;
} else if (obj.vendor.startsWith('custom:')) {
else if (obj.vendor.startsWith('custom:')) {
const o = JSON.parse(decrypt(credential));
obj.auth_token = o.auth_token;
obj.custom_stt_url = o.custom_stt_url;
@@ -191,22 +154,10 @@ module.exports = (logger, srf) => {
}
};
const lookupGoogleCustomVoice = async(google_custom_voice_sid) => {
const pp = pool.promise();
try {
const [r] = await pp.query(sqlQueryGoogleCustomVoices, [google_custom_voice_sid]);
return r;
} catch (err) {
logger.error({err}, `lookupGoogleCustomVoices: Error ${google_custom_voice_sid}`);
}
};
return {
lookupAccountDetails,
updateSpeechCredentialLastUsed,
lookupCarrier,
lookupCarrierByPhoneNumber,
lookupGoogleCustomVoice
lookupCarrierByPhoneNumber
};
};

View File

@@ -1,4 +1,4 @@
const {request, getGlobalDispatcher, setGlobalDispatcher, Dispatcher, ProxyAgent, Client, Pool} = require('undici');
const {Client, Pool} = require('undici');
const parseUrl = require('parse-url');
const assert = require('assert');
const BaseRequestor = require('./base-requestor');
@@ -10,11 +10,6 @@ const {
HTTP_POOLSIZE,
HTTP_PIPELINING,
HTTP_TIMEOUT,
HTTP_PROXY_IP,
HTTP_PROXY_PORT,
HTTP_PROXY_PROTOCOL,
NODE_ENV,
HTTP_USER_AGENT_HEADER,
} = require('../config');
const toBase64 = (str) => Buffer.from(str || '', 'utf8').toString('base64');
@@ -26,15 +21,6 @@ function basicAuth(username, password) {
return {Authorization: header};
}
const defaultDispatcher = HTTP_PROXY_IP ?
new ProxyAgent(`${HTTP_PROXY_PROTOCOL}://${HTTP_PROXY_IP}${HTTP_PROXY_PORT ? `:${HTTP_PROXY_PORT}` : ''}`) :
getGlobalDispatcher();
setGlobalDispatcher(new class extends Dispatcher {
dispatch(options, handler) {
return defaultDispatcher.dispatch(options, handler);
}
}());
class HttpRequestor extends BaseRequestor {
constructor(logger, account_sid, hook, secret) {
@@ -74,18 +60,6 @@ class HttpRequestor extends BaseRequestor {
if (u.port) this.client = new Client(`${u.protocol}://${u.resource}:${u.port}`);
else this.client = new Client(`${u.protocol}://${u.resource}`);
}
if (NODE_ENV == 'test' && process.env.JAMBONES_HTTP_PROXY_IP) {
const defDispatcher =
new ProxyAgent(`${process.env.JAMBONES_HTTP_PROXY_PROTOCOL}://${process.env.JAMBONES_HTTP_PROXY_IP}${
process.env.JAMBONES_HTTP_PROXY_PORT ? `:${process.env.JAMBONES_HTTP_PROXY_PORT}` : ''}`);
setGlobalDispatcher(new class extends Dispatcher {
dispatch(options, handler) {
return defDispatcher.dispatch(options, handler);
}
}());
}
}
get baseUrl() {
@@ -117,10 +91,6 @@ class HttpRequestor extends BaseRequestor {
const url = hook.url || hook;
const method = hook.method || 'POST';
let buf = '';
httpHeaders = {
...httpHeaders,
...(HTTP_USER_AGENT_HEADER && {'user-agent' : HTTP_USER_AGENT_HEADER})
};
assert.ok(url, 'HttpRequestor:request url was not provided');
    assert.ok(['GET', 'POST'].includes(method), `HttpRequestor:request method must be 'GET' or 'POST' not ${method}`);
@@ -169,18 +139,7 @@ class HttpRequestor extends BaseRequestor {
};
const absUrl = this._isRelativeUrl(url) ? `${this.baseUrl}${url}` : url;
this.logger.debug({url, absUrl, hdrs}, 'send webhook');
const {statusCode, headers, body} = HTTP_PROXY_IP ? await request(
this.baseUrl,
{
path,
query,
method,
headers: hdrs,
...('POST' === method && {body: JSON.stringify(payload)}),
timeout: HTTP_TIMEOUT,
followRedirects: false
}
) : await client.request({
const {statusCode, headers, body} = await client.request({
path,
query,
method,

View File

@@ -8,14 +8,15 @@ const {
JAMBONES_MYSQL_CONNECTION_LIMIT,
JAMBONES_MYSQL_PORT,
JAMBONES_FREESWITCH,
JAMBONES_REDIS_HOST,
JAMBONES_REDIS_PORT,
JAMBONES_REDIS_SENTINELS,
SMPP_URL,
JAMBONES_TIME_SERIES_HOST,
JAMBONES_ESL_LISTEN_ADDRESS,
PORT,
HTTP_IP,
NODE_ENV,
} = require('../config');
const Registrar = require('@jambonz/mw-registrar');
const assert = require('assert');
function initMS(logger, wrapper, ms) {
@@ -138,8 +139,7 @@ function installSrfLocals(srf, logger) {
lookupTeamsByAccount,
lookupAccountBySid,
lookupAccountCapacitiesBySid,
lookupSmppGateways,
lookupClientByAccountAndUsername
lookupSmppGateways
} = require('@jambonz/db-helpers')({
host: JAMBONES_MYSQL_HOST,
user: JAMBONES_MYSQL_USER,
@@ -172,17 +172,19 @@ function installSrfLocals(srf, logger) {
retrieveFromSortedSet,
retrieveByPatternSortedSet,
sortedSetLength,
sortedSetPositionByPattern,
} = require('@jambonz/realtimedb-helpers')({}, logger, tracer);
const registrar = new Registrar(logger, client);
sortedSetPositionByPattern
} = require('@jambonz/realtimedb-helpers')(JAMBONES_REDIS_SENTINELS || {
host: JAMBONES_REDIS_HOST,
port: JAMBONES_REDIS_PORT || 6379
}, logger, tracer);
const {
synthAudio,
addFileToCache,
getNuanceAccessToken,
getIbmAccessToken,
getAwsAuthToken,
getVerbioAccessToken
} = require('@jambonz/speech-utils')({}, logger);
} = require('@jambonz/speech-utils')(JAMBONES_REDIS_SENTINELS || {
host: JAMBONES_REDIS_HOST,
port: JAMBONES_REDIS_PORT || 6379
}, logger, tracer);
const {
writeAlerts,
AlertType
@@ -194,8 +196,7 @@ function installSrfLocals(srf, logger) {
let localIp;
try {
// Either use the configured IP address or call ip.address() to find it
localIp = HTTP_IP || ip.address();
localIp = ip.address();
} catch (err) {
logger.error({err}, 'installSrfLocals - error detecting local ipv4 address');
}
@@ -203,7 +204,6 @@ function installSrfLocals(srf, logger) {
srf.locals = {...srf.locals,
dbHelpers: {
client,
registrar,
pool,
lookupAppByPhoneNumber,
lookupAppByRegex,
@@ -214,14 +214,11 @@ function installSrfLocals(srf, logger) {
lookupAccountBySid,
lookupAccountCapacitiesBySid,
lookupSmppGateways,
lookupClientByAccountAndUsername,
updateCallStatus,
retrieveCall,
listCalls,
deleteCall,
synthAudio,
getAwsAuthToken,
addFileToCache,
createHash,
retrieveHash,
deleteKey,
@@ -242,8 +239,7 @@ function installSrfLocals(srf, logger) {
retrieveFromSortedSet,
retrieveByPatternSortedSet,
sortedSetLength,
sortedSetPositionByPattern,
getVerbioAccessToken
sortedSetPositionByPattern
},
parentLogger: logger,
getSBC,

View File

@@ -1,18 +0,0 @@
const parseDecibels = (db) => {
if (!db) return 0;
if (typeof db === 'number') {
return db;
}
else if (typeof db === 'string') {
const match = db.match(/([+-]?\d+(\.\d+)?)\s*db/i);
if (match) {
return Math.trunc(parseFloat(match[1]));
} else {
return 0;
}
} else {
return 0;
}
};
module.exports = parseDecibels;
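A quick usage sketch of the parseDecibels helper above; the inputs are made up, and the outputs follow directly from the code:

const parseDecibels = require('./parse-decibels');  // hypothetical path
parseDecibels(3.7);        // 3.7  - numbers pass through unchanged
parseDecibels('-6.5 dB');  // -6   - matched, parsed, then truncated toward zero
parseDecibels('loud');     // 0    - anything that does not match falls back to 0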

View File

@@ -13,16 +13,9 @@ const moment = require('moment');
const stripCodecs = require('./strip-ancillary-codecs');
const RootSpan = require('./call-tracer');
const uuidv4 = require('uuid-random');
const HttpRequestor = require('./http-requestor');
const WsRequestor = require('./ws-requestor');
const {makeOpusFirst} = require('./sdp-utils');
const {
JAMBONES_USE_FREESWITCH_TIMER_FD
} = require('../config');
class SingleDialer extends Emitter {
constructor({logger, sbcAddress, target, opts, application, callInfo, accountInfo, rootSpan, startSpan, dialTask,
onHoldMusic}) {
constructor({logger, sbcAddress, target, opts, application, callInfo, accountInfo, rootSpan, startSpan, dialTask}) {
super();
assert(target.type);
@@ -45,7 +38,6 @@ class SingleDialer extends Emitter {
this.callSid = uuidv4();
this.dialTask = dialTask;
this.onHoldMusic = onHoldMusic;
this.on('callStatusChange', this._notifyCallStatusChange.bind(this));
}
@@ -84,8 +76,7 @@ class SingleDialer extends Emitter {
...(this.from.host && {'X-Preferred-From-Host': this.from.host}),
'X-Jambonz-Routing': this.target.type,
'X-Call-Sid': this.callSid,
...(this.applicationSid && {'X-Application-Sid': this.applicationSid}),
...(this.target.proxy && {'X-SIP-Proxy': this.target.proxy})
...(this.applicationSid && {'X-Application-Sid': this.applicationSid})
};
if (srf.locals.fsUUID) {
opts.headers = {
@@ -136,7 +127,6 @@ class SingleDialer extends Emitter {
this.serviceUrl = srf.locals.serviceUrl;
this.ep = await ms.createEndpoint();
this._configMsEndpoint();
this.logger.debug(`SingleDialer:exec - created endpoint ${this.ep.uuid}`);
/**
@@ -158,7 +148,7 @@ class SingleDialer extends Emitter {
Object.assign(opts, {
proxy: `sip:${this.sbcAddress}`,
localSdp: opts.opusFirst ? makeOpusFirst(this.ep.local.sdp) : this.ep.local.sdp
localSdp: this.ep.local.sdp
});
if (this.target.auth) opts.auth = this.target.auth;
inviteSpan = this.startSpan('invite', {
@@ -186,7 +176,6 @@ class SingleDialer extends Emitter {
* (a) create a logger for this call
*/
req.srf = srf;
this.req = req;
this.callInfo = new CallInfo({
direction: CallDirection.Outbound,
parentCallInfo: this.parentCallInfo,
@@ -195,10 +184,6 @@ class SingleDialer extends Emitter {
callSid: this.callSid,
traceId: this.rootSpan.traceId
});
if (this.dialTask && this.dialTask.tag !== null &&
typeof this.dialTask.tag === 'object' && !Array.isArray(this.dialTask.tag)) {
this.callInfo.customerData = this.dialTask.tag;
}
this.logger = srf.locals.parentLogger.child({
callSid: this.callSid,
parentCallSid: this.parentCallInfo.callSid,
@@ -263,7 +248,7 @@ class SingleDialer extends Emitter {
.on('modify', async(req, res) => {
try {
if (this.ep) {
if (this.dialTask && this.dialTask.isOnHoldEnabled) {
if (this.dialTask && this.dialTask.isOnHold) {
this.logger.info('dial is onhold, emit event');
this.emit('reinvite', req, res);
} else {
@@ -295,17 +280,17 @@ class SingleDialer extends Emitter {
if (err.status === 487) status.callStatus = CallStatus.NoAnswer;
else if ([486, 600].includes(err.status)) status.callStatus = CallStatus.Busy;
this.logger.info(`SingleDialer:exec outdial failure ${err.status}`);
inviteSpan?.setAttributes({'invite.status_code': err.status});
inviteSpan?.end();
inviteSpan.setAttributes({'invite.status_code': err.status});
inviteSpan.end();
}
else {
this.logger.error(err, 'SingleDialer:exec');
status.sipStatus = 500;
inviteSpan?.setAttributes({
inviteSpan.setAttributes({
'invite.status_code': 500,
'invite.err': err.message
});
inviteSpan?.end();
inviteSpan.end();
}
this.emit('callStatusChange', status);
if (this.ep) this.ep.destroy();
@@ -330,16 +315,6 @@ class SingleDialer extends Emitter {
}
}
_configMsEndpoint() {
const opts = {
...(this.onHoldMusic && {holdMusic: `shout://${this.onHoldMusic.replace(/^https?:\/\//, '')}`}),
...(JAMBONES_USE_FREESWITCH_TIMER_FD && {timer_name: 'timerfd'})
};
if (Object.keys(opts).length > 0) {
this.ep.set(opts);
}
}
/**
* Run an application on the call after answer, e.g. call screening.
* Once the application completes in some fashion, emit an 'accepted' event
@@ -351,16 +326,10 @@ class SingleDialer extends Emitter {
try {
// retrieve set of tasks
const json = await this.requestor.request('dial:confirm', confirmHook, this.callInfo.toJSON());
if (!json || (Array.isArray(json) && json.length === 0)) {
this.logger.info('SingleDialer:_executeApp: no tasks returned from confirm hook');
this.emit('accept');
return;
}
const tasks = normalizeJambones(this.logger, json).map((tdata) => makeTask(this.logger, tdata));
// verify it contains only allowed verbs
const allowedTasks = tasks.filter((task) => {
return [
TaskPreconditions.None,
TaskPreconditions.StableCall,
TaskPreconditions.Endpoint
].includes(task.preconditions);
@@ -408,45 +377,15 @@ class SingleDialer extends Emitter {
this.dlg.linkedSpanId = this.rootSpan.traceId;
const rootSpan = new RootSpan('outbound-call', this.dlg);
const newLogger = logger.child({traceId: rootSpan.traceId});
//clone application from parent call with new requestor
    //parent application will be closed if the parent hangs up
const app = {...application};
if ('WS' === app.call_hook?.method ||
app.call_hook?.url.startsWith('ws://') || app.call_hook?.url.startsWith('wss://')) {
if (app.call_hook?.url) app.call_hook.url += '/adulting';
const requestor = new WsRequestor(logger, this.accountInfo.account.account_sid,
app.call_hook, this.accountInfo.account.webhook_secret);
app.requestor = requestor;
app.notifier = requestor;
app.call_hook.method = 'WS';
}
else {
app.requestor = new HttpRequestor(logger, this.accountInfo.account.account_sid,
app.call_hook, this.accountInfo.account.webhook_secret);
if (app.call_status_hook) app.notifier = new HttpRequestor(logger,
this.accountInfo.account.account_sid, app.call_status_hook,
this.accountInfo.account.webhook_secret);
else app.notifier = {request: () => {}, close: () => {}};
}
// Replace old application with new application.
this.application = app;
const cs = new AdultingCallSession({
logger: newLogger,
singleDialer: this,
application: app,
application,
callInfo: this.callInfo,
accountInfo: this.accountInfo,
tasks,
rootSpan
});
app.requestor.request('session:adulting', '/adulting', {
...cs.callInfo.toJSON(),
parentCallInfo: this.parentCallInfo
}).catch((err) => {
newLogger.error({err}, 'doAdulting: error sending adulting request');
});
cs.req = this.req;
cs.exec().catch((err) => newLogger.error({err}, 'doAdulting: error executing session'));
return cs;
}
@@ -467,7 +406,6 @@ class SingleDialer extends Emitter {
async reAnchorMedia() {
assert(this.dlg && this.dlg.connected && !this.ep);
this.ep = await this.ms.createEndpoint({remoteSdp: this.dlg.remote.sdp});
this._configMsEndpoint();
await this.dlg.modify(this.ep.local.sdp, {
headers: {
'X-Reason': 'anchor-media'
@@ -498,12 +436,11 @@ class SingleDialer extends Emitter {
}
function placeOutdial({
logger, srf, ms, sbcAddress, target, opts, application, callInfo, accountInfo, rootSpan, startSpan, dialTask,
onHoldMusic
logger, srf, ms, sbcAddress, target, opts, application, callInfo, accountInfo, rootSpan, startSpan, dialTask
}) {
const myOpts = deepcopy(opts);
const sd = new SingleDialer({
logger, sbcAddress, target, myOpts, application, callInfo, accountInfo, rootSpan, startSpan, dialTask, onHoldMusic
logger, sbcAddress, target, myOpts, application, callInfo, accountInfo, rootSpan, startSpan, dialTask
});
sd.exec(srf, ms, myOpts);
return sd;

View File

@@ -1,58 +1,7 @@
const sdpTransform = require('sdp-transform');
const isOnhold = (sdp) => {
return sdp && (sdp.includes('a=sendonly') || sdp.includes('a=inactive'));
};
const mergeSdpMedia = (sdp1, sdp2) => {
const parsedSdp1 = sdpTransform.parse(sdp1);
const parsedSdp2 = sdpTransform.parse(sdp2);
parsedSdp1.media.push(...parsedSdp2.media);
return sdpTransform.write(parsedSdp1);
};
const getCodecPlacement = (parsedSdp, codec) => parsedSdp?.media[0]?.rtp?.findIndex((e) => e.codec === codec);
const isOpusFirst = (sdp) => {
return getCodecPlacement(sdpTransform.parse(sdp), 'opus') === 0;
};
const makeOpusFirst = (sdp) => {
const parsedSdp = sdpTransform.parse(sdp);
// Find the index of the OPUS codec
const opusIndex = getCodecPlacement(parsedSdp, 'opus');
// Move OPUS codec to the beginning
if (opusIndex > 0) {
const opusEntry = parsedSdp.media[0].rtp.splice(opusIndex, 1)[0];
parsedSdp.media[0].rtp.unshift(opusEntry);
// Also move the corresponding payload type in the "m" line
const opusPayloadType = parsedSdp.media[0].payloads.split(' ')[opusIndex];
const otherPayloadTypes = parsedSdp.media[0].payloads.split(' ').filter((pt) => pt != opusPayloadType);
parsedSdp.media[0].payloads = [opusPayloadType, ...otherPayloadTypes].join(' ');
}
return sdpTransform.write(parsedSdp);
};
const extractSdpMedia = (sdp) => {
const parsedSdp1 = sdpTransform.parse(sdp);
if (parsedSdp1.media.length > 1) {
parsedSdp1.media = [parsedSdp1.media[0]];
const parsedSdp2 = sdpTransform.parse(sdp);
parsedSdp2.media = [parsedSdp2.media[1]];
return [sdpTransform.write(parsedSdp1), sdpTransform.write(parsedSdp2)];
} else {
return [sdp, sdp];
}
};
module.exports = {
isOnhold,
mergeSdpMedia,
extractSdpMedia,
isOpusFirst,
makeOpusFirst
isOnhold
};
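As a rough illustration of the opus-reordering helpers shown above (the SDP is a made-up minimal offer, and the helpers are assumed to be in scope):

// minimal audio-only offer where PCMU (payload 0) is listed before opus (payload 111)
const offer = [
  'v=0', 'o=- 1 1 IN IP4 127.0.0.1', 's=-', 'c=IN IP4 127.0.0.1', 't=0 0',
  'm=audio 4000 RTP/AVP 0 111',
  'a=rtpmap:0 PCMU/8000',
  'a=rtpmap:111 opus/48000/2'
].join('\r\n') + '\r\n';
console.log(isOpusFirst(offer));                  // false
console.log(isOpusFirst(makeOpusFirst(offer)));   // true - m-line payloads become '111 0'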

View File

@@ -97,12 +97,8 @@ const parseSiprecPayload = (req, logger) => {
obj[`${prefix}participantstreamassoc`].forEach((ps) => {
const part = participants[ps.$.participant_id];
if (part) {
if (ps.hasOwnProperty(`${prefix}send`)) {
part.send = ps[`${prefix}send`][0];
}
if (ps.hasOwnProperty(`${prefix}recv`)) {
part.recv = ps[`${prefix}recv`][0];
}
part.send = ps[`${prefix}send`][0];
part.recv = ps[`${prefix}recv`][0];
}
});
}
@@ -113,9 +109,9 @@ const parseSiprecPayload = (req, logger) => {
obj[`${prefix}stream`].forEach((s) => {
const streamId = s.$.stream_id;
let sender;
for (const v of Object.values(participants)) {
for (const [k, v] of Object.entries(participants)) {
if (v.send === streamId) {
sender = v;
sender = k;
break;
}
}
@@ -125,15 +121,9 @@ const parseSiprecPayload = (req, logger) => {
sender.label = s[`${prefix}label`][0];
if (-1 !== ['1', 'a_leg', 'inbound', '10'].indexOf(sender.label)) {
opts.caller.aor = sender.aor;
if (-1 !== ['1', 'a_leg', 'inbound'].indexOf(sender.label)) {
opts.caller.aor = sender.aor ;
if (sender.name) opts.caller.name = sender.name;
// Remap the sdp stream based on the sender label
if (!opts.sdp1.includes(`a=label:${sender.label}`)) {
const tmp = opts.sdp1;
opts.sdp1 = opts.sdp2;
opts.sdp2 = tmp;
}
}
else {
opts.callee.aor = sender.aor ;

View File

@@ -1,6 +1,14 @@
const {
TaskName,
} = require('./constants.json');
AzureTranscriptionEvents,
GoogleTranscriptionEvents,
AwsTranscriptionEvents,
NuanceTranscriptionEvents,
DeepgramTranscriptionEvents,
SonioxTranscriptionEvents,
NvidiaTranscriptionEvents,
JambonzTranscriptionEvents
} = require('./constants');
const stickyVars = {
google: [
@@ -25,14 +33,12 @@ const stickyVars = {
'AZURE_SERVICE_ENDPOINT',
'AZURE_INITIAL_SPEECH_TIMEOUT_MS',
'AZURE_USE_OUTPUT_FORMAT_DETAILED',
'AZURE_SPEECH_SEGMENTATION_SILENCE_TIMEOUT_MS'
],
deepgram: [
'DEEPGRAM_SPEECH_KEYWORDS',
'DEEPGRAM_API_KEY',
'DEEPGRAM_SPEECH_TIER',
'DEEPGRAM_SPEECH_MODEL',
'DEEPGRAM_SPEECH_ENABLE_SMART_FORMAT',
'DEEPGRAM_SPEECH_ENABLE_AUTOMATIC_PUNCTUATION',
'DEEPGRAM_SPEECH_PROFANITY_FILTER',
'DEEPGRAM_SPEECH_REDACT',
@@ -43,7 +49,6 @@ const stickyVars = {
'DEEPGRAM_SPEECH_SEARCH',
'DEEPGRAM_SPEECH_REPLACE',
'DEEPGRAM_SPEECH_ENDPOINTING',
'DEEPGRAM_SPEECH_UTTERANCE_END_MS',
'DEEPGRAM_SPEECH_VAD_TURNOFF',
'DEEPGRAM_SPEECH_TAG'
],
@@ -87,116 +92,12 @@ const stickyVars = {
nvidia: [
'NVIDIA_HINTS'
],
cobalt: [
'COBALT_SPEECH_HINTS',
'COBALT_COMPILED_CONTEXT_DATA',
'COBALT_METADATA'
],
soniox: [
'SONIOX_PROFANITY_FILTER',
'SONIOX_MODEL'
],
assemblyai: [
'ASSEMBLYAI_API_KEY',
'ASSEMBLYAI_WORD_BOOST'
]
};
/**
* @see https://developers.deepgram.com/docs/models-languages-overview
*/
const optimalDeepramModels = {
zh: ['base', 'base'],
'zh-CN':['base', 'base'],
'zh-TW': ['base', 'base'],
da: ['enhanced', 'enhanced'],
en: ['nova-2-phonecall', 'nova-2'],
'en-US': ['nova-2-phonecall', 'nova-2'],
'en-AU': ['nova-2', 'nova-2'],
'en-GB': ['nova-2', 'nova-2'],
'en-IN': ['nova-2', 'nova-2'],
'en-NZ': ['nova-2', 'nova-2'],
nl: ['nova-2', 'nova-2'],
fr: ['nova-2', 'nova-2'],
'fr-CA': ['nova-2', 'nova-2'],
de: ['nova-2', 'nova-2'],
hi: ['nova-2', 'nova-2'],
'hi-Latn': ['nova-2', 'nova-2'],
id: ['base', 'base'],
it: ['nova-2', 'nova-2'],
ja: ['enhanced', 'enhanced'],
ko: ['nova-2', 'nova-2'],
no: ['nova-2', 'nova-2'],
pl: ['nova-2', 'nova-2'],
pt: ['nova-2', 'nova-2'],
'pt-BR': ['nova-2', 'nova-2'],
'pt-PT': ['nova-2', 'nova-2'],
ru: ['nova-2', 'nova-2'],
es: ['nova-2', 'nova-2'],
'es-419': ['nova-2', 'nova-2'],
'es-LATAM': ['enhanced', 'enhanced'],
sv: ['nova-2', 'nova-2'],
ta: ['enhanced', 'enhanced'],
taq: ['enhanced', 'enhanced'],
tr: ['nova-2', 'nova-2'],
uk: ['nova-2', 'nova-2']
};
const selectDefaultDeepgramModel = (task, language) => {
if (language in optimalDeepramModels) {
const [gather, transcribe] = optimalDeepramModels[language];
return task.name === TaskName.Gather ? gather : transcribe;
}
return 'base';
};
const consolidateTranscripts = (bufferedTranscripts, channel, language, vendor) => {
if (bufferedTranscripts.length === 1) return bufferedTranscripts[0];
let totalConfidence = 0;
const finalTranscript = bufferedTranscripts.reduce((acc, evt) => {
totalConfidence += evt.alternatives[0].confidence;
let newTranscript = evt.alternatives[0].transcript;
// If new transcript consists only of digits, spaces, and a trailing comma or period
if (newTranscript.match(/^[\d\s]+[,.]?$/)) {
newTranscript = newTranscript.replace(/\s/g, ''); // Remove all spaces
if (newTranscript.endsWith(',')) {
newTranscript = newTranscript.slice(0, -1); // Remove the trailing comma
} else if (newTranscript.endsWith('.')) {
newTranscript = newTranscript.slice(0, -1); // Remove the trailing period
}
}
const lastChar = acc.alternatives[0].transcript.slice(-1);
const firstChar = newTranscript.charAt(0);
if (lastChar.match(/\d/) && firstChar.match(/\d/)) {
acc.alternatives[0].transcript += newTranscript;
} else {
acc.alternatives[0].transcript += ` ${newTranscript}`;
}
return acc;
}, {
language_code: language,
channel_tag: channel,
is_final: true,
alternatives: [{
transcript: ''
}]
});
finalTranscript.alternatives[0].confidence = bufferedTranscripts.length === 1 ?
bufferedTranscripts[0].alternatives[0].confidence :
totalConfidence / bufferedTranscripts.length;
finalTranscript.alternatives[0].transcript = finalTranscript.alternatives[0].transcript.trim();
finalTranscript.vendor = {
name: vendor,
evt: bufferedTranscripts
};
return finalTranscript;
};
const compileSonioxTranscripts = (finalWordChunks, channel, language) => {
const words = finalWordChunks.flat();
const transcript = words.reduce((acc, word) => {
@@ -254,7 +155,7 @@ const normalizeSoniox = (evt, channel, language) => {
};
};
const normalizeDeepgram = (evt, channel, language, shortUtterance) => {
const normalizeDeepgram = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
const alternatives = (evt.channel?.alternatives || [])
.map((alt) => ({
@@ -262,15 +163,11 @@ const normalizeDeepgram = (evt, channel, language, shortUtterance) => {
transcript: alt.transcript,
}));
/**
* note difference between is_final and speech_final in Deepgram:
* https://developers.deepgram.com/docs/understand-endpointing-interim-results
*/
return {
language_code: language,
channel_tag: channel,
is_final: shortUtterance ? evt.is_final : evt.speech_final,
alternatives: alternatives.length ? [alternatives[0]] : [],
is_final: evt.is_final,
alternatives: [alternatives[0]],
vendor: {
name: 'deepgram',
evt: copy
@@ -328,26 +225,6 @@ const normalizeGoogle = (evt, channel, language) => {
};
};
const normalizeCobalt = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
const alternatives = (evt.alternatives || [])
.map((alt) => ({
confidence: alt.confidence,
transcript: alt.transcript_formatted,
}));
return {
language_code: language,
channel_tag: channel,
is_final: evt.is_final,
alternatives,
vendor: {
name: 'cobalt',
evt: copy
}
};
};
const normalizeCustom = (evt, channel, language, vendor) => {
const copy = JSON.parse(JSON.stringify(evt));
return {
@@ -376,34 +253,19 @@ const normalizeNuance = (evt, channel, language) => {
};
};
const normalizeVerbio = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
return {
language_code: language,
channel_tag: channel,
is_final: evt.is_final,
alternatives: evt.alternatives,
vendor: {
name: 'verbio',
evt: copy
}
};
};
const normalizeMicrosoft = (evt, channel, language, punctuation = true) => {
const normalizeMicrosoft = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
const nbest = evt.NBest;
const language_code = evt.PrimaryLanguage?.Language || language;
const alternatives = nbest ? nbest.map((n) => {
return {
confidence: n.Confidence,
// remove all punctuation if needed
transcript: punctuation ? n.Display : n.Display.replace(/\p{P}/gu, '')
transcript: n.Display
};
}) :
[
{
transcript: punctuation ? evt.DisplayText || evt.Text : (evt.DisplayText || evt.Text).replace(/\p{P}/gu, '')
transcript: evt.DisplayText || evt.Text
}
];
@@ -433,34 +295,16 @@ const normalizeAws = (evt, channel, language) => {
};
};
const normalizeAssemblyAi = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
return {
language_code: language,
channel_tag: channel,
is_final: evt.message_type === 'FinalTranscript',
alternatives: [
{
confidence: evt.confidence,
transcript: evt.text,
}
],
vendor: {
name: 'ASSEMBLYAI',
evt: copy
}
};
};
module.exports = (logger) => {
const normalizeTranscription = (evt, vendor, channel, language, shortUtterance, punctuation) => {
const normalizeTranscription = (evt, vendor, channel, language) => {
//logger.debug({ evt, vendor, channel, language }, 'normalizeTranscription');
switch (vendor) {
case 'deepgram':
return normalizeDeepgram(evt, channel, language, shortUtterance);
return normalizeDeepgram(evt, channel, language);
case 'microsoft':
return normalizeMicrosoft(evt, channel, language, punctuation);
return normalizeMicrosoft(evt, channel, language);
case 'google':
return normalizeGoogle(evt, channel, language);
case 'aws':
@@ -473,12 +317,6 @@ module.exports = (logger) => {
return normalizeNvidia(evt, channel, language);
case 'soniox':
return normalizeSoniox(evt, channel, language);
case 'cobalt':
return normalizeCobalt(evt, channel, language);
case 'assemblyai':
return normalizeAssemblyAi(evt, channel, language, shortUtterance);
case 'verbio':
return normalizeVerbio(evt, channel, language);
default:
if (vendor.startsWith('custom:')) {
return normalizeCustom(evt, channel, language, vendor);
@@ -488,15 +326,22 @@ module.exports = (logger) => {
}
};
const setChannelVarsForStt = (task, sttCredentials, language, rOpts = {}) => {
const setChannelVarsForStt = (task, sttCredentials, rOpts = {}) => {
let opts = {};
const {enable, voiceMs = 0, mode = -1} = rOpts.vad || {};
const vad = {enable, voiceMs, mode};
const vendor = rOpts.vendor;
/* voice activity detection works across vendors */
opts = {
...opts,
...(vad.enable && {START_RECOGNIZING_ON_VAD: 1}),
...(vad.enable && vad.voiceMs && {RECOGNIZER_VAD_VOICE_MS: vad.voiceMs}),
...(vad.enable && typeof vad.mode === 'number' && {RECOGNIZER_VAD_MODE: vad.mode}),
};
if ('google' === vendor) {
const useV2 = rOpts.googleOptions?.serviceVersion === 'v2';
const model = task.name === TaskName.Gather ?
(useV2 ? 'telephony_short' : 'command_and_search') :
(useV2 ? 'long' : 'latest_long');
const model = task.name === TaskName.Gather ? 'command_and_search' : 'latest_long';
opts = {
...opts,
...(sttCredentials && {GOOGLE_APPLICATION_CREDENTIALS: JSON.stringify(sttCredentials.credentials)}),
@@ -521,34 +366,13 @@ module.exports = (logger) => {
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'object' &&
{GOOGLE_SPEECH_HINTS: JSON.stringify(rOpts.hints)}),
...(typeof rOpts.hintsBoost === 'number' && {GOOGLE_SPEECH_HINTS_BOOST: rOpts.hintsBoost}),
// When altLanguages is an empty list, we have to send a value to freeswitch to clear the previous settings
...(rOpts.altLanguages &&
...(rOpts.altLanguages?.length > 0 &&
{GOOGLE_SPEECH_ALTERNATIVE_LANGUAGE_CODES: [...new Set(rOpts.altLanguages)].join(',')}),
...(rOpts.interactionType &&
{GOOGLE_SPEECH_METADATA_INTERACTION_TYPE: rOpts.interactionType}),
...{GOOGLE_SPEECH_MODEL: rOpts.model || model},
...(rOpts.naicsCode > 0 && {GOOGLE_SPEECH_METADATA_INDUSTRY_NAICS_CODE: rOpts.naicsCode}),
GOOGLE_SPEECH_METADATA_RECORDING_DEVICE_TYPE: 'phone_line',
...(useV2 && {
GOOGLE_SPEECH_RECOGNIZER_PARENT: `projects/${sttCredentials.credentials.project_id}/locations/global`,
GOOGLE_SPEECH_CLOUD_SERVICES_VERSION: 'v2',
...(rOpts.googleOptions?.speechStartTimeoutMs && {
GOOGLE_SPEECH_START_TIMEOUT_MS: rOpts.googleOptions.speechStartTimeoutMs
}),
...(rOpts.googleOptions?.speechEndTimeoutMs && {
GOOGLE_SPEECH_END_TIMEOUT_MS: rOpts.googleOptions.speechEndTimeoutMs
}),
...(rOpts.googleOptions?.transcriptNormalization && {
GOOGLE_SPEECH_TRANSCRIPTION_NORMALIZATION: JSON.stringify(rOpts.googleOptions.transcriptNormalization)
}),
...(rOpts.googleOptions?.enableVoiceActivityEvents && {
GOOGLE_SPEECH_ENABLE_VOICE_ACTIVITY_EVENTS: rOpts.googleOptions.enableVoiceActivityEvents
}),
...(rOpts.sgoogleOptions?.recognizerId) && {GOOGLE_SPEECH_RECOGNIZER_ID: rOpts.googleOptions.recognizerId},
...(rOpts.googleOptions?.enableVoiceActivityEvents && {
GOOGLE_SPEECH_ENABLE_VOICE_ACTIVITY_EVENTS: rOpts.googleOptions.enableVoiceActivityEvents
}),
}),
};
}
else if (['aws', 'polly'].includes(vendor)) {
@@ -558,23 +382,20 @@ module.exports = (logger) => {
...(rOpts.vocabularyFilterName && {AWS_VOCABULARY_FILTER_NAME: rOpts.vocabularyFilterName}),
...(rOpts.filterMethod && {AWS_VOCABULARY_FILTER_METHOD: rOpts.filterMethod}),
...(sttCredentials && {
...(sttCredentials.accessKeyId && {AWS_ACCESS_KEY_ID: sttCredentials.accessKeyId}),
...(sttCredentials.secretAccessKey && {AWS_SECRET_ACCESS_KEY: sttCredentials.secretAccessKey}),
AWS_REGION: sttCredentials.region,
...(sttCredentials.sessionToken && {AWS_SESSION_TOKEN: sttCredentials.sessionToken}),
AWS_ACCESS_KEY_ID: sttCredentials.accessKeyId,
AWS_SECRET_ACCESS_KEY: sttCredentials.secretAccessKey,
AWS_REGION: sttCredentials.region
}),
};
}
else if ('microsoft' === vendor) {
const {azureOptions = {}} = rOpts;
opts = {
...opts,
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'string' &&
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
{AZURE_SPEECH_HINTS: rOpts.hints.map((h) => h.trim()).join(',')}),
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'object' &&
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
{AZURE_SPEECH_HINTS: rOpts.hints.map((h) => h.phrase).join(',')}),
// When altLanguages is an empty list, we have to send a value to freeswitch to clear the previous settings
...(rOpts.altLanguages &&
...(rOpts.altLanguages && rOpts.altLanguages.length > 0 &&
{AZURE_SPEECH_ALTERNATIVE_LANGUAGE_CODES: [...new Set(rOpts.altLanguages)].join(',')}),
...(rOpts.requestSnr && {AZURE_REQUEST_SNR: 1}),
...(rOpts.profanityOption && {AZURE_PROFANITY_OPTION: rOpts.profanityOption}),
@@ -586,19 +407,12 @@ module.exports = (logger) => {
...(rOpts.requestSnr && {AZURE_REQUEST_SNR: 1}),
...(rOpts.audioLogging && {AZURE_AUDIO_LOGGING: 1}),
...{AZURE_USE_OUTPUT_FORMAT_DETAILED: 1},
...(azureOptions.speechSegmentationSilenceTimeoutMs &&
{AZURE_SPEECH_SEGMENTATION_SILENCE_TIMEOUT_MS: azureOptions.speechSegmentationSilenceTimeoutMs}),
...(azureOptions.languageIdMode &&
{AZURE_LANGUAGE_ID_MODE: azureOptions.languageIdMode}),
...(sttCredentials && {
...(sttCredentials.api_key && {AZURE_SUBSCRIPTION_KEY: sttCredentials.api_key}),
...(sttCredentials.region && {AZURE_REGION: sttCredentials.region}),
}),
...(sttCredentials.use_custom_stt && sttCredentials.custom_stt_endpoint &&
{AZURE_SERVICE_ENDPOINT_ID: sttCredentials.custom_stt_endpoint}),
//azureSttEndpointId overrides sttCredentials.custom_stt_endpoint
...(rOpts.azureSttEndpointId &&
{AZURE_SERVICE_ENDPOINT_ID: rOpts.azureSttEndpointId}),
};
}
else if ('nuance' === vendor) {
@@ -650,32 +464,21 @@ module.exports = (logger) => {
};
}
else if ('deepgram' === vendor) {
let {model} = rOpts;
const {deepgramOptions = {}} = rOpts;
const deepgramUri = deepgramOptions.deepgramSttUri || sttCredentials.deepgram_stt_uri;
const useTls = deepgramOptions.deepgramSttUseTls || sttCredentials.deepgram_stt_use_tls;
/* default to a sensible model if not supplied */
if (!model) {
model = selectDefaultDeepgramModel(task, language);
}
opts = {
...opts,
DEEPGRAM_SPEECH_MODEL: model,
...(deepgramUri && {DEEPGRAM_URI: deepgramUri}),
...(deepgramUri && useTls && {DEEPGRAM_USE_TLS: 1}),
...(sttCredentials.api_key) &&
{DEEPGRAM_API_KEY: sttCredentials.api_key},
...(deepgramOptions.tier) &&
{DEEPGRAM_SPEECH_TIER: deepgramOptions.tier},
...(deepgramOptions.model) &&
{DEEPGRAM_SPEECH_MODEL: deepgramOptions.model},
...(deepgramOptions.punctuate) &&
{DEEPGRAM_SPEECH_ENABLE_AUTOMATIC_PUNCTUATION: 1},
...(deepgramOptions.smartFormatting) &&
{DEEPGRAM_SPEECH_ENABLE_SMART_FORMAT: 1},
...(deepgramOptions.profanityFilter) &&
{DEEPGRAM_SPEECH_PROFANITY_FILTER: 1},
...(deepgramOptions.redact) &&
{DEEPGRAM_SPEECH_REDACT: deepgramOptions.redact},
{DEEPGRAM_SPEECH_REDACT: 1},
...(deepgramOptions.diarize) &&
{DEEPGRAM_SPEECH_DIARIZE: 1},
...(deepgramOptions.diarizeVersion) &&
@@ -690,18 +493,14 @@ module.exports = (logger) => {
{DEEPGRAM_SPEECH_SEARCH: deepgramOptions.search.join(',')},
...(deepgramOptions.replace) &&
{DEEPGRAM_SPEECH_REPLACE: deepgramOptions.replace.join(',')},
...(rOpts.hints && rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
{DEEPGRAM_SPEECH_KEYWORDS: rOpts.hints.map((h) => h.trim()).join(',')}),
...(rOpts.hints && rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
{DEEPGRAM_SPEECH_KEYWORDS: rOpts.hints.map((h) => h.phrase).join(',')}),
...(deepgramOptions.keywords) &&
{DEEPGRAM_SPEECH_KEYWORDS: deepgramOptions.keywords.join(',')},
...('endpointing' in deepgramOptions) &&
{DEEPGRAM_SPEECH_ENDPOINTING: deepgramOptions.endpointing === false ? 'false' : deepgramOptions.endpointing,
// default DEEPGRAM_SPEECH_UTTERANCE_END_MS is 1000; it will be overridden by user settings later if provided.
DEEPGRAM_SPEECH_UTTERANCE_END_MS: 1000},
...(deepgramOptions.utteranceEndMs) &&
{DEEPGRAM_SPEECH_UTTERANCE_END_MS: deepgramOptions.utteranceEndMs},
{DEEPGRAM_SPEECH_ENDPOINTING: deepgramOptions.endpointing},
...(deepgramOptions.vadTurnoff) &&
{DEEPGRAM_SPEECH_VAD_TURNOFF: deepgramOptions.vadTurnoff},
...(deepgramOptions.tag) &&
@@ -715,9 +514,9 @@ module.exports = (logger) => {
...opts,
...(sttCredentials.api_key) &&
{SONIOX_API_KEY: sttCredentials.api_key},
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'string' &&
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
{SONIOX_HINTS: rOpts.hints.join(',')}),
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'object' &&
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
{SONIOX_HINTS: JSON.stringify(rOpts.hints)}),
...(typeof rOpts.hintsBoost === 'number' &&
{SONIOX_HINTS_BOOST: rOpts.hintsBoost}),
@@ -775,9 +574,9 @@ module.exports = (logger) => {
...(rOpts.diarization && rOpts.diarizationMaxSpeakers > 0 &&
{NVIDIA_DIARIZATION_SPEAKER_COUNT: rOpts.diarizationMaxSpeakers}),
...(rOpts.separateRecognitionPerChannel && {NVIDIA_SEPARATE_RECOGNITION_PER_CHANNEL: 1}),
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'string' &&
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
{NVIDIA_HINTS: rOpts.hints.join(',')}),
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'object' &&
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
{NVIDIA_HINTS: JSON.stringify(rOpts.hints)}),
...(typeof rOpts.hintsBoost === 'number' &&
{NVIDIA_HINTS_BOOST: rOpts.hintsBoost}),
@@ -785,67 +584,21 @@ module.exports = (logger) => {
{NVIDIA_CUSTOM_CONFIGURATION: JSON.stringify(nvidiaOptions.customConfiguration)}),
};
}
else if ('cobalt' === vendor) {
const {cobaltOptions = {}} = rOpts;
const cobaltUri = cobaltOptions.serverUri || sttCredentials.cobalt_server_uri;
opts = {
...opts,
...(rOpts.words && {COBALT_WORD_TIME_OFFSETS: 1}),
...(!rOpts.words && {COBALT_WORD_TIME_OFFSETS: 0}),
...(rOpts.model && {COBALT_MODEL: rOpts.model}),
...(cobaltUri && {COBALT_SERVER_URI: cobaltUri}),
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'string' &&
{COBALT_SPEECH_HINTS: rOpts.hints.join(',')}),
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'object' &&
{COBALT_SPEECH_HINTS: JSON.stringify(rOpts.hints)}),
...(rOpts.hints?.length > 0 &&
{COBALT_CONTEXT_TOKEN: cobaltOptions.contextToken || 'unk:default'}),
...(cobaltOptions.metadata && {COBALT_METADATA: cobaltOptions.metadata}),
...(cobaltOptions.enableConfusionNetwork && {COBALT_ENABLE_CONFUSION_NETWORK: 1}),
...(cobaltOptions.compiledContextData && {COBALT_COMPILED_CONTEXT_DATA: cobaltOptions.compiledContextData}),
};
} else if ('assemblyai' === vendor) {
opts = {
...opts,
...(sttCredentials.api_key) &&
{ASSEMBLYAI_API_KEY: sttCredentials.api_key},
...(rOpts.hints?.length > 0 &&
{ASSEMBLYAI_WORD_BOOST: JSON.stringify(rOpts.hints)})
};
} else if ('verbio' === vendor) {
const {verbioOptions = {}} = rOpts;
opts = {
...opts,
...(sttCredentials.access_token && { VERBIO_ACCESS_TOKEN: sttCredentials.access_token}),
...(sttCredentials.engine_version && {VERBIO_ENGINE_VERSION: sttCredentials.engine_version}),
...(language && {VERBIO_LANGUAGE: language}),
...(verbioOptions.enable_formatting && {VERBIO_ENABLE_FORMATTING: verbioOptions.enable_formatting}),
...(verbioOptions.enable_diarization && {VERBIO_ENABLE_DIARIZATION: verbioOptions.enable_diarization}),
...(verbioOptions.topic && {VERBIO_TOPIC: verbioOptions.topic}),
...(verbioOptions.inline_grammar && {VERBIO_INLINE_GRAMMAR: verbioOptions.inline_grammar}),
...(verbioOptions.grammar_uri && {VERBIO_GRAMMAR_URI: verbioOptions.grammar_uri}),
...(verbioOptions.label && {VERBIO_LABEL: verbioOptions.label}),
...(verbioOptions.recognition_timeout && {VERBIO_RECOGNITION_TIMEOUT: verbioOptions.recognition_timeout}),
...(verbioOptions.speech_complete_timeout &&
{VERBIO_SPEECH_COMPLETE_TIMEOUT: verbioOptions.speech_complete_timeout}),
...(verbioOptions.speech_incomplete_timeout &&
{VERBIO_SPEECH_INCOMPLETE_TIMEOUT: verbioOptions.speech_incomplete_timeout}),
};
} else if (vendor.startsWith('custom:')) {
else if (vendor.startsWith('custom:')) {
let {options = {}} = rOpts;
const {auth_token, custom_stt_url} = sttCredentials;
options = {
...options,
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'string' &&
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
{hints: rOpts.hints}),
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'object' &&
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
{hints: JSON.stringify(rOpts.hints)}),
...(typeof rOpts.hintsBoost === 'number' && {hintsBoost: rOpts.hintsBoost})
};
opts = {
...opts,
...(auth_token && {JAMBONZ_STT_API_KEY: auth_token}),
JAMBONZ_STT_API_KEY: auth_token,
JAMBONZ_STT_URL: custom_stt_url,
...(Object.keys(options).length > 0 && {JAMBONZ_STT_OPTIONS: JSON.stringify(options)}),
};
@@ -857,6 +610,41 @@ module.exports = (logger) => {
return opts;
};
const removeSpeechListeners = (ep) => {
ep.removeCustomEventListener(GoogleTranscriptionEvents.Transcription);
ep.removeCustomEventListener(GoogleTranscriptionEvents.EndOfUtterance);
ep.removeCustomEventListener(GoogleTranscriptionEvents.VadDetected);
ep.removeCustomEventListener(AwsTranscriptionEvents.Transcription);
ep.removeCustomEventListener(AwsTranscriptionEvents.VadDetected);
ep.removeCustomEventListener(AzureTranscriptionEvents.Transcription);
ep.removeCustomEventListener(AzureTranscriptionEvents.NoSpeechDetected);
ep.removeCustomEventListener(AzureTranscriptionEvents.VadDetected);
ep.removeCustomEventListener(NuanceTranscriptionEvents.Transcription);
ep.removeCustomEventListener(NuanceTranscriptionEvents.TranscriptionComplete);
ep.removeCustomEventListener(NuanceTranscriptionEvents.StartOfSpeech);
ep.removeCustomEventListener(NuanceTranscriptionEvents.VadDetected);
ep.removeCustomEventListener(DeepgramTranscriptionEvents.Transcription);
ep.removeCustomEventListener(DeepgramTranscriptionEvents.Connect);
ep.removeCustomEventListener(DeepgramTranscriptionEvents.ConnectFailure);
ep.removeCustomEventListener(SonioxTranscriptionEvents.Transcription);
ep.removeCustomEventListener(NvidiaTranscriptionEvents.Transcription);
ep.removeCustomEventListener(NvidiaTranscriptionEvents.TranscriptionComplete);
ep.removeCustomEventListener(NvidiaTranscriptionEvents.StartOfSpeech);
ep.removeCustomEventListener(NvidiaTranscriptionEvents.VadDetected);
ep.removeCustomEventListener(JambonzTranscriptionEvents.Transcription);
ep.removeCustomEventListener(JambonzTranscriptionEvents.Connect);
ep.removeCustomEventListener(JambonzTranscriptionEvents.ConnectFailure);
ep.removeCustomEventListener(JambonzTranscriptionEvents.Error);
};
const setSpeechCredentialsAtRuntime = (recognizer) => {
if (!recognizer) return;
if (recognizer.vendor === 'nuance') {
@@ -876,10 +664,6 @@ module.exports = (logger) => {
const {apiKey} = recognizer.sonioxOptions || {};
if (apiKey) return {api_key: apiKey};
}
else if (recognizer.vendor === 'cobalt') {
const {serverUri} = recognizer.cobaltOptions || {};
if (serverUri) return {cobalt_server_uri: serverUri};
}
else if (recognizer.vendor === 'ibm') {
const {ttsApiKey, ttsRegion, sttApiKey, sttRegion, instanceId} = recognizer.ibmOptions || {};
if (ttsApiKey || sttApiKey) return {
@@ -895,8 +679,8 @@ module.exports = (logger) => {
return {
normalizeTranscription,
setChannelVarsForStt,
removeSpeechListeners,
setSpeechCredentialsAtRuntime,
compileSonioxTranscripts,
consolidateTranscripts
compileSonioxTranscripts
};
};
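To make the normalized event shape concrete, here is a rough sketch of calling normalizeTranscription with a hand-built Deepgram-style event; the event fields are assumed from normalizeDeepgram above (not from vendor docs) and the module path is hypothetical:

const {normalizeTranscription} = require('./transcription-utils')(logger);  // hypothetical path and logger
const evt = {
  is_final: true,
  speech_final: true,
  channel: {alternatives: [{confidence: 0.92, transcript: 'hello world'}]}
};
const normalized = normalizeTranscription(evt, 'deepgram', 1, 'en-US');
// => {language_code: 'en-US', channel_tag: 1, is_final: true,
//     alternatives: [{confidence: 0.92, transcript: 'hello world'}],
//     vendor: {name: 'deepgram', evt: {...}}}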

View File

@@ -9,8 +9,7 @@ const {
JAMBONES_WS_PING_INTERVAL_MS,
MAX_RECONNECTS,
JAMBONES_WS_HANDSHAKE_TIMEOUT_MS,
JAMBONES_WS_MAX_PAYLOAD,
HTTP_USER_AGENT_HEADER
JAMBONES_WS_MAX_PAYLOAD
} = require('../config');
class WsRequestor extends BaseRequestor {
@@ -56,12 +55,6 @@ class WsRequestor extends BaseRequestor {
}
if (type === 'session:new') this.call_sid = params.callSid;
if (type === 'session:reconnect') {
this._reconnectPromise = new Promise((resolve, reject) => {
this._reconnectResolve = resolve;
this._reconnectReject = reject;
});
}
/* if we have an absolute url, and it is http then do a standard webhook */
if (this._isAbsoluteUrl(url) && url.startsWith('http')) {
@@ -77,23 +70,20 @@ class WsRequestor extends BaseRequestor {
}
/* connect if necessary */
const queueMsg = () => {
this.logger.debug(
`WsRequestor:request(${this.id}) - queueing ${type} message since we are connecting`);
if (wantsAck) {
const p = new Promise((resolve, reject) => {
this.queuedMsg.push({type, hook, params, httpHeaders, promise: {resolve, reject}});
});
return p;
}
else {
this.queuedMsg.push({type, hook, params, httpHeaders});
}
return;
};
if (!this.ws) {
if (this.connectInProgress) {
return queueMsg();
this.logger.debug(
`WsRequestor:request(${this.id}) - queueing ${type} message since we are connecting`);
if (wantsAck) {
const p = new Promise((resolve, reject) => {
this.queuedMsg.push({type, hook, params, httpHeaders, promise: {resolve, reject}});
});
return p;
}
else {
this.queuedMsg.push({type, hook, params, httpHeaders});
}
return;
}
this.connectInProgress = true;
this.logger.debug(`WsRequestor:request(${this.id}) - connecting since we do not have a connection for ${type}`);
@@ -111,10 +101,6 @@ class WsRequestor extends BaseRequestor {
return Promise.reject(err);
}
}
// If jambonz is waiting for the ack to session:reconnect, queue the msg until the reconnect is acked
if (type !== 'session:reconnect' && this._reconnectPromise) {
return queueMsg();
}
assert(this.ws);
/* prepare and send message */
@@ -132,7 +118,7 @@ class WsRequestor extends BaseRequestor {
type,
msgid,
call_sid: this.call_sid,
hook: ['verb:hook', 'session:redirect'].includes(type) ? url : undefined,
hook: type === 'verb:hook' ? url : undefined,
data: {...payload},
...b3
};
@@ -152,18 +138,6 @@ class WsRequestor extends BaseRequestor {
}
};
const rejectQueuedMsgs = (err) => {
if (this.queuedMsg.length > 0) {
for (const {promise} of this.queuedMsg) {
this.logger.debug(`WsRequestor:request - preparing queued ${type} for rejectQueuedMsgs`);
if (promise) {
promise.reject(err);
}
}
this.queuedMsg.length = 0;
}
};
//this.logger.debug({obj}, `websocket: sending (${url})`);
/* special case: reconnecting before we received ack to session:new */
@@ -204,37 +178,16 @@ class WsRequestor extends BaseRequestor {
this.logger.debug({response}, `WsRequestor:request ${url} succeeded in ${rtt}ms`);
this.stats.histogram('app.hook.ws_response_time', rtt, ['hook_type:app']);
resolve(response);
if (this._reconnectResolve) {
this._reconnectResolve();
}
},
failure: (err) => {
if (this._reconnectReject) {
this._reconnectReject(err);
}
clearTimeout(timer);
reject(err);
}
});
/* send the message */
this.ws.send(JSON.stringify(obj), async() => {
this.ws.send(JSON.stringify(obj), () => {
this.logger.debug({obj}, `WsRequestor:request websocket: sent (${url})`);
// If session:reconnect is waiting for an ack, hold here until it arrives before sending queuedMsgs
if (this._reconnectPromise) {
try {
await this._reconnectPromise;
} catch (err) {
// something went wrong with session:reconnect
rejectQueuedMsgs(err);
this.emit('reconnect-error');
return;
} finally {
this._reconnectPromise = null;
this._reconnectResolve = null;
this._reconnectReject = null;
}
}
sendQueuedMsgs();
});
});
@@ -275,9 +228,6 @@ class WsRequestor extends BaseRequestor {
maxRedirects: 2,
handshakeTimeout,
maxPayload: JAMBONES_WS_MAX_PAYLOAD ? parseInt(JAMBONES_WS_MAX_PAYLOAD) : 24 * 1024,
headers: {
...(HTTP_USER_AGENT_HEADER && {'user-agent' : HTTP_USER_AGENT_HEADER})
}
};
if (this.username && this.password) opts = {...opts, auth: `${this.username}:${this.password}`};
@@ -372,9 +322,7 @@ class WsRequestor extends BaseRequestor {
'WsRequestor:_onSocketClosed time to reconnect');
if (!this.ws && !this.connectInProgress) {
this.connectInProgress = true;
return this._connect()
      .catch((err) => this.logger.error('WsRequestor:_onSocketClosed error while reconnecting', err))
.finally(() => this.connectInProgress = false);
this._connect().catch((err) => this.connectInProgress = false);
}
}, this.backoffMs);
this.backoffMs = this.backoffMs < 2000 ? this.backoffMs * 2 : (this.backoffMs + 2000);
@@ -392,9 +340,7 @@ class WsRequestor extends BaseRequestor {
/* messages must be JSON format */
try {
const obj = JSON.parse(content);
//const {type, msgid, command, call_sid = this.call_sid, queueCommand = false, data} = obj;
const {type, msgid, command, queueCommand = false, data} = obj;
const call_sid = obj.callSid || this.call_sid;
const {type, msgid, command, call_sid = this.call_sid, queueCommand = false, data} = obj;
//this.logger.debug({obj}, 'WsRequestor:request websocket: received');
assert.ok(type, 'type property not supplied');
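Referring back to the outbound frame assembled in request() above, a rough sketch of what goes over the websocket; the msgid and call_sid values are illustrative, and b3 stands for whatever tracing context is being propagated:

const obj = {
  type: 'verb:hook',
  msgid: 'adf3c9',                 // hypothetical message id
  call_sid: 'b6f28c0e-...',        // hypothetical call sid
  hook: '/dtmf',                   // only set for verb:hook (and session:redirect in the newer code)
  data: {...payload},
  ...b3
};
this.ws.send(JSON.stringify(obj), () => { /* then flush any queued messages */ });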

12037
package-lock.json generated

File diff suppressed because it is too large

View File

@@ -1,9 +1,9 @@
{
"name": "jambonz-feature-server",
"version": "0.9.0",
"version": "0.8.4",
"main": "app.js",
"engines": {
"node": ">= 18.x"
"node": ">= 10.16.0"
},
"keywords": [
"sip",
@@ -25,57 +25,55 @@
"jslint:fix": "eslint app.js tracer.js lib --fix"
},
"dependencies": {
"@aws-sdk/client-auto-scaling": "^3.549.0",
"@aws-sdk/client-sns": "^3.549.0",
"@jambonz/db-helpers": "^0.9.3",
"@aws-sdk/client-auto-scaling": "^3.360.0",
"@aws-sdk/client-sns": "^3.360.0",
"@jambonz/db-helpers": "^0.9.1",
"@jambonz/http-health-check": "^0.0.1",
"@jambonz/mw-registrar": "^0.2.7",
"@jambonz/realtimedb-helpers": "^0.8.8",
"@jambonz/speech-utils": "^0.1.11",
"@jambonz/stats-collector": "^0.1.10",
"@jambonz/realtimedb-helpers": "^0.8.6",
"@jambonz/speech-utils": "^0.0.20",
"@jambonz/stats-collector": "^0.1.9",
"@jambonz/time-series": "^0.2.8",
"@jambonz/verb-specifications": "^0.0.74",
"@opentelemetry/api": "^1.8.0",
"@opentelemetry/exporter-jaeger": "^1.23.0",
"@opentelemetry/exporter-trace-otlp-http": "^0.50.0",
"@opentelemetry/exporter-zipkin": "^1.23.0",
"@opentelemetry/instrumentation": "^0.50.0",
"@opentelemetry/resources": "^1.23.0",
"@opentelemetry/sdk-trace-base": "^1.23.0",
"@opentelemetry/sdk-trace-node": "^1.23.0",
"@opentelemetry/semantic-conventions": "^1.23.0",
"@jambonz/verb-specifications": "^0.0.30",
"@opentelemetry/api": "^1.4.0",
"@opentelemetry/exporter-jaeger": "^1.9.0",
"@opentelemetry/exporter-trace-otlp-http": "^0.35.0",
"@opentelemetry/exporter-zipkin": "^1.9.0",
"@opentelemetry/instrumentation": "^0.35.0",
"@opentelemetry/resources": "^1.9.0",
"@opentelemetry/sdk-trace-base": "^1.9.0",
"@opentelemetry/sdk-trace-node": "^1.9.0",
"@opentelemetry/semantic-conventions": "^1.9.0",
"bent": "^7.3.12",
"debug": "^4.3.4",
"deepcopy": "^2.1.0",
"drachtio-fsmrf": "^3.0.43",
"drachtio-srf": "^4.5.35",
"express": "^4.19.2",
"express-validator": "^7.0.1",
"ip": "^2.0.1",
"moment": "^2.30.1",
"parse-url": "^9.2.0",
"pino": "^8.20.0",
"drachtio-fsmrf": "^3.0.24",
"drachtio-srf": "^4.5.26",
"express": "^4.18.2",
"ip": "^1.1.8",
"moment": "^2.29.4",
"parse-url": "^8.1.0",
"pino": "^8.8.0",
"polly-ssml-split": "^0.1.0",
"proxyquire": "^2.1.3",
"sdp-transform": "^2.14.2",
"short-uuid": "^5.1.0",
"sinon": "^17.0.1",
"sdp-transform": "^2.14.1",
"short-uuid": "^4.2.2",
"sinon": "^15.0.1",
"to-snake-case": "^1.0.0",
"undici": "^6.19.2",
"undici": "^5.19.1",
"uuid-random": "^1.3.2",
"verify-aws-sns-signature": "^0.1.0",
"ws": "^8.17.1",
"ws": "^8.9.0",
"xml2js": "^0.6.2"
},
"devDependencies": {
"clear-module": "^4.1.2",
"eslint": "7.32.0",
"eslint-plugin-promise": "^6.1.1",
"eslint": "^7.32.0",
"eslint-plugin-promise": "^4.3.1",
"nyc": "^15.1.0",
"tape": "^5.7.5"
"tape": "^5.6.1"
},
"optionalDependencies": {
"bufferutil": "^4.0.8",
"utf-8-validate": "^6.0.3"
"bufferutil": "^4.0.6",
"utf-8-validate": "^5.0.8"
}
}

View File

@@ -1,86 +0,0 @@
#
# Recommended minimum configuration:
#
# Example rule allowing access from your local networks.
# Adapt to list your (internal) IP networks from where browsing
# should be allowed
acl localnet src 0.0.0.1-0.255.255.255 # RFC 1122 "this" network (LAN)
acl localnet src 10.0.0.0/8 # RFC 1918 local private network (LAN)
acl localnet src 100.64.0.0/10 # RFC 6598 shared address space (CGN)
acl localnet src 169.254.0.0/16 # RFC 3927 link-local (directly plugged) machines
acl localnet src 172.16.0.0/12 # RFC 1918 local private network (LAN)
acl localnet src 172.38.0.0/12 # RFC 1918 local private network (LAN)
acl localnet src 192.168.0.0/16 # RFC 1918 local private network (LAN)
acl localnet src fc00::/7 # RFC 4193 local private network range
acl localnet src fe80::/10 # RFC 4291 link-local (directly plugged) machines
acl SSL_ports port 443
acl Safe_ports port 80 # http
acl Safe_ports port 21 # ftp
acl Safe_ports port 443 # https
acl Safe_ports port 70 # gopher
acl Safe_ports port 210 # wais
acl Safe_ports port 1025-65535 # unregistered ports
acl Safe_ports port 280 # http-mgmt
acl Safe_ports port 488 # gss-http
acl Safe_ports port 591 # filemaker
acl Safe_ports port 777 # multiling http
#
# Recommended minimum Access Permission configuration:
#
# Deny requests to certain unsafe ports
http_access allow !Safe_ports
# Deny CONNECT to other than secure SSL ports
http_access allow CONNECT !SSL_ports
# Only allow cachemgr access from localhost
http_access allow localhost manager
http_access allow manager
# This default configuration only allows localhost requests because a more
# permissive Squid installation could introduce new attack vectors into the
# network by proxying external TCP connections to unprotected services.
http_access allow localhost
# The two deny rules below are unnecessary in this default configuration
# because they are followed by a "deny all" rule. However, they may become
# critically important when you start allowing external requests below them.
# Protect web applications running on the same server as Squid. They often
# assume that only local users can access them at "localhost" ports.
http_access allow to_localhost
# Protect cloud servers that provide local users with sensitive info about
# their server via certain well-known link-local (a.k.a. APIPA) addresses.
# http_access deny to_linklocal
#
# INSERT YOUR OWN RULE(S) HERE TO ALLOW ACCESS FROM YOUR CLIENTS
#
# For example, to allow access from your local networks, you may uncomment the
# following rule (and/or add rules that match your definition of "local"):
# http_access allow localnet
# And finally deny all other access to this proxy
http_access allow all
# Squid normally listens to port 3128
http_port 3128
# Uncomment and adjust the following to add a disk cache directory.
#cache_dir ufs /usr/local/var/cache/squid 100 16 256
# Leave coredumps in the first cache dir
coredump_dir /usr/local/var/cache/squid
#
# Add any of your own refresh_pattern entries above these.
#
refresh_pattern ^ftp: 1440 20% 10080
refresh_pattern ^gopher: 1440 0% 1440
refresh_pattern -i (/cgi-bin/|\?) 0 0% 0
refresh_pattern . 0 20% 4320

View File

@@ -99,8 +99,6 @@ test('test create-call call-hook basic authentication', async(t) => {
let obj = await getJSON(`http:127.0.0.1:3100/lastRequest/${from}`)
t.ok(obj.headers.Authorization = 'Basic dXNlcm5hbWU6cGFzc3dvcmQ=',
'create-call: call-hook contains basic authentication header');
t.ok(obj.headers['user-agent'] = 'jambonz',
'create-call: call-hook contains user-agent header');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);

View File

@@ -330,7 +330,7 @@ CREATE TABLE `applications` (
LOCK TABLES `applications` WRITE;
/*!40000 ALTER TABLE `applications` DISABLE KEYS */;
INSERT INTO `applications` VALUES ('0dddaabf-0a30-43e3-84e8-426873b1a78b','decline call',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','c71e79db-24f2-4866-a3ee-febb0f97b341','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('0dddaabf-0a30-43e3-84e8-426873b1a78c','app json',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','c71e79db-24f2-4866-a3ee-febb0f97b341','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,'[{\"verb\": \"play\",\"url\": \"silence_stream://5000\"}]','google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('17461c69-56b5-4dab-ad83-1c43a0f93a3d','gather',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','10692465-a511-4277-9807-b7157e4f81e1','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('195d9507-6a42-46a8-825f-f009e729d023','sip info',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','c9113e7a-741f-48b9-96c1-f2f78176eeb3','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('24d0f6af-e976-44dd-a2e8-41c7b55abe33','say account 2',NULL,'622f62e4-303a-49f2-bbe0-eb1e1714e37a','54ab0976-a6c0-45d8-89a4-d90d45bf9d96','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('308b4f41-1a18-4052-b89a-c054e75ce242','say',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','54ab0976-a6c0-45d8-89a4-d90d45bf9d96','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('ae026ab5-3029-47b4-9d7c-236e3a4b4ebe','transcribe account 2',NULL,'622f62e4-303a-49f2-bbe0-eb1e1714e37a','ecb67a8f-f7ce-4919-abf0-bbc69c1001e5','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('baf9213b-5556-4c20-870c-586392ed246f','transcribe',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','ecb67a8f-f7ce-4919-abf0-bbc69c1001e5','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('0dddaabf-0a30-43e3-84e8-426873b1a7ac','http proxy app',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','c71e79db-24f2-4866-a3ee-febb0f97b3ac','293904c1-351b-4bca-8d58-1a29b853c7ac',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48');
INSERT INTO `applications` VALUES ('0dddaabf-0a30-43e3-84e8-426873b1a78b','decline call',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','c71e79db-24f2-4866-a3ee-febb0f97b341','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('0dddaabf-0a30-43e3-84e8-426873b1a78c','app json',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','c71e79db-24f2-4866-a3ee-febb0f97b341','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,'[{\"verb\": \"play\",\"url\": \"silence_stream://5000\"}]','google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('17461c69-56b5-4dab-ad83-1c43a0f93a3d','gather',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','10692465-a511-4277-9807-b7157e4f81e1','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('195d9507-6a42-46a8-825f-f009e729d023','sip info',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','c9113e7a-741f-48b9-96c1-f2f78176eeb3','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('24d0f6af-e976-44dd-a2e8-41c7b55abe33','say account 2',NULL,'622f62e4-303a-49f2-bbe0-eb1e1714e37a','54ab0976-a6c0-45d8-89a4-d90d45bf9d96','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('308b4f41-1a18-4052-b89a-c054e75ce242','say',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','54ab0976-a6c0-45d8-89a4-d90d45bf9d96','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('ae026ab5-3029-47b4-9d7c-236e3a4b4ebe','transcribe account 2',NULL,'622f62e4-303a-49f2-bbe0-eb1e1714e37a','ecb67a8f-f7ce-4919-abf0-bbc69c1001e5','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48'),('baf9213b-5556-4c20-870c-586392ed246f','transcribe',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','ecb67a8f-f7ce-4919-abf0-bbc69c1001e5','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US','2023-05-31 03:52:48');
/*!40000 ALTER TABLE `applications` ENABLE KEYS */;
UNLOCK TABLES;
@@ -646,7 +646,7 @@ CREATE TABLE `phone_numbers` (
LOCK TABLES `phone_numbers` WRITE;
/*!40000 ALTER TABLE `phone_numbers` DISABLE KEYS */;
INSERT INTO `phone_numbers` VALUES ('05eeed62-b29b-4679-bf38-d7a4e318be44','16174000003','5145b436-2f38-4029-8d4c-fd8c67831c7a','bb845d4b-83a9-4cde-a6e9-50f3743bab3f','17461c69-56b5-4dab-ad83-1c43a0f93a3d',NULL),('4b439355-debc-40c7-9cfa-5be58c2bed6b','16174000000','5145b436-2f38-4029-8d4c-fd8c67831c7a','bb845d4b-83a9-4cde-a6e9-50f3743bab3f','0dddaabf-0a30-43e3-84e8-426873b1a78b',NULL),('964d0581-9627-44cb-be20-8118050406b2','16174000006','5145b436-2f38-4029-8d4c-fd8c67831c7a','bb845d4b-83a9-4cde-a6e9-50f3743bab3f','195d9507-6a42-46a8-825f-f009e729d023',NULL),('964d0581-9627-44cb-be20-8118050406b3','16174000007','5145b436-2f38-4029-8d4c-fd8c67831c7a','bb845d4b-83a9-4cde-a6e9-50f3743bab3f','0dddaabf-0a30-43e3-84e8-426873b1a78c',NULL),('9cc9e7fc-b7b0-4101-8f3c-9fe13ce5df0a','16174000001','5145b436-2f38-4029-8d4c-fd8c67831c7a','bb845d4b-83a9-4cde-a6e9-50f3743bab3f','308b4f41-1a18-4052-b89a-c054e75ce242',NULL),('e686a320-0725-418f-be65-532159bdc3ed','16174000002','5145b436-2f38-4029-8d4c-fd8c67831c7a','622f62e4-303a-49f2-bbe0-eb1e1714e37a','24d0f6af-e976-44dd-a2e8-41c7b55abe33',NULL),('f3c53863-b629-4cf6-9dcb-c7fb7072314b','16174000004','5145b436-2f38-4029-8d4c-fd8c67831c7a','bb845d4b-83a9-4cde-a6e9-50f3743bab3f','baf9213b-5556-4c20-870c-586392ed246f',NULL),('f6416c17-829a-4f11-9c32-f0d00e4a9ae9','16174000005','5145b436-2f38-4029-8d4c-fd8c67831c7a','622f62e4-303a-49f2-bbe0-eb1e1714e37a','ae026ab5-3029-47b4-9d7c-236e3a4b4ebe',NULL),('4b439355-debc-40c7-9cfa-5be58c2bedac','16174000015','5145b436-2f38-4029-8d4c-fd8c67831c7a','bb845d4b-83a9-4cde-a6e9-50f3743bab3f','0dddaabf-0a30-43e3-84e8-426873b1a7ac',NULL);
INSERT INTO `phone_numbers` VALUES ('05eeed62-b29b-4679-bf38-d7a4e318be44','16174000003','5145b436-2f38-4029-8d4c-fd8c67831c7a','bb845d4b-83a9-4cde-a6e9-50f3743bab3f','17461c69-56b5-4dab-ad83-1c43a0f93a3d',NULL),('4b439355-debc-40c7-9cfa-5be58c2bed6b','16174000000','5145b436-2f38-4029-8d4c-fd8c67831c7a','bb845d4b-83a9-4cde-a6e9-50f3743bab3f','0dddaabf-0a30-43e3-84e8-426873b1a78b',NULL),('964d0581-9627-44cb-be20-8118050406b2','16174000006','5145b436-2f38-4029-8d4c-fd8c67831c7a','bb845d4b-83a9-4cde-a6e9-50f3743bab3f','195d9507-6a42-46a8-825f-f009e729d023',NULL),('964d0581-9627-44cb-be20-8118050406b3','16174000007','5145b436-2f38-4029-8d4c-fd8c67831c7a','bb845d4b-83a9-4cde-a6e9-50f3743bab3f','0dddaabf-0a30-43e3-84e8-426873b1a78c',NULL),('9cc9e7fc-b7b0-4101-8f3c-9fe13ce5df0a','16174000001','5145b436-2f38-4029-8d4c-fd8c67831c7a','bb845d4b-83a9-4cde-a6e9-50f3743bab3f','308b4f41-1a18-4052-b89a-c054e75ce242',NULL),('e686a320-0725-418f-be65-532159bdc3ed','16174000002','5145b436-2f38-4029-8d4c-fd8c67831c7a','622f62e4-303a-49f2-bbe0-eb1e1714e37a','24d0f6af-e976-44dd-a2e8-41c7b55abe33',NULL),('f3c53863-b629-4cf6-9dcb-c7fb7072314b','16174000004','5145b436-2f38-4029-8d4c-fd8c67831c7a','bb845d4b-83a9-4cde-a6e9-50f3743bab3f','baf9213b-5556-4c20-870c-586392ed246f',NULL),('f6416c17-829a-4f11-9c32-f0d00e4a9ae9','16174000005','5145b436-2f38-4029-8d4c-fd8c67831c7a','622f62e4-303a-49f2-bbe0-eb1e1714e37a','ae026ab5-3029-47b4-9d7c-236e3a4b4ebe',NULL);
/*!40000 ALTER TABLE `phone_numbers` ENABLE KEYS */;
UNLOCK TABLES;
@@ -1254,7 +1254,7 @@ CREATE TABLE `webhooks` (
LOCK TABLES `webhooks` WRITE;
/*!40000 ALTER TABLE `webhooks` DISABLE KEYS */;
INSERT INTO `webhooks` VALUES ('10692465-a511-4277-9807-b7157e4f81e1','http://127.0.0.1:3102/','POST',NULL,NULL),('293904c1-351b-4bca-8d58-1a29b853c7db','http://127.0.0.1:3100/callStatus','POST',NULL,NULL),('54ab0976-a6c0-45d8-89a4-d90d45bf9d96','http://127.0.0.1:3101/','POST',NULL,NULL),('6ac36aeb-6bd0-428a-80a1-aed95640a296','https://flows.jambonz.cloud/callStatus','POST',NULL,NULL),('c71e79db-24f2-4866-a3ee-febb0f97b341','http://127.0.0.1:3100/','POST',NULL,NULL),('c9113e7a-741f-48b9-96c1-f2f78176eeb3','http://127.0.0.1:3104/','POST',NULL,NULL),('d9c205c6-a129-443e-a9c0-d1bb437d4bb7','https://flows.jambonz.cloud/testCall','POST',NULL,NULL),('ecb67a8f-f7ce-4919-abf0-bbc69c1001e5','http://127.0.0.1:3103/','POST',NULL,NULL),('293904c1-351b-4bca-8d58-1a29b853c7ac','http://172.38.0.60:3000/callStatus','POST',NULL,NULL),('c71e79db-24f2-4866-a3ee-febb0f97b3ac','http://172.38.0.60:3000/','POST',NULL,NULL);
INSERT INTO `webhooks` VALUES ('10692465-a511-4277-9807-b7157e4f81e1','http://127.0.0.1:3102/','POST',NULL,NULL),('293904c1-351b-4bca-8d58-1a29b853c7db','http://127.0.0.1:3100/callStatus','POST',NULL,NULL),('54ab0976-a6c0-45d8-89a4-d90d45bf9d96','http://127.0.0.1:3101/','POST',NULL,NULL),('6ac36aeb-6bd0-428a-80a1-aed95640a296','https://flows.jambonz.cloud/callStatus','POST',NULL,NULL),('c71e79db-24f2-4866-a3ee-febb0f97b341','http://127.0.0.1:3100/','POST',NULL,NULL),('c9113e7a-741f-48b9-96c1-f2f78176eeb3','http://127.0.0.1:3104/','POST',NULL,NULL),('d9c205c6-a129-443e-a9c0-d1bb437d4bb7','https://flows.jambonz.cloud/testCall','POST',NULL,NULL),('ecb67a8f-f7ce-4919-abf0-bbc69c1001e5','http://127.0.0.1:3103/','POST',NULL,NULL);
/*!40000 ALTER TABLE `webhooks` ENABLE KEYS */;
UNLOCK TABLES;
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;

View File

@@ -42,7 +42,7 @@ services:
ipv4_address: 172.38.0.7
drachtio:
image: drachtio/drachtio-server:0.8.25-rc8
image: drachtio/drachtio-server:0.8.22
restart: always
command: drachtio --contact "sip:*;transport=udp" --mtu 4096 --address 0.0.0.0 --port 9022
ports:
@@ -57,7 +57,7 @@ services:
condition: service_healthy
freeswitch:
image: drachtio/drachtio-freeswitch-mrf:0.7.3
image: drachtio/drachtio-freeswitch-mrf:0.4.33
restart: always
command: freeswitch --rtp-range-start 20000 --rtp-range-end 20100
environment:
@@ -92,13 +92,3 @@ services:
networks:
fs:
ipv4_address: 172.38.0.90
squid:
image: ubuntu/squid:edge
ports:
- "3128:3128"
volumes:
- ./configuration/squid.conf:/etc/squid/squid.conf
networks:
fs:
ipv4_address: 172.38.0.91

View File

@@ -1,53 +0,0 @@
const test = require('tape');
const { sippUac } = require('./sipp')('test_fs');
const clearModule = require('clear-module');
const {provisionCallHook, provisionCustomHook} = require('./utils')
const bent = require('bent');
const getJSON = bent('json')
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
});
function connect(connectable) {
return new Promise((resolve, reject) => {
connectable.on('connect', () => {
return resolve();
});
});
}
test('\'hangup\' custom headers', async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
const verbs = [
{
verb: 'play',
url: 'https://example.com/example.mp3'
},
{
"verb": "hangup",
"headers": {
"X-Reason" : "maximum call duration exceeded"
}
}
];
const from = 'hangup_custom_headers';
await provisionCallHook(from, verbs)
// THEN
await sippUac('uac-success-received-bye.xml', '172.38.0.10', from);
t.pass('hangup: succeeds with custom headers');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});

View File

@@ -1,72 +0,0 @@
const test = require('tape');
const { sippUac } = require('./sipp')('test_fs');
const clearModule = require('clear-module');
const {provisionCallHook, provisionCustomHook} = require('./utils')
const bent = require('bent');
const getJSON = bent('json')
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
});
function connect(connectable) {
return new Promise((resolve, reject) => {
connectable.on('connect', () => {
return resolve();
});
});
}
test('\'HTTP proxy\' test Info', async(t) => {
clearModule.all();
process.env.JAMBONES_HTTP_PROXY_IP = "127.0.0.1";
process.env.JAMBONES_HTTP_PROXY_PROTOCOL = "http";
process.env.JAMBONES_HTTP_PROXY_PORT = 3128;
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
const verbs = [
{
verb: 'config',
sipRequestWithinDialogHook: '/customHook'
},
{
verb: 'play',
url: 'silence_stream://5000',
}
];
const waitHookVerbs = [
{
verb: 'hangup'
}
];
const from = 'http_proxy_info';
await provisionCustomHook(from, waitHookVerbs)
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-success-info-received-bye.xml', '172.38.0.10', from, "16174000015");
t.pass('sip INFO: INFO sent successfully');
// Make sure that sipRequestWithinDialogHook was called with the expected payload
const json = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_customHook`);
t.ok(json.body.sip_method === 'INFO', 'sipRequestWithinDialogHook contains sip_method');
t.ok(json.body.sip_body === 'hello jambonz\r\n', 'sipRequestWithinDialogHook contains sip_body');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
} finally {
    // delete (not assign null, which stores the string 'null') so later tests see the proxy as unconfigured
    delete process.env.JAMBONES_HTTP_PROXY_IP;
    delete process.env.JAMBONES_HTTP_PROXY_PROTOCOL;
    delete process.env.JAMBONES_HTTP_PROXY_PORT;
}
});

View File

@@ -1,65 +0,0 @@
const test = require('tape');
const { sippUac } = require('./sipp')('test_fs');
const clearModule = require('clear-module');
const {provisionCallHook, provisionCustomHook} = require('./utils')
const bent = require('bent');
const getJSON = bent('json')
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
});
function connect(connectable) {
return new Promise((resolve, reject) => {
connectable.on('connect', () => {
return resolve();
});
});
}
test('\'sip Indialog\' test Info', async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
const verbs = [
{
verb: 'config',
sipRequestWithinDialogHook: '/customHook'
},
{
verb: 'play',
url: 'silence_stream://5000',
}
];
const waitHookVerbs = [
{
verb: 'hangup'
}
];
const from = 'sip_indialog_info';
await provisionCustomHook(from, waitHookVerbs)
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-success-info-received-bye.xml', '172.38.0.10', from);
t.pass('sip INFO: INFO sent successfully');
// Make sure that sipRequestWithinDialogHook was called with the expected payload
const json = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_customHook`);
t.ok(json.body.sip_method === 'INFO', 'sipRequestWithinDialogHook contains sip_method');
t.ok(json.body.sip_body === 'hello jambonz\r\n', 'sipRequestWithinDialogHook contains sip_body');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
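
For illustration, a hypothetical handler for the /customHook endpoint exercised by these tests; the sip_method and sip_body fields mirror the assertions above, while the express scaffolding and port are assumptions, not part of this repo:

  // Hypothetical sketch of a sipRequestWithinDialogHook receiver (express assumed).
  // On the in-dialog INFO it returns a new verb list (a hangup), matching the
  // waitHookVerbs provisioned in the tests above.
  const express = require('express');
  const app = express();
  app.use(express.json());
  app.post('/customHook', (req, res) => {
    const {sip_method, sip_body} = req.body;  // e.g. 'INFO' and 'hello jambonz\r\n'
    if (sip_method === 'INFO') return res.json([{verb: 'hangup'}]);
    res.sendStatus(200);
  });
  app.listen(3100);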

View File

@@ -15,9 +15,5 @@ require('./sip-refer-tests');
require('./listen-tests');
require('./config-test');
require('./queue-test');
require('./in-dialog-test');
require('./hangup-test');
require('./sdp-utils-test');
require('./http-proxy-test');
require('./remove-test-db');
require('./docker_stop');

View File

@@ -188,7 +188,7 @@ test('\'play\' tests with seekOffset and actionHook', async(t) => {
const seconds = parseInt(obj.body.playback_seconds);
const milliseconds = parseInt(obj.body.playback_milliseconds);
const lastOffsetPos = parseInt(obj.body.playback_last_offset_pos);
console.log({obj}, 'lastRequest');
//console.log({obj}, 'lastRequest');
t.ok(obj.body.reason === "playCompleted", "play: actionHook success received");
t.ok(seconds === 2, "playback_seconds: actionHook success received");
t.ok(milliseconds === 2048, "playback_milliseconds: actionHook success received");

View File

@@ -84,46 +84,6 @@ test('\'config\' reset synthesizer tests', async(t) => {
}
});
test('Say verb array test', async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
const verbs = [
{
"verb": "config",
"synthesizer": {
"vendor": "microsft",
"voice": "foobar"
},
},
{
"verb": "config",
"reset": 'synthesizer',
},
{
verb: 'say',
text: ['hello', 'https://samplelib.com/lib/preview/mp3/sample-3s.mp3']
}
];
const from = 'say_test_success';
await provisionCallHook(from, verbs)
// THEN
await sippUac('uac-success-received-bye.xml', '172.38.0.10', from);
t.pass('say: succeeds with an array of text and audio url');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
const {MICROSOFT_CUSTOM_API_KEY, MICROSOFT_DEPLOYMENT_ID, MICROSOFT_CUSTOM_REGION, MICROSOFT_CUSTOM_VOICE} = process.env;
if (MICROSOFT_CUSTOM_API_KEY && MICROSOFT_DEPLOYMENT_ID && MICROSOFT_CUSTOM_REGION && MICROSOFT_CUSTOM_VOICE) {
test('\'say\' tests - microsoft custom voice', async(t) => {

View File

@@ -1,114 +0,0 @@
<?xml version="1.0" encoding="ISO-8859-1" ?>
<!DOCTYPE scenario SYSTEM "sipp.dtd">
<scenario name="Basic Sipstone UAC">
<!-- In client mode (sipp placing calls), the Call-ID MUST be -->
<!-- generated by sipp. To do so, use [call_id] keyword. -->
<send retrans="500">
<![CDATA[
INVITE sip:[to]@[remote_ip]:[remote_port] SIP/2.0
Via: SIP/2.0/[transport] [local_ip]:[local_port];branch=[branch]
From: [from] <sip:[from]@[local_ip]:[local_port]>;tag=[pid]SIPpTag00[call_number]
To: <sip:[to]@[remote_ip]:[remote_port]>
Call-ID: [call_id]
CSeq: 1 INVITE
Contact: sip:[from]@[local_ip]:[local_port]
Max-Forwards: 70
X-Account-Sid: bb845d4b-83a9-4cde-a6e9-50f3743bab3f
Subject: uac-say
Content-Type: application/sdp
Content-Length: [len]
v=0
o=user1 53655765 2353687637 IN IP[local_ip_type] [local_ip]
s=-
c=IN IP[media_ip_type] [media_ip]
t=0 0
m=audio [media_port] RTP/AVP 0
a=rtpmap:0 PCMU/8000
]]>
</send>
<recv response="100"
optional="true">
</recv>
<recv response="180" optional="true">
</recv>
<recv response="183" optional="true">
</recv>
<!-- By adding rrs="true" (Record Route Sets), the route sets -->
<!-- are saved and used for following messages sent. Useful to test -->
<!-- against stateful SIP proxies/B2BUAs. -->
<recv response="200" rtd="true">
</recv>
<!-- Packet lost can be simulated in any send/recv message by -->
<!-- by adding the 'lost = "10"'. Value can be [1-100] percent. -->
<send>
<![CDATA[
ACK sip:[to]@[remote_ip]:[remote_port] SIP/2.0
Via: SIP/2.0/[transport] [local_ip]:[local_port];branch=[branch]
From: [from] <sip:[from]@[local_ip]:[local_port]>;tag=[pid]SIPpTag00[call_number]
To: [to] <sip:[to]@[remote_ip]:[remote_port]>[peer_tag_param]
Call-ID: [call_id]
CSeq: 1 ACK
Contact: sip:[from]@[local_ip]:[local_port]
Max-Forwards: 70
Subject: uac-say
Content-Length: 0
]]>
</send>
<pause milliseconds="2000"/>
<!-- Send an INFO message -->
<send>
<![CDATA[
INFO sip:[service]@[remote_ip]:[remote_port] SIP/2.0
Via: SIP/2.0/[transport] [local_ip]:[local_port];branch=[branch]
From: [from] <sip:[from]@[local_ip]:[local_port]>;tag=[pid]SIPpTag00[call_number]
To: [to] <sip:[to]@[remote_ip]:[remote_port]>[peer_tag_param]
Call-ID: [call_id]
CSeq: 2 INFO
Contact: sip:[from]@[local_ip]:[local_port]
Max-Forwards: 70
Subject: Performance Test
Content-Type: text/plain
Content-Length: [len]
hello jambonz
]]>
</send>
<!-- Receive 200 OK -->
<recv response="200">
</recv>
<recv request="BYE">
</recv>
<send>
<![CDATA[
SIP/2.0 200 OK
[last_Via:]
[last_From:]
[last_To:]
[last_Call-ID:]
[last_CSeq:]
Contact: <sip:[local_ip]:[local_port];transport=[transport]>
Content-Length: 0
]]>
</send>
</scenario>

View File

@@ -1,26 +0,0 @@
const test = require('tape');
const {makeOpusFirst, isOpusFirst} = require('../lib/utils/sdp-utils');
const sdpTransform = require('sdp-transform');
test('test opus first', (t) => {
const sdp = 'v=0\r\no=- 3348584794228993675 2 IN IP4 127.0.0.1\r\ns=-\r\nt=0 0\r\na=group:BUNDLE 0\r\na=extmap-allow-mixed\r\na=msid-semantic: WMS caca8b77-5ae5-4e73-a4d5-de1fce930335\r\nm=audio 57088 UDP/TLS/RTP/SAVPF 111 63 9 0 8 13 110 126\r\nc=IN IP4 14.238.89.50\r\na=rtcp:9 IN IP4 0.0.0.0\r\na=candidate:1401281302 1 udp 2122260223 10.231.36.146 57088 typ host generation 0 network-id 1 network-cost 10\r\na=candidate:2173263513 1 udp 1686052607 14.238.89.50 57088 typ srflx raddr 10.231.36.146 rport 57088 generation 0 network-id 1 network-cost 10\r\na=ice-ufrag:k5nc\r\na=ice-pwd:J0qwMs6HrIcFNZbDG5m8Kqpk\r\na=ice-options:trickle\r\na=fingerprint:sha-256 66:DE:9A:76:CE:11:2D:65:C4:08:C7:87:B4:90:7E:F1:8D:07:B9:F4:FF:E3:81:D7:E7:7D:C6:56:47:01:6E:55\r\na=setup:actpass\r\na=mid:0\r\na=extmap:1 urn:ietf:params:rtp-hdrext:ssrc-audio-level\r\na=extmap:2 http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time\r\na=extmap:3 http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01\r\na=extmap:4 urn:ietf:params:rtp-hdrext:sdes:mid\r\na=sendrecv\r\na=msid:caca8b77-5ae5-4e73-a4d5-de1fce930335 52ad01f1-b1df-4b8e-a208-9201e98b6f7b\r\na=rtcp-mux\r\na=rtcp-fb:111 transport-cc\r\na=fmtp:111 minptime=10;useinbandfec=1\r\na=rtpmap:63 red/48000/2\r\na=fmtp:63 111/111\r\na=rtpmap:9 G722/8000\r\na=rtpmap:0 PCMU/8000\r\na=rtpmap:8 PCMA/8000\r\na=rtpmap:13 CN/8000\r\na=rtpmap:111 opus/48000/2\r\na=rtpmap:110 telephone-event/48000\r\na=rtpmap:126 telephone-event/8000\r\na=ssrc:3207459321 cname:4nyPJ6KXvseBUIhu\r\na=ssrc:3207459321 msid:caca8b77-5ae5-4e73-a4d5-de1fce930335 52ad01f1-b1df-4b8e-a208-9201e98b6f7b\r\n';
const opusSdp = makeOpusFirst(sdp);
const parsedSdp = sdpTransform.parse(opusSdp);
const opusIndex = parsedSdp.media[0].rtp.findIndex((entry) => entry.codec === 'opus');
t.ok(opusIndex === 0, 'successfully moved opus to be the first offered codec');
t.end();
});
test('test is opus first', (t) => {
const sdp = 'v=0\r\no=- 3348584794228993675 2 IN IP4 127.0.0.1\r\ns=-\r\nt=0 0\r\na=group:BUNDLE 0\r\na=extmap-allow-mixed\r\na=msid-semantic: WMS caca8b77-5ae5-4e73-a4d5-de1fce930335\r\nm=audio 57088 UDP/TLS/RTP/SAVPF 111 63 9 0 8 13 110 126\r\nc=IN IP4 14.238.89.50\r\na=rtcp:9 IN IP4 0.0.0.0\r\na=candidate:1401281302 1 udp 2122260223 10.231.36.146 57088 typ host generation 0 network-id 1 network-cost 10\r\na=candidate:2173263513 1 udp 1686052607 14.238.89.50 57088 typ srflx raddr 10.231.36.146 rport 57088 generation 0 network-id 1 network-cost 10\r\na=ice-ufrag:k5nc\r\na=ice-pwd:J0qwMs6HrIcFNZbDG5m8Kqpk\r\na=ice-options:trickle\r\na=fingerprint:sha-256 66:DE:9A:76:CE:11:2D:65:C4:08:C7:87:B4:90:7E:F1:8D:07:B9:F4:FF:E3:81:D7:E7:7D:C6:56:47:01:6E:55\r\na=setup:actpass\r\na=mid:0\r\na=extmap:1 urn:ietf:params:rtp-hdrext:ssrc-audio-level\r\na=extmap:2 http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time\r\na=extmap:3 http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01\r\na=extmap:4 urn:ietf:params:rtp-hdrext:sdes:mid\r\na=sendrecv\r\na=msid:caca8b77-5ae5-4e73-a4d5-de1fce930335 52ad01f1-b1df-4b8e-a208-9201e98b6f7b\r\na=rtcp-mux\r\na=rtpmap:111 opus/48000/2\r\na=rtcp-fb:111 transport-cc\r\na=fmtp:111 minptime=10;useinbandfec=1\r\na=rtpmap:63 red/48000/2\r\na=fmtp:63 111/111\r\na=rtpmap:9 G722/8000\r\na=rtpmap:0 PCMU/8000\r\na=rtpmap:8 PCMA/8000\r\na=rtpmap:13 CN/8000\r\na=rtpmap:110 telephone-event/48000\r\na=rtpmap:126 telephone-event/8000\r\na=ssrc:3207459321 cname:4nyPJ6KXvseBUIhu\r\na=ssrc:3207459321 msid:caca8b77-5ae5-4e73-a4d5-de1fce930335 52ad01f1-b1df-4b8e-a208-9201e98b6f7b\r\n';
t.ok(isOpusFirst(sdp), "opus is first offer");
const sdp2 = 'v=0\r\no=- 3348584794228993675 2 IN IP4 127.0.0.1\r\ns=-\r\nt=0 0\r\na=group:BUNDLE 0\r\na=extmap-allow-mixed\r\na=msid-semantic: WMS caca8b77-5ae5-4e73-a4d5-de1fce930335\r\nm=audio 57088 UDP/TLS/RTP/SAVPF 111 63 9 0 8 13 110 126\r\nc=IN IP4 14.238.89.50\r\na=rtcp:9 IN IP4 0.0.0.0\r\na=candidate:1401281302 1 udp 2122260223 10.231.36.146 57088 typ host generation 0 network-id 1 network-cost 10\r\na=candidate:2173263513 1 udp 1686052607 14.238.89.50 57088 typ srflx raddr 10.231.36.146 rport 57088 generation 0 network-id 1 network-cost 10\r\na=ice-ufrag:k5nc\r\na=ice-pwd:J0qwMs6HrIcFNZbDG5m8Kqpk\r\na=ice-options:trickle\r\na=fingerprint:sha-256 66:DE:9A:76:CE:11:2D:65:C4:08:C7:87:B4:90:7E:F1:8D:07:B9:F4:FF:E3:81:D7:E7:7D:C6:56:47:01:6E:55\r\na=setup:actpass\r\na=mid:0\r\na=extmap:1 urn:ietf:params:rtp-hdrext:ssrc-audio-level\r\na=extmap:2 http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time\r\na=extmap:3 http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01\r\na=extmap:4 urn:ietf:params:rtp-hdrext:sdes:mid\r\na=sendrecv\r\na=msid:caca8b77-5ae5-4e73-a4d5-de1fce930335 52ad01f1-b1df-4b8e-a208-9201e98b6f7b\r\na=rtcp-mux\r\na=rtcp-fb:111 transport-cc\r\na=fmtp:111 minptime=10;useinbandfec=1\r\na=rtpmap:63 red/48000/2\r\na=fmtp:63 111/111\r\na=rtpmap:9 G722/8000\r\na=rtpmap:0 PCMU/8000\r\na=rtpmap:8 PCMA/8000\r\na=rtpmap:13 CN/8000\r\na=rtpmap:111 opus/48000/2\r\na=rtpmap:110 telephone-event/48000\r\na=rtpmap:126 telephone-event/8000\r\na=ssrc:3207459321 cname:4nyPJ6KXvseBUIhu\r\na=ssrc:3207459321 msid:caca8b77-5ae5-4e73-a4d5-de1fce930335 52ad01f1-b1df-4b8e-a208-9201e98b6f7b\r\n';
t.ok(!isOpusFirst(sdp2), "opus is not first offer")
const sdp3 = 'v=0\r\no=xhoaluu2 1314 1504 IN IP4 192.168.1.4\r\ns=Talk\r\nc=IN IP4 192.168.1.4\r\nt=0 0\r\na=ice-pwd:397d063ea23fdc05164e3ee4\r\na=ice-ufrag:16c449a3\r\na=rtcp-xr:rcvr-rtt=all:10000 stat-summary=loss,dup,jitt,TTL voip-metrics\r\na=group:BUNDLE as\r\na=record:off\r\nm=audio 56542 RTP/AVPF 0 8\r\nc=IN IP4 14.226.233.151\r\na=rtcp-mux\r\na=mid:as\r\na=extmap:1 urn:ietf:params:rtp-hdrext:sdes:mid\r\na=rtcp:63076 IN IP4 192.168.1.4\r\na=candidate:1 1 UDP 2130706303 192.168.1.4 56542 typ host\r\na=candidate:1 2 UDP 2130706302 192.168.1.4 63076 typ host\r\na=candidate:2 1 UDP 2130706431 2001:ee0:d744:dcf0:c1d3:d73f:7a93:dc9f 56542 typ host\r\na=candidate:2 2 UDP 2130706430 2001:ee0:d744:dcf0:c1d3:d73f:7a93:dc9f 63076 typ host\r\na=candidate:3 1 UDP 2130706431 2001:ee0:d744:dcf0:15:6be3:8e6b:b736 56542 typ host\r\na=candidate:3 2 UDP 2130706430 2001:ee0:d744:dcf0:15:6be3:8e6b:b736 63076 typ host\r\na=candidate:4 1 UDP 1694498687 14.226.233.151 56542 typ srflx raddr 192.168.1.4 rport 56542\r\na=rtcp-fb:* trr-int 1000\r\na=rtcp-fb:* ccm tmmbr';
t.ok(!isOpusFirst(sdp3), "opus is not first offer when opus is absent");
t.end();
});
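
To make the intent of makeOpusFirst concrete, here is an illustrative sketch using the same sdp-transform package; it is only a plausible approximation of the idea, not the actual implementation in lib/utils/sdp-utils:

  // Illustrative only: promote opus in both the a=rtpmap order (what the test
  // above inspects) and the m-line payload ordering (what signals codec preference).
  const sdpTransform = require('sdp-transform');
  function sketchMakeOpusFirst(sdp) {
    const parsed = sdpTransform.parse(sdp);
    const audio = parsed.media.find((m) => m.type === 'audio');
    const opus = audio && audio.rtp.find((r) => r.codec.toLowerCase() === 'opus');
    if (!opus) return sdp;  // nothing to do without an audio m-line offering opus
    audio.rtp = [opus, ...audio.rtp.filter((r) => r !== opus)];
    const others = String(audio.payloads).split(' ').filter((p) => p !== String(opus.payload));
    audio.payloads = [opus.payload, ...others].join(' ');
    return sdpTransform.write(parsed);
  }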

View File

@@ -52,7 +52,6 @@ test('\'transcribe\' test - google', async(t) => {
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
//console.log(JSON.stringify(obj));
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'transcribe: succeeds when using google credentials');
@@ -90,7 +89,6 @@ test('\'transcribe\' test - microsoft', async(t) => {
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
//console.log(JSON.stringify(obj));
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'transcribe: succeeds when using microsoft credentials');
@@ -128,7 +126,6 @@ test('\'transcribe\' test - aws', async(t) => {
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
//console.log(JSON.stringify(obj));
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'transcribe: succeeds when using aws credentials');
@@ -140,71 +137,6 @@ test('\'transcribe\' test - aws', async(t) => {
}
});
test('\'transcribe\' test - deepgram config options', async(t) => {
if (!DEEPGRAM_API_KEY ) {
t.pass('skipping deepgram tests');
return t.end();
}
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let verbs = [
{
"verb": "config",
"recognizer": {
"vendor": "deepgram",
"language": "en-US",
"altLanguages": [
"en-US"
],
"deepgramOptions": {
"model": "2-ea",
"tier": "nova",
"numerals": true,
"ner": true,
"vadTurnoff": 10,
"keywords": [
"CPT"
]
}
}
},
{
"verb": "transcribe",
"transcriptionHook": "/transcriptionHook",
"recognizer": {
"vendor": "deepgram",
"altLanguages": [
"en-AU"
],
"hints": ["customer support", "sales", "human resources", "HR"],
"deepgramOptions": {
"apiKey": DEEPGRAM_API_KEY,
}
}
}
];
let from = "gather_success";
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
//console.log(JSON.stringify(obj));
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().includes('like to speak to customer support'),
'transcribe: succeeds when using deepgram credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'transcribe\' test - deepgram', async(t) => {
if (!DEEPGRAM_API_KEY ) {
t.pass('skipping deepgram tests');
@@ -234,7 +166,6 @@ test('\'transcribe\' test - deepgram', async(t) => {
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
//console.log(JSON.stringify(obj));
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().includes('like to speak to customer support'),
'transcribe: succeeds when using deepgram credentials');
@@ -314,131 +245,9 @@ test('\'transcribe\' test - google with asrTimeout', async(t) => {
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
//console.log(JSON.stringify(obj));
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'transcribe: succeeds when using google credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'transcribe\' test - deepgram config options, altLanguages omitted', async(t) => {
if (!DEEPGRAM_API_KEY ) {
t.pass('skipping deepgram tests');
return t.end();
}
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let verbs = [
{
"verb": "config",
"recognizer": {
"vendor": "deepgram",
"language": "en-US",
"altLanguages": [
"en-US"
],
"deepgramOptions": {
"model": "nova-2",
"numerals": true,
"ner": true,
"vadTurnoff": 10,
"keywords": [
"CPT"
]
}
}
},
{
"verb": "transcribe",
"transcriptionHook": "/transcriptionHook",
"recognizer": {
"vendor": "deepgram",
"hints": ["customer support", "sales", "human resources", "HR"],
"deepgramOptions": {
"apiKey": DEEPGRAM_API_KEY,
}
}
}
];
let from = "gather_success_no_altLanguages";
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
//console.log(JSON.stringify(obj));
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().includes('like to speak to customer support'),
'transcribe: succeeds when using deepgram credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'transcribe\' test - deepgram config options, empty altLanguages', async(t) => {
if (!DEEPGRAM_API_KEY ) {
t.pass('skipping deepgram tests');
return t.end();
}
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let verbs = [
{
"verb": "config",
"recognizer": {
"vendor": "deepgram",
"language": "en-US",
"altLanguages": [
"en-US"
],
"deepgramOptions": {
"model": "nova-2",
"numerals": true,
"ner": true,
"vadTurnoff": 10,
"keywords": [
"CPT"
]
}
}
},
{
"verb": "transcribe",
"transcriptionHook": "/transcriptionHook",
"recognizer": {
"vendor": "deepgram",
"hints": ["customer support", "sales", "human resources", "HR"],
"altLanguages": [],
"deepgramOptions": {
"apiKey": DEEPGRAM_API_KEY,
}
}
}
];
let from = "gather_success_has_altLanguages";
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
//console.log(JSON.stringify(obj));
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().includes('like to speak to customer support'),
'transcribe: succeeds when using deepgram credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);