Compare commits


68 Commits

Author SHA1 Message Date
Dave Horton
38185426e2 further fixes for #210 2023-01-07 14:49:59 -05:00
Dave Horton
0f6b7e860d allow redirect to ws(s) url after starting with http(s) #210 2023-01-07 14:13:11 -05:00
Dave Horton
2e3fb60e72 support google hints as an array of objects containing both hint phra… (#209)
* support google hints as an array of objects containing both hint phrase and boost value

* handle structured hints for non-google STT (#205)
2023-01-06 08:00:17 -05:00
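For reference, a minimal sketch of a gather verb using the new object-style hints (the phrase/boost property names are inferred from the commit description; the hook path and values are illustrative):

const gather = {
  verb: 'gather',
  input: ['speech'],
  recognizer: {
    vendor: 'google',
    language: 'en-US',
    hints: [
      'jambonz',                        // plain string hints still work
      {phrase: 'drachtio', boost: 10}   // object form carries a per-phrase boost
    ]
  },
  actionHook: '/transcript'
};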
Dave Horton
05a4665f87 Feature/force tts generation (#208)
* Feature: add option synthesizer.forceTtsGeneration #198

* Feature: add option synthesizer.forceTtsGeneration #198

* minor cleanup

* minor

Co-authored-by: Michal Tesar <michal@irevolution.group>
2023-01-04 15:42:48 -05:00
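For reference, a sketch of the new synthesizer option from #198, which presumably forces audio to be regenerated rather than served from the TTS cache (voice and text are illustrative):

const say = {
  verb: 'say',
  text: 'Hello from jambonz',
  synthesizer: {
    vendor: 'google',
    voice: 'en-US-Standard-C',
    forceTtsGeneration: true   // regenerate audio even if a cached copy exists
  }
};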
Dave Horton
b16d49d8ea Bugfix/gather kill race condition (#207)
* further fix for race condition in #206

* #206: ignore request to start bot mode when bot mode is already active
2023-01-04 15:19:35 -05:00
Dave Horton
aad2d52efd fix #206: prevent 2 simultaneous background gathers 2023-01-03 10:04:51 -05:00
Dave Horton
83d767116b add support for http transport for jaeger 2022-12-30 10:47:31 -05:00
Dave Horton
b4673ad942 update to latest drachtio-srf and realtimedb-helpers 2022-12-29 10:22:16 -05:00
Dave Horton
9b8bb07a97 update to latest drachtio-fsmrf 2022-12-28 11:05:06 -06:00
Dave Horton
29f578ff5c faster uuid 2022-12-28 10:40:26 -06:00
Dave Horton
6d86793494 update to latest drachtio-srf and drachtio-fsmrf 2022-12-21 12:26:51 -05:00
Dave Horton
9f95fde67e faster uuid generator 2022-12-21 08:27:00 -05:00
Dave Horton
010b4d2778 bugfix: db caching had side effects of using closed http requestors 2022-12-13 14:55:23 -05:00
Dave Horton
8d81c20c1a Feature/fsrmf perf improvement (#197)
* update drachtio-fsmrf

* update drachtio-fsmrf

* sync package-lock.json
2022-12-11 12:12:50 -05:00
Dave Horton
69f796e960 update gh actions 2022-12-10 15:32:50 -05:00
Dave Horton
4db03d3d1b update to drachtio-fsmrf@3.0.8 with performance improvements for call setup 2022-12-10 15:12:15 -05:00
Dave Horton
a60c6a4740 add support for ws verb:status event notifications (#196) 2022-12-09 21:11:47 -05:00
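Judging from the config-verb and CallSession changes in the diff below, status events are enabled per call via the config verb and then pushed over the websocket as 'verb:status' messages; a rough sketch (the message shape is inferred from _notifyTaskStatus and may omit envelope fields):

const config = {verb: 'config', notifyEvents: true};

// each verb then emits messages roughly like:
// {type: 'verb:status', data: {event: 'starting', id: '<verb id>', name: 'say'}}
// {type: 'verb:status', data: {event: 'finished', id: '<verb id>', name: 'say'}}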
dependabot[bot]
5b875c3ad4 Bump qs and express in /test/webhook (#195)
Bumps [qs](https://github.com/ljharb/qs) to 6.11.0 and updates ancestor dependency [express](https://github.com/expressjs/express). These dependencies need to be updated together.


Updates `qs` from 6.7.0 to 6.11.0
- [Release notes](https://github.com/ljharb/qs/releases)
- [Changelog](https://github.com/ljharb/qs/blob/main/CHANGELOG.md)
- [Commits](https://github.com/ljharb/qs/compare/v6.7.0...v6.11.0)

Updates `express` from 4.17.1 to 4.18.2
- [Release notes](https://github.com/expressjs/express/releases)
- [Changelog](https://github.com/expressjs/express/blob/master/History.md)
- [Commits](https://github.com/expressjs/express/compare/4.17.1...4.18.2)

---
updated-dependencies:
- dependency-name: qs
  dependency-type: indirect
- dependency-name: express
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-12-09 20:34:00 -05:00
Dave Horton
bf19d2ae6d fixes for runtime speech credentials 2022-12-09 20:25:39 -05:00
Dave Horton
37efdc62be fix prev commit 2022-12-09 11:07:41 -05:00
Dave Horton
78a76bb1f4 bugfix: when handing over from wss to http close the wss socket 2022-12-09 10:57:59 -05:00
Dave Horton
39fb762a15 ibm speech fix 2022-12-04 11:28:02 -05:00
Dave Horton
2cc3140de0 bugfix #192: config with dtmf only followed later by gather with speech not working 2022-12-01 13:05:11 -05:00
Dave Horton
1a1f2770b6 include service_provider_sid in call webhook 2022-11-30 13:00:35 -05:00
Dave Horton
23f3b44b8b add custom header on Refer indicating whether sbc-inbound should fix up the Refer-To 2022-11-30 13:00:03 -05:00
Dave Horton
753d46e513 error handling in amd 2022-11-22 15:39:37 -05:00
Dave Horton
71a2435c63 Feature/ibm watson (#193)
* initial changes to support ibm watson

* update specs.json for ibm

* update to drachtio-fsmrf with support for ibm

* bugfix: set access token for ibm stt, not api_key

* fix name of api_key

* normalize ibm transcription results

* rework ibm credentials

* bugfix setting runtime speech creds

* bugfix: ibm region

* typo

* changes to transcribe for ibm watson

* implement connect handler

* bugfix: bind error

* proper use of result_index

* ibm error handling
2022-11-21 22:09:37 -05:00
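For reference, a sketch of selecting IBM Watson for recognition; per the CallSession diff below the stored credential carries stt_api_key/stt_region (plus tts_api_key/tts_region for TTS), and an access token is obtained from the key at runtime (language value and hook path are illustrative):

const gather = {
  verb: 'gather',
  input: ['speech'],
  recognizer: {vendor: 'ibm', language: 'en-US'},
  actionHook: '/transcript'
};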
Dave Horton
8686348454 Feature/deepgram stt (#190)
* initial changes to support deepgram stt

* fixes for normalizing vendor-specific transcriptions

* update to latest drachtio-fsmrf with support for deepgram stt

* deepgram parsing error

* hints support for deepgram

* handling deepgram errors

* ignore late arriving transcripts for deepgram

* handling of empty transcripts

* transcribe changes

* allow deepgram stt credentials to be provided at run time

* bind channel in transcription handler

* fixes for transcribe when handling empty transcripts

* more empty transcript fixes

* update tests to latest modules

* add test cases for deepgram speech recognition
2022-11-12 19:48:59 -05:00
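For reference, a sketch of a gather using Deepgram; the commit notes that credentials may also be supplied at run time, and the CallSession diff below shows the stored credential is simply an api_key (values illustrative):

const gather = {
  verb: 'gather',
  input: ['speech'],
  recognizer: {vendor: 'deepgram', language: 'en-US'},
  actionHook: '/transcript'
};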
Guilherme Rauen
f511e6ab6b update node image to the latest and most secure (#189)
Co-authored-by: Guilherme Rauen <g.rauen@cognigy.com>
2022-11-11 11:22:07 -05:00
Dave Horton
706cd4b94b bugfix: handle gather/transcribe where vendor not explicitly specified #187 2022-11-07 09:31:51 -05:00
Dave Horton
e5c209e269 fix for #186: unhandled error when amd webhook returns non-success status code 2022-11-06 09:41:34 -05:00
Dave Horton
d903dbe28d update deps to latest db/realtime-db 2022-11-06 09:39:51 -05:00
Dave Horton
d88321c24d fixes for custom voice testing in azure 2022-11-06 09:37:22 -05:00
Dave Horton
6e1761bab6 update to db-helpers with caching fix 2022-11-01 20:56:51 -04:00
Dave Horton
509bb065bb Feature/nuance stt (#185)
* initial changes to gather to support nuance stt

* updateSpeechCredentialLastUsed could be called without a speech_credential_sid if credentials are passed in the flow

* fix bugname

* typo

* added handlers for nuance

* logging

* major refactor of parsing transcriptions

* initial support for nuance in transcribe verb

* updates from testing

* cleanup some tests

* update action

* typo

* gather: start nuance timers after say/play completes

* update drachtio-fsrmf

* refactor some code

* typo

* log nuance error detail

* timeout handling

* typo

* handle nuance 413 response when recognition times out

* typo in specs.json

* add support for nuance resources

* fixes and tests for transcribe

* remove logging from test

* initial support for kryptonEndpoint

* try getting access token even when using krypton

* typo in kryptonEndpoint property

* add support for Nuance tts

* parse nuance voice and model for tts

* use nuance credentials from db

* update to db-helpers@0.7.0 with caching option

* add support for azure audio logging in gather/transcribe

* sync package-lock.json
2022-11-01 12:23:49 -04:00
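For reference, a sketch of Nuance TTS voice selection; the TaskSay diff below splits a "name - model" voice string into a voice and an enhanced/standard model, so a say verb might look like this (the voice name is illustrative):

const say = {
  verb: 'say',
  text: 'Welcome to jambonz',
  synthesizer: {
    vendor: 'nuance',
    voice: 'Evan - enhanced'   // parsed into voice 'Evan', model 'enhanced'
  }
};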
Dave Horton
203b9774ca bugfix: ws max connections error causes a crash 2022-11-01 11:42:08 -04:00
Dave Horton
fade47d423 bugfix when running multiple instances in EC2 2022-11-01 11:42:01 -04:00
Dave Horton
26e52d131e update to db-helpers@0.7.0 with caching option 2022-11-01 11:41:53 -04:00
Dave Horton
70caf00dd1 Feature/multi forks on ec2 (#182)
* changes to allow multiple instances to run in an EC2 autoscale deployment

* fix health check

* fixup aws sns notification so it subscribes using bound port

* AWS SNS port range 3010-3019
2022-10-30 13:07:49 -04:00
Dave Horton
f044cdd150 bugfix: rest:dial with fromHost now working 2022-10-26 09:36:53 -04:00
Dave Horton
c3d39f0970 add support for fromHost in createCall rest api 2022-10-25 13:32:13 -04:00
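For reference, a sketch of a createCall request body using the new fromHost property; per the create-call route diff below it is forwarded to the SBC as an X-Preferred-From-Host header (all values illustrative):

const payload = {
  from: '15551234567',
  fromHost: 'sip.example.com',   // sent as X-Preferred-From-Host on the outbound INVITE
  to: {type: 'phone', number: '15557654321'},
  call_hook: 'https://example.com/call-hook',
  call_status_hook: 'https://example.com/call-status'
};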
Dave Horton
9c69a2c79f fix typo (again) 2022-10-24 18:35:15 -04:00
Dave Horton
e0607b9c2e feature: specify user or host part of From uri on outdial 2022-10-23 15:24:30 -04:00
Dave Horton
dc378cd065 update package-lock.json 2022-10-23 12:23:10 -04:00
Markus Frindt
138950c534 [snyk] fix vulnerabilities (#177)
Co-authored-by: Markus Frindt <m.frindt@cognigy.com>
2022-10-20 21:35:36 -04:00
Dave Horton
215a28b615 bugfix: conference verb will have '_' property when leg is moved from other FS 2022-10-20 12:25:50 -04:00
Dave Horton
3a5efa37b9 bugfix: to move call leg to a different FS using the special REFER, we now must include X-Account-Sid header 2022-10-15 10:52:56 -04:00
Dave Horton
917b8f332c minor logging 2022-10-14 12:53:44 -04:00
Dave Horton
17848ea22c bump version 2022-10-13 16:02:35 -04:00
Dave Horton
43af27e802 update time-series 2022-10-10 09:19:05 +01:00
Dave Horton
b25f92e17a Feature/azure custom stt (#171)
* gather/transcribe: support for azure custom speech models (endpoint id)

* allow azure stt custom speech endpoint id to be passed as property in recognizer

* fix to add custom stt endpoint to session speech credentials object
2022-10-07 09:46:25 +01:00
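A sketch only: the commit says the Azure custom speech endpoint id can be passed as a property in the recognizer, but the exact property name is not visible in this diff, so the name used below is hypothetical:

const gather = {
  verb: 'gather',
  input: ['speech'],
  recognizer: {
    vendor: 'microsoft',
    language: 'en-US',
    sttEndpointId: 'your-custom-endpoint-id'   // hypothetical property name
  },
  actionHook: '/transcript'
};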
Dave Horton
90cb5e1348 bugfix: typo in bugname was causing transcripts to be ignored 2022-10-04 12:59:58 +01:00
Dave Horton
cf821569b3 minor logging changes 2022-10-02 22:36:27 +01:00
Dave Horton
218f2d6c67 bugfix: unnecessary call to stopTranscription in gather verb when only collecting digits 2022-09-30 10:27:33 +01:00
Joan
c2c8f00978 added call_termination_by on app call status (#168)
Co-authored-by: Joan Salvatella <joan@bookline.io>
2022-09-23 09:13:55 +02:00
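Based on the CallInfo diff below, call status webhooks now report who ended the call via a callTerminationBy property (e.g. 'caller' or 'jambonz'); a minimal Express handler sketch, with other payload fields omitted:

const express = require('express');
const app = express();
app.use(express.json());
app.post('/call-status', (req, res) => {
  const {call_status, callTerminationBy} = req.body;
  console.log(`call ended: ${call_status}, terminated by ${callTerminationBy}`);
  res.sendStatus(200);
});
app.listen(3000);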
Dave Horton
32714d73f3 update to synthAudio with bugfix for writing TTS rtt stats for microsoft 2022-09-21 15:22:19 +02:00
Dave Horton
8da85ebd5a include custom header X-Application-Sid to make it available to cdrs 2022-09-20 13:54:54 +02:00
Dave Horton
dcedf68264 regression bug with adding amd 2022-09-20 09:32:02 +02:00
Dave Horton
05c5d2211f regression bug with parse-url update 2022-09-20 09:31:44 +02:00
Dave Horton
0c089e2380 bugfix: config was not properly enabling amd when configured 2022-09-19 21:16:19 +02:00
Dave Horton
099f33857c update time-series and parse-url 2022-09-16 13:07:08 +02:00
Dave Horton
bd49dacac4 Say length text (#165)
* typo for media bug name in azure and punctuation fix

* say: split very long text intelligently

* more fixes from testing

* update to latest synthAudio
2022-09-14 17:17:29 +02:00
Dave Horton
876824abde typo for media bug name in azure and punctuation fix 2022-09-13 16:22:46 +02:00
Dave Horton
468a9e6d6b make maxPayload of websocket configurable via JAMBONES_WS_MAX_PAYLOAD 2022-09-13 12:35:31 +02:00
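For reference, a sketch of how the new environment variable would typically feed the ws library's maxPayload option (the default applied when the variable is unset is not shown in this diff):

const WebSocket = require('ws');
const maxPayload = process.env.JAMBONES_WS_MAX_PAYLOAD ?
  parseInt(process.env.JAMBONES_WS_MAX_PAYLOAD, 10) :
  undefined;   // fall back to the library default when unset
const wss = new WebSocket.Server({noServer: true, maxPayload});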
Dave Horton
c88163fe11 Bugfix/config stt punctuation (#164)
* support recognizer.punctuation in config verb (#163)

* fixes from testing
2022-09-13 11:45:36 +02:00
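For reference, a sketch of setting recognizer punctuation session-wide via the config verb (#163); per the gather diff below, later gather/transcribe verbs inherit the setting unless they specify punctuation themselves:

const config = {
  verb: 'config',
  recognizer: {punctuation: false}   // disable automatic punctuation for the rest of the call
};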
xquanluu
bf7ece8f17 feat: play verb support seekOffset and actionHook (#160)
* feat: play verb support seekOffset and actionHook

* add testcase

* fix: testcase
2022-09-13 08:46:16 +02:00
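For reference, a sketch of the extended play verb; seekOffset and timeoutSecs appear in the TaskPlay diff below (their units are not stated there), and the actionHook fires with reason 'playCompleted' (values illustrative):

const play = {
  verb: 'play',
  url: 'https://example.com/audio.mp3',
  seekOffset: 8000,
  timeoutSecs: 10,
  actionHook: '/playDone'
};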
Paulo Telles
e90ef6bc70 change node image and moment package version (#161)
Co-authored-by: p.souza <p.souza@cognigy.com>
2022-09-07 13:20:39 +02:00
Dave Horton
a59f6097d7 bump version to 0.7.6 2022-08-26 20:06:04 +02:00
52 changed files with 7827 additions and 3644 deletions

View File

@@ -5,12 +5,12 @@ on:
 jobs:
   build:
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
-      - uses: actions/setup-node@v1
+      - uses: actions/checkout@v3
+      - uses: actions/setup-node@v3
        with:
-         node-version: 14
+         node-version: 16
      - run: npm ci
      - run: npm run jslint
      - run: docker pull drachtio/sipp
@@ -20,3 +20,5 @@ jobs:
       AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
       AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
       AWS_REGION: ${{ secrets.AWS_REGION }}
+      MICROSOFT_REGION: ${{ secrets.MICROSOFT_REGION }}
+      MICROSOFT_API_KEY: ${{ secrets.MICROSOFT_API_KEY }}

View File

@@ -20,7 +20,7 @@ jobs:
     if: github.event_name == 'push'
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
       - name: Build image
         run: docker build . --file Dockerfile --tag $IMAGE_NAME

View File

@@ -1,4 +1,4 @@
-FROM --platform=linux/amd64 node:18.6.0-alpine as base
+FROM --platform=linux/amd64 node:18.12.1-alpine3.16 as base
 RUN apk --update --no-cache add --virtual .builds-deps build-base python3

app.js
View File

@@ -15,7 +15,6 @@ const tracer = require('./tracer')(process.env.JAMBONES_OTEL_SERVICE_NAME || 'ja
const api = require('@opentelemetry/api');
srf.locals = {...srf.locals, otel: {tracer, api}};
const PORT = process.env.HTTP_PORT || 3000;
const opts = {level: process.env.JAMBONES_LOGLEVEL || 'info'};
const pino = require('pino');
const logger = pino(opts, pino.destination({sync: false}));
@@ -33,17 +32,6 @@ const {
invokeWebCallback
} = require('./lib/middleware')(srf, logger);
// HTTP
const express = require('express');
const helmet = require('helmet');
const app = express();
Object.assign(app.locals, {
logger,
srf
});
const httpRoutes = require('./lib/http-routes');
const InboundCallSession = require('./lib/session/inbound-call-session');
const SipRecCallSession = require('./lib/session/siprec-call-session');
@@ -82,20 +70,6 @@ srf.invite(async(req, res) => {
session.exec();
});
// HTTP
app.use(helmet());
app.use(helmet.hidePoweredBy());
app.use(express.urlencoded({ extended: true }));
app.use(express.json());
app.use('/', httpRoutes);
app.use((err, req, res, next) => {
logger.error(err, 'burped error');
res.status(err.status || 500).json({msg: err.message});
});
const httpServer = app.listen(PORT);
logger.info(`listening for HTTP requests on port ${PORT}, serviceUrl is ${srf.locals.serviceUrl}`);
const sessionTracker = srf.locals.sessionTracker = require('./lib/session/session-tracker');
sessionTracker.on('idle', () => {
if (srf.locals.lifecycleEmitter.operationalState === LifeCycleEvents.ScaleIn) {
@@ -103,19 +77,30 @@ sessionTracker.on('idle', () => {
srf.locals.lifecycleEmitter.scaleIn();
}
});
const getCount = () => sessionTracker.count;
const healthCheck = require('@jambonz/http-health-check');
healthCheck({app, logger, path: '/', fn: getCount});
let httpServer;
const createHttpListener = require('./lib/utils/http-listener');
createHttpListener(logger, srf)
.then(({server, app}) => {
httpServer = server;
healthCheck({app, logger, path: '/', fn: getCount});
return {server, app};
})
.catch((err) => {
logger.error(err, 'Error creating http listener');
});
setInterval(() => {
srf.locals.stats.gauge('fs.sip.calls.count', sessionTracker.count);
}, 5000);
}, 20000);
const disconnect = () => {
return new Promise ((resolve) => {
httpServer.on('close', resolve);
httpServer.close();
httpServer?.on('close', resolve);
httpServer?.close();
srf.disconnect();
srf.locals.mediaservers.forEach((ms) => ms.disconnect());
});

View File

@@ -48,5 +48,5 @@
     "ens posarem en contacto",
     "ara no estem disponibles",
     "no hi som"
-  ],
+  ]
 }

View File

@@ -3,7 +3,7 @@ const makeTask = require('../../tasks/make_task');
const RestCallSession = require('../../session/rest-call-session');
const CallInfo = require('../../session/call-info');
const {CallDirection, CallStatus} = require('../../utils/constants');
const { v4: uuidv4 } = require('uuid');
const uuidv4 = require('uuid-random');
const SipError = require('drachtio-srf').SipError;
const sysError = require('./error');
const HttpRequestor = require('../../utils/http-requestor');
@@ -41,7 +41,8 @@ router.post('/', async(req, res) => {
'X-Jambonz-Routing': target.type,
'X-Jambonz-FS-UUID': srf.locals.fsUUID,
'X-Call-Sid': callSid,
'X-Account-Sid': accountSid
'X-Account-Sid': accountSid,
...(restDial.fromHost && {'X-Preferred-From-Host': restDial.fromHost})
};
switch (target.type) {
@@ -135,7 +136,7 @@ router.post('/', async(req, res) => {
}
else if (!app.notifier) {
logger.debug('creating null call status hook');
app.notifier = {request: () => {}};
app.notifier = {request: () => {}, close: () => {}};
}
/* now launch the outdial */

View File

@@ -34,6 +34,7 @@ router.post('/:partner', async(req, res) => {
carrier: req.params.partner,
messageSid: app.messageSid,
accountSid: app.accountSid,
serviceProviderSid: account.service_provider_sid,
applicationSid: app.applicationSid,
from: req.body.from,
to: req.body.to,

View File

@@ -1,4 +1,4 @@
const { v4: uuidv4 } = require('uuid');
const uuidv4 = require('uuid-random');
const {CallDirection, AllowedSipRecVerbs} = require('./utils/constants');
const {parseSiprecPayload} = require('./utils/siprec-utils');
const CallInfo = require('./session/call-info');
@@ -118,6 +118,7 @@ module.exports = function(srf, logger) {
const {span} = rootSpan.startChildSpan('lookupAccountDetails');
try {
req.locals.accountInfo = await lookupAccountDetails(account_sid);
req.locals.service_provider_sid = req.locals.accountInfo?.account?.service_provider_sid;
span.end();
if (!req.locals.accountInfo.account.is_active) {
logger.info(`Account is inactive or suspended ${account_sid}`);
@@ -224,29 +225,30 @@ module.exports = function(srf, logger) {
* create a requestor that we will use for all http requests we make during the call.
* also create a notifier for call status events (if not needed, its a no-op).
*/
/* allow for caching data - when caching treat retrieved data as immutable */
const app2 = process.env.JAMBONES_MYSQL_REFRESH_TTL ? JSON.parse(JSON.stringify(app)) : app;
if ('WS' === app.call_hook?.method ||
app.call_hook?.url.startsWith('ws://') || app.call_hook?.url.startsWith('wss://')) {
app.requestor = new WsRequestor(logger, account_sid, app.call_hook, accountInfo.account.webhook_secret) ;
app.notifier = app.requestor;
app.call_hook.method = 'WS';
app2.requestor = new WsRequestor(logger, account_sid, app.call_hook, accountInfo.account.webhook_secret) ;
app2.notifier = app.requestor;
app2.call_hook.method = 'WS';
}
else {
app.requestor = new HttpRequestor(logger, account_sid, app.call_hook, accountInfo.account.webhook_secret);
if (app.call_status_hook) app.notifier = new HttpRequestor(logger, account_sid, app.call_status_hook,
app2.requestor = new HttpRequestor(logger, account_sid, app.call_hook, accountInfo.account.webhook_secret);
if (app.call_status_hook) app2.notifier = new HttpRequestor(logger, account_sid, app.call_status_hook,
accountInfo.account.webhook_secret);
else app.notifier = {request: () => {}};
else app2.notifier = {request: () => {}};
}
req.locals.application = app;
const obj = Object.assign({}, app);
delete obj.requestor;
delete obj.notifier;
req.locals.application = app2;
// eslint-disable-next-line no-unused-vars
const {call_hook, call_status_hook, ...appInfo} = obj; // mask sensitive data like user/pass on webhook
const {call_hook, call_status_hook, ...appInfo} = app; // mask sensitive data like user/pass on webhook
logger.info({app: appInfo}, `retrieved application for incoming call to ${req.locals.calledNumber}`);
req.locals.callInfo = new CallInfo({
req,
app,
app: app2,
direction: CallDirection.Inbound,
traceId: rootSpan.traceId
});
@@ -273,7 +275,9 @@ module.exports = function(srf, logger) {
}
/* retrieve the application to execute for this inbound call */
const params = Object.assign(['POST', 'WS'].includes(app.call_hook.method) ? {sip: req.msg} : {},
req.locals.callInfo, {
req.locals.callInfo,
{service_provider_sid: req.locals.service_provider_sid},
{
defaults: {
synthesizer: {
vendor: app.speech_synthesis_vendor,

View File

@@ -1,6 +1,6 @@
const {CallDirection, CallStatus} = require('../utils/constants');
const parseUri = require('drachtio-srf').parseUri;
const { v4: uuidv4 } = require('uuid');
const uuidv4 = require('uuid-random');
/**
* @classdesc Represents the common information for all calls
* that is provided in call status webhooks
@@ -11,6 +11,7 @@ class CallInfo {
let srf;
this.direction = opts.direction;
this.traceId = opts.traceId;
this.callTerminationBy = undefined;
if (opts.req) {
const u = opts.req.getParsedHeader('from');
const uri = parseUri(u.uri);
@@ -119,7 +120,7 @@ class CallInfo {
applicationSid: this.applicationSid,
fsSipAddress: this.localSipAddress
};
['parentCallSid', 'originatingSipIp', 'originatingSipTrunkName'].forEach((prop) => {
['parentCallSid', 'originatingSipIp', 'originatingSipTrunkName', 'callTerminationBy'].forEach((prop) => {
if (this[prop]) obj[prop] = this[prop];
});
if (typeof this.duration === 'number') obj.duration = this.duration;

View File

@@ -63,6 +63,7 @@ class CallSession extends Emitter {
assert(rootSpan);
this._recordState = RecordState.RecordingOff;
this._notifyEvents = false;
this.tmpFiles = new Set();
@@ -81,8 +82,18 @@ class CallSession extends Emitter {
this._pool = srf.locals.dbHelpers.pool;
const handover = (newRequestor) => {
this.logger.info(`handover to new base url ${newRequestor.url}`);
this.requestor.removeAllListeners();
this.application.requestor = newRequestor;
this.requestor.on('command', this._onCommand.bind(this));
this.requestor.on('connection-dropped', this._onWsConnectionDropped.bind(this));
this.requestor.on('handover', handover.bind(this));
};
this.requestor.on('command', this._onCommand.bind(this));
this.requestor.on('connection-dropped', this._onWsConnectionDropped.bind(this));
this.requestor.on('handover', handover.bind(this));
}
/**
@@ -261,6 +272,9 @@ class CallSession extends Emitter {
get recordState() { return this._recordState; }
get notifyEvents() { return this._notifyEvents; }
set notifyEvents(notify) { this._notifyEvents = !!notify; }
set globalSttHints({hints, hintsBoost}) {
this._globalSttHints = {hints, hintsBoost};
}
@@ -286,6 +300,18 @@ class CallSession extends Emitter {
return this._globalAltLanguages;
}
set globalSttPunctuation(punctuate) {
this._globalSttPunctuation = punctuate;
}
get globalSttPunctuation() {
return this._globalSttPunctuation;
}
hasGlobalSttPunctuation() {
return this._globalSttPunctuation !== undefined;
}
async notifyRecordOptions(opts) {
const {action} = opts;
this.logger.debug({opts}, 'CallSession:notifyRecordOptions');
@@ -433,6 +459,10 @@ class CallSession extends Emitter {
async enableBotMode(gather, autoEnable) {
try {
if (this.backgroundGatherTask) {
this.logger.info('CallSession:enableBotMode - bot mode currently enabled, ignoring request to start again');
return;
}
const t = normalizeJambones(this.logger, [gather]);
this.backgroundGatherTask = makeTask(this.logger, t[0]);
this._bargeInEnabled = true;
@@ -537,7 +567,11 @@ class CallSession extends Emitter {
return {
speech_credential_sid: credential.speech_credential_sid,
api_key: credential.api_key,
region: credential.region
region: credential.region,
use_custom_stt: credential.use_custom_stt,
custom_stt_endpoint: credential.custom_stt_endpoint,
use_custom_tts: credential.use_custom_tts,
custom_tts_endpoint: credential.custom_tts_endpoint
};
}
else if ('wellsaid' === vendor) {
@@ -546,6 +580,28 @@ class CallSession extends Emitter {
api_key: credential.api_key
};
}
else if ('nuance' === vendor) {
return {
speech_credential_sid: credential.speech_credential_sid,
client_id: credential.client_id,
secret: credential.secret
};
}
else if ('deepgram' === vendor) {
return {
speech_credential_sid: credential.speech_credential_sid,
api_key: credential.api_key
};
}
else if ('ibm' === vendor) {
return {
speech_credential_sid: credential.speech_credential_sid,
tts_api_key: credential.tts_api_key,
tts_region: credential.tts_region,
stt_api_key: credential.stt_api_key,
stt_region: credential.stt_region
};
}
}
else {
writeAlerts({
@@ -570,15 +626,22 @@ class CallSession extends Emitter {
const stackNum = this.stackIdx;
const task = this.tasks.shift();
this.logger.info(`CallSession:exec starting task #${stackNum}:${taskNum}: ${task.name}`);
this._notifyTaskStatus(task, {event: 'starting'});
try {
const resources = await this._evaluatePreconditions(task);
let skip = false;
this.currentTask = task;
if (TaskName.Gather === task.name && this.isBotModeEnabled) {
const timeout = task.timeout;
this.logger.info(`CallSession:exec skipping #${stackNum}:${taskNum}: ${task.name}`);
this.backgroundGatherTask.updateTimeout(timeout);
if (this.backgroundGatherTask.updateTaskInProgress(task)) {
this.logger.info(`CallSession:exec skipping #${stackNum}:${taskNum}: ${task.name}`);
skip = true;
}
else {
this.logger.info('CallSession:exec disabling bot mode to start gather with new options');
this.disableBotMode();
}
}
else {
if (!skip) {
const {span, ctx} = this.rootSpan.startChildSpan(`verb:${task.summary}`);
task.span = span;
task.ctx = ctx;
@@ -587,6 +650,7 @@ class CallSession extends Emitter {
}
this.currentTask = null;
this.logger.info(`CallSession:exec completed task #${stackNum}:${taskNum}: ${task.name}`);
this._notifyTaskStatus(task, {event: 'finished'});
} catch (err) {
task.span?.end();
this.currentTask = null;
@@ -626,6 +690,7 @@ class CallSession extends Emitter {
this._onTasksDone();
this._clearResources();
if (!this.isConfirmCallSession && !this.isSmsCallSession) sessionTracker.remove(this.callSid);
}
@@ -1146,14 +1211,14 @@ class CallSession extends Emitter {
// need to allocate an endpoint
try {
if (!this.ms) this.ms = this.getMS();
const ep = await this.ms.createEndpoint({remoteSdp: this.req.body});
const ep = await this.ms.createEndpoint({
headers: {
'X-Jambones-Call-ID': this.callId,
},
remoteSdp: this.req.body
});
//ep.cs = this;
this.ep = ep;
ep.set({
hangup_after_bridge: false,
park_after_bridge: true
}).catch((err) => this.logger.error({err}, 'Error setting park_after_bridge'));
this.logger.debug(`allocated endpoint ${ep.uuid}`);
this.ep.on('destroy', () => {
@@ -1176,6 +1241,7 @@ class CallSession extends Emitter {
} catch (err) {
if (err === CALLER_CANCELLED_ERR_MSG) {
this.logger.error(err, 'caller canceled quickly before we could respond, ending call');
this.callInfo.callTerminationBy = 'caller';
this._notifyCallStatusChange({
callStatus: CallStatus.NoAnswer,
sipStatus: 487,
@@ -1225,7 +1291,6 @@ class CallSession extends Emitter {
return;
}
this.ep = await this.ms.createEndpoint({remoteSdp: this.dlg.remote.sdp});
await this.ep.set('hangup_after_bridge', false);
await this.dlg.modify(this.ep.local.sdp);
this.logger.debug('CallSession:replaceEndpoint completed');
@@ -1253,6 +1318,7 @@ class CallSession extends Emitter {
}
this.tmpFiles.clear();
this.requestor && this.requestor.close();
this.notifier && this.notifier.close();
this.rootSpan && this.rootSpan.end();
}
@@ -1290,7 +1356,8 @@ class CallSession extends Emitter {
this.dlg = await this.srf.createUAS(this.req, this.res, {
headers: {
'X-Trace-ID': this.req.locals.traceId,
'X-Call-Sid': this.req.locals.callSid
'X-Call-Sid': this.req.locals.callSid,
...(this.applicationSid && {'X-Application-Sid': this.applicationSid})
},
localSdp: this.ep.local.sdp
});
@@ -1361,7 +1428,6 @@ class CallSession extends Emitter {
}
if (!this.ep) {
this.ep = await this.ms.createEndpoint({remoteSdp: this.req.body});
await this.ep.set('hangup_after_bridge', false);
}
return {ms: this.ms, ep: this.ep};
}
@@ -1444,7 +1510,8 @@ class CallSession extends Emitter {
headers: {
'Refer-To': referTo,
'Referred-By': `sip:${this.srf.locals.localSipAddress}`,
'X-Retain-Call-Sid': this.callSid
'X-Retain-Call-Sid': this.callSid,
'X-Account-Sid': this.accountSid
}
});
if ([200, 202].includes(res.status)) {
@@ -1482,8 +1549,9 @@ class CallSession extends Emitter {
dlg.connected = false;
dlg.destroy = origDestroy;
const duration = moment().diff(this.dlg.connectTime, 'seconds');
this.callInfo.callTerminationBy = 'jambonz';
this.emit('callStatusChange', {callStatus: CallStatus.Completed, duration});
this.logger.debug('CallSession: call terminated by jambones');
this.logger.debug('CallSession: call terminated by jambonz');
this.rootSpan.setAttributes({'call.termination': 'hangup by jambonz'});
origDestroy().catch((err) => this.logger.info({err}, 'CallSession - error destroying dialog'));
if (this.wakeupResolver) {
@@ -1565,6 +1633,25 @@ class CallSession extends Emitter {
.catch((err) => this.logger.error(err, 'redis error'));
}
/**
* notifyTaskError - only used when websocket connection is used instead of webhooks
*/
_notifyTaskError(obj) {
if (this.requestor instanceof WsRequestor) {
this.requestor.request('jambonz:error', '/error', obj)
.catch((err) => this.logger.debug({err}, 'CallSession:_notifyTaskError - Error sending'));
}
}
_notifyTaskStatus(task, evt) {
if (this.notifyEvents && this.requestor instanceof WsRequestor) {
const obj = {...evt, id: task.id, name: task.name};
this.requestor.request('verb:status', '/status', obj)
.catch((err) => this.logger.debug({err}, 'CallSession:_notifyTaskStatus - Error sending'));
}
}
_awaitCommandsOrHangup() {
assert(!this.wakeupResolver);
return new Promise((resolve, reject) => {

View File

@@ -34,6 +34,7 @@ class InboundCallSession extends CallSession {
_onCancel() {
this.rootSpan.setAttributes({'call.termination': 'caller abandoned'});
this.callInfo.callTerminationBy = 'caller';
this._notifyCallStatusChange({
callStatus: CallStatus.NoAnswer,
sipStatus: 487,
@@ -69,6 +70,7 @@ class InboundCallSession extends CallSession {
assert(this.dlg.connectTime);
const duration = moment().diff(this.dlg.connectTime, 'seconds');
this.rootSpan.setAttributes({'call.termination': 'hangup by caller'});
this.callInfo.callTerminationBy = 'caller';
this.emit('callStatusChange', {
callStatus: CallStatus.Completed,
duration

View File

@@ -44,6 +44,7 @@ class RestCallSession extends CallSession {
* This is invoked when the called party hangs up, in order to calculate the call duration.
*/
_callerHungup() {
this.callInfo.callTerminationBy = 'caller';
const duration = moment().diff(this.dlg.connectTime, 'seconds');
this.emit('callStatusChange', {callStatus: CallStatus.Completed, duration});
this.logger.debug('RestCallSession: called party hung up');

View File

@@ -36,7 +36,8 @@ class SipRecCallSession extends InboundCallSession {
headers: {
'Content-Type': 'application/sdp',
'X-Trace-ID': this.req.locals.traceId,
'X-Call-Sid': this.req.locals.callSid
'X-Call-Sid': this.req.locals.callSid,
...(this.applicationSid && {'X-Application-Sid': this.applicationSid})
},
localSdp: combinedSdp
});

View File

@@ -11,6 +11,10 @@ class TaskConfig extends Task {
'record'
].forEach((k) => this[k] = this.data[k] || {});
if ('notifyEvents' in this.data) {
this.notifyEvents = !!this.data.notifyEvents;
}
if (this.bargeIn.enable) {
this.gatherOpts = {
verb: 'gather',
@@ -26,7 +30,7 @@ class TaskConfig extends Task {
});
}
if (this.bargeIn.sticky) this.autoEnable = true;
this.preconditions = (this.bargeIn.enable || this.record?.action) ?
this.preconditions = (this.bargeIn.enable || this.record?.action || this.data.amd) ?
TaskPreconditions.Endpoint :
TaskPreconditions.None;
}
@@ -51,16 +55,30 @@ class TaskConfig extends Task {
phrase.push(`set recognizer${s}`);
}
if (this.data.amd) phrase.push('enable amd');
return `${this.name}{${phrase.join(',')}`;
if (this.notifyEvents) phrase.push(`event notification ${this.notifyEvents ? 'on' : 'off'}`);
return `${this.name}{${phrase.join(',')}`;
}
async exec(cs) {
async exec(cs, {ep} = {}) {
await super.exec(cs);
if (this.notifyEvents) {
this.logger.debug(`turning event notification ${this.notifyEvents ? 'on' : 'off'}`);
cs.notifyEvents = !!this.data.notifEvents;
}
if (this.data.amd) {
this.startAmd = cs.startAmd;
this.stopAmd = cs.stopAmd;
this.on('amd', this._onAmdEvent.bind(this, cs));
try {
this.ep = ep;
this.startAmd(cs, ep, this, this.data.amd);
} catch (err) {
this.logger.info({err}, 'Config:exec - Error calling startAmd');
}
}
if (this.hasSynthesizer) {
@@ -98,6 +116,9 @@ class TaskConfig extends Task {
this.logger.info({altLanguages: this.recognizer.altLanguages}, 'Config: updated altLanguages');
cs.altLanguages = this.recognizer.altLanguages;
}
if ('punctuation' in this.recognizer) {
cs.globalSttPunctuation = this.recognizer.punctuation;
}
this.logger.info({
recognizer: this.recognizer,
isContinuousAsr: cs.isContinuousAsr
@@ -130,12 +151,17 @@ class TaskConfig extends Task {
async kill(cs) {
super.kill(cs);
if (this.ep && this.stopAmd) this.stopAmd(this.ep, this);
}
_onAmdEvent(cs, evt) {
this.logger.info({evt}, 'Config:_onAmdEvent');
const {actionHook} = this.data.amd;
this.performHook(cs, actionHook, evt);
this.performHook(cs, actionHook, evt)
.catch((err) => {
this.logger.error({err}, 'Config:_onAmdEvent - error calling actionHook');
});
}
}

View File

@@ -689,7 +689,10 @@ class TaskDial extends Task {
_onAmdEvent(cs, evt) {
this.logger.info({evt}, 'Dial:_onAmdEvent');
const {actionHook} = this.data.amd;
this.performHook(cs, actionHook, evt);
this.performHook(cs, actionHook, evt)
.catch((err) => {
this.logger.error({err}, 'Dial:_onAmdEvent - error calling actionHook');
});
}
}

View File

@@ -3,25 +3,24 @@ const {
TaskName,
TaskPreconditions,
GoogleTranscriptionEvents,
NuanceTranscriptionEvents,
AwsTranscriptionEvents,
AzureTranscriptionEvents
AzureTranscriptionEvents,
DeepgramTranscriptionEvents,
IbmTranscriptionEvents
} = require('../utils/constants');
const makeTask = require('./make_task');
const assert = require('assert');
//const GATHER_STABILITY_THRESHOLD = Number(process.env.JAMBONZ_GATHER_STABILITY_THRESHOLD || 0.7);
const compileTranscripts = (logger, evt, arr) => {
//logger.debug({arr, evt}, 'compile transcripts');
if (!Array.isArray(arr) || arr.length === 0) return;
let t = '';
for (const a of arr) {
//logger.debug(`adding ${a.alternatives[0].transcript}`);
t += ` ${a.alternatives[0].transcript}`;
}
t += ` ${evt.alternatives[0].transcript}`;
evt.alternatives[0].transcript = t.trim();
//logger.debug(`compiled transcript: ${evt.alternatives[0].transcript}`);
};
class TaskGather extends Task {
@@ -29,6 +28,16 @@ class TaskGather extends Task {
super(logger, opts);
this.preconditions = TaskPreconditions.Endpoint;
const {
setChannelVarsForStt,
normalizeTranscription,
removeSpeechListeners,
setSpeechCredentialsAtRuntime
} = require('../utils/transcription-utils')(logger);
this.setChannelVarsForStt = setChannelVarsForStt;
this.normalizeTranscription = normalizeTranscription;
this.removeSpeechListeners = removeSpeechListeners;
[
'finishOnKey', 'hints', 'input', 'numDigits', 'minDigits', 'maxDigits',
'interDigitTimeout', 'partialResultHook', 'bargein', 'dtmfBargein',
@@ -47,46 +56,19 @@ class TaskGather extends Task {
const recognizer = this.data.recognizer;
this.vendor = recognizer.vendor;
this.language = recognizer.language;
this.hints = recognizer.hints || [];
this.hintsBoost = recognizer.hintsBoost;
this.profanityFilter = recognizer.profanityFilter;
this.punctuation = !!recognizer.punctuation;
this.enhancedModel = !!recognizer.enhancedModel;
this.model = recognizer.model || 'command_and_search';
this.words = !!recognizer.words;
this.singleUtterance = recognizer.singleUtterance || true;
this.diarization = !!recognizer.diarization;
this.diarizationMinSpeakers = recognizer.diarizationMinSpeakers || 0;
this.diarizationMaxSpeakers = recognizer.diarizationMaxSpeakers || 0;
this.interactionType = recognizer.interactionType || 'unspecified';
this.naicsCode = recognizer.naicsCode || 0;
this.altLanguages = recognizer.altLanguages || [];
/* let credentials be supplied in the recognizer object at runtime */
this.sttCredentials = setSpeechCredentialsAtRuntime(recognizer);
/* continuous ASR (i.e. compile transcripts until a special timeout or dtmf key) */
this.asrTimeout = typeof recognizer.asrTimeout === 'number' ? recognizer.asrTimeout * 1000 : 0;
if (this.asrTimeout > 0) this.asrDtmfTerminationDigit = recognizer.asrDtmfTerminationDigit;
this.isContinuousAsr = this.asrTimeout > 0;
/* vad: if provided, we dont connect to recognizer until voice activity is detected */
const {enable, voiceMs = 0, mode = -1} = recognizer.vad || {};
this.vad = {enable, voiceMs, mode};
/* aws options */
this.vocabularyName = recognizer.vocabularyName;
this.vocabularyFilterName = recognizer.vocabularyFilterName;
this.filterMethod = recognizer.filterMethod;
/* microsoft options */
this.outputFormat = recognizer.outputFormat || 'simple';
this.profanityOption = recognizer.profanityOption || 'raw';
this.requestSnr = recognizer.requestSnr || false;
this.initialSpeechTimeoutMs = recognizer.initialSpeechTimeoutMs || 0;
this.azureServiceEndpoint = recognizer.azureServiceEndpoint;
}
else {
this.hints = [];
this.altLanguages = [];
this.data.recognizer.hints = this.data.recognizer.hints || [];
this.data.recognizer.altLanguages = this.data.recognizer.altLanguages || [];
}
else this.data.recognizer = {hints: [], altLanguages: []};
this.digitBuffer = '';
this._earlyMedia = this.data.earlyMedia === true;
@@ -133,19 +115,23 @@ class TaskGather extends Task {
this.logger.debug('Gather:exec');
await super.exec(cs);
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
const {getNuanceAccessToken, getIbmAccessToken} = cs.srf.locals.dbHelpers;
if (cs.hasGlobalSttHints) {
const {hints, hintsBoost} = cs.globalSttHints;
this.hints = this.hints.concat(hints);
if (!this.hintsBoost && hintsBoost) this.hintsBoost = hintsBoost;
this.logger.debug({hints: this.hints, hintsBoost: this.hintsBoost},
this.data.recognizer.hints = this.data.recognizer.hints.concat(hints);
if (!this.data.recognizer.hintsBoost && hintsBoost) this.data.recognizer.hintsBoost = hintsBoost;
this.logger.debug({hints: this.data.recognizer.hints, hintsBoost: this.data.recognizer.hintsBoost},
'Gather:exec - applying global sttHints');
}
if (cs.hasAltLanguages) {
this.altLanguages = this.altLanguages.concat(cs.altLanguages);
this.data.recognizer.altLanguages = this.data.recognizer.altLanguages.concat(cs.altLanguages);
this.logger.debug({altLanguages: this.altLanguages},
'Gather:exec - applying altLanguages');
}
if (cs.hasGlobalSttPunctuation && !this.data.recognizer.punctuation) {
this.data.recognizer.punctuation = cs.globalSttPunctuation;
}
if (!this.isContinuousAsr && cs.isContinuousAsr) {
this.isContinuousAsr = true;
this.asrTimeout = cs.asrTimeout * 1000;
@@ -158,7 +144,10 @@ class TaskGather extends Task {
this.ep = ep;
if ('default' === this.vendor || !this.vendor) this.vendor = cs.speechRecognizerVendor;
if ('default' === this.language || !this.language) this.language = cs.speechRecognizerLanguage;
this.sttCredentials = cs.getSpeechCredentials(this.vendor, 'stt');
if (!this.data.recognizer.vendor) {
this.data.recognizer.vendor = this.vendor;
}
if (this.needsStt && !this.sttCredentials) this.sttCredentials = cs.getSpeechCredentials(this.vendor, 'stt');
if (this.needsStt && !this.sttCredentials) {
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info(`TaskGather:exec - ERROR stt using ${this.vendor} requested but creds not supplied`);
@@ -171,16 +160,38 @@ class TaskGather extends Task {
throw new Error(`no speech-to-text service credentials for ${this.vendor} have been configured`);
}
this.logger.info({sttCredentials: this.sttCredentials}, 'Gather:exec - sttCredentials');
if (this.vendor === 'nuance' && this.sttCredentials.client_id) {
/* get nuance access token */
const {client_id, secret} = this.sttCredentials;
const {access_token, servedFromCache} = await getNuanceAccessToken(client_id, secret, 'asr tts');
this.logger.debug({client_id}, `Gather:exec - got nuance access token ${servedFromCache ? 'from cache' : ''}`);
this.sttCredentials = {...this.sttCredentials, access_token};
}
else if (this.vendor == 'ibm' && this.sttCredentials.stt_api_key) {
/* get ibm access token */
const {stt_api_key, stt_region} = this.sttCredentials;
const {access_token, servedFromCache} = await getIbmAccessToken(stt_api_key);
this.logger.debug({stt_api_key}, `Gather:exec - got ibm access token ${servedFromCache ? 'from cache' : ''}`);
this.sttCredentials = {...this.sttCredentials, access_token, stt_region};
}
const startListening = (cs, ep) => {
this._startTimer();
if (this.isContinuousAsr && 0 === this.timeout) this._startAsrTimer();
if (this.input.includes('speech') && !this.listenDuringPrompt) {
this.logger.debug('Gather:exec - calling _initSpeech');
this._initSpeech(cs, ep)
.then(() => {
if (this.killed) {
this.logger.info('Gather:exec - task was quickly killed so do not transcribe');
return;
}
this._startTranscribing(ep);
return updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid);
})
.catch(() => {});
.catch((err) => {
this.logger.error({err}, 'error in initSpeech');
});
}
};
@@ -194,7 +205,15 @@ class TaskGather extends Task {
span.end();
if (err) this.logger.error({err}, 'Gather:exec Error playing tts');
this.logger.debug('Gather: nested say task completed');
if (!this.killed) startListening(cs, ep);
if (!this.killed) {
startListening(cs, ep);
if (this.input.includes('speech') && this.vendor === 'nuance' && this.listenDuringPrompt) {
this.logger.debug('Gather:exec - starting transcription timers after say completes');
ep.startTranscriptionTimers((err) => {
if (err) this.logger.error({err}, 'Gather:exec - error starting transcription timers');
});
}
}
});
}
else if (this.playTask) {
@@ -206,10 +225,24 @@ class TaskGather extends Task {
span.end();
if (err) this.logger.error({err}, 'Gather:exec Error playing url');
this.logger.debug('Gather: nested play task completed');
if (!this.killed) startListening(cs, ep);
if (!this.killed) {
startListening(cs, ep);
if (this.input.includes('speech') && this.vendor === 'nuance' && this.listenDuringPrompt) {
this.logger.debug('Gather:exec - starting transcription timers after play completes');
ep.startTranscriptionTimers((err) => {
if (err) this.logger.error({err}, 'Gather:exec - error starting transcription timers');
});
}
}
});
}
else startListening(cs, ep);
else {
if (this.killed) {
this.logger.info('Gather:exec - task was immediately killed so do not transcribe');
return;
}
startListening(cs, ep);
}
if (this.input.includes('speech') && this.listenDuringPrompt) {
await this._initSpeech(cs, ep);
@@ -226,14 +259,7 @@ class TaskGather extends Task {
} catch (err) {
this.logger.error(err, 'TaskGather:exec error');
}
ep.removeCustomEventListener(GoogleTranscriptionEvents.Transcription);
ep.removeCustomEventListener(GoogleTranscriptionEvents.EndOfUtterance);
ep.removeCustomEventListener(GoogleTranscriptionEvents.VadDetected);
ep.removeCustomEventListener(AwsTranscriptionEvents.Transcription);
ep.removeCustomEventListener(AwsTranscriptionEvents.VadDetected);
ep.removeCustomEventListener(AzureTranscriptionEvents.Transcription);
ep.removeCustomEventListener(AzureTranscriptionEvents.NoSpeechDetected);
ep.removeCustomEventListener(AzureTranscriptionEvents.VadDetected);
this.removeSpeechListeners(ep);
}
kill(cs) {
@@ -246,8 +272,12 @@ class TaskGather extends Task {
this._resolve('killed');
}
updateTimeout(timeout) {
this.logger.info(`TaskGather:updateTimeout - updating timeout to ${timeout}`);
updateTaskInProgress(opts) {
if (!this.needsStt && opts.input.includes('speech')) {
this.logger.info('TaskGather:updateTaskInProgress - adding speech to a background gather');
return false; // this needs be handled by killing the background gather and starting a new one
}
const {timeout} = opts;
this.timeout = timeout;
this._startTimer();
}
@@ -288,97 +318,70 @@ class TaskGather extends Task {
}
async _initSpeech(cs, ep) {
const opts = {};
const opts = this.setChannelVarsForStt(this, this.sttCredentials, this.data.recognizer);
this.logger.debug(opts, 'TaskGather:_initSpeech - channel vars');
switch (this.vendor) {
case 'google':
this.bugname = 'google_transcribe';
ep.addCustomEventListener(GoogleTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(GoogleTranscriptionEvents.EndOfUtterance, this._onEndOfUtterance.bind(this, cs, ep));
ep.addCustomEventListener(GoogleTranscriptionEvents.VadDetected, this._onVadDetected.bind(this, cs, ep));
break;
if (this.vad?.enable) {
opts.START_RECOGNIZING_ON_VAD = 1;
if (this.vad.voiceMs) opts.RECOGNIZER_VAD_VOICE_MS = this.vad.voiceMs;
else opts.RECOGNIZER_VAD_VOICE_MS = 125;
if (this.vad.mode >= 0 && this.vad.mode <= 3) opts.RECOGNIZER_VAD_MODE = this.vad.mode;
}
case 'aws':
case 'polly':
this.bugname = 'aws_transcribe';
ep.addCustomEventListener(AwsTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(AwsTranscriptionEvents.VadDetected, this._onVadDetected.bind(this, cs, ep));
break;
case 'microsoft':
this.bugname = 'azure_transcribe';
ep.addCustomEventListener(AzureTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(AzureTranscriptionEvents.NoSpeechDetected,
this._onNoSpeechDetected.bind(this, cs, ep));
ep.addCustomEventListener(AzureTranscriptionEvents.VadDetected, this._onVadDetected.bind(this, cs, ep));
break;
case 'nuance':
this.bugname = 'nuance_transcribe';
ep.addCustomEventListener(NuanceTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(NuanceTranscriptionEvents.StartOfSpeech,
this._onStartOfSpeech.bind(this, cs, ep));
ep.addCustomEventListener(NuanceTranscriptionEvents.TranscriptionComplete,
this._onTranscriptionComplete.bind(this, cs, ep));
ep.addCustomEventListener(NuanceTranscriptionEvents.VadDetected,
this._onVadDetected.bind(this, cs, ep));
ep.addCustomEventListener(NuanceTranscriptionEvents.Error,
this._onNuanceError.bind(this, cs, ep));
if ('google' === this.vendor) {
this.bugname = 'google_trancribe';
if (this.sttCredentials) opts.GOOGLE_APPLICATION_CREDENTIALS = JSON.stringify(this.sttCredentials.credentials);
[
['enhancedModel', 'GOOGLE_SPEECH_USE_ENHANCED'],
['separateRecognitionPerChannel', 'GOOGLE_SPEECH_SEPARATE_RECOGNITION_PER_CHANNEL'],
['profanityFilter', 'GOOGLE_SPEECH_PROFANITY_FILTER'],
['punctuation', 'GOOGLE_SPEECH_ENABLE_AUTOMATIC_PUNCTUATION'],
['words', 'GOOGLE_SPEECH_ENABLE_WORD_TIME_OFFSETS'],
['singleUtterance', 'GOOGLE_SPEECH_SINGLE_UTTERANCE'],
['diarization', 'GOOGLE_SPEECH_PROFANITY_FILTER']
].forEach((arr) => {
if (this[arr[0]]) opts[arr[1]] = true;
});
if (this.hints.length > 0) {
opts.GOOGLE_SPEECH_HINTS = this.hints.join(',');
if (typeof this.hintsBoost === 'number') {
opts.GOOGLE_SPEECH_HINTS_BOOST = this.hintsBoost;
/* stall timers until prompt finishes playing */
if ((this.sayTask || this.playTask) && this.listenDuringPrompt) {
opts.NUANCE_STALL_TIMERS = 1;
}
}
if (this.altLanguages.length > 0) opts.GOOGLE_SPEECH_ALTERNATIVE_LANGUAGE_CODES = this.altLanguages.join(',');
else opts.GOOGLE_SPEECH_ALTERNATIVE_LANGUAGE_CODES = '';
if ('unspecified' !== this.interactionType) {
opts.GOOGLE_SPEECH_METADATA_INTERACTION_TYPE = this.interactionType;
}
opts.GOOGLE_SPEECH_MODEL = this.model;
if (this.diarization && this.diarizationMinSpeakers > 0) {
opts.GOOGLE_SPEECH_SPEAKER_DIARIZATION_MIN_SPEAKER_COUNT = this.diarizationMinSpeakers;
}
if (this.diarization && this.diarizationMaxSpeakers > 0) {
opts.GOOGLE_SPEECH_SPEAKER_DIARIZATION_MAX_SPEAKER_COUNT = this.diarizationMaxSpeakers;
}
if (this.naicsCode > 0) opts.GOOGLE_SPEECH_METADATA_INDUSTRY_NAICS_CODE = this.naicsCode;
ep.addCustomEventListener(GoogleTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(GoogleTranscriptionEvents.EndOfUtterance, this._onEndOfUtterance.bind(this, cs, ep));
ep.addCustomEventListener(GoogleTranscriptionEvents.VadDetected, this._onVadDetected.bind(this, cs, ep));
}
else if (['aws', 'polly'].includes(this.vendor)) {
this.bugname = 'aws_trancribe';
if (this.vocabularyName) opts.AWS_VOCABULARY_NAME = this.vocabularyName;
if (this.vocabularyFilterName) {
opts.AWS_VOCABULARY_NAME = this.vocabularyFilterName;
opts.AWS_VOCABULARY_FILTER_METHOD = this.filterMethod || 'mask';
}
if (this.sttCredentials) {
Object.assign(opts, {
AWS_ACCESS_KEY_ID: this.sttCredentials.accessKeyId,
AWS_SECRET_ACCESS_KEY: this.sttCredentials.secretAccessKey,
AWS_REGION: this.sttCredentials.region
});
}
ep.addCustomEventListener(AwsTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(AwsTranscriptionEvents.VadDetected, this._onVadDetected.bind(this, cs, ep));
}
else if ('microsoft' === this.vendor) {
this.bugname = 'azure_trancribe';
if (this.sttCredentials) {
Object.assign(opts, {
'AZURE_SUBSCRIPTION_KEY': this.sttCredentials.api_key,
'AZURE_REGION': this.sttCredentials.region
});
}
if (this.hints && this.hints.length > 0) {
opts.AZURE_SPEECH_HINTS = this.hints.map((h) => h.trim()).join(',');
}
if (this.altLanguages && this.altLanguages.length > 0) {
opts.AZURE_SPEECH_ALTERNATIVE_LANGUAGE_CODES = this.altLanguages.join(',');
}
else {
opts.AZURE_SPEECH_ALTERNATIVE_LANGUAGE_CODES = '';
}
if (this.requestSnr) opts.AZURE_REQUEST_SNR = 1;
if (this.profanityOption && this.profanityOption !== 'raw') opts.AZURE_PROFANITY_OPTION = this.profanityOption;
if (this.azureServiceEndpoint) opts.AZURE_SERVICE_ENDPOINT = this.azureServiceEndpoint;
if (this.initialSpeechTimeoutMs > 0) opts.AZURE_INITIAL_SPEECH_TIMEOUT_MS = this.initialSpeechTimeoutMs;
else if (this.timeout === 0) opts.AZURE_INITIAL_SPEECH_TIMEOUT_MS = 120000; // lengthy
opts.AZURE_USE_OUTPUT_FORMAT_DETAILED = 1;
break;
ep.addCustomEventListener(AzureTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(AzureTranscriptionEvents.NoSpeechDetected, this._onNoSpeechDetected.bind(this, cs, ep));
ep.addCustomEventListener(AzureTranscriptionEvents.VadDetected, this._onVadDetected.bind(this, cs, ep));
case 'deepgram':
this.bugname = 'deepgram_transcribe';
ep.addCustomEventListener(DeepgramTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(DeepgramTranscriptionEvents.Connect, this._onDeepgramConnect.bind(this, cs, ep));
ep.addCustomEventListener(DeepgramTranscriptionEvents.ConnectFailure,
this._onDeepGramConnectFailure.bind(this, cs, ep));
break;
case 'ibm':
this.bugname = 'ibm_transcribe';
ep.addCustomEventListener(IbmTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(IbmTranscriptionEvents.Connect, this._onIbmConnect.bind(this, cs, ep));
ep.addCustomEventListener(IbmTranscriptionEvents.ConnectFailure,
this._onIbmConnectFailure.bind(this, cs, ep));
ep.addCustomEventListener(IbmTranscriptionEvents.Error,
this._onIbmError.bind(this, cs, ep));
break;
default:
throw new Error(`Invalid vendor ${this.vendor}`);
}
await ep.set(opts)
.catch((err) => this.logger.info(err, 'Error setting channel variables'));
}
@@ -478,56 +481,29 @@ class TaskGather extends Task {
// make sure this is not a transcript from answering machine detection
const bugname = fsEvent.getHeader('media-bugname');
const finished = fsEvent.getHeader('transcription-session-finished');
this.logger.debug({evt, bugname, finished}, 'Gather:_onTranscription');
if (bugname && this.bugname !== bugname) return;
if ('aws' === this.vendor && Array.isArray(evt) && evt.length > 0) evt = evt[0];
if ('microsoft' === this.vendor) {
const final = evt.RecognitionStatus === 'Success';
if (final) {
// don't sort based on confidence: https://github.com/Azure-Samples/cognitive-services-speech-sdk/issues/1463
//const nbest = evt.NBest.sort((a, b) => b.Confidence - a.Confidence);
const nbest = evt.NBest;
const language_code = evt.PrimaryLanguage?.Language || this.language;
evt = {
is_final: true,
language_code,
alternatives: [
{
confidence: nbest[0].Confidence,
transcript: nbest[0].Display
}
]
};
}
else {
evt = {
is_final: false,
alternatives: [
{
transcript: evt.Text
}
]
};
}
}
evt = this.normalizeTranscription(evt, this.vendor, 1, this.language);
/* count words for bargein feature */
const words = evt.alternatives[0].transcript.split(' ').length;
const words = evt.alternatives[0]?.transcript.split(' ').length;
const bufferedWords = this._bufferedTranscripts.reduce((count, e) => {
return count + e.alternatives[0].transcript.split(' ').length;
return count + e.alternatives[0]?.transcript.split(' ').length;
}, 0);
if (evt.is_final) {
if (evt.alternatives[0].transcript === '' && !this.callSession.callGone && !this.killed) {
if ('microsoft' === this.vendor && finished === 'true') {
if (finished === 'true' && ['microsoft', 'deepgram'].includes(this.vendor)) {
this.logger.debug({evt}, 'TaskGather:_onTranscription - got empty transcript from old gather, disregarding');
}
else {
this.logger.info({evt}, 'TaskGather:_onTranscription - got empty transcript, listen again');
this._startTranscribing(ep);
this.logger.info({evt}, 'TaskGather:_onTranscription - got empty transcript, continue listening');
//this._startTranscribing(ep);
}
return;
}
if (this.isContinuousAsr) {
/* append the transcript and start listening again for asrTimeout */
const t = evt.alternatives[0].transcript;
@@ -594,6 +570,62 @@ class TaskGather extends Task {
}
}
_onStartOfSpeech(cs, ep) {
this.logger.debug('TaskGather:_onStartOfSpeech');
}
_onTranscriptionComplete(cs, ep) {
this.logger.debug('TaskGather:_onTranscriptionComplete');
}
_onNuanceError(cs, ep, evt) {
const {code, error, details} = evt;
if (code === 404 && error === 'No speech') {
this.logger.debug({code, error, details}, 'TaskGather:_onNuanceError');
return this._resolve('timeout');
}
this.logger.info({code, error, details}, 'TaskGather:_onNuanceError');
if (code === 413 && error === 'Too much speech') {
return this._resolve('timeout');
}
}
_onDeepgramConnect(_cs, _ep) {
this.logger.debug('TaskGather:_onDeepgramConnect');
}
_onDeepGramConnectFailure(cs, _ep, evt) {
const {reason} = evt;
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info({evt}, 'TaskGather:_onDeepgramConnectFailure');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Failed connecting to Deepgram speech recognizer: ${reason}`,
vendor: 'deepgram',
}).catch((err) => this.logger.info({err}, 'Error generating alert for deepgram connection failure'));
this.notifyError(`Failed connecting to speech vendor deepgram: ${reason}`);
this.notifyTaskDone();
}
_onIbmConnect(_cs, _ep) {
this.logger.debug('TaskGather:_onIbmConnect');
}
_onIbmConnectFailure(cs, _ep, evt) {
const {reason} = evt;
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info({evt}, 'TaskGather:_onIbmConnectFailure');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Failed connecting to IBM watson speech recognizer: ${reason}`,
vendor: 'ibm',
}).catch((err) => this.logger.info({err}, 'Error generating alert for IBM connection failure'));
this.notifyError(`Failed connecting to speech vendor IBM: ${reason}`);
this.notifyTaskDone();
}
_onIbmError(cs, _ep, evt) {
this.logger.info({evt}, 'TaskGather:_onIbmError'); }
_onVadDetected(cs, ep) {
if (this.bargein && this.minBargeinWordCount === 0) {
this.logger.debug('TaskGather:_onVadDetected');
@@ -637,7 +669,7 @@ class TaskGather extends Task {
}
this.span.setAttributes({'stt.resolve': reason, 'stt.result': JSON.stringify(evt)});
if (this.ep && this.ep.connected) {
if (this.needsStt && this.ep && this.ep.connected) {
this.ep.stopTranscription({vendor: this.vendor})
.catch((err) => this.logger.error({err}, 'Error stopping transcription'));
}

View File

@@ -1,7 +1,7 @@
const Task = require('./task');
const {TaskName, TaskPreconditions} = require('../utils/constants');
const bent = require('bent');
const { v4: uuidv4 } = require('uuid');
const uuidv4 = require('uuid-random');
class TaskMessage extends Task {
constructor(logger, opts) {

View File

@@ -7,6 +7,8 @@ class TaskPlay extends Task {
this.preconditions = TaskPreconditions.Endpoint;
this.url = this.data.url;
this.seekOffset = this.data.seekOffset || -1;
this.timeoutSecs = this.data.timeoutSecs || -1;
this.loop = this.data.loop || 1;
this.earlyMedia = this.data.earlyMedia === true;
}
@@ -31,8 +33,13 @@ class TaskPlay extends Task {
} else {
await this.playToConfMember(this.ep, memberId, confName, confUuid, this.url);
}
} else {
const file = (this.timeoutSecs >= 0 || this.seekOffset >= 0) ?
{file: this.url, seekOffset: this.seekOffset, timeoutSecs: this.timeoutSecs} : this.url;
const result = await ep.play(file);
await this.performAction(Object.assign(result, {reason: 'playCompleted'}),
!(this.parentTask || cs.isConfirmCallSession));
}
else await ep.play(this.url);
}
} catch (err) {
this.logger.info(err, `TaskPlay:exec - error playing ${this.url}`);

View File

@@ -11,6 +11,7 @@ class TaskRestDial extends Task {
super(logger, opts);
this.from = this.data.from;
this.fromHost = this.data.fromHost;
this.to = this.data.to;
this.call_hook = this.data.call_hook;
this.timeout = this.data.timeout || 60;

View File

@@ -1,15 +1,111 @@
const Task = require('./task');
const {TaskName, TaskPreconditions} = require('../utils/constants');
const breakLengthyTextIfNeeded = (logger, text) => {
const chunkSize = 1000;
if (text.length <= chunkSize) return [text];
const result = [];
const isSSML = text.startsWith('<speak>');
let startPos = 0;
let charPos = isSSML ? 7 : 0; // skip <speak>
let tag;
//logger.debug({isSSML}, `breakLengthyTextIfNeeded: handling text of length ${text.length}`);
while (startPos + charPos < text.length) {
if (isSSML && !tag && text[startPos + charPos] === '<') {
const tagStartPos = ++charPos;
while (startPos + charPos < text.length) {
if (text[startPos + charPos] === '>') {
if (text[startPos + charPos - 1] === '\\') tag = null;
else if (!tag) tag = text.substring(startPos + tagStartPos, startPos + charPos - 1);
break;
}
if (!tag) {
const c = text[startPos + charPos];
if (c === ' ') {
tag = text.substring(startPos + tagStartPos, startPos + charPos);
//logger.debug(`breakLengthyTextIfNeeded: enter tag ${tag} (space)`);
break;
}
}
charPos++;
}
if (tag) {
//search for end of tag
//logger.debug(`breakLengthyTextIfNeeded: searching forward for </${tag}>`);
const e1 = text.indexOf(`</${tag}>`, startPos + charPos);
const e2 = text.indexOf('/>', startPos + charPos);
const tagEndPos = e1 === -1 ? e2 : e2 === -1 ? e1 : Math.min(e1, e2);
if (tagEndPos === -1) {
//logger.debug(`breakLengthyTextIfNeeded: exit tag ${tag} not found, exiting`);
} else {
//logger.debug(`breakLengthyTextIfNeeded: exit tag ${tag} found at ${tagEndPos}`);
charPos = tagEndPos + 1;
}
tag = null;
}
continue;
}
if (charPos < chunkSize) {
charPos++;
continue;
}
// start looking for a good break point
let chunkIt = false;
const a = text[startPos + charPos];
const b = text[startPos + charPos + 1];
if (/[\.!\?]/.test(a) && /\s/.test(b)) {
//logger.debug('breakLengthyTextIfNeeded: breaking at sentence end');
chunkIt = true;
}
if (chunkIt) {
charPos++;
const chunk = text.substr(startPos, charPos);
if (isSSML) {
result.push(0 === startPos ? `${chunk}</speak>` : `<speak>${chunk}</speak>`);
}
else result.push(chunk);
charPos = 0;
startPos += chunk.length;
//logger.debug({chunk: result[result.length - 1]},
// `breakLengthyTextIfNeeded: chunked; new starting pos ${startPos}`);
}
else charPos++;
}
// final chunk
if (startPos < text.length) {
const chunk = text.substr(startPos);
if (isSSML) {
result.push(0 === startPos ? `${chunk}</speak>` : `<speak>${chunk}`);
}
else result.push(chunk);
//logger.debug({chunk: result[result.length - 1]},
// `breakLengthyTextIfNeeded: final chunk; starting pos ${startPos} length ${chunk.length}`);
}
return result;
};
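A quick illustration (not part of the change set) of how breakLengthyTextIfNeeded behaves, assuming the helper above is in scope: plain text longer than the 1000-character chunk size is split at sentence boundaries, and SSML input is re-wrapped so every chunk remains a valid <speak> document.

// plain text: roughly 1700 characters, typically split into two sentence-aligned chunks
const chunks = breakLengthyTextIfNeeded(logger, 'A long sentence. '.repeat(100));

// SSML: each returned chunk begins with <speak> and ends with </speak>
const ssmlChunks = breakLengthyTextIfNeeded(logger, `<speak>${'Another sentence. '.repeat(100)}</speak>`);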
class TaskSay extends Task {
constructor(logger, opts, parentTask) {
super(logger, opts);
this.preconditions = TaskPreconditions.Endpoint;
this.text = Array.isArray(this.data.text) ? this.data.text : [this.data.text];
this.text = (Array.isArray(this.data.text) ? this.data.text : [this.data.text])
.map((t) => breakLengthyTextIfNeeded(this.logger, t))
.flat();
this.loop = this.data.loop || 1;
this.earlyMedia = this.data.earlyMedia === true || (parentTask && parentTask.earlyMedia);
this.synthesizer = this.data.synthesizer || {};
this.disableTtsCache = this.data.disableTtsCache;
}
get name() { return TaskName.Say; }
@@ -35,14 +131,24 @@ class TaskSay extends Task {
const language = this.synthesizer.language && this.synthesizer.language !== 'default' ?
this.synthesizer.language :
cs.speechSynthesisLanguage ;
const voice = this.synthesizer.voice && this.synthesizer.voice !== 'default' ?
let voice = this.synthesizer.voice && this.synthesizer.voice !== 'default' ?
this.synthesizer.voice :
cs.speechSynthesisVoice;
const engine = this.synthesizer.engine || 'standard';
const salt = cs.callSid;
const credentials = cs.getSpeechCredentials(vendor, 'tts');
this.logger.info({vendor, language, voice}, 'TaskSay:exec');
/* parse Nuance voices into name and model */
let model;
if (vendor === 'nuance' && voice) {
const arr = /([A-Za-z-]*)\s+-\s+(enhanced|standard)/.exec(voice);
if (arr) {
voice = arr[1];
model = arr[2];
}
}
this.logger.info({vendor, language, voice, model}, 'TaskSay:exec');
this.ep = ep;
try {
if (!credentials) {
@@ -51,7 +157,10 @@ class TaskSay extends Task {
alert_type: AlertType.TTS_NOT_PROVISIONED,
vendor
}).catch((err) => this.logger.info({err}, 'Error generating alert for no tts'));
this.notifyError(`No speech credentials have been provisioned for ${vendor}`);
this.notifyError({
msg: 'TTS error',
details:`No speech credentials provisioned for selected vendor ${vendor}`
});
throw new Error('no provisioned speech credentials for TTS');
}
// synthesize all of the text elements
@@ -69,14 +178,16 @@ class TaskSay extends Task {
'tts.voice': voice
});
try {
const {filePath, servedFromCache} = await synthAudio(stats, {
const {filePath, servedFromCache, rtt} = await synthAudio(stats, {
text,
vendor,
language,
voice,
engine,
model,
salt,
credentials
credentials,
disableTtsCache : this.disableTtsCache
});
this.logger.debug(`file ${filePath}, served from cache ${servedFromCache}`);
if (filePath) cs.trackTmpFile(filePath);
@@ -87,6 +198,15 @@ class TaskSay extends Task {
}
span.setAttributes({'tts.cached': servedFromCache});
span.end();
if (!servedFromCache && rtt) {
this.notifyStatus({
event: 'synthesized-audio',
vendor,
language,
characters: text.length,
elapsedTime: rtt
});
}
return filePath;
} catch (err) {
this.logger.info({err}, 'Error synthesizing tts');
@@ -97,7 +217,7 @@ class TaskSay extends Task {
vendor,
detail: err.message
}).catch((err) => this.logger.info({err}, 'Error generating alert for tts failure'));
this.notifyError(err.message || err);
this.notifyError({msg: 'TTS error', details: err.message || err});
return;
}
};
@@ -105,6 +225,7 @@ class TaskSay extends Task {
const arr = this.text.map((t) => generateAudio(t));
const filepath = (await Promise.all(arr)).filter((fp) => fp && fp.length);
this.logger.debug({filepath}, 'synthesized files for tts');
this.notifyStatus({event: 'start-playback'});
while (!this.killed && (this.loop === 'forever' || this.loop--) && this.ep?.connected) {
let segment = 0;
@@ -136,6 +257,7 @@ class TaskSay extends Task {
this.killPlayToConfMember(this.ep, memberId, confName);
}
else {
this.notifyStatus({event: 'kill-playback'});
this.ep.api('uuid_break', this.ep.uuid);
}
}
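Pulling the say changes together: a per-verb disableTtsCache flag forces fresh synthesis, a Nuance voice written as 'Name - enhanced|standard' is split into voice and model, and for websocket applications with notifyEvents enabled the task emits verb:status events such as synthesized-audio and start-playback. A sketch with illustrative values:

const sayVerb = {
  verb: 'say',
  text: '<speak>Welcome to the conference.</speak>',
  synthesizer: {vendor: 'nuance', language: 'en-US', voice: 'Evan - enhanced'},  // parsed into voice 'Evan', model 'enhanced'
  disableTtsCache: true   // bypass the TTS cache for this verb only
};

// params sent with the verb:status event when audio is freshly synthesized
// (the websocket framing itself is handled by WsRequestor and not shown)
// {event: 'synthesized-audio', vendor: 'nuance', language: 'en-US', characters: 41, elapsedTime: 350, verb: 'say', id: '<verb id, if supplied>'}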

View File

@@ -36,6 +36,7 @@ class TaskSipRefer extends Task {
method: 'REFER',
headers: {
...this.headers,
...(this.referToIsUri && {'X-Refer-To-Leave-Untouched': true}),
'Refer-To': referTo,
'Referred-By': referredBy
}
@@ -100,6 +101,7 @@ class TaskSipRefer extends Task {
/* they may have only provided a phone number/user */
referTo = `sip:${referTo}@${host}`;
}
else this.referToIsUri = true;
if (!referredBy) {
/* default */
referredBy = cs.req?.callingNumber || dlg.local.uri;
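The net effect of the two refer changes: a referTo that is already a full SIP URI is flagged via referToIsUri and the REFER carries X-Refer-To-Leave-Untouched: true so it is not rewritten downstream, while a bare user or number is still expanded to sip:<user>@<host>. Illustrative payloads:

// sent as-is, with X-Refer-To-Leave-Untouched added to the REFER headers
{verb: 'sip:refer', referTo: 'sip:support@pbx.example.com'}

// expanded to sip:16175551234@<host derived from the current dialog>
{verb: 'sip:refer', referTo: '16175551234'}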

View File

@@ -1,6 +1,7 @@
{
"sip:decline": {
"properties": {
"id": "string",
"status": "number",
"reason": "string",
"headers": "object"
@@ -11,6 +12,7 @@
},
"sip:request": {
"properties": {
"id": "string",
"method": "string",
"body": "string",
"headers": "object",
@@ -22,6 +24,7 @@
},
"sip:refer": {
"properties": {
"id": "string",
"referTo": "string",
"referredBy": "string",
"headers": "object",
@@ -34,11 +37,13 @@
},
"config": {
"properties": {
"id": "string",
"synthesizer": "#synthesizer",
"recognizer": "#recognizer",
"bargeIn": "#bargeIn",
"record": "#recordOptions",
"amd": "#amd"
"amd": "#amd",
"notifyEvents": "boolean"
},
"required": []
},
@@ -62,6 +67,7 @@
},
"dequeue": {
"properties": {
"id": "string",
"name": "string",
"actionHook": "object|string",
"timeout": "number",
@@ -73,6 +79,7 @@
},
"enqueue": {
"properties": {
"id": "string",
"name": "string",
"actionHook": "object|string",
"waitHook": "object|string",
@@ -84,11 +91,12 @@
},
"leave": {
"properties": {
"id": "string"
}
},
"hangup": {
"properties": {
"id": "string",
"headers": "object"
},
"required": [
@@ -96,9 +104,13 @@
},
"play": {
"properties": {
"id": "string",
"url": "string|array",
"loop": "number|string",
"earlyMedia": "boolean"
"earlyMedia": "boolean",
"seekOffset": "number|string",
"timeoutSecs": "number|string",
"actionHook": "object|string"
},
"required": [
"url"
@@ -106,10 +118,12 @@
},
"say": {
"properties": {
"id": "string",
"text": "string|array",
"loop": "number|string",
"synthesizer": "#synthesizer",
"earlyMedia": "boolean"
"earlyMedia": "boolean",
"disableTtsCache": "boolean"
},
"required": [
"text"
@@ -117,6 +131,7 @@
},
"gather": {
"properties": {
"id": "string",
"actionHook": "object|string",
"finishOnKey": "string",
"input": "array",
@@ -140,6 +155,7 @@
},
"conference": {
"properties": {
"id": "string",
"name": "string",
"beep": "boolean",
"startConferenceOnEnter": "boolean",
@@ -159,6 +175,7 @@
},
"dial": {
"properties": {
"id": "string",
"actionHook": "object|string",
"answerOnBridge": "boolean",
"callerId": "string",
@@ -182,6 +199,7 @@
},
"dialogflow": {
"properties": {
"id": "string",
"credentials": "object|string",
"project": "string",
"environment": "string",
@@ -210,6 +228,7 @@
},
"dtmf": {
"properties": {
"id": "string",
"dtmf": "string",
"duration": "number"
},
@@ -219,6 +238,7 @@
},
"lex": {
"properties": {
"id": "string",
"botId": "string",
"botAlias": "string",
"credentials": "object",
@@ -243,6 +263,7 @@
},
"listen": {
"properties": {
"id": "string",
"actionHook": "object|string",
"auth": "#auth",
"finishOnKey": "string",
@@ -267,6 +288,7 @@
},
"message": {
"properties": {
"id": "string",
"carrier": "string",
"account_sid": "string",
"message_sid": "string",
@@ -283,6 +305,7 @@
},
"pause": {
"properties": {
"id": "string",
"length": "number"
},
"required": [
@@ -291,6 +314,7 @@
},
"rasa": {
"properties": {
"id": "string",
"url": "string",
"recognizer": "#recognizer",
"tts": "#synthesizer",
@@ -325,6 +349,7 @@
},
"redirect": {
"properties": {
"id": "string",
"actionHook": "object|string"
},
"required": [
@@ -333,11 +358,13 @@
},
"rest:dial": {
"properties": {
"id": "string",
"account_sid": "string",
"application_sid": "string",
"call_hook": "object|string",
"call_status_hook": "object|string",
"from": "string",
"fromHost": "string",
"speech_synthesis_vendor": "string",
"speech_synthesis_voice": "string",
"speech_synthesis_language": "string",
@@ -356,6 +383,7 @@
},
"tag": {
"properties": {
"id": "string",
"data": "object"
},
"required": [
@@ -364,6 +392,7 @@
},
"transcribe": {
"properties": {
"id": "string",
"transcriptionHook": "string",
"recognizer": "#recognizer",
"earlyMedia": "boolean"
@@ -384,6 +413,7 @@
"enum": ["GET", "POST"]
},
"headers": "object",
"from": "#dialFrom",
"name": "string",
"number": "string",
"sipUri": "string",
@@ -397,6 +427,14 @@
"type"
]
},
"dialFrom": {
"properties": {
"user": "string",
"host": "string"
},
"required": [
]
},
"auth": {
"properties": {
"username": "string",
@@ -411,7 +449,7 @@
"properties": {
"vendor": {
"type": "string",
"enum": ["google", "aws", "polly", "microsoft", "default"]
"enum": ["google", "aws", "polly", "microsoft", "nuance", "ibm", "default"]
},
"language": "string",
"voice": "string",
@@ -432,7 +470,7 @@
"properties": {
"vendor": {
"type": "string",
"enum": ["google", "aws", "microsoft", "default"]
"enum": ["google", "aws", "microsoft", "nuance", "deepgram", "ibm", "default"]
},
"language": "string",
"vad": "#vad",
@@ -494,13 +532,192 @@
"requestSnr": "boolean",
"initialSpeechTimeoutMs": "number",
"azureServiceEndpoint": "string",
"azureSttEndpointId": "string",
"asrDtmfTerminationDigit": "string",
"asrTimeout": "number"
"asrTimeout": "number",
"nuanceOptions": "#nuanceOptions",
"deepgramOptions": "#deepgramOptions",
"ibmOptions": "#ibmOptions"
},
"required": [
"vendor"
]
},
"ibmOptions": {
"properties": {
"sttApiKey": "string",
"sttRegion": "string",
"ttsApiKey": "string",
"ttsRegion": "string",
"instanceId": "string",
"model": "string",
"languageCustomizationId": "string",
"acousticCustomizationId": "string",
"baseModelVersion": "string",
"watsonMetadata": "string",
"watsonLearningOptOut": "boolean"
},
"required": [
]
},
"deepgramOptions": {
"properties": {
"apiKey": "string",
"tier": {
"type": "string",
"enum": [
"enhanced",
"base"
]
},
"model": {
"type": "string",
"enum": [
"general",
"meeting",
"phonecall",
"voicemail",
"finance",
"conversationalai",
"video",
"custom"
]
},
"customModel": "string",
"version": "string",
"punctuate": "boolean",
"profanityFilter": "boolean",
"redact": {
"type": "string",
"enum": [
"pci",
"numbers",
"true",
"ssn"
]
},
"diarize": "boolean",
"diarizeVersion": "string",
"ner": "boolean",
"multichannel": "boolean",
"alternatives": "number",
"numerals": "boolean",
"search": "array",
"replace": "array",
"keywords": "array",
"endpointing": "boolean",
"vadTurnoff": "number",
"tag": "string"
}
},
"nuanceOptions": {
"properties": {
"clientId": "string",
"secret": "string",
"kryptonEndpoint": "string",
"topic": "string",
"utteranceDetectionMode": {
"type": "string",
"enum": [
"single",
"multiple",
"disabled"
]
},
"punctuation": "boolean",
"profanityFilter": "boolean",
"includeTokenization": "boolean",
"discardSpeakerAdaptation": "boolean",
"suppressCallRecording": "boolean",
"maskLoadFailures": "boolean",
"suppressInitialCapitalization": "boolean",
"allowZeroBaseLmWeight": "boolean",
"filterWakeupWord": "boolean",
"resultType": {
"type": "string",
"enum": [
"final",
"partial",
"immutable_partial"
]
},
"noInputTimeoutMs": "number",
"recognitionTimeoutMs": "number",
"utteranceEndSilenceMs": "number",
"maxHypotheses": "number",
"speechDomain": "string",
"formatting": "#formatting",
"clientData": "object",
"userId": "string",
"speechDetectionSensitivity": "number",
"resources": ["#resource"]
},
"required": [
]
},
"resource": {
"properties": {
"externalReference": "#resourceReference",
"inlineWordset": "string",
"builtin": "string",
"inlineGrammar": "string",
"wakeupWord": "[string]",
"weightName": {
"type": "string",
"enum": [
"defaultWeight",
"lowest",
"low",
"medium",
"high",
"highest"
]
},
"weightValue": "number",
"reuse": {
"type": "string",
"enum": [
"undefined_reuse",
"low_reuse",
"high_reuse"
]
}
},
"required": [
]
},
"resourceReference": {
"properties": {
"type": {
"type": "string",
"enum": [
"undefined_resource_type",
"wordset",
"compiled_wordset",
"domain_lm",
"speaker_profile",
"grammar",
"settings"
]
},
"uri": "string",
"maxLoadFailures": "boolean",
"requestTimeoutMs": "number",
"headers": "object"
},
"required": [
]
},
"formatting": {
"properties": {
"scheme": "string",
"options": "object"
},
"required": [
"scheme",
"options"
]
},
"lexIntent": {
"properties": {
"name": "string",

View File

@@ -1,9 +1,11 @@
const Emitter = require('events');
const { v4: uuidv4 } = require('uuid');
const uuidv4 = require('uuid-random');
const debug = require('debug')('jambonz:feature-server');
const assert = require('assert');
const {TaskPreconditions} = require('../utils/constants');
const normalizeJambones = require('../utils/normalize-jambones');
const WsRequestor = require('../utils/ws-requestor');
const {TaskName} = require('../utils/constants');
const {trace} = require('@opentelemetry/api');
const specs = new Map();
const _specData = require('./specs');
@@ -21,6 +23,7 @@ class Task extends Emitter {
this.logger = logger;
this.data = data;
this.actionHook = this.data.actionHook;
this.id = data.id;
this._killInProgress = false;
this._completionPromise = new Promise((resolve) => this._completionResolver = resolve);
@@ -137,21 +140,32 @@ class Task extends Emitter {
return this.callSession.normalizeUrl(url, method, auth);
}
notifyError(errMsg) {
const params = {error: errMsg, verb: this.name};
this.cs.requestor.request('jambonz:error', '/error', params)
.catch((err) => this.logger.info({err}, 'Task:notifyError error sending error'));
notifyError(obj) {
if (this.cs.requestor instanceof WsRequestor) {
const params = {...obj, verb: this.name, id: this.id};
this.cs.requestor.request('jambonz:error', '/error', params)
.catch((err) => this.logger.info({err}, 'Task:notifyError error sending error'));
}
}
notifyStatus(obj) {
if (this.cs.notifyEvents && this.cs.requestor instanceof WsRequestor) {
const params = {...obj, verb: this.name, id: this.id};
this.cs.requestor.request('verb:status', '/status', params)
.catch((err) => this.logger.info({err}, 'Task:notifyStatus error sending error'));
}
}
async performAction(results, expectResponse = true) {
if (this.actionHook) {
const type = this.name === TaskName.Redirect ? 'session:redirect' : 'verb:hook';
const params = results ? Object.assign(this.cs.callInfo.toJSON(), results) : this.cs.callInfo.toJSON();
const span = this.startSpan('verb:hook', {'hook.url': this.actionHook});
const span = this.startSpan(type, {'hook.url': this.actionHook});
const b3 = this.getTracingPropagation('b3', span);
const httpHeaders = b3 && {b3};
span.setAttributes({'http.body': JSON.stringify(params)});
try {
const json = await this.cs.requestor.request('verb:hook', this.actionHook, params, httpHeaders);
const json = await this.cs.requestor.request(type, this.actionHook, params, httpHeaders);
span.setAttributes({'http.statusCode': 200});
span.end();
if (expectResponse && json && Array.isArray(json)) {
@@ -336,6 +350,9 @@ class Task extends Emitter {
}
required = required.filter((item) => item !== dKey);
}
else if (dKey === '_') {
/* no op: allow arbitrary info to be carried here, used by conference e.g in transfer */
}
else throw new Error(`${name}: unknown property ${dKey}`);
}
if (required.length > 0) throw new Error(`${name}: missing value for ${required}`);
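With this change, error and status notifications are delivered only to websocket applications, and both carry the verb name plus the verb's id. A sketch of the params handed to the requestor for a TTS provisioning failure (compare the notifyError call in say.js above; the id value is illustrative):

const params = {
  msg: 'TTS error',
  details: 'No speech credentials provisioned for selected vendor nuance',
  verb: 'say',
  id: 'say-1'   // whatever id was supplied on the verb, if any
};
// delivered via cs.requestor.request('jambonz:error', '/error', params)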

View File

@@ -4,8 +4,12 @@ const {
TaskPreconditions,
GoogleTranscriptionEvents,
AzureTranscriptionEvents,
AwsTranscriptionEvents
AwsTranscriptionEvents,
NuanceTranscriptionEvents,
DeepgramTranscriptionEvents,
IbmTranscriptionEvents
} = require('../utils/constants');
const normalizeJambones = require('../utils/normalize-jambones');
class TaskTranscribe extends Task {
constructor(logger, opts, parentTask) {
@@ -13,6 +17,16 @@ class TaskTranscribe extends Task {
this.preconditions = TaskPreconditions.Endpoint;
this.parentTask = parentTask;
const {
setChannelVarsForStt,
normalizeTranscription,
removeSpeechListeners,
setSpeechCredentialsAtRuntime
} = require('../utils/transcription-utils')(logger);
this.setChannelVarsForStt = setChannelVarsForStt;
this.normalizeTranscription = normalizeTranscription;
this.removeSpeechListeners = removeSpeechListeners;
this.transcriptionHook = this.data.transcriptionHook;
this.earlyMedia = this.data.earlyMedia === true || (parentTask && parentTask.earlyMedia);
@@ -22,38 +36,11 @@ class TaskTranscribe extends Task {
this.interim = !!recognizer.interim;
this.separateRecognitionPerChannel = recognizer.separateRecognitionPerChannel;
/* vad: if provided, we dont connect to recognizer until voice activity is detected */
const {enable, voiceMs = 0, mode = -1} = recognizer.vad || {};
this.vad = {enable, voiceMs, mode};
/* let credentials be supplied in the recognizer object at runtime */
this.sttCredentials = setSpeechCredentialsAtRuntime(recognizer);
/* google-specific options */
this.hints = recognizer.hints || [];
this.hintsBoost = recognizer.hintsBoost;
this.profanityFilter = recognizer.profanityFilter;
this.punctuation = !!recognizer.punctuation;
this.enhancedModel = !!recognizer.enhancedModel;
this.model = recognizer.model || 'phone_call';
this.words = !!recognizer.words;
this.singleUtterance = recognizer.singleUtterance || false;
this.diarization = !!recognizer.diarization;
this.diarizationMinSpeakers = recognizer.diarizationMinSpeakers || 0;
this.diarizationMaxSpeakers = recognizer.diarizationMaxSpeakers || 0;
this.interactionType = recognizer.interactionType || 'unspecified';
this.naicsCode = recognizer.naicsCode || 0;
this.altLanguages = recognizer.altLanguages || [];
/* aws-specific options */
this.identifyChannels = !!recognizer.identifyChannels;
this.vocabularyName = recognizer.vocabularyName;
this.vocabularyFilterName = recognizer.vocabularyFilterName;
this.filterMethod = recognizer.filterMethod;
/* microsoft options */
this.outputFormat = recognizer.outputFormat || 'simple';
this.profanityOption = recognizer.profanityOption || 'raw';
this.requestSnr = recognizer.requestSnr || false;
this.initialSpeechTimeoutMs = recognizer.initialSpeechTimeoutMs || 0;
this.azureServiceEndpoint = recognizer.azureServiceEndpoint;
recognizer.hints = recognizer.hints || [];
recognizer.altLanguages = recognizer.altLanguages || [];
}
get name() { return TaskName.Transcribe; }
@@ -61,25 +48,32 @@ class TaskTranscribe extends Task {
async exec(cs, {ep, ep2}) {
super.exec(cs);
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
const {getNuanceAccessToken, getIbmAccessToken} = cs.srf.locals.dbHelpers;
if (cs.hasGlobalSttHints) {
const {hints, hintsBoost} = cs.globalSttHints;
this.hints = this.hints.concat(hints);
if (!this.hintsBoost && hintsBoost) this.hintsBoost = hintsBoost;
this.logger.debug({hints: this.hints, hintsBoost: this.hintsBoost},
'Transcribe:exec - applying global `sttHints');
this.data.recognizer.hints = this.data.recognizer.hints.concat(hints);
if (!this.data.recognizer.hintsBoost && hintsBoost) this.data.recognizer.hintsBoost = hintsBoost;
this.logger.debug({hints: this.data.recognizer.hints, hintsBoost: this.data.recognizer.hintsBoost},
'Transcribe:exec - applying global sttHints');
}
if (cs.hasAltLanguages) {
this.altLanguages = this.altLanguages.concat(cs.altLanguages);
this.data.recognizer.altLanguages = this.data.recognizer.altLanguages.concat(cs.altLanguages);
this.logger.debug({altLanguages: this.altLanguages},
'Gather:exec - applying altLanguages');
'Transcribe:exec - applying altLanguages');
}
if (cs.hasGlobalSttPunctuation && !this.data.recognizer.punctuation) {
this.data.recognizer.punctuation = cs.globalSttPunctuation;
}
this.ep = ep;
this.ep2 = ep2;
if ('default' === this.vendor || !this.vendor) this.vendor = cs.speechRecognizerVendor;
if ('default' === this.language || !this.language) this.language = cs.speechRecognizerLanguage;
this.sttCredentials = cs.getSpeechCredentials(this.vendor, 'stt');
if (!this.data.recognizer.vendor) {
this.data.recognizer.vendor = this.vendor;
}
if (!this.sttCredentials) this.sttCredentials = cs.getSpeechCredentials(this.vendor, 'stt');
try {
if (!this.sttCredentials) {
@@ -92,6 +86,22 @@ class TaskTranscribe extends Task {
}).catch((err) => this.logger.info({err}, 'Error generating alert for no stt'));
throw new Error('no provisioned speech credentials for TTS');
}
if (this.vendor === 'nuance' && this.sttCredentials.client_id) {
/* get nuance access token */
const {client_id, secret} = this.sttCredentials;
const {access_token, servedFromCache} = await getNuanceAccessToken(client_id, secret, 'asr tts');
this.logger.debug({client_id},
`Transcribe:exec - got nuance access token ${servedFromCache ? 'from cache' : ''}`);
this.sttCredentials = {...this.sttCredentials, access_token};
}
else if (this.vendor == 'ibm' && this.sttCredentials.stt_api_key) {
/* get ibm access token */
const {stt_api_key, stt_region} = this.sttCredentials;
const {access_token, servedFromCache} = await getIbmAccessToken(stt_api_key);
this.logger.debug({stt_api_key}, `Transcribe:exec - got ibm access token ${servedFromCache ? 'from cache' : ''}`);
this.sttCredentials = {...this.sttCredentials, access_token, stt_region};
}
await this._startTranscribing(cs, ep, 1);
if (this.separateRecognitionPerChannel && ep2) {
await this._startTranscribing(cs, ep2, 2);
@@ -105,14 +115,7 @@ class TaskTranscribe extends Task {
this.logger.info(err, 'TaskTranscribe:exec - error');
this.parentTask && this.parentTask.emit('error', err);
}
ep.removeCustomEventListener(GoogleTranscriptionEvents.Transcription);
ep.removeCustomEventListener(GoogleTranscriptionEvents.NoAudioDetected);
ep.removeCustomEventListener(GoogleTranscriptionEvents.MaxDurationExceeded);
ep.removeCustomEventListener(AwsTranscriptionEvents.Transcription);
ep.removeCustomEventListener(AwsTranscriptionEvents.NoAudioDetected);
ep.removeCustomEventListener(AwsTranscriptionEvents.MaxDurationExceeded);
ep.removeCustomEventListener(AzureTranscriptionEvents.Transcription);
ep.removeCustomEventListener(AzureTranscriptionEvents.NoSpeechDetected);
this.removeSpeechListeners(ep);
}
async kill(cs) {
@@ -136,116 +139,75 @@ class TaskTranscribe extends Task {
}
async _startTranscribing(cs, ep, channel) {
const opts = {};
const opts = this.setChannelVarsForStt(this, this.sttCredentials, this.data.recognizer);
switch (this.vendor) {
case 'google':
this.bugname = 'google_transcribe';
ep.addCustomEventListener(GoogleTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(GoogleTranscriptionEvents.NoAudioDetected,
this._onNoAudio.bind(this, cs, ep, channel));
ep.addCustomEventListener(GoogleTranscriptionEvents.MaxDurationExceeded,
this._onMaxDurationExceeded.bind(this, cs, ep, channel));
break;
if (this.vad.enable) {
opts.START_RECOGNIZING_ON_VAD = 1;
if (this.vad.voiceMs) opts.RECOGNIZER_VAD_VOICE_MS = this.vad.voiceMs;
if (this.vad.mode >= 0 && this.vad.mode <= 3) opts.RECOGNIZER_VAD_MODE = this.vad.mode;
case 'aws':
case 'polly':
this.bugname = 'aws_transcribe';
ep.addCustomEventListener(AwsTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(AwsTranscriptionEvents.NoAudioDetected,
this._onNoAudio.bind(this, cs, ep, channel));
ep.addCustomEventListener(AwsTranscriptionEvents.MaxDurationExceeded,
this._onMaxDurationExceeded.bind(this, cs, ep, channel));
break;
case 'microsoft':
this.bugname = 'azure_transcribe';
ep.addCustomEventListener(AzureTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(AzureTranscriptionEvents.NoSpeechDetected,
this._onNoAudio.bind(this, cs, ep, channel));
break;
case 'nuance':
this.bugname = 'nuance_transcribe';
ep.addCustomEventListener(NuanceTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(NuanceTranscriptionEvents.StartOfSpeech,
this._onStartOfSpeech.bind(this, cs, ep, channel));
ep.addCustomEventListener(NuanceTranscriptionEvents.TranscriptionComplete,
this._onTranscriptionComplete.bind(this, cs, ep, channel));
ep.addCustomEventListener(NuanceTranscriptionEvents.Error,
this._onNuanceError.bind(this, cs, ep, channel));
break;
case 'deepgram':
this.bugname = 'deepgram_transcribe';
ep.addCustomEventListener(DeepgramTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(DeepgramTranscriptionEvents.Connect,
this._onDeepgramConnect.bind(this, cs, ep, channel));
ep.addCustomEventListener(DeepgramTranscriptionEvents.ConnectFailure,
this._onDeepGramConnectFailure.bind(this, cs, ep, channel));
break;
case 'ibm':
this.bugname = 'ibm_transcribe';
ep.addCustomEventListener(IbmTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(IbmTranscriptionEvents.Connect,
this._onIbmConnect.bind(this, cs, ep, channel));
ep.addCustomEventListener(IbmTranscriptionEvents.ConnectFailure,
this._onIbmConnectFailure.bind(this, cs, ep, channel));
ep.addCustomEventListener(IbmTranscriptionEvents.Error,
this._onIbmError.bind(this, cs, ep, channel));
break;
default:
throw new Error(`Invalid vendor ${this.vendor}`);
}
ep.addCustomEventListener(GoogleTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(GoogleTranscriptionEvents.NoAudioDetected, this._onNoAudio.bind(this, cs, ep, channel));
ep.addCustomEventListener(GoogleTranscriptionEvents.MaxDurationExceeded,
this._onMaxDurationExceeded.bind(this, cs, ep, channel));
ep.addCustomEventListener(AwsTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(AwsTranscriptionEvents.NoAudioDetected, this._onNoAudio.bind(this, cs, ep, channel));
ep.addCustomEventListener(AwsTranscriptionEvents.MaxDurationExceeded,
this._onMaxDurationExceeded.bind(this, cs, ep, channel));
ep.addCustomEventListener(AzureTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(AzureTranscriptionEvents.NoSpeechDetected, this._onNoAudio.bind(this, cs, ep, channel));
await ep.set(opts)
.catch((err) => this.logger.info(err, 'Error setting channel variables'));
if (this.vendor === 'google') {
this.bugname = 'google_trancribe';
if (this.sttCredentials) opts.GOOGLE_APPLICATION_CREDENTIALS = JSON.stringify(this.sttCredentials.credentials);
[
['enhancedModel', 'GOOGLE_SPEECH_USE_ENHANCED'],
//['separateRecognitionPerChannel', 'GOOGLE_SPEECH_SEPARATE_RECOGNITION_PER_CHANNEL'],
['profanityFilter', 'GOOGLE_SPEECH_PROFANITY_FILTER'],
['punctuation', 'GOOGLE_SPEECH_ENABLE_AUTOMATIC_PUNCTUATION'],
['words', 'GOOGLE_SPEECH_ENABLE_WORD_TIME_OFFSETS'],
['singleUtterance', 'GOOGLE_SPEECH_SINGLE_UTTERANCE'],
['diarization', 'GOOGLE_SPEECH_PROFANITY_FILTER']
].forEach((arr) => {
if (this[arr[0]]) opts[arr[1]] = true;
});
if (this.hints.length > 0) {
opts.GOOGLE_SPEECH_HINTS = this.hints.join(',');
if (typeof this.hintsBoost === 'number') {
opts.GOOGLE_SPEECH_HINTS_BOOST = this.hintsBoost;
}
}
if (this.altLanguages.length > 0) opts.GOOGLE_SPEECH_ALTERNATIVE_LANGUAGE_CODES = this.altLanguages.join(',');
else opts.GOOGLE_SPEECH_ALTERNATIVE_LANGUAGE_CODES = '';
if ('unspecified' !== this.interactionType) {
opts.GOOGLE_SPEECH_METADATA_INTERACTION_TYPE = this.interactionType;
}
opts.GOOGLE_SPEECH_MODEL = this.model;
if (this.diarization && this.diarizationMinSpeakers > 0) {
opts.GOOGLE_SPEECH_SPEAKER_DIARIZATION_MIN_SPEAKER_COUNT = this.diarizationMinSpeakers;
}
if (this.diarization && this.diarizationMaxSpeakers > 0) {
opts.GOOGLE_SPEECH_SPEAKER_DIARIZATION_MAX_SPEAKER_COUNT = this.diarizationMaxSpeakers;
}
if (this.naicsCode > 0) opts.GOOGLE_SPEECH_METADATA_INDUSTRY_NAICS_CODE = this.naicsCode;
await ep.set(opts)
.catch((err) => this.logger.info(err, 'TaskTranscribe:_startTranscribing with google'));
}
else if (this.vendor === 'aws') {
this.bugname = 'aws_trancribe';
[
['diarization', 'AWS_SHOW_SPEAKER_LABEL'],
['identifyChannels', 'AWS_ENABLE_CHANNEL_IDENTIFICATION']
].forEach((arr) => {
if (this[arr[0]]) opts[arr[1]] = true;
});
if (this.vocabularyName) opts.AWS_VOCABULARY_NAME = this.vocabularyName;
if (this.vocabularyFilterName) {
opts.AWS_VOCABULARY_NAME = this.vocabularyFilterName;
opts.AWS_VOCABULARY_FILTER_METHOD = this.filterMethod || 'mask';
}
if (this.sttCredentials) {
Object.assign(opts, {
AWS_ACCESS_KEY_ID: this.sttCredentials.accessKeyId,
AWS_SECRET_ACCESS_KEY: this.sttCredentials.secretAccessKey,
AWS_REGION: this.sttCredentials.region
});
}
else {
Object.assign(opts, {
AWS_ACCESS_KEY_ID: process.env.AWS_ACCESS_KEY_ID,
AWS_SECRET_ACCESS_KEY: process.env.AWS_SECRET_ACCESS_KEY,
AWS_REGION: process.env.AWS_REGION
});
}
await ep.set(opts)
.catch((err) => this.logger.info(err, 'TaskTranscribe:_startTranscribing with aws'));
}
else if (this.vendor === 'microsoft') {
this.bugname = 'azure_trancribe';
Object.assign(opts, {
'AZURE_SUBSCRIPTION_KEY': this.sttCredentials.api_key,
'AZURE_REGION': this.sttCredentials.region
});
if (this.hints && this.hints.length > 0) {
opts.AZURE_SPEECH_HINTS = this.hints.map((h) => h.trim()).join(',');
}
if (this.altLanguages.length > 0) opts.AZURE_SPEECH_ALTERNATIVE_LANGUAGE_CODES = this.altLanguages.join(',');
else opts.AZURE_SPEECH_ALTERNATIVE_LANGUAGE_CODES = '';
if (this.requestSnr) opts.AZURE_REQUEST_SNR = 1;
if (this.profanityOption !== 'raw') opts.AZURE_PROFANITY_OPTION = this.profanityOption;
if (this.initialSpeechTimeoutMs > 0) opts.AZURE_INITIAL_SPEECH_TIMEOUT_MS = this.initialSpeechTimeoutMs;
if (this.outputFormat !== 'simple') opts.AZURE_USE_OUTPUT_FORMAT_DETAILED = 1;
if (this.azureServiceEndpoint) opts.AZURE_SERVICE_ENDPOINT = this.azureServiceEndpoint;
await ep.set(opts)
.catch((err) => this.logger.info(err, 'TaskTranscribe:_startTranscribing with azure'));
}
await this._transcribe(ep);
}
@@ -259,50 +221,49 @@ class TaskTranscribe extends Task {
});
}
_onTranscription(cs, ep, channel, evt, fsEvent) {
async _onTranscription(cs, ep, channel, evt, fsEvent) {
// make sure this is not a transcript from answering machine detection
const bugname = fsEvent.getHeader('media-bugname');
if (bugname && this.bugname !== bugname) return;
this.logger.debug({evt, channel}, 'TaskTranscribe:_onTranscription');
if ('aws' === this.vendor && Array.isArray(evt) && evt.length > 0) evt = evt[0];
if ('microsoft' === this.vendor) {
const nbest = evt.NBest;
const language_code = evt.PrimaryLanguage?.Language || this.language;
const alternatives = nbest ? nbest.map((n) => {
return {
confidence: n.Confidence,
transcript: n.Display
};
}) :
[
{
transcript: evt.DisplayText || evt.Text
}
];
this.logger.debug({evt}, 'TaskTranscribe:_onTranscription - before normalization');
const newEvent = {
is_final: evt.RecognitionStatus === 'Success',
channel,
language_code,
alternatives
};
evt = newEvent;
evt = this.normalizeTranscription(evt, this.vendor, channel, this.language);
this.logger.debug({evt}, 'TaskTranscribe:_onTranscription');
if (evt.alternatives[0]?.transcript === '' && !cs.callGone && !this.killed) {
if (['microsoft', 'deepgram'].includes(this.vendor)) {
this.logger.info({evt}, 'TaskTranscribe:_onTranscription - got empty transcript, continue listening');
}
else {
this.logger.info({evt}, 'TaskTranscribe:_onTranscription - got empty transcript, listen again');
this._transcribe(ep);
}
return;
}
if (evt.alternatives[0].transcript === '' && !cs.callGone && !this.killed) {
this.logger.info({evt}, 'TaskGather:_onTranscription - got empty transcript, listen again');
return this._transcribe(ep);
}
evt.channel_tag = channel;
if (this.transcriptionHook) {
const b3 = this.getTracingPropagation();
const httpHeaders = b3 && {b3};
this.cs.requestor.request('verb:hook', this.transcriptionHook,
Object.assign({speech: evt}, this.cs.callInfo), httpHeaders)
.catch((err) => this.logger.info(err, 'TranscribeTask:_onTranscription error'));
try {
const json = await this.cs.requestor.request('verb:hook', this.transcriptionHook, {
...this.cs.callInfo,
...httpHeaders,
speech: evt
});
this.logger.info({json}, 'sent transcriptionHook');
if (json && Array.isArray(json) && !this.parentTask) {
const makeTask = require('./make_task');
const tasks = normalizeJambones(this.logger, json).map((tdata) => makeTask(this.logger, tdata));
if (tasks && tasks.length > 0) {
this.logger.info({tasks: tasks}, `${this.name} replacing application with ${tasks.length} tasks`);
this.cs.replaceApplication(tasks);
}
}
} catch (err) {
this.logger.info(err, 'TranscribeTask:_onTranscription error');
}
}
if (this.parentTask) {
this.parentTask.emit('transcription', evt);
@@ -330,6 +291,57 @@ class TaskTranscribe extends Task {
this._timer = null;
}
}
_onNuanceError(_cs, _ep, _channel, evt) {
const {code, error, details} = evt;
if (code === 404 && error === 'No speech') {
this.logger.debug({code, error, details}, 'TaskTranscribe:_onNuanceError');
return this._resolve('timeout');
}
this.logger.info({code, error, details}, 'TaskTranscribe:_onNuanceError');
if (code === 413 && error === 'Too much speech') {
return this._resolve('timeout');
}
}
_onDeepgramConnect(_cs, _ep) {
this.logger.debug('TaskTranscribe:_onDeepgramConnect');
}
_onDeepGramConnectFailure(cs, _ep, _channel, evt) {
const {reason} = evt;
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info({evt}, 'TaskTranscribe:_onDeepgramConnectFailure');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Failed connecting to Deepgram speech recognizer: ${reason}`,
vendor: 'deepgram',
}).catch((err) => this.logger.info({err}, 'Error generating alert for deepgram connection failure'));
this.notifyError(`Failed connecting to speech vendor deepgram: ${reason}`);
this.notifyTaskDone();
}
_onIbmConnect(_cs, _ep) {
this.logger.debug('TaskTranscribe:_onIbmConnect');
}
_onIbmConnectFailure(cs, _ep, _channel, evt) {
const {reason} = evt;
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info({evt}, 'TaskTranscribe:_onIbmConnectFailure');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Failed connecting to IBM watson speech recognizer: ${reason}`,
vendor: 'ibm',
}).catch((err) => this.logger.info({err}, 'Error generating alert for IBM connection failure'));
this.notifyError(`Failed connecting to speech vendor IBM: ${reason}`);
this.notifyTaskDone();
}
_onIbmError(cs, _ep, _channel, evt) {
this.logger.info({evt}, 'TaskTranscribe:_onIbmError');
}
}
module.exports = TaskTranscribe;
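A sketch of a transcribe verb that exercises the new vendor paths above: for nuance and ibm the provisioned credentials are first exchanged for a (cached) access token, and the vendor-specific options ride along in the recognizer. Names follow the schema earlier in this diff; values are illustrative.

{
  verb: 'transcribe',
  id: 'xcribe-1',
  transcriptionHook: '/transcripts',
  recognizer: {
    vendor: 'nuance',
    language: 'en-US',
    nuanceOptions: {topic: 'GEN', resultType: 'final', punctuation: true}
  }
}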

View File

@@ -273,26 +273,46 @@ module.exports = (logger) => {
amd
.on(AmdEvents.NoSpeechDetected, (evt) => {
task.emit('amd', {type: AmdEvents.NoSpeechDetected, ...evt});
ep.stopTranscription({vendor, bugname});
try {
ep.connected && ep.stopTranscription({vendor, bugname});
} catch (err) {
logger.info({err}, 'Error stopping transcription');
}
})
.on(AmdEvents.HumanDetected, (evt) => {
task.emit('amd', {type: AmdEvents.HumanDetected, ...evt});
ep.stopTranscription({vendor, bugname});
try {
ep.connected && ep.stopTranscription({vendor, bugname});
} catch (err) {
logger.info({err}, 'Error stopping transcription');
}
})
.on(AmdEvents.MachineDetected, (evt) => {
task.emit('amd', {type: AmdEvents.MachineDetected, ...evt});
})
.on(AmdEvents.DecisionTimeout, (evt) => {
task.emit('amd', {type: AmdEvents.DecisionTimeout, ...evt});
ep.stopTranscription({vendor, bugname});
try {
ep.connected && ep.stopTranscription({vendor, bugname});
} catch (err) {
logger.info({err}, 'Error stopping transcription');
}
})
.on(AmdEvents.ToneTimeout, (evt) => {
//task.emit('amd', {type: AmdEvents.ToneTimeout, ...evt});
ep.execute('avmd_stop').catch((err) => logger.info(err, 'Error stopping avmd'));
try {
ep.connected && ep.execute('avmd_stop').catch((err) => logger.info(err, 'Error stopping avmd'));
} catch (err) {
logger.info({err}, 'Error stopping avmd');
}
})
.on(AmdEvents.MachineStoppedSpeaking, () => {
task.emit('amd', {type: AmdEvents.MachineStoppedSpeaking});
ep.stopTranscription({vendor, bugname});
try {
ep.connected && ep.stopTranscription({vendor, bugname});
} catch (err) {
logger.info({err}, 'Error stopping transcription');
}
});
/* start transcribing, and also listening for beep */

View File

@@ -1,7 +1,7 @@
const Emitter = require('events');
const bent = require('bent');
const assert = require('assert');
const PORT = process.env.AWS_SNS_PORT || 3001;
const PORT = process.env.AWS_SNS_PORT || 3010;
const {LifeCycleEvents} = require('./constants');
const express = require('express');
const app = express();
@@ -21,6 +21,26 @@ class SnsNotifier extends Emitter {
this.logger = logger;
}
_doListen(logger, app, port, resolve) {
return app.listen(port, () => {
this.snsEndpoint = `http://${this.publicIp}:${port}`;
logger.info(`SNS lifecycle server listening on http://localhost:${port}`);
resolve(app);
});
}
_handleErrors(logger, app, resolve, reject, e) {
if (e.code === 'EADDRINUSE' &&
process.env.AWS_SNS_PORT_MAX &&
e.port < process.env.AWS_SNS_PORT_MAX) {
logger.info(`SNS lifecycle server failed to bind port on ${e.port}, will try next port`);
const server = this._doListen(logger, app, ++e.port, resolve);
server.on('error', this._handleErrors.bind(this, logger, app, resolve, reject));
return;
}
reject(e);
}
async _handlePost(req, res) {
try {
@@ -84,11 +104,9 @@ class SnsNotifier extends Emitter {
this.logger.debug('SnsNotifier: retrieving instance data');
this.instanceId = await getString('http://169.254.169.254/latest/meta-data/instance-id');
this.publicIp = await getString('http://169.254.169.254/latest/meta-data/public-ipv4');
this.snsEndpoint = `http://${this.publicIp}:${PORT}`;
this.logger.info({
instanceId: this.instanceId,
publicIp: this.publicIp,
snsEndpoint: this.snsEndpoint
publicIp: this.publicIp
}, 'retrieved AWS instance data');
// start listening
@@ -100,7 +118,10 @@ class SnsNotifier extends Emitter {
this.logger.error(err, 'burped error');
res.status(err.status || 500).json({msg: err.message});
});
app.listen(PORT);
return new Promise((resolve, reject) => {
const server = this._doListen(this.logger, app, PORT, resolve);
server.on('error', this._handleErrors.bind(this, this.logger, app, resolve, reject));
});
} catch (err) {
this.logger.error({err}, 'Error retrieving AWS instance metadata');

View File

@@ -67,6 +67,24 @@
"MaxDurationExceeded": "google_transcribe::max_duration_exceeded",
"VadDetected": "google_transcribe::vad_detected"
},
"NuanceTranscriptionEvents": {
"Transcription": "nuance_transcribe::transcription",
"StartOfSpeech": "nuance_transcribe::start_of_speech",
"TranscriptionComplete": "nuance_transcribe::end_of_transcription",
"Error": "nuance_transcribe::error",
"VadDetected": "nuance_transcribe::vad_detected"
},
"DeepgramTranscriptionEvents": {
"Transcription": "deepgram_transcribe::transcription",
"ConnectFailure": "deepgram_transcribe::connect_failed",
"Connect": "deepgram_transcribe::connect"
},
"IbmTranscriptionEvents": {
"Transcription": "ibm_transcribe::transcription",
"ConnectFailure": "ibm_transcribe::connect_failed",
"Connect": "ibm_transcribe::connect",
"Error": "ibm_transcribe::error"
},
"AwsTranscriptionEvents": {
"Transcription": "aws_transcribe::transcription",
"EndOfTranscript": "aws_transcribe::end_of_transcript",

View File

@@ -42,9 +42,9 @@ const clearChannels = () => {
};
const clearFiles = () => {
const {logger} = require('../..');
const out = execSync('find /tmp -name "*.mp3" -mtime +2 -exec rm {} \\;');
logger.debug({out}, 'clearFiles: command output');
//const {logger} = require('../..');
/*const out = */ execSync('find /tmp -name "*.mp3" -mtime +2 -exec rm {} \\;');
//logger.debug({out}, 'clearFiles: command output');
};

View File

@@ -23,23 +23,47 @@ AND vc.name = ?`;
const speechMapper = (cred) => {
const {credential, ...obj} = cred;
if ('google' === obj.vendor) {
obj.service_key = decrypt(credential);
}
else if ('aws' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.access_key_id = o.access_key_id;
obj.secret_access_key = o.secret_access_key;
obj.aws_region = o.aws_region;
}
else if ('microsoft' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
obj.region = o.region;
}
else if ('wellsaid' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
try {
if ('google' === obj.vendor) {
obj.service_key = decrypt(credential);
}
else if ('aws' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.access_key_id = o.access_key_id;
obj.secret_access_key = o.secret_access_key;
obj.aws_region = o.aws_region;
}
else if ('microsoft' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
obj.region = o.region;
obj.use_custom_stt = o.use_custom_stt;
obj.custom_stt_endpoint = o.custom_stt_endpoint;
obj.use_custom_tts = o.use_custom_tts;
obj.custom_tts_endpoint = o.custom_tts_endpoint;
}
else if ('wellsaid' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
}
else if ('nuance' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.client_id = o.client_id;
obj.secret = o.secret;
}
else if ('ibm' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.tts_api_key = o.tts_api_key;
obj.tts_region = o.tts_region;
obj.stt_api_key = o.stt_api_key;
obj.stt_region = o.stt_region;
}
else if ('deepgram' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
}
} catch (err) {
console.log(err);
}
return obj;
};
@@ -60,7 +84,10 @@ module.exports = (logger, srf) => {
const haveAws = speech.find((s) => s.vendor === 'aws');
const haveMicrosoft = speech.find((s) => s.vendor === 'microsoft');
const haveWellsaid = speech.find((s) => s.vendor === 'wellsaid');
if (!haveGoogle || !haveAws || !haveMicrosoft) {
const haveNuance = speech.find((s) => s.vendor === 'nuance');
const haveDeepgram = speech.find((s) => s.vendor === 'deepgram');
const haveIbm = speech.find((s) => s.vendor === 'ibm');
if (!haveGoogle || !haveAws || !haveMicrosoft || !haveWellsaid || !haveNuance) {
const [r3] = await pp.query(sqlSpeechCredentialsForSP, account_sid);
if (r3.length) {
if (!haveGoogle) {
@@ -79,6 +106,18 @@ module.exports = (logger, srf) => {
const wellsaid = r3.find((s) => s.vendor === 'wellsaid');
if (wellsaid) speech.push(speechMapper(wellsaid));
}
if (!haveNuance) {
const nuance = r3.find((s) => s.vendor === 'nuance');
if (nuance) speech.push(speechMapper(nuance));
}
if (!haveDeepgram) {
const deepgram = r3.find((s) => s.vendor === 'deepgram');
if (deepgram) speech.push(speechMapper(deepgram));
}
if (!haveIbm) {
const ibm = r3.find((s) => s.vendor === 'ibm');
if (ibm) speech.push(speechMapper(ibm));
}
}
}
@@ -89,6 +128,7 @@ module.exports = (logger, srf) => {
};
const updateSpeechCredentialLastUsed = async(speech_credential_sid) => {
if (!speech_credential_sid) return;
const pp = pool.promise();
const sql = 'UPDATE speech_credentials SET last_used = NOW() WHERE speech_credential_sid = ?';
try {

View File

@@ -0,0 +1,45 @@
const express = require('express');
const httpRoutes = require('../http-routes');
const PORT = process.env.HTTP_PORT || 3000;
const doListen = (logger, app, port, resolve) => {
const server = app.listen(port, () => {
const {srf} = app.locals;
logger.info(`listening for HTTP requests on port ${PORT}, serviceUrl is ${srf.locals.serviceUrl}`);
resolve({server, app});
});
return server;
};
const handleErrors = (logger, app, resolve, reject, e) => {
if (e.code === 'EADDRINUSE' &&
process.env.HTTP_PORT_MAX &&
e.port < process.env.HTTP_PORT_MAX) {
logger.info(`HTTP server failed to bind port on ${e.port}, will try next port`);
const server = doListen(logger, app, ++e.port, resolve);
server.on('error', handleErrors.bind(null, logger, app, resolve, reject));
return;
}
logger.info({err: e, port: PORT}, 'httpListener error');
reject(e);
};
const createHttpListener = (logger, srf) => {
const app = express();
app.locals = {...app.locals, logger, srf};
app.use(express.urlencoded({ extended: true }));
app.use(express.json());
app.use('/', httpRoutes);
app.use((err, _req, res, _next) => {
logger.error(err, 'burped error');
res.status(err.status || 500).json({msg: err.message});
});
return new Promise((resolve, reject) => {
const server = doListen(logger, app, PORT, resolve);
server.on('error', handleErrors.bind(null, logger, app, resolve, reject));
});
};
module.exports = createHttpListener;
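Both this HTTP listener and the SNS lifecycle server above now use the same bind-and-retry pattern: if the configured port is taken and a corresponding *_PORT_MAX is set, the listener walks forward one port at a time until it binds or passes the max. A hedged sketch of starting it (the require path and environment values are assumptions):

// e.g. HTTP_PORT=3000 HTTP_PORT_MAX=3005
const createHttpListener = require('./utils/http-listener');  // path assumed
createHttpListener(logger, srf)
  .then(({server}) => logger.info(`listening on ${server.address().port}`))  // 3000, or the next free port up to 3005
  .catch((err) => logger.error({err}, 'could not bind any HTTP port'));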

View File

@@ -2,6 +2,7 @@ const {Client, Pool} = require('undici');
const parseUrl = require('parse-url');
const assert = require('assert');
const BaseRequestor = require('./base-requestor');
const WsRequestor = require('./ws-requestor');
const {HookMsgTypes} = require('./constants.json');
const snakeCaseKeys = require('./snakecase-keys');
const pools = new Map();
@@ -28,9 +29,11 @@ class HttpRequestor extends BaseRequestor {
assert(['GET', 'POST'].includes(this.method));
const u = this._parsedUrl = parseUrl(this.url);
this._baseUrl = `${u.protocol}://${u.resource}`;
this._resource = u.resource;
if (u.port) this._baseUrl = `${u.protocol}://${u.resource}:${u.port}`;
else this._baseUrl = `${u.protocol}://${u.resource}`;
this._protocol = u.protocol;
this._resource = u.resource;
this._port = u.port;
this._search = u.search;
this._usePools = process.env.HTTP_POOL && parseInt(process.env.HTTP_POOL);
@@ -49,7 +52,10 @@ class HttpRequestor extends BaseRequestor {
this.logger.debug(`HttpRequestor:created pool for ${this._baseUrl}`);
}
}
else this.client = new Client(`${u.protocol}://${u.resource}`);
else {
if (u.port) this.client = new Client(`${u.protocol}://${u.resource}:${u.port}`);
else this.client = new Client(`${u.protocol}://${u.resource}`);
}
}
get baseUrl() {
@@ -86,6 +92,18 @@ class HttpRequestor extends BaseRequestor {
assert.ok, (['GET', 'POST'].includes(method), `HttpRequestor:request method must be 'GET' or 'POST' not ${method}`);
const startAt = process.hrtime();
/* if we have an absolute url, and it is ws then do a websocket connection */
if (this._isAbsoluteUrl(url) && url.startsWith('ws')) {
this.logger.debug({hook}, 'HttpRequestor: switching to websocket connection');
const h = typeof hook === 'object' ? hook : {url: hook};
const requestor = new WsRequestor(this.logger, this.account_sid, h, this.secret);
if (type === 'session:redirect') {
this.close();
this.emit('handover', requestor);
}
return requestor.request('session:new', hook, params, httpHeaders);
}
let newClient;
try {
let client, path, query;
@@ -95,13 +113,14 @@ class HttpRequestor extends BaseRequestor {
}
else {
const u = parseUrl(url);
if (u.resource === this._resource && u.protocol === this._protocol) {
if (u.resource === this._resource && u.port === this._port && u.protocol === this._protocol) {
client = this.client;
path = u.pathname;
query = u.query;
}
else {
client = newClient = new Client(`${u.protocol}://${u.resource}`);
if (u.port) client = newClient = new Client(`${u.protocol}://${u.resource}:${u.port}`);
else client = newClient = new Client(`${u.protocol}://${u.resource}`);
path = u.pathname;
query = u.query;
}
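The practical effect: an application that starts out on http(s) can hand the session over to a websocket by returning a redirect whose actionHook is an absolute ws(s) URL; the HttpRequestor then closes, emits 'handover' with a new WsRequestor, and replays the hook as session:new over the socket. A sketch of such a webhook response (URL illustrative):

// JSON returned from an http(s) application hook
[
  {verb: 'redirect', actionHook: 'wss://apps.example.com/voice-socket'}
]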

View File

@@ -152,7 +152,9 @@ function installSrfLocals(srf, logger) {
popFront,
removeFromList,
lengthOfList,
getListPosition
getListPosition,
getNuanceAccessToken,
getIbmAccessToken,
} = require('@jambonz/realtimedb-helpers')({
host: process.env.JAMBONES_REDIS_HOST,
port: process.env.JAMBONES_REDIS_PORT || 6379
@@ -204,7 +206,9 @@ function installSrfLocals(srf, logger) {
popFront,
removeFromList,
lengthOfList,
getListPosition
getListPosition,
getNuanceAccessToken,
getIbmAccessToken
},
parentLogger: logger,
getSBC,

View File

@@ -12,7 +12,7 @@ const deepcopy = require('deepcopy');
const moment = require('moment');
const stripCodecs = require('./strip-ancillary-codecs');
const RootSpan = require('./call-tracer');
const { v4: uuidv4 } = require('uuid');
const uuidv4 = require('uuid-random');
class SingleDialer extends Emitter {
constructor({logger, sbcAddress, target, opts, application, callInfo, accountInfo, rootSpan, startSpan}) {
@@ -21,6 +21,7 @@ class SingleDialer extends Emitter {
this.logger = logger;
this.target = target;
this.from = target.from || {};
this.sbcAddress = sbcAddress;
this.opts = opts;
this.application = application;
@@ -66,8 +67,11 @@ class SingleDialer extends Emitter {
opts.headers = {
...opts.headers,
...(this.target.headers || {}),
...(this.from.user && {'X-Preferred-From-User': this.from.user}),
...(this.from.host && {'X-Preferred-From-Host': this.from.host}),
'X-Jambonz-Routing': this.target.type,
'X-Call-Sid': this.callSid
'X-Call-Sid': this.callSid,
...(this.applicationSid && {'X-Application-Sid': this.applicationSid})
};
if (srf.locals.fsUUID) {
opts.headers = {
@@ -408,7 +412,7 @@ class SingleDialer extends Emitter {
this.callInfo.updateCallStatus(callStatus, sipStatus, sipReason);
if (typeof duration === 'number') this.callInfo.duration = duration;
try {
this.requestor.request('call:status', this.application.call_status_hook, this.callInfo.toJSON());
this.notifier.request('call:status', this.application.call_status_hook, this.callInfo.toJSON());
} catch (err) {
this.logger.info(err, `SingleDialer:_notifyCallStatusChange error sending ${callStatus} ${sipStatus}`);
}
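A sketch of a dial target using the new from object, which SingleDialer maps to X-Preferred-From-User and X-Preferred-From-Host headers on the outbound INVITE (numbers and host are illustrative):

{
  verb: 'dial',
  callerId: '15551112222',
  target: [
    {
      type: 'phone',
      number: '15556667777',
      from: {user: '15551112222', host: 'sip.example.com'}
    }
  ]
}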

View File

@@ -1,42 +1,7 @@
const bent = require('bent');
const parseUrl = require('parse-url');
const assert = require('assert');
const snakeCaseKeys = require('./snakecase-keys');
const crypto = require('crypto');
const timeSeries = require('@jambonz/time-series');
let alerter ;
const toBase64 = (str) => Buffer.from(str || '', 'utf8').toString('base64');
function computeSignature(payload, timestamp, secret) {
assert(secret);
const data = `${timestamp}.${JSON.stringify(payload)}`;
return crypto
.createHmac('sha256', secret)
.update(data, 'utf8')
.digest('hex');
}
function generateSigHeader(payload, secret) {
const timestamp = Math.floor(Date.now() / 1000);
const signature = computeSignature(payload, timestamp, secret);
const scheme = 'v1';
return {
'Jambonz-Signature': `t=${timestamp},${scheme}=${signature}`
};
}
function basicAuth(username, password) {
if (!username || !password) return {};
const creds = `${username}:${password || ''}`;
const header = `Basic ${toBase64(creds)}`;
return {Authorization: header};
}
function isRelativeUrl(u) {
return typeof u === 'string' && u.startsWith('/');
}
function isAbsoluteUrl(u) {
return typeof u === 'string' &&
u.startsWith('https://') || u.startsWith('http://');
@@ -49,14 +14,6 @@ class Requestor {
this.logger = logger;
this.url = hook.url;
this.method = hook.method || 'POST';
this.authHeader = basicAuth(hook.username, hook.password);
const u = parseUrl(this.url);
const myPort = u.port ? `:${u.port}` : '';
const baseUrl = this._baseUrl = `${u.protocol}://${u.resource}${myPort}`;
this.get = bent(baseUrl, 'GET', 'buffer', 200, 201);
this.post = bent(baseUrl, 'POST', 'buffer', 200, 201);
this.username = hook.username;
this.password = hook.password;
@@ -78,72 +35,15 @@ class Requestor {
}
}
get baseUrl() {
return this._baseUrl;
}
/**
* Make an HTTP request.
* All requests use json bodies.
* All requests expect a 200 statusCode on success
* @param {object|string} hook - may be a absolute or relative url, or an object
* @param {string} [hook.url] - an absolute or relative url
* @param {string} [hook.method] - 'GET' or 'POST'
* @param {string} [hook.username] - if basic auth is protecting the endpoint
* @param {string} [hook.password] - if basic auth is protecting the endpoint
* @param {object} [params] - request parameters
*/
async request(hook, params) {
const payload = params ? snakeCaseKeys(params, ['customerData', 'sip']) : null;
const url = hook.url || hook;
const method = hook.method || 'POST';
assert.ok(url, 'Requestor:request url was not provided');
assert.ok, (['GET', 'POST'].includes(method), `Requestor:request method must be 'GET' or 'POST' not ${method}`);
const {url: urlInfo = hook, method: methodInfo = 'POST'} = hook; // mask user/pass
this.logger.debug({url: urlInfo, method: methodInfo, payload}, `Requestor:request ${method} ${url}`);
const startAt = process.hrtime();
let buf;
try {
const sigHeader = generateSigHeader(payload, this.secret);
const headers = {...sigHeader, ...this.authHeader};
//this.logger.info({url, headers}, 'send webhook');
buf = isRelativeUrl(url) ?
await this.post(url, payload, headers) :
await bent(method, 'buffer', 200, 201, 202)(url, payload, headers);
} catch (err) {
this.logger.error({err, secret: this.secret, baseUrl: this.baseUrl, url, statusCode: err.statusCode},
`web callback returned unexpected error code ${err.statusCode}`);
let opts = {account_sid: this.account_sid};
if (err.code === 'ECONNREFUSED') {
opts = {...opts, alert_type: alerter.AlertType.WEBHOOK_CONNECTION_FAILURE, url};
}
else if (err.name === 'StatusError') {
opts = {...opts, alert_type: alerter.AlertType.WEBHOOK_STATUS_FAILURE, url, status: err.statusCode};
}
else {
opts = {...opts, alert_type: alerter.AlertType.WEBHOOK_CONNECTION_FAILURE, url, detail: err.message};
}
alerter.writeAlerts(opts).catch((err) => this.logger.info({err, opts}, 'Error writing alert'));
throw err;
}
const diff = process.hrtime(startAt);
const time = diff[0] * 1e3 + diff[1] * 1e-6;
const rtt = time.toFixed(0);
if (buf) this.stats.histogram('app.hook.response_time', rtt, ['hook_type:app']);
if (buf && buf.toString().length > 0) {
try {
const json = JSON.parse(buf.toString());
this.logger.info({response: json}, `Requestor:request ${method} ${url} succeeded in ${rtt}ms`);
return json;
}
catch (err) {
//this.logger.debug({err, url, method}, `Requestor:request returned non-JSON content: '${buf.toString()}'`);
}
get Alerter() {
if (!alerter) {
alerter = timeSeries(this.logger, {
host: process.env.JAMBONES_TIME_SERIES_HOST,
commitSize: 50,
commitInterval: 'test' === process.env.NODE_ENV ? 7 : 20
});
}
return alerter;
}
}
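For reference, the signing helpers removed here (now provided by BaseRequestor) HMAC-SHA256 the string `${timestamp}.${JSON.stringify(payload)}` with the account's webhook secret and send it as Jambonz-Signature: t=<ts>,v1=<hex>. A hedged sketch of verifying that header on the receiving side; it uses the raw request body so the serialization matches what was signed:

const crypto = require('crypto');

const verifySignature = (header, rawBody, secret) => {
  // header looks like: t=1673100000,v1=<64 hex chars>
  const parts = Object.fromEntries(header.split(',').map((kv) => kv.split('=')));
  const expected = crypto
    .createHmac('sha256', secret)
    .update(`${parts.t}.${rawBody}`, 'utf8')
    .digest('hex');
  return expected === parts.v1;  // use crypto.timingSafeEqual on equal-length buffers if timing is a concern
};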

View File

@@ -1,5 +1,5 @@
const assert = require('assert');
const { v4: uuidv4 } = require('uuid');
const uuidv4 = require('uuid-random');
const {LifeCycleEvents, FS_UUID_SET_NAME} = require('./constants');
const Emitter = require('events');
const debug = require('debug')('jambonz:feature-server');

View File

@@ -1,5 +1,5 @@
const xmlParser = require('xml2js').parseString;
const { v4: uuidv4 } = require('uuid');
const uuidv4 = require('uuid-random');
const parseUri = require('drachtio-srf').parseUri;
const transform = require('sdp-transform');
const debug = require('debug')('jambonz:feature-server');

View File

@@ -1,33 +1,419 @@
module.exports = (logger) => {
const normalizeTranscription = (evt, vendor, channel) => {
if ('aws' === vendor && Array.isArray(evt) && evt.length > 0) evt = evt[0];
if ('microsoft' === vendor) {
const nbest = evt.NBest;
const language_code = evt.PrimaryLanguage?.Language || this.language;
const alternatives = nbest ? nbest.map((n) => {
return {
confidence: n.Confidence,
transcript: n.Display
};
}) :
[
{
transcript: evt.DisplayText || evt.Text
}
];
const {
TaskName,
AzureTranscriptionEvents,
GoogleTranscriptionEvents,
AwsTranscriptionEvents,
NuanceTranscriptionEvents,
DeepgramTranscriptionEvents,
} = require('./constants');
const newEvent = {
is_final: evt.RecognitionStatus === 'Success',
channel,
language_code,
alternatives
};
evt = newEvent;
const normalizeDeepgram = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
const alternatives = (evt.channel?.alternatives || [])
.map((alt) => ({
confidence: alt.confidence,
transcript: alt.transcript,
}));
return {
language_code: language,
channel_tag: channel,
is_final: evt.is_final,
alternatives,
vendor: {
name: 'deepgram',
evt: copy
}
};
};
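/* illustrative only (event shape inferred from the fields read above): a Deepgram event such as
 *   {is_final: false, channel: {alternatives: [{transcript: 'hello world', confidence: 0.98}]}}
 * normalizes to
 *   {language_code, channel_tag, is_final: false,
 *    alternatives: [{confidence: 0.98, transcript: 'hello world'}],
 *    vendor: {name: 'deepgram', evt: <deep copy of the original event>}}
 */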
const normalizeIbm = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
//const idx = evt.result_index;
const result = evt.results[0];
return {
language_code: language,
channel_tag: channel,
is_final: result.final,
alternatives: result.alternatives,
vendor: {
name: 'ibm',
evt: copy
}
};
};
const normalizeGoogle = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
return {
language_code: language,
channel_tag: channel,
is_final: evt.is_final,
alternatives: evt.alternatives,
vendor: {
name: 'google',
evt: copy
}
};
};
const normalizeNuance = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
return {
language_code: language,
channel_tag: channel,
is_final: evt.is_final,
alternatives: evt.alternatives,
vendor: {
name: 'nuance',
evt: copy
}
};
};
const normalizeMicrosoft = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
const nbest = evt.NBest;
const language_code = evt.PrimaryLanguage?.Language || language;
const alternatives = nbest ? nbest.map((n) => {
return {
confidence: n.Confidence,
transcript: n.Display
};
}) :
[
{
transcript: evt.DisplayText || evt.Text
}
];
return {
language_code,
channel_tag: channel,
is_final: evt.RecognitionStatus === 'Success',
alternatives,
vendor: {
name: 'microsoft',
evt: copy
}
};
};
const normalizeAws = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
return {
language_code: language,
channel_tag: channel,
is_final: evt[0].is_final,
alternatives: evt[0].alternatives,
vendor: {
name: 'aws',
evt: copy
}
};
};
module.exports = (logger) => {
const normalizeTranscription = (evt, vendor, channel, language) => {
logger.debug({ evt, vendor, channel, language }, 'normalizeTranscription');
switch (vendor) {
case 'deepgram':
return normalizeDeepgram(evt, channel, language);
case 'microsoft':
return normalizeMicrosoft(evt, channel, language);
case 'google':
return normalizeGoogle(evt, channel, language);
case 'aws':
return normalizeAws(evt, channel, language);
case 'nuance':
return normalizeNuance(evt, channel, language);
case 'ibm':
return normalizeIbm(evt, channel, language);
default:
logger.error(`Unknown vendor ${vendor}`);
return evt;
}
evt.channel_tag = channel;
//logger.debug({evt}, 'normalized transcription');
return evt;
};
return {normalizeTranscription};
const setChannelVarsForStt = (task, sttCredentials, rOpts = {}) => {
let opts = {};
const {enable, voiceMs = 0, mode = -1} = rOpts.vad || {};
const vad = {enable, voiceMs, mode};
/* voice activity detection works across vendors */
opts = {
...opts,
...(vad.enable && {START_RECOGNIZING_ON_VAD: 1}),
...(vad.enable && vad.voiceMs && {RECOGNIZER_VAD_VOICE_MS: vad.voiceMs}),
...(vad.enable && typeof vad.mode === 'number' && {RECOGNIZER_VAD_MODE: vad.mode}),
};
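/* e.g. (illustrative): rOpts.vad = {enable: true, voiceMs: 250, mode: 2} yields
 * START_RECOGNIZING_ON_VAD=1, RECOGNIZER_VAD_VOICE_MS=250, RECOGNIZER_VAD_MODE=2 */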
if ('google' === rOpts.vendor) {
opts = {
...opts,
...(sttCredentials &&
{GOOGLE_APPLICATION_CREDENTIALS: JSON.stringify(sttCredentials.credentials)}),
...(rOpts.enhancedModel &&
{GOOGLE_SPEECH_USE_ENHANCED: 1}),
...(rOpts.separateRecognitionPerChannel &&
{GOOGLE_SPEECH_SEPARATE_RECOGNITION_PER_CHANNEL: 1}),
...(rOpts.profanityFilter &&
{GOOGLE_SPEECH_PROFANITY_FILTER: 1}),
...(rOpts.punctuation &&
{GOOGLE_SPEECH_ENABLE_AUTOMATIC_PUNCTUATION: 1}),
...(rOpts.words &&
{GOOGLE_SPEECH_ENABLE_WORD_TIME_OFFSETS: 1}),
...((rOpts.singleUtterance || task.name === TaskName.Gather) &&
{GOOGLE_SPEECH_SINGLE_UTTERANCE: 1}),
...(rOpts.diarization &&
{GOOGLE_SPEECH_SPEAKER_DIARIZATION: 1}),
...(rOpts.diarization && rOpts.diarizationMinSpeakers > 0 &&
{GOOGLE_SPEECH_SPEAKER_DIARIZATION_MIN_SPEAKER_COUNT: rOpts.diarizationMinSpeakers}),
...(rOpts.diarization && rOpts.diarizationMaxSpeakers > 0 &&
{GOOGLE_SPEECH_SPEAKER_DIARIZATION_MAX_SPEAKER_COUNT: rOpts.diarizationMaxSpeakers}),
...(rOpts.enhancedModel === false &&
{GOOGLE_SPEECH_USE_ENHANCED: 0}),
...(rOpts.separateRecognitionPerChannel === false &&
{GOOGLE_SPEECH_SEPARATE_RECOGNITION_PER_CHANNEL: 0}),
...(rOpts.profanityFilter === false &&
{GOOGLE_SPEECH_PROFANITY_FILTER: 0}),
...(rOpts.punctuation === false &&
{GOOGLE_SPEECH_ENABLE_AUTOMATIC_PUNCTUATION: 0}),
...(rOpts.words == false &&
{GOOGLE_SPEECH_ENABLE_WORD_TIME_OFFSETS: 0}),
...((rOpts.singleUtterance === false || task.name === TaskName.Transcribe) &&
{GOOGLE_SPEECH_SINGLE_UTTERANCE: 0}),
...(rOpts.diarization === false &&
{GOOGLE_SPEECH_SPEAKER_DIARIZATION: 0}),
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
{GOOGLE_SPEECH_HINTS: rOpts.hints.join(',')}),
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
{GOOGLE_SPEECH_HINTS: JSON.stringify(rOpts.hints)}),
...(typeof rOpts.hintsBoost === 'number' &&
{GOOGLE_SPEECH_HINTS_BOOST: rOpts.hintsBoost}),
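/* illustrative examples of the two supported hint shapes (object keys are assumptions):
 *   hints: ['sales', 'support']            -> GOOGLE_SPEECH_HINTS='sales,support'
 *   hints: [{phrase: 'sales', boost: 10}]  -> GOOGLE_SPEECH_HINTS=JSON string of the array
 *   hintsBoost: 20                         -> GOOGLE_SPEECH_HINTS_BOOST=20
 */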
...(rOpts.altLanguages.length > 0 &&
{GOOGLE_SPEECH_ALTERNATIVE_LANGUAGE_CODES: rOpts.altLanguages.join(',')}),
...(rOpts.interactionType &&
{GOOGLE_SPEECH_METADATA_INTERACTION_TYPE: rOpts.interactionType}),
...{GOOGLE_SPEECH_MODEL: rOpts.model || (task.name === TaskName.Gather ? 'command_and_search' : 'phone_call')},
...(rOpts.naicsCode > 0 &&
{GOOGLE_SPEECH_METADATA_INDUSTRY_NAICS_CODE: rOpts.naicsCode}),
};
}
else if (['aws', 'polly'].includes(rOpts.vendor)) {
opts = {
...opts,
...(rOpts.vocabularyName && {AWS_VOCABULARY_NAME: rOpts.vocabularyName}),
...(rOpts.vocabularyFilterName && {AWS_VOCABULARY_FILTER_NAME: rOpts.vocabularyFilterName}),
...(rOpts.filterMethod && {AWS_VOCABULARY_FILTER_METHOD: rOpts.filterMethod}),
...(sttCredentials && {
AWS_ACCESS_KEY_ID: sttCredentials.accessKeyId,
AWS_SECRET_ACCESS_KEY: sttCredentials.secretAccessKey,
AWS_REGION: sttCredentials.region
}),
};
}
else if ('microsoft' === rOpts.vendor) {
opts = {
...opts,
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
{AZURE_SPEECH_HINTS: rOpts.hints.map((h) => h.trim()).join(',')}),
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
{AZURE_SPEECH_HINTS: rOpts.hints.map((h) => h.phrase).join(',')}),
...(rOpts.altLanguages && rOpts.altLanguages.length > 0 &&
{AZURE_SERVICE_ENDPOINT_ID: rOpts.sttCredentials}),
...(rOpts.requestSnr && {AZURE_REQUEST_SNR: 1}),
...(rOpts.profanityOption && {AZURE_PROFANITY_OPTION: rOpts.profanityOption}),
...(rOpts.azureServiceEndpoint && {AZURE_SERVICE_ENDPOINT: rOpts.azureServiceEndpoint}),
...(rOpts.initialSpeechTimeoutMs > 0 &&
{AZURE_INITIAL_SPEECH_TIMEOUT_MS: rOpts.initialSpeechTimeoutMs}),
...(rOpts.requestSnr && {AZURE_REQUEST_SNR: 1}),
...(rOpts.audioLogging && {AZURE_AUDIO_LOGGING: 1}),
...{AZURE_USE_OUTPUT_FORMAT_DETAILED: 1},
...(sttCredentials && {
AZURE_SUBSCRIPTION_KEY: sttCredentials.api_key,
AZURE_REGION: sttCredentials.region,
}),
...(sttCredentials.use_custom_stt && sttCredentials.custom_stt_endpoint &&
{AZURE_SERVICE_ENDPOINT_ID: sttCredentials.custom_stt_endpoint})
};
}
else if ('nuance' === rOpts.vendor) {
/**
* Note: all Nuance options live under recognizer.nuanceOptions; other vendors'
* settings should eventually be migrated to a similar nested structure
*/
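/* illustrative sketch only (field names taken from the options read below, not
 * necessarily the complete set the app accepts):
 *   recognizer: {
 *     vendor: 'nuance',
 *     interim: true,
 *     nuanceOptions: {
 *       topic: 'GEN',
 *       utteranceDetectionMode: 'single',
 *       punctuation: true,
 *       resultType: 'partial'
 *     }
 *   }
 */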
const {nuanceOptions = {}} = rOpts;
opts = {
...opts,
...(sttCredentials.access_token) &&
{NUANCE_ACCESS_TOKEN: sttCredentials.access_token},
...(sttCredentials.krypton_endpoint) &&
{NUANCE_KRYPTON_ENDPOINT: sttCredentials.krypton_endpoint},
...(nuanceOptions.topic) &&
{NUANCE_TOPIC: nuanceOptions.topic},
...(nuanceOptions.utteranceDetectionMode) &&
{NUANCE_UTTERANCE_DETECTION_MODE: nuanceOptions.utteranceDetectionMode},
...(nuanceOptions.punctuation) && {NUANCE_PUNCTUATION: nuanceOptions.punctuation},
...(nuanceOptions.profanityFilter) &&
{NUANCE_FILTER_PROFANITY: nuanceOptions.profanityFilter},
...(nuanceOptions.includeTokenization) &&
{NUANCE_INCLUDE_TOKENIZATION: nuanceOptions.includeTokenization},
...(nuanceOptions.discardSpeakerAdaptation) &&
{NUANCE_DISCARD_SPEAKER_ADAPTATION: nuanceOptions.discardSpeakerAdaptation},
...(nuanceOptions.suppressCallRecording) &&
{NUANCE_SUPPRESS_CALL_RECORDING: nuanceOptions.suppressCallRecording},
...(nuanceOptions.maskLoadFailures) &&
{NUANCE_MASK_LOAD_FAILURES: nuanceOptions.maskLoadFailures},
...(nuanceOptions.suppressInitialCapitalization) &&
{NUANCE_SUPPRESS_INITIAL_CAPITALIZATION: nuanceOptions.suppressInitialCapitalization},
...(nuanceOptions.allowZeroBaseLmWeight)
&& {NUANCE_ALLOW_ZERO_BASE_LM_WEIGHT: nuanceOptions.allowZeroBaseLmWeight},
...(nuanceOptions.filterWakeupWord) &&
{NUANCE_FILTER_WAKEUP_WORD: nuanceOptions.filterWakeupWord},
...(nuanceOptions.resultType) &&
{NUANCE_RESULT_TYPE: nuanceOptions.resultType || rOpts.interim ? 'partial' : 'final'},
...(nuanceOptions.noInputTimeoutMs) &&
{NUANCE_NO_INPUT_TIMEOUT_MS: nuanceOptions.noInputTimeoutMs},
...(nuanceOptions.recognitionTimeoutMs) &&
{NUANCE_RECOGNITION_TIMEOUT_MS: nuanceOptions.recognitionTimeoutMs},
...(nuanceOptions.utteranceEndSilenceMs) &&
{NUANCE_UTTERANCE_END_SILENCE_MS: nuanceOptions.utteranceEndSilenceMs},
...(nuanceOptions.maxHypotheses) &&
{NUANCE_MAX_HYPOTHESES: nuanceOptions.maxHypotheses},
...(nuanceOptions.speechDomain) &&
{NUANCE_SPEECH_DOMAIN: nuanceOptions.speechDomain},
...(nuanceOptions.formatting) &&
{NUANCE_FORMATTING: nuanceOptions.formatting},
...(nuanceOptions.resources) &&
{NUANCE_RESOURCES: JSON.stringify(nuanceOptions.resources)},
};
}
else if ('deepgram' === rOpts.vendor) {
const {deepgramOptions = {}} = rOpts;
opts = {
...opts,
...(sttCredentials.api_key) &&
{DEEPGRAM_API_KEY: sttCredentials.api_key},
...(deepgramOptions.tier) &&
{DEEPGRAM_SPEECH_TIER: deepgramOptions.tier},
...(deepgramOptions.model) &&
{DEEPGRAM_SPEECH_MODEL: deepgramOptions.model},
...(deepgramOptions.punctuate) &&
{DEEPGRAM_SPEECH_ENABLE_AUTOMATIC_PUNCTUATION: 1},
...(deepgramOptions.profanityFilter) &&
{DEEPGRAM_SPEECH_PROFANITY_FILTER: 1},
...(deepgramOptions.redact) &&
{DEEPGRAM_SPEECH_REDACT: 1},
...(deepgramOptions.diarize) &&
{DEEPGRAM_SPEECH_DIARIZE: 1},
...(deepgramOptions.diarizeVersion) &&
{DEEPGRAM_SPEECH_DIARIZE_VERSION: deepgramOptions.diarizeVersion},
...(deepgramOptions.ner) &&
{DEEPGRAM_SPEECH_NER: 1},
...(deepgramOptions.alternatives) &&
{DEEPGRAM_SPEECH_ALTERNATIVES: deepgramOptions.alternatives},
...(deepgramOptions.numerals) &&
{DEEPGRAM_SPEECH_NUMERALS: deepgramOptions.numerals},
...(deepgramOptions.search) &&
{DEEPGRAM_SPEECH_SEARCH: deepgramOptions.search.join(',')},
...(deepgramOptions.replace) &&
{DEEPGRAM_SPEECH_REPLACE: deepgramOptions.replace.join(',')},
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
{DEEPGRAM_SPEECH_KEYWORDS: rOpts.hints.map((h) => h.trim()).join(',')}),
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
{DEEPGRAM_SPEECH_KEYWORDS: rOpts.hints.map((h) => h.phrase).join(',')}),
...(deepgramOptions.keywords) &&
{DEEPGRAM_SPEECH_KEYWORDS: deepgramOptions.keywords.join(',')},
...('endpointing' in deepgramOptions) &&
{DEEPGRAM_SPEECH_ENDPOINTING: deepgramOptions.endpointing},
...(deepgramOptions.vadTurnoff) &&
{DEEPGRAM_SPEECH_VAD_TURNOFF: deepgramOptions.vadTurnoff},
...(deepgramOptions.tag) &&
{DEEPGRAM_SPEECH_VAD_TURNOFF: deepgramOptions.tag}
};
}
else if ('ibm' === rOpts.vendor) {
const {ibmOptions = {}} = rOpts;
opts = {
...opts,
...(sttCredentials.access_token) &&
{IBM_ACCESS_TOKEN: sttCredentials.access_token},
...(sttCredentials.stt_region) &&
{IBM_SPEECH_REGION: sttCredentials.stt_region},
...(sttCredentials.instance_id) &&
{IBM_SPEECH_INSTANCE_ID: sttCredentials.instance_id},
...(ibmOptions.model) &&
{IBM_SPEECH_MODEL: ibmOptions.model},
...(ibmOptions.language_customization_id) &&
{IBM_SPEECH_LANGUAGE_CUSTOMIZATION_ID: ibmOptions.language_customization_id},
...(ibmOptions.acoustic_customization_id) &&
{IBM_SPEECH_ACOUSTIC_CUSTOMIZATION_ID: ibmOptions.acoustic_customization_id},
...(ibmOptions.baseModelVersion) &&
{IBM_SPEECH_BASE_MODEL_VERSION: ibmOptions.baseModelVersion},
...(ibmOptions.watsonMetadata) &&
{IBM_SPEECH_WATSON_METADATA: ibmOptions.watsonMetadata},
...(ibmOptions.watsonLearningOptOut) &&
{IBM_SPEECH_WATSON_LEARNING_OPT_OUT: ibmOptions.watsonLearningOptOut}
};
}
logger.debug({opts}, 'recognizer channel vars');
return opts;
};
const removeSpeechListeners = (ep) => {
ep.removeCustomEventListener(GoogleTranscriptionEvents.Transcription);
ep.removeCustomEventListener(GoogleTranscriptionEvents.EndOfUtterance);
ep.removeCustomEventListener(GoogleTranscriptionEvents.VadDetected);
ep.removeCustomEventListener(AwsTranscriptionEvents.Transcription);
ep.removeCustomEventListener(AwsTranscriptionEvents.VadDetected);
ep.removeCustomEventListener(AzureTranscriptionEvents.Transcription);
ep.removeCustomEventListener(AzureTranscriptionEvents.NoSpeechDetected);
ep.removeCustomEventListener(AzureTranscriptionEvents.VadDetected);
ep.removeCustomEventListener(NuanceTranscriptionEvents.Transcription);
ep.removeCustomEventListener(NuanceTranscriptionEvents.TranscriptionComplete);
ep.removeCustomEventListener(NuanceTranscriptionEvents.StartOfSpeech);
ep.removeCustomEventListener(NuanceTranscriptionEvents.Error);
ep.removeCustomEventListener(NuanceTranscriptionEvents.VadDetected);
ep.removeCustomEventListener(DeepgramTranscriptionEvents.Transcription);
ep.removeCustomEventListener(DeepgramTranscriptionEvents.Connect);
ep.removeCustomEventListener(DeepgramTranscriptionEvents.ConnectFailure);
};
const setSpeechCredentialsAtRuntime = (recognizer) => {
if (!recognizer) return;
if (recognizer.vendor === 'nuance') {
const {clientId, secret} = recognizer.nuanceOptions || {};
if (clientId && secret) return {client_id: clientId, secret};
}
else if (recognizer.vendor === 'deepgram') {
const {apiKey} = recognizer.deepgramOptions || {};
if (apiKey) return {api_key: apiKey};
}
else if (recognizer.vendor === 'ibm') {
const {ttsApiKey, ttsRegion, sttApiKey, sttRegion, instanceId} = recognizer.ibmOptions || {};
if (ttsApiKey || sttApiKey) return {
tts_api_key: ttsApiKey,
tts_region: ttsRegion,
stt_api_key: sttApiKey,
stt_region: sttRegion,
instance_id: instanceId
};
}
};
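/* illustrative only: a recognizer of {vendor: 'deepgram', deepgramOptions: {apiKey: 'dg-xyz'}}
 * returns {api_key: 'dg-xyz'}; the assumption is that the caller uses this to let per-call
 * credentials stand in for account-level speech credentials */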
return {
normalizeTranscription,
setChannelVarsForStt,
removeSpeechListeners,
setSpeechCredentialsAtRuntime
};
};

@@ -54,7 +54,12 @@ class WsRequestor extends BaseRequestor {
/* if we have an absolute url and it is http(s), do a standard webhook */
if (this._isAbsoluteUrl(url) && url.startsWith('http')) {
this.logger.debug({hook}, 'WsRequestor: sending a webhook (HTTP)');
const requestor = new HttpRequestor(this.logger, this.account_sid, hook, this.secret);
const h = typeof hook === 'object' ? hook : {url: hook};
const requestor = new HttpRequestor(this.logger, this.account_sid, h, this.secret);
if (type === 'session:redirect') {
this.close();
this.emit('handover', requestor);
}
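/* illustrative only: a call such as request('session:redirect', 'https://example.com/app', ...)
 * takes this branch; the ws socket is closed, 'handover' is emitted with the new HttpRequestor,
 * and the redirect itself goes out over HTTP below */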
return requestor.request(type, hook, params, httpHeaders);
}
@@ -69,7 +74,7 @@ class WsRequestor extends BaseRequestor {
this.connectInProgress = true;
this.logger.debug(`WsRequestor:request(${this.id}) - connecting since we do not have a connection`);
if (this.connections >= MAX_RECONNECTS) {
throw new Error(`max attempts connecting to ${this.url}`);
return Promise.reject(`max attempts connecting to ${this.url}`);
}
try {
const startAt = process.hrtime();
@@ -79,7 +84,7 @@ class WsRequestor extends BaseRequestor {
} catch (err) {
this.logger.info({url, err}, 'WsRequestor:request - failed connecting');
this.connectInProgress = false;
throw err;
return Promise.reject(err);
}
}
assert(this.ws);
@@ -138,7 +143,7 @@ class WsRequestor extends BaseRequestor {
success: (response) => {
clearTimeout(timer);
const rtt = this._roundTrip(startAt);
this.logger.info({response}, `WsRequestor:request ${url} succeeded in ${rtt}ms`);
this.logger.debug({response}, `WsRequestor:request ${url} succeeded in ${rtt}ms`);
this.stats.histogram('app.hook.ws_response_time', rtt, ['hook_type:app']);
resolve(response);
},
@@ -187,7 +192,7 @@ class WsRequestor extends BaseRequestor {
followRedirects: true,
maxRedirects: 2,
handshakeTimeout,
maxPayload: 8096,
maxPayload: process.env.JAMBONES_WS_MAX_PAYLOAD ? parseInt(process.env.JAMBONES_WS_MAX_PAYLOAD) : 24 * 1024,
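// e.g. JAMBONES_WS_MAX_PAYLOAD=65536 raises the limit to 64KB; unset, it defaults to 24 * 1024 bytes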
};
if (this.username && this.password) opts = {...opts, auth: `${this.username}:${this.password}`};
@@ -286,7 +291,7 @@ class WsRequestor extends BaseRequestor {
const obj = JSON.parse(content);
const {type, msgid, command, call_sid = this.call_sid, queueCommand = false, data} = obj;
this.logger.debug({obj}, 'WsRequestor:request websocket: received');
//this.logger.debug({obj}, 'WsRequestor:request websocket: received');
assert.ok(type, 'type property not supplied');
switch (type) {
@@ -323,7 +328,7 @@ class WsRequestor extends BaseRequestor {
_recvCommand(msgid, command, call_sid, queueCommand, data) {
// TODO: validate command
this.logger.info({msgid, command, call_sid, queueCommand, data}, 'received command');
this.logger.debug({msgid, command, call_sid, queueCommand, data}, 'received command');
this.emit('command', {msgid, command, call_sid, queueCommand, data});
}
}

package-lock.json generated

File diff suppressed because it is too large

@@ -1,6 +1,6 @@
{
"name": "jambonz-feature-server",
"version": "v0.7.5",
"version": "v0.7.8",
"main": "app.js",
"engines": {
"node": ">= 10.16.0"
@@ -16,49 +16,46 @@
"type": "git",
"url": "https://github.com/jambonz/jambonz-feature-server.git"
},
"bugs": {
"url": "https://github.com/jambonz/jambonz-feature-server/issues"
},
"bugs": {},
"scripts": {
"start": "node app",
"test": "NODE_ENV=test JAMBONES_HOSTING=1 HTTP_POOL=1 DRACHTIO_HOST=127.0.0.1 DRACHTIO_PORT=9060 DRACHTIO_SECRET=cymru JAMBONES_MYSQL_HOST=127.0.0.1 JAMBONES_MYSQL_PORT=3360 JAMBONES_MYSQL_USER=jambones_test JAMBONES_MYSQL_PASSWORD=jambones_test JAMBONES_MYSQL_DATABASE=jambones_test JAMBONES_REDIS_HOST=127.0.0.1 JAMBONES_REDIS_PORT=16379 JAMBONES_LOGLEVEL=error ENABLE_METRICS=0 HTTP_PORT=3000 JAMBONES_SBCS=172.38.0.10 JAMBONES_FREESWITCH=127.0.0.1:8022:ClueCon:docker-host JAMBONES_TIME_SERIES_HOST=127.0.0.1 JAMBONES_NETWORK_CIDR=172.38.0.0/16 node test/ ",
"test": "NODE_ENV=test JAMBONES_HOSTING=1 HTTP_POOL=1 DRACHTIO_HOST=127.0.0.1 DRACHTIO_PORT=9060 DRACHTIO_SECRET=cymru JAMBONES_MYSQL_HOST=127.0.0.1 JAMBONES_MYSQL_PORT=3360 JAMBONES_MYSQL_USER=jambones_test JAMBONES_MYSQL_PASSWORD=jambones_test JAMBONES_MYSQL_DATABASE=jambones_test JAMBONES_REDIS_HOST=127.0.0.1 JAMBONES_REDIS_PORT=16379 JAMBONES_LOGLEVEL=error ENABLE_METRICS=0 HTTP_PORT=3000 JAMBONES_SBCS=172.38.0.10 JAMBONES_FREESWITCH=127.0.0.1:8022:JambonzR0ck$:docker-host JAMBONES_TIME_SERIES_HOST=127.0.0.1 JAMBONES_NETWORK_CIDR=172.38.0.0/16 node test/ ",
"coverage": "./node_modules/.bin/nyc --reporter html --report-dir ./coverage npm run test",
"jslint": "eslint app.js lib"
},
"dependencies": {
"@jambonz/db-helpers": "^0.6.18",
"@jambonz/db-helpers": "^0.7.3",
"@jambonz/http-health-check": "^0.0.1",
"@jambonz/realtimedb-helpers": "^0.4.29",
"@jambonz/realtimedb-helpers": "^0.6.3",
"@jambonz/stats-collector": "^0.1.6",
"@jambonz/time-series": "^0.1.12",
"@opentelemetry/api": "^1.1.0",
"@opentelemetry/exporter-jaeger": "^1.3.1",
"@jambonz/time-series": "^0.2.5",
"@opentelemetry/api": "^1.2.0",
"@opentelemetry/exporter-jaeger": "^1.7.0",
"@opentelemetry/exporter-trace-otlp-http": "^0.27.0",
"@opentelemetry/exporter-zipkin": "^1.3.1",
"@opentelemetry/exporter-zipkin": "^1.7.0",
"@opentelemetry/instrumentation": "^0.27.0",
"@opentelemetry/resources": "^1.3.1",
"@opentelemetry/sdk-trace-base": "^1.3.1",
"@opentelemetry/sdk-trace-node": "^1.3.1",
"@opentelemetry/semantic-conventions": "^1.3.1",
"aws-sdk": "^2.1152.0",
"@opentelemetry/resources": "^1.7.0",
"@opentelemetry/sdk-trace-base": "^1.7.0",
"@opentelemetry/sdk-trace-node": "^1.7.0",
"@opentelemetry/semantic-conventions": "^1.7.0",
"aws-sdk": "^2.1233.0",
"bent": "^7.3.12",
"debug": "^4.3.4",
"deepcopy": "^2.1.0",
"drachtio-fsmrf": "^3.0.2",
"drachtio-srf": "^4.5.1",
"express": "^4.18.1",
"helmet": "^5.1.0",
"drachtio-fsmrf": "^3.0.16",
"drachtio-srf": "^4.5.21",
"express": "^4.18.2",
"ip": "^1.1.8",
"moment": "^2.29.3",
"parse-url": "^7.0.2",
"moment": "^2.29.4",
"parse-url": "^8.1.0",
"pino": "^6.14.0",
"sdp-transform": "^2.14.1",
"short-uuid": "^4.2.0",
"to-snake-case": "^1.0.0",
"undici": "^5.8.2",
"uuid": "^8.3.2",
"verify-aws-sns-signature": "^0.0.7",
"ws": "^8.8.0",
"undici": "^5.11.0",
"uuid-random": "^1.3.2",
"verify-aws-sns-signature": "^0.1.0",
"ws": "^8.9.0",
"xml2js": "^0.4.23"
},
"devDependencies": {
@@ -66,7 +63,7 @@
"eslint": "^7.32.0",
"eslint-plugin-promise": "^4.3.1",
"nyc": "^15.1.0",
"tape": "^5.5.3"
"tape": "^5.6.1"
},
"optionalDependencies": {
"bufferutil": "^4.0.6",

@@ -22,11 +22,17 @@ test('creating schema', (t) => {
const google_credential = encrypt(process.env.GCP_JSON_KEY);
const aws_credential = encrypt(JSON.stringify({
access_key_id: process.env.AWS_ACCESS_KEY_ID,
secret_access_key: process.env.AWS_SECRET_ACCESS_KEY
secret_access_key: process.env.AWS_SECRET_ACCESS_KEY,
aws_region: process.env.AWS_REGION
}));
const microsoft_credential = encrypt(JSON.stringify({
region: process.env.MICROSOFT_REGION || 'useast',
api_key: process.env.MICROSOFT_API_KEY || '1234567890'
}));
const cmd = `
UPDATE speech_credentials SET credential='${google_credential}' WHERE vendor='google';
UPDATE speech_credentials SET credential='${aws_credential}' WHERE vendor='aws';
UPDATE speech_credentials SET credential='${microsoft_credential}' WHERE vendor='microsoft';
`;
const path = `${__dirname}/.creds.sql`;
fs.writeFileSync(path, cmd);

@@ -614,7 +614,10 @@ CREATE TABLE `speech_credentials` (
LOCK TABLES `speech_credentials` WRITE;
/*!40000 ALTER TABLE `speech_credentials` DISABLE KEYS */;
INSERT INTO `speech_credentials` VALUES ('2add163c-34f2-45c6-a016-f955d218ffb6',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','google','credential-goes-here',1,1,NULL,'2021-04-03 15:42:10',1,1),('84154212-5c99-4c94-8993-bc2a46288daa',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','aws','credential-goes-here',0,0,NULL,NULL,NULL,NULL);
INSERT INTO `speech_credentials` VALUES
('2add163c-34f2-45c6-a016-f955d218ffb6',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','google','credential-goes-here',1,1,NULL,'2021-04-03 15:42:10',1,1),
('2add347f-34f2-45c6-a016-f955d218ffb6',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','microsoft','credential-goes-here',1,1,NULL,'2021-04-03 15:42:10',1,1),
('84154212-5c99-4c94-8993-bc2a46288daa',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','aws','credential-goes-here',1,1,NULL,NULL,NULL,NULL);
/*!40000 ALTER TABLE `speech_credentials` ENABLE KEYS */;
UNLOCK TABLES;

@@ -57,7 +57,7 @@ services:
condition: service_healthy
freeswitch:
image: drachtio/drachtio-freeswitch-mrf:v1.10.1-full
image: drachtio/drachtio-freeswitch-mrf:0.4.18
restart: always
command: freeswitch --rtp-range-start 20000 --rtp-range-end 20100
environment:
@@ -68,7 +68,7 @@ services:
- /tmp:/tmp
- ./credentials:/opt/credentials
healthcheck:
test: ['CMD', 'fs_cli' ,'-x', '"sofia status"']
test: ['CMD', 'fs_cli' ,'-p', 'JambonzR0ck$$', '-x', '"sofia status"']
timeout: 5s
retries: 15
networks:

@@ -17,7 +17,11 @@ function connect(connectable) {
});
}
test('\'gather\' and \'transcribe\' tests', async(t) => {
test('\'gather\' test - google', async(t) => {
if (!process.env.GCP_JSON_KEY) {
t.pass('skipping google tests');
return t.end();
}
clearModule.all();
const {srf, disconnect} = require('../app');
@@ -41,8 +45,168 @@ test('\'gather\' and \'transcribe\' tests', async(t) => {
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
t.ok(obj.body.speech.alternatives[0].transcript = 'I\'d like to speak to customer support',
'gather: succeeds when using account credentials');
//console.log(JSON.stringify(obj));
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'gather: succeeds when using google credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'gather\' test - default (google)', async(t) => {
if (!process.env.GCP_JSON_KEY) {
t.pass('skipping google tests');
return t.end();
}
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let verbs = [
{
"verb": "gather",
"input": ["speech"],
"timeout": 10,
"actionHook": "/actionHook"
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
//console.log(JSON.stringify(obj));
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase() === 'i\'d like to speak to customer support',
'gather: succeeds when using default (google) credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'gather\' test - microsoft', async(t) => {
if (!process.env.MICROSOFT_REGION || !process.env.MICROSOFT_API_KEY) {
t.pass('skipping microsoft tests');
return t.end();
}
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let verbs = [
{
"verb": "gather",
"input": ["speech"],
"recognizer": {
"vendor": "microsoft",
"hints": ["customer support", "sales", "human resources", "HR"]
},
"timeout": 10,
"actionHook": "/actionHook"
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
//console.log(JSON.stringify(obj));
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'gather: succeeds when using microsoft credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'gather\' test - aws', async(t) => {
if (!process.env.AWS_ACCESS_KEY_ID || !process.env.AWS_SECRET_ACCESS_KEY) {
t.pass('skipping aws tests');
return t.end();
}
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let verbs = [
{
"verb": "gather",
"input": ["speech"],
"recognizer": {
"vendor": "aws",
"hints": ["customer support", "sales", "human resources", "HR"]
},
"timeout": 10,
"actionHook": "/actionHook"
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
//console.log(JSON.stringify(obj));
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'gather: succeeds when using aws credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'gather\' test - deepgram', async(t) => {
if (!process.env.DEEPGRAM_API_KEY ) {
t.pass('skipping deepgram tests');
return t.end();
}
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let verbs = [
{
"verb": "gather",
"input": ["speech"],
"recognizer": {
"vendor": "deepgram",
"hints": ["customer support", "sales", "human resources", "HR"],
"deepgramOptions": {
"apiKey": process.env.DEEPGRAM_API_KEY
}
},
"timeout": 10,
"actionHook": "/actionHook"
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
//console.log(JSON.stringify(obj));
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'gather: succeeds when using deepgram credentials');
disconnect();
} catch (err) {

@@ -5,6 +5,7 @@ require('./account-validation-tests');
require('./webhooks-tests');
require('./say-tests');
require('./gather-tests');
require('./transcribe-tests');
require('./sip-request-tests');
require('./create-call-test');
require('./play-tests');

@@ -156,3 +156,43 @@ test('\'play\' tests multi links in array in conference', async(t) => {
t.error(err);
}
});
test('\'play\' tests with seekOffset and actionHook', async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
const verbs = [
{
verb: 'play',
url: 'silence_stream://5000',
seekOffset: 8000,
timeoutSecs: 2,
actionHook: '/customHook'
}
];
const waitHookVerbs = [];
const from = 'play_action_hook';
provisionCallHook(from, verbs)
provisionCustomHook(from, waitHookVerbs)
// THEN
await sippUac('uac-success-received-bye.xml', '172.38.0.10', from);
t.pass('play: succeeds');
const obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_customHook`)
t.ok(obj.body.reason === "playCompleted", "play: actionHook success received")
t.ok(obj.body.playback_seconds === "2", "playback_seconds: actionHook success received")
t.ok(obj.body.playback_milliseconds === "2048", "playback_milliseconds: actionHook success received")
t.ok(obj.body.playback_last_offset_pos === "16000", "playback_last_offset_pos: actionHook success received")
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});

@@ -41,7 +41,7 @@ obj.sippUac = (file, bindAddress, from='sipp', to='16174000000') => {
if (bindAddress) args.splice(5, 0, '--ip', bindAddress);
console.log(args.join(' '));
//console.log(args.join(' '));
clearOutput();
return new Promise((resolve, reject) => {

test/transcribe-tests.js Normal file

@@ -0,0 +1,169 @@
const test = require('tape');
const { sippUac } = require('./sipp')('test_fs');
const bent = require('bent');
const getJSON = bent('json')
const clearModule = require('clear-module');
const {provisionCallHook} = require('./utils')
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
});
function connect(connectable) {
return new Promise((resolve, reject) => {
connectable.on('connect', () => {
return resolve();
});
});
}
test('\'transcribe\' test - google', async(t) => {
if (!process.env.GCP_JSON_KEY) {
t.pass('skipping google tests');
return t.end();
}
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let verbs = [
{
"verb": "transcribe",
"recognizer": {
"vendor": "google",
"hints": ["customer support", "sales", "human resources", "HR"]
},
"transcriptionHook": "/transcriptionHook"
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'transcribe: succeeds when using google credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'transcribe\' test - microsoft', async(t) => {
if (!process.env.MICROSOFT_REGION || !process.env.MICROSOFT_API_KEY) {
t.pass('skipping microsoft tests');
return t.end();
}
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let verbs = [
{
"verb": "transcribe",
"recognizer": {
"vendor": "microsoft",
"hints": ["customer support", "sales", "human resources", "HR"]
},
"transcriptionHook": "/transcriptionHook"
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'transcribe: succeeds when using microsoft credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'transcribe\' test - aws', async(t) => {
if (!process.env.AWS_ACCESS_KEY_ID || !process.env.AWS_SECRET_ACCESS_KEY) {
t.pass('skipping aws tests');
return t.end();
}
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let verbs = [
{
"verb": "transcribe",
"recognizer": {
"vendor": "aws",
"hints": ["customer support", "sales", "human resources", "HR"]
},
"transcriptionHook": "/transcriptionHook"
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'transcribe: succeeds when using aws credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'transcribe\' test - deepgram', async(t) => {
if (!process.env.DEEPGRAM_API_KEY ) {
t.pass('skipping deepgram tests');
return t.end();
}
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let verbs = [
{
"verb": "transcribe",
"recognizer": {
"vendor": "aws",
"hints": ["customer support", "sales", "human resources", "HR"],
"deepgramOptions": {
"apiKey": process.env.DEEPGRAM_API_KEY
}
},
"transcriptionHook": "/transcriptionHook"
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'transcribe: succeeds when using deepgram credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});

@@ -39,7 +39,16 @@ app.post('/callStatus', (req, res) => {
return res.sendStatus(200);
});
/*
* action Hook
* transcriptionHook
*/
app.post('/transcriptionHook', (req, res) => {
console.log({payload: req.body}, 'POST /transcriptionHook');
let key = req.body.from + "_actionHook"
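// note: stored under the _actionHook key so tests can read it back from /lastRequest/<from>_actionHook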
addRequestToMap(key, req, hook_mapping);
return res.json([{"verb": "hangup"}]);
});
/*
* actionHook
*/
app.post('/actionHook', (req, res) => {
console.log({payload: req.body}, 'POST /actionHook');

File diff suppressed because it is too large

@@ -9,6 +9,6 @@
"author": "Dave Horton",
"license": "MIT",
"dependencies": {
"express": "^4.17.1"
"express": "^4.18.2"
}
}

@@ -22,7 +22,7 @@ module.exports = (serviceName) => {
});
let exporter;
if (process.env.OTEL_EXPORTER_JAEGER_AGENT_HOST) {
if (process.env.OTEL_EXPORTER_JAEGER_AGENT_HOST || process.env.OTEL_EXPORTER_JAEGER_ENDPOINT) {
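// e.g. (illustrative) OTEL_EXPORTER_JAEGER_ENDPOINT=http://jaeger:14268/api/traces selects the
// Jaeger exporter with HTTP transport to the collector rather than UDP to the agent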
exporter = new JaegerExporter();
}
else if (process.env.OTEL_EXPORTER_ZIPKIN_URL) {