mirror of
https://github.com/jambonz/jambonz-feature-server.git
synced 2026-01-25 02:07:56 +00:00
Compare commits
96 Commits
feat/ws_lc
...
patch/em
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a6dcc9170b | ||
|
|
34693b7e77 | ||
|
|
2439e225a0 | ||
|
|
76c2be1d07 | ||
|
|
4b4807e4cf | ||
|
|
9a3c731389 | ||
|
|
edd8f20642 | ||
|
|
ee24041cba | ||
|
|
83f7abcd89 | ||
|
|
c9194168d2 | ||
|
|
83191487cf | ||
|
|
65ef4e6d64 | ||
|
|
ddb4719220 | ||
|
|
f514a65f63 | ||
|
|
5ccea65b7f | ||
|
|
8672152873 | ||
|
|
425b88f930 | ||
|
|
111976bea5 | ||
|
|
ec6d7b3f42 | ||
|
|
5e1b826da4 | ||
|
|
be9c3406c1 | ||
|
|
2f3ef1654a | ||
|
|
0baa080a1e | ||
|
|
f5cbd26c9f | ||
|
|
d9fd82fa60 | ||
|
|
76a3aa7f42 | ||
|
|
cafe149bdf | ||
|
|
9969e39e7e | ||
|
|
8eea212df2 | ||
|
|
e8e356ea3a | ||
|
|
c5e19bf775 | ||
|
|
498dd64025 | ||
|
|
24b6d2464b | ||
|
|
cd5421120f | ||
|
|
d7c3a4a632 | ||
|
|
c53ad89154 | ||
|
|
10b98630d3 | ||
|
|
d132bdb92b | ||
|
|
6be3fd9b64 | ||
|
|
844b0cb05d | ||
|
|
c0b56d4fc6 | ||
|
|
d27de284e7 | ||
|
|
5e97847a2f | ||
|
|
17c379df47 | ||
|
|
e7bc0b0737 | ||
|
|
dfe623e78a | ||
|
|
56b8f0623b | ||
|
|
7bcbab5b74 | ||
|
|
44e6a3513d | ||
|
|
fad16144b9 | ||
|
|
6523a861c0 | ||
|
|
cff67f5e4c | ||
|
|
c77bd84e0e | ||
|
|
3cd7a619ad | ||
|
|
59cf02bd04 | ||
|
|
a18d55e9ab | ||
|
|
d474b9d604 | ||
|
|
8d2b60c284 | ||
|
|
9cf9d4f587 | ||
|
|
bd002ede48 | ||
|
|
1a2aa91973 | ||
|
|
e322b7d8d3 | ||
|
|
7da11df88e | ||
|
|
09cf1345f6 | ||
|
|
2595f527ff | ||
|
|
1d77c0cd20 | ||
|
|
9eab81268b | ||
|
|
ecf3d140d6 | ||
|
|
4a52be9171 | ||
|
|
9b722ae36d | ||
|
|
370b046fac | ||
|
|
fca391c32e | ||
|
|
043860c4a3 | ||
|
|
a021ee3112 | ||
|
|
8999c85a71 | ||
|
|
72147a8110 | ||
|
|
93d0e41e31 | ||
|
|
5b1d8a8ff3 | ||
|
|
ec58232b61 | ||
|
|
65c241bcd1 | ||
|
|
75b6f89e0c | ||
|
|
b80d39d205 | ||
|
|
40f70e3531 | ||
|
|
1914b88af9 | ||
|
|
c946a5d14d | ||
|
|
878578fe0f | ||
|
|
9b3be6c0b9 | ||
|
|
4ae661daea | ||
|
|
dbd3b59901 | ||
|
|
06b066a3f2 | ||
|
|
fc3655c9bd | ||
|
|
1b5f801830 | ||
|
|
d0ebe3f99f | ||
|
|
51a379998f | ||
|
|
c2ae42a456 | ||
|
|
c187685054 |
6
.github/workflows/build.yml
vendored
6
.github/workflows/build.yml
vendored
@@ -6,10 +6,10 @@ jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-node@v3
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 18
|
||||
node-version: 20
|
||||
- run: npm ci
|
||||
- run: npm run jslint
|
||||
- run: docker pull drachtio/sipp
|
||||
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -42,3 +42,4 @@ ecosystem.config.js
|
||||
test/credentials/*.json
|
||||
run-tests.sh
|
||||
run-coverage.sh
|
||||
.vscode
|
||||
17
.vscode/launch.json
vendored
17
.vscode/launch.json
vendored
@@ -1,17 +0,0 @@
|
||||
{
|
||||
// Use IntelliSense to learn about possible attributes.
|
||||
// Hover to view descriptions of existing attributes.
|
||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"type": "node",
|
||||
"request": "launch",
|
||||
"name": "Launch Program",
|
||||
"program": "${workspaceFolder}/test/index.js",
|
||||
"env": {
|
||||
"NODE_ENV": "test"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
2
LICENSE
2
LICENSE
@@ -1,6 +1,6 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2021 Drachtio Communications Services, LLC
|
||||
Copyright (c) 2018-2024 FirstFive8, Inc.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
|
||||
@@ -9,7 +9,112 @@
|
||||
"can't take your call",
|
||||
"will get back to you",
|
||||
"I'll get back to you",
|
||||
"we are unable"
|
||||
"we are unable",
|
||||
"Unable to take your call now",
|
||||
"I'll reply soon",
|
||||
"I'll call back",
|
||||
"I'll reach out to you as soon as possible",
|
||||
"Leave a message",
|
||||
"Away from phone",
|
||||
"Not available now",
|
||||
"I'll return call",
|
||||
"On another call",
|
||||
"Currently on another call",
|
||||
"I will return call later",
|
||||
"Busy please leave message",
|
||||
"Message will be returned promptly",
|
||||
"Currently unavailable to answer",
|
||||
"Planning to return your call soon",
|
||||
"Apologies for missing your call",
|
||||
"Not by the phone at the moment",
|
||||
"Expecting to return your call",
|
||||
"Currently not accessible",
|
||||
"Intend to call back",
|
||||
"Appreciate your patience!",
|
||||
"Engaged in another conversation",
|
||||
"I Will respond promptly",
|
||||
"Kindly leave a message",
|
||||
"Currently occupied leave a message",
|
||||
"Unfortunately unable to answer right now",
|
||||
"Occupied at the moment",
|
||||
"Not present leave a message",
|
||||
"Regrettably unavailable kindly leave a message",
|
||||
"Will ensure a prompt response to your message",
|
||||
"Currently engaged",
|
||||
"Will return your call at the earliest opportunity",
|
||||
"Your message will receive my prompt attention",
|
||||
"I'll respond as soon as I can",
|
||||
"Your message is important please leave it after the beep",
|
||||
"Away from the phone at the moment",
|
||||
"Unable to answer right now",
|
||||
"Engaged in another task",
|
||||
"Not by the phone presently",
|
||||
"I'll respond at my earliest convenience",
|
||||
"Away from the phone momentarily",
|
||||
"I'll return your call shortly",
|
||||
"Currently not able to answer",
|
||||
"Your message is important please leave it after the tone",
|
||||
"I'm unable to take your call right now",
|
||||
"Please leave your message for me",
|
||||
"I'll get back to you soon",
|
||||
"Your call has been missed",
|
||||
"Please leave a detailed message for me to respond to",
|
||||
"Leave a message I'll make sure to respond",
|
||||
"Feel free to leave a message",
|
||||
"Your call is important to me",
|
||||
"I'll get back to you shortly",
|
||||
"Your message will be attended to promptly",
|
||||
"Not available at the moment",
|
||||
"I'll be sure to get back to you",
|
||||
"I'll call you back soon",
|
||||
"I'll ensure a prompt response",
|
||||
"Sorry for the inconvenience",
|
||||
"I'll return your call",
|
||||
"I'll make sure to get back to you",
|
||||
"I'll call you back shortly",
|
||||
"I'll return your call as soon as possible",
|
||||
"Apologies for the inconvenience leave your message",
|
||||
"Your call is appreciated",
|
||||
"I'm unavailable to answer",
|
||||
"I'm currently away",
|
||||
"I'll return your call as soon as I can",
|
||||
"I'm away from the phone",
|
||||
"I'm currently unavailable to take your call",
|
||||
"Sorry for missing your call",
|
||||
"I'll ensure it receives my immediate attention",
|
||||
"I'm away from the phone momentarily",
|
||||
"I'll reach out to you shortly",
|
||||
"Apologies for the inconvenience",
|
||||
"Currently occupied",
|
||||
"Unable to answer your call at the moment",
|
||||
"I'll make sure to follow up with you",
|
||||
"Sorry for not being available",
|
||||
"I'll reach out to you as soon as I can",
|
||||
"I'm currently engaged",
|
||||
"I'm currently busy",
|
||||
"I'm currently unavailable",
|
||||
"I'll respond to you at my earliest convenience",
|
||||
"Your message is appreciated",
|
||||
"I'll get back to you promptly",
|
||||
"I'll get back to you without delay",
|
||||
"Currently away from the phone",
|
||||
"I'll return your call at my earliest opportunity",
|
||||
"Sorry for the missed call",
|
||||
"I'll make sure to address your concerns",
|
||||
"Please provide your details for a callback",
|
||||
"I'll make every effort to respond promptly",
|
||||
"I'll ensure it's attended to promptly",
|
||||
"Away from the phone temporarily",
|
||||
"I'll get back to you as soon as I return",
|
||||
"Currently not in a position to answer your call",
|
||||
"Your call cannot be answered at the moment",
|
||||
"I'll ensure to respond as soon as I'm able",
|
||||
"Your call is important please leave a message",
|
||||
"Unable to answer right now please leave your message",
|
||||
"Currently not accessible intending to return your call",
|
||||
"I'll respond promptly to your message",
|
||||
"leave a memo",
|
||||
"please leave a memo"
|
||||
],
|
||||
"es-ES": [
|
||||
"le pasamos la llamada",
|
||||
|
||||
@@ -130,7 +130,7 @@ const JAMBONZ_RECORD_WS_PASSWORD = process.env.JAMBONZ_RECORD_WS_PASSWORD || pro
|
||||
const JAMBONZ_DISABLE_DIAL_PAI_HEADER = process.env.JAMBONZ_DISABLE_DIAL_PAI_HEADER || false;
|
||||
const JAMBONES_DISABLE_DIRECT_P2P_CALL = process.env.JAMBONES_DISABLE_DIRECT_P2P_CALL || false;
|
||||
|
||||
const JAMBONES_EAGERLY_PRE_CACHE_AUDIO = process.env.JAMBONES_EAGERLY_PRE_CACHE_AUDIO;
|
||||
const JAMBONES_EAGERLY_PRE_CACHE_AUDIO = parseInt(process.env.JAMBONES_EAGERLY_PRE_CACHE_AUDIO, 10) || 0;
|
||||
|
||||
const JAMBONES_USE_FREESWITCH_TIMER_FD = process.env.JAMBONES_USE_FREESWITCH_TIMER_FD;
|
||||
|
||||
|
||||
@@ -30,6 +30,20 @@ const appsMap = {
|
||||
}
|
||||
]
|
||||
}]
|
||||
},
|
||||
conference: {
|
||||
// Dummy hook to follow later feature server logic.
|
||||
call_hook: {
|
||||
url: 'https://jambonz.org',
|
||||
method: 'GET'
|
||||
},
|
||||
account_sid: '',
|
||||
app_json: [{
|
||||
verb: 'conference',
|
||||
name: '',
|
||||
beep: false,
|
||||
startConferenceOnEnter: true
|
||||
}]
|
||||
}
|
||||
};
|
||||
|
||||
@@ -38,6 +52,7 @@ const createJambonzApp = (type, {account_sid, name, caller_id}) => {
|
||||
app.account_sid = account_sid;
|
||||
switch (type) {
|
||||
case 'queue':
|
||||
case 'conference':
|
||||
app.app_json[0].name = name;
|
||||
break;
|
||||
case 'user':
|
||||
|
||||
@@ -75,13 +75,19 @@ module.exports = function(srf, logger) {
|
||||
req.locals.application_sid = application_sid;
|
||||
}
|
||||
// check for call to queue
|
||||
if (uri.user?.startsWith('queue-') && req.locals.originatingUser && clientDb?.allow_direct_queue_calling) {
|
||||
else if (uri.user?.startsWith('queue-') && req.locals.originatingUser && clientDb?.allow_direct_queue_calling) {
|
||||
const queue_name = uri.user.match(/queue-(.*)/)[1];
|
||||
logger.debug(`got Queue from Request URI header: ${queue_name}`);
|
||||
req.locals.queue_name = queue_name;
|
||||
}
|
||||
// check for call to conference
|
||||
else if (uri.user?.startsWith('conference-') && req.locals.originatingUser && clientDb?.allow_direct_app_calling) {
|
||||
const conference_id = uri.user.match(/conference-(.*)/)[1];
|
||||
logger.debug(`got Conference from Request URI header: ${conference_id}`);
|
||||
req.locals.conference_id = conference_id;
|
||||
}
|
||||
// check for call to registered user
|
||||
if (!JAMBONES_DISABLE_DIRECT_P2P_CALL && req.locals.originatingUser && clientDb?.allow_direct_user_calling) {
|
||||
else if (!JAMBONES_DISABLE_DIRECT_P2P_CALL && req.locals.originatingUser && clientDb?.allow_direct_user_calling) {
|
||||
const arr = /^(.*)@(.*)/.exec(req.locals.originatingUser);
|
||||
if (arr) {
|
||||
const sipRealm = arr[2];
|
||||
@@ -237,6 +243,9 @@ module.exports = function(srf, logger) {
|
||||
logger.debug(`calling to registered user ${req.locals.called_user}, generating dial app`);
|
||||
app = createJambonzApp('user',
|
||||
{account_sid, name: req.locals.called_user, caller_id: req.locals.callingNumber});
|
||||
} else if (req.locals.conference_id) {
|
||||
logger.debug(`calling to conference ${req.locals.conference_id}, generating conference app`);
|
||||
app = createJambonzApp('conference', {account_sid, name: req.locals.conference_id});
|
||||
} else if (req.locals.application_sid) {
|
||||
app = await lookupAppBySid(req.locals.application_sid);
|
||||
} else if (req.locals.originatingUser) {
|
||||
@@ -343,6 +352,15 @@ module.exports = function(srf, logger) {
|
||||
direction: CallDirection.Inbound,
|
||||
traceId: rootSpan.traceId
|
||||
});
|
||||
// if transferred call contains callInfo, let update original data to newly created callInfo in this instance.
|
||||
if (app.transferredCall && app.callInfo) {
|
||||
req.locals.callInfo.callerName = app.callInfo.callerName;
|
||||
req.locals.callInfo.from = app.callInfo.from;
|
||||
req.locals.callInfo.to = app.callInfo.to;
|
||||
req.locals.callInfo.originatingSipIp = app.callInfo.originatingSipIp;
|
||||
req.locals.callInfo.originatingSipTrunkName = app.callInfo.originatingSipTrunkName;
|
||||
delete app.callInfo;
|
||||
}
|
||||
next();
|
||||
} catch (err) {
|
||||
span.end();
|
||||
|
||||
@@ -53,16 +53,24 @@ class AdultingCallSession extends CallSession {
|
||||
}
|
||||
|
||||
_callerHungup() {
|
||||
this._hangup('caller');
|
||||
}
|
||||
|
||||
_jambonzHangup() {
|
||||
this._hangup();
|
||||
}
|
||||
|
||||
_hangup(terminatedBy = 'jambonz') {
|
||||
if (this.dlg.connectTime) {
|
||||
const duration = moment().diff(this.dlg.connectTime, 'seconds');
|
||||
this.rootSpan.setAttributes({'call.termination': 'hangup by caller'});
|
||||
this.callInfo.callTerminationBy = 'caller';
|
||||
this.rootSpan.setAttributes({'call.termination': `hangup by ${terminatedBy}`});
|
||||
this.callInfo.callTerminationBy = terminatedBy;
|
||||
this.emit('callStatusChange', {
|
||||
callStatus: CallStatus.Completed,
|
||||
duration
|
||||
});
|
||||
}
|
||||
this.logger.info('InboundCallSession: caller hung up');
|
||||
this.logger.info(`InboundCallSession: ${terminatedBy} hung up`);
|
||||
this._callReleased();
|
||||
this.req.removeAllListeners('cancel');
|
||||
}
|
||||
|
||||
@@ -7,16 +7,19 @@ const {
|
||||
TaskName,
|
||||
KillReason,
|
||||
RecordState,
|
||||
AllowedSipRecVerbs
|
||||
AllowedSipRecVerbs,
|
||||
AllowedConfirmSessionVerbs
|
||||
} = require('../utils/constants');
|
||||
const moment = require('moment');
|
||||
const assert = require('assert');
|
||||
const sessionTracker = require('./session-tracker');
|
||||
const makeTask = require('../tasks/make_task');
|
||||
const parseDecibels = require('../utils/parse-decibels');
|
||||
const { normalizeJambones } = require('@jambonz/verb-specifications');
|
||||
const listTaskNames = require('../utils/summarize-tasks');
|
||||
const HttpRequestor = require('../utils/http-requestor');
|
||||
const WsRequestor = require('../utils/ws-requestor');
|
||||
const ActionHookDelayProcessor = require('../utils/action-hook-delay');
|
||||
const {
|
||||
JAMBONES_INJECT_CONTENT,
|
||||
JAMBONES_EAGERLY_PRE_CACHE_AUDIO,
|
||||
@@ -110,13 +113,19 @@ class CallSession extends Emitter {
|
||||
this.requestor.removeAllListeners();
|
||||
this.application.requestor = newRequestor;
|
||||
this.requestor.on('command', this._onCommand.bind(this));
|
||||
this.logger.debug(`CallSession: ${this.callSid} listener count ${this.requestor.listenerCount('command')}`);
|
||||
this.requestor.on('connection-dropped', this._onWsConnectionDropped.bind(this));
|
||||
this.requestor.on('handover', handover.bind(this));
|
||||
this.requestor.on('reconnect-error', this._onSessionReconnectError.bind(this));
|
||||
};
|
||||
|
||||
this.requestor.on('command', this._onCommand.bind(this));
|
||||
this.requestor.on('connection-dropped', this._onWsConnectionDropped.bind(this));
|
||||
this.requestor.on('handover', handover.bind(this));
|
||||
if (!this.isConfirmCallSession) {
|
||||
this.requestor.on('command', this._onCommand.bind(this));
|
||||
this.logger.debug(`CallSession: ${this.callSid} listener count ${this.requestor.listenerCount('command')}`);
|
||||
this.requestor.on('connection-dropped', this._onWsConnectionDropped.bind(this));
|
||||
this.requestor.on('handover', handover.bind(this));
|
||||
this.requestor.on('reconnect-error', this._onSessionReconnectError.bind(this));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -187,6 +196,24 @@ class CallSession extends Emitter {
|
||||
this._synthesizer = synth;
|
||||
}
|
||||
|
||||
/**
|
||||
* ASR TTS fallback
|
||||
*/
|
||||
get hasFallbackAsr() {
|
||||
return this._hasFallbackAsr || false;
|
||||
}
|
||||
|
||||
set hasFallbackAsr(i) {
|
||||
this._hasFallbackAsr = i;
|
||||
}
|
||||
|
||||
get hasFallbackTts() {
|
||||
return this._hasFallbackTts || false;
|
||||
}
|
||||
|
||||
set hasFallbackTts(i) {
|
||||
this._hasFallbackTts = i;
|
||||
}
|
||||
/**
|
||||
* default vendor to use for speech synthesis if not provided in the app
|
||||
*/
|
||||
@@ -312,6 +339,17 @@ class CallSession extends Emitter {
|
||||
this.application.fallback_speech_recognizer_language = language;
|
||||
}
|
||||
|
||||
/**
|
||||
* Vad
|
||||
*/
|
||||
get vad() {
|
||||
return this._vad;
|
||||
}
|
||||
|
||||
set vad(v) {
|
||||
this._vad = v;
|
||||
}
|
||||
|
||||
/**
|
||||
* indicates whether the call currently in progress
|
||||
*/
|
||||
@@ -449,6 +487,113 @@ class CallSession extends Emitter {
|
||||
this._sipRequestWithinDialogHook = url;
|
||||
}
|
||||
|
||||
// Bot Delay (actionHook delayed)
|
||||
get actionHookDelayEnabled() {
|
||||
return this._actionHookDelayEnabled;
|
||||
}
|
||||
|
||||
set actionHookDelayEnabled(e) {
|
||||
this._actionHookDelayEnabled = e;
|
||||
}
|
||||
|
||||
get actionHookNoResponseTimeout() {
|
||||
return this._actionHookNoResponseTimeout;
|
||||
}
|
||||
|
||||
set actionHookNoResponseTimeout(e) {
|
||||
this._actionHookNoResponseTimeout = e;
|
||||
}
|
||||
|
||||
get actionHookNoResponseGiveUpTimeout() {
|
||||
return this._actionHookNoResponseGiveUpTimeout;
|
||||
}
|
||||
|
||||
set actionHookNoResponseGiveUpTimeout(e) {
|
||||
this._actionHookNoResponseGiveUpTimeout = e;
|
||||
}
|
||||
|
||||
get actionHookDelayRetries() {
|
||||
return this._actionHookDelayRetries;
|
||||
}
|
||||
|
||||
set actionHookDelayRetries(e) {
|
||||
this._actionHookDelayRetries = e;
|
||||
}
|
||||
|
||||
// Getter/setter for current tts vendor
|
||||
get currentTtsVendor() {
|
||||
return this._currentTtsVendor;
|
||||
}
|
||||
|
||||
set currentTtsVendor(vendor) {
|
||||
this._currentTtsVendor = vendor;
|
||||
}
|
||||
|
||||
get actionHookDelayProcessor() {
|
||||
return this._actionHookDelayProcessor;
|
||||
}
|
||||
|
||||
set actionHookDelayProperties(opts) {
|
||||
if (this._actionHookDelayProcessor) {
|
||||
this._actionHookDelayProcessor.stop();
|
||||
if (!this._actionHookDelayProcessor.init(opts)) {
|
||||
this._actionHookDelayProcessor.removeAllListeners();
|
||||
this._actionHookDelayProcessor = null;
|
||||
}
|
||||
}
|
||||
else {
|
||||
try {
|
||||
this._actionHookDelayProcessor = new ActionHookDelayProcessor(this.logger, opts, this, this.ep);
|
||||
this._actionHookDelayProcessor.on('giveup', () => {
|
||||
this.logger.info('CallSession: ActionHookDelayProcessor: giveup event - hanging up call');
|
||||
this._jambonzHangup();
|
||||
if (this.wakeupResolver) {
|
||||
this.logger.debug('CallSession: Giveup timer expired - waking up');
|
||||
this.wakeupResolver({reason: 'noResponseGiveUp'});
|
||||
this.wakeupResolver = null;
|
||||
}
|
||||
});
|
||||
} catch (err) {
|
||||
this.logger.error({err}, 'CallSession: Error creating ActionHookDelayProcessor');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async clearOrRestoreActionHookDelayProcessor() {
|
||||
if (this._actionHookDelayProcessor) {
|
||||
await this._actionHookDelayProcessor.stop();
|
||||
if (!this.popActionHookDelayProperties()) {
|
||||
//this.logger.debug('CallSession:clearOrRestoreActionHookDelayProcessor - ahd settings');
|
||||
//await this.clearActionHookDelayProcessor();
|
||||
}
|
||||
else {
|
||||
this.logger.debug('CallSession:clearOrRestoreActionHookDelayProcessor - restore ahd after gather override');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async clearActionHookDelayProcessor() {
|
||||
if (this._actionHookDelayProcessor) {
|
||||
await this._actionHookDelayProcessor.stop();
|
||||
this._actionHookDelayProcessor.removeAllListeners();
|
||||
this._actionHookDelayProcessor = null;
|
||||
}
|
||||
}
|
||||
|
||||
stashActionHookDelayProperties() {
|
||||
this._storedActionHookDelayProperties = this._actionHookDelayProcessor.properties;
|
||||
}
|
||||
|
||||
popActionHookDelayProperties() {
|
||||
if (this._storedActionHookDelayProperties) {
|
||||
this._actionHookDelayProcessor.init(this._storedActionHookDelayProperties);
|
||||
this._storedActionHookDelayProperties = null;
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
hasGlobalSttPunctuation() {
|
||||
return this._globalSttPunctuation !== undefined;
|
||||
}
|
||||
@@ -469,6 +614,18 @@ class CallSession extends Emitter {
|
||||
this.speechSynthesisVoice = this._origSynthesizerSettings.voice;
|
||||
}
|
||||
|
||||
enableFillerNoise(opts) {
|
||||
this._fillerNoise = opts;
|
||||
}
|
||||
|
||||
disableFillerNoise() {
|
||||
this._fillerNoise = null;
|
||||
}
|
||||
|
||||
get fillerNoise() {
|
||||
return this._fillerNoise;
|
||||
}
|
||||
|
||||
async notifyRecordOptions(opts) {
|
||||
const {action} = opts;
|
||||
this.logger.debug({opts}, 'CallSession:notifyRecordOptions');
|
||||
@@ -580,8 +737,10 @@ class CallSession extends Emitter {
|
||||
const res = await this.dlg.request({
|
||||
method: 'INFO',
|
||||
headers: {
|
||||
'X-Reason': 'pauseCallRecording'
|
||||
}
|
||||
'X-Reason': 'pauseCallRecording',
|
||||
...(this.recordOptions.headers && {'Content-Type': 'application/json'})
|
||||
},
|
||||
...(this.recordOptions.headers && {body: JSON.stringify(this.recordOptions.headers) + '\n'})
|
||||
});
|
||||
if (res.status === 200) {
|
||||
this._recordState = RecordState.RecordingPaused;
|
||||
@@ -602,8 +761,10 @@ class CallSession extends Emitter {
|
||||
const res = await this.dlg.request({
|
||||
method: 'INFO',
|
||||
headers: {
|
||||
'X-Reason': 'resumeCallRecording'
|
||||
}
|
||||
'X-Reason': 'resumeCallRecording',
|
||||
...(this.recordOptions.headers && {'Content-Type': 'application/json'})
|
||||
},
|
||||
...(this.recordOptions.headers && {body: JSON.stringify(this.recordOptions.headers) + '\n'})
|
||||
});
|
||||
if (res.status === 200) {
|
||||
this._recordState = RecordState.RecordingOn;
|
||||
@@ -638,7 +799,8 @@ class CallSession extends Emitter {
|
||||
}
|
||||
task = await this.backgroundTaskManager.newTask('bargeIn', gather);
|
||||
task.sticky = autoEnable;
|
||||
task.once('bargeIn-done', () => {
|
||||
// listen to the bargein-done from background manager
|
||||
this.backgroundTaskManager.on('bargeIn-done', () => {
|
||||
if (this.requestor instanceof WsRequestor) {
|
||||
try {
|
||||
this.kill(true);
|
||||
@@ -651,6 +813,8 @@ class CallSession extends Emitter {
|
||||
}
|
||||
}
|
||||
async disableBotMode() {
|
||||
const task = this.backgroundTaskManager.getTask('bargeIn');
|
||||
if (task) task.sticky = false;
|
||||
this.backgroundTaskManager.stop('bargeIn');
|
||||
}
|
||||
|
||||
@@ -718,6 +882,7 @@ class CallSession extends Emitter {
|
||||
speech_credential_sid: credential.speech_credential_sid,
|
||||
accessKeyId: credential.access_key_id,
|
||||
secretAccessKey: credential.secret_access_key,
|
||||
roleArn: credential.role_arn,
|
||||
region: credential.aws_region || AWS_REGION
|
||||
};
|
||||
}
|
||||
@@ -789,6 +954,19 @@ class CallSession extends Emitter {
|
||||
model_id: credential.model_id,
|
||||
options: credential.options
|
||||
};
|
||||
} else if ('playht' === vendor) {
|
||||
return {
|
||||
api_key: credential.api_key,
|
||||
user_id: credential.user_id,
|
||||
voice_engine: credential.voice_engine,
|
||||
options: credential.options
|
||||
};
|
||||
} else if ('rimelabs' === vendor) {
|
||||
return {
|
||||
api_key: credential.api_key,
|
||||
model_id: credential.model_id,
|
||||
options: credential.options
|
||||
};
|
||||
} else if ('assemblyai' === vendor) {
|
||||
return {
|
||||
speech_credential_sid: credential.speech_credential_sid,
|
||||
@@ -799,6 +977,12 @@ class CallSession extends Emitter {
|
||||
api_key: credential.api_key,
|
||||
model_id: credential.model_id
|
||||
};
|
||||
} else if ('verbio' === vendor) {
|
||||
return {
|
||||
client_id: credential.client_id,
|
||||
client_secret: credential.client_secret,
|
||||
engine_version: credential.engine_version
|
||||
};
|
||||
} else if (vendor.startsWith('custom:')) {
|
||||
return {
|
||||
speech_credential_sid: credential.speech_credential_sid,
|
||||
@@ -881,6 +1065,15 @@ class CallSession extends Emitter {
|
||||
) {
|
||||
try {
|
||||
await this._awaitCommandsOrHangup();
|
||||
|
||||
await this.clearOrRestoreActionHookDelayProcessor();
|
||||
|
||||
//TODO: remove filler noise code and simply create as action hook delay
|
||||
if (this._isPlayingFillerNoise) {
|
||||
this._isPlayingFillerNoise = false;
|
||||
this.ep.api('uuid_break', this.ep.uuid)
|
||||
.catch((err) => this.logger.info(err, 'Error killing filler noise'));
|
||||
}
|
||||
if (this.callGone) break;
|
||||
} catch (err) {
|
||||
this.logger.info(err, 'CallSession:exec - error waiting for new commands');
|
||||
@@ -892,7 +1085,6 @@ class CallSession extends Emitter {
|
||||
// all done - cleanup
|
||||
this.logger.info('CallSession:exec all tasks complete');
|
||||
this._stopping = true;
|
||||
this.disableBotMode();
|
||||
this._onTasksDone();
|
||||
this._clearResources();
|
||||
|
||||
@@ -1107,6 +1299,8 @@ class CallSession extends Emitter {
|
||||
this.currentTask.kill(this);
|
||||
}
|
||||
this._endVerbHookSpan();
|
||||
|
||||
await this.clearOrRestoreActionHookDelayProcessor();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1162,7 +1356,15 @@ class CallSession extends Emitter {
|
||||
this.callInfo.customerData = tag;
|
||||
}
|
||||
|
||||
async _lccMuteStatus(callSid, mute) {
|
||||
async _lccConferenceParticipantAction(opts) {
|
||||
const task = this.currentTask;
|
||||
if (!task || TaskName.Conference !== task.name || !this.isInConference) {
|
||||
return this.logger.info('CallSession:_lccConferenceParticipantAction - invalid cmd, call is not in conference');
|
||||
}
|
||||
task.doConferenceParticipantAction(this, opts);
|
||||
}
|
||||
|
||||
async _lccMuteStatus(mute, callSid) {
|
||||
// this whole thing requires us to be in a Dial or Conference verb
|
||||
const task = this.currentTask;
|
||||
if (!task || ![TaskName.Dial, TaskName.Conference].includes(task.name)) {
|
||||
@@ -1294,6 +1496,78 @@ Duration=${duration} `
|
||||
task.whisper(tasks, callSid).catch((err) => this.logger.error(err, 'CallSession:_lccWhisper'));
|
||||
}
|
||||
|
||||
async _lccConfig(opts) {
|
||||
this.logger.debug({opts}, 'CallSession:_lccConfig');
|
||||
const t = normalizeJambones(this.logger, [
|
||||
{
|
||||
verb: 'config',
|
||||
...opts
|
||||
}
|
||||
])
|
||||
.map((tdata) => makeTask(this.logger, tdata));
|
||||
|
||||
const task = t[0];
|
||||
|
||||
const {span, ctx} = this.rootSpan.startChildSpan(`verb:${task.summary}`);
|
||||
span.setAttributes({'verb.summary': task.summary});
|
||||
task.span = span;
|
||||
task.ctx = ctx;
|
||||
try {
|
||||
await task.exec(this, {ep: this.ep});
|
||||
} catch (err) {
|
||||
this.logger.error(err, 'CallSession:_lccConfig');
|
||||
}
|
||||
task.span.end();
|
||||
}
|
||||
|
||||
async _lccDub(opts, callSid) {
|
||||
this.logger.debug({opts}, `CallSession:_lccDub on call_sid ${callSid}`);
|
||||
const t = normalizeJambones(this.logger, [
|
||||
{
|
||||
verb: 'dub',
|
||||
...opts
|
||||
}
|
||||
])
|
||||
.map((tdata) => makeTask(this.logger, tdata));
|
||||
|
||||
const task = t[0];
|
||||
const ep = this.currentTask?.name === TaskName.Dial && callSid === this.currentTask?.callSid ?
|
||||
this.currentTask.ep :
|
||||
this.ep;
|
||||
|
||||
const {span, ctx} = this.rootSpan.startChildSpan(`verb:${task.summary}`);
|
||||
span.setAttributes({'verb.summary': task.summary});
|
||||
task.span = span;
|
||||
task.ctx = ctx;
|
||||
try {
|
||||
await task.exec(this, {ep});
|
||||
} catch (err) {
|
||||
this.logger.error(err, 'CallSession:_lccDub');
|
||||
}
|
||||
task.span.end();
|
||||
}
|
||||
|
||||
|
||||
async _lccBoostAudioSignal(opts, callSid) {
|
||||
const ep = this.currentTask?.name === TaskName.Dial && callSid === this.currentTask?.callSid ?
|
||||
this.currentTask.ep :
|
||||
this.ep;
|
||||
const db = parseDecibels(opts);
|
||||
this.logger.info(`_lccBoostAudioSignal: boosting audio signal by ${db} dB`);
|
||||
const args = [ep.uuid, 'setGain', db];
|
||||
const response = await ep.api('uuid_dub', args);
|
||||
this.logger.info({response}, '_lccBoostAudioSignal: response from freeswitch');
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* perform call hangup by jambonz
|
||||
*/
|
||||
|
||||
async hangup() {
|
||||
return this._callerHungup();
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* perform live call control
|
||||
@@ -1316,7 +1590,7 @@ Duration=${duration} `
|
||||
await this._lccTranscribeStatus(opts);
|
||||
}
|
||||
else if (opts.mute_status) {
|
||||
await this._lccMuteStatus(callSid, opts.mute_status === 'mute');
|
||||
await this._lccMuteStatus(opts.mute_status === 'mute', callSid);
|
||||
}
|
||||
else if (opts.conf_hold_status) {
|
||||
await this._lccConfHoldStatus(opts);
|
||||
@@ -1336,6 +1610,15 @@ Duration=${duration} `
|
||||
else if (opts.tag) {
|
||||
return this._lccTag(opts);
|
||||
}
|
||||
else if (opts.conferenceParticipantAction) {
|
||||
return this._lccConferenceParticipantAction(opts.conferenceParticipantAction);
|
||||
}
|
||||
else if (opts.dub) {
|
||||
return this._lccDub(opts);
|
||||
}
|
||||
else if (opts.boostAudioSignal) {
|
||||
return this._lccBoostAudioSignal(opts, callSid);
|
||||
}
|
||||
|
||||
// whisper may be the only thing we are asked to do, or it may that
|
||||
// we are doing a whisper after having muted, paused recording etc..
|
||||
@@ -1367,6 +1650,19 @@ Duration=${duration} `
|
||||
tasks = pruned;
|
||||
}
|
||||
}
|
||||
else if (this.isConfirmCallSession) {
|
||||
const pruned = tasks.filter((t) => AllowedConfirmSessionVerbs.includes(t.name));
|
||||
if (0 === pruned.length) {
|
||||
this.logger.info({tasks},
|
||||
'CallSession:replaceApplication - filtering verbs allowed on an confirmSession call');
|
||||
return;
|
||||
}
|
||||
if (pruned.length < tasks.length) {
|
||||
this.logger.info(
|
||||
'CallSession:replaceApplication - removing verbs that are not allowed for confirmSession call');
|
||||
tasks = pruned;
|
||||
}
|
||||
}
|
||||
this.tasks = tasks;
|
||||
this.taskIdx = 0;
|
||||
this.stackIdx++;
|
||||
@@ -1376,6 +1672,11 @@ Duration=${duration} `
|
||||
this.currentTask.kill(this, KillReason.Replaced);
|
||||
this.currentTask = null;
|
||||
}
|
||||
else if (this.wakeupResolver) {
|
||||
this.logger.debug('CallSession:replaceApplication - waking up');
|
||||
this.wakeupResolver({reason: 'new tasks'});
|
||||
this.wakeupResolver = null;
|
||||
}
|
||||
}
|
||||
|
||||
kill(onBackgroundGatherBargein = false) {
|
||||
@@ -1400,7 +1701,8 @@ Duration=${duration} `
|
||||
this.logger.info('CallSession:kill - found bargein disabled in the stack, clearing to that point');
|
||||
break;
|
||||
}
|
||||
this.tasks.shift();
|
||||
const rem = this.tasks.shift();
|
||||
this.logger.debug(`CallSession:kill - clearing task ${rem.summary}`);
|
||||
}
|
||||
}
|
||||
else this.tasks = [];
|
||||
@@ -1408,7 +1710,23 @@ Duration=${duration} `
|
||||
}
|
||||
|
||||
_preCacheAudio(newTasks) {
|
||||
for (const task of newTasks) {
|
||||
/**
|
||||
* only precache audio for the a queued say if we have one or more non-Config verbs
|
||||
* ahead of it in the queue. This is because the Config verb returns immediately
|
||||
* and would not give us enough time to generate the audio. The point of precaching
|
||||
* is to take advantage of getting the audio in advance of being needed, so we need
|
||||
* to be confident we have some time before the say verb is executed, and the Config
|
||||
* does not give us that confidence since it returns immediately.
|
||||
*/
|
||||
const haveQueuedNonConfig = this.tasks.findIndex((t) => t.name !== TaskName.Config) !== -1;
|
||||
let tasks = haveQueuedNonConfig ? newTasks : [];
|
||||
if (!haveQueuedNonConfig) {
|
||||
const idxFirstNotConfig = newTasks.findIndex((t) => t.name !== TaskName.Config);
|
||||
if (-1 === idxFirstNotConfig) return;
|
||||
tasks = newTasks.slice(idxFirstNotConfig + 1);
|
||||
}
|
||||
|
||||
for (const task of tasks) {
|
||||
if (task.name === TaskName.Config && task.hasSynthesizer) {
|
||||
/* if they change synthesizer settings don't try to precache */
|
||||
break;
|
||||
@@ -1480,7 +1798,23 @@ Duration=${duration} `
|
||||
}, 'CallSession:_injectTasks - completed');
|
||||
}
|
||||
|
||||
_onCommand({msgid, command, call_sid, queueCommand, data}) {
|
||||
async _onSessionReconnectError(err) {
|
||||
const {writeAlerts, AlertType} = this.srf.locals;
|
||||
const sid = this.accountInfo.account.account_sid;
|
||||
this.logger.info({err}, `_onSessionReconnectError for account ${sid}`);
|
||||
try {
|
||||
await writeAlerts({
|
||||
alert_type: AlertType.WEBHOOK_CONNECTION_FAILURE,
|
||||
account_sid: this.accountSid,
|
||||
detail: `Session:reconnect error ${err}`
|
||||
});
|
||||
} catch (error) {
|
||||
this.logger.error({error}, 'Error writing WEBHOOK_CONNECTION_FAILURE alert');
|
||||
}
|
||||
this._jambonzHangup();
|
||||
}
|
||||
|
||||
async _onCommand({msgid, command, call_sid, queueCommand, data}) {
|
||||
this.logger.info({msgid, command, queueCommand, data}, 'CallSession:_onCommand - received command');
|
||||
let resolution;
|
||||
switch (command) {
|
||||
@@ -1505,6 +1839,9 @@ Duration=${duration} `
|
||||
}
|
||||
resolution = {reason: 'received command, new tasks', queue: queueCommand, command};
|
||||
resolution.command = listTaskNames(t);
|
||||
|
||||
// clear all delay action hook timeout if there is
|
||||
await this.clearOrRestoreActionHookDelayProcessor();
|
||||
}
|
||||
else this._lccCallHook(data);
|
||||
break;
|
||||
@@ -1513,16 +1850,24 @@ Duration=${duration} `
|
||||
this._lccCallStatus(data);
|
||||
break;
|
||||
|
||||
case 'config':
|
||||
this._lccConfig(data, call_sid);
|
||||
break;
|
||||
|
||||
case 'dial':
|
||||
this._lccCallDial(data);
|
||||
break;
|
||||
|
||||
case 'dub':
|
||||
this._lccDub(data, call_sid);
|
||||
break;
|
||||
|
||||
case 'record':
|
||||
this.notifyRecordOptions(data);
|
||||
break;
|
||||
|
||||
case 'mute:status':
|
||||
this._lccMuteStatus(call_sid, data);
|
||||
this._lccMuteStatus(data, call_sid);
|
||||
break;
|
||||
|
||||
case 'conf:mute-status':
|
||||
@@ -1533,6 +1878,10 @@ Duration=${duration} `
|
||||
this._lccConfHoldStatus(data);
|
||||
break;
|
||||
|
||||
case 'conf:participant-action':
|
||||
this._lccConferenceParticipantAction(data);
|
||||
break;
|
||||
|
||||
case 'listen:status':
|
||||
this._lccListenStatus(data);
|
||||
break;
|
||||
@@ -1559,6 +1908,13 @@ Duration=${duration} `
|
||||
});
|
||||
break;
|
||||
|
||||
case 'boostAudioSignal':
|
||||
this._lccBoostAudioSignal(data, call_sid)
|
||||
.catch((err) => {
|
||||
this.logger.info({err, data}, 'CallSession:_onCommand - error boosting audio signal');
|
||||
});
|
||||
break;
|
||||
|
||||
default:
|
||||
this.logger.info(`CallSession:_onCommand - invalid command ${command}`);
|
||||
}
|
||||
@@ -1638,7 +1994,7 @@ Duration=${duration} `
|
||||
});
|
||||
//ep.cs = this;
|
||||
this.ep = ep;
|
||||
this.logger.debug(`allocated endpoint ${ep.uuid}`);
|
||||
this.logger.info(`allocated endpoint ${ep.uuid}`);
|
||||
|
||||
this._configMsEndpoint();
|
||||
|
||||
@@ -1649,6 +2005,11 @@ Duration=${duration} `
|
||||
if (this.direction === CallDirection.Inbound) {
|
||||
if (task.earlyMedia && !this.req.finalResponseSent) {
|
||||
this.res.send(183, {body: ep.local.sdp});
|
||||
this._notifyCallStatusChange({
|
||||
callStatus: CallStatus.EarlyMedia,
|
||||
sipStatus: 183,
|
||||
sipReason: 'Early Media'
|
||||
});
|
||||
return {ep};
|
||||
}
|
||||
this.logger.debug('propogating answer');
|
||||
@@ -1745,16 +2106,26 @@ Duration=${duration} `
|
||||
this.rootSpan && this.rootSpan.end();
|
||||
// close all background tasks
|
||||
this.backgroundTaskManager.stopAll();
|
||||
this.clearOrRestoreActionHookDelayProcessor().catch((err) => {});
|
||||
}
|
||||
|
||||
/**
|
||||
* called when the caller has hung up. Provided for subclasses to override
|
||||
* in order to apply logic at this point if needed.
|
||||
* return true if success fallback, return false if not
|
||||
*/
|
||||
_callerHungup() {
|
||||
assert(false, 'subclass responsibility to override this method');
|
||||
}
|
||||
|
||||
/**
|
||||
* called when the jambonz has hung up. Provided for subclasses to override
|
||||
* in order to apply logic at this point if needed.
|
||||
*/
|
||||
_jambonzHangup() {
|
||||
assert(false, 'subclass responsibility to override this method');
|
||||
}
|
||||
|
||||
/**
|
||||
* get a media server to use for this call
|
||||
*/
|
||||
@@ -1802,6 +2173,10 @@ Duration=${duration} `
|
||||
}
|
||||
this.logger.debug(`CallSession:propagateAnswer - answered callSid ${this.callSid}`);
|
||||
}
|
||||
else {
|
||||
this.logger.debug('CallSession:propagateAnswer - call already answered - re-anchor media with a reinvite');
|
||||
await this.dlg.modify(this.ep.local.sdp);
|
||||
}
|
||||
}
|
||||
|
||||
async _onRequestWithinDialog(req, res) {
|
||||
@@ -2139,6 +2514,30 @@ Duration=${duration} `
|
||||
return new Promise((resolve, reject) => {
|
||||
this.logger.info('_awaitCommandsOrHangup - waiting...');
|
||||
this.wakeupResolver = resolve;
|
||||
|
||||
if (this._actionHookDelayProcessor) {
|
||||
this._actionHookDelayProcessor.start();
|
||||
}
|
||||
|
||||
/**
|
||||
* TODO: filler noise can be handled as an ActionHookDelayProcessor -
|
||||
* it's just one specific scenario for action hook delay -
|
||||
* remove the code below and simply implement filler noise as an action hook delay
|
||||
*/
|
||||
|
||||
/* start filler noise if configured while we wait for new commands */
|
||||
if (this.fillerNoise?.url && this.ep?.connected && !this.ep2) {
|
||||
this.logger.debug('CallSession:_awaitCommandsOrHangup - playing filler noise');
|
||||
this._isPlayingFillerNoise = true;
|
||||
this.ep.play(this.fillerNoise.url);
|
||||
this.ep.once('playback-start', (evt) => {
|
||||
if (evt.file === this.fillerNoise.url && !this._isPlayingFillerNoise) {
|
||||
this.logger.info('CallSession:_awaitCommandsOrHangup - filler noise started');
|
||||
this.ep.api('uuid_break', this.ep.uuid)
|
||||
.catch((err) => this.logger.info(err, 'Error killing filler noise'));
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -34,6 +34,9 @@ class ConfirmCallSession extends CallSession {
|
||||
_callerHungup() {
|
||||
}
|
||||
|
||||
_jambonzHangup() {
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
@@ -35,11 +35,21 @@ class InboundCallSession extends CallSession {
|
||||
_onCancel() {
|
||||
this.rootSpan.setAttributes({'call.termination': 'caller abandoned'});
|
||||
this.callInfo.callTerminationBy = 'caller';
|
||||
const wasEarlyMedia = this.callInfo.callStatus === 'early-media';
|
||||
this._notifyCallStatusChange({
|
||||
callStatus: CallStatus.NoAnswer,
|
||||
sipStatus: 487,
|
||||
sipReason: 'Request Terminated'
|
||||
});
|
||||
if (wasEarlyMedia) {
|
||||
const duration = 0; // Set duration to 0 for early media termination, required param
|
||||
this._notifyCallStatusChange({
|
||||
callStatus: CallStatus.Completed,
|
||||
sipStatus: 487,
|
||||
sipReason: 'Call Terminated During Early Media',
|
||||
duration: duration
|
||||
});
|
||||
}
|
||||
this._callReleased();
|
||||
}
|
||||
|
||||
@@ -67,15 +77,27 @@ class InboundCallSession extends CallSession {
|
||||
* This is invoked when the caller hangs up, in order to calculate the call duration.
|
||||
*/
|
||||
_callerHungup() {
|
||||
this._hangup('caller');
|
||||
}
|
||||
|
||||
_jambonzHangup() {
|
||||
this.dlg?.destroy();
|
||||
}
|
||||
|
||||
_hangup(terminatedBy = 'jambonz') {
|
||||
if (this.dlg === null) {
|
||||
this.logger.info('InboundCallSession:_hangup - race condition, dlg cleared by app hangup');
|
||||
return;
|
||||
}
|
||||
this.logger.info(`InboundCallSession: ${terminatedBy} hung up`);
|
||||
assert(this.dlg.connectTime);
|
||||
const duration = moment().diff(this.dlg.connectTime, 'seconds');
|
||||
this.rootSpan.setAttributes({'call.termination': 'hangup by caller'});
|
||||
this.callInfo.callTerminationBy = 'caller';
|
||||
this.rootSpan.setAttributes({'call.termination': `hangup by ${terminatedBy}`});
|
||||
this.callInfo.callTerminationBy = terminatedBy;
|
||||
this.emit('callStatusChange', {
|
||||
callStatus: CallStatus.Completed,
|
||||
duration
|
||||
});
|
||||
this.logger.info('InboundCallSession: caller hung up');
|
||||
this._callReleased();
|
||||
this.req.removeAllListeners('cancel');
|
||||
}
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
const CallSession = require('./call-session');
|
||||
const {CallStatus} = require('../utils/constants');
|
||||
const moment = require('moment');
|
||||
const {parseUri} = require('drachtio-srf');
|
||||
const { normalizeJambones } = require('@jambonz/verb-specifications');
|
||||
const makeTask = require('../tasks/make_task');
|
||||
|
||||
/**
|
||||
* @classdesc Subclass of CallSession. This represents a CallSession that is
|
||||
@@ -42,20 +45,81 @@ class RestCallSession extends CallSession {
|
||||
setDialog(dlg) {
|
||||
this.dlg = dlg;
|
||||
dlg.on('destroy', this._callerHungup.bind(this));
|
||||
dlg.on('refer', this._onRefer.bind(this));
|
||||
this.wrapDialog(dlg);
|
||||
}
|
||||
|
||||
/**
|
||||
* global referHook
|
||||
*/
|
||||
|
||||
set referHook(hook) {
|
||||
this._referHook = hook;
|
||||
}
|
||||
|
||||
/**
|
||||
* This is invoked when the called party sends REFER to Jambonz.
|
||||
*/
|
||||
async _onRefer(req, res) {
|
||||
if (this._referHook) {
|
||||
try {
|
||||
const to = parseUri(req.getParsedHeader('Refer-To').uri);
|
||||
const by = parseUri(req.getParsedHeader('Referred-By').uri);
|
||||
const b3 = this.b3;
|
||||
const httpHeaders = b3 && {b3};
|
||||
const json = await this.requestor.request('verb:hook', this._referHook, {
|
||||
...(this.callInfo.toJSON()),
|
||||
refer_details: {
|
||||
sip_refer_to: req.get('Refer-To'),
|
||||
sip_referred_by: req.get('Referred-By'),
|
||||
sip_user_agent: req.get('User-Agent'),
|
||||
refer_to_user: to.scheme === 'tel' ? to.number : to.user,
|
||||
referred_by_user: by.scheme === 'tel' ? by.number : by.user,
|
||||
referring_call_sid: this.callSid,
|
||||
referred_call_sid: null,
|
||||
}
|
||||
}, httpHeaders);
|
||||
|
||||
if (json && Array.isArray(json)) {
|
||||
const tasks = normalizeJambones(this.logger, json).map((tdata) => makeTask(this.logger, tdata));
|
||||
if (tasks && tasks.length > 0) {
|
||||
this.logger.info('RestCallSession:handleRefer received REFER, get new tasks');
|
||||
this.replaceApplication(tasks);
|
||||
if (this.wakeupResolver) {
|
||||
this.wakeupResolver({reason: 'RestCallSession: referHook new taks'});
|
||||
this.wakeupResolver = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
res.send(202);
|
||||
this.logger.info('RestCallSession:handleRefer - sent 202 Accepted');
|
||||
} catch (err) {
|
||||
this.logger.error({err}, 'RestCallSession:handleRefer - error while asking referHook');
|
||||
res.send(err.statusCode || 501);
|
||||
}
|
||||
} else {
|
||||
res.send(501);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* This is invoked when the called party hangs up, in order to calculate the call duration.
|
||||
*/
|
||||
_callerHungup() {
|
||||
this._hangup('caller');
|
||||
}
|
||||
|
||||
_jambonzHangup() {
|
||||
this._hangup();
|
||||
}
|
||||
|
||||
_hangup(terminatedBy = 'jambonz') {
|
||||
if (this.restDialTask) {
|
||||
this.restDialTask.turnOffAmd();
|
||||
}
|
||||
this.callInfo.callTerminationBy = 'caller';
|
||||
this.callInfo.callTerminationBy = terminatedBy;
|
||||
const duration = moment().diff(this.dlg.connectTime, 'seconds');
|
||||
this.emit('callStatusChange', {callStatus: CallStatus.Completed, duration});
|
||||
this.logger.debug('RestCallSession: called party hung up');
|
||||
this.logger.debug(`RestCallSession: called party hung up by ${terminatedBy}`);
|
||||
this._callReleased();
|
||||
}
|
||||
|
||||
|
||||
22
lib/tasks/answer.js
Normal file
22
lib/tasks/answer.js
Normal file
@@ -0,0 +1,22 @@
|
||||
const Task = require('./task');
|
||||
const {TaskName, TaskPreconditions} = require('../utils/constants');
|
||||
|
||||
/**
|
||||
* Answer the call.
|
||||
* Note: This is rarely used, as the call is typically answered automatically when required by the app,
|
||||
* but it can be useful to force an answer before a pause in some cases
|
||||
*/
|
||||
class TaskAnswer extends Task {
|
||||
constructor(logger, opts) {
|
||||
super(logger, opts);
|
||||
this.preconditions = TaskPreconditions.Endpoint;
|
||||
}
|
||||
|
||||
get name() { return TaskName.Answer; }
|
||||
|
||||
async exec(cs) {
|
||||
super.exec(cs);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = TaskAnswer;
|
||||
@@ -6,6 +6,7 @@ const { normalizeJambones } = require('@jambonz/verb-specifications');
|
||||
const makeTask = require('./make_task');
|
||||
const bent = require('bent');
|
||||
const assert = require('assert');
|
||||
const HttpRequestor = require('../utils/http-requestor');
|
||||
const WAIT = 'wait';
|
||||
const JOIN = 'join';
|
||||
const START = 'start';
|
||||
@@ -60,6 +61,8 @@ class Conference extends Task {
|
||||
|
||||
this.emitter = new Emitter();
|
||||
this.results = {};
|
||||
this.coaching = [];
|
||||
this.speakOnlyTo = this.data.speakOnlyTo;
|
||||
|
||||
// transferred from another server in order to bridge to a local caller?
|
||||
if (this.data._ && this.data._.connectTime) {
|
||||
@@ -348,16 +351,29 @@ class Conference extends Task {
|
||||
Object.assign(opts, {flags: {
|
||||
...(this.endConferenceOnExit && {endconf: true}),
|
||||
...(this.startConferenceOnEnter && {moderator: true}),
|
||||
...(this.joinMuted && {joinMuted: true}),
|
||||
//https://developer.signalwire.com/freeswitch/FreeSWITCH-Explained/Modules/mod_conference_3965534/
|
||||
// mute | Enter conference muted
|
||||
...((this.joinMuted || this.speakOnlyTo) && {mute: true}),
|
||||
}});
|
||||
|
||||
/**
|
||||
* Note on the above: if we are joining in "coaching" mode (ie only going to heard by a subset of participants)
|
||||
* then we join muted temporarily, and then unmute ourselves once we have identified the subset of participants
|
||||
* to whom we will be speaking.
|
||||
*/
|
||||
}
|
||||
|
||||
try {
|
||||
const {memberId, confUuid} = await this.ep.join(this.confName, opts);
|
||||
this.logger.debug({memberId, confUuid}, `Conference:_joinConference: successfully joined ${this.confName}`);
|
||||
this.memberId = memberId;
|
||||
this.memberId = parseInt(memberId, 10);
|
||||
this.confUuid = confUuid;
|
||||
|
||||
// set a tag for this member, if provided
|
||||
if (this.data.memberTag) {
|
||||
this.setMemberTag(this.data.memberTag);
|
||||
}
|
||||
|
||||
cs.setConferenceDetails(memberId, this.confName, confUuid);
|
||||
const response = await this.ep.api('conference', [this.confName, 'get', 'count']);
|
||||
if (response.body && /\d+/.test(response.body)) this.participantCount = parseInt(response.body);
|
||||
@@ -384,6 +400,9 @@ class Conference extends Task {
|
||||
.catch((err) => {});
|
||||
}
|
||||
|
||||
if (this.speakOnlyTo) {
|
||||
this.setCoachMode(this.speakOnlyTo);
|
||||
}
|
||||
} catch (err) {
|
||||
this.logger.error(err, `Failed to join conference ${this.confName}`);
|
||||
throw err;
|
||||
@@ -428,7 +447,15 @@ class Conference extends Task {
|
||||
}
|
||||
}
|
||||
|
||||
async doConferenceHold(cs, opts) {
|
||||
doConferenceMute(cs, opts) {
|
||||
assert (cs.isInConference);
|
||||
|
||||
const mute = opts.conf_mute_status === 'mute';
|
||||
this.ep.api(`conference ${this.confName} ${mute ? 'mute' : 'unmute'} ${this.memberId}`)
|
||||
.catch((err) => this.logger.info({err}, 'Error muting or unmuting participant'));
|
||||
}
|
||||
|
||||
doConferenceHold(cs, opts) {
|
||||
assert (cs.isInConference);
|
||||
|
||||
const {conf_hold_status, wait_hook} = opts;
|
||||
@@ -465,6 +492,43 @@ class Conference extends Task {
|
||||
}
|
||||
}
|
||||
|
||||
async doConferenceParticipantAction(cs, opts) {
|
||||
const {action, tag} = opts;
|
||||
|
||||
switch (action) {
|
||||
case 'tag':
|
||||
await this.setMemberTag(tag);
|
||||
break;
|
||||
case 'untag':
|
||||
await this.clearMemberTag();
|
||||
break;
|
||||
case 'coach':
|
||||
await this.setCoachMode(tag);
|
||||
break;
|
||||
case 'uncoach':
|
||||
await this.clearCoachMode();
|
||||
break;
|
||||
case 'hold':
|
||||
this.doConferenceHold(cs, {conf_hold_status: 'hold'});
|
||||
break;
|
||||
case 'unhold':
|
||||
this.doConferenceHold(cs, {conf_hold_status: 'unhold'});
|
||||
break;
|
||||
case 'mute':
|
||||
this.doConferenceMute(cs, {conf_mute_status: 'mute'});
|
||||
break;
|
||||
case 'unmute':
|
||||
this.doConferenceMute(cs, {conf_mute_status: 'unmute'});
|
||||
break;
|
||||
case 'kick':
|
||||
this.kickMember(cs);
|
||||
break;
|
||||
default:
|
||||
this.logger.info(`Conference:doConferenceParticipantAction - unhandled action ${action}`);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
async _doWaitHookWhileOnHold(cs, dlg, wait_hook) {
|
||||
do {
|
||||
try {
|
||||
@@ -511,7 +575,7 @@ class Conference extends Task {
|
||||
_normalizeHook(cs, hook) {
|
||||
if (typeof hook === 'object') return hook;
|
||||
const url = hook.startsWith('/') ?
|
||||
`${cs.application.requestor.baseUrl}${hook}` :
|
||||
`${cs.application.requestor instanceof HttpRequestor ? cs.application.requestor.baseUrl : ''}${hook}` :
|
||||
hook;
|
||||
|
||||
return { url } ;
|
||||
@@ -530,7 +594,7 @@ class Conference extends Task {
|
||||
const response = await this.ep.api('conference', [this.confName, 'get', 'count']);
|
||||
if (response.body && confNoMatch(response.body)) this.participantCount = 0;
|
||||
else if (response.body && /^\d+$/.test(response.body)) this.participantCount = parseInt(response.body) - 1;
|
||||
this.logger.debug({response}, `Conference:_doFinalMemberCheck conference count ${this.participantCount}`);
|
||||
this.logger.debug(`Conference:_doFinalMemberCheck conference count ${this.participantCount}`);
|
||||
} catch (err) {
|
||||
this.logger.info({err}, 'Conference:_doFinalMemberCheck error retrieving count (we were probably kicked');
|
||||
}
|
||||
@@ -642,11 +706,19 @@ class Conference extends Task {
|
||||
}
|
||||
|
||||
// conference event handlers
|
||||
_onAddMember(logger, cs, evt) {
|
||||
const memberId = parseInt(evt.getHeader('Member-ID')) ;
|
||||
if (this.speakOnlyTo) {
|
||||
logger.debug(`Conference:_onAddMember - member ${memberId} added to ${this.confName}, updating coaching mode`);
|
||||
this.setCoachMode(this.speakOnlyTo).catch(() => {});
|
||||
}
|
||||
else logger.debug(`Conference:_onAddMember - member ${memberId} added to conference ${this.confName}`);
|
||||
}
|
||||
_onDelMember(logger, cs, evt) {
|
||||
const memberId = parseInt(evt.getHeader('Member-ID')) ;
|
||||
this.participantCount = parseInt(evt.getHeader('Conference-Size'));
|
||||
if (memberId === this.memberId) {
|
||||
this.logger.info(`Conference:_onDelMember - I was dropped from conference ${this.confName}, task is complete`);
|
||||
logger.info(`Conference:_onDelMember - I was dropped from conference ${this.confName}, task is complete`);
|
||||
this.replaceEndpointAndEnd(cs);
|
||||
}
|
||||
}
|
||||
@@ -675,6 +747,99 @@ class Conference extends Task {
|
||||
}
|
||||
}
|
||||
|
||||
_onTag(logger, cs, evt) {
|
||||
const memberId = parseInt(evt.getHeader('Member-ID')) ;
|
||||
const tag = evt.getHeader('Tag') || '';
|
||||
if (memberId !== this.memberId && this.speakOnlyTo) {
|
||||
logger.info(`Conference:_onTag - member ${memberId} set tag to '${tag }'; updating coach mode accordingly`);
|
||||
this.setCoachMode(this.speakOnlyTo).catch(() => {});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the conference to "coaching" mode, where the audio of the participant is only heard
|
||||
* by a subset of the participants in the conference.
|
||||
* We do this by first getting all of the members who do *not* have this tag, and then
|
||||
* we configure this members audio to not be sent to them.
|
||||
* @param {string} speakOnlyTo - tag of the members who should receive our audio
|
||||
*
|
||||
* N.B.: this feature requires jambonz patches to freeswitch mod_conference
|
||||
*/
|
||||
async setCoachMode(speakOnlyTo) {
|
||||
this.speakOnlyTo = speakOnlyTo;
|
||||
if (!this.memberId) {
|
||||
this.logger.info('Conference:_setCoachMode: no member id yet');
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const members = (await this.ep.getNonMatchingConfParticipants(this.confName, speakOnlyTo))
|
||||
.filter((m) => m !== this.memberId);
|
||||
if (members.length === 0) {
|
||||
this.logger.info({members}, 'Conference:_setCoachMode: all participants have the tag, so all will hear me');
|
||||
if (this.coaching.length) {
|
||||
await this.ep.api('conference', [this.confName, 'relate', this.memberId, this.coaching.join(','), 'clear']);
|
||||
this.coaching = [];
|
||||
}
|
||||
}
|
||||
else {
|
||||
const memberList = members.join(',');
|
||||
this.logger.info(`Conference:_setCoachMode: my audio will NOT be sent to ${memberList}`);
|
||||
await this.ep.api('conference', [this.confName, 'relate', this.memberId, memberList, 'nospeak']);
|
||||
this.coaching = members;
|
||||
}
|
||||
} catch (err) {
|
||||
this.logger.error({err, speakOnlyTo}, '_setCoachMode: Error');
|
||||
}
|
||||
}
|
||||
|
||||
async clearCoachMode() {
|
||||
if (!this.memberId) return;
|
||||
try {
|
||||
if (this.coaching.length === 0) {
|
||||
this.logger.info('Conference:_clearCoachMode: no coaching mode to clear');
|
||||
}
|
||||
else {
|
||||
const memberList = this.coaching.join(',');
|
||||
this.logger.info(`Conference:_clearCoachMode: now sending my audio to all, including ${memberList}`);
|
||||
await this.ep.api('conference', [this.confName, 'relate', this.memberId, memberList, 'clear']);
|
||||
}
|
||||
this.speakOnlyTo = null;
|
||||
this.coaching = [];
|
||||
} catch (err) {
|
||||
this.logger.error({err}, '_clearCoachMode: Error');
|
||||
}
|
||||
}
|
||||
|
||||
async setMemberTag(tag) {
|
||||
try {
|
||||
await this.ep.api('conference', [this.confName, 'tag', this.memberId, tag]);
|
||||
this.logger.info(`Conference:setMemberTag: set tag for ${this.memberId} to ${tag}`);
|
||||
this.memberTag = tag;
|
||||
} catch (err) {
|
||||
this.logger.error({err}, `Error setting tag for ${this.memberId} to ${tag}`);
|
||||
}
|
||||
}
|
||||
|
||||
async clearMemberTag() {
|
||||
try {
|
||||
await this.ep.api('conference', [this.confName, 'tag', this.memberId]);
|
||||
this.logger.info(`Conference:setMemberTag: clearing tag for ${this.memberId}`);
|
||||
this.memberTag = null;
|
||||
} catch (err) {
|
||||
this.logger.error({err}, `Error clearing tag for ${this.memberId}`);
|
||||
}
|
||||
}
|
||||
|
||||
async kickMember(cs) {
|
||||
assert(cs.isInConference);
|
||||
try {
|
||||
await this.ep.api('conference', [this.confName, 'kick', this.memberId]);
|
||||
this.logger.info(`Conference:kickMember: kick ${this.memberId} out of conference ${this.confName}`);
|
||||
} catch (err) {
|
||||
this.logger.error({err}, `Error kicking member out of conference for ${this.memberId}`);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
module.exports = Conference;
|
||||
|
||||
@@ -1,16 +1,22 @@
|
||||
const Task = require('./task');
|
||||
const {TaskName, TaskPreconditions} = require('../utils/constants');
|
||||
const parseDecibels = require('../utils/parse-decibels');
|
||||
|
||||
class TaskConfig extends Task {
|
||||
constructor(logger, opts) {
|
||||
super(logger, opts);
|
||||
|
||||
[
|
||||
'synthesizer',
|
||||
'recognizer',
|
||||
'bargeIn',
|
||||
'record',
|
||||
'listen',
|
||||
'transcribe'
|
||||
'transcribe',
|
||||
'fillerNoise',
|
||||
'actionHookDelayAction',
|
||||
'boostAudioSignal',
|
||||
'vad'
|
||||
].forEach((k) => this[k] = this.data[k] || {});
|
||||
|
||||
if ('notifyEvents' in this.data) {
|
||||
@@ -49,6 +55,7 @@ class TaskConfig extends Task {
|
||||
this.record?.action ||
|
||||
this.listen?.url ||
|
||||
this.data.amd ||
|
||||
'boostAudioSignal' in this.data ||
|
||||
this.transcribe?.enable) ?
|
||||
TaskPreconditions.Endpoint :
|
||||
TaskPreconditions.None;
|
||||
@@ -63,6 +70,9 @@ class TaskConfig extends Task {
|
||||
get hasRecording() { return Object.keys(this.record).length; }
|
||||
get hasListen() { return Object.keys(this.listen).length; }
|
||||
get hasTranscribe() { return Object.keys(this.transcribe).length; }
|
||||
get hasDub() { return Object.keys(this.dub).length; }
|
||||
get hasVad() { return Object.keys(this.vad).length; }
|
||||
get hasFillerNoise() { return Object.keys(this.fillerNoise).length; }
|
||||
|
||||
get summary() {
|
||||
const phrase = [];
|
||||
@@ -88,9 +98,11 @@ class TaskConfig extends Task {
|
||||
if (this.hasTranscribe) {
|
||||
phrase.push(this.transcribe.enable ? `transcribe ${this.transcribe.transcriptionHook}` : 'stop transcribe');
|
||||
}
|
||||
if (this.hasFillerNoise) phrase.push(`fillerNoise ${this.fillerNoise.enable ? 'on' : 'off'}`);
|
||||
if (this.data.amd) phrase.push('enable amd');
|
||||
if (this.notifyEvents) phrase.push(`event notification ${this.notifyEvents ? 'on' : 'off'}`);
|
||||
if (this.onHoldMusic) phrase.push(`onHoldMusic: ${this.onHoldMusic}`);
|
||||
if ('boostAudioSignal' in this.data) phrase.push(`setGain ${this.data.boostAudioSignal}`);
|
||||
return `${this.name}{${phrase.join(',')}}`;
|
||||
}
|
||||
|
||||
@@ -129,9 +141,8 @@ class TaskConfig extends Task {
|
||||
cs.speechSynthesisVendor = this.synthesizer.vendor !== 'default'
|
||||
? this.synthesizer.vendor
|
||||
: cs.speechSynthesisVendor;
|
||||
cs.speechSynthesisLabel = this.synthesizer.label !== 'default'
|
||||
? this.synthesizer.label
|
||||
: cs.speechSynthesisLabel;
|
||||
cs.speechSynthesisLabel = this.synthesizer.label === 'default'
|
||||
? cs.speechSynthesisLabel : this.synthesizer.label;
|
||||
cs.speechSynthesisLanguage = this.synthesizer.language !== 'default'
|
||||
? this.synthesizer.language
|
||||
: cs.speechSynthesisLanguage;
|
||||
@@ -143,15 +154,16 @@ class TaskConfig extends Task {
|
||||
cs.fallbackSpeechSynthesisVendor = this.synthesizer.fallbackVendor !== 'default'
|
||||
? this.synthesizer.fallbackVendor
|
||||
: cs.fallbackSpeechSynthesisVendor;
|
||||
cs.fallbackSpeechSynthesisLabel = this.synthesizer.fallbackLabel !== 'default'
|
||||
? this.synthesizer.fallbackLabel
|
||||
: cs.fallbackSpeechSynthesisLabel;
|
||||
cs.fallbackSpeechSynthesisLabel = this.synthesizer.fallbackLabel === 'default'
|
||||
? cs.fallbackSpeechSynthesisLabel : this.synthesizer.fallbackLabel;
|
||||
cs.fallbackSpeechSynthesisLanguage = this.synthesizer.fallbackLanguage !== 'default'
|
||||
? this.synthesizer.fallbackLanguage
|
||||
: cs.fallbackSpeechSynthesisLanguage;
|
||||
cs.fallbackSpeechSynthesisVoice = this.synthesizer.fallbackVoice !== 'default'
|
||||
? this.synthesizer.fallbackVoice
|
||||
: cs.fallbackSpeechSynthesisVoice;
|
||||
// new vendor is set, reset fallback vendor
|
||||
cs.hasFallbackTts = false;
|
||||
this.logger.info({synthesizer: this.synthesizer}, 'Config: updated synthesizer');
|
||||
}
|
||||
if (this.hasRecognizer) {
|
||||
@@ -159,9 +171,8 @@ class TaskConfig extends Task {
|
||||
cs.speechRecognizerVendor = this.recognizer.vendor !== 'default'
|
||||
? this.recognizer.vendor
|
||||
: cs.speechRecognizerVendor;
|
||||
cs.speechRecognizerLabel = this.recognizer.label !== 'default'
|
||||
? this.recognizer.label
|
||||
: cs.speechRecognizerLabel;
|
||||
cs.speechRecognizerLabel = this.recognizer.label === 'default'
|
||||
? cs.speechRecognizerLabel : this.recognizer.label;
|
||||
cs.speechRecognizerLanguage = this.recognizer.language !== 'default'
|
||||
? this.recognizer.language
|
||||
: cs.speechRecognizerLanguage;
|
||||
@@ -170,9 +181,9 @@ class TaskConfig extends Task {
|
||||
cs.fallbackSpeechRecognizerVendor = this.recognizer.fallbackVendor !== 'default'
|
||||
? this.recognizer.fallbackVendor
|
||||
: cs.fallbackSpeechRecognizerVendor;
|
||||
cs.fallbackSpeechRecognizerLabel = this.recognizer.fallbackLabel !== 'default'
|
||||
? this.recognizer.fallbackLabel
|
||||
: cs.fallbackSpeechRecognizerLabel;
|
||||
cs.fallbackSpeechRecognizerLabel = this.recognizer.fallbackLabel === 'default' ?
|
||||
cs.fallbackSpeechRecognizerLabel :
|
||||
this.recognizer.fallbackLabel;
|
||||
cs.fallbackSpeechRecognizerLanguage = this.recognizer.fallbackLanguage !== 'default'
|
||||
? this.recognizer.fallbackLanguage
|
||||
: cs.fallbackSpeechRecognizerLanguage;
|
||||
@@ -196,6 +207,8 @@ class TaskConfig extends Task {
|
||||
if ('punctuation' in this.recognizer) {
|
||||
cs.globalSttPunctuation = this.recognizer.punctuation;
|
||||
}
|
||||
// new vendor is set, reset fallback vendor
|
||||
cs.hasFallbackAsr = false;
|
||||
this.logger.info({
|
||||
recognizer: this.recognizer,
|
||||
isContinuousAsr: cs.isContinuousAsr
|
||||
@@ -236,12 +249,14 @@ class TaskConfig extends Task {
|
||||
}
|
||||
if (this.hasTranscribe) {
|
||||
if (this.transcribe.enable) {
|
||||
this.transcribeOpts.recognizer = this.hasRecognizer ?
|
||||
this.recognizer :
|
||||
{
|
||||
vendor: cs.speechRecognizerVendor,
|
||||
language: cs.speechRecognizerLanguage
|
||||
};
|
||||
if (!this.transcribeOpts.recognizer) {
|
||||
this.transcribeOpts.recognizer = this.hasRecognizer ?
|
||||
this.recognizer :
|
||||
{
|
||||
vendor: cs.speechRecognizerVendor,
|
||||
language: cs.speechRecognizerLanguage
|
||||
};
|
||||
}
|
||||
this.logger.debug(this.transcribeOpts, 'Config: enabling transcribe');
|
||||
cs.startBackgroundTask('transcribe', this.transcribeOpts);
|
||||
} else {
|
||||
@@ -249,9 +264,40 @@ class TaskConfig extends Task {
|
||||
cs.stopBackgroundTask('transcribe');
|
||||
}
|
||||
}
|
||||
if (Object.keys(this.actionHookDelayAction).length !== 0) {
|
||||
cs.actionHookDelayProperties = this.actionHookDelayAction;
|
||||
}
|
||||
if (this.data.sipRequestWithinDialogHook) {
|
||||
cs.sipRequestWithinDialogHook = this.data.sipRequestWithinDialogHook;
|
||||
}
|
||||
|
||||
if ('boostAudioSignal' in this.data) {
|
||||
const db = parseDecibels(this.data.boostAudioSignal);
|
||||
this.logger.info(`Config: boosting audio signal by ${db} dB`);
|
||||
const args = [ep.uuid, 'setGain', db];
|
||||
ep.api('uuid_dub', args).catch((err) => {
|
||||
this.logger.error(err, 'Error boosting audio signal');
|
||||
});
|
||||
}
|
||||
|
||||
if (this.hasFillerNoise) {
|
||||
const {enable, ...opts} = this.fillerNoise;
|
||||
this.logger.info({fillerNoise: this.fillerNoise}, 'Config: fillerNoise');
|
||||
if (!enable) cs.disableFillerNoise();
|
||||
else {
|
||||
cs.enableFillerNoise(opts);
|
||||
}
|
||||
}
|
||||
|
||||
if (this.hasVad) {
|
||||
cs.vad = {
|
||||
enable: this.vad.enable || false,
|
||||
voiceMs: this.vad.voiceMs || 250,
|
||||
silenceMs: this.vad.silenceMs || 150,
|
||||
strategy: this.vad.strategy || 'one-shot',
|
||||
mode: this.vad.mod || 2
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
async kill(cs) {
|
||||
|
||||
@@ -14,6 +14,7 @@ const sessionTracker = require('../session/session-tracker');
|
||||
const DtmfCollector = require('../utils/dtmf-collector');
|
||||
const ConfirmCallSession = require('../session/confirm-call-session');
|
||||
const dbUtils = require('../utils/db-utils');
|
||||
const parseDecibels = require('../utils/parse-decibels');
|
||||
const debug = require('debug')('jambonz:feature-server');
|
||||
const {parseUri} = require('drachtio-srf');
|
||||
const {ANCHOR_MEDIA_ALWAYS, JAMBONZ_DISABLE_DIAL_PAI_HEADER} = require('../config');
|
||||
@@ -101,6 +102,7 @@ class TaskDial extends Task {
|
||||
this.dtmfHook = this.data.dtmfHook;
|
||||
this.proxy = this.data.proxy;
|
||||
this.tag = this.data.tag;
|
||||
this.boostAudioSignal = this.data.boostAudioSignal;
|
||||
|
||||
if (this.dtmfHook) {
|
||||
const {parentDtmfCollector, childDtmfCollector} = parseDtmfOptions(logger, this.data.dtmfCapture || {});
|
||||
@@ -118,6 +120,9 @@ class TaskDial extends Task {
|
||||
if (this.data.transcribe) {
|
||||
this.transcribeTask = makeTask(logger, {'transcribe' : this.data.transcribe}, this);
|
||||
}
|
||||
if (this.data.dub && Array.isArray(this.data.dub) && this.data.dub.length > 0) {
|
||||
this.dubTasks = this.data.dub.map((d) => makeTask(logger, {'dub': d}, this));
|
||||
}
|
||||
|
||||
this.results = {};
|
||||
this.bridged = false;
|
||||
@@ -149,6 +154,7 @@ class TaskDial extends Task {
|
||||
this.cs.onHoldMusic ||
|
||||
ANCHOR_MEDIA_ALWAYS ||
|
||||
this.listenTask ||
|
||||
this.dubTasks ||
|
||||
this.transcribeTask ||
|
||||
this.startAmd;
|
||||
|
||||
@@ -551,9 +557,9 @@ class TaskDial extends Task {
|
||||
const str = this.callerId || req.callingNumber || '';
|
||||
const callingNumber = str.startsWith('+') ? str.substring(1) : str;
|
||||
const voip_carrier_sid = await lookupCarrierByPhoneNumber(cs.accountSid, callingNumber);
|
||||
this.logger.info(
|
||||
`Dial:_attemptCalls: selected ${voip_carrier_sid} for requested phone number: ${callingNumber}`);
|
||||
if (voip_carrier_sid) {
|
||||
this.logger.info(
|
||||
`Dial:_attemptCalls: selected voip_carrier_sid ${voip_carrier_sid} for callingNumber: ${callingNumber}`);
|
||||
opts.headers['X-Requested-Carrier-Sid'] = voip_carrier_sid;
|
||||
}
|
||||
}
|
||||
@@ -630,6 +636,8 @@ class TaskDial extends Task {
|
||||
await this._connectSingleDial(cs, sd);
|
||||
} catch (err) {
|
||||
this.logger.info({err}, 'Dial:_attemptCalls - Error calling _connectSingleDial ');
|
||||
sd.removeAllListeners();
|
||||
this.kill(cs);
|
||||
}
|
||||
})
|
||||
.on('decline', () => {
|
||||
@@ -779,6 +787,17 @@ class TaskDial extends Task {
|
||||
dialCallSid: sd.callSid,
|
||||
});
|
||||
|
||||
if (this.dubTasks) {
|
||||
for (const dub of this.dubTasks) {
|
||||
try {
|
||||
await dub.exec(cs, {ep: sd.ep});
|
||||
}
|
||||
catch (err) {
|
||||
this.logger.error({err}, 'Dial:_selectSingleDial - error executing dubTask');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (this.parentDtmfCollector) this._installDtmfDetection(cs, cs.dlg);
|
||||
if (this.childDtmfCollector) this._installDtmfDetection(cs, this.dlg);
|
||||
if (cs.sipRequestWithinDialogHook) this._initSipIndialogRequestListener(cs, this.dlg);
|
||||
@@ -793,6 +812,18 @@ class TaskDial extends Task {
|
||||
}
|
||||
}
|
||||
|
||||
/* boost audio signal if requested */
|
||||
if (this.boostAudioSignal) {
|
||||
try {
|
||||
const db = parseDecibels(this.boostAudioSignal);
|
||||
this.logger.info(`Dial: boosting audio signal by ${db} dB`);
|
||||
const args = [this.ep.uuid, 'setGain', db];
|
||||
await this.ep.api('uuid_dub', args);
|
||||
} catch (err) {
|
||||
this.logger.info({err}, 'Dial:_selectSingleDial - Error boosting audio signal');
|
||||
}
|
||||
}
|
||||
|
||||
/* if we can release the media back to the SBC, do so now */
|
||||
if (this.canReleaseMedia) setTimeout(this._releaseMedia.bind(this, cs, sd), 200);
|
||||
}
|
||||
|
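The dial changes above add pass-through of boostAudioSignal to the answered leg and accept an array of dub verbs that are executed against the B-leg endpoint once the call is connected. A rough sketch of a dial payload using the new fields (the target number and URL are illustrative only):

{
  verb: 'dial',
  target: [{type: 'phone', number: '15551234567'}],
  boostAudioSignal: 6,            // parsed with parse-decibels, applied via uuid_dub setGain
  dub: [
    {action: 'addTrack', track: 'ambience', play: 'https://example.com/background.mp3', loop: true}
  ]
}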
||||
lib/tasks/dub.js (new file, 144 lines added)
@@ -0,0 +1,144 @@
|
||||
const {TaskName} = require('../utils/constants');
|
||||
const TtsTask = require('./tts-task');
|
||||
const assert = require('assert');
|
||||
const parseDecibels = require('../utils/parse-decibels');
|
||||
|
||||
/**
|
||||
* Dub task: add or remove additional audio tracks into the call
|
||||
*/
|
||||
class TaskDub extends TtsTask {
|
||||
constructor(logger, opts, parentTask) {
|
||||
super(logger, opts, parentTask);
|
||||
|
||||
this.logger.debug({opts: this.data}, 'TaskDub constructor');
|
||||
['action', 'track', 'play', 'say', 'loop'].forEach((prop) => {
|
||||
this[prop] = this.data[prop];
|
||||
});
|
||||
this.gain = parseDecibels(this.data.gain);
|
||||
|
||||
assert.ok(this.action, 'TaskDub: action is required');
|
||||
assert.ok(this.track, 'TaskDub: track is required');
|
||||
}
|
||||
|
||||
get name() { return TaskName.Dub; }
|
||||
|
||||
async exec(cs, {ep}) {
|
||||
super.exec(cs);
|
||||
|
||||
try {
|
||||
switch (this.action) {
|
||||
case 'addTrack':
|
||||
await this._addTrack(cs, ep);
|
||||
break;
|
||||
case 'removeTrack':
|
||||
await this._removeTrack(cs, ep);
|
||||
break;
|
||||
case 'silenceTrack':
|
||||
await this._silenceTrack(cs, ep);
|
||||
break;
|
||||
case 'playOnTrack':
|
||||
await this._playOnTrack(cs, ep);
|
||||
break;
|
||||
case 'sayOnTrack':
|
||||
await this._sayOnTrack(cs, ep);
|
||||
break;
|
||||
default:
|
||||
throw new Error(`TaskDub: unsupported action ${this.action}`);
|
||||
}
|
||||
} catch (err) {
|
||||
this.logger.error(err, 'Error executing dub task');
|
||||
}
|
||||
}
|
||||
|
||||
async _addTrack(cs, ep) {
|
||||
this.logger.info(`adding track: ${this.track}`);
|
||||
await ep.dub({
|
||||
action: 'addTrack',
|
||||
track: this.track
|
||||
});
|
||||
|
||||
if (this.play) await this._playOnTrack(cs, ep);
|
||||
else if (this.say) await this._sayOnTrack(cs, ep);
|
||||
}
|
||||
|
||||
async _removeTrack(_cs, ep) {
|
||||
this.logger.info(`removing track: ${this.track}`);
|
||||
await ep.dub({
|
||||
action: 'removeTrack',
|
||||
track: this.track
|
||||
});
|
||||
}
|
||||
|
||||
async _silenceTrack(_cs, ep) {
|
||||
this.logger.info(`silencing track: ${this.track}`);
|
||||
await ep.dub({
|
||||
action: 'silenceTrack',
|
||||
track: this.track
|
||||
});
|
||||
}
|
||||
|
||||
async _playOnTrack(_cs, ep) {
|
||||
this.logger.info(`playing on track: ${this.track}`);
|
||||
await ep.dub({
|
||||
action: 'playOnTrack',
|
||||
track: this.track,
|
||||
play: this.play,
|
||||
loop: this.loop ? 'loop' : 'once',
|
||||
gain: this.gain
|
||||
});
|
||||
}
|
||||
|
||||
async _sayOnTrack(cs, ep) {
|
||||
const text = this.say.text || this.say;
|
||||
this.synthesizer = this.say.synthesizer || {};
|
||||
|
||||
if (Object.keys(this.synthesizer).length) {
|
||||
this.logger.info({synthesizer: this.synthesizer},
|
||||
`saying on track ${this.track}: ${text} with synthesizer options`);
|
||||
}
|
||||
else {
|
||||
this.logger.info(`saying on track ${this.track}: ${text}`);
|
||||
}
|
||||
this.synthesizer = this.synthesizer || {};
|
||||
|
||||
this.text = [text];
|
||||
|
||||
const vendor = this.synthesizer.vendor && this.synthesizer.vendor !== 'default' ?
|
||||
this.synthesizer.vendor :
|
||||
cs.speechSynthesisVendor;
|
||||
const language = this.synthesizer.language && this.synthesizer.language !== 'default' ?
|
||||
this.synthesizer.language :
|
||||
cs.speechSynthesisLanguage ;
|
||||
const voice = this.synthesizer.voice && this.synthesizer.voice !== 'default' ?
|
||||
this.synthesizer.voice :
|
||||
cs.speechSynthesisVoice;
|
||||
const label = this.synthesizer.label && this.synthesizer.label !== 'default' ?
|
||||
this.synthesizer.label :
|
||||
cs.speechSynthesisLabel;
|
||||
|
||||
const disableTtsStreaming = false;
|
||||
const filepath = await this._synthesizeWithSpecificVendor(cs, ep, {
|
||||
vendor, language, voice, label, disableTtsStreaming
|
||||
});
|
||||
assert.ok(filepath.length === 1, 'TaskDub: no filepath returned from synthesizer');
|
||||
|
||||
const path = filepath[0];
|
||||
if (!path.startsWith('say:{')) {
|
||||
/* we have a local file of mp3 or r8 of synthesized speech audio to play */
|
||||
this.logger.info(`playing synthesized speech from file on track ${this.track}: ${path}`);
|
||||
this.play = path;
|
||||
await this._playOnTrack(cs, ep);
|
||||
}
|
||||
else {
|
||||
this.logger.info(`doing actual text to speech file on track ${this.track}: ${path}`);
|
||||
await ep.dub({
|
||||
action: 'sayOnTrack',
|
||||
track: this.track,
|
||||
say: path,
|
||||
gain: this.gain
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = TaskDub;
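Since dub is a new verb, a few example payloads help show how the actions above are meant to be driven. These are sketches only; the field names come from the constructor and action switch above, and the track names, URL and gain value are illustrative:

// add a named track and start playing a file into it, looped, at reduced gain
{verb: 'dub', action: 'addTrack', track: 'ambience', play: 'https://example.com/background.mp3', loop: true, gain: -6}
// speak synthesized text on a track (say may be a plain string or {text, synthesizer})
{verb: 'dub', action: 'sayOnTrack', track: 'coach', say: {text: 'remember to confirm the order number'}}
// silence or remove a track
{verb: 'dub', action: 'silenceTrack', track: 'ambience'}
{verb: 'dub', action: 'removeTrack', track: 'ambience'}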
|
||||
@@ -338,6 +338,7 @@ class TaskEnqueue extends Task {
|
||||
this.logger.error({err}, `TaskEnqueue:_playHook error retrieving list info for queue ${this.queueName}`);
|
||||
}
|
||||
const json = await cs.application.requestor.request('verb:hook', hook, params, httpHeaders);
|
||||
this.logger.debug({json}, 'TaskEnqueue:_playHook: received response from waitHook');
|
||||
const tasks = normalizeJambones(this.logger, json).map((tdata) => makeTask(this.logger, tdata));
|
||||
|
||||
const allowedTasks = tasks.filter((t) => allowed.includes(t.name));
|
||||
|
||||
@@ -10,7 +10,9 @@ const {
|
||||
IbmTranscriptionEvents,
|
||||
NvidiaTranscriptionEvents,
|
||||
JambonzTranscriptionEvents,
|
||||
AssemblyAiTranscriptionEvents
|
||||
AssemblyAiTranscriptionEvents,
|
||||
VadDetection,
|
||||
VerbioTranscriptionEvents
|
||||
} = require('../utils/constants.json');
|
||||
const {
|
||||
JAMBONES_GATHER_EARLY_HINTS_MATCH,
|
||||
@@ -27,7 +29,7 @@ class TaskGather extends SttTask {
|
||||
[
|
||||
'finishOnKey', 'input', 'numDigits', 'minDigits', 'maxDigits',
|
||||
'interDigitTimeout', 'partialResultHook', 'bargein', 'dtmfBargein',
|
||||
'speechTimeout', 'timeout', 'say', 'play'
|
||||
'speechTimeout', 'timeout', 'say', 'play', 'actionHookDelayAction', 'fillerNoise', 'vad'
|
||||
].forEach((k) => this[k] = this.data[k]);
|
||||
|
||||
// gather default input is digits
|
||||
@@ -41,7 +43,8 @@ class TaskGather extends SttTask {
|
||||
this.timeout = this.timeout === 0 ? 0 : (this.timeout || 15) * 1000;
|
||||
this.interim = !!this.partialResultHook || this.bargein || (this.timeout > 0);
|
||||
this.listenDuringPrompt = this.data.listenDuringPrompt === false ? false : true;
|
||||
this.minBargeinWordCount = this.data.minBargeinWordCount || 1;
|
||||
this.minBargeinWordCount = this.data.minBargeinWordCount !== undefined ? this.data.minBargeinWordCount : 1;
|
||||
this._vadEnabled = this.minBargeinWordCount === 0;
|
||||
if (this.data.recognizer) {
|
||||
/* continuous ASR (i.e. compile transcripts until a special timeout or dtmf key) */
|
||||
this.asrTimeout = typeof this.data.recognizer.asrTimeout === 'number' ?
|
||||
@@ -91,6 +94,18 @@ class TaskGather extends SttTask {
|
||||
(this.playTask && this.playTask.earlyMedia);
|
||||
}
|
||||
|
||||
get hasFillerNoise() {
|
||||
return Object.keys(this.fillerNoise).length > 0 && this.fillerNoise.enabled !== false;
|
||||
}
|
||||
|
||||
get fillerNoiseUrl() {
|
||||
return this.fillerNoise.url;
|
||||
}
|
||||
|
||||
get fillerNoiseStartDelaySecs() {
|
||||
return this.fillerNoise.startDelaySecs;
|
||||
}
|
||||
|
||||
get summary() {
|
||||
let s = `${this.name}{`;
|
||||
if (this.input.length === 2) s += 'inputs=[speech,digits],';
|
||||
@@ -102,6 +117,7 @@ class TaskGather extends SttTask {
|
||||
}
|
||||
if (this.sayTask) s += ',with nested say task';
|
||||
if (this.playTask) s += ',with nested play task';
|
||||
if (this.actionHookDelayAction) s += ',with actionHookDelayAction';
|
||||
s += '}';
|
||||
return s;
|
||||
}
|
||||
@@ -111,6 +127,16 @@ class TaskGather extends SttTask {
|
||||
await super.exec(cs, {ep});
|
||||
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
|
||||
|
||||
this.fillerNoise = {
|
||||
...(cs.fillerNoise || {}),
|
||||
...(this.fillerNoise || {})
|
||||
};
|
||||
|
||||
this.vad = {
|
||||
...(cs.vad || {}),
|
||||
...(this.vad || {})
|
||||
};
|
||||
|
||||
if (cs.hasGlobalSttHints && !this.maskGlobalSttHints) {
|
||||
const {hints, hintsBoost} = cs.globalSttHints;
|
||||
const setOfHints = new Set((this.data.recognizer.hints || [])
|
||||
@@ -138,6 +164,24 @@ class TaskGather extends SttTask {
|
||||
this.interim = true;
|
||||
this.logger.debug('Gather:exec - early hints match enabled');
|
||||
}
|
||||
|
||||
// if we have actionHook delay, and the session does as well, stash the session config
|
||||
if (this.actionHookDelayAction) {
|
||||
if (cs.actionHookDelayProcessor) {
|
||||
this.logger.debug('Gather:exec - stashing session-level ahd properties');
|
||||
cs.stashActionHookDelayProperties();
|
||||
}
|
||||
cs.actionHookDelayProperties = this.actionHookDelayAction;
|
||||
}
|
||||
|
||||
this._startVad();
|
||||
|
||||
const startDtmfListener = () => {
|
||||
if (this.input.includes('digits') || this.dtmfBargein || this.asrDtmfTerminationDigit) {
|
||||
ep.on('dtmf', this._onDtmf.bind(this, cs, ep));
|
||||
}
|
||||
};
|
||||
|
||||
const startListening = async(cs, ep) => {
|
||||
this._startTimer();
|
||||
if (this.isContinuousAsr && 0 === this.timeout) this._startAsrTimer();
|
||||
@@ -151,12 +195,7 @@ class TaskGather extends SttTask {
|
||||
this._startTranscribing(ep);
|
||||
return updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid);
|
||||
} catch (e) {
|
||||
if (this.fallbackVendor && this.isHandledByPrimaryProvider) {
|
||||
await this._fallback();
|
||||
startListening(cs, ep);
|
||||
} else {
|
||||
this.logger.error({error: e}, 'error in initSpeech');
|
||||
}
|
||||
await this._startFallback(cs, ep, {error: e});
|
||||
}
|
||||
}
|
||||
};
|
||||
@@ -164,13 +203,12 @@ class TaskGather extends SttTask {
|
||||
try {
|
||||
if (this.sayTask) {
|
||||
const {span, ctx} = this.startChildSpan(`nested:${this.sayTask.summary}`);
|
||||
this.sayTask.span = span;
|
||||
this.sayTask.ctx = ctx;
|
||||
this.sayTask.exec(cs, {ep}); // kicked off, _not_ waiting for it to complete
|
||||
this.sayTask.on('playDone', (err) => {
|
||||
span.end();
|
||||
if (err) this.logger.error({err}, 'Gather:exec Error playing tts');
|
||||
const process = () => {
|
||||
this.logger.debug('Gather: nested say task completed');
|
||||
if (!this.listenDuringPrompt) {
|
||||
startDtmfListener();
|
||||
}
|
||||
this._stopVad();
|
||||
if (!this.killed) {
|
||||
startListening(cs, ep);
|
||||
if (this.input.includes('speech') && this.vendor === 'nuance' && this.listenDuringPrompt) {
|
||||
@@ -180,17 +218,27 @@ class TaskGather extends SttTask {
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
this.sayTask.span = span;
|
||||
this.sayTask.ctx = ctx;
|
||||
this.sayTask.exec(cs, {ep}) // kicked off, _not_ waiting for it to complete
|
||||
.catch((err) => {
|
||||
process();
|
||||
});
|
||||
this.sayTask.on('playDone', (err) => {
|
||||
span.end();
|
||||
if (err) this.logger.error({err}, 'Gather:exec Error playing tts');
|
||||
process();
|
||||
});
|
||||
}
|
||||
else if (this.playTask) {
|
||||
const {span, ctx} = this.startChildSpan(`nested:${this.playTask.summary}`);
|
||||
this.playTask.span = span;
|
||||
this.playTask.ctx = ctx;
|
||||
this.playTask.exec(cs, {ep}); // kicked off, _not_ waiting for it to complete
|
||||
this.playTask.on('playDone', (err) => {
|
||||
span.end();
|
||||
if (err) this.logger.error({err}, 'Gather:exec Error playing url');
|
||||
const process = () => {
|
||||
this.logger.debug('Gather: nested play task completed');
|
||||
if (!this.listenDuringPrompt) {
|
||||
startDtmfListener();
|
||||
}
|
||||
this._stopVad();
|
||||
if (!this.killed) {
|
||||
startListening(cs, ep);
|
||||
if (this.input.includes('speech') && this.vendor === 'nuance' && this.listenDuringPrompt) {
|
||||
@@ -200,6 +248,17 @@ class TaskGather extends SttTask {
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
this.playTask.span = span;
|
||||
this.playTask.ctx = ctx;
|
||||
this.playTask.exec(cs, {ep}) // kicked off, _not_ waiting for it to complete
|
||||
.catch((err) => {
|
||||
process();
|
||||
});
|
||||
this.playTask.on('playDone', (err) => {
|
||||
span.end();
|
||||
if (err) this.logger.error({err}, 'Gather:exec Error playing url');
|
||||
process();
|
||||
});
|
||||
}
|
||||
else {
|
||||
@@ -212,16 +271,22 @@ class TaskGather extends SttTask {
|
||||
|
||||
if (this.input.includes('speech') && this.listenDuringPrompt) {
|
||||
await this._setSpeechHandlers(cs, ep);
|
||||
this._startTranscribing(ep);
|
||||
updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid)
|
||||
.catch(() => {/*already logged error */});
|
||||
if (!this.resolved && !this.killed) {
|
||||
this._startTranscribing(ep);
|
||||
updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid)
|
||||
.catch(() => {/*already logged error */});
|
||||
}
|
||||
else {
|
||||
this.logger.info('Gather:exec - task was killed or resolved quickly, not starting transcription');
|
||||
}
|
||||
}
|
||||
|
||||
if (this.input.includes('digits') || this.dtmfBargein || this.asrDtmfTerminationDigit) {
|
||||
ep.on('dtmf', this._onDtmf.bind(this, cs, ep));
|
||||
if (this.listenDuringPrompt) {
|
||||
startDtmfListener();
|
||||
}
|
||||
|
||||
await this.awaitTaskDone();
|
||||
this._killAudio(cs);
|
||||
} catch (err) {
|
||||
this.logger.error(err, 'TaskGather:exec error');
|
||||
}
|
||||
@@ -231,11 +296,13 @@ class TaskGather extends SttTask {
|
||||
kill(cs) {
|
||||
super.kill(cs);
|
||||
this._killAudio(cs);
|
||||
this._clearFillerNoiseTimer();
|
||||
this.ep.removeAllListeners('dtmf');
|
||||
clearTimeout(this.interDigitTimer);
|
||||
this._clearAsrTimer();
|
||||
this.playTask?.span.end();
|
||||
this.sayTask?.span.end();
|
||||
this._stopVad();
|
||||
this._resolve('killed');
|
||||
}
|
||||
|
||||
@@ -313,23 +380,19 @@ class TaskGather extends SttTask {
|
||||
ep, GoogleTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
|
||||
this.addCustomEventListener(
|
||||
ep, GoogleTranscriptionEvents.EndOfUtterance, this._onEndOfUtterance.bind(this, cs, ep));
|
||||
this.addCustomEventListener(
|
||||
ep, GoogleTranscriptionEvents.VadDetected, this._onVadDetected.bind(this, cs, ep));
|
||||
break;
|
||||
|
||||
case 'aws':
|
||||
case 'polly':
|
||||
this.bugname = `${this.bugname_prefix}aws_transcribe`;
|
||||
this.addCustomEventListener(ep, AwsTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
|
||||
this.addCustomEventListener(ep, AwsTranscriptionEvents.VadDetected, this._onVadDetected.bind(this, cs, ep));
|
||||
break;
|
||||
case 'microsoft':
|
||||
this.bugname = `${this.bugname_prefix}azure_transcribe`;
|
||||
this.addCustomEventListener(
|
||||
ep, AzureTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
|
||||
this.addCustomEventListener(ep, AzureTranscriptionEvents.NoSpeechDetected,
|
||||
this._onNoSpeechDetected.bind(this, cs, ep));
|
||||
this.addCustomEventListener(ep, AzureTranscriptionEvents.VadDetected, this._onVadDetected.bind(this, cs, ep));
|
||||
//this.addCustomEventListener(ep, AzureTranscriptionEvents.NoSpeechDetected,
|
||||
//this._onNoSpeechDetected.bind(this, cs, ep));
|
||||
break;
|
||||
case 'nuance':
|
||||
this.bugname = `${this.bugname_prefix}nuance_transcribe`;
|
||||
@@ -339,8 +402,6 @@ class TaskGather extends SttTask {
|
||||
this._onStartOfSpeech.bind(this, cs, ep));
|
||||
this.addCustomEventListener(ep, NuanceTranscriptionEvents.TranscriptionComplete,
|
||||
this._onTranscriptionComplete.bind(this, cs, ep));
|
||||
this.addCustomEventListener(ep, NuanceTranscriptionEvents.VadDetected,
|
||||
this._onVadDetected.bind(this, cs, ep));
|
||||
|
||||
/* stall timers until prompt finishes playing */
|
||||
if ((this.sayTask || this.playTask) && this.listenDuringPrompt) {
|
||||
@@ -363,6 +424,12 @@ class TaskGather extends SttTask {
|
||||
ep, SonioxTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
|
||||
break;
|
||||
|
||||
case 'verbio':
|
||||
this.bugname = `${this.bugname_prefix}verbio_transcribe`;
|
||||
this.addCustomEventListener(
|
||||
ep, VerbioTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
|
||||
break;
|
||||
|
||||
case 'cobalt':
|
||||
this.bugname = `${this.bugname_prefix}cobalt_transcribe`;
|
||||
this.addCustomEventListener(
|
||||
@@ -410,8 +477,6 @@ class TaskGather extends SttTask {
|
||||
this._onStartOfSpeech.bind(this, cs, ep));
|
||||
this.addCustomEventListener(ep, NvidiaTranscriptionEvents.TranscriptionComplete,
|
||||
this._onTranscriptionComplete.bind(this, cs, ep));
|
||||
this.addCustomEventListener(ep, NvidiaTranscriptionEvents.VadDetected,
|
||||
this._onVadDetected.bind(this, cs, ep));
|
||||
|
||||
/* I think nvidia has this (??) - stall timers until prompt finishes playing */
|
||||
if ((this.sayTask || this.playTask) && this.listenDuringPrompt) {
|
||||
@@ -510,17 +575,25 @@ class TaskGather extends SttTask {
|
||||
this._clearAsrTimer();
|
||||
this._asrTimer = setTimeout(() => {
|
||||
this.logger.debug('_startAsrTimer - asr timer went off');
|
||||
const evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language);
|
||||
const evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language, this.vendor);
|
||||
this._resolve(this._bufferedTranscripts.length > 0 ? 'speech' : 'timeout', evt);
|
||||
}, this.asrTimeout);
|
||||
this.logger.debug(`_startAsrTimer: set for ${this.asrTimeout}ms`);
|
||||
}
|
||||
|
||||
_clearAsrTimer() {
|
||||
if (this._asrTimer) clearTimeout(this._asrTimer);
|
||||
if (this._asrTimer) {
|
||||
this.logger.debug('_clearAsrTimer: asrTimer cleared');
|
||||
clearTimeout(this._asrTimer);
|
||||
}
|
||||
this._asrTimer = null;
|
||||
}
|
||||
|
||||
_hangupCall() {
|
||||
this.logger.debug('_hangupCall');
|
||||
this.cs.hangup();
|
||||
}
|
||||
|
||||
_startFastRecognitionTimer(evt) {
|
||||
assert(this.fastRecognitionTimeout > 0);
|
||||
this._clearFastRecognitionTimer();
|
||||
@@ -541,7 +614,7 @@ class TaskGather extends SttTask {
|
||||
this._clearFinalAsrTimer();
|
||||
this._finalAsrTimer = setTimeout(() => {
|
||||
this.logger.debug('_startFinalAsrTimer - final asr timer went off');
|
||||
const evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language);
|
||||
const evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language, this.vendor);
|
||||
this._resolve(this._bufferedTranscripts.length > 0 ? 'speech' : 'timeout', evt);
|
||||
}, 1000);
|
||||
this.logger.debug('_startFinalAsrTimer: set for 1 second');
|
||||
@@ -552,11 +625,65 @@ class TaskGather extends SttTask {
|
||||
this._finalAsrTimer = null;
|
||||
}
|
||||
|
||||
|
||||
_startVad() {
|
||||
if (!this._vadStarted && this._vadEnabled) {
|
||||
this.logger.debug('_startVad');
|
||||
this.addCustomEventListener(this.ep, VadDetection.Detection, this._onVadDetected.bind(this, this.cs, this.ep));
|
||||
this.ep?.startVadDetection(this.vad);
|
||||
this._vadStarted = true;
|
||||
}
|
||||
}
|
||||
|
||||
_stopVad() {
|
||||
if (this._vadStarted) {
|
||||
this.logger.debug('_stopVad');
|
||||
this.ep?.stopVadDetection(this.vad);
|
||||
this.ep?.removeCustomEventListener(VadDetection.Detection, this._onVadDetected);
|
||||
this._vadStarted = false;
|
||||
}
|
||||
}
|
||||
|
||||
_startFillerNoise() {
|
||||
this.logger.debug('Gather:_startFillerNoise - playing filler noise');
|
||||
this.ep?.play(this.fillerNoise.url);
|
||||
this._fillerNoiseOn = true;
|
||||
this.ep.once('playback-start', (evt) => {
|
||||
if (evt.file === this.fillerNoise.url && !this._fillerNoiseOn) {
|
||||
this.logger.info({evt}, 'Gather:_startFillerNoise - race condition - kill filler noise here');
|
||||
this.ep.api('uuid_break', this.ep.uuid)
|
||||
.catch((err) => this.logger.info(err, 'Error killing filler noise'));
|
||||
return;
|
||||
} else this.logger.debug({evt}, 'Gather:_startFillerNoise - playback started');
|
||||
});
|
||||
}
|
||||
|
||||
_startFillerNoiseTimer() {
|
||||
this._clearFillerNoiseTimer();
|
||||
this._fillerNoiseTimer = setTimeout(() => {
|
||||
this.logger.debug('Gather:_startFillerNoiseTimer - playing filler noise');
|
||||
this._startFillerNoise();
|
||||
}, this.fillerNoise.startDelaySecs * 1000);
|
||||
}
|
||||
|
||||
_clearFillerNoiseTimer() {
|
||||
if (this._fillerNoiseTimer) clearTimeout(this._fillerNoiseTimer);
|
||||
this._fillerNoiseTimer = null;
|
||||
}
|
||||
|
||||
_killFillerNoise() {
|
||||
if (this._fillerNoiseTimer) {
|
||||
this.logger.debug('Gather:_killFillerNoise');
|
||||
this.ep?.api('uuid_break', this.ep.uuid);
|
||||
}
|
||||
}
|
||||
|
||||
_killAudio(cs) {
|
||||
if (!this.sayTask && !this.playTask && this.bargein) {
|
||||
if (this.ep?.connected && !this.playComplete) {
|
||||
if (this.hasFillerNoise || (!this.sayTask && !this.playTask && this.bargein)) {
|
||||
if (this.ep?.connected && (!this.playComplete || this.hasFillerNoise)) {
|
||||
this.logger.debug('Gather:_killAudio: killing playback of any audio');
|
||||
this.playComplete = true;
|
||||
this._fillerNoiseOn = false; // in a race, if we just started audio it may sneak through here
|
||||
this.ep.api('uuid_break', this.ep.uuid)
|
||||
.catch((err) => this.logger.info(err, 'Error killing audio'));
|
||||
}
|
||||
@@ -578,11 +705,11 @@ class TaskGather extends SttTask {
|
||||
// make sure this is not a transcript from answering machine detection
|
||||
const bugname = fsEvent.getHeader('media-bugname');
|
||||
const finished = fsEvent.getHeader('transcription-session-finished');
|
||||
this.logger.debug({evt, bugname, finished}, `Gather:_onTranscription for vendor ${this.vendor}`);
|
||||
this.logger.debug({evt, bugname, finished, vendor: this.vendor}, 'Gather:_onTranscription raw transcript');
|
||||
if (bugname && this.bugname !== bugname) return;
|
||||
if (finished === 'true') return;
|
||||
|
||||
if (this.vendor === 'ibm' && evt?.state === 'listening') return;
|
||||
|
||||
if (this.vendor === 'deepgram' && evt.type === 'UtteranceEnd') {
|
||||
/* we will only get this when we have set utterance_end_ms */
|
||||
if (this._bufferedTranscripts.length === 0) {
|
||||
@@ -590,19 +717,35 @@ class TaskGather extends SttTask {
|
||||
}
|
||||
else {
|
||||
this.logger.debug('Gather:_onTranscription - got UtteranceEnd event from deepgram, return buffered transcript');
|
||||
evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language);
|
||||
evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language, this.vendor);
|
||||
this._bufferedTranscripts = [];
|
||||
this._resolve('speech', evt);
|
||||
}
|
||||
return;
|
||||
}
|
||||
if (this.vendor === 'deepgram' && evt.type === 'Metadata') {
|
||||
this.logger.debug('Gather:_onTranscription - discarding Metadata event from deepgram');
|
||||
return;
|
||||
}
|
||||
|
||||
evt = this.normalizeTranscription(evt, this.vendor, 1, this.language,
|
||||
this.shortUtterance, this.data.recognizer.punctuation);
|
||||
//this.logger.debug({evt, bugname, finished, vendor: this.vendor}, 'Gather:_onTranscription normalized transcript');
|
||||
|
||||
if (evt.alternatives.length === 0) {
|
||||
this.logger.info({evt}, 'TaskGather:_onTranscription - got empty transcript, continue listening');
|
||||
return;
|
||||
}
|
||||
const confidence = evt.alternatives[0].confidence;
|
||||
const minConfidence = this.data.recognizer?.minConfidence;
|
||||
this.logger.debug({evt},
|
||||
`TaskGather:_onTranscription - confidence (${confidence}), minConfidence (${minConfidence})`);
|
||||
if (confidence && minConfidence && confidence < minConfidence) {
|
||||
this.logger.info({evt},
|
||||
'TaskGather:_onTranscription - Transcript confidence ' +
|
||||
`(${confidence}) < minConfidence (${minConfidence})`);
|
||||
return this._resolve('stt-low-confidence', evt);
|
||||
}
|
||||
|
||||
/* fast path: our first partial transcript exactly matches an early hint */
|
||||
if (this.earlyHintsMatch && evt.is_final === false && this.partialTranscriptsCount++ === 0) {
|
||||
@@ -660,7 +803,7 @@ class TaskGather extends SttTask {
|
||||
this._clearTimer();
|
||||
if (this._finalAsrTimer) {
|
||||
this._clearFinalAsrTimer();
|
||||
const evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language);
|
||||
const evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language, this.vendor);
|
||||
return this._resolve(this._bufferedTranscripts.length > 0 ? 'speech' : 'timeout', evt);
|
||||
}
|
||||
this._startAsrTimer();
|
||||
@@ -669,43 +812,59 @@ class TaskGather extends SttTask {
|
||||
if (!['soniox', 'aws', 'microsoft', 'deepgram'].includes(this.vendor)) this._startTranscribing(ep);
|
||||
}
|
||||
else {
|
||||
/* this was removed to fix https://github.com/jambonz/jambonz-feature-server/issues/783 */
|
||||
/*
|
||||
if (this.bargein && (words + bufferedWords) < this.minBargeinWordCount) {
|
||||
this.logger.debug({evt, words, bufferedWords},
|
||||
'TaskGather:_onTranscription - final transcript but < min barge words');
|
||||
this._bufferedTranscripts.push(evt);
|
||||
if (!emptyTranscript) this._bufferedTranscripts.push(evt);
|
||||
if (!['soniox', 'aws', 'microsoft', 'deepgram'].includes(this.vendor)) this._startTranscribing(ep);
|
||||
return;
|
||||
}
|
||||
else {
|
||||
if (this.vendor === 'soniox') {
|
||||
/* compile transcripts into one */
|
||||
this._sonioxTranscripts.push(evt.vendor.finalWords);
|
||||
evt = this.compileSonioxTranscripts(this._sonioxTranscripts, 1, this.language);
|
||||
this._sonioxTranscripts = [];
|
||||
}
|
||||
else if (this.vendor === 'deepgram') {
|
||||
/* compile transcripts into one */
|
||||
if (!emptyTranscript) this._bufferedTranscripts.push(evt);
|
||||
|
||||
/* deepgram can send an empty and final transcript; only if we have any buffered should we resolve */
|
||||
if (this._bufferedTranscripts.length === 0) return;
|
||||
evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language);
|
||||
this._bufferedTranscripts = [];
|
||||
}
|
||||
|
||||
/* here is where we return a final transcript */
|
||||
this._resolve('speech', evt);
|
||||
*/
|
||||
if (this.vendor === 'soniox') {
|
||||
/* compile transcripts into one */
|
||||
this._sonioxTranscripts.push(evt.vendor.finalWords);
|
||||
evt = this.compileSonioxTranscripts(this._sonioxTranscripts, 1, this.language);
|
||||
this._sonioxTranscripts = [];
|
||||
}
|
||||
else if (this.vendor === 'deepgram') {
|
||||
/* compile transcripts into one */
|
||||
if (!emptyTranscript) this._bufferedTranscripts.push(evt);
|
||||
|
||||
/* deepgram can send an empty and final transcript; only if we have any buffered should we resolve */
|
||||
if (this._bufferedTranscripts.length === 0) return;
|
||||
evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language, this.vendor);
|
||||
this._bufferedTranscripts = [];
|
||||
}
|
||||
|
||||
/* here is where we return a final transcript */
|
||||
this._resolve('speech', evt);
|
||||
/*}*/
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (this._clearTimer()) this._startTimer();
|
||||
if (this.bargein && (words + bufferedWords) >= this.minBargeinWordCount) {
|
||||
if (!this.playComplete) {
|
||||
this.logger.debug({transcript: evt.alternatives[0].transcript}, 'killing audio due to speech');
|
||||
this.emit('vad');
|
||||
/* deepgram can send a non-final transcript but with words that are final, so we need to buffer */
|
||||
let emptyTranscript = false;
|
||||
if (this.vendor === 'deepgram') {
|
||||
const originalEvent = evt.vendor.evt;
|
||||
if (originalEvent.is_final && evt.alternatives[0].transcript !== '') {
|
||||
this.logger.debug({evt}, 'Gather:_onTranscription - buffering a completed (partial) deepgram transcript');
|
||||
this._bufferedTranscripts.push(evt);
|
||||
}
|
||||
if (evt.alternatives[0].transcript === '') emptyTranscript = true;
|
||||
}
|
||||
|
||||
if (!emptyTranscript) {
|
||||
if (this._clearTimer()) this._startTimer();
|
||||
if (this.bargein && (words + bufferedWords) >= this.minBargeinWordCount) {
|
||||
if (!this.playComplete) {
|
||||
this.logger.debug({transcript: evt.alternatives[0].transcript}, 'killing audio due to speech');
|
||||
this.emit('vad');
|
||||
}
|
||||
this._killAudio(cs);
|
||||
}
|
||||
this._killAudio(cs);
|
||||
}
|
||||
if (this.fastRecognitionTimeout) {
|
||||
this._startFastRecognitionTimer(evt);
|
||||
@@ -723,14 +882,9 @@ class TaskGather extends SttTask {
|
||||
this._sonioxTranscripts.push(evt.vendor.finalWords);
|
||||
}
|
||||
}
|
||||
/* deepgram can send a non-final transcript but with words that are final, so we need to buffer */
|
||||
if (this.vendor === 'deepgram') {
|
||||
const originalEvent = evt.vendor.evt;
|
||||
if (originalEvent.is_final && evt.alternatives[0].transcript !== '') {
|
||||
this.logger.debug({evt}, 'Gather:_onTranscription - buffering a completed (partial) deepgram transcript');
|
||||
this._bufferedTranscripts.push(evt);
|
||||
}
|
||||
}
|
||||
|
||||
/* restart asr timer if we get a partial transcript */
|
||||
if (this.isContinuousAsr) this._startAsrTimer();
|
||||
}
|
||||
}
|
||||
_onEndOfUtterance(cs, ep) {
|
||||
@@ -762,9 +916,9 @@ class TaskGather extends SttTask {
|
||||
_onTranscriptionComplete(cs, ep) {
|
||||
this.logger.debug('TaskGather:_onTranscriptionComplete');
|
||||
}
|
||||
async _onJambonzError(cs, ep, evt) {
|
||||
this.logger.info({evt}, 'TaskGather:_onJambonzError');
|
||||
if (this.isHandledByPrimaryProvider && this.fallbackVendor) {
|
||||
|
||||
async _startFallback(cs, ep, evt) {
|
||||
if (this.canFallback) {
|
||||
ep.stopTranscription({
|
||||
vendor: this.vendor,
|
||||
bugname: this.bugname
|
||||
@@ -772,17 +926,35 @@ class TaskGather extends SttTask {
|
||||
.catch((err) => this.logger.error({err}, `Error stopping transcription for primary vendor ${this.vendor}`));
|
||||
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
|
||||
try {
|
||||
await this._fallback();
|
||||
await this._initSpeech(cs, ep);
|
||||
this.logger.debug('gather:_startFallback');
|
||||
this.notifyError({ msg: 'ASR error',
|
||||
details:`STT Vendor ${this.vendor} error: ${evt.error || evt.reason}`, failover: 'in progress'});
|
||||
await this._initFallback();
|
||||
this._speechHandlersSet = false;
|
||||
await this._setSpeechHandlers(cs, ep);
|
||||
this._startTranscribing(ep);
|
||||
updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid);
|
||||
return;
|
||||
return true;
|
||||
} catch (error) {
|
||||
this.logger.info({error}, `There is error while falling back to ${this.fallbackVendor}`);
|
||||
this.notifyError({ msg: 'ASR error',
|
||||
details:`STT Vendor ${this.vendor} error: ${evt.error || evt.reason}`, failover: 'not available'});
|
||||
}
|
||||
} else {
|
||||
this.logger.debug('gather:_startFallback no condition for falling back');
|
||||
this.notifyError({ msg: 'ASR error',
|
||||
details:`STT Vendor ${this.vendor} error: ${evt.error || evt.reason}`, failover: 'not available'});
|
||||
}
|
||||
const {writeAlerts, AlertType} = cs.srf.locals;
|
||||
return false;
|
||||
}
|
||||
|
||||
async _onJambonzError(cs, ep, evt) {
|
||||
if (this.vendor === 'google' && evt.error_code === 0) {
|
||||
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError - ignoring google error code 0');
|
||||
return;
|
||||
}
|
||||
this.logger.info({evt}, 'TaskGather:_onJambonzError');
|
||||
const {writeAlerts, AlertType} = cs.srf.locals;
|
||||
if (this.vendor === 'nuance') {
|
||||
const {code, error} = evt;
|
||||
if (code === 404 && error === 'No speech') return this._resolve('timeout');
|
||||
@@ -795,17 +967,23 @@ class TaskGather extends SttTask {
|
||||
message: `Custom speech vendor ${this.vendor} error: ${evt.error}`,
|
||||
vendor: this.vendor,
|
||||
}).catch((err) => this.logger.info({err}, 'Error generating alert for jambonz custom connection failure'));
|
||||
this.notifyError({msg: 'ASR error', details:`Custom speech vendor ${this.vendor} error: ${evt.error}`});
|
||||
if (!(await this._startFallback(cs, ep, evt))) {
|
||||
this.notifyTaskDone();
|
||||
}
|
||||
}
|
||||
|
||||
_onVendorConnectFailure(cs, _ep, evt) {
|
||||
async _onVendorConnectFailure(cs, _ep, evt) {
|
||||
super._onVendorConnectFailure(cs, _ep, evt);
|
||||
this.notifyTaskDone();
|
||||
if (!(await this._startFallback(cs, _ep, evt))) {
|
||||
this.notifyTaskDone();
|
||||
}
|
||||
}
|
||||
|
||||
_onVendorError(cs, _ep, evt) {
|
||||
async _onVendorError(cs, _ep, evt) {
|
||||
super._onVendorError(cs, _ep, evt);
|
||||
this._resolve('stt-error', evt);
|
||||
if (!(await this._startFallback(cs, _ep, evt))) {
|
||||
this._resolve('stt-error', evt);
|
||||
}
|
||||
}
|
||||
|
||||
_onVadDetected(cs, ep) {
|
||||
@@ -814,6 +992,10 @@ class TaskGather extends SttTask {
|
||||
this._killAudio(cs);
|
||||
this.emit('vad');
|
||||
}
|
||||
if (this.vad?.strategy === 'one-shot') {
|
||||
this.ep?.removeCustomEventListener(VadDetection.Detection, this._onVadDetected);
|
||||
this._vadStarted = false;
|
||||
}
|
||||
}
|
||||
|
||||
_onNoSpeechDetected(cs, ep, evt, fsEvent) {
|
||||
@@ -832,32 +1014,38 @@ class TaskGather extends SttTask {
|
||||
|
||||
async _resolve(reason, evt) {
|
||||
this.logger.debug(`TaskGather:resolve with reason ${reason}`);
|
||||
if (this.resolved) return;
|
||||
if (this.needsStt && this.ep && this.ep.connected) {
|
||||
this.ep.stopTranscription({
|
||||
vendor: this.vendor,
|
||||
bugname: this.bugname
|
||||
})
|
||||
.catch((err) => {
|
||||
if (this.resolved) return;
|
||||
this.logger.error({err}, 'Error stopping transcription');
|
||||
});
|
||||
}
|
||||
if (this.resolved) {
|
||||
this.logger.debug('TaskGather:_resolve - already resolved');
|
||||
return;
|
||||
}
|
||||
|
||||
this.resolved = true;
|
||||
// If bargin is false and ws application return ack to verb:hook
|
||||
// the gather should not play any audio
|
||||
this._killAudio(this.cs);
|
||||
// Clear dtmf event
|
||||
if (this.dtmfBargein) {
|
||||
this.ep.removeAllListeners('dtmf');
|
||||
}
|
||||
// Clear dtmf events, to avoid any case can leak the listener, just clean it
|
||||
this.ep.removeAllListeners('dtmf');
|
||||
clearTimeout(this.interDigitTimer);
|
||||
this._clearTimer();
|
||||
this._clearFastRecognitionTimer();
|
||||
this._clearAsrTimer();
|
||||
this._clearFinalAsrTimer();
|
||||
|
||||
this.span.setAttributes({
|
||||
channel: 1,
|
||||
'stt.resolve': reason,
|
||||
'stt.result': JSON.stringify(evt)
|
||||
});
|
||||
if (this.needsStt && this.ep && this.ep.connected) {
|
||||
this.ep.stopTranscription({
|
||||
vendor: this.vendor,
|
||||
bugname: this.bugname
|
||||
})
|
||||
.catch((err) => this.logger.error({err}, 'Error stopping transcription'));
|
||||
}
|
||||
|
||||
if (this.callSession && this.callSession.callGone) {
|
||||
this.logger.debug('TaskGather:_resolve - call is gone, not invoking web callback');
|
||||
@@ -865,36 +1053,82 @@ class TaskGather extends SttTask {
|
||||
return;
|
||||
}
|
||||
|
||||
// action hook delay
|
||||
if (this.cs.actionHookDelayProcessor) {
|
||||
this.logger.debug('TaskGather:_resolve - actionHookDelayProcessor exists - starting it');
|
||||
this.cs.actionHookDelayProcessor.start();
|
||||
}
|
||||
|
||||
// TODO: remove and implement as actionHookDelay
|
||||
if (this.hasFillerNoise && (reason.startsWith('dtmf') || reason.startsWith('speech'))) {
|
||||
if (this.fillerNoiseStartDelaySecs > 0) {
|
||||
this._startFillerNoiseTimer();
|
||||
}
|
||||
else {
|
||||
this.logger.debug(`TaskGather:_resolve - playing filler noise: ${this.fillerNoiseUrl}`);
|
||||
this._startFillerNoise();
|
||||
}
|
||||
}
|
||||
|
||||
let returnedVerbs = false;
|
||||
try {
|
||||
if (reason.startsWith('dtmf')) {
|
||||
if (this.parentTask) this.parentTask.emit('dtmf', evt);
|
||||
else {
|
||||
this.emit('dtmf', evt);
|
||||
await this.performAction({digits: this.digitBuffer, reason: 'dtmfDetected'});
|
||||
returnedVerbs = await this.performAction({digits: this.digitBuffer, reason: 'dtmfDetected'});
|
||||
}
|
||||
}
|
||||
else if (reason.startsWith('speech')) {
|
||||
if (this.parentTask) this.parentTask.emit('transcription', evt);
|
||||
else {
|
||||
this.emit('transcription', evt);
|
||||
await this.performAction({speech: evt, reason: 'speechDetected'});
|
||||
this.logger.debug('TaskGather:_resolve - invoking performAction');
|
||||
returnedVerbs = await this.performAction({speech: evt, reason: 'speechDetected'});
|
||||
this.logger.debug({returnedVerbs}, 'TaskGather:_resolve - back from performAction');
|
||||
}
|
||||
}
|
||||
else if (reason.startsWith('timeout')) {
|
||||
if (this.parentTask) this.parentTask.emit('timeout', evt);
|
||||
else {
|
||||
this.emit('timeout', evt);
|
||||
await this.performAction({reason: 'timeout'});
|
||||
returnedVerbs = await this.performAction({reason: 'timeout'});
|
||||
}
|
||||
}
|
||||
else if (reason.startsWith('stt-error')) {
|
||||
if (this.parentTask) this.parentTask.emit('stt-error', evt);
|
||||
else {
|
||||
this.emit('stt-error', evt);
|
||||
await this.performAction({reason: 'error', details: evt.error});
|
||||
returnedVerbs = await this.performAction({reason: 'error', details: evt.error});
|
||||
}
|
||||
} else if (reason.startsWith('stt-low-confidence')) {
|
||||
if (this.parentTask) this.parentTask.emit('stt-low-confidence', evt);
|
||||
else {
|
||||
this.emit('stt-low-confidence', evt);
|
||||
returnedVerbs = await this.performAction({reason: 'stt-low-confidence'});
|
||||
}
|
||||
}
|
||||
} catch (err) { /*already logged error*/ }
|
||||
|
||||
// Gather got response from hook, cancel actionHookDelay processing
|
||||
this.logger.debug('TaskGather:_resolve - checking ahd');
|
||||
if (this.cs.actionHookDelayProcessor) {
|
||||
if (returnedVerbs) {
|
||||
this.logger.debug('TaskGather:_resolve - got response from action hook, cancelling actionHookDelay');
|
||||
await this.cs.actionHookDelayProcessor.stop();
|
||||
if (this.actionHookDelayAction && !this.cs.popActionHookDelayProperties()) {
|
||||
// no session level ahd was running when this task started, so clear it
|
||||
this.cs.clearActionHookDelayProcessor();
|
||||
this.logger.debug('TaskGather:_resolve - clear ahd');
|
||||
}
|
||||
}
|
||||
else {
|
||||
this.logger.debug('TaskGather:_resolve - no response from action hook, continue actionHookDelay');
|
||||
}
|
||||
}
|
||||
|
||||
this._clearFillerNoiseTimer();
|
||||
|
||||
this.notifyTaskDone();
|
||||
}
|
||||
}
|
||||
|
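Pulling the gather changes together: the verb now honors per-gather vad and fillerNoise settings (merged over the session-level values set by config), an actionHookDelayAction that is stashed and restored around the gather, and minBargeinWordCount of 0 as the trigger for VAD-based barge-in. A sketch of a gather using the new options, with illustrative values:

{
  verb: 'gather',
  input: ['speech', 'digits'],
  timeout: 15,
  minBargeinWordCount: 0,                    // 0 enables the VAD path (_vadEnabled)
  vad: {enable: true, voiceMs: 250, silenceMs: 150, strategy: 'one-shot', mode: 2},
  fillerNoise: {url: 'https://example.com/typing.mp3', startDelaySecs: 2},
  actionHookDelayAction: { /* temporarily replaces the session-level properties */ },
  recognizer: {vendor: 'default', language: 'en-US'},
  say: {text: 'How can I help you today?'}
}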
||||
@@ -8,6 +8,10 @@ const DTMF_SPAN_NAME = 'dtmf';
|
||||
class TaskListen extends Task {
|
||||
constructor(logger, opts, parentTask) {
|
||||
super(logger, opts);
|
||||
/**
|
||||
* @deprecated
|
||||
* use bidirectionalAudio.enabled
|
||||
*/
|
||||
this.disableBidirectionalAudio = opts.disableBidirectionalAudio;
|
||||
this.preconditions = TaskPreconditions.Endpoint;
|
||||
|
||||
@@ -25,6 +29,15 @@ class TaskListen extends Task {
|
||||
this.results = {};
|
||||
this.playAudioQueue = [];
|
||||
this.isPlayingAudioFromQueue = false;
|
||||
this.bidirectionalAudio = {
|
||||
enabled: this.disableBidirectionalAudio === true ? false : true,
|
||||
...(this.data['bidirectionalAudio']),
|
||||
};
|
||||
|
||||
// From drachtio-version 3.0.40, forkAudioStart will send empty bugname, metadata together with
|
||||
// bidirectionalAudio params, which causes older versions of freeswitch to confuse the bugname with the
|
||||
// bidirectionalAudio params
|
||||
this._bugname = 'audio_fork';
|
||||
|
||||
if (this.transcribe) this.transcribeTask = makeTask(logger, {'transcribe': opts.transcribe}, this);
|
||||
}
|
||||
@@ -133,7 +146,8 @@ class TaskListen extends Task {
|
||||
mixType: this.mixType,
|
||||
sampling: this.sampleRate,
|
||||
...(this._bugname && {bugname: this._bugname}),
|
||||
metadata
|
||||
metadata,
|
||||
bidirectionalAudio: this.bidirectionalAudio || {}
|
||||
});
|
||||
this.recordStartTime = moment();
|
||||
if (this.maxLength) {
|
||||
@@ -153,7 +167,7 @@ class TaskListen extends Task {
|
||||
}
|
||||
|
||||
/* support bi-directional audio */
|
||||
if (!this.disableBidirectionalAudio) {
|
||||
if (this.bidirectionalAudio.enabled) {
|
||||
ep.addCustomEventListener(ListenEvents.PlayAudio, this._onPlayAudio.bind(this, ep));
|
||||
}
|
||||
ep.addCustomEventListener(ListenEvents.KillAudio, this._onKillAudio.bind(this, ep));
|
||||
|
||||
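The listen verb now takes an explicit bidirectionalAudio object, with the old disableBidirectionalAudio flag kept only as a deprecated alias. A minimal sketch; the url, mixType and sampleRate values are illustrative:

{
  verb: 'listen',
  url: 'wss://example.com/audio-stream',
  mixType: 'stereo',
  sampleRate: 8000,
  bidirectionalAudio: {enabled: true}
}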
@@ -14,6 +14,9 @@ function makeTask(logger, obj, parent) {
|
||||
}
|
||||
validateVerb(name, data, logger);
|
||||
switch (name) {
|
||||
case TaskName.Answer:
|
||||
const TaskAnswer = require('./answer');
|
||||
return new TaskAnswer(logger, data, parent);
|
||||
case TaskName.SipDecline:
|
||||
const TaskSipDecline = require('./sip_decline');
|
||||
return new TaskSipDecline(logger, data, parent);
|
||||
@@ -41,6 +44,9 @@ function makeTask(logger, obj, parent) {
|
||||
case TaskName.Dtmf:
|
||||
const TaskDtmf = require('./dtmf');
|
||||
return new TaskDtmf(logger, data, parent);
|
||||
case TaskName.Dub:
|
||||
const TaskDub = require('./dub');
|
||||
return new TaskDub(logger, data, parent);
|
||||
case TaskName.Enqueue:
|
||||
const TaskEnqueue = require('./enqueue');
|
||||
return new TaskEnqueue(logger, data, parent);
|
||||
|
||||
@@ -17,6 +17,7 @@ class TaskRestDial extends Task {
|
||||
this.call_hook = this.data.call_hook;
|
||||
this.timeout = this.data.timeout || 60;
|
||||
this.sipRequestWithinDialogHook = this.data.sipRequestWithinDialogHook;
|
||||
this.referHook = this.data.referHook;
|
||||
|
||||
this.on('connect', this._onConnect.bind(this));
|
||||
this.on('callStatus', this._onCallStatus.bind(this));
|
||||
@@ -64,6 +65,7 @@ class TaskRestDial extends Task {
|
||||
this.canCancel = false;
|
||||
const cs = this.callSession;
|
||||
cs.setDialog(dlg);
|
||||
cs.referHook = this.referHook;
|
||||
this.logger.debug('TaskRestDial:_onConnect - call connected');
|
||||
if (this.sipRequestWithinDialogHook) this._initSipRequestWithinDialogHandler(cs, dlg);
|
||||
try {
|
||||
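rest_dial now records a referHook alongside the existing sipRequestWithinDialogHook and installs it on the call session at connect time. A sketch of the relevant fields in a REST-initiated call payload (other required fields omitted; the URLs are illustrative):

{
  call_hook: 'https://example.com/app',
  timeout: 60,
  referHook: 'https://example.com/refer',
  sipRequestWithinDialogHook: 'https://example.com/in-dialog'
}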
|
||||
lib/tasks/say.js (226 lines changed)
@@ -1,4 +1,4 @@
|
||||
const Task = require('./task');
|
||||
const TtsTask = require('./tts-task');
|
||||
const {TaskName, TaskPreconditions} = require('../utils/constants');
|
||||
const pollySSMLSplit = require('polly-ssml-split');
|
||||
|
||||
@@ -29,9 +29,9 @@ const parseTextFromSayString = (text) => {
|
||||
return text.slice(closingBraceIndex + 1);
|
||||
};
|
||||
|
||||
class TaskSay extends Task {
|
||||
class TaskSay extends TtsTask {
|
||||
constructor(logger, opts, parentTask) {
|
||||
super(logger, opts);
|
||||
super(logger, opts, parentTask);
|
||||
this.preconditions = TaskPreconditions.Endpoint;
|
||||
|
||||
this.text = (Array.isArray(this.data.text) ? this.data.text : [this.data.text])
|
||||
@@ -39,10 +39,6 @@ class TaskSay extends Task {
|
||||
.flat();
|
||||
|
||||
this.loop = this.data.loop || 1;
|
||||
this.earlyMedia = this.data.earlyMedia === true || (parentTask && parentTask.earlyMedia);
|
||||
this.synthesizer = this.data.synthesizer || {};
|
||||
this.disableTtsCache = this.data.disableTtsCache;
|
||||
this.options = this.synthesizer.options || {};
|
||||
this.isHandledByPrimaryProvider = true;
|
||||
}
|
||||
|
||||
@@ -70,7 +66,7 @@ class TaskSay extends Task {
|
||||
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, srf);
|
||||
const {writeAlerts, AlertType, stats} = srf.locals;
|
||||
const {synthAudio} = srf.locals.dbHelpers;
|
||||
const engine = this.synthesizer.engine || 'standard';
|
||||
const engine = this.synthesizer.engine || cs.synthesizer?.engine || 'neural';
|
||||
const salt = cs.callSid;
|
||||
|
||||
let credentials = cs.getSpeechCredentials(vendor, 'tts', label);
|
||||
@@ -103,13 +99,16 @@ class TaskSay extends Task {
|
||||
voice = this.options.voice_id || voice;
|
||||
}
|
||||
|
||||
this.ep.set({
|
||||
tts_engine: vendor,
|
||||
ep.set({
|
||||
tts_engine: vendor.startsWith('custom:') ? 'custom' : vendor,
|
||||
tts_voice: voice,
|
||||
cache_speech_handles: 1,
|
||||
cache_speech_handles: !cs.currentTtsVendor || cs.currentTtsVendor === vendor ? 1 : 0,
|
||||
}).catch((err) => this.logger.info({err}, 'Error setting tts_engine on endpoint'));
|
||||
// set the current vendor on the call session
|
||||
// If vendor is changed from the previous one, then reset the cache_speech_handles flag
|
||||
cs.currentTtsVendor = vendor;
|
||||
|
||||
if (!preCache) this.logger.info({vendor, language, voice, model}, 'TaskSay:exec');
|
||||
if (!preCache && !this._disableTracing) this.logger.info({vendor, language, voice, model}, 'TaskSay:exec');
|
||||
try {
|
||||
if (!credentials) {
|
||||
writeAlerts({
|
||||
@@ -117,10 +116,6 @@ class TaskSay extends Task {
|
||||
alert_type: AlertType.TTS_NOT_PROVISIONED,
|
||||
vendor
|
||||
}).catch((err) => this.logger.info({err}, 'Error generating alert for no tts'));
|
||||
this.notifyError({
|
||||
msg: 'TTS error',
|
||||
details:`No speech credentials provisioned for selected vendor ${vendor}`
|
||||
});
|
||||
throw new Error('no provisioned speech credentials for TTS');
|
||||
}
|
||||
// synthesize all of the text elements
|
||||
@@ -132,7 +127,7 @@ class TaskSay extends Task {
|
||||
if (text.startsWith('silence_stream://')) return text;
|
||||
|
||||
/* otel: trace time for tts */
|
||||
if (!preCache) {
|
||||
if (!preCache && !this._disableTracing) {
|
||||
const {span} = this.startChildSpan('tts-generation', {
|
||||
'tts.vendor': vendor,
|
||||
'tts.language': language,
|
||||
@@ -156,7 +151,7 @@ class TaskSay extends Task {
|
||||
preCache
|
||||
});
|
||||
if (!filePath.startsWith('say:')) {
|
||||
this.logger.debug(`file ${filePath}, served from cache ${servedFromCache}`);
|
||||
this.logger.debug(`Say: file ${filePath}, served from cache ${servedFromCache}`);
|
||||
if (filePath) cs.trackTmpFile(filePath);
|
||||
if (this.otelSpan) {
|
||||
this.otelSpan.setAttributes({'tts.cached': servedFromCache});
|
||||
@@ -167,7 +162,7 @@ class TaskSay extends Task {
|
||||
lastUpdated = true;
|
||||
updateSpeechCredentialLastUsed(credentials.speech_credential_sid).catch(() => {/* logged error */});
|
||||
}
|
||||
if (!servedFromCache && rtt && !preCache) {
|
||||
if (!servedFromCache && rtt && !preCache && !this._disableTracing) {
|
||||
this.notifyStatus({
|
||||
event: 'synthesized-audio',
|
||||
vendor,
|
||||
@@ -178,8 +173,8 @@ class TaskSay extends Task {
|
||||
}
|
||||
}
|
||||
else {
|
||||
this.logger.debug('a streaming tts api will be used');
|
||||
const modifiedPath = filePath.replace('say:{', `say:{session-uuid=${this.ep.uuid},`);
|
||||
this.logger.debug('Say: a streaming tts api will be used');
|
||||
const modifiedPath = filePath.replace('say:{', `say:{session-uuid=${ep.uuid},`);
|
||||
return modifiedPath;
|
||||
}
|
||||
return filePath;
|
||||
@@ -192,7 +187,6 @@ class TaskSay extends Task {
|
||||
vendor,
|
||||
detail: err.message
|
||||
}).catch((err) => this.logger.info({err}, 'Error generating alert for tts failure'));
|
||||
this.notifyError({msg: 'TTS error', details: err.message || err});
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
@@ -210,23 +204,24 @@ class TaskSay extends Task {
|
||||
const {srf, accountSid:account_sid} = cs;
|
||||
const {writeAlerts, AlertType} = srf.locals;
|
||||
const {addFileToCache} = srf.locals.dbHelpers;
|
||||
const engine = this.synthesizer.engine || 'standard';
|
||||
const engine = this.synthesizer.engine || cs.synthesizer?.engine || 'neural';
|
||||
|
||||
await super.exec(cs);
|
||||
this.ep = ep;
|
||||
|
||||
const vendor = this.synthesizer.vendor && this.synthesizer.vendor !== 'default' ?
|
||||
let vendor = this.synthesizer.vendor && this.synthesizer.vendor !== 'default' ?
|
||||
this.synthesizer.vendor :
|
||||
cs.speechSynthesisVendor;
|
||||
const language = this.synthesizer.language && this.synthesizer.language !== 'default' ?
|
||||
let language = this.synthesizer.language && this.synthesizer.language !== 'default' ?
|
||||
this.synthesizer.language :
|
||||
cs.speechSynthesisLanguage ;
|
||||
const voice = this.synthesizer.voice && this.synthesizer.voice !== 'default' ?
|
||||
let voice = this.synthesizer.voice && this.synthesizer.voice !== 'default' ?
|
||||
this.synthesizer.voice :
|
||||
cs.speechSynthesisVoice;
|
||||
const label = this.synthesizer.label && this.synthesizer.label !== 'default' ?
|
||||
this.synthesizer.label :
|
||||
cs.speechSynthesisLabel;
|
||||
// label can be null/empty in the synthesizer config; only substitute the application-level label when it is 'default'
|
||||
let label = this.synthesizer.label === 'default' ?
|
||||
cs.speechSynthesisLabel :
|
||||
this.synthesizer.label;
|
||||
|
||||
const fallbackVendor = this.synthesizer.fallbackVendor && this.synthesizer.fallbackVendor !== 'default' ?
|
||||
this.synthesizer.fallbackVendor :
|
||||
@@ -237,16 +232,24 @@ class TaskSay extends Task {
|
||||
const fallbackVoice = this.synthesizer.fallbackVoice && this.synthesizer.fallbackVoice !== 'default' ?
|
||||
this.synthesizer.fallbackVoice :
|
||||
cs.fallbackSpeechSynthesisVoice;
|
||||
const fallbackLabel = this.synthesizer.fallbackLabel && this.synthesizer.fallbackLabel !== 'default' ?
|
||||
this.synthesizer.fallbackLabel :
|
||||
cs.fallbackSpeechSynthesisLabel;
|
||||
// label can be null/empty in the synthesizer config; only substitute the application-level label when it is 'default'
|
||||
const fallbackLabel = this.synthesizer.fallbackLabel === 'default' ?
|
||||
cs.fallbackSpeechSynthesisLabel :
|
||||
this.synthesizer.fallbackLabel;
|
||||
|
||||
let filepath;
|
||||
try {
|
||||
filepath = await this._synthesizeWithSpecificVendor(cs, ep, {vendor, language, voice, label});
|
||||
} catch (error) {
|
||||
if (fallbackVendor && this.isHandledByPrimaryProvider) {
|
||||
if (cs.hasFallbackTts) {
|
||||
vendor = fallbackVendor;
|
||||
language = fallbackLanguage;
|
||||
voice = fallbackVoice;
|
||||
label = fallbackLabel;
|
||||
}
|
||||
|
||||
const startFallback = async(error) => {
|
||||
if (fallbackVendor && this.isHandledByPrimaryProvider && !cs.hasFallbackTts) {
|
||||
this.notifyError(
|
||||
{ msg: 'TTS error', details:`TTS vendor ${vendor} error: ${error}`, failover: 'in progress'});
|
||||
this.isHandledByPrimaryProvider = false;
|
||||
cs.hasFallbackTts = true;
|
||||
this.logger.info(`Synthesize error, fallback to ${fallbackVendor}`);
|
||||
filepath = await this._synthesizeWithSpecificVendor(cs, ep,
|
||||
{
|
||||
@@ -256,62 +259,107 @@ class TaskSay extends Task {
|
||||
label: fallbackLabel
|
||||
});
|
||||
} else {
|
||||
this.notifyError(
|
||||
{ msg: 'TTS error', details:`TTS vendor ${vendor} error: ${error}`, failover: 'not available'});
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
let filepath;
|
||||
try {
|
||||
filepath = await this._synthesizeWithSpecificVendor(cs, ep, {vendor, language, voice, label});
|
||||
} catch (error) {
|
||||
await startFallback(error);
|
||||
}
|
||||
this.notifyStatus({event: 'start-playback'});
|
||||
|
||||
while (!this.killed && (this.loop === 'forever' || this.loop--) && this.ep?.connected) {
|
||||
while (!this.killed && (this.loop === 'forever' || this.loop--) && ep?.connected) {
|
||||
let segment = 0;
|
||||
while (!this.killed && segment < filepath.length) {
|
||||
if (cs.isInConference) {
|
||||
const {memberId, confName, confUuid} = cs;
|
||||
await this.playToConfMember(this.ep, memberId, confName, confUuid, filepath[segment]);
|
||||
await this.playToConfMember(ep, memberId, confName, confUuid, filepath[segment]);
|
||||
}
|
||||
else {
|
||||
let tts_cache_filename;
|
||||
if (filepath[segment].startsWith('say:{')) {
|
||||
const arr = /^say:\{.*\}\s*(.*)$/.exec(filepath[segment]);
|
||||
if (arr) this.logger.debug(`Say:exec sending streaming tts request: ${arr[1].substring(0, 64)}..`);
|
||||
}
|
||||
else this.logger.debug(`Say:exec sending ${filepath[segment].substring(0, 64)}`);
|
||||
this.ep.once('playback-start', (evt) => {
|
||||
this.logger.debug({evt}, 'got playback-start');
|
||||
ep.once('playback-start', (evt) => {
|
||||
this.logger.debug({evt}, 'Say got playback-start');
|
||||
if (this.otelSpan) {
|
||||
this.logger.debug({evt}, 'got playback-start');
|
||||
this._addStreamingTtsAttributes(this.otelSpan, evt);
|
||||
this.otelSpan.end();
|
||||
this.otelSpan = null;
|
||||
if (evt.variable_tts_cache_filename) cs.trackTmpFile(evt.variable_tts_cache_filename);
|
||||
if (evt.variable_tts_cache_filename) {
|
||||
tts_cache_filename = evt.variable_tts_cache_filename;
|
||||
cs.trackTmpFile(evt.variable_tts_cache_filename);
|
||||
}
|
||||
else {
|
||||
this.logger.info('No tts_cache_filename in playback-start event');
|
||||
}
|
||||
}
|
||||
});
|
||||
this.ep.once('playback-stop', (evt) => {
|
||||
this.logger.debug({evt}, 'got playback-stop');
|
||||
if (evt.variable_tts_error) {
|
||||
writeAlerts({
|
||||
account_sid,
|
||||
alert_type: AlertType.TTS_FAILURE,
|
||||
vendor,
|
||||
detail: evt.variable_tts_error
|
||||
}).catch((err) => this.logger.info({err}, 'Error generating alert for no tts'));
|
||||
ep.once('playback-stop', (evt) => {
|
||||
if (!tts_cache_filename || evt.variable_tts_cache_filename !== tts_cache_filename) {
|
||||
this.logger.info({evt}, 'Say: discarding playback-stop from other say verb');
|
||||
}
|
||||
if (evt.variable_tts_cache_filename) {
|
||||
const text = parseTextFromSayString(this.text[segment]);
|
||||
addFileToCache(evt.variable_tts_cache_filename, {
|
||||
account_sid,
|
||||
vendor,
|
||||
language,
|
||||
voice,
|
||||
engine,
|
||||
text
|
||||
}).catch((err) => this.logger.info({err}, 'Error adding file to cache'));
|
||||
else {
|
||||
this.logger.debug({evt}, 'Say got playback-stop');
|
||||
if (evt.variable_tts_error) {
|
||||
writeAlerts({
|
||||
account_sid,
|
||||
alert_type: AlertType.TTS_FAILURE,
|
||||
vendor,
|
||||
detail: evt.variable_tts_error
|
||||
}).catch((err) => this.logger.info({err}, 'Error generating alert for no tts'));
|
||||
}
|
||||
if (evt.variable_tts_cache_filename && !this.killed) {
|
||||
const text = parseTextFromSayString(this.text[segment]);
|
||||
addFileToCache(evt.variable_tts_cache_filename, {
|
||||
account_sid,
|
||||
vendor,
|
||||
language,
|
||||
voice,
|
||||
engine,
|
||||
text
|
||||
}).catch((err) => this.logger.info({err}, 'Error adding file to cache'));
|
||||
}
|
||||
}
|
||||
if (this._playResolve) {
|
||||
evt.variable_tts_error ? this._playReject(new Error(evt.variable_tts_error)) : this._playResolve();
|
||||
}
|
||||
});
|
||||
await ep.play(filepath[segment]);
|
||||
// wait for the playback-stop event to confirm whether the playback succeeded
|
||||
this._playPromise = new Promise((resolve, reject) => {
|
||||
this._playResolve = resolve;
|
||||
this._playReject = reject;
|
||||
});
|
||||
const r = await ep.play(filepath[segment]);
|
||||
this.logger.debug({r}, 'Say:exec play result');
|
||||
try {
|
||||
// wait for the playback-stop event to confirm whether the playback succeeded
|
||||
await this._playPromise;
|
||||
} catch (err) {
|
||||
try {
|
||||
await startFallback(err);
|
||||
continue;
|
||||
} catch (err) {
|
||||
this.logger.info({err}, 'Error waiting for playback-stop event');
|
||||
}
|
||||
} finally {
|
||||
this._playPromise = null;
|
||||
this._playResolve = null;
|
||||
this._playReject = null;
|
||||
}
|
||||
if (filepath[segment].startsWith('say:{')) {
|
||||
const arr = /^say:\{.*\}\s*(.*)$/.exec(filepath[segment]);
|
||||
if (arr) this.logger.debug(`Say:exec complete playing streaming tts request: ${arr[1].substring(0, 64)}..`);
|
||||
} else {
|
||||
// This log will print speech credentials in the say command for tts stream mode
|
||||
this.logger.debug(`Say:exec completed play file ${filepath[segment]}`);
|
||||
}
|
||||
this.logger.debug(`Say:exec completed play file ${filepath[segment]}`);
|
||||
}
|
||||
segment++;
|
||||
}
|
||||
@@ -321,7 +369,7 @@ class TaskSay extends Task {
|
||||
|
||||
async kill(cs) {
|
||||
super.kill(cs);
|
||||
if (this.ep.connected) {
|
||||
if (this.ep?.connected) {
|
||||
this.logger.debug('TaskSay:kill - killing audio');
|
||||
if (cs.isInConference) {
|
||||
const {memberId, confName} = cs;
|
||||
@@ -333,6 +381,11 @@ class TaskSay extends Task {
|
||||
}
|
||||
this.ep.removeAllListeners('playback-start');
|
||||
this.ep.removeAllListeners('playback-stop');
|
||||
// if we are waiting for playback-stop event, resolve the promise
|
||||
if (this._playResolve) {
|
||||
this._playResolve();
|
||||
this._playResolve = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -341,20 +394,63 @@ class TaskSay extends Task {
|
||||
for (const [key, value] of Object.entries(evt)) {
|
||||
if (key.startsWith('variable_tts_')) {
|
||||
let newKey = key.substring('variable_tts_'.length)
|
||||
.replace('whisper_', 'whisper.')
|
||||
.replace('deepgram_', 'deepgram.')
|
||||
.replace('playht_', 'playht.')
|
||||
.replace('rimelabs_', 'rimelabs.')
|
||||
.replace('verbio_', 'verbio.')
|
||||
.replace('elevenlabs_', 'elevenlabs.');
|
||||
if (spanMapping[newKey]) newKey = spanMapping[newKey];
|
||||
attrs[newKey] = value;
|
||||
}
|
||||
}
|
||||
delete attrs['cache_filename']; //no value in adding this to the span
|
||||
span.setAttributes(attrs);
|
||||
}
|
||||
}
|
||||
|
||||
const spanMapping = {
|
||||
// IMPORTANT!!! THE JAMBONZ WEBAPP WILL DISPLAY TEXT CORRECTLY ONLY IF THE SPAN NAME IS 25 CHARACTERS OR FEWER.
|
||||
// EX: whisper.ratelim_reqs has length 20 <= 25 which is perfect
|
||||
// Elevenlabs
|
||||
'elevenlabs.reported_latency_ms': 'elevenlabs.latency_ms',
|
||||
'elevenlabs.request_id': 'elevenlabs.req_id',
|
||||
'elevenlabs.history_item_id': 'elevenlabs.item_id',
|
||||
'elevenlabs.optimize_streaming_latency': 'elevenlabs.optimization',
|
||||
'elevenlabs.name_lookup_time_ms': 'name_lookup_ms',
|
||||
'elevenlabs.connect_time_ms': 'connect_ms',
|
||||
'elevenlabs.final_response_time_ms': 'final_response_ms',
|
||||
// Whisper
|
||||
'whisper.reported_latency_ms': 'whisper.latency_ms',
|
||||
'whisper.request_id': 'whisper.req_id',
|
||||
'whisper.reported_organization': 'whisper.organization',
|
||||
'whisper.reported_ratelimit_requests': 'whisper.ratelimit',
|
||||
'whisper.reported_ratelimit_remaining_requests': 'whisper.ratelimit_remain',
|
||||
'whisper.reported_ratelimit_reset_requests': 'whisper.ratelimit_reset',
|
||||
'whisper.name_lookup_time_ms': 'name_lookup_ms',
|
||||
'whisper.connect_time_ms': 'connect_ms',
|
||||
'whisper.final_response_time_ms': 'final_response_ms',
|
||||
// Deepgram
|
||||
'deepgram.request_id': 'deepgram.req_id',
|
||||
'deepgram.reported_model_name': 'deepgram.model_name',
|
||||
'deepgram.reported_model_uuid': 'deepgram.model_uuid',
|
||||
'deepgram.reported_char_count': 'deepgram.char_count',
|
||||
'deepgram.name_lookup_time_ms': 'name_lookup_ms',
|
||||
'deepgram.connect_time_ms': 'connect_ms',
|
||||
'deepgram.final_response_time_ms': 'final_response_ms',
|
||||
// Playht
|
||||
'playht.request_id': 'playht.req_id',
|
||||
'playht.name_lookup_time_ms': 'name_lookup_ms',
|
||||
'playht.connect_time_ms': 'connect_ms',
|
||||
'playht.final_response_time_ms': 'final_response_ms',
|
||||
// Rimelabs
|
||||
'rimelabs.name_lookup_time_ms': 'name_lookup_ms',
|
||||
'rimelabs.connect_time_ms': 'connect_ms',
|
||||
'rimelabs.final_response_time_ms': 'final_response_ms',
|
||||
// verbio
|
||||
'verbio.name_lookup_time_ms': 'name_lookup_ms',
|
||||
'verbio.connect_time_ms': 'connect_ms',
|
||||
'verbio.final_response_time_ms': 'final_response_ms',
|
||||
};
|
||||
|
||||
module.exports = TaskSay;
|
||||
|
||||
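To summarize the failover flow the changes above add to TaskSay (a condensed sketch, not text from the commit; every identifier appears in the diff): synthesis is attempted with the primary vendor, any synthesis error switches to the fallback vendor, and a tts error reported on playback-stop can trigger the same switch mid-verb.

// Condensed sketch of the new TaskSay failover flow (details elided; see the diff above)
let filepath;
try {
  filepath = await this._synthesizeWithSpecificVendor(cs, ep, {vendor, language, voice, label});
} catch (error) {
  await startFallback(error);  // retries with fallbackVendor/-Language/-Voice/-Label, or rethrows
}
// each ep.play() is wrapped in this._playPromise; the playback-stop listener resolves it on
// success and rejects it when the event carries variable_tts_error, in which case
// startFallback() runs and the while loop continues with the fallback vendor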
@@ -33,7 +33,8 @@ class SttTask extends Task {
|
||||
//fallback
|
||||
this.fallbackVendor = recognizer.fallbackVendor || 'default';
|
||||
this.fallbackLanguage = recognizer.fallbackLanguage || 'default';
|
||||
this.fallbackLabel = recognizer.fallbackLabel || 'default';
|
||||
// label can be empty and should not have a default value.
|
||||
this.fallbackLabel = recognizer.fallbackLabel;
|
||||
|
||||
/* let credentials be supplied in the recognizer object at runtime */
|
||||
this.sttCredentials = setSpeechCredentialsAtRuntime(recognizer);
|
||||
@@ -56,24 +57,20 @@ class SttTask extends Task {
|
||||
super.exec(cs);
|
||||
this.ep = ep;
|
||||
this.ep2 = ep2;
|
||||
// copy all value from config verb to this object.
|
||||
|
||||
// use session preferences if we don't have specific verb-level settings.
|
||||
if (cs.recognizer) {
|
||||
for (const k in cs.recognizer) {
|
||||
if (Array.isArray(this.data.recognizer[k]) ||
|
||||
Array.isArray(cs.recognizer[k])) {
|
||||
this.data.recognizer[k] = [
|
||||
...this.data.recognizer[k],
|
||||
...cs.recognizer[k]
|
||||
];
|
||||
} else if (typeof this.data.recognizer[k] === 'object' ||
|
||||
typeof cs.recognizer[k] === 'object'
|
||||
) {
|
||||
this.data.recognizer[k] = {
|
||||
...this.data.recognizer[k],
|
||||
...cs.recognizer[k]
|
||||
};
|
||||
const newValue = this.data.recognizer && this.data.recognizer[k] !== undefined ?
|
||||
this.data.recognizer[k] :
|
||||
cs.recognizer[k];
|
||||
|
||||
if (Array.isArray(newValue)) {
|
||||
this.data.recognizer[k] = [...(this.data.recognizer[k] || []), ...cs.recognizer[k]];
|
||||
} else if (typeof newValue === 'object' && newValue !== null) {
|
||||
this.data.recognizer[k] = { ...(this.data.recognizer[k] || {}), ...cs.recognizer[k] };
|
||||
} else {
|
||||
this.data.recognizer[k] = cs.recognizer[k] || this.data.recognizer[k];
|
||||
this.data.recognizer[k] = newValue;
|
||||
}
|
||||
}
|
||||
}
|
||||
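For clarity (an illustrative sketch with assumed values, not part of the commit), the merge above prefers verb-level scalars when they are set, concatenates arrays, and shallow-merges plain objects with the session-level recognizer:

// Illustrative merge of session-level and verb-level recognizer settings (values assumed)
//   session (cs.recognizer):     { vendor: 'google', hints: ['sales'], deepgramOptions: { tier: 'base' } }
//   verb (this.data.recognizer): { hints: ['support'], deepgramOptions: { model: 'nova' } }
//   result:                      { vendor: 'google', hints: ['support', 'sales'],
//                                  deepgramOptions: { model: 'nova', tier: 'base' } }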
@@ -85,7 +82,8 @@ class SttTask extends Task {
|
||||
this.language = cs.speechRecognizerLanguage;
|
||||
if (this.data.recognizer) this.data.recognizer.language = this.language;
|
||||
}
|
||||
if ('default' === this.label || !this.label) {
|
||||
// label can be empty; do not assign the application-level label
|
||||
if ('default' === this.label) {
|
||||
this.label = cs.speechRecognizerLabel;
|
||||
if (this.data.recognizer) this.data.recognizer.label = this.label;
|
||||
}
|
||||
@@ -98,10 +96,18 @@ class SttTask extends Task {
|
||||
this.fallbackLanguage = cs.fallbackSpeechRecognizerLanguage;
|
||||
if (this.data.recognizer) this.data.recognizer.fallbackLanguage = this.fallbackLanguage;
|
||||
}
|
||||
if ('default' === this.fallbackLabel || !this.fallbackLabel) {
|
||||
// label can be empty; do not assign the application-level label
|
||||
if ('default' === this.fallbackLabel) {
|
||||
this.fallbackLabel = cs.fallbackSpeechRecognizerLabel;
|
||||
if (this.data.recognizer) this.data.recognizer.fallbackLabel = this.fallbackLabel;
|
||||
}
|
||||
// If the call has already fallen back to the 2nd ASR vendor,
|
||||
// use that vendor.
|
||||
if (cs.hasFallbackAsr) {
|
||||
this.vendor = this.fallbackVendor;
|
||||
this.language = this.fallbackLanguage;
|
||||
this.label = this.fallbackLabel;
|
||||
}
|
||||
if (!this.data.recognizer.vendor) {
|
||||
this.data.recognizer.vendor = this.vendor;
|
||||
}
|
||||
@@ -119,9 +125,19 @@ class SttTask extends Task {
|
||||
try {
|
||||
this.sttCredentials = await this._initSpeechCredentials(this.cs, this.vendor, this.label);
|
||||
} catch (error) {
|
||||
if (this.fallbackVendor && this.isHandledByPrimaryProvider) {
|
||||
await this._fallback();
|
||||
if (this.canFallback) {
|
||||
this.notifyError(
|
||||
{
|
||||
msg: 'ASR error', details:`Invalid vendor ${this.vendor}, Error: ${error}`,
|
||||
failover: 'in progress'
|
||||
});
|
||||
await this._initFallback();
|
||||
} else {
|
||||
this.notifyError(
|
||||
{
|
||||
msg: 'ASR error', details:`Invalid vendor ${this.vendor}, Error: ${error}`,
|
||||
failover: 'not available'
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
@@ -153,7 +169,7 @@ class SttTask extends Task {
|
||||
}
|
||||
|
||||
async _initSpeechCredentials(cs, vendor, label) {
|
||||
const {getNuanceAccessToken, getIbmAccessToken} = cs.srf.locals.dbHelpers;
|
||||
const {getNuanceAccessToken, getIbmAccessToken, getAwsAuthToken, getVerbioAccessToken} = cs.srf.locals.dbHelpers;
|
||||
let credentials = cs.getSpeechCredentials(vendor, 'stt', label);
|
||||
|
||||
if (!credentials) {
|
||||
@@ -164,11 +180,6 @@ class SttTask extends Task {
|
||||
alert_type: AlertType.STT_NOT_PROVISIONED,
|
||||
vendor
|
||||
}).catch((err) => this.logger.info({err}, 'Error generating alert for no stt'));
|
||||
// Notify the application that the STT vendor is invalid.
|
||||
this.notifyError({
|
||||
msg: 'ASR error',
|
||||
details: `No speech-to-text service credentials for ${vendor} have been configured`
|
||||
});
|
||||
this.notifyTaskDone();
|
||||
throw new Error(`No speech-to-text service credentials for ${vendor} have been configured`);
|
||||
}
|
||||
@@ -186,13 +197,33 @@ class SttTask extends Task {
|
||||
const {access_token, servedFromCache} = await getIbmAccessToken(stt_api_key);
|
||||
this.logger.debug({stt_api_key}, `got ibm access token ${servedFromCache ? 'from cache' : ''}`);
|
||||
credentials = {...credentials, access_token, stt_region};
|
||||
} else if (['aws', 'polly'].includes(vendor) && credentials.roleArn) {
|
||||
/* get aws access token */
|
||||
const {roleArn, region} = credentials;
|
||||
const {accessKeyId, secretAccessKey, sessionToken, servedFromCache} =
|
||||
await getAwsAuthToken({
|
||||
region,
|
||||
roleArn
|
||||
});
|
||||
this.logger.debug({roleArn}, `got aws access token ${servedFromCache ? 'from cache' : ''}`);
|
||||
credentials = {...credentials, accessKeyId, secretAccessKey, sessionToken};
|
||||
} else if (vendor === 'verbio' && credentials.client_id && credentials.client_secret) {
|
||||
const {access_token, servedFromCache} = await getVerbioAccessToken(credentials);
|
||||
this.logger.debug({client_id: credentials.client_id},
|
||||
`got verbio access token ${servedFromCache ? 'from cache' : ''}`);
|
||||
credentials.access_token = access_token;
|
||||
}
|
||||
return credentials;
|
||||
}
|
||||
|
||||
async _fallback() {
|
||||
get canFallback() {
|
||||
return this.fallbackVendor && this.isHandledByPrimaryProvider && !this.cs.hasFallbackAsr;
|
||||
}
|
||||
|
||||
async _initFallback() {
|
||||
assert(this.fallbackVendor, 'fallback failed without fallbackVendor configuration');
|
||||
this.isHandledByPrimaryProvider = false;
|
||||
this.cs.hasFallbackAsr = true;
|
||||
this.logger.info(`Failed to use primary STT provider, fallback to ${this.fallbackVendor}`);
|
||||
this.vendor = this.fallbackVendor;
|
||||
this.language = this.fallbackLanguage;
|
||||
@@ -201,6 +232,8 @@ class SttTask extends Task {
|
||||
this.data.recognizer.language = this.language;
|
||||
this.data.recognizer.label = this.label;
|
||||
this.sttCredentials = await this._initSpeechCredentials(this.cs, this.vendor, this.label);
|
||||
// clean up event listeners registered for the previous vendor
|
||||
this.removeCustomEventListeners();
|
||||
}
|
||||
|
||||
async compileHintsForCobalt(ep, hostport, model, token, hints) {
|
||||
@@ -244,6 +277,20 @@ class SttTask extends Task {
|
||||
_doContinuousAsrWithDeepgram(asrTimeout) {
|
||||
/* deepgram has an utterance_end_ms property that simplifies things */
|
||||
assert(this.vendor === 'deepgram');
|
||||
if (asrTimeout < 1000) {
|
||||
this.notifyError({
|
||||
msg: 'ASR error',
|
||||
details:`asrTimeout ${asrTimeout} is too short for deepgram; setting it to 1000ms`
|
||||
});
|
||||
asrTimeout = 1000;
|
||||
}
|
||||
else if (asrTimeout > 5000) {
|
||||
this.notifyError({
|
||||
msg: 'ASR error',
|
||||
details:`asrTimeout ${asrTimeout} is too long for deepgram; setting it to 5000ms`
|
||||
});
|
||||
asrTimeout = 5000;
|
||||
}
|
||||
this.logger.debug(`_doContinuousAsrWithDeepgram - setting utterance_end_ms to ${asrTimeout}`);
|
||||
const dgOptions = this.data.recognizer.deepgramOptions = this.data.recognizer.deepgramOptions || {};
|
||||
dgOptions.utteranceEndMs = dgOptions.utteranceEndMs || asrTimeout;
|
||||
@@ -263,7 +310,6 @@ class SttTask extends Task {
|
||||
detail: evt.error,
|
||||
vendor: this.vendor,
|
||||
}).catch((err) => this.logger.info({err}, `Error generating alert for ${this.vendor} connection failure`));
|
||||
this.notifyError({msg: 'ASR error', details:`Failed connecting to speech vendor ${this.vendor}: ${evt.error}`});
|
||||
}
|
||||
|
||||
_onVendorConnectFailure(cs, _ep, evt) {
|
||||
@@ -276,7 +322,6 @@ class SttTask extends Task {
|
||||
message: `Failed connecting to ${this.vendor} speech recognizer: ${reason}`,
|
||||
vendor: this.vendor,
|
||||
}).catch((err) => this.logger.info({err}, `Error generating alert for ${this.vendor} connection failure`));
|
||||
this.notifyError({msg: 'ASR error', details:`Failed connecting to speech vendor ${this.vendor}: ${reason}`});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -45,6 +45,10 @@ class Task extends Emitter {
|
||||
return this.name;
|
||||
}
|
||||
|
||||
set disableTracing(val) {
|
||||
this._disableTracing = val;
|
||||
}
|
||||
|
||||
toJSON() {
|
||||
return this.data;
|
||||
}
|
||||
@@ -173,12 +177,20 @@ class Task extends Emitter {
|
||||
* first new set of verbs arrive after sending a transcript
|
||||
* */
|
||||
this.emit('VerbHookSpanWaitForEnd', {span});
|
||||
|
||||
// If an actionHook delay action is configured and the ws application has not yet responded with any verb for the actionHook,
|
||||
// we have to transfer the task to the call-session to await the next ws command verbs, and also run the actionHook
|
||||
// delay actions
|
||||
//if (this.hookDelayActionOpts) {
|
||||
// this.emit('ActionHookDelayActionOptions', this.hookDelayActionOpts);
|
||||
//}
|
||||
}
|
||||
if (expectResponse && json && Array.isArray(json)) {
|
||||
const makeTask = require('./make_task');
|
||||
const tasks = normalizeJambones(this.logger, json).map((tdata) => makeTask(this.logger, tdata));
|
||||
if (tasks && tasks.length > 0) {
|
||||
this.callSession.replaceApplication(tasks);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
@@ -186,6 +198,7 @@ class Task extends Emitter {
|
||||
span.end();
|
||||
throw err;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -265,6 +278,7 @@ class Task extends Emitter {
|
||||
delete obj.requestor;
|
||||
delete obj.notifier;
|
||||
obj.tasks = cs.getRemainingTaskData();
|
||||
obj.callInfo = cs.callInfo.toJSON();
|
||||
if (opts && obj.tasks.length > 0) {
|
||||
const key = Object.keys(obj.tasks[0])[0];
|
||||
Object.assign(obj.tasks[0][key], {_: opts});
|
||||
|
||||
@@ -31,6 +31,25 @@ class TaskTranscribe extends SttTask {
|
||||
this.separateRecognitionPerChannel = this.data.recognizer.separateRecognitionPerChannel;
|
||||
}
|
||||
|
||||
/* for a nested transcribe in dial, unless the app explicitly says otherwise we want to transcribe both legs */
|
||||
if (this.parentTask?.name === TaskName.Dial) {
|
||||
if (this.data.channel === 1 || this.data.channel === 2) {
|
||||
/* transcribe only the channel specified */
|
||||
this.separateRecognitionPerChannel = false;
|
||||
this.channel = this.data.channel;
|
||||
logger.debug(`TaskTranscribe: transcribing only channel ${this.channel} in the Dial verb`);
|
||||
}
|
||||
else if (this.separateRecognitionPerChannel !== false) {
|
||||
this.separateRecognitionPerChannel = true;
|
||||
}
|
||||
else {
|
||||
this.channel = 1;
|
||||
}
|
||||
}
|
||||
else {
|
||||
this.channel = 1;
|
||||
}
|
||||
|
||||
this.childSpan = [null, null];
|
||||
|
||||
// Continuous asr timeout
|
||||
@@ -39,13 +58,21 @@ class TaskTranscribe extends SttTask {
|
||||
this.isContinuousAsr = true;
|
||||
}
|
||||
/* buffer speech for continuous asr */
|
||||
this._bufferedTranscripts = [];
|
||||
this._bufferedTranscripts = [ [], [] ]; // for channel 1 and 2
|
||||
this.bugname_prefix = 'transcribe_';
|
||||
this.paused = false;
|
||||
}
|
||||
|
||||
get name() { return TaskName.Transcribe; }
|
||||
|
||||
get transcribing1() {
|
||||
return this.channel === 1 || this.separateRecognitionPerChannel;
|
||||
}
|
||||
|
||||
get transcribing2() {
|
||||
return this.channel === 2 || this.separateRecognitionPerChannel && this.ep2;
|
||||
}
|
||||
|
||||
async exec(cs, {ep, ep2}) {
|
||||
await super.exec(cs, {ep, ep2});
|
||||
|
||||
@@ -68,25 +95,30 @@ class TaskTranscribe extends SttTask {
|
||||
}
|
||||
|
||||
try {
|
||||
await this._startTranscribing(cs, ep, 1);
|
||||
if (this.separateRecognitionPerChannel && ep2) {
|
||||
if (this.transcribing1) {
|
||||
await this._startTranscribing(cs, ep, 1);
|
||||
}
|
||||
if (this.transcribing2) {
|
||||
await this._startTranscribing(cs, ep2, 2);
|
||||
}
|
||||
|
||||
updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid)
|
||||
.catch(() => {/*already logged error */});
|
||||
|
||||
await this.awaitTaskDone();
|
||||
} catch (err) {
|
||||
this.logger.info(err, 'TaskTranscribe:exec - error');
|
||||
this.parentTask && this.parentTask.emit('error', err);
|
||||
if (!(await this._startFallback(cs, ep, {error: err}))) {
|
||||
this.logger.info(err, 'TaskTranscribe:exec - error');
|
||||
this.parentTask && this.parentTask.emit('error', err);
|
||||
this.removeCustomEventListeners();
|
||||
return;
|
||||
}
|
||||
}
|
||||
await this.awaitTaskDone();
|
||||
this.removeCustomEventListeners();
|
||||
}
|
||||
|
||||
async _stopTranscription() {
|
||||
let stopTranscription = false;
|
||||
if (this.ep?.connected) {
|
||||
if (this.transcribing1 && this.ep?.connected) {
|
||||
stopTranscription = true;
|
||||
this.ep.stopTranscription({
|
||||
vendor: this.vendor,
|
||||
@@ -94,7 +126,7 @@ class TaskTranscribe extends SttTask {
|
||||
})
|
||||
.catch((err) => this.logger.info(err, 'Error TaskTranscribe:kill'));
|
||||
}
|
||||
if (this.separateRecognitionPerChannel && this.ep2 && this.ep2.connected) {
|
||||
if (this.transcribing2 && this.ep2?.connected) {
|
||||
stopTranscription = true;
|
||||
this.ep2.stopTranscription({vendor: this.vendor, bugname: this.bugname})
|
||||
.catch((err) => this.logger.info(err, 'Error TaskTranscribe:kill'));
|
||||
@@ -123,10 +155,8 @@ class TaskTranscribe extends SttTask {
|
||||
break;
|
||||
case TranscribeStatus.Resume:
|
||||
this.paused = false;
|
||||
await this._startTranscribing(this.cs, this.ep, 1);
|
||||
if (this.separateRecognitionPerChannel && this.ep2) {
|
||||
await this._startTranscribing(this.cs, this.ep2, 2);
|
||||
}
|
||||
if (this.transcribing1) await this._startTranscribing(this.cs, this.ep, 1);
|
||||
if (this.transcribing2) await this._startTranscribing(this.cs, this.ep2, 2);
|
||||
break;
|
||||
}
|
||||
}
|
||||
@@ -167,8 +197,8 @@ class TaskTranscribe extends SttTask {
|
||||
this.bugname = `${this.bugname_prefix}azure_transcribe`;
|
||||
this.addCustomEventListener(ep, AzureTranscriptionEvents.Transcription,
|
||||
this._onTranscription.bind(this, cs, ep, channel));
|
||||
this.addCustomEventListener(ep, AzureTranscriptionEvents.NoSpeechDetected,
|
||||
this._onNoAudio.bind(this, cs, ep, channel));
|
||||
//this.addCustomEventListener(ep, AzureTranscriptionEvents.NoSpeechDetected,
|
||||
// this._onNoAudio.bind(this, cs, ep, channel));
|
||||
break;
|
||||
case 'nuance':
|
||||
this.bugname = `${this.bugname_prefix}nuance_transcribe`;
|
||||
@@ -185,7 +215,7 @@ class TaskTranscribe extends SttTask {
|
||||
this._onVendorConnectFailure.bind(this, cs, ep, channel));
|
||||
|
||||
/* if app sets deepgramOptions.utteranceEndMs they essentially want continuous asr */
|
||||
if (opts.DEEPGRAM_SPEECH_UTTERANCE_END_MS) this.isContinuousAsr = true;
|
||||
//if (opts.DEEPGRAM_SPEECH_UTTERANCE_END_MS) this.isContinuousAsr = true;
|
||||
|
||||
break;
|
||||
case 'soniox':
|
||||
@@ -289,7 +319,7 @@ class TaskTranscribe extends SttTask {
|
||||
vendor: this.vendor,
|
||||
interim: this.interim ? true : false,
|
||||
locale: this.language,
|
||||
channels: /*this.separateRecognitionPerChannel ? 2 : */ 1,
|
||||
channels: 1,
|
||||
bugname: this.bugname,
|
||||
hostport: this.hostport
|
||||
});
|
||||
@@ -298,24 +328,32 @@ class TaskTranscribe extends SttTask {
|
||||
async _onTranscription(cs, ep, channel, evt, fsEvent) {
|
||||
// make sure this is not a transcript from answering machine detection
|
||||
const bugname = fsEvent.getHeader('media-bugname');
|
||||
const finished = fsEvent.getHeader('transcription-session-finished');
|
||||
const bufferedTranscripts = this._bufferedTranscripts[channel - 1];
|
||||
if (bugname && this.bugname !== bugname) return;
|
||||
if (this.paused) {
|
||||
this.logger.debug({evt}, 'TaskTranscribe:_onTranscription - paused, ignoring transcript');
|
||||
}
|
||||
|
||||
|
||||
if (this.vendor === 'ibm' && evt?.state === 'listening') return;
|
||||
|
||||
if (this.vendor === 'deepgram' && evt.type === 'UtteranceEnd') {
|
||||
/* we will only get this when we have set utterance_end_ms */
|
||||
if (this._bufferedTranscripts.length === 0) {
|
||||
|
||||
/* DH: send a speech event when we get UtteranceEnd if they want interim events */
|
||||
if (this.interim) {
|
||||
this.logger.debug('Gather:_onTranscription - got UtteranceEnd event from deepgram, sending speech event');
|
||||
this._resolve(channel, evt);
|
||||
}
|
||||
if (bufferedTranscripts.length === 0) {
|
||||
this.logger.debug('Gather:_onTranscription - got UtteranceEnd event from deepgram but no buffered transcripts');
|
||||
}
|
||||
else {
|
||||
this.logger.debug('Gather:_onTranscription - got UtteranceEnd event from deepgram, return buffered transcript');
|
||||
evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language);
|
||||
this._bufferedTranscripts = [];
|
||||
this._resolve('speech', evt);
|
||||
evt = this.consolidateTranscripts(bufferedTranscripts, channel, this.language, this.vendor);
|
||||
evt.is_final = true;
|
||||
this._bufferedTranscripts[channel - 1] = [];
|
||||
this._resolve(channel, evt);
|
||||
}
|
||||
return;
|
||||
}
|
||||
@@ -329,55 +367,122 @@ class TaskTranscribe extends SttTask {
|
||||
return;
|
||||
}
|
||||
|
||||
if (evt.alternatives[0]?.transcript === '' && !cs.callGone && !this.killed) {
|
||||
if (['microsoft', 'deepgram'].includes(this.vendor)) {
|
||||
this.logger.info({evt}, 'TaskTranscribe:_onTranscription - got empty transcript, continue listening');
|
||||
let emptyTranscript = false;
|
||||
if (evt.is_final) {
|
||||
if (evt.alternatives.length === 0 || evt.alternatives[0].transcript === '' && !cs.callGone && !this.killed) {
|
||||
emptyTranscript = true;
|
||||
if (finished === 'true' &&
|
||||
['microsoft', 'deepgram'].includes(this.vendor) &&
|
||||
bufferedTranscripts.length === 0) {
|
||||
this.logger.debug({evt}, 'TaskGather:_onTranscription - got empty transcript from old gather, disregarding');
|
||||
return;
|
||||
}
|
||||
else if (this.vendor !== 'deepgram') {
|
||||
this.logger.info({evt}, 'TaskGather:_onTranscription - got empty transcript, continue listening');
|
||||
return;
|
||||
}
|
||||
else if (this.isContinuousAsr) {
|
||||
this.logger.info({evt},
|
||||
'TaskGather:_onTranscription - got empty deepgram transcript during continuous asr, continue listening');
|
||||
return;
|
||||
}
|
||||
else if (this.vendor === 'deepgram' && bufferedTranscripts.length > 0) {
|
||||
this.logger.info({evt},
|
||||
'TaskGather:_onTranscription - got empty transcript from deepgram, return the buffered transcripts');
|
||||
}
|
||||
}
|
||||
if (this.isContinuousAsr) {
|
||||
/* append the transcript and start listening again for asrTimeout */
|
||||
const t = evt.alternatives[0].transcript;
|
||||
if (t) {
|
||||
/* remove trailing punctuation */
|
||||
if (/[,;:\.!\?]$/.test(t)) {
|
||||
this.logger.debug('TaskGather:_onTranscription - removing trailing punctuation');
|
||||
evt.alternatives[0].transcript = t.slice(0, -1);
|
||||
}
|
||||
}
|
||||
this.logger.info({evt}, 'TaskGather:_onTranscription - got transcript during continous asr');
|
||||
bufferedTranscripts.push(evt);
|
||||
this._startAsrTimer(channel);
|
||||
|
||||
/* some STT engines will keep listening after a final response, so no need to restart */
|
||||
if (!['soniox', 'aws', 'microsoft', 'deepgram', 'google']
|
||||
.includes(this.vendor)) this._startTranscribing(cs, ep, channel);
|
||||
}
|
||||
else {
|
||||
this.logger.info({evt}, 'TaskTranscribe:_onTranscription - got empty transcript, listen again');
|
||||
this._transcribe(ep);
|
||||
}
|
||||
return;
|
||||
}
|
||||
if (this.vendor === 'soniox') {
|
||||
/* compile transcripts into one */
|
||||
this._sonioxTranscripts.push(evt.vendor.finalWords);
|
||||
evt = this.compileSonioxTranscripts(this._sonioxTranscripts, 1, this.language);
|
||||
this._sonioxTranscripts = [];
|
||||
}
|
||||
else if (this.vendor === 'deepgram') {
|
||||
/* compile transcripts into one */
|
||||
if (!emptyTranscript) bufferedTranscripts.push(evt);
|
||||
|
||||
if (this.vendor === 'soniox') {
|
||||
/* compile transcripts into one */
|
||||
this._sonioxTranscripts.push(evt.vendor.finalWords);
|
||||
if (evt.is_final) {
|
||||
evt = this.compileSonioxTranscripts(this._sonioxTranscripts, 1, this.language);
|
||||
this._sonioxTranscripts = [];
|
||||
/* deepgram can send an empty and final transcript; only if we have any buffered should we resolve */
|
||||
if (bufferedTranscripts.length === 0) return;
|
||||
evt = this.consolidateTranscripts(bufferedTranscripts, channel, this.language);
|
||||
this._bufferedTranscripts[channel - 1] = [];
|
||||
}
|
||||
|
||||
/* here is where we return a final transcript */
|
||||
this.logger.debug({evt}, 'TaskTranscribe:_onTranscription - sending final transcript');
|
||||
this._resolve(channel, evt);
|
||||
|
||||
/* some STT engines will keep listening after a final response, so no need to restart */
|
||||
if (!['soniox', 'aws', 'microsoft', 'deepgram', 'google'].includes(this.vendor) &&
|
||||
!this.vendor.startsWith('custom:')) {
|
||||
this.logger.debug('TaskTranscribe:_onTranscription - restarting transcribe');
|
||||
this._startTranscribing(cs, ep, channel);
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
/* interim transcript */
|
||||
|
||||
if (this.isContinuousAsr && evt.is_final) {
|
||||
this._bufferedTranscripts.push(evt);
|
||||
this._startAsrTimer(channel);
|
||||
} else {
|
||||
await this._resolve(channel, evt);
|
||||
/* deepgram can send a non-final transcript but with words that are final, so we need to buffer */
|
||||
if (this.vendor === 'deepgram') {
|
||||
const originalEvent = evt.vendor.evt;
|
||||
if (originalEvent.is_final && evt.alternatives[0].transcript !== '') {
|
||||
this.logger.debug({evt}, 'Gather:_onTranscription - buffering a completed (partial) deepgram transcript');
|
||||
bufferedTranscripts.push(evt);
|
||||
}
|
||||
}
|
||||
|
||||
if (this.interim) {
|
||||
this.logger.debug({evt}, 'TaskTranscribe:_onTranscription - sending interim transcript');
|
||||
this._resolve(channel, evt);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async _resolve(channel, evt) {
|
||||
/* we've got a transcript, so end the otel child span for this channel */
|
||||
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
|
||||
this.childSpan[channel - 1].span.setAttributes({
|
||||
channel,
|
||||
'stt.resolve': 'transcript',
|
||||
'stt.result': JSON.stringify(evt)
|
||||
});
|
||||
this.childSpan[channel - 1].span.end();
|
||||
if (evt.is_final) {
|
||||
/* we've got a final transcript, so end the otel child span for this channel */
|
||||
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
|
||||
this.childSpan[channel - 1].span.setAttributes({
|
||||
channel,
|
||||
'stt.resolve': 'transcript',
|
||||
'stt.result': JSON.stringify(evt)
|
||||
});
|
||||
this.childSpan[channel - 1].span.end();
|
||||
}
|
||||
}
|
||||
|
||||
if (this.transcriptionHook) {
|
||||
const b3 = this.getTracingPropagation();
|
||||
const httpHeaders = b3 && {b3};
|
||||
const payload = {
|
||||
...this.cs.callInfo,
|
||||
...httpHeaders,
|
||||
...(evt.alternatives && {speech: evt}),
|
||||
...(evt.type && {speechEvent: evt})
|
||||
};
|
||||
try {
|
||||
const json = await this.cs.requestor.request('verb:hook', this.transcriptionHook, {
|
||||
...this.cs.callInfo,
|
||||
...httpHeaders,
|
||||
speech: evt
|
||||
});
|
||||
this.logger.info({json}, 'sent transcriptionHook');
|
||||
this.logger.debug({payload}, 'sending transcriptionHook');
|
||||
const json = await this.cs.requestor.request('verb:hook', this.transcriptionHook, payload);
|
||||
this.logger.info({json}, 'completed transcriptionHook');
|
||||
if (json && Array.isArray(json) && !this.parentTask) {
|
||||
const makeTask = require('./make_task');
|
||||
const tasks = normalizeJambones(this.logger, json).map((tdata) => makeTask(this.logger, tdata));
|
||||
@@ -398,7 +503,7 @@ class TaskTranscribe extends SttTask {
|
||||
this._clearTimer();
|
||||
this.notifyTaskDone();
|
||||
}
|
||||
else {
|
||||
else if (evt.is_final) {
|
||||
/* start another child span for this channel */
|
||||
const {span, ctx} = this.startChildSpan(`${STT_LISTEN_SPAN_NAME}:${channel}`);
|
||||
this.childSpan[channel - 1] = {span, ctx};
|
||||
@@ -447,10 +552,8 @@ class TaskTranscribe extends SttTask {
|
||||
}
|
||||
}
|
||||
|
||||
async _onJambonzError(cs, _ep, evt) {
|
||||
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError');
|
||||
if (this.paused) return;
|
||||
if (this.isHandledByPrimaryProvider && this.fallbackVendor) {
|
||||
async _startFallback(cs, _ep, evt) {
|
||||
if (this.canFallback) {
|
||||
_ep.stopTranscription({
|
||||
vendor: this.vendor,
|
||||
bugname: this.bugname
|
||||
@@ -458,37 +561,57 @@ class TaskTranscribe extends SttTask {
|
||||
.catch((err) => this.logger.error({err}, `Error stopping transcription for primary vendor ${this.vendor}`));
|
||||
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
|
||||
try {
|
||||
await this._fallback();
|
||||
this.notifyError({ msg: 'ASR error',
|
||||
details:`STT Vendor ${this.vendor} error: ${evt.error || evt.reason}`, failover: 'in progress'});
|
||||
await this._initFallback();
|
||||
let channel = 1;
|
||||
if (this.ep !== _ep) {
|
||||
channel = 2;
|
||||
}
|
||||
this[`_speechHandlersSet_${channel}`] = false;
|
||||
this._startTranscribing(cs, _ep, channel);
|
||||
updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid);
|
||||
return;
|
||||
return true;
|
||||
} catch (error) {
|
||||
this.notifyError({ msg: 'ASR error',
|
||||
details:`STT Vendor ${this.vendor} error: ${evt.error || evt.reason}`, failover: 'not available'});
|
||||
this.logger.info({error}, `Error while falling back to ${this.fallbackVendor}`);
|
||||
}
|
||||
} else {
|
||||
const {writeAlerts, AlertType} = cs.srf.locals;
|
||||
this.logger.debug('transcribe:_startFallback no condition for falling back');
|
||||
this.notifyError({ msg: 'ASR error',
|
||||
details:`STT Vendor ${this.vendor} error: ${evt.error || evt.reason}`, failover: 'not available'});
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
if (this.vendor === 'nuance') {
|
||||
const {code, error} = evt;
|
||||
if (code === 404 && error === 'No speech') return this._resolve('timeout');
|
||||
if (code === 413 && error === 'Too much speech') return this._resolve('timeout');
|
||||
}
|
||||
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError');
|
||||
writeAlerts({
|
||||
account_sid: cs.accountSid,
|
||||
alert_type: AlertType.STT_FAILURE,
|
||||
message: `Custom speech vendor ${this.vendor} error: ${evt.error}`,
|
||||
vendor: this.vendor,
|
||||
}).catch((err) => this.logger.info({err}, 'Error generating alert for jambonz custom connection failure'));
|
||||
this.notifyError({msg: 'ASR error', details:`Custom speech vendor ${this.vendor} error: ${evt.error}`});
|
||||
async _onJambonzError(cs, _ep, evt) {
|
||||
if (this.vendor === 'google' && evt.error_code === 0) {
|
||||
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError - ignoring google error code 0');
|
||||
return;
|
||||
}
|
||||
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError');
|
||||
if (this.paused) return;
|
||||
const {writeAlerts, AlertType} = cs.srf.locals;
|
||||
|
||||
if (this.vendor === 'nuance') {
|
||||
const {code, error} = evt;
|
||||
if (code === 404 && error === 'No speech') return this._resolve('timeout');
|
||||
if (code === 413 && error === 'Too much speech') return this._resolve('timeout');
|
||||
}
|
||||
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError');
|
||||
writeAlerts({
|
||||
account_sid: cs.accountSid,
|
||||
alert_type: AlertType.STT_FAILURE,
|
||||
message: `Custom speech vendor ${this.vendor} error: ${evt.error}`,
|
||||
vendor: this.vendor,
|
||||
}).catch((err) => this.logger.info({err}, 'Error generating alert for jambonz custom connection failure'));
|
||||
if (!(await this._startFallback(cs, _ep, evt))) {
|
||||
this.notifyTaskDone();
|
||||
}
|
||||
}
|
||||
|
||||
_onVendorConnectFailure(cs, _ep, channel, evt) {
|
||||
async _onVendorConnectFailure(cs, _ep, channel, evt) {
|
||||
super._onVendorConnectFailure(cs, _ep, evt);
|
||||
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
|
||||
this.childSpan[channel - 1].span.setAttributes({
|
||||
@@ -497,7 +620,9 @@ class TaskTranscribe extends SttTask {
|
||||
});
|
||||
this.childSpan[channel - 1].span.end();
|
||||
}
|
||||
this.notifyTaskDone();
|
||||
if (!(await this._startFallback(cs, _ep, evt))) {
|
||||
this.notifyTaskDone();
|
||||
}
|
||||
}
|
||||
|
||||
_startAsrTimer(channel) {
|
||||
@@ -506,8 +631,9 @@ class TaskTranscribe extends SttTask {
|
||||
this._clearAsrTimer(channel);
|
||||
this._asrTimer = setTimeout(() => {
|
||||
this.logger.debug(`TaskTranscribe:_startAsrTimer - asr timer went off for channel: ${channel}`);
|
||||
const evt = this.consolidateTranscripts(this._bufferedTranscripts, channel, this.language);
|
||||
this._bufferedTranscripts = [];
|
||||
const evt = this.consolidateTranscripts(
|
||||
this._bufferedTranscripts[channel - 1], channel, this.language, this.vendor);
|
||||
this._bufferedTranscripts[channel - 1] = [];
|
||||
this._resolve(channel, evt);
|
||||
}, this.asrTimeout);
|
||||
this.logger.debug(`TaskTranscribe:_startAsrTimer: set for ${this.asrTimeout}ms for channel ${channel}`);
|
||||
|
||||
180
lib/tasks/tts-task.js
Normal file
@@ -0,0 +1,180 @@
|
||||
const Task = require('./task');
|
||||
const { TaskPreconditions } = require('../utils/constants');
|
||||
|
||||
class TtsTask extends Task {
|
||||
|
||||
constructor(logger, data, parentTask) {
|
||||
super(logger, data);
|
||||
this.parentTask = parentTask;
|
||||
|
||||
this.preconditions = TaskPreconditions.Endpoint;
|
||||
|
||||
this.earlyMedia = this.data.earlyMedia === true || (parentTask && parentTask.earlyMedia);
|
||||
this.synthesizer = this.data.synthesizer || {};
|
||||
this.disableTtsCache = this.data.disableTtsCache;
|
||||
this.options = this.synthesizer.options || {};
|
||||
}
|
||||
|
||||
async exec(cs) {
|
||||
super.exec(cs);
|
||||
}
|
||||
|
||||
async _synthesizeWithSpecificVendor(cs, ep, {
|
||||
vendor,
|
||||
language,
|
||||
voice,
|
||||
label,
|
||||
disableTtsStreaming,
|
||||
preCache
|
||||
}) {
|
||||
const {srf, accountSid:account_sid} = cs;
|
||||
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, srf);
|
||||
const {writeAlerts, AlertType, stats} = srf.locals;
|
||||
const {synthAudio} = srf.locals.dbHelpers;
|
||||
const engine = this.synthesizer.engine || cs.synthesizer?.engine || 'neural';
|
||||
const salt = cs.callSid;
|
||||
|
||||
let credentials = cs.getSpeechCredentials(vendor, 'tts', label);
|
||||
/* parse Nuance voices into name and model */
|
||||
let model;
|
||||
if (vendor === 'nuance' && voice) {
|
||||
const arr = /([A-Za-z-]*)\s+-\s+(enhanced|standard)/.exec(voice);
|
||||
if (arr) {
|
||||
voice = arr[1];
|
||||
model = arr[2];
|
||||
}
|
||||
} else if (vendor === 'deepgram') {
|
||||
model = voice;
|
||||
}
|
||||
|
||||
/* allow for microsoft custom region voice and api_key to be specified as an override */
|
||||
if (vendor === 'microsoft' && this.options.deploymentId) {
|
||||
credentials = credentials || {};
|
||||
credentials.use_custom_tts = true;
|
||||
credentials.custom_tts_endpoint = this.options.deploymentId;
|
||||
credentials.api_key = this.options.apiKey || credentials.apiKey;
|
||||
credentials.region = this.options.region || credentials.region;
|
||||
voice = this.options.voice || voice;
|
||||
} else if (vendor === 'elevenlabs') {
|
||||
credentials = credentials || {};
|
||||
credentials.model_id = this.options.model_id || credentials.model_id;
|
||||
credentials.voice_settings = this.options.voice_settings || {};
|
||||
credentials.optimize_streaming_latency = this.options.optimize_streaming_latency
|
||||
|| credentials.optimize_streaming_latency;
|
||||
voice = this.options.voice_id || voice;
|
||||
}
|
||||
|
||||
ep.set({
|
||||
tts_engine: vendor,
|
||||
tts_voice: voice,
|
||||
cache_speech_handles: 1,
|
||||
}).catch((err) => this.logger.info({err}, `${this.name}: Error setting tts_engine on endpoint`));
|
||||
|
||||
if (!preCache) this.logger.info({vendor, language, voice, model}, `${this.name}:exec`);
|
||||
try {
|
||||
if (!credentials) {
|
||||
writeAlerts({
|
||||
account_sid,
|
||||
alert_type: AlertType.TTS_NOT_PROVISIONED,
|
||||
vendor
|
||||
}).catch((err) => this.logger.info({err}, 'Error generating alert for no tts'));
|
||||
this.notifyError({
|
||||
msg: 'TTS error',
|
||||
details:`No speech credentials provisioned for selected vendor ${vendor}`
|
||||
});
|
||||
throw new Error('no provisioned speech credentials for TTS');
|
||||
}
|
||||
// synthesize all of the text elements
|
||||
let lastUpdated = false;
|
||||
|
||||
/* produce an audio segment from the provided text */
|
||||
const generateAudio = async(text) => {
|
||||
if (this.killed) return;
|
||||
if (text.startsWith('silence_stream://')) return text;
|
||||
|
||||
/* otel: trace time for tts */
|
||||
if (!preCache && !this.parentTask) {
|
||||
const {span} = this.startChildSpan('tts-generation', {
|
||||
'tts.vendor': vendor,
|
||||
'tts.language': language,
|
||||
'tts.voice': voice
|
||||
});
|
||||
this.otelSpan = span;
|
||||
}
|
||||
try {
|
||||
const {filePath, servedFromCache, rtt} = await synthAudio(stats, {
|
||||
account_sid,
|
||||
text,
|
||||
vendor,
|
||||
language,
|
||||
voice,
|
||||
engine,
|
||||
model,
|
||||
salt,
|
||||
credentials,
|
||||
options: this.options,
|
||||
disableTtsCache : this.disableTtsCache,
|
||||
disableTtsStreaming,
|
||||
preCache
|
||||
});
|
||||
if (!filePath.startsWith('say:')) {
|
||||
this.logger.debug(`file ${filePath}, served from cache ${servedFromCache}`);
|
||||
if (filePath) cs.trackTmpFile(filePath);
|
||||
if (this.otelSpan) {
|
||||
this.otelSpan.setAttributes({'tts.cached': servedFromCache});
|
||||
this.otelSpan.end();
|
||||
this.otelSpan = null;
|
||||
}
|
||||
if (!servedFromCache && !lastUpdated) {
|
||||
lastUpdated = true;
|
||||
updateSpeechCredentialLastUsed(credentials.speech_credential_sid).catch(() => {/* logged error */});
|
||||
}
|
||||
if (!servedFromCache && rtt && !preCache) {
|
||||
this.notifyStatus({
|
||||
event: 'synthesized-audio',
|
||||
vendor,
|
||||
language,
|
||||
characters: text.length,
|
||||
elapsedTime: rtt
|
||||
});
|
||||
}
|
||||
}
|
||||
else {
|
||||
this.logger.debug('a streaming tts api will be used');
|
||||
const modifiedPath = filePath.replace('say:{', `say:{session-uuid=${ep.uuid},`);
|
||||
return modifiedPath;
|
||||
}
|
||||
return filePath;
|
||||
} catch (err) {
|
||||
this.logger.info({err}, 'Error synthesizing tts');
|
||||
if (this.otelSpan) this.otelSpan.end();
|
||||
writeAlerts({
|
||||
account_sid: cs.accountSid,
|
||||
alert_type: AlertType.TTS_FAILURE,
|
||||
vendor,
|
||||
detail: err.message
|
||||
}).catch((err) => this.logger.info({err}, 'Error generating alert for tts failure'));
|
||||
this.notifyError({msg: 'TTS error', details: err.message || err});
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
|
||||
const arr = this.text.map((t) => (this._validateURL(t) ? t : generateAudio(t)));
|
||||
return (await Promise.all(arr)).filter((fp) => fp && fp.length);
|
||||
} catch (err) {
|
||||
this.logger.info(err, 'TaskSay:exec error');
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
_validateURL(urlString) {
|
||||
try {
|
||||
new URL(urlString);
|
||||
return true;
|
||||
} catch (e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = TtsTask;
|
||||
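As a reading aid (not part of the diff), here is a minimal sketch of how a verb built on the new TtsTask base class is expected to drive primary and fallback synthesis; the helper function and its arguments are hypothetical, while the method and flag names mirror those used in say.js above.

// Hypothetical helper illustrating the intended TtsTask usage; not from the commit.
// `task` is a TtsTask subclass instance, `cs` the call session, `ep` the media endpoint,
// and primary/fallback are {vendor, language, voice, label} objects.
async function synthesizeWithFallback(task, cs, ep, primary, fallback) {
  try {
    return await task._synthesizeWithSpecificVendor(cs, ep, primary);
  } catch (err) {
    if (!fallback || !task.isHandledByPrimaryProvider) throw err;
    task.isHandledByPrimaryProvider = false;  // same flags say.js flips before retrying
    cs.hasFallbackTts = true;
    return task._synthesizeWithSpecificVendor(cs, ep, fallback);
  }
}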
175
lib/utils/action-hook-delay.js
Normal file
@@ -0,0 +1,175 @@
|
||||
const makeTask = require('../tasks/make_task');
|
||||
const Emitter = require('events');
|
||||
const { normalizeJambones } = require('@jambonz/verb-specifications');
|
||||
const {TaskName} = require('../utils/constants');
|
||||
const assert = require('assert');
|
||||
|
||||
/**
|
||||
* ActionHookDelayProcessor
|
||||
* @extends Emitter
|
||||
*
|
||||
* @param {Object} logger - logger instance
|
||||
* @param {Object} opts - options
|
||||
* @param {Object} cs - call session
|
||||
* @param {Object} ep - endpoint
|
||||
*
|
||||
* @emits {Event} 'giveup' - when the associated giveup timer expires
|
||||
*
|
||||
* Ref: https://www.jambonz.org/docs/supporting-articles/handling-action-hook-delays/
|
||||
*/
|
||||
class ActionHookDelayProcessor extends Emitter {
|
||||
constructor(logger, opts, cs) {
|
||||
super();
|
||||
this.logger = logger;
|
||||
this.cs = cs;
|
||||
this._active = false;
|
||||
|
||||
const enabled = this.init(opts);
|
||||
if (enabled && (!this.actions || !Array.isArray(this.actions) || this.actions.length === 0)) {
|
||||
throw new Error('ActionHookDelayProcessor: no actions specified');
|
||||
}
|
||||
else if (enabled && this.actions.some((a) => !a.verb || ![TaskName.Say, TaskName.Play].includes(a.verb))) {
|
||||
throw new Error(`ActionHookDelayProcessor: invalid actions specified: ${JSON.stringify(this.actions)}`);
|
||||
}
|
||||
}
|
||||
|
||||
get properties() {
|
||||
return {
|
||||
actions: this.actions,
|
||||
retries: this.retries,
|
||||
noResponseTimeout: this.noResponseTimeout,
|
||||
noResponseGiveUpTimeout: this.noResponseGiveUpTimeout
|
||||
};
|
||||
}
|
||||
|
||||
get ep() {
|
||||
return this.cs.ep;
|
||||
}
|
||||
|
||||
init(opts) {
|
||||
this.logger.debug({opts}, 'ActionHookDelayProcessor#init');
|
||||
|
||||
this.actions = opts.actions;
|
||||
this.retries = opts.retries || 0;
|
||||
this.noResponseTimeout = opts.noResponseTimeout || 0;
|
||||
this.noResponseGiveUpTimeout = opts.noResponseGiveUpTimeout;
|
||||
|
||||
// return false if these options actually disable the ahdp
|
||||
return ('enable' in opts && opts.enable === true) ||
|
||||
('enabled' in opts && opts.enabled === true) ||
|
||||
(!('enable' in opts) && !('enabled' in opts));
|
||||
}
|
||||
|
||||
start() {
|
||||
this.logger.debug('ActionHookDelayProcessor#start');
|
||||
if (this._active) {
|
||||
this.logger.debug('ActionHookDelayProcessor#start: already started due to prior gather which is continuing');
|
||||
return;
|
||||
}
|
||||
assert(!this._noResponseTimer);
|
||||
this._active = true;
|
||||
this._retryCount = 0;
|
||||
const timeoutMs = this.noResponseTimeout === 0 ? 1 : this.noResponseTimeout * 1000;
|
||||
this._noResponseTimer = setTimeout(this._onNoResponseTimer.bind(this), timeoutMs);
|
||||
|
||||
if (this.noResponseGiveUpTimeout > 0) {
|
||||
const timeoutMs = this.noResponseGiveUpTimeout * 1000;
|
||||
this._noResponseGiveUpTimer = setTimeout(this._onNoResponseGiveUpTimer.bind(this), timeoutMs);
|
||||
}
|
||||
}
|
||||
|
||||
async stop() {
|
||||
this.logger.debug('ActionHookDelayProcessor#stop');
|
||||
this._active = false;
|
||||
|
||||
if (this._noResponseTimer) {
|
||||
clearTimeout(this._noResponseTimer);
|
||||
this._noResponseTimer = null;
|
||||
}
|
||||
if (this._noResponseGiveUpTimer) {
|
||||
clearTimeout(this._noResponseGiveUpTimer);
|
||||
this._noResponseGiveUpTimer = null;
|
||||
}
|
||||
if (this._taskInProgress) {
|
||||
this.logger.debug(`ActionHookDelayProcessor#stop: killing task in progress: ${this._taskInProgress.name}`);
|
||||
|
||||
/** if we are doing a play, kill it immediately
|
||||
* if we are doing a say, wait for it to finish
|
||||
*/
|
||||
if (TaskName.Say === this._taskInProgress.name) {
|
||||
this._sayResolver = () => {
|
||||
this.logger.debug('ActionHookDelayProcessor#stop: say is done, continue on..');
|
||||
this._taskInProgress.kill(this.cs);
|
||||
this._taskInProgress = null;
|
||||
};
|
||||
this.logger.debug('ActionHookDelayProcessor#stop returning promise');
|
||||
return new Promise((resolve) => this._sayResolver = resolve);
|
||||
}
|
||||
else {
|
||||
/* play */
|
||||
this._taskInProgress.kill(this.cs);
|
||||
this._taskInProgress = null;
|
||||
}
|
||||
}
|
||||
this.logger.debug('ActionHookDelayProcessor#stop returning');
|
||||
}
|
||||
|
||||
_onNoResponseTimer() {
|
||||
this.logger.debug('ActionHookDelayProcessor#_onNoResponseTimer');
|
||||
this._noResponseTimer = null;
|
||||
|
||||
/* get the next play or say action */
|
||||
const verb = this.actions[this._retryCount % this.actions.length];
|
||||
|
||||
const t = normalizeJambones(this.logger, [verb]);
|
||||
this.logger.debug({verb}, 'ActionHookDelayProcessor#_onNoResponseTimer: starting action');
|
||||
try {
|
||||
this._taskInProgress = makeTask(this.logger, t[0]);
|
||||
this._taskInProgress.disableTracing = true;
|
||||
this._taskInProgress.exec(this.cs, {ep: this.ep});
|
||||
} catch (err) {
|
||||
this.logger.info(err, 'ActionHookDelayProcessor#_onNoResponseTimer: error starting action');
|
||||
this._taskInProgress = null;
|
||||
return;
|
||||
}
|
||||
|
||||
this.ep.once('playback-start', (evt) => {
|
||||
this.logger.debug({evt}, 'got playback-start');
|
||||
if (!this._active) {
|
||||
this.logger.info({evt}, 'ActionHookDelayProcessor#_onNoResponseTimer: killing audio immediately');
|
||||
this.ep.api('uuid_break', this.ep.uuid)
|
||||
.catch((err) => this.logger.info(err,
|
||||
'ActionHookDelayProcessor#_onNoResponseTimer Error killing audio'));
|
||||
}
|
||||
});
|
||||
|
||||
this.ep.once('playback-stop', (evt) => {
|
||||
this._taskInProgress = null;
|
||||
if (this._sayResolver) {
|
||||
/* we were waiting for the play to finish before continuing to next task */
|
||||
this.logger.debug({evt}, 'got playback-stop');
|
||||
this._sayResolver();
|
||||
this._sayResolver = null;
|
||||
}
|
||||
else {
|
||||
/* possibly start the no response timer again */
|
||||
if (this._active && this.retries > 0 && this._retryCount < this.retries && this.noResponseTimeout > 0) {
|
||||
this.logger.debug({evt}, 'ActionHookDelayProcessor#_onNoResponseTimer: playback-stop on play/say action');
|
||||
const timeoutMs = this.noResponseTimeout * 1000;
|
||||
this._noResponseTimer = setTimeout(this._onNoResponseTimer.bind(this), timeoutMs);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
this._retryCount++;
|
||||
}
|
||||
|
||||
_onNoResponseGiveUpTimer() {
|
||||
this._active = false;
|
||||
this.logger.info('ActionHookDelayProcessor#_onNoResponseGiveUpTimer');
|
||||
this.stop().catch((err) => {});
|
||||
this.emit('giveup');
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = ActionHookDelayProcessor;
|
||||
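A minimal usage sketch for the processor defined above (not part of the diff); the option names follow init() and the referenced docs page, while the logger, call session, and require path are assumptions.

// Hypothetical wiring of ActionHookDelayProcessor; the opts names come from init() above
const ActionHookDelayProcessor = require('./lib/utils/action-hook-delay');  // assumed path

const ahdp = new ActionHookDelayProcessor(logger, {
  enable: true,
  noResponseTimeout: 2,          // seconds of silence before the filler actions start
  retries: 2,                    // how many times to repeat the filler actions (assumed semantics)
  noResponseGiveUpTimeout: 20,   // seconds before 'giveup' is emitted
  actions: [
    {verb: 'say', text: 'Please hold while we look that up.'}  // only say/play verbs are accepted
  ]
}, cs);                          // cs: the owning call session (assumed)

ahdp.on('giveup', () => logger.info('application never responded, giving up'));
ahdp.start();                    // call when the action hook is sent
// ...later, once the application responds with new verbs:
await ahdp.stop();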
@@ -26,25 +26,25 @@ class BackgroundTaskManager extends Emitter {
return this.tasks.size;
}

async newTask(type, taskOpts) {
this.logger.info({taskOpts}, `initiating Background task ${type}`);
async newTask(type, opts, sticky = false) {
this.logger.info({opts}, `initiating Background task ${type}`);
if (this.tasks.has(type)) {
this.logger.info(`Background task ${type} is running, skiped`);
this.logger.info(`Background task ${type} is running, skipped`);
return;
}
let task;
switch (type) {
case 'listen':
task = await this._initListen(taskOpts);
task = await this._initListen(opts);
break;
case 'bargeIn':
task = await this._initBargeIn(taskOpts);
task = await this._initBargeIn(opts);
break;
case 'record':
task = await this._initRecord();
break;
case 'transcribe':
task = await this._initTranscribe(taskOpts);
task = await this._initTranscribe(opts);
break;
default:
break;
@@ -52,6 +52,7 @@ class BackgroundTaskManager extends Emitter {
if (task) {
this.tasks.set(type, task);
}
if (task && sticky) task.sticky = true;
return task;
}

@@ -64,8 +65,6 @@ class BackgroundTaskManager extends Emitter {
task.kill();
// Remove task from managed List
this.tasks.delete(type);
} else {
this.logger.debug(`stopping background task, ${type} is not running, skipped`);
}
}

@@ -118,7 +117,8 @@ class BackgroundTaskManager extends Emitter {
this._taskCompleted('bargeIn', task);
if (task.sticky && !this.cs.callGone && !this.cs._stopping) {
this.logger.info('BackgroundTaskManager:_initBargeIn: restarting background bargeIn');
this.newTask('bargeIn', opts);
this._bargeInHandled = false;
this.newTask('bargeIn', opts, true);
}
return;
})
@@ -133,8 +133,7 @@ class BackgroundTaskManager extends Emitter {
async _initRecord() {
if (this.cs.accountInfo.account.record_all_calls || this.cs.application.record_all_calls) {
if (!JAMBONZ_RECORD_WS_BASE_URL || !this.cs.accountInfo.account.bucket_credential) {
this.logger.error(`_initRecord: invalid configuration,
missing JAMBONZ_RECORD_WS_BASE_URL or bucket configuration`);
this.logger.error('_initRecord: invalid cfg - missing JAMBONZ_RECORD_WS_BASE_URL or bucket config');
return undefined;
}
const listenOpts = {
@@ -175,7 +174,7 @@ class BackgroundTaskManager extends Emitter {
}

_taskCompleted(type, task) {
this.logger.debug({type, task}, 'BackgroundTaskManager:_taskCompleted: task completed');
this.logger.debug({type, task}, `BackgroundTaskManager:_taskCompleted: task completed, sticky: ${task.sticky}`);
task.removeAllListeners();
task.span.end();
this.tasks.delete(type);
@@ -188,7 +187,10 @@
}

_bargeInTaskCompleted(evt) {
this.logger.debug({evt}, 'BackgroundTaskManager:_bargeInTaskCompleted on event from background bargeIn');
if (this._bargeInHandled) return;
this._bargeInHandled = true;
this.logger.debug({evt},
'BackgroundTaskManager:_bargeInTaskCompleted on event from background bargeIn, emitting bargein-done event');
this.emit('bargeIn-done', evt);
}
}
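A sketch of how a caller might exercise the new sticky flag added above (assuming the manager is reachable from the call session as cs.backgroundTaskManager, which is an assumption made for illustration):

// sticky=true: when the background bargeIn completes it is re-created automatically (see _initBargeIn above)
const task = await cs.backgroundTaskManager.newTask('bargeIn', gatherOpts, true);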
@@ -1,12 +1,13 @@
{
"TaskName": {
"Cognigy": "cognigy",
"Answer": "answer",
"Conference": "conference",
"Config": "config",
"Dequeue": "dequeue",
"Dial": "dial",
"Dialogflow": "dialogflow",
"Dtmf": "dtmf",
"Dub": "dub",
"Enqueue": "enqueue",
"Gather": "gather",
"Hangup": "hangup",
@@ -29,7 +30,8 @@
"Tag": "tag",
"Transcribe": "transcribe"
},
"AllowedSipRecVerbs": ["config", "gather", "transcribe", "listen", "tag"],
"AllowedSipRecVerbs": ["answer", "config", "gather", "transcribe", "listen", "tag"],
"AllowedConfirmSessionVerbs": ["config", "gather", "plays", "say", "tag"],
"CallStatus": {
"Trying": "trying",
"Ringing": "ringing",
@@ -95,6 +97,10 @@
"Transcription": "soniox_transcribe::transcription",
"Error": "soniox_transcribe::error"
},
"VerbioTranscriptionEvents": {
"Transcription": "verbio_transcribe::transcription",
"Error": "verbio_transcribe::error"
},
"CobaltTranscriptionEvents": {
"Transcription": "cobalt_speech::transcription",
"CompileContext": "cobalt_speech::compile_context_response",
@@ -132,6 +138,9 @@
"ConnectFailure": "assemblyai_transcribe::connect_failed",
"Connect": "assemblyai_transcribe::connect"
},
"VadDetection": {
"Detection": "vad_detect:detection"
},
"ListenEvents": {
"Connect": "mod_audio_fork::connect",
"ConnectFailure": "mod_audio_fork::connect_failed",
@@ -169,6 +178,7 @@
"session:new",
"session:reconnect",
"session:redirect",
"session:adulting",
"call:status",
"queue:status",
"dial:confirm",

@@ -41,6 +41,7 @@ const speechMapper = (cred) => {
const o = JSON.parse(decrypt(credential));
obj.access_key_id = o.access_key_id;
obj.secret_access_key = o.secret_access_key;
obj.role_arn = o.role_arn;
obj.aws_region = o.aws_region;
}
else if ('microsoft' === obj.vendor) {
@@ -94,6 +95,17 @@ const speechMapper = (cred) => {
obj.api_key = o.api_key;
obj.model_id = o.model_id;
obj.options = o.options;
} else if ('playht' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
obj.user_id = o.user_id;
obj.voice_engine = o.voice_engine;
obj.options = o.options;
} else if ('rimelabs' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
obj.model_id = o.model_id;
obj.options = o.options;
} else if ('assemblyai' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
@@ -101,6 +113,11 @@ const speechMapper = (cred) => {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
obj.model_id = o.model_id;
} else if ('verbio' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.client_id = o.client_id;
obj.client_secret = o.client_secret;
obj.engine_version = o.engine_version;
} else if (obj.vendor.startsWith('custom:')) {
const o = JSON.parse(decrypt(credential));
obj.auth_token = o.auth_token;

@@ -171,7 +171,7 @@ function installSrfLocals(srf, logger) {
retrieveFromSortedSet,
retrieveByPatternSortedSet,
sortedSetLength,
sortedSetPositionByPattern
sortedSetPositionByPattern,
} = require('@jambonz/realtimedb-helpers')({}, logger, tracer);
const registrar = new Registrar(logger, client);
const {
@@ -179,6 +179,8 @@ function installSrfLocals(srf, logger) {
addFileToCache,
getNuanceAccessToken,
getIbmAccessToken,
getAwsAuthToken,
getVerbioAccessToken
} = require('@jambonz/speech-utils')({}, logger);
const {
writeAlerts,
@@ -216,6 +218,7 @@ function installSrfLocals(srf, logger) {
listCalls,
deleteCall,
synthAudio,
getAwsAuthToken,
addFileToCache,
createHash,
retrieveHash,
@@ -237,7 +240,8 @@ function installSrfLocals(srf, logger) {
retrieveFromSortedSet,
retrieveByPatternSortedSet,
sortedSetLength,
sortedSetPositionByPattern
sortedSetPositionByPattern,
getVerbioAccessToken
},
parentLogger: logger,
getSBC,

18
lib/utils/parse-decibels.js
Normal file
@@ -0,0 +1,18 @@
const parseDecibels = (db) => {
if (!db) return 0;
if (typeof db === 'number') {
return db;
}
else if (typeof db === 'string') {
const match = db.match(/([+-]?\d+(\.\d+)?)\s*db/i);
if (match) {
return Math.trunc(parseFloat(match[1]));
} else {
return 0;
}
} else {
return 0;
}
};

module.exports = parseDecibels;
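A few sample invocations of the helper above; the results follow directly from the regex match and Math.trunc:

parseDecibels(-6);         // -6
parseDecibels('-6.5 dB');  // -6 (parsed, then truncated toward zero)
parseDecibels('+3dB');     // 3
parseDecibels('loud');     // 0 (no decibel pattern found)
parseDecibels(undefined);  // 0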
@@ -295,17 +295,17 @@ class SingleDialer extends Emitter {
if (err.status === 487) status.callStatus = CallStatus.NoAnswer;
else if ([486, 600].includes(err.status)) status.callStatus = CallStatus.Busy;
this.logger.info(`SingleDialer:exec outdial failure ${err.status}`);
inviteSpan.setAttributes({'invite.status_code': err.status});
inviteSpan.end();
inviteSpan?.setAttributes({'invite.status_code': err.status});
inviteSpan?.end();
}
else {
this.logger.error(err, 'SingleDialer:exec');
status.sipStatus = 500;
inviteSpan.setAttributes({
inviteSpan?.setAttributes({
'invite.status_code': 500,
'invite.err': err.message
});
inviteSpan.end();
inviteSpan?.end();
}
this.emit('callStatusChange', status);
if (this.ep) this.ep.destroy();
@@ -353,6 +353,7 @@ class SingleDialer extends Emitter {
const json = await this.requestor.request('dial:confirm', confirmHook, this.callInfo.toJSON());
if (!json || (Array.isArray(json) && json.length === 0)) {
this.logger.info('SingleDialer:_executeApp: no tasks returned from confirm hook');
this.emit('accept');
return;
}
const tasks = normalizeJambones(this.logger, json).map((tdata) => makeTask(this.logger, tdata));
@@ -412,6 +413,7 @@ class SingleDialer extends Emitter {
const app = {...application};
if ('WS' === app.call_hook?.method ||
app.call_hook?.url.startsWith('ws://') || app.call_hook?.url.startsWith('wss://')) {
if (app.call_hook?.url) app.call_hook.url += '/adulting';
const requestor = new WsRequestor(logger, this.accountInfo.account.account_sid,
app.call_hook, this.accountInfo.account.webhook_secret);
app.requestor = requestor;
@@ -437,6 +439,13 @@ class SingleDialer extends Emitter {
tasks,
rootSpan
});
app.requestor.request('session:adulting', '/adulting', {
...cs.callInfo.toJSON(),
parentCallInfo: this.parentCallInfo
}).catch((err) => {
newLogger.error({err}, 'doAdulting: error sending adulting request');
});

cs.req = this.req;
cs.exec().catch((err) => newLogger.error({err}, 'doAdulting: error executing session'));
return cs;

@@ -150,7 +150,7 @@ const selectDefaultDeepgramModel = (task, language) => {
return 'base';
};

const consolidateTranscripts = (bufferedTranscripts, channel, language) => {
const consolidateTranscripts = (bufferedTranscripts, channel, language, vendor) => {
if (bufferedTranscripts.length === 1) return bufferedTranscripts[0];
let totalConfidence = 0;
const finalTranscript = bufferedTranscripts.reduce((acc, evt) => {
@@ -191,7 +191,7 @@ const consolidateTranscripts = (bufferedTranscripts, channel, language) => {
totalConfidence / bufferedTranscripts.length;
finalTranscript.alternatives[0].transcript = finalTranscript.alternatives[0].transcript.trim();
finalTranscript.vendor = {
name: 'deepgram',
name: vendor,
evt: bufferedTranscripts
};
return finalTranscript;
@@ -270,7 +270,7 @@ const normalizeDeepgram = (evt, channel, language, shortUtterance) => {
language_code: language,
channel_tag: channel,
is_final: shortUtterance ? evt.is_final : evt.speech_final,
alternatives: [alternatives[0]],
alternatives: alternatives.length ? [alternatives[0]] : [],
vendor: {
name: 'deepgram',
evt: copy
@@ -376,6 +376,20 @@ const normalizeNuance = (evt, channel, language) => {
};
};

const normalizeVerbio = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
return {
language_code: language,
channel_tag: channel,
is_final: evt.is_final,
alternatives: evt.alternatives,
vendor: {
name: 'verbio',
evt: copy
}
};
};

const normalizeMicrosoft = (evt, channel, language, punctuation = true) => {
const copy = JSON.parse(JSON.stringify(evt));
const nbest = evt.NBest;
@@ -463,6 +477,8 @@ module.exports = (logger) => {
return normalizeCobalt(evt, channel, language);
case 'assemblyai':
return normalizeAssemblyAi(evt, channel, language, shortUtterance);
case 'verbio':
return normalizeVerbio(evt, channel, language);
default:
if (vendor.startsWith('custom:')) {
return normalizeCustom(evt, channel, language, vendor);
@@ -474,20 +490,13 @@ module.exports = (logger) => {

const setChannelVarsForStt = (task, sttCredentials, language, rOpts = {}) => {
let opts = {};
const {enable, voiceMs = 0, mode = -1} = rOpts.vad || {};
const vad = {enable, voiceMs, mode};
const vendor = rOpts.vendor;

/* voice activity detection works across vendors */
opts = {
...opts,
...(vad.enable && {START_RECOGNIZING_ON_VAD: 1}),
...(vad.enable && vad.voiceMs && {RECOGNIZER_VAD_VOICE_MS: vad.voiceMs}),
...(vad.enable && typeof vad.mode === 'number' && {RECOGNIZER_VAD_MODE: vad.mode}),
};

if ('google' === vendor) {
const model = task.name === TaskName.Gather ? 'command_and_search' : 'latest_long';
const useV2 = rOpts.googleOptions?.serviceVersion === 'v2';
const model = task.name === TaskName.Gather ?
(useV2 ? 'telephony_short' : 'command_and_search') :
(useV2 ? 'long' : 'latest_long');
opts = {
...opts,
...(sttCredentials && {GOOGLE_APPLICATION_CREDENTIALS: JSON.stringify(sttCredentials.credentials)}),
@@ -520,6 +529,26 @@ module.exports = (logger) => {
...{GOOGLE_SPEECH_MODEL: rOpts.model || model},
...(rOpts.naicsCode > 0 && {GOOGLE_SPEECH_METADATA_INDUSTRY_NAICS_CODE: rOpts.naicsCode}),
GOOGLE_SPEECH_METADATA_RECORDING_DEVICE_TYPE: 'phone_line',
...(useV2 && {
GOOGLE_SPEECH_RECOGNIZER_PARENT: `projects/${sttCredentials.credentials.project_id}/locations/global`,
GOOGLE_SPEECH_CLOUD_SERVICES_VERSION: 'v2',
...(rOpts.googleOptions?.speechStartTimeoutMs && {
GOOGLE_SPEECH_START_TIMEOUT_MS: rOpts.googleOptions.speechStartTimeoutMs
}),
...(rOpts.googleOptions?.speechEndTimeoutMs && {
GOOGLE_SPEECH_END_TIMEOUT_MS: rOpts.googleOptions.speechEndTimeoutMs
}),
...(rOpts.googleOptions?.transcriptNormalization && {
GOOGLE_SPEECH_TRANSCRIPTION_NORMALIZATION: JSON.stringify(rOpts.googleOptions.transcriptNormalization)
}),
...(rOpts.googleOptions?.enableVoiceActivityEvents && {
GOOGLE_SPEECH_ENABLE_VOICE_ACTIVITY_EVENTS: rOpts.googleOptions.enableVoiceActivityEvents
}),
...(rOpts.sgoogleOptions?.recognizerId) && {GOOGLE_SPEECH_RECOGNIZER_ID: rOpts.googleOptions.recognizerId},
...(rOpts.googleOptions?.enableVoiceActivityEvents && {
GOOGLE_SPEECH_ENABLE_VOICE_ACTIVITY_EVENTS: rOpts.googleOptions.enableVoiceActivityEvents
}),
}),
};
}
else if (['aws', 'polly'].includes(vendor)) {
@@ -529,9 +558,10 @@ module.exports = (logger) => {
...(rOpts.vocabularyFilterName && {AWS_VOCABULARY_FILTER_NAME: rOpts.vocabularyFilterName}),
...(rOpts.filterMethod && {AWS_VOCABULARY_FILTER_METHOD: rOpts.filterMethod}),
...(sttCredentials && {
AWS_ACCESS_KEY_ID: sttCredentials.accessKeyId,
AWS_SECRET_ACCESS_KEY: sttCredentials.secretAccessKey,
AWS_REGION: sttCredentials.region
...(sttCredentials.accessKeyId && {AWS_ACCESS_KEY_ID: sttCredentials.accessKeyId}),
...(sttCredentials.secretAccessKey && {AWS_SECRET_ACCESS_KEY: sttCredentials.secretAccessKey}),
AWS_REGION: sttCredentials.region,
...(sttCredentials.sessionToken && {AWS_SESSION_TOKEN: sttCredentials.sessionToken}),
}),
};
}
@@ -558,6 +588,10 @@ module.exports = (logger) => {
...{AZURE_USE_OUTPUT_FORMAT_DETAILED: 1},
...(azureOptions.speechSegmentationSilenceTimeoutMs &&
{AZURE_SPEECH_SEGMENTATION_SILENCE_TIMEOUT_MS: azureOptions.speechSegmentationSilenceTimeoutMs}),
...(azureOptions.languageIdMode &&
{AZURE_LANGUAGE_ID_MODE: azureOptions.languageIdMode}),
...(azureOptions.postProcessing &&
{AZURE_POST_PROCESSING_OPTION: azureOptions.postProcessing}),
...(sttCredentials && {
...(sttCredentials.api_key && {AZURE_SUBSCRIPTION_KEY: sttCredentials.api_key}),
...(sttCredentials.region && {AZURE_REGION: sttCredentials.region}),
@@ -665,7 +699,9 @@ module.exports = (logger) => {
...(deepgramOptions.keywords) &&
{DEEPGRAM_SPEECH_KEYWORDS: deepgramOptions.keywords.join(',')},
...('endpointing' in deepgramOptions) &&
{DEEPGRAM_SPEECH_ENDPOINTING: deepgramOptions.endpointing === false ? 'false' : deepgramOptions.endpointing},
{DEEPGRAM_SPEECH_ENDPOINTING: deepgramOptions.endpointing === false ? 'false' : deepgramOptions.endpointing,
// default DEEPGRAM_SPEECH_UTTERANCE_END_MS is 1000, will be override by user settings later if there is.
DEEPGRAM_SPEECH_UTTERANCE_END_MS: 1000},
...(deepgramOptions.utteranceEndMs) &&
{DEEPGRAM_SPEECH_UTTERANCE_END_MS: deepgramOptions.utteranceEndMs},
...(deepgramOptions.vadTurnoff) &&
@@ -778,8 +814,26 @@ module.exports = (logger) => {
...(rOpts.hints?.length > 0 &&
{ASSEMBLYAI_WORD_BOOST: JSON.stringify(rOpts.hints)})
};
}
else if (vendor.startsWith('custom:')) {
} else if ('verbio' === vendor) {
const {verbioOptions = {}} = rOpts;
opts = {
...opts,
...(sttCredentials.access_token && { VERBIO_ACCESS_TOKEN: sttCredentials.access_token}),
...(sttCredentials.engine_version && {VERBIO_ENGINE_VERSION: sttCredentials.engine_version}),
...(language && {VERBIO_LANGUAGE: language}),
...(verbioOptions.enable_formatting && {VERBIO_ENABLE_FORMATTING: verbioOptions.enable_formatting}),
...(verbioOptions.enable_diarization && {VERBIO_ENABLE_DIARIZATION: verbioOptions.enable_diarization}),
...(verbioOptions.topic && {VERBIO_TOPIC: verbioOptions.topic}),
...(verbioOptions.inline_grammar && {VERBIO_INLINE_GRAMMAR: verbioOptions.inline_grammar}),
...(verbioOptions.grammar_uri && {VERBIO_GRAMMAR_URI: verbioOptions.grammar_uri}),
...(verbioOptions.label && {VERBIO_LABEL: verbioOptions.label}),
...(verbioOptions.recognition_timeout && {VERBIO_RECOGNITION_TIMEOUT: verbioOptions.recognition_timeout}),
...(verbioOptions.speech_complete_timeout &&
{VERBIO_SPEECH_COMPLETE_TIMEOUT: verbioOptions.speech_complete_timeout}),
...(verbioOptions.speech_incomplete_timeout &&
{VERBIO_SPEECH_INCOMPLETE_TIMEOUT: verbioOptions.speech_incomplete_timeout}),
};
} else if (vendor.startsWith('custom:')) {
let {options = {}} = rOpts;
const {auth_token, custom_stt_url} = sttCredentials;
options = {
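For context, a hypothetical recognizer block that would populate rOpts.verbioOptions in the verbio branch above; the option names come from that code, while the nesting under recognizer mirrors the other vendors and is an assumption:

const gatherVerb = {
  verb: 'gather',
  input: ['speech'],
  recognizer: {
    vendor: 'verbio',
    language: 'es-ES',
    verbioOptions: {
      enable_formatting: true,   // maps to VERBIO_ENABLE_FORMATTING
      topic: 'generic',          // maps to VERBIO_TOPIC
      recognition_timeout: 10    // maps to VERBIO_RECOGNITION_TIMEOUT
    }
  }
};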
@@ -56,6 +56,12 @@ class WsRequestor extends BaseRequestor {
}

if (type === 'session:new') this.call_sid = params.callSid;
if (type === 'session:reconnect') {
this._reconnectPromise = new Promise((resolve, reject) => {
this._reconnectResolve = resolve;
this._reconnectReject = reject;
});
}

/* if we have an absolute url, and it is http then do a standard webhook */
if (this._isAbsoluteUrl(url) && url.startsWith('http')) {
@@ -71,20 +77,23 @@ class WsRequestor extends BaseRequestor {
}

/* connect if necessary */
const queueMsg = () => {
this.logger.debug(
`WsRequestor:request(${this.id}) - queueing ${type} message since we are connecting`);
if (wantsAck) {
const p = new Promise((resolve, reject) => {
this.queuedMsg.push({type, hook, params, httpHeaders, promise: {resolve, reject}});
});
return p;
}
else {
this.queuedMsg.push({type, hook, params, httpHeaders});
}
return;
};
if (!this.ws) {
if (this.connectInProgress) {
this.logger.debug(
`WsRequestor:request(${this.id}) - queueing ${type} message since we are connecting`);
if (wantsAck) {
const p = new Promise((resolve, reject) => {
this.queuedMsg.push({type, hook, params, httpHeaders, promise: {resolve, reject}});
});
return p;
}
else {
this.queuedMsg.push({type, hook, params, httpHeaders});
}
return;
return queueMsg();
}
this.connectInProgress = true;
this.logger.debug(`WsRequestor:request(${this.id}) - connecting since we do not have a connection for ${type}`);
@@ -102,6 +111,10 @@ class WsRequestor extends BaseRequestor {
return Promise.reject(err);
}
}
// If jambonz wait for ack from reconnect, queue the msg until reconnect is acked
if (type !== 'session:reconnect' && this._reconnectPromise) {
return queueMsg();
}
assert(this.ws);

/* prepare and send message */
@@ -119,7 +132,7 @@ class WsRequestor extends BaseRequestor {
type,
msgid,
call_sid: this.call_sid,
hook: type === 'verb:hook' ? url : undefined,
hook: ['verb:hook', 'session:redirect'].includes(type) ? url : undefined,
data: {...payload},
...b3
};
@@ -139,6 +152,18 @@ class WsRequestor extends BaseRequestor {
}
};

const rejectQueuedMsgs = (err) => {
if (this.queuedMsg.length > 0) {
for (const {promise} of this.queuedMsg) {
this.logger.debug(`WsRequestor:request - preparing queued ${type} for rejectQueuedMsgs`);
if (promise) {
promise.reject(err);
}
}
this.queuedMsg.length = 0;
}
};

//this.logger.debug({obj}, `websocket: sending (${url})`);

/* special case: reconnecting before we received ack to session:new */
@@ -179,16 +204,37 @@ class WsRequestor extends BaseRequestor {
this.logger.debug({response}, `WsRequestor:request ${url} succeeded in ${rtt}ms`);
this.stats.histogram('app.hook.ws_response_time', rtt, ['hook_type:app']);
resolve(response);
if (this._reconnectResolve) {
this._reconnectResolve();
}
},
failure: (err) => {
if (this._reconnectReject) {
this._reconnectReject(err);
}
clearTimeout(timer);
reject(err);
}
});

/* send the message */
this.ws.send(JSON.stringify(obj), () => {
this.ws.send(JSON.stringify(obj), async() => {
this.logger.debug({obj}, `WsRequestor:request websocket: sent (${url})`);
// If session:reconnect is waiting for ack, hold here until ack to send queuedMsgs
if (this._reconnectPromise) {
try {
await this._reconnectPromise;
} catch (err) {
// bad thing happened to session:recconnect
rejectQueuedMsgs(err);
this.emit('reconnect-error');
return;
} finally {
this._reconnectPromise = null;
this._reconnectResolve = null;
this._reconnectReject = null;
}
}
sendQueuedMsgs();
});
});
@@ -346,7 +392,9 @@ class WsRequestor extends BaseRequestor {
/* messages must be JSON format */
try {
const obj = JSON.parse(content);
const {type, msgid, command, call_sid = this.call_sid, queueCommand = false, data} = obj;
//const {type, msgid, command, call_sid = this.call_sid, queueCommand = false, data} = obj;
const {type, msgid, command, queueCommand = false, data} = obj;
const call_sid = obj.callSid || this.call_sid;

//this.logger.debug({obj}, 'WsRequestor:request websocket: received');
assert.ok(type, 'type property not supplied');

9879
package-lock.json
generated
File diff suppressed because it is too large
70
package.json
@@ -1,6 +1,6 @@
{
"name": "jambonz-feature-server",
"version": "0.8.6",
"version": "0.9.0",
"main": "app.js",
"engines": {
"node": ">= 18.x"
@@ -25,57 +25,57 @@
"jslint:fix": "eslint app.js tracer.js lib --fix"
},
"dependencies": {
"@aws-sdk/client-auto-scaling": "^3.360.0",
"@aws-sdk/client-sns": "^3.360.0",
"@jambonz/db-helpers": "^0.9.3",
"@aws-sdk/client-auto-scaling": "^3.549.0",
"@aws-sdk/client-sns": "^3.549.0",
"@jambonz/db-helpers": "^0.9.6",
"@jambonz/http-health-check": "^0.0.1",
"@jambonz/mw-registrar": "^0.2.4",
"@jambonz/realtimedb-helpers": "^0.8.7",
"@jambonz/speech-utils": "^0.0.41",
"@jambonz/stats-collector": "^0.1.9",
"@jambonz/mw-registrar": "^0.2.7",
"@jambonz/realtimedb-helpers": "^0.8.8",
"@jambonz/speech-utils": "^0.1.11",
"@jambonz/stats-collector": "^0.1.10",
"@jambonz/time-series": "^0.2.8",
"@jambonz/verb-specifications": "^0.0.53",
"@opentelemetry/api": "^1.4.0",
"@opentelemetry/exporter-jaeger": "^1.9.0",
"@opentelemetry/exporter-trace-otlp-http": "^0.35.0",
"@opentelemetry/exporter-zipkin": "^1.9.0",
"@opentelemetry/instrumentation": "^0.35.0",
"@opentelemetry/resources": "^1.9.0",
"@opentelemetry/sdk-trace-base": "^1.9.0",
"@opentelemetry/sdk-trace-node": "^1.9.0",
"@opentelemetry/semantic-conventions": "^1.9.0",
"@jambonz/verb-specifications": "^0.0.74",
"@opentelemetry/api": "^1.8.0",
"@opentelemetry/exporter-jaeger": "^1.23.0",
"@opentelemetry/exporter-trace-otlp-http": "^0.50.0",
"@opentelemetry/exporter-zipkin": "^1.23.0",
"@opentelemetry/instrumentation": "^0.50.0",
"@opentelemetry/resources": "^1.23.0",
"@opentelemetry/sdk-trace-base": "^1.23.0",
"@opentelemetry/sdk-trace-node": "^1.23.0",
"@opentelemetry/semantic-conventions": "^1.23.0",
"bent": "^7.3.12",
"debug": "^4.3.4",
"deepcopy": "^2.1.0",
"drachtio-fsmrf": "^3.0.37",
"drachtio-srf": "^4.5.31",
"express": "^4.18.2",
"drachtio-fsmrf": "^3.0.43",
"drachtio-srf": "^4.5.35",
"express": "^4.19.2",
"express-validator": "^7.0.1",
"ip": "^1.1.8",
"moment": "^2.29.4",
"parse-url": "^8.1.0",
"pino": "^8.8.0",
"ip": "^2.0.1",
"moment": "^2.30.1",
"parse-url": "^9.2.0",
"pino": "^8.20.0",
"polly-ssml-split": "^0.1.0",
"proxyquire": "^2.1.3",
"sdp-transform": "^2.14.1",
"short-uuid": "^4.2.2",
"sinon": "^15.0.1",
"sdp-transform": "^2.14.2",
"short-uuid": "^5.1.0",
"sinon": "^17.0.1",
"to-snake-case": "^1.0.0",
"undici": "^5.26.2",
"undici": "^6.19.2",
"uuid-random": "^1.3.2",
"verify-aws-sns-signature": "^0.1.0",
"ws": "^8.9.0",
"ws": "^8.17.1",
"xml2js": "^0.6.2"
},
"devDependencies": {
"clear-module": "^4.1.2",
"eslint": "^7.32.0",
"eslint-plugin-promise": "^4.3.1",
"eslint": "7.32.0",
"eslint-plugin-promise": "^6.1.1",
"nyc": "^15.1.0",
"tape": "^5.6.1"
"tape": "^5.7.5"
},
"optionalDependencies": {
"bufferutil": "^4.0.6",
"utf-8-validate": "^5.0.8"
"bufferutil": "^4.0.8",
"utf-8-validate": "^6.0.3"
}
}

@@ -42,7 +42,7 @@ services:
ipv4_address: 172.38.0.7

drachtio:
image: drachtio/drachtio-server:0.8.24
image: drachtio/drachtio-server:0.8.25-rc8
restart: always
command: drachtio --contact "sip:*;transport=udp" --mtu 4096 --address 0.0.0.0 --port 9022
ports:
@@ -57,7 +57,7 @@ services:
condition: service_healthy

freeswitch:
image: drachtio/drachtio-freeswitch-mrf:0.6.1
image: drachtio/drachtio-freeswitch-mrf:0.7.3
restart: always
command: freeswitch --rtp-range-start 20000 --rtp-range-end 20100
environment: