Compare commits

...

19 Commits

Author SHA1 Message Date
Dave Horton  46302703da  further fix for #192, also bug fix for starting with a ws connection and switching to webhooks later in the same call  2022-12-05 10:53:41 -05:00
Dave Horton  c728417581  bugfix #192: config with dtmf only followed later by gather with speech not working  2022-12-01 14:06:29 -05:00
Dave Horton  8853f84f01  add custom header on Refer indicating whether sbc-inbound should fix up the Refer-To  2022-11-30 12:50:54 -05:00
Dave Horton  665d26b6fb  bugfix: continuous asr timer in gather should not start until transcript is received  2022-11-29 11:37:41 -05:00
Dave Horton  d69c773de0  include service_provider_sid in call webhook  2022-11-29 11:27:20 -05:00
Dave Horton  21eaa442b2  add recognizer.azureServiceEndpoint for custom azure voices  2022-11-25 10:46:47 -05:00
Dave Horton  6484086222  feature: return transcript faster if we get an exact match to a provided hint on an interim transcript (requires env JAMBONZ_GATHER_EARLY_HINTS_MATCH=1)  2022-11-25 08:15:18 -05:00
Dave Horton  01645df920  error handling in amd  2022-11-22 15:40:26 -05:00
Guilherme Rauen  b2363b09c1  update node image to the latest and most secure (#189) (Co-authored-by: Guilherme Rauen <g.rauen@cognigy.com>)  2022-11-11 17:45:26 -05:00
Dave Horton  c11d892f0a  bugfix: microsoft tts voice was not being sent in tts request, resulting in a default voice being selected  2022-11-10 13:00:59 -05:00
Dave Horton  9fd116b05f  fix for #186: unhandled error when amd webhook returns non-success status code  2022-11-05 10:27:00 -04:00
Dave Horton  19098aee98  fixes for custom voice testing in azure  2022-11-04 09:36:44 -04:00
Dave Horton  d15dbf7f5a  update to synthAudio with support for Azure custom voices  2022-11-04 08:27:09 -04:00
Dave Horton  824f983955  update deps  2022-11-02 13:40:25 -04:00
Dave Horton  7c76bc52f6  update to db-helpers with caching fix  2022-11-01 20:57:18 -04:00
Dave Horton  bfc8a99950  bugfix: ws error max connections error causes a crash  2022-11-01 11:33:03 -04:00
Dave Horton  9097c6d6ac  bugfix when running multiple instances in EC2  2022-10-31 14:42:53 -04:00
Dave Horton  15b2fdd5a8  update to db-helpers@0.7.0 with caching option  2022-10-31 11:43:07 -04:00
Dave Horton  979e17c814  add support for Azure audio logging in gather and transcribe  2022-10-31 11:08:16 -04:00
19 changed files with 2143 additions and 2286 deletions

View File

@@ -1,4 +1,4 @@
-FROM --platform=linux/amd64 node:18.9.0-alpine3.16 as base
+FROM --platform=linux/amd64 node:18.12.1-alpine3.16 as base
 RUN apk --update --no-cache add --virtual .builds-deps build-base python3

View File

@@ -34,6 +34,7 @@ router.post('/:partner', async(req, res) => {
     carrier: req.params.partner,
     messageSid: app.messageSid,
     accountSid: app.accountSid,
+    serviceProviderSid: account.service_provider_sid,
     applicationSid: app.applicationSid,
     from: req.body.from,
     to: req.body.to,

View File

@@ -41,7 +41,7 @@ function retrieveCallSession(callSid, opts) {
 router.post('/:callSid', async(req, res) => {
   const logger = req.app.locals.logger;
   const callSid = req.params.callSid;
-  logger.debug({body: req.body}, 'got upateCall request');
+  logger.debug({body: req.body}, 'got updateCall request');
   try {
     const cs = retrieveCallSession(callSid, req.body);
     if (!cs) {

View File

@@ -118,6 +118,7 @@ module.exports = function(srf, logger) {
     const {span} = rootSpan.startChildSpan('lookupAccountDetails');
     try {
       req.locals.accountInfo = await lookupAccountDetails(account_sid);
+      req.locals.service_provider_sid = req.locals.accountInfo?.account?.service_provider_sid;
       span.end();
       if (!req.locals.accountInfo.account.is_active) {
         logger.info(`Account is inactive or suspended ${account_sid}`);
@@ -273,7 +274,9 @@ module.exports = function(srf, logger) {
     }
     /* retrieve the application to execute for this inbound call */
     const params = Object.assign(['POST', 'WS'].includes(app.call_hook.method) ? {sip: req.msg} : {},
-      req.locals.callInfo, {
+      req.locals.callInfo,
+      {service_provider_sid: req.locals.service_provider_sid},
+      {
         defaults: {
           synthesizer: {
             vendor: app.speech_synthesis_vendor,
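The visible effect of this change and the sms route change above (commit d69c773de0) is that webhooks now carry the service provider SID alongside the account SID. A hedged sketch of the shape an application might now receive; all values are illustrative placeholders, not taken from the diff:

// illustrative webhook payload; only service_provider_sid is newly added
const payload = {
  call_sid: 'df09e8d4-...',              // placeholder
  account_sid: 'ed649e33-...',           // placeholder
  service_provider_sid: '2708b1b3-...',  // newly included (placeholder)
  application_sid: '0e0681b0-...',       // placeholder
  direction: 'inbound',
  from: '+15083084809',
  to: '+15082728075',
  call_status: 'trying'
};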

View File

@@ -551,7 +551,9 @@ class CallSession extends Emitter {
         api_key: credential.api_key,
         region: credential.region,
         use_custom_stt: credential.use_custom_stt,
-        custom_stt_endpoint: credential.custom_stt_endpoint
+        custom_stt_endpoint: credential.custom_stt_endpoint,
+        use_custom_tts: credential.use_custom_tts,
+        custom_tts_endpoint: credential.custom_tts_endpoint
       };
     }
     else if ('wellsaid' === vendor) {
@@ -586,13 +588,19 @@ class CallSession extends Emitter {
       this.logger.info(`CallSession:exec starting task #${stackNum}:${taskNum}: ${task.name}`);
       try {
         const resources = await this._evaluatePreconditions(task);
+        let skip = false;
         this.currentTask = task;
         if (TaskName.Gather === task.name && this.isBotModeEnabled) {
-          const timeout = task.timeout;
-          this.logger.info(`CallSession:exec skipping #${stackNum}:${taskNum}: ${task.name}`);
-          this.backgroundGatherTask.updateTimeout(timeout);
+          if (this.backgroundGatherTask.updateTaskInProgress(task)) {
+            this.logger.info(`CallSession:exec skipping #${stackNum}:${taskNum}: ${task.name}`);
+            skip = true;
+          }
+          else {
+            this.logger.info('CallSession:exec disabling bot mode to start gather with new options');
+            this.disableBotMode();
+          }
         }
-        else {
+        if (!skip) {
           const {span, ctx} = this.rootSpan.startChildSpan(`verb:${task.summary}`);
           task.span = span;
           task.ctx = ctx;

View File

@@ -146,7 +146,11 @@ class TaskConfig extends Task {
   _onAmdEvent(cs, evt) {
     this.logger.info({evt}, 'Config:_onAmdEvent');
     const {actionHook} = this.data.amd;
-    this.performHook(cs, actionHook, evt);
+    this.performHook(cs, actionHook, evt)
+      .catch((err) => {
+        this.logger.error({err}, 'Config:_onAmdEvent - error calling actionHook');
+      });
   }
 }
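Context for this and the identical change in dial.js below: performHook returns a promise, and a webhook answering with a non-success status (see fix for #186 in the commit list) makes that promise reject. In Node 15+ an unhandled rejection terminates the process, so the fire-and-forget call could crash the server. A minimal sketch of the failure mode, with hypothetical stand-in names:

// hypothetical stand-ins for performHook and the amd event handler
async function performHook() {
  throw new Error('amd webhook returned 500');
}

function onAmdEventBuggy() {
  performHook(); // rejection has no handler; Node 15+ exits with ERR_UNHANDLED_REJECTION
}

function onAmdEventFixed() {
  performHook().catch((err) => console.error('error calling actionHook', err));
}

onAmdEventFixed(); // logs the error instead of crashing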

View File

@@ -689,7 +689,10 @@ class TaskDial extends Task {
   _onAmdEvent(cs, evt) {
     this.logger.info({evt}, 'Dial:_onAmdEvent');
     const {actionHook} = this.data.amd;
-    this.performHook(cs, actionHook, evt);
+    this.performHook(cs, actionHook, evt)
+      .catch((err) => {
+        this.logger.error({err}, 'Dial:_onAmdEvent - error calling actionHook');
+      });
   }
 }

View File

@@ -83,6 +83,7 @@ class TaskGather extends Task {
       this.initialSpeechTimeoutMs = recognizer.initialSpeechTimeoutMs || 0;
       this.azureServiceEndpoint = recognizer.azureServiceEndpoint;
       this.azureSttEndpointId = recognizer.azureSttEndpointId;
+      this.azureAudioLogging = recognizer.audioLogging;
     }
     else {
       this.hints = [];
@@ -142,6 +143,14 @@
       this.logger.debug({hints: this.hints, hintsBoost: this.hintsBoost},
         'Gather:exec - applying global sttHints');
     }
+    if (process.env.JAMBONZ_GATHER_EARLY_HINTS_MATCH &&
+      !this.isContinuousAsr &&
+      this.hints.length > 0 && this.hints.length <= 10) {
+      this.earlyHintsMatch = true;
+      this.interim = true;
+      this.logger.debug('Gather:exec - early hints match enabled');
+    }
     if (cs.hasAltLanguages) {
       this.altLanguages = this.altLanguages.concat(cs.altLanguages);
       this.logger.debug({altLanguages: this.altLanguages},
@@ -177,7 +186,8 @@
     const startListening = (cs, ep) => {
       this._startTimer();
-      if (this.isContinuousAsr && 0 === this.timeout) this._startAsrTimer();
+      // dont start asr timer until we have a transcription
+      //if (this.isContinuousAsr && 0 === this.timeout) this._startAsrTimer();
       if (this.input.includes('speech') && !this.listenDuringPrompt) {
         this._initSpeech(cs, ep)
           .then(() => {
@@ -250,10 +260,15 @@
     this._resolve('killed');
   }
-  updateTimeout(timeout) {
-    this.logger.info(`TaskGather:updateTimeout - updating timeout to ${timeout}`);
+  updateTaskInProgress(opts) {
+    if (!this.needsStt && opts.input.includes('speech')) {
+      this.logger.info('TaskGather:updateTaskInProgress - adding speech to a background gather');
+      return false; // this needs be handled by killing the background gather and starting a new one
+    }
+    const {timeout} = opts;
     this.timeout = timeout;
     this._startTimer();
+    return true;
   }
   _onDtmf(cs, ep, evt) {
@@ -381,6 +396,7 @@
       else {
         opts.AZURE_SPEECH_ALTERNATIVE_LANGUAGE_CODES = '';
       }
+      if (this.azureAudioLogging) opts.AZURE_AUDIO_LOGGING = 1;
       if (this.requestSnr) opts.AZURE_REQUEST_SNR = 1;
       if (this.profanityOption && this.profanityOption !== 'raw') opts.AZURE_PROFANITY_OPTION = this.profanityOption;
       if (this.azureServiceEndpoint) opts.AZURE_SERVICE_ENDPOINT = this.azureServiceEndpoint;
@@ -424,8 +440,7 @@
     if (0 === this.timeout) return;
     this._clearTimer();
     this._timeoutTimer = setTimeout(() => {
-      if (this.isContinuousAsr) this._startAsrTimer();
-      else this._resolve(this.digitBuffer.length >= this.minDigits ? 'dtmf-num-digits' : 'timeout');
+      this._resolve(this.digitBuffer.length >= this.minDigits ? 'dtmf-num-digits' : 'timeout');
     }, this.timeout);
   }
@@ -524,6 +539,15 @@
       }
     }
+    if (this.earlyHintsMatch && evt.is_final === false) {
+      const transcript = evt.alternatives[0].transcript?.toLowerCase();
+      if (this.hints.find((h) => h.toLowerCase() === transcript)) {
+        this.logger.debug({evt}, 'Gather:_onTranscription: early hint match');
+        this._resolve('speech', evt);
+        return;
+      }
+    }
     /* count words for bargein feature */
     const words = evt.alternatives[0].transcript.split(' ').length;
     const bufferedWords = this._bufferedTranscripts.reduce((count, e) => {
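To see the feature from commit 6484086222 in use: with JAMBONZ_GATHER_EARLY_HINTS_MATCH=1 set in the feature server's environment, a gather like the sketch below (a hypothetical application response; property names follow the jambonz gather verb) resolves as soon as an interim transcript exactly matches one of the hints, rather than waiting for the final transcript. Per the code above, continuous ASR must be off and at most 10 hints may be supplied:

// hypothetical application response exercising the early-hints path
const verbs = [
  {
    verb: 'gather',
    input: ['speech'],
    actionHook: '/menu-choice',               // placeholder URL
    recognizer: {
      vendor: 'google',
      language: 'en-US',
      hints: ['sales', 'support', 'billing']  // 1-10 hints enables the early match
    }
  }
];
// an interim transcript of exactly "support" (case-insensitive)
// now resolves the gather immediately with reason 'speech'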

View File

@@ -164,6 +164,10 @@ class TaskSay extends Task {
         'tts.voice': voice
       });
       try {
+        if (vendor === 'microsoft' && this.synthesizer.azureServiceEndpoint) {
+          credentials.use_custom_tts = true;
+          credentials.custom_tts_endpoint = this.synthesizer.azureServiceEndpoint;
+        }
         const {filePath, servedFromCache} = await synthAudio(stats, {
           text,
           vendor,
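Combined with the CallSession and speechMapper changes that carry use_custom_tts / custom_tts_endpoint, this lets a say verb target an Azure custom neural voice by naming a custom service endpoint. A sketch, with placeholder endpoint and voice name:

// sketch of a say verb using an Azure custom voice; values are placeholders
const verbs = [
  {
    verb: 'say',
    text: 'Hello from a custom neural voice',
    synthesizer: {
      vendor: 'microsoft',
      language: 'en-US',
      voice: 'MyBrandNeuralVoice',  // custom voice name (placeholder)
      azureServiceEndpoint: 'https://example.cognitiveservices.azure.com/'  // placeholder
    }
  }
];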

View File

@@ -36,6 +36,7 @@ class TaskSipRefer extends Task {
       method: 'REFER',
       headers: {
         ...this.headers,
+        ...(this.referToIsUri && {'X-Refer-To-Leave-Untouched': true}),
         'Refer-To': referTo,
         'Referred-By': referredBy
       }
@@ -100,6 +101,7 @@ class TaskSipRefer extends Task {
       /* they may have only provided a phone number/user */
       referTo = `sip:${referTo}@${host}`;
     }
+    else this.referToIsUri = true;
     if (!referredBy) {
       /* default */
       referredBy = cs.req?.callingNumber || dlg.local.uri;
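This implements commit 8853f84f01: only when the application supplied a full SIP URI (so no sip:<user>@<host> rewrite was needed) does the outgoing REFER carry the custom X-Refer-To-Leave-Untouched header telling sbc-inbound not to fix up the Refer-To. A hedged sketch of the two cases, with placeholder values:

// sketch: two application responses and the REFER each would produce
const bareUser = {verb: 'sip:refer', referTo: '16173333456'};
// -> Refer-To rewritten to sip:16173333456@<host>; no custom header added

const fullUri = {verb: 'sip:refer', referTo: 'sip:support@pbx.example.com'};
// -> Refer-To passed through untouched, plus X-Refer-To-Leave-Untouched: true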

View File

@@ -435,7 +435,8 @@
"gender": {
"type": "string",
"enum": ["MALE", "FEMALE", "NEUTRAL"]
}
},
"azureServiceEndpoint": "string"
},
"required": [
"vendor"
@@ -509,7 +510,8 @@
"azureServiceEndpoint": "string",
"azureSttEndpointId": "string",
"asrDtmfTerminationDigit": "string",
"asrTimeout": "number"
"asrTimeout": "number",
"audioLogging": "boolean"
},
"required": [
"vendor"

View File

@@ -55,6 +55,7 @@ class TaskTranscribe extends Task {
       this.initialSpeechTimeoutMs = recognizer.initialSpeechTimeoutMs || 0;
       this.azureServiceEndpoint = recognizer.azureServiceEndpoint;
       this.azureSttEndpointId = recognizer.azureSttEndpointId;
+      this.azureAudioLogging = recognizer.audioLogging;
     }
   get name() { return TaskName.Transcribe; }
@@ -249,6 +250,7 @@
     }
     if (this.altLanguages.length > 0) opts.AZURE_SPEECH_ALTERNATIVE_LANGUAGE_CODES = this.altLanguages.join(',');
     else opts.AZURE_SPEECH_ALTERNATIVE_LANGUAGE_CODES = '';
+    if (this.azureAudioLogging) opts.AZURE_AUDIO_LOGGING = 1;
     if (this.requestSnr) opts.AZURE_REQUEST_SNR = 1;
     if (this.profanityOption !== 'raw') opts.AZURE_PROFANITY_OPTION = this.profanityOption;
     if (this.initialSpeechTimeoutMs > 0) opts.AZURE_INITIAL_SPEECH_TIMEOUT_MS = this.initialSpeechTimeoutMs;
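End to end, the new recognizer.audioLogging flag (added to the schema above) is picked up by both gather and transcribe and surfaces as the AZURE_AUDIO_LOGGING channel variable, enabling Azure's service-side audio logging for the recognition session. A sketch of a transcribe verb opting in; the hook URL is a placeholder:

// sketch of a transcribe verb opting in to Azure audio logging
const verbs = [
  {
    verb: 'transcribe',
    transcriptionHook: '/transcripts',  // placeholder URL
    recognizer: {
      vendor: 'microsoft',
      language: 'en-US',
      audioLogging: true                // maps to AZURE_AUDIO_LOGGING=1
    }
  }
];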

View File

@@ -273,26 +273,46 @@ module.exports = (logger) => {
     amd
       .on(AmdEvents.NoSpeechDetected, (evt) => {
         task.emit('amd', {type: AmdEvents.NoSpeechDetected, ...evt});
-        ep.stopTranscription({vendor, bugname});
+        try {
+          ep.connected && ep.stopTranscription({vendor, bugname});
+        } catch (err) {
+          logger.info({err}, 'Error stopping transcription');
+        }
       })
       .on(AmdEvents.HumanDetected, (evt) => {
         task.emit('amd', {type: AmdEvents.HumanDetected, ...evt});
-        ep.stopTranscription({vendor, bugname});
+        try {
+          ep.connected && ep.stopTranscription({vendor, bugname});
+        } catch (err) {
+          logger.info({err}, 'Error stopping transcription');
+        }
       })
       .on(AmdEvents.MachineDetected, (evt) => {
         task.emit('amd', {type: AmdEvents.MachineDetected, ...evt});
       })
       .on(AmdEvents.DecisionTimeout, (evt) => {
         task.emit('amd', {type: AmdEvents.DecisionTimeout, ...evt});
-        ep.stopTranscription({vendor, bugname});
+        try {
+          ep.connected && ep.stopTranscription({vendor, bugname});
+        } catch (err) {
+          logger.info({err}, 'Error stopping transcription');
+        }
       })
       .on(AmdEvents.ToneTimeout, (evt) => {
         //task.emit('amd', {type: AmdEvents.ToneTimeout, ...evt});
-        ep.execute('avmd_stop').catch((err) => logger.info(err, 'Error stopping avmd'));
+        try {
+          ep.connected && ep.execute('avmd_stop').catch((err) => logger.info(err, 'Error stopping avmd'));
+        } catch (err) {
+          logger.info({err}, 'Error stopping avmd');
+        }
       })
       .on(AmdEvents.MachineStoppedSpeaking, () => {
         task.emit('amd', {type: AmdEvents.MachineStoppedSpeaking});
-        ep.stopTranscription({vendor, bugname});
+        try {
+          ep.connected && ep.stopTranscription({vendor, bugname});
+        } catch (err) {
+          logger.info({err}, 'Error stopping transcription');
+        }
       });
     /* start transcribing, and also listening for beep */
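The guard is identical in four of the five handlers: a late AMD event can arrive after the endpoint has been released, so each call now checks ep.connected and swallows any error. Were one to factor it out, it might look like this sketch (a suggestion, not in the source):

// hypothetical helper capturing the repeated guard (not in the source)
const safeStopTranscription = (ep, logger, opts) => {
  try {
    ep.connected && ep.stopTranscription(opts);
  } catch (err) {
    logger.info({err}, 'Error stopping transcription');
  }
};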

View File

@@ -36,7 +36,7 @@ class SnsNotifier extends Emitter {
         logger.info(`SNS lifecycle server failed to bind port on ${e.port}, will try next port`);
         const server = this._doListen(logger, app, ++e.port, resolve);
-        server.on('error', this._handleErrors.bind(null, logger, app, resolve, reject));
+        server.on('error', this._handleErrors.bind(this, logger, app, resolve, reject));
         return;
       }
       reject(e);
@@ -120,7 +120,7 @@ class SnsNotifier extends Emitter {
       });
       return new Promise((resolve, reject) => {
         const server = this._doListen(this.logger, app, PORT, resolve);
-        server.on('error', this._handleErrors.bind(null, this.logger, app, resolve, reject));
+        server.on('error', this._handleErrors.bind(this, this.logger, app, resolve, reject));
       });
     } catch (err) {
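A subtle fix, plausibly part of the multiple-instances-in-EC2 bug (commit 9097c6d6ac): with .bind(null, ...) the error handler runs without its instance, so any reference to this inside _handleErrors throws (class bodies are strict mode, so this stays null) — and the bug only surfaces when a port collision actually occurs, e.g. several feature servers on one host. A minimal illustration with a hypothetical class:

// hypothetical illustration of why the bound receiver matters
class Notifier {
  constructor() { this.retries = 0; }
  handleError(err) {
    this.retries += 1;  // TypeError if bound with a null receiver
    console.log(`retry ${this.retries} after ${err.code}`);
  }
  listen(server) {
    server.on('error', this.handleError.bind(this)); // not .bind(null)
  }
}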

View File

@@ -39,6 +39,8 @@ const speechMapper = (cred) => {
       obj.region = o.region;
       obj.use_custom_stt = o.use_custom_stt;
       obj.custom_stt_endpoint = o.custom_stt_endpoint;
+      obj.use_custom_tts = o.use_custom_tts;
+      obj.custom_tts_endpoint = o.custom_tts_endpoint;
     }
     else if ('wellsaid' === obj.vendor) {
       const o = JSON.parse(decrypt(credential));

View File

@@ -21,6 +21,7 @@ const handleErrors = (logger, app, resolve, reject, e) => {
     server.on('error', handleErrors.bind(null, logger, app, resolve, reject));
     return;
   }
+  logger.info({err: e, port: PORT}, 'httpListener error');
   reject(e);
 };
@@ -30,7 +31,7 @@ const createHttpListener = (logger, srf) => {
   app.use(express.urlencoded({ extended: true }));
   app.use(express.json());
   app.use('/', httpRoutes);
-  app.use((err, req, res, next) => {
+  app.use((err, _req, res, _next) => {
     logger.error(err, 'burped error');
     res.status(err.status || 500).json({msg: err.message});
   });

View File

@@ -54,7 +54,7 @@ class WsRequestor extends BaseRequestor {
     /* if we have an absolute url, and it is http then do a standard webhook */
     if (this._isAbsoluteUrl(url) && url.startsWith('http')) {
       this.logger.debug({hook}, 'WsRequestor: sending a webhook (HTTP)');
-      const requestor = new HttpRequestor(this.logger, this.account_sid, hook, this.secret);
+      const requestor = new HttpRequestor(this.logger, this.account_sid, {url: hook}, this.secret);
       return requestor.request(type, hook, params, httpHeaders);
     }
@@ -69,7 +69,7 @@ class WsRequestor extends BaseRequestor {
       this.connectInProgress = true;
       this.logger.debug(`WsRequestor:request(${this.id}) - connecting since we do not have a connection`);
       if (this.connections >= MAX_RECONNECTS) {
-        throw new Error(`max attempts connecting to ${this.url}`);
+        return Promise.reject(`max attempts connecting to ${this.url}`);
       }
       try {
         const startAt = process.hrtime();
@@ -79,7 +79,7 @@ class WsRequestor extends BaseRequestor {
       } catch (err) {
         this.logger.info({url, err}, 'WsRequestor:request - failed connecting');
         this.connectInProgress = false;
-        throw err;
+        return Promise.reject(err);
       }
     }
     assert(this.ws);
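Two distinct fixes are visible in this file. First, HttpRequestor evidently expects a hook object with a url property, so passing the bare string broke falling back to webhooks from a ws connection (commits 46302703da and c728417581). Second, replacing throw with Promise.reject addresses the max-connections crash (commit bfc8a99950): assuming the throw could occur on a synchronous path before any promise exists, the caller's .catch would never see it. A self-contained sketch of that distinction, with hypothetical names:

// hypothetical sketch: synchronous throw vs returned rejection
const tooManyConnections = true;
const doConnect = () => Promise.resolve('connected');

function connectBuggy() {
  if (tooManyConnections) throw new Error('max attempts'); // escapes the promise chain
  return doConnect();
}

function connectSafe() {
  if (tooManyConnections) return Promise.reject(new Error('max attempts'));
  return doConnect();
}

connectSafe().catch((err) => console.error(err.message));  // handled: 'max attempts'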

package-lock.json (generated, 4293 changed lines)

File diff suppressed because it is too large

View File

@@ -25,8 +25,8 @@
   },
   "dependencies": {
     "@jambonz/http-health-check": "^0.0.1",
-    "@jambonz/db-helpers": "^0.6.19",
-    "@jambonz/realtimedb-helpers": "^0.4.35",
+    "@jambonz/db-helpers": "^0.7.3",
+    "@jambonz/realtimedb-helpers": "^0.5.9",
     "@jambonz/stats-collector": "^0.1.6",
     "@jambonz/time-series": "^0.2.5",
     "@opentelemetry/api": "^1.1.0",