Feature/vm detection (#137)

* initial changes for amd

* wip

* fix bug where transcripts were discarded

* a bit of refactoring, and adding support for avmd in config verb

* bug fixes
Dave Horton, 2022-07-27 17:46:52 +01:00 (committed by GitHub)
parent a035b67e6c · commit 2882fa2d0a
14 changed files with 503 additions and 30 deletions


@@ -0,0 +1,17 @@
{
"en-US": [
"call has been forwarded",
"at the beep",
"at the tone",
"leave a message",
"leave me a message",
"not available right now",
"not available to take your call",
"can't take your call",
"I will get back to you",
"I'll get back to you",
"we will get back to you",
"we are unable",
"we are not available"
]
}
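
This file seeds the voicemail-greeting hints: phrases are keyed by BCP-47 locale, and amd-utils.js (below) picks the list matching the recognizer language via voicemailHints[language]. A sketch of what an additional locale could look like — the de-DE phrases are illustrative, not part of this commit:

{
  "en-US": ["call has been forwarded", "at the beep"],
  "de-DE": [
    "hinterlassen Sie eine Nachricht",
    "nach dem Signalton"
  ]
}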


@@ -72,6 +72,10 @@ class CallSession extends Emitter {
if (!this.isConfirmCallSession && !this.isSmsCallSession && !this.isAdultingCallSession) {
sessionTracker.add(this.callSid, this);
const {startAmd, stopAmd} = require('../utils/amd-utils')(logger);
this.startAmd = startAmd;
this.stopAmd = stopAmd;
}
this._pool = srf.locals.dbHelpers.pool;


@@ -50,12 +50,19 @@ class TaskConfig extends Task {
const s = `{${v},${l}}`;
phrase.push(`set recognizer${s}`);
}
if (this.data.amd) phrase.push('enable amd');
return `${this.name}{${phrase.join(',')}}`;
}
async exec(cs) {
await super.exec(cs);
if (this.data.amd) {
this.startAmd = cs.startAmd;
this.stopAmd = cs.stopAmd;
this.on('amd', this._onAmdEvent.bind(this, cs));
}
if (this.hasSynthesizer) {
cs.speechSynthesisVendor = this.synthesizer.vendor !== 'default'
? this.synthesizer.vendor
@@ -107,6 +114,12 @@ class TaskConfig extends Task {
async kill(cs) {
super.kill(cs);
}
_onAmdEvent(cs, evt) {
this.logger.info({evt}, 'Config:_onAmdEvent');
const {actionHook} = this.data.amd;
this.performHook(cs, actionHook, evt);
}
}
module.exports = TaskConfig;
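
Given the schema additions later in this commit, a minimal application payload that turns on AMD via the config verb might look like the following sketch (the hook path is illustrative). When a detection event fires, _onAmdEvent above posts it to the actionHook via performHook:

{
  "verb": "config",
  "amd": {
    "actionHook": "/amd-events"
  }
}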


@@ -134,7 +134,10 @@ class TaskDial extends Task {
get name() { return TaskName.Dial; }
get canReleaseMedia() {
- return !process.env.ANCHOR_MEDIA_ALWAYS && !this.listenTask && !this.transcribeTask;
+ return !process.env.ANCHOR_MEDIA_ALWAYS &&
+ !this.listenTask &&
+ !this.transcribeTask &&
+ !this.startAmd;
}
get summary() {
@@ -158,6 +161,11 @@ class TaskDial extends Task {
async exec(cs) {
await super.exec(cs);
try {
if (this.data.amd) {
this.startAmd = cs.startAmd;
this.stopAmd = cs.stopAmd;
this.on('amd', this._onAmdEvent.bind(this, cs));
}
if (cs.direction === CallDirection.Inbound) {
await this._initializeInbound(cs);
}
@@ -181,6 +189,11 @@ class TaskDial extends Task {
async kill(cs, reason) {
super.kill(cs);
try {
if (this.ep && this.ep.amd) this.stopAmd(this.ep, this);
} catch (err) {
this.logger.error({err}, 'DialTask:kill - error stopping answering machine detection');
}
if (this.dialMusic && this.epOther) {
this.epOther.api('uuid_break', this.epOther.uuid)
.catch((err) => this.logger.info(err, 'Error killing dialMusic'));
@@ -226,7 +239,7 @@ class TaskDial extends Task {
this.logger.debug('Dial:whisper executing tasks');
while (tasks.length && !cs.callGone) {
const task = tasks.shift();
- const {span, ctx} = this.startChildSpan(`whisper:${this.sayTask.summary}`);
+ const {span, ctx} = this.startChildSpan(`whisper:${task.summary}`);
task.span = span;
task.ctx = ctx;
await task.exec(cs, callSid === this.callSid ? this.ep : this.epOther);
@@ -571,6 +584,7 @@ class TaskDial extends Task {
* - save the dialog and endpoint
* - clock the start time of the call,
* - start a max call length timer (optionally)
* - start answering machine detection (optionally)
* - launch any nested tasks
* - and establish a handler to clean up if the called party hangs up
*/
@@ -613,6 +627,13 @@ class TaskDial extends Task {
if (this.transcribeTask) this.transcribeTask.exec(cs, this.epOther, this.ep);
if (this.listenTask) this.listenTask.exec(cs, this.epOther);
if (this.startAmd) {
try {
this.startAmd(cs, this.ep, this, this.data.amd);
} catch (err) {
this.logger.info({err}, 'Dial:_selectSingleDial - Error calling startAmd');
}
}
/* if we can release the media back to the SBC, do so now */
if (this.canReleaseMedia) this._releaseMedia(cs, sd);
@@ -659,6 +680,12 @@ class TaskDial extends Task {
this.logger.info({sdp}, 'Dial:handleReinviteAfterMediaReleased - sent reinvite to B leg');
res.send(200, {body: sdp});
}
_onAmdEvent(cs, evt) {
this.logger.info({evt}, 'Dial:_onAmdEvent');
const {actionHook} = this.data.amd;
this.performHook(cs, actionHook, evt);
}
}
module.exports = TaskDial;


@@ -273,6 +273,7 @@ class TaskGather extends Task {
}
if ('google' === this.vendor) {
this.bugname = 'google_trancribe';
if (this.sttCredentials) opts.GOOGLE_APPLICATION_CREDENTIALS = JSON.stringify(this.sttCredentials.credentials);
[
['enhancedModel', 'GOOGLE_SPEECH_USE_ENHANCED'],
@@ -308,6 +309,7 @@ class TaskGather extends Task {
ep.addCustomEventListener(GoogleTranscriptionEvents.VadDetected, this._onVadDetected.bind(this, cs, ep));
}
else if (['aws', 'polly'].includes(this.vendor)) {
this.bugname = 'aws_trancribe';
if (this.vocabularyName) opts.AWS_VOCABULARY_NAME = this.vocabularyName;
if (this.vocabularyFilterName) {
opts.AWS_VOCABULARY_NAME = this.vocabularyFilterName;
@@ -324,6 +326,7 @@ class TaskGather extends Task {
ep.addCustomEventListener(AwsTranscriptionEvents.VadDetected, this._onVadDetected.bind(this, cs, ep));
}
else if ('microsoft' === this.vendor) {
this.bugname = 'azure_trancribe';
if (this.sttCredentials) {
Object.assign(opts, {
'AZURE_SUBSCRIPTION_KEY': this.sttCredentials.api_key,
@@ -355,12 +358,14 @@ class TaskGather extends Task {
this.logger.debug({
vendor: this.vendor,
locale: this.language,
- interim: this.interim
+ interim: this.interim,
+ bugname: this.bugname
}, 'Gather:_startTranscribing');
ep.startTranscription({
vendor: this.vendor,
locale: this.language,
interim: this.interim,
bugname: this.bugname,
}).catch((err) => {
const {writeAlerts, AlertType} = this.cs.srf.locals;
this.logger.error(err, 'TaskGather:_startTranscribing error');
@@ -440,7 +445,11 @@ class TaskGather extends Task {
}
}
- _onTranscription(cs, ep, evt) {
+ _onTranscription(cs, ep, evt, fsEvent) {
+ // make sure this is not a transcript from answering machine detection
+ const bugname = fsEvent.getHeader('media-bugname');
+ if (bugname && this.bugname !== bugname) return;
if ('aws' === this.vendor && Array.isArray(evt) && evt.length > 0) evt = evt[0];
if ('microsoft' === this.vendor) {
const final = evt.RecognitionStatus === 'Success';


@@ -37,7 +37,8 @@
"synthesizer": "#synthesizer",
"recognizer": "#recognizer",
"bargeIn": "#bargeIn",
"record": "#recordOptions"
"record": "#recordOptions",
"amd": "#amd"
},
"required": []
},
@@ -172,7 +173,8 @@
"timeLimit": "number",
"timeout": "number",
"proxy": "string",
"transcribe": "#transcribe"
"transcribe": "#transcribe",
"amd": "#amd"
},
"required": [
"target"
@@ -517,5 +519,22 @@
"required": [
"enable"
]
},
"amd": {
"properties": {
"actionHook": "object|string",
"thresholdWordCount": "number",
"timers": "#amdTimers",
"recognizer": "#recognizer"
},
"required": [
"actionHook"
]
},
"amdTimers": {
"noSpeechTimeoutMs": "number",
"decisionTimeoutMs": "number",
"toneTimeoutMs": "number",
"greetingCompletionTimeoutMs": "number"
}
}
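
Putting the schema together, a dial verb with AMD enabled might look like this sketch — the number and hook URL are illustrative, and the timer values shown are the defaults applied in amd-utils.js:

{
  "verb": "dial",
  "target": [{"type": "phone", "number": "+15551234567"}],
  "amd": {
    "actionHook": "/amd-result",
    "thresholdWordCount": 9,
    "timers": {
      "noSpeechTimeoutMs": 5000,
      "decisionTimeoutMs": 15000,
      "toneTimeoutMs": 20000,
      "greetingCompletionTimeoutMs": 2000
    }
  }
}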


@@ -171,12 +171,13 @@ class Task extends Emitter {
}
async performHook(cs, hook, results) {
const params = results ? Object.assign(cs.callInfo.toJSON(), results) : cs.callInfo.toJSON();
const span = this.startSpan('verb:hook', {'hook.url': hook});
const b3 = this.getTracingPropagation('b3', span);
const httpHeaders = b3 && {b3};
- span.setAttributes({'http.body': JSON.stringify(results)});
+ span.setAttributes({'http.body': JSON.stringify(params)});
try {
- const json = await cs.requestor.request('verb:hook', hook, results, httpHeaders);
+ const json = await cs.requestor.request('verb:hook', hook, params, httpHeaders);
span.setAttributes({'http.statusCode': 200});
span.end();
if (json && Array.isArray(json)) {


@@ -102,7 +102,7 @@ class TaskTranscribe extends Task {
async kill(cs) {
super.kill(cs);
- if (this.ep.connected) {
+ if (this.ep?.connected) {
this.ep.stopTranscription({vendor: this.vendor})
.catch((err) => this.logger.info(err, 'Error TaskTranscribe:kill'));
@@ -140,6 +140,7 @@ class TaskTranscribe extends Task {
ep.addCustomEventListener(AzureTranscriptionEvents.NoSpeechDetected, this._onNoAudio.bind(this, cs, ep, channel));
if (this.vendor === 'google') {
this.bugname = 'google_trancribe';
if (this.sttCredentials) opts.GOOGLE_APPLICATION_CREDENTIALS = JSON.stringify(this.sttCredentials.credentials);
[
['enhancedModel', 'GOOGLE_SPEECH_USE_ENHANCED'],
@@ -175,6 +176,7 @@ class TaskTranscribe extends Task {
.catch((err) => this.logger.info(err, 'TaskTranscribe:_startTranscribing with google'));
}
else if (this.vendor === 'aws') {
this.bugname = 'aws_trancribe';
[
['diarization', 'AWS_SHOW_SPEAKER_LABEL'],
['identifyChannels', 'AWS_ENABLE_CHANNEL_IDENTIFICATION']
@@ -206,6 +208,7 @@ class TaskTranscribe extends Task {
.catch((err) => this.logger.info(err, 'TaskTranscribe:_startTranscribing with aws'));
}
else if (this.vendor === 'microsoft') {
this.bugname = 'azure_trancribe';
Object.assign(opts, {
'AZURE_SUBSCRIPTION_KEY': this.sttCredentials.api_key,
'AZURE_REGION': this.sttCredentials.region
@@ -231,11 +234,16 @@ class TaskTranscribe extends Task {
vendor: this.vendor,
interim: this.interim ? true : false,
locale: this.language,
- channels: /*this.separateRecognitionPerChannel ? 2 : */ 1
+ channels: /*this.separateRecognitionPerChannel ? 2 : */ 1,
+ bugname: this.bugname
});
}
- _onTranscription(cs, ep, channel, evt) {
+ _onTranscription(cs, ep, channel, evt, fsEvent) {
+ // make sure this is not a transcript from answering machine detection
+ const bugname = fsEvent.getHeader('media-bugname');
+ if (bugname && this.bugname !== bugname) return;
this.logger.debug({evt, channel}, 'TaskTranscribe:_onTranscription');
if ('aws' === this.vendor && Array.isArray(evt) && evt.length > 0) evt = evt[0];
if ('microsoft' === this.vendor) {
@@ -249,7 +257,7 @@ class TaskTranscribe extends Task {
}) :
[
{
- transcript: evt.DisplayText
+ transcript: evt.DisplayText || evt.Text
}
];

lib/utils/amd-utils.js (new file, 324 lines)

@@ -0,0 +1,324 @@
const Emitter = require('events');
const {readFile} = require('fs');
const {
GoogleTranscriptionEvents,
AwsTranscriptionEvents,
AzureTranscriptionEvents,
AmdEvents,
AvmdEvents
} = require('./constants');
const bugname = 'amd_bug';
const {VMD_HINTS_FILE} = process.env;
let voicemailHints = {};
const updateHints = (file, callback) => {
readFile(file, 'utf8', (err, data) => {
if (err) return callback(err);
try {
callback(null, JSON.parse(data));
} catch (err) {
callback(err);
}
});
};
if (VMD_HINTS_FILE) {
updateHints(VMD_HINTS_FILE, (err, hints) => {
if (err) { console.error(err); return; }
voicemailHints = hints;
/* if successful, update the hints every hour */
setInterval(() => {
updateHints(VMD_HINTS_FILE, (err, hints) => {
if (err) { console.error(err); return; }
voicemailHints = hints;
});
}, 60 * 60 * 1000);
});
}
class Amd extends Emitter {
constructor(logger, cs, opts) {
super();
this.logger = logger;
this.vendor = opts.recognizer?.vendor || cs.speechRecognizerVendor;
if ('default' === this.vendor) this.vendor = cs.speechRecognizerVendor;
this.language = opts.recognizer?.language || cs.speechRecognizerLanguage;
if ('default' === this.language) this.language = cs.speechRecognizerLanguage;
this.sttCredentials = cs.getSpeechCredentials(this.vendor, 'stt');
if (!this.sttCredentials) throw new Error(`No speech credentials found for vendor ${this.vendor}`);
this.thresholdWordCount = opts.thresholdWordCount || 9;
const {normalizeTranscription} = require('./transcription-utils')(logger);
this.normalizeTranscription = normalizeTranscription;
const {
noSpeechTimeoutMs = 5000,
decisionTimeoutMs = 15000,
toneTimeoutMs = 20000,
greetingCompletionTimeoutMs = 2000
} = opts.timers || {};
this.noSpeechTimeoutMs = noSpeechTimeoutMs;
this.decisionTimeoutMs = decisionTimeoutMs;
this.toneTimeoutMs = toneTimeoutMs;
this.greetingCompletionTimeoutMs = greetingCompletionTimeoutMs;
this.beepDetected = false;
}
startDecisionTimer() {
this.decisionTimer = setTimeout(this._onDecisionTimeout.bind(this), this.decisionTimeoutMs);
this.noSpeechTimer = setTimeout(this._onNoSpeechTimeout.bind(this), this.noSpeechTimeoutMs);
this.startToneTimer();
}
stopDecisionTimer() {
this.decisionTimer && clearTimeout(this.decisionTimer);
}
stopNoSpeechTimer() {
this.noSpeechTimer && clearTimeout(this.noSpeechTimer);
}
startToneTimer() {
this.toneTimer = setTimeout(this._onToneTimeout.bind(this), this.toneTimeoutMs);
}
startGreetingCompletionTimer() {
this.greetingCompletionTimer = setTimeout(
this._onGreetingCompletionTimeout.bind(this),
this.beepDetected ? 1000 : this.greetingCompletionTimeoutMs);
}
stopGreetingCompletionTimer() {
this.greetingCompletionTimer && clearTimeout(this.greetingCompletionTimer);
}
restartGreetingCompletionTimer() {
this.stopGreetingCompletionTimer();
this.startGreetingCompletionTimer();
}
stopToneTimer() {
this.toneTimer && clearTimeout(this.toneTimer);
}
stopAllTimers() {
this.stopDecisionTimer();
this.stopNoSpeechTimer();
this.stopToneTimer();
this.stopGreetingCompletionTimer();
}
_onDecisionTimeout() {
this.emit(this.decision = AmdEvents.DecisionTimeout);
this.stopNoSpeechTimer();
}
_onToneTimeout() {
this.emit(AmdEvents.ToneTimeout);
}
_onNoSpeechTimeout() {
this.emit(this.decision = AmdEvents.NoSpeechDetected);
this.stopDecisionTimer();
}
_onGreetingCompletionTimeout() {
this.emit(AmdEvents.MachineStoppedSpeaking);
}
evaluateTranscription(evt) {
if (this.decision) {
/* at this point we are only listening for the machine to stop speaking */
if (this.decision === AmdEvents.MachineDetected) {
this.restartGreetingCompletionTimer();
}
return;
}
this.stopNoSpeechTimer();
this.logger.debug({evt}, 'Amd:evaluateTranscription - raw');
const t = this.normalizeTranscription(evt, this.vendor, this.language);
const hints = voicemailHints[this.language] || [];
this.logger.debug({t}, 'Amd:evaluateTranscription - normalized');
if (Array.isArray(t.alternatives) && t.alternatives.length > 0) {
const wordCount = t.alternatives[0].transcript.split(' ').length;
const final = t.is_final;
const foundHint = hints.find((h) => t.alternatives[0].transcript.includes(h));
if (foundHint) {
/* we detected a common voice mail greeting */
this.logger.debug(`Amd:evaluateTranscription: found hint ${foundHint}`);
this.emit(this.decision = AmdEvents.MachineDetected, {
reason: 'hint',
hint: foundHint,
language: t.language_code
});
}
else if (final && wordCount < this.thresholdWordCount) {
/* a short greeting is typically a human */
this.emit(this.decision = AmdEvents.HumanDetected, {
reason: 'short greeting',
greeting: t.alternatives[0].transcript,
language: t.language_code
});
}
else if (wordCount >= this.thresholdWordCount) {
/* a long greeting is typically a machine */
this.emit(this.decision = AmdEvents.MachineDetected, {
reason: 'long greeting',
greeting: t.alternatives[0].transcript,
language: t.language_code
});
}
if (this.decision) {
this.stopDecisionTimer();
if (this.decision === AmdEvents.MachineDetected) {
/* if we detected a machine, then wait for greeting to end */
this.startGreetingCompletionTimer();
}
}
return this.decision;
}
}
}
module.exports = (logger) => {
const startTranscribing = async(cs, ep, task) => {
const {vendor, language} = ep.amd;
ep.startTranscription({
vendor,
language,
interim: true,
bugname
}).catch((err) => {
const {writeAlerts, AlertType} = cs.srf.locals;
ep.amd = null;
task.emit(AmdEvents.Error, err);
logger.error(err, 'amd:_startTranscribing error');
return writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
vendor: vendor,
detail: err.message
});
}).catch((err) => logger.info({err}, 'Error generating alert for stt failure'));
};
const onEndOfUtterance = (cs, ep, task) => {
logger.debug('amd:onEndOfUtterance');
startTranscribing(cs, ep, task);
};
const onNoSpeechDetected = (cs, ep, task) => {
logger.debug('amd:onNoSpeechDetected');
ep.amd.stopAllTimers();
task.emit(AmdEvents.NoSpeechDetected);
};
const onTranscription = (cs, ep, task, evt, fsEvent) => {
if (fsEvent.getHeader('media-bugname') !== bugname || !ep.amd || ep.amd.decision) return;
ep.amd.evaluateTranscription(evt);
};
const onBeep = (cs, ep, task, evt, fsEvent) => {
logger.debug({evt, fsEvent}, 'onBeep');
const frequency = Math.floor(fsEvent.getHeader('Frequency'));
const variance = Math.floor(fsEvent.getHeader('Frequency-variance'));
task.emit('amd', {type: AmdEvents.ToneDetected, frequency, variance});
if (ep.amd) {
ep.amd.stopToneTimer();
ep.amd.beepDetected = true;
}
ep.execute('avmd_stop').catch((err) => logger.info(err, 'Error stopping avmd'));
};
const startAmd = async(cs, ep, task, opts) => {
const amd = ep.amd = new Amd(logger, cs, opts);
const {vendor, language, sttCredentials} = amd;
const sttOpts = {};
const hints = voicemailHints[language] || [];
/* set stt options */
logger.info(`starting amd for vendor ${vendor} and language ${language}`);
if ('google' === vendor) {
sttOpts.GOOGLE_APPLICATION_CREDENTIALS = JSON.stringify(sttCredentials.credentials);
sttOpts.GOOGLE_SPEECH_USE_ENHANCED = true;
sttOpts.GOOGLE_SPEECH_HINTS = hints.join(',');
if (opts.recognizer?.altLanguages) {
sttOpts.GOOGLE_SPEECH_ALTERNATIVE_LANGUAGE_CODES = opts.recognizer.altLanguages.join(',');
}
ep.addCustomEventListener(GoogleTranscriptionEvents.Transcription, onTranscription.bind(null, cs, ep, task));
ep.addCustomEventListener(GoogleTranscriptionEvents.EndOfUtterance, onEndOfUtterance.bind(null, cs, ep, task));
}
else if (['aws', 'polly'].includes(vendor)) {
Object.assign(sttOpts, {
AWS_ACCESS_KEY_ID: sttCredentials.accessKeyId,
AWS_SECRET_ACCESS_KEY: sttCredentials.secretAccessKey,
AWS_REGION: sttCredentials.region
});
ep.addCustomEventListener(AwsTranscriptionEvents.Transcription, onTranscription.bind(null, cs, ep, task));
}
else if ('microsoft' === vendor) {
Object.assign(sttOpts, {
'AZURE_SUBSCRIPTION_KEY': sttCredentials.api_key,
'AZURE_REGION': sttCredentials.region
});
sttOpts.AZURE_SPEECH_HINTS = hints.join(',');
if (opts.recognizer?.altLanguages) {
sttOpts.AZURE_SPEECH_ALTERNATIVE_LANGUAGE_CODES = opts.recognizer.altLanguages.join(',');
}
sttOpts.AZURE_INITIAL_SPEECH_TIMEOUT_MS = opts.resolveTimeoutMs || 20000;
ep.addCustomEventListener(AzureTranscriptionEvents.Transcription, onTranscription.bind(null, cs, ep, task));
ep.addCustomEventListener(AzureTranscriptionEvents.NoSpeechDetected, onNoSpeechDetected.bind(null, cs, ep, task));
}
logger.debug({sttOpts}, 'startAmd: setting channel vars');
await ep.set(sttOpts).catch((err) => logger.info(err, 'Error setting channel variables'));
amd
.on(AmdEvents.NoSpeechDetected, (evt) => {
task.emit('amd', {type: AmdEvents.NoSpeechDetected, ...evt});
ep.stopTranscription({vendor, bugname});
})
.on(AmdEvents.HumanDetected, (evt) => {
task.emit('amd', {type: AmdEvents.HumanDetected, ...evt});
ep.stopTranscription({vendor, bugname});
})
.on(AmdEvents.MachineDetected, (evt) => {
task.emit('amd', {type: AmdEvents.MachineDetected, ...evt});
})
.on(AmdEvents.DecisionTimeout, (evt) => {
task.emit('amd', {type: AmdEvents.DecisionTimeout, ...evt});
ep.stopTranscription({vendor, bugname});
})
.on(AmdEvents.ToneTimeout, (evt) => {
//task.emit('amd', {type: AmdEvents.ToneTimeout, ...evt});
ep.execute('avmd_stop').catch((err) => logger.info(err, 'Error stopping avmd'));
})
.on(AmdEvents.MachineStoppedSpeaking, () => {
task.emit('amd', {type: AmdEvents.MachineStoppedSpeaking});
ep.stopTranscription({vendor, bugname});
});
/* start transcribing, and also listening for beep */
amd.startDecisionTimer();
startTranscribing(cs, ep, task);
ep.addCustomEventListener(AvmdEvents.Beep, onBeep.bind(null, cs, ep, task));
ep.execute('avmd_start').catch((err) => logger.info(err, 'Error starting avmd'));
};
const stopAmd = (ep, task) => {
let vendor;
if (ep.amd) {
vendor = ep.amd.vendor;
ep.amd.stopAllTimers();
ep.amd = null;
}
if (ep.connected) {
ep.stopTranscription({vendor, bugname})
.catch((err) => logger.info(err, 'stopAmd: Error stopping transcription'));
task.emit('amd', {type: AmdEvents.Stopped});
ep.execute('avmd_stop').catch((err) => logger.info(err, 'Error stopping avmd'));
}
ep.removeCustomEventListener(AvmdEvents.Beep);
};
return {startAmd, stopAmd};
};


@@ -56,6 +56,9 @@
"StableCall": "stable-call",
"UnansweredCall": "unanswered-call"
},
"AvmdEvents": {
"Beep": "avmd::beep"
},
"GoogleTranscriptionEvents": {
"Transcription": "google_transcribe::transcription",
"EndOfUtterance": "google_transcribe::end_of_utterance",
@@ -125,6 +128,17 @@
"RecordingOff": "recording_off",
"RecordingPaused": "recording_paused"
},
"AmdEvents": {
"NoSpeechDetected": "amd_no_speech_detected",
"HumanDetected": "amd_human_detected",
"MachineDetected": "amd_machine_detected",
"MachineStoppedSpeaking": "amd_machine_stopped_speaking",
"Error": "amd_error",
"DecisionTimeout": "amd_decision_timeout",
"ToneDetected": "amd_tone_detected",
"ToneTimeout": "amd_tone_timeout",
"Stopped": "amd_stopped"
},
"MAX_SIMRINGS": 10,
"BONG_TONE": "tone_stream://v=-7;%(100,0,941.0,1477.0);v=-7;>=2;+=.1;%(1400,0,350,440)",
"FS_UUID_SET_NAME": "fsUUIDs"


@@ -122,7 +122,11 @@ class HttpRequestor extends BaseRequestor {
timeout: HTTP_TIMEOUT,
followRedirects: false
});
- if (![200, 202, 204].includes(statusCode)) throw new Error({statusCode});
+ if (![200, 202, 204].includes(statusCode)) {
+ const err = new Error();
+ err.statusCode = statusCode;
+ throw err;
+ }
if (headers['content-type'].includes('application/json')) {
buf = await body.json();
}


@@ -0,0 +1,33 @@
module.exports = (logger) => {
const normalizeTranscription = (evt, vendor, channel) => {
if ('aws' === vendor && Array.isArray(evt) && evt.length > 0) evt = evt[0];
if ('microsoft' === vendor) {
const nbest = evt.NBest;
const language_code = evt.PrimaryLanguage?.Language;
const alternatives = nbest ? nbest.map((n) => {
return {
confidence: n.Confidence,
transcript: n.Display
};
}) :
[
{
transcript: evt.DisplayText || evt.Text
}
];
const newEvent = {
is_final: evt.RecognitionStatus === 'Success',
channel,
language_code,
alternatives
};
evt = newEvent;
}
evt.channel_tag = channel;
//logger.debug({evt}, 'normalized transcription');
return evt;
};
return {normalizeTranscription};
};
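
For reference, a Microsoft recognizer event such as {RecognitionStatus: 'Success', DisplayText: 'Hello', PrimaryLanguage: {Language: 'en-US'}, NBest: [{Confidence: 0.92, Display: 'Hello'}]} normalizes (with channel 1) to the following shape — values illustrative:

{
  "is_final": true,
  "channel": 1,
  "language_code": "en-US",
  "alternatives": [{"confidence": 0.92, "transcript": "Hello"}],
  "channel_tag": 1
}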

package-lock.json (generated, 28 lines)

@@ -27,7 +27,7 @@
"bent": "^7.3.12",
"debug": "^4.3.4",
"deepcopy": "^2.1.0",
"drachtio-fsmrf": "^3.0.1",
"drachtio-fsmrf": "^3.0.2",
"drachtio-srf": "^4.5.1",
"express": "^4.18.1",
"helmet": "^5.1.0",
@@ -38,7 +38,7 @@
"sdp-transform": "^2.14.1",
"short-uuid": "^4.2.0",
"to-snake-case": "^1.0.0",
"undici": "^5.7.0",
"undici": "^5.8.0",
"uuid": "^8.3.2",
"verify-aws-sns-signature": "^0.0.7",
"ws": "^8.8.0",
@@ -1992,9 +1992,9 @@
}
},
"node_modules/drachtio-fsmrf": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/drachtio-fsmrf/-/drachtio-fsmrf-3.0.1.tgz",
"integrity": "sha512-W+HFsQaAyJrZuKQw0CuKGjvx5Ms0c7tCYt+7QbXprtXWwP80kX9gSyyY7n28VEzFeOi9+SIsYmVBXd+25g2hdg==",
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/drachtio-fsmrf/-/drachtio-fsmrf-3.0.2.tgz",
"integrity": "sha512-Daub8hXFVNr8OTc7HnkdusNFjpgBavLYr+0TVTu9q5eemum7lgmnrCY3qKfSnPDEb24TpLbZarFa4NWPFYoqNQ==",
"dependencies": {
"camel-case": "^4.1.2",
"debug": "^2.6.9",
@@ -5883,9 +5883,9 @@
}
},
"node_modules/undici": {
"version": "5.7.0",
"resolved": "https://registry.npmjs.org/undici/-/undici-5.7.0.tgz",
"integrity": "sha512-ORgxwDkiPS+gK2VxE7iyVeR7JliVn5DqhZ4LgQqYLBXsuK+lwOEmnJ66dhvlpLM0tC3fC7eYF1Bti2frbw2eAA==",
"version": "5.8.0",
"resolved": "https://registry.npmjs.org/undici/-/undici-5.8.0.tgz",
"integrity": "sha512-1F7Vtcez5w/LwH2G2tGnFIihuWUlc58YidwLiCv+jR2Z50x0tNXpRRw7eOIJ+GvqCqIkg9SB7NWAJ/T9TLfv8Q==",
"engines": {
"node": ">=12.18"
}
@@ -7791,9 +7791,9 @@
}
},
"drachtio-fsmrf": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/drachtio-fsmrf/-/drachtio-fsmrf-3.0.1.tgz",
"integrity": "sha512-W+HFsQaAyJrZuKQw0CuKGjvx5Ms0c7tCYt+7QbXprtXWwP80kX9gSyyY7n28VEzFeOi9+SIsYmVBXd+25g2hdg==",
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/drachtio-fsmrf/-/drachtio-fsmrf-3.0.2.tgz",
"integrity": "sha512-Daub8hXFVNr8OTc7HnkdusNFjpgBavLYr+0TVTu9q5eemum7lgmnrCY3qKfSnPDEb24TpLbZarFa4NWPFYoqNQ==",
"requires": {
"camel-case": "^4.1.2",
"debug": "^2.6.9",
@@ -10783,9 +10783,9 @@
}
},
"undici": {
"version": "5.7.0",
"resolved": "https://registry.npmjs.org/undici/-/undici-5.7.0.tgz",
"integrity": "sha512-ORgxwDkiPS+gK2VxE7iyVeR7JliVn5DqhZ4LgQqYLBXsuK+lwOEmnJ66dhvlpLM0tC3fC7eYF1Bti2frbw2eAA=="
"version": "5.8.0",
"resolved": "https://registry.npmjs.org/undici/-/undici-5.8.0.tgz",
"integrity": "sha512-1F7Vtcez5w/LwH2G2tGnFIihuWUlc58YidwLiCv+jR2Z50x0tNXpRRw7eOIJ+GvqCqIkg9SB7NWAJ/T9TLfv8Q=="
},
"unix-dgram": {
"version": "2.0.4",


@@ -21,7 +21,7 @@
},
"scripts": {
"start": "node app",
"test": "NODE_ENV=test JAMBONES_HOSTING=1 HTTP_POOL=1 DRACHTIO_HOST=127.0.0.1 DRACHTIO_PORT=9060 DRACHTIO_SECRET=cymru JAMBONES_MYSQL_HOST=127.0.0.1 JAMBONES_MYSQL_PORT=3360 JAMBONES_MYSQL_USER=jambones_test JAMBONES_MYSQL_PASSWORD=jambones_test JAMBONES_MYSQL_DATABASE=jambones_test JAMBONES_REDIS_HOST=127.0.0.1 JAMBONES_REDIS_PORT=16379 JAMBONES_LOGLEVEL=info ENABLE_METRICS=0 HTTP_PORT=3000 JAMBONES_SBCS=172.38.0.10 JAMBONES_FREESWITCH=127.0.0.1:8022:ClueCon:docker-host JAMBONES_TIME_SERIES_HOST=127.0.0.1 JAMBONES_NETWORK_CIDR=172.38.0.0/16 node test/ ",
"test": "NODE_ENV=test JAMBONES_HOSTING=1 HTTP_POOL=1 DRACHTIO_HOST=127.0.0.1 DRACHTIO_PORT=9060 DRACHTIO_SECRET=cymru JAMBONES_MYSQL_HOST=127.0.0.1 JAMBONES_MYSQL_PORT=3360 JAMBONES_MYSQL_USER=jambones_test JAMBONES_MYSQL_PASSWORD=jambones_test JAMBONES_MYSQL_DATABASE=jambones_test JAMBONES_REDIS_HOST=127.0.0.1 JAMBONES_REDIS_PORT=16379 JAMBONES_LOGLEVEL=error ENABLE_METRICS=0 HTTP_PORT=3000 JAMBONES_SBCS=172.38.0.10 JAMBONES_FREESWITCH=127.0.0.1:8022:ClueCon:docker-host JAMBONES_TIME_SERIES_HOST=127.0.0.1 JAMBONES_NETWORK_CIDR=172.38.0.0/16 node test/ ",
"coverage": "./node_modules/.bin/nyc --reporter html --report-dir ./coverage npm run test",
"jslint": "eslint app.js lib"
},
@@ -44,7 +44,7 @@
"bent": "^7.3.12",
"debug": "^4.3.4",
"deepcopy": "^2.1.0",
"drachtio-fsmrf": "^3.0.1",
"drachtio-fsmrf": "^3.0.2",
"drachtio-srf": "^4.5.1",
"express": "^4.18.1",
"helmet": "^5.1.0",
@@ -55,7 +55,7 @@
"sdp-transform": "^2.14.1",
"short-uuid": "^4.2.0",
"to-snake-case": "^1.0.0",
"undici": "^5.7.0",
"undici": "^5.8.0",
"uuid": "^8.3.2",
"verify-aws-sns-signature": "^0.0.7",
"ws": "^8.8.0",