Feature/config verb (#77)

* remove cognigy verb

* initial implementation of config verb

* further updates to config

* Bot mode alex (#75)

* do not use default as value for TTS/STT

* fix gather listener if no say or play provided

Co-authored-by: akirilyuk <a.kirilyuk@cognigy.com>

* gather: listenDuringPrompt requires a nested play/say

* fix exception

* say: fix exception where caller hangs up during say

* bugfix: sip refer was not ending if caller hung up during refer

* add support for sip:request to ws commands

* gather: when bargein is set and minBargeinWordCount is zero, kill audio on endOfUtterance

* gather/transcribe: add support for google boost and azure custom endpoints

* minor logging changes

* lint error

Co-authored-by: akirilyuk <45361199+akirilyuk@users.noreply.github.com>
Co-authored-by: akirilyuk <a.kirilyuk@cognigy.com>
Author: Dave Horton
Committed: 2022-03-06 15:09:45 -05:00 (via GitHub)
Parent: 72b74de767
Commit: 172dc1aaa7
12 changed files with 271 additions and 301 deletions
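
For reference, a minimal sketch of the new config verb as an application might use it; the vendor, voice, and hook values below are illustrative and not taken from this commit, but the property names follow lib/tasks/config.js and the updated verb schema:

// illustrative values only; property names per lib/tasks/config.js and the verb schema
const configVerb = {
  verb: 'config',
  synthesizer: {vendor: 'google', language: 'en-US', voice: 'en-US-Wavenet-C'},
  recognizer: {vendor: 'google', language: 'en-US'},
  bargeIn: {
    enable: true,
    input: ['speech'],
    actionHook: '/utterance'  // example hook path; invoked by the background gather
  }
};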


@@ -194,6 +194,7 @@ module.exports = function(srf, logger) {
} catch (err) {
logger.info({err}, `Error retrieving or parsing application: ${err?.message}`);
res.send(480, {headers: {'X-Reason': err?.message || 'unknown'}});
app.requestor.close();
}
}


@@ -117,18 +117,27 @@ class CallSession extends Emitter {
get speechSynthesisVendor() {
return this.application.speech_synthesis_vendor;
}
set speechSynthesisVendor(vendor) {
this.application.speech_synthesis_vendor = vendor;
}
/**
* default voice to use for speech synthesis if not provided in the app
*/
get speechSynthesisVoice() {
return this.application.speech_synthesis_voice;
}
set speechSynthesisVoice(voice) {
this.application.speech_synthesis_voice = voice;
}
/**
* default language to use for speech synthesis if not provided in the app
*/
get speechSynthesisLanguage() {
return this.application.speech_synthesis_language;
}
set speechSynthesisLanguage(language) {
this.application.speech_synthesis_language = language;
}
/**
* default vendor to use for speech recognition if not provided in the app
@@ -136,12 +145,18 @@ class CallSession extends Emitter {
get speechRecognizerVendor() {
return this.application.speech_recognizer_vendor;
}
set speechRecognizerVendor(vendor) {
this.application.speech_recognizer_vendor = vendor;
}
/**
* default language to use for speech recognition if not provided in the app
*/
get speechRecognizerLanguage() {
return this.application.speech_recognizer_language;
}
set speechRecognizerLanguage(language) {
this.application.speech_recognizer_language = language;
}
/**
* indicates whether the call is currently in progress
@@ -207,6 +222,46 @@ class CallSession extends Emitter {
return this.memberId && this.confName && this.confUuid;
}
get isBotModeEnabled() {
return this.backgroundGatherTask;
}
async enableBotMode(gather) {
try {
const t = normalizeJambones(this.logger, [gather]);
this.backgroundGatherTask = makeTask(this.logger, t[0]);
this.backgroundGatherTask
.on('dtmf', this._clearTasks.bind(this))
.on('transcription', this._clearTasks.bind(this))
.on('timeout', this._clearTasks.bind(this));
this.logger.info({gather}, 'CallSession:enableBotMode - starting background gather');
const resources = await this._evaluatePreconditions(this.backgroundGatherTask);
this.backgroundGatherTask.exec(this, resources)
.then(() => {
this.logger.info('CallSession:enableBotMode: gather completed');
this.backgroundGatherTask && this.backgroundGatherTask.removeAllListeners();
this.backgroundGatherTask = null;
return;
})
.catch((err) => {
this.logger.info({err}, 'CallSession:enableBotMode: gather threw error');
this.backgroundGatherTask && this.backgroundGatherTask.removeAllListeners();
this.backgroundGatherTask = null;
});
} catch (err) {
this.logger.info({err, gather}, 'CallSession:enableBotMode - Error creating gather task');
}
}
disableBotMode() {
if (this.backgroundGatherTask) {
try {
this.backgroundGatherTask.removeAllListeners();
this.backgroundGatherTask.kill();
} catch (err) {}
this.backgroundGatherTask = null;
}
}
setConferenceDetails(memberId, confName, confUuid) {
assert(!this.memberId && !this.confName && !this.confUuid);
assert (memberId && confName && confUuid);
@@ -301,7 +356,14 @@ class CallSession extends Emitter {
try {
const resources = await this._evaluatePreconditions(task);
this.currentTask = task;
await task.exec(this, resources);
if (TaskName.Gather === task.name && this.isBotModeEnabled) {
const timeout = task.timeout;
this.logger.info(`CallSession:exec skipping #${stackNum}:${taskNum}: ${task.name}`);
this.backgroundGatherTask.updateTimeout(timeout);
}
else {
await task.exec(this, resources);
}
this.currentTask = null;
this.logger.info(`CallSession:exec completed task #${stackNum}:${taskNum}: ${task.name}`);
} catch (err) {
@@ -386,6 +448,7 @@ class CallSession extends Emitter {
this.wakeupResolver();
this.wakeupResolver = null;
}
this.requestor && this.requestor.close();
}
/**
@@ -505,7 +568,7 @@ class CallSession extends Emitter {
task.mute(callSid, mute).catch((err) => this.logger.error(err, 'CallSession:_lccMuteStatus'));
}
async _lccConfHoldStatus(callSid, opts) {
async _lccConfHoldStatus(opts) {
const task = this.currentTask;
if (!task || TaskName.Conference !== task.name || !this.isInConference) {
return this.logger.info('CallSession:_lccConfHoldStatus - invalid command as call is not in conference');
@@ -513,7 +576,7 @@ class CallSession extends Emitter {
task.doConferenceHold(this, opts);
}
async _lccConfMuteStatus(callSid, opts) {
async _lccConfMuteStatus(opts) {
const task = this.currentTask;
if (!task || TaskName.Conference !== task.name || !this.isInConference) {
return this.logger.info('CallSession:_lccConfMuteStatus - invalid command as call is not in conference');
@@ -521,7 +584,7 @@ class CallSession extends Emitter {
task.doConferenceMuteNonModerators(this, opts);
}
async _lccSipRequest(callSid, opts) {
async _lccSipRequest(opts) {
const {sip_request} = opts;
const {method, content_type, content, headers = {}} = sip_request;
if (!this.hasStableDialog) {
@@ -614,13 +677,13 @@ class CallSession extends Emitter {
await this._lccMuteStatus(callSid, opts.mute_status === 'mute');
}
else if (opts.conf_hold_status) {
await this._lccConfHoldStatus(callSid, opts);
await this._lccConfHoldStatus(opts);
}
else if (opts.conf_mute_status) {
await this._lccConfMuteStatus(callSid, opts);
await this._lccConfMuteStatus(opts);
}
else if (opts.sip_request) {
const res = await this._lccSipRequest(callSid, opts);
const res = await this._lccSipRequest(opts);
return {status: res.status, reason: res.reason};
}
@@ -667,7 +730,8 @@ class CallSession extends Emitter {
switch (command) {
case 'redirect':
if (Array.isArray(data)) {
const t = normalizeJambones(this.logger, data).map((tdata) => makeTask(this.logger, tdata));
const t = normalizeJambones(this.logger, data)
.map((tdata) => makeTask(this.logger, tdata));
if (!queueCommand) {
this.logger.info({tasks: listTaskNames(t)}, 'CallSession:_onCommand new task list');
this.replaceApplication(t);
@@ -686,7 +750,7 @@ class CallSession extends Emitter {
break;
case 'mute:status':
this._lccMuteStatus(data);
this._lccMuteStatus(this.callSid, data);
break;
case 'conf:mute-status':
@@ -702,7 +766,14 @@ class CallSession extends Emitter {
break;
case 'whisper':
this._lccWhisper(data);
this._lccWhisper(data, this.callSid);
break;
case 'sip:request':
this._lccSipRequest(data)
.catch((err) => {
this.logger.info({err, data}, `CallSession:_onCommand - error sending ${data.method}`);
});
break;
default:
@@ -1157,6 +1228,15 @@ class CallSession extends Emitter {
this.wakeupResolver = resolve;
});
}
_clearTasks(evt) {
if (this.requestor instanceof WsRequestor) {
this.logger.info({evt}, 'CallSession:_clearTasks on event from background gather');
try {
this.kill();
} catch (err) {}
}
}
}
module.exports = CallSession;
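
For context, the shape of the sip:request websocket command handled by _onCommand and _lccSipRequest above; the envelope fields follow the WsRequestor message parsing, and the method, content, and header values are illustrative:

// all values below are examples only
const sipRequestCommand = {
  type: 'command',
  command: 'sip:request',
  data: {
    sip_request: {
      method: 'INFO',
      content_type: 'application/json',
      content: JSON.stringify({event: 'example'}),
      headers: {'X-Custom-Header': 'example'}
    }
  }
};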


@@ -1,248 +0,0 @@
const Task = require('./task');
const {TaskName, TaskPreconditions} = require('../utils/constants');
const makeTask = require('./make_task');
const { SocketClient } = require('@cognigy/socket-client');
const parseGallery = (obj = {}) => {
const {_default} = obj;
if (_default) {
const {_gallery} = _default;
if (_gallery) return _gallery.fallbackText;
}
};
const parseQuickReplies = (obj) => {
const {_default} = obj;
if (_default) {
const {_quickReplies} = _default;
if (_quickReplies) return _quickReplies.text || _quickReplies.fallbackText;
}
};
const parseBotText = (evt) => {
const {text, data} = evt;
if (text) return text;
switch (data?.type) {
case 'quickReplies':
return parseQuickReplies(data?._cognigy);
case 'gallery':
return parseGallery(data?._cognigy);
default:
break;
}
};
class Cognigy extends Task {
constructor(logger, opts) {
super(logger, opts);
this.preconditions = TaskPreconditions.Endpoint;
this.url = this.data.url;
this.token = this.data.token;
this.prompt = this.data.prompt;
this.eventHook = this.data?.eventHook;
this.actionHook = this.data?.actionHook;
this.data = this.data.data || {};
this.prompts = [];
}
get name() { return TaskName.Cognigy; }
get hasReportedFinalAction() {
return this.reportedFinalAction || this.isReplacingApplication;
}
async exec(cs, ep) {
await super.exec(cs);
this.ep = ep;
try {
/* set event handlers and start transcribing */
this.on('transcription', this._onTranscription.bind(this, cs, ep));
this.on('error', this._onError.bind(this, cs, ep));
this.transcribeTask = this._makeTranscribeTask();
this.transcribeTask.exec(cs, ep, this)
.catch((err) => {
this.logger.info({err}, 'Cognigy transcribe task returned error');
this.notifyTaskDone();
});
if (this.prompt) {
this.sayTask = this._makeSayTask(this.prompt);
this.sayTask.exec(cs, ep, this)
.catch((err) => {
this.logger.info({err}, 'Cognigy say task returned error');
this.notifyTaskDone();
});
}
/* connect to the bot and send initial data */
this.client = new SocketClient(
this.url,
this.token,
{
sessionId: cs.callSid,
channel: 'jambonz',
forceWebsockets: true,
reconnection: true,
settings: {
enableTypingIndicator: false
}
}
);
this.client.on('output', this._onBotUtterance.bind(this, cs, ep));
this.client.on('typingStatus', this._onBotTypingStatus.bind(this, cs, ep));
this.client.on('error', this._onBotError.bind(this, cs, ep));
this.client.on('finalPing', this._onBotFinalPing.bind(this, cs, ep));
await this.client.connect();
this.client.sendMessage('', {...this.data, ...cs.callInfo});
await this.awaitTaskDone();
} catch (err) {
this.logger.error({err}, 'Cognigy error');
throw err;
}
}
async kill(cs) {
super.kill(cs);
this.logger.debug('Cognigy:kill');
this.removeAllListeners();
this.transcribeTask && this.transcribeTask.kill();
this.client.removeAllListeners();
if (this.client && this.client.connected) this.client.disconnect();
if (!this.hasReportedFinalAction) {
this.reportedFinalAction = true;
this.performAction({cognigyResult: 'caller hungup'})
.catch((err) => this.logger.info({err}, 'cognigy - error w/ action webook'));
}
if (this.ep.connected) {
await this.ep.api('uuid_break', this.ep.uuid).catch((err) => this.logger.info(err, 'Error killing audio'));
}
this.notifyTaskDone();
}
_makeTranscribeTask() {
const opts = {
recognizer: this.data.recognizer || {
vendor: 'default',
language: 'default',
outputFormat: 'detailed'
}
};
this.logger.debug({opts}, 'constructing a nested transcribe object');
const transcribe = makeTask(this.logger, {transcribe: opts}, this);
return transcribe;
}
_makeSayTask(text) {
const opts = {
text,
synthesizer: this.data.synthesizer ||
{
vendor: 'default',
language: 'default',
voice: 'default'
}
};
this.logger.debug({opts}, 'constructing a nested say object');
const say = makeTask(this.logger, {say: opts}, this);
return say;
}
async _onBotError(cs, ep, evt) {
this.logger.info({evt}, 'Cognigy:_onBotError');
this.performAction({cognigyResult: 'botError', message: evt.message });
this.reportedFinalAction = true;
this.notifyTaskDone();
}
async _onBotTypingStatus(cs, ep, evt) {
this.logger.info({evt}, 'Cognigy:_onBotTypingStatus');
}
async _onBotFinalPing(cs, ep) {
this.logger.info('Cognigy:_onBotFinalPing');
if (this.prompts.length) {
const text = this.prompts.join('.');
this.prompts = [];
if (text && !this.killed) {
this.sayTask = this._makeSayTask(text);
this.sayTask.exec(cs, ep, this)
.catch((err) => {
this.logger.info({err}, 'Cognigy say task returned error');
this.notifyTaskDone();
});
}
}
}
async _onBotUtterance(cs, ep, evt) {
this.logger.debug({evt}, 'Cognigy:_onBotUtterance');
if (this.eventHook) {
this.performHook(cs, this.eventHook, {event: 'botMessage', message: evt})
.then((redirected) => {
if (redirected) {
this.logger.info('Cognigy_onTranscription: event handler for bot message redirected us to new webhook');
this.reportedFinalAction = true;
this.performAction({cognigyResult: 'redirect'}, false);
}
return;
})
.catch(({err}) => {
this.logger.info({err}, 'Cognigy_onTranscription: error sending event hook');
});
}
const text = parseBotText(evt);
this.prompts.push(text);
}
async _onTranscription(cs, ep, evt) {
this.logger.debug({evt}, `Cognigy: got transcription for callSid ${cs.callSid}`);
const utterance = evt.alternatives[0].transcript;
if (this.eventHook) {
this.performHook(cs, this.eventHook, {event: 'userMessage', message: utterance})
.then((redirected) => {
if (redirected) {
this.logger.info('Cognigy_onTranscription: event handler for user message redirected us to new webhook');
this.reportedFinalAction = true;
this.performAction({cognigyResult: 'redirect'}, false);
if (this.transcribeTask) this.transcribeTask.kill(cs);
}
return;
})
.catch(({err}) => {
this.logger.info({err}, 'Cognigy_onTranscription: error sending event hook');
});
}
/* send the user utterance to the bot */
try {
if (this.client && this.client.connected) {
this.client.sendMessage(utterance);
}
else {
this.logger.info('Cognigy_onTranscription - not sending user utterance as bot is disconnected');
}
} catch (err) {
this.logger.error({err}, 'Cognigy_onTranscription: Error sending user utterance to Cognigy - ending task');
this.performAction({cognigyResult: 'socketError'});
this.reportedFinalAction = true;
this.notifyTaskDone();
}
}
_onError(cs, ep, err) {
this.logger.debug({err}, 'Cognigy: got error');
if (!this.hasReportedFinalAction) this.performAction({cognigyResult: 'error', err});
this.reportedFinalAction = true;
this.notifyTaskDone();
}
}
module.exports = Cognigy;

lib/tasks/config.js (new file)

@@ -0,0 +1,80 @@
const Task = require('./task');
const {TaskName, TaskPreconditions} = require('../utils/constants');
class TaskConfig extends Task {
constructor(logger, opts) {
super(logger, opts);
this.preconditions = TaskPreconditions.Endpoint;
[
'synthesizer',
'recognizer',
'bargeIn'
].forEach((k) => this[k] = this.data[k] || {});
if (this.hasBargeIn && this.bargeIn.enable === true) {
this.gatherOpts = {
verb: 'gather',
timeout: 0
};
[
'finishOnKey', 'input', 'numDigits', 'minDigits', 'maxDigits',
'interDigitTimeout', 'dtmfBargein', 'actionHook'
].forEach((k) => {
if (this.bargeIn[k]) this.gatherOpts[k] = this.bargeIn[k];
});
this.preconditions = this.hasBargeIn ? TaskPreconditions.Endpoint : TaskPreconditions.None;
}
}
get name() { return TaskName.Config; }
get hasSynthesizer() { return Object.keys(this.synthesizer).length; }
get hasRecognizer() { return Object.keys(this.recognizer).length; }
get hasBargeIn() { return Object.keys(this.bargeIn).length; }
async exec(cs) {
await super.exec(cs);
if (this.hasSynthesizer) {
cs.speechSynthesisVendor = this.synthesizer.vendor !== 'default'
? this.synthesizer.vendor
: cs.speechSynthesisVendor;
cs.speechSynthesisLanguage = this.synthesizer.language !== 'default'
? this.synthesizer.language
: cs.speechSynthesisLanguage;
cs.speechSynthesisVoice = this.synthesizer.voice !== 'default'
? this.synthesizer.voice
: cs.speechSynthesisVoice;
this.logger.info({synthesizer: this.synthesizer}, 'Config: updated synthesizer');
}
if (this.hasRecognizer) {
cs.speechRecognizerVendor = this.recognizer.vendor !== 'default'
? this.recognizer.vendor
: cs.speechRecognizerVendor;
cs.speechRecognizerLanguage = this.recognizer.language !== 'default'
? this.recognizer.language
: cs.speechRecognizerLanguage;
this.logger.info({recognizer: this.recognizer}, 'Config: updated recognizer');
}
if (this.hasBargeIn) {
if (this.gatherOpts) {
this.logger.debug({opts: this.gatherOpts}, 'Config: enabling bargeIn');
cs.enableBotMode(this.gatherOpts);
}
else {
this.logger.debug('Config: disabling bargeIn');
cs.disableBotMode();
}
}
}
async kill(cs) {
super.kill(cs);
}
}
module.exports = TaskConfig;
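
A sketch of how the bargeIn option is intended to be used: config calls cs.enableBotMode with the derived gather options, so a background gather keeps listening while later verbs run, and its dtmf/transcription/timeout events clear the current tasks via _clearTasks (websocket applications only). The text and hook values here are illustrative:

// illustrative task list for a websocket application
const app = [
  {verb: 'config', bargeIn: {enable: true, input: ['speech'], actionHook: '/utterance'}},
  {verb: 'say', text: 'How can I help you today?'}  // the background gather listens during this say
];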


@@ -25,21 +25,23 @@ class TaskGather extends Task {
/* when collecting dtmf, bargein on dtmf is true unless explicitly set to false */
if (this.dtmfBargein !== false && this.input.includes('digits')) this.dtmfBargein = true;
this.timeout = (this.timeout || 15) * 1000;
/* timeout of zero means no timeout */
this.timeout = this.timeout === 0 ? 0 : (this.timeout || 15) * 1000;
this.interim = this.partialResultHook || this.bargein;
this.listenDuringPrompt = this.data.listenDuringPrompt === false ? false : true;
if (this.data.recognizer) {
const recognizer = this.data.recognizer;
this.vendor = recognizer.vendor;
this.language = recognizer.language;
this.hints = recognizer.hints || [];
this.hintsBoost = recognizer.hintsBoost;
this.altLanguages = recognizer.altLanguages || [];
/* vad: if provided, we don't connect to recognizer until voice activity is detected */
const {enable, voiceMs = 0, mode = -1} = recognizer.vad || {};
this.vad = {enable, voiceMs, mode};
this.listenDuringPrompt = this.data.listenDuringPrompt === false ? false : true;
this.minBargeinWordCount = this.data.minBargeinWordCount || 1;
this.minBargeinWordCount = this.data.minBargeinWordCount || 0;
/* aws options */
this.vocabularyName = recognizer.vocabularyName;
@@ -51,6 +53,7 @@ class TaskGather extends Task {
this.profanityOption = recognizer.profanityOption || 'raw';
this.requestSnr = recognizer.requestSnr || false;
this.initialSpeechTimeoutMs = recognizer.initialSpeechTimeoutMs || 0;
this.azureServiceEndpoint = recognizer.azureServiceEndpoint;
}
this.digitBuffer = '';
@@ -58,6 +61,7 @@ class TaskGather extends Task {
if (this.say) this.sayTask = makeTask(this.logger, {say: this.say}, this);
if (this.play) this.playTask = makeTask(this.logger, {play: this.play}, this);
if (!this.sayTask && !this.playTask) this.listenDuringPrompt = false;
this.parentTask = parentTask;
}
@@ -78,7 +82,7 @@ class TaskGather extends Task {
else s += 'inputs=speech,';
if (this.input.includes('speech')) {
s += `vendor=${this.vendor},language=${this.language}`;
s += `vendor=${this.vendor || 'default'},language=${this.language || 'default'}`;
}
if (this.sayTask) s += ',with nested say task';
if (this.playTask) s += ',with nested play task';
@@ -87,6 +91,7 @@ class TaskGather extends Task {
}
async exec(cs, ep) {
this.logger.debug('Gather:exec');
await super.exec(cs);
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
@@ -133,7 +138,7 @@ class TaskGather extends Task {
if (!this.killed) startListening(cs, ep);
});
}
else this._startTimer();
else startListening(cs, ep);
if (this.input.includes('speech') && this.listenDuringPrompt) {
await this._initSpeech(cs, ep);
@@ -165,6 +170,12 @@ class TaskGather extends Task {
this._resolve('killed');
}
updateTimeout(timeout) {
this.logger.info(`TaskGather:updateTimeout - updating timeout to ${timeout}`);
this.timeout = timeout;
this._startTimer();
}
_onDtmf(cs, ep, evt) {
this.logger.debug(evt, 'TaskGather:_onDtmf');
clearTimeout(this.interDigitTimer);
@@ -193,7 +204,7 @@ class TaskGather extends Task {
async _initSpeech(cs, ep) {
const opts = {};
if (this.vad.enable) {
if (this.vad?.enable) {
opts.START_RECOGNIZING_ON_VAD = 1;
if (this.vad.voiceMs) opts.RECOGNIZER_VAD_VOICE_MS = this.vad.voiceMs;
if (this.vad.mode >= 0 && this.vad.mode <= 3) opts.RECOGNIZER_VAD_MODE = this.vad.mode;
@@ -208,6 +219,9 @@ class TaskGather extends Task {
});
if (this.hints && this.hints.length > 1) {
opts.GOOGLE_SPEECH_HINTS = this.hints.map((h) => h.trim()).join(',');
if (typeof this.hintsBoost === 'number') {
opts.GOOGLE_SPEECH_HINTS_BOOST = this.hintsBoost;
}
}
if (this.altLanguages && this.altLanguages.length > 0) {
opts.GOOGLE_SPEECH_ALTERNATIVE_LANGUAGE_CODES = this.altLanguages.join(',');
@@ -243,8 +257,9 @@ class TaskGather extends Task {
if (this.hints && this.hints.length > 1) {
opts.AZURE_SPEECH_HINTS = this.hints.map((h) => h.trim()).join(',');
}
//if (this.requestSnr) opts.AZURE_REQUEST_SNR = 1;
//if (this.profanityOption !== 'raw') opts.AZURE_PROFANITY_OPTION = this.profanityOption;
if (this.requestSnr) opts.AZURE_REQUEST_SNR = 1;
if (this.profanityOption !== 'raw') opts.AZURE_PROFANITY_OPTION = this.profanityOption;
if (this.azureServiceEndpoint) opts.AZURE_SERVICE_ENDPOINT = this.azureServiceEndpoint;
if (this.initialSpeechTimeoutMs > 0) opts.AZURE_INITIAL_SPEECH_TIMEOUT_MS = this.initialSpeechTimeoutMs;
opts.AZURE_USE_OUTPUT_FORMAT_DETAILED = 1;
@@ -256,6 +271,7 @@ class TaskGather extends Task {
}
_startTranscribing(ep) {
this.logger.debug('Gather:_startTranscribing');
ep.startTranscription({
vendor: this.vendor,
locale: this.language,
@@ -273,6 +289,7 @@ class TaskGather extends Task {
}
_startTimer() {
if (0 === this.timeout) return;
assert(!this._timeoutTimer);
this.logger.debug(`Gather:_startTimer: timeout ${this.timeout}`);
this._timeoutTimer = setTimeout(() => this._resolve('timeout'), this.timeout);
@@ -339,13 +356,17 @@ class TaskGather extends Task {
this._killAudio(cs);
}
if (this.partialResultHook) {
this.cs.requestor.request(this.partialResultHook, Object.assign({speech: evt}, this.cs.callInfo))
.catch((err) => this.logger.info(err, 'GatherTask:_onTranscription error'));
this.cs.requestor.request(this.partialResultHook, Object.assign({speech: evt}, this.cs.callInfo));
}
}
}
_onEndOfUtterance(cs, ep) {
this.logger.info('TaskGather:_onEndOfUtterance');
if (this.bargein && this.minBargeinWordCount === 0) {
this.logger.debug('Gather:_onEndOfUtterance - killing audio due to utterance detected');
this._killAudio(cs);
}
if (!this.resolved && !this.killed) {
this._startTranscribing(ep);
}
@@ -368,15 +389,25 @@ class TaskGather extends Task {
this._clearTimer();
if (reason.startsWith('dtmf')) {
await this.performAction({digits: this.digitBuffer, reason: 'dtmfDetected'});
if (this.parentTask) this.parentTask.emit('dtmf', evt);
else {
this.emit('dtmf', evt);
await this.performAction({digits: this.digitBuffer, reason: 'dtmfDetected'});
}
}
else if (reason.startsWith('speech')) {
if (this.parentTask) this.parentTask.emit('transcription', evt);
else await this.performAction({speech: evt, reason: 'speechDetected'});
else {
this.emit('transcription', evt);
await this.performAction({speech: evt, reason: 'speechDetected'});
}
}
else if (reason.startsWith('timeout')) {
if (this.parentTask) this.parentTask.emit('timeout', evt);
else await this.performAction({reason: 'timeout'});
else {
this.emit('timeout', evt);
await this.performAction({reason: 'timeout'});
}
}
this.notifyTaskDone();
}
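
An illustrative gather showing the new hintsBoost recognizer option; the hint phrases, boost value, and hook path are examples:

// hintsBoost maps to GOOGLE_SPEECH_HINTS_BOOST when hints are supplied
const gatherVerb = {
  verb: 'gather',
  input: ['speech'],
  actionHook: '/transcript',  // example hook
  recognizer: {
    vendor: 'google',
    language: 'en-US',
    hints: ['sales', 'support', 'billing'],
    hintsBoost: 20  // example boost value
  }
};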


@@ -20,9 +20,9 @@ function makeTask(logger, obj, parent) {
case TaskName.SipRefer:
const TaskSipRefer = require('./sip_refer');
return new TaskSipRefer(logger, data, parent);
case TaskName.Cognigy:
const TaskCognigy = require('./cognigy');
return new TaskCognigy(logger, data, parent);
case TaskName.Config:
const TaskConfig = require('./config');
return new TaskConfig(logger, data, parent);
case TaskName.Conference:
const TaskConference = require('./conference');
return new TaskConference(logger, data, parent);


@@ -56,6 +56,7 @@ class TaskSay extends Task {
// synthesize all of the text elements
let lastUpdated = false;
const filepath = (await Promise.all(this.text.map(async(text) => {
if (this.killed) return;
if (text.startsWith('silence_stream://')) return text;
const {filePath, servedFromCache} = await synthAudio(stats, {
text,
@@ -86,7 +87,7 @@ class TaskSay extends Task {
this.logger.debug({filepath}, 'synthesized files for tts');
while (!this.killed && (this.loop === 'forever' || this.loop--) && this.ep.connected) {
while (!this.killed && (this.loop === 'forever' || this.loop--) && this.ep?.connected) {
let segment = 0;
do {
if (cs.isInConference) {


@@ -21,20 +21,29 @@
"referTo"
]
},
"cognigy": {
"config": {
"properties": {
"url": "string",
"token": "string",
"synthesizer": "#synthesizer",
"recognizer": "#recognizer",
"tts": "#synthesizer",
"prompt": "string",
"bargeIn": "#bargeIn"
},
"required": []
},
"bargeIn": {
"properties": {
"enable": "boolean",
"actionHook": "object|string",
"eventHook": "object|string",
"data": "object"
"input": "array",
"finishOnKey": "string",
"numDigits": "number",
"minDigits": "number",
"maxDigits": "number",
"interDigitTimeout": "number",
"dtmfBargein": "boolean",
"minBargeinWordCount": "number"
},
"required": [
"url",
"token"
"enable"
]
},
"dequeue": {
@@ -400,6 +409,7 @@
"language": "string",
"vad": "#vad",
"hints": "array",
"hintsBoost": "number",
"altLanguages": "array",
"profanityFilter": "boolean",
"interim": "boolean",
@@ -453,7 +463,8 @@
]
},
"requestSnr": "boolean",
"initialSpeechTimeoutMs": "number"
"initialSpeechTimeoutMs": "number",
"azureServiceEndpoint": "string"
},
"required": [
"vendor"


@@ -28,6 +28,7 @@ class TaskTranscribe extends Task {
/* google-specific options */
this.hints = recognizer.hints || [];
this.hintsBoost = recognizer.hintsBoost;
this.profanityFilter = recognizer.profanityFilter;
this.punctuation = !!recognizer.punctuation;
this.enhancedModel = !!recognizer.enhancedModel;
@@ -50,6 +51,7 @@ class TaskTranscribe extends Task {
this.profanityOption = recognizer.profanityOption || 'raw';
this.requestSnr = recognizer.requestSnr || false;
this.initialSpeechTimeoutMs = recognizer.initialSpeechTimeoutMs || 0;
this.azureServiceEndpoint = recognizer.azureServiceEndpoint;
}
get name() { return TaskName.Transcribe; }
@@ -138,7 +140,12 @@ class TaskTranscribe extends Task {
].forEach((arr) => {
if (this[arr[0]]) opts[arr[1]] = true;
});
if (this.hints.length > 1) opts.GOOGLE_SPEECH_HINTS = this.hints.join(',');
if (this.hints.length > 1) {
opts.GOOGLE_SPEECH_HINTS = this.hints.join(',');
if (typeof this.hintsBoost === 'number') {
opts.GOOGLE_SPEECH_HINTS_BOOST = this.hintsBoost;
}
}
if (this.altLanguages.length > 1) opts.GOOGLE_SPEECH_ALTERNATIVE_LANGUAGE_CODES = this.altLanguages.join(',');
if ('unspecified' !== this.interactionType) {
opts.GOOGLE_SPEECH_METADATA_INTERACTION_TYPE = this.interactionType;
@@ -205,6 +212,7 @@ class TaskTranscribe extends Task {
if (this.profanityOption !== 'raw') opts.AZURE_PROFANITY_OPTION = this.profanityOption;
if (this.initialSpeechTimeoutMs > 0) opts.AZURE_INITIAL_SPEECH_TIMEOUT_MS = this.initialSpeechTimeoutMs;
if (this.outputFormat !== 'simple') opts.AZURE_USE_OUTPUT_FORMAT_DETAILED = 1;
if (this.azureServiceEndpoint) opts.AZURE_SERVICE_ENDPOINT = this.azureServiceEndpoint;
await ep.set(opts)
.catch((err) => this.logger.info(err, 'TaskTranscribe:_startTranscribing with azure'));
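
An illustrative transcribe recognizer using the new azureServiceEndpoint property; the vendor id, hook name, and endpoint URL are assumptions for illustration and are not taken from this diff:

// azureServiceEndpoint maps to AZURE_SERVICE_ENDPOINT above
const transcribeVerb = {
  verb: 'transcribe',
  transcriptionHook: '/transcript',  // assumed hook name, for illustration
  recognizer: {
    vendor: 'microsoft',             // assumed vendor id for Azure
    language: 'en-US',
    azureServiceEndpoint: 'wss://custom.example.com/speech'  // illustrative custom endpoint
  }
};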


@@ -2,6 +2,7 @@
"TaskName": {
"Cognigy": "cognigy",
"Conference": "conference",
"Config": "config",
"Dequeue": "dequeue",
"Dial": "dial",
"Dialogflow": "dialogflow",


@@ -14,6 +14,7 @@ class WsRequestor extends BaseRequestor {
this.connections = 0;
this.messagesInFlight = new Map();
this.maliciousClient = false;
this.closedByUs = false;
assert(this._isAbsoluteUrl(this.url));
@@ -44,7 +45,7 @@ class WsRequestor extends BaseRequestor {
/* if we have an absolute url, and it is http then do a standard webhook */
if (this._isAbsoluteUrl(url) && url.startsWith('http')) {
this.logger.debug({hook}, 'WsRequestor: sending a webhook');
this.logger.debug({hook}, 'WsRequestor: sending a webhook (HTTP)');
const requestor = new HttpRequestor(this.logger, this.account_sid, hook, this.secret);
return requestor.request(type, hook, params);
}
@@ -79,7 +80,7 @@ class WsRequestor extends BaseRequestor {
data: {...payload}
};
this.logger.debug({obj}, `WsRequestor:request ${url}`);
this.logger.debug({obj}, `websocket: sending (${url})`);
/* simple notifications */
if (['call:status', 'jambonz:error'].includes(type)) {
@@ -118,11 +119,17 @@ class WsRequestor extends BaseRequestor {
}
close() {
this.logger.info('WsRequestor: closing socket');
if (this.ws) {
this.ws.close();
this.ws.removeAllListeners();
this.logger.info('WsRequestor:close closing socket');
this.closedByUs = true;
try {
if (this.ws) {
this.ws.close();
this.ws.removeAllListeners();
}
} catch (err) {
this.logger.info({err}, 'WsRequestor: Error closing socket');
}
this.logger.info('WsRequestor:close socket closed');
}
_connect() {
@@ -197,13 +204,8 @@ class WsRequestor extends BaseRequestor {
_onSocketClosed() {
this.ws = null;
if (this.connections > 0) {
if (this.connections < MAX_RECONNECTS) {
setTimeout(this._connect.bind(this), 500);
}
else {
this.logger.info('WsRequestor:_onSocketClosed - max reconnection attempts reached');
}
if (this.connections > 0 && this.connections < MAX_RECONNECTS && !this.closedByUs) {
setTimeout(this._connect.bind(this), 500);
}
}
@@ -217,7 +219,10 @@ class WsRequestor extends BaseRequestor {
/* messages must be JSON format */
try {
const {type, msgid, command, queueCommand = false, data} = JSON.parse(content);
const obj = JSON.parse(content);
const {type, msgid, command, queueCommand = false, data} = obj;
this.logger.debug({obj}, 'websocket: received');
assert.ok(type, 'type property not supplied');
switch (type) {
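
The message envelope parsed here, shown with a redirect command carrying a new task list; the verbs and text are illustrative:

// queueCommand=false replaces the running application immediately (see _onCommand)
const wsMessage = {
  type: 'command',
  command: 'redirect',
  queueCommand: false,
  data: [
    {verb: 'say', text: 'One moment please'},  // example verbs
    {verb: 'hangup'}
  ]
};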


@@ -21,7 +21,7 @@
},
"scripts": {
"start": "node app",
"test": "NODE_ENV=test JAMBONES_HOSTING=1 DRACHTIO_HOST=127.0.0.1 DRACHTIO_PORT=9060 DRACHTIO_SECRET=cymru JAMBONES_MYSQL_HOST=127.0.0.1 JAMBONES_MYSQL_PORT=3360 JAMBONES_MYSQL_USER=jambones_test JAMBONES_MYSQL_PASSWORD=jambones_test JAMBONES_MYSQL_DATABASE=jambones_test JAMBONES_REDIS_HOST=127.0.0.1 JAMBONES_REDIS_PORT=16379 JAMBONES_LOGLEVEL=debug ENABLE_METRICS=0 HTTP_PORT=3000 JAMBONES_SBCS=172.38.0.10 JAMBONES_FREESWITCH=127.0.0.1:8022:ClueCon:docker-host JAMBONES_TIME_SERIES_HOST=127.0.0.1 JAMBONES_NETWORK_CIDR=172.38.0.0/16 node test/ ",
"test": "NODE_ENV=test JAMBONES_HOSTING=1 DRACHTIO_HOST=127.0.0.1 DRACHTIO_PORT=9060 DRACHTIO_SECRET=cymru JAMBONES_MYSQL_HOST=127.0.0.1 JAMBONES_MYSQL_PORT=3360 JAMBONES_MYSQL_USER=jambones_test JAMBONES_MYSQL_PASSWORD=jambones_test JAMBONES_MYSQL_DATABASE=jambones_test JAMBONES_REDIS_HOST=127.0.0.1 JAMBONES_REDIS_PORT=16379 JAMBONES_LOGLEVEL=error ENABLE_METRICS=0 HTTP_PORT=3000 JAMBONES_SBCS=172.38.0.10 JAMBONES_FREESWITCH=127.0.0.1:8022:ClueCon:docker-host JAMBONES_TIME_SERIES_HOST=127.0.0.1 JAMBONES_NETWORK_CIDR=172.38.0.0/16 node test/ ",
"coverage": "./node_modules/.bin/nyc --reporter html --report-dir ./coverage npm run test",
"jslint": "eslint app.js lib"
},