Mirror of https://github.com/jambonz/jambonz-feature-server.git (synced 2026-02-15 02:39:35 +00:00)

Compare commits: v0.9.5-10 ... v0.9.6-rc3 (14 commits)
| SHA1 |
|---|
| d7beaa1b7b |
| 45d0ca87af |
| bd435dfff9 |
| b598cd94ae |
| ceb9a7a3bd |
| ff5f9acaf8 |
| 96cdc2936b |
| 6120dcbe96 |
| 96d72216e2 |
| faee30278b |
| 325af42946 |
| 9848152d5b |
| 2468557aef |
| 3c3dfa81d3 |
@@ -1082,7 +1082,7 @@ class CallSession extends Emitter {
 const cred = JSON.parse(credential.service_key.replace(/\n/g, '\\n'));
 return {
 speech_credential_sid: credential.speech_credential_sid,
-credentials: cred
+credentials: cred,
 };
 } catch (err) {
 const sid = this.accountInfo.account.account_sid;
@@ -2028,7 +2028,7 @@ Duration=${duration} `
 return this._lccDub(opts.dub, callSid);
 }
 else if (opts.boostAudioSignal) {
-return this._lccBoostAudioSignal(opts, callSid);
+return this._lccBoostAudioSignal(opts.boostAudioSignal, callSid);
 }
 else if (opts.media_path) {
 return this._lccMediaPath(opts.media_path, callSid);

@@ -195,6 +195,9 @@ class TaskDial extends Task {
 async exec(cs) {
 await super.exec(cs);

+/* capture whether A leg was already answered before this dial task started */
+this._aLegAlreadyAnswered = !!cs.dlg;
+
 if (this.data.anchorMedia && this.data.exitMediaPath) {
 this.logger.info('Dial:exec - incompatible anchorMedia and exitMediaPath are both set, will obey anchorMedia');
 delete this.data.exitMediaPath;
@@ -550,7 +553,7 @@ class TaskDial extends Task {
 let sbcAddress = this.proxy || getSBC();
 const teamsInfo = {};
 let fqdn;
-const forwardPAI = this.forwardPAI ?? JAMBONZ_DIAL_PAI_HEADER; // dial verb overides env var
+const forwardPAI = this.forwardPAI ?? !JAMBONZ_DIAL_PAI_HEADER; // dial verb overides env var
 this.logger.debug(forwardPAI, 'forwardPAI value');
 if (!sbcAddress) throw new Error('no SBC found for outbound call');
 this.headers = {
@@ -872,8 +875,12 @@ class TaskDial extends Task {
 this.sd = sd;
 this.callSid = sd.callSid;
 if (this.earlyMedia) {
-debug('Dial:_selectSingleDial propagating answer supervision on A leg now that B is connected');
-await cs.propagateAnswer();
+if (this._aLegAlreadyAnswered) {
+debug('Dial:_selectSingleDial A leg was already answered, skipping propagateAnswer');
+} else {
+debug('Dial:_selectSingleDial propagating answer supervision on A leg now that B is connected');
+await cs.propagateAnswer();
+}
 }
 if (this.timeLimit) {
 this.timerMaxCallDuration = setTimeout(this._onMaxCallDuration.bind(this, cs), this.timeLimit * 1000);

@@ -1,3 +1,4 @@
+const assert = require('assert');
 const Task = require('../task');
 const {TaskName, TaskPreconditions} = require('../../utils/constants');
 const Intent = require('./intent');
@@ -10,19 +11,27 @@ class Dialogflow extends Task {
 super(logger, opts);
 this.preconditions = TaskPreconditions.Endpoint;
 this.credentials = this.data.credentials;
+this.project = this.data.project;
+this.agent = this.data.agent;
+this.region = this.data.region || 'us-central1';
+this.model = this.data.model || 'es';

-/* set project id with environment and region (optionally) */
-if (this.data.environment && this.data.region) {
-this.project = `${this.data.project}:${this.data.environment}:${this.data.region}`;
-}
-else if (this.data.environment) {
-this.project = `${this.data.project}:${this.data.environment}`;
-}
-else if (this.data.region) {
-this.project = `${this.data.project}::${this.data.region}`;
+assert(this.agent || !this.isCX, 'agent is required for dialogflow cx');
+assert(this.credentials, 'dialogflow credentials are required');
+
+if (this.isCX) {
+this.environment = this.data.environment || 'none';
 }
 else {
-this.project = this.data.project;
+if (this.data.environment && this.data.region) {
+this.project = `${this.data.project}:${this.data.environment}:${this.data.region}`;
+}
+else if (this.data.environment) {
+this.project = `${this.data.project}:${this.data.environment}`;
+}
+else if (this.data.region) {
+this.project = `${this.data.project}::${this.data.region}`;
+}
 }

 this.lang = this.data.lang || 'en-US';
@@ -39,7 +48,6 @@ class Dialogflow extends Task {
 this.events = this.data.events;
 }
 else if (this.eventHook) {
-// send all events by default - except interim transcripts
 this.events = [
 'intent',
 'transcription',
@@ -60,38 +68,33 @@ class Dialogflow extends Task {
 this.voice = this.data.tts.voice || 'default';
 this.speechSynthesisLabel = this.data.tts.label;

-// fallback tts
 this.fallbackVendor = this.data.tts.fallbackVendor || 'default';
 this.fallbackLanguage = this.data.tts.fallbackLanguage || 'default';
-this.fallbackVoice = this.data.tts.fallbackLanguage || 'default';
+this.fallbackVoice = this.data.tts.fallbackVoice || 'default';
 this.fallbackLabel = this.data.tts.fallbackLabel;
 }
 this.bargein = this.data.bargein;

+this.cmd = this.isCX ? 'dialogflow_cx_start' : 'dialogflow_start';
+this.cmdStop = this.isCX ? 'dialogflow_cx_stop' : 'dialogflow_stop';
+
+// CX-specific state
+this._suppressNextCXAudio = false;
+this._cxAudioHandled = false;
 }

 get name() { return TaskName.Dialogflow; }

+get isCX() { return this.model === 'cx'; }
+
+get isES() { return !this.isCX; }
+
 async exec(cs, {ep}) {
 await super.exec(cs);

 try {
 await this.init(cs, ep);
+await this.startBot('default');
-this.logger.debug(`starting dialogflow bot ${this.project}`);
-
-// kick it off
-const baseArgs = `${this.ep.uuid} ${this.project} ${this.lang} ${this.welcomeEvent}`;
-if (this.welcomeEventParams) {
-this.ep.api('dialogflow_start', `${baseArgs} '${JSON.stringify(this.welcomeEventParams)}'`);
-}
-else if (this.welcomeEvent.length) {
-this.ep.api('dialogflow_start', baseArgs);
-}
-else {
-this.ep.api('dialogflow_start', `${this.ep.uuid} ${this.project} ${this.lang}`);
-}
-this.logger.debug(`started dialogflow bot ${this.project}`);

 await this.awaitTaskDone();
 } catch (err) {
 this.logger.error({err}, 'Dialogflow:exec error');
@@ -108,6 +111,12 @@ class Dialogflow extends Task {
 this.ep.removeCustomEventListener('dialogflow::end_of_utterance');
 this.ep.removeCustomEventListener('dialogflow::error');

+this.ep.removeCustomEventListener('dialogflow_cx::intent');
+this.ep.removeCustomEventListener('dialogflow_cx::transcription');
+this.ep.removeCustomEventListener('dialogflow_cx::audio_provided');
+this.ep.removeCustomEventListener('dialogflow_cx::end_of_utterance');
+this.ep.removeCustomEventListener('dialogflow_cx::error');
+
 this._clearNoinputTimer();

 if (!this.reportedFinalAction) this.performAction({dialogflowResult: 'caller hungup'})
@@ -141,6 +150,12 @@ class Dialogflow extends Task {
 this.ep.addCustomEventListener('dialogflow::end_of_utterance', this._onEndOfUtterance.bind(this, ep, cs));
 this.ep.addCustomEventListener('dialogflow::error', this._onError.bind(this, ep, cs));

+this.ep.addCustomEventListener('dialogflow_cx::intent', this._onIntent.bind(this, ep, cs));
+this.ep.addCustomEventListener('dialogflow_cx::transcription', this._onTranscription.bind(this, ep, cs));
+this.ep.addCustomEventListener('dialogflow_cx::audio_provided', this._onAudioProvided.bind(this, ep, cs));
+this.ep.addCustomEventListener('dialogflow_cx::end_of_utterance', this._onEndOfUtterance.bind(this, ep, cs));
+this.ep.addCustomEventListener('dialogflow_cx::error', this._onError.bind(this, ep, cs));
+
 const obj = typeof this.credentials === 'string' ? JSON.parse(this.credentials) : this.credentials;
 const creds = JSON.stringify(obj);
 await this.ep.set('GOOGLE_APPLICATION_CREDENTIALS', creds);
@@ -151,56 +166,113 @@ class Dialogflow extends Task {
 }
 }

+async startBot(intent) {
+if (this.isCX) {
+const event = this.welcomeEvent || intent;
+const args = this._buildStartArgs({
+event: event && event !== 'default' ? event : undefined
+});
+this.logger.info({args}, 'starting dialogflow CX bot');
+await this.ep.api(this.cmd, args);
+}
+else {
+await this._startBotES();
+}
+}
+
+async _startBotES() {
+this.logger.info('starting dialogflow ES bot');
+const baseArgs = `${this.ep.uuid} ${this.project} ${this.lang} ${this.welcomeEvent}`;
+if (this.welcomeEventParams) {
+await this.ep.api(this.cmd, `${baseArgs} '${JSON.stringify(this.welcomeEventParams)}'`);
+}
+else if (this.welcomeEvent.length) {
+await this.ep.api(this.cmd, baseArgs);
+}
+else {
+await this.ep.api(this.cmd, `${this.ep.uuid} ${this.project} ${this.lang}`);
+}
+}
+
+/**
+* Build the start command args string for either ES or CX.
+* @param {object} opts - options
+* @param {string} opts.event - optional event to send
+* @param {string} opts.text - optional text to send
+* @param {number} opts.singleUtterance - 1 or 0 (CX only, default 1)
+* @returns {string} command args string
+*/
+_buildStartArgs({event, text, singleUtterance = 1} = {}) {
+if (this.isCX) {
+const args = [
+this.ep.uuid,
+this.project,
+this.region,
+this.agent,
+this.environment || 'none',
+this.lang,
+event || 'none',
+text ? `'${text}'` : 'none',
+singleUtterance ? '1' : '0',
+];
+return args.join(' ');
+}
+// ES
+const args = [this.ep.uuid, this.project, this.lang];
+if (event) {
+args.push(event);
+}
+if (text) {
+if (!event) args.push('none');
+args.push(`'${text}'`);
+}
+return args.join(' ');
+}
+
 /**
 * An intent has been returned. Since we are using SINGLE_UTTERANCE on the dialogflow side,
 * we may get an empty intent, signified by the lack of a 'response_id' attribute.
 * In such a case, we just start another StreamingIntentDetectionRequest.
 * @param {*} ep - media server endpoint
+* @param {*} cs - call session
 * @param {*} evt - event data
 */
 async _onIntent(ep, cs, evt) {
 const intent = new Intent(this.logger, evt);

 if (intent.isEmpty) {
-/**
-* An empty intent is returned in 3 conditions:
-* 1. Our no-input timer fired
-* 2. We collected dtmf that needs to be fed to dialogflow
-* 3. A normal dialogflow timeout
-*/
 if (this.noinput && this.greetingPlayed) {
 this.logger.info('no input timer fired, reprompting..');
 this.noinput = false;
-ep.api('dialogflow_start', `${ep.uuid} ${this.project} ${this.lang} ${this.noInputEvent}`);
+ep.api(this.cmd, this._buildStartArgs({event: this.noInputEvent}));
 }
 else if (this.dtmfEntry && this.greetingPlayed) {
 this.logger.info('dtmf detected, reprompting..');
-ep.api('dialogflow_start', `${ep.uuid} ${this.project} ${this.lang} none \'${this.dtmfEntry}\'`);
+ep.api(this.cmd, this._buildStartArgs({text: this.dtmfEntry}));
 this.dtmfEntry = null;
 }
-else if (this.greetingPlayed) {
-this.logger.info('starting another intent');
-ep.api('dialogflow_start', `${ep.uuid} ${this.project} ${this.lang}`);
-}
 else {
-this.logger.info('got empty intent');
-ep.api('dialogflow_start', `${ep.uuid} ${this.project} ${this.lang}`);
+this.logger.info('got empty intent, restarting');
+ep.api(this.cmd, this._buildStartArgs());
 }
 return;
 }

+// For CX: suppress NO_INPUT "I didn't get that" audio and silently restart
+if (this.isCX && intent.isNoInput && this.greetingPlayed) {
+this.logger.info('CX returned NO_INPUT after greeting, suppressing and restarting');
+this._suppressNextCXAudio = true;
+return;
+}
+
 if (this.events.includes('intent')) {
 this._performHook(cs, this.eventHook, {event: 'intent', data: evt});
 }

-// clear the no-input timer and the digit buffer
 this._clearNoinputTimer();
 if (this.digitBuffer) this.digitBuffer.flush();

-/* hang up (or tranfer call) after playing next audio file? */
 if (intent.saysEndInteraction) {
-// if 'end_interaction' is true, end the dialog after playing the final prompt
-// (or in 1 second if there is no final prompt)
 this.hangupAfterPlayDone = true;
 this.waitingForPlayStart = true;
 setTimeout(() => {
@@ -211,8 +283,6 @@ class Dialogflow extends Task {
 }
 }, 1000);
 }
-
-/* collect digits? */
 else if (intent.saysCollectDtmf || this.enableDtmfAlways) {
 const opts = Object.assign({
 idt: this.opts.interDigitTimeout
@@ -221,68 +291,44 @@ class Dialogflow extends Task {
 this.digitBuffer.once('fulfilled', this._onDtmfEntryComplete.bind(this, ep));
 }

-/* if we are using tts and a message was provided, play it out */
+// If we have a TTS vendor and fulfillment text, synthesize and play
 if (this.vendor && intent.fulfillmentText && intent.fulfillmentText.length > 0) {
-const {srf} = cs;
-const {stats} = srf.locals;
-const {synthAudio} = srf.locals.dbHelpers;
 this.waitingForPlayStart = false;

-// start a new intent, (we want to continue to listen during the audio playback)
-// _unless_ we are transferring or ending the session
-if (!this.hangupAfterPlayDone) {
-ep.api('dialogflow_start', `${ep.uuid} ${this.project} ${this.lang}`);
+// ES: start a new intent during playback so we continue to listen
+if (!this.hangupAfterPlayDone && this.isES) {
+ep.api(this.cmd, this._buildStartArgs());
 }

 try {
+const {srf} = cs;
+const {stats} = srf.locals;
+const {synthAudio} = srf.locals.dbHelpers;
 const {filePath} = await this._fallbackSynthAudio(cs, intent, stats, synthAudio);
 if (filePath) cs.trackTmpFile(filePath);
+await this._playAndHandlePostPlay(ep, cs, filePath);
-if (this.playInProgress) {
-await ep.api('uuid_break', ep.uuid).catch((err) => this.logger.info(err, 'Error killing audio'));
-}
-this.playInProgress = true;
-this.curentAudioFile = filePath;
-
-this.logger.debug(`starting to play tts ${filePath}`);
-
-if (this.events.includes('start-play')) {
-this._performHook(cs, this.eventHook, {event: 'start-play', data: {path: filePath}});
-}
-await ep.play(filePath);
-if (this.events.includes('stop-play')) {
-this._performHook(cs, this.eventHook, {event: 'stop-play', data: {path: filePath}});
-}
-this.logger.debug(`finished ${filePath}`);
-
-if (this.curentAudioFile === filePath) {
-this.playInProgress = false;
-if (this.queuedTasks) {
-this.logger.debug('finished playing audio and we have queued tasks');
-this._redirect(cs, this.queuedTasks);
-return;
-}
-}
-this.greetingPlayed = true;
-
-if (this.hangupAfterPlayDone) {
-this.logger.info('hanging up since intent was marked end interaction and we completed final prompt');
-this.performAction({dialogflowResult: 'completed'});
-this.notifyTaskDone();
-}
-else {
-// every time we finish playing a prompt, start the no-input timer
-this._startNoinputTimer(ep, cs);
-}
 } catch (err) {
 this.logger.error({err}, 'Dialogflow:_onIntent - error playing tts');
 }
 }
+else if (this.isCX && !this.hangupAfterPlayDone) {
+// CX intent with no TTS — _onAudioProvided may handle playback.
+// If not, restart CX after a short delay.
+this.greetingPlayed = true;
+this._cxAudioHandled = false;
+setTimeout(() => {
+if (!this._cxAudioHandled && !this.playInProgress) {
+this.logger.info('CX: no TTS and no audio provided, restarting to listen');
+ep.api(this.cmd, this._buildStartArgs());
+this._startNoinputTimer(ep, cs);
+}
+}, 500);
+}
 }

 async _fallbackSynthAudio(cs, intent, stats, synthAudio) {
 try {
-const obj = {
+return await synthAudio(stats, {
 account_sid: cs.accountSid,
 text: intent.fulfillmentText,
 vendor: this.vendor,
@@ -290,17 +336,13 @@ class Dialogflow extends Task {
 voice: this.voice,
 salt: cs.callSid,
 credentials: this.ttsCredentials
-};
-this.logger.debug({obj}, 'Dialogflow:_onIntent - playing message via tts');
-
-return await synthAudio(stats, obj);
+});
 } catch (error) {
 this.logger.info({error}, 'Failed to synthesize audio from primary vendor');
+if (this.fallbackVendor) {
 try {
-if (this.fallbackVendor) {
 const credentials = cs.getSpeechCredentials(this.fallbackVendor, 'tts', this.fallbackLabel);
-const obj = {
+return await synthAudio(stats, {
 account_sid: cs.accountSid,
 text: intent.fulfillmentText,
 vendor: this.fallbackVendor,
@@ -308,24 +350,20 @@ class Dialogflow extends Task {
 voice: this.fallbackVoice,
 salt: cs.callSid,
 credentials
-};
-this.logger.debug({obj}, 'Dialogflow:_onIntent - playing message via fallback tts');
-return await synthAudio(stats, obj);
+});
+} catch (err) {
+this.logger.info({err}, 'Failed to synthesize audio from fallback vendor');
+throw err;
 }
-} catch (err) {
-this.logger.info({err}, 'Failed to synthesize audio from falllback vendor');
-throw err;
 }
 throw error;
 }
 }

 /**
-* A transcription - either interim or final - has been returned.
-* If we are doing barge-in based on hotword detection, check for the hotword or phrase.
-* If we are playing a filler sound, like typing, during the fullfillment phase, start that
-* if this is a final transcript.
-* @param {*} ep - media server endpoint
+* A transcription has been returned.
+* @param {*} ep - media server endpoint
+* @param {*} cs - call session
 * @param {*} evt - event data
 */
 async _onTranscription(ep, cs, evt) {
@@ -338,13 +376,11 @@ class Dialogflow extends Task {
 this._performHook(cs, this.eventHook, {event: 'transcription', data: evt});
 }

-// if a final transcription, start a typing sound
 if (this.thinkingMusic && !transcription.isEmpty && transcription.isFinal &&
 transcription.confidence > 0.8) {
 ep.play(this.data.thinkingMusic).catch((err) => this.logger.info(err, 'Error playing typing sound'));
 }

-// interrupt playback on speaking if bargein = true
 if (this.bargein && this.playInProgress) {
 this.logger.debug('terminating playback due to speech bargein');
 this.playInProgress = false;
@@ -353,17 +389,21 @@ class Dialogflow extends Task {
 }

 /**
-* The caller has just finished speaking. No action currently taken.
+* The caller has just finished speaking.
+* @param {*} ep - media server endpoint
+* @param {*} cs - call session
 * @param {*} evt - event data
 */
-_onEndOfUtterance(cs, evt) {
+_onEndOfUtterance(ep, cs, evt) {
 if (this.events.includes('end-utterance')) {
 this._performHook(cs, this.eventHook, {event: 'end-utterance'});
 }
 }

 /**
-* Dialogflow has returned an error of some kind.
+* Dialogflow has returned an error.
+* @param {*} ep - media server endpoint
+* @param {*} cs - call session
 * @param {*} evt - event data
 */
 _onError(ep, cs, evt) {
@@ -372,70 +412,87 @@ class Dialogflow extends Task {

 /**
 * Audio has been received from dialogflow and written to a temporary disk file.
-* Start playing the audio, after killing any filler sound that might be playing.
-* When the audio completes, start the no-input timer.
+* Play the audio, then restart or hang up as appropriate.
 * @param {*} ep - media server endpoint
+* @param {*} cs - call session
 * @param {*} evt - event data
 */
 async _onAudioProvided(ep, cs, evt) {
-if (this.vendor) return;
-this.waitingForPlayStart = false;
-// kill filler audio
-await ep.api('uuid_break', ep.uuid);
-
-// start a new intent, (we want to continue to listen during the audio playback)
-// _unless_ we are transferring or ending the session
-if (/*this.greetingPlayed &&*/ !this.hangupAfterPlayDone) {
-ep.api('dialogflow_start', `${ep.uuid} ${this.project} ${this.lang}`);
+// For CX: suppress NO_INPUT reprompt audio and silently restart
+if (this._suppressNextCXAudio) {
+this._suppressNextCXAudio = false;
+ep.api(this.cmd, this._buildStartArgs());
+return;
 }

-this.playInProgress = true;
-this.curentAudioFile = evt.path;
-
-this.logger.info(`starting to play ${evt.path}`);
-if (this.events.includes('start-play')) {
-this._performHook(cs, this.eventHook, {event: 'start-play', data: {path: evt.path}});
-}
-await ep.play(evt.path);
-if (this.events.includes('stop-play')) {
-this._performHook(cs, this.eventHook, {event: 'stop-play', data: {path: evt.path}});
-}
-this.logger.info(`finished ${evt.path}, queued tasks: ${(this.queuedTasks || []).length}`);
-
-if (this.curentAudioFile === evt.path) {
-this.playInProgress = false;
-if (this.queuedTasks) {
-this.logger.debug('finished playing audio and we have queued tasks');
-this._redirect(cs, this.queuedTasks);
-this.queuedTasks.length = 0;
+if (this.vendor) {
+if (this.isCX && !this.playInProgress) {
+// CX audio arrived but TTS didn't play — fall through to use CX audio
+this.logger.info('CX audio provided, TTS vendor did not play - using CX audio');
+} else {
 return;
 }
 }
-/*
-if (!this.inbound && !this.greetingPlayed) {
-this.logger.info('finished greeting on outbound call, starting new intent');
-this.ep.api('dialogflow_start', `${ep.uuid} ${this.project} ${this.lang}`);
+this._cxAudioHandled = true;
+this.waitingForPlayStart = false;
+await ep.api('uuid_break', ep.uuid);
+
+// ES: start a new intent during playback so we continue to listen
+if (!this.hangupAfterPlayDone && this.isES) {
+ep.api(this.cmd, this._buildStartArgs());
 }
-*/
+
+await this._playAndHandlePostPlay(ep, cs, evt.path);
+}
+
+/**
+* Shared post-play logic for both TTS (_onIntent) and CX audio (_onAudioProvided).
+* Plays audio, then either hangs up, redirects, or restarts the dialog.
+*/
+async _playAndHandlePostPlay(ep, cs, filePath) {
+if (this.playInProgress) {
+await ep.api('uuid_break', ep.uuid).catch((err) => this.logger.info(err, 'Error killing audio'));
+}
+this.playInProgress = true;
+this.curentAudioFile = filePath;
+
+if (this.events.includes('start-play')) {
+this._performHook(cs, this.eventHook, {event: 'start-play', data: {path: filePath}});
+}
+await ep.play(filePath);
+if (this.events.includes('stop-play')) {
+this._performHook(cs, this.eventHook, {event: 'stop-play', data: {path: filePath}});
+}
+
+if (this.curentAudioFile === filePath) {
+this.playInProgress = false;
+if (this.queuedTasks) {
+this._redirect(cs, this.queuedTasks);
+this.queuedTasks = null;
+return;
+}
+}
+
 this.greetingPlayed = true;

 if (this.hangupAfterPlayDone) {
-this.logger.info('hanging up since intent was marked end interaction and we completed final prompt');
+this.logger.info('hanging up after end interaction prompt');
 this.performAction({dialogflowResult: 'completed'});
 this.notifyTaskDone();
 }
 else {
-// every time we finish playing a prompt, start the no-input timer
+// CX: restart to listen for the next utterance
+if (this.isCX) {
+ep.api(this.cmd, this._buildStartArgs());
+}
 this._startNoinputTimer(ep, cs);
 }
 }

 /**
-* receive a dmtf entry from the caller.
-* If we have active dtmf instructions, collect and process accordingly.
+* Receive a DTMF entry from the caller.
 */
 _onDtmf(ep, cs, evt) {
 if (this.digitBuffer) this.digitBuffer.process(evt.dtmf);
@@ -444,41 +501,48 @@ class Dialogflow extends Task {
 }
 }

-_onDtmfEntryComplete(ep, dtmfEntry) {
+async _onDtmfEntryComplete(ep, dtmfEntry) {
 this.logger.info(`collected dtmf entry: ${dtmfEntry}`);
-this.dtmfEntry = dtmfEntry;
 this.digitBuffer = null;
-// if a final transcription, start a typing sound
 if (this.thinkingMusic) {
 ep.play(this.thinkingMusic).catch((err) => this.logger.info(err, 'Error playing typing sound'));
 }

-// kill the current dialogflow, which will result in us getting an immediate intent
-ep.api('dialogflow_stop', `${ep.uuid}`)
-.catch((err) => this.logger.info(`dialogflow_stop failed: ${err.message}`));
+if (this.isCX) {
+try {
+await ep.api(this.cmdStop, ep.uuid);
+} catch (err) {
+this.logger.info(err, 'dialogflow_cx_stop failed');
+}
+ep.api(this.cmd, this._buildStartArgs({text: dtmfEntry}));
+} else {
+this.dtmfEntry = dtmfEntry;
+ep.api(this.cmdStop, `${ep.uuid}`)
+.catch((err) => this.logger.info(`dialogflow_stop failed: ${err.message}`));
+}
 }

-/**
-* The user has not provided any input for some time.
-* Set the 'noinput' member to true and kill the current dialogflow.
-* This will result in us re-prompting with an event indicating no input.
-* @param {*} ep
-*/
-_onNoInput(ep, cs) {
-this.noinput = true;
-
+async _onNoInput(ep, cs) {
+this.logger.info('no-input timer fired');
 if (this.events.includes('no-input')) {
 this._performHook(cs, this.eventHook, {event: 'no-input'});
 }

-// kill the current dialogflow, which will result in us getting an immediate intent
-ep.api('dialogflow_stop', `${ep.uuid}`)
-.catch((err) => this.logger.info(`dialogflow_stop failed: ${err.message}`));
+if (this.isCX) {
+try {
+await ep.api(this.cmdStop, ep.uuid);
+} catch (err) {
+this.logger.info(err, 'dialogflow_cx_stop failed');
+}
+ep.api(this.cmd, this._buildStartArgs({event: this.noInputEvent}));
+} else {
+this.noinput = true;
+ep.api(this.cmdStop, `${ep.uuid}`)
+.catch((err) => this.logger.info(`dialogflow_stop failed: ${err.message}`));
+}
 }

-/**
-* Stop the no-input timer, if it is running
-*/
 _clearNoinputTimer() {
 if (this.noinputTimer) {
 clearTimeout(this.noinputTimer);
@@ -486,10 +550,6 @@ class Dialogflow extends Task {
 }
 }

-/**
-* Start the no-input timer. The duration is set in the configuration file.
-* @param {*} ep
-*/
 _startNoinputTimer(ep, cs) {
 if (!this.noInputTimeout) return;
 this._clearNoinputTimer();
@@ -507,7 +567,7 @@ class Dialogflow extends Task {
 if (tasks && tasks.length > 0) {
 if (this.playInProgress) {
 this.queuedTasks = tasks;
-this.logger.info({tasks: tasks},
+this.logger.info({tasks},
 `${this.name} replacing application with ${tasks.length} tasks after play completes`);
 return;
 }
@@ -517,7 +577,7 @@ class Dialogflow extends Task {
 }

 _redirect(cs, tasks) {
-this.logger.info({tasks: tasks}, `${this.name} replacing application with ${tasks.length} tasks`);
+this.logger.info({tasks}, `${this.name} replacing application with ${tasks.length} tasks`);
 this.performAction({dialogflowResult: 'redirect'}, false);
 this.reportedFinalAction = true;
 cs.replaceApplication(tasks);

@@ -3,20 +3,44 @@ class Intent {
 this.logger = logger;
 this.evt = evt;

-this.logger.debug({evt}, 'intent');
-this.dtmfRequest = checkIntentForDtmfEntry(logger, evt);
+this.qr = this.isCX ? evt.detect_intent_response.query_result : evt.query_result;
+this.dtmfRequest = this._checkIntentForDtmfEntry();
+}
+
+get response_id() {
+return this.isCX ? this.evt.detect_intent_response.response_id : this.evt.response_id;
 }

 get isEmpty() {
-return this.evt.response_id.length === 0;
+return !(this.response_id?.length > 0);
 }

 get fulfillmentText() {
-return this.evt.query_result.fulfillment_text;
+if (this.isCX) {
+if (this.qr && this.qr.response_messages) {
+for (const msg of this.qr.response_messages) {
+if (msg.text && msg.text.text && msg.text.text.length > 0) {
+return msg.text.text.join('\n');
+}
+if (msg.output_audio_text) {
+if (msg.output_audio_text.text) return msg.output_audio_text.text;
+if (msg.output_audio_text.ssml) return msg.output_audio_text.ssml;
+}
+}
+}
+return undefined;
+}
+return this.qr.fulfillment_text;
 }

 get saysEndInteraction() {
-return this.evt.query_result.intent.end_interaction ;
+if (this.isCX) {
+if (!this.qr || !this.qr.response_messages) return false;
+const end_interaction = this.qr.response_messages
+.find((m) => typeof m === 'object' && 'end_interaction' in m)?.end_interaction;
+return end_interaction && Object.keys(end_interaction).length > 0;
+}
+return this.qr.intent.end_interaction;
 }

 get saysCollectDtmf() {
@@ -28,7 +52,23 @@ class Intent {
 }

 get name() {
-if (!this.isEmpty) return this.evt.query_result.intent.display_name;
+if (!this.isEmpty) {
+if (this.isCX) {
+return this.qr.match?.intent?.display_name;
+}
+return this.qr.intent.display_name;
+}
+}
+
+get isCX() {
+return typeof this.evt.detect_intent_response === 'object';
+}
+
+get isNoInput() {
+if (this.isCX && this.qr && this.qr.match) {
+return this.qr.match.match_type === 'NO_INPUT';
+}
+return false;
 }

 toJSON() {
@@ -38,52 +78,48 @@ class Intent {
 };
 }

+/**
+* Parse a returned intent for DTMF entry information (ES only).
+* CX does not use fulfillment_messages or output_contexts.
+*
+* allow-dtmf-x-y-z
+* x = min number of digits
+* y = optional, max number of digits
+* z = optional, terminating character
+*/
+_checkIntentForDtmfEntry() {
+if (this.isCX) return;
+
+const qr = this.qr;
+if (!qr || !qr.fulfillment_messages || !qr.output_contexts) {
+return;
+}
+
+// check for custom payloads with a gather verb
+const custom = qr.fulfillment_messages.find((f) => f.payload && f.payload.verb === 'gather');
+if (custom) {
+this.logger.info({custom}, 'found dtmf custom payload');
+return {
+max: custom.payload.numDigits,
+term: custom.payload.finishOnKey,
+template: custom.payload.responseTemplate
+};
+}
+
+// check for an output context with a specific naming convention
+const context = qr.output_contexts.find((oc) => oc.name.includes('/contexts/allow-dtmf-'));
+if (context) {
+const arr = /allow-dtmf-(\d+)(?:-(\d+))?(?:-(.*))?/.exec(context.name);
+if (arr) {
+this.logger.info('found dtmf output context');
+return {
+min: parseInt(arr[1]),
+max: arr.length > 2 ? parseInt(arr[2]) : null,
+term: arr.length > 3 ? arr[3] : null
+};
+}
+}
+}
 }

 module.exports = Intent;
-
-/**
-* Parse a returned intent for DTMF entry information
-* i.e.
-* allow-dtmf-x-y-z
-* x = min number of digits
-* y = optional, max number of digits
-* z = optional, terminating character
-* e.g.
-* allow-dtmf-5 : collect 5 digits
-* allow-dtmf-1-4 : collect between 1 to 4 (inclusive) digits
-* allow-dtmf-1-4-# : collect 1-4 digits, terminating if '#' is entered
-* @param {*} intent - dialogflow intent
-*/
-const checkIntentForDtmfEntry = (logger, intent) => {
-const qr = intent.query_result;
-if (!qr || !qr.fulfillment_messages || !qr.output_contexts) {
-logger.info({f: qr.fulfillment_messages, o: qr.output_contexts}, 'no dtmfs');
-return;
-}
-
-// check for custom payloads with a gather verb
-const custom = qr.fulfillment_messages.find((f) => f.payload && f.payload.verb === 'gather');
-if (custom && custom.payload && custom.payload.verb === 'gather') {
-logger.info({custom}, 'found dtmf custom payload');
-return {
-max: custom.payload.numDigits,
-term: custom.payload.finishOnKey,
-template: custom.payload.responseTemplate
-};
-}
-
-// check for an output context with a specific naming convention
-const context = qr.output_contexts.find((oc) => oc.name.includes('/contexts/allow-dtmf-'));
-if (context) {
-const arr = /allow-dtmf-(\d+)(?:-(\d+))?(?:-(.*))?/.exec(context.name);
-if (arr) {
-logger.info({custom}, 'found dtmf output context');
-return {
-min: parseInt(arr[1]),
-max: arr.length > 2 ? parseInt(arr[2]) : null,
-term: arr.length > 3 ? arr[3] : null
-};
-}
-}
-};

@@ -152,9 +152,17 @@ class TaskListen extends Task {

 async _startListening(cs, ep) {
 this._initListeners(ep);
-const ci = this.nested ? this.parentTask.sd.callInfo : cs.callInfo.toJSON();
+const tempci = this.nested ? this.parentTask.sd.callInfo : cs.callInfo.toJSON();
+const ci = structuredClone(tempci);
 if (this._ignoreCustomerData) {
 delete ci.customerData;
+} else {
+for (const key in ci.customerData) {
+if (ci.customerData.hasOwnProperty(key)) {
+const value = ci.customerData[key];
+ci.customerData[key] = typeof value === 'string' ? escapeString(value) : value;
+}
+}
 }
 const metadata = Object.assign(
 {sampleRate: this.sampleRate, mixType: this.mixType},

@@ -36,6 +36,9 @@ class TaskLlmGoogle_S2S extends Task {
 this.model = this.parent.model || 'models/gemini-2.0-flash-live-001';
 this.auth = this.parent.auth;
 this.connectionOptions = this.parent.connectOptions;
+const {host, version} = this.connectionOptions || {};
+this.host = host;
+this.version = version;

 const {apiKey} = this.auth || {};
 if (!apiKey) throw new Error('auth.apiKey is required for Google S2S');
@@ -46,7 +49,7 @@ class TaskLlmGoogle_S2S extends Task {
 this.eventHook = this.data.eventHook;
 this.toolHook = this.data.toolHook;

-const {setup} = this.data.llmOptions;
+const {setup, sessionResumption} = this.data.llmOptions;

 if (typeof setup !== 'object') {
 throw new Error('llmOptions with an initial setup is required for Google S2S');
@@ -54,6 +57,7 @@ class TaskLlmGoogle_S2S extends Task {
 this.setup = {
 ...setup,
 model: this.model,
+...(sessionResumption && {sessionResumption}),
 // make sure output is always audio
 generationConfig: {
 ...(setup.generationConfig || {}),
@@ -138,6 +142,10 @@ class TaskLlmGoogle_S2S extends Task {

 try {
 const args = [ep.uuid, 'session.create', this.apiKey];
+if (this.host) {
+args.push(this.host);
+if (this.version) args.push(this.version);
+}
 await this._api(ep, args);
 } catch (err) {
 this.logger.error({err}, 'TaskLlmGoogle_S2S:_startListening');

@@ -1,7 +1,6 @@
 const Task = require('./task');
 const {TaskName} = require('../utils/constants');
 const WsRequestor = require('../utils/ws-requestor');
-const URL = require('url');
 const HttpRequestor = require('../utils/http-requestor');

 /**
@@ -10,6 +9,7 @@ const HttpRequestor = require('../utils/http-requestor');
 class TaskRedirect extends Task {
 constructor(logger, opts) {
 super(logger, opts);
+this.statusHook = opts.statusHook || false;
 }

 get name() { return TaskName.Redirect; }
@@ -47,6 +47,30 @@ class TaskRedirect extends Task {
 }
 }
 }
+
+/* update the notifier if a new statusHook was provided */
+if (this.statusHook) {
+this.logger.info(`TaskRedirect updating statusHook to ${this.statusHook}`);
+try {
+const oldNotifier = cs.application.notifier;
+const isStatusHookAbsolute = cs.notifier?._isAbsoluteUrl(this.statusHook);
+if (isStatusHookAbsolute) {
+if (cs.notifier instanceof WsRequestor) {
+cs.application.notifier = new WsRequestor(this.logger, cs.accountSid, {url: this.statusHook},
+cs.accountInfo.account.webhook_secret);
+} else {
+cs.application.notifier = new HttpRequestor(this.logger, cs.accountSid, {url: this.statusHook},
+cs.accountInfo.account.webhook_secret);
+}
+if (oldNotifier?.close) oldNotifier.close();
+}
+/* update the call_status_hook URL that gets passed to the notifier */
+cs.application.call_status_hook = this.statusHook;
+} catch (err) {
+this.logger.info(err, `TaskRedirect error updating statusHook to ${this.statusHook}`);
+}
+}
+
 await this.performAction();
 }
 }

@@ -31,8 +31,9 @@ class TtsTask extends Task {
 this.synthesizer = this.data.synthesizer || {};
 this.disableTtsCache = this.data.disableTtsCache;
 this.options = this.synthesizer.options || {};
-this.instructions = this.data.instructions;
+this.instructions = this.data.instructions || this.options.instructions;
 this.playbackIds = [];
+this.useGeminiTts = this.options.useGeminiTts;
 }

 getPlaybackId(offset) {
@@ -156,6 +157,13 @@ class TtsTask extends Task {
 ...(reduceLatency && {RIMELABS_TTS_STREAMING_REDUCE_LATENCY: reduceLatency})
 };
 break;
+case 'google':
+obj = {
+GOOGLE_TTS_LANGUAGE_CODE: language,
+GOOGLE_TTS_VOICE_NAME: voice,
+GOOGLE_APPLICATION_CREDENTIALS: JSON.stringify(credentials.credentials)
+};
+break;
 default:
 if (vendor.startsWith('custom:')) {
 const use_tls = custom_tts_streaming_url.startsWith('wss://');
@@ -242,6 +250,8 @@ class TtsTask extends Task {
 }
 } else if (vendor === 'cartesia') {
 credentials.model_id = this.options.model_id || credentials.model_id;
+} else if (vendor === 'google') {
+this.model = this.options.model || credentials.credentials.model_id;
 }

 this.model_id = credentials.model_id;

@@ -118,6 +118,13 @@ class ActionHookDelayProcessor extends Emitter {
 this.logger.debug('ActionHookDelayProcessor#_onNoResponseTimer');
 this._noResponseTimer = null;

+/* check if endpoint is still available (call may have ended) */
+if (!this.ep) {
+this.logger.debug('ActionHookDelayProcessor#_onNoResponseTimer: endpoint is null, call may have ended');
+this._active = false;
+return;
+}
+
 /* get the next play or say action */
 const verb = this.actions[this._retryCount % this.actions.length];

@@ -129,8 +136,8 @@ class ActionHookDelayProcessor extends Emitter {
 this._taskInProgress.exec(this.cs, {ep: this.ep}).catch((err) => {
 this.logger.info(`ActionHookDelayProcessor#_onNoResponseTimer: error playing file: ${err.message}`);
 this._taskInProgress = null;
-this.ep.removeAllListeners('playback-start');
-this.ep.removeAllListeners('playback-stop');
+this.ep?.removeAllListeners('playback-start');
+this.ep?.removeAllListeners('playback-stop');
 });
 } catch (err) {
 this.logger.info(err, 'ActionHookDelayProcessor#_onNoResponseTimer: error starting action');

@@ -311,6 +311,11 @@
 "ConnectFailure": "deepgram_tts_streaming::connect_failed",
 "Connect": "deepgram_tts_streaming::connect"
 },
+"GoogleTtsStreamingEvents": {
+"Empty": "google_tts_streaming::empty",
+"ConnectFailure": "google_tts_streaming::connect_failed",
+"Connect": "google_tts_streaming::connect"
+},
 "CartesiaTtsStreamingEvents": {
 "Empty": "cartesia_tts_streaming::empty",
 "ConnectFailure": "cartesia_tts_streaming::connect_failed",

@@ -1310,6 +1310,9 @@ module.exports = (logger) => {
 ...(openaiOptions.turn_detection.silence_duration_ms && {
 OPENAI_TURN_DETECTION_SILENCE_DURATION_MS: openaiOptions.turn_detection.silence_duration_ms
 }),
+...(openaiOptions.turn_detection.eagerness && {
+OPENAI_TURN_DETECTION_EAGERNESS: openaiOptions.turn_detection.eagerness
+})
 };
 }
 }
@@ -1375,7 +1378,9 @@ module.exports = (logger) => {
 speechmaticsOptions.transcription_config.audio_filtering_config.volume_threshold}),
 ...(speechmaticsOptions.transcription_config?.transcript_filtering_config?.remove_disfluencies &&
 {SPEECHMATICS_REMOVE_DISFLUENCIES:
-speechmaticsOptions.transcription_config.transcript_filtering_config.remove_disfluencies})
+speechmaticsOptions.transcription_config.transcript_filtering_config.remove_disfluencies}),
+SPEECHMATICS_END_OF_UTTERANCE_SILENCE_TRIGGER:
+speechmaticsOptions.transcription_config?.conversation_config?.end_of_utterance_silence_trigger || 0.5
 };
 }
 else if (vendor.startsWith('custom:')) {

@@ -421,6 +421,7 @@ class TtsStreamingBuffer extends Emitter {
 'cartesia',
 'elevenlabs',
 'rimelabs',
+'google',
 'custom'
 ].forEach((vendor) => {
 const eventClassName = `${vendor.charAt(0).toUpperCase() + vendor.slice(1)}TtsStreamingEvents`;

package-lock.json (generated): 1973 lines changed; diff suppressed because it is too large.

@@ -31,10 +31,10 @@
 "@jambonz/http-health-check": "^0.0.1",
 "@jambonz/mw-registrar": "^0.2.7",
 "@jambonz/realtimedb-helpers": "^0.8.15",
-"@jambonz/speech-utils": "^0.2.26",
+"@jambonz/speech-utils": "^0.2.30",
 "@jambonz/stats-collector": "^0.1.10",
 "@jambonz/time-series": "^0.2.15",
-"@jambonz/verb-specifications": "^0.0.123",
+"@jambonz/verb-specifications": "^0.0.125",
 "@modelcontextprotocol/sdk": "^1.9.0",
 "@opentelemetry/api": "^1.8.0",
 "@opentelemetry/exporter-jaeger": "^1.23.0",
@@ -49,7 +49,7 @@
 "debug": "^4.3.4",
 "deepcopy": "^2.1.0",
 "drachtio-fsmrf": "^4.1.2",
-"drachtio-srf": "^5.0.14",
+"drachtio-srf": "^5.0.18",
 "express": "^4.19.2",
 "express-validator": "^7.0.1",
 "moment": "^2.30.1",