Compare commits


1 commit

Author: Dave Horton
SHA1: b93bfbb486
Message: fix #714
Date: 2024-04-10 16:15:28 -04:00
10 changed files with 92 additions and 404 deletions

View File

@@ -193,24 +193,6 @@ class CallSession extends Emitter {
this._synthesizer = synth;
}
/**
* ASR TTS fallback
*/
get hasFallbackAsr() {
return this._hasFallbackAsr || false;
}
set hasFallbackAsr(i) {
this._hasFallbackAsr = i;
}
get hasFallbackTts() {
return this._hasFallbackTts || false;
}
set hasFallbackTts(i) {
this._hasFallbackTts = i;
}
/**
* default vendor to use for speech synthesis if not provided in the app
*/
@@ -720,7 +702,7 @@ class CallSession extends Emitter {
task = await this.backgroundTaskManager.newTask('bargeIn', gather);
task.sticky = autoEnable;
// listen to the bargein-done from background manager
this.backgroundTaskManager.on('bargeIn-done', () => {
this.backgroundTaskManager.once('bargeIn-done', () => {
if (this.requestor instanceof WsRequestor) {
try {
this.kill(true);
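The on/once change in this hunk comes down to EventEmitter listener lifetime: with on, re-registering the same callback every time barge-in is re-armed stacks handlers, while once removes the handler after its first invocation. A minimal standalone sketch (plain Node.js, not code from this repo):

const { EventEmitter } = require('events');
const manager = new EventEmitter();

// .once() runs the handler a single time and then removes it automatically,
// so repeated 'bargeIn-done' events cannot invoke stale callbacks.
manager.once('bargeIn-done', () => {
  console.log('handled exactly once');
});

manager.emit('bargeIn-done'); // logs once
manager.emit('bargeIn-done'); // no further output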
@@ -880,12 +862,6 @@ class CallSession extends Emitter {
voice_engine: credential.voice_engine,
options: credential.options
};
} else if ('rimelabs' === vendor) {
return {
api_key: credential.api_key,
model_id: credential.model_id,
options: credential.options
};
} else if ('assemblyai' === vendor) {
return {
speech_credential_sid: credential.speech_credential_sid,
@@ -1616,23 +1592,7 @@ Duration=${duration} `
}
_preCacheAudio(newTasks) {
/**
* only precache audio for a queued say verb if we have one or more non-Config verbs
* ahead of it in the queue. The Config verb returns immediately and would not give
* us enough time to generate the audio. The point of precaching is to have the audio
* ready before it is needed, so we need to be confident there is some time before the
* say verb executes, and Config gives us no such confidence since it returns immediately.
*/
const haveQueuedNonConfig = this.tasks.findIndex((t) => t.name !== TaskName.Config) !== -1;
let tasks = haveQueuedNonConfig ? newTasks : [];
if (!haveQueuedNonConfig) {
const idxFirstNotConfig = newTasks.findIndex((t) => t.name !== TaskName.Config);
if (-1 === idxFirstNotConfig) return;
tasks = newTasks.slice(idxFirstNotConfig + 1);
}
for (const task of tasks) {
for (const task of newTasks) {
if (task.name === TaskName.Config && task.hasSynthesizer) {
/* if they change synthesizer settings don't try to precache */
break;
@@ -1998,7 +1958,6 @@ Duration=${duration} `
/**
* called when the caller has hung up. Provided for subclasses to override
* in order to apply logic at this point if needed.
* return true if the fallback succeeded, false if not
*/
_callerHungup() {
assert(false, 'subclass responsibility to override this method');
@@ -2440,7 +2399,6 @@ Duration=${duration} `
_startActionHookNoResponseTimer(options) {
this._clearActionHookNoResponseTimer();
this._actionHookDelayResolved = false;
if (options.noResponseTimeoutMs) {
this.logger.debug(`CallSession:_startActionHookNoResponseTimer ${options.noResponseTimeoutMs}`);
this._actionHookNoResponseTimer = setTimeout(() => {
@@ -2454,9 +2412,7 @@ Duration=${duration} `
if (t.length) {
t[0].on('playDone', (err) => {
if (err) this.logger.error({err}, `Call-Session:exec Error delay action, play ${verb}`);
if (!this._actionHookDelayResolved) {
this._startActionHookNoResponseTimer(options);
}
this._startActionHookNoResponseTimer(options);
});
}
this.tasks.push(...t);
@@ -2474,16 +2430,7 @@ Duration=${duration} `
_clearActionHookNoResponseTimer() {
if (this._actionHookNoResponseTimer) {
// Action Hook delay is solved.
this._actionHookDelayResolved = true;
clearTimeout(this._actionHookNoResponseTimer);
// if delay action is enabled
// and bot has responded with list of new verbs
// Only kill current running play task.
//https://github.com/jambonz/jambonz-feature-server/issues/710
if (this.currentTask?.name === TaskName.Play) {
this.currentTask.kill(this);
}
}
this._actionHookNoResponseTimer = null;
}
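The _startActionHookNoResponseTimer/_clearActionHookNoResponseTimer pair above follows a common watchdog pattern: arm a timeout when a hook is sent, then mark it resolved and cancel it once a response arrives. A simplified standalone sketch of that pattern (class and method names here are illustrative, not the actual CallSession code):

class HookWatchdog {
  startNoResponseTimer(timeoutMs, onTimeout) {
    this.clearNoResponseTimer();
    this.resolved = false;
    if (timeoutMs) {
      this.timer = setTimeout(() => {
        if (!this.resolved) onTimeout();
      }, timeoutMs);
    }
  }
  clearNoResponseTimer() {
    if (this.timer) {
      // a response (or a new set of verbs) arrived in time
      this.resolved = true;
      clearTimeout(this.timer);
    }
    this.timer = null;
  }
}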

View File

@@ -30,7 +30,7 @@ class TaskConfig extends Task {
input: ['speech']
};
[
'finishOnKey', 'input', 'numDigits', 'minDigits', 'maxDigits', 'partialResultHook',
'finishOnKey', 'input', 'numDigits', 'minDigits', 'maxDigits',
'interDigitTimeout', 'bargein', 'dtmfBargein', 'minBargeinWordCount', 'actionHook'
].forEach((k) => {
if (this.bargeIn[k]) this.gatherOpts[k] = this.bargeIn[k];
@@ -259,7 +259,8 @@ class TaskConfig extends Task {
cs.stopBackgroundTask('transcribe');
}
}
if (Object.keys(this.actionHookDelayAction).length !== 0) {
if (this.actionHookDelayAction) {
cs.actionHookDelayEnabled = this.actionHookDelayAction.enabled || false;
cs.actionHookNoResponseTimeout = this.actionHookDelayAction.noResponseTimeout || 0;
cs.actionHookNoResponseGiveUpTimeout = this.actionHookDelayAction.noResponseGiveUpTimeout || 0;
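The two guards shown in this hunk are not equivalent for an empty object: {} is truthy, so a plain if (this.actionHookDelayAction) check passes even when no options were supplied, whereas Object.keys(...).length !== 0 requires at least one property to be set. A tiny standalone illustration:

const empty = {};
const configured = { enabled: true, noResponseTimeout: 5 };

console.log(Boolean(empty));                        // true  - an empty object is truthy
console.log(Object.keys(empty).length !== 0);       // false - no properties set
console.log(Object.keys(configured).length !== 0);  // true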

View File

@@ -191,7 +191,12 @@ class TaskGather extends SttTask {
this._startTranscribing(ep);
return updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid);
} catch (e) {
await this._startFallback(cs, ep, {error: e});
if (this.fallbackVendor && this.isHandledByPrimaryProvider) {
await this._fallback();
startListening(cs, ep);
} else {
this.logger.error({error: e}, 'error in initSpeech');
}
}
}
};
@@ -199,7 +204,12 @@ class TaskGather extends SttTask {
try {
if (this.sayTask) {
const {span, ctx} = this.startChildSpan(`nested:${this.sayTask.summary}`);
const process = () => {
this.sayTask.span = span;
this.sayTask.ctx = ctx;
this.sayTask.exec(cs, {ep}); // kicked off, _not_ waiting for it to complete
this.sayTask.on('playDone', (err) => {
span.end();
if (err) this.logger.error({err}, 'Gather:exec Error playing tts');
this.logger.debug('Gather: nested say task completed');
if (!this.killed) {
startListening(cs, ep);
@@ -210,22 +220,16 @@ class TaskGather extends SttTask {
});
}
}
};
this.sayTask.span = span;
this.sayTask.ctx = ctx;
this.sayTask.exec(cs, {ep}) // kicked off, _not_ waiting for it to complete
.catch((err) => {
process();
});
this.sayTask.on('playDone', (err) => {
span.end();
if (err) this.logger.error({err}, 'Gather:exec Error playing tts');
process();
});
}
else if (this.playTask) {
const {span, ctx} = this.startChildSpan(`nested:${this.playTask.summary}`);
const process = () => {
this.playTask.span = span;
this.playTask.ctx = ctx;
this.playTask.exec(cs, {ep}); // kicked off, _not_ waiting for it to complete
this.playTask.on('playDone', (err) => {
span.end();
if (err) this.logger.error({err}, 'Gather:exec Error playing url');
this.logger.debug('Gather: nested play task completed');
if (!this.killed) {
startListening(cs, ep);
@@ -236,17 +240,6 @@ class TaskGather extends SttTask {
});
}
}
};
this.playTask.span = span;
this.playTask.ctx = ctx;
this.playTask.exec(cs, {ep}) // kicked off, _not_ waiting for it to complete
.catch((err) => {
process();
});
this.playTask.on('playDone', (err) => {
span.end();
if (err) this.logger.error({err}, 'Gather:exec Error playing url');
process();
});
}
else {
@@ -351,8 +344,7 @@ class TaskGather extends SttTask {
/* some special deepgram logic */
if (this.vendor === 'deepgram') {
if (this.isContinuousAsr) this._doContinuousAsrWithDeepgram(this.asrTimeout);
if (this.data.recognizer?.deepgramOptions?.shortUtterance ||
this.data.recognizer?.deepgramOptions?.endpointing === false) this.shortUtterance = true;
if (this.data.recognizer?.deepgramOptions?.shortUtterance) this.shortUtterance = true;
}
const opts = this.setChannelVarsForStt(this, this.sttCredentials, this.language, this.data.recognizer);
@@ -845,7 +837,7 @@ class TaskGather extends SttTask {
if (this.bargein && (words + bufferedWords) < this.minBargeinWordCount) {
this.logger.debug({evt, words, bufferedWords},
'TaskGather:_onTranscription - final transcript but < min barge words');
if (!emptyTranscript) this._bufferedTranscripts.push(evt);
this._bufferedTranscripts.push(evt);
if (!['soniox', 'aws', 'microsoft', 'deepgram'].includes(this.vendor)) this._startTranscribing(ep);
return;
}
@@ -940,9 +932,9 @@ class TaskGather extends SttTask {
_onTranscriptionComplete(cs, ep) {
this.logger.debug('TaskGather:_onTranscriptionComplete');
}
async _startFallback(cs, ep, evt) {
if (this.canFallback) {
async _onJambonzError(cs, ep, evt) {
this.logger.info({evt}, 'TaskGather:_onJambonzError');
if (this.isHandledByPrimaryProvider && this.fallbackVendor) {
ep.stopTranscription({
vendor: this.vendor,
bugname: this.bugname
@@ -950,35 +942,17 @@ class TaskGather extends SttTask {
.catch((err) => this.logger.error({err}, `Error stopping transcription for primary vendor ${this.vendor}`));
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
try {
this.logger.debug('gather:_startFallback');
this.notifyError({ msg: 'ASR error',
details:`STT Vendor ${this.vendor} error: ${evt.error || evt.reason}`, failover: 'in progress'});
await this._initFallback();
this._speechHandlersSet = false;
await this._setSpeechHandlers(cs, ep);
await this._fallback();
await this._initSpeech(cs, ep);
this._startTranscribing(ep);
updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid);
return true;
return;
} catch (error) {
this.logger.info({error}, `Error while falling back to ${this.fallbackVendor}`);
this.notifyError({ msg: 'ASR error',
details:`STT Vendor ${this.vendor} error: ${evt.error || evt.reason}`, failover: 'not available'});
}
} else {
this.logger.debug('gather:_startFallback no condition for falling back');
this.notifyError({ msg: 'ASR error',
details:`STT Vendor ${this.vendor} error: ${evt.error || evt.reason}`, failover: 'not available'});
}
return false;
}
async _onJambonzError(cs, ep, evt) {
if (this.vendor === 'google' && evt.error_code === 0) {
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError - ignoring google error code 0');
return;
}
this.logger.info({evt}, 'TaskGather:_onJambonzError');
const {writeAlerts, AlertType} = cs.srf.locals;
if (this.vendor === 'nuance') {
const {code, error} = evt;
if (code === 404 && error === 'No speech') return this._resolve('timeout');
@@ -991,23 +965,17 @@ class TaskGather extends SttTask {
message: `Custom speech vendor ${this.vendor} error: ${evt.error}`,
vendor: this.vendor,
}).catch((err) => this.logger.info({err}, 'Error generating alert for jambonz custom connection failure'));
if (!(await this._startFallback(cs, ep, evt))) {
this.notifyTaskDone();
}
this.notifyError({msg: 'ASR error', details:`Custom speech vendor ${this.vendor} error: ${evt.error}`});
}
async _onVendorConnectFailure(cs, _ep, evt) {
_onVendorConnectFailure(cs, _ep, evt) {
super._onVendorConnectFailure(cs, _ep, evt);
if (!(await this._startFallback(cs, _ep, evt))) {
this.notifyTaskDone();
}
this.notifyTaskDone();
}
async _onVendorError(cs, _ep, evt) {
_onVendorError(cs, _ep, evt) {
super._onVendorError(cs, _ep, evt);
if (!(await this._startFallback(cs, _ep, evt))) {
this._resolve('stt-error', evt);
}
this._resolve('stt-error', evt);
}
_onVadDetected(cs, ep) {
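The fallback-related hunks in this commit revolve around a single guard plus a vendor swap before transcription is restarted. A condensed, standalone sketch of that control flow (names are illustrative, pieced together from the hunks above, not the actual task classes):

class SttFallbackSketch {
  get canFallback() {
    // a fallback vendor must be configured, the primary vendor must still be
    // in charge, and the session must not already have fallen back
    return Boolean(this.fallbackVendor) &&
      this.isHandledByPrimaryProvider &&
      !this.sessionHasFallenBack;
  }
  async startFallback(restartTranscription) {
    if (!this.canFallback) return false;
    this.isHandledByPrimaryProvider = false;
    this.sessionHasFallenBack = true;
    this.vendor = this.fallbackVendor;
    await restartTranscription(this.vendor); // re-init speech handlers and credentials
    return true;
  }
}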

View File

@@ -52,151 +52,6 @@ class TaskSay extends TtsTask {
return `${this.name}{${this.text[0]}}`;
}
_validateURL(urlString) {
try {
new URL(urlString);
return true;
} catch (e) {
return false;
}
}
async _synthesizeWithSpecificVendor(cs, ep, {vendor, language, voice, label, preCache = false}) {
const {srf, accountSid:account_sid} = cs;
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, srf);
const {writeAlerts, AlertType, stats} = srf.locals;
const {synthAudio} = srf.locals.dbHelpers;
const engine = this.synthesizer.engine || 'standard';
const salt = cs.callSid;
let credentials = cs.getSpeechCredentials(vendor, 'tts', label);
/* parse Nuance voices into name and model */
let model;
if (vendor === 'nuance' && voice) {
const arr = /([A-Za-z-]*)\s+-\s+(enhanced|standard)/.exec(voice);
if (arr) {
voice = arr[1];
model = arr[2];
}
} else if (vendor === 'deepgram') {
model = voice;
}
/* allow for microsoft custom region voice and api_key to be specified as an override */
if (vendor === 'microsoft' && this.options.deploymentId) {
credentials = credentials || {};
credentials.use_custom_tts = true;
credentials.custom_tts_endpoint = this.options.deploymentId;
credentials.api_key = this.options.apiKey || credentials.apiKey;
credentials.region = this.options.region || credentials.region;
voice = this.options.voice || voice;
} else if (vendor === 'elevenlabs') {
credentials = credentials || {};
credentials.model_id = this.options.model_id || credentials.model_id;
credentials.voice_settings = this.options.voice_settings || {};
credentials.optimize_streaming_latency = this.options.optimize_streaming_latency
|| credentials.optimize_streaming_latency;
voice = this.options.voice_id || voice;
}
ep.set({
tts_engine: vendor,
tts_voice: voice,
cache_speech_handles: 1,
}).catch((err) => this.logger.info({err}, 'Error setting tts_engine on endpoint'));
if (!preCache) this.logger.info({vendor, language, voice, model}, 'TaskSay:exec');
try {
if (!credentials) {
writeAlerts({
account_sid,
alert_type: AlertType.TTS_NOT_PROVISIONED,
vendor
}).catch((err) => this.logger.info({err}, 'Error generating alert for no tts'));
throw new Error('no provisioned speech credentials for TTS');
}
// synthesize all of the text elements
let lastUpdated = false;
/* produce an audio segment from the provided text */
const generateAudio = async(text) => {
if (this.killed) return;
if (text.startsWith('silence_stream://')) return text;
/* otel: trace time for tts */
if (!preCache) {
const {span} = this.startChildSpan('tts-generation', {
'tts.vendor': vendor,
'tts.language': language,
'tts.voice': voice
});
this.otelSpan = span;
}
try {
const {filePath, servedFromCache, rtt} = await synthAudio(stats, {
account_sid,
text,
vendor,
language,
voice,
engine,
model,
salt,
credentials,
options: this.options,
disableTtsCache : this.disableTtsCache,
preCache
});
if (!filePath.startsWith('say:')) {
this.logger.debug(`file ${filePath}, served from cache ${servedFromCache}`);
if (filePath) cs.trackTmpFile(filePath);
if (this.otelSpan) {
this.otelSpan.setAttributes({'tts.cached': servedFromCache});
this.otelSpan.end();
this.otelSpan = null;
}
if (!servedFromCache && !lastUpdated) {
lastUpdated = true;
updateSpeechCredentialLastUsed(credentials.speech_credential_sid).catch(() => {/* logged error */});
}
if (!servedFromCache && rtt && !preCache) {
this.notifyStatus({
event: 'synthesized-audio',
vendor,
language,
characters: text.length,
elapsedTime: rtt
});
}
}
else {
this.logger.debug('a streaming tts api will be used');
const modifiedPath = filePath.replace('say:{', `say:{session-uuid=${ep.uuid},`);
return modifiedPath;
}
return filePath;
} catch (err) {
this.logger.info({err}, 'Error synthesizing tts');
if (this.otelSpan) this.otelSpan.end();
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.TTS_FAILURE,
vendor,
detail: err.message
}).catch((err) => this.logger.info({err}, 'Error generating alert for tts failure'));
throw err;
}
};
const arr = this.text.map((t) => (this._validateURL(t) ? t : generateAudio(t)));
return (await Promise.all(arr)).filter((fp) => fp && fp.length);
} catch (err) {
this.logger.info(err, 'TaskSay:exec error');
throw err;
}
}
async exec(cs, {ep}) {
const {srf, accountSid:account_sid} = cs;
const {writeAlerts, AlertType} = srf.locals;
@@ -206,16 +61,16 @@ class TaskSay extends TtsTask {
await super.exec(cs);
this.ep = ep;
let vendor = this.synthesizer.vendor && this.synthesizer.vendor !== 'default' ?
const vendor = this.synthesizer.vendor && this.synthesizer.vendor !== 'default' ?
this.synthesizer.vendor :
cs.speechSynthesisVendor;
let language = this.synthesizer.language && this.synthesizer.language !== 'default' ?
const language = this.synthesizer.language && this.synthesizer.language !== 'default' ?
this.synthesizer.language :
cs.speechSynthesisLanguage ;
let voice = this.synthesizer.voice && this.synthesizer.voice !== 'default' ?
const voice = this.synthesizer.voice && this.synthesizer.voice !== 'default' ?
this.synthesizer.voice :
cs.speechSynthesisVoice;
let label = this.synthesizer.label && this.synthesizer.label !== 'default' ?
const label = this.synthesizer.label && this.synthesizer.label !== 'default' ?
this.synthesizer.label :
cs.speechSynthesisLabel;
@@ -232,22 +87,12 @@ class TaskSay extends TtsTask {
this.synthesizer.fallbackLabel :
cs.fallbackSpeechSynthesisLabel;
if (cs.hasFallbackTts) {
vendor = fallbackVendor;
language = fallbackLanguage;
voice = fallbackVoice;
label = fallbackLabel;
}
let filepath;
try {
filepath = await this._synthesizeWithSpecificVendor(cs, ep, {vendor, language, voice, label});
} catch (error) {
if (fallbackVendor && this.isHandledByPrimaryProvider && !cs.hasFallbackTts) {
this.notifyError(
{ msg: 'TTS error', details:`TTS vendor ${vendor} error: ${error}`, failover: 'in progress'});
if (fallbackVendor && this.isHandledByPrimaryProvider) {
this.isHandledByPrimaryProvider = false;
cs.hasFallbackTts = true;
this.logger.info(`Synthesize error, fallback to ${fallbackVendor}`);
filepath = await this._synthesizeWithSpecificVendor(cs, ep,
{
@@ -257,8 +102,6 @@ class TaskSay extends TtsTask {
label: fallbackLabel
});
} else {
this.notifyError(
{ msg: 'TTS error', details:`TTS vendor ${vendor} error: ${error}`, failover: 'not available'});
throw error;
}
}
@@ -344,9 +187,6 @@ class TaskSay extends TtsTask {
if (key.startsWith('variable_tts_')) {
let newKey = key.substring('variable_tts_'.length)
.replace('whisper_', 'whisper.')
.replace('deepgram_', 'deepgram.')
.replace('playht_', 'playht.')
.replace('rimelabs_', 'rimelabs.')
.replace('elevenlabs_', 'elevenlabs.');
if (spanMapping[newKey]) newKey = spanMapping[newKey];
attrs[newKey] = value;
@@ -358,9 +198,6 @@ class TaskSay extends TtsTask {
}
const spanMapping = {
// IMPORTANT!!! THE JAMBONZ WEBAPP DISPLAYS THE TEXT PROPERLY ONLY IF THE SPAN NAME IS 25 CHARACTERS OR FEWER.
// EX: whisper.ratelim_reqs has length 20 <= 25, which is fine
// Elevenlabs
'elevenlabs.reported_latency_ms': 'elevenlabs.latency_ms',
'elevenlabs.request_id': 'elevenlabs.req_id',
'elevenlabs.history_item_id': 'elevenlabs.item_id',
@@ -368,33 +205,11 @@ const spanMapping = {
'elevenlabs.name_lookup_time_ms': 'name_lookup_ms',
'elevenlabs.connect_time_ms': 'connect_ms',
'elevenlabs.final_response_time_ms': 'final_response_ms',
// Whisper
'whisper.reported_latency_ms': 'whisper.latency_ms',
'whisper.request_id': 'whisper.req_id',
'whisper.reported_organization': 'whisper.organization',
'whisper.reported_ratelimit_requests': 'whisper.ratelimit',
'whisper.reported_ratelimit_remaining_requests': 'whisper.ratelimit_remain',
'whisper.reported_ratelimit_reset_requests': 'whisper.ratelimit_reset',
'whisper.name_lookup_time_ms': 'name_lookup_ms',
'whisper.connect_time_ms': 'connect_ms',
'whisper.final_response_time_ms': 'final_response_ms',
// Deepgram
'deepgram.request_id': 'deepgram.req_id',
'deepgram.reported_model_name': 'deepgram.model_name',
'deepgram.reported_model_uuid': 'deepgram.model_uuid',
'deepgram.reported_char_count': 'deepgram.char_count',
'deepgram.name_lookup_time_ms': 'name_lookup_ms',
'deepgram.connect_time_ms': 'connect_ms',
'deepgram.final_response_time_ms': 'final_response_ms',
// Playht
'playht.request_id': 'playht.req_id',
'playht.name_lookup_time_ms': 'name_lookup_ms',
'playht.connect_time_ms': 'connect_ms',
'playht.final_response_time_ms': 'final_response_ms',
// Rimelabs
'rimelabs.name_lookup_time_ms': 'name_lookup_ms',
'rimelabs.connect_time_ms': 'connect_ms',
'rimelabs.final_response_time_ms': 'final_response_ms',
};
module.exports = TaskSay;
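_synthesizeWithSpecificVendor above maps each text element either straight through (when it is a playable URL) or through TTS, then awaits everything at once. A standalone sketch of that split, with synthesize() standing in for the real synthAudio helper:

function isUrl(s) {
  try {
    new URL(s);
    return true;
  } catch (e) {
    return false;
  }
}

async function resolvePlayables(textElements, synthesize) {
  // URLs are played as-is; everything else is synthesized to an audio file
  const pending = textElements.map((t) => (isUrl(t) ? t : synthesize(t)));
  return (await Promise.all(pending)).filter((fp) => fp && fp.length);
}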

View File

@@ -98,13 +98,6 @@ class SttTask extends Task {
this.fallbackLabel = cs.fallbackSpeechRecognizerLabel;
if (this.data.recognizer) this.data.recognizer.fallbackLabel = this.fallbackLabel;
}
// If call is already fallback to 2nd ASR vendor
// use that.
if (cs.hasFallbackAsr) {
this.vendor = this.fallbackVendor;
this.language = this.fallbackLanguage;
this.label = this.fallbackLabel;
}
if (!this.data.recognizer.vendor) {
this.data.recognizer.vendor = this.vendor;
}
@@ -122,11 +115,9 @@ class SttTask extends Task {
try {
this.sttCredentials = await this._initSpeechCredentials(this.cs, this.vendor, this.label);
} catch (error) {
if (this.canFallback) {
await this._initFallback();
this.notifyError({ msg: 'ASR error', details:`Invalid vendor ${this.vendor}`, failover: 'in progress'});
if (this.fallbackVendor && this.isHandledByPrimaryProvider) {
await this._fallback();
} else {
this.notifyError({ msg: 'ASR error', details:`Invalid vendor ${this.vendor}`, failover: 'not available'});
throw error;
}
}
@@ -195,14 +186,9 @@ class SttTask extends Task {
return credentials;
}
get canFallback() {
return this.fallbackVendor && this.isHandledByPrimaryProvider && !this.cs.hasFallbackAsr;
}
async _initFallback() {
async _fallback() {
assert(this.fallbackVendor, 'fallback failed without fallbackVendor configuration');
this.isHandledByPrimaryProvider = false;
this.cs.hasFallbackAsr = true;
this.logger.info(`Failed to use primary STT provider, fallback to ${this.fallbackVendor}`);
this.vendor = this.fallbackVendor;
this.language = this.fallbackLanguage;
@@ -211,8 +197,6 @@ class SttTask extends Task {
this.data.recognizer.language = this.language;
this.data.recognizer.label = this.label;
this.sttCredentials = await this._initSpeechCredentials(this.cs, this.vendor, this.label);
// cleanup previous listener from previous vendor
this.removeCustomEventListeners();
}
async compileHintsForCobalt(ep, hostport, model, token, hints) {
@@ -275,6 +259,7 @@ class SttTask extends Task {
detail: evt.error,
vendor: this.vendor,
}).catch((err) => this.logger.info({err}, `Error generating alert for ${this.vendor} connection failure`));
this.notifyError({msg: 'ASR error', details:`Failed connecting to speech vendor ${this.vendor}: ${evt.error}`});
}
_onVendorConnectFailure(cs, _ep, evt) {
@@ -287,6 +272,7 @@ class SttTask extends Task {
message: `Failed connecting to ${this.vendor} speech recognizer: ${reason}`,
vendor: this.vendor,
}).catch((err) => this.logger.info({err}, `Error generating alert for ${this.vendor} connection failure`));
this.notifyError({msg: 'ASR error', details:`Failed connecting to speech vendor ${this.vendor}: ${reason}`});
}
}

View File

@@ -104,15 +104,12 @@ class TaskTranscribe extends SttTask {
updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid)
.catch(() => {/*already logged error */});
await this.awaitTaskDone();
} catch (err) {
if (!(await this._startFallback(cs, ep, {error: err}))) {
this.logger.info(err, 'TaskTranscribe:exec - error');
this.parentTask && this.parentTask.emit('error', err);
this.removeCustomEventListeners();
return;
}
this.logger.info(err, 'TaskTranscribe:exec - error');
this.parentTask && this.parentTask.emit('error', err);
}
await this.awaitTaskDone();
this.removeCustomEventListeners();
}
@@ -537,8 +534,10 @@ class TaskTranscribe extends SttTask {
}
}
async _startFallback(cs, _ep, evt) {
if (this.canFallback) {
async _onJambonzError(cs, _ep, evt) {
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError');
if (this.paused) return;
if (this.isHandledByPrimaryProvider && this.fallbackVendor) {
_ep.stopTranscription({
vendor: this.vendor,
bugname: this.bugname
@@ -546,57 +545,38 @@ class TaskTranscribe extends SttTask {
.catch((err) => this.logger.error({err}, `Error stopping transcription for primary vendor ${this.vendor}`));
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
try {
this.notifyError({ msg: 'ASR error',
details:`STT Vendor ${this.vendor} error: ${evt.error || evt.reason}`, failover: 'in progress'});
await this._initFallback();
await this._fallback();
let channel = 1;
if (this.ep !== _ep) {
channel = 2;
}
this[`_speechHandlersSet_${channel}`] = false;
this._startTranscribing(cs, _ep, channel);
updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid);
return true;
return;
} catch (error) {
this.notifyError({ msg: 'ASR error',
details:`STT Vendor ${this.vendor} error: ${evt.error || evt.reason}`, failover: 'not available'});
this.logger.info({error}, `Error while falling back to ${this.fallbackVendor}`);
}
} else {
this.logger.debug('transcribe:_startFallback no condition for falling back');
this.notifyError({ msg: 'ASR error',
details:`STT Vendor ${this.vendor} error: ${evt.error || evt.reason}`, failover: 'not available'});
}
return false;
}
const {writeAlerts, AlertType} = cs.srf.locals;
async _onJambonzError(cs, _ep, evt) {
if (this.vendor === 'google' && evt.error_code === 0) {
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError - ignoring google error code 0');
return;
}
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError');
if (this.paused) return;
const {writeAlerts, AlertType} = cs.srf.locals;
if (this.vendor === 'nuance') {
const {code, error} = evt;
if (code === 404 && error === 'No speech') return this._resolve('timeout');
if (code === 413 && error === 'Too much speech') return this._resolve('timeout');
}
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Custom speech vendor ${this.vendor} error: ${evt.error}`,
vendor: this.vendor,
}).catch((err) => this.logger.info({err}, 'Error generating alert for jambonz custom connection failure'));
if (!(await this._startFallback(cs, _ep, evt))) {
this.notifyTaskDone();
if (this.vendor === 'nuance') {
const {code, error} = evt;
//TODO: fix below, currently _resolve does not send timeout events
if (code === 404 && error === 'No speech') return this._resolve('timeout');
if (code === 413 && error === 'Too much speech') return this._resolve('timeout');
}
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Custom speech vendor ${this.vendor} error: ${evt.error}`,
vendor: this.vendor,
}).catch((err) => this.logger.info({err}, 'Error generating alert for jambonz custom connection failure'));
this.notifyError({msg: 'ASR error', details:`Custom speech vendor ${this.vendor} error: ${evt.error}`});
}
}
async _onVendorConnectFailure(cs, _ep, channel, evt) {
_onVendorConnectFailure(cs, _ep, channel, evt) {
super._onVendorConnectFailure(cs, _ep, evt);
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
this.childSpan[channel - 1].span.setAttributes({
@@ -605,9 +585,7 @@ class TaskTranscribe extends SttTask {
});
this.childSpan[channel - 1].span.end();
}
if (!(await this._startFallback(cs, _ep, evt))) {
this.notifyTaskDone();
}
this.notifyTaskDone();
}
_startAsrTimer(channel) {

View File

@@ -100,11 +100,6 @@ const speechMapper = (cred) => {
obj.user_id = o.user_id;
obj.voice_engine = o.voice_engine;
obj.options = o.options;
} else if ('rimelabs' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
obj.model_id = o.model_id;
obj.options = o.options;
} else if ('assemblyai' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;

View File

@@ -690,9 +690,7 @@ module.exports = (logger) => {
...(deepgramOptions.keywords) &&
{DEEPGRAM_SPEECH_KEYWORDS: deepgramOptions.keywords.join(',')},
...('endpointing' in deepgramOptions) &&
{DEEPGRAM_SPEECH_ENDPOINTING: deepgramOptions.endpointing === false ? 'false' : deepgramOptions.endpointing,
// default DEEPGRAM_SPEECH_UTTERANCE_END_MS is 1000; it will be overridden by user settings later if provided.
DEEPGRAM_SPEECH_UTTERANCE_END_MS: deepgramOptions.endpointing === false ? null : 1000},
{DEEPGRAM_SPEECH_ENDPOINTING: deepgramOptions.endpointing === false ? 'false' : deepgramOptions.endpointing},
...(deepgramOptions.utteranceEndMs) &&
{DEEPGRAM_SPEECH_UTTERANCE_END_MS: deepgramOptions.utteranceEndMs},
...(deepgramOptions.vadTurnoff) &&
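The channel-variable builder above relies on a conditional object-spread idiom: spreading false (or any falsy value) into an object literal contributes nothing, so ...(cond) && { KEY: value } adds KEY only when cond is truthy. A tiny standalone illustration:

const deepgramOptions = { endpointing: false };

const channelVars = {
  ...('endpointing' in deepgramOptions) &&
    { DEEPGRAM_SPEECH_ENDPOINTING: deepgramOptions.endpointing === false ? 'false' : deepgramOptions.endpointing },
  ...(deepgramOptions.utteranceEndMs) &&
    { DEEPGRAM_SPEECH_UTTERANCE_END_MS: deepgramOptions.utteranceEndMs }
};

console.log(channelVars); // { DEEPGRAM_SPEECH_ENDPOINTING: 'false' }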

package-lock.json (generated, 28 changed lines)
View File

@@ -15,10 +15,10 @@
"@jambonz/http-health-check": "^0.0.1",
"@jambonz/mw-registrar": "^0.2.7",
"@jambonz/realtimedb-helpers": "^0.8.8",
"@jambonz/speech-utils": "^0.0.51",
"@jambonz/speech-utils": "^0.0.50",
"@jambonz/stats-collector": "^0.1.9",
"@jambonz/time-series": "^0.2.8",
"@jambonz/verb-specifications": "^0.0.70",
"@jambonz/verb-specifications": "^0.0.69",
"@opentelemetry/api": "^1.8.0",
"@opentelemetry/exporter-jaeger": "^1.23.0",
"@opentelemetry/exporter-trace-otlp-http": "^0.50.0",
@@ -2322,9 +2322,9 @@
}
},
"node_modules/@jambonz/speech-utils": {
"version": "0.0.51",
"resolved": "https://registry.npmjs.org/@jambonz/speech-utils/-/speech-utils-0.0.51.tgz",
"integrity": "sha512-3Zk2CERs1PYQiCG08NDMNBbDzBBfPuEwgADTANMP56dd07PpW360ufL8CcQfkBmWKGVma0wevRrv6DQLu2Ifdg==",
"version": "0.0.50",
"resolved": "https://registry.npmjs.org/@jambonz/speech-utils/-/speech-utils-0.0.50.tgz",
"integrity": "sha512-fcMaOuWrBVFh6FKiiurYhnQV71xXmnkyBQmp4OjNd1Zo8Ya+tZMdAJjyHtimjJdgiwJbwDnfdSwKSuz8G9CVkQ==",
"dependencies": {
"@aws-sdk/client-polly": "^3.496.0",
"@aws-sdk/client-sts": "^3.496.0",
@@ -2360,9 +2360,9 @@
}
},
"node_modules/@jambonz/verb-specifications": {
"version": "0.0.70",
"resolved": "https://registry.npmjs.org/@jambonz/verb-specifications/-/verb-specifications-0.0.70.tgz",
"integrity": "sha512-WRSM7+hMnxLv4Fe5PG02m64uKX1AryRLF/yWpv/0PjHJSO4SYXN8CCAhgb1vctq27FMenMtOrSVt0haZeARObg==",
"version": "0.0.69",
"resolved": "https://registry.npmjs.org/@jambonz/verb-specifications/-/verb-specifications-0.0.69.tgz",
"integrity": "sha512-DWnz7XRkCzpzyCVJH7NtScv+wSlUC414/EO8j/gPZs3RT4WBW1OBXwXpfjURHcSrDG7lycz+tfA+2WoUdW/W+g==",
"dependencies": {
"debug": "^4.3.4",
"pino": "^8.8.0"
@@ -11930,9 +11930,9 @@
}
},
"@jambonz/speech-utils": {
"version": "0.0.51",
"resolved": "https://registry.npmjs.org/@jambonz/speech-utils/-/speech-utils-0.0.51.tgz",
"integrity": "sha512-3Zk2CERs1PYQiCG08NDMNBbDzBBfPuEwgADTANMP56dd07PpW360ufL8CcQfkBmWKGVma0wevRrv6DQLu2Ifdg==",
"version": "0.0.50",
"resolved": "https://registry.npmjs.org/@jambonz/speech-utils/-/speech-utils-0.0.50.tgz",
"integrity": "sha512-fcMaOuWrBVFh6FKiiurYhnQV71xXmnkyBQmp4OjNd1Zo8Ya+tZMdAJjyHtimjJdgiwJbwDnfdSwKSuz8G9CVkQ==",
"requires": {
"@aws-sdk/client-polly": "^3.496.0",
"@aws-sdk/client-sts": "^3.496.0",
@@ -11968,9 +11968,9 @@
}
},
"@jambonz/verb-specifications": {
"version": "0.0.70",
"resolved": "https://registry.npmjs.org/@jambonz/verb-specifications/-/verb-specifications-0.0.70.tgz",
"integrity": "sha512-WRSM7+hMnxLv4Fe5PG02m64uKX1AryRLF/yWpv/0PjHJSO4SYXN8CCAhgb1vctq27FMenMtOrSVt0haZeARObg==",
"version": "0.0.69",
"resolved": "https://registry.npmjs.org/@jambonz/verb-specifications/-/verb-specifications-0.0.69.tgz",
"integrity": "sha512-DWnz7XRkCzpzyCVJH7NtScv+wSlUC414/EO8j/gPZs3RT4WBW1OBXwXpfjURHcSrDG7lycz+tfA+2WoUdW/W+g==",
"requires": {
"debug": "^4.3.4",
"pino": "^8.8.0"

View File

@@ -31,10 +31,10 @@
"@jambonz/http-health-check": "^0.0.1",
"@jambonz/mw-registrar": "^0.2.7",
"@jambonz/realtimedb-helpers": "^0.8.8",
"@jambonz/speech-utils": "^0.0.51",
"@jambonz/speech-utils": "^0.0.50",
"@jambonz/stats-collector": "^0.1.9",
"@jambonz/time-series": "^0.2.8",
"@jambonz/verb-specifications": "^0.0.70",
"@jambonz/verb-specifications": "^0.0.69",
"@opentelemetry/api": "^1.8.0",
"@opentelemetry/exporter-jaeger": "^1.23.0",
"@opentelemetry/exporter-trace-otlp-http": "^0.50.0",