Feat/ambient sounds (#678)

* initial support for coaching mode in conference

* add support for answer verb

* updates to rename option to dub

* update verb-specs

* add option to boost audio signal in main channel

* for now, bypass use of streaming apis when generating tts audio for dub tracks

* add nested dub to dial

* add support for filler noise

* kill filler noise when gather killed

* while using sayOnTrack, we have to enclose the say command in double quotes

* disableTtsStreaming = false

* allow transcribe of b leg only on dial verb

* dub.say can either be text or object like say verb with text and synthesizer

* remove loop for sayOnTrack

* update speech-utils

* fixes for testing transcribe verb and support for dub and boostAudioSignal in lcc commands

* add dial.boostAudioSignal

* fix bug where session-level recognizer settings incorrectly overwrite verb-level settings

* update verb specs

* update dial to support array of dub verbs

* fix bug setting gain

* lint

* update speech-utils

* use new endpoint methods for mod_dub

---------

Co-authored-by: Dave Horton <daveh@beachdognet.com>
Author: Hoan Luu Huu
Date: 2024-03-24 03:23:57 +07:00
Committed by: GitHub
Commit: 5b1d8a8ff3 (parent: ec58232b61)
18 changed files with 915 additions and 278 deletions


@@ -14,6 +14,7 @@ const moment = require('moment');
const assert = require('assert');
const sessionTracker = require('./session-tracker');
const makeTask = require('../tasks/make_task');
const parseDecibels = require('../utils/parse-decibels');
const { normalizeJambones } = require('@jambonz/verb-specifications');
const listTaskNames = require('../utils/summarize-tasks');
const HttpRequestor = require('../utils/http-requestor');
@@ -511,6 +512,18 @@ class CallSession extends Emitter {
this.speechSynthesisVoice = this._origSynthesizerSettings.voice;
}
enableFillerNoise(opts) {
this._fillerNoise = opts;
}
disableFillerNoise() {
this._fillerNoise = null;
}
get fillerNoise() {
return this._fillerNoise;
}
async notifyRecordOptions(opts) {
const {action} = opts;
this.logger.debug({opts}, 'CallSession:notifyRecordOptions');
@@ -698,6 +711,8 @@ class CallSession extends Emitter {
}
}
async disableBotMode() {
const task = this.backgroundTaskManager.getTask('bargeIn');
if (task) task.sticky = false;
this.backgroundTaskManager.stop('bargeIn');
}
@@ -1212,7 +1227,15 @@ class CallSession extends Emitter {
this.callInfo.customerData = tag;
}
async _lccMuteStatus(callSid, mute) {
async _lccConferenceParticipantAction(opts) {
const task = this.currentTask;
if (!task || TaskName.Conference !== task.name || !this.isInConference) {
return this.logger.info('CallSession:_lccConferenceParticipantAction - invalid cmd, call is not in conference');
}
task.doConferenceParticipantAction(this, opts);
}
async _lccMuteStatus(mute, callSid) {
// this whole thing requires us to be in a Dial or Conference verb
const task = this.currentTask;
if (!task || ![TaskName.Dial, TaskName.Conference].includes(task.name)) {
@@ -1344,6 +1367,45 @@ Duration=${duration} `
task.whisper(tasks, callSid).catch((err) => this.logger.error(err, 'CallSession:_lccWhisper'));
}
async _lccDub(opts, callSid) {
this.logger.debug({opts}, `CallSession:_lccDub on call_sid ${callSid}`);
const t = normalizeJambones(this.logger, [
{
verb: 'dub',
...opts
}
])
.map((tdata) => makeTask(this.logger, tdata));
const dubTask = t[0];
const ep = this.currentTask?.name === TaskName.Dial && callSid === this.currentTask?.callSid ?
this.currentTask.ep :
this.ep;
const {span, ctx} = this.rootSpan.startChildSpan(`verb:${dubTask.summary}`);
span.setAttributes({'verb.summary': dubTask.summary});
dubTask.span = span;
dubTask.ctx = ctx;
try {
await dubTask.exec(this, {ep});
} catch (err) {
this.logger.error(err, 'CallSession:_lccDub');
}
dubTask.span.end();
}
async _lccBoostAudioSignal(opts, callSid) {
const ep = this.currentTask?.name === TaskName.Dial && callSid === this.currentTask?.callSid ?
this.currentTask.ep :
this.ep;
const db = parseDecibels(opts);
this.logger.info(`_lccBoostAudioSignal: boosting audio signal by ${db} dB`);
const args = [ep.uuid, 'setGain', db];
const response = await ep.api('uuid_dub', args);
this.logger.info({response}, '_lccBoostAudioSignal: response from freeswitch');
}
/**
* perform call hangup by jambonz
*/
@@ -1374,7 +1436,7 @@ Duration=${duration} `
await this._lccTranscribeStatus(opts);
}
else if (opts.mute_status) {
await this._lccMuteStatus(callSid, opts.mute_status === 'mute');
await this._lccMuteStatus(opts.mute_status === 'mute', callSid);
}
else if (opts.conf_hold_status) {
await this._lccConfHoldStatus(opts);
@@ -1394,6 +1456,15 @@ Duration=${duration} `
else if (opts.tag) {
return this._lccTag(opts);
}
else if (opts.conferenceParticipantAction) {
return this._lccConferenceParticipantAction(opts);
}
else if (opts.dub) {
return this._lccDub(opts);
}
else if (opts.boostAudioSignal) {
return this._lccBoostAudioSignal(opts, callSid);
}
// whisper may be the only thing we are asked to do, or it may that
// we are doing a whisper after having muted, paused recording etc..
@@ -1597,12 +1668,16 @@ Duration=${duration} `
this._lccCallDial(data);
break;
case 'dub':
this._lccDub(data, call_sid);
break;
case 'record':
this.notifyRecordOptions(data);
break;
case 'mute:status':
this._lccMuteStatus(call_sid, data);
this._lccMuteStatus(data, call_sid);
break;
case 'conf:mute-status':
@@ -1613,6 +1688,10 @@ Duration=${duration} `
this._lccConfHoldStatus(data);
break;
case 'conf:participant-action':
this._lccConferenceParticipantAction(data);
break;
case 'listen:status':
this._lccListenStatus(data);
break;
@@ -1639,6 +1718,13 @@ Duration=${duration} `
});
break;
case 'boostAudioSignal':
this._lccBoostAudioSignal(data, call_sid)
.catch((err) => {
this.logger.info({err, data}, 'CallSession:_onCommand - error boosting audio signal');
});
break;
default:
this.logger.info(`CallSession:_onCommand - invalid command ${command}`);
}
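For reference, the new opts branches above correspond to live-call-control payloads roughly like the following (a sketch only: the property names come from the checks above, while the track name, URL, tag and gain values are illustrative):

// boost the gain on the main audio channel
{ boostAudioSignal: '+6 dB' }

// add an ambient audio track via the dub machinery
{ dub: { action: 'addTrack', track: 'ambient', play: 'https://example.com/rain.mp3' } }

// act on a conference participant (tag/untag, coach/uncoach, hold, mute)
{ conferenceParticipantAction: { action: 'coach', tag: 'supervisor' } }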

lib/tasks/answer.js (new file, 22 lines)

@@ -0,0 +1,22 @@
const Task = require('./task');
const {TaskName, TaskPreconditions} = require('../utils/constants');
/**
* Answer the call.
* Note: This is rarely used, as the call is typically answered automatically when required by the app,
* but it can be useful to force an answer before a pause in some cases
*/
class TaskAnswer extends Task {
constructor(logger, opts) {
super(logger, opts);
this.preconditions = TaskPreconditions.Endpoint;
}
get name() { return TaskName.Answer; }
async exec(cs) {
super.exec(cs);
}
}
module.exports = TaskAnswer;
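Per the comment above about forcing an answer before a pause, a minimal application using the new verb might look like this (a sketch; the pause length and say text are arbitrary):

const app = [
  { verb: 'answer' },
  { verb: 'pause', length: 2 },
  { verb: 'say', text: 'Thanks for holding.' }
];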


@@ -348,8 +348,14 @@ class Conference extends Task {
Object.assign(opts, {flags: {
...(this.endConferenceOnExit && {endconf: true}),
...(this.startConferenceOnEnter && {moderator: true}),
...(this.joinMuted && {joinMuted: true}),
...((this.joinMuted || this.data.speakOnlyTo) && {joinMuted: true}),
}});
/**
* Note on the above: if we are joining in "coaching" mode (i.e. only going to be heard by a subset of participants)
* then we join muted temporarily, and then unmute ourselves once we have identified the subset of participants
* to whom we will be speaking.
*/
}
try {
@@ -358,6 +364,11 @@ class Conference extends Task {
this.memberId = memberId;
this.confUuid = confUuid;
// set a tag for this member, if provided
if (this.data.memberTag) {
this.setMemberTag(this.data.memberTag);
}
cs.setConferenceDetails(memberId, this.confName, confUuid);
const response = await this.ep.api('conference', [this.confName, 'get', 'count']);
if (response.body && /\d+/.test(response.body)) this.participantCount = parseInt(response.body);
@@ -384,6 +395,9 @@ class Conference extends Task {
.catch((err) => {});
}
if (this.data.speakOnlyTo) {
this.setCoachMode(this.data.speakOnlyTo);
}
} catch (err) {
this.logger.error(err, `Failed to join conference ${this.confName}`);
throw err;
@@ -428,7 +442,15 @@ class Conference extends Task {
}
}
async doConferenceHold(cs, opts) {
doConferenceMute(cs, opts) {
assert (cs.isInConference);
const mute = opts.conf_mute_status === 'mute';
this.ep.api(`conference ${this.confName} ${mute ? 'mute' : 'unmute'} ${this.memberId}`)
.catch((err) => this.logger.info({err}, 'Error muting or unmuting participant'));
}
doConferenceHold(cs, opts) {
assert (cs.isInConference);
const {conf_hold_status, wait_hook} = opts;
@@ -465,6 +487,40 @@ class Conference extends Task {
}
}
async doConferenceParticipantAction(cs, opts) {
const {action, tag} = opts;
switch (action) {
case 'tag':
await this.setMemberTag(tag);
break;
case 'untag':
await this.clearMemberTag();
break;
case 'coach':
await this.setCoachMode(tag);
break;
case 'uncoach':
await this.clearCoachMode();
break;
case 'hold':
this.doConferenceHold(cs, {conf_hold_status: 'hold'});
break;
case 'unhold':
this.doConferenceHold(cs, {conf_hold_status: 'unhold'});
break;
case 'mute':
this.doConferenceMute(cs, {conf_mute_status: 'mute'});
break;
case 'unmute':
this.doConferenceMute(cs, {conf_mute_status: 'unmute'});
break;
default:
this.logger.info(`Conference:doConferenceParticipantState - unhandled action ${action}`);
break;
}
}
async _doWaitHookWhileOnHold(cs, dlg, wait_hook) {
do {
try {
@@ -642,11 +698,14 @@ class Conference extends Task {
}
// conference event handlers
_onAddMember(logger, cs, evt) {
logger.debug({evt}, `Conference:_onAddMember - member added to conference ${this.confName}`);
}
_onDelMember(logger, cs, evt) {
const memberId = parseInt(evt.getHeader('Member-ID')) ;
this.participantCount = parseInt(evt.getHeader('Conference-Size'));
if (memberId === this.memberId) {
this.logger.info(`Conference:_onDelMember - I was dropped from conference ${this.confName}, task is complete`);
logger.info(`Conference:_onDelMember - I was dropped from conference ${this.confName}, task is complete`);
this.replaceEndpointAndEnd(cs);
}
}
@@ -675,6 +734,53 @@ class Conference extends Task {
}
}
async setCoachMode(speakOnlyTo) {
try {
const response = await this.ep.api('conference', [this.confName, 'gettag', speakOnlyTo, 'nomatch']);
this.logger.info(`Conference:_setCoachMode: my audio will only be sent to participants ${response}`);
await this.ep.api('conference', [this.confName, 'relate', this.memberId, response, 'nospeak']);
this.speakOnlyTo = speakOnlyTo;
this.coaching = response;
} catch (err) {
this.logger.error({err, speakOnlyTo}, '_setCoachMode: Error');
}
}
async clearCoachMode() {
try {
if (!this.coaching) {
this.logger.info('Conference:_clearCoachMode: no coaching mode to clear');
return;
}
this.logger.info(`Conference:_clearCoachMode: now sending my audio to all, including ${this.coaching}`);
await this.ep.api('conference', [this.confName, 'relate', this.memberId, this.coaching, 'clear']);
this.speakOnlyTo = null;
this.coaching = null;
} catch (err) {
this.logger.error({err}, '_clearCoachMode: Error');
}
}
async setMemberTag(tag) {
try {
await this.ep.api('conference', [this.confName, 'tag', this.memberId, tag]);
this.logger.info(`Conference:setMemberTag: set tag for ${this.memberId} to ${tag}`);
this.memberTag = tag;
} catch (err) {
this.logger.error({err}, `Error setting tag for ${this.memberId} to ${tag}`);
}
}
async clearMemberTag() {
try {
await this.ep.api('conference', [this.confName, 'tag', this.memberId]);
this.logger.info(`Conference:setMemberTag: clearing tag for ${this.memberId}`);
this.memberTag = null;
} catch (err) {
this.logger.error({err}, `Error clearing tag for ${this.memberId}`);
}
}
}
module.exports = Conference;
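A hedged example of how the new conference options fit together: a supervisor joins a conference and is heard only by participants carrying a given member tag (conference name and tag values are illustrative):

// agent leg: joins normally and identifies itself with a member tag
{ verb: 'conference', name: 'support-1234', memberTag: 'agent' }

// supervisor leg: joins muted, then is related only to members tagged 'agent'
{ verb: 'conference', name: 'support-1234', speakOnlyTo: 'agent' }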


@@ -1,9 +1,11 @@
const Task = require('./task');
const {TaskName, TaskPreconditions} = require('../utils/constants');
const parseDecibels = require('../utils/parse-decibels');
class TaskConfig extends Task {
constructor(logger, opts) {
super(logger, opts);
[
'synthesizer',
'recognizer',
@@ -11,7 +13,9 @@ class TaskConfig extends Task {
'record',
'listen',
'transcribe',
'actionHookDelayAction'
'fillerNoise',
'actionHookDelayAction',
'boostAudioSignal'
].forEach((k) => this[k] = this.data[k] || {});
if ('notifyEvents' in this.data) {
@@ -50,6 +54,7 @@ class TaskConfig extends Task {
this.record?.action ||
this.listen?.url ||
this.data.amd ||
'boostAudioSignal' in this.data ||
this.transcribe?.enable) ?
TaskPreconditions.Endpoint :
TaskPreconditions.None;
@@ -64,6 +69,8 @@ class TaskConfig extends Task {
get hasRecording() { return Object.keys(this.record).length; }
get hasListen() { return Object.keys(this.listen).length; }
get hasTranscribe() { return Object.keys(this.transcribe).length; }
get hasDub() { return Object.keys(this.dub).length; }
get hasFillerNoise() { return Object.keys(this.fillerNoise).length; }
get summary() {
const phrase = [];
@@ -89,9 +96,11 @@ class TaskConfig extends Task {
if (this.hasTranscribe) {
phrase.push(this.transcribe.enable ? `transcribe ${this.transcribe.transcriptionHook}` : 'stop transcribe');
}
if (this.hasFillerNoise) phrase.push(`fillerNoise ${this.fillerNoise.enable ? 'on' : 'off'}`);
if (this.data.amd) phrase.push('enable amd');
if (this.notifyEvents) phrase.push(`event notification ${this.notifyEvents ? 'on' : 'off'}`);
if (this.onHoldMusic) phrase.push(`onHoldMusic: ${this.onHoldMusic}`);
if ('boostAudioSignal' in this.data) phrase.push(`setGain ${this.data.boostAudioSignal}`);
return `${this.name}{${phrase.join(',')}}`;
}
@@ -261,6 +270,24 @@ class TaskConfig extends Task {
if (this.data.sipRequestWithinDialogHook) {
cs.sipRequestWithinDialogHook = this.data.sipRequestWithinDialogHook;
}
if ('boostAudioSignal' in this.data) {
const db = parseDecibels(this.data.boostAudioSignal);
this.logger.info(`Config: boosting audio signal by ${db} dB`);
const args = [ep.uuid, 'setGain', db];
ep.api('uuid_dub', args).catch((err) => {
this.logger.error(err, 'Error boosting audio signal');
});
}
if (this.hasFillerNoise) {
const {enable, ...opts} = this.fillerNoise;
this.logger.info({fillerNoise: this.fillerNoise}, 'Config: fillerNoise');
if (!enable) cs.disableFillerNoise();
else {
cs.enableFillerNoise(opts);
}
}
}
async kill(cs) {
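Putting the new config options together, a payload along these lines would boost the main channel and arm session-level filler noise (a sketch; the dB value, URL and delay are illustrative, property names follow the handlers above):

{
  verb: 'config',
  boostAudioSignal: '-3 dB',
  fillerNoise: {
    enable: true,
    url: 'https://example.com/keyboard-typing.mp3',
    startDelaySecs: 2
  }
}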


@@ -14,6 +14,7 @@ const sessionTracker = require('../session/session-tracker');
const DtmfCollector = require('../utils/dtmf-collector');
const ConfirmCallSession = require('../session/confirm-call-session');
const dbUtils = require('../utils/db-utils');
const parseDecibels = require('../utils/parse-decibels');
const debug = require('debug')('jambonz:feature-server');
const {parseUri} = require('drachtio-srf');
const {ANCHOR_MEDIA_ALWAYS, JAMBONZ_DISABLE_DIAL_PAI_HEADER} = require('../config');
@@ -101,6 +102,7 @@ class TaskDial extends Task {
this.dtmfHook = this.data.dtmfHook;
this.proxy = this.data.proxy;
this.tag = this.data.tag;
this.boostAudioSignal = this.data.boostAudioSignal;
if (this.dtmfHook) {
const {parentDtmfCollector, childDtmfCollector} = parseDtmfOptions(logger, this.data.dtmfCapture || {});
@@ -118,6 +120,9 @@ class TaskDial extends Task {
if (this.data.transcribe) {
this.transcribeTask = makeTask(logger, {'transcribe' : this.data.transcribe}, this);
}
if (this.data.dub && Array.isArray(this.data.dub) && this.data.dub.length > 0) {
this.dubTasks = this.data.dub.map((d) => makeTask(logger, {'dub': d}, this));
}
this.results = {};
this.bridged = false;
@@ -149,6 +154,7 @@ class TaskDial extends Task {
this.cs.onHoldMusic ||
ANCHOR_MEDIA_ALWAYS ||
this.listenTask ||
this.dubTasks ||
this.transcribeTask ||
this.startAmd;
@@ -551,9 +557,9 @@ class TaskDial extends Task {
const str = this.callerId || req.callingNumber || '';
const callingNumber = str.startsWith('+') ? str.substring(1) : str;
const voip_carrier_sid = await lookupCarrierByPhoneNumber(cs.accountSid, callingNumber);
this.logger.info(
`Dial:_attemptCalls: selected ${voip_carrier_sid} for requested phone number: ${callingNumber}`);
if (voip_carrier_sid) {
this.logger.info(
`Dial:_attemptCalls: selected voip_carrier_sid ${voip_carrier_sid} for callingNumber: ${callingNumber}`);
opts.headers['X-Requested-Carrier-Sid'] = voip_carrier_sid;
}
}
@@ -779,6 +785,17 @@ class TaskDial extends Task {
dialCallSid: sd.callSid,
});
if (this.dubTasks) {
for (const dub of this.dubTasks) {
try {
await dub.exec(cs, {ep: sd.ep});
}
catch (err) {
this.logger.error({err}, 'Dial:_selectSingleDial - error executing dubTask');
}
}
}
if (this.parentDtmfCollector) this._installDtmfDetection(cs, cs.dlg);
if (this.childDtmfCollector) this._installDtmfDetection(cs, this.dlg);
if (cs.sipRequestWithinDialogHook) this._initSipIndialogRequestListener(cs, this.dlg);
@@ -793,6 +810,18 @@ class TaskDial extends Task {
}
}
/* boost audio signal if requested */
if (this.boostAudioSignal) {
try {
const db = parseDecibels(this.boostAudioSignal);
this.logger.info(`Dial: boosting audio signal by ${db} dB`);
const args = [this.ep.uuid, 'setGain', db];
await this.ep.api('uuid_dub', args);
} catch (err) {
this.logger.info({err}, 'Dial:_selectSingleDial - Error boosting audio signal');
}
}
/* if we can release the media back to the SBC, do so now */
if (this.canReleaseMedia) setTimeout(this._releaseMedia.bind(this, cs, sd), 200);
}
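A sketch of a dial verb exercising the additions above: an array of nested dub verbs applied to the dialed leg plus a gain boost on the bridged audio (target number, URL and gain values are illustrative):

{
  verb: 'dial',
  target: [{ type: 'phone', number: '15551234567' }],
  boostAudioSignal: '+3 dB',
  dub: [
    { action: 'addTrack', track: 'ambient', play: 'https://example.com/cafe.mp3', loop: true, gain: '-10 dB' }
  ]
}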

lib/tasks/dub.js (new file, 142 lines)

@@ -0,0 +1,142 @@
const {TaskName} = require('../utils/constants');
const TtsTask = require('./tts-task');
const assert = require('assert');
const parseDecibels = require('../utils/parse-decibels');
/**
* Dub task: add or remove additional audio tracks into the call
*/
class TaskDub extends TtsTask {
constructor(logger, opts, parentTask) {
super(logger, opts, parentTask);
this.logger.debug({opts: this.data}, 'TaskDub constructor');
['action', 'track', 'play', 'say', 'loop'].forEach((prop) => {
this[prop] = this.data[prop];
});
this.gain = parseDecibels(this.data.gain);
assert.ok(this.action, 'TaskDub: action is required');
assert.ok(this.track, 'TaskDub: track is required');
}
get name() { return TaskName.Dub; }
async exec(cs, {ep}) {
super.exec(cs);
try {
switch (this.action) {
case 'addTrack':
await this._addTrack(cs, ep);
break;
case 'removeTrack':
await this._removeTrack(cs, ep);
break;
case 'silenceTrack':
await this._silenceTrack(cs, ep);
break;
case 'playOnTrack':
await this._playOnTrack(cs, ep);
break;
case 'sayOnTrack':
await this._sayOnTrack(cs, ep);
break;
default:
throw new Error(`TaskDub: unsupported action ${this.action}`);
}
} catch (err) {
this.logger.error(err, 'Error executing dub task');
}
}
async _addTrack(cs, ep) {
this.logger.info(`adding track: ${this.track}`);
await ep.dub({
action: 'addTrack',
track: this.track
});
if (this.play) await this._playOnTrack(cs, ep);
else if (this.say) await this._sayOnTrack(cs, ep);
}
async _removeTrack(_cs, ep) {
this.logger.info(`removing track: ${this.track}`);
await ep.dub({
action: 'removeTrack',
track: this.track
});
}
async _silenceTrack(_cs, ep) {
this.logger.info(`silencing track: ${this.track}`);
await ep.dub({
action: 'silenceTrack',
track: this.track
});
}
async _playOnTrack(_cs, ep) {
this.logger.info(`playing on track: ${this.track}`);
await ep.dub({
action: 'playOnTrack',
track: this.track,
play: this.play,
loop: this.loop ? 'loop' : 'once',
gain: this.gain
});
}
async _sayOnTrack(cs, ep) {
const text = this.say.text || this.say;
this.synthesizer = this.say.synthesizer || {};
if (Object.keys(this.synthesizer).length) {
this.logger.info({synthesizer: this.synthesizer},
`saying on track ${this.track}: ${text} with synthesizer options`);
}
else {
this.logger.info(`saying on track ${this.track}: ${text}`);
}
this.synthesizer = this.synthesizer || {};
this.text = [text];
const vendor = this.synthesizer.vendor && this.synthesizer.vendor !== 'default' ?
this.synthesizer.vendor :
cs.speechSynthesisVendor;
const language = this.synthesizer.language && this.synthesizer.language !== 'default' ?
this.synthesizer.language :
cs.speechSynthesisLanguage ;
const voice = this.synthesizer.voice && this.synthesizer.voice !== 'default' ?
this.synthesizer.voice :
cs.speechSynthesisVoice;
const label = this.synthesizer.label && this.synthesizer.label !== 'default' ?
this.synthesizer.label :
cs.speechSynthesisLabel;
const disableTtsStreaming = false;
const filepath = await this._synthesizeWithSpecificVendor(cs, ep, {
vendor, language, voice, label, disableTtsStreaming
});
assert.ok(filepath.length === 1, 'TaskDub: no filepath returned from synthesizer');
const path = filepath[0];
if (!path.startsWith('say:{')) {
/* we have a local file of mp3 or r8 of synthesized speech audio to play */
this.play = path;
await this._playOnTrack(cs, ep);
}
else {
await ep.dub({
action: 'sayOnTrack',
track: this.track,
say: path,
gain: this.gain
});
}
}
}
module.exports = TaskDub;
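For orientation, verb payloads that would drive the actions above might look like this (a sketch; track names, URL, text and synthesizer values are illustrative, and, per the commit notes, say may be plain text or an object with text and synthesizer):

// add a track and loop an audio file on it, attenuated
{ verb: 'dub', action: 'addTrack', track: 'ambient', play: 'https://example.com/rain.mp3', loop: true, gain: '-6 dB' }

// speak synthesized audio onto an existing track
{
  verb: 'dub',
  action: 'sayOnTrack',
  track: 'coach',
  say: {
    text: 'You are doing great, keep going',
    synthesizer: { vendor: 'default', language: 'default', voice: 'default' }
  }
}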


@@ -27,7 +27,7 @@ class TaskGather extends SttTask {
[
'finishOnKey', 'input', 'numDigits', 'minDigits', 'maxDigits',
'interDigitTimeout', 'partialResultHook', 'bargein', 'dtmfBargein',
'speechTimeout', 'timeout', 'say', 'play', 'actionHookDelayAction'
'speechTimeout', 'timeout', 'say', 'play', 'actionHookDelayAction', 'fillerNoise'
].forEach((k) => this[k] = this.data[k]);
// gather default input is digits
@@ -91,6 +91,18 @@ class TaskGather extends SttTask {
(this.playTask && this.playTask.earlyMedia);
}
get hasFillerNoise() {
return Object.keys(this.fillerNoise).length > 0 && this.fillerNoise.enabled !== false;
}
get fillerNoiseUrl() {
return this.fillerNoise.url;
}
get fillerNoiseStartDelaySecs() {
return this.fillerNoise.startDelaySecs;
}
get summary() {
let s = `${this.name}{`;
if (this.input.length === 2) s += 'inputs=[speech,digits],';
@@ -111,6 +123,11 @@ class TaskGather extends SttTask {
await super.exec(cs, {ep});
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
this.fillerNoise = {
...(cs.fillerNoise || {}),
...(this.fillerNoise || {})
};
if (cs.hasGlobalSttHints && !this.maskGlobalSttHints) {
const {hints, hintsBoost} = cs.globalSttHints;
const setOfHints = new Set((this.data.recognizer.hints || [])
@@ -255,6 +272,7 @@ class TaskGather extends SttTask {
super.kill(cs);
this._killAudio(cs);
this._killActionHookDelayAction();
this._clearFillerNoiseTimer();
this.ep.removeAllListeners('dtmf');
clearTimeout(this.interDigitTimer);
this._clearAsrTimer();
@@ -674,9 +692,29 @@ class TaskGather extends SttTask {
this._finalAsrTimer = null;
}
_startFillerNoiseTimer() {
this._clearFillerNoiseTimer();
this._fillerNoiseTimer = setTimeout(() => {
this.logger.debug('Gather:_startFillerNoiseTimer - playing filler noise');
this.ep?.play(this.fillerNoise.url);
}, this.fillerNoise.startDelaySecs * 1000);
}
_clearFillerNoiseTimer() {
if (this._fillerNoiseTimer) clearTimeout(this._fillerNoiseTimer);
this._fillerNoiseTimer = null;
}
_killFillerNoise() {
if (this._fillerNoiseTimer) {
this.logger.debug('Gather:_killFillerNoise');
this.ep?.api('uuid_break', this.ep.uuid);
}
}
_killAudio(cs) {
if (!this.sayTask && !this.playTask && this.bargein) {
if (this.ep?.connected && !this.playComplete) {
if (this.hasFillerNoise || (!this.sayTask && !this.playTask && this.bargein)) {
if (this.ep?.connected && (!this.playComplete || this.hasFillerNoise)) {
this.logger.debug('Gather:_killAudio: killing playback of any audio');
this.playComplete = true;
this.ep.api('uuid_break', this.ep.uuid)
@@ -1004,6 +1042,16 @@ class TaskGather extends SttTask {
this._startActionHookNoResponseGiveUpTimer();
}
if (this.hasFillerNoise && (reason.startsWith('dtmf') || reason.startsWith('speech'))) {
if (this.fillerNoiseStartDelaySecs > 0) {
this._startFillerNoiseTimer();
}
else {
this.logger.debug(`TaskGather:_resolve - playing filler noise: ${this.fillerNoiseUrl}`);
this.ep.play(this.fillerNoiseUrl);
}
}
try {
if (reason.startsWith('dtmf')) {
if (this.parentTask) this.parentTask.emit('dtmf', evt);
@@ -1038,6 +1086,7 @@ class TaskGather extends SttTask {
// Gather got response from hook, cancel all delay timers if there is any
this._clearActionHookNoResponseTimer();
this._clearActionHookNoResponseGiveUpTimer();
this._clearFillerNoiseTimer();
this.notifyTaskDone();
}
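A hedged example of verb-level filler noise on gather, which merges over any session-level settings established by config (the URL and delay are illustrative):

{
  verb: 'gather',
  input: ['speech'],
  actionHook: '/collect',
  fillerNoise: {
    enable: true,
    url: 'https://example.com/keyboard-typing.mp3',
    startDelaySecs: 1
  }
}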


@@ -14,6 +14,9 @@ function makeTask(logger, obj, parent) {
}
validateVerb(name, data, logger);
switch (name) {
case TaskName.Answer:
const TaskAnswer = require('./answer');
return new TaskAnswer(logger, data, parent);
case TaskName.SipDecline:
const TaskSipDecline = require('./sip_decline');
return new TaskSipDecline(logger, data, parent);
@@ -41,6 +44,9 @@ function makeTask(logger, obj, parent) {
case TaskName.Dtmf:
const TaskDtmf = require('./dtmf');
return new TaskDtmf(logger, data, parent);
case TaskName.Dub:
const TaskDub = require('./dub');
return new TaskDub(logger, data, parent);
case TaskName.Enqueue:
const TaskEnqueue = require('./enqueue');
return new TaskEnqueue(logger, data, parent);


@@ -1,4 +1,4 @@
const Task = require('./task');
const TtsTask = require('./tts-task');
const {TaskName, TaskPreconditions} = require('../utils/constants');
const pollySSMLSplit = require('polly-ssml-split');
@@ -29,9 +29,9 @@ const parseTextFromSayString = (text) => {
return text.slice(closingBraceIndex + 1);
};
class TaskSay extends Task {
class TaskSay extends TtsTask {
constructor(logger, opts, parentTask) {
super(logger, opts);
super(logger, opts, parentTask);
this.preconditions = TaskPreconditions.Endpoint;
this.text = (Array.isArray(this.data.text) ? this.data.text : [this.data.text])
@@ -39,10 +39,6 @@ class TaskSay extends Task {
.flat();
this.loop = this.data.loop || 1;
this.earlyMedia = this.data.earlyMedia === true || (parentTask && parentTask.earlyMedia);
this.synthesizer = this.data.synthesizer || {};
this.disableTtsCache = this.data.disableTtsCache;
this.options = this.synthesizer.options || {};
this.isHandledByPrimaryProvider = true;
}
@@ -56,156 +52,6 @@ class TaskSay extends Task {
return `${this.name}{${this.text[0]}}`;
}
_validateURL(urlString) {
try {
new URL(urlString);
return true;
} catch (e) {
return false;
}
}
async _synthesizeWithSpecificVendor(cs, ep, {vendor, language, voice, label, preCache = false}) {
const {srf, accountSid:account_sid} = cs;
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, srf);
const {writeAlerts, AlertType, stats} = srf.locals;
const {synthAudio} = srf.locals.dbHelpers;
const engine = this.synthesizer.engine || 'standard';
const salt = cs.callSid;
let credentials = cs.getSpeechCredentials(vendor, 'tts', label);
/* parse Nuance voices into name and model */
let model;
if (vendor === 'nuance' && voice) {
const arr = /([A-Za-z-]*)\s+-\s+(enhanced|standard)/.exec(voice);
if (arr) {
voice = arr[1];
model = arr[2];
}
} else if (vendor === 'deepgram') {
model = voice;
}
/* allow for microsoft custom region voice and api_key to be specified as an override */
if (vendor === 'microsoft' && this.options.deploymentId) {
credentials = credentials || {};
credentials.use_custom_tts = true;
credentials.custom_tts_endpoint = this.options.deploymentId;
credentials.api_key = this.options.apiKey || credentials.apiKey;
credentials.region = this.options.region || credentials.region;
voice = this.options.voice || voice;
} else if (vendor === 'elevenlabs') {
credentials = credentials || {};
credentials.model_id = this.options.model_id || credentials.model_id;
credentials.voice_settings = this.options.voice_settings || {};
credentials.optimize_streaming_latency = this.options.optimize_streaming_latency
|| credentials.optimize_streaming_latency;
voice = this.options.voice_id || voice;
}
ep.set({
tts_engine: vendor,
tts_voice: voice,
cache_speech_handles: 1,
}).catch((err) => this.logger.info({err}, 'Error setting tts_engine on endpoint'));
if (!preCache) this.logger.info({vendor, language, voice, model}, 'TaskSay:exec');
try {
if (!credentials) {
writeAlerts({
account_sid,
alert_type: AlertType.TTS_NOT_PROVISIONED,
vendor
}).catch((err) => this.logger.info({err}, 'Error generating alert for no tts'));
this.notifyError({
msg: 'TTS error',
details:`No speech credentials provisioned for selected vendor ${vendor}`
});
throw new Error('no provisioned speech credentials for TTS');
}
// synthesize all of the text elements
let lastUpdated = false;
/* produce an audio segment from the provided text */
const generateAudio = async(text) => {
if (this.killed) return;
if (text.startsWith('silence_stream://')) return text;
/* otel: trace time for tts */
if (!preCache) {
const {span} = this.startChildSpan('tts-generation', {
'tts.vendor': vendor,
'tts.language': language,
'tts.voice': voice
});
this.otelSpan = span;
}
try {
const {filePath, servedFromCache, rtt} = await synthAudio(stats, {
account_sid,
text,
vendor,
language,
voice,
engine,
model,
salt,
credentials,
options: this.options,
disableTtsCache : this.disableTtsCache,
preCache
});
if (!filePath.startsWith('say:')) {
this.logger.debug(`file ${filePath}, served from cache ${servedFromCache}`);
if (filePath) cs.trackTmpFile(filePath);
if (this.otelSpan) {
this.otelSpan.setAttributes({'tts.cached': servedFromCache});
this.otelSpan.end();
this.otelSpan = null;
}
if (!servedFromCache && !lastUpdated) {
lastUpdated = true;
updateSpeechCredentialLastUsed(credentials.speech_credential_sid).catch(() => {/* logged error */});
}
if (!servedFromCache && rtt && !preCache) {
this.notifyStatus({
event: 'synthesized-audio',
vendor,
language,
characters: text.length,
elapsedTime: rtt
});
}
}
else {
this.logger.debug('a streaming tts api will be used');
const modifiedPath = filePath.replace('say:{', `say:{session-uuid=${ep.uuid},`);
return modifiedPath;
}
return filePath;
} catch (err) {
this.logger.info({err}, 'Error synthesizing tts');
if (this.otelSpan) this.otelSpan.end();
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.TTS_FAILURE,
vendor,
detail: err.message
}).catch((err) => this.logger.info({err}, 'Error generating alert for tts failure'));
this.notifyError({msg: 'TTS error', details: err.message || err});
throw err;
}
};
const arr = this.text.map((t) => (this._validateURL(t) ? t : generateAudio(t)));
return (await Promise.all(arr)).filter((fp) => fp && fp.length);
} catch (err) {
this.logger.info(err, 'TaskSay:exec error');
throw err;
}
}
async exec(cs, {ep}) {
const {srf, accountSid:account_sid} = cs;
const {writeAlerts, AlertType} = srf.locals;


@@ -56,24 +56,20 @@ class SttTask extends Task {
super.exec(cs);
this.ep = ep;
this.ep2 = ep2;
// copy all value from config verb to this object.
// use session preferences if we don't have specific verb-level settings.
if (cs.recognizer) {
for (const k in cs.recognizer) {
if (Array.isArray(this.data.recognizer[k]) ||
Array.isArray(cs.recognizer[k])) {
this.data.recognizer[k] = [
...this.data.recognizer[k],
...cs.recognizer[k]
];
} else if (typeof this.data.recognizer[k] === 'object' ||
typeof cs.recognizer[k] === 'object'
) {
this.data.recognizer[k] = {
...this.data.recognizer[k],
...cs.recognizer[k]
};
const newValue = this.data.recognizer && this.data.recognizer[k] !== undefined ?
this.data.recognizer[k] :
cs.recognizer[k];
if (Array.isArray(newValue)) {
this.data.recognizer[k] = [...(this.data.recognizer[k] || []), ...cs.recognizer[k]];
} else if (typeof newValue === 'object' && newValue !== null) {
this.data.recognizer[k] = { ...(this.data.recognizer[k] || {}), ...cs.recognizer[k] };
} else {
this.data.recognizer[k] = cs.recognizer[k] || this.data.recognizer[k];
this.data.recognizer[k] = newValue;
}
}
}
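The effect of the reworked merge, in brief: verb-level recognizer settings now take precedence over the session defaults set by config, array-valued settings are concatenated, and session values only fill in what the verb leaves unset. A sketch (the vendor, language and hints values are illustrative):

// session defaults, e.g. set earlier by a config verb
cs.recognizer = { vendor: 'google', language: 'en-US', hints: ['billing'] };

// verb-level settings on this gather/transcribe
this.data.recognizer = { language: 'de-DE', hints: ['rechnung'] };

// after the loop above: language stays 'de-DE' (verb wins),
// vendor falls back to 'google', and hints becomes ['rechnung', 'billing']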


@@ -32,8 +32,22 @@ class TaskTranscribe extends SttTask {
}
/* for nested transcribe in dial, unless the app explicitly says so we want to transcribe both legs */
if (this.parentTask?.name === TaskName.Dial && this.separateRecognitionPerChannel !== false) {
this.separateRecognitionPerChannel = true;
if (this.parentTask?.name === TaskName.Dial) {
if (this.data.channel === 1 || this.data.channel === 2) {
/* transcribe only the channel specified */
this.separateRecognitionPerChannel = false;
this.channel = this.data.channel;
logger.debug(`TaskTranscribe: transcribing only channel ${this.channel} in the Dial verb`);
}
else if (this.separateRecognitionPerChannel !== false) {
this.separateRecognitionPerChannel = true;
}
else {
this.channel = 1;
}
}
else {
this.channel = 1;
}
this.childSpan = [null, null];
@@ -51,6 +65,14 @@ class TaskTranscribe extends SttTask {
get name() { return TaskName.Transcribe; }
get transcribing1() {
return this.channel === 1 || this.separateRecognitionPerChannel;
}
get transcribing2() {
return this.channel === 2 || this.separateRecognitionPerChannel && this.ep2;
}
async exec(cs, {ep, ep2}) {
await super.exec(cs, {ep, ep2});
@@ -73,8 +95,10 @@ class TaskTranscribe extends SttTask {
}
try {
await this._startTranscribing(cs, ep, 1);
if (this.separateRecognitionPerChannel && ep2) {
if (this.transcribing1) {
await this._startTranscribing(cs, ep, 1);
}
if (this.transcribing2) {
await this._startTranscribing(cs, ep2, 2);
}
@@ -91,7 +115,7 @@ class TaskTranscribe extends SttTask {
async _stopTranscription() {
let stopTranscription = false;
if (this.ep?.connected) {
if (this.transcribing1 && this.ep?.connected) {
stopTranscription = true;
this.ep.stopTranscription({
vendor: this.vendor,
@@ -99,7 +123,7 @@ class TaskTranscribe extends SttTask {
})
.catch((err) => this.logger.info(err, 'Error TaskTranscribe:kill'));
}
if (this.separateRecognitionPerChannel && this.ep2 && this.ep2.connected) {
if (this.transcribing2 && this.ep2.connected) {
stopTranscription = true;
this.ep2.stopTranscription({vendor: this.vendor, bugname: this.bugname})
.catch((err) => this.logger.info(err, 'Error TaskTranscribe:kill'));
@@ -128,10 +152,8 @@ class TaskTranscribe extends SttTask {
break;
case TranscribeStatus.Resume:
this.paused = false;
await this._startTranscribing(this.cs, this.ep, 1);
if (this.separateRecognitionPerChannel && this.ep2) {
await this._startTranscribing(this.cs, this.ep2, 2);
}
if (this.transcribing1) await this._startTranscribing(this.cs, this.ep, 1);
if (this.transcribing2) await this._startTranscribing(this.cs, this.ep2, 2);
break;
}
}
@@ -294,7 +316,7 @@ class TaskTranscribe extends SttTask {
vendor: this.vendor,
interim: this.interim ? true : false,
locale: this.language,
channels: /*this.separateRecognitionPerChannel ? 2 : */ 1,
channels: 1,
bugname: this.bugname,
hostport: this.hostport
});
@@ -303,12 +325,12 @@ class TaskTranscribe extends SttTask {
async _onTranscription(cs, ep, channel, evt, fsEvent) {
// make sure this is not a transcript from answering machine detection
const bugname = fsEvent.getHeader('media-bugname');
const finished = fsEvent.getHeader('transcription-session-finished');
if (bugname && this.bugname !== bugname) return;
if (this.paused) {
this.logger.debug({evt}, 'TaskTranscribe:_onTranscription - paused, ignoring transcript');
}
if (this.vendor === 'ibm' && evt?.state === 'listening') return;
if (this.vendor === 'deepgram' && evt.type === 'UtteranceEnd') {
@@ -319,8 +341,9 @@ class TaskTranscribe extends SttTask {
else {
this.logger.debug('Gather:_onTranscription - got UtteranceEnd event from deepgram, return buffered transcript');
evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language, this.vendor);
evt.is_final = true;
this._bufferedTranscripts = [];
this._resolve('speech', evt);
this._resolve(channel, evt);
}
return;
}
@@ -334,31 +357,87 @@ class TaskTranscribe extends SttTask {
return;
}
if (evt.alternatives[0]?.transcript === '' && !cs.callGone && !this.killed) {
if (['microsoft', 'deepgram'].includes(this.vendor)) {
this.logger.info({evt}, 'TaskTranscribe:_onTranscription - got empty transcript, continue listening');
let emptyTranscript = false;
if (evt.is_final) {
if (evt.alternatives[0].transcript === '' && !cs.callGone && !this.killed) {
emptyTranscript = true;
if (finished === 'true' &&
['microsoft', 'deepgram'].includes(this.vendor) &&
this._bufferedTranscripts.length === 0) {
this.logger.debug({evt}, 'TaskGather:_onTranscription - got empty transcript from old gather, disregarding');
return;
}
else if (this.vendor !== 'deepgram') {
this.logger.info({evt}, 'TaskGather:_onTranscription - got empty transcript, continue listening');
return;
}
else if (this.isContinuousAsr) {
this.logger.info({evt},
'TaskGather:_onTranscription - got empty deepgram transcript during continous asr, continue listening');
return;
}
else if (this.vendor === 'deepgram' && this._bufferedTranscripts.length > 0) {
this.logger.info({evt},
'TaskGather:_onTranscription - got empty transcript from deepgram, return the buffered transcripts');
}
}
if (this.isContinuousAsr) {
/* append the transcript and start listening again for asrTimeout */
const t = evt.alternatives[0].transcript;
if (t) {
/* remove trailing punctuation */
if (/[,;:\.!\?]$/.test(t)) {
this.logger.debug('TaskGather:_onTranscription - removing trailing punctuation');
evt.alternatives[0].transcript = t.slice(0, -1);
}
}
this.logger.info({evt}, 'TaskGather:_onTranscription - got transcript during continous asr');
this._bufferedTranscripts.push(evt);
this._startAsrTimer(channel);
/* some STT engines will keep listening after a final response, so no need to restart */
if (!['soniox', 'aws', 'microsoft', 'deepgram'].includes(this.vendor)) this._startTranscribing(cs, ep, channel);
}
else {
this.logger.info({evt}, 'TaskTranscribe:_onTranscription - got empty transcript, listen again');
this._transcribe(ep);
}
return;
}
if (this.vendor === 'soniox') {
/* compile transcripts into one */
this._sonioxTranscripts.push(evt.vendor.finalWords);
evt = this.compileSonioxTranscripts(this._sonioxTranscripts, 1, this.language);
this._sonioxTranscripts = [];
}
else if (this.vendor === 'deepgram') {
/* compile transcripts into one */
if (!emptyTranscript) this._bufferedTranscripts.push(evt);
if (this.vendor === 'soniox') {
/* compile transcripts into one */
this._sonioxTranscripts.push(evt.vendor.finalWords);
if (evt.is_final) {
evt = this.compileSonioxTranscripts(this._sonioxTranscripts, 1, this.language);
this._sonioxTranscripts = [];
/* deepgram can send an empty and final transcript; only if we have any buffered should we resolve */
if (this._bufferedTranscripts.length === 0) return;
evt = this.consolidateTranscripts(this._bufferedTranscripts, channel, this.language);
this._bufferedTranscripts = [];
}
/* here is where we return a final transcript */
this.logger.debug({evt}, 'TaskTranscribe:_onTranscription - sending final transcript');
this._resolve(channel, evt);
/* some STT engines will keep listening after a final response, so no need to restart */
if (!['soniox', 'aws', 'microsoft', 'deepgram'].includes(this.vendor)) this._startTranscribing(cs, ep, channel);
}
}
else {
/* interim transcript */
if (this.isContinuousAsr && evt.is_final) {
this._bufferedTranscripts.push(evt);
this._startAsrTimer(channel);
} else {
await this._resolve(channel, evt);
/* deepgram can send a non-final transcript but with words that are final, so we need to buffer */
if (this.vendor === 'deepgram') {
const originalEvent = evt.vendor.evt;
if (originalEvent.is_final && evt.alternatives[0].transcript !== '') {
this.logger.debug({evt}, 'Gather:_onTranscription - buffering a completed (partial) deepgram transcript');
this._bufferedTranscripts.push(evt);
}
}
if (this.interim) {
this.logger.debug({evt}, 'TaskTranscribe:_onTranscription - sending interim transcript');
this._resolve(channel, evt);
}
}
}
@@ -479,6 +558,7 @@ class TaskTranscribe extends SttTask {
if (this.vendor === 'nuance') {
const {code, error} = evt;
//TODO: fix below, currently _resolve does not send timeout events
if (code === 404 && error === 'No speech') return this._resolve('timeout');
if (code === 413 && error === 'Too much speech') return this._resolve('timeout');
}
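With the channel option above, a nested transcribe can now target a single leg of a dial, presumably the dialed (B) party when channel 2 is requested (a sketch; the target, hook and recognizer values are illustrative):

{
  verb: 'dial',
  target: [{ type: 'phone', number: '15551234567' }],
  transcribe: {
    channel: 2,
    transcriptionHook: '/transcripts',
    recognizer: { vendor: 'default', language: 'en-US' }
  }
}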

lib/tasks/tts-task.js (new file, 180 lines)

@@ -0,0 +1,180 @@
const Task = require('./task');
const { TaskPreconditions } = require('../utils/constants');
class TtsTask extends Task {
constructor(logger, data, parentTask) {
super(logger, data);
this.parentTask = parentTask;
this.preconditions = TaskPreconditions.Endpoint;
this.earlyMedia = this.data.earlyMedia === true || (parentTask && parentTask.earlyMedia);
this.synthesizer = this.data.synthesizer || {};
this.disableTtsCache = this.data.disableTtsCache;
this.options = this.synthesizer.options || {};
}
async exec(cs) {
super.exec(cs);
}
async _synthesizeWithSpecificVendor(cs, ep, {
vendor,
language,
voice,
label,
disableTtsStreaming,
preCache
}) {
const {srf, accountSid:account_sid} = cs;
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, srf);
const {writeAlerts, AlertType, stats} = srf.locals;
const {synthAudio} = srf.locals.dbHelpers;
const engine = this.synthesizer.engine || 'standard';
const salt = cs.callSid;
let credentials = cs.getSpeechCredentials(vendor, 'tts', label);
/* parse Nuance voices into name and model */
let model;
if (vendor === 'nuance' && voice) {
const arr = /([A-Za-z-]*)\s+-\s+(enhanced|standard)/.exec(voice);
if (arr) {
voice = arr[1];
model = arr[2];
}
} else if (vendor === 'deepgram') {
model = voice;
}
/* allow for microsoft custom region voice and api_key to be specified as an override */
if (vendor === 'microsoft' && this.options.deploymentId) {
credentials = credentials || {};
credentials.use_custom_tts = true;
credentials.custom_tts_endpoint = this.options.deploymentId;
credentials.api_key = this.options.apiKey || credentials.apiKey;
credentials.region = this.options.region || credentials.region;
voice = this.options.voice || voice;
} else if (vendor === 'elevenlabs') {
credentials = credentials || {};
credentials.model_id = this.options.model_id || credentials.model_id;
credentials.voice_settings = this.options.voice_settings || {};
credentials.optimize_streaming_latency = this.options.optimize_streaming_latency
|| credentials.optimize_streaming_latency;
voice = this.options.voice_id || voice;
}
ep.set({
tts_engine: vendor,
tts_voice: voice,
cache_speech_handles: 1,
}).catch((err) => this.logger.info({err}, `${this.name}: Error setting tts_engine on endpoint`));
if (!preCache) this.logger.info({vendor, language, voice, model}, `${this.name}:exec`);
try {
if (!credentials) {
writeAlerts({
account_sid,
alert_type: AlertType.TTS_NOT_PROVISIONED,
vendor
}).catch((err) => this.logger.info({err}, 'Error generating alert for no tts'));
this.notifyError({
msg: 'TTS error',
details:`No speech credentials provisioned for selected vendor ${vendor}`
});
throw new Error('no provisioned speech credentials for TTS');
}
// synthesize all of the text elements
let lastUpdated = false;
/* produce an audio segment from the provided text */
const generateAudio = async(text) => {
if (this.killed) return;
if (text.startsWith('silence_stream://')) return text;
/* otel: trace time for tts */
if (!preCache && !this.parentTask) {
const {span} = this.startChildSpan('tts-generation', {
'tts.vendor': vendor,
'tts.language': language,
'tts.voice': voice
});
this.otelSpan = span;
}
try {
const {filePath, servedFromCache, rtt} = await synthAudio(stats, {
account_sid,
text,
vendor,
language,
voice,
engine,
model,
salt,
credentials,
options: this.options,
disableTtsCache : this.disableTtsCache,
disableTtsStreaming,
preCache
});
if (!filePath.startsWith('say:')) {
this.logger.debug(`file ${filePath}, served from cache ${servedFromCache}`);
if (filePath) cs.trackTmpFile(filePath);
if (this.otelSpan) {
this.otelSpan.setAttributes({'tts.cached': servedFromCache});
this.otelSpan.end();
this.otelSpan = null;
}
if (!servedFromCache && !lastUpdated) {
lastUpdated = true;
updateSpeechCredentialLastUsed(credentials.speech_credential_sid).catch(() => {/* logged error */});
}
if (!servedFromCache && rtt && !preCache) {
this.notifyStatus({
event: 'synthesized-audio',
vendor,
language,
characters: text.length,
elapsedTime: rtt
});
}
}
else {
this.logger.debug('a streaming tts api will be used');
const modifiedPath = filePath.replace('say:{', `say:{session-uuid=${ep.uuid},`);
return modifiedPath;
}
return filePath;
} catch (err) {
this.logger.info({err}, 'Error synthesizing tts');
if (this.otelSpan) this.otelSpan.end();
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.TTS_FAILURE,
vendor,
detail: err.message
}).catch((err) => this.logger.info({err}, 'Error generating alert for tts failure'));
this.notifyError({msg: 'TTS error', details: err.message || err});
throw err;
}
};
const arr = this.text.map((t) => (this._validateURL(t) ? t : generateAudio(t)));
return (await Promise.all(arr)).filter((fp) => fp && fp.length);
} catch (err) {
this.logger.info(err, 'TaskSay:exec error');
throw err;
}
}
_validateURL(urlString) {
try {
new URL(urlString);
return true;
} catch (e) {
return false;
}
}
}
module.exports = TtsTask;


@@ -26,25 +26,25 @@ class BackgroundTaskManager extends Emitter {
return this.tasks.size;
}
async newTask(type, taskOpts) {
this.logger.info({taskOpts}, `initiating Background task ${type}`);
async newTask(type, opts) {
this.logger.info({opts}, `initiating Background task ${type}`);
if (this.tasks.has(type)) {
this.logger.info(`Background task ${type} is running, skiped`);
this.logger.info(`Background task ${type} is running, skipped`);
return;
}
let task;
switch (type) {
case 'listen':
task = await this._initListen(taskOpts);
task = await this._initListen(opts);
break;
case 'bargeIn':
task = await this._initBargeIn(taskOpts);
task = await this._initBargeIn(opts);
break;
case 'record':
task = await this._initRecord();
break;
case 'transcribe':
task = await this._initTranscribe(taskOpts);
task = await this._initTranscribe(opts);
break;
default:
break;
@@ -64,8 +64,6 @@ class BackgroundTaskManager extends Emitter {
task.kill();
// Remove task from managed List
this.tasks.delete(type);
} else {
this.logger.debug(`stopping background task, ${type} is not running, skipped`);
}
}


@@ -1,12 +1,13 @@
{
"TaskName": {
"Cognigy": "cognigy",
"Answer": "answer",
"Conference": "conference",
"Config": "config",
"Dequeue": "dequeue",
"Dial": "dial",
"Dialogflow": "dialogflow",
"Dtmf": "dtmf",
"Dub": "dub",
"Enqueue": "enqueue",
"Gather": "gather",
"Hangup": "hangup",
@@ -29,7 +30,7 @@
"Tag": "tag",
"Transcribe": "transcribe"
},
"AllowedSipRecVerbs": ["config", "gather", "transcribe", "listen", "tag"],
"AllowedSipRecVerbs": ["answer", "config", "gather", "transcribe", "listen", "tag"],
"AllowedConfirmSessionVerbs": ["config", "gather", "plays", "say", "tag"],
"CallStatus": {
"Trying": "trying",

lib/utils/parse-decibels.js (new file, 18 lines)

@@ -0,0 +1,18 @@
const parseDecibels = (db) => {
if (!db) return 0;
if (typeof db === 'number') {
return db;
}
else if (typeof db === 'string') {
const match = db.match(/([+-]?\d+(\.\d+)?)\s*db/i);
if (match) {
return Math.trunc(parseFloat(match[1]));
} else {
return 0;
}
} else {
return 0;
}
};
module.exports = parseDecibels;
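Behavior of the new helper, for reference (the return values follow directly from the code above; the require path assumes the lib/utils location used elsewhere in this diff):

const parseDecibels = require('./lib/utils/parse-decibels');

parseDecibels('-6.5 dB');   // -6  (fractional part truncated)
parseDecibels('+3dB');      // 3
parseDecibels(4);           // 4   (numbers pass through untouched)
parseDecibels('loud');      // 0   (unparseable strings fall back to 0)
parseDecibels(undefined);   // 0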


@@ -295,17 +295,17 @@ class SingleDialer extends Emitter {
if (err.status === 487) status.callStatus = CallStatus.NoAnswer;
else if ([486, 600].includes(err.status)) status.callStatus = CallStatus.Busy;
this.logger.info(`SingleDialer:exec outdial failure ${err.status}`);
inviteSpan.setAttributes({'invite.status_code': err.status});
inviteSpan.end();
inviteSpan?.setAttributes({'invite.status_code': err.status});
inviteSpan?.end();
}
else {
this.logger.error(err, 'SingleDialer:exec');
status.sipStatus = 500;
inviteSpan.setAttributes({
inviteSpan?.setAttributes({
'invite.status_code': 500,
'invite.err': err.message
});
inviteSpan.end();
inviteSpan?.end();
}
this.emit('callStatusChange', status);
if (this.ep) this.ep.destroy();

package-lock.json (generated, 141 changed lines)

@@ -15,10 +15,10 @@
"@jambonz/http-health-check": "^0.0.1",
"@jambonz/mw-registrar": "^0.2.4",
"@jambonz/realtimedb-helpers": "^0.8.7",
"@jambonz/speech-utils": "^0.0.42",
"@jambonz/speech-utils": "^0.0.44",
"@jambonz/stats-collector": "^0.1.9",
"@jambonz/time-series": "^0.2.8",
"@jambonz/verb-specifications": "^0.0.63",
"@jambonz/verb-specifications": "^0.0.64",
"@opentelemetry/api": "^1.4.0",
"@opentelemetry/exporter-jaeger": "^1.9.0",
"@opentelemetry/exporter-trace-otlp-http": "^0.35.0",
@@ -31,7 +31,7 @@
"bent": "^7.3.12",
"debug": "^4.3.4",
"deepcopy": "^2.1.0",
"drachtio-fsmrf": "^3.0.38",
"drachtio-fsmrf": "^3.0.39",
"drachtio-srf": "^4.5.31",
"express": "^4.18.2",
"express-validator": "^7.0.1",
@@ -3468,9 +3468,9 @@
}
},
"node_modules/@jambonz/speech-utils": {
"version": "0.0.42",
"resolved": "https://registry.npmjs.org/@jambonz/speech-utils/-/speech-utils-0.0.42.tgz",
"integrity": "sha512-ROYin2JqV41Q9T14SOpaXBAvalkOAiMGzCxG9Q1d3XCvxDQ/QQXHbZeFdd9cc64eq4OJNtd9lxmnCS+DSPNuXQ==",
"version": "0.0.44",
"resolved": "https://registry.npmjs.org/@jambonz/speech-utils/-/speech-utils-0.0.44.tgz",
"integrity": "sha512-47EtN/cu2R86STPLE5bUcPBKXZFlER0BeJweRPjac6jfxd5MmJpjezgec3ZKr5MkvmrYFhY4CTA8qcbTc5mycQ==",
"dependencies": {
"@aws-sdk/client-polly": "^3.496.0",
"@aws-sdk/client-sts": "^3.496.0",
@@ -3488,12 +3488,9 @@
}
},
"node_modules/@jambonz/speech-utils/node_modules/undici": {
"version": "6.4.0",
"resolved": "https://registry.npmjs.org/undici/-/undici-6.4.0.tgz",
"integrity": "sha512-wYaKgftNqf6Je7JQ51YzkEkEevzOgM7at5JytKO7BjaURQpERW8edQSMrr2xb+Yv4U8Yg47J24+lc9+NbeXMFA==",
"dependencies": {
"@fastify/busboy": "^2.0.0"
},
"version": "6.10.1",
"resolved": "https://registry.npmjs.org/undici/-/undici-6.10.1.tgz",
"integrity": "sha512-kSzmWrOx3XBKTgPm4Tal8Hyl3yf+hzlA00SAf4goxv8LZYafKmS6gJD/7Fe5HH/DMNiFTRXvkwhLo7mUn5fuQQ==",
"engines": {
"node": ">=18.0"
}
@@ -3517,9 +3514,9 @@
}
},
"node_modules/@jambonz/verb-specifications": {
"version": "0.0.63",
"resolved": "https://registry.npmjs.org/@jambonz/verb-specifications/-/verb-specifications-0.0.63.tgz",
"integrity": "sha512-eVO/W1z/y3U6xvwWdbdl3QGACJPcjgsGARcuzeqnafD5n8M22htM9HfHBXjw6L6TfQBc1NEFkRIF/1wx3GEyHA==",
"version": "0.0.64",
"resolved": "https://registry.npmjs.org/@jambonz/verb-specifications/-/verb-specifications-0.0.64.tgz",
"integrity": "sha512-cW9wci240vSkVqVhN/B0MnJYXL4vUCAOia0ccNlVDiQmrcED3hFU1IkFXHRzyY5uiD+GaoapXUOqaerjbDtmLA==",
"dependencies": {
"debug": "^4.3.4",
"pino": "^8.8.0"
@@ -6110,9 +6107,9 @@
}
},
"node_modules/drachtio-fsmrf": {
"version": "3.0.38",
"resolved": "https://registry.npmjs.org/drachtio-fsmrf/-/drachtio-fsmrf-3.0.38.tgz",
"integrity": "sha512-nR/FPEqgGxKkqYxU+afRivIyDQOpZJbLLd2ydYlubFsUWYxDugPu2rGT6/t0fYgePn6qpA418z+uMA65aB8Q/w==",
"version": "3.0.39",
"resolved": "https://registry.npmjs.org/drachtio-fsmrf/-/drachtio-fsmrf-3.0.39.tgz",
"integrity": "sha512-ATpmm3HA6Skp9R8Kt12Jc9g7BV5nZqRSHJsPEQF7AMhRtrEIrSC1GsMEIqwGTCH/wO4lBRh0+gXTtDuVFBCsVg==",
"dependencies": {
"camel-case": "^4.1.2",
"debug": "^2.6.9",
@@ -6372,13 +6369,14 @@
}
},
"node_modules/es5-ext": {
"version": "0.10.62",
"resolved": "https://registry.npmjs.org/es5-ext/-/es5-ext-0.10.62.tgz",
"integrity": "sha512-BHLqn0klhEpnOKSrzn/Xsz2UIW8j+cGmo9JLzr8BiUapV8hPL9+FliFqjwr9ngW7jWdnxv6eO+/LqyhJVqgrjA==",
"version": "0.10.64",
"resolved": "https://registry.npmjs.org/es5-ext/-/es5-ext-0.10.64.tgz",
"integrity": "sha512-p2snDhiLaXe6dahss1LddxqEm+SkuDvV8dnIQG0MWjyHpcMNfXKPE+/Cc0y+PhxJX3A4xGNeFCj5oc0BUh6deg==",
"hasInstallScript": true,
"dependencies": {
"es6-iterator": "^2.0.3",
"es6-symbol": "^3.1.3",
"esniff": "^2.0.1",
"next-tick": "^1.1.0"
},
"engines": {
@@ -6547,6 +6545,25 @@
"node": ">=10"
}
},
"node_modules/esniff": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/esniff/-/esniff-2.0.1.tgz",
"integrity": "sha512-kTUIGKQ/mDPFoJ0oVfcmyJn4iBDRptjNVIzwIFR7tqWXdVI9xfA2RMwY/gbSpJG3lkdWNEjLap/NqVHZiJsdfg==",
"dependencies": {
"d": "^1.0.1",
"es5-ext": "^0.10.62",
"event-emitter": "^0.3.5",
"type": "^2.7.2"
},
"engines": {
"node": ">=0.10"
}
},
"node_modules/esniff/node_modules/type": {
"version": "2.7.2",
"resolved": "https://registry.npmjs.org/type/-/type-2.7.2.tgz",
"integrity": "sha512-dzlvlNlt6AXU7EBSfpAscydQ7gXB+pPGsPnfJnZpiNJBDj7IaJzQlBZYGdEi4R9HmPdBv2XmWJ6YUtoTa7lmCw=="
},
"node_modules/espree": {
"version": "7.3.1",
"resolved": "https://registry.npmjs.org/espree/-/espree-7.3.1.tgz",
@@ -6651,6 +6668,15 @@
"node": ">= 0.6"
}
},
"node_modules/event-emitter": {
"version": "0.3.5",
"resolved": "https://registry.npmjs.org/event-emitter/-/event-emitter-0.3.5.tgz",
"integrity": "sha512-D9rRn9y7kLPnJ+hMq7S/nhvoKwwvVJahBi2BPmx3bvbsEdK3W9ii8cBSGjP+72/LnM4n6fo3+dkCX5FeTQruXA==",
"dependencies": {
"d": "1",
"es5-ext": "~0.10.14"
}
},
"node_modules/event-target-shim": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz",
@@ -6956,9 +6982,9 @@
"dev": true
},
"node_modules/follow-redirects": {
"version": "1.15.4",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.4.tgz",
"integrity": "sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==",
"version": "1.15.6",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz",
"integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==",
"funding": [
{
"type": "individual",
@@ -14112,9 +14138,9 @@
}
},
"@jambonz/speech-utils": {
"version": "0.0.42",
"resolved": "https://registry.npmjs.org/@jambonz/speech-utils/-/speech-utils-0.0.42.tgz",
"integrity": "sha512-ROYin2JqV41Q9T14SOpaXBAvalkOAiMGzCxG9Q1d3XCvxDQ/QQXHbZeFdd9cc64eq4OJNtd9lxmnCS+DSPNuXQ==",
"version": "0.0.44",
"resolved": "https://registry.npmjs.org/@jambonz/speech-utils/-/speech-utils-0.0.44.tgz",
"integrity": "sha512-47EtN/cu2R86STPLE5bUcPBKXZFlER0BeJweRPjac6jfxd5MmJpjezgec3ZKr5MkvmrYFhY4CTA8qcbTc5mycQ==",
"requires": {
"@aws-sdk/client-polly": "^3.496.0",
"@aws-sdk/client-sts": "^3.496.0",
@@ -14132,12 +14158,9 @@
},
"dependencies": {
"undici": {
"version": "6.4.0",
"resolved": "https://registry.npmjs.org/undici/-/undici-6.4.0.tgz",
"integrity": "sha512-wYaKgftNqf6Je7JQ51YzkEkEevzOgM7at5JytKO7BjaURQpERW8edQSMrr2xb+Yv4U8Yg47J24+lc9+NbeXMFA==",
"requires": {
"@fastify/busboy": "^2.0.0"
}
"version": "6.10.1",
"resolved": "https://registry.npmjs.org/undici/-/undici-6.10.1.tgz",
"integrity": "sha512-kSzmWrOx3XBKTgPm4Tal8Hyl3yf+hzlA00SAf4goxv8LZYafKmS6gJD/7Fe5HH/DMNiFTRXvkwhLo7mUn5fuQQ=="
}
}
},
@@ -14160,9 +14183,9 @@
}
},
"@jambonz/verb-specifications": {
"version": "0.0.63",
"resolved": "https://registry.npmjs.org/@jambonz/verb-specifications/-/verb-specifications-0.0.63.tgz",
"integrity": "sha512-eVO/W1z/y3U6xvwWdbdl3QGACJPcjgsGARcuzeqnafD5n8M22htM9HfHBXjw6L6TfQBc1NEFkRIF/1wx3GEyHA==",
"version": "0.0.64",
"resolved": "https://registry.npmjs.org/@jambonz/verb-specifications/-/verb-specifications-0.0.64.tgz",
"integrity": "sha512-cW9wci240vSkVqVhN/B0MnJYXL4vUCAOia0ccNlVDiQmrcED3hFU1IkFXHRzyY5uiD+GaoapXUOqaerjbDtmLA==",
"requires": {
"debug": "^4.3.4",
"pino": "^8.8.0"
@@ -16191,9 +16214,9 @@
}
},
"drachtio-fsmrf": {
"version": "3.0.38",
"resolved": "https://registry.npmjs.org/drachtio-fsmrf/-/drachtio-fsmrf-3.0.38.tgz",
"integrity": "sha512-nR/FPEqgGxKkqYxU+afRivIyDQOpZJbLLd2ydYlubFsUWYxDugPu2rGT6/t0fYgePn6qpA418z+uMA65aB8Q/w==",
"version": "3.0.39",
"resolved": "https://registry.npmjs.org/drachtio-fsmrf/-/drachtio-fsmrf-3.0.39.tgz",
"integrity": "sha512-ATpmm3HA6Skp9R8Kt12Jc9g7BV5nZqRSHJsPEQF7AMhRtrEIrSC1GsMEIqwGTCH/wO4lBRh0+gXTtDuVFBCsVg==",
"requires": {
"camel-case": "^4.1.2",
"debug": "^2.6.9",
@@ -16425,12 +16448,13 @@
}
},
"es5-ext": {
"version": "0.10.62",
"resolved": "https://registry.npmjs.org/es5-ext/-/es5-ext-0.10.62.tgz",
"integrity": "sha512-BHLqn0klhEpnOKSrzn/Xsz2UIW8j+cGmo9JLzr8BiUapV8hPL9+FliFqjwr9ngW7jWdnxv6eO+/LqyhJVqgrjA==",
"version": "0.10.64",
"resolved": "https://registry.npmjs.org/es5-ext/-/es5-ext-0.10.64.tgz",
"integrity": "sha512-p2snDhiLaXe6dahss1LddxqEm+SkuDvV8dnIQG0MWjyHpcMNfXKPE+/Cc0y+PhxJX3A4xGNeFCj5oc0BUh6deg==",
"requires": {
"es6-iterator": "^2.0.3",
"es6-symbol": "^3.1.3",
"esniff": "^2.0.1",
"next-tick": "^1.1.0"
}
},
@@ -16562,6 +16586,24 @@
"integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==",
"dev": true
},
"esniff": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/esniff/-/esniff-2.0.1.tgz",
"integrity": "sha512-kTUIGKQ/mDPFoJ0oVfcmyJn4iBDRptjNVIzwIFR7tqWXdVI9xfA2RMwY/gbSpJG3lkdWNEjLap/NqVHZiJsdfg==",
"requires": {
"d": "^1.0.1",
"es5-ext": "^0.10.62",
"event-emitter": "^0.3.5",
"type": "^2.7.2"
},
"dependencies": {
"type": {
"version": "2.7.2",
"resolved": "https://registry.npmjs.org/type/-/type-2.7.2.tgz",
"integrity": "sha512-dzlvlNlt6AXU7EBSfpAscydQ7gXB+pPGsPnfJnZpiNJBDj7IaJzQlBZYGdEi4R9HmPdBv2XmWJ6YUtoTa7lmCw=="
}
}
},
"espree": {
"version": "7.3.1",
"resolved": "https://registry.npmjs.org/espree/-/espree-7.3.1.tgz",
@@ -16638,6 +16680,15 @@
"resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz",
"integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg=="
},
"event-emitter": {
"version": "0.3.5",
"resolved": "https://registry.npmjs.org/event-emitter/-/event-emitter-0.3.5.tgz",
"integrity": "sha512-D9rRn9y7kLPnJ+hMq7S/nhvoKwwvVJahBi2BPmx3bvbsEdK3W9ii8cBSGjP+72/LnM4n6fo3+dkCX5FeTQruXA==",
"requires": {
"d": "1",
"es5-ext": "~0.10.14"
}
},
"event-target-shim": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz",
@@ -16888,9 +16939,9 @@
"dev": true
},
"follow-redirects": {
"version": "1.15.4",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.4.tgz",
"integrity": "sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw=="
"version": "1.15.6",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz",
"integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA=="
},
"for-each": {
"version": "0.3.3",


@@ -31,10 +31,10 @@
"@jambonz/http-health-check": "^0.0.1",
"@jambonz/mw-registrar": "^0.2.4",
"@jambonz/realtimedb-helpers": "^0.8.7",
"@jambonz/speech-utils": "^0.0.42",
"@jambonz/speech-utils": "^0.0.44",
"@jambonz/stats-collector": "^0.1.9",
"@jambonz/time-series": "^0.2.8",
"@jambonz/verb-specifications": "^0.0.63",
"@jambonz/verb-specifications": "^0.0.64",
"@opentelemetry/api": "^1.4.0",
"@opentelemetry/exporter-jaeger": "^1.9.0",
"@opentelemetry/exporter-trace-otlp-http": "^0.35.0",
@@ -47,7 +47,7 @@
"bent": "^7.3.12",
"debug": "^4.3.4",
"deepcopy": "^2.1.0",
"drachtio-fsmrf": "^3.0.38",
"drachtio-fsmrf": "^3.0.39",
"drachtio-srf": "^4.5.31",
"express": "^4.18.2",
"express-validator": "^7.0.1",