Compare commits

...

40 Commits

Author SHA1 Message Date
Dave Horton
a005253a9f update to latest speech-utils 2024-10-18 12:27:29 -04:00
rammohan-y
10efc5d608 feat/942: updated optimal google models (#943) 2024-10-18 10:03:56 -04:00
Hoan Luu Huu
1c48c40496 Support sip_parent_callid for sbc-outbound (#939)
* include X-CID for dial outbound if the call-session is outbound

* include X-CID for dial outbound if the call-session is outbound

* include X-CID for dial outbound if the call-session is outbound

* include X-CID for dial outbound if the call-session is outbound
2024-10-17 07:18:58 -04:00
Dave Horton
c79a6aaf8a Feat/llm update (#936)
* add support for llm:update during LLM session

* make sure to end openai session when Llm task is killed

* wip

* wip

* wip

* wip

* wip

* wip

* wip
2024-10-16 09:27:51 -04:00
Hoan Luu Huu
da5f51e8e0 update speech utils version (#937) 2024-10-16 08:26:06 -04:00
Hoan Luu Huu
e7fd40e297 support sbcCallId in calling/status hook (#934)
* support sbcCallId in calling/status hook

* support sbcCallId in calling/status hook

* support sbcCallId in calling/status hook

* wip

* wip

* wip
2024-10-14 18:00:09 -04:00
Dave Horton
f541ff1a15 add support for aws language model name when transcribing (#890)
* add support for aws language model name when transcribing

* wip - from prev branch

* wip

* support both aws grpc and ws api - detect based on transcription payload

* update to drachtio-fsmrf@4.0.0

* fix for grpc compatibility, requires JAMBONES_AWS_TRANSCRIBE_USE_GRPC env

* back out major update to drachtio-srf and fsmrf; that should come in a separate PR

* update drachtio-srf
2024-10-12 19:46:31 -04:00
Dave Horton
98b968d61f update test db (#933) 2024-10-12 19:34:40 -04:00
Dave Horton
f09722a5b5 Feat/llm verb (#931)
* wip

* working version for openai realtime beta

* lint

* tests: update db to latest 0.9.2 schema
2024-10-12 19:26:27 -04:00
Dave Horton
f84b3793e1 Feat/speechmatics (#932)
* wip

* initial working version of speechmatics

* linting
2024-10-12 18:42:53 -04:00
Dave Horton
84b7456c2d add support for speechmatics asr (#920)
* update to verb specs with speechmatics support

* discover local ip using os module
2024-10-11 09:24:36 -04:00
Hoan Luu Huu
c67499e38b update speech version 0.1.18 (#930) 2024-10-11 08:59:33 -04:00
Hoan Luu Huu
e372a3cdfb update speech version (#927) 2024-10-09 19:46:44 -04:00
rammohan-y
ea303caa1c feat/924: made actions as optional when there is no noResponseTimeout (#925) 2024-10-08 08:06:12 -04:00
Hoan Luu Huu
2af67d8f05 support changing log level runtime (#926)
* support changing log level runtime

* support changing log level runtime

* support changing log level runtime
2024-10-07 09:51:51 -04:00
Hoan Luu Huu
96b3b0fe07 Allow Say, Gather, Transcribe to finish if there is an error with the speech credential (#910)
* allow moving to the next task if the say verb failed because of the speech credential

* allow moving to the next task if the say verb failed because of the speech credential

* allow moving to the next task if the say verb failed because of the speech credential

* wip

* wip
2024-10-01 13:40:41 -04:00
Hoan Luu Huu
b898b70520 support config referHook (#915) 2024-09-30 08:13:48 -04:00
Hoan Luu Huu
b9ef00dfc7 Fixed Gather digits does not work without nested say/play (#914)
* Fixed Gather digits does not work without nested say/play

* fix review comment

* add assert to make sure we don't register dtmf twice in gather verb
2024-09-30 07:46:21 -04:00
rammohan-y
68fa3c013d Feat/902: executing giveUpAction when noResponseGiveupTimeout is reached (#908)
* feat/893: made noResponseTimeout and noResponseGiveUpTimout independent

* support for giveUpActions implemented

* feat/902: using makeTask and exec of task to execute the giveUpActions

* feat/902: changed version of verb-specifications and speech-utils

* feat/902: fixed jslint errors

* feat/902: modified log

* feat/902: using a new event giveupWithTasks for processing giveUpActions

* feat/902: removed check for wakeupResolver and replaceApplication already taking care of wakeupResolver, also updated the verb-specifications version

* feat/902: removed sync for _onNoResponseGiveUpTimer function
2024-09-26 09:40:30 -04:00
Dave Horton
7c24208067 fix #916: race condition where call just ended when action hook play completes (#917) 2024-09-25 20:17:22 -04:00
Dave Horton
7f7c26e982 fix for https://github.com/jambonz/freeswitch-modules/issues/117 (#912) 2024-09-25 20:13:56 -04:00
Markus Frindt
402adc2098 add label to tts stt spans (#909)
Co-authored-by: Markus Frindt <m.frindt@cognigy.com>
2024-09-25 16:44:15 -04:00
rammohan-y
724d4fb713 Feat/893: Made noResponseTimeout and noResponseGiveUpTimeout independent (#896)
* feat/893: made noResponseTimeout and noResponseGiveUpTimout independent

* feat/893: not assuming 0 if noResponseTimeout is not specified
2024-09-19 10:29:51 -04:00
Hoan Luu Huu
673827cce3 fixed adulting call session does not send status callback if hangup is used (#907) 2024-09-19 10:08:15 -04:00
Anton Voylenko
c4c5ad33d8 feat: loop dial music (#769) 2024-09-17 13:51:01 -04:00
rammohan-y
7bbfc01cb0 feat/864: enable bargeIn when minBargeinWordCount is 0 (#900)
* feat/864: checking for undefined, because 0 is a valid value for minBargeinWordCount

* feat/864: checking for undefined and null

* feat/864: corrected spelling of mode and added check for undefined as 0 is a valid value for vad.mode
2024-09-17 13:26:29 -04:00
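
The bullets above describe the root cause: a plain truthiness check drops 0, which is a valid value for minBargeinWordCount and vad.mode. A minimal sketch of the distinction, using hypothetical values (the actual fix is visible in the config-verb diff further down):

```js
// Hypothetical values illustrating why `if (obj[k])` is wrong when 0 is valid
const bargeIn = { minBargeinWordCount: 0 };

if (bargeIn.minBargeinWordCount) {
  // never reached: 0 is falsy, so the setting would be silently dropped
}

const val = bargeIn.minBargeinWordCount;
if (val !== undefined && val !== null) {
  // reached: 0 is explicitly allowed and copied through to gatherOpts
}
```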
Hoan Luu Huu
7daf056d6b allow set vendor model or engine in runtime (#897) 2024-09-12 09:03:15 +01:00
Hoan Luu Huu
e69afc4be4 fix recognizer/synthesizer label wrongly select between verb and app (#881)
* fix recognizer/synthesizer label wrongly select between verb and application

* fix jslint

* fix ASR cannot fallback

* update tts fallback does not send notification
2024-09-11 09:34:52 +01:00
rammohan-y
3a7cc27d0a feat/891: getting the options from customOptions in case of custom stt (#892) 2024-09-10 09:38:33 +01:00
Dave Horton
c4a6057fc6 bump version 2024-09-04 13:31:05 +01:00
rammohan-y
174438bb01 Feat/882: default model setting for en-IN language (#888)
* feat/882: default model setting for en-IN language

* feat/882: refactored if into ||
2024-09-03 13:22:38 +01:00
Antony Jukes
4348615b75 Create Call Rest is missing target headers on outdial (#874)
* add target headers for rest create-call

* rebased

---------

Co-authored-by: ajukes <ajukes@callable.io>
2024-09-02 21:48:09 +01:00
Hoan Luu Huu
d365883bfe fix #883: after being kicked from a conference, we no longer receive freeswitch CUSTOM events (#886)
* fix #883: after being kicked from a conference, we no longer receive freeswitch CUSTOM events

* fix #883: after being kicked from a conference, we no longer receive freeswitch CUSTOM events

* reset Esl Custom event after conference.

* update drachtio fsmrf version
2024-08-31 14:47:39 +01:00
Dave Horton
c0ab936b76 wip (#830)
* wip

* wip

* wip

* wip

* update deps

* update test to use latest freeswitch image
2024-08-29 15:23:49 -04:00
Dave Horton
600ff763fa fix #840 (#880) 2024-08-26 10:14:59 -04:00
Hoan Luu Huu
4d077e990f Fix/audio issue kick conference (#878)
* rest call session does not handle for RE-INVITE

* fixed audio is bad after kicked from conference

* fix review comment
2024-08-23 09:28:39 -04:00
RJ Burnham
eccef54b04 Add support for configuring the IP address that is advertised to the API server. (#875)
This is needed when running in fargate as ip.address() will return the wrong ip address.
2024-08-23 08:33:16 -04:00
Dave Horton
2790e6d9ad fix linting error from PR 2024-08-20 08:36:24 -04:00
rammohan-y
f95d8639be Feat/868: Use global synthesizer config properties for say verb (#869)
* feat/868: Use the properties from global config in verb for TTS

* feat/868: setting this.options to combination of cs.synthesizer.options and this.options

* feat/868: Move the logic of copying cs properties to parent class tts-task.js

* feat/868: add empty line that was removed, say.js restored to original version

* feat/868: moved _synthesizeWithSpecificVendor to tts-task.js

---------

Co-authored-by: Rammohan Yadavalli <rammohan.yadavalli@kore.com>
2024-08-20 08:31:44 -04:00
Hoan Luu Huu
fc838512b6 Fixed: long amd hints prevent the freeswitch module from connecting to the vendor (#872)
* rest call session does not handle for RE-INVITE

* fixed: long amd hints prevent the freeswitch module from connecting to the vendor
2024-08-20 07:30:32 -04:00
33 changed files with 1817 additions and 829 deletions

View File

@@ -21,6 +21,7 @@ Configuration is provided via environment variables:
|ENCRYPTION_SECRET| secret for credential encryption(JWT_SECRET is deprecated) |yes|
|GOOGLE_APPLICATION_CREDENTIALS| path to gcp service key file|yes|
|HTTP_PORT| tcp port to listen on for API requests from jambonz-api-server|yes|
|HTTP_IP| IP Address for API requests from jambonz-api-server |no|
|JAMBONES_GATHER_EARLY_HINTS_MATCH| if true and hints are provided, gather will opportunistically review interim transcripts if possible to reduce ASR latency |no|
|JAMBONES_FREESWITCH| IP:port:secret for Freeswitch server (e.g. '127.0.0.1:8021:JambonzR0ck$')|yes|
|JAMBONES_LOGLEVEL| log level for application, 'info' or 'debug'|no|
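
For illustration, a minimal sketch of how the new HTTP_IP override might be consumed, assuming the `ip` package that the commit notes refer to; the repo's actual wiring goes through lib/config.js and the HTTP listener:

```js
// Hypothetical sketch: prefer HTTP_IP over auto-detection, since ip.address()
// can return the wrong address when running in Fargate.
const ip = require('ip');

const HTTP_IP = process.env.HTTP_IP;
const HTTP_PORT = parseInt(process.env.HTTP_PORT, 10) || 3000;

// address advertised to jambonz-api-server for API requests
const advertisedAddress = HTTP_IP || ip.address();
console.log(`accepting API requests at http://${advertisedAddress}:${HTTP_PORT}`);
```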

7
app.js
View File

@@ -100,8 +100,13 @@ createHttpListener(logger, srf)
});
setInterval(() => {
setInterval(async() => {
srf.locals.stats.gauge('fs.sip.calls.count', sessionTracker.count);
// Checking system log level
const systemInformation = await srf.locals.dbHelpers.lookupSystemInformation();
if (systemInformation && systemInformation.log_level) {
logger.level = systemInformation.log_level;
}
}, 20000);
const disconnect = () => {

View File

@@ -73,6 +73,7 @@ const JAMBONES_LOGLEVEL = process.env.JAMBONES_LOGLEVEL || 'info';
const JAMBONES_INJECT_CONTENT = process.env.JAMBONES_INJECT_CONTENT;
const PORT = parseInt(process.env.HTTP_PORT, 10) || 3000;
const HTTP_IP = process.env.HTTP_IP;
const HTTP_PORT_MAX = parseInt(process.env.HTTP_PORT_MAX, 10);
const K8S = process.env.K8S;
@@ -107,6 +108,8 @@ const DEEPGRAM_API_KEY = process.env.DEEPGRAM_API_KEY;
const ANCHOR_MEDIA_ALWAYS = process.env.ANCHOR_MEDIA_ALWAYS;
const VMD_HINTS_FILE = process.env.VMD_HINTS_FILE;
const JAMBONES_AWS_TRANSCRIBE_USE_GRPC = process.env.JAMBONES_AWS_TRANSCRIBE_USE_GRPC;
/* security, secrets */
const LEGACY_CRYPTO = !!process.env.LEGACY_CRYPTO;
const JWT_SECRET = process.env.JWT_SECRET;
@@ -170,6 +173,7 @@ module.exports = {
JAMBONES_CLUSTER_ID,
PORT,
HTTP_PORT_MAX,
HTTP_IP,
K8S,
K8S_SBC_SIP_SERVICE_NAME,
JAMBONES_SUBNET,
@@ -188,6 +192,7 @@ module.exports = {
ANCHOR_MEDIA_ALWAYS,
VMD_HINTS_FILE,
JAMBONES_FREESWITCH_MAX_CALL_DURATION_MINS,
JAMBONES_AWS_TRANSCRIBE_USE_GRPC,
LEGACY_CRYPTO,
JWT_SECRET,

View File

@@ -97,7 +97,8 @@ router.post('/',
'X-Trace-ID': rootSpan.traceId,
...(req.body?.application_sid && {'X-Application-Sid': req.body.application_sid}),
...(restDial.fromHost && {'X-Preferred-From-Host': restDial.fromHost}),
...(record_all_calls && {'X-Record-All-Calls': recordOutputFormat})
...(record_all_calls && {'X-Record-All-Calls': recordOutputFormat}),
...target.headers
};
switch (target.type) {
@@ -257,6 +258,8 @@ router.post('/',
callId: inviteReq.get('Call-ID'),
accountSid,
traceId: rootSpan.traceId
}, {
...(account.enable_debug_log && {level: 'debug'})
});
app.requestor.logger = app.notifier.logger = sipLogger;
const callInfo = new CallInfo({
@@ -290,6 +293,8 @@ router.post('/',
},
cbProvisional: (prov) => {
const callStatus = prov.body ? CallStatus.EarlyMedia : CallStatus.Ringing;
// Update call-id for sbc outbound INVITE
cs.callInfo.sbcCallid = prov.get('X-CID');
if ([180, 183].includes(prov.status) && prov.body) connectStream(prov.body);
restDial.emit('callStatus', prov.status, !!prov.body);
cs.emit('callStatusChange', {callStatus, sipStatus: prov.status});

View File

@@ -187,14 +187,20 @@ module.exports = function(srf, logger) {
const {span} = rootSpan.startChildSpan('lookupAccountDetails');
try {
req.locals.accountInfo = await lookupAccountDetails(account_sid);
req.locals.service_provider_sid = req.locals.accountInfo?.account?.service_provider_sid;
const accountDetail = await lookupAccountDetails(account_sid);
const account = accountDetail?.account;
req.locals.accountInfo = accountDetail;
req.locals.service_provider_sid = account?.service_provider_sid;
span.end();
if (!req.locals.accountInfo.account.is_active) {
if (!account?.is_active) {
logger.info(`Account is inactive or suspended ${account_sid}`);
// TODO: alert
return res.send(503, {headers: {'X-Reason': 'Account exists but is inactive'}});
}
// Change the default log level to debug
if (account?.enable_debug_log) {
req.locals.logger.level = 'debug';
}
logger.debug({accountInfo: req.locals?.accountInfo?.account}, `retrieved account info for ${account_sid}`);
next();
} catch (err) {

View File

@@ -32,6 +32,7 @@ class CallInfo {
this.sipStatus = 100;
this.sipReason = 'Trying';
this.callStatus = CallStatus.Trying;
this.sbcCallid = req.get('X-CID');
this.originatingSipIp = req.get('X-Forwarded-For');
this.originatingSipTrunkName = req.get('X-Originating-Carrier');
const {siprec} = req.locals;
@@ -129,6 +130,7 @@ class CallInfo {
from: this.from,
to: this.to,
callId: this.callId,
sbcCallid: this.sbcCallid,
sipStatus: this.sipStatus,
sipReason: this.sipReason,
callStatus: this.callStatus,

View File

@@ -20,6 +20,7 @@ const listTaskNames = require('../utils/summarize-tasks');
const HttpRequestor = require('../utils/http-requestor');
const WsRequestor = require('../utils/ws-requestor');
const ActionHookDelayProcessor = require('../utils/action-hook-delay');
const {parseUri} = require('drachtio-srf');
const {
JAMBONES_INJECT_CONTENT,
JAMBONES_EAGERLY_PRE_CACHE_AUDIO,
@@ -339,6 +340,18 @@ class CallSession extends Emitter {
this.application.fallback_speech_recognizer_language = language;
}
/**
* global referHook
*/
set referHook(hook) {
this._referHook = hook;
}
get referHook() {
return this._referHook;
}
/**
* Vad
*/
@@ -520,15 +533,6 @@ class CallSession extends Emitter {
this._actionHookDelayRetries = e;
}
// Getter/setter for current tts vendor
get currentTtsVendor() {
return this._currentTtsVendor;
}
set currentTtsVendor(vendor) {
this._currentTtsVendor = vendor;
}
get actionHookDelayProcessor() {
return this._actionHookDelayProcessor;
}
@@ -553,6 +557,17 @@ class CallSession extends Emitter {
this.wakeupResolver = null;
}
});
this._actionHookDelayProcessor.on('giveupWithTasks', (tasks) => {
this.logger.info('CallSession: ActionHookDelayProcessor: giveupWithTasks event');
const giveUpTasks = normalizeJambones(this.logger, tasks).map((tdata) => makeTask(this.logger, tdata));
this.logger.info({tasks: listTaskNames(giveUpTasks)}, 'CallSession:giveupWithTasks task list');
// we need to clear the ahd, as we do not want to execute actionHookDelay actions again
this.clearActionHookDelayProcessor();
// replace the application with giveUpTasks
this.replaceApplication(giveUpTasks);
});
} catch (err) {
this.logger.error({err}, 'CallSession: Error creating ActionHookDelayProcessor');
}
@@ -566,9 +581,7 @@ class CallSession extends Emitter {
//this.logger.debug('CallSession:clearOrRestoreActionHookDelayProcessor - ahd settings');
//await this.clearActionHookDelayProcessor();
}
else {
this.logger.debug('CallSession:clearOrRestoreActionHookDelayProcessor - restore ahd after gather override');
}
this.logger.debug('CallSession:clearOrRestoreActionHookDelayProcessor - say or play action completed');
}
}
@@ -950,42 +963,56 @@ class CallSession extends Emitter {
speech_credential_sid: credential.speech_credential_sid,
cobalt_server_uri: credential.cobalt_server_uri
};
} else if ('elevenlabs' === vendor) {
}
else if ('elevenlabs' === vendor) {
return {
api_key: credential.api_key,
model_id: credential.model_id,
options: credential.options
};
} else if ('playht' === vendor) {
}
else if ('playht' === vendor) {
return {
api_key: credential.api_key,
user_id: credential.user_id,
voice_engine: credential.voice_engine,
options: credential.options
};
} else if ('rimelabs' === vendor) {
}
else if ('rimelabs' === vendor) {
return {
api_key: credential.api_key,
model_id: credential.model_id,
options: credential.options
};
} else if ('assemblyai' === vendor) {
}
else if ('assemblyai' === vendor) {
return {
speech_credential_sid: credential.speech_credential_sid,
api_key: credential.api_key
};
} else if ('whisper' === vendor) {
}
else if ('whisper' === vendor) {
return {
api_key: credential.api_key,
model_id: credential.model_id
};
} else if ('verbio' === vendor) {
}
else if ('verbio' === vendor) {
return {
client_id: credential.client_id,
client_secret: credential.client_secret,
engine_version: credential.engine_version
};
} else if (vendor.startsWith('custom:')) {
}
else if ('speechmatics' === vendor) {
this.logger.info({credential}, 'CallSession:getSpeechCredentials - speechmatics credential');
return {
api_key: credential.api_key,
speechmatics_stt_uri: credential.speechmatics_stt_uri,
};
}
else if (vendor.startsWith('custom:')) {
return {
speech_credential_sid: credential.speech_credential_sid,
auth_token: credential.auth_token,
@@ -1069,7 +1096,7 @@ class CallSession extends Emitter {
try {
await this._awaitCommandsOrHangup();
await this.clearOrRestoreActionHookDelayProcessor();
//await this.clearOrRestoreActionHookDelayProcessor();
//TODO: remove filler noise code and simply create as action hook delay
if (this._isPlayingFillerNoise) {
@@ -1562,6 +1589,29 @@ Duration=${duration} `
this.logger.info({response}, '_lccBoostAudioSignal: response from freeswitch');
}
_lccToolOutput(tool_call_id, opts, callSid) {
// only valid if we are in an LLM verb
const task = this.currentTask;
if (!task || !task.name.startsWith('Llm')) {
return this.logger.info('CallSession:_lccToolOutput - invalid command since we are not in an llm');
}
task.processToolOutput(tool_call_id, opts, callSid)
.catch((err) => this.logger.error(err, 'CallSession:_lccToolOutput'));
}
_lccLlmUpdate(opts, callSid) {
// only valid if we are in an LLM verb
const task = this.currentTask;
if (!task || !task.name.startsWith('Llm')) {
return this.logger.info('CallSession:_lccLlmUpdate - invalid command since we are not in an llm');
}
task.processLlmUpdate(opts, callSid)
.catch((err) => this.logger.error(err, 'CallSession:_lccLlmUpdate'));
}
/**
* perform call hangup by jambonz
@@ -1622,6 +1672,12 @@ Duration=${duration} `
else if (opts.boostAudioSignal) {
return this._lccBoostAudioSignal(opts, callSid);
}
else if (opts.llm_tool_output) {
return this._lccToolOutput(opts.tool_call_id, opts.llm_tool_output, callSid);
}
else if (opts.llm_update) {
return this._lccLlmUpdate(opts.llm_update, callSid);
}
// whisper may be the only thing we are asked to do, or it may be that
// we are doing a whisper after having muted, paused recording, etc.
@@ -1818,7 +1874,7 @@ Duration=${duration} `
this._jambonzHangup();
}
async _onCommand({msgid, command, call_sid, queueCommand, data}) {
async _onCommand({msgid, command, call_sid, queueCommand, tool_call_id, data}) {
this.logger.info({msgid, command, queueCommand, data}, 'CallSession:_onCommand - received command');
let resolution;
switch (command) {
@@ -1919,6 +1975,14 @@ Duration=${duration} `
});
break;
case 'llm:tool-output':
this._lccToolOutput(tool_call_id, data, call_sid);
break;
case 'llm:update':
this._lccLlmUpdate(data, call_sid);
break;
default:
this.logger.info(`CallSession:_onCommand - invalid command ${command}`);
}
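
For illustration, hypothetical payloads for the two new commands handled above; the field names mirror what _onCommand destructures and what processLlmUpdate/processToolOutput validate, while the surrounding websocket envelope is defined by the ws API and not shown here:

```js
// llm:update -> forwarded to processLlmUpdate; data.type must be one of the
// allowed mid-call request types (e.g. session.update)
const llmUpdate = {
  command: 'llm:update',
  data: { type: 'session.update', session: { instructions: 'Answer briefly.' } }
};

// llm:tool-output -> forwarded to processToolOutput; data must be a
// conversation.item.create event carrying the function call output
const llmToolOutput = {
  command: 'llm:tool-output',
  tool_call_id: 'call_abc123',
  data: {
    type: 'conversation.item.create',
    item: {
      type: 'function_call_output',
      call_id: 'call_abc123',
      output: JSON.stringify({ temperature: 72 })
    }
  }
};
```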
@@ -2074,6 +2138,9 @@ Duration=${duration} `
// When this call is kicked out of a conference, the session needs to replace the endpoint,
// but this.ms might be undefined/null in that case.
this.ms = this.ms || this.getMS();
// Destroy previous ep if it's still running.
if (this.ep?.connected) this.ep.destroy();
this.ep = await this.ms.createEndpoint({remoteSdp: this.dlg.remote.sdp});
this._configMsEndpoint();
@@ -2231,17 +2298,59 @@ Duration=${duration} `
}
/**
* Handle incoming REFER if we are in a dial task
* Handle incoming REFER
* @param {*} req
* @param {*} res
*/
_onRefer(req, res) {
const task = this.currentTask;
const sd = task.sd;
if (task && TaskName.Dial === task.name && sd) {
if (task && TaskName.Dial === task.name && sd && task.referHook) {
task.handleRefer(this, req, res);
}
else {
this._handleRefer(req, res);
}
}
async _handleRefer(req, res) {
if (this._referHook) {
try {
const to = parseUri(req.getParsedHeader('Refer-To').uri);
const by = parseUri(req.getParsedHeader('Referred-By').uri);
const b3 = this.b3;
const httpHeaders = b3 && {b3};
const json = await this.requestor.request('verb:hook', this._referHook, {
...(this.callInfo.toJSON()),
refer_details: {
sip_refer_to: req.get('Refer-To'),
sip_referred_by: req.get('Referred-By'),
sip_user_agent: req.get('User-Agent'),
refer_to_user: to.scheme === 'tel' ? to.number : to.user,
referred_by_user: by.scheme === 'tel' ? by.number : by.user,
referring_call_sid: this.callSid,
referred_call_sid: null,
}
}, httpHeaders);
if (json && Array.isArray(json)) {
const tasks = normalizeJambones(this.logger, json).map((tdata) => makeTask(this.logger, tdata));
if (tasks && tasks.length > 0) {
this.logger.info('CallSession:handleRefer received REFER, get new tasks');
this.replaceApplication(tasks);
if (this.wakeupResolver) {
this.wakeupResolver({reason: 'CallSession: referHook new tasks'});
this.wakeupResolver = null;
}
}
}
res.send(202);
this.logger.info('CallSession:handleRefer - sent 202 Accepted');
} catch (err) {
this.logger.error({err}, 'CallSession:handleRefer - error while asking referHook');
res.send(err.statusCode || 501);
}
} else {
res.send(501);
}
}
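
For reference, a hypothetical referHook response that the handler above would accept: any JSON array of verbs, which normalizeJambones and makeTask then validate; the say/dial properties below are illustrative, not taken from this diff:

```js
// Hypothetical referHook response body: the new application that replaces the
// current one after the REFER is accepted with 202.
const referHookResponse = [
  { verb: 'say', text: 'Transferring you now.' },
  { verb: 'dial', target: [{ type: 'phone', number: '+15551234567' }] }
];
```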

View File

@@ -1,10 +1,6 @@
const CallSession = require('./call-session');
const {CallStatus} = require('../utils/constants');
const moment = require('moment');
const {parseUri} = require('drachtio-srf');
const { normalizeJambones } = require('@jambonz/verb-specifications');
const makeTask = require('../tasks/make_task');
/**
* @classdesc Subclass of CallSession. This represents a CallSession that is
* created for an outbound call that is initiated via the REST API.
@@ -49,59 +45,6 @@ class RestCallSession extends CallSession {
dlg.on('modify', this._onReinvite.bind(this));
this.wrapDialog(dlg);
}
/**
* global referHook
*/
set referHook(hook) {
this._referHook = hook;
}
/**
* This is invoked when the called party sends REFER to Jambonz.
*/
async _onRefer(req, res) {
if (this._referHook) {
try {
const to = parseUri(req.getParsedHeader('Refer-To').uri);
const by = parseUri(req.getParsedHeader('Referred-By').uri);
const b3 = this.b3;
const httpHeaders = b3 && {b3};
const json = await this.requestor.request('verb:hook', this._referHook, {
...(this.callInfo.toJSON()),
refer_details: {
sip_refer_to: req.get('Refer-To'),
sip_referred_by: req.get('Referred-By'),
sip_user_agent: req.get('User-Agent'),
refer_to_user: to.scheme === 'tel' ? to.number : to.user,
referred_by_user: by.scheme === 'tel' ? by.number : by.user,
referring_call_sid: this.callSid,
referred_call_sid: null,
}
}, httpHeaders);
if (json && Array.isArray(json)) {
const tasks = normalizeJambones(this.logger, json).map((tdata) => makeTask(this.logger, tdata));
if (tasks && tasks.length > 0) {
this.logger.info('RestCallSession:handleRefer received REFER, get new tasks');
this.replaceApplication(tasks);
if (this.wakeupResolver) {
this.wakeupResolver({reason: 'RestCallSession: referHook new taks'});
this.wakeupResolver = null;
}
}
}
res.send(202);
this.logger.info('RestCallSession:handleRefer - sent 202 Accepted');
} catch (err) {
this.logger.error({err}, 'RestCallSession:handleRefer - error while asking referHook');
res.send(err.statusCode || 501);
}
} else {
res.send(501);
}
}
/**
* This is invoked when the called party hangs up, in order to calculate the call duration.
*/

View File

@@ -118,7 +118,9 @@ class Conference extends Task {
this.emitter.emit('kill');
await this._doFinalMemberCheck(cs);
if (this.ep && this.ep.connected) {
this.ep.conn.removeAllListeners('esl::event::CUSTOM::*');
// drachtio-fsmrf overrides esl::event::CUSTOM with a conference-join listener; after the conference finishes
// the application needs to reset esl::event::CUSTOM so it can be used again on the same endpoint
this.ep.resetEslCustomEvent();
this.ep.api(`conference ${this.confName} kick ${this.memberId}`)
.catch((err) => this.logger.info({err}, 'Error kicking participant'));
}

View File

@@ -34,7 +34,8 @@ class TaskConfig extends Task {
'finishOnKey', 'input', 'numDigits', 'minDigits', 'maxDigits',
'interDigitTimeout', 'bargein', 'dtmfBargein', 'minBargeinWordCount', 'actionHook'
].forEach((k) => {
if (this.bargeIn[k]) this.gatherOpts[k] = this.bargeIn[k];
const val = this.bargeIn[k];
if (val !== undefined && val !== null) this.gatherOpts[k] = val;
});
}
if (this.transcribe?.enable) {
@@ -73,6 +74,7 @@ class TaskConfig extends Task {
get hasDub() { return Object.keys(this.dub).length; }
get hasVad() { return Object.keys(this.vad).length; }
get hasFillerNoise() { return Object.keys(this.fillerNoise).length; }
get hasReferHook() { return Object.keys(this.data).includes('referHook'); }
get summary() {
const phrase = [];
@@ -82,13 +84,13 @@ class TaskConfig extends Task {
if (this.bargeIn.enable) phrase.push('enable barge-in');
if (this.hasSynthesizer) {
const {vendor:v, language:l, voice} = this.synthesizer;
const s = `{${v},${l},${voice}}`;
const {vendor:v, language:l, voice, label} = this.synthesizer;
const s = `{${v},${l},${voice},${label || 'None'}}`;
phrase.push(`set synthesizer${s}`);
}
if (this.hasRecognizer) {
const {vendor:v, language:l} = this.recognizer;
const s = `{${v},${l}}`;
const {vendor:v, language:l, label} = this.recognizer;
const s = `{${v},${l},${label || 'None'}}`;
phrase.push(`set recognizer${s}`);
}
if (this.hasRecording) phrase.push(this.record.action);
@@ -103,6 +105,7 @@ class TaskConfig extends Task {
if (this.notifyEvents) phrase.push(`event notification ${this.notifyEvents ? 'on' : 'off'}`);
if (this.onHoldMusic) phrase.push(`onHoldMusic: ${this.onHoldMusic}`);
if ('boostAudioSignal' in this.data) phrase.push(`setGain ${this.data.boostAudioSignal}`);
if (this.hasReferHook) phrase.push('set referHook');
return `${this.name}{${phrase.join(',')}}`;
}
@@ -295,9 +298,13 @@ class TaskConfig extends Task {
voiceMs: this.vad.voiceMs || 250,
silenceMs: this.vad.silenceMs || 150,
strategy: this.vad.strategy || 'one-shot',
mode: this.vad.mod || 2
mode: (this.vad.mode !== undefined && this.vad.mode !== null) ? this.vad.mode : 2
};
}
if (this.hasReferHook) {
cs.referHook = this.data.referHook;
}
}
async kill(cs) {

View File

@@ -82,6 +82,8 @@ function filterAndLimit(logger, tasks) {
return unique;
}
const sleepFor = (ms) => new Promise((resolve) => setTimeout(() => resolve(), ms));
class TaskDial extends Task {
constructor(logger, opts) {
super(logger, opts);
@@ -203,7 +205,16 @@ class TaskDial extends Task {
else {
this.epOther = cs.ep;
if (this.dialMusic && this.epOther && this.epOther.connected) {
this.epOther.play(this.dialMusic).catch((err) => {});
(async() => {
do {
try {
await this.epOther.play(this.dialMusic);
} catch (err) {
this.logger.error(err, `TaskDial:exec error playing ${this.dialMusic}`);
await sleepFor(1000);
}
} while (!this.killed || !this.bridged);
})();
}
}
if (!this.killed) await this._attemptCalls(cs);
@@ -473,7 +484,7 @@ class TaskDial extends Task {
}
async _attemptCalls(cs) {
const {req, srf} = cs;
const {req, callInfo, direction, srf} = cs;
const {getSBC} = srf.locals;
const {lookupTeamsByAccount, lookupAccountBySid} = srf.locals.dbHelpers;
const {lookupCarrier, lookupCarrierByPhoneNumber} = dbUtils(this.logger, cs.srf);
@@ -485,6 +496,7 @@ class TaskDial extends Task {
this.headers = {
'X-Account-Sid': cs.accountSid,
...(req && req.has('X-CID') && {'X-CID': req.get('X-CID')}),
...(direction === 'outbound' && callInfo.sbcCallid && {'X-CID': callInfo.sbcCallid}),
...(req && req.has('P-Asserted-Identity') && !JAMBONZ_DISABLE_DIAL_PAI_HEADER &&
{'P-Asserted-Identity': req.get('P-Asserted-Identity')}),
...(req && req.has('X-Voip-Carrier-Sid') && {'X-Voip-Carrier-Sid': req.get('X-Voip-Carrier-Sid')}),
@@ -601,6 +613,7 @@ class TaskDial extends Task {
dialCallStatus: obj.callStatus,
dialSipStatus: obj.sipStatus,
dialCallSid: sd.callSid,
dialSbcCallid: sd.callInfo.sbcCallid
});
}
switch (obj.callStatus) {

View File

@@ -12,7 +12,8 @@ const {
JambonzTranscriptionEvents,
AssemblyAiTranscriptionEvents,
VadDetection,
VerbioTranscriptionEvents
VerbioTranscriptionEvents,
SpeechmaticsTranscriptionEvents
} = require('../utils/constants.json');
const {
JAMBONES_GATHER_EARLY_HINTS_MATCH,
@@ -22,6 +23,7 @@ const {
const makeTask = require('./make_task');
const assert = require('assert');
const SttTask = require('./stt-task');
const { SpeechCredentialError } = require('../utils/error');
class TaskGather extends SttTask {
constructor(logger, opts, parentTask) {
@@ -122,7 +124,20 @@ class TaskGather extends SttTask {
return s;
}
async exec(cs, {ep}) {
async exec(cs, obj) {
try {
await this.handling(cs, obj);
} catch (error) {
if (error instanceof SpeechCredentialError) {
this.logger.info('Gather failed due to SpeechCredentialError, finished!');
this.notifyTaskDone();
return;
}
throw error;
}
}
async handling(cs, {ep}) {
this.logger.debug({options: this.data}, 'Gather:exec');
await super.exec(cs, {ep});
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
@@ -177,8 +192,10 @@ class TaskGather extends SttTask {
this._startVad();
const startDtmfListener = () => {
assert(!this._dtmfListenerStarted);
if (this.input.includes('digits') || this.dtmfBargein || this.asrDtmfTerminationDigit) {
ep.on('dtmf', this._onDtmf.bind(this, cs, ep));
this._dtmfListenerStarted = true;
}
};
@@ -281,7 +298,8 @@ class TaskGather extends SttTask {
}
}
if (this.listenDuringPrompt) {
// https://github.com/jambonz/jambonz-feature-server/issues/913
if (this.listenDuringPrompt || (!this.sayTask && !this.playTask)) {
startDtmfListener();
}
@@ -497,6 +515,24 @@ class TaskGather extends SttTask {
this.addCustomEventListener(ep, AssemblyAiTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep));
break;
case 'speechmatics':
this.bugname = `${this.bugname_prefix}speechmatics_transcribe`;
this.addCustomEventListener(
ep, SpeechmaticsTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
this.addCustomEventListener(ep, SpeechmaticsTranscriptionEvents.Info,
this._onSpeechmaticsInfo.bind(this, cs, ep));
this.addCustomEventListener(ep, SpeechmaticsTranscriptionEvents.RecognitionStarted,
this._onSpeechmaticsRecognitionStarted.bind(this, cs, ep));
this.addCustomEventListener(ep, SpeechmaticsTranscriptionEvents.Connect,
this._onVendorConnect.bind(this, cs, ep));
this.addCustomEventListener(ep, SpeechmaticsTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep));
this.addCustomEventListener(ep, SpeechmaticsTranscriptionEvents.Error,
this._onSpeechmaticsErrror.bind(this, cs, ep));
break;
default:
if (this.vendor.startsWith('custom:')) {
this.bugname = `${this.bugname_prefix}${this.vendor}_transcribe`;
@@ -735,7 +771,7 @@ class TaskGather extends SttTask {
evt = this.normalizeTranscription(evt, this.vendor, 1, this.language,
this.shortUtterance, this.data.recognizer.punctuation);
//this.logger.debug({evt, bugname, finished, vendor: this.vendor}, 'Gather:_onTranscription normalized transcript');
this.logger.debug({evt, bugname, finished, vendor: this.vendor}, 'Gather:_onTranscription normalized transcript');
if (evt.alternatives.length === 0) {
this.logger.info({evt}, 'TaskGather:_onTranscription - got empty transcript, continue listening');
@@ -970,10 +1006,11 @@ class TaskGather extends SttTask {
if (code === 413 && error === 'Too much speech') return this._resolve('timeout');
}
this.logger.info({evt}, 'TaskGather:_onJambonzError');
const errMessage = evt.error || evt.Message;
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Custom speech vendor ${this.vendor} error: ${evt.error}`,
message: `Custom speech vendor ${this.vendor} error: ${errMessage}`,
vendor: this.vendor,
target_sid: cs.callSid
}).catch((err) => this.logger.info({err}, 'Error generating alert for jambonz custom connection failure'));
@@ -989,12 +1026,25 @@ class TaskGather extends SttTask {
}
}
async _onSpeechmaticsErrror(cs, _ep, evt) {
// eslint-disable-next-line no-unused-vars
const {message, ...e} = evt;
this._onVendorError(cs, _ep, {error: JSON.stringify(e)});
}
async _onVendorError(cs, _ep, evt) {
super._onVendorError(cs, _ep, evt);
if (!(await this._startFallback(cs, _ep, evt))) {
this._resolve('stt-error', evt);
}
}
async _onSpeechmaticsRecognitionStarted(_cs, _ep, evt) {
this.logger.debug({evt}, 'TaskGather:_onSpeechmaticsRecognitionStarted');
}
async _onSpeechmaticsInfo(_cs, _ep, evt) {
this.logger.debug({evt}, 'TaskGather:_onSpeechmaticsInfo');
}
_onVadDetected(cs, ep) {
if (this.bargein && this.minBargeinWordCount === 0) {
@@ -1053,6 +1103,7 @@ class TaskGather extends SttTask {
this.span.setAttributes({
channel: 1,
'stt.label': this.label || 'None',
'stt.resolve': reason,
'stt.result': JSON.stringify(evt)
});

96
lib/tasks/llm/index.js Normal file
View File

@@ -0,0 +1,96 @@
const Task = require('../task');
const {TaskPreconditions} = require('../../utils/constants');
const TaskLlmOpenAI_S2S = require('./llms/openai_s2s');
class TaskLlm extends Task {
constructor(logger, opts) {
super(logger, opts);
this.preconditions = TaskPreconditions.Endpoint;
['vendor', 'model', 'auth', 'connectOptions'].forEach((prop) => {
this[prop] = this.data[prop];
});
this.eventHandlers = [];
// delegate to the specific llm model
this.llm = this.createSpecificLlm();
}
get name() { return this.llm.name ; }
get toolHook() { return this.llm?.toolHook; }
get eventHook() { return this.llm?.eventHook; }
get ep() { return this.cs.ep; }
async exec(cs, {ep}) {
await super.exec(cs, {ep});
await this.llm.exec(cs, {ep});
}
async kill(cs) {
super.kill(cs);
await this.llm.kill(cs);
}
createSpecificLlm() {
let llm;
switch (this.vendor) {
case 'openai':
case 'microsoft':
if (this.model.startsWith('gpt-4o-realtime')) {
llm = new TaskLlmOpenAI_S2S(this.logger, this.data, this);
}
break;
default:
throw new Error(`Unsupported vendor ${this.vendor} for LLM`);
}
if (!llm) {
throw new Error(`Unsupported vendor:model ${this.vendor}:${this.model}`);
}
return llm;
}
addCustomEventListener(ep, event, handler) {
this.eventHandlers.push({ep, event, handler});
ep.addCustomEventListener(event, handler);
}
removeCustomEventListeners() {
this.eventHandlers.forEach((h) => h.ep.removeCustomEventListener(h.event, h.handler));
}
async sendEventHook(data) {
await this.cs?.requestor.request('llm:event', this.eventHook, data);
}
async sendToolHook(tool_call_id, data) {
await this.cs?.requestor.request('llm:tool-call', this.toolHook, {tool_call_id, ...data});
}
async processToolOutput(tool_call_id, data) {
if (!this.ep.connected) {
this.logger.info('TaskLlm:processToolOutput - no connected endpoint');
return;
}
this.llm.processToolOutput(this.ep, tool_call_id, data);
}
async processLlmUpdate(data, callSid) {
if (this.ep.connected) {
if (typeof this.llm.processLlmUpdate === 'function') {
this.llm.processLlmUpdate(this.ep, data, callSid);
}
else {
const {vendor, model} = this.llm;
this.logger.info({data, callSid},
`TaskLlm:_processLlmUpdate: LLM ${vendor}:${model} does not support llm:update`);
}
}
}
}
module.exports = TaskLlm;
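
A hypothetical `llm` verb payload assembled only from the properties the constructors here read; the `verb` key and the hook URLs are assumptions, and the authoritative schema lives in @jambonz/verb-specifications:

```js
// Hypothetical llm verb; vendor/model must resolve to a supported S2S model
const llmVerb = {
  verb: 'llm',                                   // assumed task key
  vendor: 'openai',
  model: 'gpt-4o-realtime-preview-2024-10-01',
  auth: { apiKey: process.env.OPENAI_API_KEY },  // auth.apiKey is required
  actionHook: '/llm/action',
  eventHook: '/llm/events',
  toolHook: '/llm/tools',
  llmOptions: {
    response_create: {                           // required initial response.create
      modalities: ['text', 'audio'],
      instructions: 'You are a helpful voice assistant.'
    },
    session_update: { turn_detection: { type: 'server_vad' } }
  },
  events: ['all', '-response.audio.delta']
};
```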

View File

@@ -0,0 +1,357 @@
const Task = require('../../task');
const TaskName = 'Llm_OpenAI_s2s';
const {LlmEvents_OpenAI} = require('../../../utils/constants');
const ClientEvent = 'client.event';
const SessionDelete = 'session.delete';
const openai_server_events = [
'error',
'session.created',
'session.updated',
'conversation.created',
'input_audio_buffer.committed',
'input_audio_buffer.cleared',
'input_audio_buffer.speech_started',
'input_audio_buffer.speech_stopped',
'conversation.item.created',
'conversation.item.input_audio_transcription.completed',
'conversation.item.input_audio_transcription.failed',
'conversation.item.truncated',
'conversation.item.deleted',
'response.created',
'response.done',
'response.output_item.added',
'response.output_item.done',
'response.content_part.added',
'response.content_part.done',
'response.text.delta',
'response.text.done',
'response.audio_transcript.delta',
'response.audio_transcript.done',
'response.audio.delta',
'response.audio.done',
'response.function_call_arguments.delta',
'response.function_call_arguments.done',
'rate_limits.updated',
'output_audio.playback_started',
'output_audio.playback_stopped',
];
const expandWildcards = (events) => {
const expandedEvents = [];
events.forEach((evt) => {
if (evt.endsWith('.*')) {
const prefix = evt.slice(0, -2); // Remove the wildcard ".*"
const matchingEvents = openai_server_events.filter((e) => e.startsWith(prefix));
expandedEvents.push(...matchingEvents);
} else {
expandedEvents.push(evt);
}
});
return expandedEvents;
};
class TaskLlmOpenAI_S2S extends Task {
constructor(logger, opts, parentTask) {
super(logger, opts, parentTask);
this.parent = parentTask;
this.vendor = this.parent.vendor;
this.model = this.parent.model;
this.auth = this.parent.auth;
this.connectionOptions = this.parent.connectOptions;
const {apiKey} = this.auth || {};
if (!apiKey) throw new Error('auth.apiKey is required for OpenAI S2S');
if (['openai', 'microsoft'].indexOf(this.vendor) === -1) {
throw new Error(`Invalid vendor ${this.vendor} for OpenAI S2S`);
}
if ('microsoft' === this.vendor && !this.connectionOptions?.host) {
throw new Error('connectionOptions.host is required for Microsoft OpenAI S2S');
}
this.apiKey = apiKey;
this.authType = 'microsoft' === this.vendor ? 'query' : 'bearer';
this.actionHook = this.data.actionHook;
this.eventHook = this.data.eventHook;
this.toolHook = this.data.toolHook;
const {response_create, session_update} = this.data.llmOptions;
if (typeof response_create !== 'object') {
throw new Error('llmOptions with an initial response.create is required for OpenAI S2S');
}
this.response_create = response_create;
this.session_update = session_update;
this.results = {
completionReason: 'normal conversation end'
};
/**
* only one of these will have items,
* if includeEvents, then these are the events to include
* if excludeEvents, then these are the events to exclude
*/
this.includeEvents = [];
this.excludeEvents = [];
/* default to all events if user did not specify */
this._populateEvents(this.data.events || openai_server_events);
this.addCustomEventListener = parentTask.addCustomEventListener.bind(parentTask);
this.removeCustomEventListeners = parentTask.removeCustomEventListeners.bind(parentTask);
}
get name() { return TaskName; }
get host() {
const {host} = this.connectionOptions || {};
return host || (this.vendor === 'openai' ? 'api.openai.com' : void 0);
}
get path() {
const {path} = this.connectionOptions || {};
if (path) return path;
switch (this.vendor) {
case 'openai':
return 'v1/realtime?model=gpt-4o-realtime-preview-2024-10-01';
case 'microsoft':
return 'openai/realtime?api-version=2024-10-01-preview&deployment=gpt-4o-realtime-preview-1001&';
}
}
async _api(ep, args) {
const res = await ep.api('uuid_openai_s2s', `^^|${args.join('|')}`);
if (!res.body?.startsWith('+OK')) {
throw new Error(`Error calling uuid_openai_s2s with args ${args.join('|')}: ${res.body}`);
}
}
async exec(cs, {ep}) {
await super.exec(cs);
await this._startListening(cs, ep);
await this.awaitTaskDone();
/* note: the parent llm verb started the span, which is why this is necessary */
await this.parent.performAction(this.results);
this._unregisterHandlers();
}
async kill(cs) {
super.kill(cs);
this._api(cs.ep, [cs.ep.uuid, SessionDelete])
.catch((err) => this.logger.info({err}, 'TaskLlmOpenAI_S2S:kill - error deleting session'));
this.notifyTaskDone();
}
/**
* Send function call output to the OpenAI server in the form of conversation.item.create
* per https://platform.openai.com/docs/guides/realtime/function-calls
*/
async processToolOutput(ep, tool_call_id, data) {
try {
this.logger.debug({tool_call_id, data}, 'TaskLlmOpenAI_S2S:processToolOutput');
if (!data.type || data.type !== 'conversation.item.create') {
this.logger.info({data},
'TaskLlmOpenAI_S2S:processToolOutput - invalid tool output, must be conversation.item.create');
}
else {
await this._api(ep, [ep.uuid, ClientEvent, JSON.stringify(data)]);
// spec also recommends to send immediate response.create
await this._api(ep, [ep.uuid, ClientEvent, JSON.stringify({type: 'response.create'})]);
}
} catch (err) {
this.logger.info({err}, 'TaskLlmOpenAI_S2S:processToolOutput');
}
}
/**
* Send a session.update to the OpenAI server
* Note: creating and deleting conversation items are also supported, as is interrupting the assistant
*/
async processLlmUpdate(ep, data, _callSid) {
try {
this.logger.debug({data, _callSid}, 'TaskLlmOpenAI_S2S:processLlmUpdate');
if (!data.type || ![
'session.update',
'conversation.item.create',
'conversation.item.delete',
'response.cancel'
].includes(data.type)) {
this.logger.info({data}, 'TaskLlmOpenAI_S2S:processLlmUpdate - invalid mid-call request');
}
else {
await this._api(ep, [ep.uuid, ClientEvent, JSON.stringify(data)]);
}
} catch (err) {
this.logger.info({err}, 'TaskLlmOpenAI_S2S:processLlmUpdate');
}
}
async _startListening(cs, ep) {
this._registerHandlers(ep);
try {
const args = [ep.uuid, 'session.create', this.host, this.path, this.authType, this.apiKey];
await this._api(ep, args);
} catch (err) {
this.logger.error({err}, 'TaskLlmOpenAI_S2S:_startListening');
this.notifyTaskDone();
}
}
async _sendClientEvent(ep, obj) {
let ok = true;
this.logger.debug({obj}, 'TaskLlmOpenAI_S2S:_sendClientEvent');
try {
const args = [ep.uuid, ClientEvent, JSON.stringify(obj)];
await this._api(ep, args);
} catch (err) {
ok = false;
this.logger.error({err}, 'TaskLlmOpenAI_S2S:_sendClientEvent - Error');
}
return ok;
}
async _sendInitialMessage(ep) {
let obj = {type: 'response.create', response: this.response_create};
if (!await this._sendClientEvent(ep, obj)) {
this.notifyTaskDone();
}
/* send immediate session.update if present */
else if (this.session_update) {
obj = {type: 'session.update', session: this.session_update};
this.logger.debug({obj}, 'TaskLlmOpenAI_S2S:_sendInitialMessage - sending session.update');
if (!await this._sendClientEvent(ep, obj)) {
this.notifyTaskDone();
}
}
}
_registerHandlers(ep) {
this.addCustomEventListener(ep, LlmEvents_OpenAI.Connect, this._onConnect.bind(this, ep));
this.addCustomEventListener(ep, LlmEvents_OpenAI.ConnectFailure, this._onConnectFailure.bind(this, ep));
this.addCustomEventListener(ep, LlmEvents_OpenAI.Disconnect, this._onDisconnect.bind(this, ep));
this.addCustomEventListener(ep, LlmEvents_OpenAI.ServerEvent, this._onServerEvent.bind(this, ep));
}
_unregisterHandlers() {
this.removeCustomEventListeners();
}
_onError(ep, evt) {
this.logger.info({evt}, 'TaskLlmOpenAI_S2S:_onError');
this.notifyTaskDone();
}
_onConnect(ep) {
this.logger.debug('TaskLlmOpenAI_S2S:_onConnect');
this._sendInitialMessage(ep);
}
_onConnectFailure(_ep, evt) {
this.logger.info(evt, 'TaskLlmOpenAI_S2S:_onConnectFailure');
this.results = {completionReason: 'connection failure'};
this.notifyTaskDone();
}
_onDisconnect(_ep, evt) {
this.logger.info(evt, 'TaskLlmOpenAI_S2S:_onDisconnect');
this.results = {completionReason: 'disconnect from remote end'};
this.notifyTaskDone();
}
async _onServerEvent(ep, evt) {
let endConversation = false;
const type = evt.type;
this.logger.info({evt}, 'TaskLlmOpenAI_S2S:_onServerEvent');
/* check for failures, such as rate limit exceeded, that should terminate the conversation */
if (type === 'response.done' && evt.response.status === 'failed') {
endConversation = true;
this.results = {
completionReason: 'server failure',
error: evt.response.status_details?.error
};
}
/* server errors of some sort */
else if (type === 'error') {
endConversation = true;
this.results = {
completionReason: 'server error',
error: evt.error
};
}
/* tool calls */
else if (type === 'response.output_item.done' && evt.item?.type === 'function_call') {
this.logger.debug({evt}, 'TaskLlmOpenAI_S2S:_onServerEvent - function_call');
if (!this.toolHook) {
this.logger.warn({evt}, 'TaskLlmOpenAI_S2S:_onServerEvent - no toolHook defined!');
}
else {
const {name, call_id} = evt.item;
const args = JSON.parse(evt.item.arguments);
try {
await this.parent.sendToolHook(call_id, {name, args});
} catch (err) {
this.logger.info({err, evt}, 'TaskLlmOpenAI - error calling function');
this.results = {
completionReason: 'client error calling function',
error: err
};
endConversation = true;
}
}
}
/* check whether we should notify on this event */
if (this.includeEvents.length > 0 ? this.includeEvents.includes(type) : !this.excludeEvents.includes(type)) {
this.parent.sendEventHook(evt)
.catch((err) => this.logger.info({err}, 'TaskLlmOpenAI_S2S:_onServerEvent - error sending event hook'));
}
if (endConversation) {
this.logger.info({results: this.results}, 'TaskLlmOpenAI_S2S:_onServerEvent - ending conversation due to error');
this.notifyTaskDone();
}
}
_populateEvents(events) {
if (events.includes('all')) {
/* work by excluding specific events */
const exclude = events
.filter((evt) => evt.startsWith('-'))
.map((evt) => evt.slice(1));
if (exclude.length === 0) this.includeEvents = openai_server_events;
else this.excludeEvents = expandWildcards(exclude);
}
else {
/* work by including specific events */
const include = events
.filter((evt) => !evt.startsWith('-'));
this.includeEvents = expandWildcards(include);
}
this.logger.debug({
includeEvents: this.includeEvents,
excludeEvents: this.excludeEvents
}, 'TaskLlmOpenAI_S2S:_populateEvents');
}
}
module.exports = TaskLlmOpenAI_S2S;
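
Two hypothetical `events` arrays and how _populateEvents above interprets them, using the wildcard expansion from expandWildcards:

```js
// Exclude style: start from all server events, then subtract a wildcard group
// -> excludeEvents = the four input_audio_buffer.* events; everything else is sent
const excludeStyle = ['all', '-input_audio_buffer.*'];

// Include style: only the listed events are sent to the eventHook
// -> includeEvents = the conversation.item.* events plus response.done
const includeStyle = ['conversation.item.*', 'response.done'];
```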

View File

@@ -62,6 +62,9 @@ function makeTask(logger, obj, parent) {
case TaskName.Message:
const TaskMessage = require('./message');
return new TaskMessage(logger, data, parent);
case TaskName.Llm:
const TaskLlm = require('./llm');
return new TaskLlm(logger, data, parent);
case TaskName.Rasa:
const TaskRasa = require('./rasa');
return new TaskRasa(logger, data, parent);

View File

@@ -39,9 +39,9 @@ class TaskRestDial extends Task {
if (this.data.amd) {
this.startAmd = cs.startAmd;
this.stopAmd = cs.stopAmd;
this.on('amd', this._onAmdEvent.bind(this, cs));
}
this.stopAmd = cs.stopAmd;
this._setCallTimer();
await this.awaitTaskDone();

View File

@@ -1,6 +1,7 @@
const TtsTask = require('./tts-task');
const {TaskName, TaskPreconditions} = require('../utils/constants');
const pollySSMLSplit = require('polly-ssml-split');
const { SpeechCredentialError } = require('../utils/error');
const breakLengthyTextIfNeeded = (logger, text) => {
const chunkSize = 1000;
@@ -61,148 +62,23 @@ class TaskSay extends TtsTask {
}
}
async _synthesizeWithSpecificVendor(cs, ep, {vendor, language, voice, label, preCache = false}) {
const {srf, accountSid:account_sid} = cs;
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, srf);
const {writeAlerts, AlertType, stats} = srf.locals;
const {synthAudio} = srf.locals.dbHelpers;
const engine = this.synthesizer.engine || cs.synthesizer?.engine || 'neural';
const salt = cs.callSid;
let credentials = cs.getSpeechCredentials(vendor, 'tts', label);
/* parse Nuance voices into name and model */
let model;
if (vendor === 'nuance' && voice) {
const arr = /([A-Za-z-]*)\s+-\s+(enhanced|standard)/.exec(voice);
if (arr) {
voice = arr[1];
model = arr[2];
}
} else if (vendor === 'deepgram') {
model = voice;
}
/* allow for microsoft custom region voice and api_key to be specified as an override */
if (vendor === 'microsoft' && this.options.deploymentId) {
credentials = credentials || {};
credentials.use_custom_tts = true;
credentials.custom_tts_endpoint = this.options.deploymentId;
credentials.api_key = this.options.apiKey || credentials.apiKey;
credentials.region = this.options.region || credentials.region;
voice = this.options.voice || voice;
} else if (vendor === 'elevenlabs') {
credentials = credentials || {};
credentials.model_id = this.options.model_id || credentials.model_id;
credentials.voice_settings = this.options.voice_settings || {};
credentials.optimize_streaming_latency = this.options.optimize_streaming_latency
|| credentials.optimize_streaming_latency;
voice = this.options.voice_id || voice;
}
ep.set({
tts_engine: vendor.startsWith('custom:') ? 'custom' : vendor,
tts_voice: voice,
cache_speech_handles: !cs.currentTtsVendor || cs.currentTtsVendor === vendor ? 1 : 0,
}).catch((err) => this.logger.info({err}, 'Error setting tts_engine on endpoint'));
// set the current vendor on the call session
// If vendor is changed from the previous one, then reset the cache_speech_handles flag
cs.currentTtsVendor = vendor;
if (!preCache && !this._disableTracing) this.logger.info({vendor, language, voice, model}, 'TaskSay:exec');
async exec(cs, obj) {
try {
if (!credentials) {
writeAlerts({
account_sid,
alert_type: AlertType.TTS_NOT_PROVISIONED,
vendor,
target_sid: cs.callSid
}).catch((err) => this.logger.info({err}, 'Error generating alert for no tts'));
throw new Error('no provisioned speech credentials for TTS');
await this.handling(cs, obj);
this.emit('playDone');
} catch (error) {
if (error instanceof SpeechCredentialError) {
// if say failed due to speech credentials, an alarm is written and an error notification is sent;
// finish this say so the session can move on to the next task.
this.logger.info('Say failed due to SpeechCredentialError, finished!');
this.emit('playDone');
return;
}
// synthesize all of the text elements
let lastUpdated = false;
/* produce an audio segment from the provided text */
const generateAudio = async(text) => {
if (this.killed) return;
if (text.startsWith('silence_stream://')) return text;
/* otel: trace time for tts */
if (!preCache && !this._disableTracing) {
const {span} = this.startChildSpan('tts-generation', {
'tts.vendor': vendor,
'tts.language': language,
'tts.voice': voice
});
this.otelSpan = span;
}
try {
const {filePath, servedFromCache, rtt} = await synthAudio(stats, {
account_sid,
text,
vendor,
language,
voice,
engine,
model,
salt,
credentials,
options: this.options,
disableTtsCache : this.disableTtsCache,
renderForCaching: preCache
});
if (!filePath.startsWith('say:')) {
this.logger.debug(`Say: file ${filePath}, served from cache ${servedFromCache}`);
if (filePath) cs.trackTmpFile(filePath);
if (this.otelSpan) {
this.otelSpan.setAttributes({'tts.cached': servedFromCache});
this.otelSpan.end();
this.otelSpan = null;
}
if (!servedFromCache && !lastUpdated) {
lastUpdated = true;
updateSpeechCredentialLastUsed(credentials.speech_credential_sid).catch(() => {/* logged error */});
}
if (!servedFromCache && rtt && !preCache && !this._disableTracing) {
this.notifyStatus({
event: 'synthesized-audio',
vendor,
language,
characters: text.length,
elapsedTime: rtt
});
}
}
else {
this.logger.debug('Say: a streaming tts api will be used');
const modifiedPath = filePath.replace('say:{', `say:{session-uuid=${ep.uuid},`);
return modifiedPath;
}
return filePath;
} catch (err) {
this.logger.info({err}, 'Error synthesizing tts');
if (this.otelSpan) this.otelSpan.end();
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.TTS_FAILURE,
vendor,
detail: err.message,
target_sid: cs.callSid
}).catch((err) => this.logger.info({err}, 'Error generating alert for tts failure'));
throw err;
}
};
const arr = this.text.map((t) => (this._validateURL(t) ? t : generateAudio(t)));
return (await Promise.all(arr)).filter((fp) => fp && fp.length);
} catch (err) {
this.logger.info(err, 'TaskSay:exec error');
throw err;
throw error;
}
}
async exec(cs, {ep}) {
async handling(cs, {ep}) {
const {srf, accountSid:account_sid, callSid:target_sid} = cs;
const {writeAlerts, AlertType} = srf.locals;
const {addFileToCache} = srf.locals.dbHelpers;
@@ -220,10 +96,7 @@ class TaskSay extends TtsTask {
let voice = this.synthesizer.voice && this.synthesizer.voice !== 'default' ?
this.synthesizer.voice :
cs.speechSynthesisVoice;
// label can be null/empty in synthesizer config, just use application level label if it's default
let label = this.synthesizer.label === 'default' ?
cs.speechSynthesisLabel :
this.synthesizer.label;
let label = this.taskInlcudeSynthesizer ? this.synthesizer.label : cs.speechSynthesisLabel;
const fallbackVendor = this.synthesizer.fallbackVendor && this.synthesizer.fallbackVendor !== 'default' ?
this.synthesizer.fallbackVendor :
@@ -234,10 +107,8 @@ class TaskSay extends TtsTask {
const fallbackVoice = this.synthesizer.fallbackVoice && this.synthesizer.fallbackVoice !== 'default' ?
this.synthesizer.fallbackVoice :
cs.fallbackSpeechSynthesisVoice;
// label can be null/empty in synthesizer config, just use application level label if it's default
const fallbackLabel = this.synthesizer.fallbackLabel === 'default' ?
cs.fallbackSpeechSynthesisLabel :
this.synthesizer.fallbackLabel;
const fallbackLabel = this.taskInlcudeSynthesizer ?
this.synthesizer.fallbackLabel : cs.fallbackSpeechSynthesisLabel;
if (cs.hasFallbackTts) {
vendor = fallbackVendor;
@@ -263,7 +134,7 @@ class TaskSay extends TtsTask {
} else {
this.notifyError(
{ msg: 'TTS error', details:`TTS vendor ${vendor} error: ${error}`, failover: 'not available'});
throw error;
throw new SpeechCredentialError(error.message);
}
};
let filepath;
@@ -282,12 +153,12 @@ class TaskSay extends TtsTask {
await this.playToConfMember(ep, memberId, confName, confUuid, filepath[segment]);
}
else {
let tts_cache_filename;
if (filepath[segment].startsWith('say:{')) {
const isStreaming = filepath[segment].startsWith('say:{');
if (isStreaming) {
const arr = /^say:\{.*\}\s*(.*)$/.exec(filepath[segment]);
if (arr) this.logger.debug(`Say:exec sending streaming tts request: ${arr[1].substring(0, 64)}..`);
}
else this.logger.debug(`Say:exec sending ${filepath[segment].substring(0, 64)}`);
else this.logger.debug(`Say:exec sending ${filepath[segment].substring(0, 64)}`);
ep.once('playback-start', (evt) => {
this.logger.debug({evt}, 'Say got playback-start');
if (this.otelSpan) {
@@ -295,17 +166,19 @@ class TaskSay extends TtsTask {
this.otelSpan.end();
this.otelSpan = null;
if (evt.variable_tts_cache_filename) {
tts_cache_filename = evt.variable_tts_cache_filename;
cs.trackTmpFile(evt.variable_tts_cache_filename);
}
else {
this.logger.info('No tts_cache_filename in playback-start event');
}
}
});
ep.once('playback-stop', (evt) => {
if (!tts_cache_filename || evt.variable_tts_cache_filename !== tts_cache_filename) {
this.logger.info({evt}, 'Say: discarding playback-stop from other say verb');
this.logger.debug({evt}, 'Say got playback-stop');
if (evt.variable_tts_error) {
writeAlerts({
account_sid,
alert_type: AlertType.TTS_FAILURE,
vendor,
detail: evt.variable_tts_error
}).catch((err) => this.logger.info({err}, 'Error generating alert for no tts'));
}
else {
this.logger.debug({evt}, 'Say got playback-stop');
@@ -350,6 +223,7 @@ class TaskSay extends TtsTask {
continue;
} catch (err) {
this.logger.info({err}, 'Error waiting for playback-stop event');
throw err;
}
} finally {
this._playPromise = null;
@@ -367,7 +241,6 @@ class TaskSay extends TtsTask {
segment++;
}
}
this.emit('playDone');
}
async kill(cs) {

View File

@@ -2,6 +2,8 @@ const Task = require('./task');
const assert = require('assert');
const crypto = require('crypto');
const { TaskPreconditions, CobaltTranscriptionEvents } = require('../utils/constants');
const { SpeechCredentialError } = require('../utils/error');
const {JAMBONES_AWS_TRANSCRIBE_USE_GRPC} = require('../config');
class SttTask extends Task {
@@ -16,14 +18,22 @@ class SttTask extends Task {
normalizeTranscription,
setSpeechCredentialsAtRuntime,
compileSonioxTranscripts,
consolidateTranscripts
consolidateTranscripts,
updateSpeechmaticsPayload
} = require('../utils/transcription-utils')(logger);
this.setChannelVarsForStt = setChannelVarsForStt;
this.normalizeTranscription = normalizeTranscription;
this.compileSonioxTranscripts = compileSonioxTranscripts;
this.consolidateTranscripts = consolidateTranscripts;
this.updateSpeechmaticsPayload = updateSpeechmaticsPayload;
this.eventHandlers = [];
this.isHandledByPrimaryProvider = true;
/**
* The task uses taskIncludeRecognizer to decide which label to use:
* if taskIncludeRecognizer === true, use the label from verb.recognizer, even if it is empty
* if taskIncludeRecognizer === false, use the label from application.recognizer
*/
this.taskIncludeRecognizer = !!this.data.recognizer;
if (this.data.recognizer) {
const recognizer = this.data.recognizer;
this.vendor = recognizer.vendor;
@@ -33,7 +43,6 @@ class SttTask extends Task {
//fallback
this.fallbackVendor = recognizer.fallbackVendor || 'default';
this.fallbackLanguage = recognizer.fallbackLanguage || 'default';
// label can be empty and should not have default value.
this.fallbackLabel = recognizer.fallbackLabel;
/* let credentials be supplied in the recognizer object at runtime */
@@ -82,8 +91,7 @@ class SttTask extends Task {
this.language = cs.speechRecognizerLanguage;
if (this.data.recognizer) this.data.recognizer.language = this.language;
}
// label can be empty, should not assign application level label
if ('default' === this.label) {
if (!this.taskIncludeRecognizer) {
this.label = cs.speechRecognizerLabel;
if (this.data.recognizer) this.data.recognizer.label = this.label;
}
@@ -96,17 +104,21 @@ class SttTask extends Task {
this.fallbackLanguage = cs.fallbackSpeechRecognizerLanguage;
if (this.data.recognizer) this.data.recognizer.fallbackLanguage = this.fallbackLanguage;
}
// label can be empty, should not assign application level label
if ('default' === this.fallbackLabel) {
if (!this.taskIncludeRecognizer) {
this.fallbackLabel = cs.fallbackSpeechRecognizerLabel;
if (this.data.recognizer) this.data.recognizer.fallbackLabel = this.fallbackLabel;
}
// If call is already fallback to 2nd ASR vendor
// use that.
if (cs.hasFallbackAsr) {
this.vendor = this.fallbackVendor;
this.language = this.fallbackLanguage;
this.label = this.fallbackLabel;
if (this.taskIncludeRecognizer) {
// reset fallback ASR from previous run if this verb contains data.recognizer.
cs.hasFallbackAsr = false;
} else {
this.logger.debug('Call session has fallback to 2nd ASR, use 2nd recognizer configuration');
this.vendor = this.fallbackVendor;
this.language = this.fallbackLanguage;
this.label = this.fallbackLabel;
}
}
if (!this.data.recognizer.vendor) {
this.data.recognizer.vendor = this.vendor;
@@ -181,8 +193,8 @@ class SttTask extends Task {
vendor,
target_sid: cs.callSid
}).catch((err) => this.logger.info({err}, 'Error generating alert for no stt'));
this.notifyTaskDone();
throw new Error(`No speech-to-text service credentials for ${vendor} have been configured`);
// the ASR might have a fallback configuration, so do not mark the task done here.
throw new SpeechCredentialError(`No speech-to-text service credentials for ${vendor} have been configured`);
}
if (vendor === 'nuance' && credentials.client_id) {
@@ -206,14 +218,25 @@ class SttTask extends Task {
region,
roleArn
});
this.logger.debug({roleArn}, `got aws access token ${servedFromCache ? 'from cache' : ''}`);
this.logger.debug({roleArn}, `(roleArn) got aws access token ${servedFromCache ? 'from cache' : ''}`);
credentials = {...credentials, accessKeyId, secretAccessKey, sessionToken};
} else if (vendor === 'verbio' && credentials.client_id && credentials.client_secret) {
}
else if (vendor === 'verbio' && credentials.client_id && credentials.client_secret) {
const {access_token, servedFromCache} = await getVerbioAccessToken(credentials);
this.logger.debug({client_id: credentials.client_id},
`got verbio access token ${servedFromCache ? 'from cache' : ''}`);
credentials.access_token = access_token;
}
else if (vendor == 'aws' && !JAMBONES_AWS_TRANSCRIBE_USE_GRPC) {
/* get AWS access token */
const {accessKeyId, secretAccessKey, securityToken, region } = credentials;
if (!securityToken) {
const { servedFromCache, ...newCredentials} = await getAwsAuthToken({accessKeyId, secretAccessKey, region});
this.logger.debug({newCredentials}, `got aws security token ${servedFromCache ? 'from cache' : ''}`);
credentials = {...newCredentials, region};
}
}
return credentials;
}
@@ -223,12 +246,12 @@ class SttTask extends Task {
async _initFallback() {
assert(this.fallbackVendor, 'fallback failed without fallbackVendor configuration');
this.logger.info(`Failed to use primary STT provider, fallback to ${this.fallbackVendor}`);
this.isHandledByPrimaryProvider = false;
this.cs.hasFallbackAsr = true;
this.logger.info(`Failed to use primary STT provider, fallback to ${this.fallbackVendor}`);
this.vendor = this.fallbackVendor;
this.language = this.fallbackLanguage;
this.label = this.fallbackLabel;
this.vendor = this.cs.fallbackSpeechRecognizerVendor = this.fallbackVendor;
this.language = this.cs.fallbackSpeechRecognizerLanguage = this.fallbackLanguage;
this.label = this.cs.fallbackSpeechRecognizerLabel = this.fallbackLabel;
this.data.recognizer.vendor = this.vendor;
this.data.recognizer.language = this.language;
this.data.recognizer.label = this.label;
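For illustration, a hedged sketch of a transcribe verb whose recognizer sets taskIncludeRecognizer = true and carries its own fallback settings. The hook path, vendors and labels below are hypothetical; the recognizer fields are the ones handled in this diff.
// Illustrative only -- not taken from this repo's tests
const verb = {
  verb: 'transcribe',
  transcriptionHook: '/transcripts',   // hypothetical webhook path
  recognizer: {
    vendor: 'deepgram',
    language: 'en-US',
    label: '',                         // honored as-is because the verb supplies its own recognizer
    fallbackVendor: 'google',
    fallbackLanguage: 'en-US',
    fallbackLabel: 'backup'            // hypothetical credential label
  }
};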

View File

@@ -12,10 +12,13 @@ const {
NvidiaTranscriptionEvents,
JambonzTranscriptionEvents,
TranscribeStatus,
AssemblyAiTranscriptionEvents
AssemblyAiTranscriptionEvents,
VerbioTranscriptionEvents,
SpeechmaticsTranscriptionEvents
} = require('../utils/constants.json');
const { normalizeJambones } = require('@jambonz/verb-specifications');
const SttTask = require('./stt-task');
const { SpeechCredentialError } = require('../utils/error');
const STT_LISTEN_SPAN_NAME = 'stt-listen';
@@ -73,7 +76,20 @@ class TaskTranscribe extends SttTask {
return this.channel === 2 || this.separateRecognitionPerChannel && this.ep2;
}
async exec(cs, {ep, ep2}) {
async exec(cs, obj) {
try {
await this.handling(cs, obj);
} catch (error) {
if (error instanceof SpeechCredentialError) {
this.logger.info('Transcribe failed due to SpeechCredentialError, finished!');
this.notifyTaskDone();
return;
}
throw error;
}
}
async handling(cs, {ep, ep2}) {
await super.exec(cs, {ep, ep2});
if (this.data.recognizer.vendor === 'nuance') {
@@ -223,6 +239,13 @@ class TaskTranscribe extends SttTask {
this.addCustomEventListener(ep, SonioxTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
break;
case 'verbio':
this.bugname = `${this.bugname_prefix}verbio_transcribe`;
this.addCustomEventListener(
ep, VerbioTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
break;
case 'cobalt':
this.bugname = `${this.bugname_prefix}cobalt_transcribe`;
this.addCustomEventListener(ep, CobaltTranscriptionEvents.Transcription,
@@ -280,6 +303,22 @@ class TaskTranscribe extends SttTask {
this._onVendorConnectFailure.bind(this, cs, ep, channel));
break;
case 'speechmatics':
this.bugname = `${this.bugname_prefix}speechmatics_transcribe`;
this.addCustomEventListener(
ep, SpeechmaticsTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
this.addCustomEventListener(ep, SpeechmaticsTranscriptionEvents.Info,
this._onSpeechmaticsInfo.bind(this, cs, ep));
this.addCustomEventListener(ep, SpeechmaticsTranscriptionEvents.RecognitionStarted,
this._onSpeechmaticsRecognitionStarted.bind(this, cs, ep));
this.addCustomEventListener(ep, SpeechmaticsTranscriptionEvents.Connect,
this._onVendorConnect.bind(this, cs, ep));
this.addCustomEventListener(ep, SpeechmaticsTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep));
this.addCustomEventListener(ep, SpeechmaticsTranscriptionEvents.Error,
this._onSpeechmaticsError.bind(this, cs, ep));
break;
default:
if (this.vendor.startsWith('custom:')) {
this.bugname = `${this.bugname_prefix}${this.vendor}_transcribe`;
@@ -463,6 +502,7 @@ class TaskTranscribe extends SttTask {
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
this.childSpan[channel - 1].span.setAttributes({
channel,
'stt.label': this.label || 'None',
'stt.resolve': 'transcript',
'stt.result': JSON.stringify(evt)
});
@@ -516,7 +556,8 @@ class TaskTranscribe extends SttTask {
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
this.childSpan[channel - 1].span.setAttributes({
channel,
'stt.resolve': 'timeout'
'stt.resolve': 'timeout',
'stt.label': this.label || 'None',
});
this.childSpan[channel - 1].span.end();
}
@@ -533,7 +574,8 @@ class TaskTranscribe extends SttTask {
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
this.childSpan[channel - 1].span.setAttributes({
channel,
'stt.resolve': 'max duration exceeded'
'stt.resolve': 'max duration exceeded',
'stt.label': this.label || 'None',
});
this.childSpan[channel - 1].span.end();
}
@@ -617,7 +659,8 @@ class TaskTranscribe extends SttTask {
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
this.childSpan[channel - 1].span.setAttributes({
channel,
'stt.resolve': 'connection failure'
'stt.resolve': 'connection failure',
'stt.label': this.label || 'None',
});
this.childSpan[channel - 1].span.end();
}
@@ -626,6 +669,20 @@ class TaskTranscribe extends SttTask {
}
}
async _onSpeechmaticsRecognitionStarted(_cs, _ep, evt) {
this.logger.debug({evt}, 'TaskTranscribe:_onSpeechmaticsRecognitionStarted');
}
async _onSpeechmaticsInfo(_cs, _ep, evt) {
this.logger.debug({evt}, 'TaskTranscribe:_onSpeechmaticsInfo');
}
async _onSpeechmaticsError(cs, _ep, evt) {
// eslint-disable-next-line no-unused-vars
const {message, ...e} = evt;
this._onVendorError(cs, _ep, {error: JSON.stringify(e)});
}
_startAsrTimer(channel) {
if (this.vendor === 'deepgram') return; // no need
assert(this.isContinuousAsr);

View File

@@ -1,5 +1,6 @@
const Task = require('./task');
const { TaskPreconditions } = require('../utils/constants');
const { SpeechCredentialError } = require('../utils/error');
class TtsTask extends Task {
@@ -10,6 +11,12 @@ class TtsTask extends Task {
this.preconditions = TaskPreconditions.Endpoint;
this.earlyMedia = this.data.earlyMedia === true || (parentTask && parentTask.earlyMedia);
/**
* The task uses taskInlcudeSynthesizer to decide where the label comes from:
* if taskInlcudeSynthesizer === true, use the label from verb.synthesizer, even if it is empty;
* if taskInlcudeSynthesizer === false, use the label from application.synthesizer.
*/
this.taskInlcudeSynthesizer = !!this.data.synthesizer;
this.synthesizer = this.data.synthesizer || {};
this.disableTtsCache = this.data.disableTtsCache;
this.options = this.synthesizer.options || {};
@@ -17,16 +24,26 @@ class TtsTask extends Task {
async exec(cs) {
super.exec(cs);
if (cs.synthesizer) {
this.options = {...cs.synthesizer.options, ...this.options};
this.data.synthesizer = this.data.synthesizer || {};
for (const k in cs.synthesizer) {
const newValue = this.data.synthesizer && this.data.synthesizer[k] !== undefined ?
this.data.synthesizer[k] :
cs.synthesizer[k];
if (Array.isArray(newValue)) {
this.data.synthesizer[k] = [...(this.data.synthesizer[k] || []), ...cs.synthesizer[k]];
} else if (typeof newValue === 'object' && newValue !== null) {
this.data.synthesizer[k] = { ...(this.data.synthesizer[k] || {}), ...cs.synthesizer[k] };
} else {
this.data.synthesizer[k] = newValue;
}
}
}
}
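// Illustrative sketch of the merge above, with hypothetical values: verb-level
// scalars win over session-level ones, keys missing from the verb are copied in,
// and object values are merged (session values spread last).
//   cs.synthesizer        = {vendor: 'google', language: 'en-US', options: {speakingRate: 1.0}}
//   this.data.synthesizer = {voice: 'en-US-Neural2-C', options: {pitch: 2}}
//   after exec()          -> {voice: 'en-US-Neural2-C', vendor: 'google', language: 'en-US',
//                             options: {pitch: 2, speakingRate: 1.0}}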
async _synthesizeWithSpecificVendor(cs, ep, {
vendor,
language,
voice,
label,
disableTtsStreaming,
preCache
}) {
async _synthesizeWithSpecificVendor(cs, ep, {vendor, language, voice, label, preCache = false}) {
const {srf, accountSid:account_sid} = cs;
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, srf);
const {writeAlerts, AlertType, stats} = srf.locals;
@@ -35,6 +52,10 @@ class TtsTask extends Task {
const salt = cs.callSid;
let credentials = cs.getSpeechCredentials(vendor, 'tts', label);
if (!credentials) {
throw new SpeechCredentialError(
`No text-to-speech service credentials for ${vendor} with labels: ${label} have been configured`);
}
/* parse Nuance voices into name and model */
let model;
if (vendor === 'nuance' && voice) {
@@ -62,27 +83,50 @@ class TtsTask extends Task {
credentials.optimize_streaming_latency = this.options.optimize_streaming_latency
|| credentials.optimize_streaming_latency;
voice = this.options.voice_id || voice;
} else if (vendor === 'rimelabs') {
credentials = credentials || {};
credentials.model_id = this.options.model_id || credentials.model_id;
} else if (vendor === 'whisper') {
credentials = credentials || {};
credentials.model_id = this.options.model_id || credentials.model_id;
} else if (vendor === 'verbio') {
credentials = credentials || {};
credentials.engine_version = this.options.engine_version || credentials.engine_version;
} else if (vendor === 'playht') {
credentials = credentials || {};
credentials.voice_engine = this.options.voice_engine || credentials.voice_engine;
}
/**
* note on cache_speech_handles. This was found to be risky.
* It can cause a crash in the following sequence on a single call:
* 1. Stream tts on vendor A with cache_speech_handles=1, then
* 2. Stream tts on vendor B with cache_speech_handles=1
*
* we previously tried to track when vendors were switched and manage the flag accordingly,
* but it is difficult to track all the scenarios and the benefit (slightly faster start to tts playout)
* is probably minimal. DH.
*/
ep.set({
tts_engine: vendor,
tts_engine: vendor.startsWith('custom:') ? 'custom' : vendor,
tts_voice: voice,
cache_speech_handles: 1,
}).catch((err) => this.logger.info({err}, `${this.name}: Error setting tts_engine on endpoint`));
//cache_speech_handles: !cs.currentTtsVendor || cs.currentTtsVendor === vendor ? 1 : 0,
cache_speech_handles: 0,
}).catch((err) => this.logger.info({err}, 'Error setting tts_engine on endpoint'));
// set the current vendor on the call session
// If vendor is changed from the previous one, then reset the cache_speech_handles flag
//cs.currentTtsVendor = vendor;
if (!preCache) this.logger.info({vendor, language, voice, model}, `${this.name}:exec`);
if (!preCache && !this._disableTracing) this.logger.info({vendor, language, voice, model}, 'TaskSay:exec');
try {
if (!credentials) {
writeAlerts({
account_sid,
alert_type: AlertType.TTS_NOT_PROVISIONED,
vendor
vendor,
target_sid: cs.callSid
}).catch((err) => this.logger.info({err}, 'Error generating alert for no tts'));
this.notifyError({
msg: 'TTS error',
details:`No speech credentials provisioned for selected vendor ${vendor}`
});
throw new Error('no provisioned speech credentials for TTS');
throw new SpeechCredentialError('no provisioned speech credentials for TTS');
}
// synthesize all of the text elements
let lastUpdated = false;
@@ -93,11 +137,12 @@ class TtsTask extends Task {
if (text.startsWith('silence_stream://')) return text;
/* otel: trace time for tts */
if (!preCache && !this.parentTask) {
if (!preCache && !this._disableTracing) {
const {span} = this.startChildSpan('tts-generation', {
'tts.vendor': vendor,
'tts.language': language,
'tts.voice': voice
'tts.voice': voice,
'tts.label': label || 'None',
});
this.otelSpan = span;
}
@@ -114,11 +159,10 @@ class TtsTask extends Task {
credentials,
options: this.options,
disableTtsCache : this.disableTtsCache,
disableTtsStreaming,
preCache
renderForCaching: preCache
});
if (!filePath.startsWith('say:')) {
this.logger.debug(`file ${filePath}, served from cache ${servedFromCache}`);
this.logger.debug(`Say: file ${filePath}, served from cache ${servedFromCache}`);
if (filePath) cs.trackTmpFile(filePath);
if (this.otelSpan) {
this.otelSpan.setAttributes({'tts.cached': servedFromCache});
@@ -129,7 +173,7 @@ class TtsTask extends Task {
lastUpdated = true;
updateSpeechCredentialLastUsed(credentials.speech_credential_sid).catch(() => {/* logged error */});
}
if (!servedFromCache && rtt && !preCache) {
if (!servedFromCache && rtt && !preCache && !this._disableTracing) {
this.notifyStatus({
event: 'synthesized-audio',
vendor,
@@ -140,7 +184,7 @@ class TtsTask extends Task {
}
}
else {
this.logger.debug('a streaming tts api will be used');
this.logger.debug('Say: a streaming tts api will be used');
const modifiedPath = filePath.replace('say:{', `say:{session-uuid=${ep.uuid},`);
return modifiedPath;
}
@@ -155,7 +199,6 @@ class TtsTask extends Task {
detail: err.message,
target_sid: cs.callSid
}).catch((err) => this.logger.info({err}, 'Error generating alert for tts failure'));
this.notifyError({msg: 'TTS error', details: err.message || err});
throw err;
}
};
@@ -166,6 +209,7 @@ class TtsTask extends Task {
this.logger.info(err, 'TaskSay:exec error');
throw err;
}
}
_validateURL(urlString) {

View File

@@ -2,7 +2,6 @@ const makeTask = require('../tasks/make_task');
const Emitter = require('events');
const { normalizeJambones } = require('@jambonz/verb-specifications');
const {TaskName} = require('../utils/constants');
const assert = require('assert');
/**
* ActionHookDelayProcessor
@@ -25,10 +24,12 @@ class ActionHookDelayProcessor extends Emitter {
this._active = false;
const enabled = this.init(opts);
if (enabled && (!this.actions || !Array.isArray(this.actions) || this.actions.length === 0)) {
if (enabled && this.noResponseTimeout &&
(!this.actions || !Array.isArray(this.actions) || this.actions.length === 0)) {
throw new Error('ActionHookDelayProcessor: no actions specified');
}
else if (enabled && this.actions.some((a) => !a.verb || ![TaskName.Say, TaskName.Play].includes(a.verb))) {
else if (enabled && this.actions &&
this.actions.some((a) => !a.verb || ![TaskName.Say, TaskName.Play].includes(a.verb))) {
throw new Error(`ActionHookDelayProcessor: invalid actions specified: ${JSON.stringify(this.actions)}`);
}
}
@@ -51,8 +52,9 @@ class ActionHookDelayProcessor extends Emitter {
this.actions = opts.actions;
this.retries = opts.retries || 0;
this.noResponseTimeout = opts.noResponseTimeout || 0;
this.noResponseTimeout = opts.noResponseTimeout;
this.noResponseGiveUpTimeout = opts.noResponseGiveUpTimeout;
this.giveUpActions = opts.giveUpActions;
// return false if these options actually disable the ahdp
return ('enable' in opts && opts.enable === true) ||
@@ -66,11 +68,16 @@ class ActionHookDelayProcessor extends Emitter {
this.logger.debug('ActionHookDelayProcessor#start: already started due to prior gather which is continuing');
return;
}
assert(!this._noResponseTimer);
this._active = true;
this._retryCount = 0;
const timeoutMs = this.noResponseTimeout === 0 ? 1 : this.noResponseTimeout * 1000;
this._noResponseTimer = setTimeout(this._onNoResponseTimer.bind(this), timeoutMs);
if (this.noResponseTimeout > 0) {
const timeoutMs = this.noResponseTimeout * 1000;
this._noResponseTimer = setTimeout(this._onNoResponseTimer.bind(this), timeoutMs);
} else {
this.logger.debug(
'ActionHookDelayProcessor#start: noResponseTimeout is 0 or undefined hence not calling _onNoResponseTimer'
);
}
if (this.noResponseGiveUpTimeout > 0) {
const timeoutMs = this.noResponseGiveUpTimeout * 1000;
@@ -79,7 +86,6 @@ class ActionHookDelayProcessor extends Emitter {
}
async stop() {
this.logger.debug('ActionHookDelayProcessor#stop');
this._active = false;
if (this._noResponseTimer) {
@@ -91,25 +97,19 @@ class ActionHookDelayProcessor extends Emitter {
this._noResponseGiveUpTimer = null;
}
if (this._taskInProgress) {
this.logger.debug(`ActionHookDelayProcessor#stop: killing task in progress: ${this._taskInProgress.name}`);
this.logger.debug(`ActionHookDelayProcessor#stop: stopping ${this._taskInProgress.name}`);
/** if we are doing a play, kill it immediately
* if we are doing a say, wait for it to finish
*/
if (TaskName.Say === this._taskInProgress.name) {
this._sayResolver = () => {
this.logger.debug('ActionHookDelayProcessor#stop: say is done, continue on..');
this._taskInProgress.kill(this.cs);
this._taskInProgress = null;
};
this.logger.debug('ActionHookDelayProcessor#stop returning promise');
return new Promise((resolve) => this._sayResolver = resolve);
}
else {
/* play */
this._taskInProgress.kill(this.cs);
this._sayResolver = () => {
this.logger.debug('ActionHookDelayProcessor#stop: play/say is done, continue on..');
//this._taskInProgress.kill(this.cs);
this._taskInProgress = null;
};
/* we let Say finish, but interrupt Play */
if (TaskName.Play === this._taskInProgress.name) {
await this._taskInProgress.kill(this.cs);
}
return new Promise((resolve) => this._sayResolver = resolve);
}
this.logger.debug('ActionHookDelayProcessor#stop returning');
}
@@ -137,7 +137,9 @@ class ActionHookDelayProcessor extends Emitter {
this.logger.debug({evt}, 'got playback-start');
if (!this._active) {
this.logger.info({evt}, 'ActionHookDelayProcessor#_onNoResponseTimer: killing audio immediately');
this.ep.api('uuid_break', this.ep.uuid)
/* note: in a race condition we may have just hung up and cs.ep may already be cleared */
this.ep?.api('uuid_break', this.ep?.uuid)
.catch((err) => this.logger.info(err,
'ActionHookDelayProcessor#_onNoResponseTimer Error killing audio'));
}
@@ -147,7 +149,7 @@ class ActionHookDelayProcessor extends Emitter {
this._taskInProgress = null;
if (this._sayResolver) {
/* we were waiting for the play to finish before continuing to next task */
this.logger.debug({evt}, 'got playback-stop');
this.logger.debug({evt}, 'ActionHookDelayProcessor#_onNoResponseTimer got playback-stop');
this._sayResolver();
this._sayResolver = null;
}
@@ -166,9 +168,14 @@ class ActionHookDelayProcessor extends Emitter {
_onNoResponseGiveUpTimer() {
this._active = false;
this.logger.info('ActionHookDelayProcessor#_onNoResponseGiveUpTimer');
this.stop().catch((err) => {});
this.emit('giveup');
if (!this.giveUpActions) {
this.logger.info('ActionHookDelayProcessor#_onNoResponseGiveUpTimer');
this.stop().catch((err) => {});
this.emit('giveup');
} else {
this.logger.info('ActionHookDelayProcessor#_onNoResponseGiveUpTimer - giveUpActions');
this.emit('giveupWithTasks', this.giveUpActions);
}
}
}
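A hedged sketch of an options object exercising both timers above; all values are hypothetical and only the fields read in init() are shown. With giveUpActions present, _onNoResponseGiveUpTimer emits 'giveupWithTasks' rather than stopping and emitting 'giveup'.
// Illustrative only
const opts = {
  enable: true,
  noResponseTimeout: 3,          // seconds before the filler actions run
  actions: [{verb: 'say', text: 'Please hold while we look that up.'}],
  retries: 1,
  noResponseGiveUpTimeout: 20,   // seconds before giving up entirely
  giveUpActions: [{verb: 'say', text: 'Sorry, please call back later.'}]
};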

View File

@@ -246,7 +246,10 @@ module.exports = (logger) => {
const amd = ep.amd = new Amd(logger, cs, opts);
const {vendor, language} = amd;
let sttCredentials = amd.sttCredentials;
const hints = voicemailHints[language] || [];
// hints from configuration might be too long for a given language and vendor, which can prevent the
// freeswitch transcribe modules from connecting to the vendor. hints are used in the next step to validate
// whether the transcription matches the voicemail hints.
const hints = [];
if (vendor === 'nuance' && sttCredentials.client_id) {
/* get nuance access token */

View File

@@ -14,6 +14,7 @@
"Leave": "leave",
"Lex": "lex",
"Listen": "listen",
"Llm": "llm",
"Message": "message",
"Pause": "pause",
"Play": "play",
@@ -126,6 +127,14 @@
"NoSpeechDetected": "azure_transcribe::no_speech_detected",
"VadDetected": "azure_transcribe::vad_detected"
},
"SpeechmaticsTranscriptionEvents": {
"Transcription": "speechmatics_transcribe::transcription",
"Info": "speechmatics_transcribe::info",
"RecognitionStarted": "speechmatics_transcribe::recognition_started",
"ConnectFailure": "speechmatics_transcribe::connect_failed",
"Connect": "speechmatics_transcribe::connect",
"Error": "speechmatics_transcribe::error"
},
"JambonzTranscriptionEvents": {
"Transcription": "jambonz_transcribe::transcription",
"ConnectFailure": "jambonz_transcribe::connect_failed",
@@ -158,6 +167,13 @@
"StandbyEnter": "standby-enter",
"StandbyExit": "standby-exit"
},
"LlmEvents_OpenAI": {
"Error": "error",
"Connect": "openai_s2s::connect",
"ConnectFailure": "openai_s2s::connect_failed",
"Disconnect": "openai_s2s::disconnect",
"ServerEvent": "openai_s2s::server_event"
},
"QueueResults": {
"Bridged": "bridged",
"Error": "error",
@@ -184,6 +200,8 @@
"dial:confirm",
"verb:hook",
"verb:status",
"llm:event",
"llm:tool-call",
"jambonz:error"
],
"RecordState": {

View File

@@ -91,35 +91,47 @@ const speechMapper = (cred) => {
else if ('cobalt' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.cobalt_server_uri = o.cobalt_server_uri;
} else if ('elevenlabs' === obj.vendor) {
}
else if ('elevenlabs' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
obj.model_id = o.model_id;
obj.options = o.options;
} else if ('playht' === obj.vendor) {
}
else if ('playht' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
obj.user_id = o.user_id;
obj.voice_engine = o.voice_engine;
obj.options = o.options;
} else if ('rimelabs' === obj.vendor) {
}
else if ('rimelabs' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
obj.model_id = o.model_id;
obj.options = o.options;
} else if ('assemblyai' === obj.vendor) {
}
else if ('assemblyai' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
} else if ('whisper' === obj.vendor) {
}
else if ('whisper' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
obj.model_id = o.model_id;
} else if ('verbio' === obj.vendor) {
}
else if ('verbio' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.client_id = o.client_id;
obj.client_secret = o.client_secret;
obj.engine_version = o.engine_version;
} else if (obj.vendor.startsWith('custom:')) {
}
else if ('speechmatics' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
obj.speechmatics_stt_uri = o.speechmatics_stt_uri;
}
else if (obj.vendor.startsWith('custom:')) {
const o = JSON.parse(decrypt(credential));
obj.auth_token = o.auth_token;
obj.custom_stt_url = o.custom_stt_url;

lib/utils/error.js (new file, 9 lines)
View File

@@ -0,0 +1,9 @@
class SpeechCredentialError extends Error {
constructor(msg) {
super(msg);
}
}
module.exports = {
SpeechCredentialError
};
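A minimal usage sketch (the require path is assumed from elsewhere in this diff) mirroring how the transcribe task above separates credential problems from other failures:
// Illustrative only
const { SpeechCredentialError } = require('../utils/error');
try {
  throw new SpeechCredentialError('No speech-to-text service credentials for acme have been configured');
} catch (err) {
  if (err instanceof SpeechCredentialError) {
    // provisioning problem: alert and end the task gracefully rather than fail the call
  } else {
    throw err;
  }
}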

View File

@@ -1,5 +1,5 @@
const Mrf = require('drachtio-fsmrf');
const ip = require('ip');
const os = require('os');
const {
JAMBONES_MYSQL_HOST,
JAMBONES_MYSQL_USER,
@@ -12,11 +12,25 @@ const {
JAMBONES_TIME_SERIES_HOST,
JAMBONES_ESL_LISTEN_ADDRESS,
PORT,
HTTP_IP,
NODE_ENV,
} = require('../config');
const Registrar = require('@jambonz/mw-registrar');
const assert = require('assert');
function getLocalIp() {
const interfaces = os.networkInterfaces();
for (const interfaceName in interfaces) {
const interface = interfaces[interfaceName];
for (const iface of interface) {
if (iface.family === 'IPv4' && !iface.internal) {
return iface.address;
}
}
}
return '127.0.0.1'; // Fallback to localhost if no suitable interface found
}
function initMS(logger, wrapper, ms) {
Object.assign(wrapper, {ms, active: true, connects: 1});
logger.info(`connected to freeswitch at ${ms.address}`);
@@ -138,7 +152,8 @@ function installSrfLocals(srf, logger) {
lookupAccountBySid,
lookupAccountCapacitiesBySid,
lookupSmppGateways,
lookupClientByAccountAndUsername
lookupClientByAccountAndUsername,
lookupSystemInformation
} = require('@jambonz/db-helpers')({
host: JAMBONES_MYSQL_HOST,
user: JAMBONES_MYSQL_USER,
@@ -193,7 +208,8 @@ function installSrfLocals(srf, logger) {
let localIp;
try {
localIp = ip.address();
// Either use the configured IP address or discover it
localIp = HTTP_IP || getLocalIp();
} catch (err) {
logger.error({err}, 'installSrfLocals - error detecting local ipv4 address');
}
@@ -213,6 +229,7 @@ function installSrfLocals(srf, logger) {
lookupAccountCapacitiesBySid,
lookupSmppGateways,
lookupClientByAccountAndUsername,
lookupSystemInformation,
updateCallStatus,
retrieveCall,
listCalls,

View File

@@ -213,6 +213,8 @@ class SingleDialer extends Emitter {
},
cbProvisional: (prov) => {
const status = {sipStatus: prov.status, sipReason: prov.reason};
// Update call-id for sbc outbound INVITE
this.callInfo.sbcCallid = prov.get('X-CID');
if ([180, 183].includes(prov.status) && prov.body) {
if (status.callStatus !== CallStatus.EarlyMedia) {
status.callStatus = CallStatus.EarlyMedia;
@@ -447,6 +449,8 @@ class SingleDialer extends Emitter {
});
cs.req = this.req;
// fix: hanging up an adulting session previously did not send the Completed status callback
cs.wrapDialog(this.dlg);
cs.exec().catch((err) => newLogger.error({err}, 'doAdulting: error executing session'));
return cs;
}

View File

@@ -1,7 +1,4 @@
const {
TaskName,
} = require('./constants.json');
const {TaskName} = require('./constants.json');
const stickyVars = {
google: [
'GOOGLE_SPEECH_HINTS',
@@ -51,7 +48,13 @@ const stickyVars = {
aws: [
'AWS_VOCABULARY_NAME',
'AWS_VOCABULARY_FILTER_METHOD',
'AWS_VOCABULARY_FILTER_NAME'
'AWS_VOCABULARY_FILTER_NAME',
'AWS_LANGUAGE_MODEL_NAME',
'AWS_ACCESS_KEY_ID',
'AWS_SECRET_ACCESS_KEY',
'AWS_REGION',
'AWS_SECURITY_TOKEN',
'AWS_PII_ENTITY_TYPES',
],
nuance: [
'NUANCE_ACCESS_TOKEN',
@@ -100,6 +103,12 @@ const stickyVars = {
assemblyai: [
'ASSEMBLYAI_API_KEY',
'ASSEMBLYAI_WORD_BOOST'
],
speechmatics: [
'SPEECHMATICS_API_KEY',
'SPEECHMATICS_HOST',
'SPEECHMATICS_PATH',
'SPEECHMATICS_SPEECH_HINTS',
]
};
@@ -142,7 +151,6 @@ const optimalDeepramModels = {
tr: ['nova-2', 'nova-2'],
uk: ['nova-2', 'nova-2']
};
const selectDefaultDeepgramModel = (task, language) => {
if (language in optimalDeepramModels) {
const [gather, transcribe] = optimalDeepramModels[language];
@@ -151,6 +159,29 @@ const selectDefaultDeepgramModel = (task, language) => {
return 'base';
};
const optimalGoogleModels = {
'v1' : {
'en-IN':['telephony', 'telephony'],
'es-DO':['default', 'default'],
'es-MX':['default', 'default'],
'en-AU':['telephony', 'telephony'],
'en-GB':['telephony', 'telephony'],
'en-NZ':['telephony', 'telephony']
},
'v2' : {
'en-IN':['telephony', 'long']
}
};
const selectDefaultGoogleModel = (task, language, version) => {
const useV2 = version === 'v2';
if (language in optimalGoogleModels[version]) {
const [gather, transcribe] = optimalGoogleModels[version][language];
return task.name === TaskName.Gather ? gather : transcribe;
}
return task.name === TaskName.Gather ?
(useV2 ? 'telephony_short' : 'command_and_search') :
(useV2 ? 'long' : 'latest_long');
};
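// Illustrative lookups against the tables above (task objects are hypothetical):
//   selectDefaultGoogleModel({name: TaskName.Gather}, 'en-IN', 'v2')     -> 'telephony'
//   selectDefaultGoogleModel({name: TaskName.Transcribe}, 'en-IN', 'v2') -> 'long'
//   selectDefaultGoogleModel({name: TaskName.Gather}, 'fr-FR', 'v1')     -> 'command_and_search'
//   selectDefaultGoogleModel({name: TaskName.Transcribe}, 'fr-FR', 'v1') -> 'latest_long'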
const consolidateTranscripts = (bufferedTranscripts, channel, language, vendor) => {
if (bufferedTranscripts.length === 1) return bufferedTranscripts[0];
let totalConfidence = 0;
@@ -424,16 +455,41 @@ const normalizeMicrosoft = (evt, channel, language, punctuation = true) => {
const normalizeAws = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
return {
language_code: language,
channel_tag: channel,
is_final: evt[0].is_final,
alternatives: evt[0].alternatives,
vendor: {
name: 'aws',
evt: copy
}
};
const isGrpcPayload = Array.isArray(evt);
if (isGrpcPayload) {
/* legacy grpc api */
return {
language_code: language,
channel_tag: channel,
is_final: evt[0].is_final,
alternatives: evt[0].alternatives,
vendor: {
name: 'aws',
evt: copy
}
};
}
else {
/* websocket api */
const alternatives = evt.Transcript?.Results[0]?.Alternatives.map((alt) => {
const items = alt.Items.filter((item) => item.Type === 'pronunciation' && 'Confidence' in item);
const confidence = items.reduce((acc, item) => acc + item.Confidence, 0) / items.length;
return {
transcript: alt.Transcript,
confidence
};
});
return {
language_code: language,
channel_tag: channel,
is_final: evt.Transcript?.Results[0].IsPartial === false,
alternatives,
vendor: {
name: 'aws',
evt: copy
}
};
}
};
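// Worked example with a hypothetical payload in the websocket shape handled above:
// two pronunciation items with Confidence 0.9 and 0.7 average to ~0.8, and
// IsPartial === false yields is_final: true.
//   evt = {Transcript: {Results: [{IsPartial: false, Alternatives: [{
//           Transcript: 'hello world',
//           Items: [{Type: 'pronunciation', Content: 'hello', Confidence: 0.9},
//                   {Type: 'pronunciation', Content: 'world', Confidence: 0.7},
//                   {Type: 'punctuation', Content: '.'}]}]}]}}
//   normalizeAws(evt, 1, 'en-US') -> {language_code: 'en-US', channel_tag: 1, is_final: true,
//                                     alternatives: [{transcript: 'hello world', confidence: ~0.8}], ...}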
const normalizeAssemblyAi = (evt, channel, language) => {
@@ -449,12 +505,37 @@ const normalizeAssemblyAi = (evt, channel, language) => {
}
],
vendor: {
name: 'ASSEMBLYAI',
name: 'assemblyai',
evt: copy
}
};
};
const normalizeSpeechmatics = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
const is_final = evt.message === 'AddTranscript';
const words = evt.results?.filter((r) => r.type === 'word') || [];
const confidence = words.length > 0 ?
words.reduce((acc, word) => acc + word.alternatives[0].confidence, 0) / words.length :
0;
const alternative = {
confidence,
transcript: evt.metadata?.transcript
};
const obj = {
language_code: language,
channel_tag: channel,
is_final,
alternatives: [alternative],
vendor: {
name: 'speechmatics',
evt: copy
}
};
return obj;
};
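// Worked example with a hypothetical payload in the Speechmatics shape handled above:
// only results of type 'word' contribute to confidence, and message === 'AddTranscript'
// marks the transcript final.
//   evt = {message: 'AddTranscript', metadata: {transcript: 'hello world.'},
//          results: [{type: 'word', alternatives: [{content: 'hello', confidence: 1.0}]},
//                    {type: 'word', alternatives: [{content: 'world', confidence: 0.6}]},
//                    {type: 'punctuation', alternatives: [{content: '.', confidence: 1.0}]}]}
//   normalizeSpeechmatics(evt, 1, 'en-US') -> {is_final: true,
//     alternatives: [{confidence: ~0.8, transcript: 'hello world.'}], ...}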
module.exports = (logger) => {
const normalizeTranscription = (evt, vendor, channel, language, shortUtterance, punctuation) => {
@@ -482,6 +563,8 @@ module.exports = (logger) => {
return normalizeAssemblyAi(evt, channel, language, shortUtterance);
case 'verbio':
return normalizeVerbio(evt, channel, language);
case 'speechmatics':
return normalizeSpeechmatics(evt, channel, language);
default:
if (vendor.startsWith('custom:')) {
return normalizeCustom(evt, channel, language, vendor);
@@ -497,9 +580,9 @@ module.exports = (logger) => {
if ('google' === vendor) {
const useV2 = rOpts.googleOptions?.serviceVersion === 'v2';
const model = task.name === TaskName.Gather ?
(useV2 ? 'telephony_short' : 'command_and_search') :
(useV2 ? 'long' : 'latest_long');
const version = useV2 ? 'v2' : 'v1';
let {model} = rOpts;
model = model || selectDefaultGoogleModel(task, language, version);
opts = {
...opts,
...(sttCredentials && {GOOGLE_APPLICATION_CREDENTIALS: JSON.stringify(sttCredentials.credentials)}),
@@ -555,17 +638,29 @@ module.exports = (logger) => {
};
}
else if (['aws', 'polly'].includes(vendor)) {
const {awsOptions = {}} = rOpts;
const vocabularyName = awsOptions.vocabularyName || rOpts.vocabularyName;
const vocabularyFilterName = awsOptions.vocabularyFilterName || rOpts.vocabularyFilterName;
const filterMethod = awsOptions.vocabularyFilterMethod || rOpts.filterMethod;
opts = {
...opts,
...(rOpts.vocabularyName && {AWS_VOCABULARY_NAME: rOpts.vocabularyName}),
...(rOpts.vocabularyFilterName && {AWS_VOCABULARY_FILTER_NAME: rOpts.vocabularyFilterName}),
...(rOpts.filterMethod && {AWS_VOCABULARY_FILTER_METHOD: rOpts.filterMethod}),
...(vocabularyName && {AWS_VOCABULARY_NAME: vocabularyName}),
...(vocabularyFilterName && {AWS_VOCABULARY_FILTER_NAME: vocabularyFilterName}),
...(filterMethod && {AWS_VOCABULARY_FILTER_METHOD: filterMethod}),
...(sttCredentials && {
...(sttCredentials.accessKeyId && {AWS_ACCESS_KEY_ID: sttCredentials.accessKeyId}),
...(sttCredentials.secretAccessKey && {AWS_SECRET_ACCESS_KEY: sttCredentials.secretAccessKey}),
AWS_ACCESS_KEY_ID: sttCredentials.accessKeyId,
AWS_SECRET_ACCESS_KEY: sttCredentials.secretAccessKey,
AWS_REGION: sttCredentials.region,
...(sttCredentials.sessionToken && {AWS_SESSION_TOKEN: sttCredentials.sessionToken}),
AWS_SECURITY_TOKEN: sttCredentials.securityToken
}),
...(awsOptions.accessKey && {AWS_ACCESS_KEY_ID: awsOptions.accessKey}),
...(awsOptions.secretKey && {AWS_SECRET_ACCESS_KEY: awsOptions.secretKey}),
...(awsOptions.region && {AWS_REGION: awsOptions.region}),
...(awsOptions.securityToken && {AWS_SECURITY_TOKEN: awsOptions.securityToken}),
...(awsOptions.languageModelName && {AWS_LANGUAGE_MODEL_NAME: awsOptions.languageModelName}),
...(awsOptions.piiEntityTypes?.length && {AWS_PII_ENTITY_TYPES: awsOptions.piiEntityTypes.join(',')}),
...(awsOptions.piiIdentifyEntities && {AWS_PII_IDENTIFY_ENTITIES: true}),
};
}
else if ('microsoft' === vendor) {
@@ -811,7 +906,8 @@ module.exports = (logger) => {
...(cobaltOptions.enableConfusionNetwork && {COBALT_ENABLE_CONFUSION_NETWORK: 1}),
...(cobaltOptions.compiledContextData && {COBALT_COMPILED_CONTEXT_DATA: cobaltOptions.compiledContextData}),
};
} else if ('assemblyai' === vendor) {
}
else if ('assemblyai' === vendor) {
opts = {
...opts,
...(sttCredentials.api_key) &&
@@ -819,7 +915,8 @@ module.exports = (logger) => {
...(rOpts.hints?.length > 0 &&
{ASSEMBLYAI_WORD_BOOST: JSON.stringify(rOpts.hints)})
};
} else if ('verbio' === vendor) {
}
else if ('verbio' === vendor) {
const {verbioOptions = {}} = rOpts;
opts = {
...opts,
@@ -838,8 +935,17 @@ module.exports = (logger) => {
...(verbioOptions.speech_incomplete_timeout &&
{VERBIO_SPEECH_INCOMPLETE_TIMEOUT: verbioOptions.speech_incomplete_timeout}),
};
} else if (vendor.startsWith('custom:')) {
let {options = {}} = rOpts;
}
else if ('speechmatics' === vendor) {
opts = {
...opts,
...(sttCredentials.api_key) && {SPEECHMATICS_API_KEY: sttCredentials.api_key},
...(sttCredentials.speechmatics_stt_uri) && {SPEECHMATICS_HOST: sttCredentials.speechmatics_stt_uri},
...(rOpts.hints?.length > 0 && {SPEECHMATICS_SPEECH_HINTS: rOpts.hints.join(',')}),
};
}
else if (vendor.startsWith('custom:')) {
let {options = {}} = rOpts.customOptions || {};
const {sampleRate} = rOpts.customOptions || {};
const {auth_token, custom_stt_url} = sttCredentials;
options = {
@@ -906,6 +1012,6 @@ module.exports = (logger) => {
setChannelVarsForStt,
setSpeechCredentialsAtRuntime,
compileSonioxTranscripts,
consolidateTranscripts
consolidateTranscripts,
};
};
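For illustration, a hedged sketch of a recognizer using the awsOptions handled above (all values hypothetical) and the channel variables it would produce:
// Illustrative only
const recognizer = {
  vendor: 'aws',
  language: 'en-US',
  awsOptions: {
    vocabularyName: 'support-terms',
    languageModelName: 'my-custom-lm',
    piiEntityTypes: ['SSN', 'EMAIL']
  }
};
// expected channel vars (in addition to the credential-derived AWS_* vars):
//   AWS_VOCABULARY_NAME='support-terms', AWS_LANGUAGE_MODEL_NAME='my-custom-lm',
//   AWS_PII_ENTITY_TYPES='SSN,EMAIL'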

View File

@@ -44,7 +44,7 @@ class WsRequestor extends BaseRequestor {
async request(type, hook, params, httpHeaders = {}) {
assert(HookMsgTypes.includes(type));
const url = hook.url || hook;
const wantsAck = !['call:status', 'verb:status', 'jambonz:error'].includes(type);
const wantsAck = !['call:status', 'verb:status', 'jambonz:error', 'llm:event', 'llm:tool-call'].includes(type);
if (this.maliciousClient) {
this.logger.info({url: this.url}, 'WsRequestor:request - discarding msg to malicious client');
@@ -132,7 +132,7 @@ class WsRequestor extends BaseRequestor {
type,
msgid,
call_sid: this.call_sid,
hook: ['verb:hook', 'session:redirect'].includes(type) ? url : undefined,
hook: ['verb:hook', 'session:redirect', 'llm:event', 'llm:tool-call'].includes(type) ? url : undefined,
data: {...payload},
...b3
};
@@ -392,8 +392,9 @@ class WsRequestor extends BaseRequestor {
/* messages must be JSON format */
try {
const obj = JSON.parse(content);
this.logger.debug({obj}, 'WsRequestor:_onMessage - received message');
//const {type, msgid, command, call_sid = this.call_sid, queueCommand = false, data} = obj;
const {type, msgid, command, queueCommand = false, data} = obj;
const {type, msgid, command, queueCommand = false, tool_call_id, data} = obj;
const call_sid = obj.callSid || this.call_sid;
//this.logger.debug({obj}, 'WsRequestor:request websocket: received');
@@ -407,8 +408,8 @@ class WsRequestor extends BaseRequestor {
case 'command':
assert.ok(command, 'command property not supplied');
assert.ok(data, 'data property not supplied');
this._recvCommand(msgid, command, call_sid, queueCommand, data);
assert.ok(data || command === 'llm:tool-output', 'data property not supplied');
this._recvCommand(msgid, command, call_sid, queueCommand, tool_call_id, data);
break;
default:
@@ -432,10 +433,10 @@ class WsRequestor extends BaseRequestor {
success && success(data);
}
_recvCommand(msgid, command, call_sid, queueCommand, data) {
_recvCommand(msgid, command, call_sid, queueCommand, tool_call_id, data) {
// TODO: validate command
this.logger.debug({msgid, command, call_sid, queueCommand, data}, 'received command');
this.emit('command', {msgid, command, call_sid, queueCommand, data});
this.emit('command', {msgid, command, call_sid, queueCommand, tool_call_id, data});
}
}
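For illustration, a hedged sketch of an inbound websocket command in the shape accepted by _onMessage above; field names follow the destructuring in this diff, ws is assumed to be the application's connected socket, and all values are hypothetical.
// Illustrative only: sent from the application side over the established socket
ws.send(JSON.stringify({
  type: 'command',
  command: 'llm:tool-output',
  callSid: 'hypothetical-call-sid',
  tool_call_id: 'call_0123',
  data: {result: 'sunny, 22C'}   // may be omitted for llm:tool-output
}));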

package-lock.json (generated, 1041 lines changed)

File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
{
"name": "jambonz-feature-server",
"version": "0.9.0",
"version": "0.9.2",
"main": "app.js",
"engines": {
"node": ">= 18.x"
@@ -31,10 +31,10 @@
"@jambonz/http-health-check": "^0.0.1",
"@jambonz/mw-registrar": "^0.2.7",
"@jambonz/realtimedb-helpers": "^0.8.8",
"@jambonz/speech-utils": "^0.1.15",
"@jambonz/speech-utils": "^0.1.20",
"@jambonz/stats-collector": "^0.1.10",
"@jambonz/time-series": "^0.2.9",
"@jambonz/verb-specifications": "^0.0.76",
"@jambonz/verb-specifications": "^0.0.83",
"@opentelemetry/api": "^1.8.0",
"@opentelemetry/exporter-jaeger": "^1.23.0",
"@opentelemetry/exporter-trace-otlp-http": "^0.50.0",
@@ -47,11 +47,10 @@
"bent": "^7.3.12",
"debug": "^4.3.4",
"deepcopy": "^2.1.0",
"drachtio-fsmrf": "^3.0.43",
"drachtio-fsmrf": "^3.0.45",
"drachtio-srf": "^4.5.35",
"express": "^4.19.2",
"express-validator": "^7.0.1",
"ip": "^2.0.1",
"moment": "^2.30.1",
"parse-url": "^9.2.0",
"pino": "^8.20.0",
@@ -61,10 +60,10 @@
"short-uuid": "^5.1.0",
"sinon": "^17.0.1",
"to-snake-case": "^1.0.0",
"undici": "^6.19.2",
"undici": "^6.20.0",
"uuid-random": "^1.3.2",
"verify-aws-sns-signature": "^0.1.0",
"ws": "^8.17.1",
"ws": "^8.18.0",
"xml2js": "^0.6.2"
},
"devDependencies": {

View File

@@ -1,4 +1,5 @@
/* SQLEditor (MySQL (2))*/
SET FOREIGN_KEY_CHECKS=0;
DROP TABLE IF EXISTS account_static_ips;
@@ -53,6 +54,8 @@ DROP TABLE IF EXISTS signup_history;
DROP TABLE IF EXISTS smpp_addresses;
DROP TABLE IF EXISTS google_custom_voices;
DROP TABLE IF EXISTS speech_credentials;
DROP TABLE IF EXISTS system_information;
@@ -136,6 +139,9 @@ account_sid CHAR(36) NOT NULL,
is_active BOOLEAN NOT NULL DEFAULT 1,
username VARCHAR(64),
password VARCHAR(1024),
allow_direct_app_calling BOOLEAN NOT NULL DEFAULT 1,
allow_direct_queue_calling BOOLEAN NOT NULL DEFAULT 1,
allow_direct_user_calling BOOLEAN NOT NULL DEFAULT 1,
PRIMARY KEY (client_sid)
);
@@ -338,11 +344,23 @@ label VARCHAR(64),
PRIMARY KEY (speech_credential_sid)
);
CREATE TABLE google_custom_voices
(
google_custom_voice_sid CHAR(36) NOT NULL UNIQUE ,
speech_credential_sid CHAR(36) NOT NULL,
model VARCHAR(512) NOT NULL,
reported_usage ENUM('REPORTED_USAGE_UNSPECIFIED','REALTIME','OFFLINE') DEFAULT 'REALTIME',
name VARCHAR(64) NOT NULL,
PRIMARY KEY (google_custom_voice_sid)
);
CREATE TABLE system_information
(
domain_name VARCHAR(255),
sip_domain_name VARCHAR(255),
monitoring_domain_name VARCHAR(255)
monitoring_domain_name VARCHAR(255),
private_network_cidr VARCHAR(8192),
log_level ENUM('info', 'debug') NOT NULL DEFAULT 'info'
);
CREATE TABLE users
@@ -437,11 +455,14 @@ CREATE TABLE sip_gateways
sip_gateway_sid CHAR(36),
ipv4 VARCHAR(128) NOT NULL COMMENT 'ip address or DNS name of the gateway. For gateways providing inbound calling service, ip address is required.',
netmask INTEGER NOT NULL DEFAULT 32,
port INTEGER NOT NULL DEFAULT 5060 COMMENT 'sip signaling port',
port INTEGER COMMENT 'sip signaling port',
inbound BOOLEAN NOT NULL COMMENT 'if true, whitelist this IP to allow inbound calls from the gateway',
outbound BOOLEAN NOT NULL COMMENT 'if true, include in least-cost routing when placing calls to the PSTN',
voip_carrier_sid CHAR(36) NOT NULL,
is_active BOOLEAN NOT NULL DEFAULT 1,
send_options_ping BOOLEAN NOT NULL DEFAULT 0,
use_sips_scheme BOOLEAN NOT NULL DEFAULT 0,
pad_crypto BOOLEAN NOT NULL DEFAULT 0,
protocol ENUM('udp','tcp','tls', 'tls/srtp') DEFAULT 'udp' COMMENT 'Outbound call protocol',
PRIMARY KEY (sip_gateway_sid)
) COMMENT='A whitelisted sip gateway used for origination/termination';
@@ -478,11 +499,19 @@ messaging_hook_sid CHAR(36) COMMENT 'webhook to call for inbound SMS/MMS ',
app_json TEXT,
speech_synthesis_vendor VARCHAR(64) NOT NULL DEFAULT 'google',
speech_synthesis_language VARCHAR(12) NOT NULL DEFAULT 'en-US',
speech_synthesis_voice VARCHAR(64),
speech_synthesis_voice VARCHAR(256),
speech_synthesis_label VARCHAR(64),
speech_recognizer_vendor VARCHAR(64) NOT NULL DEFAULT 'google',
speech_recognizer_language VARCHAR(64) NOT NULL DEFAULT 'en-US',
speech_recognizer_label VARCHAR(64),
use_for_fallback_speech BOOLEAN DEFAULT false,
fallback_speech_synthesis_vendor VARCHAR(64),
fallback_speech_synthesis_language VARCHAR(12),
fallback_speech_synthesis_voice VARCHAR(256),
fallback_speech_synthesis_label VARCHAR(64),
fallback_speech_recognizer_vendor VARCHAR(64),
fallback_speech_recognizer_language VARCHAR(64),
fallback_speech_recognizer_label VARCHAR(64),
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
record_all_calls BOOLEAN NOT NULL DEFAULT false,
PRIMARY KEY (application_sid)
@@ -525,6 +554,7 @@ siprec_hook_sid CHAR(36),
record_all_calls BOOLEAN NOT NULL DEFAULT false,
record_format VARCHAR(16) NOT NULL DEFAULT 'mp3',
bucket_credential VARCHAR(8192) COMMENT 'credential used to authenticate with storage service',
enable_debug_log BOOLEAN NOT NULL DEFAULT false,
PRIMARY KEY (account_sid)
) COMMENT='An enterprise that uses the platform for comm services';
@@ -619,6 +649,10 @@ ALTER TABLE speech_credentials ADD FOREIGN KEY service_provider_sid_idxfk_5 (ser
CREATE INDEX account_sid_idx ON speech_credentials (account_sid);
ALTER TABLE speech_credentials ADD FOREIGN KEY account_sid_idxfk_8 (account_sid) REFERENCES accounts (account_sid);
CREATE INDEX google_custom_voice_sid_idx ON google_custom_voices (google_custom_voice_sid);
CREATE INDEX speech_credential_sid_idx ON google_custom_voices (speech_credential_sid);
ALTER TABLE google_custom_voices ADD FOREIGN KEY speech_credential_sid_idxfk (speech_credential_sid) REFERENCES speech_credentials (speech_credential_sid) ON DELETE CASCADE;
CREATE INDEX user_sid_idx ON users (user_sid);
CREATE INDEX email_idx ON users (email);
CREATE INDEX phone_idx ON users (phone);
@@ -704,4 +738,5 @@ ALTER TABLE accounts ADD FOREIGN KEY queue_event_hook_sid_idxfk (queue_event_hoo
ALTER TABLE accounts ADD FOREIGN KEY device_calling_application_sid_idxfk (device_calling_application_sid) REFERENCES applications (application_sid);
ALTER TABLE accounts ADD FOREIGN KEY siprec_hook_sid_idxfk (siprec_hook_sid) REFERENCES applications (application_sid);
SET FOREIGN_KEY_CHECKS=1;
SET FOREIGN_KEY_CHECKS=1;

View File

@@ -57,7 +57,7 @@ services:
condition: service_healthy
freeswitch:
image: drachtio/drachtio-freeswitch-mrf:0.7.3
image: drachtio/drachtio-freeswitch-mrf:latest
restart: always
command: freeswitch --rtp-range-start 20000 --rtp-range-end 20100
environment: