Compare commits


1 Commit

Author: Quan HL
SHA1: 9222ab1d72
Message: fix missing speech vendor in telemetry metric
Date: 2023-08-28 14:09:09 +07:00
20 changed files with 119 additions and 483 deletions

View File

@@ -9,7 +9,7 @@ jobs:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 18
node-version: 16
- run: npm ci
- run: npm run jslint
- run: docker pull drachtio/sipp

View File

@@ -37,9 +37,6 @@ Configuration is provided via environment variables:
|STATS_PORT| listening port for metrics host|no|
|STATS_PROTOCOL| 'tcp' or 'udp'|no|
|STATS_TELEGRAF| if 1, metrics will be generated in telegraf format|no|
|JAMBONZ_RECORD_WS_BASE_URL| recording websocket URL to send the recording audio|no|
|JAMBONZ_RECORD_WS_USERNAME| recording websocket username|no|
|JAMBONZ_RECORD_WS_PASSWORD| recording websocket password|no|
### running under pm2
Typically, this application runs under [pm2](https://pm2.io) using an [ecosystem.config.js](https://pm2.keymetrics.io/docs/usage/application-declaration/) file similar to this:
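A minimal `ecosystem.config.js` along the lines the README describes might look like the sketch below. The app name, script path, and all values are illustrative assumptions rather than the project's shipped configuration; only environment variable names that appear elsewhere in this diff are used.

```js
// Hypothetical pm2 ecosystem file for running the feature server.
// Name, script path, and values are assumptions for illustration;
// the environment variable names match those documented in the README and config.js.
module.exports = {
  apps: [{
    name: 'jambonz-feature-server',   // assumed app name
    script: 'app.js',                 // assumed entry point
    instances: 1,
    exec_mode: 'fork',
    autorestart: true,
    env: {
      NODE_ENV: 'production',
      JAMBONES_MYSQL_HOST: '127.0.0.1',
      JAMBONES_MYSQL_PORT: 3306,
      JAMBONES_MYSQL_USER: 'jambones',
      JAMBONES_MYSQL_PASSWORD: 'changeme',
      JAMBONES_MYSQL_DATABASE: 'jambones',
      STATS_PORT: 8125,
      STATS_PROTOCOL: 'udp',
      STATS_TELEGRAF: 1
    }
  }]
};
```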

View File

@@ -25,7 +25,7 @@ const JAMBONES_MYSQL_USER = process.env.JAMBONES_MYSQL_USER;
const JAMBONES_MYSQL_PASSWORD = process.env.JAMBONES_MYSQL_PASSWORD;
const JAMBONES_MYSQL_DATABASE = process.env.JAMBONES_MYSQL_DATABASE;
const JAMBONES_MYSQL_PORT = parseInt(process.env.JAMBONES_MYSQL_PORT, 10) || 3306;
const JAMBONES_MYSQL_REFRESH_TTL = parseInt(process.env.JAMBONES_MYSQL_REFRESH_TTL, 10) || 0;
const JAMBONES_MYSQL_REFRESH_TTL = process.env.JAMBONES_MYSQL_REFRESH_TTL;
const JAMBONES_MYSQL_CONNECTION_LIMIT = parseInt(process.env.JAMBONES_MYSQL_CONNECTION_LIMIT, 10) || 10;
/* redis */
@@ -144,10 +144,9 @@ const JAMBONES_REDIS_SENTINELS = process.env.JAMBONES_REDIS_SENTINELS ? {
username: process.env.JAMBONES_REDIS_SENTINEL_USERNAME
})
} : null;
const JAMBONZ_RECORD_WS_BASE_URL = process.env.JAMBONZ_RECORD_WS_BASE_URL || process.env.JAMBONES_RECORD_WS_BASE_URL;
const JAMBONZ_RECORD_WS_USERNAME = process.env.JAMBONZ_RECORD_WS_USERNAME || process.env.JAMBONES_RECORD_WS_USERNAME;
const JAMBONZ_RECORD_WS_PASSWORD = process.env.JAMBONZ_RECORD_WS_PASSWORD || process.env.JAMBONES_RECORD_WS_PASSWORD;
const JAMBONZ_DISABLE_DIAL_PAI_HEADER = process.env.JAMBONZ_DISABLE_DIAL_PAI_HEADER || false;
const JAMBONZ_RECORD_WS_BASE_URL = process.env.JAMBONZ_RECORD_WS_BASE_URL;
const JAMBONZ_RECORD_WS_USERNAME = process.env.JAMBONZ_RECORD_WS_USERNAME;
const JAMBONZ_RECORD_WS_PASSWORD = process.env.JAMBONZ_RECORD_WS_PASSWORD;
module.exports = {
JAMBONES_MYSQL_HOST,
@@ -226,6 +225,5 @@ module.exports = {
DEEPGRAM_API_KEY,
JAMBONZ_RECORD_WS_BASE_URL,
JAMBONZ_RECORD_WS_USERNAME,
JAMBONZ_RECORD_WS_PASSWORD,
JAMBONZ_DISABLE_DIAL_PAI_HEADER
JAMBONZ_RECORD_WS_PASSWORD
};

View File

@@ -10,7 +10,6 @@ const HttpRequestor = require('../../utils/http-requestor');
const WsRequestor = require('../../utils/ws-requestor');
const RootSpan = require('../../utils/call-tracer');
const dbUtils = require('../../utils/db-utils');
const { mergeSdpMedia, extractSdpMedia } = require('../../utils/sdp-utils');
router.post('/', async(req, res) => {
const {logger} = req.app.locals;
@@ -128,33 +127,16 @@ router.post('/', async(req, res) => {
/* launch outdial */
let sdp, sipLogger;
let dualEp;
let localSdp = ep.local.sdp;
if (req.body.dual_streams) {
dualEp = await ms.createEndpoint();
localSdp = mergeSdpMedia(localSdp, dualEp.local.sdp);
}
const connectStream = async(remoteSdp) => {
if (remoteSdp !== sdp) {
sdp = remoteSdp;
if (req.body.dual_streams) {
const [sdpLegA, sdpLebB] = extractSdpMedia(remoteSdp);
await ep.modify(sdpLegA);
await dualEp.modify(sdpLebB);
await ep.bridge(dualEp);
} else {
ep.modify(sdp);
}
ep.modify(sdp = remoteSdp);
return true;
}
return false;
};
Object.assign(opts, {
proxy: `sip:${sbcAddress}`,
localSdp
localSdp: ep.local.sdp
});
if (target.auth) opts.auth = target.auth;
@@ -241,7 +223,6 @@ router.post('/', async(req, res) => {
srf,
req: inviteReq,
ep,
ep2: dualEp,
tasks,
callInfo,
accountInfo,
@@ -294,10 +275,7 @@ router.post('/', async(req, res) => {
else console.error(err);
}
ep.destroy();
if (dualEp) {
dualEp.destroy();
}
setTimeout(restDial.kill.bind(restDial, cs), 5000);
setTimeout(restDial.kill.bind(restDial), 5000);
}
} catch (err) {
sysError(logger, res, err);

View File

@@ -299,30 +299,22 @@ module.exports = function(srf, logger) {
if (app.app_json) {
json = JSON.parse(app.app_json);
} else {
const defaults = {
synthesizer: {
vendor: app.speech_synthesis_vendor,
...(app.speech_synthesis_label && {label: app.speech_synthesis_label}),
language: app.speech_synthesis_language,
voice: app.speech_synthesis_voice,
...(app.fallback_speech_synthesis_vendor && {fallback_vendor: app.fallback_speech_synthesis_vendor}),
...(app.fallback_speech_synthesis_label && {fallback_label: app.fallback_speech_synthesis_label}),
...(app.fallback_speech_synthesis_language && {fallback_language: app.fallback_speech_synthesis_language}),
...(app.fallback_speech_synthesis_voice && {fallback_voice: app.fallback_speech_synthesis_voice})
},
recognizer: {
vendor: app.speech_recognizer_vendor,
...(app.speech_synthesis_label && {label: app.speech_synthesis_label}),
language: app.speech_recognizer_language,
...(app.fallback_speech_recognizer_vendor && {fallback_vendor: app.fallback_speech_recognizer_vendor}),
...(app.fallback_speech_recognizer_label && {fallback_label: app.fallback_speech_recognizer_label}),
...(app.fallback_speech_recognizer_language && {fallback_language: app.fallback_speech_recognizer_language})
}
};
const params = Object.assign(['POST', 'WS'].includes(app.call_hook.method) ? { sip: req.msg } : {},
req.locals.callInfo,
{ service_provider_sid: req.locals.service_provider_sid },
{ defaults });
{
defaults: {
synthesizer: {
vendor: app.speech_synthesis_vendor,
language: app.speech_synthesis_language,
voice: app.speech_synthesis_voice
},
recognizer: {
vendor: app.speech_recognizer_vendor,
language: app.speech_recognizer_language
}
}
});
logger.debug({ params }, 'sending initial webhook');
const obj = rootSpan.startChildSpan('performAppWebhook');
span = obj.span;

View File

@@ -719,20 +719,14 @@ class CallSession extends Emitter {
getSpeechCredentials(vendor, type, label = null) {
const {writeAlerts, AlertType} = this.srf.locals;
if (this.accountInfo.speech && this.accountInfo.speech.length > 0) {
// firstly check if account level has expected credential
let credential = this.accountInfo.speech.find((s) => s.vendor === vendor &&
s.label === label && s.account_sid);
if (!credential) {
// check if SP level has expected credential
credential = this.accountInfo.speech.find((s) => s.vendor === vendor &&
s.label === label && !s.account_sid);
}
const credential = this.accountInfo.speech.find((s) => s.vendor === vendor &&
((label && s.label === label) || label === null));
if (credential && (
(type === 'tts' && credential.use_for_tts) ||
(type === 'stt' && credential.use_for_stt)
)) {
this.logger.info(
`Speech vendor: ${credential.vendor} ${credential.label ? `, label: ${credential.label}` : ''} selected`);
this.logger.info(`Speech credential vendor: ${credential.vendor}
${credential.label ? `, label: ${credential.label}` : ''} is chosen`);
if ('google' === vendor) {
if (type === 'tts' && !credential.tts_tested_ok ||
type === 'stt' && !credential.stt_tested_ok) {
@@ -769,10 +763,8 @@ class CallSession extends Emitter {
region: credential.region,
use_custom_stt: credential.use_custom_stt,
custom_stt_endpoint: credential.custom_stt_endpoint,
custom_stt_endpoint_url: credential.custom_stt_endpoint_url,
use_custom_tts: credential.use_custom_tts,
custom_tts_endpoint: credential.custom_tts_endpoint,
custom_tts_endpoint_url: credential.custom_tts_endpoint_url
custom_tts_endpoint: credential.custom_tts_endpoint
};
}
else if ('wellsaid' === vendor) {
@@ -817,12 +809,6 @@ class CallSession extends Emitter {
riva_server_uri: credential.riva_server_uri
};
}
else if ('cobalt' === vendor) {
return {
speech_credential_sid: credential.speech_credential_sid,
cobalt_server_uri: credential.cobalt_server_uri
};
}
else if (vendor.startsWith('custom:')) {
return {
speech_credential_sid: credential.speech_credential_sid,
@@ -1081,24 +1067,6 @@ class CallSession extends Emitter {
listenTask.updateListen(opts.listen_status);
}
/**
* perform live call control -- change Transcribe status
* @param {object} opts
* @param {string} opts.transcribe_status - 'pause' or 'resume'
*/
async _lccTranscribeStatus(opts) {
const task = this.currentTask;
if (!task || ![TaskName.Dial, TaskName.Transcribe].includes(task.name)) {
return this.logger.info(`CallSession:_lccTranscribeStatus - invalid transcribe_status in task ${task.name}`);
}
const transcribeTask = task.name === TaskName.Transcribe ? task : task.transcribeTask;
if (!transcribeTask) {
return this.logger
.info('CallSession:_lccTranscribeStatus - invalid transcribe_status: Dial does not have a Transcribe');
}
transcribeTask.updateTranscribe(opts.transcribe_status);
}
async _lccMuteStatus(callSid, mute) {
// this whole thing requires us to be in a Dial or Conference verb
const task = this.currentTask;
@@ -1217,9 +1185,6 @@ class CallSession extends Emitter {
if (opts.listen_status) {
await this._lccListenStatus(opts);
}
if (opts.transcribe_status) {
await this._lccTranscribeStatus(opts);
}
else if (opts.mute_status) {
await this._lccMuteStatus(callSid, opts.mute_status === 'mute');
}
@@ -1396,10 +1361,6 @@ class CallSession extends Emitter {
this._lccListenStatus(data);
break;
case 'transcribe:status':
this._lccTranscribeStatus(data);
break;
case 'whisper':
this._lccWhisper(data, call_sid);
break;
@@ -1933,22 +1894,17 @@ class CallSession extends Emitter {
async enableRecordAllCall() {
if (this.accountInfo.account.record_all_calls || this.application.record_all_calls) {
if (!JAMBONZ_RECORD_WS_BASE_URL || !this.accountInfo.account.bucket_credential) {
this.logger.error('Record all calls: invalid configuration');
return;
}
const listenOpts = {
url: `${JAMBONZ_RECORD_WS_BASE_URL}/record/${this.accountInfo.account.bucket_credential.vendor}`,
wsAuth: {
username: JAMBONZ_RECORD_WS_USERNAME,
password: JAMBONZ_RECORD_WS_PASSWORD
},
disableBidirectionalAudio: true,
mixType : 'stereo',
passDtmf: true
};
if (JAMBONZ_RECORD_WS_USERNAME && JAMBONZ_RECORD_WS_PASSWORD) {
listenOpts.wsAuth = {
username: JAMBONZ_RECORD_WS_USERNAME,
password: JAMBONZ_RECORD_WS_PASSWORD
};
}
this.logger.debug({listenOpts}, 'Record all calls: enabling listen');
await this.startBackgroundListen({verb: 'listen', ...listenOpts}, 'jambonz-session-record');
}

View File

@@ -8,7 +8,7 @@ const moment = require('moment');
* @extends CallSession
*/
class RestCallSession extends CallSession {
constructor({logger, application, srf, req, ep, ep2, tasks, callInfo, accountInfo, rootSpan}) {
constructor({logger, application, srf, req, ep, tasks, callInfo, accountInfo, rootSpan}) {
super({
logger,
application,
@@ -21,7 +21,6 @@ class RestCallSession extends CallSession {
});
this.req = req;
this.ep = ep;
this.ep2 = ep2;
// keep restDialTask reference for closing AMD
if (tasks.length) {
this.restDialTask = tasks[0];

View File

@@ -16,7 +16,7 @@ const ConfirmCallSession = require('../session/confirm-call-session');
const dbUtils = require('../utils/db-utils');
const debug = require('debug')('jambonz:feature-server');
const {parseUri} = require('drachtio-srf');
const {ANCHOR_MEDIA_ALWAYS, JAMBONZ_DISABLE_DIAL_PAI_HEADER} = require('../config');
const {ANCHOR_MEDIA_ALWAYS} = require('../config');
const { isOnhold } = require('../utils/sdp-utils');
const { normalizeJambones } = require('@jambonz/verb-specifications');
@@ -232,7 +232,7 @@ class TaskDial extends Task {
}
this._removeDtmfDetection(cs.dlg);
this._removeDtmfDetection(this.dlg);
await this._killOutdials();
this._killOutdials();
if (this.sd) {
this.sd.kill();
this.sd.removeAllListeners();
@@ -353,16 +353,11 @@ class TaskDial extends Task {
sd.removeAllListeners('callCreateFail');
}
async _killOutdials() {
_killOutdials() {
for (const [callSid, sd] of Array.from(this.dials)) {
this.logger.debug(`Dial:_killOutdials killing callSid ${callSid}`);
try {
await sd.kill();
} catch (err) {
this.logger.info(err, `Dial:_killOutdials Error killing ${callSid}`);
}
sd.kill().catch((err) => this.logger.info(err, `Dial:_killOutdials Error killing ${callSid}`));
this._removeHandlers(sd);
this.logger.debug(`Dial:_killOutdials killed callSid ${callSid}`);
}
this.dials.clear();
}
@@ -433,8 +428,7 @@ class TaskDial extends Task {
this.headers = {
'X-Account-Sid': cs.accountSid,
...(req && req.has('X-CID') && {'X-CID': req.get('X-CID')}),
...(req && req.has('P-Asserted-Identity') && !JAMBONZ_DISABLE_DIAL_PAI_HEADER &&
{'P-Asserted-Identity': req.get('P-Asserted-Identity')}),
...(req && req.has('P-Asserted-Identity') && {'P-Asserted-Identity': req.get('P-Asserted-Identity')}),
...(req && req.has('X-Voip-Carrier-Sid') && {'X-Voip-Carrier-Sid': req.get('X-Voip-Carrier-Sid')}),
// Put headers at the end to make sure opt.headers override all default behavior.
...this.headers
@@ -455,14 +449,10 @@ class TaskDial extends Task {
}
const ms = await cs.getMS();
this.timerRing = setTimeout(async() => {
this.timerRing = setTimeout(() => {
this.logger.info(`Dial:_attemptCall: ring no answer timer ${this.timeout}s exceeded`);
this.timerRing = null;
try {
await this._killOutdials();
} catch (err) {
this.logger.info(err, 'Dial:_attemptCall - error killing outdials');
}
this._killOutdials();
this.result = {
dialCallStatus: CallStatus.NoAnswer,
dialSipStatus: 487
@@ -541,8 +531,7 @@ class TaskDial extends Task {
}
})
.on('callStatusChange', (obj) => {
if (this.results.dialCallStatus !== CallStatus.Completed &&
this.results.dialCallStatus !== CallStatus.NoAnswer) {
if (this.results.dialCallStatus !== CallStatus.Completed) {
Object.assign(this.results, {
dialCallStatus: obj.callStatus,
dialSipStatus: obj.sipStatus,

View File

@@ -6,7 +6,6 @@ const {
AzureTranscriptionEvents,
DeepgramTranscriptionEvents,
SonioxTranscriptionEvents,
CobaltTranscriptionEvents,
IbmTranscriptionEvents,
NvidiaTranscriptionEvents,
JambonzTranscriptionEvents
@@ -59,9 +58,6 @@ class TaskGather extends SttTask {
logger.debug('Gather: an empty hints array was supplied, so we will mask global hints');
this.maskGlobalSttHints = true;
}
// fast Recognition, fire event after a specified time after the last hypothesis.
this.fastRecognitionTimeout = typeof this.data.recognizer.fastRecognitionTimeout === 'number' ?
this.data.recognizer.fastRecognitionTimeout * 1000 : 0;
}
this.digitBuffer = '';
@@ -115,7 +111,7 @@ class TaskGather extends SttTask {
if (cs.hasGlobalSttHints && !this.maskGlobalSttHints) {
const {hints, hintsBoost} = cs.globalSttHints;
const setOfHints = new Set((this.data.recognizer.hints || [])
const setOfHints = new Set(this.data.recognizer.hints
.concat(hints)
.filter((h) => typeof h === 'string' && h.length > 0));
this.data.recognizer.hints = [...setOfHints];
@@ -178,10 +174,6 @@ class TaskGather extends SttTask {
if (!this.data.recognizer.vendor) {
this.data.recognizer.vendor = this.vendor;
}
if (this.vendor === 'cobalt' && !this.data.recognizer.model) {
// By default, application saves cobalt model in language
this.data.recognizer.model = cs.speechRecognizerLanguage;
}
if (this.needsStt && !this.sttCredentials) {
try {
@@ -195,18 +187,12 @@ class TaskGather extends SttTask {
}
}
/* when using cobalt model is required */
if (this.vendor === 'cobalt' && !this.data.recognizer.model) {
this.notifyError({ msg: 'ASR error', details:'Cobalt requires a model to be specified'});
throw new Error('Cobalt requires a model to be specified');
}
const startListening = async(cs, ep) => {
this._startTimer();
if (this.isContinuousAsr && 0 === this.timeout) this._startAsrTimer();
if (this.input.includes('speech') && !this.listenDuringPrompt) {
try {
await this._setSpeechHandlers(cs, ep);
await this._initSpeech(cs, ep);
if (this.killed) {
this.logger.info('Gather:exec - task was quickly killed so do not transcribe');
return;
@@ -274,7 +260,7 @@ class TaskGather extends SttTask {
}
if (this.input.includes('speech') && this.listenDuringPrompt) {
await this._setSpeechHandlers(cs, ep);
await this._initSpeech(cs, ep);
this._startTranscribing(ep);
updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid)
.catch(() => {/*already logged error */});
@@ -348,9 +334,7 @@ class TaskGather extends SttTask {
}
}
async _setSpeechHandlers(cs, ep) {
if (this._speechHandlersSet) return;
this._speechHandlersSet = true;
async _initSpeech(cs, ep) {
const opts = this.setChannelVarsForStt(this, this.sttCredentials, this.data.recognizer);
switch (this.vendor) {
case 'google':
@@ -403,36 +387,6 @@ class TaskGather extends SttTask {
ep.addCustomEventListener(SonioxTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
break;
case 'cobalt':
this.bugname = 'cobalt_transcribe';
ep.addCustomEventListener(CobaltTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
/* cobalt doesnt have language, it has model, which is required */
if (!this.data.recognizer.model) {
throw new Error('Cobalt requires a model to be specified');
}
this.language = this.data.recognizer.model;
/* special case: if using hints with cobalt we need to compile them */
this.hostport = opts.COBALT_SERVER_URI;
if (this.vendor === 'cobalt' && opts.COBALT_SPEECH_HINTS) {
try {
const context = await this.compileHintsForCobalt(
ep,
this.hostport,
this.data.recognizer.model,
opts.COBALT_CONTEXT_TOKEN,
opts.COBALT_SPEECH_HINTS
);
if (context) opts.COBALT_COMPILED_CONTEXT_DATA = context;
delete opts.COBALT_SPEECH_HINTS;
} catch (err) {
this.logger.error({err}, 'Error compiling hints for cobalt');
}
}
delete opts.COBALT_SERVER_URI;
break;
case 'ibm':
this.bugname = 'ibm_transcribe';
ep.addCustomEventListener(IbmTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
@@ -492,7 +446,6 @@ class TaskGather extends SttTask {
locale: this.language,
interim: this.interim,
bugname: this.bugname,
hostport: this.hostport,
}).catch((err) => {
const {writeAlerts, AlertType} = this.cs.srf.locals;
this.logger.error(err, 'TaskGather:_startTranscribing error');
@@ -536,22 +489,6 @@ class TaskGather extends SttTask {
this._asrTimer = null;
}
_startFastRecognitionTimer(evt) {
assert(this.fastRecognitionTimeout > 0);
this._clearFastRecognitionTimer();
this._fastRecognitionTimer = setTimeout(() => {
evt.is_final = true;
this._resolve('speech', evt);
}, this.fastRecognitionTimeout);
}
_clearFastRecognitionTimer() {
if (this._fastRecognitionTimer) {
clearTimeout(this._fastRecognitionTimer);
}
this._fastRecognitionTimer = null;
}
_startFinalAsrTimer() {
this._clearFinalAsrTimer();
this._finalAsrTimer = setTimeout(() => {
@@ -689,9 +626,6 @@ class TaskGather extends SttTask {
}
this._killAudio(cs);
}
if (this.fastRecognitionTimeout) {
this._startFastRecognitionTimer(evt);
}
if (this.partialResultHook) {
const b3 = this.getTracingPropagation();
const httpHeaders = b3 && {b3};
@@ -742,16 +676,16 @@ class TaskGather extends SttTask {
_onJambonzConnect(_cs, _ep) {
this.logger.debug('TaskGather:_onJambonzConnect');
}
async _onJambonzError(cs, ep, evt) {
async _onJambonzError(cs, _ep, evt) {
this.logger.info({evt}, 'TaskGather:_onJambonzError');
if (this.isHandledByPrimaryProvider && this.fallbackVendor) {
ep.stopTranscription({vendor: this.vendor})
_ep.stopTranscription({vendor: this.vendor})
.catch((err) => this.logger.error({err}, `Error stopping transcription for primary vendor ${this.vendor}`));
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
try {
await this._fallback();
await this._initSpeech(cs, ep);
this._startTranscribing(ep);
await this._initSpeech(cs, _ep);
this._startTranscribing(_ep);
updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid);
return;
} catch (error) {
@@ -856,7 +790,6 @@ class TaskGather extends SttTask {
}
clearTimeout(this.interDigitTimer);
this._clearTimer();
this._clearFastRecognitionTimer();
if (this.isContinuousAsr && reason.startsWith('speech')) {
evt = {

View File

@@ -1,7 +1,6 @@
const Task = require('./task');
const assert = require('assert');
const crypto = require('crypto');
const { TaskPreconditions, CobaltTranscriptionEvents } = require('../utils/constants');
const { TaskPreconditions } = require('../utils/constants');
class SttTask extends Task {
@@ -96,44 +95,6 @@ class SttTask extends Task {
this.data.recognizer.label = this.label;
this.sttCredentials = await this._initSpeechCredentials(this.cs, this.vendor, this.label);
}
async compileHintsForCobalt(ep, hostport, model, token, hints) {
const {retrieveKey} = this.cs.srf.locals.dbHelpers;
const hash = crypto.createHash('sha1');
hash.update(`${model}:${hints}`);
const key = `cobalt:${hash.digest('hex')}`;
this.context = await retrieveKey(key);
if (this.context) {
this.logger.debug({model, hints}, 'found cached cobalt context for supplied hints');
return this.context;
}
this.logger.debug({model, hints}, 'compiling cobalt context for supplied hints');
return new Promise((resolve, reject) => {
this.cobaltCompileResolver = resolve;
ep.addCustomEventListener(CobaltTranscriptionEvents.CompileContext, this._onCompileContext.bind(this, ep, key));
ep.api('uuid_cobalt_compile_context', [ep.uuid, hostport, model, token, hints], (err, evt) => {
if (err || 0 !== evt.getBody().indexOf('+OK')) {
ep.removeCustomEventListener(CobaltTranscriptionEvents.CompileContext);
return reject(err);
}
});
});
}
_onCompileContext(ep, key, evt) {
const {addKey} = this.cs.srf.locals.dbHelpers;
this.logger.debug({evt}, `received cobalt compile context event, will cache under ${key}`);
this.cobaltCompileResolver(evt.compiled_context);
ep.removeCustomEventListener(CobaltTranscriptionEvents.CompileContext);
this.cobaltCompileResolver = null;
//cache the compiled context
addKey(key, evt.compiled_context, 3600 * 12)
.catch((err) => this.logger.info({err}, `Error caching cobalt context for ${key}`));
}
}
module.exports = SttTask;

View File

@@ -7,11 +7,9 @@ const {
AzureTranscriptionEvents,
DeepgramTranscriptionEvents,
SonioxTranscriptionEvents,
CobaltTranscriptionEvents,
IbmTranscriptionEvents,
NvidiaTranscriptionEvents,
JambonzTranscriptionEvents,
TranscribeStatus
JambonzTranscriptionEvents
} = require('../utils/constants');
const { normalizeJambones } = require('@jambonz/verb-specifications');
const SttTask = require('./stt-task');
@@ -91,10 +89,6 @@ class TaskTranscribe extends SttTask {
if (!this.data.recognizer.vendor) {
this.data.recognizer.vendor = this.vendor;
}
if (this.vendor === 'cobalt' && !this.data.recognizer.model) {
// By default, application saves cobalt model in language
this.data.recognizer.model = cs.speechRecognizerLanguage;
}
if (!this.sttCredentials) {
try {
@@ -108,12 +102,6 @@ class TaskTranscribe extends SttTask {
}
}
/* when using cobalt model is required */
if (this.vendor === 'cobalt' && !this.data.recognizer.model) {
this.notifyError({ msg: 'ASR error', details:'Cobalt requires a model to be specified'});
throw new Error('Cobalt requires a model to be specified');
}
try {
await this._startTranscribing(cs, ep, 1);
if (this.separateRecognitionPerChannel && ep2) {
@@ -131,7 +119,8 @@ class TaskTranscribe extends SttTask {
this.removeSpeechListeners(ep);
}
async _stopTranscription() {
async kill(cs) {
super.kill(cs);
let stopTranscription = false;
if (this.ep?.connected) {
stopTranscription = true;
@@ -143,13 +132,6 @@ class TaskTranscribe extends SttTask {
this.ep2.stopTranscription({vendor: this.vendor})
.catch((err) => this.logger.info(err, 'Error TaskTranscribe:kill'));
}
return stopTranscription;
}
async kill(cs) {
super.kill(cs);
const stopTranscription = this._stopTranscription();
// hangup after 1 sec if we don't get a final transcription
if (stopTranscription) this._timer = setTimeout(() => this.notifyTaskDone(), 1500);
else this.notifyTaskDone();
@@ -157,26 +139,7 @@ class TaskTranscribe extends SttTask {
await this.awaitTaskDone();
}
async updateTranscribe(status) {
if (!this.killed && this.ep && this.ep.connected) {
this.logger.info(`TaskTranscribe:updateTranscribe status ${status}`);
switch (status) {
case TranscribeStatus.Pause:
await this._stopTranscription();
break;
case TranscribeStatus.Resume:
await this._startTranscribing(this.cs, this.ep, 1);
if (this.separateRecognitionPerChannel && this.ep2) {
await this._startTranscribing(this.cs, this.ep2, 2);
}
break;
}
}
}
async _setSpeechHandlers(cs, ep, channel) {
if (this._speechHandlersSet) return;
this._speechHandlersSet = true;
async _startTranscribing(cs, ep, channel) {
const opts = this.setChannelVarsForStt(this, this.sttCredentials, this.data.recognizer);
switch (this.vendor) {
case 'google':
@@ -229,36 +192,6 @@ class TaskTranscribe extends SttTask {
ep.addCustomEventListener(SonioxTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
break;
case 'cobalt':
this.bugname = 'cobalt_transcribe';
ep.addCustomEventListener(CobaltTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
/* cobalt doesnt have language, it has model, which is required */
if (!this.data.recognizer.model) {
throw new Error('Cobalt requires a model to be specified');
}
this.language = this.data.recognizer.model;
/* special case: if using hints with cobalt we need to compile them */
this.hostport = opts.COBALT_SERVER_URI;
if (this.vendor === 'cobalt' && opts.COBALT_SPEECH_HINTS) {
try {
const context = await this.compileHintsForCobalt(
ep,
opts.COBALT_SERVER_URI,
this.data.recognizer.model,
opts.COBALT_CONTEXT_TOKEN,
opts.COBALT_SPEECH_HINTS
);
if (context) opts.COBALT_COMPILED_CONTEXT_DATA = context;
delete opts.COBALT_SPEECH_HINTS;
} catch (err) {
this.logger.error({err}, 'Error compiling hints for cobalt');
}
}
break;
case 'ibm':
this.bugname = 'ibm_transcribe';
ep.addCustomEventListener(IbmTranscriptionEvents.Transcription,
@@ -280,12 +213,10 @@ class TaskTranscribe extends SttTask {
ep.addCustomEventListener(NvidiaTranscriptionEvents.VadDetected,
this._onVadDetected.bind(this, cs, ep));
break;
default:
if (this.vendor.startsWith('custom:')) {
this.bugname = `${this.vendor}_transcribe`;
ep.addCustomEventListener(JambonzTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(JambonzTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(JambonzTranscriptionEvents.Connect, this._onJambonzConnect.bind(this, cs, ep));
ep.addCustomEventListener(JambonzTranscriptionEvents.ConnectFailure,
this._onJambonzConnectFailure.bind(this, cs, ep));
@@ -302,10 +233,7 @@ class TaskTranscribe extends SttTask {
ep.addCustomEventListener(JambonzTranscriptionEvents.Error, this._onJambonzError.bind(this, cs, ep));
await ep.set(opts)
.catch((err) => this.logger.info(err, 'Error setting channel variables'));
}
async _startTranscribing(cs, ep, channel) {
await this._setSpeechHandlers(cs, ep, channel);
await this._transcribe(ep);
/* start child span for this channel */
@@ -319,8 +247,7 @@ class TaskTranscribe extends SttTask {
interim: this.interim ? true : false,
locale: this.language,
channels: /*this.separateRecognitionPerChannel ? 2 : */ 1,
bugname: this.bugname,
hostport: this.hostport
bugname: this.bugname
});
}

View File

@@ -9,7 +9,6 @@ const {
NvidiaTranscriptionEvents,
IbmTranscriptionEvents,
SonioxTranscriptionEvents,
CobaltTranscriptionEvents,
DeepgramTranscriptionEvents,
JambonzTranscriptionEvents,
AmdEvents,
@@ -315,10 +314,6 @@ module.exports = (logger) => {
ep.addCustomEventListener(NvidiaTranscriptionEvents.Transcription, amd.transcriptionHandler);
break;
case 'cobalt':
ep.addCustomEventListener(CobaltTranscriptionEvents.Transcription, amd.transcriptionHandler);
break;
default:
if (vendor.startsWith('custom:')) {
ep.addCustomEventListener(JambonzTranscriptionEvents.Transcription, amd.transcriptionHandler);

View File

@@ -51,11 +51,6 @@
"Silence": "silence",
"Resume": "resume"
},
"TranscribeStatus": {
"Pause": "pause",
"Silence": "silence",
"Resume": "resume"
},
"TaskPreconditions": {
"None": "none",
"Endpoint": "endpoint",
@@ -95,11 +90,6 @@
"Transcription": "soniox_transcribe::transcription",
"Error": "soniox_transcribe::error"
},
"CobaltTranscriptionEvents": {
"Transcription": "cobalt_speech::transcription",
"CompileContext": "cobalt_speech::compile_context_response",
"Error": "cobalt_speech::error"
},
"IbmTranscriptionEvents": {
"Transcription": "ibm_transcribe::transcription",
"ConnectFailure": "ibm_transcribe::connect_failed",

View File

@@ -3,10 +3,13 @@ const {decrypt} = require('./encrypt-decrypt');
const sqlAccountDetails = `SELECT *
FROM accounts account
WHERE account.account_sid = ?`;
const sqlSpeechCredentialsForAccount = `SELECT *
const sqlSpeechCredentials = `SELECT *
FROM speech_credentials
WHERE account_sid = ? OR (account_sid is NULL AND service_provider_sid =
(SELECT service_provider_sid from accounts where account_sid = ?))`;
WHERE account_sid = ? `;
const sqlSpeechCredentialsForSP = `SELECT *
FROM speech_credentials
WHERE service_provider_sid =
(SELECT service_provider_sid from accounts where account_sid = ?)`;
const sqlQueryAccountCarrierByName = `SELECT voip_carrier_sid
FROM voip_carriers vc
WHERE vc.account_sid = ?
@@ -46,10 +49,8 @@ const speechMapper = (cred) => {
obj.region = o.region;
obj.use_custom_stt = o.use_custom_stt;
obj.custom_stt_endpoint = o.custom_stt_endpoint;
obj.custom_stt_endpoint_url = o.custom_stt_endpoint_url;
obj.use_custom_tts = o.use_custom_tts;
obj.custom_tts_endpoint = o.custom_tts_endpoint;
obj.custom_tts_endpoint_url = o.custom_tts_endpoint_url;
}
else if ('wellsaid' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
@@ -81,10 +82,6 @@ const speechMapper = (cred) => {
const o = JSON.parse(decrypt(credential));
obj.riva_server_uri = o.riva_server_uri;
}
else if ('cobalt' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.cobalt_server_uri = o.cobalt_server_uri;
}
else if (obj.vendor.startsWith('custom:')) {
const o = JSON.parse(decrypt(credential));
obj.auth_token = o.auth_token;
@@ -111,9 +108,16 @@ module.exports = (logger, srf) => {
const [r] = await pp.query({sql: sqlAccountDetails, nestTables: true}, [account_sid]);
if (0 === r.length) throw new Error(`invalid accountSid: ${account_sid}`);
const [r2] = await pp.query(sqlSpeechCredentialsForAccount, [account_sid, account_sid]);
const [r2] = await pp.query(sqlSpeechCredentials, [account_sid]);
const speech = r2.map(speechMapper);
/* add service provider creds unless we have that vendor at the account level */
const [r3] = await pp.query(sqlSpeechCredentialsForSP, [account_sid]);
r3.forEach((s) => {
if (!speech.find((s2) => s2.vendor === s.vendor)) {
speech.push(speechMapper(s));
}
});
const account = r[0];
bucketCredentialDecrypt(account);

View File

@@ -6,8 +6,7 @@ const {PORT, HTTP_PORT_MAX} = require('../config');
const doListen = (logger, app, port, resolve) => {
const server = app.listen(port, () => {
const {srf} = app.locals;
srf.locals.serviceUrl = `http://${srf.locals.ipv4}:${port}`;
logger.info(`listening for HTTP requests on port ${port}, serviceUrl is ${srf.locals.serviceUrl}`);
logger.info(`listening for HTTP requests on port ${PORT}, serviceUrl is ${srf.locals.serviceUrl}`);
resolve({server, app});
});
return server;

View File

@@ -1,32 +1,7 @@
const sdpTransform = require('sdp-transform');
const isOnhold = (sdp) => {
return sdp && (sdp.includes('a=sendonly') || sdp.includes('a=inactive'));
};
const mergeSdpMedia = (sdp1, sdp2) => {
const parsedSdp1 = sdpTransform.parse(sdp1);
const parsedSdp2 = sdpTransform.parse(sdp2);
parsedSdp1.media.push(...parsedSdp2.media);
return sdpTransform.write(parsedSdp1);
};
const extractSdpMedia = (sdp) => {
const parsedSdp1 = sdpTransform.parse(sdp);
if (parsedSdp1.media.length > 1) {
parsedSdp1.media = [parsedSdp1.media[0]];
const parsedSdp2 = sdpTransform.parse(sdp);
parsedSdp2.media = [parsedSdp2.media[1]];
return [sdpTransform.write(parsedSdp1), sdpTransform.write(parsedSdp2)];
} else {
return [sdp, sdp];
}
};
module.exports = {
isOnhold,
mergeSdpMedia,
extractSdpMedia
isOnhold
};

View File

@@ -7,7 +7,6 @@ const {
DeepgramTranscriptionEvents,
SonioxTranscriptionEvents,
NvidiaTranscriptionEvents,
CobaltTranscriptionEvents,
JambonzTranscriptionEvents
} = require('./constants');
@@ -93,11 +92,6 @@ const stickyVars = {
nvidia: [
'NVIDIA_HINTS'
],
cobalt: [
'COBALT_SPEECH_HINTS',
'COBALT_COMPILED_CONTEXT_DATA',
'COBALT_METADATA'
],
soniox: [
'SONIOX_PROFANITY_FILTER',
'SONIOX_MODEL'
@@ -231,26 +225,6 @@ const normalizeGoogle = (evt, channel, language) => {
};
};
const normalizeCobalt = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
const alternatives = (evt.alternatives || [])
.map((alt) => ({
confidence: alt.confidence,
transcript: alt.transcript_formatted,
}));
return {
language_code: language,
channel_tag: channel,
is_final: evt.is_final,
alternatives,
vendor: {
name: 'cobalt',
evt: copy
}
};
};
const normalizeCustom = (evt, channel, language, vendor) => {
const copy = JSON.parse(JSON.stringify(evt));
return {
@@ -343,8 +317,6 @@ module.exports = (logger) => {
return normalizeNvidia(evt, channel, language);
case 'soniox':
return normalizeSoniox(evt, channel, language);
case 'cobalt':
return normalizeCobalt(evt, channel, language);
default:
if (vendor.startsWith('custom:')) {
return normalizeCustom(evt, channel, language, vendor);
@@ -419,16 +391,14 @@ module.exports = (logger) => {
else if ('microsoft' === vendor) {
opts = {
...opts,
...(rOpts.hints && rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
{AZURE_SPEECH_HINTS: rOpts.hints.map((h) => h.trim()).join(',')}),
...(rOpts.hints && rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
{AZURE_SPEECH_HINTS: rOpts.hints.map((h) => h.phrase).join(',')}),
...(rOpts.altLanguages && rOpts.altLanguages.length > 0 &&
{AZURE_SPEECH_ALTERNATIVE_LANGUAGE_CODES: [...new Set(rOpts.altLanguages)].join(',')}),
...(rOpts.requestSnr && {AZURE_REQUEST_SNR: 1}),
...(rOpts.profanityOption && {AZURE_PROFANITY_OPTION: rOpts.profanityOption}),
...(sttCredentials.use_custom_stt && sttCredentials.custom_stt_endpoint_url &&
{AZURE_SERVICE_ENDPOINT: sttCredentials.custom_stt_endpoint_url}),
...(rOpts.azureServiceEndpoint && {AZURE_SERVICE_ENDPOINT: rOpts.azureServiceEndpoint}),
...(rOpts.initialSpeechTimeoutMs > 0 &&
{AZURE_INITIAL_SPEECH_TIMEOUT_MS: rOpts.initialSpeechTimeoutMs}),
@@ -436,11 +406,11 @@ module.exports = (logger) => {
...(rOpts.audioLogging && {AZURE_AUDIO_LOGGING: 1}),
...{AZURE_USE_OUTPUT_FORMAT_DETAILED: 1},
...(sttCredentials && {
...(sttCredentials.api_key && {AZURE_SUBSCRIPTION_KEY: sttCredentials.api_key}),
...(sttCredentials.region && {AZURE_REGION: sttCredentials.region}),
AZURE_SUBSCRIPTION_KEY: sttCredentials.api_key,
AZURE_REGION: sttCredentials.region,
}),
...(sttCredentials.use_custom_stt && sttCredentials.custom_stt_endpoint &&
{AZURE_SERVICE_ENDPOINT_ID: sttCredentials.custom_stt_endpoint}),
{AZURE_SERVICE_ENDPOINT_ID: sttCredentials.custom_stt_endpoint})
};
}
else if ('nuance' === vendor) {
@@ -506,7 +476,7 @@ module.exports = (logger) => {
...(deepgramOptions.profanityFilter) &&
{DEEPGRAM_SPEECH_PROFANITY_FILTER: 1},
...(deepgramOptions.redact) &&
{DEEPGRAM_SPEECH_REDACT: deepgramOptions.redact},
{DEEPGRAM_SPEECH_REDACT: 1},
...(deepgramOptions.diarize) &&
{DEEPGRAM_SPEECH_DIARIZE: 1},
...(deepgramOptions.diarizeVersion) &&
@@ -612,25 +582,6 @@ module.exports = (logger) => {
{NVIDIA_CUSTOM_CONFIGURATION: JSON.stringify(nvidiaOptions.customConfiguration)}),
};
}
else if ('cobalt' === vendor) {
const {cobaltOptions = {}} = rOpts;
const cobaltUri = cobaltOptions.serverUri || sttCredentials.cobalt_server_uri;
opts = {
...opts,
...(rOpts.words && {COBALT_WORD_TIME_OFFSETS: 1}),
...(!rOpts.words && {COBALT_WORD_TIME_OFFSETS: 0}),
...(rOpts.model && {COBALT_MODEL: rOpts.model}),
...(cobaltUri && {COBALT_SERVER_URI: cobaltUri}),
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'string' &&
{COBALT_SPEECH_HINTS: rOpts.hints.join(',')}),
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'object' &&
{COBALT_SPEECH_HINTS: JSON.stringify(rOpts.hints)}),
...(rOpts.hints?.length > 0 && {COBALT_CONTEXT_TOKEN: cobaltOptions.contextToken || 'unk:default'}),
...(cobaltOptions.metadata && {COBALT_METADATA: cobaltOptions.metadata}),
...(cobaltOptions.enableConfusionNetwork && {COBALT_ENABLE_CONFUSION_NETWORK: 1}),
...(cobaltOptions.compiledContextData && {COBALT_COMPILED_CONTEXT_DATA: cobaltOptions.compiledContextData}),
};
}
else if (vendor.startsWith('custom:')) {
let {options = {}} = rOpts;
const {auth_token, custom_stt_url} = sttCredentials;
@@ -680,9 +631,6 @@ module.exports = (logger) => {
ep.removeCustomEventListener(SonioxTranscriptionEvents.Transcription);
ep.removeCustomEventListener(CobaltTranscriptionEvents.Transcription);
ep.removeCustomEventListener(CobaltTranscriptionEvents.CompileContext);
ep.removeCustomEventListener(NvidiaTranscriptionEvents.Transcription);
ep.removeCustomEventListener(NvidiaTranscriptionEvents.TranscriptionComplete);
ep.removeCustomEventListener(NvidiaTranscriptionEvents.StartOfSpeech);
@@ -714,10 +662,6 @@ module.exports = (logger) => {
const {apiKey} = recognizer.sonioxOptions || {};
if (apiKey) return {api_key: apiKey};
}
else if (recognizer.vendor === 'cobalt') {
const {serverUri} = recognizer.cobaltOptions || {};
if (serverUri) return {cobalt_server_uri: serverUri};
}
else if (recognizer.vendor === 'ibm') {
const {ttsApiKey, ttsRegion, sttApiKey, sttRegion, instanceId} = recognizer.ibmOptions || {};
if (ttsApiKey || sttApiKey) return {

package-lock.json (generated)
View File

@@ -14,10 +14,10 @@
"@jambonz/db-helpers": "^0.9.1",
"@jambonz/http-health-check": "^0.0.1",
"@jambonz/realtimedb-helpers": "^0.8.6",
"@jambonz/speech-utils": "^0.0.21",
"@jambonz/speech-utils": "^0.0.19",
"@jambonz/stats-collector": "^0.1.9",
"@jambonz/time-series": "^0.2.8",
"@jambonz/verb-specifications": "^0.0.35",
"@jambonz/verb-specifications": "^0.0.30",
"@opentelemetry/api": "^1.4.0",
"@opentelemetry/exporter-jaeger": "^1.9.0",
"@opentelemetry/exporter-trace-otlp-http": "^0.35.0",
@@ -30,7 +30,7 @@
"bent": "^7.3.12",
"debug": "^4.3.4",
"deepcopy": "^2.1.0",
"drachtio-fsmrf": "^3.0.27",
"drachtio-fsmrf": "^3.0.23",
"drachtio-srf": "^4.5.26",
"express": "^4.18.2",
"ip": "^1.1.8",
@@ -47,7 +47,7 @@
"uuid-random": "^1.3.2",
"verify-aws-sns-signature": "^0.1.0",
"ws": "^8.9.0",
"xml2js": "^0.6.2"
"xml2js": "^0.5.0"
},
"devDependencies": {
"clear-module": "^4.1.2",
@@ -2983,9 +2983,9 @@
}
},
"node_modules/@jambonz/speech-utils": {
"version": "0.0.21",
"resolved": "https://registry.npmjs.org/@jambonz/speech-utils/-/speech-utils-0.0.21.tgz",
"integrity": "sha512-I9ULe9PHMIISZ5gr58SZ+9I5hL1F24XjFPqPlulfaoe6vJ4EgSKs1l1fWKPCuZQmv6Cf259gK5H+bjGTqjVRQw==",
"version": "0.0.19",
"resolved": "https://registry.npmjs.org/@jambonz/speech-utils/-/speech-utils-0.0.19.tgz",
"integrity": "sha512-uVwt4cCQkNhRDDPK0BHgI9Zn+bXoqQdtK8kjj3dbqPAOO3FlMTWxA2ZWJoglzZShQUNzpEg3CLcVTAGNnJHP2Q==",
"dependencies": {
"@aws-sdk/client-polly": "^3.359.0",
"@google-cloud/text-to-speech": "^4.2.1",
@@ -2996,7 +2996,7 @@
"google-protobuf": "^3.21.2",
"ibm-watson": "^8.0.0",
"ioredis": "^5.3.2",
"microsoft-cognitiveservices-speech-sdk": "^1.31.0",
"microsoft-cognitiveservices-speech-sdk": "^1.26.0",
"undici": "^5.21.0"
}
},
@@ -3019,9 +3019,9 @@
}
},
"node_modules/@jambonz/verb-specifications": {
"version": "0.0.35",
"resolved": "https://registry.npmjs.org/@jambonz/verb-specifications/-/verb-specifications-0.0.35.tgz",
"integrity": "sha512-MTdK4zGxE6+lO4Ir0q/q3TjKPOvWIewH6woS2NmDSMqVOdBKmO5lDDDlwutEr689CnVQbSpwiob1Uv42IK12EA==",
"version": "0.0.30",
"resolved": "https://registry.npmjs.org/@jambonz/verb-specifications/-/verb-specifications-0.0.30.tgz",
"integrity": "sha512-IbusgtBBo2V5Tc1FvDJvkWogHOhR2tNZN6Iyb2PjUomMI48BsWKmHW/wegppKTDpBBeN3ABY+L96pvX4N0+mCw==",
"dependencies": {
"debug": "^4.3.4",
"pino": "^8.8.0"
@@ -5152,9 +5152,9 @@
}
},
"node_modules/drachtio-fsmrf": {
"version": "3.0.27",
"resolved": "https://registry.npmjs.org/drachtio-fsmrf/-/drachtio-fsmrf-3.0.27.tgz",
"integrity": "sha512-k5jwc6RpiSoasKEMq0ROxXVga+yaMplujNUKjAidKjheso+PijdQtjEwNANl0F+JyTBELaRe81PEgeRHuAdv1w==",
"version": "3.0.23",
"resolved": "https://registry.npmjs.org/drachtio-fsmrf/-/drachtio-fsmrf-3.0.23.tgz",
"integrity": "sha512-ElruNKuPzFiMOUH06PUd3dR9tElEGRhbP/gXxai58qhrqRQNLJxzCRJkbgbjrZdYWFQPHOAzy4ZQb7+qq0AUPw==",
"dependencies": {
"camel-case": "^4.1.2",
"debug": "^2.6.9",
@@ -7807,9 +7807,9 @@
}
},
"node_modules/microsoft-cognitiveservices-speech-sdk": {
"version": "1.31.0",
"resolved": "https://registry.npmjs.org/microsoft-cognitiveservices-speech-sdk/-/microsoft-cognitiveservices-speech-sdk-1.31.0.tgz",
"integrity": "sha512-wmNi0XoGtQwRoI2To6QSrGHVW0d8WfhJwXtE2nk48l4YkBiDqdPV2tdSXFHRrdv3uwr/+THip45H91Fllpm8qA==",
"version": "1.29.0",
"resolved": "https://registry.npmjs.org/microsoft-cognitiveservices-speech-sdk/-/microsoft-cognitiveservices-speech-sdk-1.29.0.tgz",
"integrity": "sha512-/NaDni70OR5x0FPG6LD/aOkmfLIsOwqpw0UsijcHbH+U0q6FPAX1VZVlv6ZMvkObw3k/FJv0N9CFNsN1P4M7kA==",
"dependencies": {
"agent-base": "^6.0.1",
"bent": "^7.3.12",
@@ -10445,9 +10445,9 @@
}
},
"node_modules/xml2js": {
"version": "0.6.2",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.6.2.tgz",
"integrity": "sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA==",
"version": "0.5.0",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz",
"integrity": "sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==",
"dependencies": {
"sax": ">=0.6.0",
"xmlbuilder": "~11.0.0"
@@ -12949,9 +12949,9 @@
}
},
"@jambonz/speech-utils": {
"version": "0.0.21",
"resolved": "https://registry.npmjs.org/@jambonz/speech-utils/-/speech-utils-0.0.21.tgz",
"integrity": "sha512-I9ULe9PHMIISZ5gr58SZ+9I5hL1F24XjFPqPlulfaoe6vJ4EgSKs1l1fWKPCuZQmv6Cf259gK5H+bjGTqjVRQw==",
"version": "0.0.19",
"resolved": "https://registry.npmjs.org/@jambonz/speech-utils/-/speech-utils-0.0.19.tgz",
"integrity": "sha512-uVwt4cCQkNhRDDPK0BHgI9Zn+bXoqQdtK8kjj3dbqPAOO3FlMTWxA2ZWJoglzZShQUNzpEg3CLcVTAGNnJHP2Q==",
"requires": {
"@aws-sdk/client-polly": "^3.359.0",
"@google-cloud/text-to-speech": "^4.2.1",
@@ -12962,7 +12962,7 @@
"google-protobuf": "^3.21.2",
"ibm-watson": "^8.0.0",
"ioredis": "^5.3.2",
"microsoft-cognitiveservices-speech-sdk": "^1.31.0",
"microsoft-cognitiveservices-speech-sdk": "^1.26.0",
"undici": "^5.21.0"
}
},
@@ -12985,9 +12985,9 @@
}
},
"@jambonz/verb-specifications": {
"version": "0.0.35",
"resolved": "https://registry.npmjs.org/@jambonz/verb-specifications/-/verb-specifications-0.0.35.tgz",
"integrity": "sha512-MTdK4zGxE6+lO4Ir0q/q3TjKPOvWIewH6woS2NmDSMqVOdBKmO5lDDDlwutEr689CnVQbSpwiob1Uv42IK12EA==",
"version": "0.0.30",
"resolved": "https://registry.npmjs.org/@jambonz/verb-specifications/-/verb-specifications-0.0.30.tgz",
"integrity": "sha512-IbusgtBBo2V5Tc1FvDJvkWogHOhR2tNZN6Iyb2PjUomMI48BsWKmHW/wegppKTDpBBeN3ABY+L96pvX4N0+mCw==",
"requires": {
"debug": "^4.3.4",
"pino": "^8.8.0"
@@ -14610,9 +14610,9 @@
}
},
"drachtio-fsmrf": {
"version": "3.0.27",
"resolved": "https://registry.npmjs.org/drachtio-fsmrf/-/drachtio-fsmrf-3.0.27.tgz",
"integrity": "sha512-k5jwc6RpiSoasKEMq0ROxXVga+yaMplujNUKjAidKjheso+PijdQtjEwNANl0F+JyTBELaRe81PEgeRHuAdv1w==",
"version": "3.0.23",
"resolved": "https://registry.npmjs.org/drachtio-fsmrf/-/drachtio-fsmrf-3.0.23.tgz",
"integrity": "sha512-ElruNKuPzFiMOUH06PUd3dR9tElEGRhbP/gXxai58qhrqRQNLJxzCRJkbgbjrZdYWFQPHOAzy4ZQb7+qq0AUPw==",
"requires": {
"camel-case": "^4.1.2",
"debug": "^2.6.9",
@@ -16661,9 +16661,9 @@
}
},
"microsoft-cognitiveservices-speech-sdk": {
"version": "1.31.0",
"resolved": "https://registry.npmjs.org/microsoft-cognitiveservices-speech-sdk/-/microsoft-cognitiveservices-speech-sdk-1.31.0.tgz",
"integrity": "sha512-wmNi0XoGtQwRoI2To6QSrGHVW0d8WfhJwXtE2nk48l4YkBiDqdPV2tdSXFHRrdv3uwr/+THip45H91Fllpm8qA==",
"version": "1.29.0",
"resolved": "https://registry.npmjs.org/microsoft-cognitiveservices-speech-sdk/-/microsoft-cognitiveservices-speech-sdk-1.29.0.tgz",
"integrity": "sha512-/NaDni70OR5x0FPG6LD/aOkmfLIsOwqpw0UsijcHbH+U0q6FPAX1VZVlv6ZMvkObw3k/FJv0N9CFNsN1P4M7kA==",
"requires": {
"agent-base": "^6.0.1",
"bent": "^7.3.12",
@@ -18637,9 +18637,9 @@
"requires": {}
},
"xml2js": {
"version": "0.6.2",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.6.2.tgz",
"integrity": "sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA==",
"version": "0.5.0",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz",
"integrity": "sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==",
"requires": {
"sax": ">=0.6.0",
"xmlbuilder": "~11.0.0"

View File

@@ -30,10 +30,10 @@
"@jambonz/db-helpers": "^0.9.1",
"@jambonz/http-health-check": "^0.0.1",
"@jambonz/realtimedb-helpers": "^0.8.6",
"@jambonz/speech-utils": "^0.0.21",
"@jambonz/speech-utils": "^0.0.19",
"@jambonz/stats-collector": "^0.1.9",
"@jambonz/time-series": "^0.2.8",
"@jambonz/verb-specifications": "^0.0.35",
"@jambonz/verb-specifications": "^0.0.30",
"@opentelemetry/api": "^1.4.0",
"@opentelemetry/exporter-jaeger": "^1.9.0",
"@opentelemetry/exporter-trace-otlp-http": "^0.35.0",
@@ -46,7 +46,7 @@
"bent": "^7.3.12",
"debug": "^4.3.4",
"deepcopy": "^2.1.0",
"drachtio-fsmrf": "^3.0.27",
"drachtio-fsmrf": "^3.0.23",
"drachtio-srf": "^4.5.26",
"express": "^4.18.2",
"ip": "^1.1.8",
@@ -63,7 +63,7 @@
"uuid-random": "^1.3.2",
"verify-aws-sns-signature": "^0.1.0",
"ws": "^8.9.0",
"xml2js": "^0.6.2"
"xml2js": "^0.5.0"
},
"devDependencies": {
"clear-module": "^4.1.2",

View File

@@ -1045,7 +1045,6 @@ CREATE TABLE `speech_credentials` (
`tts_tested_ok` tinyint(1) DEFAULT NULL,
`stt_tested_ok` tinyint(1) DEFAULT NULL,
`created_at` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
`label` VARCHAR(64),
PRIMARY KEY (`speech_credential_sid`),
UNIQUE KEY `speech_credential_sid` (`speech_credential_sid`),
UNIQUE KEY `speech_credentials_idx_1` (`vendor`,`account_sid`),
@@ -1064,7 +1063,7 @@ CREATE TABLE `speech_credentials` (
LOCK TABLES `speech_credentials` WRITE;
/*!40000 ALTER TABLE `speech_credentials` DISABLE KEYS */;
INSERT INTO `speech_credentials` VALUES ('2add163c-34f2-45c6-a016-f955d218ffb6',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','google','credential-goes-here',1,1,NULL,'2021-04-03 15:42:10',1,1,'2023-05-31 03:44:21', NULL),('2add347f-34f2-45c6-a016-f955d218ffb6',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','microsoft','credential-goes-here',1,1,NULL,'2021-04-03 15:42:10',1,1,'2023-05-31 03:44:21', NULL),('84154212-5c99-4c94-8993-bc2a46288daa',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','aws','credential-goes-here',1,1,NULL,NULL,1,1,'2023-05-31 03:44:21', NULL);
INSERT INTO `speech_credentials` VALUES ('2add163c-34f2-45c6-a016-f955d218ffb6',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','google','credential-goes-here',1,1,NULL,'2021-04-03 15:42:10',1,1,'2023-05-31 03:44:21'),('2add347f-34f2-45c6-a016-f955d218ffb6',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','microsoft','credential-goes-here',1,1,NULL,'2021-04-03 15:42:10',1,1,'2023-05-31 03:44:21'),('84154212-5c99-4c94-8993-bc2a46288daa',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','aws','credential-goes-here',1,1,NULL,NULL,1,1,'2023-05-31 03:44:21');
/*!40000 ALTER TABLE `speech_credentials` ENABLE KEYS */;
UNLOCK TABLES;