Compare commits

...

21 Commits

Author SHA1 Message Date
Hoan Luu Huu
09ae083c9a fix: transcribe 2 channels (#479)
* fix: transcribe 2 channels

* fix

* fix
2023-10-10 08:46:50 -04:00
Hoan Luu Huu
6a3e12e293 feat support refer teluri (#476)
* feat support refer teluri

* update drachtio source code
2023-10-10 08:42:52 -04:00
Hoan Luu Huu
48f2c57ae2 feat sip indialog actionHook (#477)
* feat sip indialog actionHook

* feat sip indialog actionHook

* feat sip indialog actionHook

* feat sip indialog actionHook

* update verb spec

* fix

* fix

* rename function as required _onRequestWithinDialog
2023-10-10 08:41:32 -04:00
Dave Horton
f651cfa0b7 prune unused function (#478) 2023-10-09 15:03:27 -04:00
Dave Horton
cb78627e66 tag is allowed in siprec (#475) 2023-10-06 08:53:13 -04:00
Hoan Luu Huu
ae9386791f fix cobalt model (#469) 2023-09-26 07:41:43 -04:00
Dave Horton
1aa0b07b8f fix: deepgram redact option 2023-09-22 08:13:54 -04:00
Hoan Luu Huu
4e916acf6c fixed clean dualEp for rest dial dual_stream=true (#468)
* fixed clean dualEp for rest dial dual_stream=true

* fix

* fix

* fix

* fix

* fix
2023-09-22 08:12:34 -04:00
Hoan Luu Huu
991fff3386 dual streams (#467)
* dual streams

* dual streams

* dual streams

* dual streams

* use sdp transform
2023-09-21 07:57:30 -04:00
Dave Horton
76cf4e527f minor: allow hints not to be specified 2023-09-18 10:32:07 -04:00
Dave Horton
d7affddd85 minor fix to create-call: pass cs param to restDial.kill task 2023-09-14 09:10:24 -04:00
Anton Voylenko
d42798e0b4 validate recording env (#466) 2023-09-14 07:40:32 -04:00
Dave Horton
6a8a2aa955 update to provide hostport and model on the command line not via env (#465) 2023-09-13 13:53:00 -04:00
Dave Horton
6587b1f758 include fallback speech selections, if any, in the initial webhook (#464) 2023-09-13 10:19:37 -04:00
Hoan Luu Huu
c29def92e8 feat fast recognition (#461)
* feat fast recognition

* don't use buffer transcript for fast recognition

* update verb specification

* fix merge conflict

---------

Co-authored-by: Dave Horton <daveh@beachdognet.com>
2023-09-13 09:59:12 -04:00
Dave Horton
a1793ac359 Speech vendor/cobalt (#463)
* initial changes for cobalt speech

* wip

* wip

* update to drachtio-fsmrf that supports cobalt

* update to verb-specifications with cobalt speech support

* more wip

* lint

* use node 18 with gh actions

* support for compiling cobalt hints

* fix bug in uuid_cobalt_compile_context

* update verb-specifications

* remove repeated code

* cobalt support for transcribe

* update to verb specs
2023-09-13 09:47:30 -04:00
Hoan Luu Huu
d220733dea add JAMBONZ_DISABLE_DIAL_PAI_HEADER env var (#460) 2023-09-08 20:02:20 -04:00
Hoan Luu Huu
a09605fc51 update speech util version (#458) 2023-09-08 09:58:09 -04:00
Paulo Telles
7f59bba634 issue #456 error on validating hints using microsoft (#454)
* error ono validating hints using microsoft

* fix lint

* fix lint 2

* remove try catch

---------

Co-authored-by: p.souza <p.souza@cognigy.com>
2023-09-08 09:21:44 -04:00
Hoan Luu Huu
1477605e66 fix custom stt transcribe (#457) 2023-09-08 08:09:03 -04:00
Dave Horton
4f0ab83f5f fix #445 (#452) 2023-08-31 08:03:45 -04:00
23 changed files with 632 additions and 92 deletions


@@ -9,7 +9,7 @@ jobs:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 16
node-version: 18
- run: npm ci
- run: npm run jslint
- run: docker pull drachtio/sipp


@@ -37,6 +37,9 @@ Configuration is provided via environment variables:
|STATS_PORT| listening port for metrics host|no|
|STATS_PROTOCOL| 'tcp' or 'udp'|no|
|STATS_TELEGRAF| if 1, metrics will be generated in telegraf format|no|
|JAMBONZ_RECORD_WS_BASE_URL| recording websocket URL to send the recording audio|no|
|JAMBONZ_RECORD_WS_USERNAME| recording websocket username|no|
|JAMBONZ_RECORD_WS_PASSWORD| recording websocket password|no|
### running under pm2
Typically, this application runs under [pm2](https://pm2.io) using an [ecosystem.config.js](https://pm2.keymetrics.io/docs/usage/application-declaration/) file similar to this:
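The README's full ecosystem.config.js example is not shown in this hunk; as a hedged sketch, the three new recording variables could be supplied through the pm2 env block like this (URL and credentials below are placeholders):

// ecosystem.config.js — sketch only; values are placeholders
module.exports = {
  apps: [{
    name: 'jambonz-feature-server',
    script: 'app.js',
    env: {
      JAMBONZ_RECORD_WS_BASE_URL: 'wss://recorder.example.com/api/v1',
      JAMBONZ_RECORD_WS_USERNAME: 'recorder',
      JAMBONZ_RECORD_WS_PASSWORD: 'changeme'
    }
  }]
};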


@@ -144,9 +144,10 @@ const JAMBONES_REDIS_SENTINELS = process.env.JAMBONES_REDIS_SENTINELS ? {
username: process.env.JAMBONES_REDIS_SENTINEL_USERNAME
})
} : null;
const JAMBONZ_RECORD_WS_BASE_URL = process.env.JAMBONZ_RECORD_WS_BASE_URL;
const JAMBONZ_RECORD_WS_USERNAME = process.env.JAMBONZ_RECORD_WS_USERNAME;
const JAMBONZ_RECORD_WS_PASSWORD = process.env.JAMBONZ_RECORD_WS_PASSWORD;
const JAMBONZ_RECORD_WS_BASE_URL = process.env.JAMBONZ_RECORD_WS_BASE_URL || process.env.JAMBONES_RECORD_WS_BASE_URL;
const JAMBONZ_RECORD_WS_USERNAME = process.env.JAMBONZ_RECORD_WS_USERNAME || process.env.JAMBONES_RECORD_WS_USERNAME;
const JAMBONZ_RECORD_WS_PASSWORD = process.env.JAMBONZ_RECORD_WS_PASSWORD || process.env.JAMBONES_RECORD_WS_PASSWORD;
const JAMBONZ_DISABLE_DIAL_PAI_HEADER = process.env.JAMBONZ_DISABLE_DIAL_PAI_HEADER || false;
module.exports = {
JAMBONES_MYSQL_HOST,
@@ -225,5 +226,6 @@ module.exports = {
DEEPGRAM_API_KEY,
JAMBONZ_RECORD_WS_BASE_URL,
JAMBONZ_RECORD_WS_USERNAME,
JAMBONZ_RECORD_WS_PASSWORD
JAMBONZ_RECORD_WS_PASSWORD,
JAMBONZ_DISABLE_DIAL_PAI_HEADER
};


@@ -10,6 +10,7 @@ const HttpRequestor = require('../../utils/http-requestor');
const WsRequestor = require('../../utils/ws-requestor');
const RootSpan = require('../../utils/call-tracer');
const dbUtils = require('../../utils/db-utils');
const { mergeSdpMedia, extractSdpMedia } = require('../../utils/sdp-utils');
router.post('/', async(req, res) => {
const {logger} = req.app.locals;
@@ -127,16 +128,33 @@ router.post('/', async(req, res) => {
/* launch outdial */
let sdp, sipLogger;
let dualEp;
let localSdp = ep.local.sdp;
if (req.body.dual_streams) {
dualEp = await ms.createEndpoint();
localSdp = mergeSdpMedia(localSdp, dualEp.local.sdp);
}
const connectStream = async(remoteSdp) => {
if (remoteSdp !== sdp) {
ep.modify(sdp = remoteSdp);
sdp = remoteSdp;
if (req.body.dual_streams) {
const [sdpLegA, sdpLebB] = extractSdpMedia(remoteSdp);
await ep.modify(sdpLegA);
await dualEp.modify(sdpLebB);
await ep.bridge(dualEp);
} else {
ep.modify(sdp);
}
return true;
}
return false;
};
Object.assign(opts, {
proxy: `sip:${sbcAddress}`,
localSdp: ep.local.sdp
localSdp
});
if (target.auth) opts.auth = target.auth;
@@ -223,6 +241,7 @@ router.post('/', async(req, res) => {
srf,
req: inviteReq,
ep,
ep2: dualEp,
tasks,
callInfo,
accountInfo,
@@ -275,7 +294,10 @@ router.post('/', async(req, res) => {
else console.error(err);
}
ep.destroy();
setTimeout(restDial.kill.bind(restDial), 5000);
if (dualEp) {
dualEp.destroy();
}
setTimeout(restDial.kill.bind(restDial, cs), 5000);
}
} catch (err) {
sysError(logger, res, err);


@@ -299,22 +299,30 @@ module.exports = function(srf, logger) {
if (app.app_json) {
json = JSON.parse(app.app_json);
} else {
const defaults = {
synthesizer: {
vendor: app.speech_synthesis_vendor,
...(app.speech_synthesis_label && {label: app.speech_synthesis_label}),
language: app.speech_synthesis_language,
voice: app.speech_synthesis_voice,
...(app.fallback_speech_synthesis_vendor && {fallback_vendor: app.fallback_speech_synthesis_vendor}),
...(app.fallback_speech_synthesis_label && {fallback_label: app.fallback_speech_synthesis_label}),
...(app.fallback_speech_synthesis_language && {fallback_language: app.fallback_speech_synthesis_language}),
...(app.fallback_speech_synthesis_voice && {fallback_voice: app.fallback_speech_synthesis_voice})
},
recognizer: {
vendor: app.speech_recognizer_vendor,
...(app.speech_synthesis_label && {label: app.speech_synthesis_label}),
language: app.speech_recognizer_language,
...(app.fallback_speech_recognizer_vendor && {fallback_vendor: app.fallback_speech_recognizer_vendor}),
...(app.fallback_speech_recognizer_label && {fallback_label: app.fallback_speech_recognizer_label}),
...(app.fallback_speech_recognizer_language && {fallback_language: app.fallback_speech_recognizer_language})
}
};
const params = Object.assign(['POST', 'WS'].includes(app.call_hook.method) ? { sip: req.msg } : {},
req.locals.callInfo,
{ service_provider_sid: req.locals.service_provider_sid },
{
defaults: {
synthesizer: {
vendor: app.speech_synthesis_vendor,
language: app.speech_synthesis_language,
voice: app.speech_synthesis_voice
},
recognizer: {
vendor: app.speech_recognizer_vendor,
language: app.speech_recognizer_language
}
}
});
{ defaults });
logger.debug({ params }, 'sending initial webhook');
const obj = rootSpan.startChildSpan('performAppWebhook');
span = obj.span;


@@ -414,6 +414,14 @@ class CallSession extends Emitter {
this._onHoldMusic = url;
}
get sipRequestWithinDialogHook() {
return this._sipRequestWithinDialogHook;
}
set sipRequestWithinDialogHook(url) {
this._sipRequestWithinDialogHook = url;
}
hasGlobalSttPunctuation() {
return this._globalSttPunctuation !== undefined;
}
@@ -731,8 +739,8 @@ class CallSession extends Emitter {
(type === 'tts' && credential.use_for_tts) ||
(type === 'stt' && credential.use_for_stt)
)) {
this.logger.info(`Speech credential vendor: ${credential.vendor}
${credential.label ? `, label: ${credential.label}` : ''} is chosen`);
this.logger.info(
`Speech vendor: ${credential.vendor} ${credential.label ? `, label: ${credential.label}` : ''} selected`);
if ('google' === vendor) {
if (type === 'tts' && !credential.tts_tested_ok ||
type === 'stt' && !credential.stt_tested_ok) {
@@ -817,6 +825,12 @@ ${credential.label ? `, label: ${credential.label}` : ''} is chosen`);
riva_server_uri: credential.riva_server_uri
};
}
else if ('cobalt' === vendor) {
return {
speech_credential_sid: credential.speech_credential_sid,
cobalt_server_uri: credential.cobalt_server_uri
};
}
else if (vendor.startsWith('custom:')) {
return {
speech_credential_sid: credential.speech_credential_sid,
@@ -1640,10 +1654,32 @@ ${credential.label ? `, label: ${credential.label}` : ''} is chosen`);
}
this.dlg.on('modify', this._onReinvite.bind(this));
this.dlg.on('refer', this._onRefer.bind(this));
if (this.sipRequestWithinDialogHook) {
this.dlg.on('info', this._onRequestWithinDialog.bind(this));
this.dlg.on('message', this._onRequestWithinDialog.bind(this));
}
this.logger.debug(`CallSession:propagateAnswer - answered callSid ${this.callSid}`);
}
}
async _onRequestWithinDialog(req, res) {
if (!this.sipRequestWithinDialogHook) {
return;
}
const sip_method = req.method;
if (sip_method === 'INFO') {
res.send(200);
} else if (sip_method === 'MESSAGE') {
res.send(202);
} else {
this.logger.info(`CallSession:_onRequestWithinDialog unsported method: ${req.method}`);
res.send(501);
return;
}
const params = {sip_method, sip_body: req.body};
this.currentTask.performHook(this, this.sipRequestWithinDialogHook, params);
}
async _onReinvite(req, res) {
try {
if (this.ep) {
@@ -1927,17 +1963,22 @@ ${credential.label ? `, label: ${credential.label}` : ''} is chosen`);
async enableRecordAllCall() {
if (this.accountInfo.account.record_all_calls || this.application.record_all_calls) {
if (!JAMBONZ_RECORD_WS_BASE_URL || !this.accountInfo.account.bucket_credential) {
this.logger.error('Record all calls: invalid configuration');
return;
}
const listenOpts = {
url: `${JAMBONZ_RECORD_WS_BASE_URL}/record/${this.accountInfo.account.bucket_credential.vendor}`,
wsAuth: {
username: JAMBONZ_RECORD_WS_USERNAME,
password: JAMBONZ_RECORD_WS_PASSWORD
},
disableBidirectionalAudio: true,
mixType : 'stereo',
passDtmf: true
};
if (JAMBONZ_RECORD_WS_USERNAME && JAMBONZ_RECORD_WS_PASSWORD) {
listenOpts.wsAuth = {
username: JAMBONZ_RECORD_WS_USERNAME,
password: JAMBONZ_RECORD_WS_PASSWORD
};
}
this.logger.debug({listenOpts}, 'Record all calls: enabling listen');
await this.startBackgroundListen({verb: 'listen', ...listenOpts}, 'jambonz-session-record');
}


@@ -8,7 +8,7 @@ const moment = require('moment');
* @extends CallSession
*/
class RestCallSession extends CallSession {
constructor({logger, application, srf, req, ep, tasks, callInfo, accountInfo, rootSpan}) {
constructor({logger, application, srf, req, ep, ep2, tasks, callInfo, accountInfo, rootSpan}) {
super({
logger,
application,
@@ -21,6 +21,7 @@ class RestCallSession extends CallSession {
});
this.req = req;
this.ep = ep;
this.ep2 = ep2;
// keep restDialTask reference for closing AMD
if (tasks.length) {
this.restDialTask = tasks[0];


@@ -216,6 +216,9 @@ class TaskConfig extends Task {
cs.stopBackgroundListen();
}
}
if (this.data.sipRequestWithinDialogHook) {
cs.sipRequestWithinDialogHook = this.data.sipRequestWithinDialogHook;
}
}
async kill(cs) {
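
To illustrate the new hook (a minimal sketch based on the test scenario further below; the '/in-dialog' path is a placeholder), an application enables it with the config verb, and the hook then receives the SIP method and body of any INFO or MESSAGE arriving within the dialog — INFO is answered with 200, MESSAGE with 202, anything else with 501, per the CallSession change above:

// sketch: application verbs enabling the in-dialog request hook
const verbs = [
  {
    verb: 'config',
    sipRequestWithinDialogHook: '/in-dialog'   // webhook receives {sip_method, sip_body}
  },
  {
    verb: 'play',
    url: 'silence_stream://5000'
  }
];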


@@ -16,7 +16,7 @@ const ConfirmCallSession = require('../session/confirm-call-session');
const dbUtils = require('../utils/db-utils');
const debug = require('debug')('jambonz:feature-server');
const {parseUri} = require('drachtio-srf');
const {ANCHOR_MEDIA_ALWAYS} = require('../config');
const {ANCHOR_MEDIA_ALWAYS, JAMBONZ_DISABLE_DIAL_PAI_HEADER} = require('../config');
const { isOnhold } = require('../utils/sdp-utils');
const { normalizeJambones } = require('@jambonz/verb-specifications');
@@ -204,6 +204,7 @@ class TaskDial extends Task {
await this.performAction(this.results, this.killReason !== KillReason.Replaced);
this._removeDtmfDetection(cs.dlg);
this._removeDtmfDetection(this.dlg);
this._removeSipIndialogRequestListener(this.dlg);
} catch (err) {
this.logger.error({err}, 'TaskDial:exec terminating with error');
this.kill(cs);
@@ -232,7 +233,7 @@ class TaskDial extends Task {
}
this._removeDtmfDetection(cs.dlg);
this._removeDtmfDetection(this.dlg);
this._killOutdials();
await this._killOutdials();
if (this.sd) {
this.sd.kill();
this.sd.removeAllListeners();
@@ -327,8 +328,8 @@ class TaskDial extends Task {
sip_refer_to: req.get('Refer-To'),
sip_referred_by: req.get('Referred-By'),
sip_user_agent: req.get('User-Agent'),
refer_to_user: to.user,
referred_by_user: by.user,
refer_to_user: to.scheme === 'tel' ? to.number : to.user,
referred_by_user: by.scheme === 'tel' ? by.number : by.user,
referring_call_sid,
referred_call_sid
}
@@ -353,11 +354,16 @@ class TaskDial extends Task {
sd.removeAllListeners('callCreateFail');
}
_killOutdials() {
async _killOutdials() {
for (const [callSid, sd] of Array.from(this.dials)) {
this.logger.debug(`Dial:_killOutdials killing callSid ${callSid}`);
sd.kill().catch((err) => this.logger.info(err, `Dial:_killOutdials Error killing ${callSid}`));
try {
await sd.kill();
} catch (err) {
this.logger.info(err, `Dial:_killOutdials Error killing ${callSid}`);
}
this._removeHandlers(sd);
this.logger.debug(`Dial:_killOutdials killed callSid ${callSid}`);
}
this.dials.clear();
}
@@ -370,8 +376,14 @@ class TaskDial extends Task {
}
_onInfo(cs, dlg, req, res) {
// SIP Indialog will be handled by another handler
if (cs.sipRequestWithinDialogHook) {
return;
}
res.send(200);
if (req.get('Content-Type') !== 'application/dtmf-relay') return;
if (req.get('Content-Type') !== 'application/dtmf-relay') {
return;
}
const dtmfDetector = dlg === cs.dlg ? this.parentDtmfCollector : this.childDtmfCollector;
if (!dtmfDetector) return;
@@ -400,6 +412,20 @@ class TaskDial extends Task {
}
}
_initSipIndialogRequestListener(cs, dlg) {
dlg.on('info', this._onRequestWithinDialog.bind(this, cs));
dlg.on('message', this._onRequestWithinDialog.bind(this, cs));
}
_removeSipIndialogRequestListener(dlg) {
dlg && dlg.removeAllListeners('message');
dlg && dlg.removeAllListeners('info');
}
async _onRequestWithinDialog(cs, req, res) {
cs._onRequestWithinDialog(req, res);
}
async _initializeInbound(cs) {
const {ep} = await cs._evalEndpointPrecondition(this);
this.epOther = ep;
@@ -428,7 +454,8 @@ class TaskDial extends Task {
this.headers = {
'X-Account-Sid': cs.accountSid,
...(req && req.has('X-CID') && {'X-CID': req.get('X-CID')}),
...(req && req.has('P-Asserted-Identity') && {'P-Asserted-Identity': req.get('P-Asserted-Identity')}),
...(req && req.has('P-Asserted-Identity') && !JAMBONZ_DISABLE_DIAL_PAI_HEADER &&
{'P-Asserted-Identity': req.get('P-Asserted-Identity')}),
...(req && req.has('X-Voip-Carrier-Sid') && {'X-Voip-Carrier-Sid': req.get('X-Voip-Carrier-Sid')}),
// Put headers at the end to make sure opt.headers override all default behavior.
...this.headers
@@ -449,10 +476,14 @@ class TaskDial extends Task {
}
const ms = await cs.getMS();
this.timerRing = setTimeout(() => {
this.timerRing = setTimeout(async() => {
this.logger.info(`Dial:_attemptCall: ring no answer timer ${this.timeout}s exceeded`);
this.timerRing = null;
this._killOutdials();
try {
await this._killOutdials();
} catch (err) {
this.logger.info(err, 'Dial:_attemptCall - error killing outdials');
}
this.result = {
dialCallStatus: CallStatus.NoAnswer,
dialSipStatus: 487
@@ -531,7 +562,8 @@ class TaskDial extends Task {
}
})
.on('callStatusChange', (obj) => {
if (this.results.dialCallStatus !== CallStatus.Completed) {
if (this.results.dialCallStatus !== CallStatus.Completed &&
this.results.dialCallStatus !== CallStatus.NoAnswer) {
Object.assign(this.results, {
dialCallStatus: obj.callStatus,
dialSipStatus: obj.sipStatus,
@@ -701,6 +733,7 @@ class TaskDial extends Task {
if (this.parentDtmfCollector) this._installDtmfDetection(cs, cs.dlg);
if (this.childDtmfCollector) this._installDtmfDetection(cs, this.dlg);
if (cs.sipRequestWithinDialogHook) this._initSipIndialogRequestListener(cs, this.dlg);
if (this.transcribeTask) this.transcribeTask.exec(cs, {ep: this.epOther, ep2:this.ep});
if (this.listenTask) this.listenTask.exec(cs, {ep: this.epOther});
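
With the tel-URI change above, a Refer-To such as tel:+15551234567 now yields the number in refer_to_user instead of an empty user part. A hedged illustration of the resulting refer-hook payload (field names come from the hunk above; all values are invented):

// illustrative payload only — values are examples
const referEvent = {
  sip_refer_to: '<tel:+15551234567>',
  sip_referred_by: '<sip:alice@example.com>',
  sip_user_agent: 'softphone/1.0',
  refer_to_user: '+15551234567',            // to.scheme === 'tel' ? to.number : to.user
  referred_by_user: 'alice',
  referring_call_sid: 'replace-with-call-sid',
  referred_call_sid: 'replace-with-call-sid'
};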


@@ -235,9 +235,8 @@ class Dialogflow extends Task {
}
try {
const {filePath, servedFromCache} = await this._fallbackSynthAudio(cs, intent, stats, synthAudio);
const {filePath} = await this._fallbackSynthAudio(cs, intent, stats, synthAudio);
if (filePath) cs.trackTmpFile(filePath);
if (!this.ttsCredentials && !servedFromCache) cs.billForTts(intent.fulfillmentText.length);
if (this.playInProgress) {
await ep.api('uuid_break', ep.uuid).catch((err) => this.logger.info(err, 'Error killing audio'));


@@ -6,6 +6,7 @@ const {
AzureTranscriptionEvents,
DeepgramTranscriptionEvents,
SonioxTranscriptionEvents,
CobaltTranscriptionEvents,
IbmTranscriptionEvents,
NvidiaTranscriptionEvents,
JambonzTranscriptionEvents
@@ -58,6 +59,9 @@ class TaskGather extends SttTask {
logger.debug('Gather: an empty hints array was supplied, so we will mask global hints');
this.maskGlobalSttHints = true;
}
// fast Recognition, fire event after a specified time after the last hypothesis.
this.fastRecognitionTimeout = typeof this.data.recognizer.fastRecognitionTimeout === 'number' ?
this.data.recognizer.fastRecognitionTimeout * 1000 : 0;
}
this.digitBuffer = '';
@@ -111,7 +115,7 @@ class TaskGather extends SttTask {
if (cs.hasGlobalSttHints && !this.maskGlobalSttHints) {
const {hints, hintsBoost} = cs.globalSttHints;
const setOfHints = new Set(this.data.recognizer.hints
const setOfHints = new Set((this.data.recognizer.hints || [])
.concat(hints)
.filter((h) => typeof h === 'string' && h.length > 0));
this.data.recognizer.hints = [...setOfHints];
@@ -174,6 +178,10 @@ class TaskGather extends SttTask {
if (!this.data.recognizer.vendor) {
this.data.recognizer.vendor = this.vendor;
}
if (this.vendor === 'cobalt' && !this.data.recognizer.model) {
// By default, application saves cobalt model in language
this.data.recognizer.model = cs.speechRecognizerLanguage;
}
if (this.needsStt && !this.sttCredentials) {
try {
@@ -187,12 +195,18 @@ class TaskGather extends SttTask {
}
}
/* when using cobalt model is required */
if (this.vendor === 'cobalt' && !this.data.recognizer.model) {
this.notifyError({ msg: 'ASR error', details:'Cobalt requires a model to be specified'});
throw new Error('Cobalt requires a model to be specified');
}
const startListening = async(cs, ep) => {
this._startTimer();
if (this.isContinuousAsr && 0 === this.timeout) this._startAsrTimer();
if (this.input.includes('speech') && !this.listenDuringPrompt) {
try {
await this._initSpeech(cs, ep);
await this._setSpeechHandlers(cs, ep);
if (this.killed) {
this.logger.info('Gather:exec - task was quickly killed so do not transcribe');
return;
@@ -260,7 +274,7 @@ class TaskGather extends SttTask {
}
if (this.input.includes('speech') && this.listenDuringPrompt) {
await this._initSpeech(cs, ep);
await this._setSpeechHandlers(cs, ep);
this._startTranscribing(ep);
updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid)
.catch(() => {/*already logged error */});
@@ -334,7 +348,9 @@ class TaskGather extends SttTask {
}
}
async _initSpeech(cs, ep) {
async _setSpeechHandlers(cs, ep) {
if (this._speechHandlersSet) return;
this._speechHandlersSet = true;
const opts = this.setChannelVarsForStt(this, this.sttCredentials, this.data.recognizer);
switch (this.vendor) {
case 'google':
@@ -387,6 +403,36 @@ class TaskGather extends SttTask {
ep.addCustomEventListener(SonioxTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
break;
case 'cobalt':
this.bugname = 'cobalt_transcribe';
ep.addCustomEventListener(CobaltTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
/* cobalt doesnt have language, it has model, which is required */
if (!this.data.recognizer.model) {
throw new Error('Cobalt requires a model to be specified');
}
this.language = this.data.recognizer.model;
/* special case: if using hints with cobalt we need to compile them */
this.hostport = opts.COBALT_SERVER_URI;
if (this.vendor === 'cobalt' && opts.COBALT_SPEECH_HINTS) {
try {
const context = await this.compileHintsForCobalt(
ep,
this.hostport,
this.data.recognizer.model,
opts.COBALT_CONTEXT_TOKEN,
opts.COBALT_SPEECH_HINTS
);
if (context) opts.COBALT_COMPILED_CONTEXT_DATA = context;
delete opts.COBALT_SPEECH_HINTS;
} catch (err) {
this.logger.error({err}, 'Error compiling hints for cobalt');
}
}
delete opts.COBALT_SERVER_URI;
break;
case 'ibm':
this.bugname = 'ibm_transcribe';
ep.addCustomEventListener(IbmTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
@@ -446,6 +492,7 @@ class TaskGather extends SttTask {
locale: this.language,
interim: this.interim,
bugname: this.bugname,
hostport: this.hostport,
}).catch((err) => {
const {writeAlerts, AlertType} = this.cs.srf.locals;
this.logger.error(err, 'TaskGather:_startTranscribing error');
@@ -489,6 +536,22 @@ class TaskGather extends SttTask {
this._asrTimer = null;
}
_startFastRecognitionTimer(evt) {
assert(this.fastRecognitionTimeout > 0);
this._clearFastRecognitionTimer();
this._fastRecognitionTimer = setTimeout(() => {
evt.is_final = true;
this._resolve('speech', evt);
}, this.fastRecognitionTimeout);
}
_clearFastRecognitionTimer() {
if (this._fastRecognitionTimer) {
clearTimeout(this._fastRecognitionTimer);
}
this._fastRecognitionTimer = null;
}
_startFinalAsrTimer() {
this._clearFinalAsrTimer();
this._finalAsrTimer = setTimeout(() => {
@@ -626,6 +689,9 @@ class TaskGather extends SttTask {
}
this._killAudio(cs);
}
if (this.fastRecognitionTimeout) {
this._startFastRecognitionTimer(evt);
}
if (this.partialResultHook) {
const b3 = this.getTracingPropagation();
const httpHeaders = b3 && {b3};
@@ -676,16 +742,16 @@ class TaskGather extends SttTask {
_onJambonzConnect(_cs, _ep) {
this.logger.debug('TaskGather:_onJambonzConnect');
}
async _onJambonzError(cs, _ep, evt) {
async _onJambonzError(cs, ep, evt) {
this.logger.info({evt}, 'TaskGather:_onJambonzError');
if (this.isHandledByPrimaryProvider && this.fallbackVendor) {
_ep.stopTranscription({vendor: this.vendor})
ep.stopTranscription({vendor: this.vendor})
.catch((err) => this.logger.error({err}, `Error stopping transcription for primary vendor ${this.vendor}`));
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
try {
await this._fallback();
await this._initSpeech(cs, _ep);
this._startTranscribing(_ep);
await this._initSpeech(cs, ep);
this._startTranscribing(ep);
updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid);
return;
} catch (error) {
@@ -790,6 +856,7 @@ class TaskGather extends SttTask {
}
clearTimeout(this.interDigitTimer);
this._clearTimer();
this._clearFastRecognitionTimer();
if (this.isContinuousAsr && reason.startsWith('speech')) {
evt = {
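
A sketch of a gather verb using the new option (recognizer vendor and hook path are illustrative): fastRecognitionTimeout is given in seconds and, per the timer logic above, each interim hypothesis restarts the timer; when it fires, the last hypothesis is marked final and the task resolves without waiting for the vendor's own endpointing.

// hedged sketch of gather with fastRecognitionTimeout
const gather = {
  verb: 'gather',
  input: ['speech'],
  actionHook: '/transcription',        // placeholder path
  recognizer: {
    vendor: 'google',                  // any supported vendor
    language: 'en-US',
    fastRecognitionTimeout: 2          // seconds; multiplied by 1000 in the task
  }
};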


@@ -1,6 +1,7 @@
const Task = require('./task');
const assert = require('assert');
const { TaskPreconditions } = require('../utils/constants');
const crypto = require('crypto');
const { TaskPreconditions, CobaltTranscriptionEvents } = require('../utils/constants');
class SttTask extends Task {
@@ -95,6 +96,44 @@ class SttTask extends Task {
this.data.recognizer.label = this.label;
this.sttCredentials = await this._initSpeechCredentials(this.cs, this.vendor, this.label);
}
async compileHintsForCobalt(ep, hostport, model, token, hints) {
const {retrieveKey} = this.cs.srf.locals.dbHelpers;
const hash = crypto.createHash('sha1');
hash.update(`${model}:${hints}`);
const key = `cobalt:${hash.digest('hex')}`;
this.context = await retrieveKey(key);
if (this.context) {
this.logger.debug({model, hints}, 'found cached cobalt context for supplied hints');
return this.context;
}
this.logger.debug({model, hints}, 'compiling cobalt context for supplied hints');
return new Promise((resolve, reject) => {
this.cobaltCompileResolver = resolve;
ep.addCustomEventListener(CobaltTranscriptionEvents.CompileContext, this._onCompileContext.bind(this, ep, key));
ep.api('uuid_cobalt_compile_context', [ep.uuid, hostport, model, token, hints], (err, evt) => {
if (err || 0 !== evt.getBody().indexOf('+OK')) {
ep.removeCustomEventListener(CobaltTranscriptionEvents.CompileContext);
return reject(err);
}
});
});
}
_onCompileContext(ep, key, evt) {
const {addKey} = this.cs.srf.locals.dbHelpers;
this.logger.debug({evt}, `received cobalt compile context event, will cache under ${key}`);
this.cobaltCompileResolver(evt.compiled_context);
ep.removeCustomEventListener(CobaltTranscriptionEvents.CompileContext);
this.cobaltCompileResolver = null;
//cache the compiled context
addKey(key, evt.compiled_context, 3600 * 12)
.catch((err) => this.logger.info({err}, `Error caching cobalt context for ${key}`));
}
}
module.exports = SttTask;
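
The cache-key scheme used by compileHintsForCobalt can be reproduced in a few lines (model and hints values are illustrative); the compiled context is stored under this key for 12 hours:

// sketch of the cobalt context cache-key derivation
const crypto = require('crypto');
const model = 'en_US-8khz';                     // illustrative model name
const hints = 'jambonz,drachtio,freeswitch';    // illustrative hints string
const hash = crypto.createHash('sha1');
hash.update(`${model}:${hints}`);
const key = `cobalt:${hash.digest('hex')}`;
console.log(key);                               // e.g. cobalt:3f8e...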


@@ -7,6 +7,7 @@ const {
AzureTranscriptionEvents,
DeepgramTranscriptionEvents,
SonioxTranscriptionEvents,
CobaltTranscriptionEvents,
IbmTranscriptionEvents,
NvidiaTranscriptionEvents,
JambonzTranscriptionEvents,
@@ -90,6 +91,10 @@ class TaskTranscribe extends SttTask {
if (!this.data.recognizer.vendor) {
this.data.recognizer.vendor = this.vendor;
}
if (this.vendor === 'cobalt' && !this.data.recognizer.model) {
// By default, application saves cobalt model in language
this.data.recognizer.model = cs.speechRecognizerLanguage;
}
if (!this.sttCredentials) {
try {
@@ -103,6 +108,12 @@ class TaskTranscribe extends SttTask {
}
}
/* when using cobalt model is required */
if (this.vendor === 'cobalt' && !this.data.recognizer.model) {
this.notifyError({ msg: 'ASR error', details:'Cobalt requires a model to be specified'});
throw new Error('Cobalt requires a model to be specified');
}
try {
await this._startTranscribing(cs, ep, 1);
if (this.separateRecognitionPerChannel && ep2) {
@@ -163,7 +174,9 @@ class TaskTranscribe extends SttTask {
}
}
async _startTranscribing(cs, ep, channel) {
async _setSpeechHandlers(cs, ep, channel) {
if (ep._speechHandlersSet) return;
ep._speechHandlersSet = true;
const opts = this.setChannelVarsForStt(this, this.sttCredentials, this.data.recognizer);
switch (this.vendor) {
case 'google':
@@ -216,6 +229,36 @@ class TaskTranscribe extends SttTask {
ep.addCustomEventListener(SonioxTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
break;
case 'cobalt':
this.bugname = 'cobalt_transcribe';
ep.addCustomEventListener(CobaltTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
/* cobalt doesnt have language, it has model, which is required */
if (!this.data.recognizer.model) {
throw new Error('Cobalt requires a model to be specified');
}
this.language = this.data.recognizer.model;
/* special case: if using hints with cobalt we need to compile them */
this.hostport = opts.COBALT_SERVER_URI;
if (this.vendor === 'cobalt' && opts.COBALT_SPEECH_HINTS) {
try {
const context = await this.compileHintsForCobalt(
ep,
opts.COBALT_SERVER_URI,
this.data.recognizer.model,
opts.COBALT_CONTEXT_TOKEN,
opts.COBALT_SPEECH_HINTS
);
if (context) opts.COBALT_COMPILED_CONTEXT_DATA = context;
delete opts.COBALT_SPEECH_HINTS;
} catch (err) {
this.logger.error({err}, 'Error compiling hints for cobalt');
}
}
break;
case 'ibm':
this.bugname = 'ibm_transcribe';
ep.addCustomEventListener(IbmTranscriptionEvents.Transcription,
@@ -237,10 +280,12 @@ class TaskTranscribe extends SttTask {
ep.addCustomEventListener(NvidiaTranscriptionEvents.VadDetected,
this._onVadDetected.bind(this, cs, ep));
break;
default:
if (this.vendor.startsWith('custom:')) {
this.bugname = `${this.vendor}_transcribe`;
ep.addCustomEventListener(JambonzTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(JambonzTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(JambonzTranscriptionEvents.Connect, this._onJambonzConnect.bind(this, cs, ep));
ep.addCustomEventListener(JambonzTranscriptionEvents.ConnectFailure,
this._onJambonzConnectFailure.bind(this, cs, ep));
@@ -257,7 +302,10 @@ class TaskTranscribe extends SttTask {
ep.addCustomEventListener(JambonzTranscriptionEvents.Error, this._onJambonzError.bind(this, cs, ep));
await ep.set(opts)
.catch((err) => this.logger.info(err, 'Error setting channel variables'));
}
async _startTranscribing(cs, ep, channel) {
await this._setSpeechHandlers(cs, ep, channel);
await this._transcribe(ep);
/* start child span for this channel */
@@ -271,7 +319,8 @@ class TaskTranscribe extends SttTask {
interim: this.interim ? true : false,
locale: this.language,
channels: /*this.separateRecognitionPerChannel ? 2 : */ 1,
bugname: this.bugname
bugname: this.bugname,
hostport: this.hostport
});
}


@@ -9,6 +9,7 @@ const {
NvidiaTranscriptionEvents,
IbmTranscriptionEvents,
SonioxTranscriptionEvents,
CobaltTranscriptionEvents,
DeepgramTranscriptionEvents,
JambonzTranscriptionEvents,
AmdEvents,
@@ -314,6 +315,10 @@ module.exports = (logger) => {
ep.addCustomEventListener(NvidiaTranscriptionEvents.Transcription, amd.transcriptionHandler);
break;
case 'cobalt':
ep.addCustomEventListener(CobaltTranscriptionEvents.Transcription, amd.transcriptionHandler);
break;
default:
if (vendor.startsWith('custom:')) {
ep.addCustomEventListener(JambonzTranscriptionEvents.Transcription, amd.transcriptionHandler);


@@ -29,7 +29,7 @@
"Tag": "tag",
"Transcribe": "transcribe"
},
"AllowedSipRecVerbs": ["config", "gather", "transcribe", "listen"],
"AllowedSipRecVerbs": ["config", "gather", "transcribe", "listen", "tag"],
"CallStatus": {
"Trying": "trying",
"Ringing": "ringing",
@@ -95,6 +95,11 @@
"Transcription": "soniox_transcribe::transcription",
"Error": "soniox_transcribe::error"
},
"CobaltTranscriptionEvents": {
"Transcription": "cobalt_speech::transcription",
"CompileContext": "cobalt_speech::compile_context_response",
"Error": "cobalt_speech::error"
},
"IbmTranscriptionEvents": {
"Transcription": "ibm_transcribe::transcription",
"ConnectFailure": "ibm_transcribe::connect_failed",


@@ -81,6 +81,10 @@ const speechMapper = (cred) => {
const o = JSON.parse(decrypt(credential));
obj.riva_server_uri = o.riva_server_uri;
}
else if ('cobalt' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.cobalt_server_uri = o.cobalt_server_uri;
}
else if (obj.vendor.startsWith('custom:')) {
const o = JSON.parse(decrypt(credential));
obj.auth_token = o.auth_token;


@@ -1,7 +1,32 @@
const sdpTransform = require('sdp-transform');
const isOnhold = (sdp) => {
return sdp && (sdp.includes('a=sendonly') || sdp.includes('a=inactive'));
};
module.exports = {
isOnhold
const mergeSdpMedia = (sdp1, sdp2) => {
const parsedSdp1 = sdpTransform.parse(sdp1);
const parsedSdp2 = sdpTransform.parse(sdp2);
parsedSdp1.media.push(...parsedSdp2.media);
return sdpTransform.write(parsedSdp1);
};
const extractSdpMedia = (sdp) => {
const parsedSdp1 = sdpTransform.parse(sdp);
if (parsedSdp1.media.length > 1) {
parsedSdp1.media = [parsedSdp1.media[0]];
const parsedSdp2 = sdpTransform.parse(sdp);
parsedSdp2.media = [parsedSdp2.media[1]];
return [sdpTransform.write(parsedSdp1), sdpTransform.write(parsedSdp2)];
} else {
return [sdp, sdp];
}
};
module.exports = {
isOnhold,
mergeSdpMedia,
extractSdpMedia
};
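
A usage sketch of the two new helpers (the SDP text and require path are illustrative), mirroring how create-call merges the two endpoints' local SDP for the outbound leg and later splits the remote answer back into per-endpoint legs:

// hedged usage sketch of mergeSdpMedia / extractSdpMedia
const { mergeSdpMedia, extractSdpMedia } = require('./lib/utils/sdp-utils');

const sdpA = ['v=0', 'o=- 1 1 IN IP4 10.0.0.1', 's=-', 'c=IN IP4 10.0.0.1', 't=0 0',
  'm=audio 4000 RTP/AVP 0', 'a=rtpmap:0 PCMU/8000'].join('\r\n') + '\r\n';
const sdpB = sdpA.replace('m=audio 4000', 'm=audio 4002');

const merged = mergeSdpMedia(sdpA, sdpB);      // one SDP carrying both m=audio sections
const [legA, legB] = extractSdpMedia(merged);  // split back into two single-stream SDPs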


@@ -7,6 +7,7 @@ const {
DeepgramTranscriptionEvents,
SonioxTranscriptionEvents,
NvidiaTranscriptionEvents,
CobaltTranscriptionEvents,
JambonzTranscriptionEvents
} = require('./constants');
@@ -92,6 +93,11 @@ const stickyVars = {
nvidia: [
'NVIDIA_HINTS'
],
cobalt: [
'COBALT_SPEECH_HINTS',
'COBALT_COMPILED_CONTEXT_DATA',
'COBALT_METADATA'
],
soniox: [
'SONIOX_PROFANITY_FILTER',
'SONIOX_MODEL'
@@ -225,6 +231,26 @@ const normalizeGoogle = (evt, channel, language) => {
};
};
const normalizeCobalt = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
const alternatives = (evt.alternatives || [])
.map((alt) => ({
confidence: alt.confidence,
transcript: alt.transcript_formatted,
}));
return {
language_code: language,
channel_tag: channel,
is_final: evt.is_final,
alternatives,
vendor: {
name: 'cobalt',
evt: copy
}
};
};
const normalizeCustom = (evt, channel, language, vendor) => {
const copy = JSON.parse(JSON.stringify(evt));
return {
@@ -317,6 +343,8 @@ module.exports = (logger) => {
return normalizeNvidia(evt, channel, language);
case 'soniox':
return normalizeSoniox(evt, channel, language);
case 'cobalt':
return normalizeCobalt(evt, channel, language);
default:
if (vendor.startsWith('custom:')) {
return normalizeCustom(evt, channel, language, vendor);
@@ -391,9 +419,9 @@ module.exports = (logger) => {
else if ('microsoft' === vendor) {
opts = {
...opts,
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
...(rOpts.hints && rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
{AZURE_SPEECH_HINTS: rOpts.hints.map((h) => h.trim()).join(',')}),
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
...(rOpts.hints && rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
{AZURE_SPEECH_HINTS: rOpts.hints.map((h) => h.phrase).join(',')}),
...(rOpts.altLanguages && rOpts.altLanguages.length > 0 &&
{AZURE_SPEECH_ALTERNATIVE_LANGUAGE_CODES: [...new Set(rOpts.altLanguages)].join(',')}),
@@ -478,7 +506,7 @@ module.exports = (logger) => {
...(deepgramOptions.profanityFilter) &&
{DEEPGRAM_SPEECH_PROFANITY_FILTER: 1},
...(deepgramOptions.redact) &&
{DEEPGRAM_SPEECH_REDACT: 1},
{DEEPGRAM_SPEECH_REDACT: deepgramOptions.redact},
...(deepgramOptions.diarize) &&
{DEEPGRAM_SPEECH_DIARIZE: 1},
...(deepgramOptions.diarizeVersion) &&
@@ -584,6 +612,25 @@ module.exports = (logger) => {
{NVIDIA_CUSTOM_CONFIGURATION: JSON.stringify(nvidiaOptions.customConfiguration)}),
};
}
else if ('cobalt' === vendor) {
const {cobaltOptions = {}} = rOpts;
const cobaltUri = cobaltOptions.serverUri || sttCredentials.cobalt_server_uri;
opts = {
...opts,
...(rOpts.words && {COBALT_WORD_TIME_OFFSETS: 1}),
...(!rOpts.words && {COBALT_WORD_TIME_OFFSETS: 0}),
...(rOpts.model && {COBALT_MODEL: rOpts.model}),
...(cobaltUri && {COBALT_SERVER_URI: cobaltUri}),
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'string' &&
{COBALT_SPEECH_HINTS: rOpts.hints.join(',')}),
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'object' &&
{COBALT_SPEECH_HINTS: JSON.stringify(rOpts.hints)}),
...(rOpts.hints?.length > 0 && {COBALT_CONTEXT_TOKEN: cobaltOptions.contextToken || 'unk:default'}),
...(cobaltOptions.metadata && {COBALT_METADATA: cobaltOptions.metadata}),
...(cobaltOptions.enableConfusionNetwork && {COBALT_ENABLE_CONFUSION_NETWORK: 1}),
...(cobaltOptions.compiledContextData && {COBALT_COMPILED_CONTEXT_DATA: cobaltOptions.compiledContextData}),
};
}
else if (vendor.startsWith('custom:')) {
let {options = {}} = rOpts;
const {auth_token, custom_stt_url} = sttCredentials;
@@ -633,6 +680,9 @@ module.exports = (logger) => {
ep.removeCustomEventListener(SonioxTranscriptionEvents.Transcription);
ep.removeCustomEventListener(CobaltTranscriptionEvents.Transcription);
ep.removeCustomEventListener(CobaltTranscriptionEvents.CompileContext);
ep.removeCustomEventListener(NvidiaTranscriptionEvents.Transcription);
ep.removeCustomEventListener(NvidiaTranscriptionEvents.TranscriptionComplete);
ep.removeCustomEventListener(NvidiaTranscriptionEvents.StartOfSpeech);
@@ -664,6 +714,10 @@ module.exports = (logger) => {
const {apiKey} = recognizer.sonioxOptions || {};
if (apiKey) return {api_key: apiKey};
}
else if (recognizer.vendor === 'cobalt') {
const {serverUri} = recognizer.cobaltOptions || {};
if (serverUri) return {cobalt_server_uri: serverUri};
}
else if (recognizer.vendor === 'ibm') {
const {ttsApiKey, ttsRegion, sttApiKey, sttRegion, instanceId} = recognizer.ibmOptions || {};
if (ttsApiKey || sttApiKey) return {
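
A sketch of a recognizer block for the new cobalt vendor (server URI and hints are placeholders); per the task changes above, model is required — the tasks fall back to the application's speech-recognizer language when it is omitted — and string hints are compiled into a cobalt context and cached:

// hedged sketch of a cobalt recognizer configuration
const recognizer = {
  vendor: 'cobalt',
  model: 'en_US-8khz',                     // maps to COBALT_MODEL; required
  hints: ['jambonz', 'drachtio'],          // joined and compiled via uuid_cobalt_compile_context
  cobaltOptions: {
    serverUri: 'cobalt.example.com:2727',  // placeholder; overrides the stored cobalt_server_uri
    contextToken: 'unk:default',
    metadata: 'example-metadata'
  }
};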

package-lock.json generated

@@ -14,10 +14,10 @@
"@jambonz/db-helpers": "^0.9.1",
"@jambonz/http-health-check": "^0.0.1",
"@jambonz/realtimedb-helpers": "^0.8.6",
"@jambonz/speech-utils": "^0.0.20",
"@jambonz/speech-utils": "^0.0.21",
"@jambonz/stats-collector": "^0.1.9",
"@jambonz/time-series": "^0.2.8",
"@jambonz/verb-specifications": "^0.0.30",
"@jambonz/verb-specifications": "^0.0.37",
"@opentelemetry/api": "^1.4.0",
"@opentelemetry/exporter-jaeger": "^1.9.0",
"@opentelemetry/exporter-trace-otlp-http": "^0.35.0",
@@ -30,8 +30,8 @@
"bent": "^7.3.12",
"debug": "^4.3.4",
"deepcopy": "^2.1.0",
"drachtio-fsmrf": "^3.0.24",
"drachtio-srf": "^4.5.26",
"drachtio-fsmrf": "^3.0.27",
"drachtio-srf": "^4.5.29",
"express": "^4.18.2",
"ip": "^1.1.8",
"moment": "^2.29.4",
@@ -2983,9 +2983,9 @@
}
},
"node_modules/@jambonz/speech-utils": {
"version": "0.0.20",
"resolved": "https://registry.npmjs.org/@jambonz/speech-utils/-/speech-utils-0.0.20.tgz",
"integrity": "sha512-vyMgfCAdv5+zOVisyJKhVQ1+lY1i1Ygv5amgw3JgyUy+lDbWmzANctLZXpJpoUGbTw8ZOwippEnk0F4XWFmtlg==",
"version": "0.0.21",
"resolved": "https://registry.npmjs.org/@jambonz/speech-utils/-/speech-utils-0.0.21.tgz",
"integrity": "sha512-I9ULe9PHMIISZ5gr58SZ+9I5hL1F24XjFPqPlulfaoe6vJ4EgSKs1l1fWKPCuZQmv6Cf259gK5H+bjGTqjVRQw==",
"dependencies": {
"@aws-sdk/client-polly": "^3.359.0",
"@google-cloud/text-to-speech": "^4.2.1",
@@ -3019,9 +3019,9 @@
}
},
"node_modules/@jambonz/verb-specifications": {
"version": "0.0.30",
"resolved": "https://registry.npmjs.org/@jambonz/verb-specifications/-/verb-specifications-0.0.30.tgz",
"integrity": "sha512-IbusgtBBo2V5Tc1FvDJvkWogHOhR2tNZN6Iyb2PjUomMI48BsWKmHW/wegppKTDpBBeN3ABY+L96pvX4N0+mCw==",
"version": "0.0.37",
"resolved": "https://registry.npmjs.org/@jambonz/verb-specifications/-/verb-specifications-0.0.37.tgz",
"integrity": "sha512-iunu0AoholuUlkrRIPgbNFtKj2irlJbkLnZX/ghEAlNQANZOndKnlKUdAmR9Xgrjo3G7Vole/Guu372m1PRCfw==",
"dependencies": {
"debug": "^4.3.4",
"pino": "^8.8.0"
@@ -5152,9 +5152,9 @@
}
},
"node_modules/drachtio-fsmrf": {
"version": "3.0.24",
"resolved": "https://registry.npmjs.org/drachtio-fsmrf/-/drachtio-fsmrf-3.0.24.tgz",
"integrity": "sha512-MsTwHDOOg5yrKCcQ2buNTVSEwoMZzTcVeU5/BZ2Km0TPxBI1lErW6IiyZxA5pyoLfSdBtbqSh9ZwXrtxWTp6kA==",
"version": "3.0.27",
"resolved": "https://registry.npmjs.org/drachtio-fsmrf/-/drachtio-fsmrf-3.0.27.tgz",
"integrity": "sha512-k5jwc6RpiSoasKEMq0ROxXVga+yaMplujNUKjAidKjheso+PijdQtjEwNANl0F+JyTBELaRe81PEgeRHuAdv1w==",
"dependencies": {
"camel-case": "^4.1.2",
"debug": "^2.6.9",
@@ -5209,9 +5209,9 @@
}
},
"node_modules/drachtio-srf": {
"version": "4.5.26",
"resolved": "https://registry.npmjs.org/drachtio-srf/-/drachtio-srf-4.5.26.tgz",
"integrity": "sha512-Dqq3E0qY5yxjDAkrgtwrkmQYuaGUwKqUu98kDS9xzEdX8oFqKZeV5onUZq0veznw4ctuGQolY8BI6zs1e5ynHg==",
"version": "4.5.29",
"resolved": "https://registry.npmjs.org/drachtio-srf/-/drachtio-srf-4.5.29.tgz",
"integrity": "sha512-Hj2OW+SyQxAyLpyHngJwW26FX9V4fDYZGX0mlU4YOF4ml7I7b7XITcRPxKhroYDOrLgKqhNeh5BcPoKqPhEK7A==",
"dependencies": {
"debug": "^3.2.7",
"delegates": "^0.1.0",
@@ -12949,9 +12949,9 @@
}
},
"@jambonz/speech-utils": {
"version": "0.0.20",
"resolved": "https://registry.npmjs.org/@jambonz/speech-utils/-/speech-utils-0.0.20.tgz",
"integrity": "sha512-vyMgfCAdv5+zOVisyJKhVQ1+lY1i1Ygv5amgw3JgyUy+lDbWmzANctLZXpJpoUGbTw8ZOwippEnk0F4XWFmtlg==",
"version": "0.0.21",
"resolved": "https://registry.npmjs.org/@jambonz/speech-utils/-/speech-utils-0.0.21.tgz",
"integrity": "sha512-I9ULe9PHMIISZ5gr58SZ+9I5hL1F24XjFPqPlulfaoe6vJ4EgSKs1l1fWKPCuZQmv6Cf259gK5H+bjGTqjVRQw==",
"requires": {
"@aws-sdk/client-polly": "^3.359.0",
"@google-cloud/text-to-speech": "^4.2.1",
@@ -12985,9 +12985,9 @@
}
},
"@jambonz/verb-specifications": {
"version": "0.0.30",
"resolved": "https://registry.npmjs.org/@jambonz/verb-specifications/-/verb-specifications-0.0.30.tgz",
"integrity": "sha512-IbusgtBBo2V5Tc1FvDJvkWogHOhR2tNZN6Iyb2PjUomMI48BsWKmHW/wegppKTDpBBeN3ABY+L96pvX4N0+mCw==",
"version": "0.0.37",
"resolved": "https://registry.npmjs.org/@jambonz/verb-specifications/-/verb-specifications-0.0.37.tgz",
"integrity": "sha512-iunu0AoholuUlkrRIPgbNFtKj2irlJbkLnZX/ghEAlNQANZOndKnlKUdAmR9Xgrjo3G7Vole/Guu372m1PRCfw==",
"requires": {
"debug": "^4.3.4",
"pino": "^8.8.0"
@@ -14610,9 +14610,9 @@
}
},
"drachtio-fsmrf": {
"version": "3.0.24",
"resolved": "https://registry.npmjs.org/drachtio-fsmrf/-/drachtio-fsmrf-3.0.24.tgz",
"integrity": "sha512-MsTwHDOOg5yrKCcQ2buNTVSEwoMZzTcVeU5/BZ2Km0TPxBI1lErW6IiyZxA5pyoLfSdBtbqSh9ZwXrtxWTp6kA==",
"version": "3.0.27",
"resolved": "https://registry.npmjs.org/drachtio-fsmrf/-/drachtio-fsmrf-3.0.27.tgz",
"integrity": "sha512-k5jwc6RpiSoasKEMq0ROxXVga+yaMplujNUKjAidKjheso+PijdQtjEwNANl0F+JyTBELaRe81PEgeRHuAdv1w==",
"requires": {
"camel-case": "^4.1.2",
"debug": "^2.6.9",
@@ -14662,9 +14662,9 @@
}
},
"drachtio-srf": {
"version": "4.5.26",
"resolved": "https://registry.npmjs.org/drachtio-srf/-/drachtio-srf-4.5.26.tgz",
"integrity": "sha512-Dqq3E0qY5yxjDAkrgtwrkmQYuaGUwKqUu98kDS9xzEdX8oFqKZeV5onUZq0veznw4ctuGQolY8BI6zs1e5ynHg==",
"version": "4.5.29",
"resolved": "https://registry.npmjs.org/drachtio-srf/-/drachtio-srf-4.5.29.tgz",
"integrity": "sha512-Hj2OW+SyQxAyLpyHngJwW26FX9V4fDYZGX0mlU4YOF4ml7I7b7XITcRPxKhroYDOrLgKqhNeh5BcPoKqPhEK7A==",
"requires": {
"debug": "^3.2.7",
"delegates": "^0.1.0",


@@ -30,10 +30,10 @@
"@jambonz/db-helpers": "^0.9.1",
"@jambonz/http-health-check": "^0.0.1",
"@jambonz/realtimedb-helpers": "^0.8.6",
"@jambonz/speech-utils": "^0.0.20",
"@jambonz/speech-utils": "^0.0.21",
"@jambonz/stats-collector": "^0.1.9",
"@jambonz/time-series": "^0.2.8",
"@jambonz/verb-specifications": "^0.0.30",
"@jambonz/verb-specifications": "^0.0.37",
"@opentelemetry/api": "^1.4.0",
"@opentelemetry/exporter-jaeger": "^1.9.0",
"@opentelemetry/exporter-trace-otlp-http": "^0.35.0",
@@ -46,8 +46,8 @@
"bent": "^7.3.12",
"debug": "^4.3.4",
"deepcopy": "^2.1.0",
"drachtio-fsmrf": "^3.0.24",
"drachtio-srf": "^4.5.26",
"drachtio-fsmrf": "^3.0.27",
"drachtio-srf": "^4.5.29",
"express": "^4.18.2",
"ip": "^1.1.8",
"moment": "^2.29.4",

test/in-dialog-test.js Normal file

@@ -0,0 +1,65 @@
const test = require('tape');
const { sippUac } = require('./sipp')('test_fs');
const clearModule = require('clear-module');
const {provisionCallHook, provisionCustomHook} = require('./utils')
const bent = require('bent');
const getJSON = bent('json')
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
});
function connect(connectable) {
return new Promise((resolve, reject) => {
connectable.on('connect', () => {
return resolve();
});
});
}
test('\'sip Indialog\' test Info', async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
const verbs = [
{
verb: 'config',
sipRequestWithinDialogHook: '/customHook'
},
{
verb: 'play',
url: 'silence_stream://5000',
}
];
const waitHookVerbs = [
{
verb: 'hangup'
}
];
const from = 'sip_indialog_info';
await provisionCustomHook(from, waitHookVerbs)
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-success-info-received-bye.xml', '172.38.0.10', from);
t.pass('sip Info: success send Info');
// Make sure that sipRequestWithinDialogHook is called and success
const json = await getJSON(`http:127.0.0.1:3100/lastRequest/${from}_customHook`)
t.pass(json.body.sip_method === 'INFO', 'sipRequestWithinDialogHook contains sip_method')
t.pass(json.body.sip_body === 'hello jambonz\r\n', 'sipRequestWithinDialogHook contains sip_method')
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});


@@ -15,5 +15,6 @@ require('./sip-refer-tests');
require('./listen-tests');
require('./config-test');
require('./queue-test');
require('./in-dialog-test');
require('./remove-test-db');
require('./docker_stop');


@@ -0,0 +1,114 @@
<?xml version="1.0" encoding="ISO-8859-1" ?>
<!DOCTYPE scenario SYSTEM "sipp.dtd">
<scenario name="Basic Sipstone UAC">
<!-- In client mode (sipp placing calls), the Call-ID MUST be -->
<!-- generated by sipp. To do so, use [call_id] keyword. -->
<send retrans="500">
<![CDATA[
INVITE sip:[to]@[remote_ip]:[remote_port] SIP/2.0
Via: SIP/2.0/[transport] [local_ip]:[local_port];branch=[branch]
From: [from] <sip:[from]@[local_ip]:[local_port]>;tag=[pid]SIPpTag00[call_number]
To: <sip:[to]@[remote_ip]:[remote_port]>
Call-ID: [call_id]
CSeq: 1 INVITE
Contact: sip:[from]@[local_ip]:[local_port]
Max-Forwards: 70
X-Account-Sid: bb845d4b-83a9-4cde-a6e9-50f3743bab3f
Subject: uac-say
Content-Type: application/sdp
Content-Length: [len]
v=0
o=user1 53655765 2353687637 IN IP[local_ip_type] [local_ip]
s=-
c=IN IP[media_ip_type] [media_ip]
t=0 0
m=audio [media_port] RTP/AVP 0
a=rtpmap:0 PCMU/8000
]]>
</send>
<recv response="100"
optional="true">
</recv>
<recv response="180" optional="true">
</recv>
<recv response="183" optional="true">
</recv>
<!-- By adding rrs="true" (Record Route Sets), the route sets -->
<!-- are saved and used for following messages sent. Useful to test -->
<!-- against stateful SIP proxies/B2BUAs. -->
<recv response="200" rtd="true">
</recv>
<!-- Packet lost can be simulated in any send/recv message by -->
<!-- by adding the 'lost = "10"'. Value can be [1-100] percent. -->
<send>
<![CDATA[
ACK sip:[to]@[remote_ip]:[remote_port] SIP/2.0
Via: SIP/2.0/[transport] [local_ip]:[local_port];branch=[branch]
From: [from] <sip:[from]@[local_ip]:[local_port]>;tag=[pid]SIPpTag00[call_number]
To: [to] <sip:[to]@[remote_ip]:[remote_port]>[peer_tag_param]
Call-ID: [call_id]
CSeq: 1 ACK
Contact: sip:[from]@[local_ip]:[local_port]
Max-Forwards: 70
Subject: uac-say
Content-Length: 0
]]>
</send>
<pause milliseconds="2000"/>
<!-- Send an INFO message -->
<send>
<![CDATA[
INFO sip:[service]@[remote_ip]:[remote_port] SIP/2.0
Via: SIP/2.0/[transport] [local_ip]:[local_port];branch=[branch]
From: [from] <sip:[from]@[local_ip]:[local_port]>;tag=[pid]SIPpTag00[call_number]
To: [to] <sip:[to]@[remote_ip]:[remote_port]>[peer_tag_param]
Call-ID: [call_id]
CSeq: 2 INFO
Contact: sip:[from]@[local_ip]:[local_port]
Max-Forwards: 70
Subject: Performance Test
Content-Type: text/plain
Content-Length: [len]
hello jambonz
]]>
</send>
<!-- Receive 200 OK -->
<recv response="200">
</recv>
<recv request="BYE">
</recv>
<send>
<![CDATA[
SIP/2.0 200 OK
[last_Via:]
[last_From:]
[last_To:]
[last_Call-ID:]
[last_CSeq:]
Contact: <sip:[local_ip]:[local_port];transport=[transport]>
Content-Length: 0
]]>
</send>
</scenario>