Compare commits


31 Commits

Author SHA1 Message Date
Dave Horton
305facb03b Fix/11labs no client config (#1149)
* update to verb specs

* add parameter to api call when there is no client config provided
2025-04-12 10:36:35 -04:00
Dave Horton
d310ba0ed1 reduce verbosity of logging (#1145) 2025-04-09 15:36:58 -04:00
Hoan Luu Huu
77f0fc85a3 ell tts support speech and pronunciation_dictionary_locators (#1137) 2025-04-09 12:32:06 -04:00
Sam Machin
c708b7d007 fix initial message format (#1144) 2025-04-09 10:43:07 -04:00
Hoan Luu Huu
343b382373 fixed ws-requestor missing hook for dial:confirm (#1143) 2025-04-09 07:29:08 -04:00
rammohan-y
0a541e089d Fix for https://github.com/jambonz/jambonz-feature-server/issues/1138 (#1139) 2025-04-04 09:02:18 -04:00
rammohan-y
d910981b1a Allow hangup verb on siprec call (#1136)
* Allow hangup verb on siprec call
https://github.com/jambonz/jambonz-feature-server/issues/1135

* added sip:decline to AllowedSipRecVerbs
2025-04-04 08:23:39 -04:00
Hoan Luu Huu
3f2744f032 fixed replaceEndpoint offer single codec that callee does not support (#1131) 2025-04-03 07:58:39 -04:00
Dave Horton
fcaf2e59e7 initial changes for openai stt (#1127)
* initial changes for openai stt

* wip

* wip

* wip

* wip

* wip

* make minBargeinWordCount work for openai

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wipp

* wip

* wip

* wip

* openai stt: support for prompt templates

* lint

* wip

* support openai semantic_vad

* wip

* transcribe supports openai stt

* sip

* wip

* wip

* refactor list of stt vendors that dont need to be restarted after a final transcript

* cleanup

* wip

* cleanup

* wip

* wip

* wip

* remove credentials from log

* comment
2025-03-28 13:14:58 -04:00
rammohan-y
ee846b283d Feat 1120 video call - remove video media from SDP if the call is an audio call (#1124)
* sending jambonz:error when the incoming message is not parsable

https://github.com/jambonz/jambonz-feature-server/issues/1094

* writing an alert when incoming payload is invalid

* added content to the jambonz:error payload

* removing video media from the SDP if the call is an audio call, to avoid sending video media to the destination when the incoming call is audio-only

* calling removeVideoSdp only when the environment variable JAMBONES_VIDEO_CALLS_ENABLED_IN_FS is set to true; this ensures there are no regressions for audio calls

* fixed jslint errors
2025-03-28 12:56:08 -04:00
Hoan Luu Huu
acdb8695a0 allow cartesia model_id to be overridden from the synthesizer option (#1130) 2025-03-27 13:37:57 -04:00
Hoan Luu Huu
f33f197e8d gather say support ttstream (#1128) 2025-03-27 07:19:19 -04:00
Sam Machin
9c437ab687 use deepgramOptions.model (#1126)
* use deepgramOptions.model

* lint

* Update transcription-utils.js
2025-03-24 12:25:29 -04:00
Dave Horton
1873694784 update to drachtio-fsmrf@4.0.2 2025-03-17 08:50:10 -04:00
rammohan-y
d36e6b4c22 set the detected language as language_code when deepgram detects the language (#1116)
https://github.com/jambonz/jambonz-feature-server/issues/1115
2025-03-11 12:16:29 -04:00
rammohan-y
0470168757 updated realtimedb-helper to 0.8.13 (#1113) 2025-03-10 09:49:04 -04:00
Sam Machin
3120dbc3e0 Feature: add digitCount to amd-utils (#1111)
* add digitCount to amd-utils

* linting

* bump verb-specs
2025-03-06 12:01:51 -05:00
Hoan Luu Huu
8b8283e603 ws requestor should store initial sessionData when sending session:adulting (#1110) 2025-03-06 07:42:47 -05:00
Dave Horton
29de4b8878 fix crashing error with some media timeout scenarios (#1108) 2025-03-05 09:48:40 -05:00
Sam Machin
fa5fc1af9f allow transcribe_status update on Listen/Transcribe tasks (#1107) 2025-03-04 12:41:27 -05:00
Sam Machin
a5e778d7f3 call jambonzHangup when API ends call (#1104) 2025-03-03 07:23:03 -05:00
Dave Horton
bf4ae5b618 #1101 - allow listen url to have relative url and use base url of app… (#1102)
* #1101 - allow listen url to have relative url and use base url of application if ws

* remove logging
2025-02-28 14:19:45 -05:00
Sam Machin
ad2d99c417 if redirect to new server update requestor for baseURL (#1096) 2025-02-28 08:04:37 -05:00
Hoan Luu Huu
af4e17f447 fixed dial transcribe is not able to receive final transcribe when closing the call (#1073)
* fixed dial transcribe not being able to receive the final transcript when closing the call.

* wip

* fix review comment

* support call session delaying ep destroy when current task is transcribe

* wip

* wip

* fixed review comments

* fixed review comments
2025-02-27 07:25:01 -05:00
Hoan Luu Huu
cd2563ce17 support ultravox send user_input_message (#1100) 2025-02-26 19:50:09 -05:00
Sam Machin
af475cbea4 Update place-outdial.js (#1090)
* Update place-outdial.js

* update baseURL on redirect

* Revert "update baseURL on redirect"

This reverts commit 55778ba37edf029fa8687cd971b202af15478f95.
2025-02-25 15:09:21 -05:00
Anton Voylenko
69ba18acd1 Support sipindialog for conference (#1050)
* fix: add _onRequestWithinDialog catch block

* feat: support sipindialog for conference

* fix: remove any existing listener before adding new
2025-02-24 13:59:32 -05:00
rammohan-y
8bed44cce3 sending jambonz:error when the incoming message is not parsable (#1095)
* sending jambonz:error when the incoming message is not parsable

https://github.com/jambonz/jambonz-feature-server/issues/1094

* writing an alert when incoming payload is invalid

* added content to the jambonz:error payload
2025-02-24 12:44:25 -05:00
Dave Horton
8ede41714b fix typo: change AWS_SNS_TOPIC_ARM to AWS_SNS_TOPIC_ARN (#1093) 2025-02-24 10:51:07 -05:00
Dave Horton
ee54e4341a update drachtio-srf 2025-02-20 10:17:53 -05:00
Hoan Luu Huu
4bf2f42f33 support ultravox sends createCall response to app (#1091)
* support ultravox sends createCall response to app

* update type issue

Co-authored-by: Matt Hertogs <matthertogs@gmail.com>

---------

Co-authored-by: Matt Hertogs <matthertogs@gmail.com>
2025-02-20 07:07:03 -05:00
32 changed files with 772 additions and 242 deletions

View File

@@ -13,7 +13,7 @@ Configuration is provided via environment variables:
|AWS_ACCESS_KEY_ID| aws access key id, used for TTS/STT as well SNS notifications|no|
|AWS_REGION| aws region| no|
|AWS_SECRET_ACCESS_KEY| aws secret access key, used per above|no|
|AWS_SNS_TOPIC_ARM| aws sns topic arn that scale-in lifecycle notifications will be published to|no|
|AWS_SNS_TOPIC_ARN| aws sns topic arn that scale-in lifecycle notifications will be published to|no|
|DRACHTIO_HOST| ip address of drachtio server (typically '127.0.0.1')|yes|
|DRACHTIO_PORT| listening port of drachtio server for control connections (typically 9022)|yes|
|DRACHTIO_SECRET| shared secret|yes|
@@ -72,7 +72,7 @@ module.exports = {
STATS_PORT: 8125,
STATS_PROTOCOL: 'tcp',
STATS_TELEGRAF: 1,
AWS_SNS_TOPIC_ARM: 'arn:aws:sns:us-west-1:xxxxxxxxxxx:terraform-20201107200347128600000002',
AWS_SNS_TOPIC_ARN: 'arn:aws:sns:us-west-1:xxxxxxxxxxx:terraform-20201107200347128600000002',
JAMBONES_NETWORK_CIDR: '172.31.0.0/16',
JAMBONES_MYSQL_HOST: 'aurora-cluster-jambonz.cluster-yyyyyyyyyyy.us-west-1.rds.amazonaws.com',
JAMBONES_MYSQL_USER: 'admin',

View File

@@ -93,7 +93,7 @@ const AWS_REGION = process.env.AWS_REGION;
const AWS_ACCESS_KEY_ID = process.env.AWS_ACCESS_KEY_ID;
const AWS_SECRET_ACCESS_KEY = process.env.AWS_SECRET_ACCESS_KEY;
const AWS_SNS_PORT = parseInt(process.env.AWS_SNS_PORT, 10) || 3001;
const AWS_SNS_TOPIC_ARM = process.env.AWS_SNS_TOPIC_ARM;
const AWS_SNS_TOPIC_ARN = process.env.AWS_SNS_TOPIC_ARN;
const AWS_SNS_PORT_MAX = parseInt(process.env.AWS_SNS_PORT_MAX, 10) || 3005;
const GCP_JSON_KEY = process.env.GCP_JSON_KEY;
@@ -139,6 +139,9 @@ const JAMBONES_USE_FREESWITCH_TIMER_FD = process.env.JAMBONES_USE_FREESWITCH_TIM
const JAMBONES_DIAL_SBC_FOR_REGISTERED_USER = process.env.JAMBONES_DIAL_SBC_FOR_REGISTERED_USER || false;
const JAMBONES_MEDIA_TIMEOUT_MS = process.env.JAMBONES_MEDIA_TIMEOUT_MS || 0;
const JAMBONES_MEDIA_HOLD_TIMEOUT_MS = process.env.JAMBONES_MEDIA_HOLD_TIMEOUT_MS || 0;
// jambonz
const JAMBONES_TRANSCRIBE_EP_DESTROY_DELAY_MS =
process.env.JAMBONES_TRANSCRIBE_EP_DESTROY_DELAY_MS;
module.exports = {
JAMBONES_MYSQL_HOST,
@@ -189,7 +192,7 @@ module.exports = {
AWS_ACCESS_KEY_ID,
AWS_SECRET_ACCESS_KEY,
AWS_SNS_PORT,
AWS_SNS_TOPIC_ARM,
AWS_SNS_TOPIC_ARN,
AWS_SNS_PORT_MAX,
ANCHOR_MEDIA_ALWAYS,
@@ -227,5 +230,6 @@ module.exports = {
JAMBONES_USE_FREESWITCH_TIMER_FD,
JAMBONES_DIAL_SBC_FOR_REGISTERED_USER,
JAMBONES_MEDIA_TIMEOUT_MS,
JAMBONES_MEDIA_HOLD_TIMEOUT_MS
JAMBONES_MEDIA_HOLD_TIMEOUT_MS,
JAMBONES_TRANSCRIBE_EP_DESTROY_DELAY_MS
};

View File

@@ -30,7 +30,8 @@ const {
AWS_REGION,
JAMBONES_USE_FREESWITCH_TIMER_FD,
JAMBONES_MEDIA_TIMEOUT_MS,
JAMBONES_MEDIA_HOLD_TIMEOUT_MS
JAMBONES_MEDIA_HOLD_TIMEOUT_MS,
JAMBONES_TRANSCRIBE_EP_DESTROY_DELAY_MS
} = require('../config');
const bent = require('bent');
const BackgroundTaskManager = require('../utils/background-task-manager');
@@ -38,6 +39,7 @@ const dbUtils = require('../utils/db-utils');
const BADPRECONDITIONS = 'preconditions not met';
const CALLER_CANCELLED_ERR_MSG = 'Response not sent due to unknown transaction';
const { NonFatalTaskError} = require('../utils/error');
const { sleepFor } = require('../utils/helpers');
const sqlRetrieveQueueEventHook = `SELECT * FROM webhooks
WHERE webhook_sid =
(
@@ -133,6 +135,15 @@ class CallSession extends Emitter {
this.requestor.on('handover', handover.bind(this));
this.requestor.on('reconnect-error', this._onSessionReconnectError.bind(this));
}
/**
* Currently this is used for openai STT, which has a prompt paramater and
* we have an experimental feature where you can send the conversation
* history in the prompt
*/
this.conversationTurns = [];
this.on('userSaid', this._onUserSaid.bind(this));
this.on('botSaid', this._onBotSaid.bind(this));
}
/**
@@ -951,7 +962,7 @@ class CallSession extends Emitter {
(type === 'tts' && credential.use_for_tts) ||
(type === 'stt' && credential.use_for_stt)
)) {
this.logger.info(
this.logger.debug(
`${type}: ${credential.vendor} ${credential.label ? `, label: ${credential.label}` : ''} `);
if ('google' === vendor) {
if (type === 'tts' && !credential.tts_tested_ok ||
@@ -1104,12 +1115,17 @@ class CallSession extends Emitter {
};
}
else if ('speechmatics' === vendor) {
this.logger.info({credential}, 'CallSession:getSpeechCredentials - speechmatics credential');
return {
api_key: credential.api_key,
speechmatics_stt_uri: credential.speechmatics_stt_uri,
};
}
else if ('openai' === vendor) {
return {
api_key: credential.api_key,
model_id: credential.model_id,
};
}
else if (vendor.startsWith('custom:')) {
return {
speech_credential_sid: credential.speech_credential_sid,
@@ -1238,7 +1254,7 @@ class CallSession extends Emitter {
this.logger.info('CallSession:exec all tasks complete');
this._stopping = true;
this._onTasksDone();
this._clearResources();
await this._clearResources();
if (!this.isConfirmCallSession && !this.isSmsCallSession) sessionTracker.remove(this.callSid);
@@ -1309,7 +1325,7 @@ class CallSession extends Emitter {
_lccCallStatus(opts) {
if (opts.call_status === CallStatus.Completed && this.dlg) {
this.logger.info('CallSession:_lccCallStatus hanging up call due to request from api');
this._callerHungup();
this._jambonzHangup();
}
else if (opts.call_status === CallStatus.NoAnswer) {
if (this.direction === CallDirection.Inbound) {
@@ -1443,7 +1459,7 @@ class CallSession extends Emitter {
}
if (tasks) {
const t = normalizeJambones(this.logger, tasks).map((tdata) => makeTask(this.logger, tdata));
this.logger.info({tasks: listTaskNames(t)}, 'CallSession:_lccCallHook new task list');
this.logger.debug({tasks: listTaskNames(t)}, 'CallSession:_lccCallHook new task list');
this.replaceApplication(t);
if (this.wakeupResolver) {
//this.logger.debug({resolution}, 'CallSession:_onCommand - got commands, waking up..');
@@ -1487,7 +1503,7 @@ class CallSession extends Emitter {
this.backgroundTaskManager.getTask('transcribe').updateTranscribe(opts.transcribe_status);
}
const task = this.currentTask;
if (!task || ![TaskName.Dial, TaskName.Transcribe].includes(task.name)) {
if (!task || ![TaskName.Dial, TaskName.Transcribe, TaskName.Listen].includes(task.name)) {
return this.logger.info(`CallSession:_lccTranscribeStatus - invalid transcribe_status in task ${task.name}`);
}
const transcribeTask = task.name === TaskName.Transcribe ? task : task.transcribeTask;
@@ -1710,10 +1726,10 @@ Duration=${duration} `
this.currentTask.ep :
this.ep;
const db = parseDecibels(opts);
this.logger.info(`_lccBoostAudioSignal: boosting audio signal by ${db} dB`);
this.logger.debug(`_lccBoostAudioSignal: boosting audio signal by ${db} dB`);
const args = [ep.uuid, 'setGain', db];
const response = await ep.api('uuid_dub', args);
this.logger.info({response}, '_lccBoostAudioSignal: response from freeswitch');
this.logger.debug({response}, '_lccBoostAudioSignal: response from freeswitch');
}
async _lccMediaPath(desiredPath) {
@@ -1766,7 +1782,6 @@ Duration=${duration} `
let res;
try {
res = await this.ttsStreamingBuffer?.bufferTokens(tokens);
this.logger.info({id, res}, 'CallSession:_lccTtsTokens - tts:tokens-result');
} catch (err) {
this.logger.info(err, 'CallSession:_lccTtsTokens');
}
@@ -1901,7 +1916,7 @@ Duration=${duration} `
this.tasks = tasks;
this.taskIdx = 0;
this.stackIdx++;
this.logger.info({tasks: listTaskNames(tasks)},
this.logger.debug({tasks: listTaskNames(tasks)},
`CallSession:replaceApplication reset with ${tasks.length} new tasks, stack depth is ${this.stackIdx}`);
if (this.currentTask) {
this.currentTask.kill(this, KillReason.Replaced);
@@ -1916,7 +1931,7 @@ Duration=${duration} `
kill(onBackgroundGatherBargein = false) {
if (this.isConfirmCallSession) this.logger.debug('CallSession:kill (ConfirmSession)');
else this.logger.info('CallSession:kill');
else this.logger.debug('CallSession:kill');
this._endVerbHookSpan();
if (this.currentTask) {
this.currentTask.kill(this);
@@ -1981,7 +1996,7 @@ Duration=${duration} `
task.synthesizer.label :
this.speechSynthesisLabel;
this.logger.info({vendor, language, voice, label},
this.logger.debug({vendor, language, voice, label},
'CallSession:_preCacheAudio - precaching audio for future prompt');
task._synthesizeWithSpecificVendor(this, this.ep, {vendor, language, voice, label, preCache: true})
.catch((err) => this.logger.error(err, 'CallSession:_preCacheAudio - error precaching audio'));
@@ -2051,7 +2066,7 @@ Duration=${duration} `
}
async _onCommand({msgid, command, call_sid, queueCommand, tool_call_id, data}) {
this.logger.info({msgid, command, queueCommand, data}, 'CallSession:_onCommand - received command');
this.logger.debug({msgid, command, queueCommand, data}, 'CallSession:_onCommand - received command');
let resolution;
switch (command) {
case 'redirect':
@@ -2060,18 +2075,18 @@ Duration=${duration} `
const t = normalizeJambones(this.logger, data)
.map((tdata) => makeTask(this.logger, tdata));
if (!queueCommand) {
this.logger.info({tasks: listTaskNames(t)}, 'CallSession:_onCommand new task list');
this.logger.debug({tasks: listTaskNames(t)}, 'CallSession:_onCommand new task list');
this.replaceApplication(t);
}
else if (JAMBONES_INJECT_CONTENT) {
if (JAMBONES_EAGERLY_PRE_CACHE_AUDIO) this._preCacheAudio(t);
this._injectTasks(t);
this.logger.info({tasks: listTaskNames(this.tasks)}, 'CallSession:_onCommand - updated task list');
this.logger.debug({tasks: listTaskNames(this.tasks)}, 'CallSession:_onCommand - updated task list');
}
else {
if (JAMBONES_EAGERLY_PRE_CACHE_AUDIO) this._preCacheAudio(t);
this.tasks.push(...t);
this.logger.info({tasks: listTaskNames(this.tasks)}, 'CallSession:_onCommand - updated task list');
this.logger.debug({tasks: listTaskNames(this.tasks)}, 'CallSession:_onCommand - updated task list');
}
resolution = {reason: 'received command, new tasks', queue: queueCommand, command};
resolution.command = listTaskNames(t);
@@ -2338,10 +2353,32 @@ Duration=${duration} `
// Destroy previous ep if it's still running.
if (this.ep?.connected) this.ep.destroy();
this.ep = await this.ms.createEndpoint({remoteSdp: this.dlg.remote.sdp});
/* Codec negotiation issue explanation:
*
* Problem scenario:
* 1. Initial negotiation:
* - FreeSWITCH → SBC: offers multiple codecs (PCMU, PCMA, G722)
* - SBC → Callee: passes all codecs (PCMU, PCMA, G722)
* - Callee → SBC: responds with PCMA (its supported codec)
* - SBC → FreeSWITCH: responds with PCMU (after transcoding)
*
* 2. After endpoint replacement:
* - If we only offer PCMU in the new endpoint
* - FreeSWITCH → SBC: offers only PCMU
* - SBC → Callee: offers only PCMU
* - Call fails: Callee rejects since it only supports PCMA
*
* Solution:
* Always have FreeSWITCH offer multiple codecs to the SBC, don't pass remote sdp here to ensure
* the SBC can reoffer the same codecs that the callee originally accepted.
* This prevents call failures during media renegotiation.
*/
this.ep = await this.ms.createEndpoint();
this._configMsEndpoint();
await this.dlg.modify(this.ep.local.sdp);
const sdp = await this.dlg.modify(this.ep.local.sdp);
await this.ep.modify(sdp);
this.logger.debug('CallSession:replaceEndpoint completed');
return this.ep;
}
@@ -2349,9 +2386,13 @@ Duration=${duration} `
/**
* Hang up the call and free the media endpoint
*/
_clearResources() {
async _clearResources() {
for (const resource of [this.dlg, this.ep, this.ep2]) {
if (resource && resource.connected) resource.destroy();
try {
if (resource && resource.connected) await resource.destroy();
} catch (err) {
this.logger.info({err}, 'CallSession:_clearResources - error clearing resources');
}
}
this.dlg = null;
this.ep = null;
@@ -2478,12 +2519,14 @@ Duration=${duration} `
} else if (sip_method === 'MESSAGE') {
res.send(202);
} else {
this.logger.info(`CallSession:_onRequestWithinDialog unsported method: ${req.method}`);
this.logger.warn(`CallSession:_onRequestWithinDialog unsupported method: ${req.method}`);
res.send(501);
return;
}
const params = {sip_method, sip_body: req.body, sip_headers: req.headers};
this.currentTask.performHook(this, this.sipRequestWithinDialogHook, params);
this.currentTask.performHook(this, this.sipRequestWithinDialogHook, params).catch((err) => {
this.logger.error({err}, 'CallSession:_onRequestWithinDialog - error calling sipRequestWithinDialogHook');
});
}
async _onReinvite(req, res) {
@@ -2563,7 +2606,7 @@ Duration=${duration} `
if (json && Array.isArray(json)) {
const tasks = normalizeJambones(this.logger, json).map((tdata) => makeTask(this.logger, tdata));
if (tasks && tasks.length > 0) {
this.logger.info('CallSession:handleRefer received REFER, get new tasks');
this.logger.debug('CallSession:handleRefer received REFER, get new tasks');
this.replaceApplication(tasks);
if (this.wakeupResolver) {
this.wakeupResolver({reason: 'CallSession: referHook new taks'});
@@ -2610,14 +2653,14 @@ Duration=${duration} `
if (typeof this.queueEventHookRequestor === 'undefined') {
const pp = this._pool.promise();
try {
this.logger.info({accountSid: this.accountSid}, 'performQueueWebhook: looking up account');
this.logger.debug({accountSid: this.accountSid}, 'performQueueWebhook: looking up account');
const [r] = await pp.query(sqlRetrieveQueueEventHook, [this.accountSid]);
if (0 === r.length) {
this.logger.info({accountSid: this.accountSid}, 'performQueueWebhook: no webhook provisioned');
this.queueEventHookRequestor = null;
}
else {
this.logger.info({accountSid: this.accountSid, webhook: r[0]}, 'performQueueWebhook: webhook found');
this.logger.debug({accountSid: this.accountSid, webhook: r[0]}, 'performQueueWebhook: webhook found');
this.queueEventHookRequestor = new HttpRequestor(this.logger, this.accountSid,
r[0], this.webhook_secret);
this.queueEventHook = r[0];
@@ -2631,7 +2674,7 @@ Duration=${duration} `
/* send webhook */
const params = {...obj, ...this.callInfo.toJSON()};
this.logger.info({accountSid: this.accountSid, params}, 'performQueueWebhook: sending webhook');
this.logger.debug({accountSid: this.accountSid, params}, 'performQueueWebhook: sending webhook');
this.queueEventHookRequestor.request('queue:status', this.queueEventHook, params)
.catch((err) => {
this.logger.info({err, accountSid: this.accountSid, obj}, 'Error sending queue notification event');
@@ -2766,7 +2809,7 @@ Duration=${duration} `
async handleReinviteAfterMediaReleased(req, res) {
assert(this.dlg && this.dlg.connected && !this.ep);
const sdp = await this.dlg.modify(req.body);
this.logger.info({sdp}, 'CallSession:handleReinviteAfterMediaReleased - reinvite to A leg returned sdp');
this.logger.debug({sdp}, 'CallSession:handleReinviteAfterMediaReleased - reinvite to A leg returned sdp');
res.send(200, {body: sdp});
}
@@ -2838,6 +2881,24 @@ Duration=${duration} `
if (Object.keys(opts).length > 0) {
this.ep.set(opts);
}
const origDestroy = this.ep.destroy.bind(this.ep);
this.ep.destroy = async() => {
try {
if (this.currentTask?.name === TaskName.Transcribe && JAMBONES_TRANSCRIBE_EP_DESTROY_DELAY_MS) {
// transcribe task is being used, wait for some time before destroy
// if final transcription is received but endpoint is already closed,
// freeswitch module will not be able to send the transcription
this.logger.debug('callSession:_configMsEndpoint -' +
' transcribe task, wait for some time before destroy');
await sleepFor(JAMBONES_TRANSCRIBE_EP_DESTROY_DELAY_MS);
}
await origDestroy();
} catch (err) {
this.logger.error(err, 'callSession:_configMsEndpoint - error destroying endpoint');
}
};
}
async _handleMediaTimeout(evt) {
@@ -2883,7 +2944,7 @@ Duration=${duration} `
_awaitCommandsOrHangup() {
assert(!this.wakeupResolver);
return new Promise((resolve, reject) => {
this.logger.info('_awaitCommandsOrHangup - waiting...');
this.logger.debug('_awaitCommandsOrHangup - waiting...');
this.wakeupResolver = resolve;
if (this._actionHookDelayProcessor) {
@@ -2903,7 +2964,7 @@ Duration=${duration} `
this.ep.play(this.fillerNoise.url);
this.ep.once('playback-start', (evt) => {
if (evt.file === this.fillerNoise.url && !this._isPlayingFillerNoise) {
this.logger.info('CallSession:_awaitCommandsOrHangup - filler noise started');
this.logger.debug('CallSession:_awaitCommandsOrHangup - filler noise started');
this.ep.api('uuid_break', this.ep.uuid)
.catch((err) => this.logger.info(err, 'Error killing filler noise'));
}
@@ -2914,7 +2975,7 @@ Duration=${duration} `
_clearTasks(backgroundGather, evt) {
if (this.requestor instanceof WsRequestor && !backgroundGather.cleared) {
this.logger.info({evt}, 'CallSession:_clearTasks on event from background gather');
this.logger.debug({evt}, 'CallSession:_clearTasks on event from background gather');
try {
backgroundGather.cleared = true;
this.kill(true);
@@ -2992,6 +3053,43 @@ Duration=${duration} `
this._jambonzHangup('Max Call Duration');
this._maxCallDurationTimer = null;
}
_onUserSaid(transcript) {
const count = this.conversationTurns.length;
if (count === 0 || this.conversationTurns[count - 1].type === 'assistant') {
this.conversationTurns.push({
type: 'user',
text: transcript
});
}
else {
this.conversationTurns[count - 1].text += ` ${transcript}`;
}
}
_onBotSaid(transcript) {
const count = this.conversationTurns.length;
if (count === 0 || this.conversationTurns[count - 1].type === 'user') {
this.conversationTurns.push({
type: 'assistant',
text: transcript
});
}
else {
this.conversationTurns[count - 1].text += ` ${transcript}`;
}
}
getFormattedConversation(numTurns) {
const turns = this.conversationTurns.slice(-numTurns);
if (turns.length === 0) return null;
return turns.map((t) => {
if (t.type === 'user') {
return `user: ${t.text}`;
}
return `assistant: ${t.text}`;
}).join('\n');
}
}
module.exports = CallSession;
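For context on the conversation-tracking code added above, here is a minimal standalone sketch (illustrative only; it mirrors the _onUserSaid/_onBotSaid/getFormattedConversation logic in this diff) showing how consecutive transcripts from the same speaker merge into a single turn:

const turns = [];
const record = (type, text) => {
  const last = turns[turns.length - 1];
  // consecutive transcripts from the same speaker are merged into one turn
  if (!last || last.type !== type) turns.push({type, text});
  else last.text += ` ${text}`;
};
record('user', 'hi there');
record('assistant', 'hello, how can I help?');
record('user', 'what is my');
record('user', 'account balance'); // merged into the previous user turn
console.log(turns.slice(-4).map((t) => `${t.type}: ${t.text}`).join('\n'));
// user: hi there
// assistant: hello, how can I help?
// user: what is my account balance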

View File

@@ -63,7 +63,7 @@ class RestCallSession extends CallSession {
this.callInfo.callTerminationBy = terminatedBy;
const duration = moment().diff(this.dlg.connectTime, 'seconds');
this.emit('callStatusChange', {callStatus: CallStatus.Completed, duration});
this.logger.debug(`RestCallSession: called party hung up by ${terminatedBy}`);
this.logger.info(`RestCallSession: called party hung up by ${terminatedBy}`);
this._callReleased();
}

View File

@@ -83,7 +83,11 @@ class Conference extends Task {
// reset answer time if we were transferred from another feature server
if (this.connectTime) dlg.connectTime = this.connectTime;
if (cs.sipRequestWithinDialogHook) {
/* remove any existing listener to escape from duplicating events */
this._removeSipIndialogRequestListener(this.dlg);
this._initSipIndialogRequestListener(cs, dlg);
}
this.ep.on('destroy', this._kicked.bind(this, cs, dlg));
try {
@@ -103,6 +107,7 @@ class Conference extends Task {
this.logger.debug(`Conference:exec - conference ${this.confName} is over`);
if (this.callMoved !== false) await this.performAction(this.results);
this._removeSipIndialogRequestListener(dlg);
} catch (err) {
this.logger.info(err, `TaskConference:exec - error in conference ${this.confName}`);
}
@@ -416,6 +421,20 @@ class Conference extends Task {
}
}
_initSipIndialogRequestListener(cs, dlg) {
dlg.on('info', this._onRequestWithinDialog.bind(this, cs));
dlg.on('message', this._onRequestWithinDialog.bind(this, cs));
}
_removeSipIndialogRequestListener(dlg) {
dlg && dlg.removeAllListeners('message');
dlg && dlg.removeAllListeners('info');
}
_onRequestWithinDialog(cs, req, res) {
cs._onRequestWithinDialog(req, res);
}
/**
* The conference we have been waiting for has started.
* It may be on this server or a different one, and we are

View File

@@ -3,8 +3,7 @@ const {TaskName, TaskPreconditions, DequeueResults, BONG_TONE} = require('../uti
const Emitter = require('events');
const bent = require('bent');
const assert = require('assert');
const sleepFor = (ms) => new Promise((resolve) => setTimeout(() => resolve(), ms));
const { sleepFor } = require('../utils/helpers');
const getUrl = (cs) => `${cs.srf.locals.serviceUrl}/v1/dequeue/${cs.callSid}`;

View File

@@ -24,6 +24,7 @@ const {ANCHOR_MEDIA_ALWAYS,
const { isOnhold, isOpusFirst } = require('../utils/sdp-utils');
const { normalizeJambones } = require('@jambonz/verb-specifications');
const { selectHostPort } = require('../utils/network');
const { sleepFor } = require('../utils/helpers');
function parseDtmfOptions(logger, dtmfCapture) {
let parentDtmfCollector, childDtmfCollector;
@@ -86,8 +87,6 @@ function filterAndLimit(logger, tasks) {
return unique;
}
const sleepFor = (ms) => new Promise((resolve) => setTimeout(() => resolve(), ms));
class TaskDial extends Task {
constructor(logger, opts) {
super(logger, opts);
@@ -499,7 +498,7 @@ class TaskDial extends Task {
dlg && dlg.removeAllListeners('info');
}
async _onRequestWithinDialog(cs, req, res) {
_onRequestWithinDialog(cs, req, res) {
cs._onRequestWithinDialog(req, res);
}
@@ -551,7 +550,8 @@ class TaskDial extends Task {
proxy: `sip:${sbcAddress}`,
callingNumber: this.callerId || fromUri.user,
...(this.callerName && {callingName: this.callerName}),
opusFirst: isOpusFirst(this.cs.ep.remote.sdp)
opusFirst: isOpusFirst(this.cs.ep.remote.sdp),
isVideoCall: this.cs.ep.remote.sdp.includes('m=video')
};
const t = this.target.find((t) => t.type === 'teams');
@@ -871,7 +871,11 @@ class TaskDial extends Task {
if (this.parentDtmfCollector) this._installDtmfDetection(cs, cs.dlg);
if (this.childDtmfCollector) this._installDtmfDetection(cs, this.dlg);
if (cs.sipRequestWithinDialogHook) this._initSipIndialogRequestListener(cs, this.dlg);
if (cs.sipRequestWithinDialogHook) {
/* remove any existing listener to escape from duplicating events */
this._removeSipIndialogRequestListener(this.dlg);
this._initSipIndialogRequestListener(cs, this.dlg);
}
if (this.transcribeTask) this.transcribeTask.exec(cs, {ep: this.epOther, ep2:this.ep});
if (this.listenTask) this.listenTask.exec(cs, {ep: this.listenTask.channel === 2 ? this.ep : this.epOther});
@@ -904,7 +908,7 @@ class TaskDial extends Task {
}
_handleMediaTimeout(evt) {
if (evt.reason === 'MEDIA_TIMEOUT' && this.sd && this.bridged) {
if (evt?.reason === 'MEDIA_TIMEOUT' && this.sd && this.bridged) {
this.kill(this.cs, KillReason.MediaTimeout);
}
}

View File

@@ -12,6 +12,7 @@ const {
JambonzTranscriptionEvents,
AssemblyAiTranscriptionEvents,
VoxistTranscriptionEvents,
OpenAITranscriptionEvents,
VadDetection,
VerbioTranscriptionEvents,
SpeechmaticsTranscriptionEvents
@@ -83,6 +84,7 @@ class TaskGather extends SttTask {
this._bufferedTranscripts = [];
this.partialTranscriptsCount = 0;
this.bugname_prefix = 'gather_';
}
get name() { return TaskName.Gather; }
@@ -110,6 +112,12 @@ class TaskGather extends SttTask {
return this.fillerNoise.startDelaySecs;
}
get isStreamingTts() { return this.sayTask && this.sayTask.isStreamingTts; }
getTtsVendorData() {
if (this.sayTask) return this.sayTask.getTtsVendorData(this.cs);
}
get summary() {
let s = `${this.name}{`;
if (this.input.length === 2) s += 'inputs=[speech,digits],';
@@ -233,6 +241,7 @@ class TaskGather extends SttTask {
const {span, ctx} = this.startChildSpan(`nested:${this.sayTask.summary}`);
const process = () => {
this.logger.debug('Gather: nested say task completed');
this.playComplete = true;
if (!this.listenDuringPrompt) {
startDtmfListener();
}
@@ -263,6 +272,7 @@ class TaskGather extends SttTask {
const {span, ctx} = this.startChildSpan(`nested:${this.playTask.summary}`);
const process = () => {
this.logger.debug('Gather: nested play task completed');
this.playComplete = true;
if (!this.listenDuringPrompt) {
startDtmfListener();
}
@@ -553,6 +563,31 @@ class TaskGather extends SttTask {
break;
case 'openai':
this.bugname = `${this.bugname_prefix}openai_transcribe`;
this.addCustomEventListener(
ep, OpenAITranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
this.addCustomEventListener(
ep, OpenAITranscriptionEvents.SpeechStarted, this._onOpenAISpeechStarted.bind(this, cs, ep));
this.addCustomEventListener(
ep, OpenAITranscriptionEvents.SpeechStopped, this._onOpenAISpeechStopped.bind(this, cs, ep));
this.addCustomEventListener(ep, OpenAITranscriptionEvents.Connect,
this._onVendorConnect.bind(this, cs, ep));
this.addCustomEventListener(ep, OpenAITranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep));
this.addCustomEventListener(ep, OpenAITranscriptionEvents.Error,
this._onOpenAIErrror.bind(this, cs, ep));
/* openai delta transcripts are useful only for minBargeinWordCount eval */
if (this.minBargeinWordCount > 1) {
this.openaiPartials = [];
opts.OPENAI_WANT_PARTIALS = 1;
this.addCustomEventListener(
ep, OpenAITranscriptionEvents.PartialTranscript, this._onOpenAIPartialTranscript.bind(this, cs, ep));
}
this.modelSupportsConversationTracking = opts.OPENAI_MODEL !== 'whisper-1';
break;
default:
if (this.vendor.startsWith('custom:')) {
this.bugname = `${this.bugname_prefix}${this.vendor}_transcribe`;
@@ -584,6 +619,25 @@ class TaskGather extends SttTask {
bugname: this.bugname
}, 'Gather:_startTranscribing');
/* special feature for openai: we can provide a prompt that includes recent conversation history */
let prompt;
if (this.vendor === 'openai') {
if (this.modelSupportsConversationTracking) {
prompt = this.formatOpenAIPrompt(this.cs, {
prompt: this.data.recognizer?.openaiOptions?.prompt,
hintsTemplate: this.data.recognizer?.openaiOptions?.promptTemplates?.hintsTemplate,
// eslint-disable-next-line max-len
conversationHistoryTemplate: this.data.recognizer?.openaiOptions?.promptTemplates?.conversationHistoryTemplate,
hints: this.data.recognizer?.hints,
});
this.logger.debug({prompt}, 'Gather:_startTranscribing - created an openai prompt');
}
else if (this.data.recognizer?.hints?.length > 0) {
prompt = this.data.recognizer?.hints.join(', ');
}
}
/**
* Note: we don't need to ask deepgram for interim results, because they
* already send us words as they are finalized (is_final=true) even before
@@ -595,6 +649,7 @@ class TaskGather extends SttTask {
interim: this.interim,
bugname: this.bugname,
hostport: this.hostport,
prompt
}).catch((err) => {
const {writeAlerts, AlertType} = this.cs.srf.locals;
this.logger.error(err, 'TaskGather:_startTranscribing error');
@@ -775,7 +830,11 @@ class TaskGather extends SttTask {
const bugname = fsEvent.getHeader('media-bugname');
const finished = fsEvent.getHeader('transcription-session-finished');
this.logger.debug({evt, bugname, finished, vendor: this.vendor}, 'Gather:_onTranscription raw transcript');
if (bugname && this.bugname !== bugname) return;
if (bugname && this.bugname !== bugname) {
this.logger.debug(
`Gather:_onTranscription - ignoring transcript from ${bugname} because our bug is ${this.bugname}`);
return;
}
if (finished === 'true') return;
if (this.vendor === 'ibm' && evt?.state === 'listening') return;
@@ -1078,6 +1137,33 @@ class TaskGather extends SttTask {
this._onVendorError(cs, _ep, {error: JSON.stringify(e)});
}
async _onOpenAIErrror(cs, _ep, evt) {
// eslint-disable-next-line no-unused-vars
const {message, ...e} = evt;
this._onVendorError(cs, _ep, {error: JSON.stringify(e)});
}
async _onOpenAISpeechStarted(cs, _ep, evt) {
this.logger.debug({evt}, 'TaskGather:_onOpenAISpeechStarted');
}
async _onOpenAISpeechStopped(cs, _ep, evt) {
this.logger.debug({evt}, 'TaskGather:_onOpenAISpeechStopped');
}
async _onOpenAIPartialTranscript(cs, _ep, evt) {
if (!this.playComplete) {
const words = evt.delta.split(' ').filter((w) => /[A-Za-z0-0]/.test(w));
this.openaiPartials.push(...words);
this.logger.debug({words, partials: this.openaiPartials, evt}, 'TaskGather:_onOpenAIPartialTranscript - words');
if (this.openaiPartials.length >= this.minBargeinWordCount) {
this.logger.debug({partials: this.openaiPartials}, 'killing audio due to speech (openai)');
this._killAudio(cs);
this.notifyStatus({event: 'speech-bargein-detected', words: this.openaiPartials});
}
}
}
async _onVendorError(cs, _ep, evt) {
super._onVendorError(cs, _ep, evt);
if (!(await this._startFallback(cs, _ep, evt))) {
@@ -1187,6 +1273,7 @@ class TaskGather extends SttTask {
}
}
else if (reason.startsWith('speech')) {
this.cs.emit('userSaid', evt.alternatives[0].transcript);
if (this.parentTask) this.parentTask.emit('transcription', evt);
else {
this.emit('transcription', evt);

View File

@@ -221,7 +221,7 @@ class TaskListen extends Task {
}
}
_onConnect(ep) {
this.logger.debug('TaskListen:_onConnect');
this.logger.info('TaskListen:_onConnect');
}
_onConnectFailure(ep, evt) {
this.logger.info(evt, 'TaskListen:_onConnectFailure');

View File

@@ -164,7 +164,9 @@ class TaskLlmElevenlabs_S2S extends Task {
try {
const {host, path} = await this.getSignedUrl();
const args = [ep.uuid, 'session.create', this.input_sample_rate, this.output_sample_rate, host, path];
const args = this.conversation_initiation_client_data ?
[ep.uuid, 'session.create', this.input_sample_rate, this.output_sample_rate, host, path] :
[ep.uuid, 'session.create', this.input_sample_rate, this.output_sample_rate, host, path, 'no_initial_config'];
await this._api(ep, args);
} catch (err) {
this.logger.error({err}, 'TaskLlmElevenlabs_S2S:_startListening');
@@ -189,7 +191,7 @@ class TaskLlmElevenlabs_S2S extends Task {
if (this.conversation_initiation_client_data) {
if (!await this._sendClientEvent(ep, {
type: 'conversation_initiation_client_data',
conversation_initiation_client_data: this.conversation_initiation_client_data
...this.conversation_initiation_client_data
})) {
this.notifyTaskDone();
}

View File

@@ -4,6 +4,7 @@ const {request} = require('undici');
const {LlmEvents_Ultravox} = require('../../../utils/constants');
const ultravox_server_events = [
'createCall',
'pong',
'state',
'transcript',
@@ -84,11 +85,11 @@ class TaskLlmUltravox_S2S extends Task {
});
const data = await body.json();
if (statusCode !== 201 || !data?.joinUrl) {
this.logger.error({statusCode, data}, 'Ultravox Error registering call');
this.logger.info({statusCode, data}, 'Ultravox Error registering call');
throw new Error(`Ultravox Error registering call: ${data.message}`);
}
this.logger.info({joinUrl: data.joinUrl}, 'Ultravox Call registered');
return data.joinUrl;
this.logger.debug({joinUrl: data.joinUrl}, 'Ultravox Call registered');
return data;
}
_unregisterHandlers() {
@@ -105,15 +106,21 @@ class TaskLlmUltravox_S2S extends Task {
async _startListening(cs, ep) {
this._registerHandlers(ep);
const joinUrl = await this.createCall();
const data = await this.createCall();
const {joinUrl} = data;
// split the joinUrl into host and path
const {host, pathname, search} = new URL(joinUrl);
try {
const args = [ep.uuid, 'session.create', host, pathname + search];
await this._api(ep, args);
// Notify the application that the session has been created with detail information
this._sendLlmEvent('createCall', {
type: 'createCall',
...data
});
} catch (err) {
this.logger.error({err}, 'TaskLlmUltraVox_S2S:_startListening');
this.logger.info({err}, 'TaskLlmUltraVox_S2S:_startListening - Error sending createCall');
this.notifyTaskDone();
}
}
@@ -141,7 +148,7 @@ class TaskLlmUltravox_S2S extends Task {
}
_onConnect(ep) {
this.logger.debug('TaskLlmUltravox_S2S:_onConnect');
this.logger.info('TaskLlmUltravox_S2S:_onConnect');
}
_onConnectFailure(_ep, evt) {
this.logger.info(evt, 'TaskLlmUltravox_S2S:_onConnectFailure');
@@ -157,7 +164,7 @@ class TaskLlmUltravox_S2S extends Task {
async _onServerEvent(_ep, evt) {
let endConversation = false;
const type = evt.type;
this.logger.info({evt}, 'TaskLlmUltravox_S2S:_onServerEvent');
this.logger.debug({evt}, 'TaskLlmUltravox_S2S:_onServerEvent');
/* server errors of some sort */
if (type === 'error') {
@@ -172,7 +179,7 @@ class TaskLlmUltravox_S2S extends Task {
else if (type === 'client_tool_invocation') {
this.logger.debug({evt}, 'TaskLlmUltravox_S2S:_onServerEvent - function_call');
if (!this.toolHook) {
this.logger.warn({evt}, 'TaskLlmUltravox_S2S:_onServerEvent - no toolHook defined!');
this.logger.info({evt}, 'TaskLlmUltravox_S2S:_onServerEvent - no toolHook defined!');
}
else {
const {toolName: name, invocationId: call_id, parameters: args} = evt;
@@ -190,11 +197,7 @@ class TaskLlmUltravox_S2S extends Task {
}
}
/* check whether we should notify on this event */
if (this.includeEvents.length > 0 ? this.includeEvents.includes(type) : !this.excludeEvents.includes(type)) {
this.parent.sendEventHook(evt)
.catch((err) => this.logger.info({err}, 'TaskLlmUltravox_S2S:_onServerEvent - error sending event hook'));
}
this._sendLlmEvent(type, evt);
if (endConversation) {
this.logger.info({results: this.results},
@@ -203,6 +206,32 @@ class TaskLlmUltravox_S2S extends Task {
}
}
_sendLlmEvent(type, evt) {
/* check whether we should notify on this event */
if (this.includeEvents.length > 0 ? this.includeEvents.includes(type) : !this.excludeEvents.includes(type)) {
this.parent.sendEventHook(evt)
.catch((err) => this.logger.info({err}, 'TaskLlmUltravox_S2S:_onServerEvent - error sending event hook'));
}
}
async processLlmUpdate(ep, data, _callSid) {
try {
this.logger.debug({data, _callSid}, 'TaskLlmUltravox_S2S:processLlmUpdate');
if (!data.type || ![
'input_text_message'
].includes(data.type)) {
this.logger.info({data},
'TaskLlmUltravox_S2S:processLlmUpdate - invalid mid-call request, only input_text_message supported');
}
else {
await this._api(ep, [ep.uuid, ClientEvent, JSON.stringify(data)]);
}
} catch (err) {
this.logger.info({err, data}, 'TaskLlmUltravox_S2S:processLlmUpdate - Error processing LLM update');
}
}
async processToolOutput(ep, tool_call_id, data) {
try {
this.logger.debug({tool_call_id, data}, 'TaskLlmUltravox_S2S:processToolOutput');
@@ -215,7 +244,7 @@ class TaskLlmUltravox_S2S extends Task {
await this._api(ep, [ep.uuid, ClientEvent, JSON.stringify(data)]);
}
} catch (err) {
this.logger.info({err}, 'TaskLlmUltravox_S2S:processToolOutput');
this.logger.info({err, data}, 'TaskLlmUltravox_S2S:processToolOutput - Error processing tool output');
}
}

View File

@@ -1,6 +1,8 @@
const Task = require('./task');
const {TaskName} = require('../utils/constants');
const WsRequestor = require('../utils/ws-requestor');
const URL = require('url');
const HttpRequestor = require('../utils/http-requestor');
/**
* Redirects to a new application
@@ -24,6 +26,21 @@ class TaskRedirect extends Task {
} catch (err) {
this.logger.info(err, `Task:performAction error redirecting to ${this.actionHook}`);
}
} else if (cs.application.requestor._isAbsoluteUrl(this.actionHook)) {
const baseUrl = this.cs.application.requestor.baseUrl;
const newUrl = URL.parse(this.actionHook);
const newBaseUrl = newUrl.protocol + '//' + newUrl.host;
if (baseUrl != newBaseUrl) {
try {
this.logger.info(`Task:redirect updating base url to ${newBaseUrl}`);
const newRequestor = new HttpRequestor(this.logger, cs.accountSid, {url: this.actionHook},
cs.accountInfo.account.webhook_secret);
this.cs.requestor.removeAllListeners();
this.cs.application.requestor = newRequestor;
} catch (err) {
this.logger.info(err, `Task:redirect error updating base url to ${this.actionHook}`);
}
}
}
await this.performAction();
}
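To illustrate the base-URL comparison above (hypothetical URLs; note that Node's legacy url.parse keeps the trailing colon on protocol, so protocol + '//' + host yields a normal base URL):

const URL = require('url');
const actionHook = 'https://new.example.com/app/redirect'; // hypothetical absolute actionHook
const u = URL.parse(actionHook);
const newBaseUrl = u.protocol + '//' + u.host; // 'https://new.example.com'
// if this differs from the current requestor's baseUrl (e.g. 'https://old.example.com'),
// a new HttpRequestor is created against the actionHook so later hooks go to the new server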

View File

@@ -5,6 +5,30 @@ const { TaskPreconditions, CobaltTranscriptionEvents } = require('../utils/const
const { SpeechCredentialError } = require('../utils/error');
const {JAMBONES_AWS_TRANSCRIBE_USE_GRPC} = require('../config');
/**
* "Please insert turns here: {{turns:4}}"
// -> { processed: 'Please insert turns here: {{turns}}', turns: 4 }
processTurnString("Please insert turns here: {{turns}}"));
// -> { processed: 'Please insert turns here: {{turns}}', turns: null }
*/
const processTurnString = (input) => {
const regex = /\{\{turns(?::(\d+))?\}\}/;
const match = input.match(regex);
if (!match) {
return {
processed: input,
turns: null
};
}
const turns = match[1] ? parseInt(match[1], 10) : null;
const processed = input.replace(regex, '{{turns}}');
return { processed, turns };
};
class SttTask extends Task {
constructor(logger, data, parentTask) {
@@ -290,6 +314,57 @@ class SttTask extends Task {
});
}
formatOpenAIPrompt(cs, {prompt, hintsTemplate, conversationHistoryTemplate, hints}) {
let conversationHistoryPrompt, hintsPrompt;
/* generate conversation history from template */
if (conversationHistoryTemplate) {
const {processed, turns} = processTurnString(conversationHistoryTemplate);
this.logger.debug({processed, turns}, 'SttTask: processed conversation history template');
conversationHistoryPrompt = cs.getFormattedConversation(turns || 4);
//this.logger.debug({conversationHistoryPrompt}, 'SttTask: conversation history');
if (conversationHistoryPrompt) {
conversationHistoryPrompt = processed.replace('{{turns}}', `\n${conversationHistoryPrompt}\nuser: `);
}
}
/* generate hints from template */
if (hintsTemplate && Array.isArray(hints) && hints.length > 0) {
hintsPrompt = hintsTemplate.replace('{{hints}}', hints);
}
/* combine into final prompt */
let finalPrompt = prompt || '';
if (hintsPrompt) {
finalPrompt = `${finalPrompt}\n${hintsPrompt}`;
}
if (conversationHistoryPrompt) {
finalPrompt = `${finalPrompt}\n${conversationHistoryPrompt}`;
}
this.logger.debug({
finalPrompt,
hints,
hintsPrompt,
conversationHistoryTemplate,
conversationHistoryPrompt
}, 'SttTask: formatted OpenAI prompt');
return finalPrompt?.trimStart();
}
/* some STT engines will keep listening after a final response, so no need to restart */
doesVendorContinueListeningAfterFinalTranscript(vendor) {
return (vendor.startsWith('custom:') || [
'soniox',
'aws',
'microsoft',
'deepgram',
'google',
'speechmatics',
'openai',
].includes(vendor));
}
_onCompileContext(ep, key, evt) {
const {addKey} = this.cs.srf.locals.dbHelpers;
this.logger.debug({evt}, `received cobalt compile context event, will cache under ${key}`);
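To make the prompt assembly above concrete, a self-contained approximation follows (the template strings, hints, and history values are invented for illustration; only the {{turns[:n]}} and {{hints}} placeholders come from the code in this diff):

// simplified stand-in for formatOpenAIPrompt; the real method pulls history
// from cs.getFormattedConversation(turns || 4)
const assemblePrompt = ({prompt = '', hintsTemplate, conversationHistoryTemplate, hints, history}) => {
  let out = prompt;
  if (hintsTemplate && hints?.length) out += `\n${hintsTemplate.replace('{{hints}}', hints)}`;
  if (conversationHistoryTemplate && history) {
    const processed = conversationHistoryTemplate.replace(/\{\{turns(?::(\d+))?\}\}/, '{{turns}}');
    out += `\n${processed.replace('{{turns}}', `\n${history}\nuser: `)}`;
  }
  return out.trimStart();
};

console.log(assemblePrompt({
  hintsTemplate: 'Expect these words: {{hints}}',
  conversationHistoryTemplate: 'Recent conversation: {{turns:2}}',
  hints: ['balance', 'transfer'],
  history: 'assistant: hello, how can I help?\nuser: what is my account balance'
}));
// Expect these words: balance,transfer
// Recent conversation:
// assistant: hello, how can I help?
// user: what is my account balance
// user: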

View File

@@ -14,6 +14,7 @@ const {
TranscribeStatus,
AssemblyAiTranscriptionEvents,
VoxistTranscriptionEvents,
OpenAITranscriptionEvents,
VerbioTranscriptionEvents,
SpeechmaticsTranscriptionEvents
} = require('../utils/constants.json');
@@ -30,7 +31,6 @@ class TaskTranscribe extends SttTask {
this.transcriptionHook = this.data.transcriptionHook;
this.translationHook = this.data.translationHook;
this.earlyMedia = this.data.earlyMedia === true || (parentTask && parentTask.earlyMedia);
if (this.data.recognizer) {
this.interim = !!this.data.recognizer.interim;
this.separateRecognitionPerChannel = this.data.recognizer.separateRecognitionPerChannel;
@@ -105,7 +105,7 @@ class TaskTranscribe extends SttTask {
if (cs.hasGlobalSttHints) {
const {hints, hintsBoost} = cs.globalSttHints;
this.data.recognizer.hints = this.data.recognizer.hints.concat(hints);
this.data.recognizer.hints = this.data.recognizer?.hints?.concat(hints);
if (!this.data.recognizer.hintsBoost && hintsBoost) this.data.recognizer.hintsBoost = hintsBoost;
this.logger.debug({hints: this.data.recognizer.hints, hintsBoost: this.data.recognizer.hintsBoost},
'Transcribe:exec - applying global sttHints');
@@ -330,6 +330,20 @@ class TaskTranscribe extends SttTask {
this._onSpeechmaticsError.bind(this, cs, ep));
break;
case 'openai':
this.bugname = `${this.bugname_prefix}openai_transcribe`;
this.addCustomEventListener(
ep, OpenAITranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep, channel));
this.addCustomEventListener(ep, OpenAITranscriptionEvents.Connect,
this._onVendorConnect.bind(this, cs, ep));
this.addCustomEventListener(ep, OpenAITranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep));
this.addCustomEventListener(ep, OpenAITranscriptionEvents.Error,
this._onOpenAIErrror.bind(this, cs, ep));
this.modelSupportsConversationTracking = opts.OPENAI_MODEL !== 'whisper-1';
break;
default:
if (this.vendor.startsWith('custom:')) {
this.bugname = `${this.bugname_prefix}${this.vendor}_transcribe`;
@@ -365,6 +379,25 @@ class TaskTranscribe extends SttTask {
async _transcribe(ep) {
this.logger.debug(
`TaskTranscribe:_transcribe - starting transcription vendor ${this.vendor} bugname ${this.bugname}`);
/* special feature for openai: we can provide a prompt that includes recent conversation history */
let prompt;
if (this.vendor === 'openai') {
if (this.modelSupportsConversationTracking) {
prompt = this.formatOpenAIPrompt(this.cs, {
prompt: this.data.recognizer?.openaiOptions?.prompt,
hintsTemplate: this.data.recognizer?.openaiOptions?.promptTemplates?.hintsTemplate,
// eslint-disable-next-line max-len
conversationHistoryTemplate: this.data.recognizer?.openaiOptions?.promptTemplates?.conversationHistoryTemplate,
hints: this.data.recognizer?.hints,
});
this.logger.debug({prompt}, 'Gather:_startTranscribing - created an openai prompt');
}
else if (this.data.recognizer?.hints?.length > 0) {
prompt = this.data.recognizer?.hints.join(', ');
}
}
await ep.startTranscription({
vendor: this.vendor,
interim: this.interim ? true : false,
@@ -456,8 +489,9 @@ class TaskTranscribe extends SttTask {
this._startAsrTimer(channel);
/* some STT engines will keep listening after a final response, so no need to restart */
if (!['soniox', 'aws', 'microsoft', 'deepgram', 'google', 'speechmatics']
.includes(this.vendor)) this._startTranscribing(cs, ep, channel);
if (!this.doesVendorContinueListeningAfterFinalTranscript(this.vendor)) {
this._startTranscribing(cs, ep, channel);
}
}
else {
if (this.vendor === 'soniox') {
@@ -480,9 +514,7 @@ class TaskTranscribe extends SttTask {
this.logger.debug({evt}, 'TaskTranscribe:_onTranscription - sending final transcript');
this._resolve(channel, evt);
/* some STT engines will keep listening after a final response, so no need to restart */
if (!['soniox', 'aws', 'microsoft', 'deepgram', 'google', 'speechmatics'].includes(this.vendor) &&
!this.vendor.startsWith('custom:')) {
if (!this.doesVendorContinueListeningAfterFinalTranscript(this.vendor)) {
this.logger.debug('TaskTranscribe:_onTranscription - restarting transcribe');
this._startTranscribing(cs, ep, channel);
}
@@ -733,6 +765,12 @@ class TaskTranscribe extends SttTask {
this._onVendorError(cs, _ep, {error: JSON.stringify(e)});
}
async _onOpenAIErrror(cs, _ep, evt) {
// eslint-disable-next-line no-unused-vars
const {message, ...e} = evt;
this._onVendorError(cs, _ep, {error: JSON.stringify(e)});
}
_startAsrTimer(channel) {
if (this.vendor === 'deepgram') return; // no need
assert(this.isContinuousAsr);

View File

@@ -42,6 +42,11 @@ class TtsTask extends Task {
}
}
}
const fullText = Array.isArray(this.text) ? this.text.join(' ') : this.text;
if (fullText.length > 0) {
cs.emit('botSaid', fullText);
}
}
getTtsVendorData(cs) {
@@ -60,7 +65,6 @@ class TtsTask extends Task {
async setTtsStreamingChannelVars(vendor, language, voice, credentials, ep) {
const {api_key, model_id, custom_tts_streaming_url, auth_token} = credentials;
const {stability, similarity_boost, use_speaker_boost, style} = this.options;
let obj;
this.logger.debug({credentials},
@@ -82,6 +86,7 @@ class TtsTask extends Task {
};
break;
case 'elevenlabs':
const {stability, similarity_boost, use_speaker_boost, style, speed} = this.options.voice_settings || {};
obj = {
ELEVENLABS_API_KEY: api_key,
ELEVENLABS_TTS_STREAMING_MODEL_ID: model_id,
@@ -91,7 +96,14 @@ class TtsTask extends Task {
...(stability && {ELEVENLABS_TTS_STREAMING_VOICE_SETTINGS_STABILITY: stability}),
...(similarity_boost && {ELEVENLABS_TTS_STREAMING_VOICE_SETTINGS_SIMILARITY_BOOST: similarity_boost}),
...(use_speaker_boost && {ELEVENLABS_TTS_STREAMING_VOICE_SETTINGS_USE_SPEAKER_BOOST: use_speaker_boost}),
...(style && {ELEVENLABS_TTS_STREAMING_VOICE_SETTINGS_STYLE: style})
...(style && {ELEVENLABS_TTS_STREAMING_VOICE_SETTINGS_STYLE: style}),
// speed has value 0.7 to 1.2, 1.0 is default, make sure we send the value event it's 0
...(speed !== null && speed !== undefined && {ELEVENLABS_TTS_STREAMING_VOICE_SETTINGS_SPEED: `${speed}`}),
...(this.options.pronunciation_dictionary_locators &&
Array.isArray(this.options.pronunciation_dictionary_locators) && {
ELEVENLABS_TTS_STREAMING_PRONUNCIATION_DICTIONARY_LOCATORS:
JSON.stringify(this.options.pronunciation_dictionary_locators)
}),
};
break;
case 'rimelabs':
@@ -125,7 +137,7 @@ class TtsTask extends Task {
throw new Error(`vendor ${vendor} is not supported for tts streaming yet`);
}
}
this.logger.info({vendor, credentials, obj}, 'setTtsStreamingChannelVars');
this.logger.debug({vendor, credentials, obj}, 'setTtsStreamingChannelVars');
await ep.set(obj);
}
@@ -152,7 +164,6 @@ class TtsTask extends Task {
} else if (vendor === 'deepgram') {
this.model = voice;
}
this.model_id = credentials.model_id;
/* allow for microsoft custom region voice and api_key to be specified as an override */
if (vendor === 'microsoft' && this.options.deploymentId) {
@@ -193,8 +204,12 @@ class TtsTask extends Task {
};
}
}
} else if (vendor === 'cartesia') {
credentials.model_id = this.options.model_id || credentials.model_id;
}
this.model_id = credentials.model_id;
/**
* note on cache_speech_handles. This was found to be risky.
* It can cause a crash in the following sequence on a single call:
@@ -216,7 +231,7 @@ class TtsTask extends Task {
//cs.currentTtsVendor = vendor;
if (!preCache && !this._disableTracing)
this.logger.info({vendor, language, voice, model: this.model}, 'TaskSay:exec');
this.logger.debug({vendor, language, voice, model: this.model}, 'TaskSay:exec');
try {
if (!credentials) {
writeAlerts({

View File

@@ -45,6 +45,7 @@ if (VMD_HINTS_FILE) {
});
}
class Amd extends Emitter {
constructor(logger, cs, opts) {
super();
@@ -68,6 +69,8 @@ class Amd extends Emitter {
this.getIbmAccessToken = getIbmAccessToken;
const {setChannelVarsForStt} = require('./transcription-utils')(logger);
this.setChannelVarsForStt = setChannelVarsForStt;
this.digitCount = opts.digitCount || 0;
this.numberRegEx = RegExp(`[0-9]{${this.digitCount}}`);
const {
noSpeechTimeoutMs = 5000,
@@ -163,6 +166,14 @@ class Amd extends Emitter {
language: t.language_code
});
}
else if (this.digitCount != 0 && this.numberRegEx.test(t.alternatives[0].transcript)) {
/* a string of numbers is typically a machine */
this.emit(this.decision = AmdEvents.MachineDetected, {
reason: 'digit count',
greeting: t.alternatives[0].transcript,
language: t.language_code
});
}
else if (final && wordCount < this.thresholdWordCount) {
/* a short greeting is typically a human */
this.emit(this.decision = AmdEvents.HumanDetected, {
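A quick illustration of the digitCount heuristic added above (transcript text is invented; the option shape, e.g. amd: { digitCount: 4 }, is an assumption based on the opts passed to the Amd constructor):

const digitCount = 4;
const numberRegEx = RegExp(`[0-9]{${digitCount}}`); // any run of 4 consecutive digits
numberRegEx.test('to reach sales press 1'); // false
numberRegEx.test('you have reached 555 0123, please leave a message'); // true -> MachineDetected, reason 'digit count'
// the check is skipped when digitCount is 0, since [0-9]{0} would match any transcript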

View File

@@ -4,7 +4,7 @@ const assert = require('assert');
const {
AWS_REGION,
AWS_SNS_PORT: PORT,
AWS_SNS_TOPIC_ARM,
AWS_SNS_TOPIC_ARN,
AWS_SNS_PORT_MAX,
} = require('../config');
const {LifeCycleEvents} = require('./constants');
@@ -55,12 +55,12 @@ class SnsNotifier extends Emitter {
async _handlePost(req, res) {
try {
const parsedBody = JSON.parse(req.body);
this.logger.debug({headers: req.headers, body: parsedBody}, 'Received HTTP POST from AWS');
this.logger.info({headers: req.headers, body: parsedBody}, 'Received HTTP POST from AWS');
if (!validatePayload(parsedBody)) {
this.logger.info('incoming AWS SNS HTTP POST failed signature validation');
return res.sendStatus(403);
}
this.logger.debug('incoming HTTP POST passed validation');
this.logger.info('incoming HTTP POST passed validation');
res.sendStatus(200);
switch (parsedBody.Type) {
@@ -74,7 +74,18 @@ class SnsNotifier extends Emitter {
subscriptionRequestId: this.subscriptionRequestId
}, 'response from SNS SubscribeURL');
const data = await this.describeInstance();
this.lifecycleState = data.AutoScalingGroups[0].Instances[0].LifecycleState;
const group = data.AutoScalingGroups.find((group) =>
group.Instances && group.Instances.some((instance) => instance.InstanceId === this.instanceId)
);
if (!group) {
this.logger.error('Current instance not found in any Auto Scaling group', data);
} else {
const instance = group.Instances.find((instance) => instance.InstanceId === this.instanceId);
this.lifecycleState = instance.LifecycleState;
}
//this.lifecycleState = data.AutoScalingGroups[0].Instances[0].LifecycleState;
this.emit('SubscriptionConfirmation', {publicIp: this.publicIp});
break;
@@ -94,7 +105,7 @@ class SnsNotifier extends Emitter {
this.unsubscribe();
}
else {
this.logger.debug(`SnsNotifier - instance ${msg.EC2InstanceId} is scaling in (not us)`);
this.logger.info(`SnsNotifier - instance ${msg.EC2InstanceId} is scaling in (not us)`);
}
}
break;
@@ -111,7 +122,7 @@ class SnsNotifier extends Emitter {
async init() {
try {
this.logger.debug('SnsNotifier: retrieving instance data');
this.logger.info('SnsNotifier: retrieving instance data');
this.instanceId = await getString('http://169.254.169.254/latest/meta-data/instance-id');
this.publicIp = await getString('http://169.254.169.254/latest/meta-data/public-ipv4');
this.logger.info({
@@ -142,13 +153,13 @@ class SnsNotifier extends Emitter {
try {
const params = {
Protocol: 'http',
TopicArn: AWS_SNS_TOPIC_ARM,
TopicArn: AWS_SNS_TOPIC_ARN,
Endpoint: this.snsEndpoint
};
const response = await snsClient.send(new SubscribeCommand(params));
this.logger.info({response}, `response to SNS subscribe to ${AWS_SNS_TOPIC_ARM}`);
this.logger.info({response}, `response to SNS subscribe to ${AWS_SNS_TOPIC_ARN}`);
} catch (err) {
this.logger.error({err}, `Error subscribing to SNS topic arn ${AWS_SNS_TOPIC_ARM}`);
this.logger.error({err}, `Error subscribing to SNS topic arn ${AWS_SNS_TOPIC_ARN}`);
}
}
@@ -159,9 +170,9 @@ class SnsNotifier extends Emitter {
SubscriptionArn: this.subscriptionArn
};
const response = await snsClient.send(new UnsubscribeCommand(params));
this.logger.info({response}, `response to SNS unsubscribe to ${AWS_SNS_TOPIC_ARM}`);
this.logger.info({response}, `response to SNS unsubscribe to ${AWS_SNS_TOPIC_ARN}`);
} catch (err) {
this.logger.error({err}, `Error unsubscribing to SNS topic arn ${AWS_SNS_TOPIC_ARM}`);
this.logger.error({err}, `Error unsubscribing to SNS topic arn ${AWS_SNS_TOPIC_ARN}`);
}
}

View File

@@ -1,6 +1,7 @@
const assert = require('assert');
const Emitter = require('events');
const crypto = require('crypto');
const parseUrl = require('parse-url');
const timeSeries = require('@jambonz/time-series');
const {NODE_ENV, JAMBONES_TIME_SERIES_HOST} = require('../config');
let alerter ;
@@ -21,6 +22,10 @@ class BaseRequestor extends Emitter {
const {stats} = require('../../').srf.locals;
this.stats = stats;
const u = this._parsedUrl = parseUrl(this.url);
if (u.port) this._baseUrl = `${u.protocol}://${u.resource}:${u.port}`;
else this._baseUrl = `${u.protocol}://${u.resource}`;
if (!alerter) {
alerter = timeSeries(logger, {
host: JAMBONES_TIME_SERIES_HOST,
@@ -30,6 +35,10 @@ class BaseRequestor extends Emitter {
}
}
get baseUrl() {
return this._baseUrl;
}
get Alerter() {
return alerter;
}
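For reference, the baseUrl now computed in the base class resolves like this (example URL invented; field names are those returned by the parse-url package):

const parseUrl = require('parse-url');
const u = parseUrl('https://hooks.example.com:8080/v1/status');
// u.protocol -> 'https', u.resource -> 'hooks.example.com', u.port -> 8080
const baseUrl = u.port ? `${u.protocol}://${u.resource}:${u.port}` : `${u.protocol}://${u.resource}`;
// -> 'https://hooks.example.com:8080'
// hoisting this into BaseRequestor lets the HTTP and WS requestors share one baseUrl getter
// (HttpRequestor's duplicate copy is removed later in this diff)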

View File

@@ -32,7 +32,7 @@
"Tag": "tag",
"Transcribe": "transcribe"
},
"AllowedSipRecVerbs": ["answer", "config", "gather", "transcribe", "listen", "tag"],
"AllowedSipRecVerbs": ["answer", "config", "gather", "transcribe", "listen", "tag", "hangup", "sip:decline"],
"AllowedConfirmSessionVerbs": ["config", "gather", "plays", "say", "tag"],
"CallStatus": {
"Trying": "trying",
@@ -137,6 +137,18 @@
"Connect": "speechmatics_transcribe::connect",
"Error": "speechmatics_transcribe::error"
},
"OpenAITranscriptionEvents": {
"Transcription": "openai_transcribe::transcription",
"Translation": "openai_transcribe::translation",
"SpeechStarted": "openai_transcribe::speech_started",
"SpeechStopped": "openai_transcribe::speech_stopped",
"PartialTranscript": "openai_transcribe::partial_transcript",
"Info": "openai_transcribe::info",
"RecognitionStarted": "openai_transcribe::recognition_started",
"ConnectFailure": "openai_transcribe::connect_failed",
"Connect": "openai_transcribe::connect",
"Error": "openai_transcribe::error"
},
"JambonzTranscriptionEvents": {
"Transcription": "jambonz_transcribe::transcription",
"ConnectFailure": "jambonz_transcribe::connect_failed",

@@ -142,6 +142,11 @@ const speechMapper = (cred) => {
obj.api_key = o.api_key;
obj.speechmatics_stt_uri = o.speechmatics_stt_uri;
}
else if ('openai' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
obj.model_id = o.model_id;
}
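// Illustration only (shape inferred from the assignments above): the decrypted openai
// credential is expected to look like {"api_key": "sk-....", "model_id": "whisper-1"},
// with model_id optional.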
else if (obj.vendor.startsWith('custom:')) {
const o = JSON.parse(decrypt(credential));
obj.auth_token = o.auth_token;

lib/utils/helpers.js (new file, 5 lines)
@@ -0,0 +1,5 @@
const sleepFor = (ms) => new Promise((resolve) => setTimeout(() => resolve(), ms));
module.exports = {
sleepFor
};
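A minimal usage sketch for the new helper (the caller, path and delay value are illustrative):

const { sleepFor } = require('./lib/utils/helpers'); // path shown relative to the repo root

(async() => {
  // pause for half a second, e.g. between retries
  await sleepFor(500);
})();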

@@ -48,8 +48,6 @@ class HttpRequestor extends BaseRequestor {
assert(['GET', 'POST'].includes(this.method));
const u = this._parsedUrl = parseUrl(this.url);
if (u.port) this._baseUrl = `${u.protocol}://${u.resource}:${u.port}`;
else this._baseUrl = `${u.protocol}://${u.resource}`;
this._protocol = u.protocol;
this._resource = u.resource;
this._port = u.port;
@@ -57,18 +55,18 @@ class HttpRequestor extends BaseRequestor {
this._usePools = HTTP_POOL && parseInt(HTTP_POOL);
if (this._usePools) {
if (pools.has(this._baseUrl)) {
this.client = pools.get(this._baseUrl);
if (pools.has(this.baseUrl)) {
this.client = pools.get(this.baseUrl);
}
else {
const connections = HTTP_POOLSIZE ? parseInt(HTTP_POOLSIZE) : 10;
const pipelining = HTTP_PIPELINING ? parseInt(HTTP_PIPELINING) : 1;
const pool = this.client = new Pool(this._baseUrl, {
const pool = this.client = new Pool(this.baseUrl, {
connections,
pipelining
});
pools.set(this._baseUrl, pool);
this.logger.debug(`HttpRequestor:created pool for ${this._baseUrl}`);
pools.set(this.baseUrl, pool);
this.logger.debug(`HttpRequestor:created pool for ${this.baseUrl}`);
}
}
else {
@@ -89,10 +87,6 @@ class HttpRequestor extends BaseRequestor {
}
}
get baseUrl() {
return this._baseUrl;
}
close() {
if (!this._usePools && !this.client?.closed) this.client.close();
}
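The refactor above boils down to keeping one undici Pool per base URL and reusing it across requests; a stripped-down sketch (the helper name and defaults are illustrative):

const {Pool} = require('undici');

const pools = new Map();
const getPool = (baseUrl, connections = 10, pipelining = 1) => {
  // reuse an existing pool for this origin, otherwise create and cache one
  if (!pools.has(baseUrl)) {
    pools.set(baseUrl, new Pool(baseUrl, {connections, pipelining}));
  }
  return pools.get(baseUrl);
};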

@@ -15,12 +15,14 @@ const RootSpan = require('./call-tracer');
const uuidv4 = require('uuid-random');
const HttpRequestor = require('./http-requestor');
const WsRequestor = require('./ws-requestor');
const {makeOpusFirst} = require('./sdp-utils');
const {makeOpusFirst, removeVideoSdp} = require('./sdp-utils');
const {
JAMBONES_USE_FREESWITCH_TIMER_FD,
JAMBONES_MEDIA_TIMEOUT_MS,
JAMBONES_MEDIA_HOLD_TIMEOUT_MS
JAMBONES_MEDIA_HOLD_TIMEOUT_MS,
JAMBONES_TRANSCRIBE_EP_DESTROY_DELAY_MS
} = require('../config');
const { sleepFor } = require('./helpers');
class SingleDialer extends Emitter {
constructor({logger, sbcAddress, target, opts, application, callInfo, accountInfo, rootSpan, startSpan, dialTask,
@@ -152,15 +154,21 @@ class SingleDialer extends Emitter {
return;
}
let lastSdp;
const connectStream = async(remoteSdp) => {
const connectStream = async(remoteSdp, isVideoCall) => {
if (remoteSdp === lastSdp) return;
if (process.env.JAMBONES_VIDEO_CALLS_ENABLED_IN_FS && !isVideoCall) {
remoteSdp = removeVideoSdp(remoteSdp);
}
lastSdp = remoteSdp;
return this.ep.modify(remoteSdp);
};
let localSdp = this.ep.local.sdp;
if (process.env.JAMBONES_VIDEO_CALLS_ENABLED_IN_FS && !opts.isVideoCall) {
localSdp = removeVideoSdp(localSdp);
}
Object.assign(opts, {
proxy: `sip:${this.sbcAddress}`,
localSdp: opts.opusFirst ? makeOpusFirst(this.ep.local.sdp) : this.ep.local.sdp
localSdp: opts.opusFirst ? makeOpusFirst(localSdp) : localSdp
});
if (this.target.auth) opts.auth = this.target.auth;
inviteSpan = this.startSpan('invite', {
@@ -222,13 +230,13 @@ class SingleDialer extends Emitter {
status.callStatus = CallStatus.EarlyMedia;
this.emit('earlyMedia');
}
connectStream(prov.body);
connectStream(prov.body, opts.isVideoCall);
}
else status.callStatus = CallStatus.Ringing;
this.emit('callStatusChange', status);
}
});
await connectStream(this.dlg.remote.sdp);
await connectStream(this.dlg.remote.sdp, opts.isVideoCall);
this.dlg.callSid = this.callSid;
this.inviteInProgress = null;
this.emit('callStatusChange', {
@@ -358,6 +366,24 @@ class SingleDialer extends Emitter {
this.logger.info(err, 'place-outdial:_configMsEndpoint - error enable inband DTMF');
}
}
const origDestroy = this.ep.destroy.bind(this.ep);
this.ep.destroy = async() => {
try {
if (this.dialTask.transcribeTask && JAMBONES_TRANSCRIBE_EP_DESTROY_DELAY_MS) {
// a transcribe task is in use, so wait briefly before destroying the endpoint;
// if the final transcription arrives after the endpoint has already been closed,
// the freeswitch module cannot deliver it
this.logger.info('SingleDialer:_configMsEndpoint -' +
' Dial with transcribe task, wait for some time before destroy');
await sleepFor(JAMBONES_TRANSCRIBE_EP_DESTROY_DELAY_MS);
}
await origDestroy();
} catch (err) {
this.logger.error(err, 'SingleDialer:_configMsEndpoint - error destroying endpoint');
}
};
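// Illustration only: with JAMBONES_TRANSCRIBE_EP_DESTROY_DELAY_MS configured (say 2000 ms,
// a made-up value), the wrapped destroy() above sleeps before tearing down the endpoint
// whenever the dial verb also runs a transcribe task, giving freeswitch time to deliver
// the final transcript.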
}
/**
@@ -409,7 +435,10 @@ class SingleDialer extends Emitter {
} catch (err) {
this.logger.debug(err, 'SingleDialer:_executeApp: error');
this.emit('decline');
if (this.dlg.connected) this.dlg.destroy();
if (this.dlg.connected) {
this.dlg.destroy();
this.ep.destroy();
}
}
}

@@ -8,7 +8,7 @@ const {
JAMBONES_SBCS,
K8S,
K8S_SBC_SIP_SERVICE_NAME,
AWS_SNS_TOPIC_ARM,
AWS_SNS_TOPIC_ARN,
OPTIONS_PING_INTERVAL,
AWS_REGION,
NODE_ENV,
@@ -35,7 +35,7 @@ module.exports = (logger) => {
// listen for SNS lifecycle changes
let lifecycleEmitter = new Emitter();
let dryUpCalls = false;
if (AWS_SNS_TOPIC_ARM && AWS_REGION) {
if (AWS_SNS_TOPIC_ARN && AWS_REGION) {
(async function() {
try {

@@ -35,6 +35,12 @@ const makeOpusFirst = (sdp) => {
}
return sdpTransform.write(parsedSdp);
};
const removeVideoSdp = (sdp) => {
const parsedSdp = sdpTransform.parse(sdp);
// Filter out video media sections, keeping only non-video media
parsedSdp.media = parsedSdp.media.filter((media) => media.type !== 'video');
return sdpTransform.write(parsedSdp);
};
const extractSdpMedia = (sdp) => {
const parsedSdp1 = sdpTransform.parse(sdp);
@@ -54,5 +60,6 @@ module.exports = {
mergeSdpMedia,
extractSdpMedia,
isOpusFirst,
makeOpusFirst
makeOpusFirst,
removeVideoSdp
};
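To make the effect of removeVideoSdp concrete, here is a self-contained sketch that mirrors the filter above against a made-up audio+video offer:

const sdpTransform = require('sdp-transform');

const removeVideoSdp = (sdp) => {
  const parsed = sdpTransform.parse(sdp);
  parsed.media = parsed.media.filter((media) => media.type !== 'video');
  return sdpTransform.write(parsed);
};

// abridged offer with one audio and one video section (illustrative values)
const offer = [
  'v=0',
  'o=- 1 1 IN IP4 192.0.2.10',
  's=-',
  'c=IN IP4 192.0.2.10',
  't=0 0',
  'm=audio 49170 RTP/AVP 0',
  'a=rtpmap:0 PCMU/8000',
  'm=video 51372 RTP/AVP 96',
  'a=rtpmap:96 H264/90000'
].join('\r\n') + '\r\n';

// prints an SDP containing only the m=audio section
console.log(removeVideoSdp(offer));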

@@ -117,7 +117,16 @@ const stickyVars = {
'SPEECHMATICS_SPEECH_HINTS',
'SPEECHMATICS_TRANSLATION_LANGUAGES',
'SPEECHMATICS_TRANSLATION_PARTIALS'
]
],
openai: [
'OPENAI_API_KEY',
'OPENAI_MODEL',
'OPENAI_INPUT_AUDIO_NOISE_REDUCTION',
'OPENAI_TURN_DETECTION_TYPE',
'OPENAI_TURN_DETECTION_THRESHOLD',
'OPENAI_TURN_DETECTION_PREFIX_PADDING_MS',
'OPENAI_TURN_DETECTION_SILENCE_DURATION_MS',
],
};
/**
@@ -304,13 +313,18 @@ const normalizeDeepgram = (evt, channel, language, shortUtterance) => {
confidence: alt.confidence,
transcript: alt.transcript,
}));
/**
* Some models (nova-2-general) return the detected language in the
* alternatives.languages array if the language is set as multi.
* If the language is detected, we use it as the language_code.
*/
const detectedLanguage = evt.channel?.alternatives?.[0]?.languages?.[0];
/**
* note difference between is_final and speech_final in Deepgram:
* https://developers.deepgram.com/docs/understand-endpointing-interim-results
*/
return {
language_code: language,
language_code: detectedLanguage || language,
channel_tag: channel,
is_final: shortUtterance ? evt.is_final : evt.speech_final,
alternatives: alternatives.length ? [alternatives[0]] : [],
@@ -566,6 +580,35 @@ const normalizeSpeechmatics = (evt, channel, language) => {
return obj;
};
const calculateConfidence = (logprobsArray) => {
// Sum the individual log probabilities
const totalLogProb = logprobsArray.reduce((sum, tokenInfo) => sum + tokenInfo.logprob, 0);
// Convert the total log probability back to a regular probability
const confidence = Math.exp(totalLogProb);
return confidence;
};
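// Worked example (numbers made up): for token logprobs of -0.1 and -0.2 the summed log
// probability is -0.3, so calculateConfidence([{logprob: -0.1}, {logprob: -0.2}])
// returns Math.exp(-0.3) ≈ 0.74, i.e. roughly 74% confidence.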
const normalizeOpenAI = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
const obj = {
language_code: language,
channel_tag: channel,
is_final: true,
alternatives: [
{
transcript: evt.transcript,
confidence: evt.logprobs ? calculateConfidence(evt.logprobs) : 1.0,
}
],
vendor: {
name: 'openai',
evt: copy
}
};
return obj;
};
module.exports = (logger) => {
const normalizeTranscription = (evt, vendor, channel, language, shortUtterance, punctuation) => {
@@ -597,6 +640,8 @@ module.exports = (logger) => {
return normalizeVerbio(evt, channel, language);
case 'speechmatics':
return normalizeSpeechmatics(evt, channel, language);
case 'openai':
return normalizeOpenAI(evt, channel, language);
default:
if (vendor.startsWith('custom:')) {
return normalizeCustom(evt, channel, language, vendor);
@@ -784,7 +829,7 @@ module.exports = (logger) => {
};
}
else if ('deepgram' === vendor) {
let {model} = rOpts;
let model = rOpts.deepgramOptions?.model || rOpts.model;
const {deepgramOptions = {}} = rOpts;
const deepgramUri = deepgramOptions.deepgramSttUri || sttCredentials.deepgram_stt_uri;
const useTls = deepgramOptions.deepgramSttUseTls || sttCredentials.deepgram_stt_use_tls;
@@ -963,6 +1008,36 @@ module.exports = (logger) => {
{VOXIST_API_KEY: sttCredentials.api_key},
};
}
else if ('openai' === vendor) {
const {openaiOptions = {}} = rOpts;
const model = openaiOptions.model || rOpts.model || sttCredentials.model_id || 'whisper-1';
const apiKey = openaiOptions.apiKey || sttCredentials.api_key;
opts = {
OPENAI_MODEL: model,
OPENAI_API_KEY: apiKey,
...opts,
...(openaiOptions.prompt && {OPENAI_PROMPT: openaiOptions.prompt}),
...(openaiOptions.input_audio_noise_reduction &&
{OPENAI_INPUT_AUDIO_NOISE_REDUCTION: openaiOptions.input_audio_noise_reduction}),
};
if (openaiOptions.turn_detection) {
opts = {
...opts,
OPENAI_TURN_DETECTION_TYPE: openaiOptions.turn_detection.type,
...(openaiOptions.turn_detection.threshold && {
OPENAI_TURN_DETECTION_THRESHOLD: openaiOptions.turn_detection.threshold
}),
...(openaiOptions.turn_detection.prefix_padding_ms && {
OPENAI_TURN_DETECTION_PREFIX_PADDING_MS: openaiOptions.turn_detection.prefix_padding_ms
}),
...(openaiOptions.turn_detection.silence_duration_ms && {
OPENAI_TURN_DETECTION_SILENCE_DURATION_MS: openaiOptions.turn_detection.silence_duration_ms
}),
};
}
}
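// Illustration only (option names come from this hunk; the values are made up): a recognizer
// block such as
//   {vendor: 'openai', openaiOptions: {model: 'whisper-1', prompt: 'caller reads an order number',
//     input_audio_noise_reduction: 'near_field',
//     turn_detection: {type: 'server_vad', threshold: 0.6, silence_duration_ms: 500}}}
// would populate OPENAI_MODEL, OPENAI_PROMPT, OPENAI_INPUT_AUDIO_NOISE_REDUCTION and the
// OPENAI_TURN_DETECTION_* channel variables set above.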
else if ('verbio' === vendor) {
const {verbioOptions = {}} = rOpts;
opts = {

@@ -133,7 +133,7 @@ class WsRequestor extends BaseRequestor {
/* prepare and send message */
let payload = params ? snakeCaseKeys(params, ['customerData', 'sip']) : null;
if (type === 'session:new') this._sessionData = payload;
if (type === 'session:new' || type === 'session:adulting') this._sessionData = payload;
if (type === 'session:reconnect') payload = this._sessionData;
assert.ok(url, 'WsRequestor:request url was not provided');
@@ -146,7 +146,9 @@ class WsRequestor extends BaseRequestor {
type,
msgid,
call_sid: this.call_sid,
hook: ['verb:hook', 'session:redirect', 'llm:event', 'llm:tool-call'].includes(type) ? url : undefined,
hook: [
'verb:hook', 'dial:confirm', 'session:redirect', 'llm:event', 'llm:tool-call'
].includes(type) ? url : undefined,
data: {...payload},
...b3
};
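// Illustration only: with 'dial:confirm' added above, a confirm-hook request now carries its
// hook in the outbound frame, e.g. (all values made up):
//   {type: 'dial:confirm', msgid: 'gX3k9', call_sid: '<call sid>', hook: '/dial-confirm', data: {...}}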
@@ -431,6 +433,21 @@ class WsRequestor extends BaseRequestor {
}
} catch (err) {
this.logger.info({err, content}, 'WsRequestor:_onMessage - invalid incoming message');
const params = {
msg: 'InvalidMessage',
details: err.message,
content: Buffer.from(content).toString('utf-8')
};
const {writeAlerts, AlertType} = this.Alerter;
writeAlerts({
account_sid: this.account_sid,
alert_type: AlertType.INVALID_APP_PAYLOAD,
target_sid: this.call_sid,
message: err.message,
}).catch((err) => this.logger.info({err}, 'Error generating alert for invalid message'));
this.request('jambonz:error', '/error', params)
.catch((err) => this.logger.debug({err}, 'WsRequestor:_onMessage - Error sending'));
}
}

package-lock.json (generated, 199 changed lines)
@@ -14,11 +14,11 @@
"@jambonz/db-helpers": "^0.9.6",
"@jambonz/http-health-check": "^0.0.1",
"@jambonz/mw-registrar": "^0.2.7",
"@jambonz/realtimedb-helpers": "^0.8.8",
"@jambonz/realtimedb-helpers": "^0.8.13",
"@jambonz/speech-utils": "^0.2.3",
"@jambonz/stats-collector": "^0.1.10",
"@jambonz/time-series": "^0.2.13",
"@jambonz/verb-specifications": "^0.0.97",
"@jambonz/verb-specifications": "^0.0.102",
"@opentelemetry/api": "^1.8.0",
"@opentelemetry/exporter-jaeger": "^1.23.0",
"@opentelemetry/exporter-trace-otlp-http": "^0.50.0",
@@ -31,8 +31,8 @@
"bent": "^7.3.12",
"debug": "^4.3.4",
"deepcopy": "^2.1.0",
"drachtio-fsmrf": "^4.0.1",
"drachtio-srf": "^5.0.1",
"drachtio-fsmrf": "^4.0.3",
"drachtio-srf": "^5.0.2",
"express": "^4.19.2",
"express-validator": "^7.0.1",
"moment": "^2.30.1",
@@ -44,7 +44,7 @@
"short-uuid": "^5.1.0",
"sinon": "^17.0.1",
"to-snake-case": "^1.0.0",
"undici": "^6.20.0",
"undici": "^7.5.0",
"uuid-random": "^1.3.2",
"verify-aws-sns-signature": "^0.1.0",
"ws": "^8.18.0",
@@ -969,7 +969,9 @@
}
},
"node_modules/@babel/helper-string-parser": {
"version": "7.22.5",
"version": "7.25.9",
"resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz",
"integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==",
"dev": true,
"license": "MIT",
"engines": {
@@ -977,7 +979,9 @@
}
},
"node_modules/@babel/helper-validator-identifier": {
"version": "7.24.7",
"version": "7.25.9",
"resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz",
"integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==",
"license": "MIT",
"engines": {
"node": ">=6.9.0"
@@ -992,13 +996,14 @@
}
},
"node_modules/@babel/helpers": {
"version": "7.22.5",
"version": "7.26.10",
"resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.26.10.tgz",
"integrity": "sha512-UPYc3SauzZ3JGgj87GgZ89JVdC5dj0AoetR5Bw6wj4niittNyFh6+eOGonYvJ1ao6B8lEa3Q3klS7ADZ53bc5g==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/template": "^7.22.5",
"@babel/traverse": "^7.22.5",
"@babel/types": "^7.22.5"
"@babel/template": "^7.26.9",
"@babel/types": "^7.26.10"
},
"engines": {
"node": ">=6.9.0"
@@ -1075,9 +1080,14 @@
}
},
"node_modules/@babel/parser": {
"version": "7.23.0",
"version": "7.26.10",
"resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.26.10.tgz",
"integrity": "sha512-6aQR2zGE/QFi8JpDLjUZEPYOs7+mhKXm86VaKFiLP35JQwQb6bwUE+XbvkH0EptsYhbNBSUGaUBLKqxH1xSgsA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/types": "^7.26.10"
},
"bin": {
"parser": "bin/babel-parser.js"
},
@@ -1086,94 +1096,35 @@
}
},
"node_modules/@babel/template": {
"version": "7.22.15",
"version": "7.26.9",
"resolved": "https://registry.npmjs.org/@babel/template/-/template-7.26.9.tgz",
"integrity": "sha512-qyRplbeIpNZhmzOysF/wFMuP9sctmh2cFzRAZOn1YapxBsE1i9bJIY586R/WBLfLcmcBlM8ROBiQURnnNy+zfA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/code-frame": "^7.22.13",
"@babel/parser": "^7.22.15",
"@babel/types": "^7.22.15"
"@babel/code-frame": "^7.26.2",
"@babel/parser": "^7.26.9",
"@babel/types": "^7.26.9"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/template/node_modules/@babel/code-frame": {
"version": "7.22.13",
"version": "7.26.2",
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz",
"integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/highlight": "^7.22.13",
"chalk": "^2.4.2"
"@babel/helper-validator-identifier": "^7.25.9",
"js-tokens": "^4.0.0",
"picocolors": "^1.0.0"
},
"engines": {
"node": ">=6.9.0"
}
},
"node_modules/@babel/template/node_modules/ansi-styles": {
"version": "3.2.1",
"dev": true,
"license": "MIT",
"dependencies": {
"color-convert": "^1.9.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/@babel/template/node_modules/chalk": {
"version": "2.4.2",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^3.2.1",
"escape-string-regexp": "^1.0.5",
"supports-color": "^5.3.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/@babel/template/node_modules/color-convert": {
"version": "1.9.3",
"dev": true,
"license": "MIT",
"dependencies": {
"color-name": "1.1.3"
}
},
"node_modules/@babel/template/node_modules/color-name": {
"version": "1.1.3",
"dev": true,
"license": "MIT"
},
"node_modules/@babel/template/node_modules/escape-string-regexp": {
"version": "1.0.5",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.8.0"
}
},
"node_modules/@babel/template/node_modules/has-flag": {
"version": "3.0.0",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=4"
}
},
"node_modules/@babel/template/node_modules/supports-color": {
"version": "5.5.0",
"dev": true,
"license": "MIT",
"dependencies": {
"has-flag": "^3.0.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/@babel/traverse": {
"version": "7.23.2",
"dev": true,
@@ -1279,13 +1230,14 @@
}
},
"node_modules/@babel/types": {
"version": "7.23.0",
"version": "7.26.10",
"resolved": "https://registry.npmjs.org/@babel/types/-/types-7.26.10.tgz",
"integrity": "sha512-emqcG3vHrpxUKTrxcblR36dcrcoRDvKmnL/dCL6ZsHaShW80qxCAcNhzQZrpeM765VzEos+xOi4s+r4IXzTwdQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@babel/helper-string-parser": "^7.22.5",
"@babel/helper-validator-identifier": "^7.22.20",
"to-fast-properties": "^2.0.0"
"@babel/helper-string-parser": "^7.25.9",
"@babel/helper-validator-identifier": "^7.25.9"
},
"engines": {
"node": ">=6.9.0"
@@ -1504,8 +1456,9 @@
}
},
"node_modules/@jambonz/realtimedb-helpers": {
"version": "0.8.8",
"license": "MIT",
"version": "0.8.13",
"resolved": "https://registry.npmjs.org/@jambonz/realtimedb-helpers/-/realtimedb-helpers-0.8.13.tgz",
"integrity": "sha512-/7c8kCu/dMSk8CWTYdEv4xU/7+vmVL30VCjtHPRAXVBRIVejAPDf1xK5n0rXwG8Fda1Oa+0TFsymkiRpew8P6w==",
"dependencies": {
"debug": "^4.3.4",
"ioredis": "^5.3.2"
@@ -1533,6 +1486,15 @@
"undici": "^6.4.0"
}
},
"node_modules/@jambonz/speech-utils/node_modules/undici": {
"version": "6.21.2",
"resolved": "https://registry.npmjs.org/undici/-/undici-6.21.2.tgz",
"integrity": "sha512-uROZWze0R0itiAKVPsYhFov9LxrPMHLMEQFszeI2gCN6bnIIZ8twzBCJcN2LJrBBLfrP0t1FW0g+JmKVl8Vk1g==",
"license": "MIT",
"engines": {
"node": ">=18.17"
}
},
"node_modules/@jambonz/stats-collector": {
"version": "0.1.10",
"license": "MIT",
@@ -1550,10 +1512,9 @@
}
},
"node_modules/@jambonz/verb-specifications": {
"version": "0.0.97",
"resolved": "https://registry.npmjs.org/@jambonz/verb-specifications/-/verb-specifications-0.0.97.tgz",
"integrity": "sha512-CncykmCwc8YZcDYwFDq88n6IAyoQNae3lSF2BI5etoBKMujzxOty227lq6zgeXun9UYYDy/CONk5MiLO29kcBg==",
"license": "MIT",
"version": "0.0.102",
"resolved": "https://registry.npmjs.org/@jambonz/verb-specifications/-/verb-specifications-0.0.102.tgz",
"integrity": "sha512-hpxzrzmSv3uRjjI503lFsDddVhfI5oFHWfGaym2RCUuKhv+hX5UKjW1joU1MRXyG2ZX4Oh5BRMAcjEI9lTxoLA==",
"dependencies": {
"debug": "^4.3.4",
"pino": "^8.8.0"
@@ -2862,7 +2823,9 @@
}
},
"node_modules/axios": {
"version": "1.7.7",
"version": "1.8.3",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.8.3.tgz",
"integrity": "sha512-iP4DebzoNlP/YN2dpwCgb8zoCmhtkajzS48JvwmkSkXvPI3DHc7m+XYL5tGnSlJtR6nImXZmdCuN5aP8dh1d8A==",
"license": "MIT",
"dependencies": {
"follow-redirects": "^1.15.6",
@@ -3120,7 +3083,8 @@
},
"node_modules/camel-case": {
"version": "4.1.2",
"license": "MIT",
"resolved": "https://registry.npmjs.org/camel-case/-/camel-case-4.1.2.tgz",
"integrity": "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==",
"dependencies": {
"pascal-case": "^3.1.2",
"tslib": "^2.0.3"
@@ -3567,7 +3531,8 @@
},
"node_modules/dot-case": {
"version": "3.0.4",
"license": "MIT",
"resolved": "https://registry.npmjs.org/dot-case/-/dot-case-3.0.4.tgz",
"integrity": "sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==",
"dependencies": {
"no-case": "^3.0.4",
"tslib": "^2.0.3"
@@ -3595,8 +3560,9 @@
}
},
"node_modules/drachtio-fsmrf": {
"version": "4.0.1",
"license": "MIT",
"version": "4.0.3",
"resolved": "https://registry.npmjs.org/drachtio-fsmrf/-/drachtio-fsmrf-4.0.3.tgz",
"integrity": "sha512-5j8LqPMHJEgK56gI6MTVbasxCS4cUjo9UdPO8P9qJGJfLG/k/LI6QQAzPrFUcGlpOQ3WYZNkOp/drsKdttlk2Q==",
"dependencies": {
"camel-case": "^4.1.2",
"debug": "^2.6.9",
@@ -3625,6 +3591,8 @@
},
"node_modules/drachtio-modesl": {
"version": "1.2.9",
"resolved": "https://registry.npmjs.org/drachtio-modesl/-/drachtio-modesl-1.2.9.tgz",
"integrity": "sha512-Ob/N0ntwd/Qu6IWjRbUr17DSpw9dTpPNMwmi6ZTh8ryGRE29zlx6U446y/VYpN8ZN9rEi0OgTyAmUt3RjLoRyQ==",
"license": "MPL-2.0",
"dependencies": {
"eventemitter2": "^6.4.4",
@@ -3637,6 +3605,8 @@
},
"node_modules/drachtio-modesl/node_modules/xml2js": {
"version": "0.4.23",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.23.tgz",
"integrity": "sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==",
"license": "MIT",
"dependencies": {
"sax": ">=0.6.0",
@@ -3647,7 +3617,9 @@
}
},
"node_modules/drachtio-srf": {
"version": "5.0.1",
"version": "5.0.3",
"resolved": "https://registry.npmjs.org/drachtio-srf/-/drachtio-srf-5.0.3.tgz",
"integrity": "sha512-gOeOmU3LsrDXAw8a9Vd+od6cJXyqqV5E+2LsCD2N1SjoJybJS72PHTN+GfKtk3fRhFYpww2325CO4pr/DK21cA==",
"license": "MIT",
"dependencies": {
"debug": "^3.2.7",
@@ -4185,6 +4157,8 @@
},
"node_modules/eventemitter2": {
"version": "6.4.9",
"resolved": "https://registry.npmjs.org/eventemitter2/-/eventemitter2-6.4.9.tgz",
"integrity": "sha512-JEPTiaOt9f04oa6NOkc4aH+nVp5I3wEjpHbIPqfgCdD5v5bUzy7xQqwcVO2aDQgOWhI28da57HksMrzK9HlRxg==",
"license": "MIT"
},
"node_modules/events": {
@@ -6025,7 +5999,8 @@
},
"node_modules/lower-case": {
"version": "2.0.2",
"license": "MIT",
"resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz",
"integrity": "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==",
"dependencies": {
"tslib": "^2.0.3"
}
@@ -6359,7 +6334,8 @@
},
"node_modules/no-case": {
"version": "3.0.4",
"license": "MIT",
"resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz",
"integrity": "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==",
"dependencies": {
"lower-case": "^2.0.2",
"tslib": "^2.0.3"
@@ -6771,7 +6747,8 @@
},
"node_modules/pascal-case": {
"version": "3.1.2",
"license": "MIT",
"resolved": "https://registry.npmjs.org/pascal-case/-/pascal-case-3.1.2.tgz",
"integrity": "sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==",
"dependencies": {
"no-case": "^3.0.4",
"tslib": "^2.0.3"
@@ -7575,7 +7552,8 @@
},
"node_modules/snake-case": {
"version": "3.0.4",
"license": "MIT",
"resolved": "https://registry.npmjs.org/snake-case/-/snake-case-3.0.4.tgz",
"integrity": "sha512-LAOh4z89bGQvl9pFfNF8V146i7o7/CqFPbqzYgP+yYzDIDeS9HaNFtXABamRW+AQzEVODcvE79ljJ+8a9YSdMg==",
"dependencies": {
"dot-case": "^3.0.4",
"tslib": "^2.0.3"
@@ -7999,14 +7977,6 @@
"node": ">=0.6"
}
},
"node_modules/to-fast-properties": {
"version": "2.0.0",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=4"
}
},
"node_modules/to-no-case": {
"version": "1.0.2",
"license": "MIT"
@@ -8213,10 +8183,11 @@
}
},
"node_modules/undici": {
"version": "6.20.0",
"license": "MIT",
"version": "7.5.0",
"resolved": "https://registry.npmjs.org/undici/-/undici-7.5.0.tgz",
"integrity": "sha512-NFQG741e8mJ0fLQk90xKxFdaSM7z4+IQpAgsFI36bCDY9Z2+aXXZjVy2uUksMouWfMI9+w5ejOq5zYYTBCQJDQ==",
"engines": {
"node": ">=18.17"
"node": ">=20.18.1"
}
},
"node_modules/undici-types": {

@@ -30,11 +30,11 @@
"@jambonz/db-helpers": "^0.9.6",
"@jambonz/http-health-check": "^0.0.1",
"@jambonz/mw-registrar": "^0.2.7",
"@jambonz/realtimedb-helpers": "^0.8.8",
"@jambonz/realtimedb-helpers": "^0.8.13",
"@jambonz/speech-utils": "^0.2.3",
"@jambonz/stats-collector": "^0.1.10",
"@jambonz/verb-specifications": "^0.0.97",
"@jambonz/time-series": "^0.2.13",
"@jambonz/verb-specifications": "^0.0.102",
"@opentelemetry/api": "^1.8.0",
"@opentelemetry/exporter-jaeger": "^1.23.0",
"@opentelemetry/exporter-trace-otlp-http": "^0.50.0",
@@ -47,8 +47,8 @@
"bent": "^7.3.12",
"debug": "^4.3.4",
"deepcopy": "^2.1.0",
"drachtio-fsmrf": "^4.0.1",
"drachtio-srf": "^5.0.1",
"drachtio-fsmrf": "^4.0.3",
"drachtio-srf": "^5.0.2",
"express": "^4.19.2",
"express-validator": "^7.0.1",
"moment": "^2.30.1",
@@ -60,7 +60,7 @@
"short-uuid": "^5.1.0",
"sinon": "^17.0.1",
"to-snake-case": "^1.0.0",
"undici": "^6.20.0",
"undici": "^7.5.0",
"uuid-random": "^1.3.2",
"verify-aws-sns-signature": "^0.1.0",
"ws": "^8.18.0",

@@ -3,9 +3,8 @@ const { sippUac } = require('./sipp')('test_fs');
const bent = require('bent');
const getJSON = bent('json')
const clearModule = require('clear-module');
const {provisionCallHook} = require('./utils')
const sleepFor = (ms) => new Promise((r) => setTimeout(r, ms));
const {provisionCallHook} = require('./utils');
const { sleepFor } = require('../lib/utils/helpers');
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);

@@ -3,6 +3,7 @@ const { sippUac } = require('./sipp')('test_fs');
const clearModule = require('clear-module');
const {provisionCallHook, provisionActionHook, provisionAnyHook} = require('./utils');
const bent = require('bent');
const { sleepFor } = require('../lib/utils/helpers');
const getJSON = bent('json');
process.on('unhandledRejection', (reason, p) => {
@@ -17,8 +18,6 @@ function connect(connectable) {
});
}
const sleepFor = (ms) => new Promise((resolve) => setTimeout(() => resolve(), ms));
test('\'enqueue-dequeue\' tests', async(t) => {
clearModule.all();

@@ -3,10 +3,9 @@ const { sippUac } = require('./sipp')('test_fs');
const clearModule = require('clear-module');
const {provisionCallHook, provisionCustomHook, provisionActionHook} = require('./utils')
const bent = require('bent');
const { sleepFor } = require('../lib/utils/helpers');
const getJSON = bent('json')
const sleepFor = async(ms) => new Promise(resolve => setTimeout(resolve, ms));
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
});