Compare commits

...

35 Commits

Author SHA1 Message Date
Quan HL
8ad047b605 fix 2023-08-18 14:06:46 +07:00
Quan HL
b6c307db70 fix 2023-08-18 13:38:23 +07:00
Quan HL
aa161290c7 fix 2023-08-18 12:17:30 +07:00
Quan HL
4322159a41 fix gather verb 2023-08-18 11:24:14 +07:00
Quan HL
848aa43dcb fix gather verb 2023-08-18 11:18:01 +07:00
Quan HL
18d7ea3e37 fix transcribe 2023-08-18 10:38:27 +07:00
Quan HL
09961f564a fix transcribe 2023-08-18 10:24:41 +07:00
Quan HL
e9f2837370 fix gather 2023-08-18 09:57:02 +07:00
Quan HL
a97d99650c wip 2023-08-17 17:04:38 +07:00
Quan HL
541cb1458d wip 2023-08-17 16:58:46 +07:00
Quan HL
5754c386d3 feat fallback speech 2023-08-17 14:28:50 +07:00
Quan HL
b1c0478051 feat fallback speech 2023-08-17 14:25:26 +07:00
Hoan Luu Huu
f8c5abe9e9 feat: multi speech credential diff labels but same vendor (#426)
* feat: multi speech credential diff labels but same vendor

* update sql

* fix

* fix

* fix jslint

* fix review comment

* update verb spec version
2023-08-15 08:57:49 -04:00
Dave Horton
ad722a55ee generate trace id before outdial so we can include it in custom header (#418)
* generate trace id before outdial so we can include it in custom header

* logging

* logging

* fix #420 race condition on rest outdial when ws is used

* revert unnecessary logging change
2023-08-08 13:00:34 -04:00
Hoan Luu Huu
82939214a2 update stats-collector version (#421) 2023-08-07 21:22:10 -04:00
Dave Horton
043a171f41 remove log message 2023-08-07 15:22:03 -04:00
Dave Horton
c8e9b34b53 fix typo that caused record to fail on rest calls 2023-08-07 14:46:51 -04:00
Hoan Luu Huu
d7dcdb1d0c Continuos ASR for transcribe (#398)
* asrTimeout

* fix jslint

* change log

* fix interrim
2023-08-03 09:49:44 -04:00
Dave Horton
fbd0782258 #388 - support custom speech vendor in transcribe verb (#414)
Co-authored-by: Hoan Luu Huu <110280845+xquanluu@users.noreply.github.com>
2023-08-02 19:06:31 -04:00
Fábio Gomes
38f9329b12 When recordings are enabled, disable bidirectional audio on jambonz-session-record (#415) 2023-08-02 14:21:59 -04:00
Dave Horton
d4bfdf0916 #412 - dont delay sending call status when stopping background listen (#413) 2023-08-02 12:50:13 -04:00
Dave Horton
9203deef0f fix bug in prev commit 2023-08-02 10:27:50 -04:00
Dave Horton
48b182c891 Fix/rest outdial failure session hangs (#411)
* fix #410

* on rest outdial failure, if remote end closed gracefully don't wait for a reconnection
2023-08-01 12:59:30 -04:00
Dave Horton
e8e987cb9d Fix/snake case customer data issue 406 (#409)
* revert recent change on silence trimming

* fix issue with incorrectly snake-casing customer data (#406)
2023-07-27 22:31:43 -04:00
Dave Horton
38ea9e7411 update to speech-utils@0.0.18 which ignores trimming of silence on azure ssml audio 2023-07-25 07:51:46 -04:00
Hoan Luu Huu
7b11a56a53 feat siprec custom header (#400)
* feat siprec custom header

* wip

* update verb specification

* add newline to info siprec body

* add newline to info siprec body
2023-07-20 09:10:41 -04:00
Dave Horton
66305b5aea feature: optionally trim silence from azure tts (#399) 2023-07-19 10:36:24 -04:00
Dave Horton
6793bbf330 fix exception that appears in logs if session ends before last call status update 2023-07-18 13:20:53 -04:00
Hoan Luu Huu
d8543f73f2 execute status callback async (#394)
* execute status callback async

* fix review comment

* revert fix review comment
2023-07-18 12:40:57 -04:00
Hoan Luu Huu
e1dad569dc Fix/background listen tag (#391)
* fix background listen send customerData to api server

* test listen

* fix review comment
2023-07-11 16:03:32 +01:00
Hoan Luu Huu
643bee48c5 feat multi srs (#381) 2023-07-05 08:16:59 +01:00
Dave Horton
487bfd90d9 0.8.4 2023-06-28 09:23:40 +01:00
Hoan Luu Huu
810f6eb695 fix aws-sdk v3 (#387)
* fix aws-sdk v3

* fix jslint

* fix jslint

* fix aws response parser
2023-06-28 09:20:43 +01:00
Hoan Luu Huu
62bc6b4bac feat: add fs service url to sbc ping option (#383)
* feat multi srs

* add fs service URL to SBC ping option
2023-06-23 11:13:08 +01:00
two56
91fe3ceb06 Clear conference details in both Jambonz and FreeSWITCH (#350)
Co-authored-by: Matt Preskett <matt.preskett@netcall.com>
2023-06-14 15:35:04 -04:00
24 changed files with 5248 additions and 1633 deletions

app.js
View File

@@ -120,10 +120,15 @@ function handle(signal) {
srf.locals.disabled = true;
logger.info(`got signal ${signal}`);
const setName = `${(JAMBONES_CLUSTER_ID || 'default')}:active-fs`;
const fsServiceUrlSetName = `${(JAMBONES_CLUSTER_ID || 'default')}:fs-service-url`;
if (setName && srf.locals.localSipAddress) {
logger.info(`got signal ${signal}, removing ${srf.locals.localSipAddress} from set ${setName}`);
removeFromSet(setName, srf.locals.localSipAddress);
}
if (fsServiceUrlSetName && srf.locals.serviceUrl) {
logger.info(`got signal ${signal}, removing ${srf.locals.serviceUrl} from set ${fsServiceUrlSetName}`);
removeFromSet(fsServiceUrlSetName, srf.locals.serviceUrl);
}
removeFromSet(FS_UUID_SET_NAME, srf.locals.fsUUID);
if (K8S) {
srf.locals.lifecycleEmitter.operationalState = LifeCycleEvents.ScaleIn;
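
A minimal, self-contained sketch of the deregistration pattern the hunk above adds: on shutdown the feature server removes its SIP address and its HTTP service URL from two cluster-scoped Redis sets. The sketch talks to Redis directly through ioredis for illustration only; the project itself goes through its realtime-db helpers (removeFromSet), so treat the connection details here as assumptions.

const Redis = require('ioredis');

// Remove this instance from the cluster-scoped sets on shutdown,
// mirroring the handle(signal) logic in app.js above.
async function deregister({clusterId, localSipAddress, serviceUrl}) {
  const redis = new Redis({
    host: process.env.JAMBONES_REDIS_HOST || '127.0.0.1',
    port: Number(process.env.JAMBONES_REDIS_PORT || 6379)
  });
  const prefix = clusterId || 'default';
  try {
    // active-fs holds SIP addresses; fs-service-url holds the per-instance HTTP service URLs
    if (localSipAddress) await redis.srem(`${prefix}:active-fs`, localSipAddress);
    if (serviceUrl) await redis.srem(`${prefix}:fs-service-url`, serviceUrl);
  } finally {
    redis.disconnect();
  }
}

// usage (values are placeholders):
// await deregister({clusterId: process.env.JAMBONES_CLUSTER_ID, localSipAddress: '10.0.0.5:5060', serviceUrl: 'http://10.0.0.5:3000'});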

View File

@@ -47,6 +47,11 @@ router.post('/', async(req, res) => {
const application = req.body.application_sid ? await lookupAppBySid(req.body.application_sid) : null;
const record_all_calls = account.record_all_calls || (application && application.record_all_calls);
const recordOutputFormat = account.record_format || 'mp3';
const rootSpan = new RootSpan('rest-call', {
callSid,
accountSid,
...(req.body?.application_sid && {'X-Application-Sid': req.body.application_sid})
});
opts.headers = {
...opts.headers,
@@ -54,6 +59,7 @@ router.post('/', async(req, res) => {
'X-Jambonz-FS-UUID': srf.locals.fsUUID,
'X-Call-Sid': callSid,
'X-Account-Sid': accountSid,
'X-Trace-ID': rootSpan.traceId,
...(req.body?.application_sid && {'X-Application-Sid': req.body.application_sid}),
...(restDial.fromHost && {'X-Preferred-From-Host': restDial.fromHost}),
...(record_all_calls && {'X-Record-All-Calls': recordOutputFormat})
@@ -194,7 +200,6 @@ router.post('/', async(req, res) => {
/* ok our outbound INVITE is in flight */
const tasks = [restDial];
const rootSpan = new RootSpan('rest-call', inviteReq);
sipLogger = logger.child({
callSid,
callId: inviteReq.get('Call-ID'),
@@ -258,6 +263,7 @@ router.post('/', async(req, res) => {
sipStatus: err.status,
sipReason: err.reason
});
cs.callGone = true;
}
else {
if (cs) cs.emit('callStatusChange', {
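
The point of the change above is to construct the RootSpan, and therefore its traceId, before the outbound INVITE is sent, so the same id can travel in the X-Trace-ID header. A simplified stand-in for that idea follows; crypto.randomUUID is only a placeholder for rootSpan.traceId, and the sid values in the usage are hypothetical.

const crypto = require('crypto');

// Build the custom headers for a REST-initiated outdial, including the
// pre-generated trace id; X-Application-Sid is added only when supplied.
function buildOutdialHeaders({callSid, accountSid, applicationSid, traceId}) {
  return {
    'X-Call-Sid': callSid,
    'X-Account-Sid': accountSid,
    'X-Trace-ID': traceId,
    ...(applicationSid && {'X-Application-Sid': applicationSid})
  };
}

const traceId = crypto.randomUUID();   // stand-in for rootSpan.traceId
console.log(buildOutdialHeaders({
  callSid: 'hypothetical-call-sid',
  accountSid: 'hypothetical-account-sid',
  traceId
}));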

View File

@@ -179,6 +179,30 @@ class CallSession extends Emitter {
set speechSynthesisVendor(vendor) {
this.application.speech_synthesis_vendor = vendor;
}
get fallbackSpeechSynthesisVendor() {
return this.application.use_for_fallback_speech ? this.application.fallback_speech_synthesis_vendor : null;
}
set fallbackSpeechSynthesisVendor(vendor) {
this.application.fallback_speech_synthesis_vendor = vendor;
}
/**
* default label to use for speech synthesis if not provided in the app
*/
get speechSynthesisLabel() {
return this.application.speech_synthesis_label;
}
set speechSynthesisLabel(label) {
this.application.speech_synthesis_label = label;
}
get fallbackSpeechSynthesisLabel() {
return this.application.fallback_speech_synthesis_label;
}
set fallbackSpeechSynthesisLabel(label) {
this.application.fallback_speech_synthesis_label = label;
}
/**
* default voice to use for speech synthesis if not provided in the app
*/
@@ -188,6 +212,13 @@ class CallSession extends Emitter {
set speechSynthesisVoice(voice) {
this.application.speech_synthesis_voice = voice;
}
get fallbackSpeechSynthesisVoice() {
return this.application.fallback_speech_synthesis_voice;
}
set fallbackSpeechSynthesisVoice(voice) {
this.application.fallback_speech_synthesis_voice = voice;
}
/**
* default language to use for speech synthesis if not provided in the app
*/
@@ -198,6 +229,13 @@ class CallSession extends Emitter {
this.application.speech_synthesis_language = language;
}
get fallbackSpeechSynthesisLanguage() {
return this.application.fallback_speech_synthesis_language;
}
set fallbackSpeechSynthesisLanguage(language) {
this.application.fallback_speech_synthesis_language = language;
}
/**
* default vendor to use for speech recognition if not provided in the app
*/
@@ -207,6 +245,29 @@ class CallSession extends Emitter {
set speechRecognizerVendor(vendor) {
this.application.speech_recognizer_vendor = vendor;
}
get fallbackSpeechRecognizerVendor() {
return this.application.fallback_speech_recognizer_vendor;
}
set fallbackSpeechRecognizerVendor(vendor) {
this.application.fallback_speech_recognizer_vendor = vendor;
}
/**
* default vendor to use for speech recognition if not provided in the app
*/
get speechRecognizerLabel() {
return this.application.speech_recognizer_label;
}
set speechRecognizerLabel(label) {
this.application.speech_recognizer_label = label;
}
get fallbackSpeechRecognizerLabel() {
return this.application.fallback_speech_recognizer_label;
}
set fallbackSpeechRecognizerLabel(label) {
this.application.fallback_speech_recognizer_label = label;
}
/**
* default language to use for speech recognition if not provided in the app
*/
@@ -217,6 +278,13 @@ class CallSession extends Emitter {
this.application.speech_recognizer_language = language;
}
get fallbackSpeechRecognizerLanguage() {
return this.application.fallback_speech_recognizer_language;
}
set fallbackSpeechRecognizerLanguage(language) {
this.application.fallback_speech_recognizer_language = language;
}
/**
* indicates whether the call is currently in progress
*/
@@ -423,7 +491,10 @@ class CallSession extends Emitter {
'X-Call-Sid': this.callSid,
'X-Account-Sid': this.accountSid,
'X-Application-Sid': this.applicationSid,
}
...(this.recordOptions.headers && {'Content-Type': 'application/json'})
},
// Siprec client is initiated from startCallRecording, so we just need to pass custom headers in startRecording
...(this.recordOptions.headers && {body: JSON.stringify(this.recordOptions.headers) + '\n'})
});
if (res.status === 200) {
this._recordState = RecordState.RecordingOn;
@@ -444,7 +515,7 @@ class CallSession extends Emitter {
const res = await this.dlg.request({
method: 'INFO',
headers: {
'X-Reason': 'stopCallRecording',
'X-Reason': 'stopCallRecording'
}
});
if (res.status === 200) {
@@ -466,7 +537,7 @@ class CallSession extends Emitter {
const res = await this.dlg.request({
method: 'INFO',
headers: {
'X-Reason': 'pauseCallRecording',
'X-Reason': 'pauseCallRecording'
}
});
if (res.status === 200) {
@@ -488,7 +559,7 @@ class CallSession extends Emitter {
const res = await this.dlg.request({
method: 'INFO',
headers: {
'X-Reason': 'resumeCallRecording',
'X-Reason': 'resumeCallRecording'
}
});
if (res.status === 200) {
@@ -513,6 +584,8 @@ class CallSession extends Emitter {
const t = normalizeJambones(this.logger, [opts]);
this.backgroundListenTask = makeTask(this.logger, t[0]);
this.backgroundListenTask.bugname = bugname;
// Remove customer data that does not need to be sent to the api server.
this.backgroundListenTask.ignoreCustomerData = true;
const resources = await this._evaluatePreconditions(this.backgroundListenTask);
const {span, ctx} = this.rootSpan.startChildSpan(`background-listen:${this.backgroundListenTask.summary}`);
this.backgroundListenTask.span = span;
@@ -635,14 +708,17 @@ class CallSession extends Emitter {
* Check for speech credentials for the specified vendor
* @param {*} vendor - google or aws
*/
getSpeechCredentials(vendor, type) {
getSpeechCredentials(vendor, type, label = null) {
const {writeAlerts, AlertType} = this.srf.locals;
if (this.accountInfo.speech && this.accountInfo.speech.length > 0) {
const credential = this.accountInfo.speech.find((s) => s.vendor === vendor);
const credential = this.accountInfo.speech.find((s) => s.vendor === vendor &&
((label && s.label === label) || label === null));
if (credential && (
(type === 'tts' && credential.use_for_tts) ||
(type === 'stt' && credential.use_for_stt)
)) {
this.logger.info(`Speech credential vendor: ${credential.vendor}
${credential.label ? `, label: ${credential.label}` : ''} is chosen`);
if ('google' === vendor) {
try {
const cred = JSON.parse(credential.service_key.replace(/\n/g, '\\n'));
@@ -791,23 +867,15 @@ class CallSession extends Emitter {
}
}
if (0 === this.tasks.length && this.requestor instanceof WsRequestor && !this.callGone) {
//let span;
if (0 === this.tasks.length &&
this.requestor instanceof WsRequestor &&
!this.requestor.closedGracefully &&
!this.callGone
) {
try {
//const {span} = this.rootSpan.startChildSpan('waiting for commands');
//const {reason, queue, command} = await this._awaitCommandsOrHangup();
/*
span.setAttributes({
'completion.reason': reason,
'async.request.queue': queue,
'async.request.command': command
});
span.end();
*/
await this._awaitCommandsOrHangup();
if (this.callGone) break;
} catch (err) {
//span.end();
this.logger.info(err, 'CallSession:exec - error waiting for new commands');
break;
}
@@ -1764,7 +1832,8 @@ class CallSession extends Emitter {
// nice, call is in progress, good time to enable record
await this.enableRecordAllCall();
} else if (callStatus == CallStatus.Completed && this.isBackGroundListen) {
await this.stopBackgroundListen();
this.stopBackgroundListen().catch((err) => this.logger.error(
{err}, 'CallSession:_notifyCallStatusChange - error stopping background listen'));
}
/* race condition: we hang up at the same time as the caller */
@@ -1779,6 +1848,15 @@ class CallSession extends Emitter {
this.callInfo.updateCallStatus(callStatus, sipStatus, sipReason);
if (typeof duration === 'number') this.callInfo.duration = duration;
this.executeStatusCallback(callStatus, sipStatus);
// update calls db
//this.logger.debug(`updating redis with ${JSON.stringify(this.callInfo)}`);
this.updateCallStatus(Object.assign({}, this.callInfo.toJSON()), this.serviceUrl)
.catch((err) => this.logger.error(err, 'redis error'));
}
async executeStatusCallback(callStatus, sipStatus) {
const {span} = this.rootSpan.startChildSpan(`call-status:${this.callInfo.callStatus}`);
span.setAttributes(this.callInfo.toJSON());
try {
@@ -1790,11 +1868,6 @@ class CallSession extends Emitter {
span.end();
this.logger.info(err, `CallSession:_notifyCallStatusChange error sending ${callStatus} ${sipStatus}`);
}
// update calls db
//this.logger.debug(`updating redis with ${JSON.stringify(this.callInfo)}`);
this.updateCallStatus(Object.assign({}, this.callInfo.toJSON()), this.serviceUrl)
.catch((err) => this.logger.error(err, 'redis error'));
}
async enableRecordAllCall() {
@@ -1805,6 +1878,7 @@ class CallSession extends Emitter {
username: JAMBONZ_RECORD_WS_USERNAME,
password: JAMBONZ_RECORD_WS_PASSWORD
},
disableBidirectionalAudio: true,
mixType : 'stereo',
passDtmf: true
};
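
The credential lookup above is the heart of the multi-label feature (#426): when a label is supplied, both vendor and label must match; when no label is supplied, any credential for that vendor will do, and it must still be enabled for the requested use. A standalone sketch of that selection rule, with hypothetical data:

// Pick a speech credential by vendor, optional label, and use (tts or stt).
function selectSpeechCredential(speechCredentials, vendor, type, label = null) {
  const credential = (speechCredentials || []).find((s) =>
    s.vendor === vendor && ((label && s.label === label) || label === null));
  if (!credential) return null;
  const enabled = (type === 'tts' && credential.use_for_tts) ||
    (type === 'stt' && credential.use_for_stt);
  return enabled ? credential : null;
}

const creds = [
  {vendor: 'google', label: 'eu', use_for_tts: true, use_for_stt: true},
  {vendor: 'google', label: 'us', use_for_tts: true, use_for_stt: false}
];
console.log(selectSpeechCredential(creds, 'google', 'stt', 'eu'));  // the 'eu' credential
console.log(selectSpeechCredential(creds, 'google', 'stt', 'us'));  // null: stt not enabled on that credential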

View File

@@ -49,7 +49,6 @@ class RestCallSession extends CallSession {
*/
_callerHungup() {
if (this.restDialTask) {
this.logger.info('RestCallSession: releasing AMD');
this.restDialTask.turnOffAmd();
}
this.callInfo.callTerminationBy = 'caller';

View File

@@ -114,7 +114,12 @@ class Conference extends Task {
}
this.emitter.emit('kill');
await this._doFinalMemberCheck(cs);
if (this.ep && this.ep.connected) this.ep.conn.removeAllListeners('esl::event::CUSTOM::*') ;
if (this.ep && this.ep.connected) {
this.ep.conn.removeAllListeners('esl::event::CUSTOM::*');
this.ep.api(`conference ${this.confName} kick ${this.memberId}`)
.catch((err) => this.logger.info({err}, 'Error kicking participant'));
}
cs.clearConferenceDetails();
this.notifyTaskDone();
}
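
For orientation, the conference kill path above now does three things in order: stop listening for conference ESL events, ask FreeSWITCH to kick the member, and clear the conference details on the call session. A hedged sketch of that sequence; only ep.api() returning a promise is taken from the diff, the wrapper itself is illustrative.

async function teardownConference({ep, cs, logger, confName, memberId}) {
  if (ep && ep.connected) {
    // stop receiving conference events for this endpoint
    ep.conn.removeAllListeners('esl::event::CUSTOM::*');
    try {
      await ep.api(`conference ${confName} kick ${memberId}`);
    } catch (err) {
      logger.info({err}, 'Error kicking participant');
    }
  }
  cs.clearConferenceDetails();
}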

View File

@@ -105,21 +105,51 @@ class TaskConfig extends Task {
cs.speechSynthesisVendor = this.synthesizer.vendor !== 'default'
? this.synthesizer.vendor
: cs.speechSynthesisVendor;
cs.speechSynthesisLabel = this.synthesizer.label !== 'default'
? this.synthesizer.label
: cs.speechSynthesisLabel;
cs.speechSynthesisLanguage = this.synthesizer.language !== 'default'
? this.synthesizer.language
: cs.speechSynthesisLanguage;
cs.speechSynthesisVoice = this.synthesizer.voice !== 'default'
? this.synthesizer.voice
: cs.speechSynthesisVoice;
// fallback vendor
cs.fallbackSpeechSynthesisVendor = this.synthesizer.fallbackVendor !== 'default'
? this.synthesizer.fallbackVendor
: cs.fallbackSpeechSynthesisVendor;
cs.fallbackSpeechSynthesisLabel = this.synthesizer.fallbackLabel !== 'default'
? this.synthesizer.fallbackLabel
: cs.fallbackSpeechSynthesisLabel;
cs.fallbackSpeechSynthesisLanguage = this.synthesizer.fallbackLanguage !== 'default'
? this.synthesizer.fallbackLanguage
: cs.fallbackSpeechSynthesisLanguage;
cs.fallbackSpeechSynthesisVoice = this.synthesizer.fallbackVoice !== 'default'
? this.synthesizer.fallbackVoice
: cs.fallbackSpeechSynthesisVoice;
this.logger.info({synthesizer: this.synthesizer}, 'Config: updated synthesizer');
}
if (this.hasRecognizer) {
cs.speechRecognizerVendor = this.recognizer.vendor !== 'default'
? this.recognizer.vendor
: cs.speechRecognizerVendor;
cs.speechRecognizerLabel = this.recognizer.label !== 'default'
? this.recognizer.label
: cs.speechRecognizerLabel;
cs.speechRecognizerLanguage = this.recognizer.language !== 'default'
? this.recognizer.language
: cs.speechRecognizerLanguage;
//fallback
cs.fallbackSpeechRecognizerVendor = this.recognizer.fallbackVendor !== 'default'
? this.recognizer.fallbackVendor
: cs.fallbackSpeechRecognizerVendor;
cs.fallbackSpeechRecognizerLabel = this.recognizer.fallbackLabel !== 'default'
? this.recognizer.fallbackLabel
: cs.fallbackSpeechRecognizerLabel;
cs.fallbackSpeechRecognizerLanguage = this.recognizer.fallbackLanguage !== 'default'
? this.recognizer.fallbackLanguage
: cs.fallbackSpeechRecognizerLanguage;
cs.isContinuousAsr = typeof this.recognizer.asrTimeout === 'number' ? true : false;
if (cs.isContinuousAsr) {
cs.asrTimeout = this.recognizer.asrTimeout;
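
The config verb above applies one rule over and over: a synthesizer or recognizer property on the verb replaces the session value unless it is the literal string 'default'. A tiny helper capturing that rule (illustrative only; the project spells the ternary out inline for each property):

// Return the verb-level value unless it is the 'default' sentinel,
// in which case keep the current session value.
const resolveSetting = (verbValue, sessionValue) =>
  verbValue !== 'default' ? verbValue : sessionValue;

// usage with hypothetical values:
const session = {speechSynthesisVendor: 'google', fallbackSpeechSynthesisVendor: 'aws'};
const synthesizer = {vendor: 'default', fallbackVendor: 'microsoft'};
console.log(resolveSetting(synthesizer.vendor, session.speechSynthesisVendor));                 // 'google'
console.log(resolveSetting(synthesizer.fallbackVendor, session.fallbackSpeechSynthesisVendor)); // 'microsoft'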

View File

@@ -58,6 +58,13 @@ class Dialogflow extends Task {
this.vendor = this.data.tts.vendor || 'default';
this.language = this.data.tts.language || 'default';
this.voice = this.data.tts.voice || 'default';
this.speechSynthesisLabel = this.data.tts.label || null;
// fallback tts
this.fallbackVendor = this.data.tts.fallbackVendor || 'default';
this.fallbackLanguage = this.data.tts.fallbackLanguage || 'default';
this.fallbackVoice = this.data.tts.fallbackVoice || 'default';
this.fallbackLabel = this.data.tts.fallbackLabel || 'default';
}
this.bargein = this.data.bargein;
}
@@ -118,8 +125,15 @@ class Dialogflow extends Task {
this.vendor = cs.speechSynthesisVendor;
this.language = cs.speechSynthesisLanguage;
this.voice = cs.speechSynthesisVoice;
this.speechSynthesisLabel = cs.speechSynthesisLabel;
}
this.ttsCredentials = cs.getSpeechCredentials(this.vendor, 'tts');
if (this.fallbackVendor === 'default') {
this.fallbackVendor = cs.fallbackSpeechSynthesisVendor;
this.fallbackLanguage = cs.fallbackSpeechSynthesisLanguage;
this.fallbackVoice = cs.fallbackSpeechSynthesisVoice;
this.fallbackLabel = cs.fallbackSpeechSynthesisLabel;
}
this.ttsCredentials = cs.getSpeechCredentials(this.vendor, 'tts', this.speechSynthesisLabel);
this.ep.addCustomEventListener('dialogflow::intent', this._onIntent.bind(this, ep, cs));
this.ep.addCustomEventListener('dialogflow::transcription', this._onTranscription.bind(this, ep, cs));
@@ -221,17 +235,7 @@ class Dialogflow extends Task {
}
try {
const obj = {
account_sid: cs.accountSid,
text: intent.fulfillmentText,
vendor: this.vendor,
language: this.language,
voice: this.voice,
salt: cs.callSid,
credentials: this.ttsCredentials
};
this.logger.debug({obj}, 'Dialogflow:_onIntent - playing message via tts');
const {filePath, servedFromCache} = await synthAudio(stats, obj);
const {filePath, servedFromCache} = await this.fallbackSynthAudio(cs, intent, stats, synthAudio);
if (filePath) cs.trackTmpFile(filePath);
if (!this.ttsCredentials && !servedFromCache) cs.billForTts(intent.fulfillmentText.length);
@@ -277,6 +281,46 @@ class Dialogflow extends Task {
}
}
async fallbackSynthAudio(cs, intent, stats, synthAudio) {
try {
const obj = {
account_sid: cs.accountSid,
text: intent.fulfillmentText,
vendor: this.vendor,
language: this.language,
voice: this.voice,
salt: cs.callSid,
credentials: this.ttsCredentials
};
this.logger.debug({obj}, 'Dialogflow:_onIntent - playing message via tts');
return await synthAudio(stats, obj);
} catch (error) {
this.logger.info({error}, 'Failed to synthesize audio from primary vendor');
try {
if (this.fallbackVendor) {
const credentials = cs.getSpeechCredentials(this.fallbackVendor, 'tts', this.fallbackLabel);
const obj = {
account_sid: cs.accountSid,
text: intent.fulfillmentText,
vendor: this.fallbackVendor,
language: this.fallbackLanguage,
voice: this.fallbackVoice,
salt: cs.callSid,
credentials
};
this.logger.debug({obj}, 'Dialogflow:_onIntent - playing message via fallback tts');
return await synthAudio(stats, obj);
}
} catch (err) {
this.logger.info({err}, 'Failed to synthesize audio from fallback vendor');
throw err;
}
throw error;
}
}
/**
* A transcription - either interim or final - has been returned.
* If we are doing barge-in based on hotword detection, check for the hotword or phrase.
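
The fallbackSynthAudio method above boils down to: try the primary TTS vendor, and only if it throws, retry once with the fallback vendor's credentials. A self-contained sketch of that control flow; synthAudio and getCredentials here are stand-ins for the project's speech helpers, not their real signatures.

// Try primary TTS, fall back to a second vendor on failure.
async function synthesizeWithFallback({synthAudio, getCredentials, logger, text, primary, fallback}) {
  try {
    return await synthAudio({text, ...primary, credentials: getCredentials(primary.vendor, primary.label)});
  } catch (primaryErr) {
    logger.info({err: primaryErr}, 'primary TTS vendor failed');
    if (!fallback || !fallback.vendor) throw primaryErr;
    // one retry with the fallback vendor; if this also fails, its error propagates
    return await synthAudio({text, ...fallback, credentials: getCredentials(fallback.vendor, fallback.label)});
  }
}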

View File

@@ -65,6 +65,11 @@ class TaskGather extends Task {
const recognizer = this.data.recognizer;
this.vendor = recognizer.vendor;
this.language = recognizer.language;
this.label = recognizer.label;
this.fallbackVendor = recognizer.fallbackVendor || 'default';
this.fallbackLanguage = recognizer.fallbackLanguage || 'default';
this.fallbackLabel = recognizer.fallbackLabel || 'default';
/* let credentials be supplied in the recognizer object at runtime */
this.sttCredentials = setSpeechCredentialsAtRuntime(recognizer);
@@ -133,11 +138,60 @@ class TaskGather extends Task {
return s;
}
async _initSpeechCredentials(cs, vendor, label) {
const {getNuanceAccessToken, getIbmAccessToken} = this.cs.srf.locals.dbHelpers;
let credentials = cs.getSpeechCredentials(vendor, 'stt', label);
if (!credentials) {
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info(`TaskGather:exec - ERROR stt using ${vendor} requested but creds not supplied`);
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_NOT_PROVISIONED,
vendor
}).catch((err) => this.logger.info({err}, 'Error generating alert for no stt'));
// Notify the application that no credentials are configured for the requested STT vendor.
this.notifyError({
msg: 'ASR error',
details: `No speech-to-text service credentials for ${vendor} have been configured`
});
this.notifyTaskDone();
throw new Error(`No speech-to-text service credentials for ${vendor} have been configured`);
}
if (vendor === 'nuance' && credentials.client_id) {
/* get nuance access token */
const {client_id, secret} = credentials;
const {access_token, servedFromCache} = await getNuanceAccessToken(client_id, secret, 'asr tts');
this.logger.debug({client_id}, `Gather:exec - got nuance access token ${servedFromCache ? 'from cache' : ''}`);
credentials = {...credentials, access_token};
}
else if (vendor == 'ibm' && credentials.stt_api_key) {
/* get ibm access token */
const {stt_api_key, stt_region} = credentials;
const {access_token, servedFromCache} = await getIbmAccessToken(stt_api_key);
this.logger.debug({stt_api_key}, `Gather:exec - got ibm access token ${servedFromCache ? 'from cache' : ''}`);
credentials = {...credentials, access_token, stt_region};
}
return credentials;
}
async _startTranscribeForSpeech(cs, ep, vendor, language, credentials) {
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
await this._initSpeech(cs, ep, vendor, credentials);
if (this.killed) {
this.logger.info(`Gather:exec - task was quickly killed so do not transcribe for vendor: ${vendor}`);
return;
}
this.execVendor = vendor;
this.execLanguage = language;
this._startTranscribing(ep);
return updateSpeechCredentialLastUsed(credentials.speech_credential_sid);
}
async exec(cs, {ep}) {
this.logger.debug({options: this.data}, 'Gather:exec');
await super.exec(cs);
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
const {getNuanceAccessToken, getIbmAccessToken} = cs.srf.locals.dbHelpers;
if (cs.hasGlobalSttHints && !this.maskGlobalSttHints) {
const {hints, hintsBoost} = cs.globalSttHints;
@@ -184,57 +238,55 @@ class TaskGather extends Task {
this.language = cs.speechRecognizerLanguage;
if (this.data.recognizer) this.data.recognizer.language = this.language;
}
if ('default' === this.label || !this.label) {
this.label = cs.speechRecognizerLabel;
if (this.data.recognizer) this.data.recognizer.label = this.label;
}
// Fallback options
if ('default' === this.fallbackVendor || !this.fallbackVendor) {
this.fallbackVendor = cs.fallbackSpeechRecognizerVendor;
if (this.data.recognizer) this.data.recognizer.fallbackVendor = this.fallbackVendor;
}
if ('default' === this.fallbackLanguage || !this.fallbackLanguage) {
this.fallbackLanguage = cs.fallbackSpeechRecognizerLanguage;
if (this.data.recognizer) this.data.recognizer.fallbackLanguage = this.fallbackLanguage;
}
if ('default' === this.fallbackLabel || !this.fallbackLabel) {
this.fallbackLabel = cs.fallbackSpeechRecognizerLabel;
if (this.data.recognizer) this.data.recognizer.fallbackLabel = this.fallbackLabel;
}
if (!this.data.recognizer.vendor) {
this.data.recognizer.vendor = this.vendor;
}
if (this.needsStt && !this.sttCredentials) this.sttCredentials = cs.getSpeechCredentials(this.vendor, 'stt');
if (this.needsStt && !this.sttCredentials) {
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info(`TaskGather:exec - ERROR stt using ${this.vendor} requested but creds not supplied`);
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_NOT_PROVISIONED,
vendor: this.vendor
}).catch((err) => this.logger.info({err}, 'Error generating alert for no stt'));
// Notify the application that no credentials are configured for the requested STT vendor.
this.notifyError({
msg: 'ASR error',
details: `No speech-to-text service credentials for ${this.vendor} have been configured`
});
this.notifyTaskDone();
throw new Error(`No speech-to-text service credentials for ${this.vendor} have been configured`);
this.sttCredentials = await this._initSpeechCredentials(cs, this.vendor, this.label);
}
if (this.vendor === 'nuance' && this.sttCredentials.client_id) {
/* get nuance access token */
const {client_id, secret} = this.sttCredentials;
const {access_token, servedFromCache} = await getNuanceAccessToken(client_id, secret, 'asr tts');
this.logger.debug({client_id}, `Gather:exec - got nuance access token ${servedFromCache ? 'from cache' : ''}`);
this.sttCredentials = {...this.sttCredentials, access_token};
// Fetch credential for fallback recognizer
if (this.needsStt && !this.fallbackSttCredentials && this.fallbackVendor) {
this.fallbackSttCredentials = await this._initSpeechCredentials(
cs, this.fallbackVendor, this.fallbackLabel);
}
else if (this.vendor == 'ibm' && this.sttCredentials.stt_api_key) {
/* get ibm access token */
const {stt_api_key, stt_region} = this.sttCredentials;
const {access_token, servedFromCache} = await getIbmAccessToken(stt_api_key);
this.logger.debug({stt_api_key}, `Gather:exec - got ibm access token ${servedFromCache ? 'from cache' : ''}`);
this.sttCredentials = {...this.sttCredentials, access_token, stt_region};
}
const startListening = (cs, ep) => {
const startListening = async(cs, ep) => {
this._startTimer();
if (this.isContinuousAsr && 0 === this.timeout) this._startAsrTimer();
if (this.input.includes('speech') && !this.listenDuringPrompt) {
this._initSpeech(cs, ep)
.then(() => {
if (this.killed) {
this.logger.info('Gather:exec - task was quickly killed so do not transcribe');
return;
try {
return await this._startTranscribeForSpeech(cs, ep, this.vendor, this.language, this.sttCredentials);
} catch (error) {
this.logger.error({error}, 'error in initSpeech');
if (this.fallbackSttCredentials) {
try {
return await this._startTranscribeForSpeech(cs, ep, this.fallbackVendor,
this.fallbackLanguage, this.fallbackSttCredentials);
} catch (err) {
this.logger.error({err}, `error in initSpeech for fallback STT provider ${this.fallbackVendor}`);
}
this._startTranscribing(ep);
return updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid);
})
.catch((err) => {
this.logger.error({err}, 'error in initSpeech');
});
}
}
}
};
@@ -288,10 +340,19 @@ class TaskGather extends Task {
}
if (this.input.includes('speech') && this.listenDuringPrompt) {
await this._initSpeech(cs, ep);
this._startTranscribing(ep);
updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid)
.catch(() => {/*already logged error */});
try {
await this._startTranscribeForSpeech(cs, ep, this.vendor, this.language, this.sttCredentials);
} catch (error) {
this.logger.error({error}, 'error in initSpeech');
if (this.fallbackSttCredentials) {
try {
await this._startTranscribeForSpeech(cs, ep, this.fallbackVendor,
this.fallbackLanguage, this.fallbackSttCredentials);
} catch (err) {
this.logger.error({err}, `error in initSpeech for fallback STT provider ${this.fallbackVendor}`);
}
}
}
}
if (this.input.includes('digits') || this.dtmfBargein || this.asrDtmfTerminationDigit) {
@@ -362,9 +423,9 @@ class TaskGather extends Task {
}
}
async _initSpeech(cs, ep) {
const opts = this.setChannelVarsForStt(this, this.sttCredentials, this.data.recognizer);
switch (this.vendor) {
async _initSpeech(cs, ep, vendor, credentials) {
const opts = this.setChannelVarsForStt(this, credentials, this.data.recognizer);
switch (vendor) {
case 'google':
this.bugname = 'google_transcribe';
ep.addCustomEventListener(GoogleTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
@@ -450,9 +511,9 @@ class TaskGather extends Task {
break;
}
else {
this.notifyError({ msg: 'ASR error', details:`Invalid vendor ${this.vendor}`});
this.notifyError({ msg: 'ASR error', details:`Invalid vendor ${credentials.vendor}`});
this.notifyTaskDone();
throw new Error(`Invalid vendor ${this.vendor}`);
throw new Error(`Invalid vendor ${credentials.vendor}`);
}
}
@@ -464,14 +525,14 @@ class TaskGather extends Task {
_startTranscribing(ep) {
this.logger.debug({
vendor: this.vendor,
locale: this.language,
vendor: this.execVendor,
locale: this.execLanguage,
interim: this.interim,
bugname: this.bugname
}, 'Gather:_startTranscribing');
ep.startTranscription({
vendor: this.vendor,
locale: this.language,
vendor: this.execVendor,
locale: this.execLanguage,
interim: this.interim,
bugname: this.bugname,
}).catch((err) => {
@@ -480,7 +541,7 @@ class TaskGather extends Task {
writeAlerts({
account_sid: this.cs.accountSid,
alert_type: AlertType.STT_FAILURE,
vendor: this.vendor,
vendor: this.execVendor,
detail: err.message
});
}).catch((err) => this.logger.info({err}, 'Error generating alert for tts failure'));
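
The gather changes above follow a fixed start-up order: resolve credentials for both the primary and the fallback recognizer, try to start transcription with the primary, and only fall back if that start fails. A sketch of that order; resolveCredentials and startTranscribe are hypothetical stand-ins for _initSpeechCredentials and _startTranscribeForSpeech.

async function startSpeechWithFallback({resolveCredentials, startTranscribe, logger, primary, fallback}) {
  const primaryCreds = await resolveCredentials(primary.vendor, primary.label);
  const fallbackCreds = fallback && fallback.vendor
    ? await resolveCredentials(fallback.vendor, fallback.label)
    : null;
  try {
    return await startTranscribe(primary.vendor, primary.language, primaryCreds);
  } catch (err) {
    logger.error({err}, 'error starting primary recognizer');
    if (!fallbackCreds) throw err;
    return startTranscribe(fallback.vendor, fallback.language, fallbackCreds);
  }
}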

View File

@@ -25,6 +25,13 @@ class Lex extends Task {
this.vendor = this.data.tts.vendor || 'default';
this.language = this.data.tts.language || 'default';
this.voice = this.data.tts.voice || 'default';
this.speechCredentialLabel = this.data.tts.label || null;
// fallback tts
this.fallbackVendor = this.data.tts.fallbackVendor || 'default';
this.fallbackLanguage = this.data.tts.fallbackLanguage || 'default';
this.fallbackVoice = this.data.tts.fallbackVoice || 'default';
this.fallbackLabel = this.data.tts.fallbackLabel || 'default';
}
this.botName = `${this.bot}:${this.alias}:${this.region}`;
@@ -102,8 +109,15 @@ class Lex extends Task {
this.vendor = cs.speechSynthesisVendor;
this.language = cs.speechSynthesisLanguage;
this.voice = cs.speechSynthesisVoice;
this.speechCredentialLabel = cs.speechSynthesisLabel;
}
this.ttsCredentials = cs.getSpeechCredentials(this.vendor, 'tts');
if (this.fallbackVendor === 'default') {
this.fallbackVendor = cs.fallbackSpeechSynthesisVendor;
this.fallbackLanguage = cs.fallbackSpeechSynthesisLanguage;
this.fallbackVoice = cs.fallbackSpeechSynthesisVoice;
this.fallbackLabel = cs.fallbackSpeechSynthesisLabel;
}
this.ttsCredentials = cs.getSpeechCredentials(this.vendor, 'tts', this.speechCredentialLabel);
this.ep.addCustomEventListener('lex::intent', this._onIntent.bind(this, ep, cs));
this.ep.addCustomEventListener('lex::transcription', this._onTranscription.bind(this, ep, cs));
@@ -168,6 +182,41 @@ class Lex extends Task {
}
}
async fallbackSynthAudio(cs, msg, stats, synthAudio) {
try {
const {filePath} = await synthAudio(stats, {
account_sid: cs.accountSid,
text: msg,
vendor: this.vendor,
language: this.language,
voice: this.voice,
salt: cs.callSid,
credentials: this.ttsCredentials
});
return filePath;
} catch (error) {
this.logger.info({error}, 'failed to synth audio from primary vendor');
if (this.fallbackVendor) {
try {
const credential = cs.getSpeechCredentials(this.fallbackVendor, 'tts', this.fallbackLabel);
const {filePath} = await synthAudio(stats, {
account_sid: cs.accountSid,
text: msg,
vendor: this.fallbackVendor,
language: this.fallbackLanguage,
voice: this.fallbackVoice,
salt: cs.callSid,
credentials: credential
});
return filePath;
} catch (err) {
this.logger.info({err}, 'failed to synth audio from fallback vendor');
}
}
}
}
/**
* @param {*} evt - event data
*/
@@ -187,16 +236,7 @@ class Lex extends Task {
try {
this.logger.debug(`tts with ${this.vendor} ${this.voice}`);
// eslint-disable-next-line no-unused-vars
const {filePath, servedFromCache} = await synthAudio(stats, {
account_sid: cs.accountSid,
text: msg,
vendor: this.vendor,
language: this.language,
voice: this.voice,
salt: cs.callSid,
credentials: this.ttsCredentials
});
const filePath = await this.fallbackSynthAudio(cs, msg, stats, synthAudio);
if (filePath) cs.trackTmpFile(filePath);
if (this.events.includes('start-play')) {

View File

@@ -8,6 +8,7 @@ const DTMF_SPAN_NAME = 'dtmf';
class TaskListen extends Task {
constructor(logger, opts, parentTask) {
super(logger, opts);
this.disableBidirectionalAudio = opts.disableBidirectionalAudio;
this.preconditions = TaskPreconditions.Endpoint;
[
@@ -32,6 +33,8 @@ class TaskListen extends Task {
set bugname(name) { this._bugname = name; }
set ignoreCustomerData(val) { this._ignoreCustomerData = val; }
async exec(cs, {ep}) {
await super.exec(cs);
this.ep = ep;
@@ -111,9 +114,13 @@ class TaskListen extends Task {
async _startListening(cs, ep) {
this._initListeners(ep);
const ci = this.nested ? this.parentTask.sd.callInfo : cs.callInfo.toJSON();
if (this._ignoreCustomerData) {
delete ci.customerData;
}
const metadata = Object.assign(
{sampleRate: this.sampleRate, mixType: this.mixType},
this.nested ? this.parentTask.sd.callInfo : cs.callInfo.toJSON(),
ci,
this.metadata);
if (this.hook.auth) {
this.logger.debug({username: this.hook.auth.username, password: this.hook.auth.password},
@@ -148,7 +155,7 @@ class TaskListen extends Task {
}
/* support bi-directional audio */
if (!this.disableBiDirectionalAudio) {
if (!this.disableBidirectionalAudio) {
ep.addCustomEventListener(ListenEvents.PlayAudio, this._onPlayAudio.bind(this, ep));
}
ep.addCustomEventListener(ListenEvents.KillAudio, this._onKillAudio.bind(this, ep));

View File

@@ -63,12 +63,13 @@ class TaskRestDial extends Task {
this.canCancel = false;
const cs = this.callSession;
cs.setDialog(dlg);
this.logger.debug('TaskRestDial:_onConnect - call connected');
try {
const b3 = this.getTracingPropagation();
const httpHeaders = b3 && {b3};
const params = {
...cs.callInfo,
...(cs.callInfo.toJSON()),
defaults: {
synthesizer: {
vendor: cs.speechSynthesisVendor,
@@ -90,8 +91,10 @@ class TaskRestDial extends Task {
}
let tasks;
if (this.app_json) {
this.logger.debug('TaskRestDial: using app_json from task data');
tasks = JSON.parse(this.app_json);
} else {
this.logger.debug({call_hook: this.call_hook}, 'TaskRestDial: retrieving application');
tasks = await cs.requestor.request('session:new', this.call_hook, params, httpHeaders);
}
if (tasks && Array.isArray(tasks)) {

View File

@@ -59,15 +59,30 @@ class TaskSay extends Task {
const vendor = this.synthesizer.vendor && this.synthesizer.vendor !== 'default' ?
this.synthesizer.vendor :
cs.speechSynthesisVendor;
const fallbackVendor = this.synthesizer.fallbackVendor && this.synthesizer.fallbackVendor !== 'default' ?
this.synthesizer.fallbackVendor :
cs.fallbackSpeechSynthesisVendor;
const language = this.synthesizer.language && this.synthesizer.language !== 'default' ?
this.synthesizer.language :
cs.speechSynthesisLanguage ;
const fallbackLanguage = this.synthesizer.fallbackLanguage && this.synthesizer.fallbackLanguage !== 'default' ?
this.synthesizer.fallbackLanguage :
cs.fallbackSpeechSynthesisLanguage ;
let voice = this.synthesizer.voice && this.synthesizer.voice !== 'default' ?
this.synthesizer.voice :
cs.speechSynthesisVoice;
const fallbackVoice = this.synthesizer.fallbackVoice && this.synthesizer.fallbackVoice !== 'default' ?
this.synthesizer.fallbackVoice :
cs.fallbackSpeechSynthesisVoice;
const label = this.synthesizer.label && this.synthesizer.label !== 'default' ?
this.synthesizer.label :
cs.speechSynthesisLabel;
const fallbackLabel = this.synthesizer.fallbackLabel && this.synthesizer.fallbackLabel !== 'default' ?
this.synthesizer.fallbackLabel :
cs.fallbackSpeechSynthesisLabel;
const engine = this.synthesizer.engine || 'standard';
const salt = cs.callSid;
let credentials = cs.getSpeechCredentials(vendor, 'tts');
let credentials = cs.getSpeechCredentials(vendor, 'tts', label);
/* parse Nuance voices into name and model */
let model;
@@ -118,6 +133,8 @@ class TaskSay extends Task {
'tts.language': language,
'tts.voice': voice
});
let filePathUrl, isFromCache, roundTripTime;
let executedVendor, executedLanguage;
try {
const {filePath, servedFromCache, rtt} = await synthAudio(stats, {
account_sid: cs.accountSid,
@@ -131,37 +148,101 @@ class TaskSay extends Task {
credentials,
disableTtsCache : this.disableTtsCache
});
this.logger.debug(`file ${filePath}, served from cache ${servedFromCache}`);
if (filePath) cs.trackTmpFile(filePath);
span.setAttributes({'tts.cached': servedFromCache});
span.end();
if (!servedFromCache && !lastUpdated) {
lastUpdated = true;
updateSpeechCredentialLastUsed(credentials.speech_credential_sid)
.catch(() => {/*already logged error */});
}
span.setAttributes({'tts.cached': servedFromCache});
span.end();
if (!servedFromCache && rtt) {
this.notifyStatus({
event: 'synthesized-audio',
vendor,
language,
characters: text.length,
elapsedTime: rtt
filePathUrl = filePath;
isFromCache = servedFromCache;
roundTripTime = rtt;
executedVendor = vendor;
executedLanguage = language;
} catch (error) {
let isFallbackSuccess = false;
if (fallbackVendor) {
const fallbackcredentials = cs.getSpeechCredentials(fallbackVendor, 'tts', fallbackLabel);
const {span: fallbackSpan} = this.startChildSpan('fallback-tts-generation', {
'tts.vendor': fallbackVendor,
'tts.language': fallbackLanguage,
'tts.voice': fallbackVoice
});
try {
const {filePath, servedFromCache, rtt} = await synthAudio(stats, {
account_sid: cs.accountSid,
text,
vendor: fallbackVendor,
language: fallbackLanguage,
voice: fallbackVoice,
engine,
model,
salt,
credentials: fallbackcredentials,
disableTtsCache : this.disableTtsCache
});
isFallbackSuccess = true;
fallbackSpan.setAttributes({'tts.cached': servedFromCache});
fallbackSpan.end();
if (!servedFromCache && !lastUpdated) {
lastUpdated = true;
updateSpeechCredentialLastUsed(credentials.speech_credential_sid)
.catch(() => {/*already logged error */});
}
filePathUrl = filePath;
isFromCache = servedFromCache;
roundTripTime = rtt;
executedVendor = fallbackVendor;
executedLanguage = fallbackLanguage;
} catch (err) {
this.logger.info({err}, 'fallback Speech failed to synthesize audio');
fallbackSpan.end();
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.TTS_FAILURE,
vendor: fallbackVendor,
detail: err.message
}).catch((err) => this.logger.info({err}, 'Error generating alert for fallback tts failure'));
}
}
if (!isFallbackSuccess) {
this.logger.info({error}, 'Error synthesizing tts');
span.end();
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.TTS_FAILURE,
vendor,
detail: error.message
}).catch((err) => this.logger.info({err}, 'Error generating alert for tts failure'));
this.notifyError({msg: 'TTS error', details: error.message || error});
return;
}
return filePath;
} catch (err) {
this.logger.info({err}, 'Error synthesizing tts');
span.end();
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.TTS_FAILURE,
vendor,
detail: err.message
}).catch((err) => this.logger.info({err}, 'Error generating alert for tts failure'));
this.notifyError({msg: 'TTS error', details: err.message || err});
return;
}
this.logger.debug(`file ${filePathUrl}, served from cache ${isFromCache}`);
if (filePathUrl) cs.trackTmpFile(filePathUrl);
if (!isFromCache && roundTripTime) {
this.notifyStatus({
event: 'synthesized-audio',
vendor: executedVendor,
language: executedLanguage,
characters: text.length,
elapsedTime: roundTripTime
});
}
return filePathUrl;
};
const arr = this.text.map((t) => generateAudio(t));

View File

@@ -1,4 +1,5 @@
const Task = require('./task');
const assert = require('assert');
const {
TaskName,
TaskPreconditions,
@@ -41,6 +42,11 @@ class TaskTranscribe extends Task {
const recognizer = this.data.recognizer;
this.vendor = recognizer.vendor;
this.language = recognizer.language;
this.label = recognizer.label;
this.fallbackVendor = recognizer.fallbackVendor || 'default';
this.fallbackLanguage = recognizer.fallbackLanguage || 'default';
this.fallbackLabel = recognizer.fallbackLabel || 'default';
/* let credentials be supplied in the recognizer object at runtime */
this.sttCredentials = setSpeechCredentialsAtRuntime(recognizer);
@@ -56,14 +62,53 @@ class TaskTranscribe extends Task {
this._sonioxTranscripts = [];
this.childSpan = [null, null];
// Continuous asr timeout
this.asrTimeout = typeof this.data.recognizer.asrTimeout === 'number' ? this.data.recognizer.asrTimeout * 1000 : 0;
this.isContinuousAsr = this.asrTimeout > 0;
/* buffer speech for continuous asr */
this._bufferedTranscripts = [];
}
get name() { return TaskName.Transcribe; }
async _initSpeechCredential(cs, vendor, label) {
const {getNuanceAccessToken, getIbmAccessToken} = cs.srf.locals.dbHelpers;
let credentials = cs.getSpeechCredentials(vendor, 'stt', label);
if (!credentials) {
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info(`TaskTranscribe:exec - ERROR stt using ${vendor} requested but creds not supplied`);
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_NOT_PROVISIONED,
vendor: vendor
}).catch((err) => this.logger.info({err}, 'Error generating alert for no stt'));
throw new Error('no provisioned speech credentials for TTS');
}
if (vendor === 'nuance' && credentials.client_id) {
/* get nuance access token */
const {client_id, secret} = credentials;
const {access_token, servedFromCache} = await getNuanceAccessToken(client_id, secret, 'asr tts');
this.logger.debug({client_id},
`Transcribe:exec - got nuance access token ${servedFromCache ? 'from cache' : ''}`);
credentials = {...credentials, access_token};
}
else if (vendor == 'ibm' && credentials.stt_api_key) {
/* get ibm access token */
const {stt_api_key, stt_region} = credentials;
const {access_token, servedFromCache} = await getIbmAccessToken(stt_api_key);
this.logger.debug({stt_api_key}, `Gather:exec - got ibm access token ${servedFromCache ? 'from cache' : ''}`);
credentials = {...credentials, access_token, stt_region};
}
return credentials;
}
async exec(cs, {ep, ep2}) {
super.exec(cs);
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
const {getNuanceAccessToken, getIbmAccessToken} = cs.srf.locals.dbHelpers;
if (cs.hasGlobalSttHints) {
const {hints, hintsBoost} = cs.globalSttHints;
@@ -91,50 +136,60 @@ class TaskTranscribe extends Task {
this.language = cs.speechRecognizerLanguage;
if (this.data.recognizer) this.data.recognizer.language = this.language;
}
if ('default' === this.label || !this.label) {
this.label = cs.speechRecognizerLabel;
if (this.data.recognizer) this.data.recognizer.label = this.label;
}
// fallback options
if ('default' === this.fallbackVendor || !this.fallbackVendor) {
this.fallbackVendor = cs.fallbackSpeechRecognizerVendor;
if (this.data.recognizer) this.data.recognizer.fallbackVendor = this.fallbackVendor;
}
if ('default' === this.fallbackLanguage || !this.fallbackLanguage) {
this.fallbackLanguage = cs.fallbackSpeechRecognizerLanguage;
if (this.data.recognizer) this.data.recognizer.fallbackLanguage = this.fallbackLanguage;
}
if ('default' === this.fallbackLabel || !this.fallbackLabel) {
this.fallbackLabel = cs.fallbackSpeechRecognizerLabel;
if (this.data.recognizer) this.data.recognizer.fallbackLabel = this.fallbackLabel;
}
if (!this.data.recognizer.vendor) {
this.data.recognizer.vendor = this.vendor;
}
if (!this.sttCredentials) this.sttCredentials = cs.getSpeechCredentials(this.vendor, 'stt');
if (!this.sttCredentials) {
this.sttCredentials = await this._initSpeechCredential(cs, this.vendor, this.label);
}
if (!this.fallbackSttCredentials) {
this.fallbackSttCredentials = await this._initSpeechCredential(cs, this.fallbackVendor, this.fallbackLabel);
}
try {
if (!this.sttCredentials) {
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info(`TaskTranscribe:exec - ERROR stt using ${this.vendor} requested but creds not supplied`);
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_NOT_PROVISIONED,
vendor: this.vendor
}).catch((err) => this.logger.info({err}, 'Error generating alert for no stt'));
throw new Error('no provisioned speech credentials for TTS');
}
if (this.vendor === 'nuance' && this.sttCredentials.client_id) {
/* get nuance access token */
const {client_id, secret} = this.sttCredentials;
const {access_token, servedFromCache} = await getNuanceAccessToken(client_id, secret, 'asr tts');
this.logger.debug({client_id},
`Transcribe:exec - got nuance access token ${servedFromCache ? 'from cache' : ''}`);
this.sttCredentials = {...this.sttCredentials, access_token};
}
else if (this.vendor == 'ibm' && this.sttCredentials.stt_api_key) {
/* get ibm access token */
const {stt_api_key, stt_region} = this.sttCredentials;
const {access_token, servedFromCache} = await getIbmAccessToken(stt_api_key);
this.logger.debug({stt_api_key}, `Gather:exec - got ibm access token ${servedFromCache ? 'from cache' : ''}`);
this.sttCredentials = {...this.sttCredentials, access_token, stt_region};
}
await this._startTranscribing(cs, ep, 1);
await this._startTranscribing(cs, ep, 1, this.sttCredentials);
if (this.separateRecognitionPerChannel && ep2) {
await this._startTranscribing(cs, ep2, 2);
await this._startTranscribing(cs, ep2, 2, this.sttCredentials);
}
updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid)
.catch(() => {/*already logged error */});
updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid);
await this.awaitTaskDone();
} catch (err) {
this.logger.info(err, 'TaskTranscribe:exec - error');
this.parentTask && this.parentTask.emit('error', err);
let isFallbackSuccess = false;
if (this.fallbackSttCredentials) {
this.logger.info(err, 'TaskTranscribe:exec - fallback to 2nd speech provider');
try {
await this._startTranscribing(cs, ep, 1, this.fallbackSttCredentials);
if (this.separateRecognitionPerChannel && ep2) {
await this._startTranscribing(cs, ep2, 2, this.fallbackSttCredentials);
}
updateSpeechCredentialLastUsed(this.fallbackSttCredentials.speech_credential_sid);
await this.awaitTaskDone();
isFallbackSuccess = true;
} catch (error) {
this.logger.info(err, 'TaskTranscribe:exec - fallback error');
}
}
if (!isFallbackSuccess) {
this.parentTask && this.parentTask.emit('error', err);
}
}
this.removeSpeechListeners(ep);
}
@@ -159,8 +214,8 @@ class TaskTranscribe extends Task {
await this.awaitTaskDone();
}
async _startTranscribing(cs, ep, channel) {
const opts = this.setChannelVarsForStt(this, this.sttCredentials, this.data.recognizer);
async _startTranscribing(cs, ep, channel, credentials) {
const opts = this.setChannelVarsForStt(this, credentials, this.data.recognizer);
switch (this.vendor) {
case 'google':
this.bugname = 'google_transcribe';
@@ -234,7 +289,19 @@ class TaskTranscribe extends Task {
this._onVadDetected.bind(this, cs, ep));
break;
default:
throw new Error(`Invalid vendor ${this.vendor}`);
if (this.vendor.startsWith('custom:')) {
this.bugname = `${this.vendor}_transcribe`;
ep.addCustomEventListener(JambonzTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(JambonzTranscriptionEvents.Connect, this._onJambonzConnect.bind(this, cs, ep));
ep.addCustomEventListener(JambonzTranscriptionEvents.ConnectFailure,
this._onJambonzConnectFailure.bind(this, cs, ep));
break;
}
else {
this.notifyError({ msg: 'ASR error', details:`Invalid vendor ${this.vendor}`});
this.notifyTaskDone();
throw new Error(`Invalid vendor ${this.vendor}`);
}
}
/* common handler for all stt engine errors */
@@ -296,6 +363,26 @@ class TaskTranscribe extends Task {
}
}
if (this.isContinuousAsr && evt.is_final) {
this._bufferedTranscripts.push(evt);
this._startAsrTimer(channel);
} else {
await this._resolve(channel, evt);
}
}
_compileTranscripts() {
assert(this._bufferedTranscripts.length);
const evt = this._bufferedTranscripts[0];
let t = '';
for (const a of this._bufferedTranscripts) {
t += ` ${a.alternatives[0].transcript}`;
}
evt.alternatives[0].transcript = t.trim();
return evt;
}
async _resolve(channel, evt) {
/* we've got a transcript, so end the otel child span for this channel */
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
this.childSpan[channel - 1].span.setAttributes({
@@ -408,6 +495,24 @@ class TaskTranscribe extends Task {
this.notifyTaskDone();
}
_onJambonzConnect(_cs, _ep) {
this.logger.debug('TaskTranscribe:_onJambonzConnect');
}
_onJambonzConnectFailure(cs, _ep, evt) {
const {reason} = evt;
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info({evt}, 'TaskTranscribe:_onJambonzConnectFailure');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Failed connecting to ${this.vendor} speech recognizer: ${reason}`,
vendor: this.vendor,
}).catch((err) => this.logger.info({err}, 'Error generating alert for jambonz custom connection failure'));
this.notifyError({msg: 'ASR error', details:`Failed connecting to speech vendor ${this.vendor}: ${reason}`});
this.notifyTaskDone();
}
_onIbmConnect(_cs, _ep) {
this.logger.debug('TaskTranscribe:_onIbmConnect');
}
@@ -455,7 +560,22 @@ class TaskTranscribe extends Task {
this.notifyError({msg: 'ASR error', details:`Custom speech vendor ${this.vendor} error: ${evt.error}`});
}
_startAsrTimer(channel) {
assert(this.isContinuousAsr);
this._clearAsrTimer(channel);
this._asrTimer = setTimeout(() => {
this.logger.debug(`TaskTranscribe:_startAsrTimer - asr timer went off for channel: ${channel}`);
const evt = this._compileTranscripts();
this._bufferedTranscripts = [];
this._resolve(channel, evt);
}, this.asrTimeout);
this.logger.debug(`TaskTranscribe:_startAsrTimer: set for ${this.asrTimeout}ms for channel ${channel}`);
}
_clearAsrTimer(channel) {
if (this._asrTimer) clearTimeout(this._asrTimer);
this._asrTimer = null;
}
}
module.exports = TaskTranscribe;
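
The continuous-ASR additions above buffer every final transcript, restart a timer on each one, and when the timer finally fires concatenate the buffered alternatives into a single event. A stripped-down sketch of that buffering logic, kept separate from the task class for clarity:

class AsrBuffer {
  constructor(asrTimeoutMs, onCompiled) {
    this.asrTimeoutMs = asrTimeoutMs;
    this.onCompiled = onCompiled;   // called with the compiled transcription event
    this.buffered = [];
    this.timer = null;
  }
  push(evt) {                        // evt: a final transcription event
    this.buffered.push(evt);
    if (this.timer) clearTimeout(this.timer);
    this.timer = setTimeout(() => this.flush(), this.asrTimeoutMs);
  }
  flush() {
    if (!this.buffered.length) return;
    const evt = this.buffered[0];
    // join the individual transcripts into one utterance, as _compileTranscripts does
    evt.alternatives[0].transcript = this.buffered
      .map((a) => a.alternatives[0].transcript)
      .join(' ')
      .trim();
    this.buffered = [];
    this.onCompiled(evt);
  }
}

// usage: const buf = new AsrBuffer(1500, (evt) => console.log(evt.alternatives[0].transcript));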

View File

@@ -54,7 +54,8 @@ class Amd extends Emitter {
this.language = opts.recognizer?.language || cs.speechRecognizerLanguage;
if ('default' === this.language) this.language = cs.speechRecognizerLanguage;
this.sttCredentials = cs.getSpeechCredentials(this.vendor, 'stt');
this.sttCredentials = cs.getSpeechCredentials(this.vendor, 'stt',
opts.recognizer?.label || cs.speechRecognizerLabel);
if (!this.sttCredentials) throw new Error(`No speech credentials found for vendor ${this.vendor}`);

View File

@@ -11,15 +11,20 @@ const {LifeCycleEvents} = require('./constants');
const express = require('express');
const app = express();
const getString = bent('string');
const AWS = require('aws-sdk');
const sns = new AWS.SNS({apiVersion: '2010-03-31'});
const autoscaling = new AWS.AutoScaling({apiVersion: '2011-01-01'});
const {
SNSClient,
SubscribeCommand,
UnsubscribeCommand } = require('@aws-sdk/client-sns');
const snsClient = new SNSClient({ region: AWS_REGION, apiVersion: '2010-03-31' });
const {
AutoScalingClient,
DescribeAutoScalingGroupsCommand,
CompleteLifecycleActionCommand } = require('@aws-sdk/client-auto-scaling');
const autoScalingClient = new AutoScalingClient({ region: AWS_REGION, apiVersion: '2011-01-01' });
const {Parser} = require('xml2js');
const parser = new Parser();
const {validatePayload} = require('verify-aws-sns-signature');
AWS.config.update({region: AWS_REGION});
class SnsNotifier extends Emitter {
constructor(logger) {
super();
@@ -69,7 +74,7 @@ class SnsNotifier extends Emitter {
subscriptionRequestId: this.subscriptionRequestId
}, 'response from SNS SubscribeURL');
const data = await this.describeInstance();
this.lifecycleState = data.AutoScalingInstances[0].LifecycleState;
this.lifecycleState = data.AutoScalingGroups[0].Instances[0].LifecycleState;
this.emit('SubscriptionConfirmation', {publicIp: this.publicIp});
break;
@@ -135,11 +140,12 @@ class SnsNotifier extends Emitter {
async subscribe() {
try {
const response = await sns.subscribe({
const params = {
Protocol: 'http',
TopicArn: AWS_SNS_TOPIC_ARM,
Endpoint: this.snsEndpoint
}).promise();
};
const response = await snsClient.send(new SubscribeCommand(params));
this.logger.info({response}, `response to SNS subscribe to ${AWS_SNS_TOPIC_ARM}`);
} catch (err) {
this.logger.error({err}, `Error subscribing to SNS topic arn ${AWS_SNS_TOPIC_ARM}`);
@@ -149,9 +155,10 @@ class SnsNotifier extends Emitter {
async unsubscribe() {
if (!this.subscriptionArn) throw new Error('SnsNotifier#unsubscribe called without an active subscription');
try {
const response = await sns.unsubscribe({
const params = {
SubscriptionArn: this.subscriptionArn
}).promise();
};
const response = await snsClient.send(new UnsubscribeCommand(params));
this.logger.info({response}, `response to SNS unsubscribe to ${AWS_SNS_TOPIC_ARM}`);
} catch (err) {
this.logger.error({err}, `Error unsubscribing to SNS topic arn ${AWS_SNS_TOPIC_ARM}`);
@@ -160,26 +167,29 @@ class SnsNotifier extends Emitter {
completeScaleIn() {
assert(this.scaleInParams);
autoscaling.completeLifecycleAction(this.scaleInParams, (err, response) => {
if (err) return this.logger.error({err}, 'Error completing scale-in');
this.logger.info({response}, 'Successfully completed scale-in action');
});
autoScalingClient.send(new CompleteLifecycleActionCommand(this.scaleInParams))
.then((data) => {
return this.logger.info({data}, 'Successfully completed scale-in action');
})
.catch((err) => {
this.logger.error({err}, 'Error completing scale-in');
});
}
describeInstance() {
return new Promise((resolve, reject) => {
if (!this.instanceId) return reject('instance-id unknown');
autoscaling.describeAutoScalingInstances({
autoScalingClient.send(new DescribeAutoScalingGroupsCommand({
InstanceIds: [this.instanceId]
}, (err, data) => {
if (err) {
}))
.then((data) => {
this.logger.info({data}, 'SnsNotifier: describeInstance');
return resolve(data);
})
.catch((err) => {
this.logger.error({err}, 'Error describing instances');
reject(err);
} else {
this.logger.info({data}, 'SnsNotifier: describeInstance');
resolve(data);
}
});
});
});
}
@@ -193,7 +203,7 @@ module.exports = async function(logger) {
process.on('SIGHUP', async() => {
try {
const data = await notifier.describeInstance();
const state = data.AutoScalingInstances[0].LifecycleState;
const state = data.AutoScalingGroups[0].Instances[0].LifecycleState;
if (state !== notifier.lifecycleState) {
notifier.lifecycleState = state;
switch (state) {

View File

@@ -2,17 +2,24 @@ const {context, trace} = require('@opentelemetry/api');
const {Dialog} = require('drachtio-srf');
class RootSpan {
constructor(callType, req) {
let tracer, callSid, linkedSpanId;
const {srf} = require('../../');
const tracer = srf.locals.otel.tracer;
let callSid, accountSid, applicationSid, linkedSpanId;
if (req instanceof Dialog) {
const dlg = req;
tracer = dlg.srf.locals.otel.tracer;
callSid = dlg.callSid;
linkedSpanId = dlg.linkedSpanId;
}
else {
tracer = req.srf.locals.otel.tracer;
else if (req.srf) {
callSid = req.locals.callSid;
accountSid = req.get('X-Account-Sid'),
applicationSid = req.locals.application_sid;
}
else {
callSid = req.callSid;
accountSid = req.accountSid;
applicationSid = req.applicationSid;
}
this._span = tracer.startSpan(callType || 'incoming-call');
if (req instanceof Dialog) {
@@ -22,13 +29,20 @@ class RootSpan {
callId: dlg.sip.callId
});
}
else if (req.srf) {
this._span.setAttributes({
callSid,
accountSid,
applicationSid,
callId: req.get('Call-ID'),
externalCallId: req.get('X-CID')
});
}
else {
this._span.setAttributes({
callSid,
accountSid: req.get('X-Account-Sid'),
applicationSid: req.locals.application_sid,
callId: req.get('Call-ID'),
externalCallId: req.get('X-CID')
accountSid,
applicationSid
});
}
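
RootSpan above now takes its tracer from srf.locals rather than from the request object. For orientation, the underlying OpenTelemetry calls it wraps look roughly like this; attribute names follow the diff, while the tracer name and the bootstrap that registers a provider are assumptions.

const {trace} = require('@opentelemetry/api');

// Start a root span for an incoming call and tag it with the call identifiers.
function startCallSpan({callSid, accountSid, applicationSid}) {
  const tracer = trace.getTracer('jambonz-feature-server');
  const span = tracer.startSpan('incoming-call');
  span.setAttributes({callSid, accountSid, applicationSid});
  return span;   // caller is responsible for span.end()
}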

View File

@@ -101,7 +101,8 @@ module.exports = (logger) => {
method: 'OPTIONS',
headers: {
'X-FS-Status': ms && !dryUpCalls ? 'open' : 'closed',
-        'X-FS-Calls': srf.locals.sessionTracker.count
+        'X-FS-Calls': srf.locals.sessionTracker.count,
+        'X-FS-ServiceUrl': srf.locals.serviceUrl
}
});
req.on('response', (res) => {

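The hunk above adds an X-FS-ServiceUrl header to the periodic OPTIONS ping toward the SBC. A rough sketch of such a ping with drachtio-srf follows; the connection parameters, target URI and header values are assumptions for illustration only, not the repository's health-check module.

// Sketch: send a SIP OPTIONS ping carrying feature-server status headers (placeholder values).
const Srf = require('drachtio-srf');
const srf = new Srf();

srf.connect({host: '127.0.0.1', port: 9022, secret: 'cymru'});  // assumed drachtio connection
srf.on('connect', (err) => {
  if (err) return console.error(err);
  srf.request('sip:172.38.0.10', {
    method: 'OPTIONS',
    headers: {
      'X-FS-Status': 'open',
      'X-FS-Calls': 0,
      'X-FS-ServiceUrl': 'http://10.0.0.5:3000'                 // assumed service url
    }
  }, (err, req) => {
    if (err) return console.error(err);
    req.on('response', (res) => console.log(`OPTIONS ping response: ${res.status}`));
  });
});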
View File

@@ -43,6 +43,7 @@ class WsRequestor extends BaseRequestor {
async request(type, hook, params, httpHeaders = {}) {
assert(HookMsgTypes.includes(type));
const url = hook.url || hook;
+    const wantsAck = !['call:status', 'verb:status', 'jambonz:error'].includes(type);
if (this.maliciousClient) {
this.logger.info({url: this.url}, 'WsRequestor:request - discarding msg to malicious client');
@@ -73,11 +74,19 @@ class WsRequestor extends BaseRequestor {
if (this.connectInProgress) {
this.logger.debug(
`WsRequestor:request(${this.id}) - queueing ${type} message since we are connecting`);
-      this.queuedMsg.push({type, hook, params, httpHeaders});
+      if (wantsAck) {
+        const p = new Promise((resolve, reject) => {
+          this.queuedMsg.push({type, hook, params, httpHeaders, promise: {resolve, reject}});
+        });
+        return p;
+      }
+      else {
+        this.queuedMsg.push({type, hook, params, httpHeaders});
+      }
return;
}
this.connectInProgress = true;
-    this.logger.debug(`WsRequestor:request(${this.id}) - connecting since we do not have a connection`);
+    this.logger.debug(`WsRequestor:request(${this.id}) - connecting since we do not have a connection for ${type}`);
if (this.connections >= MAX_RECONNECTS) {
return Promise.reject(`max attempts connecting to ${this.url}`);
}
@@ -116,9 +125,14 @@ class WsRequestor extends BaseRequestor {
const sendQueuedMsgs = () => {
if (this.queuedMsg.length > 0) {
-        for (const {type, hook, params, httpHeaders} of this.queuedMsg) {
+        for (const {type, hook, params, httpHeaders, promise} of this.queuedMsg) {
this.logger.debug(`WsRequestor:request - preparing queued ${type} for sending`);
-          setImmediate(this.request.bind(this, type, hook, params, httpHeaders));
+          if (promise) {
+            this.request(type, hook, params, httpHeaders)
+              .then((res) => promise.resolve(res))
+              .catch((err) => promise.reject(err));
+          }
+          else setImmediate(this.request.bind(this, type, hook, params, httpHeaders));
}
this.queuedMsg.length = 0;
}
@@ -137,8 +151,8 @@ class WsRequestor extends BaseRequestor {
}
/* simple notifications */
-    if (['call:status', 'verb:status', 'jambonz:error'].includes(type) || reconnectingWithoutAck) {
-      this.ws.send(JSON.stringify(obj), () => {
+    if (!wantsAck || reconnectingWithoutAck) {
+      this.ws?.send(JSON.stringify(obj), () => {
this.logger.debug({obj}, `WsRequestor:request websocket: sent (${url})`);
sendQueuedMsgs();
});

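The WsRequestor hunks above queue messages while a (re)connect is in progress and, for message types that expect an acknowledgement, hand the caller a promise that is settled once the queued message is finally sent. A simplified, self-contained sketch of that pattern follows; it is a stand-in with made-up type names, not the actual WsRequestor class.

// Simplified sketch of the queue-and-settle pattern; the real transport is omitted.
class QueuingSender {
  constructor() {
    this.connecting = false;
    this.queued = [];
  }

  send(type, payload) {
    const wantsAck = !['call:status', 'verb:status', 'jambonz:error'].includes(type);
    if (this.connecting) {
      if (wantsAck) {
        // caller gets a promise that settles when the queued message is eventually sent
        return new Promise((resolve, reject) => {
          this.queued.push({type, payload, promise: {resolve, reject}});
        });
      }
      this.queued.push({type, payload});                 // fire-and-forget types are just queued
      return;
    }
    return this._doSend(type, payload, wantsAck);
  }

  flushQueued() {
    for (const {type, payload, promise} of this.queued) {
      if (promise) {
        this.send(type, payload)
          .then((res) => promise.resolve(res))
          .catch((err) => promise.reject(err));
      }
      else setImmediate(() => this.send(type, payload));
    }
    this.queued.length = 0;
  }

  async _doSend(type, payload, wantsAck) {
    // stand-in for ws.send plus waiting for an ack frame
    return wantsAck ? {type, ok: true} : undefined;
  }
}

// usage: a message sent mid-reconnect still resolves for the caller once flushed
const sender = new QueuingSender();
sender.connecting = true;
const pending = sender.send('verb:hook', {foo: 'bar'});   // 'verb:hook' is an illustrative type
sender.connecting = false;
sender.flushQueued();
pending.then((ack) => console.log('acked', ack));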
package-lock.json (generated): 5851 changed lines; diff suppressed because it is too large.

View File

@@ -1,6 +1,6 @@
{
"name": "jambonz-feature-server",
"version": "0.8.3",
"version": "0.8.4",
"main": "app.js",
"engines": {
"node": ">= 10.16.0"
@@ -19,19 +19,19 @@
"bugs": {},
"scripts": {
"start": "node app",
"test": "NODE_ENV=test JAMBONES_HOSTING=1 HTTP_POOL=1 ENCRYPTION_SECRET=foobar DRACHTIO_HOST=127.0.0.1 DRACHTIO_PORT=9060 DRACHTIO_SECRET=cymru JAMBONES_MYSQL_HOST=127.0.0.1 JAMBONES_MYSQL_PORT=3360 JAMBONES_MYSQL_USER=jambones_test JAMBONES_MYSQL_PASSWORD=jambones_test JAMBONES_MYSQL_DATABASE=jambones_test JAMBONES_REDIS_HOST=127.0.0.1 JAMBONES_REDIS_PORT=16379 JAMBONES_LOGLEVEL=error ENABLE_METRICS=0 HTTP_PORT=3000 JAMBONES_SBCS=172.38.0.10 JAMBONES_FREESWITCH=127.0.0.1:8022:JambonzR0ck$:docker-host JAMBONES_TIME_SERIES_HOST=127.0.0.1 JAMBONES_NETWORK_CIDR=172.38.0.0/16 node test/ ",
"test": "NODE_ENV=test JAMBONES_HOSTING=1 HTTP_POOL=1 JAMBONES_TTS_TRIM_SILENCE=1 ENCRYPTION_SECRET=foobar DRACHTIO_HOST=127.0.0.1 DRACHTIO_PORT=9060 DRACHTIO_SECRET=cymru JAMBONES_MYSQL_HOST=127.0.0.1 JAMBONES_MYSQL_PORT=3360 JAMBONES_MYSQL_USER=jambones_test JAMBONES_MYSQL_PASSWORD=jambones_test JAMBONES_MYSQL_DATABASE=jambones_test JAMBONES_REDIS_HOST=127.0.0.1 JAMBONES_REDIS_PORT=16379 JAMBONES_LOGLEVEL=error ENABLE_METRICS=0 HTTP_PORT=3000 JAMBONES_SBCS=172.38.0.10 JAMBONES_FREESWITCH=127.0.0.1:8022:JambonzR0ck$:docker-host JAMBONES_TIME_SERIES_HOST=127.0.0.1 JAMBONES_NETWORK_CIDR=172.38.0.0/16 node test/ ",
"coverage": "./node_modules/.bin/nyc --reporter html --report-dir ./coverage npm run test",
"jslint": "eslint app.js tracer.js lib",
"jslint:fix": "eslint app.js tracer.js lib --fix"
},
"dependencies": {
"@jambonz/db-helpers": "^0.9.0",
"@jambonz/db-helpers": "^0.9.1",
"@jambonz/http-health-check": "^0.0.1",
"@jambonz/realtimedb-helpers": "^0.8.6",
"@jambonz/speech-utils": "^0.0.15",
"@jambonz/stats-collector": "^0.1.8",
"@jambonz/time-series": "^0.2.7",
"@jambonz/verb-specifications": "^0.0.24",
"@jambonz/speech-utils": "^0.0.19",
"@jambonz/stats-collector": "^0.1.9",
"@jambonz/time-series": "^0.2.8",
"@jambonz/verb-specifications": "^0.0.29",
"@opentelemetry/api": "^1.4.0",
"@opentelemetry/exporter-jaeger": "^1.9.0",
"@opentelemetry/exporter-trace-otlp-http": "^0.35.0",
@@ -41,7 +41,8 @@
"@opentelemetry/sdk-trace-base": "^1.9.0",
"@opentelemetry/sdk-trace-node": "^1.9.0",
"@opentelemetry/semantic-conventions": "^1.9.0",
"aws-sdk": "^2.1313.0",
"@aws-sdk/client-sns": "^3.360.0",
"@aws-sdk/client-auto-scaling": "^3.360.0",
"bent": "^7.3.12",
"debug": "^4.3.4",
"deepcopy": "^2.1.0",

View File

@@ -13,6 +13,8 @@ DROP TABLE IF EXISTS beta_invite_codes;
DROP TABLE IF EXISTS call_routes;
DROP TABLE IF EXISTS clients;
DROP TABLE IF EXISTS dns_records;
DROP TABLE IF EXISTS lcr;
@@ -127,6 +129,16 @@ application_sid CHAR(36) NOT NULL,
PRIMARY KEY (call_route_sid)
) COMMENT='a regex-based pattern match for call routing';
CREATE TABLE clients
(
client_sid CHAR(36) NOT NULL UNIQUE ,
account_sid CHAR(36) NOT NULL,
is_active BOOLEAN NOT NULL DEFAULT 1,
username VARCHAR(64),
password VARCHAR(1024),
PRIMARY KEY (client_sid)
);
CREATE TABLE dns_records
(
dns_record_sid CHAR(36) NOT NULL UNIQUE ,
@@ -322,6 +334,7 @@ last_tested DATETIME,
tts_tested_ok BOOLEAN,
stt_tested_ok BOOLEAN,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
label VARCHAR(64),
PRIMARY KEY (speech_credential_sid)
);
@@ -411,7 +424,7 @@ PRIMARY KEY (smpp_gateway_sid)
CREATE TABLE phone_numbers
(
phone_number_sid CHAR(36) UNIQUE ,
-number VARCHAR(132) NOT NULL UNIQUE ,
+number VARCHAR(132) NOT NULL,
voip_carrier_sid CHAR(36),
account_sid CHAR(36),
application_sid CHAR(36),
@@ -469,6 +482,7 @@ speech_synthesis_voice VARCHAR(64),
speech_recognizer_vendor VARCHAR(64) NOT NULL DEFAULT 'google',
speech_recognizer_language VARCHAR(64) NOT NULL DEFAULT 'en-US',
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
record_all_calls BOOLEAN NOT NULL DEFAULT false,
PRIMARY KEY (application_sid)
) COMMENT='A defined set of behaviors to be applied to phone calls ';
@@ -506,6 +520,9 @@ subspace_client_secret VARCHAR(255),
subspace_sip_teleport_id VARCHAR(255),
subspace_sip_teleport_destinations VARCHAR(255),
siprec_hook_sid CHAR(36),
record_all_calls BOOLEAN NOT NULL DEFAULT false,
record_format VARCHAR(16) NOT NULL DEFAULT 'mp3',
bucket_credential VARCHAR(8192) COMMENT 'credential used to authenticate with storage service',
PRIMARY KEY (account_sid)
) COMMENT='An enterprise that uses the platform for comm services';
@@ -526,6 +543,9 @@ ALTER TABLE call_routes ADD FOREIGN KEY account_sid_idxfk_3 (account_sid) REFERE
ALTER TABLE call_routes ADD FOREIGN KEY application_sid_idxfk (application_sid) REFERENCES applications (application_sid);
CREATE INDEX client_sid_idx ON clients (client_sid);
ALTER TABLE clients ADD CONSTRAINT account_sid_idxfk_13 FOREIGN KEY account_sid_idxfk_13 (account_sid) REFERENCES accounts (account_sid);
CREATE INDEX dns_record_sid_idx ON dns_records (dns_record_sid);
ALTER TABLE dns_records ADD FOREIGN KEY account_sid_idxfk_4 (account_sid) REFERENCES accounts (account_sid);
@@ -590,8 +610,6 @@ CREATE INDEX smpp_address_sid_idx ON smpp_addresses (smpp_address_sid);
CREATE INDEX service_provider_sid_idx ON smpp_addresses (service_provider_sid);
ALTER TABLE smpp_addresses ADD FOREIGN KEY service_provider_sid_idxfk_4 (service_provider_sid) REFERENCES service_providers (service_provider_sid);
-CREATE UNIQUE INDEX speech_credentials_idx_1 ON speech_credentials (vendor,account_sid);
CREATE INDEX speech_credential_sid_idx ON speech_credentials (speech_credential_sid);
CREATE INDEX service_provider_sid_idx ON speech_credentials (service_provider_sid);
ALTER TABLE speech_credentials ADD FOREIGN KEY service_provider_sid_idxfk_5 (service_provider_sid) REFERENCES service_providers (service_provider_sid);
@@ -628,6 +646,8 @@ CREATE INDEX smpp_gateway_sid_idx ON smpp_gateways (smpp_gateway_sid);
CREATE INDEX voip_carrier_sid_idx ON smpp_gateways (voip_carrier_sid);
ALTER TABLE smpp_gateways ADD FOREIGN KEY voip_carrier_sid_idxfk (voip_carrier_sid) REFERENCES voip_carriers (voip_carrier_sid);
CREATE UNIQUE INDEX phone_numbers_unique_idx_voip_carrier_number ON phone_numbers (number,voip_carrier_sid);
CREATE INDEX phone_number_sid_idx ON phone_numbers (phone_number_sid);
CREATE INDEX number_idx ON phone_numbers (number);
CREATE INDEX voip_carrier_sid_idx ON phone_numbers (voip_carrier_sid);
@@ -682,5 +702,4 @@ ALTER TABLE accounts ADD FOREIGN KEY queue_event_hook_sid_idxfk (queue_event_hoo
ALTER TABLE accounts ADD FOREIGN KEY device_calling_application_sid_idxfk (device_calling_application_sid) REFERENCES applications (application_sid);
ALTER TABLE accounts ADD FOREIGN KEY siprec_hook_sid_idxfk (siprec_hook_sid) REFERENCES applications (application_sid);
SET FOREIGN_KEY_CHECKS=1;
-SET FOREIGN_KEY_CHECKS=1;

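Two schema changes above work together: the unique (vendor, account_sid) index on speech_credentials is dropped and a label column is added, so one account can hold several credentials for the same vendor, distinguished by label. A hedged sketch of a lookup that uses the new column follows; mysql2 is an assumed client and the query is illustrative, not code from the repository.

// Illustrative lookup of a speech credential by account, vendor and label (mysql2 assumed).
const mysql = require('mysql2/promise');

const getSpeechCredential = async(accountSid, vendor, label) => {
  const pool = mysql.createPool({
    host: process.env.JAMBONES_MYSQL_HOST,
    user: process.env.JAMBONES_MYSQL_USER,
    password: process.env.JAMBONES_MYSQL_PASSWORD,
    database: process.env.JAMBONES_MYSQL_DATABASE
  });
  try {
    // when no label is supplied, fall back to the credential that has no label
    const sql = `SELECT * FROM speech_credentials
      WHERE account_sid = ? AND vendor = ?
      AND (label = ? OR (? IS NULL AND label IS NULL))`;
    const [rows] = await pool.query(sql, [accountSid, vendor, label || null, label || null]);
    return rows.length ? rows[0] : null;
  } finally {
    await pool.end();                                    // a real app would reuse a shared pool
  }
};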
View File

@@ -5,6 +5,8 @@ const getJSON = bent('json')
const clearModule = require('clear-module');
const {provisionCallHook} = require('./utils')
const sleepFor = (ms) => new Promise((r) => setTimeout(r, ms));
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
});
@@ -47,6 +49,7 @@ test('\'dial-phone\'', async(t) => {
// THEN
const p = sippUac('uas-dial.xml', '172.38.0.10', undefined, undefined, 2);
await sleepFor(1000);
let account_sid = '622f62e4-303a-49f2-bbe0-eb1e1714e37a';
let post = bent('http://127.0.0.1:3000/', 'POST', 'json', 201);
@@ -84,7 +87,7 @@ test('\'dial-sip\'', async(t) => {
try {
await connect(srf);
// wait for fs connected to drachtio server.
-    await new Promise(r => setTimeout(r, 1000));
+    await sleepFor(1000);
// GIVEN
const from = "dial_sip";
let verbs = [

View File

@@ -42,7 +42,7 @@ services:
ipv4_address: 172.38.0.7
drachtio:
-    image: drachtio/drachtio-server:latest
+    image: drachtio/drachtio-server:0.8.22
restart: always
command: drachtio --contact "sip:*;transport=udp" --mtu 4096 --address 0.0.0.0 --port 9022
ports:
@@ -57,7 +57,7 @@ services:
condition: service_healthy
freeswitch:
-    image: drachtio/drachtio-freeswitch-mrf:0.4.18
+    image: drachtio/drachtio-freeswitch-mrf:0.4.33
restart: always
command: freeswitch --rtp-range-start 20000 --rtp-range-end 20100
environment:

View File

@@ -210,6 +210,44 @@ test('\'transcribe\' test - soniox', async(t) => {
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'transcribe: succeeds when using soniox credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'transcribe\' test - google with asrTimeout', async(t) => {
if (!GCP_JSON_KEY) {
t.pass('skipping google tests');
return t.end();
}
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let verbs = [
{
"verb": "transcribe",
"recognizer": {
"vendor": "google",
"hints": ["customer support", "sales", "human resources", "HR"],
"asrTimeout": 4
},
"transcriptionHook": "/transcriptionHook"
}
];
let from = "gather_success";
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'transcribe: succeeds when using google credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);