Compare commits

...

146 Commits

Author SHA1 Message Date
Dave Horton
0cde92798d improve deprecated syntax 2023-04-10 09:20:49 -04:00
Dave Horton
45045fd39b improve deprecated syntax 2023-04-10 09:12:41 -04:00
Dave Horton
92da5518eb try 5 2023-04-10 09:08:54 -04:00
Dave Horton
1126ea827f try 4 2023-04-10 09:02:39 -04:00
Dave Horton
fcc4c0a5ce try 3 2023-04-10 09:00:19 -04:00
Dave Horton
ba3ff42987 try 2 2023-04-10 08:56:44 -04:00
Dave Horton
765311c491 interpolate tags 2023-04-10 08:53:57 -04:00
Dave Horton
dcd8b378b2 use docker/build-push-action@v4 2023-04-10 08:49:44 -04:00
Quan HL
552a4e9fd1 feat: upload docker image to docker hub 2023-04-10 15:12:50 +07:00
Snyk bot
04003a709e fix: package.json & package-lock.json to reduce vulnerabilities (#305)
The following vulnerabilities are fixed with an upgrade:
- https://snyk.io/vuln/SNYK-JS-XML2JS-5414874
2023-04-09 12:41:47 -04:00
Dave Horton
565ee609ef based on more testing default google to command_and_search for gather and latest_long for transcribe 2023-04-07 07:34:03 -04:00
Vinod Dharashive
9587465e85 Support for Cisco NBR for Agentassist (#303)
* NBR Support

* NBR Support

* NBR Support

* re-invite Ok sdp should be sendonly

* NBR Support

* sendrecv sdp correction

* Update siprec-utils.js

* Updated comments

* Siprec participants details added to hook

* Bugfix siprec

* Update call-info.js
2023-04-07 07:33:24 -04:00
Dave Horton
845d80a23d change population of test data 2023-04-05 13:10:01 -04:00
Hoan Luu Huu
3109db7861 feat: update stat collector version (#302) 2023-04-05 12:02:41 -04:00
Hoan Luu Huu
11c5047465 fix: Re-invite sip rec does not update media (#300)
* fix: Re-invite sip rec does not update media

* fix: Re-invite sip rec does not update media
2023-04-05 09:46:32 -04:00
Dave Horton
e19ea629f0 response to siprec invite should have a:recvonly if offer had a:sendonly (#298) 2023-04-04 21:02:21 -04:00
Antony Jukes
fe529c6bfb removed incorrect "this" from this.target.auth as it is actually a local const. (#296) 2023-04-03 11:13:12 -04:00
Dave Horton
e980b82ec4 update to speech utils with improved microsoft tts 2023-04-01 13:20:59 -04:00
Hoan Luu Huu
318ca19791 fix: update speech utils version (#295)
* fix: update speech utils version

* update package-lock.json

---------

Co-authored-by: Dave Horton <daveh@beachdognet.com>
2023-04-01 11:35:13 -04:00
Dave Horton
e2bd211346 update to latest speech-utils 2023-03-31 16:50:46 -04:00
Dave Horton
410c07fae6 further fix for google model 2023-03-31 12:37:04 -04:00
Dave Horton
2ebfbfb3d8 google STT: when altLanguages are used default to a model that supports it 2023-03-31 12:31:14 -04:00
Dave Horton
a29795839d Bugfix/bot mode restart (#292)
* restart background gather if we get a new config with bargein=enable and changes to input types

* stop background gather properly before restarting

* fix: sticky background gather tasks must not be restarted if we have a new background gather

* fix undefined reference

* safety
2023-03-31 09:35:23 -04:00
Hoan Luu Huu
28088a4cdd feat: queue play audio (#290)
* feat: queue play audio

* fix: typo

* fix: typo

* fix: typo

* fix: typo

* fix: typo

* fix: typo

* fix: typo

* fix: typo

* fix: typo

* fix: typo
2023-03-30 15:31:54 -04:00
Dave Horton
afb381eec9 bugfix: setting altLanguages on Azure once left it turned on 2023-03-29 08:49:34 -04:00
Dave Horton
ed00ccb681 bump version 2023-03-28 14:14:25 -04:00
Dave Horton
6e945dde9a google stt fixes, including defaulting to phone_call model based on c… (#288)
* google stt fixes, including defaulting to phone_call model based on comparative model testing

* lint error
2023-03-28 10:02:03 -04:00
Dave Horton
efdea3e514 gather defaults to multiple utterances 2023-03-27 15:53:01 -04:00
Dave Horton
5131d524ce bugfix: allow for empty transcripts that nuance returns 2023-03-27 14:13:50 -04:00
Anton Voylenko
c0114015ea check encryption env on start (#286) 2023-03-26 15:45:20 -04:00
Anton Voylenko
a293ec09d0 add ENCRYPTION_SECRET variable (#283)
* add ENCRYPTION_SECRET variable

* add env for tests
2023-03-26 14:52:58 -04:00
Dave Horton
f71ae83ce4 bugfix: nuance on-prem stt 2023-03-26 14:26:36 -04:00
Hoan Luu Huu
0dd161913c fix: gather task should clear dtmf event before resolve (#284)
Co-authored-by: Quan HL <quanluuhoang8@gmail.com>
2023-03-26 12:32:51 -04:00
Dave Horton
63ab554908 google STT: default to command_and_search for Gather, as latest_short seems to have issues, various other fixes (#285) 2023-03-26 12:20:03 -04:00
Dave Horton
e1bd075ebc support for nuance on-prem stt/tts 2023-03-25 12:08:54 -04:00
Dave Horton
9de89258a1 update speech-utils@0.0.8 2023-03-24 14:50:08 -04:00
Dave Horton
145ed488db make the feature committed in dd4d9aa enabled only if JAMBONES_GATHER_CLEAR_GLOBAL_HINTS_ON_EMPTY_HINTS is set, as it is a behavior change 2023-03-23 07:54:39 -04:00
Dave Horton
c06a43adfa Gather: bugfix for alternate languages with Azure 2023-03-22 14:32:25 -04:00
Dave Horton
bebc82d194 bugfix: gather with google STT does not need to restart transcribing after end of utterance 2023-03-21 15:46:00 -04:00
Dave Horton
cdc82e99ff add minor logging 2023-03-21 12:35:02 -04:00
Dave Horton
dd4d9aa261 Gather: if an empty array of hints is supplied, this signals we should mask global hints for this collection 2023-03-21 12:16:12 -04:00
Dave Horton
1dcf9ee5a2 update to speech-utils@0.0.6 2023-03-21 08:27:25 -04:00
Dave Horton
4b28db0946 update to speech-utils@0.0.5 2023-03-21 08:00:52 -04:00
Dave Horton
e7ff76b938 update to speech-utils with AWS tts bugfix 2023-03-20 15:35:20 -04:00
Dave Horton
f245275983 gather: remove duplicate and null hints, restart timeout on interim transcripts 2023-03-20 15:34:55 -04:00
Dave Horton
690deed89d prune unused logging 2023-03-19 12:04:02 -04:00
Dave Horton
26053ec709 update speech-utils with support for more audio formats for custom tts 2023-03-15 09:14:41 -04:00
Dave Horton
34e8203338 update to realtime-dbhelpers that factored out speech-utils 2023-03-14 10:07:29 -04:00
Hoan Luu Huu
7be3c64116 feat: update speech-utils version 1.0.1 (#275)
* feat: update speech-utils version 1.0.1

* feat: update speech-utils version 1.0.1

* more fixes for custom stt

* more fixes

* fixes

* update drachtio-fsmrf

* pass url to mod_jambonz_transcribe

* transcription utils: handle custom results

* handle custom speech vendor errors

* add support for hints to custom speech

* change to custom speech options

* send hints as an array for custom speech

* update latest speech-utils

* transcribe: changes to support soniox

* bugfix: soniox transcribe

---------

Co-authored-by: Quan HL <quanluuhoang8@gmail.com>
Co-authored-by: Dave Horton <daveh@beachdognet.com>
2023-03-12 19:38:36 -04:00
Hoan Luu Huu
f71d3aed8b feat: forward PAI from inbound call to dial outbound call (#280)
* feat: forward PAI from inbound call to dial outbound call

* fix: review comment

---------

Co-authored-by: Quan HL <quanluuhoang8@gmail.com>
2023-03-09 08:58:19 -05:00
Hoan Luu Huu
5ab24337b2 fix: use TTS_FAILURE alert type for synthAudio (#278)
Co-authored-by: Quan HL <quanluuhoang8@gmail.com>
2023-03-08 07:42:06 -05:00
Dave Horton
2af76d94a6 bugfix: repeated ws failures should stop eventually 2023-03-07 16:29:00 -05:00
Dave Horton
4919c05181 add verb:status for play events (#274) 2023-03-03 15:56:50 -05:00
Dave Horton
3084a9d6ba #241 - gather bargein on Nuance has to be based on start of speech event (#246) 2023-03-03 13:39:23 -05:00
Dave Horton
1c683f1142 initial changes for soniox (#270)
* initial changes for soniox

* changes to gather for soniox

* parse soniox stt results

* handle <end> token for soniox

* soniox: handle empty array of words

* support for soniox hints

* add soniox storage options

* update to verb specs

* add support for transcribe

* compile soniox transcripts

* gather: kill no input timer for soniox when we get interim results

* fix buffering of soniox transcripts

* fix for compiling soniox transcript

* another fix for compiling soniox transcript

* another fix

* handling of <end> token

* fix soniox bug

* gather: fixes for soniox continuous asr

* fix undefined variable reference

* fix prev commit

* bugfix: allow verb_status requests

* gather: for soniox no need to restart transcription after final transcription received

* update verb specs

* update verb specs, fixes for continuous asr:
2023-03-03 13:37:55 -05:00
Dave Horton
ab1947e23e bugfix: gather minBargeinWordCount defaults to 1 2023-02-24 10:27:05 -05:00
Dave Horton
5527abff09 bump version 2023-02-24 10:04:25 -05:00
Dave Horton
68827112fc further fix for early hints match in gather 2023-02-23 13:10:46 -05:00
Dave Horton
8a9a2df128 early hints fix that was not merged 2023-02-23 12:54:21 -05:00
Dave Horton
3a3544a5e8 remove some wordy logging 2023-02-23 12:32:41 -05:00
Dave Horton
cbeb706946 update to latest @jambonz/verb-specifications with less verbose logging 2023-02-23 12:16:14 -05:00
Dave Horton
f005262615 docs 2023-02-23 10:48:09 -05:00
Snyk bot
67ec28484c fix: package.json & package-lock.json to reduce vulnerabilities (#265)
The following vulnerabilities are fixed with an upgrade:
- https://snyk.io/vuln/SNYK-JS-UNDICI-3323844
- https://snyk.io/vuln/SNYK-JS-UNDICI-3323845
2023-02-23 10:26:06 -05:00
two56
803a944240 Use the request from CallSession for cancel (#268)
* Use the req from CallSession for cancel

* Check cs is set

---------

Co-authored-by: Matt Preskett <matt.preskett@netcall.com>
2023-02-23 09:13:44 -05:00
Snyk bot
a5cd342e46 fix: Dockerfile to reduce vulnerabilities (#269) 2023-02-22 14:04:39 -05:00
EgleH
e91feb64f5 Update node base image to node:18.14.0-alpine3.16 (#267) 2023-02-21 07:54:00 -05:00
Dave Horton
ae688ddc7e when handling reinvites for SIPREC incoming calls just respond 200 OK with existing sdp 2023-02-17 09:12:54 -05:00
Dave Horton
9b21b65478 fix uncaught exception in certain ws reconnect scenarios 2023-02-15 20:29:47 -05:00
Hoan Luu Huu
c09425fa89 feat: use verb-specifications (#262)
* feat: use verb-specifications

* feat: use verb-specifications

* fix: verb specification v2

* remove irrelevant tests

* fix: verb-scpecification

* update to use @jambonz/verb-specifications

---------

Co-authored-by: Quan HL <quanluuhoang8@gmail.com>
Co-authored-by: Dave Horton <daveh@beachdognet.com>
2023-02-15 09:56:23 -05:00
Dave Horton
6706992b4b update to latest @jambonz/realtimedb-helpers 2023-02-14 09:00:00 -05:00
Dave Horton
0fdcb3a6d6 Feature/nvidia speech (#261)
* initial changes for nvidia speech

* allow nvidia speech credentials to be set at runtime

* update drachtio-fsmrf

* fix handling of nvidia-specific options

* fix nvidia custom config

* fix nvidia word time offsets

* fix nvidia custom configuration

* normalize nvidia transcripts

* update to @jambonz/realtime-dbhelpers with nvidia tts support
2023-02-12 14:06:01 -05:00
Snyk bot
50057deca9 fix: Dockerfile to reduce vulnerabilities (#260)
The following vulnerabilities are fixed with an upgrade:
- https://snyk.io/vuln/SNYK-ALPINE316-OPENSSL-3314623
- https://snyk.io/vuln/SNYK-ALPINE316-OPENSSL-3314624
- https://snyk.io/vuln/SNYK-ALPINE316-OPENSSL-3314641
- https://snyk.io/vuln/SNYK-ALPINE316-OPENSSL-3314643
2023-02-12 12:43:38 -05:00
Dave Horton
c7eacdd0f8 bugfix: switching to http webhook during a ws session 2023-02-11 21:09:48 -05:00
Dave Horton
e990b5dbf9 proper shut down in K8S (#253) 2023-02-07 19:54:34 -05:00
Dave Horton
7ae37b1e60 #251 - unhandled exception in Session#_notifyCallStatusChange 2023-02-07 14:51:54 -05:00
Dave Horton
ed284c367d update db-helpers 2023-02-07 13:34:11 -05:00
Dave Horton
272380dc62 fix vulnerability 2023-02-07 08:31:36 -05:00
Dave Horton
61dbb659b3 fix lint errors 2023-02-07 08:29:32 -05:00
Vinod Dharashive
fbae7c0eab notifyError object jambonz:error (#247)
Consistent notifyError for jambonz:error for asr
2023-02-07 08:21:25 -05:00
Dave Horton
4894a85569 bugfix: recognizer.punctuation not working on nuance 2023-02-06 12:07:28 -05:00
Hoan Luu Huu
0e5bb876ce ws-requestor unit test (#244)
* ws-requestor unit test

* ws-requestor unit test

* ws-requestor unit test

* handle special case of reconnecting during the initial session:new - ack transaction

* fix: add more wsrequestor unit test

* fix: add more wsrequestor unit test

---------

Co-authored-by: Quan HL <quanluuhoang8@gmail.com>
Co-authored-by: Dave Horton <daveh@beachdognet.com>
2023-02-06 08:06:41 -05:00
Dave Horton
8658d03f1f add mixType to config.listen 2023-02-04 16:08:56 -05:00
Dave Horton
f2ff5250b0 reset variables like hints so that previous hints do not automatically carry over (#243) 2023-02-03 13:36:14 -05:00
Dave Horton
c37fba541f for backwards compatibility in gather/transcribe return only 1 alternative at top level (#240) 2023-02-02 12:58:22 -05:00
Dave Horton
f9921cf4e9 #238 - add 'listen' option to config verb (#239) 2023-01-31 14:39:55 -05:00
Dave Horton
86fed4ec90 gather: fix bug referencing this.hints which does not exist (hints are part of recognizer object) 2023-01-30 18:21:36 -05:00
Hoan Luu Huu
9d07a1354c feat: support app_json in application (#236)
* feat: support app_json in application

* feat: support app_json in application

* update db schema to latest

---------

Co-authored-by: Quan HL <quanluuhoang8@gmail.com>
Co-authored-by: Dave Horton <daveh@beachdognet.com>
2023-01-29 14:35:29 -05:00
Dave Horton
2775c7ddd1 update drachtio-srf 2023-01-29 13:59:33 -05:00
Dave Horton
70822cb278 test fix: ACK to 487 response must have same branch in via as invite 2023-01-29 13:23:11 -05:00
Hoan Luu Huu
14a02735be fix: uncomment testsuite (#234)
* fix: uncomment testsuite

* fix: uncomment testsuite

* fix: unstable testcases

* fix: uncomment testsuite

* fix: uncomment testsuite

* fix: uncomment testsuite

* fix: uncomment testsuite

* fix: uncomment testsuite

* fix: uncomment testsuite

* fix: unstable testcase

* fix: uncomment testsuite

* wip: test

* wip: test

* wip: test

Co-authored-by: Quan HL <quanluuhoang8@gmail.com>
2023-01-27 07:27:53 -05:00
Hoan Luu Huu
4b3ebe37ac fix: add early media testcase (#233)
* fix: add early media testcase

* fix: add early media testcase

* fix: add early media testcase

Co-authored-by: Quan HL <quanluuhoang8@gmail.com>
2023-01-26 13:29:31 -05:00
Hoan Luu Huu
f4fbd07f8e feat: add dial verb testcases (#231)
* feat: add dial verb testcases

* feat: add dial verb testcases

* feat: add dial verb testcases

* feat: add dial verb testcases

Co-authored-by: Quan HL <quanluuhoang8@gmail.com>
2023-01-25 10:39:01 -05:00
Hoan Luu Huu
6ebba8673f feat: listen verb testsuite (#222)
* first draft

* first draft

* listen should connect to port 3000 on webhook scaffold

* revamp webhook scaffold for listen ws support

* fix: finished listen test

* fix: add playbeep test listen

* fix: add playbeep test listen

* fixed: listen on 10% loss

* feat: add test case for listen pause resume and complete the call

Co-authored-by: Quan HL <quanluuhoang8@gmail.com>
Co-authored-by: Dave Horton <daveh@beachdognet.com>
2023-01-24 22:20:31 -05:00
dependabot[bot]
2b06177dc5 Bump jsonwebtoken and ibm-cloud-sdk-core (#227)
Bumps [jsonwebtoken](https://github.com/auth0/node-jsonwebtoken) and [ibm-cloud-sdk-core](https://github.com/IBM/node-sdk-core). These dependencies needed to be updated together.

Updates `jsonwebtoken` from 8.5.1 to 9.0.0
- [Release notes](https://github.com/auth0/node-jsonwebtoken/releases)
- [Changelog](https://github.com/auth0/node-jsonwebtoken/blob/master/CHANGELOG.md)
- [Commits](https://github.com/auth0/node-jsonwebtoken/compare/v8.5.1...v9.0.0)

Updates `ibm-cloud-sdk-core` from 3.2.1 to 3.2.2
- [Release notes](https://github.com/IBM/node-sdk-core/releases)
- [Changelog](https://github.com/IBM/node-sdk-core/blob/main/CHANGELOG.md)
- [Commits](https://github.com/IBM/node-sdk-core/commits)

---
updated-dependencies:
- dependency-name: jsonwebtoken
  dependency-type: indirect
- dependency-name: ibm-cloud-sdk-core
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-01-24 10:03:39 -05:00
Hoan Luu Huu
088316d266 fix: split ssml to correct chunks (#225)
* fix: split ssml to correct chunks

* fix: split ssml to correct chunks

* fixed: eslint

* fixed: eslint

* fixed: add comment to testcase

* fixed: review comments

* fixed: review comments

* fixed: review comments

* fixed: review comments

* fixed: review comments

Co-authored-by: Quan HL <quanluuhoang8@gmail.com>
2023-01-24 09:48:31 -05:00
Hoan Luu Huu
8c0044a378 Webhook app test support ws (#226)
* feat: webhook test app support ws

* feat: webhook test app support ws

* feat: webhook test app support ws

* feat: webhook test app support ws

Co-authored-by: Quan HL <quanluuhoang8@gmail.com>
2023-01-24 09:04:37 -05:00
Dave Horton
dae307d71f when closing websocket at end of call send ws code 1000 2023-01-22 12:46:20 -05:00
Dave Horton
1b5b37184b bugfix #223: early hints should not be enabled when continuous asr is used in gather 2023-01-19 09:48:12 -05:00
Dave Horton
2f8efb80d0 bugfix #220 (config w/ bargein enable followed later in flow w bargein disable) 2023-01-19 09:31:23 -05:00
Dave Horton
c57e88b496 update .gitignore 2023-01-17 08:01:50 -05:00
Dave Horton
7122d955fe fix for #217 - refer blocks if neither notify nor bye is received 2023-01-17 08:01:50 -05:00
Hoan Luu Huu
028aeea856 fix: wrong stt vendor will raise ws notification (#219)
* fix: wrong stt vendor will raise ws notification

* fix: wrong stt vendor will raise ws notification

Co-authored-by: Quan HL <quan.luuhoan8@gmail.com>
2023-01-17 07:52:05 -05:00
Dave Horton
567b03fd36 bugfix: transcribe/gather using default as vendor 2023-01-11 15:31:24 -05:00
Dave Horton
d5c04d2133 transcribe and gather: silently discard listening events from ibm stt 2023-01-11 14:59:15 -05:00
Dave Horton
a2e909b057 fix analysis of play test 2023-01-10 13:50:58 -05:00
Dave Horton
c3627cecb8 avoid hanging up on websocket apps after initial set of commands have completed 2023-01-10 08:28:59 -05:00
Dave Horton
6753fdc2b4 linting error 2023-01-09 09:30:29 -05:00
two56
740d996739 Fix/play timeout (#183)
* Move play timeout into jambonz

* Unnecessary await on kill + eslint

* Change call status test to be consistent

Co-authored-by: Matt Preskett <matt.preskett@netcall.com>
2023-01-09 09:27:20 -05:00
two56
714d06a600 Fix/conference wait hook (#213)
* Deref old wait_hook on change

* Kill running playSession on conference exit

Co-authored-by: Matt Preskett <matt.preskett@netcall.com>
2023-01-09 08:13:34 -05:00
Dave Horton
0c52324915 Bugfix/http redirect to ws (#211)
* allow redirect to ws(s) url after starting with http(s) #210

* further fixes for #210
2023-01-07 15:00:18 -05:00
Dave Horton
2e3fb60e72 support google hints as an array of objects containing both hint phra… (#209)
* support google hints as an array of objects containing both hint phrase and boost value

* handle structured hints for non-google STT (#205)
2023-01-06 08:00:17 -05:00
Dave Horton
05a4665f87 Feature/force tts generation (#208)
* Feature: add option synthesizer.forceTtsGeneration #198

* Feature: add option synthesizer.forceTtsGeneration #198

* minor cleanup

* minor

Co-authored-by: Michal Tesar <michal@irevolution.group>
2023-01-04 15:42:48 -05:00
Dave Horton
b16d49d8ea Bugfix/gather kill race condition (#207)
* further fix for race condition in #206

* #206: ignore request to start bot mode when bot mode is already active
2023-01-04 15:19:35 -05:00
Dave Horton
aad2d52efd fix #206: prevent 2 simultaneous background gathers 2023-01-03 10:04:51 -05:00
Dave Horton
83d767116b add support for http transport for jaeger 2022-12-30 10:47:31 -05:00
Dave Horton
b4673ad942 update to latest drachtio-srf and realtimedb-helpers 2022-12-29 10:22:16 -05:00
Dave Horton
9b8bb07a97 update to latest drachtio-fsmrf 2022-12-28 11:05:06 -06:00
Dave Horton
29f578ff5c faster uuid 2022-12-28 10:40:26 -06:00
Dave Horton
6d86793494 update to latest drachtio-srf and drachtio-fsmrf 2022-12-21 12:26:51 -05:00
Dave Horton
9f95fde67e faster uuid generator 2022-12-21 08:27:00 -05:00
Dave Horton
010b4d2778 bugfix: db caching had side effects of using closed http requestors 2022-12-13 14:55:23 -05:00
Dave Horton
8d81c20c1a Feature/fsrmf perf improvement (#197)
* update drachtio-fsmrf

* update drachtio-fsmrf

* sync package-lock.json
2022-12-11 12:12:50 -05:00
Dave Horton
69f796e960 update gh actions 2022-12-10 15:32:50 -05:00
Dave Horton
4db03d3d1b update to drachtio-fsmrf@3.0.8 with performance improvements for call setup 2022-12-10 15:12:15 -05:00
Dave Horton
a60c6a4740 add support for ws verb:status event notifications (#196) 2022-12-09 21:11:47 -05:00
dependabot[bot]
5b875c3ad4 Bump qs and express in /test/webhook (#195)
Bumps [qs](https://github.com/ljharb/qs) to 6.11.0 and updates ancestor dependency [express](https://github.com/expressjs/express). These dependencies need to be updated together.


Updates `qs` from 6.7.0 to 6.11.0
- [Release notes](https://github.com/ljharb/qs/releases)
- [Changelog](https://github.com/ljharb/qs/blob/main/CHANGELOG.md)
- [Commits](https://github.com/ljharb/qs/compare/v6.7.0...v6.11.0)

Updates `express` from 4.17.1 to 4.18.2
- [Release notes](https://github.com/expressjs/express/releases)
- [Changelog](https://github.com/expressjs/express/blob/master/History.md)
- [Commits](https://github.com/expressjs/express/compare/4.17.1...4.18.2)

---
updated-dependencies:
- dependency-name: qs
  dependency-type: indirect
- dependency-name: express
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-12-09 20:34:00 -05:00
Dave Horton
bf19d2ae6d fixes for runtime speech credentials 2022-12-09 20:25:39 -05:00
Dave Horton
37efdc62be fix prev commit 2022-12-09 11:07:41 -05:00
Dave Horton
78a76bb1f4 bugfix: when handing over from wss to http close the wss socket 2022-12-09 10:57:59 -05:00
Dave Horton
39fb762a15 ibm speech fix 2022-12-04 11:28:02 -05:00
Dave Horton
2cc3140de0 bugfix #192: config with dtmf only followed later by gather with speech not working 2022-12-01 13:05:11 -05:00
Dave Horton
1a1f2770b6 include service_provider_sid in call webhook 2022-11-30 13:00:35 -05:00
Dave Horton
23f3b44b8b add custom header on Refer indicating whether sbc-inbound should fix up the Refer-To 2022-11-30 13:00:03 -05:00
Dave Horton
753d46e513 error handling in amd 2022-11-22 15:39:37 -05:00
Dave Horton
71a2435c63 Feature/ibm watson (#193)
* initial changes to support ibm watson

* update specs.json for ibm

* update to drachtio-fsmrf with support for ibm

* bugfix: set access token for ibm stt, not api_key

* fix name of api_key

* normalize ibm transcription results

* rework ibm credentials

* bugfix setting runtime speech creds

* bugfix: ibm region

* typo

* changes to transcribe for ibm watson

* implement connect handler

* bugfix: bind error

* proper use of result_index

* ibm error handling
2022-11-21 22:09:37 -05:00
Dave Horton
8686348454 Feature/deepgram stt (#190)
* initial changes to support deepgram stt

* fixes for normalizing vendor-specific transcriptions

* update to latest drachtio-fsmrf with support for deepgram stt

* deepgram parsing error

* hints support for deepgram

* handling deepgram errors

* ignore late arriving transcripts for deepgram

* handling of empty transcripts

* transcribe changes

* allow deepgram stt credentials to be provided at run time

* bind channel in transcription handler

* fixes for transcribe when handling empty transcripts

* more empty transcript fixes

* update tests to latest modules

* add test cases for deepgram speech recognition
2022-11-12 19:48:59 -05:00
Guilherme Rauen
f511e6ab6b update node image to the latest and most secure (#189)
Co-authored-by: Guilherme Rauen <g.rauen@cognigy.com>
2022-11-11 11:22:07 -05:00
Dave Horton
706cd4b94b bugfix: handle gather/transcribe where vendor not explicitly specified #187 2022-11-07 09:31:51 -05:00
Dave Horton
e5c209e269 fix for #186: unhandled error when amd webhook returns non-success status code 2022-11-06 09:41:34 -05:00
Dave Horton
d903dbe28d update deps to latest db/realtime-db 2022-11-06 09:39:51 -05:00
Dave Horton
d88321c24d fixes for custom voice testing in azure 2022-11-06 09:37:22 -05:00
Dave Horton
6e1761bab6 update to db-helpers with caching fix 2022-11-01 20:56:51 -04:00
Dave Horton
509bb065bb Feature/nuance stt (#185)
* initial changes to gather to support nuance stt

* updateSpeechCredentialLastUsed could be called without a speech_credential_sid if credentials are passed in the flow

* fix bugname

* typo

* added handlers for nuance

* logging

* major refactor of parsing transcriptions

* initial support for nuance in transcribe verb

* updates from testing

* cleanup some tests

* update action

* typo

* gather: start nuance timers after say/play completes

* update drachtio-fsmrf

* refactor some code

* typo

* log nuance error detail

* timeout handling

* typo

* handle nuance 413 response when recognition times out

* typo in specs.json

* add support for nuance resources

* fixes and tests for transcribe

* remove logging from test

* initial support for kryptonEndpoint

* try getting access token even when using krypton

* typo in kryptonEndpoint property

* add support for Nuance tts

* parse nuance voice and model for tts

* use nuance credentials from db

* update to db-helpers@0.7.0 with caching option

* add support for azure audio logging in gather/transcribe

* sync package-lock.json
2022-11-01 12:23:49 -04:00
Dave Horton
203b9774ca bugfix: ws error max connections error causes a crash 2022-11-01 11:42:08 -04:00
Dave Horton
fade47d423 bugfix when running multiple instances in EC2 2022-11-01 11:42:01 -04:00
Dave Horton
26e52d131e update to db-helpers@0.7.0 with caching option 2022-11-01 11:41:53 -04:00
77 changed files with 11056 additions and 4438 deletions


@@ -1,14 +1,13 @@
name: CI
on:
push:
on: [push, pull_request]
jobs:
build:
runs-on: ubuntu-20.04
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v1
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 16
- run: npm ci
@@ -20,3 +19,5 @@ jobs:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_REGION: ${{ secrets.AWS_REGION }}
MICROSOFT_REGION: ${{ secrets.MICROSOFT_REGION }}
MICROSOFT_API_KEY: ${{ secrets.MICROSOFT_API_KEY }}


@@ -2,13 +2,8 @@ name: Docker
on:
push:
# Publish `main` as Docker `latest` image.
branches:
- main
# Publish `v1.2.3` tags as releases.
tags:
- v*
- '*'
env:
IMAGE_NAME: feature-server
@@ -20,32 +15,41 @@ jobs:
if: github.event_name == 'push'
steps:
- uses: actions/checkout@v2
- name: Checkout code
uses: actions/checkout@v3
- name: Build image
run: docker build . --file Dockerfile --tag $IMAGE_NAME
- name: Log into registry
run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
- name: Push image
- name: prepare tag
id: prepare_tag
run: |
IMAGE_ID=ghcr.io/${{ github.repository_owner }}/$IMAGE_NAME
IMAGE_ID=jambonz/$IMAGE_NAME
# Change all uppercase to lowercase
IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
# Strip git ref prefix from version
VERSION=$(echo "${{ github.ref }}" | sed -e 's,.*/\(.*\),\1,')
# Strip "v" prefix from tag name
[[ "${{ github.ref }}" == "refs/tags/"* ]] && VERSION=$(echo $VERSION | sed -e 's/^v//')
# Use Docker `latest` tag convention
[ "$VERSION" == "main" ] && VERSION=latest
echo IMAGE_ID=$IMAGE_ID
echo VERSION=$VERSION
echo "image_id=$IMAGE_ID" >> $GITHUB_OUTPUT
echo "version=$VERSION" >> $GITHUB_OUTPUT
docker tag $IMAGE_NAME $IMAGE_ID:$VERSION
docker push $IMAGE_ID:$VERSION
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push Docker image
uses: docker/build-push-action@v4
with:
context: .
push: true
tags: ${{ steps.prepare_tag.outputs.image_id }}:${{ steps.prepare_tag.outputs.version }}
build-args: |
GITHUB_REPOSITORY=$GITHUB_REPOSITORY
GITHUB_REF=$GITHUB_REF

.gitignore

@@ -40,4 +40,5 @@ examples/*
ecosystem.config.js
.vscode
test/credentials/*.json
run-tests.sh
run-coverage.sh


@@ -1,4 +1,4 @@
FROM --platform=linux/amd64 node:18.9.0-alpine3.16 as base
FROM --platform=linux/amd64 node:18.14.1-alpine3.16 as base
RUN apk --update --no-cache add --virtual .builds-deps build-base python3


@@ -18,8 +18,10 @@ Configuration is provided via environment variables:
|DRACHTIO_PORT| listening port of drachtio server for control connections (typically 9022)|yes|
|DRACHTIO_SECRET| shared secret|yes|
|ENABLE_METRICS| if 1, metrics will be generated|no|
|ENCRYPTION_SECRET| secret for credential encryption(JWT_SECRET is deprecated) |yes|
|GOOGLE_APPLICATION_CREDENTIALS| path to gcp service key file|yes|
|HTTP_PORT| tcp port to listen on for API requests from jambonz-api-server|yes|
|JAMBONES_GATHER_EARLY_HINTS_MATCH| if true and hints are provided, gather will opportunistically review interim transcripts if possible to reduce ASR latency |no|
|JAMBONES_FREESWITCH| IP:port:secret for Freeswitch server (e.g. '127.0.0.1:8021:JambonzR0ck$'|yes|
|JAMBONES_LOGLEVEL| log level for application, 'info' or 'debug'|no|
|JAMBONES_MYSQL_HOST| mysql host|yes|
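As a rough illustration only (not code from this repository), the variables documented above could be validated at process startup in the same assert style that app.js uses below; the variable names and the IP:port:secret format come from the table, while everything else in this sketch is an assumption:

const assert = require('assert');

// ENCRYPTION_SECRET supersedes the deprecated JWT_SECRET, so accept either one
assert.ok(process.env.ENCRYPTION_SECRET || process.env.JWT_SECRET, 'missing ENCRYPTION_SECRET env var');

// JAMBONES_FREESWITCH is IP:port:secret, e.g. '127.0.0.1:8021:JambonzR0ck$'
assert.ok(process.env.JAMBONES_FREESWITCH, 'missing JAMBONES_FREESWITCH env var');
const [fsAddress, fsPort, fsSecret] = process.env.JAMBONES_FREESWITCH.split(':');

// optional settings fall back to defaults when unset
const logLevel = process.env.JAMBONES_LOGLEVEL || 'info';
const earlyHintsMatch = process.env.JAMBONES_GATHER_EARLY_HINTS_MATCH === 'true';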

app.js

@@ -8,6 +8,7 @@ assert.ok(process.env.DRACHTIO_SECRET, 'missing DRACHTIO_SECRET env var');
assert.ok(process.env.JAMBONES_FREESWITCH, 'missing JAMBONES_FREESWITCH env var');
assert.ok(process.env.JAMBONES_REDIS_HOST, 'missing JAMBONES_REDIS_HOST env var');
assert.ok(process.env.JAMBONES_NETWORK_CIDR || process.env.K8S, 'missing JAMBONES_SUBNET env var');
assert.ok(process.env.ENCRYPTION_SECRET || process.env.JWT_SECRET, 'missing ENCRYPTION_SECRET env var');
const Srf = require('drachtio-srf');
const srf = new Srf();
@@ -106,16 +107,25 @@ const disconnect = () => {
});
};
process.on('SIGUSR2', handle);
process.on('SIGTERM', handle);
function handle(signal) {
const {removeFromSet} = srf.locals.dbHelpers;
const setName = `${(process.env.JAMBONES_CLUSTER_ID || 'default')}:active-fs`;
logger.info(`got signal ${signal}, removing ${srf.locals.localSipAddress} from set ${setName}`);
removeFromSet(setName, srf.locals.localSipAddress);
removeFromSet(FS_UUID_SET_NAME, srf.locals.fsUUID);
srf.locals.disabled = true;
logger.info(`got signal ${signal}`);
const setName = `${(process.env.JAMBONES_CLUSTER_ID || 'default')}:active-fs`;
if (setName && srf.locals.localSipAddress) {
logger.info(`got signal ${signal}, removing ${srf.locals.localSipAddress} from set ${setName}`);
removeFromSet(setName, srf.locals.localSipAddress);
}
removeFromSet(FS_UUID_SET_NAME, srf.locals.fsUUID);
if (process.env.K8S) {
srf.locals.lifecycleEmitter.operationalState = LifeCycleEvents.ScaleIn;
}
if (getCount() === 0) {
logger.info('no calls in progress, exiting');
process.exit(0);
}
}
if (process.env.JAMBONZ_CLEANUP_INTERVAL_MINS) {


@@ -3,7 +3,7 @@ const makeTask = require('../../tasks/make_task');
const RestCallSession = require('../../session/rest-call-session');
const CallInfo = require('../../session/call-info');
const {CallDirection, CallStatus} = require('../../utils/constants');
const { v4: uuidv4 } = require('uuid');
const uuidv4 = require('uuid-random');
const SipError = require('drachtio-srf').SipError;
const sysError = require('./error');
const HttpRequestor = require('../../utils/http-requestor');
@@ -104,7 +104,7 @@ router.post('/', async(req, res) => {
proxy: `sip:${sbcAddress}`,
localSdp: ep.local.sdp
});
if (target.auth) opts.auth = this.target.auth;
if (target.auth) opts.auth = target.auth;
/**
@@ -136,7 +136,7 @@ router.post('/', async(req, res) => {
}
else if (!app.notifier) {
logger.debug('creating null call status hook');
app.notifier = {request: () => {}};
app.notifier = {request: () => {}, close: () => {}};
}
/* now launch the outdial */


@@ -2,7 +2,7 @@ const router = require('express').Router();
const CallInfo = require('../../session/call-info');
const {CallDirection} = require('../../utils/constants');
const SmsSession = require('../../session/sms-call-session');
const normalizeJambones = require('../../utils/normalize-jambones');
const { normalizeJambones } = require('@jambonz/verb-specifications');
const makeTask = require('../../tasks/make_task');
router.post('/:sid', async(req, res) => {


@@ -4,7 +4,7 @@ const WsRequestor = require('../../utils/ws-requestor');
const CallInfo = require('../../session/call-info');
const {CallDirection} = require('../../utils/constants');
const SmsSession = require('../../session/sms-call-session');
const normalizeJambones = require('../../utils/normalize-jambones');
const { normalizeJambones } = require('@jambonz/verb-specifications');
const {TaskPreconditions} = require('../../utils/constants');
const makeTask = require('../../tasks/make_task');
@@ -34,6 +34,7 @@ router.post('/:partner', async(req, res) => {
carrier: req.params.partner,
messageSid: app.messageSid,
accountSid: app.accountSid,
serviceProviderSid: account.service_provider_sid,
applicationSid: app.applicationSid,
from: req.body.from,
to: req.body.to,


@@ -1,4 +1,4 @@
const { v4: uuidv4 } = require('uuid');
const uuidv4 = require('uuid-random');
const {CallDirection, AllowedSipRecVerbs} = require('./utils/constants');
const {parseSiprecPayload} = require('./utils/siprec-utils');
const CallInfo = require('./session/call-info');
@@ -6,7 +6,7 @@ const HttpRequestor = require('./utils/http-requestor');
const WsRequestor = require('./utils/ws-requestor');
const makeTask = require('./tasks/make_task');
const parseUri = require('drachtio-srf').parseUri;
const normalizeJambones = require('./utils/normalize-jambones');
const { normalizeJambones } = require('@jambonz/verb-specifications');
const dbUtils = require('./utils/db-utils');
const RootSpan = require('./utils/call-tracer');
const listTaskNames = require('./utils/summarize-tasks');
@@ -27,7 +27,11 @@ module.exports = function(srf, logger) {
function initLocals(req, res, next) {
const callId = req.get('Call-ID');
logger.info({callId}, 'new incoming call');
logger.info({
callId,
callingNumber: req.callingNumber,
calledNumber: req.calledNumber
}, 'new incoming call');
if (!req.has('X-Account-Sid')) {
logger.info('getAccountDetails - rejecting call due to missing X-Account-Sid header');
return res.send(500);
@@ -42,7 +46,16 @@ module.exports = function(srf, logger) {
}
if (req.has('X-Authenticated-User')) req.locals.originatingUser = req.get('X-Authenticated-User');
if (req.has('X-MS-Teams-Tenant-FQDN')) req.locals.msTeamsTenant = req.get('X-MS-Teams-Tenant-FQDN');
if (req.has('X-Cisco-Recording-Participant')) {
const ciscoParticipants = req.get('X-Cisco-Recording-Participant');
const regex = /sip:[\d]+@[\d]+\.[\d]+\.[\d]+\.[\d]+/g;
const sipURIs = ciscoParticipants.match(regex);
logger.info(`X-Cisco-Recording-Participant : ${sipURIs} `);
if (sipURIs && sipURIs.length > 0) {
req.locals.calledNumber = sipURIs[0];
req.locals.callingNumber = sipURIs[1];
}
}
next();
}
@@ -90,8 +103,10 @@ module.exports = function(srf, logger) {
.find((p) => p.type === 'application/sdp')
.content;
const {sdp1, sdp2, ...metadata} = await parseSiprecPayload(req, logger);
req.locals.calledNumber = metadata.caller.number;
req.locals.callingNumber = metadata.callee.number;
if (!req.locals.calledNumber && !req.locals.calledNumber) {
req.locals.calledNumber = metadata.caller.number;
req.locals.callingNumber = metadata.callee.number;
}
req.locals = {
...req.locals,
siprec: {
@@ -118,6 +133,7 @@ module.exports = function(srf, logger) {
const {span} = rootSpan.startChildSpan('lookupAccountDetails');
try {
req.locals.accountInfo = await lookupAccountDetails(account_sid);
req.locals.service_provider_sid = req.locals.accountInfo?.account?.service_provider_sid;
span.end();
if (!req.locals.accountInfo.account.is_active) {
logger.info(`Account is inactive or suspended ${account_sid}`);
@@ -224,29 +240,32 @@ module.exports = function(srf, logger) {
* create a requestor that we will use for all http requests we make during the call.
* also create a notifier for call status events (if not needed, its a no-op).
*/
/* allow for caching data - when caching treat retrieved data as immutable */
const app2 = process.env.JAMBONES_MYSQL_REFRESH_TTL ? JSON.parse(JSON.stringify(app)) : app;
if ('WS' === app.call_hook?.method ||
app.call_hook?.url.startsWith('ws://') || app.call_hook?.url.startsWith('wss://')) {
app.requestor = new WsRequestor(logger, account_sid, app.call_hook, accountInfo.account.webhook_secret) ;
app.notifier = app.requestor;
app.call_hook.method = 'WS';
app2.requestor = new WsRequestor(logger, account_sid, app.call_hook, accountInfo.account.webhook_secret) ;
app2.notifier = app.requestor;
app2.call_hook.method = 'WS';
}
else {
app.requestor = new HttpRequestor(logger, account_sid, app.call_hook, accountInfo.account.webhook_secret);
if (app.call_status_hook) app.notifier = new HttpRequestor(logger, account_sid, app.call_status_hook,
app2.requestor = new HttpRequestor(logger, account_sid, app.call_hook, accountInfo.account.webhook_secret);
if (app.call_status_hook) app2.notifier = new HttpRequestor(logger, account_sid, app.call_status_hook,
accountInfo.account.webhook_secret);
else app.notifier = {request: () => {}};
else app2.notifier = {request: () => {}};
}
req.locals.application = app;
const obj = Object.assign({}, app);
delete obj.requestor;
delete obj.notifier;
req.locals.application = app2;
// eslint-disable-next-line no-unused-vars
const {call_hook, call_status_hook, ...appInfo} = obj; // mask sensitive data like user/pass on webhook
logger.info({app: appInfo}, `retrieved application for incoming call to ${req.locals.calledNumber}`);
const {call_hook, call_status_hook, ...appInfo} = app; // mask sensitive data like user/pass on webhook
// eslint-disable-next-line no-unused-vars
const {requestor, notifier, ...loggable} = appInfo;
logger.info({app: loggable}, `retrieved application for incoming call to ${req.locals.calledNumber}`);
req.locals.callInfo = new CallInfo({
req,
app,
app: app2,
direction: CallDirection.Inbound,
traceId: rootSpan.traceId
});
@@ -272,32 +291,39 @@ module.exports = function(srf, logger) {
return next();
}
/* retrieve the application to execute for this inbound call */
const params = Object.assign(['POST', 'WS'].includes(app.call_hook.method) ? {sip: req.msg} : {},
req.locals.callInfo, {
defaults: {
synthesizer: {
vendor: app.speech_synthesis_vendor,
language: app.speech_synthesis_language,
voice: app.speech_synthesis_voice
},
recognizer: {
vendor: app.speech_recognizer_vendor,
language: app.speech_recognizer_language
let json;
if (app.app_json) {
json = JSON.parse(app.app_json);
} else {
const params = Object.assign(['POST', 'WS'].includes(app.call_hook.method) ? { sip: req.msg } : {},
req.locals.callInfo,
{ service_provider_sid: req.locals.service_provider_sid },
{
defaults: {
synthesizer: {
vendor: app.speech_synthesis_vendor,
language: app.speech_synthesis_language,
voice: app.speech_synthesis_voice
},
recognizer: {
vendor: app.speech_recognizer_vendor,
language: app.speech_recognizer_language
}
}
}
});
logger.debug({params}, 'sending initial webhook');
const obj = rootSpan.startChildSpan('performAppWebhook');
span = obj.span;
const b3 = rootSpan.getTracingPropagation();
const httpHeaders = b3 && {b3};
const json = await app.requestor.request('session:new', app.call_hook, params, httpHeaders);
});
logger.debug({ params }, 'sending initial webhook');
const obj = rootSpan.startChildSpan('performAppWebhook');
span = obj.span;
const b3 = rootSpan.getTracingPropagation();
const httpHeaders = b3 && { b3 };
json = await app.requestor.request('session:new', app.call_hook, params, httpHeaders);
}
app.tasks = normalizeJambones(logger, json).map((tdata) => makeTask(logger, tdata));
span.setAttributes({
span?.setAttributes({
'http.statusCode': 200,
'app.tasks': listTaskNames(app.tasks)
});
span.end();
span?.end();
if (0 === app.tasks.length) throw new Error('no application provided');
if (siprec) {


@@ -1,6 +1,6 @@
const {CallDirection, CallStatus} = require('../utils/constants');
const parseUri = require('drachtio-srf').parseUri;
const { v4: uuidv4 } = require('uuid');
const uuidv4 = require('uuid-random');
/**
* @classdesc Represents the common information for all calls
* that is provided in call status webhooks
@@ -33,6 +33,23 @@ class CallInfo {
this.callStatus = CallStatus.Trying;
this.originatingSipIp = req.get('X-Forwarded-For');
this.originatingSipTrunkName = req.get('X-Originating-Carrier');
const {siprec} = req.locals;
if (siprec) {
const caller = parseUri(req.locals.callingNumber);
const callee = parseUri(req.locals.calledNumber);
this.participants = [
{
participant: 'caller',
uriUser: caller.user,
uriHost: caller.host
},
{
participant: 'callee',
uriUser: callee.user,
uriHost: callee.host
}
];
}
}
else if (opts.parentCallInfo) {
// outbound call that is a child of an existing call


@@ -13,7 +13,7 @@ const moment = require('moment');
const assert = require('assert');
const sessionTracker = require('./session-tracker');
const makeTask = require('../tasks/make_task');
const normalizeJambones = require('../utils/normalize-jambones');
const { normalizeJambones } = require('@jambonz/verb-specifications');
const listTaskNames = require('../utils/summarize-tasks');
const HttpRequestor = require('../utils/http-requestor');
const WsRequestor = require('../utils/ws-requestor');
@@ -63,6 +63,7 @@ class CallSession extends Emitter {
assert(rootSpan);
this._recordState = RecordState.RecordingOff;
this._notifyEvents = false;
this.tmpFiles = new Set();
@@ -81,8 +82,18 @@ class CallSession extends Emitter {
this._pool = srf.locals.dbHelpers.pool;
const handover = (newRequestor) => {
this.logger.info(`handover to new base url ${newRequestor.url}`);
this.requestor.removeAllListeners();
this.application.requestor = newRequestor;
this.requestor.on('command', this._onCommand.bind(this));
this.requestor.on('connection-dropped', this._onWsConnectionDropped.bind(this));
this.requestor.on('handover', handover.bind(this));
};
this.requestor.on('command', this._onCommand.bind(this));
this.requestor.on('connection-dropped', this._onWsConnectionDropped.bind(this));
this.requestor.on('handover', handover.bind(this));
}
/**
@@ -103,6 +114,10 @@ class CallSession extends Emitter {
return this.callInfo.applicationSid;
}
get callStatus() {
return this.callInfo.callStatus;
}
/**
* SIP call-id for the call
*/
@@ -255,12 +270,19 @@ class CallSession extends Emitter {
return this.backgroundGatherTask;
}
get isListenEnabled() {
return this.backgroundListenTask;
}
get b3() {
return this.rootSpan?.getTracingPropagation();
}
get recordState() { return this._recordState; }
get notifyEvents() { return this._notifyEvents; }
set notifyEvents(notify) { this._notifyEvents = !!notify; }
set globalSttHints({hints, hintsBoost}) {
this._globalSttHints = {hints, hintsBoost};
}
@@ -443,28 +465,90 @@ class CallSession extends Emitter {
}
}
async startBackgroundListen(opts) {
if (this.isListenEnabled) {
this.logger.info('CallSession:startBackgroundListen - listen is already enabled, ignoring request');
return;
}
try {
this.logger.debug({opts}, 'CallSession:startBackgroundListen');
const t = normalizeJambones(this.logger, [opts]);
this.backgroundListenTask = makeTask(this.logger, t[0]);
const resources = await this._evaluatePreconditions(this.backgroundListenTask);
const {span, ctx} = this.rootSpan.startChildSpan(`background-gather:${this.backgroundListenTask.summary}`);
this.backgroundListenTask.span = span;
this.backgroundListenTask.ctx = ctx;
this.backgroundListenTask.exec(this, resources)
.then(() => {
this.logger.info('CallSession:startBackgroundListen: listen completed');
this.backgroundListenTask && this.backgroundListenTask.removeAllListeners();
this.backgroundListenTask && this.backgroundListenTask.span.end();
this.backgroundListenTask = null;
return;
})
.catch((err) => {
this.logger.info({err}, 'CallSession:startBackgroundListen: listen threw error');
this.backgroundListenTask && this.backgroundListenTask.removeAllListeners();
this.backgroundListenTask && this.backgroundListenTask.span.end();
this.backgroundListenTask = null;
});
} catch (err) {
this.logger.info({err, opts}, 'CallSession:startBackgroundListen - Error creating listen task');
}
}
async stopBackgroundListen() {
try {
if (this.backgroundListenTask) {
this.backgroundListenTask.removeAllListeners();
this.backgroundListenTask.kill().catch(() => {});
}
} catch (err) {
this.logger.info({err}, 'CallSession:stopBackgroundListen - Error stopping listen task');
}
this.backgroundListenTask = null;
}
async enableBotMode(gather, autoEnable) {
try {
const t = normalizeJambones(this.logger, [gather]);
this.backgroundGatherTask = makeTask(this.logger, t[0]);
const task = makeTask(this.logger, t[0]);
if (this.isBotModeEnabled) {
const currInput = this.backgroundGatherTask.input;
const newInput = task.input;
if (JSON.stringify(currInput) === JSON.stringify(newInput)) {
this.logger.info('CallSession:enableBotMode - bot mode currently enabled, ignoring request to start again');
return;
}
else {
this.logger.info({currInput, newInput},
'CallSession:enableBotMode - restarting background gather to apply new input type');
this.backgroundGatherTask.sticky = false;
this.disableBotMode();
}
}
this.backgroundGatherTask = task;
this._bargeInEnabled = true;
this.backgroundGatherTask
.once('dtmf', this._clearTasks.bind(this))
.once('vad', this._clearTasks.bind(this))
.once('transcription', this._clearTasks.bind(this))
.once('timeout', this._clearTasks.bind(this));
.once('dtmf', this._clearTasks.bind(this, this.backgroundGatherTask))
.once('vad', this._clearTasks.bind(this, this.backgroundGatherTask))
.once('transcription', this._clearTasks.bind(this, this.backgroundGatherTask))
.once('timeout', this._clearTasks.bind(this, this.backgroundGatherTask));
this.logger.info({gather}, 'CallSession:enableBotMode - starting background gather');
const resources = await this._evaluatePreconditions(this.backgroundGatherTask);
const {span, ctx} = this.rootSpan.startChildSpan(`background-gather:${this.backgroundGatherTask.summary}`);
this.backgroundGatherTask.span = span;
this.backgroundGatherTask.ctx = ctx;
this.backgroundGatherTask.sticky = autoEnable;
this.backgroundGatherTask.exec(this, resources)
.then(() => {
this.logger.info('CallSession:enableBotMode: gather completed');
this.backgroundGatherTask && this.backgroundGatherTask.removeAllListeners();
this.backgroundGatherTask && this.backgroundGatherTask.span.end();
const sticky = this.backgroundGatherTask?.sticky;
this.backgroundGatherTask = null;
if (autoEnable && !this.callGone && !this._stopping && this._bargeInEnabled) {
if (sticky && !this.callGone && !this._stopping && this._bargeInEnabled) {
this.logger.info('CallSession:enableBotMode: restarting background gather');
setImmediate(() => this.enableBotMode(gather, true));
}
@@ -551,7 +635,9 @@ class CallSession extends Emitter {
api_key: credential.api_key,
region: credential.region,
use_custom_stt: credential.use_custom_stt,
custom_stt_endpoint: credential.custom_stt_endpoint
custom_stt_endpoint: credential.custom_stt_endpoint,
use_custom_tts: credential.use_custom_tts,
custom_tts_endpoint: credential.custom_tts_endpoint
};
}
else if ('wellsaid' === vendor) {
@@ -560,6 +646,44 @@ class CallSession extends Emitter {
api_key: credential.api_key
};
}
else if ('nuance' === vendor) {
return {
speech_credential_sid: credential.speech_credential_sid,
client_id: credential.client_id,
secret: credential.secret,
nuance_tts_uri: credential.nuance_tts_uri,
nuance_stt_uri: credential.nuance_stt_uri
};
}
else if ('deepgram' === vendor) {
return {
speech_credential_sid: credential.speech_credential_sid,
api_key: credential.api_key
};
}
else if ('soniox' === vendor) {
return {
speech_credential_sid: credential.speech_credential_sid,
api_key: credential.api_key
};
}
else if ('ibm' === vendor) {
return {
speech_credential_sid: credential.speech_credential_sid,
tts_api_key: credential.tts_api_key,
tts_region: credential.tts_region,
stt_api_key: credential.stt_api_key,
stt_region: credential.stt_region
};
}
else if (vendor.startsWith('custom:')) {
return {
speech_credential_sid: credential.speech_credential_sid,
auth_token: credential.auth_token,
custom_stt_url: credential.custom_stt_url,
custom_tts_url: credential.custom_tts_url
};
}
}
else {
writeAlerts({
@@ -584,15 +708,22 @@ class CallSession extends Emitter {
const stackNum = this.stackIdx;
const task = this.tasks.shift();
this.logger.info(`CallSession:exec starting task #${stackNum}:${taskNum}: ${task.name}`);
this._notifyTaskStatus(task, {event: 'starting'});
try {
const resources = await this._evaluatePreconditions(task);
let skip = false;
this.currentTask = task;
if (TaskName.Gather === task.name && this.isBotModeEnabled) {
const timeout = task.timeout;
this.logger.info(`CallSession:exec skipping #${stackNum}:${taskNum}: ${task.name}`);
this.backgroundGatherTask.updateTimeout(timeout);
if (this.backgroundGatherTask.updateTaskInProgress(task) !== false) {
this.logger.info(`CallSession:exec skipping #${stackNum}:${taskNum}: ${task.name}`);
skip = true;
}
else {
this.logger.info('CallSession:exec disabling bot mode to start gather with new options');
this.disableBotMode();
}
}
else {
if (!skip) {
const {span, ctx} = this.rootSpan.startChildSpan(`verb:${task.summary}`);
task.span = span;
task.ctx = ctx;
@@ -601,6 +732,7 @@ class CallSession extends Emitter {
}
this.currentTask = null;
this.logger.info(`CallSession:exec completed task #${stackNum}:${taskNum}: ${task.name}`);
this._notifyTaskStatus(task, {event: 'finished'});
} catch (err) {
task.span?.end();
this.currentTask = null;
@@ -613,7 +745,7 @@ class CallSession extends Emitter {
}
}
if (0 === this.tasks.length && this.hasStableDialog && this.requestor instanceof WsRequestor) {
if (0 === this.tasks.length && this.requestor instanceof WsRequestor && !this.callGone) {
let span;
try {
const {span} = this.rootSpan.startChildSpan('waiting for commands');
@@ -624,7 +756,7 @@ class CallSession extends Emitter {
'async.request.command': command
});
span.end();
if (!this.hasStableDialog || this.callGone) break;
if (this.callGone) break;
} catch (err) {
span.end();
this.logger.info(err, 'CallSession:exec - error waiting for new commands');
@@ -646,7 +778,6 @@ class CallSession extends Emitter {
trackTmpFile(path) {
// TODO: don't add if its already in the list (should we make it a set?)
this.logger.debug(`adding tmp file to track ${path}`);
this.tmpFiles.add(path);
}
@@ -983,14 +1114,32 @@ class CallSession extends Emitter {
}
}
kill() {
kill(onBackgroundGatherBargein = false) {
if (this.isConfirmCallSession) this.logger.debug('CallSession:kill (ConfirmSession)');
else this.logger.info('CallSession:kill');
if (this.currentTask) {
this.currentTask.kill(this);
this.currentTask = null;
}
this.tasks = [];
if (onBackgroundGatherBargein) {
/* search for a config with bargein disabled */
while (this.tasks.length) {
const t = this.tasks[0];
if (t.name === TaskName.Config && t.bargeIn?.enable === false) {
/* found it, clear to that point and remove the disable
because we likely already received a partial transcription
and we don't want to kill the background gather before we
get the full transcription.
*/
delete t.bargeIn.enable;
this._bargeInEnabled = false;
this.logger.info('CallSession:kill - found bargein disabled in the stack, clearing to that point');
break;
}
this.tasks.shift();
}
}
else this.tasks = [];
this.taskIdx = 0;
}
@@ -1003,14 +1152,14 @@ class CallSession extends Emitter {
_injectTasks(newTasks) {
const gatherPos = this.tasks.map((t) => t.name).indexOf(TaskName.Gather);
const currentlyExecutingGather = this.currentTask?.name === TaskName.Gather;
/*
this.logger.debug({
currentTaskList: listTaskNames(this.tasks),
newContent: listTaskNames(newTasks),
currentlyExecutingGather,
gatherPos
}, 'CallSession:_injectTasks - starting');
*/
const killGather = () => {
this.logger.debug('CallSession:_injectTasks - killing current gather because we have new content');
this.currentTask.kill(this);
@@ -1019,10 +1168,11 @@ class CallSession extends Emitter {
if (-1 === gatherPos) {
/* no gather in the stack simply append tasks */
this.tasks.push(...newTasks);
/*
this.logger.debug({
updatedTaskList: listTaskNames(this.tasks)
}, 'CallSession:_injectTasks - completed (simple append)');
*/
/* we do need to kill the current gather if we are executing one */
if (currentlyExecutingGather) killGather();
return;
@@ -1050,12 +1200,10 @@ class CallSession extends Emitter {
this.replaceApplication(t);
}
else if (process.env.JAMBONES_INJECT_CONTENT) {
this.logger.debug({tasks: listTaskNames(t)}, 'CallSession:_onCommand - queueing tasks (injecting content)');
this._injectTasks(t);
this.logger.info({tasks: listTaskNames(this.tasks)}, 'CallSession:_onCommand - updated task list');
}
else {
this.logger.debug({tasks: listTaskNames(t)}, 'CallSession:_onCommand - queueing tasks');
this.tasks.push(...t);
this.logger.info({tasks: listTaskNames(this.tasks)}, 'CallSession:_onCommand - updated task list');
}
@@ -1099,7 +1247,7 @@ class CallSession extends Emitter {
this.logger.info(`CallSession:_onCommand - invalid command ${command}`);
}
if (this.wakeupResolver) {
this.logger.debug({resolution}, 'CallSession:_onCommand - got commands, waking up..');
//this.logger.debug({resolution}, 'CallSession:_onCommand - got commands, waking up..');
this.wakeupResolver(resolution);
this.wakeupResolver = null;
}
@@ -1169,11 +1317,6 @@ class CallSession extends Emitter {
});
//ep.cs = this;
this.ep = ep;
ep.set({
hangup_after_bridge: false,
park_after_bridge: true
}).catch((err) => this.logger.error({err}, 'Error setting park_after_bridge'));
this.logger.debug(`allocated endpoint ${ep.uuid}`);
this.ep.on('destroy', () => {
@@ -1246,7 +1389,6 @@ class CallSession extends Emitter {
return;
}
this.ep = await this.ms.createEndpoint({remoteSdp: this.dlg.remote.sdp});
await this.ep.set('hangup_after_bridge', false);
await this.dlg.modify(this.ep.local.sdp);
this.logger.debug('CallSession:replaceEndpoint completed');
@@ -1274,6 +1416,7 @@ class CallSession extends Emitter {
}
this.tmpFiles.clear();
this.requestor && this.requestor.close();
this.notifier && this.notifier.close();
this.rootSpan && this.rootSpan.end();
}
@@ -1335,9 +1478,15 @@ class CallSession extends Emitter {
async _onReinvite(req, res) {
try {
if (this.ep) {
const newSdp = await this.ep.modify(req.body);
res.send(200, {body: newSdp});
this.logger.info({offer: req.body, answer: newSdp}, 'handling reINVITE');
if (this.isSipRecCallSession) {
this.logger.info('handling reINVITE for siprec call');
res.send(200, {body: this.ep.local.sdp});
}
else {
const newSdp = await this.ep.modify(req.body);
res.send(200, {body: newSdp});
this.logger.info({offer: req.body, answer: newSdp}, 'handling reINVITE');
}
}
else if (this.currentTask && this.currentTask.name === TaskName.Dial) {
this.logger.info('handling reINVITE after media has been released');
@@ -1383,7 +1532,6 @@ class CallSession extends Emitter {
}
if (!this.ep) {
this.ep = await this.ms.createEndpoint({remoteSdp: this.req.body});
await this.ep.set('hangup_after_bridge', false);
}
return {ms: this.ms, ep: this.ep};
}
@@ -1556,7 +1704,7 @@ class CallSession extends Emitter {
* @param {number} sipStatus - current sip status
* @param {number} [duration] - duration of a completed call, in seconds
*/
_notifyCallStatusChange({callStatus, sipStatus, sipReason, duration}) {
async _notifyCallStatusChange({callStatus, sipStatus, sipReason, duration}) {
if (this.callMoved) return;
/* race condition: we hang up at the same time as the caller */
@@ -1576,7 +1724,7 @@ class CallSession extends Emitter {
try {
const b3 = this.b3;
const httpHeaders = b3 && {b3};
this.notifier.request('call:status', this.call_status_hook, this.callInfo.toJSON(), httpHeaders);
await this.notifier.request('call:status', this.call_status_hook, this.callInfo.toJSON(), httpHeaders);
span.end();
} catch (err) {
span.end();
@@ -1589,6 +1737,25 @@ class CallSession extends Emitter {
.catch((err) => this.logger.error(err, 'redis error'));
}
/**
* notifyTaskError - only used when websocket connection is used instead of webhooks
*/
_notifyTaskError(obj) {
if (this.requestor instanceof WsRequestor) {
this.requestor.request('jambonz:error', '/error', obj)
.catch((err) => this.logger.debug({err}, 'CallSession:_notifyTaskError - Error sending'));
}
}
_notifyTaskStatus(task, evt) {
if (this.notifyEvents && this.requestor instanceof WsRequestor) {
const obj = {...evt, id: task.id, name: task.name};
this.requestor.request('verb:status', '/status', obj)
.catch((err) => this.logger.debug({err}, 'CallSession:_notifyTaskStatus - Error sending'));
}
}
_awaitCommandsOrHangup() {
assert(!this.wakeupResolver);
return new Promise((resolve, reject) => {
@@ -1597,11 +1764,12 @@ class CallSession extends Emitter {
});
}
_clearTasks(evt) {
if (this.requestor instanceof WsRequestor) {
_clearTasks(backgroundGather, evt) {
if (this.requestor instanceof WsRequestor && !backgroundGather.cleared) {
this.logger.info({evt}, 'CallSession:_clearTasks on event from background gather');
try {
this.kill();
backgroundGather.cleared = true;
this.kill(true);
} catch (err) {}
}
}

View File

@@ -1,6 +1,7 @@
const InboundCallSession = require('./inbound-call-session');
const {createSipRecPayload} = require('../utils/siprec-utils');
const {CallStatus} = require('../utils/constants');
const {parseSiprecPayload} = require('../utils/siprec-utils');
/**
* @classdesc Subclass of InboundCallSession. This represents a CallSession that is
* established for an inbound SIPREC call.
@@ -16,6 +17,32 @@ class SipRecCallSession extends InboundCallSession {
this.metadata = metadata;
}
async _onReinvite(req, res) {
try {
this.logger.info(req.payload, 'SipRec Re-INVITE payload');
const {sdp1: reSdp1, sdp2: reSdp2, metadata: reMetadata} = await parseSiprecPayload(req, this.logger);
this.sdp1 = reSdp1;
this.sdp2 = reSdp2;
this.metadata = reMetadata;
if (this.ep && this.ep2) {
let remoteSdp = this.sdp1.replace(/sendonly/, 'sendrecv');
const newSdp1 = await this.ep.modify(remoteSdp);
remoteSdp = this.sdp2.replace(/sendonly/, 'sendrecv');
const newSdp2 = await this.ep2.modify(remoteSdp);
const combinedSdp = await createSipRecPayload(newSdp1, newSdp2, this.logger);
res.send(200, {body: combinedSdp});
this.logger.info({offer: req.body, answer: combinedSdp}, 'SipRec handling reINVITE');
}
else {
this.logger.info('got SipRec reINVITE but no endpoint and media has not been released');
res.send(488);
}
} catch (err) {
this.logger.error(err, 'Error handling reinvite');
}
}
async answerSipRecCall() {
try {
this.ms = this.getMS();

View File

@@ -2,7 +2,7 @@ const Task = require('./task');
const Emitter = require('events');
const ConfirmCallSession = require('../session/confirm-call-session');
const {TaskName, TaskPreconditions, BONG_TONE} = require('../utils/constants');
const normalizeJambones = require('../utils/normalize-jambones');
const { normalizeJambones } = require('@jambonz/verb-specifications');
const makeTask = require('./make_task');
const bent = require('bent');
const assert = require('assert');
@@ -108,6 +108,10 @@ class Conference extends Task {
async kill(cs) {
super.kill(cs);
this.logger.info(`Conference:kill ${this.confName}`);
if (this._playSession) {
this._playSession.kill();
this._playSession = null;
}
this.emitter.emit('kill');
await this._doFinalMemberCheck(cs);
if (this.ep && this.ep.connected) this.ep.conn.removeAllListeners('esl::event::CUSTOM::*') ;
@@ -427,13 +431,19 @@ class Conference extends Task {
.catch((err) => this.logger.info({err}, 'Error deafing or undeafing participant'));
}
if (wait_hook) {
if (this.wait_hook)
delete this.wait_hook.url;
this.wait_hook = {url: wait_hook};
}
if (hookOnly && this._playSession) {
this._playSession.kill();
this._playSession = null;
}
if (wait_hook && this.conf_hold_status === 'hold') {
if (this.wait_hook?.url && this.conf_hold_status === 'hold') {
const {dlg} = cs;
this._doWaitHookWhileOnHold(cs, dlg, wait_hook);
this._doWaitHookWhileOnHold(cs, dlg, this.wait_hook);
}
else if (this.conf_hold_status !== 'hold' && this._playSession) {
this._playSession.kill();
@@ -444,7 +454,9 @@ class Conference extends Task {
async _doWaitHookWhileOnHold(cs, dlg, wait_hook) {
do {
try {
const tasks = await this._playHook(cs, dlg, wait_hook);
let tasks = [];
if (wait_hook.url)
tasks = await this._playHook(cs, dlg, wait_hook.url);
if (0 === tasks.length) break;
} catch (err) {
if (!this.killed) {
@@ -571,6 +583,10 @@ class Conference extends Task {
*/
_kicked(cs, dlg) {
this.logger.info(`Conference:kicked - I was dropped from conference ${this.confName}, task is complete`);
if (this._playSession) {
this._playSession.kill();
this._playSession = null;
}
this.replaceEndpointAndEnd(cs);
}

View File

@@ -8,9 +8,14 @@ class TaskConfig extends Task {
'synthesizer',
'recognizer',
'bargeIn',
'record'
'record',
'listen'
].forEach((k) => this[k] = this.data[k] || {});
if ('notifyEvents' in this.data) {
this.notifyEvents = !!this.data.notifyEvents;
}
if (this.bargeIn.enable) {
this.gatherOpts = {
verb: 'gather',
@@ -26,7 +31,7 @@ class TaskConfig extends Task {
});
}
if (this.bargeIn.sticky) this.autoEnable = true;
this.preconditions = (this.bargeIn.enable || this.record?.action || this.data.amd) ?
this.preconditions = (this.bargeIn.enable || this.record?.action || this.listen?.url || this.data.amd) ?
TaskPreconditions.Endpoint :
TaskPreconditions.None;
}
@@ -34,8 +39,9 @@ class TaskConfig extends Task {
get name() { return TaskName.Config; }
get hasSynthesizer() { return Object.keys(this.synthesizer).length; }
get hasRecognizer() { return Object.keys(this.recognizer).length; }
get hasRecording() { return Object.keys(this.record).length; }
get hasListen() { return Object.keys(this.listen).length; }
get summary() {
const phrase = [];
@@ -50,13 +56,23 @@ class TaskConfig extends Task {
const s = `{${v},${l}}`;
phrase.push(`set recognizer${s}`);
}
if (this.hasRecording) phrase.push(this.record.action);
if (this.hasListen) {
phrase.push(this.listen.enable ? `listen ${this.listen.url}` : 'stop listen');
}
if (this.data.amd) phrase.push('enable amd');
return `${this.name}{${phrase.join(',')}`;
if (this.notifyEvents) phrase.push(`event notification ${this.notifyEvents ? 'on' : 'off'}`);
return `${this.name}{${phrase.join(',')}}`;
}
async exec(cs, {ep} = {}) {
await super.exec(cs);
if (this.notifyEvents) {
this.logger.debug(`turning event notification ${this.notifyEvents ? 'on' : 'off'}`);
cs.notifyEvents = !!this.data.notifyEvents;
}
if (this.data.amd) {
this.startAmd = cs.startAmd;
this.stopAmd = cs.stopAmd;
@@ -136,17 +152,31 @@ class TaskConfig extends Task {
this.logger.info({err}, 'Config: error starting recording');
}
}
if (this.hasListen) {
const {enable, ...opts} = this.listen;
if (enable) {
this.logger.debug({opts}, 'Config: enabling listen');
cs.startBackgroundListen({verb: 'listen', ...opts});
} else {
this.logger.info('Config: disabling listen');
cs.stopBackgroundListen();
}
}
}
async kill(cs) {
super.kill(cs);
if (this.ep && this.stopAmd) this.stopAmd(this.ep, this);
//if (this.ep && this.stopAmd) this.stopAmd(this.ep, this);
}
_onAmdEvent(cs, evt) {
this.logger.info({evt}, 'Config:_onAmdEvent');
const {actionHook} = this.data.amd;
this.performHook(cs, actionHook, evt);
this.performHook(cs, actionHook, evt)
.catch((err) => {
this.logger.error({err}, 'Config:_onAmdEvent - error calling actionHook');
});
}
}
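
For reference, a minimal sketch of an application payload exercising the config options added above (notifyEvents and the nested listen object). Property names for the listen block beyond enable and url are assumptions drawn from the TaskListen changes later in this diff; the URL is a placeholder.

// Hypothetical jambonz application payload; not part of this changeset.
const app = [
  {
    verb: 'config',
    notifyEvents: true,                        // stream verb:status events over the websocket connection
    bargeIn: {enable: true, input: ['speech']},
    listen: {
      enable: true,                            // enable: false maps to cs.stopBackgroundListen()
      url: 'wss://example.com/audio-stream',   // placeholder websocket endpoint
      mixType: 'stereo'                        // option name taken from the listen verb, assumed valid here
    }
  },
  {verb: 'say', text: 'How can I help you today?'}
];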

View File

@@ -400,15 +400,19 @@ class TaskDial extends Task {
let fqdn;
if (!sbcAddress) throw new Error('no SBC found for outbound call');
this.headers = {
'X-Account-Sid': cs.accountSid,
...(req && req.has('X-CID') && {'X-CID': req.get('X-CID')}),
...(req && req.has('P-Asserted-Identity') && {'P-Asserted-Identity': req.get('P-Asserted-Identity')}),
// Spread this.headers last so that application-supplied headers override the defaults above.
...this.headers
};
const opts = {
headers: req && req.has('X-CID') ? Object.assign(this.headers, {'X-CID': req.get('X-CID')}) : this.headers,
headers: this.headers,
proxy: `sip:${sbcAddress}`,
callingNumber: this.callerId || req.callingNumber
};
opts.headers = {
...opts.headers,
'X-Account-Sid': cs.accountSid
};
const t = this.target.find((t) => t.type === 'teams');
if (t) {
@@ -689,7 +693,10 @@ class TaskDial extends Task {
_onAmdEvent(cs, evt) {
this.logger.info({evt}, 'Dial:_onAmdEvent');
const {actionHook} = this.data.amd;
this.performHook(cs, actionHook, evt);
this.performHook(cs, actionHook, evt)
.catch((err) => {
this.logger.error({err}, 'Dial:_onAmdEvent - error calling actionHook');
});
}
}
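
The header merging above leans on object-spread precedence: later keys win, so spreading this.headers last lets application-supplied headers override the defaults, while re-assigning X-Account-Sid afterwards keeps it authoritative. A standalone illustration with placeholder values:

// Node.js illustration of spread precedence (placeholder values, not jambonz code).
const defaults = {'X-Account-Sid': 'AC-real', 'X-CID': 'abc123'};
const fromApp = {'X-CID': 'override-me', 'X-Account-Sid': 'AC-spoofed'};

// application headers spread last, then X-Account-Sid pinned back to the trusted value
const headers = {...defaults, ...fromApp, 'X-Account-Sid': 'AC-real'};
console.log(headers); // { 'X-Account-Sid': 'AC-real', 'X-CID': 'override-me' }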

View File

@@ -3,7 +3,7 @@ const {TaskName, TaskPreconditions} = require('../../utils/constants');
const Intent = require('./intent');
const DigitBuffer = require('./digit-buffer');
const Transcription = require('./transcription');
const normalizeJambones = require('../../utils/normalize-jambones');
const { normalizeJambones } = require('@jambonz/verb-specifications');
class Dialogflow extends Task {
constructor(logger, opts) {

View File

@@ -1,7 +1,7 @@
const Task = require('./task');
const Emitter = require('events');
const ConfirmCallSession = require('../session/confirm-call-session');
const normalizeJambones = require('../utils/normalize-jambones');
const { normalizeJambones } = require('@jambonz/verb-specifications');
const makeTask = require('./make_task');
const {TaskName, TaskPreconditions, QueueResults, KillReason} = require('../utils/constants');
const bent = require('bent');

View File

@@ -3,25 +3,27 @@ const {
TaskName,
TaskPreconditions,
GoogleTranscriptionEvents,
NuanceTranscriptionEvents,
AwsTranscriptionEvents,
AzureTranscriptionEvents
AzureTranscriptionEvents,
DeepgramTranscriptionEvents,
SonioxTranscriptionEvents,
IbmTranscriptionEvents,
NvidiaTranscriptionEvents,
JambonzTranscriptionEvents
} = require('../utils/constants');
const makeTask = require('./make_task');
const assert = require('assert');
//const GATHER_STABILITY_THRESHOLD = Number(process.env.JAMBONZ_GATHER_STABILITY_THRESHOLD || 0.7);
const compileTranscripts = (logger, evt, arr) => {
//logger.debug({arr, evt}, 'compile transcripts');
if (!Array.isArray(arr) || arr.length === 0) return;
let t = '';
for (const a of arr) {
//logger.debug(`adding ${a.alternatives[0].transcript}`);
t += ` ${a.alternatives[0].transcript}`;
}
t += ` ${evt.alternatives[0].transcript}`;
evt.alternatives[0].transcript = t.trim();
//logger.debug(`compiled transcript: ${evt.alternatives[0].transcript}`);
};
class TaskGather extends Task {
@@ -29,8 +31,20 @@ class TaskGather extends Task {
super(logger, opts);
this.preconditions = TaskPreconditions.Endpoint;
const {
setChannelVarsForStt,
normalizeTranscription,
removeSpeechListeners,
setSpeechCredentialsAtRuntime,
compileSonioxTranscripts
} = require('../utils/transcription-utils')(logger);
this.setChannelVarsForStt = setChannelVarsForStt;
this.normalizeTranscription = normalizeTranscription;
this.removeSpeechListeners = removeSpeechListeners;
this.compileSonioxTranscripts = compileSonioxTranscripts;
[
'finishOnKey', 'hints', 'input', 'numDigits', 'minDigits', 'maxDigits',
'finishOnKey', 'input', 'numDigits', 'minDigits', 'maxDigits',
'interDigitTimeout', 'partialResultHook', 'bargein', 'dtmfBargein',
'speechTimeout', 'timeout', 'say', 'play'
].forEach((k) => this[k] = this.data[k]);
@@ -40,54 +54,31 @@ class TaskGather extends Task {
/* timeout of zero means no timeout */
this.timeout = this.timeout === 0 ? 0 : (this.timeout || 15) * 1000;
this.interim = !!this.partialResultHook || this.bargein;
this.interim = !!this.partialResultHook || this.bargein || (this.timeout > 0);
this.listenDuringPrompt = this.data.listenDuringPrompt === false ? false : true;
this.minBargeinWordCount = this.data.minBargeinWordCount || 0;
this.minBargeinWordCount = this.data.minBargeinWordCount || 1;
if (this.data.recognizer) {
const recognizer = this.data.recognizer;
this.vendor = recognizer.vendor;
this.language = recognizer.language;
this.hints = recognizer.hints || [];
this.hintsBoost = recognizer.hintsBoost;
this.profanityFilter = recognizer.profanityFilter;
this.punctuation = !!recognizer.punctuation;
this.enhancedModel = !!recognizer.enhancedModel;
this.model = recognizer.model || 'command_and_search';
this.words = !!recognizer.words;
this.singleUtterance = recognizer.singleUtterance || true;
this.diarization = !!recognizer.diarization;
this.diarizationMinSpeakers = recognizer.diarizationMinSpeakers || 0;
this.diarizationMaxSpeakers = recognizer.diarizationMaxSpeakers || 0;
this.interactionType = recognizer.interactionType || 'unspecified';
this.naicsCode = recognizer.naicsCode || 0;
this.altLanguages = recognizer.altLanguages || [];
/* let credentials be supplied in the recognizer object at runtime */
this.sttCredentials = setSpeechCredentialsAtRuntime(recognizer);
/* continuous ASR (i.e. compile transcripts until a special timeout or dtmf key) */
this.asrTimeout = typeof recognizer.asrTimeout === 'number' ? recognizer.asrTimeout * 1000 : 0;
if (this.asrTimeout > 0) this.asrDtmfTerminationDigit = recognizer.asrDtmfTerminationDigit;
this.isContinuousAsr = this.asrTimeout > 0;
/* vad: if provided, we dont connect to recognizer until voice activity is detected */
const {enable, voiceMs = 0, mode = -1} = recognizer.vad || {};
this.vad = {enable, voiceMs, mode};
/* aws options */
this.vocabularyName = recognizer.vocabularyName;
this.vocabularyFilterName = recognizer.vocabularyFilterName;
this.filterMethod = recognizer.filterMethod;
/* microsoft options */
this.outputFormat = recognizer.outputFormat || 'simple';
this.profanityOption = recognizer.profanityOption || 'raw';
this.requestSnr = recognizer.requestSnr || false;
this.initialSpeechTimeoutMs = recognizer.initialSpeechTimeoutMs || 0;
this.azureServiceEndpoint = recognizer.azureServiceEndpoint;
this.azureSttEndpointId = recognizer.azureSttEndpointId;
}
else {
this.hints = [];
this.altLanguages = [];
if (Array.isArray(this.data.recognizer.hints) &&
0 == this.data.recognizer.hints.length && process.env.JAMBONES_GATHER_CLEAR_GLOBAL_HINTS_ON_EMPTY_HINTS) {
logger.debug('Gather: an empty hints array was supplied, so we will mask global hints');
this.maskGlobalSttHints = true;
}
this.data.recognizer.hints = this.data.recognizer.hints || [];
this.data.recognizer.altLanguages = this.data.recognizer.altLanguages || [];
}
else this.data.recognizer = {hints: [], altLanguages: []};
this.digitBuffer = '';
this._earlyMedia = this.data.earlyMedia === true;
@@ -103,13 +94,21 @@ class TaskGather extends Task {
/* buffer speech for continuous asr */
this._bufferedTranscripts = [];
/* buffer for soniox transcripts */
this._sonioxTranscripts = [];
this.parentTask = parentTask;
this.partialTranscriptsCount = 0;
}
get name() { return TaskName.Gather; }
get needsStt() { return this.input.includes('speech'); }
get wantsSingleUtterance() {
return this.data.recognizer?.singleUtterance === true;
}
get earlyMedia() {
return (this.sayTask && this.sayTask.earlyMedia) ||
(this.playTask && this.playTask.earlyMedia);
@@ -131,24 +130,28 @@ class TaskGather extends Task {
}
async exec(cs, {ep}) {
this.logger.debug('Gather:exec');
this.logger.debug({options: this.data}, 'Gather:exec');
await super.exec(cs);
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
const {getNuanceAccessToken, getIbmAccessToken} = cs.srf.locals.dbHelpers;
if (cs.hasGlobalSttHints) {
if (cs.hasGlobalSttHints && !this.maskGlobalSttHints) {
const {hints, hintsBoost} = cs.globalSttHints;
this.hints = this.hints.concat(hints);
if (!this.hintsBoost && hintsBoost) this.hintsBoost = hintsBoost;
this.logger.debug({hints: this.hints, hintsBoost: this.hintsBoost},
const setOfHints = new Set(this.data.recognizer.hints
.concat(hints)
.filter((h) => typeof h === 'string' && h.length > 0));
this.data.recognizer.hints = [...setOfHints];
if (!this.data.recognizer.hintsBoost && hintsBoost) this.data.recognizer.hintsBoost = hintsBoost;
this.logger.debug({hints: this.data.recognizer.hints, hintsBoost: this.data.recognizer.hintsBoost},
'Gather:exec - applying global sttHints');
}
if (cs.hasAltLanguages) {
this.altLanguages = this.altLanguages.concat(cs.altLanguages);
this.data.recognizer.altLanguages = this.data.recognizer.altLanguages.concat(cs.altLanguages);
this.logger.debug({altLanguages: this.altLanguages},
'Gather:exec - applying altLanguages');
}
if (cs.hasGlobalSttPunctuation) {
this.punctuation = cs.globalSttPunctuation;
if (cs.hasGlobalSttPunctuation && !this.data.recognizer.punctuation) {
this.data.recognizer.punctuation = cs.globalSttPunctuation;
}
if (!this.isContinuousAsr && cs.isContinuousAsr) {
this.isContinuousAsr = true;
@@ -159,10 +162,28 @@ class TaskGather extends Task {
asrDtmfTerminationDigit: this.asrDtmfTerminationDigit
}, 'Gather:exec - enabling continuous ASR since it is turned on for the session');
}
const {JAMBONZ_GATHER_EARLY_HINTS_MATCH, JAMBONES_GATHER_EARLY_HINTS_MATCH} = process.env;
if ((JAMBONZ_GATHER_EARLY_HINTS_MATCH || JAMBONES_GATHER_EARLY_HINTS_MATCH) && this.needsStt &&
!this.isContinuousAsr &&
this.data.recognizer?.hints?.length > 0 && this.data.recognizer?.hints?.length <= 10) {
this.earlyHintsMatch = true;
this.interim = true;
this.logger.debug('Gather:exec - early hints match enabled');
}
this.ep = ep;
if ('default' === this.vendor || !this.vendor) this.vendor = cs.speechRecognizerVendor;
if ('default' === this.language || !this.language) this.language = cs.speechRecognizerLanguage;
this.sttCredentials = cs.getSpeechCredentials(this.vendor, 'stt');
if ('default' === this.vendor || !this.vendor) {
this.vendor = cs.speechRecognizerVendor;
if (this.data.recognizer) this.data.recognizer.vendor = this.vendor;
}
if ('default' === this.language || !this.language) {
this.language = cs.speechRecognizerLanguage;
if (this.data.recognizer) this.data.recognizer.language = this.language;
}
if (!this.data.recognizer.vendor) {
this.data.recognizer.vendor = this.vendor;
}
if (this.needsStt && !this.sttCredentials) this.sttCredentials = cs.getSpeechCredentials(this.vendor, 'stt');
if (this.needsStt && !this.sttCredentials) {
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info(`TaskGather:exec - ERROR stt using ${this.vendor} requested but creds not supplied`);
@@ -171,20 +192,45 @@ class TaskGather extends Task {
alert_type: AlertType.STT_NOT_PROVISIONED,
vendor: this.vendor
}).catch((err) => this.logger.info({err}, 'Error generating alert for no stt'));
throw new Error(`no speech-to-text service credentials for ${this.vendor} have been configured`);
// Notify the application that no credentials are configured for the requested STT vendor.
this.notifyError({
msg: 'ASR error',
details: `No speech-to-text service credentials for ${this.vendor} have been configured`
});
this.notifyTaskDone();
throw new Error(`No speech-to-text service credentials for ${this.vendor} have been configured`);
}
if (this.vendor === 'nuance' && this.sttCredentials.client_id) {
/* get nuance access token */
const {client_id, secret} = this.sttCredentials;
const {access_token, servedFromCache} = await getNuanceAccessToken(client_id, secret, 'asr tts');
this.logger.debug({client_id}, `Gather:exec - got nuance access token ${servedFromCache ? 'from cache' : ''}`);
this.sttCredentials = {...this.sttCredentials, access_token};
}
else if (this.vendor == 'ibm' && this.sttCredentials.stt_api_key) {
/* get ibm access token */
const {stt_api_key, stt_region} = this.sttCredentials;
const {access_token, servedFromCache} = await getIbmAccessToken(stt_api_key);
this.logger.debug({stt_api_key}, `Gather:exec - got ibm access token ${servedFromCache ? 'from cache' : ''}`);
this.sttCredentials = {...this.sttCredentials, access_token, stt_region};
}
const startListening = (cs, ep) => {
this._startTimer();
if (this.isContinuousAsr && 0 === this.timeout) this._startAsrTimer();
if (this.input.includes('speech') && !this.listenDuringPrompt) {
this._initSpeech(cs, ep)
.then(() => {
if (this.killed) {
this.logger.info('Gather:exec - task was quickly killed so do not transcribe');
return;
}
this._startTranscribing(ep);
return updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid);
})
.catch(() => {});
.catch((err) => {
this.logger.error({err}, 'error in initSpeech');
});
}
};
@@ -198,7 +244,15 @@ class TaskGather extends Task {
span.end();
if (err) this.logger.error({err}, 'Gather:exec Error playing tts');
this.logger.debug('Gather: nested say task completed');
if (!this.killed) startListening(cs, ep);
if (!this.killed) {
startListening(cs, ep);
if (this.input.includes('speech') && this.vendor === 'nuance' && this.listenDuringPrompt) {
this.logger.debug('Gather:exec - starting transcription timers after say completes');
ep.startTranscriptionTimers((err) => {
if (err) this.logger.error({err}, 'Gather:exec - error starting transcription timers');
});
}
}
});
}
else if (this.playTask) {
@@ -210,10 +264,24 @@ class TaskGather extends Task {
span.end();
if (err) this.logger.error({err}, 'Gather:exec Error playing url');
this.logger.debug('Gather: nested play task completed');
if (!this.killed) startListening(cs, ep);
if (!this.killed) {
startListening(cs, ep);
if (this.input.includes('speech') && this.vendor === 'nuance' && this.listenDuringPrompt) {
this.logger.debug('Gather:exec - starting transcription timers after play completes');
ep.startTranscriptionTimers((err) => {
if (err) this.logger.error({err}, 'Gather:exec - error starting transcription timers');
});
}
}
});
}
else startListening(cs, ep);
else {
if (this.killed) {
this.logger.info('Gather:exec - task was immediately killed so do not transcribe');
return;
}
startListening(cs, ep);
}
if (this.input.includes('speech') && this.listenDuringPrompt) {
await this._initSpeech(cs, ep);
@@ -230,14 +298,7 @@ class TaskGather extends Task {
} catch (err) {
this.logger.error(err, 'TaskGather:exec error');
}
ep.removeCustomEventListener(GoogleTranscriptionEvents.Transcription);
ep.removeCustomEventListener(GoogleTranscriptionEvents.EndOfUtterance);
ep.removeCustomEventListener(GoogleTranscriptionEvents.VadDetected);
ep.removeCustomEventListener(AwsTranscriptionEvents.Transcription);
ep.removeCustomEventListener(AwsTranscriptionEvents.VadDetected);
ep.removeCustomEventListener(AzureTranscriptionEvents.Transcription);
ep.removeCustomEventListener(AzureTranscriptionEvents.NoSpeechDetected);
ep.removeCustomEventListener(AzureTranscriptionEvents.VadDetected);
this.removeSpeechListeners(ep);
}
kill(cs) {
@@ -245,15 +306,21 @@ class TaskGather extends Task {
this._killAudio(cs);
this.ep.removeAllListeners('dtmf');
clearTimeout(this.interDigitTimer);
this._clearAsrTimer();
this.playTask?.span.end();
this.sayTask?.span.end();
this._resolve('killed');
}
updateTimeout(timeout) {
this.logger.info(`TaskGather:updateTimeout - updating timeout to ${timeout}`);
updateTaskInProgress(opts) {
if (!this.needsStt && opts.input.includes('speech')) {
this.logger.info('TaskGather:updateTaskInProgress - adding speech to a background gather');
return false; // this needs be handled by killing the background gather and starting a new one
}
const {timeout} = opts;
this.timeout = timeout;
this._startTimer();
return true;
}
_onDtmf(cs, ep, evt) {
@@ -292,106 +359,101 @@ class TaskGather extends Task {
}
async _initSpeech(cs, ep) {
const opts = {};
const opts = this.setChannelVarsForStt(this, this.sttCredentials, this.data.recognizer);
switch (this.vendor) {
case 'google':
this.bugname = 'google_transcribe';
ep.addCustomEventListener(GoogleTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(GoogleTranscriptionEvents.EndOfUtterance, this._onEndOfUtterance.bind(this, cs, ep));
ep.addCustomEventListener(GoogleTranscriptionEvents.VadDetected, this._onVadDetected.bind(this, cs, ep));
break;
if (this.vad?.enable) {
opts.START_RECOGNIZING_ON_VAD = 1;
if (this.vad.voiceMs) opts.RECOGNIZER_VAD_VOICE_MS = this.vad.voiceMs;
else opts.RECOGNIZER_VAD_VOICE_MS = 125;
if (this.vad.mode >= 0 && this.vad.mode <= 3) opts.RECOGNIZER_VAD_MODE = this.vad.mode;
}
case 'aws':
case 'polly':
this.bugname = 'aws_transcribe';
ep.addCustomEventListener(AwsTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(AwsTranscriptionEvents.VadDetected, this._onVadDetected.bind(this, cs, ep));
break;
case 'microsoft':
this.bugname = 'azure_transcribe';
ep.addCustomEventListener(AzureTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(AzureTranscriptionEvents.NoSpeechDetected,
this._onNoSpeechDetected.bind(this, cs, ep));
ep.addCustomEventListener(AzureTranscriptionEvents.VadDetected, this._onVadDetected.bind(this, cs, ep));
break;
case 'nuance':
this.bugname = 'nuance_transcribe';
ep.addCustomEventListener(NuanceTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(NuanceTranscriptionEvents.StartOfSpeech,
this._onStartOfSpeech.bind(this, cs, ep));
ep.addCustomEventListener(NuanceTranscriptionEvents.TranscriptionComplete,
this._onTranscriptionComplete.bind(this, cs, ep));
ep.addCustomEventListener(NuanceTranscriptionEvents.VadDetected,
this._onVadDetected.bind(this, cs, ep));
if ('google' === this.vendor) {
this.bugname = 'google_transcribe';
if (this.sttCredentials) opts.GOOGLE_APPLICATION_CREDENTIALS = JSON.stringify(this.sttCredentials.credentials);
[
['enhancedModel', 'GOOGLE_SPEECH_USE_ENHANCED'],
['separateRecognitionPerChannel', 'GOOGLE_SPEECH_SEPARATE_RECOGNITION_PER_CHANNEL'],
['profanityFilter', 'GOOGLE_SPEECH_PROFANITY_FILTER'],
['punctuation', 'GOOGLE_SPEECH_ENABLE_AUTOMATIC_PUNCTUATION'],
['words', 'GOOGLE_SPEECH_ENABLE_WORD_TIME_OFFSETS'],
['singleUtterance', 'GOOGLE_SPEECH_SINGLE_UTTERANCE'],
['diarization', 'GOOGLE_SPEECH_PROFANITY_FILTER']
].forEach((arr) => {
if (this[arr[0]]) opts[arr[1]] = true;
else if (this[arr[0]] === false) opts[arr[1]] = false;
});
if (this.hints.length > 0) {
opts.GOOGLE_SPEECH_HINTS = this.hints.join(',');
if (typeof this.hintsBoost === 'number') {
opts.GOOGLE_SPEECH_HINTS_BOOST = this.hintsBoost;
/* stall timers until prompt finishes playing */
if ((this.sayTask || this.playTask) && this.listenDuringPrompt) {
opts.NUANCE_STALL_TIMERS = 1;
}
}
if (this.altLanguages.length > 0) opts.GOOGLE_SPEECH_ALTERNATIVE_LANGUAGE_CODES = this.altLanguages.join(',');
else opts.GOOGLE_SPEECH_ALTERNATIVE_LANGUAGE_CODES = '';
if ('unspecified' !== this.interactionType) {
opts.GOOGLE_SPEECH_METADATA_INTERACTION_TYPE = this.interactionType;
}
opts.GOOGLE_SPEECH_MODEL = this.model;
if (this.diarization && this.diarizationMinSpeakers > 0) {
opts.GOOGLE_SPEECH_SPEAKER_DIARIZATION_MIN_SPEAKER_COUNT = this.diarizationMinSpeakers;
}
if (this.diarization && this.diarizationMaxSpeakers > 0) {
opts.GOOGLE_SPEECH_SPEAKER_DIARIZATION_MAX_SPEAKER_COUNT = this.diarizationMaxSpeakers;
}
if (this.naicsCode > 0) opts.GOOGLE_SPEECH_METADATA_INDUSTRY_NAICS_CODE = this.naicsCode;
ep.addCustomEventListener(GoogleTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(GoogleTranscriptionEvents.EndOfUtterance, this._onEndOfUtterance.bind(this, cs, ep));
ep.addCustomEventListener(GoogleTranscriptionEvents.VadDetected, this._onVadDetected.bind(this, cs, ep));
}
else if (['aws', 'polly'].includes(this.vendor)) {
this.bugname = 'aws_transcribe';
if (this.vocabularyName) opts.AWS_VOCABULARY_NAME = this.vocabularyName;
if (this.vocabularyFilterName) {
opts.AWS_VOCABULARY_NAME = this.vocabularyFilterName;
opts.AWS_VOCABULARY_FILTER_METHOD = this.filterMethod || 'mask';
}
if (this.sttCredentials) {
Object.assign(opts, {
AWS_ACCESS_KEY_ID: this.sttCredentials.accessKeyId,
AWS_SECRET_ACCESS_KEY: this.sttCredentials.secretAccessKey,
AWS_REGION: this.sttCredentials.region
});
}
ep.addCustomEventListener(AwsTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(AwsTranscriptionEvents.VadDetected, this._onVadDetected.bind(this, cs, ep));
}
else if ('microsoft' === this.vendor) {
this.bugname = 'azure_transcribe';
if (this.sttCredentials) {
const {api_key, region, use_custom_stt, custom_stt_endpoint} = this.sttCredentials;
break;
Object.assign(opts, {
'AZURE_SUBSCRIPTION_KEY': api_key,
'AZURE_REGION': region
});
if (this.azureSttEndpointId) {
Object.assign(opts, {'AZURE_SERVICE_ENDPOINT_ID': this.azureSttEndpointId});
}
else if (use_custom_stt && custom_stt_endpoint) {
Object.assign(opts, {'AZURE_SERVICE_ENDPOINT_ID': custom_stt_endpoint});
}
}
if (this.hints && this.hints.length > 0) {
opts.AZURE_SPEECH_HINTS = this.hints.map((h) => h.trim()).join(',');
}
if (this.altLanguages && this.altLanguages.length > 0) {
opts.AZURE_SPEECH_ALTERNATIVE_LANGUAGE_CODES = this.altLanguages.join(',');
}
else {
opts.AZURE_SPEECH_ALTERNATIVE_LANGUAGE_CODES = '';
}
if (this.requestSnr) opts.AZURE_REQUEST_SNR = 1;
if (this.profanityOption && this.profanityOption !== 'raw') opts.AZURE_PROFANITY_OPTION = this.profanityOption;
if (this.azureServiceEndpoint) opts.AZURE_SERVICE_ENDPOINT = this.azureServiceEndpoint;
if (this.initialSpeechTimeoutMs > 0) opts.AZURE_INITIAL_SPEECH_TIMEOUT_MS = this.initialSpeechTimeoutMs;
else if (this.timeout === 0) opts.AZURE_INITIAL_SPEECH_TIMEOUT_MS = 120000; // lengthy
opts.AZURE_USE_OUTPUT_FORMAT_DETAILED = 1;
case 'deepgram':
this.bugname = 'deepgram_transcribe';
ep.addCustomEventListener(DeepgramTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(DeepgramTranscriptionEvents.Connect, this._onDeepgramConnect.bind(this, cs, ep));
ep.addCustomEventListener(DeepgramTranscriptionEvents.ConnectFailure,
this._onDeepGramConnectFailure.bind(this, cs, ep));
break;
ep.addCustomEventListener(AzureTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(AzureTranscriptionEvents.NoSpeechDetected, this._onNoSpeechDetected.bind(this, cs, ep));
ep.addCustomEventListener(AzureTranscriptionEvents.VadDetected, this._onVadDetected.bind(this, cs, ep));
case 'soniox':
this.bugname = 'soniox_transcribe';
ep.addCustomEventListener(SonioxTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
break;
case 'ibm':
this.bugname = 'ibm_transcribe';
ep.addCustomEventListener(IbmTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(IbmTranscriptionEvents.Connect, this._onIbmConnect.bind(this, cs, ep));
ep.addCustomEventListener(IbmTranscriptionEvents.ConnectFailure,
this._onIbmConnectFailure.bind(this, cs, ep));
break;
case 'nvidia':
this.bugname = 'nvidia_transcribe';
ep.addCustomEventListener(NvidiaTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(NvidiaTranscriptionEvents.StartOfSpeech,
this._onStartOfSpeech.bind(this, cs, ep));
ep.addCustomEventListener(NvidiaTranscriptionEvents.TranscriptionComplete,
this._onTranscriptionComplete.bind(this, cs, ep));
ep.addCustomEventListener(NvidiaTranscriptionEvents.VadDetected,
this._onVadDetected.bind(this, cs, ep));
/* nvidia may also support this (unverified) - stall timers until the prompt finishes playing */
if ((this.sayTask || this.playTask) && this.listenDuringPrompt) {
opts.NVIDIA_STALL_TIMERS = 1;
}
break;
default:
if (this.vendor.startsWith('custom:')) {
this.bugname = `${this.vendor}_transcribe`;
ep.addCustomEventListener(JambonzTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(JambonzTranscriptionEvents.Connect, this._onJambonzConnect.bind(this, cs, ep));
ep.addCustomEventListener(JambonzTranscriptionEvents.ConnectFailure,
this._onJambonzConnectFailure.bind(this, cs, ep));
break;
}
else {
this.notifyError({ msg: 'ASR error', details:`Invalid vendor ${this.vendor}`});
this.notifyTaskDone();
throw new Error(`Invalid vendor ${this.vendor}`);
}
}
/* common handler for all stt engine errors */
ep.addCustomEventListener(JambonzTranscriptionEvents.Error, this._onJambonzError.bind(this, cs, ep));
await ep.set(opts)
.catch((err) => this.logger.info(err, 'Error setting channel variables'));
}
@@ -491,56 +553,46 @@ class TaskGather extends Task {
// make sure this is not a transcript from answering machine detection
const bugname = fsEvent.getHeader('media-bugname');
const finished = fsEvent.getHeader('transcription-session-finished');
this.logger.debug({evt, bugname, finished}, `Gather:_onTranscription for vendor ${this.vendor}`);
if (bugname && this.bugname !== bugname) return;
if ('aws' === this.vendor && Array.isArray(evt) && evt.length > 0) evt = evt[0];
if ('microsoft' === this.vendor) {
const final = evt.RecognitionStatus === 'Success';
if (final) {
// don't sort based on confidence: https://github.com/Azure-Samples/cognitive-services-speech-sdk/issues/1463
//const nbest = evt.NBest.sort((a, b) => b.Confidence - a.Confidence);
const nbest = evt.NBest;
const language_code = evt.PrimaryLanguage?.Language || this.language;
evt = {
is_final: true,
language_code,
alternatives: [
{
confidence: nbest[0].Confidence,
transcript: nbest[0].Display
}
]
};
}
else {
evt = {
is_final: false,
alternatives: [
{
transcript: evt.Text
}
]
};
if (this.vendor === 'ibm') {
if (evt?.state === 'listening') return;
}
evt = this.normalizeTranscription(evt, this.vendor, 1, this.language);
if (evt.alternatives.length === 0) {
this.logger.info({evt}, 'TaskGather:_onTranscription - got empty transcript, continue listening');
return;
}
/* fast path: our first partial transcript exactly matches an early hint */
if (this.earlyHintsMatch && evt.is_final === false && this.partialTranscriptsCount++ === 0) {
const transcript = evt.alternatives[0].transcript?.toLowerCase();
const hints = this.data.recognizer?.hints || [];
if (hints.find((h) => h.toLowerCase() === transcript)) {
this.logger.debug({evt}, 'Gather:_onTranscription: early hint match');
this._resolve('speech', evt);
return;
}
}
/* count words for bargein feature */
const words = evt.alternatives[0].transcript.split(' ').length;
const bufferedWords = this._bufferedTranscripts.reduce((count, e) => {
return count + e.alternatives[0].transcript.split(' ').length;
}, 0);
const words = evt.alternatives[0]?.transcript.split(' ').length;
const bufferedWords = this._sonioxTranscripts.length +
this._bufferedTranscripts.reduce((count, e) => count + e.alternatives[0]?.transcript.split(' ').length, 0);
if (evt.is_final) {
if (evt.alternatives[0].transcript === '' && !this.callSession.callGone && !this.killed) {
if ('microsoft' === this.vendor && finished === 'true') {
if (finished === 'true' && ['microsoft', 'deepgram'].includes(this.vendor)) {
this.logger.debug({evt}, 'TaskGather:_onTranscription - got empty transcript from old gather, disregarding');
}
else {
this.logger.info({evt}, 'TaskGather:_onTranscription - got empty transcript, listen again');
this._startTranscribing(ep);
this.logger.info({evt}, 'TaskGather:_onTranscription - got empty transcript, continue listening');
}
return;
}
if (this.isContinuousAsr) {
/* append the transcript and start listening again for asrTimeout */
const t = evt.alternatives[0].transcript;
@@ -560,7 +612,9 @@ class TaskGather extends Task {
return this._resolve(this._bufferedTranscripts.length > 0 ? 'speech' : 'timeout');
}
this._startAsrTimer();
return this._startTranscribing(ep);
/* some STT engines will keep listening after a final response, so no need to restart */
if (!['soniox', 'aws', 'microsoft', 'deepgram'].includes(this.vendor)) this._startTranscribing(ep);
}
else {
if (this.bargein && (words + bufferedWords) < this.minBargeinWordCount) {
@@ -571,6 +625,12 @@ class TaskGather extends Task {
return;
}
else {
if (this.vendor === 'soniox') {
/* compile transcripts into one */
this._sonioxTranscripts.push(evt.vendor.finalWords);
evt = this.compileSonioxTranscripts(this._sonioxTranscripts, 1, this.language);
this._sonioxTranscripts = [];
}
this._resolve('speech', evt);
}
}
@@ -581,6 +641,8 @@ class TaskGather extends Task {
others do not.
*/
//const isStableEnough = typeof evt.stability === 'undefined' || evt.stability > GATHER_STABILITY_THRESHOLD;
this._clearTimer();
this._startTimer();
if (this.bargein && (words + bufferedWords) >= this.minBargeinWordCount) {
if (!this.playComplete) {
this.logger.debug({transcript: evt.alternatives[0].transcript}, 'killing audio due to speech');
@@ -594,6 +656,13 @@ class TaskGather extends Task {
this.cs.requestor.request('verb:hook', this.partialResultHook, Object.assign({speech: evt},
this.cs.callInfo, httpHeaders));
}
if (this.vendor === 'soniox') {
this._clearTimer();
if (evt.vendor.finalWords.length) {
this.logger.debug({evt}, 'TaskGather:_onTranscription - buffering soniox transcript');
this._sonioxTranscripts.push(evt.vendor.finalWords);
}
}
}
}
_onEndOfUtterance(cs, ep) {
@@ -602,11 +671,102 @@ class TaskGather extends Task {
this._killAudio(cs);
}
if (!this.resolved && !this.killed && !this._bufferedTranscripts.length) {
/**
* By default, Gather asks google for multiple utterances.
* The reason is that we can sometimes get an 'end_of_utterance' event without
* getting a transcription. This can happen if someone coughs or mumbles.
* For that reason we don't ask for a single utterance, and we terminate the transcribe operation
* once we get a final transcript.
* However, if the user has specified singleUtterance, then we need to restart here
* since we don't have a final transcript yet.
*/
if (!this.resolved && !this.killed && !this._bufferedTranscripts.length && this.wantsSingleUtterance) {
this._startTranscribing(ep);
}
}
_onStartOfSpeech(cs, ep) {
this.logger.debug('TaskGather:_onStartOfSpeech');
if (this.bargein) {
this._killAudio(cs);
}
}
_onTranscriptionComplete(cs, ep) {
this.logger.debug('TaskGather:_onTranscriptionComplete');
}
_onDeepgramConnect(_cs, _ep) {
this.logger.debug('TaskGather:_onDeepgramConnect');
}
_onJambonzConnect(_cs, _ep) {
this.logger.debug('TaskGather:_onJambonzConnect');
}
_onJambonzError(cs, _ep, evt) {
this.logger.info({evt}, 'TaskGather:_onJambonzError');
const {writeAlerts, AlertType} = cs.srf.locals;
if (this.vendor === 'nuance') {
const {code, error} = evt;
if (code === 404 && error === 'No speech') return this._resolve('timeout');
if (code === 413 && error === 'Too much speech') return this._resolve('timeout');
}
this.logger.info({evt}, 'TaskGather:_onJambonzError');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Custom speech vendor ${this.vendor} error: ${evt.error}`,
vendor: this.vendor,
}).catch((err) => this.logger.info({err}, 'Error generating alert for jambonz custom connection failure'));
this.notifyError({msg: 'ASR error', details:`Custom speech vendor ${this.vendor} error: ${evt.error}`});
}
_onDeepGramConnectFailure(cs, _ep, evt) {
const {reason} = evt;
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info({evt}, 'TaskGather:_onDeepgramConnectFailure');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Failed connecting to Deepgram speech recognizer: ${reason}`,
vendor: 'deepgram',
}).catch((err) => this.logger.info({err}, 'Error generating alert for deepgram connection failure'));
this.notifyError({msg: 'ASR error', details:`Failed connecting to speech vendor deepgram: ${reason}`});
this.notifyTaskDone();
}
_onJambonzConnectFailure(cs, _ep, evt) {
const {reason} = evt;
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info({evt}, 'TaskGather:_onJambonzConnectFailure');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Failed connecting to ${this.vendor} speech recognizer: ${reason}`,
vendor: this.vendor,
}).catch((err) => this.logger.info({err}, 'Error generating alert for jambonz custom connection failure'));
this.notifyError({msg: 'ASR error', details:`Failed connecting to speech vendor ${this.vendor}: ${reason}`});
this.notifyTaskDone();
}
_onIbmConnect(_cs, _ep) {
this.logger.debug('TaskGather:_onIbmConnect');
}
_onIbmConnectFailure(cs, _ep, evt) {
const {reason} = evt;
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info({evt}, 'TaskGather:_onIbmConnectFailure');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Failed connecting to IBM watson speech recognizer: ${reason}`,
vendor: 'ibm',
}).catch((err) => this.logger.info({err}, 'Error generating alert for IBM connection failure'));
this.notifyError({msg: 'ASR error', details:`Failed connecting to speech vendor IBM: ${reason}`});
this.notifyTaskDone();
}
_onIbmError(cs, _ep, evt) {
this.logger.info({evt}, 'TaskGather:_onIbmError');
}
_onVadDetected(cs, ep) {
if (this.bargein && this.minBargeinWordCount === 0) {
this.logger.debug('TaskGather:_onVadDetected');
@@ -634,6 +794,10 @@ class TaskGather extends Task {
if (this.resolved) return;
this.resolved = true;
// Clear dtmf event
if (this.dtmfBargein) {
this.ep.removeAllListeners('dtmf');
}
clearTimeout(this.interDigitTimer);
this._clearTimer();

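As a usage sketch for the recognizer handling above: per-verb recognizer options now flow through setChannelVarsForStt, and a vad block is mapped to the START_RECOGNIZING_ON_VAD / RECOGNIZER_VAD_VOICE_MS / RECOGNIZER_VAD_MODE channel variables before transcription starts. A hypothetical gather verb using those options (values are illustrative):

// Hypothetical gather verb; option names follow this diff and the verb schema below.
const gather = {
  verb: 'gather',
  input: ['speech', 'digits'],
  actionHook: '/transcript',
  timeout: 10,
  recognizer: {
    vendor: 'google',
    language: 'en-US',
    hints: ['sales', 'support'],                 // merged (deduplicated) with any global sttHints
    altLanguages: ['es-ES'],
    vad: {enable: true, voiceMs: 250, mode: 2}   // delay connecting to the recognizer until voice is detected
  }
};
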
View File

@@ -18,6 +18,7 @@ class TaskHangup extends Task {
await super.exec(cs);
try {
await dlg.destroy({headers: this.headers});
cs._callReleased();
} catch (err) {
this.logger.error(err, 'TaskHangup:exec - Error hanging up call');
}

View File

@@ -1,6 +1,6 @@
const Task = require('./task');
const {TaskName, TaskPreconditions} = require('../utils/constants');
const normalizeJambones = require('../utils/normalize-jambones');
const { normalizeJambones } = require('@jambonz/verb-specifications');
class Lex extends Task {
constructor(logger, opts) {

View File

@@ -2,6 +2,7 @@ const Task = require('./task');
const {TaskName, TaskPreconditions, ListenEvents, ListenStatus} = require('../utils/constants');
const makeTask = require('./make_task');
const moment = require('moment');
const MAX_PLAY_AUDIO_QUEUE_SIZE = 10;
class TaskListen extends Task {
constructor(logger, opts, parentTask) {
@@ -10,7 +11,7 @@ class TaskListen extends Task {
[
'action', 'auth', 'method', 'url', 'finishOnKey', 'maxLength', 'metadata', 'mixType', 'passDtmf', 'playBeep',
'sampleRate', 'timeout', 'transcribe', 'wsAuth'
'sampleRate', 'timeout', 'transcribe', 'wsAuth', 'disableBidirectionalAudio'
].forEach((k) => this[k] = this.data[k]);
this.mixType = this.mixType || 'mono';
@@ -20,6 +21,8 @@ class TaskListen extends Task {
this.nested = parentTask instanceof Task;
this.results = {};
this.playAudioQueue = [];
this.isPlayingAudioFromQueue = false;
if (this.transcribe) this.transcribeTask = makeTask(logger, {'transcribe': opts.transcribe}, this);
}
@@ -58,6 +61,7 @@ class TaskListen extends Task {
super.kill(cs);
this.logger.debug(`TaskListen:kill endpoint connected? ${this.ep && this.ep.connected}`);
this._clearTimer();
this.playAudioQueue = [];
if (this.ep && this.ep.connected) {
this.logger.debug('TaskListen:kill closing websocket');
try {
@@ -136,7 +140,9 @@ class TaskListen extends Task {
}
/* support bi-directional audio */
ep.addCustomEventListener(ListenEvents.PlayAudio, this._onPlayAudio.bind(this, ep));
if (!this.disableBiDirectionalAudio) {
ep.addCustomEventListener(ListenEvents.PlayAudio, this._onPlayAudio.bind(this, ep));
}
ep.addCustomEventListener(ListenEvents.KillAudio, this._onKillAudio.bind(this, ep));
ep.addCustomEventListener(ListenEvents.Disconnect, this._onDisconnect.bind(this, ep));
}
@@ -182,16 +188,36 @@ class TaskListen extends Task {
this.notifyTaskDone();
}
async _onPlayAudio(ep, evt) {
this.logger.info(`received play_audio event: ${JSON.stringify(evt)}`);
async _playAudio(ep, evt, logger) {
try {
const results = await ep.play(evt.file);
this.logger.debug(`Finished playing file, result: ${JSON.stringify(results)}`);
logger.debug(`Finished playing file, result: ${JSON.stringify(results)}`);
ep.forkAudioSendText({type: 'playDone', data: Object.assign({id: evt.id}, results)});
} catch (err) {
logger.error({err}, 'Error playing file');
}
catch (err) {
this.logger.error({err}, 'Error playing file');
}
async _onPlayAudio(ep, evt) {
this.logger.info(`received play_audio event: ${JSON.stringify(evt)}`);
if (!evt.queuePlay) {
this.playAudioQueue = [];
this._playAudio(ep, evt, this.logger);
this.isPlayingAudioFromQueue = false;
return;
}
if (this.playAudioQueue.length <= MAX_PLAY_AUDIO_QUEUE_SIZE) {
this.playAudioQueue.push(evt);
}
if (this.isPlayingAudioFromQueue) return;
this.isPlayingAudioFromQueue = true;
while (this.playAudioQueue.length > 0) {
await this._playAudio(ep, this.playAudioQueue.shift(), this.logger);
}
this.isPlayingAudioFromQueue = false;
}
_onKillAudio(ep) {

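A hedged sketch of the exchange the queueing logic above enables, written from the remote listen server's side. The file, id, and queuePlay fields and the playDone reply come from the handler; the {type, data} envelope on the inbound playAudio message is an assumption that mirrors the forkAudioSendText reply, and the URL, port, and id are placeholders.

// Hypothetical listen websocket server; assumes the ws v8 API and an assumed inbound envelope.
const WebSocket = require('ws');

const wss = new WebSocket.Server({port: 8080});
wss.on('connection', (ws) => {
  // ask jambonz to queue a prompt instead of interrupting current playback;
  // at most MAX_PLAY_AUDIO_QUEUE_SIZE (10) queued prompts are retained
  ws.send(JSON.stringify({
    type: 'playAudio',
    data: {id: 'prompt-1', file: 'https://example.com/prompt1.wav', queuePlay: true}
  }));

  ws.on('message', (msg, isBinary) => {
    if (isBinary) return;                    // forked audio frames arrive as binary; ignore them in this sketch
    const evt = JSON.parse(msg.toString());
    if (evt.type === 'playDone') {
      console.log(`playback of ${evt.data.id} finished`, evt.data);
    }
  });
});
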
View File

@@ -1,4 +1,4 @@
const Task = require('./task');
const { validateVerb } = require('@jambonz/verb-specifications');
const {TaskName} = require('../utils/constants');
const errBadInstruction = new Error('malformed jambonz application payload');
@@ -12,7 +12,7 @@ function makeTask(logger, obj, parent) {
if (typeof data !== 'object') {
throw errBadInstruction;
}
Task.validate(name, data);
validateVerb(name, data, logger);
switch (name) {
case TaskName.SipDecline:
const TaskSipDecline = require('./sip_decline');

View File

@@ -1,7 +1,7 @@
const Task = require('./task');
const {TaskName, TaskPreconditions} = require('../utils/constants');
const bent = require('bent');
const { v4: uuidv4 } = require('uuid');
const uuidv4 = require('uuid-random');
class TaskMessage extends Task {
constructor(logger, opts) {

View File

@@ -22,7 +22,22 @@ class TaskPlay extends Task {
async exec(cs, {ep}) {
await super.exec(cs);
this.ep = ep;
let timeout;
let playbackSeconds = 0;
let playbackMilliseconds = 0;
let completed = !(this.timeoutSecs > 0 || this.loop);
if (this.timeoutSecs > 0) {
timeout = setTimeout(async() => {
completed = true;
try {
await this.kill(cs);
} catch (err) {
this.logger.info(err, 'Error killing audio on timeoutSecs');
}
}, this.timeoutSecs * 1000);
}
try {
this.notifyStatus({event: 'start-playback'});
while (!this.killed && (this.loop === 'forever' || this.loop--) && this.ep.connected) {
if (cs.isInConference) {
const {memberId, confName, confUuid} = cs;
@@ -34,14 +49,24 @@ class TaskPlay extends Task {
await this.playToConfMember(this.ep, memberId, confName, confUuid, this.url);
}
} else {
const file = (this.timeoutSecs >= 0 || this.seekOffset >= 0) ?
{file: this.url, seekOffset: this.seekOffset, timeoutSecs: this.timeoutSecs} : this.url;
let file = this.url;
if (this.seekOffset >= 0) {
file = {file: this.url, seekOffset: this.seekOffset};
this.seekOffset = -1;
}
const result = await ep.play(file);
await this.performAction(Object.assign(result, {reason: 'playCompleted'}),
!(this.parentTask || cs.isConfirmCallSession));
playbackSeconds += parseInt(result.playbackSeconds);
playbackMilliseconds += parseInt(result.playbackMilliseconds);
if (this.killed || !this.loop || completed) {
if (timeout) clearTimeout(timeout);
await this.performAction(
Object.assign(result, {reason: 'playCompleted', playbackSeconds, playbackMilliseconds}),
!(this.parentTask || cs.isConfirmCallSession));
}
}
}
} catch (err) {
if (timeout) clearTimeout(timeout);
this.logger.info(err, `TaskPlay:exec - error playing ${this.url}`);
}
this.emit('playDone');
@@ -56,7 +81,8 @@ class TaskPlay extends Task {
this.killPlayToConfMember(this.ep, memberId, confName);
}
else {
await this.ep.api('uuid_break', this.ep.uuid).catch((err) => this.logger.info(err, 'Error killing audio'));
this.notifyStatus({event: 'kill-playback'});
this.ep.api('uuid_break', this.ep.uuid).catch((err) => this.logger.info(err, 'Error killing audio'));
}
}
}

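A small example of a play verb exercising the options handled above: per the new logic, playbackSeconds and playbackMilliseconds are summed across loops and reported once with reason playCompleted, seekOffset is applied only to the first iteration, and timeoutSecs now kills playback via a timer instead of being passed to ep.play. The URL is a placeholder.

// Hypothetical play verb; property names follow the verb schema in this changeset.
const play = {
  verb: 'play',
  url: 'https://example.com/hold-music.mp3',
  loop: 2,                  // play the file twice (or 'forever')
  seekOffset: 8000,         // starting offset, applied to the first iteration only
  timeoutSecs: 30,          // stop playback after 30 seconds
  actionHook: '/play-done'  // receives reason 'playCompleted' plus aggregated playback durations
};
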
View File

@@ -1,7 +1,7 @@
const Task = require('./task');
const {TaskName} = require('../utils/constants');
const makeTask = require('./make_task');
const normalizeJambones = require('../utils/normalize-jambones');
const { normalizeJambones } = require('@jambonz/verb-specifications');
/**
* Manages an outdial made via REST API
@@ -27,7 +27,7 @@ class TaskRestDial extends Task {
*/
async exec(cs) {
await super.exec(cs);
this.req = cs.req;
this.canCancel = true;
this._setCallTimer();
await this.awaitTaskDone();
@@ -36,15 +36,15 @@ class TaskRestDial extends Task {
kill(cs) {
super.kill(cs);
this._clearCallTimer();
if (this.req) {
this.req.cancel();
this.req = null;
if (this.canCancel && cs?.req) {
this.canCancel = false;
cs.req.cancel();
}
this.notifyTaskDone();
}
async _onConnect(dlg) {
this.req = null;
this.canCancel = false;
const cs = this.callSession;
cs.setDialog(dlg);
@@ -79,7 +79,7 @@ class TaskRestDial extends Task {
_onCallStatus(status) {
this.logger.debug(`CallStatus: ${status}`);
if (status >= 200) {
this.req = null;
this.canCancel = false;
this._clearCallTimer();
if (status !== 200) this.notifyTaskDone();
}

View File

@@ -1,96 +1,26 @@
const Task = require('./task');
const {TaskName, TaskPreconditions} = require('../utils/constants');
const pollySSMLSplit = require('polly-ssml-split');
const breakLengthyTextIfNeeded = (logger, text) => {
const chunkSize = 1000;
if (text.length <= chunkSize) return [text];
const result = [];
const isSSML = text.startsWith('<speak>');
let startPos = 0;
let charPos = isSSML ? 7 : 0; // skip <speak>
let tag;
//logger.debug({isSSML}, `breakLengthyTextIfNeeded: handling text of length ${text.length}`);
while (startPos + charPos < text.length) {
if (isSSML && !tag && text[startPos + charPos] === '<') {
const tagStartPos = ++charPos;
while (startPos + charPos < text.length) {
if (text[startPos + charPos] === '>') {
if (text[startPos + charPos - 1] === '\\') tag = null;
else if (!tag) tag = text.substring(startPos + tagStartPos, startPos + charPos - 1);
break;
}
if (!tag) {
const c = text[startPos + charPos];
if (c === ' ') {
tag = text.substring(startPos + tagStartPos, startPos + charPos);
//logger.debug(`breakLengthyTextIfNeeded: enter tag ${tag} (space)`);
break;
}
}
charPos++;
}
if (tag) {
//search for end of tag
//logger.debug(`breakLengthyTextIfNeeded: searching forward for </${tag}>`);
const e1 = text.indexOf(`</${tag}>`, startPos + charPos);
const e2 = text.indexOf('/>', startPos + charPos);
const tagEndPos = e1 === -1 ? e2 : e2 === -1 ? e1 : Math.min(e1, e2);
if (tagEndPos === -1) {
//logger.debug(`breakLengthyTextIfNeeded: exit tag ${tag} not found, exiting`);
} else {
//logger.debug(`breakLengthyTextIfNeeded: exit tag ${tag} found at ${tagEndPos}`);
charPos = tagEndPos + 1;
}
tag = null;
}
continue;
}
if (charPos < chunkSize) {
charPos++;
continue;
}
// start looking for a good break point
let chunkIt = false;
const a = text[startPos + charPos];
const b = text[startPos + charPos + 1];
if (/[\.!\?]/.test(a) && /\s/.test(b)) {
//logger.debug('breakLengthyTextIfNeeded: breaking at sentence end');
chunkIt = true;
}
if (chunkIt) {
charPos++;
const chunk = text.substr(startPos, charPos);
if (isSSML) {
result.push(0 === startPos ? `${chunk}</speak>` : `<speak>${chunk}</speak>`);
}
else result.push(chunk);
charPos = 0;
startPos += chunk.length;
//logger.debug({chunk: result[result.length - 1]},
// `breakLengthyTextIfNeeded: chunked; new starting pos ${startPos}`);
}
else charPos++;
if (text.length <= chunkSize || !isSSML) return [text];
const options = {
// MIN length
softLimit: 100,
// MAX length, excluding 15 characters for the <speak></speak> wrapper
hardLimit: chunkSize - 15,
// Set of extra split characters (Optional property)
extraSplitChars: ',;!?',
};
pollySSMLSplit.configure(options);
try {
return pollySSMLSplit.split(text);
} catch (err) {
logger.info({err}, 'Error splitting long SSML text');
return [text];
}
// final chunk
if (startPos < text.length) {
const chunk = text.substr(startPos);
if (isSSML) {
result.push(0 === startPos ? `${chunk}</speak>` : `<speak>${chunk}`);
}
else result.push(chunk);
//logger.debug({chunk: result[result.length - 1]},
// `breakLengthyTextIfNeeded: final chunk; starting pos ${startPos} length ${chunk.length}`);
}
return result;
};
class TaskSay extends Task {
@@ -105,6 +35,7 @@ class TaskSay extends Task {
this.loop = this.data.loop || 1;
this.earlyMedia = this.data.earlyMedia === true || (parentTask && parentTask.earlyMedia);
this.synthesizer = this.data.synthesizer || {};
this.disableTtsCache = this.data.disableTtsCache;
}
get name() { return TaskName.Say; }
@@ -130,14 +61,24 @@ class TaskSay extends Task {
const language = this.synthesizer.language && this.synthesizer.language !== 'default' ?
this.synthesizer.language :
cs.speechSynthesisLanguage ;
const voice = this.synthesizer.voice && this.synthesizer.voice !== 'default' ?
let voice = this.synthesizer.voice && this.synthesizer.voice !== 'default' ?
this.synthesizer.voice :
cs.speechSynthesisVoice;
const engine = this.synthesizer.engine || 'standard';
const salt = cs.callSid;
const credentials = cs.getSpeechCredentials(vendor, 'tts');
this.logger.info({vendor, language, voice}, 'TaskSay:exec');
/* parse Nuance voices into name and model */
let model;
if (vendor === 'nuance' && voice) {
const arr = /([A-Za-z-]*)\s+-\s+(enhanced|standard)/.exec(voice);
if (arr) {
voice = arr[1];
model = arr[2];
}
}
this.logger.info({vendor, language, voice, model}, 'TaskSay:exec');
this.ep = ep;
try {
if (!credentials) {
@@ -146,7 +87,10 @@ class TaskSay extends Task {
alert_type: AlertType.TTS_NOT_PROVISIONED,
vendor
}).catch((err) => this.logger.info({err}, 'Error generating alert for no tts'));
this.notifyError(`No speech credentials have been provisioned for ${vendor}`);
this.notifyError({
msg: 'TTS error',
details:`No speech credentials provisioned for selected vendor ${vendor}`
});
throw new Error('no provisioned speech credentials for TTS');
}
// synthesize all of the text elements
@@ -164,14 +108,16 @@ class TaskSay extends Task {
'tts.voice': voice
});
try {
const {filePath, servedFromCache} = await synthAudio(stats, {
const {filePath, servedFromCache, rtt} = await synthAudio(stats, {
text,
vendor,
language,
voice,
engine,
model,
salt,
credentials
credentials,
disableTtsCache : this.disableTtsCache
});
this.logger.debug(`file ${filePath}, served from cache ${servedFromCache}`);
if (filePath) cs.trackTmpFile(filePath);
@@ -182,24 +128,33 @@ class TaskSay extends Task {
}
span.setAttributes({'tts.cached': servedFromCache});
span.end();
if (!servedFromCache && rtt) {
this.notifyStatus({
event: 'synthesized-audio',
vendor,
language,
characters: text.length,
elapsedTime: rtt
});
}
return filePath;
} catch (err) {
this.logger.info({err}, 'Error synthesizing tts');
span.end();
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.TTS_NOT_PROVISIONED,
alert_type: AlertType.TTS_FAILURE,
vendor,
detail: err.message
}).catch((err) => this.logger.info({err}, 'Error generating alert for tts failure'));
this.notifyError(err.message || err);
this.notifyError({msg: 'TTS error', details: err.message || err});
return;
}
};
const arr = this.text.map((t) => generateAudio(t));
const filepath = (await Promise.all(arr)).filter((fp) => fp && fp.length);
this.logger.debug({filepath}, 'synthesized files for tts');
this.notifyStatus({event: 'start-playback'});
while (!this.killed && (this.loop === 'forever' || this.loop--) && this.ep?.connected) {
let segment = 0;
@@ -231,6 +186,7 @@ class TaskSay extends Task {
this.killPlayToConfMember(this.ep, memberId, confName);
}
else {
this.notifyStatus({event: 'kill-playback'});
this.ep.api('uuid_break', this.ep.uuid);
}
}

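The hand-rolled chunker above is replaced by polly-ssml-split; a short usage sketch matching the configuration in this diff (softLimit 100, hardLimit of chunkSize minus room for the <speak> wrapper, extra split characters ',;!?'). The sample text is illustrative.

// Usage sketch of polly-ssml-split as adopted above.
const pollySSMLSplit = require('polly-ssml-split');

const chunkSize = 1000;
pollySSMLSplit.configure({
  softLimit: 100,              // minimum chunk length
  hardLimit: chunkSize - 15,   // leave room for the added <speak></speak> wrapper
  extraSplitChars: ',;!?'
});

const longSsml = `<speak>${'This is a fairly long sentence that repeats. '.repeat(60)}</speak>`;
const chunks = pollySSMLSplit.split(longSsml);   // each chunk comes back wrapped in <speak> tags
console.log(`split ${longSsml.length} chars of SSML into ${chunks.length} chunks`);
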
View File

@@ -36,6 +36,7 @@ class TaskSipRefer extends Task {
method: 'REFER',
headers: {
...this.headers,
...(this.referToIsUri && {'X-Refer-To-Leave-Untouched': true}),
'Refer-To': referTo,
'Referred-By': referredBy
}
@@ -46,7 +47,17 @@ class TaskSipRefer extends Task {
/* if we fail, fall through to next verb. If success, we should get BYE from far end */
if (this.referStatus === 202) {
this._notifyTimer = setTimeout(() => {
this.logger.info('TaskSipRefer:exec - no NOTIFY received in 15 secs, exiting');
this.performAction({refer_status: this.referStatus})
.catch((err) => this.logger.error(err, 'TaskSipRefer:exec - error performing action'));
this.notifyTaskDone();
}, 15000);
await this.awaitTaskDone();
if (this._notifyTimer) {
clearTimeout(this._notifyTimer);
this._notifyTimer = null;
}
}
else {
await this.performAction({refer_status: this.referStatus});
@@ -70,10 +81,10 @@ class TaskSipRefer extends Task {
const contentType = req.get('Content-Type');
this.logger.debug({body: req.body}, `TaskSipRefer:_handleNotify got ${contentType}`);
if (contentType === 'message/sipfrag') {
if (contentType?.includes('message/sipfrag')) {
const arr = /SIP\/2\.0\s+(\d+)/.exec(req.body);
if (arr) {
const status = arr[1];
const status = typeof arr[1] === 'string' ? parseInt(arr[1], 10) : arr[1];
this.logger.debug(`TaskSipRefer:_handleNotify: call got status ${status}`);
if (this.eventHook) {
const b3 = this.getTracingPropagation();
@@ -100,6 +111,7 @@ class TaskSipRefer extends Task {
/* they may have only provided a phone number/user */
referTo = `sip:${referTo}@${host}`;
}
else this.referToIsUri = true;
if (!referredBy) {
/* default */
referredBy = cs.req?.callingNumber || dlg.local.uri;

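For context on the NOTIFY timeout added above, a hypothetical sip:refer usage: if the far end accepts the REFER with 202 but never sends a NOTIFY, the actionHook now fires after 15 seconds with the refer_status. A bare number in referTo is expanded to a full SIP URI, whereas a full URI sets the X-Refer-To-Leave-Untouched header; the hook paths and number below are placeholders.

// Hypothetical sip:refer verb; property names follow the verb schema in this changeset.
const refer = {
  verb: 'sip:refer',
  referTo: '+15551234567',      // expanded to sip:+15551234567@<host> since it is not a full URI
  actionHook: '/refer-action',  // receives {refer_status}, even if no NOTIFY ever arrives
  eventHook: '/refer-events'    // receives each sipfrag call status parsed from NOTIFY bodies
};
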
View File

@@ -1,556 +0,0 @@
{
"sip:decline": {
"properties": {
"status": "number",
"reason": "string",
"headers": "object"
},
"required": [
"status"
]
},
"sip:request": {
"properties": {
"method": "string",
"body": "string",
"headers": "object",
"actionHook": "object|string"
},
"required": [
"method"
]
},
"sip:refer": {
"properties": {
"referTo": "string",
"referredBy": "string",
"headers": "object",
"actionHook": "object|string",
"eventHook": "object|string"
},
"required": [
"referTo"
]
},
"config": {
"properties": {
"synthesizer": "#synthesizer",
"recognizer": "#recognizer",
"bargeIn": "#bargeIn",
"record": "#recordOptions",
"amd": "#amd"
},
"required": []
},
"bargeIn": {
"properties": {
"enable": "boolean",
"sticky": "boolean",
"actionHook": "object|string",
"input": "array",
"finishOnKey": "string",
"numDigits": "number",
"minDigits": "number",
"maxDigits": "number",
"interDigitTimeout": "number",
"dtmfBargein": "boolean",
"minBargeinWordCount": "number"
},
"required": [
"enable"
]
},
"dequeue": {
"properties": {
"name": "string",
"actionHook": "object|string",
"timeout": "number",
"beep": "boolean"
},
"required": [
"name"
]
},
"enqueue": {
"properties": {
"name": "string",
"actionHook": "object|string",
"waitHook": "object|string",
"_": "object"
},
"required": [
"name"
]
},
"leave": {
"properties": {
}
},
"hangup": {
"properties": {
"headers": "object"
},
"required": [
]
},
"play": {
"properties": {
"url": "string|array",
"loop": "number|string",
"earlyMedia": "boolean",
"seekOffset": "number|string",
"timeoutSecs": "number|string",
"actionHook": "object|string"
},
"required": [
"url"
]
},
"say": {
"properties": {
"text": "string|array",
"loop": "number|string",
"synthesizer": "#synthesizer",
"earlyMedia": "boolean"
},
"required": [
"text"
]
},
"gather": {
"properties": {
"actionHook": "object|string",
"finishOnKey": "string",
"input": "array",
"numDigits": "number",
"minDigits": "number",
"maxDigits": "number",
"interDigitTimeout": "number",
"partialResultHook": "object|string",
"speechTimeout": "number",
"listenDuringPrompt": "boolean",
"dtmfBargein": "boolean",
"bargein": "boolean",
"minBargeinWordCount": "number",
"timeout": "number",
"recognizer": "#recognizer",
"play": "#play",
"say": "#say"
},
"required": [
]
},
"conference": {
"properties": {
"name": "string",
"beep": "boolean",
"startConferenceOnEnter": "boolean",
"endConferenceOnExit": "boolean",
"maxParticipants": "number",
"joinMuted": "boolean",
"actionHook": "object|string",
"waitHook": "object|string",
"statusEvents": "array",
"statusHook": "object|string",
"enterHook": "object|string",
"record": "#record"
},
"required": [
"name"
]
},
"dial": {
"properties": {
"actionHook": "object|string",
"answerOnBridge": "boolean",
"callerId": "string",
"confirmHook": "object|string",
"referHook": "object|string",
"dialMusic": "string",
"dtmfCapture": "object",
"dtmfHook": "object|string",
"headers": "object",
"listen": "#listen",
"target": ["#target"],
"timeLimit": "number",
"timeout": "number",
"proxy": "string",
"transcribe": "#transcribe",
"amd": "#amd"
},
"required": [
"target"
]
},
"dialogflow": {
"properties": {
"credentials": "object|string",
"project": "string",
"environment": "string",
"region": {
"type": "string",
"enum": ["europe-west1", "europe-west2", "australia-southeast1", "asia-northeast1"]
},
"lang": "string",
"actionHook": "object|string",
"eventHook": "object|string",
"events": "[string]",
"welcomeEvent": "string",
"welcomeEventParams": "object",
"noInputTimeout": "number",
"noInputEvent": "string",
"passDtmfAsTextInput": "boolean",
"thinkingMusic": "string",
"tts": "#synthesizer",
"bargein": "boolean"
},
"required": [
"project",
"credentials",
"lang"
]
},
"dtmf": {
"properties": {
"dtmf": "string",
"duration": "number"
},
"required": [
"dtmf"
]
},
"lex": {
"properties": {
"botId": "string",
"botAlias": "string",
"credentials": "object",
"region": "string",
"locale": "string",
"intent": "#lexIntent",
"welcomeMessage": "string",
"metadata": "object",
"bargein": "boolean",
"passDtmf": "boolean",
"actionHook": "object|string",
"eventHook": "object|string",
"noInputTimeout": "number",
"tts": "#synthesizer"
},
"required": [
"botId",
"botAlias",
"region",
"credentials"
]
},
"listen": {
"properties": {
"actionHook": "object|string",
"auth": "#auth",
"finishOnKey": "string",
"maxLength": "number",
"metadata": "object",
"mixType": {
"type": "string",
"enum": ["mono", "stereo", "mixed"]
},
"passDtmf": "boolean",
"playBeep": "boolean",
"sampleRate": "number",
"timeout": "number",
"transcribe": "#transcribe",
"url": "string",
"wsAuth": "#auth",
"earlyMedia": "boolean"
},
"required": [
"url"
]
},
"message": {
"properties": {
"carrier": "string",
"account_sid": "string",
"message_sid": "string",
"to": "string",
"from": "string",
"text": "string",
"media": "string|array",
"actionHook": "object|string"
},
"required": [
"to",
"from"
]
},
"pause": {
"properties": {
"length": "number"
},
"required": [
"length"
]
},
"rasa": {
"properties": {
"url": "string",
"recognizer": "#recognizer",
"tts": "#synthesizer",
"prompt": "string",
"actionHook": "object|string",
"eventHook": "object|string"
},
"required": [
"url"
]
},
"record": {
"properties": {
"path": "string"
},
"required": [
"path"
]
},
"recordOptions": {
"properties": {
"action": {
"type": "string",
"enum": ["startCallRecording", "stopCallRecording", "pauseCallRecording", "resumeCallRecording"]
},
"recordingID": "string",
"siprecServerURL": "string"
},
"required": [
"action"
]
},
"redirect": {
"properties": {
"actionHook": "object|string"
},
"required": [
"actionHook"
]
},
"rest:dial": {
"properties": {
"account_sid": "string",
"application_sid": "string",
"call_hook": "object|string",
"call_status_hook": "object|string",
"from": "string",
"fromHost": "string",
"speech_synthesis_vendor": "string",
"speech_synthesis_voice": "string",
"speech_synthesis_language": "string",
"speech_recognizer_vendor": "string",
"speech_recognizer_language": "string",
"tag": "object",
"to": "#target",
"headers": "object",
"timeout": "number"
},
"required": [
"call_hook",
"from",
"to"
]
},
"tag": {
"properties": {
"data": "object"
},
"required": [
"data"
]
},
"transcribe": {
"properties": {
"transcriptionHook": "string",
"recognizer": "#recognizer",
"earlyMedia": "boolean"
},
"required": [
"recognizer"
]
},
"target": {
"properties": {
"type": {
"type": "string",
"enum": ["phone", "sip", "user", "teams"]
},
"confirmHook": "object|string",
"method": {
"type": "string",
"enum": ["GET", "POST"]
},
"headers": "object",
"from": "#dialFrom",
"name": "string",
"number": "string",
"sipUri": "string",
"auth": "#auth",
"vmail": "boolean",
"tenant": "string",
"trunk": "string",
"overrideTo": "string"
},
"required": [
"type"
]
},
"dialFrom": {
"properties": {
"user": "string",
"host": "string"
},
"required": [
]
},
"auth": {
"properties": {
"username": "string",
"password": "string"
},
"required": [
"username",
"password"
]
},
"synthesizer": {
"properties": {
"vendor": {
"type": "string",
"enum": ["google", "aws", "polly", "microsoft", "default"]
},
"language": "string",
"voice": "string",
"engine": {
"type": "string",
"enum": ["standard", "neural"]
},
"gender": {
"type": "string",
"enum": ["MALE", "FEMALE", "NEUTRAL"]
}
},
"required": [
"vendor"
]
},
"recognizer": {
"properties": {
"vendor": {
"type": "string",
"enum": ["google", "aws", "microsoft", "default"]
},
"language": "string",
"vad": "#vad",
"hints": "array",
"hintsBoost": "number",
"altLanguages": "array",
"profanityFilter": "boolean",
"interim": "boolean",
"singleUtterance": "boolean",
"dualChannel": "boolean",
"separateRecognitionPerChannel": "boolean",
"punctuation": "boolean",
"enhancedModel": "boolean",
"words": "boolean",
"diarization": "boolean",
"diarizationMinSpeakers": "number",
"diarizationMaxSpeakers": "number",
"interactionType": {
"type": "string",
"enum": [
"unspecified",
"discussion",
"presentation",
"phone_call",
"voicemail",
"voice_search",
"voice_command",
"dictation"
]
},
"naicsCode": "number",
"identifyChannels": "boolean",
"vocabularyName": "string",
"vocabularyFilterName": "string",
"filterMethod": {
"type": "string",
"enum": [
"remove",
"mask",
"tag"
]
},
"model": "string",
"outputFormat": {
"type": "string",
"enum": [
"simple",
"detailed"
]
},
"profanityOption": {
"type": "string",
"enum": [
"masked",
"removed",
"raw"
]
},
"requestSnr": "boolean",
"initialSpeechTimeoutMs": "number",
"azureServiceEndpoint": "string",
"azureSttEndpointId": "string",
"asrDtmfTerminationDigit": "string",
"asrTimeout": "number"
},
"required": [
"vendor"
]
},
"lexIntent": {
"properties": {
"name": "string",
"slots": "object"
},
"required": [
"name"
]
},
"vad": {
"properties": {
"enable": "boolean",
"voiceMs": "number",
"mode": "number"
},
"required": [
"enable"
]
},
"amd": {
"properties": {
"actionHook": "object|string",
"thresholdWordCount": "number",
"timers": "#amdTimers",
"recognizer": "#recognizer"
},
"required": [
"actionHook"
]
},
"amdTimers": {
"properties": {
"noSpeechTimeoutMs": "number",
"decisionTimeoutMs": "number",
"toneTimeoutMs": "number",
"greetingCompletionTimeoutMs": "number"
}
}
}

View File

@@ -1,13 +1,10 @@
const Emitter = require('events');
const { v4: uuidv4 } = require('uuid');
const debug = require('debug')('jambonz:feature-server');
const assert = require('assert');
const uuidv4 = require('uuid-random');
const {TaskPreconditions} = require('../utils/constants');
const normalizeJambones = require('../utils/normalize-jambones');
const { normalizeJambones } = require('@jambonz/verb-specifications');
const WsRequestor = require('../utils/ws-requestor');
const {TaskName} = require('../utils/constants');
const {trace} = require('@opentelemetry/api');
const specs = new Map();
const _specData = require('./specs');
for (const key in _specData) {specs.set(key, _specData[key]);}
/**
* @classdesc Represents a jambonz verb. This is a superclass that is extended
@@ -21,6 +18,7 @@ class Task extends Emitter {
this.logger = logger;
this.data = data;
this.actionHook = this.data.actionHook;
this.id = data.id;
this._killInProgress = false;
this._completionPromise = new Promise((resolve) => this._completionResolver = resolve);
@@ -137,21 +135,32 @@ class Task extends Emitter {
return this.callSession.normalizeUrl(url, method, auth);
}
notifyError(errMsg) {
const params = {error: errMsg, verb: this.name};
this.cs.requestor.request('jambonz:error', '/error', params)
.catch((err) => this.logger.info({err}, 'Task:notifyError error sending error'));
notifyError(obj) {
if (this.cs.requestor instanceof WsRequestor) {
const params = {...obj, verb: this.name, id: this.id};
this.cs.requestor.request('jambonz:error', '/error', params)
.catch((err) => this.logger.info({err}, 'Task:notifyError error sending error'));
}
}
notifyStatus(obj) {
if (this.cs.notifyEvents && this.cs.requestor instanceof WsRequestor) {
const params = {...obj, verb: this.name, id: this.id};
this.cs.requestor.request('verb:status', '/status', params)
.catch((err) => this.logger.info({err}, 'Task:notifyStatus error sending error'));
}
}
async performAction(results, expectResponse = true) {
if (this.actionHook) {
const type = this.name === TaskName.Redirect ? 'session:redirect' : 'verb:hook';
const params = results ? Object.assign(this.cs.callInfo.toJSON(), results) : this.cs.callInfo.toJSON();
const span = this.startSpan('verb:hook', {'hook.url': this.actionHook});
const span = this.startSpan(type, {'hook.url': this.actionHook});
const b3 = this.getTracingPropagation('b3', span);
const httpHeaders = b3 && {b3};
span.setAttributes({'http.body': JSON.stringify(params)});
try {
const json = await this.cs.requestor.request('verb:hook', this.actionHook, params, httpHeaders);
const json = await this.cs.requestor.request(type, this.actionHook, params, httpHeaders);
span.setAttributes({'http.statusCode': 200});
span.end();
if (expectResponse && json && Array.isArray(json)) {
@@ -273,77 +282,6 @@ class Task extends Emitter {
this.logger.error(err, 'Task:_doRefer error');
}
}
/**
* validate that the JSON task description is valid
* @param {string} name - verb name
* @param {object} data - verb properties
*/
static validate(name, data) {
debug(`validating ${name} with data ${JSON.stringify(data)}`);
// validate the instruction is supported
if (!specs.has(name)) throw new Error(`invalid instruction: ${name}`);
// check type of each element and make sure required elements are present
const specData = specs.get(name);
let required = specData.required || [];
for (const dKey in data) {
if (dKey in specData.properties) {
const dVal = data[dKey];
const dSpec = specData.properties[dKey];
debug(`Task:validate validating property ${dKey} with value ${JSON.stringify(dVal)}`);
if (typeof dSpec === 'string' && dSpec === 'array') {
if (!Array.isArray(dVal)) throw new Error(`${name}: property ${dKey} is not an array`);
}
else if (typeof dSpec === 'string' && dSpec.includes('|')) {
const types = dSpec.split('|').map((t) => t.trim());
if (!types.includes(typeof dVal) && !(types.includes('array') && Array.isArray(dVal))) {
throw new Error(`${name}: property ${dKey} has invalid data type, must be one of ${types}`);
}
}
else if (typeof dSpec === 'string' && ['number', 'string', 'object', 'boolean'].includes(dSpec)) {
// simple types
if (typeof dVal !== specData.properties[dKey]) {
throw new Error(`${name}: property ${dKey} has invalid data type`);
}
}
else if (Array.isArray(dSpec) && dSpec[0].startsWith('#')) {
const name = dSpec[0].slice(1);
for (const item of dVal) {
Task.validate(name, item);
}
}
else if (typeof dSpec === 'object') {
// complex types
const type = dSpec.type;
assert.ok(['number', 'string', 'object', 'boolean'].includes(type),
`invalid or missing type in spec ${JSON.stringify(dSpec)}`);
if (type === 'string' && dSpec.enum) {
assert.ok(Array.isArray(dSpec.enum), `enum must be an array ${JSON.stringify(dSpec.enum)}`);
if (!dSpec.enum.includes(dVal)) throw new Error(`invalid value ${dVal} must be one of ${dSpec.enum}`);
}
}
else if (typeof dSpec === 'string' && dSpec.startsWith('#')) {
// reference to another datatype (i.e. nested type)
const name = dSpec.slice(1);
//const obj = {};
//obj[name] = dVal;
Task.validate(name, dVal);
}
else {
assert.ok(0, `invalid spec ${JSON.stringify(dSpec)}`);
}
required = required.filter((item) => item !== dKey);
}
else if (dKey === '_') {
/* no op: allow arbitrary info to be carried here, used by conference e.g in transfer */
}
else throw new Error(`${name}: unknown property ${dKey}`);
}
if (required.length > 0) throw new Error(`${name}: missing value for ${required}`);
}
}
module.exports = Task;
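The removed Task.validate and its local specs map are superseded by @jambonz/verb-specifications. The condensed sketch below reproduces only the simple-type and union-type checks for illustration (enum and '#'-reference specs are omitted), using the former pause spec as sample input.

const validateSimple = (verb, spec, data) => {
  let required = [...(spec.required || [])];
  for (const [key, val] of Object.entries(data)) {
    const dSpec = spec.properties[key];
    if (!dSpec) throw new Error(`${verb}: unknown property ${key}`);
    if (typeof dSpec !== 'string') continue;  // enums / nested types omitted in this sketch
    const types = dSpec.split('|');
    const ok = types.some((t) => t === 'array' ? Array.isArray(val) : typeof val === t);
    if (!ok) throw new Error(`${verb}: property ${key} has invalid data type, must be one of ${types}`);
    required = required.filter((item) => item !== key);
  }
  if (required.length > 0) throw new Error(`${verb}: missing value for ${required}`);
};

// e.g. against the former "pause" spec
validateSimple('pause', {properties: {length: 'number'}, required: ['length']}, {length: 3});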

View File

@@ -3,9 +3,16 @@ const {
TaskName,
TaskPreconditions,
GoogleTranscriptionEvents,
NuanceTranscriptionEvents,
AwsTranscriptionEvents,
AzureTranscriptionEvents,
AwsTranscriptionEvents
DeepgramTranscriptionEvents,
SonioxTranscriptionEvents,
IbmTranscriptionEvents,
NvidiaTranscriptionEvents,
JambonzTranscriptionEvents
} = require('../utils/constants');
const { normalizeJambones } = require('@jambonz/verb-specifications');
class TaskTranscribe extends Task {
constructor(logger, opts, parentTask) {
@@ -13,6 +20,18 @@ class TaskTranscribe extends Task {
this.preconditions = TaskPreconditions.Endpoint;
this.parentTask = parentTask;
const {
setChannelVarsForStt,
normalizeTranscription,
removeSpeechListeners,
setSpeechCredentialsAtRuntime,
compileSonioxTranscripts
} = require('../utils/transcription-utils')(logger);
this.setChannelVarsForStt = setChannelVarsForStt;
this.normalizeTranscription = normalizeTranscription;
this.removeSpeechListeners = removeSpeechListeners;
this.compileSonioxTranscripts = compileSonioxTranscripts;
this.transcriptionHook = this.data.transcriptionHook;
this.earlyMedia = this.data.earlyMedia === true || (parentTask && parentTask.earlyMedia);
@@ -22,39 +41,14 @@ class TaskTranscribe extends Task {
this.interim = !!recognizer.interim;
this.separateRecognitionPerChannel = recognizer.separateRecognitionPerChannel;
/* vad: if provided, we don't connect to recognizer until voice activity is detected */
const {enable, voiceMs = 0, mode = -1} = recognizer.vad || {};
this.vad = {enable, voiceMs, mode};
/* let credentials be supplied in the recognizer object at runtime */
this.sttCredentials = setSpeechCredentialsAtRuntime(recognizer);
/* google-specific options */
this.hints = recognizer.hints || [];
this.hintsBoost = recognizer.hintsBoost;
this.profanityFilter = recognizer.profanityFilter;
this.punctuation = !!recognizer.punctuation;
this.enhancedModel = !!recognizer.enhancedModel;
this.model = recognizer.model || 'phone_call';
this.words = !!recognizer.words;
this.singleUtterance = recognizer.singleUtterance || false;
this.diarization = !!recognizer.diarization;
this.diarizationMinSpeakers = recognizer.diarizationMinSpeakers || 0;
this.diarizationMaxSpeakers = recognizer.diarizationMaxSpeakers || 0;
this.interactionType = recognizer.interactionType || 'unspecified';
this.naicsCode = recognizer.naicsCode || 0;
this.altLanguages = recognizer.altLanguages || [];
/* buffer for soniox transcripts */
this._sonioxTranscripts = [];
/* aws-specific options */
this.identifyChannels = !!recognizer.identifyChannels;
this.vocabularyName = recognizer.vocabularyName;
this.vocabularyFilterName = recognizer.vocabularyFilterName;
this.filterMethod = recognizer.filterMethod;
/* microsoft options */
this.outputFormat = recognizer.outputFormat || 'simple';
this.profanityOption = recognizer.profanityOption || 'raw';
this.requestSnr = recognizer.requestSnr || false;
this.initialSpeechTimeoutMs = recognizer.initialSpeechTimeoutMs || 0;
this.azureServiceEndpoint = recognizer.azureServiceEndpoint;
this.azureSttEndpointId = recognizer.azureSttEndpointId;
recognizer.hints = recognizer.hints || [];
recognizer.altLanguages = recognizer.altLanguages || [];
}
get name() { return TaskName.Transcribe; }
@@ -62,28 +56,38 @@ class TaskTranscribe extends Task {
async exec(cs, {ep, ep2}) {
super.exec(cs);
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
const {getNuanceAccessToken, getIbmAccessToken} = cs.srf.locals.dbHelpers;
if (cs.hasGlobalSttHints) {
const {hints, hintsBoost} = cs.globalSttHints;
this.hints = this.hints.concat(hints);
if (!this.hintsBoost && hintsBoost) this.hintsBoost = hintsBoost;
this.logger.debug({hints: this.hints, hintsBoost: this.hintsBoost},
'Transcribe:exec - applying global `sttHints');
this.data.recognizer.hints = this.data.recognizer.hints.concat(hints);
if (!this.data.recognizer.hintsBoost && hintsBoost) this.data.recognizer.hintsBoost = hintsBoost;
this.logger.debug({hints: this.data.recognizer.hints, hintsBoost: this.data.recognizer.hintsBoost},
'Transcribe:exec - applying global sttHints');
}
if (cs.hasAltLanguages) {
this.altLanguages = this.altLanguages.concat(cs.altLanguages);
this.data.recognizer.altLanguages = this.data.recognizer.altLanguages.concat(cs.altLanguages);
this.logger.debug({altLanguages: this.altLanguages},
'Gather:exec - applying altLanguages');
'Transcribe:exec - applying altLanguages');
}
if (cs.hasGlobalSttPunctuation) {
this.punctuation = cs.globalSttPunctuation;
if (cs.hasGlobalSttPunctuation && !this.data.recognizer.punctuation) {
this.data.recognizer.punctuation = cs.globalSttPunctuation;
}
this.ep = ep;
this.ep2 = ep2;
if ('default' === this.vendor || !this.vendor) this.vendor = cs.speechRecognizerVendor;
if ('default' === this.language || !this.language) this.language = cs.speechRecognizerLanguage;
this.sttCredentials = cs.getSpeechCredentials(this.vendor, 'stt');
if ('default' === this.vendor || !this.vendor) {
this.vendor = cs.speechRecognizerVendor;
if (this.data.recognizer) this.data.recognizer.vendor = this.vendor;
}
if ('default' === this.language || !this.language) {
this.language = cs.speechRecognizerLanguage;
if (this.data.recognizer) this.data.recognizer.language = this.language;
}
if (!this.data.recognizer.vendor) {
this.data.recognizer.vendor = this.vendor;
}
if (!this.sttCredentials) this.sttCredentials = cs.getSpeechCredentials(this.vendor, 'stt');
try {
if (!this.sttCredentials) {
@@ -96,6 +100,22 @@ class TaskTranscribe extends Task {
}).catch((err) => this.logger.info({err}, 'Error generating alert for no stt'));
throw new Error('no provisioned speech credentials for STT');
}
if (this.vendor === 'nuance' && this.sttCredentials.client_id) {
/* get nuance access token */
const {client_id, secret} = this.sttCredentials;
const {access_token, servedFromCache} = await getNuanceAccessToken(client_id, secret, 'asr tts');
this.logger.debug({client_id},
`Transcribe:exec - got nuance access token ${servedFromCache ? 'from cache' : ''}`);
this.sttCredentials = {...this.sttCredentials, access_token};
}
else if (this.vendor == 'ibm' && this.sttCredentials.stt_api_key) {
/* get ibm access token */
const {stt_api_key, stt_region} = this.sttCredentials;
const {access_token, servedFromCache} = await getIbmAccessToken(stt_api_key);
this.logger.debug({stt_api_key}, `Transcribe:exec - got ibm access token ${servedFromCache ? 'from cache' : ''}`);
this.sttCredentials = {...this.sttCredentials, access_token, stt_region};
}
await this._startTranscribing(cs, ep, 1);
if (this.separateRecognitionPerChannel && ep2) {
await this._startTranscribing(cs, ep2, 2);
@@ -109,14 +129,7 @@ class TaskTranscribe extends Task {
this.logger.info(err, 'TaskTranscribe:exec - error');
this.parentTask && this.parentTask.emit('error', err);
}
ep.removeCustomEventListener(GoogleTranscriptionEvents.Transcription);
ep.removeCustomEventListener(GoogleTranscriptionEvents.NoAudioDetected);
ep.removeCustomEventListener(GoogleTranscriptionEvents.MaxDurationExceeded);
ep.removeCustomEventListener(AwsTranscriptionEvents.Transcription);
ep.removeCustomEventListener(AwsTranscriptionEvents.NoAudioDetected);
ep.removeCustomEventListener(AwsTranscriptionEvents.MaxDurationExceeded);
ep.removeCustomEventListener(AzureTranscriptionEvents.Transcription);
ep.removeCustomEventListener(AzureTranscriptionEvents.NoSpeechDetected);
this.removeSpeechListeners(ep);
}
async kill(cs) {
@@ -140,124 +153,88 @@ class TaskTranscribe extends Task {
}
async _startTranscribing(cs, ep, channel) {
const opts = {};
const opts = this.setChannelVarsForStt(this, this.sttCredentials, this.data.recognizer);
switch (this.vendor) {
case 'google':
this.bugname = 'google_transcribe';
ep.addCustomEventListener(GoogleTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(GoogleTranscriptionEvents.NoAudioDetected,
this._onNoAudio.bind(this, cs, ep, channel));
ep.addCustomEventListener(GoogleTranscriptionEvents.MaxDurationExceeded,
this._onMaxDurationExceeded.bind(this, cs, ep, channel));
break;
if (this.vad.enable) {
opts.START_RECOGNIZING_ON_VAD = 1;
if (this.vad.voiceMs) opts.RECOGNIZER_VAD_VOICE_MS = this.vad.voiceMs;
if (this.vad.mode >= 0 && this.vad.mode <= 3) opts.RECOGNIZER_VAD_MODE = this.vad.mode;
case 'aws':
case 'polly':
this.bugname = 'aws_transcribe';
ep.addCustomEventListener(AwsTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(AwsTranscriptionEvents.NoAudioDetected,
this._onNoAudio.bind(this, cs, ep, channel));
ep.addCustomEventListener(AwsTranscriptionEvents.MaxDurationExceeded,
this._onMaxDurationExceeded.bind(this, cs, ep, channel));
break;
case 'microsoft':
this.bugname = 'azure_transcribe';
ep.addCustomEventListener(AzureTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(AzureTranscriptionEvents.NoSpeechDetected,
this._onNoAudio.bind(this, cs, ep, channel));
break;
case 'nuance':
this.bugname = 'nuance_transcribe';
ep.addCustomEventListener(NuanceTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(NuanceTranscriptionEvents.StartOfSpeech,
this._onStartOfSpeech.bind(this, cs, ep, channel));
ep.addCustomEventListener(NuanceTranscriptionEvents.TranscriptionComplete,
this._onTranscriptionComplete.bind(this, cs, ep, channel));
break;
case 'deepgram':
this.bugname = 'deepgram_transcribe';
ep.addCustomEventListener(DeepgramTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(DeepgramTranscriptionEvents.Connect,
this._onDeepgramConnect.bind(this, cs, ep, channel));
ep.addCustomEventListener(DeepgramTranscriptionEvents.ConnectFailure,
this._onDeepGramConnectFailure.bind(this, cs, ep, channel));
break;
case 'soniox':
this.bugname = 'soniox_transcribe';
ep.addCustomEventListener(SonioxTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
break;
case 'ibm':
this.bugname = 'ibm_transcribe';
ep.addCustomEventListener(IbmTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(IbmTranscriptionEvents.Connect,
this._onIbmConnect.bind(this, cs, ep, channel));
ep.addCustomEventListener(IbmTranscriptionEvents.ConnectFailure,
this._onIbmConnectFailure.bind(this, cs, ep, channel));
break;
case 'nvidia':
this.bugname = 'nvidia_transcribe';
ep.addCustomEventListener(NvidiaTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(NvidiaTranscriptionEvents.StartOfSpeech,
this._onStartOfSpeech.bind(this, cs, ep));
ep.addCustomEventListener(NvidiaTranscriptionEvents.TranscriptionComplete,
this._onTranscriptionComplete.bind(this, cs, ep));
ep.addCustomEventListener(NvidiaTranscriptionEvents.VadDetected,
this._onVadDetected.bind(this, cs, ep));
break;
default:
throw new Error(`Invalid vendor ${this.vendor}`);
}
ep.addCustomEventListener(GoogleTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(GoogleTranscriptionEvents.NoAudioDetected, this._onNoAudio.bind(this, cs, ep, channel));
ep.addCustomEventListener(GoogleTranscriptionEvents.MaxDurationExceeded,
this._onMaxDurationExceeded.bind(this, cs, ep, channel));
ep.addCustomEventListener(AwsTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(AwsTranscriptionEvents.NoAudioDetected, this._onNoAudio.bind(this, cs, ep, channel));
ep.addCustomEventListener(AwsTranscriptionEvents.MaxDurationExceeded,
this._onMaxDurationExceeded.bind(this, cs, ep, channel));
ep.addCustomEventListener(AzureTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(AzureTranscriptionEvents.NoSpeechDetected, this._onNoAudio.bind(this, cs, ep, channel));
/* common handler for all stt engine errors */
ep.addCustomEventListener(JambonzTranscriptionEvents.Error, this._onJambonzError.bind(this, cs, ep));
await ep.set(opts)
.catch((err) => this.logger.info(err, 'Error setting channel variables'));
if (this.vendor === 'google') {
this.bugname = 'google_transcribe';
if (this.sttCredentials) opts.GOOGLE_APPLICATION_CREDENTIALS = JSON.stringify(this.sttCredentials.credentials);
[
['enhancedModel', 'GOOGLE_SPEECH_USE_ENHANCED'],
//['separateRecognitionPerChannel', 'GOOGLE_SPEECH_SEPARATE_RECOGNITION_PER_CHANNEL'],
['profanityFilter', 'GOOGLE_SPEECH_PROFANITY_FILTER'],
['punctuation', 'GOOGLE_SPEECH_ENABLE_AUTOMATIC_PUNCTUATION'],
['words', 'GOOGLE_SPEECH_ENABLE_WORD_TIME_OFFSETS'],
['singleUtterance', 'GOOGLE_SPEECH_SINGLE_UTTERANCE'],
['diarization', 'GOOGLE_SPEECH_PROFANITY_FILTER']
].forEach((arr) => {
if (this[arr[0]]) opts[arr[1]] = true;
else if (this[arr[0]] === false) opts[arr[1]] = false;
});
if (this.hints.length > 0) {
opts.GOOGLE_SPEECH_HINTS = this.hints.join(',');
if (typeof this.hintsBoost === 'number') {
opts.GOOGLE_SPEECH_HINTS_BOOST = this.hintsBoost;
}
}
if (this.altLanguages.length > 0) opts.GOOGLE_SPEECH_ALTERNATIVE_LANGUAGE_CODES = this.altLanguages.join(',');
else opts.GOOGLE_SPEECH_ALTERNATIVE_LANGUAGE_CODES = '';
if ('unspecified' !== this.interactionType) {
opts.GOOGLE_SPEECH_METADATA_INTERACTION_TYPE = this.interactionType;
}
opts.GOOGLE_SPEECH_MODEL = this.model;
if (this.diarization && this.diarizationMinSpeakers > 0) {
opts.GOOGLE_SPEECH_SPEAKER_DIARIZATION_MIN_SPEAKER_COUNT = this.diarizationMinSpeakers;
}
if (this.diarization && this.diarizationMaxSpeakers > 0) {
opts.GOOGLE_SPEECH_SPEAKER_DIARIZATION_MAX_SPEAKER_COUNT = this.diarizationMaxSpeakers;
}
if (this.naicsCode > 0) opts.GOOGLE_SPEECH_METADATA_INDUSTRY_NAICS_CODE = this.naicsCode;
await ep.set(opts)
.catch((err) => this.logger.info(err, 'TaskTranscribe:_startTranscribing with google'));
}
else if (this.vendor === 'aws') {
this.bugname = 'aws_transcribe';
[
['diarization', 'AWS_SHOW_SPEAKER_LABEL'],
['identifyChannels', 'AWS_ENABLE_CHANNEL_IDENTIFICATION']
].forEach((arr) => {
if (this[arr[0]]) opts[arr[1]] = true;
});
if (this.vocabularyName) opts.AWS_VOCABULARY_NAME = this.vocabularyName;
if (this.vocabularyFilterName) {
opts.AWS_VOCABULARY_NAME = this.vocabularyFilterName;
opts.AWS_VOCABULARY_FILTER_METHOD = this.filterMethod || 'mask';
}
if (this.sttCredentials) {
Object.assign(opts, {
AWS_ACCESS_KEY_ID: this.sttCredentials.accessKeyId,
AWS_SECRET_ACCESS_KEY: this.sttCredentials.secretAccessKey,
AWS_REGION: this.sttCredentials.region
});
}
else {
Object.assign(opts, {
AWS_ACCESS_KEY_ID: process.env.AWS_ACCESS_KEY_ID,
AWS_SECRET_ACCESS_KEY: process.env.AWS_SECRET_ACCESS_KEY,
AWS_REGION: process.env.AWS_REGION
});
}
await ep.set(opts)
.catch((err) => this.logger.info(err, 'TaskTranscribe:_startTranscribing with aws'));
}
else if (this.vendor === 'microsoft') {
this.bugname = 'azure_transcribe';
const {api_key, region, use_custom_stt, custom_stt_endpoint} = this.sttCredentials;
Object.assign(opts, {
'AZURE_SUBSCRIPTION_KEY': api_key,
'AZURE_REGION': region
});
if (this.azureSttEndpointId) {
Object.assign(opts, {'AZURE_SERVICE_ENDPOINT_ID': this.azureSttEndpointId});
}
else if (use_custom_stt && custom_stt_endpoint) {
Object.assign(opts, {'AZURE_SERVICE_ENDPOINT_ID': custom_stt_endpoint});
}
if (this.hints && this.hints.length > 0) {
opts.AZURE_SPEECH_HINTS = this.hints.map((h) => h.trim()).join(',');
}
if (this.altLanguages.length > 0) opts.AZURE_SPEECH_ALTERNATIVE_LANGUAGE_CODES = this.altLanguages.join(',');
else opts.AZURE_SPEECH_ALTERNATIVE_LANGUAGE_CODES = '';
if (this.requestSnr) opts.AZURE_REQUEST_SNR = 1;
if (this.profanityOption !== 'raw') opts.AZURE_PROFANITY_OPTION = this.profanityOption;
if (this.initialSpeechTimeoutMs > 0) opts.AZURE_INITIAL_SPEECH_TIMEOUT_MS = this.initialSpeechTimeoutMs;
if (this.outputFormat !== 'simple') opts.AZURE_USE_OUTPUT_FORMAT_DETAILED = 1;
if (this.azureServiceEndpoint) opts.AZURE_SERVICE_ENDPOINT = this.azureServiceEndpoint;
await ep.set(opts)
.catch((err) => this.logger.info(err, 'TaskTranscribe:_startTranscribing with azure'));
}
await this._transcribe(ep);
}
@@ -271,50 +248,64 @@ class TaskTranscribe extends Task {
});
}
_onTranscription(cs, ep, channel, evt, fsEvent) {
async _onTranscription(cs, ep, channel, evt, fsEvent) {
// make sure this is not a transcript from answering machine detection
const bugname = fsEvent.getHeader('media-bugname');
if (bugname && this.bugname !== bugname) return;
this.logger.debug({evt, channel}, 'TaskTranscribe:_onTranscription');
if ('aws' === this.vendor && Array.isArray(evt) && evt.length > 0) evt = evt[0];
if ('microsoft' === this.vendor) {
const nbest = evt.NBest;
const language_code = evt.PrimaryLanguage?.Language || this.language;
const alternatives = nbest ? nbest.map((n) => {
return {
confidence: n.Confidence,
transcript: n.Display
};
}) :
[
{
transcript: evt.DisplayText || evt.Text
}
];
if (this.vendor === 'ibm') {
if (evt?.state === 'listening') return;
}
this.logger.debug({evt}, 'TaskTranscribe:_onTranscription - before normalization');
const newEvent = {
is_final: evt.RecognitionStatus === 'Success',
channel,
language_code,
alternatives
};
evt = newEvent;
evt = this.normalizeTranscription(evt, this.vendor, channel, this.language);
this.logger.debug({evt}, 'TaskTranscribe:_onTranscription');
if (evt.alternatives.length === 0) {
this.logger.info({evt}, 'TaskTranscribe:_onTranscription - got empty transcript, continue listening');
return;
}
if (evt.alternatives[0].transcript === '' && !cs.callGone && !this.killed) {
this.logger.info({evt}, 'TaskGather:_onTranscription - got empty transcript, listen again');
return this._transcribe(ep);
if (evt.alternatives[0]?.transcript === '' && !cs.callGone && !this.killed) {
if (['microsoft', 'deepgram'].includes(this.vendor)) {
this.logger.info({evt}, 'TaskTranscribe:_onTranscription - got empty transcript, continue listening');
}
else {
this.logger.info({evt}, 'TaskTranscribe:_onTranscription - got empty transcript, listen again');
this._transcribe(ep);
}
return;
}
evt.channel_tag = channel;
if (this.vendor === 'soniox') {
/* compile transcripts into one */
this._sonioxTranscripts.push(evt.vendor.finalWords);
if (evt.is_final) {
evt = this.compileSonioxTranscripts(this._sonioxTranscripts, 1, this.language);
this._sonioxTranscripts = [];
}
}
if (this.transcriptionHook) {
const b3 = this.getTracingPropagation();
const httpHeaders = b3 && {b3};
this.cs.requestor.request('verb:hook', this.transcriptionHook,
Object.assign({speech: evt}, this.cs.callInfo), httpHeaders)
.catch((err) => this.logger.info(err, 'TranscribeTask:_onTranscription error'));
try {
const json = await this.cs.requestor.request('verb:hook', this.transcriptionHook, {
...this.cs.callInfo,
...httpHeaders,
speech: evt
});
this.logger.info({json}, 'sent transcriptionHook');
if (json && Array.isArray(json) && !this.parentTask) {
const makeTask = require('./make_task');
const tasks = normalizeJambones(this.logger, json).map((tdata) => makeTask(this.logger, tdata));
if (tasks && tasks.length > 0) {
this.logger.info({tasks: tasks}, `${this.name} replacing application with ${tasks.length} tasks`);
this.cs.replaceApplication(tasks);
}
}
} catch (err) {
this.logger.info(err, 'TranscribeTask:_onTranscription error');
}
}
if (this.parentTask) {
this.parentTask.emit('transcription', evt);
@@ -342,6 +333,64 @@ class TaskTranscribe extends Task {
this._timer = null;
}
}
_onDeepgramConnect(_cs, _ep) {
this.logger.debug('TaskTranscribe:_onDeepgramConnect');
}
_onDeepGramConnectFailure(cs, _ep, _channel, evt) {
const {reason} = evt;
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info({evt}, 'TaskTranscribe:_onDeepgramConnectFailure');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Failed connecting to Deepgram speech recognizer: ${reason}`,
vendor: 'deepgram',
}).catch((err) => this.logger.info({err}, 'Error generating alert for deepgram connection failure'));
this.notifyError(`Failed connecting to speech vendor deepgram: ${reason}`);
this.notifyTaskDone();
}
_onIbmConnect(_cs, _ep) {
this.logger.debug('TaskTranscribe:_onIbmConnect');
}
_onIbmConnectFailure(cs, _ep, _channel, evt) {
const {reason} = evt;
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info({evt}, 'TaskTranscribe:_onIbmConnectFailure');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Failed connecting to IBM watson speech recognizer: ${reason}`,
vendor: 'ibm',
}).catch((err) => this.logger.info({err}, 'Error generating alert for IBM connection failure'));
this.notifyError(`Failed connecting to speech vendor IBM: ${reason}`);
this.notifyTaskDone();
}
_onIbmError(cs, _ep, _channel, evt) {
this.logger.info({evt}, 'TaskTranscribe:_onIbmError');
}
_onJambonzError(cs, _ep, evt) {
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError');
const {writeAlerts, AlertType} = cs.srf.locals;
if (this.vendor === 'nuance') {
const {code, error} = evt;
if (code === 404 && error === 'No speech') return this._resolve('timeout');
if (code === 413 && error === 'Too much speech') return this._resolve('timeout');
}
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Custom speech vendor ${this.vendor} error: ${evt.error}`,
vendor: this.vendor,
}).catch((err) => this.logger.info({err}, 'Error generating alert for jambonz custom connection failure'));
this.notifyError({msg: 'ASR error', details:`Custom speech vendor ${this.vendor} error: ${evt.error}`});
}
}
module.exports = TaskTranscribe;
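The eight per-vendor removeCustomEventListener calls above collapse into removeSpeechListeners from ../utils/transcription-utils. A plausible sketch of such a helper, using only the event constants that appear later in this diff (the real implementation may detach additional vendors):

const {
  GoogleTranscriptionEvents,
  AwsTranscriptionEvents,
  AzureTranscriptionEvents,
  JambonzTranscriptionEvents
} = require('./constants');

// detach every transcription listener that may have been attached on this endpoint
const removeSpeechListeners = (ep) => {
  [
    ...Object.values(GoogleTranscriptionEvents),
    ...Object.values(AwsTranscriptionEvents),
    ...Object.values(AzureTranscriptionEvents),
    ...Object.values(JambonzTranscriptionEvents)
  ].forEach((eventName) => ep.removeCustomEventListener(eventName));
};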

View File

@@ -267,32 +267,51 @@ module.exports = (logger) => {
ep.addCustomEventListener(AzureTranscriptionEvents.Transcription, onTranscription.bind(null, cs, ep, task));
ep.addCustomEventListener(AzureTranscriptionEvents.NoSpeechDetected, onNoSpeechDetected.bind(null, cs, ep, task));
}
logger.debug({sttOpts}, 'startAmd: setting channel vars');
await ep.set(sttOpts).catch((err) => logger.info(err, 'Error setting channel variables'));
amd
.on(AmdEvents.NoSpeechDetected, (evt) => {
task.emit('amd', {type: AmdEvents.NoSpeechDetected, ...evt});
ep.stopTranscription({vendor, bugname});
try {
ep.connected && ep.stopTranscription({vendor, bugname});
} catch (err) {
logger.info({err}, 'Error stopping transcription');
}
})
.on(AmdEvents.HumanDetected, (evt) => {
task.emit('amd', {type: AmdEvents.HumanDetected, ...evt});
ep.stopTranscription({vendor, bugname});
try {
ep.connected && ep.stopTranscription({vendor, bugname});
} catch (err) {
logger.info({err}, 'Error stopping transcription');
}
})
.on(AmdEvents.MachineDetected, (evt) => {
task.emit('amd', {type: AmdEvents.MachineDetected, ...evt});
})
.on(AmdEvents.DecisionTimeout, (evt) => {
task.emit('amd', {type: AmdEvents.DecisionTimeout, ...evt});
ep.stopTranscription({vendor, bugname});
try {
ep.connected && ep.stopTranscription({vendor, bugname});
} catch (err) {
logger.info({err}, 'Error stopping transcription');
}
})
.on(AmdEvents.ToneTimeout, (evt) => {
//task.emit('amd', {type: AmdEvents.ToneTimeout, ...evt});
ep.execute('avmd_stop').catch((err) => logger.info(err, 'Error stopping avmd'));
try {
ep.connected && ep.execute('avmd_stop').catch((err) => logger.info(err, 'Error stopping avmd'));
} catch (err) {
logger.info({err}, 'Error stopping avmd');
}
})
.on(AmdEvents.MachineStoppedSpeaking, () => {
task.emit('amd', {type: AmdEvents.MachineStoppedSpeaking});
ep.stopTranscription({vendor, bugname});
try {
ep.connected && ep.stopTranscription({vendor, bugname});
} catch (err) {
logger.info({err}, 'Error stopping transcription');
}
});
/* start transcribing, and also listening for beep */
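The repeated guard introduced above could also be expressed once as a small helper; this is purely illustrative and not part of the change:

const safeStopTranscription = (logger, ep, vendor, bugname) => {
  try {
    // only attempt to stop if the endpoint is still connected
    ep.connected && ep.stopTranscription({vendor, bugname});
  } catch (err) {
    logger.info({err}, 'Error stopping transcription');
  }
};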

View File

@@ -36,7 +36,7 @@ class SnsNotifier extends Emitter {
logger.info(`SNS lifecycle server failed to bind port on ${e.port}, will try next port`);
const server = this._doListen(logger, app, ++e.port, resolve);
server.on('error', this._handleErrors.bind(null, logger, app, resolve, reject));
server.on('error', this._handleErrors.bind(this, logger, app, resolve, reject));
return;
}
reject(e);
@@ -120,7 +120,7 @@ class SnsNotifier extends Emitter {
});
return new Promise((resolve, reject) => {
const server = this._doListen(this.logger, app, PORT, resolve);
server.on('error', this._handleErrors.bind(null, this.logger, app, resolve, reject));
server.on('error', this._handleErrors.bind(this, this.logger, app, resolve, reject));
});
} catch (err) {

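Why rebinding from null to this matters here: _handleErrors is an instance method, so binding null loses the notifier's state when the retry path re-registers it. A toy illustration (not the real SnsNotifier):

class Toy {
  constructor() { this.retries = 0; }
  _handleErrors() { this.retries += 1; }
}
const t = new Toy();
const bound = t._handleErrors.bind(t);   // keeps access to instance state
bound();
console.log(t.retries);                  // 1
// t._handleErrors.bind(null)() would throw: `this` is null inside the method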
View File

@@ -67,6 +67,35 @@
"MaxDurationExceeded": "google_transcribe::max_duration_exceeded",
"VadDetected": "google_transcribe::vad_detected"
},
"NuanceTranscriptionEvents": {
"Transcription": "nuance_transcribe::transcription",
"StartOfSpeech": "nuance_transcribe::start_of_speech",
"TranscriptionComplete": "nuance_transcribe::end_of_transcription",
"Error": "nuance_transcribe::error",
"VadDetected": "nuance_transcribe::vad_detected"
},
"NvidiaTranscriptionEvents": {
"Transcription": "nvidia_transcribe::transcription",
"StartOfSpeech": "nvidia_transcribe::start_of_speech",
"TranscriptionComplete": "nvidia_transcribe::end_of_transcription",
"Error": "nvidia_transcribe::error",
"VadDetected": "nvidia_transcribe::vad_detected"
},
"DeepgramTranscriptionEvents": {
"Transcription": "deepgram_transcribe::transcription",
"ConnectFailure": "deepgram_transcribe::connect_failed",
"Connect": "deepgram_transcribe::connect"
},
"SonioxTranscriptionEvents": {
"Transcription": "soniox_transcribe::transcription",
"Error": "soniox_transcribe::error"
},
"IbmTranscriptionEvents": {
"Transcription": "ibm_transcribe::transcription",
"ConnectFailure": "ibm_transcribe::connect_failed",
"Connect": "ibm_transcribe::connect",
"Error": "ibm_transcribe::error"
},
"AwsTranscriptionEvents": {
"Transcription": "aws_transcribe::transcription",
"EndOfTranscript": "aws_transcribe::end_of_transcript",
@@ -81,6 +110,12 @@
"NoSpeechDetected": "azure_transcribe::no_speech_detected",
"VadDetected": "azure_transcribe::vad_detected"
},
"JambonzTranscriptionEvents": {
"Transcription": "jambonz_transcribe::transcription",
"ConnectFailure": "jambonz_transcribe::connect_failed",
"Connect": "jambonz_transcribe::connect",
"Error": "jambonz_transcribe::error"
},
"ListenEvents": {
"Connect": "mod_audio_fork::connect",
"ConnectFailure": "mod_audio_fork::connect_failed",
@@ -122,6 +157,7 @@
"queue:status",
"dial:confirm",
"verb:hook",
"verb:status",
"jambonz:error"
],
"RecordState": {

View File

@@ -39,12 +39,43 @@ const speechMapper = (cred) => {
obj.region = o.region;
obj.use_custom_stt = o.use_custom_stt;
obj.custom_stt_endpoint = o.custom_stt_endpoint;
obj.use_custom_tts = o.use_custom_tts;
obj.custom_tts_endpoint = o.custom_tts_endpoint;
}
else if ('wellsaid' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
}
else if ('nuance' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.client_id = o.client_id;
obj.secret = o.secret;
obj.nuance_tts_uri = o.nuance_tts_uri;
obj.nuance_stt_uri = o.nuance_stt_uri;
}
else if ('ibm' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.tts_api_key = o.tts_api_key;
obj.tts_region = o.tts_region;
obj.stt_api_key = o.stt_api_key;
obj.stt_region = o.stt_region;
}
else if ('deepgram' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
}
else if ('soniox' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
}
else if (obj.vendor.startsWith('custom:')) {
const o = JSON.parse(decrypt(credential));
obj.auth_token = o.auth_token;
obj.custom_stt_url = o.custom_stt_url;
obj.custom_tts_url = o.custom_tts_url;
}
} catch (err) {
console.log(err);
}
return obj;
};
@@ -60,32 +91,13 @@ module.exports = (logger, srf) => {
const [r2] = await pp.query(sqlSpeechCredentials, account_sid);
const speech = r2.map(speechMapper);
/* search at the service provider level if we don't find it at the account level */
const haveGoogle = speech.find((s) => s.vendor === 'google');
const haveAws = speech.find((s) => s.vendor === 'aws');
const haveMicrosoft = speech.find((s) => s.vendor === 'microsoft');
const haveWellsaid = speech.find((s) => s.vendor === 'wellsaid');
if (!haveGoogle || !haveAws || !haveMicrosoft) {
const [r3] = await pp.query(sqlSpeechCredentialsForSP, account_sid);
if (r3.length) {
if (!haveGoogle) {
const google = r3.find((s) => s.vendor === 'google');
if (google) speech.push(speechMapper(google));
}
if (!haveAws) {
const aws = r3.find((s) => s.vendor === 'aws');
if (aws) speech.push(speechMapper(aws));
}
if (!haveMicrosoft) {
const ms = r3.find((s) => s.vendor === 'microsoft');
if (ms) speech.push(speechMapper(ms));
}
if (!haveWellsaid) {
const wellsaid = r3.find((s) => s.vendor === 'wellsaid');
if (wellsaid) speech.push(speechMapper(wellsaid));
}
/* add service provider creds unless we have that vendor at the account level */
const [r3] = await pp.query(sqlSpeechCredentialsForSP, account_sid);
r3.forEach((s) => {
if (!speech.find((s2) => s2.vendor === s.vendor)) {
speech.push(speechMapper(s));
}
}
});
return {
...r[0],
@@ -94,6 +106,7 @@ module.exports = (logger, srf) => {
};
const updateSpeechCredentialLastUsed = async(speech_credential_sid) => {
if (!speech_credential_sid) return;
const pp = pool.promise();
const sql = 'UPDATE speech_credentials SET last_used = NOW() WHERE speech_credential_sid = ?';
try {

View File

@@ -2,9 +2,9 @@ const crypto = require('crypto');
const algorithm = process.env.LEGACY_CRYPTO ? 'aes-256-ctr' : 'aes-256-cbc';
const iv = crypto.randomBytes(16);
const secretKey = crypto.createHash('sha256')
.update(String(process.env.JWT_SECRET))
.update(process.env.ENCRYPTION_SECRET || process.env.JWT_SECRET)
.digest('base64')
.substr(0, 32);
.substring(0, 32);
const encrypt = (text) => {
const cipher = crypto.createCipheriv(algorithm, secretKey, iv);
@@ -25,8 +25,8 @@ const decrypt = (data) => {
throw err;
}
const decipher = crypto.createDecipheriv(algorithm, secretKey, Buffer.from(hash.iv, 'hex'));
const decrpyted = Buffer.concat([decipher.update(Buffer.from(hash.content, 'hex')), decipher.final()]);
return decrpyted.toString();
const decrypted = Buffer.concat([decipher.update(Buffer.from(hash.content, 'hex')), decipher.final()]);
return decrypted.toString();
};
module.exports = {

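A minimal round-trip sketch of the pair above. The module path is hypothetical, the secret is a placeholder, and both functions are assumed to be exported (decrypt is imported elsewhere in this diff); note the secret must be set before the module is first required, since the key is derived at load time.

// set before require: the key is derived from ENCRYPTION_SECRET (or JWT_SECRET) at module load
process.env.ENCRYPTION_SECRET = process.env.ENCRYPTION_SECRET || 'example-secret';
const {encrypt, decrypt} = require('./encrypt-decrypt');   // hypothetical path

const blob = encrypt(JSON.stringify({api_key: 'not-a-real-key'}));
console.log(JSON.parse(decrypt(blob)).api_key);            // 'not-a-real-key'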
View File

@@ -21,6 +21,7 @@ const handleErrors = (logger, app, resolve, reject, e) => {
server.on('error', handleErrors.bind(null, logger, app, resolve, reject));
return;
}
logger.info({err: e, port: PORT}, 'httpListener error');
reject(e);
};
@@ -30,7 +31,7 @@ const createHttpListener = (logger, srf) => {
app.use(express.urlencoded({ extended: true }));
app.use(express.json());
app.use('/', httpRoutes);
app.use((err, req, res, next) => {
app.use((err, _req, res, _next) => {
logger.error(err, 'burped error');
res.status(err.status || 500).json({msg: err.message});
});

View File

@@ -31,6 +31,8 @@ class HttpRequestor extends BaseRequestor {
if (u.port) this._baseUrl = `${u.protocol}://${u.resource}:${u.port}`;
else this._baseUrl = `${u.protocol}://${u.resource}`;
this._protocol = u.protocol;
this._resource = u.resource;
this._port = u.port;
this._search = u.search;
this._usePools = process.env.HTTP_POOL && parseInt(process.env.HTTP_POOL);
@@ -89,6 +91,19 @@ class HttpRequestor extends BaseRequestor {
assert.ok(['GET', 'POST'].includes(method), `HttpRequestor:request method must be 'GET' or 'POST' not ${method}`);
const startAt = process.hrtime();
/* if we have an absolute url, and it is ws then do a websocket connection */
if (this._isAbsoluteUrl(url) && url.startsWith('ws')) {
const WsRequestor = require('./ws-requestor');
this.logger.debug({hook}, 'HttpRequestor: switching to websocket connection');
const h = typeof hook === 'object' ? hook : {url: hook};
const requestor = new WsRequestor(this.logger, this.account_sid, h, this.secret);
if (type === 'session:redirect') {
this.close();
this.emit('handover', requestor);
}
return requestor.request('session:new', hook, params, httpHeaders);
}
let newClient;
try {
let client, path, query;
@@ -98,7 +113,7 @@ class HttpRequestor extends BaseRequestor {
}
else {
const u = parseUrl(url);
if (u.resource === this._resource && u.protocol === this._protocol) {
if (u.resource === this._resource && u.port === this._port && u.protocol === this._protocol) {
client = this.client;
path = u.pathname;
query = u.query;

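A self-contained sketch of how a caller might consume the 'handover' event emitted above when an HTTP requestor upgrades to a websocket requestor on session:redirect; the session object here is a toy stand-in, not the real CallSession.

const Emitter = require('events');

// toy stand-ins purely for illustration
const session = {requestor: new Emitter()};
const httpRequestor = session.requestor;

httpRequestor.on('handover', (wsRequestor) => {
  session.requestor = wsRequestor;   // later hooks go out over the websocket connection
});

// simulate the handover performed above on session:redirect to a ws:// url
httpRequestor.emit('handover', new Emitter());
console.log(session.requestor !== httpRequestor);  // true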
View File

@@ -138,7 +138,6 @@ function installSrfLocals(srf, logger) {
retrieveCall,
listCalls,
deleteCall,
synthAudio,
createHash,
retrieveHash,
deleteKey,
@@ -151,12 +150,20 @@ function installSrfLocals(srf, logger) {
pushBack,
popFront,
removeFromList,
getListPosition,
lengthOfList,
getListPosition
} = require('@jambonz/realtimedb-helpers')({
host: process.env.JAMBONES_REDIS_HOST,
port: process.env.JAMBONES_REDIS_PORT || 6379
}, logger, tracer);
const {
synthAudio,
getNuanceAccessToken,
getIbmAccessToken,
} = require('@jambonz/speech-utils')({
host: process.env.JAMBONES_REDIS_HOST,
port: process.env.JAMBONES_REDIS_PORT || 6379
}, logger, tracer);
const {
writeAlerts,
AlertType
@@ -204,7 +211,9 @@ function installSrfLocals(srf, logger) {
popFront,
removeFromList,
lengthOfList,
getListPosition
getListPosition,
getNuanceAccessToken,
getIbmAccessToken
},
parentLogger: logger,
getSBC,

View File

@@ -1,31 +0,0 @@
function normalizeJambones(logger, obj) {
if (!Array.isArray(obj)) throw new Error('malformed jambonz payload: must be array');
const document = [];
for (const tdata of obj) {
if (typeof tdata !== 'object') throw new Error('malformed jambonz payload: must be array of objects');
if ('verb' in tdata) {
// {verb: 'say', text: 'foo..bar'..}
const name = tdata.verb;
const o = {};
Object.keys(tdata)
.filter((k) => k !== 'verb')
.forEach((k) => o[k] = tdata[k]);
const o2 = {};
o2[name] = o;
document.push(o2);
}
else if (Object.keys(tdata).length === 1) {
// {'say': {..}}
document.push(tdata);
}
else {
logger.info(tdata, 'malformed jambonz payload: missing verb property');
throw new Error('malformed jambonz payload: missing verb property');
}
}
logger.debug({document}, `normalizeJambones: returning document with ${document.length} tasks`);
return document;
}
module.exports = normalizeJambones;
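The deleted helper (now imported from @jambonz/verb-specifications) accepted both payload shapes; a quick check with the function above in scope and a no-op logger:

const noop = {info: () => {}, debug: () => {}};

console.log(normalizeJambones(noop, [{verb: 'say', text: 'hello'}]));
// => [ { say: { text: 'hello' } } ]
console.log(normalizeJambones(noop, [{say: {text: 'hello'}}]));
// => [ { say: { text: 'hello' } } ]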

View File

@@ -4,7 +4,7 @@ const SipError = require('drachtio-srf').SipError;
const {TaskPreconditions, CallDirection} = require('../utils/constants');
const CallInfo = require('../session/call-info');
const assert = require('assert');
const normalizeJambones = require('../utils/normalize-jambones');
const { normalizeJambones } = require('@jambonz/verb-specifications');
const makeTask = require('../tasks/make_task');
const ConfirmCallSession = require('../session/confirm-call-session');
const AdultingCallSession = require('../session/adulting-call-session');
@@ -12,7 +12,7 @@ const deepcopy = require('deepcopy');
const moment = require('moment');
const stripCodecs = require('./strip-ancillary-codecs');
const RootSpan = require('./call-tracer');
const { v4: uuidv4 } = require('uuid');
const uuidv4 = require('uuid-random');
class SingleDialer extends Emitter {
constructor({logger, sbcAddress, target, opts, application, callInfo, accountInfo, rootSpan, startSpan}) {
@@ -412,7 +412,7 @@ class SingleDialer extends Emitter {
this.callInfo.updateCallStatus(callStatus, sipStatus, sipReason);
if (typeof duration === 'number') this.callInfo.duration = duration;
try {
this.requestor.request('call:status', this.application.call_status_hook, this.callInfo.toJSON());
this.notifier.request('call:status', this.application.call_status_hook, this.callInfo.toJSON());
} catch (err) {
this.logger.info(err, `SingleDialer:_notifyCallStatusChange error sending ${callStatus} ${sipStatus}`);
}

View File

@@ -1,5 +1,5 @@
const assert = require('assert');
const { v4: uuidv4 } = require('uuid');
const uuidv4 = require('uuid-random');
const {LifeCycleEvents, FS_UUID_SET_NAME} = require('./constants');
const Emitter = require('events');
const debug = require('debug')('jambonz:feature-server');
@@ -75,6 +75,10 @@ module.exports = (logger) => {
}
})();
}
else if (process.env.K8S) {
lifecycleEmitter.scaleIn = () => process.exit(0);
}
async function pingProxies(srf) {
if (process.env.NODE_ENV === 'test') return;

View File

@@ -1,5 +1,5 @@
const xmlParser = require('xml2js').parseString;
const { v4: uuidv4 } = require('uuid');
const uuidv4 = require('uuid-random');
const parseUri = require('drachtio-srf').parseUri;
const transform = require('sdp-transform');
const debug = require('debug')('jambonz:feature-server');
@@ -47,8 +47,16 @@ const parseSiprecPayload = (req, logger) => {
}
}
if (!sdp || !meta) {
logger.info({payload: req.payload}, 'invalid SIPREC payload');
if (!meta && sdp) {
const arr = /^([^]+)(m=[^]+?)(m=[^]+?)$/.exec(sdp);
opts.sdp1 = `${arr[1]}${arr[2]}`;
opts.sdp2 = `${arr[1]}${arr[3]}\r\n`;
opts.sessionId = uuidv4();
logger.info({ payload: req.payload }, 'SIPREC payload with no metadata (e.g. Cisco NBR)');
resolve(opts);
} else if (!sdp || !meta) {
logger.info({ payload: req.payload }, 'invalid SIPREC payload');
return reject(new Error('expected multipart SIPREC body'));
}
@@ -242,7 +250,8 @@ const createSipRecPayload = (sdp1, sdp2, logger) => {
.replace(/a=sendonly\r\n/g, '')
.replace(/a=direction:both\r\n/g, '');
*/
return combinedSdp;
return combinedSdp.replace(/sendrecv/g, 'recvonly');
};
module.exports = { parseSiprecPayload, createSipRecPayload } ;
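Standalone illustration of the SDP split used in parseSiprecPayload above for SIPREC offers that lack metadata: the regex captures the session header plus each m= section. The sample SDP is fabricated and trimmed to the essentials.

const sdp = [
  'v=0',
  'o=- 1 1 IN IP4 10.0.0.1',
  's=-',
  't=0 0',
  'm=audio 4000 RTP/AVP 0',
  'a=sendonly',
  'm=audio 4002 RTP/AVP 0',
  'a=sendonly',
  ''
].join('\r\n');

const arr = /^([^]+)(m=[^]+?)(m=[^]+?)$/.exec(sdp);
const sdp1 = `${arr[1]}${arr[2]}`;       // session header + first media section
const sdp2 = `${arr[1]}${arr[3]}\r\n`;   // session header + second media section
console.log({sdp1, sdp2});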

View File

@@ -1,33 +1,678 @@
module.exports = (logger) => {
const normalizeTranscription = (evt, vendor, channel) => {
if ('aws' === vendor && Array.isArray(evt) && evt.length > 0) evt = evt[0];
if ('microsoft' === vendor) {
const nbest = evt.NBest;
const language_code = evt.PrimaryLanguage?.Language || this.language;
const alternatives = nbest ? nbest.map((n) => {
return {
confidence: n.Confidence,
transcript: n.Display
};
}) :
[
{
transcript: evt.DisplayText || evt.Text
}
];
const {
TaskName,
AzureTranscriptionEvents,
GoogleTranscriptionEvents,
AwsTranscriptionEvents,
NuanceTranscriptionEvents,
DeepgramTranscriptionEvents,
SonioxTranscriptionEvents,
NvidiaTranscriptionEvents,
JambonzTranscriptionEvents
} = require('./constants');
const newEvent = {
is_final: evt.RecognitionStatus === 'Success',
channel,
language_code,
alternatives
};
evt = newEvent;
const stickyVars = {
google: [
'GOOGLE_SPEECH_HINTS',
'GOOGLE_SPEECH_SEPARATE_RECOGNITION_PER_CHANNEL',
'GOOGLE_SPEECH_PROFANITY_FILTER',
'GOOGLE_SPEECH_ENABLE_AUTOMATIC_PUNCTUATION',
'GOOGLE_SPEECH_ENABLE_WORD_TIME_OFFSETS',
'GOOGLE_SPEECH_SINGLE_UTTERANCE',
'GOOGLE_SPEECH_SPEAKER_DIARIZATION',
'GOOGLE_SPEECH_USE_ENHANCED',
'GOOGLE_SPEECH_ALTERNATIVE_LANGUAGE_CODES',
'GOOGLE_SPEECH_METADATA_INTERACTION_TYPE',
'GOOGLE_SPEECH_METADATA_INDUSTRY_NAICS_CODE'
],
microsoft: [
'AZURE_SPEECH_HINTS',
'AZURE_SERVICE_ENDPOINT_ID',
'AZURE_REQUEST_SNR',
'AZURE_PROFANITY_OPTION',
'AZURE_SPEECH_ALTERNATIVE_LANGUAGE_CODES',
'AZURE_SERVICE_ENDPOINT',
'AZURE_INITIAL_SPEECH_TIMEOUT_MS',
'AZURE_USE_OUTPUT_FORMAT_DETAILED',
],
deepgram: [
'DEEPGRAM_SPEECH_KEYWORDS',
'DEEPGRAM_API_KEY',
'DEEPGRAM_SPEECH_TIER',
'DEEPGRAM_SPEECH_MODEL',
'DEEPGRAM_SPEECH_ENABLE_AUTOMATIC_PUNCTUATION',
'DEEPGRAM_SPEECH_PROFANITY_FILTER',
'DEEPGRAM_SPEECH_REDACT',
'DEEPGRAM_SPEECH_DIARIZE',
'DEEPGRAM_SPEECH_NER',
'DEEPGRAM_SPEECH_ALTERNATIVES',
'DEEPGRAM_SPEECH_NUMERALS',
'DEEPGRAM_SPEECH_SEARCH',
'DEEPGRAM_SPEECH_REPLACE',
'DEEPGRAM_SPEECH_ENDPOINTING',
'DEEPGRAM_SPEECH_VAD_TURNOFF',
'DEEPGRAM_SPEECH_TAG'
],
aws: [
'AWS_VOCABULARY_NAME',
'AWS_VOCABULARY_FILTER_METHOD',
'AWS_VOCABULARY_FILTER_NAME'
],
nuance: [
'NUANCE_ACCESS_TOKEN',
'NUANCE_KRYPTON_ENDPOINT',
'NUANCE_TOPIC',
'NUANCE_UTTERANCE_DETECTION_MODE',
'NUANCE_FILTER_PROFANITY',
'NUANCE_INCLUDE_TOKENIZATION',
'NUANCE_DISCARD_SPEAKER_ADAPTATION',
'NUANCE_SUPPRESS_CALL_RECORDING',
'NUANCE_MASK_LOAD_FAILURES',
'NUANCE_SUPPRESS_INITIAL_CAPITALIZATION',
'NUANCE_ALLOW_ZERO_BASE_LM_WEIGHT',
'NUANCE_FILTER_WAKEUP_WORD',
'NUANCE_NO_INPUT_TIMEOUT_MS',
'NUANCE_RECOGNITION_TIMEOUT_MS',
'NUANCE_UTTERANCE_END_SILENCE_MS',
'NUANCE_MAX_HYPOTHESES',
'NUANCE_SPEECH_DOMAIN',
'NUANCE_FORMATTING',
'NUANCE_RESOURCES'
],
ibm: [
'IBM_ACCESS_TOKEN',
'IBM_SPEECH_REGION',
'IBM_SPEECH_INSTANCE_ID',
'IBM_SPEECH_MODEL',
'IBM_SPEECH_LANGUAGE_CUSTOMIZATION_ID',
'IBM_SPEECH_ACOUSTIC_CUSTOMIZATION_ID',
'IBM_SPEECH_BASE_MODEL_VERSION',
'IBM_SPEECH_WATSON_METADATA',
'IBM_SPEECH_WATSON_LEARNING_OPT_OUT'
],
nvidia: [
'NVIDIA_HINTS'
],
soniox: [
'SONIOX_PROFANITY_FILTER',
'SONIOX_MODEL'
]
};
const compileSonioxTranscripts = (finalWordChunks, channel, language) => {
const words = finalWordChunks.flat();
const transcript = words.reduce((acc, word) => {
if (word.text === '<end>') return acc;
if ([',', '.', '?', '!'].includes(word.text)) return `${acc}${word.text}`;
return `${acc} ${word.text}`;
}, '').trim();
const realWords = words.filter((word) => ![',', '.', '!', '?', ';'].includes(word.text) && word.text !== '<end>');
const confidence = realWords.reduce((acc, word) => acc + word.confidence, 0) / realWords.length;
const alternatives = [{transcript, confidence}];
return {
language_code: language,
channel_tag: channel,
is_final: true,
alternatives,
vendor: {
name: 'soniox',
evt: words
}
};
};
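Usage sketch of compileSonioxTranscripts with fabricated word chunks; the word shape mirrors the finalWords that normalizeSoniox (below) buffers, and the helper is assumed to be exposed wherever it is consumed.

const chunks = [
  [{text: 'hello', confidence: 0.95}, {text: ',', confidence: 1}],
  [{text: 'world', confidence: 0.9}, {text: '<end>', confidence: 1}]
];
const evt = compileSonioxTranscripts(chunks, 1, 'en-US');
console.log(evt.alternatives[0].transcript);   // 'hello, world'
console.log(evt.is_final);                     // true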
const normalizeSoniox = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
/* an <end> token indicates the end of an utterance */
const endTokenPos = evt.words.map((w) => w.text).indexOf('<end>');
const endpointReached = endTokenPos !== -1;
const words = endpointReached ? evt.words.slice(0, endTokenPos) : evt.words;
/* note: we can safely ignore words after the <end> token as they will be returned again */
const finalWords = words.filter((word) => word.is_final);
const nonFinalWords = words.filter((word) => !word.is_final);
const is_final = endpointReached && finalWords.length > 0;
const transcript = words.reduce((acc, word) => {
if ([',', '.', '?', '!'].includes(word.text)) return `${acc}${word.text}`;
else return `${acc} ${word.text}`;
}, '').trim();
const realWords = words.filter((word) => ![',', '.', '!', '?', ';'].includes(word.text) && word.text !== '<end>');
const confidence = realWords.reduce((acc, word) => acc + word.confidence, 0) / realWords.length;
const alternatives = [{transcript, confidence}];
return {
language_code: language,
channel_tag: channel,
is_final,
alternatives,
vendor: {
name: 'soniox',
endpointReached,
evt: copy,
finalWords,
nonFinalWords
}
};
};
const normalizeDeepgram = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
const alternatives = (evt.channel?.alternatives || [])
.map((alt) => ({
confidence: alt.confidence,
transcript: alt.transcript,
}));
return {
language_code: language,
channel_tag: channel,
is_final: evt.is_final,
alternatives: [alternatives[0]],
vendor: {
name: 'deepgram',
evt: copy
}
};
};
const normalizeNvidia = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
const alternatives = (evt.alternatives || [])
.map((alt) => ({
confidence: alt.confidence,
transcript: alt.transcript,
}));
return {
language_code: language,
channel_tag: channel,
is_final: evt.is_final,
alternatives,
vendor: {
name: 'nvidia',
evt: copy
}
};
};
const normalizeIbm = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
//const idx = evt.result_index;
const result = evt.results[0];
return {
language_code: language,
channel_tag: channel,
is_final: result.final,
alternatives: result.alternatives,
vendor: {
name: 'ibm',
evt: copy
}
};
};
const normalizeGoogle = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
return {
language_code: language,
channel_tag: channel,
is_final: evt.is_final,
alternatives: [evt.alternatives[0]],
vendor: {
name: 'google',
evt: copy
}
};
};
const normalizeCustom = (evt, channel, language) => {
return {
language_code: language,
channel_tag: channel,
is_final: evt.is_final,
alternatives: [evt.alternatives[0]]
};
};
const normalizeNuance = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
return {
language_code: language,
channel_tag: channel,
is_final: evt.is_final,
alternatives: [evt.alternatives[0]],
vendor: {
name: 'nuance',
evt: copy
}
};
};
const normalizeMicrosoft = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
const nbest = evt.NBest;
const language_code = evt.PrimaryLanguage?.Language || language;
const alternatives = nbest ? nbest.map((n) => {
return {
confidence: n.Confidence,
transcript: n.Display
};
}) :
[
{
transcript: evt.DisplayText || evt.Text
}
];
return {
language_code,
channel_tag: channel,
is_final: evt.RecognitionStatus === 'Success',
alternatives: [alternatives[0]],
vendor: {
name: 'microsoft',
evt: copy
}
};
};
const normalizeAws = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
return {
language_code: language,
channel_tag: channel,
is_final: evt[0].is_final,
alternatives: evt[0].alternatives,
vendor: {
name: 'aws',
evt: copy
}
};
};
module.exports = (logger) => {
const normalizeTranscription = (evt, vendor, channel, language) => {
//logger.debug({ evt, vendor, channel, language }, 'normalizeTranscription');
switch (vendor) {
case 'deepgram':
return normalizeDeepgram(evt, channel, language);
case 'microsoft':
return normalizeMicrosoft(evt, channel, language);
case 'google':
return normalizeGoogle(evt, channel, language);
case 'aws':
return normalizeAws(evt, channel, language);
case 'nuance':
return normalizeNuance(evt, channel, language);
case 'ibm':
return normalizeIbm(evt, channel, language);
case 'nvidia':
return normalizeNvidia(evt, channel, language);
case 'soniox':
return normalizeSoniox(evt, channel, language);
default:
if (vendor.startsWith('custom:')) {
return normalizeCustom(evt, channel, language);
}
logger.error(`Unknown vendor ${vendor}`);
return evt;
}
};
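/**
 * Map jambonz recognizer options and speech credentials onto the vendor-specific
 * channel variables used when starting recognition on the endpoint. Any "sticky"
 * variable listed for the vendor that is not explicitly set is blanked, so values
 * from a prior recognition on the same endpoint do not carry over.
 */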
const setChannelVarsForStt = (task, sttCredentials, rOpts = {}) => {
let opts = {};
const {enable, voiceMs = 0, mode = -1} = rOpts.vad || {};
const vad = {enable, voiceMs, mode};
const vendor = rOpts.vendor;
/* voice activity detection works across vendors */
opts = {
...opts,
...(vad.enable && {START_RECOGNIZING_ON_VAD: 1}),
...(vad.enable && vad.voiceMs && {RECOGNIZER_VAD_VOICE_MS: vad.voiceMs}),
...(vad.enable && typeof vad.mode === 'number' && {RECOGNIZER_VAD_MODE: vad.mode}),
};
if ('google' === vendor) {
const model = task.name === TaskName.Gather ? 'command_and_search' : 'latest_long';
opts = {
...opts,
...(sttCredentials && {GOOGLE_APPLICATION_CREDENTIALS: JSON.stringify(sttCredentials.credentials)}),
...(rOpts.separateRecognitionPerChannel && {GOOGLE_SPEECH_SEPARATE_RECOGNITION_PER_CHANNEL: 1}),
...(rOpts.separateRecognitionPerChannel === false && {GOOGLE_SPEECH_SEPARATE_RECOGNITION_PER_CHANNEL: 0}),
...(rOpts.profanityFilter && {GOOGLE_SPEECH_PROFANITY_FILTER: 1}),
...(rOpts.punctuation && {GOOGLE_SPEECH_ENABLE_AUTOMATIC_PUNCTUATION: 1}),
...(rOpts.words && {GOOGLE_SPEECH_ENABLE_WORD_TIME_OFFSETS: 1}),
...(rOpts.singleUtterance && {GOOGLE_SPEECH_SINGLE_UTTERANCE: 1}),
...(rOpts.diarization && {GOOGLE_SPEECH_SPEAKER_DIARIZATION: 1}),
...(rOpts.diarization && rOpts.diarizationMinSpeakers > 0 &&
{GOOGLE_SPEECH_SPEAKER_DIARIZATION_MIN_SPEAKER_COUNT: rOpts.diarizationMinSpeakers}),
...(rOpts.diarization && rOpts.diarizationMaxSpeakers > 0 &&
{GOOGLE_SPEECH_SPEAKER_DIARIZATION_MAX_SPEAKER_COUNT: rOpts.diarizationMaxSpeakers}),
...(rOpts.enhancedModel !== false && {GOOGLE_SPEECH_USE_ENHANCED: 1}),
...(rOpts.profanityFilter === false && {GOOGLE_SPEECH_PROFANITY_FILTER: 0}),
...(rOpts.punctuation === false && {GOOGLE_SPEECH_ENABLE_AUTOMATIC_PUNCTUATION: 0}),
...(rOpts.words == false && {GOOGLE_SPEECH_ENABLE_WORD_TIME_OFFSETS: 0}),
...(rOpts.diarization === false && {GOOGLE_SPEECH_SPEAKER_DIARIZATION: 0}),
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
{GOOGLE_SPEECH_HINTS: rOpts.hints.join(',')}),
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
{GOOGLE_SPEECH_HINTS: JSON.stringify(rOpts.hints)}),
...(typeof rOpts.hintsBoost === 'number' && {GOOGLE_SPEECH_HINTS_BOOST: rOpts.hintsBoost}),
...(rOpts.altLanguages.length > 0 &&
{GOOGLE_SPEECH_ALTERNATIVE_LANGUAGE_CODES: [...new Set(rOpts.altLanguages)].join(',')}),
...(rOpts.interactionType &&
{GOOGLE_SPEECH_METADATA_INTERACTION_TYPE: rOpts.interactionType}),
...{GOOGLE_SPEECH_MODEL: rOpts.model || model},
...(rOpts.naicsCode > 0 && {GOOGLE_SPEECH_METADATA_INDUSTRY_NAICS_CODE: rOpts.naicsCode}),
GOOGLE_SPEECH_METADATA_RECORDING_DEVICE_TYPE: 'phone_line',
};
}
else if (['aws', 'polly'].includes(vendor)) {
opts = {
...opts,
...(rOpts.vocabularyName && {AWS_VOCABULARY_NAME: rOpts.vocabularyName}),
...(rOpts.vocabularyFilterName && {AWS_VOCABULARY_FILTER_NAME: rOpts.vocabularyFilterName}),
...(rOpts.filterMethod && {AWS_VOCABULARY_FILTER_METHOD: rOpts.filterMethod}),
...(sttCredentials && {
AWS_ACCESS_KEY_ID: sttCredentials.accessKeyId,
AWS_SECRET_ACCESS_KEY: sttCredentials.secretAccessKey,
AWS_REGION: sttCredentials.region
}),
};
}
else if ('microsoft' === vendor) {
opts = {
...opts,
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
{AZURE_SPEECH_HINTS: rOpts.hints.map((h) => h.trim()).join(',')}),
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
{AZURE_SPEECH_HINTS: rOpts.hints.map((h) => h.phrase).join(',')}),
...(rOpts.altLanguages && rOpts.altLanguages.length > 0 &&
{AZURE_SPEECH_ALTERNATIVE_LANGUAGE_CODES: [...new Set(rOpts.altLanguages)].join(',')}),
...(rOpts.requestSnr && {AZURE_REQUEST_SNR: 1}),
...(rOpts.profanityOption && {AZURE_PROFANITY_OPTION: rOpts.profanityOption}),
...(rOpts.azureServiceEndpoint && {AZURE_SERVICE_ENDPOINT: rOpts.azureServiceEndpoint}),
...(rOpts.initialSpeechTimeoutMs > 0 &&
{AZURE_INITIAL_SPEECH_TIMEOUT_MS: rOpts.initialSpeechTimeoutMs}),
...(rOpts.requestSnr && {AZURE_REQUEST_SNR: 1}),
...(rOpts.audioLogging && {AZURE_AUDIO_LOGGING: 1}),
...{AZURE_USE_OUTPUT_FORMAT_DETAILED: 1},
...(sttCredentials && {
AZURE_SUBSCRIPTION_KEY: sttCredentials.api_key,
AZURE_REGION: sttCredentials.region,
}),
...(sttCredentials.use_custom_stt && sttCredentials.custom_stt_endpoint &&
{AZURE_SERVICE_ENDPOINT_ID: sttCredentials.custom_stt_endpoint})
};
}
else if ('nuance' === vendor) {
/**
* Note: all nuance options are in recognizer.nuanceOptions, should migrate
* other vendor settings to similar nested structure
*/
const {nuanceOptions = {}} = rOpts;
opts = {
...opts,
...(sttCredentials.access_token) && {NUANCE_ACCESS_TOKEN: sttCredentials.access_token},
...(sttCredentials.nuance_stt_uri) && {NUANCE_KRYPTON_ENDPOINT: sttCredentials.nuance_stt_uri},
...(nuanceOptions.topic) && {NUANCE_TOPIC: nuanceOptions.topic},
...(nuanceOptions.utteranceDetectionMode) &&
{NUANCE_UTTERANCE_DETECTION_MODE: nuanceOptions.utteranceDetectionMode},
...(nuanceOptions.punctuation || rOpts.punctuation) && {NUANCE_PUNCTUATION: nuanceOptions.punctuation},
...(nuanceOptions.profanityFilter) &&
{NUANCE_FILTER_PROFANITY: nuanceOptions.profanityFilter},
...(nuanceOptions.includeTokenization) &&
{NUANCE_INCLUDE_TOKENIZATION: nuanceOptions.includeTokenization},
...(nuanceOptions.discardSpeakerAdaptation) &&
{NUANCE_DISCARD_SPEAKER_ADAPTATION: nuanceOptions.discardSpeakerAdaptation},
...(nuanceOptions.suppressCallRecording) &&
{NUANCE_SUPPRESS_CALL_RECORDING: nuanceOptions.suppressCallRecording},
...(nuanceOptions.maskLoadFailures) &&
{NUANCE_MASK_LOAD_FAILURES: nuanceOptions.maskLoadFailures},
...(nuanceOptions.suppressInitialCapitalization) &&
{NUANCE_SUPPRESS_INITIAL_CAPITALIZATION: nuanceOptions.suppressInitialCapitalization},
...(nuanceOptions.allowZeroBaseLmWeight)
&& {NUANCE_ALLOW_ZERO_BASE_LM_WEIGHT: nuanceOptions.allowZeroBaseLmWeight},
...(nuanceOptions.filterWakeupWord) &&
{NUANCE_FILTER_WAKEUP_WORD: nuanceOptions.filterWakeupWord},
...(nuanceOptions.resultType) &&
{NUANCE_RESULT_TYPE: nuanceOptions.resultType || (rOpts.interim ? 'partial' : 'final')},
...(nuanceOptions.noInputTimeoutMs) &&
{NUANCE_NO_INPUT_TIMEOUT_MS: nuanceOptions.noInputTimeoutMs},
...(nuanceOptions.recognitionTimeoutMs) &&
{NUANCE_RECOGNITION_TIMEOUT_MS: nuanceOptions.recognitionTimeoutMs},
...(nuanceOptions.utteranceEndSilenceMs) &&
{NUANCE_UTTERANCE_END_SILENCE_MS: nuanceOptions.utteranceEndSilenceMs},
...(nuanceOptions.maxHypotheses) &&
{NUANCE_MAX_HYPOTHESES: nuanceOptions.maxHypotheses},
...(nuanceOptions.speechDomain) &&
{NUANCE_SPEECH_DOMAIN: nuanceOptions.speechDomain},
...(nuanceOptions.formatting) &&
{NUANCE_FORMATTING: nuanceOptions.formatting},
...(nuanceOptions.resources) &&
{NUANCE_RESOURCES: JSON.stringify(nuanceOptions.resources)},
};
}
else if ('deepgram' === vendor) {
const {deepgramOptions = {}} = rOpts;
opts = {
...opts,
...(sttCredentials.api_key) &&
{DEEPGRAM_API_KEY: sttCredentials.api_key},
...(deepgramOptions.tier) &&
{DEEPGRAM_SPEECH_TIER: deepgramOptions.tier},
...(deepgramOptions.model) &&
{DEEPGRAM_SPEECH_MODEL: deepgramOptions.model},
...(deepgramOptions.punctuate) &&
{DEEPGRAM_SPEECH_ENABLE_AUTOMATIC_PUNCTUATION: 1},
...(deepgramOptions.profanityFilter) &&
{DEEPGRAM_SPEECH_PROFANITY_FILTER: 1},
...(deepgramOptions.redact) &&
{DEEPGRAM_SPEECH_REDACT: 1},
...(deepgramOptions.diarize) &&
{DEEPGRAM_SPEECH_DIARIZE: 1},
...(deepgramOptions.diarizeVersion) &&
{DEEPGRAM_SPEECH_DIARIZE_VERSION: deepgramOptions.diarizeVersion},
...(deepgramOptions.ner) &&
{DEEPGRAM_SPEECH_NER: 1},
...(deepgramOptions.alternatives) &&
{DEEPGRAM_SPEECH_ALTERNATIVES: deepgramOptions.alternatives},
...(deepgramOptions.numerals) &&
{DEEPGRAM_SPEECH_NUMERALS: deepgramOptions.numerals},
...(deepgramOptions.search) &&
{DEEPGRAM_SPEECH_SEARCH: deepgramOptions.search.join(',')},
...(deepgramOptions.replace) &&
{DEEPGRAM_SPEECH_REPLACE: deepgramOptions.replace.join(',')},
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
{DEEPGRAM_SPEECH_KEYWORDS: rOpts.hints.map((h) => h.trim()).join(',')}),
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
{DEEPGRAM_SPEECH_KEYWORDS: rOpts.hints.map((h) => h.phrase).join(',')}),
...(deepgramOptions.keywords) &&
{DEEPGRAM_SPEECH_KEYWORDS: deepgramOptions.keywords.join(',')},
...('endpointing' in deepgramOptions) &&
{DEEPGRAM_SPEECH_ENDPOINTING: deepgramOptions.endpointing},
...(deepgramOptions.vadTurnoff) &&
{DEEPGRAM_SPEECH_VAD_TURNOFF: deepgramOptions.vadTurnoff},
...(deepgramOptions.tag) &&
{DEEPGRAM_SPEECH_TAG: deepgramOptions.tag}
};
}
else if ('soniox' === vendor) {
const {sonioxOptions = {}} = rOpts;
const {storage = {}} = sonioxOptions;
opts = {
...opts,
...(sttCredentials.api_key) &&
{SONIOX_API_KEY: sttCredentials.api_key},
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
{SONIOX_HINTS: rOpts.hints.join(',')}),
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
{SONIOX_HINTS: JSON.stringify(rOpts.hints)}),
...(typeof rOpts.hintsBoost === 'number' &&
{SONIOX_HINTS_BOOST: rOpts.hintsBoost}),
...(sonioxOptions.model) &&
{SONIOX_MODEL: sonioxOptions.model},
...((sonioxOptions.profanityFilter || rOpts.profanityFilter) && {SONIOX_PROFANITY_FILTER: 1}),
...(storage?.id && {SONIOX_STORAGE_ID: storage.id}),
...(storage?.id && storage?.title && {SONIOX_STORAGE_TITLE: storage.title}),
...(storage?.id && storage?.disableStoreAudio && {SONIOX_STORAGE_DISABLE_AUDIO: 1}),
...(storage?.id && storage?.disableStoreTranscript && {SONIOX_STORAGE_DISABLE_TRANSCRIPT: 1}),
...(storage?.id && storage?.disableSearch && {SONIOX_STORAGE_DISABLE_SEARCH: 1})
};
}
else if ('ibm' === vendor) {
const {ibmOptions = {}} = rOpts;
opts = {
...opts,
...(sttCredentials.access_token) &&
{IBM_ACCESS_TOKEN: sttCredentials.access_token},
...(sttCredentials.stt_region) &&
{IBM_SPEECH_REGION: sttCredentials.stt_region},
...(sttCredentials.instance_id) &&
{IBM_SPEECH_INSTANCE_ID: sttCredentials.instance_id},
...(ibmOptions.model) &&
{IBM_SPEECH_MODEL: ibmOptions.model},
...(ibmOptions.language_customization_id) &&
{IBM_SPEECH_LANGUAGE_CUSTOMIZATION_ID: ibmOptions.language_customization_id},
...(ibmOptions.acoustic_customization_id) &&
{IBM_SPEECH_ACOUSTIC_CUSTOMIZATION_ID: ibmOptions.acoustic_customization_id},
...(ibmOptions.baseModelVersion) &&
{IBM_SPEECH_BASE_MODEL_VERSION: ibmOptions.baseModelVersion},
...(ibmOptions.watsonMetadata) &&
{IBM_SPEECH_WATSON_METADATA: ibmOptions.watsonMetadata},
...(ibmOptions.watsonLearningOptOut) &&
{IBM_SPEECH_WATSON_LEARNING_OPT_OUT: ibmOptions.watsonLearningOptOut}
};
}
else if ('nvidia' === vendor) {
const {nvidiaOptions = {}} = rOpts;
opts = {
...opts,
...((nvidiaOptions.profanityFilter || rOpts.profanityFilter) && {NVIDIA_PROFANITY_FILTER: 1}),
...(!(nvidiaOptions.profanityFilter || rOpts.profanityFilter) && {NVIDIA_PROFANITY_FILTER: 0}),
...((nvidiaOptions.punctuation || rOpts.punctuation) && {NVIDIA_PUNCTUATION: 1}),
...(!(nvidiaOptions.punctuation || rOpts.punctuation) && {NVIDIA_PUNCTUATION: 0}),
...((rOpts.words || nvidiaOptions.wordTimeOffsets) && {NVIDIA_WORD_TIME_OFFSETS: 1}),
...(!(rOpts.words || nvidiaOptions.wordTimeOffsets) && {NVIDIA_WORD_TIME_OFFSETS: 0}),
...(nvidiaOptions.maxAlternatives && {NVIDIA_MAX_ALTERNATIVES: nvidiaOptions.maxAlternatives}),
...(!nvidiaOptions.maxAlternatives && {NVIDIA_MAX_ALTERNATIVES: 1}),
...(rOpts.model && {NVIDIA_MODEL: rOpts.model}),
...(nvidiaOptions.rivaUri && {NVIDIA_RIVA_URI: nvidiaOptions.rivaUri}),
...(nvidiaOptions.verbatimTranscripts && {NVIDIA_VERBATIM_TRANSCRIPTS: 1}),
...(rOpts.diarization && {NVIDIA_SPEAKER_DIARIZATION: 1}),
...(rOpts.diarization && rOpts.diarizationMaxSpeakers > 0 &&
{NVIDIA_DIARIZATION_SPEAKER_COUNT: rOpts.diarizationMaxSpeakers}),
...(rOpts.separateRecognitionPerChannel && {NVIDIA_SEPARATE_RECOGNITION_PER_CHANNEL: 1}),
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
{NVIDIA_HINTS: rOpts.hints.join(',')}),
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
{NVIDIA_HINTS: JSON.stringify(rOpts.hints)}),
...(typeof rOpts.hintsBoost === 'number' &&
{NVIDIA_HINTS_BOOST: rOpts.hintsBoost}),
...(nvidiaOptions.customConfiguration &&
{NVIDIA_CUSTOM_CONFIGURATION: JSON.stringify(nvidiaOptions.customConfiguration)}),
};
}
else if (vendor.startsWith('custom:')) {
let {options = {}} = rOpts;
const {auth_token, custom_stt_url} = sttCredentials;
options = {
...options,
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
{hints: rOpts.hints}),
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
{hints: JSON.stringify(rOpts.hints)}),
...(typeof rOpts.hintsBoost === 'number' && {hintsBoost: rOpts.hintsBoost})
};
opts = {
...opts,
JAMBONZ_STT_API_KEY: auth_token,
JAMBONZ_STT_URL: custom_stt_url,
...(Object.keys(options).length > 0 && {JAMBONZ_STT_OPTIONS: JSON.stringify(options)}),
};
}
(stickyVars[vendor] || []).forEach((key) => {
if (!opts[key]) opts[key] = '';
});
return opts;
};
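/**
 * Detach every vendor-specific transcription event listener from the endpoint,
 * so stale handlers do not fire if the endpoint is reused for another task.
 */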
const removeSpeechListeners = (ep) => {
ep.removeCustomEventListener(GoogleTranscriptionEvents.Transcription);
ep.removeCustomEventListener(GoogleTranscriptionEvents.EndOfUtterance);
ep.removeCustomEventListener(GoogleTranscriptionEvents.VadDetected);
ep.removeCustomEventListener(AwsTranscriptionEvents.Transcription);
ep.removeCustomEventListener(AwsTranscriptionEvents.VadDetected);
ep.removeCustomEventListener(AzureTranscriptionEvents.Transcription);
ep.removeCustomEventListener(AzureTranscriptionEvents.NoSpeechDetected);
ep.removeCustomEventListener(AzureTranscriptionEvents.VadDetected);
ep.removeCustomEventListener(NuanceTranscriptionEvents.Transcription);
ep.removeCustomEventListener(NuanceTranscriptionEvents.TranscriptionComplete);
ep.removeCustomEventListener(NuanceTranscriptionEvents.StartOfSpeech);
ep.removeCustomEventListener(NuanceTranscriptionEvents.VadDetected);
ep.removeCustomEventListener(DeepgramTranscriptionEvents.Transcription);
ep.removeCustomEventListener(DeepgramTranscriptionEvents.Connect);
ep.removeCustomEventListener(DeepgramTranscriptionEvents.ConnectFailure);
ep.removeCustomEventListener(SonioxTranscriptionEvents.Transcription);
ep.removeCustomEventListener(NvidiaTranscriptionEvents.Transcription);
ep.removeCustomEventListener(NvidiaTranscriptionEvents.TranscriptionComplete);
ep.removeCustomEventListener(NvidiaTranscriptionEvents.StartOfSpeech);
ep.removeCustomEventListener(NvidiaTranscriptionEvents.VadDetected);
ep.removeCustomEventListener(JambonzTranscriptionEvents.Transcription);
ep.removeCustomEventListener(JambonzTranscriptionEvents.Connect);
ep.removeCustomEventListener(JambonzTranscriptionEvents.ConnectFailure);
ep.removeCustomEventListener(JambonzTranscriptionEvents.Error);
};
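/**
 * Allow speech credentials to be supplied inline on the recognizer (per verb)
 * rather than taken from account-level credentials; returns a snake_case
 * credentials object for the vendor, or undefined if none were provided.
 */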
const setSpeechCredentialsAtRuntime = (recognizer) => {
if (!recognizer) return;
if (recognizer.vendor === 'nuance') {
const {clientId, secret, kryptonEndpoint} = recognizer.nuanceOptions || {};
if (clientId && secret) return {client_id: clientId, secret};
if (kryptonEndpoint) return {nuance_stt_uri: kryptonEndpoint};
}
else if (recognizer.vendor === 'nvidia') {
const {rivaUri} = recognizer.nvidiaOptions || {};
if (rivaUri) return {riva_uri: rivaUri};
}
else if (recognizer.vendor === 'deepgram') {
const {apiKey} = recognizer.deepgramOptions || {};
if (apiKey) return {api_key: apiKey};
}
else if (recognizer.vendor === 'soniox') {
const {apiKey} = recognizer.sonioxOptions || {};
if (apiKey) return {api_key: apiKey};
}
else if (recognizer.vendor === 'ibm') {
const {ttsApiKey, ttsRegion, sttApiKey, sttRegion, instanceId} = recognizer.ibmOptions || {};
if (ttsApiKey || sttApiKey) return {
tts_api_key: ttsApiKey,
tts_region: ttsRegion,
stt_api_key: sttApiKey,
stt_region: sttRegion,
instance_id: instanceId
};
}
};
return {
normalizeTranscription,
setChannelVarsForStt,
removeSpeechListeners,
setSpeechCredentialsAtRuntime,
compileSonioxTranscripts
};
};
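// Illustrative usage sketch (not part of this file): one way a task might wire
// these helpers together. The require path, logger, and the shapes of
// sttCredentials and rawEvt are assumptions for the example, not taken from
// this changeset.
//
// const logger = require('pino')();
// const {
//   normalizeTranscription,
//   setChannelVarsForStt
// } = require('./lib/utils/transcription-utils')(logger);
//
// // build channel variables for a gather task using google STT
// const rOpts = {vendor: 'google', hints: [], altLanguages: [], interim: false};
// const channelVars = setChannelVarsForStt({name: 'gather'}, sttCredentials, rOpts);
//
// // later, normalize a raw vendor event into the common transcription shape
// const transcription = normalizeTranscription(rawEvt, 'google', 1, 'en-US');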

View File

@@ -4,7 +4,6 @@ const short = require('short-uuid');
const {HookMsgTypes} = require('./constants.json');
const Websocket = require('ws');
const snakeCaseKeys = require('./snakecase-keys');
const HttpRequestor = require('./http-requestor');
const MAX_RECONNECTS = 5;
const RESPONSE_TIMEOUT_MS = process.env.JAMBONES_WS_API_MSG_RESPONSE_TIMEOUT || 5000;
@@ -45,7 +44,7 @@ class WsRequestor extends BaseRequestor {
return;
}
if (this.closedGracefully) {
this.logger.debug(`WsRequestor:request - discarding ${type} because we closed the socket`);
this.logger.debug(`WsRequestor:request - discarding ${type} because socket was closed gracefully`);
return;
}
@@ -53,8 +52,14 @@ class WsRequestor extends BaseRequestor {
/* if we have an absolute url, and it is http then do a standard webhook */
if (this._isAbsoluteUrl(url) && url.startsWith('http')) {
const HttpRequestor = require('./http-requestor');
this.logger.debug({hook}, 'WsRequestor: sending a webhook (HTTP)');
const requestor = new HttpRequestor(this.logger, this.account_sid, hook, this.secret);
const h = typeof hook === 'object' ? hook : {url: hook};
const requestor = new HttpRequestor(this.logger, this.account_sid, h, this.secret);
if (type === 'session:redirect') {
this.close();
this.emit('handover', requestor);
}
return requestor.request(type, hook, params, httpHeaders);
}
@@ -69,7 +74,7 @@ class WsRequestor extends BaseRequestor {
this.connectInProgress = true;
this.logger.debug(`WsRequestor:request(${this.id}) - connecting since we do not have a connection`);
if (this.connections >= MAX_RECONNECTS) {
throw new Error(`max attempts connecting to ${this.url}`);
return Promise.reject(`max attempts connecting to ${this.url}`);
}
try {
const startAt = process.hrtime();
@@ -79,7 +84,7 @@ class WsRequestor extends BaseRequestor {
} catch (err) {
this.logger.info({url, err}, 'WsRequestor:request - failed connecting');
this.connectInProgress = false;
throw err;
return Promise.reject(err);
}
}
assert(this.ws);
@@ -91,6 +96,9 @@ class WsRequestor extends BaseRequestor {
assert.ok(url, 'WsRequestor:request url was not provided');
const msgid = short.generate();
// save initial msgid in case we need to reconnect during initial session:new
if (type === 'session:new') this._initMsgId = msgid;
const b3 = httpHeaders?.b3 ? {b3: httpHeaders.b3} : {};
const obj = {
type,
@@ -113,8 +121,18 @@ class WsRequestor extends BaseRequestor {
//this.logger.debug({obj}, `websocket: sending (${url})`);
/* special case: reconnecting before we received ack to session:new */
let reconnectingWithoutAck = false;
if (type === 'session:reconnect' && this._initMsgId) {
reconnectingWithoutAck = true;
const obj = this.messagesInFlight.get(this._initMsgId);
this.messagesInFlight.delete(this._initMsgId);
this.messagesInFlight.set(msgid, obj);
this._initMsgId = msgid;
}
/* simple notifications */
if (['call:status', 'jambonz:error', 'session:reconnect'].includes(type)) {
if (['call:status', 'verb:status', 'jambonz:error'].includes(type) || reconnectingWithoutAck) {
this.ws.send(JSON.stringify(obj), () => {
this.logger.debug({obj}, `WsRequestor:request websocket: sent (${url})`);
sendQueuedMsgs();
@@ -126,7 +144,7 @@ class WsRequestor extends BaseRequestor {
return new Promise((resolve, reject) => {
/* give the far end a reasonable amount of time to ack our message */
const timer = setTimeout(() => {
const {failure} = this.messagesInFlight.get(msgid);
const {failure} = this.messagesInFlight.get(msgid) || {};
failure && failure(`timeout from far end for msgid ${msgid}`);
this.messagesInFlight.delete(msgid);
}, RESPONSE_TIMEOUT_MS);
@@ -161,17 +179,11 @@ class WsRequestor extends BaseRequestor {
this.logger.debug('WsRequestor:close closing socket');
try {
if (this.ws) {
this.ws.close();
this.ws.close(1000);
this.ws.removeAllListeners();
this.ws = null;
}
for (const [msgid, obj] of this.messagesInFlight) {
const {timer} = obj;
clearTimeout(timer);
obj.failure(`abandoning msgid ${msgid} since we have closed the socket`);
}
this.messagesInFlight.clear();
this._clearPendingMessages();
} catch (err) {
this.logger.info({err}, 'WsRequestor: Error closing socket');
}
@@ -207,7 +219,6 @@ class WsRequestor extends BaseRequestor {
}
_setHandlers(ws) {
this.logger.debug('WsRequestor:_setHandlers');
ws
.once('open', this._onOpen.bind(this, ws))
.once('close', this._onClose.bind(this))
@@ -216,6 +227,15 @@ class WsRequestor extends BaseRequestor {
.on('error', this._onError.bind(this));
}
_clearPendingMessages() {
for (const [msgid, obj] of this.messagesInFlight) {
const {timer} = obj;
clearTimeout(timer);
if (!this._initMsgId) obj.failure(`abandoning msgid ${msgid} since socket is closed`);
}
this.messagesInFlight.clear();
}
_onError(err) {
if (this.connections > 0) {
this.logger.info({url: this.url, err}, 'WsRequestor:_onError');
@@ -253,12 +273,14 @@ class WsRequestor extends BaseRequestor {
}, 'WsRequestor - unexpected response');
this.emit('connection-failure');
this.emit('not-ready', new Error(`${res.statusCode} ${res.statusMessage}`));
this.connections++;
}
_onSocketClosed() {
this.ws = null;
this.emit('connection-dropped');
if (this.connections > 0 && this.connections < MAX_RECONNECTS && !this.closedGracefully) {
if (!this._initMsgId) this._clearPendingMessages();
this.logger.debug(`WsRequestor:_onSocketClosed waiting ${this.backoffMs} to reconnect`);
setTimeout(() => {
this.logger.debug(
@@ -310,12 +332,13 @@ class WsRequestor extends BaseRequestor {
}
_recvAck(msgid, data) {
this._initMsgId = null;
const obj = this.messagesInFlight.get(msgid);
if (!obj) {
this.logger.info({url: this.url}, `WsRequestor:_recvAck - ack to unknown msgid ${msgid}, discarding`);
return;
}
this.logger.debug({url: this.url}, `WsRequestor:_recvAck - received response to ${msgid}`);
//this.logger.debug({url: this.url}, `WsRequestor:_recvAck - received response to ${msgid}`);
this.messagesInFlight.delete(msgid);
const {success} = obj;
success && success(data);

package-lock.json generated

File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
{
"name": "jambonz-feature-server",
"version": "v0.7.7",
"version": "v0.8.2",
"main": "app.js",
"engines": {
"node": ">= 10.16.0"
@@ -19,52 +19,56 @@
"bugs": {},
"scripts": {
"start": "node app",
"test": "NODE_ENV=test JAMBONES_HOSTING=1 HTTP_POOL=1 DRACHTIO_HOST=127.0.0.1 DRACHTIO_PORT=9060 DRACHTIO_SECRET=cymru JAMBONES_MYSQL_HOST=127.0.0.1 JAMBONES_MYSQL_PORT=3360 JAMBONES_MYSQL_USER=jambones_test JAMBONES_MYSQL_PASSWORD=jambones_test JAMBONES_MYSQL_DATABASE=jambones_test JAMBONES_REDIS_HOST=127.0.0.1 JAMBONES_REDIS_PORT=16379 JAMBONES_LOGLEVEL=error ENABLE_METRICS=0 HTTP_PORT=3000 JAMBONES_SBCS=172.38.0.10 JAMBONES_FREESWITCH=127.0.0.1:8022:ClueCon:docker-host JAMBONES_TIME_SERIES_HOST=127.0.0.1 JAMBONES_NETWORK_CIDR=172.38.0.0/16 node test/ ",
"test": "NODE_ENV=test JAMBONES_HOSTING=1 HTTP_POOL=1 ENCRYPTION_SECRET=foobar DRACHTIO_HOST=127.0.0.1 DRACHTIO_PORT=9060 DRACHTIO_SECRET=cymru JAMBONES_MYSQL_HOST=127.0.0.1 JAMBONES_MYSQL_PORT=3360 JAMBONES_MYSQL_USER=jambones_test JAMBONES_MYSQL_PASSWORD=jambones_test JAMBONES_MYSQL_DATABASE=jambones_test JAMBONES_REDIS_HOST=127.0.0.1 JAMBONES_REDIS_PORT=16379 JAMBONES_LOGLEVEL=error ENABLE_METRICS=0 HTTP_PORT=3000 JAMBONES_SBCS=172.38.0.10 JAMBONES_FREESWITCH=127.0.0.1:8022:JambonzR0ck$:docker-host JAMBONES_TIME_SERIES_HOST=127.0.0.1 JAMBONES_NETWORK_CIDR=172.38.0.0/16 node test/ ",
"coverage": "./node_modules/.bin/nyc --reporter html --report-dir ./coverage npm run test",
"jslint": "eslint app.js lib"
},
"dependencies": {
"@jambonz/db-helpers": "^0.7.4",
"@jambonz/http-health-check": "^0.0.1",
"@jambonz/db-helpers": "^0.6.19",
"@jambonz/realtimedb-helpers": "^0.4.35",
"@jambonz/stats-collector": "^0.1.6",
"@jambonz/realtimedb-helpers": "^0.7.0",
"@jambonz/speech-utils": "^0.0.12",
"@jambonz/stats-collector": "^0.1.8",
"@jambonz/time-series": "^0.2.5",
"@opentelemetry/api": "^1.1.0",
"@opentelemetry/exporter-jaeger": "^1.3.1",
"@opentelemetry/exporter-trace-otlp-http": "^0.27.0",
"@opentelemetry/exporter-zipkin": "^1.3.1",
"@opentelemetry/instrumentation": "^0.27.0",
"@opentelemetry/resources": "^1.3.1",
"@opentelemetry/sdk-trace-base": "^1.3.1",
"@opentelemetry/sdk-trace-node": "^1.3.1",
"@opentelemetry/semantic-conventions": "^1.3.1",
"aws-sdk": "^2.1152.0",
"@jambonz/verb-specifications": "^0.0.11",
"@opentelemetry/api": "^1.4.0",
"@opentelemetry/exporter-jaeger": "^1.9.0",
"@opentelemetry/exporter-trace-otlp-http": "^0.35.0",
"@opentelemetry/exporter-zipkin": "^1.9.0",
"@opentelemetry/instrumentation": "^0.35.0",
"@opentelemetry/resources": "^1.9.0",
"@opentelemetry/sdk-trace-base": "^1.9.0",
"@opentelemetry/sdk-trace-node": "^1.9.0",
"@opentelemetry/semantic-conventions": "^1.9.0",
"aws-sdk": "^2.1313.0",
"bent": "^7.3.12",
"debug": "^4.3.4",
"deepcopy": "^2.1.0",
"drachtio-fsmrf": "^3.0.3",
"drachtio-srf": "^4.5.1",
"express": "^4.18.1",
"helmet": "^5.1.0",
"drachtio-fsmrf": "^3.0.20",
"drachtio-srf": "^4.5.23",
"express": "^4.18.2",
"ip": "^1.1.8",
"moment": "^2.29.4",
"parse-url": "^8.1.0",
"pino": "^6.14.0",
"pino": "^8.8.0",
"polly-ssml-split": "^0.1.0",
"proxyquire": "^2.1.3",
"sdp-transform": "^2.14.1",
"short-uuid": "^4.2.0",
"short-uuid": "^4.2.2",
"sinon": "^15.0.1",
"to-snake-case": "^1.0.0",
"undici": "^5.8.2",
"uuid": "^8.3.2",
"undici": "^5.19.1",
"uuid-random": "^1.3.2",
"verify-aws-sns-signature": "^0.1.0",
"ws": "^8.8.0",
"xml2js": "^0.4.23"
"ws": "^8.9.0",
"xml2js": "^0.5.0"
},
"devDependencies": {
"clear-module": "^4.1.2",
"eslint": "^7.32.0",
"eslint-plugin-promise": "^4.3.1",
"nyc": "^15.1.0",
"tape": "^5.5.3"
"tape": "^5.6.1"
},
"optionalDependencies": {
"bufferutil": "^4.0.6",

test/config-test.js Normal file
View File

@@ -0,0 +1,102 @@
const test = require('tape');
const { sippUac } = require('./sipp')('test_fs');
const bent = require('bent');
const getJSON = bent('json')
const clearModule = require('clear-module');
const {provisionCallHook} = require('./utils')
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
});
function connect(connectable) {
return new Promise((resolve, reject) => {
connectable.on('connect', () => {
return resolve();
});
});
}
test('\'config: listen\'', async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
const from = "config_listen_success";
let verbs = [
{
"verb": "config",
"listen": {
"enable": true,
"url": `ws://172.38.0.60:3000/${from}`
}
},
{
"verb": "pause",
"length": 5
}
];
provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success-send-bye.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/ws_packet_count/${from}`);
t.pass('config: successfully started background listen');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'config: listen - stop\'', async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
const from = "config_listen_success";
let verbs = [
{
"verb": "config",
"listen": {
"enable": true,
"url": `ws://172.38.0.60:3000/${from}`
}
},
{
"verb": "pause",
"length": 1
},
{
"verb": "config",
"listen": {
"enable": false
}
},
{
"verb": "pause",
"length": 3
}
];
provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success-send-bye.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/ws_packet_count/${from}`);
t.pass('config: successfully started then stopped background listen');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});

View File

@@ -18,24 +18,38 @@ test('creating schema', (t) => {
if (err) return t.end(err);
t.pass('schema and test data successfully created');
if (process.env.GCP_JSON_KEY && process.env.AWS_ACCESS_KEY_ID && process.env.AWS_SECRET_ACCESS_KEY) {
const sql = [];
if (process.env.GCP_JSON_KEY) {
const google_credential = encrypt(process.env.GCP_JSON_KEY);
t.pass('adding google credentials');
sql.push(`UPDATE speech_credentials SET credential='${google_credential}' WHERE vendor='google';`);
}
if (process.env.AWS_ACCESS_KEY_ID && process.env.AWS_SECRET_ACCESS_KEY) {
const aws_credential = encrypt(JSON.stringify({
access_key_id: process.env.AWS_ACCESS_KEY_ID,
secret_access_key: process.env.AWS_SECRET_ACCESS_KEY
secret_access_key: process.env.AWS_SECRET_ACCESS_KEY,
aws_region: process.env.AWS_REGION
}));
const cmd = `
UPDATE speech_credentials SET credential='${google_credential}' WHERE vendor='google';
UPDATE speech_credentials SET credential='${aws_credential}' WHERE vendor='aws';
`;
t.pass('adding aws credentials');
sql.push(`UPDATE speech_credentials SET credential='${aws_credential}' WHERE vendor='aws';`);
}
if (process.env.MICROSOFT_REGION && process.env.MICROSOFT_API_KEY) {
const microsoft_credential = encrypt(JSON.stringify({
region: process.env.MICROSOFT_REGION,
api_key: process.env.MICROSOFT_API_KEY
}));
t.pass('adding microsoft credentials');
sql.push(`UPDATE speech_credentials SET credential='${microsoft_credential}' WHERE vendor='microsoft';`);
}
if (sql.length > 0) {
const path = `${__dirname}/.creds.sql`;
fs.writeFileSync(path, cmd);
const cmd = sql.join('\n');
fs.writeFileSync(path, sql.join('\n'));
exec(`mysql -h 127.0.0.1 -u root --protocol=tcp --port=3360 -D jambones_test < ${path}`, (err, stdout, stderr) => {
console.log(stdout);
console.log(stderr);
if (err) return t.end(err);
fs.unlinkSync(path)
fs.writeFileSync(`${__dirname}/credentials/gcp.json`, process.env.GCP_JSON_KEY);
t.pass('set account-level speech credentials');
t.end();
});

View File

@@ -0,0 +1,9 @@
{
"say": {
"text": "<speak>I already told you <emphasis level=\"strong\">I already told you I already told you I already told you I already told you! I already told you I already told you I already told you I already told you? I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you told I already told you I already told you told I already told you I already told you. I already told you <break time=\"3s\"/> I really like that person!</emphasis> this is another long text.</speak>",
"synthesizer": {
"vendor": "google",
"language": "en-US"
}
}
}

View File

@@ -0,0 +1,9 @@
{
"say": {
"text": "<speak>I already told you I already told you I already told you I already told you I already told you! I already told you I already told you I already told you I already told you? I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you I already told you told I already told you I already told you told I already told you I already told you. I already told you <break time=\"3s\"/> I <emphasis level=\"strong\">really like that person!</emphasis> this is another long text.</speak>",
"synthesizer": {
"vendor": "google",
"language": "en-US"
}
}
}

View File

@@ -217,6 +217,7 @@ CREATE TABLE `applications` (
`call_hook_sid` char(36) DEFAULT NULL COMMENT 'webhook to call for inbound calls ',
`call_status_hook_sid` char(36) DEFAULT NULL COMMENT 'webhook to call for call status events',
`messaging_hook_sid` char(36) DEFAULT NULL COMMENT 'webhook to call for inbound SMS/MMS ',
`app_json` VARCHAR(16384),
`speech_synthesis_vendor` varchar(64) NOT NULL DEFAULT 'google',
`speech_synthesis_language` varchar(12) NOT NULL DEFAULT 'en-US',
`speech_synthesis_voice` varchar(64) DEFAULT NULL,
@@ -245,13 +246,14 @@ CREATE TABLE `applications` (
LOCK TABLES `applications` WRITE;
/*!40000 ALTER TABLE `applications` DISABLE KEYS */;
INSERT INTO `applications` VALUES ('0dddaabf-0a30-43e3-84e8-426873b1a78b','decline call',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','c71e79db-24f2-4866-a3ee-febb0f97b341','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,'google','en-US','en-US-Standard-C','google','en-US');
INSERT INTO `applications` VALUES ('308b4f41-1a18-4052-b89a-c054e75ce242','say',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','54ab0976-a6c0-45d8-89a4-d90d45bf9d96','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,'google','en-US','en-US-Standard-C','google','en-US');
INSERT INTO `applications` VALUES ('24d0f6af-e976-44dd-a2e8-41c7b55abe33','say account 2',NULL,'622f62e4-303a-49f2-bbe0-eb1e1714e37a','54ab0976-a6c0-45d8-89a4-d90d45bf9d96','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,'google','en-US','en-US-Standard-C','google','en-US');
INSERT INTO `applications` VALUES ('17461c69-56b5-4dab-ad83-1c43a0f93a3d','gather',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','10692465-a511-4277-9807-b7157e4f81e1','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,'google','en-US','en-US-Standard-C','google','en-US');
INSERT INTO `applications` VALUES ('baf9213b-5556-4c20-870c-586392ed246f','transcribe',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','ecb67a8f-f7ce-4919-abf0-bbc69c1001e5','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,'google','en-US','en-US-Standard-C','google','en-US');
INSERT INTO `applications` VALUES ('ae026ab5-3029-47b4-9d7c-236e3a4b4ebe','transcribe account 2',NULL,'622f62e4-303a-49f2-bbe0-eb1e1714e37a','ecb67a8f-f7ce-4919-abf0-bbc69c1001e5','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,'google','en-US','en-US-Standard-C','google','en-US');
INSERT INTO `applications` VALUES ('195d9507-6a42-46a8-825f-f009e729d023','sip info',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','c9113e7a-741f-48b9-96c1-f2f78176eeb3','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,'google','en-US','en-US-Standard-C','google','en-US');
INSERT INTO `applications` VALUES ('0dddaabf-0a30-43e3-84e8-426873b1a78b','decline call',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','c71e79db-24f2-4866-a3ee-febb0f97b341','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US');
INSERT INTO `applications` VALUES ('308b4f41-1a18-4052-b89a-c054e75ce242','say',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','54ab0976-a6c0-45d8-89a4-d90d45bf9d96','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US');
INSERT INTO `applications` VALUES ('24d0f6af-e976-44dd-a2e8-41c7b55abe33','say account 2',NULL,'622f62e4-303a-49f2-bbe0-eb1e1714e37a','54ab0976-a6c0-45d8-89a4-d90d45bf9d96','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US');
INSERT INTO `applications` VALUES ('17461c69-56b5-4dab-ad83-1c43a0f93a3d','gather',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','10692465-a511-4277-9807-b7157e4f81e1','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US');
INSERT INTO `applications` VALUES ('baf9213b-5556-4c20-870c-586392ed246f','transcribe',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','ecb67a8f-f7ce-4919-abf0-bbc69c1001e5','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US');
INSERT INTO `applications` VALUES ('ae026ab5-3029-47b4-9d7c-236e3a4b4ebe','transcribe account 2',NULL,'622f62e4-303a-49f2-bbe0-eb1e1714e37a','ecb67a8f-f7ce-4919-abf0-bbc69c1001e5','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US');
INSERT INTO `applications` VALUES ('195d9507-6a42-46a8-825f-f009e729d023','sip info',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','c9113e7a-741f-48b9-96c1-f2f78176eeb3','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,NULL,'google','en-US','en-US-Standard-C','google','en-US');
INSERT INTO `applications` VALUES ('0dddaabf-0a30-43e3-84e8-426873b1a78c','app json',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','c71e79db-24f2-4866-a3ee-febb0f97b341','293904c1-351b-4bca-8d58-1a29b853c7db',NULL,'[{"verb": "play","url": "silence_stream://5000"}]','google','en-US','en-US-Standard-C','google','en-US');
/*!40000 ALTER TABLE `applications` ENABLE KEYS */;
UNLOCK TABLES;
@@ -451,6 +453,7 @@ INSERT INTO `phone_numbers` VALUES ('05eeed62-b29b-4679-bf38-d7a4e318be44','1617
INSERT INTO `phone_numbers` VALUES ('f3c53863-b629-4cf6-9dcb-c7fb7072314b','16174000004','5145b436-2f38-4029-8d4c-fd8c67831c7a','bb845d4b-83a9-4cde-a6e9-50f3743bab3f','baf9213b-5556-4c20-870c-586392ed246f', NULL);
INSERT INTO `phone_numbers` VALUES ('f6416c17-829a-4f11-9c32-f0d00e4a9ae9','16174000005','5145b436-2f38-4029-8d4c-fd8c67831c7a','622f62e4-303a-49f2-bbe0-eb1e1714e37a','ae026ab5-3029-47b4-9d7c-236e3a4b4ebe', NULL);
INSERT INTO `phone_numbers` VALUES ('964d0581-9627-44cb-be20-8118050406b2','16174000006','5145b436-2f38-4029-8d4c-fd8c67831c7a','bb845d4b-83a9-4cde-a6e9-50f3743bab3f','195d9507-6a42-46a8-825f-f009e729d023', NULL);
INSERT INTO `phone_numbers` VALUES ('964d0581-9627-44cb-be20-8118050406b3','16174000007','5145b436-2f38-4029-8d4c-fd8c67831c7a','bb845d4b-83a9-4cde-a6e9-50f3743bab3f','0dddaabf-0a30-43e3-84e8-426873b1a78c', NULL);
/*!40000 ALTER TABLE `phone_numbers` ENABLE KEYS */;
UNLOCK TABLES;
@@ -614,7 +617,10 @@ CREATE TABLE `speech_credentials` (
LOCK TABLES `speech_credentials` WRITE;
/*!40000 ALTER TABLE `speech_credentials` DISABLE KEYS */;
INSERT INTO `speech_credentials` VALUES ('2add163c-34f2-45c6-a016-f955d218ffb6',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','google','credential-goes-here',1,1,NULL,'2021-04-03 15:42:10',1,1),('84154212-5c99-4c94-8993-bc2a46288daa',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','aws','credential-goes-here',0,0,NULL,NULL,NULL,NULL);
INSERT INTO `speech_credentials` VALUES
('2add163c-34f2-45c6-a016-f955d218ffb6',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','google','credential-goes-here',1,1,NULL,'2021-04-03 15:42:10',1,1),
('2add347f-34f2-45c6-a016-f955d218ffb6',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','microsoft','credential-goes-here',1,1,NULL,'2021-04-03 15:42:10',1,1),
('84154212-5c99-4c94-8993-bc2a46288daa',NULL,'bb845d4b-83a9-4cde-a6e9-50f3743bab3f','aws','credential-goes-here',1,1,NULL,NULL,NULL,NULL);
/*!40000 ALTER TABLE `speech_credentials` ENABLE KEYS */;
UNLOCK TABLES;

View File

@@ -4,6 +4,8 @@ SET FOREIGN_KEY_CHECKS=0;
DROP TABLE IF EXISTS account_static_ips;
DROP TABLE IF EXISTS account_limits;
DROP TABLE IF EXISTS account_products;
DROP TABLE IF EXISTS account_subscriptions;
@@ -18,6 +20,12 @@ DROP TABLE IF EXISTS lcr_carrier_set_entry;
DROP TABLE IF EXISTS lcr_routes;
DROP TABLE IF EXISTS password_settings;
DROP TABLE IF EXISTS user_permissions;
DROP TABLE IF EXISTS permissions;
DROP TABLE IF EXISTS predefined_sip_gateways;
DROP TABLE IF EXISTS predefined_smpp_gateways;
@@ -36,6 +44,8 @@ DROP TABLE IF EXISTS sbc_addresses;
DROP TABLE IF EXISTS ms_teams_tenants;
DROP TABLE IF EXISTS service_provider_limits;
DROP TABLE IF EXISTS signup_history;
DROP TABLE IF EXISTS smpp_addresses;
@@ -69,6 +79,15 @@ private_ipv4 VARBINARY(16) NOT NULL UNIQUE ,
PRIMARY KEY (account_static_ip_sid)
);
CREATE TABLE account_limits
(
account_limits_sid CHAR(36) NOT NULL UNIQUE ,
account_sid CHAR(36) NOT NULL,
category ENUM('api_rate','voice_call_session', 'device','voice_call_minutes','voice_call_session_license', 'voice_call_minutes_license') NOT NULL,
quantity INTEGER NOT NULL,
PRIMARY KEY (account_limits_sid)
);
CREATE TABLE account_subscriptions
(
account_subscription_sid CHAR(36) NOT NULL UNIQUE ,
@@ -123,6 +142,21 @@ priority INTEGER NOT NULL UNIQUE COMMENT 'lower priority routes are attempted f
PRIMARY KEY (lcr_route_sid)
) COMMENT='Least cost routing table';
CREATE TABLE password_settings
(
min_password_length INTEGER NOT NULL DEFAULT 8,
require_digit BOOLEAN NOT NULL DEFAULT false,
require_special_character BOOLEAN NOT NULL DEFAULT false
);
CREATE TABLE permissions
(
permission_sid CHAR(36) NOT NULL UNIQUE ,
name VARCHAR(32) NOT NULL UNIQUE ,
description VARCHAR(255),
PRIMARY KEY (permission_sid)
);
CREATE TABLE predefined_carriers
(
predefined_carrier_sid CHAR(36) NOT NULL UNIQUE ,
@@ -228,6 +262,15 @@ tenant_fqdn VARCHAR(255) NOT NULL UNIQUE ,
PRIMARY KEY (ms_teams_tenant_sid)
) COMMENT='A Microsoft Teams customer tenant';
CREATE TABLE service_provider_limits
(
service_provider_limits_sid CHAR(36) NOT NULL UNIQUE ,
service_provider_sid CHAR(36) NOT NULL,
category ENUM('api_rate','voice_call_session', 'device','voice_call_minutes','voice_call_session_license', 'voice_call_minutes_license') NOT NULL,
quantity INTEGER NOT NULL,
PRIMARY KEY (service_provider_limits_sid)
);
CREATE TABLE signup_history
(
email VARCHAR(255) NOT NULL,
@@ -283,6 +326,7 @@ email_activation_code VARCHAR(16),
email_validated BOOLEAN NOT NULL DEFAULT false,
phone_validated BOOLEAN NOT NULL DEFAULT false,
email_content_opt_out BOOLEAN NOT NULL DEFAULT false,
is_active BOOLEAN NOT NULL DEFAULT true,
PRIMARY KEY (user_sid)
);
@@ -310,9 +354,20 @@ smpp_password VARCHAR(64),
smpp_enquire_link_interval INTEGER DEFAULT 0,
smpp_inbound_system_id VARCHAR(255),
smpp_inbound_password VARCHAR(64),
register_from_user VARCHAR(128),
register_from_domain VARCHAR(255),
register_public_ip_in_contact BOOLEAN NOT NULL DEFAULT false,
PRIMARY KEY (voip_carrier_sid)
) COMMENT='A Carrier or customer PBX that can send or receive calls';
CREATE TABLE user_permissions
(
user_permissions_sid CHAR(36) NOT NULL UNIQUE ,
user_sid CHAR(36) NOT NULL,
permission_sid CHAR(36) NOT NULL,
PRIMARY KEY (user_permissions_sid)
);
CREATE TABLE smpp_gateways
(
smpp_gateway_sid CHAR(36) NOT NULL UNIQUE ,
@@ -330,7 +385,7 @@ PRIMARY KEY (smpp_gateway_sid)
CREATE TABLE phone_numbers
(
phone_number_sid CHAR(36) UNIQUE ,
number VARCHAR(32) NOT NULL UNIQUE ,
number VARCHAR(132) NOT NULL UNIQUE ,
voip_carrier_sid CHAR(36),
account_sid CHAR(36),
application_sid CHAR(36),
@@ -380,6 +435,7 @@ account_sid CHAR(36) COMMENT 'account that this application belongs to (if null,
call_hook_sid CHAR(36) COMMENT 'webhook to call for inbound calls ',
call_status_hook_sid CHAR(36) COMMENT 'webhook to call for call status events',
messaging_hook_sid CHAR(36) COMMENT 'webhook to call for inbound SMS/MMS ',
app_json VARCHAR(16384),
speech_synthesis_vendor VARCHAR(64) NOT NULL DEFAULT 'google',
speech_synthesis_language VARCHAR(12) NOT NULL DEFAULT 'en-US',
speech_synthesis_voice VARCHAR(64),
@@ -418,6 +474,11 @@ disable_cdrs BOOLEAN NOT NULL DEFAULT 0,
trial_end_date DATETIME,
deactivated_reason VARCHAR(255),
device_to_call_ratio INTEGER NOT NULL DEFAULT 5,
subspace_client_id VARCHAR(255),
subspace_client_secret VARCHAR(255),
subspace_sip_teleport_id VARCHAR(255),
subspace_sip_teleport_destinations VARCHAR(255),
siprec_hook_sid CHAR(36),
PRIMARY KEY (account_sid)
) COMMENT='An enterprise that uses the platform for comm services';
@@ -425,19 +486,23 @@ CREATE INDEX account_static_ip_sid_idx ON account_static_ips (account_static_ip_
CREATE INDEX account_sid_idx ON account_static_ips (account_sid);
ALTER TABLE account_static_ips ADD FOREIGN KEY account_sid_idxfk (account_sid) REFERENCES accounts (account_sid);
CREATE INDEX account_sid_idx ON account_limits (account_sid);
ALTER TABLE account_limits ADD FOREIGN KEY account_sid_idxfk_1 (account_sid) REFERENCES accounts (account_sid) ON DELETE CASCADE;
CREATE INDEX account_subscription_sid_idx ON account_subscriptions (account_subscription_sid);
CREATE INDEX account_sid_idx ON account_subscriptions (account_sid);
ALTER TABLE account_subscriptions ADD FOREIGN KEY account_sid_idxfk_1 (account_sid) REFERENCES accounts (account_sid);
ALTER TABLE account_subscriptions ADD FOREIGN KEY account_sid_idxfk_2 (account_sid) REFERENCES accounts (account_sid);
CREATE INDEX invite_code_idx ON beta_invite_codes (invite_code);
CREATE INDEX call_route_sid_idx ON call_routes (call_route_sid);
ALTER TABLE call_routes ADD FOREIGN KEY account_sid_idxfk_2 (account_sid) REFERENCES accounts (account_sid);
ALTER TABLE call_routes ADD FOREIGN KEY account_sid_idxfk_3 (account_sid) REFERENCES accounts (account_sid);
ALTER TABLE call_routes ADD FOREIGN KEY application_sid_idxfk (application_sid) REFERENCES applications (application_sid);
CREATE INDEX dns_record_sid_idx ON dns_records (dns_record_sid);
ALTER TABLE dns_records ADD FOREIGN KEY account_sid_idxfk_3 (account_sid) REFERENCES accounts (account_sid);
ALTER TABLE dns_records ADD FOREIGN KEY account_sid_idxfk_4 (account_sid) REFERENCES accounts (account_sid);
CREATE INDEX permission_sid_idx ON permissions (permission_sid);
CREATE INDEX predefined_carrier_sid_idx ON predefined_carriers (predefined_carrier_sid);
CREATE INDEX predefined_sip_gateway_sid_idx ON predefined_sip_gateways (predefined_sip_gateway_sid);
CREATE INDEX predefined_carrier_sid_idx ON predefined_sip_gateways (predefined_carrier_sid);
@@ -456,14 +521,14 @@ ALTER TABLE account_products ADD FOREIGN KEY product_sid_idxfk (product_sid) REF
CREATE INDEX account_offer_sid_idx ON account_offers (account_offer_sid);
CREATE INDEX account_sid_idx ON account_offers (account_sid);
ALTER TABLE account_offers ADD FOREIGN KEY account_sid_idxfk_4 (account_sid) REFERENCES accounts (account_sid);
ALTER TABLE account_offers ADD FOREIGN KEY account_sid_idxfk_5 (account_sid) REFERENCES accounts (account_sid);
CREATE INDEX product_sid_idx ON account_offers (product_sid);
ALTER TABLE account_offers ADD FOREIGN KEY product_sid_idxfk_1 (product_sid) REFERENCES products (product_sid);
CREATE INDEX api_key_sid_idx ON api_keys (api_key_sid);
CREATE INDEX account_sid_idx ON api_keys (account_sid);
ALTER TABLE api_keys ADD FOREIGN KEY account_sid_idxfk_5 (account_sid) REFERENCES accounts (account_sid);
ALTER TABLE api_keys ADD FOREIGN KEY account_sid_idxfk_6 (account_sid) REFERENCES accounts (account_sid);
CREATE INDEX service_provider_sid_idx ON api_keys (service_provider_sid);
ALTER TABLE api_keys ADD FOREIGN KEY service_provider_sid_idxfk (service_provider_sid) REFERENCES service_providers (service_provider_sid);
@@ -477,44 +542,53 @@ ALTER TABLE sbc_addresses ADD FOREIGN KEY service_provider_sid_idxfk_1 (service_
CREATE INDEX ms_teams_tenant_sid_idx ON ms_teams_tenants (ms_teams_tenant_sid);
ALTER TABLE ms_teams_tenants ADD FOREIGN KEY service_provider_sid_idxfk_2 (service_provider_sid) REFERENCES service_providers (service_provider_sid);
ALTER TABLE ms_teams_tenants ADD FOREIGN KEY account_sid_idxfk_6 (account_sid) REFERENCES accounts (account_sid);
ALTER TABLE ms_teams_tenants ADD FOREIGN KEY account_sid_idxfk_7 (account_sid) REFERENCES accounts (account_sid);
ALTER TABLE ms_teams_tenants ADD FOREIGN KEY application_sid_idxfk_1 (application_sid) REFERENCES applications (application_sid);
CREATE INDEX tenant_fqdn_idx ON ms_teams_tenants (tenant_fqdn);
CREATE INDEX service_provider_sid_idx ON service_provider_limits (service_provider_sid);
ALTER TABLE service_provider_limits ADD FOREIGN KEY service_provider_sid_idxfk_3 (service_provider_sid) REFERENCES service_providers (service_provider_sid) ON DELETE CASCADE;
CREATE INDEX email_idx ON signup_history (email);
CREATE INDEX smpp_address_sid_idx ON smpp_addresses (smpp_address_sid);
CREATE INDEX service_provider_sid_idx ON smpp_addresses (service_provider_sid);
ALTER TABLE smpp_addresses ADD FOREIGN KEY service_provider_sid_idxfk_3 (service_provider_sid) REFERENCES service_providers (service_provider_sid);
ALTER TABLE smpp_addresses ADD FOREIGN KEY service_provider_sid_idxfk_4 (service_provider_sid) REFERENCES service_providers (service_provider_sid);
CREATE UNIQUE INDEX speech_credentials_idx_1 ON speech_credentials (vendor,account_sid);
CREATE INDEX speech_credential_sid_idx ON speech_credentials (speech_credential_sid);
CREATE INDEX service_provider_sid_idx ON speech_credentials (service_provider_sid);
ALTER TABLE speech_credentials ADD FOREIGN KEY service_provider_sid_idxfk_4 (service_provider_sid) REFERENCES service_providers (service_provider_sid);
ALTER TABLE speech_credentials ADD FOREIGN KEY service_provider_sid_idxfk_5 (service_provider_sid) REFERENCES service_providers (service_provider_sid);
CREATE INDEX account_sid_idx ON speech_credentials (account_sid);
ALTER TABLE speech_credentials ADD FOREIGN KEY account_sid_idxfk_7 (account_sid) REFERENCES accounts (account_sid);
ALTER TABLE speech_credentials ADD FOREIGN KEY account_sid_idxfk_8 (account_sid) REFERENCES accounts (account_sid);
CREATE INDEX user_sid_idx ON users (user_sid);
CREATE INDEX email_idx ON users (email);
CREATE INDEX phone_idx ON users (phone);
CREATE INDEX account_sid_idx ON users (account_sid);
ALTER TABLE users ADD FOREIGN KEY account_sid_idxfk_8 (account_sid) REFERENCES accounts (account_sid);
ALTER TABLE users ADD FOREIGN KEY account_sid_idxfk_9 (account_sid) REFERENCES accounts (account_sid);
CREATE INDEX service_provider_sid_idx ON users (service_provider_sid);
ALTER TABLE users ADD FOREIGN KEY service_provider_sid_idxfk_5 (service_provider_sid) REFERENCES service_providers (service_provider_sid);
ALTER TABLE users ADD FOREIGN KEY service_provider_sid_idxfk_6 (service_provider_sid) REFERENCES service_providers (service_provider_sid);
CREATE INDEX email_activation_code_idx ON users (email_activation_code);
CREATE INDEX voip_carrier_sid_idx ON voip_carriers (voip_carrier_sid);
CREATE INDEX account_sid_idx ON voip_carriers (account_sid);
ALTER TABLE voip_carriers ADD FOREIGN KEY account_sid_idxfk_9 (account_sid) REFERENCES accounts (account_sid);
ALTER TABLE voip_carriers ADD FOREIGN KEY account_sid_idxfk_10 (account_sid) REFERENCES accounts (account_sid);
CREATE INDEX service_provider_sid_idx ON voip_carriers (service_provider_sid);
ALTER TABLE voip_carriers ADD FOREIGN KEY service_provider_sid_idxfk_6 (service_provider_sid) REFERENCES service_providers (service_provider_sid);
ALTER TABLE voip_carriers ADD FOREIGN KEY service_provider_sid_idxfk_7 (service_provider_sid) REFERENCES service_providers (service_provider_sid);
ALTER TABLE voip_carriers ADD FOREIGN KEY application_sid_idxfk_2 (application_sid) REFERENCES applications (application_sid);
CREATE INDEX user_permissions_sid_idx ON user_permissions (user_permissions_sid);
CREATE INDEX user_sid_idx ON user_permissions (user_sid);
ALTER TABLE user_permissions ADD FOREIGN KEY user_sid_idxfk (user_sid) REFERENCES users (user_sid) ON DELETE CASCADE;
ALTER TABLE user_permissions ADD FOREIGN KEY permission_sid_idxfk (permission_sid) REFERENCES permissions (permission_sid);
CREATE INDEX smpp_gateway_sid_idx ON smpp_gateways (smpp_gateway_sid);
CREATE INDEX voip_carrier_sid_idx ON smpp_gateways (voip_carrier_sid);
ALTER TABLE smpp_gateways ADD FOREIGN KEY voip_carrier_sid_idxfk (voip_carrier_sid) REFERENCES voip_carriers (voip_carrier_sid);
@@ -524,12 +598,12 @@ CREATE INDEX number_idx ON phone_numbers (number);
CREATE INDEX voip_carrier_sid_idx ON phone_numbers (voip_carrier_sid);
ALTER TABLE phone_numbers ADD FOREIGN KEY voip_carrier_sid_idxfk_1 (voip_carrier_sid) REFERENCES voip_carriers (voip_carrier_sid);
ALTER TABLE phone_numbers ADD FOREIGN KEY account_sid_idxfk_10 (account_sid) REFERENCES accounts (account_sid);
ALTER TABLE phone_numbers ADD FOREIGN KEY account_sid_idxfk_11 (account_sid) REFERENCES accounts (account_sid);
ALTER TABLE phone_numbers ADD FOREIGN KEY application_sid_idxfk_3 (application_sid) REFERENCES applications (application_sid);
CREATE INDEX service_provider_sid_idx ON phone_numbers (service_provider_sid);
ALTER TABLE phone_numbers ADD FOREIGN KEY service_provider_sid_idxfk_7 (service_provider_sid) REFERENCES service_providers (service_provider_sid);
ALTER TABLE phone_numbers ADD FOREIGN KEY service_provider_sid_idxfk_8 (service_provider_sid) REFERENCES service_providers (service_provider_sid);
CREATE INDEX sip_gateway_idx_hostport ON sip_gateways (ipv4,port);
@@ -545,10 +619,10 @@ CREATE UNIQUE INDEX applications_idx_name ON applications (account_sid,name);
CREATE INDEX application_sid_idx ON applications (application_sid);
CREATE INDEX service_provider_sid_idx ON applications (service_provider_sid);
ALTER TABLE applications ADD FOREIGN KEY service_provider_sid_idxfk_8 (service_provider_sid) REFERENCES service_providers (service_provider_sid);
ALTER TABLE applications ADD FOREIGN KEY service_provider_sid_idxfk_9 (service_provider_sid) REFERENCES service_providers (service_provider_sid);
CREATE INDEX account_sid_idx ON applications (account_sid);
ALTER TABLE applications ADD FOREIGN KEY account_sid_idxfk_11 (account_sid) REFERENCES accounts (account_sid);
ALTER TABLE applications ADD FOREIGN KEY account_sid_idxfk_12 (account_sid) REFERENCES accounts (account_sid);
ALTER TABLE applications ADD FOREIGN KEY call_hook_sid_idxfk (call_hook_sid) REFERENCES webhooks (webhook_sid);
@@ -564,7 +638,7 @@ ALTER TABLE service_providers ADD FOREIGN KEY registration_hook_sid_idxfk (regis
CREATE INDEX account_sid_idx ON accounts (account_sid);
CREATE INDEX sip_realm_idx ON accounts (sip_realm);
CREATE INDEX service_provider_sid_idx ON accounts (service_provider_sid);
ALTER TABLE accounts ADD FOREIGN KEY service_provider_sid_idxfk_9 (service_provider_sid) REFERENCES service_providers (service_provider_sid);
ALTER TABLE accounts ADD FOREIGN KEY service_provider_sid_idxfk_10 (service_provider_sid) REFERENCES service_providers (service_provider_sid);
ALTER TABLE accounts ADD FOREIGN KEY registration_hook_sid_idxfk_1 (registration_hook_sid) REFERENCES webhooks (webhook_sid);
@@ -572,4 +646,6 @@ ALTER TABLE accounts ADD FOREIGN KEY queue_event_hook_sid_idxfk (queue_event_hoo
ALTER TABLE accounts ADD FOREIGN KEY device_calling_application_sid_idxfk (device_calling_application_sid) REFERENCES applications (application_sid);
SET FOREIGN_KEY_CHECKS=0;
ALTER TABLE accounts ADD FOREIGN KEY siprec_hook_sid_idxfk (siprec_hook_sid) REFERENCES applications (application_sid);
SET FOREIGN_KEY_CHECKS=1;

212
test/dial-tests.js Normal file
View File

@@ -0,0 +1,212 @@
const test = require('tape');
const { sippUac } = require('./sipp')('test_fs');
const bent = require('bent');
const getJSON = bent('json')
const clearModule = require('clear-module');
const {provisionCallHook} = require('./utils')
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
});
function connect(connectable) {
return new Promise((resolve, reject) => {
connectable.on('connect', () => {
return resolve();
});
});
}
test('\'dial-phone\'', async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// wait for fs to connect to the drachtio server.
await new Promise(r => setTimeout(r, 1000));
// GIVEN
const from = "dial_success";
let verbs = [
{
"verb": "dial",
"callerId": from,
"actionHook": "/actionHook",
"timeLimit": 5,
"target": [
{
"type": "phone",
"number": "15083084809"
}
]
}
];
provisionCallHook(from, verbs);
// THEN
const p = sippUac('uas-dial.xml', '172.38.0.10', undefined, undefined, 2);
let account_sid = '622f62e4-303a-49f2-bbe0-eb1e1714e37a';
let post = bent('http://127.0.0.1:3000/', 'POST', 'json', 201);
post('v1/createCall', {
'account_sid':account_sid,
"call_hook": {
"url": "http://127.0.0.1:3100/",
"method": "POST",
},
"from": from,
"to": {
"type": "phone",
"number": "15583084808"
}});
await p;
const obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
t.ok(obj.body.from === from,
'dial: succeeds actionHook');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'dial-sip\'', async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// wait for fs to connect to the drachtio server.
await new Promise(r => setTimeout(r, 1000));
// GIVEN
const from = "dial_sip";
let verbs = [
{
"verb": "dial",
"callerId": from,
"actionHook": "/actionHook",
"dtmfCapture":["*2", "*3"],
"target": [
{
"type": "sip",
"sipUri": "sip:15083084809@jambonz.com"
}
]
}
];
provisionCallHook(from, verbs);
// THEN
const p = sippUac('uas-dial.xml', '172.38.0.10', undefined, undefined, 2);
let account_sid = '622f62e4-303a-49f2-bbe0-eb1e1714e37a';
let post = bent('http://127.0.0.1:3000/', 'POST', 'json', 201);
post('v1/createCall', {
'account_sid':account_sid,
"call_hook": {
"url": "http://127.0.0.1:3100/",
"method": "POST",
},
"from": from,
"to": {
"type": "phone",
"number": "15583084808"
}});
await new Promise(r => setTimeout(r, 2000));
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}`);
const callSid = obj.body.call_sid;
post = bent('http://127.0.0.1:3000/', 'POST', 202);
await post(`v1/updateCall/${callSid}`, {
"call_status": "completed"
});
await p;
obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
t.ok(obj.body.from === from,
'dial: succeeds actionHook');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'dial-user\'', async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// wait for fs to connect to the drachtio server.
await new Promise(r => setTimeout(r, 1000));
// GIVEN
const from = "dial_user";
let verbs = [
{
"verb": "dial",
"callerId": from,
"actionHook": "/actionHook",
"target": [
{
"type": "user",
"name": "user110@jambonz.com"
}
]
}
];
provisionCallHook(from, verbs);
// THEN
const p = sippUac('uas-dial.xml', '172.38.0.10', undefined, undefined, 2);
let account_sid = '622f62e4-303a-49f2-bbe0-eb1e1714e37a';
let post = bent('http://127.0.0.1:3000/', 'POST', 'json', 201);
post('v1/createCall', {
'account_sid':account_sid,
"call_hook": {
"url": "http://127.0.0.1:3100/",
"method": "POST",
},
"from": from,
"to": {
"type": "phone",
"number": "15583084808"
}});
await new Promise(r => setTimeout(r, 2000));
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}`);
const callSid = obj.body.call_sid;
post = bent('http://127.0.0.1:3000/', 'POST', 202);
await post(`v1/updateCall/${callSid}`, {
"call_status": "completed"
});
await p;
obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
t.ok(obj.body.from === from,
'dial: succeeds actionHook');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});

View File

@@ -44,7 +44,7 @@ services:
drachtio:
image: drachtio/drachtio-server:latest
restart: always
command: drachtio --contact "sip:*;transport=udp,tcp" --address 0.0.0.0 --port 9022
command: drachtio --contact "sip:*;transport=udp" --mtu 4096 --address 0.0.0.0 --port 9022
ports:
- "9060:9022/tcp"
networks:
@@ -57,7 +57,7 @@ services:
condition: service_healthy
freeswitch:
image: drachtio/drachtio-freeswitch-mrf:v1.10.1-full
image: drachtio/drachtio-freeswitch-mrf:0.4.18
restart: always
command: freeswitch --rtp-range-start 20000 --rtp-range-end 20100
environment:
@@ -68,7 +68,7 @@ services:
- /tmp:/tmp
- ./credentials:/opt/credentials
healthcheck:
test: ['CMD', 'fs_cli' ,'-x', '"sofia status"']
test: ['CMD', 'fs_cli' ,'-p', 'JambonzR0ck$$', '-x', '"sofia status"']
timeout: 5s
retries: 15
networks:

View File

@@ -17,7 +17,11 @@ function connect(connectable) {
});
}
test('\'gather\' and \'transcribe\' tests', async(t) => {
test('\'gather\' test - google', async(t) => {
if (!process.env.GCP_JSON_KEY) {
t.pass('skipping google tests');
return t.end();
}
clearModule.all();
const {srf, disconnect} = require('../app');
@@ -41,8 +45,210 @@ test('\'gather\' and \'transcribe\' tests', async(t) => {
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
t.ok(obj.body.speech.alternatives[0].transcript = 'I\'d like to speak to customer support',
'gather: succeeds when using account credentials');
//console.log(JSON.stringify(obj));
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'gather: succeeds when using google credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'gather\' test - default (google)', async(t) => {
if (!process.env.GCP_JSON_KEY) {
t.pass('skipping google tests');
return t.end();
}
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let verbs = [
{
"verb": "gather",
"input": ["speech"],
"timeout": 10,
"actionHook": "/actionHook"
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
//console.log(JSON.stringify(obj));
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase() === 'i\'d like to speak to customer support',
'gather: succeeds when using default (google) credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'gather\' test - microsoft', async(t) => {
if (!process.env.MICROSOFT_REGION || !process.env.MICROSOFT_API_KEY) {
t.pass('skipping microsoft tests');
return t.end();
}
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let verbs = [
{
"verb": "gather",
"input": ["speech"],
"recognizer": {
"vendor": "microsoft",
"hints": ["customer support", "sales", "human resources", "HR"]
},
"timeout": 10,
"actionHook": "/actionHook"
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
//console.log(JSON.stringify(obj));
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'gather: succeeds when using microsoft credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'gather\' test - aws', async(t) => {
if (!process.env.AWS_ACCESS_KEY_ID || !process.env.AWS_SECRET_ACCESS_KEY) {
t.pass('skipping aws tests');
return t.end();
}
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let verbs = [
{
"verb": "gather",
"input": ["speech"],
"recognizer": {
"vendor": "aws",
"hints": ["customer support", "sales", "human resources", "HR"]
},
"timeout": 10,
"actionHook": "/actionHook"
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
//console.log(JSON.stringify(obj));
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'gather: succeeds when using aws credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'gather\' test - deepgram', async(t) => {
if (!process.env.DEEPGRAM_API_KEY ) {
t.pass('skipping deepgram tests');
return t.end();
}
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let verbs = [
{
"verb": "gather",
"input": ["speech"],
"recognizer": {
"vendor": "deepgram",
"hints": ["customer support", "sales", "human resources", "HR"],
"deepgramOptions": {
"apiKey": process.env.DEEPGRAM_API_KEY
}
},
"timeout": 10,
"actionHook": "/actionHook"
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
//console.log(JSON.stringify(obj));
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'gather: succeeds when using deepgram credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'gather\' test - soniox', async(t) => {
if (!process.env.SONIOX_API_KEY ) {
t.pass('skipping soniox tests');
return t.end();
}
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let verbs = [
{
"verb": "gather",
"input": ["speech"],
"recognizer": {
"vendor": "deepgram",
"hints": ["customer support", "sales", "human resources", "HR"],
"deepgramOptions": {
"apiKey": process.env.SONIOX_API_KEY
}
},
"timeout": 10,
"actionHook": "/actionHook"
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
console.log(JSON.stringify(obj));
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'gather: succeeds when using soniox credentials');
disconnect();
} catch (err) {

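All of the vendor-specific gather tests above drive the same verb shape; only the recognizer.vendor value and the pass-through option block (such as deepgramOptions) change per test. A minimal sketch of that shared payload, assembled purely from the tests shown above:
// shared 'gather' shape used by the vendor tests above (a sketch assembled from the diff, not repo code)
const gatherVerb = {
  verb: 'gather',
  input: ['speech'],
  recognizer: {
    vendor: 'microsoft',   // the tests swap in google, aws, deepgram or soniox here
    hints: ['customer support', 'sales', 'human resources', 'HR']
  },
  timeout: 10,
  actionHook: '/actionHook'
};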
View File

@@ -1,12 +1,18 @@
require('./ws-requestor-unit-test');
require('./unit-tests');
require('./docker_start');
require('./create-test-db');
require('./account-validation-tests');
require('./dial-tests');
require('./webhooks-tests');
require('./say-tests');
require('./gather-tests');
require('./transcribe-tests');
require('./sip-request-tests');
require('./create-call-test');
require('./play-tests');
require('./sip-refer-tests');
require('./listen-tests');
require('./config-test');
require('./remove-test-db');
require('./docker_stop');
require('./docker_stop');

149
test/listen-tests.js Normal file
View File

@@ -0,0 +1,149 @@
const test = require('tape');
const { sippUac } = require('./sipp')('test_fs');
const bent = require('bent');
const getJSON = bent('json')
const clearModule = require('clear-module');
const {provisionCallHook} = require('./utils')
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
});
function connect(connectable) {
return new Promise((resolve, reject) => {
connectable.on('connect', () => {
return resolve();
});
});
}
test('\'listen-success\'', async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
const from = "listen_success";
let verbs = [
{
"verb": "listen",
"url": `ws://172.38.0.60:3000/${from}`,
"mixType" : "mono",
"actionHook": "/actionHook",
"playBeep": true,
}
];
provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success-send-bye.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/ws_packet_count/${from}`);
t.ok(38000 <= obj.count, 'listen: success incoming call audio');
obj = await getJSON(`http://127.0.0.1:3100/ws_metadata/${from}`);
t.ok(obj.metadata.from === from && obj.metadata.sampleRate === 8000, 'listen: success metadata');
obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
t.ok(obj.body.from === from,
'listen: succeeds actionHook');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'listen-maxLength\'', async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let from = "listen_timeout";
let verbs = [
{
"verb": "listen",
"url": `ws://172.38.0.60:3000/${from}`,
"mixType" : "mixed",
"timeout": 2,
"maxLength": 2
}
];
provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/ws_packet_count/${from}`);
t.ok(30000 <= obj.count, 'listen: success maxLength incoming call audio');
obj = await getJSON(`http://127.0.0.1:3100/ws_metadata/${from}`);
t.ok(obj.metadata.from === from && obj.metadata.sampleRate === 8000, 'listen: success maxLength metadata');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'listen-pause-resume\'', async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let from = "listen_timeout";
let verbs = [
{
"verb": "listen",
"url": `ws://172.38.0.60:3000/${from}`,
"mixType" : "mixed"
}
];
provisionCallHook(from, verbs);
// THEN
const p = sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
await new Promise(r => setTimeout(r, 2000));
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}`);
const callSid = obj.body.call_sid;
// GIVEN
// Pause listen
let post = bent('http://127.0.0.1:3000/', 'POST', 202);
await post(`v1/updateCall/${callSid}`, {
"listen_status": "pause"
});
await new Promise(r => setTimeout(r, 2000));
// Resume listen
post = bent('http://127.0.0.1:3000/', 'POST', 202);
await post(`v1/updateCall/${callSid}`, {
"listen_status": "resume"
});
// end the call
post = bent('http://127.0.0.1:3000/', 'POST', 202);
await post(`v1/updateCall/${callSid}`, {
"call_status": "completed"
});
await p;
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
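The byte thresholds above (38000 and 30000) are raw totals: the mock webhook server sums the length of every binary websocket frame it receives on a path. As a rough sanity check, and assuming the listen stream is 16-bit linear PCM at the advertised 8000 Hz sample rate (the encoding is an assumption, not stated in this diff), the larger threshold corresponds to only a couple of seconds of audio:
// back-of-envelope check of the 38000-byte threshold (assumes 16-bit linear PCM at 8 kHz)
const bytesPerSecond = 8000 /* samples/s */ * 2 /* bytes/sample */;  // 16000
const secondsNeeded = 38000 / bytesPerSecond;                        // ~2.4 s of streamed audio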

View File

@@ -184,11 +184,67 @@ test('\'play\' tests with seekOffset and actionHook', async(t) => {
// THEN
await sippUac('uac-success-received-bye.xml', '172.38.0.10', from);
t.pass('play: succeeds');
const obj = await getJSON(`http:127.0.0.1:3100/lastRequest/${from}_customHook`)
t.ok(obj.body.reason === "playCompleted", "play: actionHook success received")
t.ok(obj.body.playback_seconds === "2", "playback_seconds: actionHook success received")
t.ok(obj.body.playback_milliseconds === "2048", "playback_milliseconds: actionHook success received")
t.ok(obj.body.playback_last_offset_pos === "16000", "playback_last_offset_pos: actionHook success received")
const obj = await getJSON(`http:127.0.0.1:3100/lastRequest/${from}_customHook`);
const seconds = parseInt(obj.body.playback_seconds);
const milliseconds = parseInt(obj.body.playback_milliseconds);
const lastOffsetPos = parseInt(obj.body.playback_last_offset_pos);
//console.log({obj}, 'lastRequest');
t.ok(obj.body.reason === "playCompleted", "play: actionHook success received");
t.ok(seconds === 2, "playback_seconds: actionHook success received");
t.ok(milliseconds === 2048, "playback_milliseconds: actionHook success received");
t.ok(lastOffsetPos > 15500 && lastOffsetPos < 16500, "playback_last_offset_pos: actionHook success received")
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'play\' tests with earlymedia', async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
const verbs = [
{
verb: 'play',
url: 'silence_stream://5000',
earlyMedia: true
}
];
const from = 'play_early_media';
provisionCallHook(from, verbs)
// THEN
await sippUac('uac-invite-expect-183-cancel.xml', '172.38.0.10', from);
const obj = await getJSON(`http:127.0.0.1:3100/lastRequest/${from}_callStatus`);
t.ok(obj.body.sip_status === 487, "play: early media call reports sip_status 487");
t.ok(obj.body.sip_reason === 'Request Terminated', "play: early media call reports sip_reason Request Terminated");
t.ok(obj.body.call_termination_by === 'caller', "play: early media call reports call_termination_by caller");
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'play\' tests with initial app_json', async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
const from = 'play_initial_app_json';
// THEN
await sippUac('uac-success-received-bye.xml', '172.38.0.10', from, "16174000007");
t.pass('application can use app_json for initial instructions');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);

View File

@@ -5,7 +5,6 @@ test('dropping jambones_test database', (t) => {
exec(`mysql -h 127.0.0.1 -u root --protocol=tcp --port=3360 < ${__dirname}/db/remove_test_db.sql`, (err, stdout, stderr) => {
if (err) return t.end(err);
t.pass('database successfully dropped');
fs.unlinkSync(`${__dirname}/credentials/gcp.json`);
t.end();
});
});

View File

@@ -0,0 +1,99 @@
<?xml version="1.0" encoding="ISO-8859-1" ?>
<!DOCTYPE scenario SYSTEM "sipp.dtd">
<scenario name="Basic Sipstone UAC">
<!-- In client mode (sipp placing calls), the Call-ID MUST be -->
<!-- generated by sipp. To do so, use [call_id] keyword. -->
<send retrans="500">
<![CDATA[
INVITE sip:[to]@[remote_ip]:[remote_port] SIP/2.0
Via: SIP/2.0/[transport] [local_ip]:[local_port];branch=[branch]
From: [from] <sip:[from]@[local_ip]:[local_port]>;tag=[pid]SIPpTag00[call_number]
To: <sip:[to]@[remote_ip]:[remote_port]>
Call-ID: [call_id]
CSeq: 1 INVITE
Contact: sip:[from]@[local_ip]:[local_port]
Max-Forwards: 70
X-Account-Sid: bb845d4b-83a9-4cde-a6e9-50f3743bab3f
Subject: uac-gather-account-creds-success
Content-Type: application/sdp
Content-Length: [len]
v=0
o=user1 53655765 2353687637 IN IP[local_ip_type] [local_ip]
s=-
c=IN IP[media_ip_type] [media_ip]
t=0 0
m=audio [media_port] RTP/AVP 0
a=rtpmap:0 PCMU/8000
]]>
</send>
<recv response="100"
optional="true">
</recv>
<recv response="180" optional="true">
</recv>
<recv response="183" optional="true">
</recv>
<!-- By adding rrs="true" (Record Route Sets), the route sets -->
<!-- are saved and used for following messages sent. Useful to test -->
<!-- against stateful SIP proxies/B2BUAs. -->
<recv response="200" rtd="true">
</recv>
<!-- Packet lost can be simulated in any send/recv message by -->
<!-- by adding the 'lost = "10"'. Value can be [1-100] percent. -->
<send>
<![CDATA[
ACK sip:[to]@[remote_ip]:[remote_port] SIP/2.0
Via: SIP/2.0/[transport] [local_ip]:[local_port];branch=[branch]
From: [from] <sip:[from]@[local_ip]:[local_port]>;tag=[pid]SIPpTag00[call_number]
To: [to] <sip:[to]@[remote_ip]:[remote_port]>[peer_tag_param]
Call-ID: [call_id]
CSeq: 1 ACK
Contact: sip:[from]@[local_ip]:[local_port]
Max-Forwards: 70
Subject: uac-gather-account-creds-success
Content-Length: 0
]]>
</send>
<nop>
<action>
<exec rtp_stream="/tmp/scenarios/wav/speak-to-customer-support.wav,1,0"/>
</action>
</nop>
<!-- Pause briefly -->
<pause milliseconds="3000"/>
<!-- The 'crlf' option inserts a blank line in the statistics report. -->
<send retrans="500">
<![CDATA[
BYE sip:[to]@[remote_ip]:[remote_port] SIP/2.0
Via: SIP/2.0/[transport] [local_ip]:[local_port];branch=[branch]
From: [from] <sip:[from]@[local_ip]:[local_port]>;tag=[pid]SIPpTag00[call_number]
To: [to] <sip:[to]@[remote_ip]:[remote_port]>[peer_tag_param]
Call-ID: [call_id]
CSeq: 3 BYE
Contact: sip:[from]@[local_ip]:[local_port]
Max-Forwards: 70
]]>
</send>
<recv response="200" crlf="true">
</recv>
</scenario>

View File

@@ -0,0 +1,86 @@
<?xml version="1.0" encoding="ISO-8859-1" ?>
<!DOCTYPE scenario SYSTEM "sipp.dtd">
<scenario name="Basic Sipstone UAC">
<!-- In client mode (sipp placing calls), the Call-ID MUST be -->
<!-- generated by sipp. To do so, use [call_id] keyword. -->
<send retrans="500">
<![CDATA[
INVITE sip:[to]@[remote_ip]:[remote_port] SIP/2.0
Via: SIP/2.0/[transport] [local_ip]:[local_port];branch=[branch]
From: [from] <sip:[from]@[local_ip]:[local_port]>;tag=[pid]SIPpTag00[call_number]
To: <sip:[to]@[remote_ip]:[remote_port]>
Call-ID: [call_id]
CSeq: 1 INVITE
Contact: sip:[from]@[local_ip]:[local_port]
Max-Forwards: 70
X-Account-Sid: bb845d4b-83a9-4cde-a6e9-50f3743bab3f
Subject: uac-say
Content-Type: application/sdp
Content-Length: [len]
v=0
o=user1 53655765 2353687637 IN IP[local_ip_type] [local_ip]
s=-
c=IN IP[media_ip_type] [media_ip]
t=0 0
m=audio [media_port] RTP/AVP 0
a=rtpmap:0 PCMU/8000
]]>
</send>
<recv response="100"
optional="true">
</recv>
<recv response="180" optional="true">
</recv>
<recv response="183" rtd="true">
<action>
<ereg regexp=";branch=[^;]*" search_in="hdr" header="Via" check_it="false" assign_to="1"/>
</action>
</recv>
<send>
<![CDATA[
CANCEL sip:[to]@[remote_ip]:[remote_port] SIP/2.0
Via: SIP/2.0/[transport] [local_ip]:[local_port][$1]
From: [from] <sip:[from]@[local_ip]:[local_port]>;tag=[pid]SIPpTag00[call_number]
To: [to] <sip:[to]@[remote_ip]:[remote_port]>
Call-ID: [call_id]
CSeq: [cseq] CANCEL
Contact: sip:[from]@[local_ip]:[local_port]
Max-Forwards: 70
]]>
</send>
<recv response="200" rtd="true">
</recv>
<recv response="487" rtd="true">
</recv>
<!-- Packet lost can be simulated in any send/recv message by -->
<!-- by adding the 'lost = "10"'. Value can be [1-100] percent. -->
<send>
<![CDATA[
ACK sip:[to]@[remote_ip]:[remote_port] SIP/2.0
Via: SIP/2.0/[transport] [local_ip]:[local_port][$1]
From: [from] <sip:[from]@[local_ip]:[local_port]>;tag=[pid]SIPpTag00[call_number]
To: [to] <sip:[to]@[remote_ip]:[remote_port]>[peer_tag_param]
Call-ID: [call_id]
CSeq: 1 ACK
Contact: sip:[from]@[local_ip]:[local_port]
Max-Forwards: 70
Subject: uac-say
Content-Length: 0
]]>
</send>
</scenario>

View File

@@ -0,0 +1,95 @@
<?xml version="1.0" encoding="ISO-8859-1" ?>
<!DOCTYPE scenario SYSTEM "sipp.dtd">
<scenario name="Basic Sipstone UAC">
<!-- In client mode (sipp placing calls), the Call-ID MUST be -->
<!-- generated by sipp. To do so, use [call_id] keyword. -->
<send retrans="500">
<![CDATA[
INVITE sip:[to]@[remote_ip]:[remote_port] SIP/2.0
Via: SIP/2.0/[transport] [local_ip]:[local_port];branch=[branch]
From: [from] <sip:[from]@[local_ip]:[local_port]>;tag=[pid]SIPpTag00[call_number]
To: <sip:[to]@[remote_ip]:[remote_port]>
Call-ID: [call_id]
CSeq: 1 INVITE
Contact: sip:[from]@[local_ip]:[local_port]
Max-Forwards: 70
X-Account-Sid: bb845d4b-83a9-4cde-a6e9-50f3743bab3f
Subject: uac-refer-no-notify.xml
Content-Type: application/sdp
Content-Length: [len]
v=0
o=user1 53655765 2353687637 IN IP[local_ip_type] [local_ip]
s=-
c=IN IP[media_ip_type] [media_ip]
t=0 0
m=audio [media_port] RTP/AVP 0
a=rtpmap:0 PCMU/8000
]]>
</send>
<recv response="100"
optional="true">
</recv>
<recv response="180" optional="true">
</recv>
<recv response="183" optional="true">
</recv>
<!-- By adding rrs="true" (Record Route Sets), the route sets -->
<!-- are saved and used for following messages sent. Useful to test -->
<!-- against stateful SIP proxies/B2BUAs. -->
<recv response="200" rtd="true">
</recv>
<!-- Packet lost can be simulated in any send/recv message by -->
<!-- by adding the 'lost = "10"'. Value can be [1-100] percent. -->
<send>
<![CDATA[
ACK sip:[service]@[remote_ip]:[remote_port] SIP/2.0
Via: SIP/2.0/[transport] [local_ip]:[local_port];branch=[branch]
From: sipp <sip:sipp@[local_ip]:[local_port]>;tag=[pid]SIPpTag00[call_number]
To: [service] <sip:[service]@[remote_ip]:[remote_port]>[peer_tag_param]
Call-ID: [call_id]
CSeq: 1 ACK
Max-Forwards: 70
Subject: REFER test with no NOTIFY
Content-Length: 0
]]>
</send>
<!-- receive re-invite -->
<recv request="REFER" crlf="true"/>
<send>
<![CDATA[
SIP/2.0 202 Accepted
[last_Via:]
[last_From:]
[last_To:]
[last_Call-ID:]
[last_CSeq:]
Contact: sip:sipp@[local_ip]:[local_port]
Content-Type: application/sdp
Content-Length: [len]
v=0
o=user1 53655765 2353687637 IN IP[local_ip_type] [local_ip]
s=-
c=IN IP[media_ip_type] [media_ip]
t=0 0
m=audio [media_port] RTP/AVP 0
a=rtpmap:0 PCMU/8000
]]>
</send>
</scenario>

View File

@@ -0,0 +1,115 @@
<?xml version="1.0" encoding="ISO-8859-1" ?>
<!DOCTYPE scenario SYSTEM "sipp.dtd">
<scenario name="Basic Sipstone UAC">
<!-- In client mode (sipp placing calls), the Call-ID MUST be -->
<!-- generated by sipp. To do so, use [call_id] keyword. -->
<send retrans="500">
<![CDATA[
INVITE sip:[to]@[remote_ip]:[remote_port] SIP/2.0
Via: SIP/2.0/[transport] [local_ip]:[local_port];branch=[branch]
From: [from] <sip:[from]@[local_ip]:[local_port]>;tag=[pid]SIPpTag00[call_number]
To: <sip:16174000000@[remote_ip]:[remote_port]>
Call-ID: [call_id]
CSeq: 1 INVITE
Contact: sip:[from]@[local_ip]:[local_port]
Max-Forwards: 70
X-Account-Sid: bb845d4b-83a9-4cde-a6e9-50f3743bab3f
Subject: uac-refer-with-notify.xml
Content-Type: application/sdp
Content-Length: [len]
v=0
o=user1 53655765 2353687637 IN IP[local_ip_type] [local_ip]
s=-
c=IN IP[media_ip_type] [media_ip]
t=0 0
m=audio [media_port] RTP/AVP 0
a=rtpmap:0 PCMU/8000
]]>
</send>
<recv response="100"
optional="true">
</recv>
<recv response="180" optional="true">
</recv>
<recv response="183" optional="true">
</recv>
<!-- By adding rrs="true" (Record Route Sets), the route sets -->
<!-- are saved and used for following messages sent. Useful to test -->
<!-- against stateful SIP proxies/B2BUAs. -->
<recv response="200" rtd="true">
</recv>
<!-- Packet lost can be simulated in any send/recv message by -->
<!-- by adding the 'lost = "10"'. Value can be [1-100] percent. -->
<send>
<![CDATA[
ACK sip:[service]@[remote_ip]:[remote_port] SIP/2.0
Via: SIP/2.0/[transport] [local_ip]:[local_port];branch=[branch]
From: sipp <sip:sipp@[local_ip]:[local_port]>;tag=[pid]SIPpTag00[call_number]
To: <sip:16174000000@[remote_ip]:[remote_port]>[peer_tag_param]
Call-ID: [call_id]
CSeq: 1 ACK
Max-Forwards: 70
Subject: uac-refer-with-notify.xml
Content-Length: 0
]]>
</send>
<!-- receive re-invite -->
<recv request="REFER" crlf="true"/>
<send>
<![CDATA[
SIP/2.0 202 Accepted
[last_Via:]
[last_From:]
[last_To:]
[last_Call-ID:]
[last_CSeq:]
Contact: sip:sipp@[local_ip]:[local_port]
Content-Type: application/sdp
Content-Length: [len]
v=0
o=user1 53655765 2353687637 IN IP[local_ip_type] [local_ip]
s=-
c=IN IP[media_ip_type] [media_ip]
t=0 0
m=audio [media_port] RTP/AVP 0
a=rtpmap:0 PCMU/8000
]]>
</send>
<send retrans="500">
<![CDATA[
NOTIFY sip:[service]@[remote_ip]:[remote_port] SIP/2.0
Via: SIP/2.0/[transport] [local_ip]:[local_port];branch=[branch]
From: sipp <sip:sipp@[local_ip]:[local_port]>;tag=[pid]SIPpTag00[call_number]
To: <sip:16174000000@[remote_ip]:[remote_port]>[peer_tag_param]
Call-ID: [call_id]
CSeq: 2 NOTIFY
Contact: sip:sipp@[local_ip]:[local_port]
Max-Forwards: 70
Subject: uac-refer-with-notify.xml
Content-Type: message/sipfrag;version=2.0
Content-Length: 16
SIP/2.0 200 OK
]]>
</send>
<recv response="200"</recv>
</scenario>

164
test/scenarios/uas-dial.xml Normal file
View File

@@ -0,0 +1,164 @@
<?xml version="1.0" encoding="ISO-8859-1" ?>
<!DOCTYPE scenario SYSTEM "sipp.dtd">
<!-- This program is free software; you can redistribute it and/or -->
<!-- modify it under the terms of the GNU General Public License as -->
<!-- published by the Free Software Foundation; either version 2 of the -->
<!-- License, or (at your option) any later version. -->
<!-- -->
<!-- This program is distributed in the hope that it will be useful, -->
<!-- but WITHOUT ANY WARRANTY; without even the implied warranty of -->
<!-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -->
<!-- GNU General Public License for more details. -->
<!-- -->
<!-- You should have received a copy of the GNU General Public License -->
<!-- along with this program; if not, write to the -->
<!-- Free Software Foundation, Inc., -->
<!-- 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -->
<!-- -->
<!-- Sipp default 'uas' scenario. -->
<!-- -->
<scenario name="Basic UAS responder">
<!-- By adding rrs="true" (Record Route Sets), the route sets -->
<!-- are saved and used for following messages sent. Useful to test -->
<!-- against stateful SIP proxies/B2BUAs. -->
<recv request="INVITE" crlf="true">
<action>
<ereg regexp=".*" search_in="hdr" header="Subject:" assign_to="1" />
</action>
</recv>
<!-- The '[last_*]' keyword is replaced automatically by the -->
<!-- specified header if it was present in the last message received -->
<!-- (except if it was a retransmission). If the header was not -->
<!-- present or if no message has been received, the '[last_*]' -->
<!-- keyword is discarded, and all bytes until the end of the line -->
<!-- are also discarded. -->
<!-- -->
<!-- If the specified header was present several times in the -->
<!-- message, all occurrences are concatenated (CRLF separated) -->
<!-- to be used in place of the '[last_*]' keyword. -->
<send>
<![CDATA[
SIP/2.0 180 Ringing
[last_Via:]
[last_From:]
[last_To:];tag=[pid]SIPpTag01[call_number]
[last_Call-ID:]
[last_CSeq:]
[last_Record-Route:]
Subject:[$1]
Content-Length: 0
]]>
</send>
<send>
<![CDATA[
SIP/2.0 200 OK
[last_Via:]
[last_From:]
[last_To:];tag=[pid]SIPpTag01[call_number]
[last_Call-ID:]
[last_CSeq:]
[last_Record-Route:]
Subject:[$1]
Contact: <sip:[local_ip]:[local_port];transport=[transport]>
Content-Type: application/sdp
Content-Length: [len]
v=0
o=user1 53655765 2353687637 IN IP[local_ip_type] [local_ip]
s=-
c=IN IP[media_ip_type] [media_ip]
t=0 0
m=audio [media_port] RTP/AVP 0
a=rtpmap:0 PCMU/8000
]]>
</send>
<recv request="ACK"
rtd="true"
crlf="true">
</recv>
<recv request="INFO" optional="true" next="1">
</recv>
<recv request="INVITE" crlf="true">
</recv>
<send>
<![CDATA[
SIP/2.0 200 OK
[last_Via:]
[last_From:]
[last_To:];tag=[pid]SIPpTag01[call_number]
[last_Call-ID:]
[last_CSeq:]
[last_Record-Route:]
Subject:[$1]
Contact: <sip:[local_ip]:[local_port];transport=[transport]>
Content-Type: application/sdp
Content-Length: [len]
v=0
o=user1 53655765 2353687637 IN IP[local_ip_type] [local_ip]
s=-
c=IN IP[media_ip_type] [media_ip]
t=0 0
m=audio [media_port] RTP/AVP 0
a=rtpmap:0 PCMU/8000
]]>
</send>
<recv request="ACK"
rtd="true"
crlf="true">
</recv>
<recv request="BYE">
</recv>
<send next="2">
<![CDATA[
SIP/2.0 200 OK
[last_Via:]
[last_From:]
[last_To:]
[last_Call-ID:]
[last_CSeq:]
Contact: <sip:[local_ip]:[local_port];transport=[transport]>
Content-Length: 0
]]>
</send>
<label id="1"/>
<send>
<![CDATA[
SIP/2.0 200 OK
[last_Via:]
[last_From:]
[last_To:]
[last_Call-ID:]
[last_CSeq:]
Contact: <sip:[local_ip]:[local_port];transport=[transport]>
Content-Length: 0
]]>
</send>
<label id="2"/>
</scenario>

100
test/sip-refer-tests.js Normal file
View File

@@ -0,0 +1,100 @@
const test = require('tape');
const { sippUac } = require('./sipp')('test_fs');
const clearModule = require('clear-module');
const {provisionCallHook, provisionCustomHook, provisionActionHook} = require('./utils')
const bent = require('bent');
const getJSON = bent('json')
const sleepFor = async(ms) => new Promise(resolve => setTimeout(resolve, ms));
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
});
function connect(connectable) {
return new Promise((resolve, reject) => {
connectable.on('connect', () => {
return resolve();
});
});
}
test('\'refer\' tests w/202 and NOTIFY', {timeout: 25000}, async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
const verbs = [
{
verb: 'say',
text: 'silence_stream://100'
},
{
verb: 'sip:refer',
referTo: '123456',
actionHook: '/actionHook'
}
];
const noVerbs = [];
const from = 'refer_with_notify';
provisionCallHook(from, verbs);
provisionActionHook(from, noVerbs)
// THEN
await sippUac('uac-refer-with-notify.xml', '172.38.0.10', from);
t.pass('refer: successfully received 202 Accepted');
await sleepFor(1000);
const obj = await getJSON(`http:127.0.0.1:3100/lastRequest/${from}_actionHook`);
t.ok(obj.body.final_referred_call_status === 200, 'refer: successfully received NOTIFY with 200 OK');
//console.log(`obj: ${JSON.stringify(obj)}`);
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'refer\' tests w/202 but no NOTIFY', {timeout: 25000}, async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
const verbs = [
{
verb: 'say',
text: 'silence_stream://100'
},
{
verb: 'sip:refer',
referTo: '123456',
actionHook: '/actionHook'
}
];
const noVerbs = [];
const from = 'refer_no_notify';
provisionCallHook(from, verbs);
provisionActionHook(from, noVerbs)
// THEN
await sippUac('uac-refer-no-notify.xml', '172.38.0.10', from);
t.pass('refer: successfully received 202 Accepted w/o NOTIFY');
await sleepFor(17000);
const obj = await getJSON(`http:127.0.0.1:3100/lastRequest/${from}_actionHook`);
console.log(`obj: ${JSON.stringify(obj)}`);
t.ok(obj.body.refer_status === 202, 'refer: successfully timed out and reported 202');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
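The first test above passes only because the UAS scenario replies 202 Accepted and then sends a NOTIFY whose message/sipfrag body is 'SIP/2.0 200 OK'; that status line is what surfaces in the actionHook as final_referred_call_status. A hypothetical helper, purely to illustrate the mapping (not code from this repo):
// hypothetical illustration: extract the status code from a message/sipfrag body
const statusFromSipfrag = (body) => parseInt(body.trim().split(/\s+/)[1], 10);
statusFromSipfrag('SIP/2.0 200 OK');  // 200, the value asserted as final_referred_call_status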

View File

@@ -53,6 +53,13 @@ test('incoming call tests', (t) => {
.then(() => {
return t.pass('handles in-dialog requests');
})
.then(() => {
return sippUac('uac-refer-no-notify.xml', '172.38.0.30');
})
.then(() => {
return t.pass('handles sip:refer where we get 202 but no NOTIFY');
})
.then(() => {
srf.disconnect();
t.end();

View File

@@ -24,13 +24,13 @@ obj.output = () => {
return output;
};
obj.sippUac = (file, bindAddress, from='sipp', to='16174000000') => {
obj.sippUac = (file, bindAddress, from='sipp', to='16174000000', loop=1) => {
const cmd = 'docker';
const args = [
'run', '-t', '--rm', '--net', `${network}`,
'-v', `${__dirname}/scenarios:/tmp/scenarios`,
'drachtio/sipp', 'sipp', '-sf', `/tmp/scenarios/${file}`,
'-m', '1',
'-m', loop,
'-sleep', '250ms',
'-nostdin',
'-cid_str', `%u-%p@%s-${idx++}`,
@@ -41,7 +41,7 @@ obj.sippUac = (file, bindAddress, from='sipp', to='16174000000') => {
if (bindAddress) args.splice(5, 0, '--ip', bindAddress);
console.log(args.join(' '));
//console.log(args.join(' '));
clearOutput();
return new Promise((resolve, reject) => {

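The new loop argument is passed straight through as SIPp's -m flag (number of calls to handle). The dial tests above rely on it because uas-dial.xml answers two calls in sequence, for example:
// from dial-tests.js above: run the UAS scenario for two calls (-m 2)
const p = sippUac('uas-dial.xml', '172.38.0.10', undefined, undefined, 2);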
210
test/transcribe-tests.js Normal file
View File

@@ -0,0 +1,210 @@
const test = require('tape');
const { sippUac } = require('./sipp')('test_fs');
const bent = require('bent');
const getJSON = bent('json')
const clearModule = require('clear-module');
const {provisionCallHook} = require('./utils')
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
});
function connect(connectable) {
return new Promise((resolve, reject) => {
connectable.on('connect', () => {
return resolve();
});
});
}
test('\'transcribe\' test - google', async(t) => {
if (!process.env.GCP_JSON_KEY) {
t.pass('skipping google tests');
return t.end();
}
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let verbs = [
{
"verb": "transcribe",
"recognizer": {
"vendor": "google",
"hints": ["customer support", "sales", "human resources", "HR"]
},
"transcriptionHook": "/transcriptionHook"
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'transcribe: succeeds when using google credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'transcribe\' test - microsoft', async(t) => {
if (!process.env.MICROSOFT_REGION || !process.env.MICROSOFT_API_KEY) {
t.pass('skipping microsoft tests');
return t.end();
}
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let verbs = [
{
"verb": "transcribe",
"recognizer": {
"vendor": "microsoft",
"hints": ["customer support", "sales", "human resources", "HR"]
},
"transcriptionHook": "/transcriptionHook"
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'transcribe: succeeds when using microsoft credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'transcribe\' test - aws', async(t) => {
if (!process.env.AWS_ACCESS_KEY_ID || !process.env.AWS_SECRET_ACCESS_KEY) {
t.pass('skipping aws tests');
return t.end();
}
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let verbs = [
{
"verb": "transcribe",
"recognizer": {
"vendor": "aws",
"hints": ["customer support", "sales", "human resources", "HR"]
},
"transcriptionHook": "/transcriptionHook"
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'transcribe: succeeds when using aws credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'transcribe\' test - deepgram', async(t) => {
if (!process.env.DEEPGRAM_API_KEY ) {
t.pass('skipping deepgram tests');
return t.end();
}
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let verbs = [
{
"verb": "transcribe",
"recognizer": {
"vendor": "deepgram",
"hints": ["customer support", "sales", "human resources", "HR"],
"deepgramOptions": {
"apiKey": process.env.DEEPGRAM_API_KEY
}
},
"transcriptionHook": "/transcriptionHook"
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'transcribe: succeeds when using deepgram credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'transcribe\' test - soniox', async(t) => {
if (!process.env.SONIOX_API_KEY ) {
t.pass('skipping soniox tests');
return t.end();
}
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let verbs = [
{
"verb": "transcribe",
"recognizer": {
"vendor": "soniox",
"hints": ["customer support", "sales", "human resources", "HR"],
"deepgramOptions": {
"apiKey": process.env.SONIOX_API_KEY
}
},
"transcriptionHook": "/transcriptionHook"
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
console.log(JSON.stringify(obj));
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'transcribe: succeeds when using soniox credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});

View File

@@ -44,10 +44,25 @@ test('unit tests', (t) => {
task = makeTask(logger, require('./data/good/say-text-array'));
t.ok(task.name === 'say', 'parsed say with multiple segments');
task = makeTask(logger, require('./data/good/say-ssml'));
// the ssml is more than 1000 chars;
// expect the first chunk to be > 100 chars, breaking at the ? rather than the first .
// the 2nd chunk is long text < 1000 chars, breaking at a .
// the 3rd chunk is the rest.
t.ok(task.text.length === 3 &&
task.text[0].length === 187 &&
task.text[1].length === 882 &&
task.text[2].length === 123, 'parsed say');
task = makeTask(logger, require('./data/bad/bad-say-ssml'));
t.ok(task.text.length === 1 &&
task.text[0].length === 1162, 'parsed bad say');
const alt = require('./data/good/alternate-syntax');
const normalize = require('../lib/utils/normalize-jambones');
normalize(logger, alt).forEach((t) => {
const { normalizeJambones } = require('@jambonz/verb-specifications');
normalizeJambones(logger, alt).forEach((t) => {
const task = makeTask(logger, t);
});
t.pass('alternate syntax works');
@@ -62,4 +77,4 @@ const errMissingProperty = () => makeTask(logger, require('./data/bad/missing-re
const errInvalidType = () => makeTask(logger, require('./data/bad/invalid-type'));
const errBadEnum = () => makeTask(logger, require('./data/bad/bad-enum'));
const errBadPayload = () => makeTask(logger, require('./data/bad/bad-payload'));
const errBadPayload2 = () => makeTask(logger, require('./data/bad/bad-payload2'));
const errBadPayload2 = () => makeTask(logger, require('./data/bad/bad-payload2'));
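The say-ssml assertions above encode the chunking rule described in the comments: long text is split into chunks of at most roughly 1000 characters, preferring a sentence-ending character after some minimum length as the break point. A rough sketch of that idea only; the real splitter lives in the library, and the minLen/maxLen constants and break characters here are assumptions:
// illustrative splitter: break at the first sentence-ending char after minLen, never past maxLen
const splitText = (text, minLen = 100, maxLen = 1000) => {
  const chunks = [];
  let rest = text;
  while (rest.length > maxLen) {
    const m = rest.slice(minLen).search(/[.?!]/);               // first ., ? or ! after minLen
    const at = m === -1 ? maxLen : Math.min(minLen + m + 1, maxLen);
    chunks.push(rest.slice(0, at));
    rest = rest.slice(at);
  }
  if (rest.length) chunks.push(rest);
  return chunks;
};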

View File

@@ -24,4 +24,13 @@ const provisionCustomHook = (from, verbs) => {
post(`/customHookMapping`, mapping);
}
module.exports = { provisionCallHook, provisionCustomHook}
const provisionActionHook = (from, verbs) => {
const mapping = {
from,
data: JSON.stringify(verbs)
};
const post = bent('http://127.0.0.1:3100', 'POST', 'string', 200);
post(`/actionHook`, mapping);
}
module.exports = { provisionCallHook, provisionCustomHook, provisionActionHook}

View File

@@ -1,14 +1,54 @@
const assert = require('assert');
const fs = require('fs');
const express = require('express');
const app = express();
const Websocket = require('ws');
const listenPort = process.env.HTTP_PORT || 3000;
let json_mapping = new Map();
let hook_mapping = new Map();
let ws_packet_count = new Map();
let ws_metadata = new Map();
app.listen(listenPort, () => {
/** websocket server for listen audio */
const recvAudio = (socket, req) => {
let packets = 0;
let path = req.url;
console.log('received websocket connection');
socket.on('message', (data, isBinary) => {
if (!isBinary) {
try {
const msg = JSON.parse(data);
console.log({msg}, 'received websocket message');
ws_metadata.set(path, msg);
}
catch (err) {
console.log({err}, 'error parsing websocket message');
}
}
else {
packets += data.length;
}
});
socket.on('error', (err) => {
console.log({err}, 'listen websocket: error');
});
socket.on('close', () => {
ws_packet_count.set(path, packets);
})
};
const wsServer = new Websocket.Server({ noServer: true });
wsServer.setMaxListeners(0);
wsServer.on('connection', recvAudio.bind(null));
const server = app.listen(listenPort, () => {
console.log(`sample jambones app server listening on ${listenPort}`);
});
server.on('upgrade', (request, socket, head) => {
console.log('received upgrade request');
wsServer.handleUpgrade(request, socket, head, (socket) => {
wsServer.emit('connection', socket, request);
});
});
app.use(express.urlencoded({ extended: true }));
app.use(express.json());
@@ -20,6 +60,7 @@ app.use(express.json());
app.all('/', (req, res) => {
console.log(req.body, 'POST /');
const key = req.body.from
addRequestToMap(key, req, hook_mapping);
return getJsonFromMap(key, req, res);
});
@@ -39,7 +80,16 @@ app.post('/callStatus', (req, res) => {
return res.sendStatus(200);
});
/*
* action Hook
* transcriptionHook
*/
app.post('/transcriptionHook', (req, res) => {
console.log({payload: req.body}, 'POST /transcriptionHook');
let key = req.body.from + "_actionHook"
addRequestToMap(key, req, hook_mapping);
return res.json([{"verb": "hangup"}]);
});
/*
* actionHook
*/
app.post('/actionHook', (req, res) => {
console.log({payload: req.body}, 'POST /actionHook');
@@ -87,6 +137,27 @@ app.get('/lastRequest/:key', (req, res) => {
}
})
// WS Fetch
app.get('/ws_packet_count/:key', (req, res) => {
let key = `/${req.params.key}`;
console.log(key, ws_packet_count);
if (ws_packet_count.has(key)) {
return res.json({ count: ws_packet_count.get(key) });
} else {
return res.sendStatus(404);
}
})
app.get('/ws_metadata/:key', (req, res) => {
let key = `/${req.params.key}`;
console.log(key, ws_metadata);
if (ws_metadata.has(key)) {
return res.json({ metadata: ws_metadata.get(key) });
} else {
return res.sendStatus(404);
}
})
/*
* private function
*/
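The two new GET routes expose, per websocket path, the byte count and the JSON metadata recorded by recvAudio above; the listen tests read them back like this (usage taken from listen-tests.js earlier in this diff):
// from listen-tests.js: read back what the mock recorded for ws://172.38.0.60:3000/<from>
let obj = await getJSON(`http://127.0.0.1:3100/ws_packet_count/${from}`);
t.ok(38000 <= obj.count, 'listen: success incoming call audio');
obj = await getJSON(`http://127.0.0.1:3100/ws_metadata/${from}`);
t.ok(obj.metadata.from === from && obj.metadata.sampleRate === 8000, 'listen: success metadata');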

File diff suppressed because it is too large

View File

@@ -9,6 +9,7 @@
"author": "Dave Horton",
"license": "MIT",
"dependencies": {
"express": "^4.17.1"
"express": "^4.18.2",
"ws": "^8.12.0"
}
}

97
test/ws-mock.js Normal file
View File

@@ -0,0 +1,97 @@
class MockWebsocket {
static eventResponses = new Map();
static actionLoops = new Map();
eventListeners = new Map();
constructor(url, protocols, options) {
this.u = url;
this.pros = protocols;
this.opts = options;
setTimeout(() => {
this.open();
}, 500)
}
static addJsonMapping(key, value) {
MockWebsocket.eventResponses.set(key, value);
}
static getAndIncreaseActionLoops(key) {
const ret = MockWebsocket.actionLoops.has(key) ? MockWebsocket.actionLoops.get(key) : 0;
MockWebsocket.actionLoops.set(key, ret + 1);
return ret;
}
once(event, listener) {
// Websocket.ws = this;
this.eventListeners.set(event, listener);
return this;
}
on(event, listener) {
// Websocket.ws = this;
this.eventListeners.set(event, listener);
return this;
}
open() {
if (this.eventListeners.has('open')) {
this.eventListeners.get('open')();
}
}
removeAllListeners() {
this.eventListeners.clear();
}
send(data, callback) {
const json = JSON.parse(data);
console.log({json}, 'got message from ws-requestor');
if (MockWebsocket.eventResponses.has(json.call_sid)) {
const resp_data = MockWebsocket.eventResponses.get(json.call_sid);
const action = resp_data.action[MockWebsocket.getAndIncreaseActionLoops(json.call_sid)];
if (action === 'connect') {
setTimeout(()=> {
const msg = {
type: 'ack',
msgid: json.msgid,
command: 'command',
call_sid: json.call_sid,
queueCommand: false,
data: resp_data.body}
console.log({msg}, 'sending ack to ws-requestor');
this.mockOnMessage(JSON.stringify(msg));
}, 100);
} else if (action === 'close') {
if (this.eventListeners.has('close')) {
this.eventListeners.get('close')(500);
}
} else if (action === 'terminate') {
if (this.eventListeners.has('close')) {
this.eventListeners.get('close')(1000);
}
} else if (action === 'error') {
if (this.eventListeners.has('error')) {
this.eventListeners.get('error')();
}
} else if (action === 'unexpected-response') {
if (this.eventListeners.has('unexpected-response')) {
this.eventListeners.get('unexpected-response')();
}
}
}
if (callback) {
callback();
}
}
mockOnMessage(message, isBinary=false) {
if (this.eventListeners.has('message')) {
this.eventListeners.get('message')(message, isBinary);
}
}
}
module.exports = MockWebsocket;

View File

@@ -0,0 +1,198 @@
const test = require('tape');
const sinon = require('sinon');
const proxyquire = require("proxyquire");
proxyquire.noCallThru();
const MockWebsocket = require('./ws-mock')
const logger = require('pino')({level: process.env.JAMBONES_LOGLEVEL || 'error'});
const BaseRequestor = proxyquire(
"../lib/utils/base-requestor",
{
"../../": {
srf: {
locals: {
stats: {
histogram: () => {}
}
}
}
},
"@jambonz/time-series": sinon.stub()
}
);
const WsRequestor = proxyquire(
"../lib/utils/ws-requestor",
{
"./base-requestor": BaseRequestor,
"ws": MockWebsocket
}
);
test('ws success', async (t) => {
// GIVEN
const json = '[{\"verb\": \"play\",\"url\": \"silence_stream://5000\"}]';
const ws_response = {
action: ['connect'],
body: json
}
const call_sid = 'ws_success';
MockWebsocket.addJsonMapping(call_sid, ws_response);
const hook = {
url: 'ws://localhost:3000',
username: 'username',
password: 'password'
}
const params = {
callSid: call_sid
}
// WHEN
const requestor = new WsRequestor(logger, "account_sid", hook, "webhook_secret");
const result = await requestor.request('session:new',hook, params, {});
// THEN
t.ok(result == json,'ws successfully sent session:new and got initial jambonz app');
t.end();
});
test('ws close success reconnect', async (t) => {
// GIVEN
const call_sid = 'ws_closed'
const json = '[{\"verb\": \"play\",\"url\": \"silence_stream://5000\"}]';
const ws_response = {
action: ['close', 'connect'],
body: json
}
MockWebsocket.addJsonMapping(call_sid, ws_response);
const hook = {
url: 'ws://localhost:3000',
username: 'username',
password: 'password'
}
const params = {
callSid: call_sid
}
// WHEN
const requestor = new WsRequestor(logger, "account_sid", hook, "webhook_secret");
const result = await requestor.request('session:new',hook, params, {});
// THEN
t.ok(result == json,'ws successfully reconnect after close from far end');
t.end();
});
test('ws response error 1000', async (t) => {
// GIVEN
const call_sid = 'ws_terminated'
const json = '[{\"verb\": \"play\",\"url\": \"silence_stream://5000\"}]';
const ws_response = {
action: ['terminate'],
body: json
}
MockWebsocket.addJsonMapping(call_sid, ws_response);
const hook = {
url: 'ws://localhost:3000',
username: 'username',
password: 'password'
}
const params = {
callSid: call_sid
}
// WHEN
const requestor = new WsRequestor(logger, "account_sid", hook, "webhook_secret");
try {
await requestor.request('session:new',hook, params, {});
}
catch (err) {
// THEN
t.ok(err.startsWith('timeout from far end for msgid'), 'ws does not reconnect if far end closes gracefully');
t.end();
}
});
test('ws response error', async (t) => {
// GIVEN
const call_sid = 'ws_error'
const json = '[{\"verb\": \"play\",\"url\": \"silence_stream://5000\"}]';
const ws_response = {
action: ['error'],
body: json
}
MockWebsocket.addJsonMapping(call_sid, ws_response);
const hook = {
url: 'ws://localhost:3000',
username: 'username',
password: 'password'
}
const params = {
callSid: call_sid
}
// WHEN
const requestor = new WsRequestor(logger, "account_sid", hook, "webhook_secret");
try {
await requestor.request('session:new',hook, params, {});
}
catch (err) {
// THEN
t.ok(err.startsWith('timeout from far end for msgid'), 'ws request times out after a socket error');
t.end();
}
});
test('ws unexpected-response', async (t) => {
// GIVEN
const call_sid = 'ws_unexpected-response'
const json = '[{\"verb\": \"play\",\"url\": \"silence_stream://5000\"}]';
const ws_response = {
action: ['unexpected-response'],
body: json
}
MockWebsocket.addJsonMapping(call_sid, ws_response);
const hook = {
url: 'ws://localhost:3000',
username: 'username',
password: 'password'
}
const params = {
callSid: call_sid
}
// WHEN
const requestor = new WsRequestor(logger, "account_sid", hook, "webhook_secret");
try {
await requestor.request('session:new',hook, params, {});
}
catch (err) {
// THEN
t.ok(err.code === 'ERR_ASSERTION', 'ws request fails on unexpected-response');
t.end();
}
});

View File

@@ -22,7 +22,7 @@ module.exports = (serviceName) => {
});
let exporter;
if (process.env.OTEL_EXPORTER_JAEGER_AGENT_HOST) {
if (process.env.OTEL_EXPORTER_JAEGER_AGENT_HOST || process.env.OTEL_EXPORTER_JAEGER_ENDPOINT) {
exporter = new JaegerExporter();
}
else if (process.env.OTEL_EXPORTER_ZIPKIN_URL) {