Compare commits

...

163 Commits

Author SHA1 Message Date
Dave Horton
119a49725a avoid a pre-caching attempt if synth settings change 2024-01-13 12:21:54 -05:00
Dave Horton
5b1c2d3052 more fixes 2024-01-12 16:22:23 -05:00
Dave Horton
21f03dd13a fix for establishing vendor etc 2024-01-12 16:19:59 -05:00
Dave Horton
c2eef50512 wip 2024-01-12 16:13:36 -05:00
dependabot[bot]
d3d494191f Bump follow-redirects from 1.15.2 to 1.15.4 (#603)
Bumps [follow-redirects](https://github.com/follow-redirects/follow-redirects) from 1.15.2 to 1.15.4.
- [Release notes](https://github.com/follow-redirects/follow-redirects/releases)
- [Commits](https://github.com/follow-redirects/follow-redirects/compare/v1.15.2...v1.15.4)

---
updated-dependencies:
- dependency-name: follow-redirects
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-01-10 09:24:36 -05:00
Paulo Telles
859e816a8e issue #605 (#606)
Co-authored-by: p.souza <p.souza@cognigy.com>
2024-01-10 09:23:09 -05:00
Hoan Luu Huu
29bbcf1be0 add user-agent to http and ws requestor (#602)
* add user-agent to http and ws requestor

* wip

* fix review comment
2024-01-10 08:54:46 -05:00
Dave Horton
6f6d7a06b0 Revert "Fix/dual legs transcribe race condition (#593)" (#600)
This reverts commit 9d70ed96a1.
2024-01-09 08:23:13 -05:00
Anton Voylenko
a2ba80a9a3 Snake case customer data for refer (#598)
* update envs

* fix refer customer data

* use data from function
2024-01-08 19:15:08 -05:00
Hoan Luu Huu
9d70ed96a1 Fix/dual legs transcribe race condition (#593)
* fs only stop one of bugname when transcribe is used for dual legs

* wip

* fix review comment

* wip

* wip
2024-01-06 19:12:51 -05:00
Hoan Luu Huu
8173a306f7 fix stt default vendor cannot be mapped to correct value (#588) 2024-01-04 07:34:30 -05:00
Hoan Luu Huu
2e69630544 fix siprec to remap sdp base on participant label (#587)
* fix siprec to remap sdp base on participant label

* fix
2024-01-03 11:10:31 -05:00
Hoan Luu Huu
15829139c1 fix hangup headers (#583)
* fix hangup headers

* no need for callback

* fix test failure

---------

Co-authored-by: Dave Horton <daveh@beachdognet.com>
2023-12-28 14:59:59 -05:00
Dave Horton
2c48083c26 fix to be more precise about removing custom event handlers so that w… (#580)
* fix to be more precise about removing custom event handlers so that when we stop a gather we dont also inadvertently stop a background transcribe as well

* test fixes

* fix: endpointing=false was being ignored for Deepgram
2023-12-28 11:00:27 -05:00
Hoan Luu Huu
9d8291f892 Transcribe background task (#576)
* first draft

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* update verb-specification

* fix comment reviews

* provide bugname when stopping transcription, otherwise it will continue

---------

Co-authored-by: Dave Horton <daveh@beachdognet.com>
2023-12-26 21:50:51 -05:00
Hoan Luu Huu
3e8474867f support deepgram (#579)
* support deepgram

* update speech utils
2023-12-26 07:46:35 -05:00
Hoan Luu Huu
9eb315ecd6 fix config and stt task for altLanguages (#575)
* fix config and stt task for altLanguages

* clear freeswitch channel var when altLanguages is empty list
2023-12-25 22:21:34 -05:00
Hoan Luu Huu
2ec1460b4e nuance transcribe should have utteranceDetectionMode=multiple (#574)
* nuance transcribe should have utteranceDetectionMode=multiple

* nuance transcribe should have utteranceDetectionMode=multiple
2023-12-20 18:44:08 -05:00
Hoan Luu Huu
e30782ea7b fix riva transcribe issue (#570) 2023-12-20 10:25:17 -05:00
Hoan Luu Huu
83c1c07eb0 fix enqueue waithook on ws hold the session (#572)
* fix enqueue waithook on ws hold the session

* wip
2023-12-19 09:42:20 -05:00
Dave Horton
47fbc1a4a4 allow custom speech with no auth token (#571) 2023-12-18 14:51:34 -05:00
Dave Horton
7474a359a4 update to drachtio-fsmrf@3.0.33 2023-12-18 14:28:25 -05:00
Hoan Luu Huu
30977b309c punctuation for microsoft (#566)
* punctuation for microsoft

* wip
2023-12-18 08:38:05 -05:00
Hoan Luu Huu
bcb4bf43bf fix altLanguages (#567)
* fix altLanguages

* adding testcase
2023-12-16 08:35:09 -05:00
Dave Horton
077460d0e2 Feat/multiset envs (#569)
* update to drachtio-fsmrf@3.0.31

* fix prev commits
2023-12-15 15:39:15 -05:00
Matt Hertogs
6629b45671 Fix typo in contributing.md (#555) 2023-12-11 08:37:36 -05:00
Hoan Luu Huu
353a9c1917 Feat/complete adult call (#563)
* feat client direct call from registration webhook (#562)

* feat client direct call from registration webhook

* feat client direct call from registration webhook

* wip

* fix cannot lcc complete adult call

* wip

* wip
2023-12-11 08:37:01 -05:00
Hoan Luu Huu
230fe9ea11 feat client direct call from registration webhook (#562)
* feat client direct call from registration webhook

* feat client direct call from registration webhook

* wip
2023-12-11 08:36:19 -05:00
Dave Horton
bb81f9f3da minor logging 2023-12-07 13:00:25 -05:00
Hoan Luu Huu
a7673c1819 update speech utils 0.0.31 (#560) 2023-12-05 20:47:16 -05:00
Hoan Luu Huu
59248c7638 fix config verb can override recognizer *Options (#558)
* fix config verb can override recognizer *Options

* wip

* clean up gather and transcribe

* fix failing testcase

* fix failing testcase

* fix failing testcase
2023-12-05 20:27:55 -05:00
Markus Frindt
46755f909c do not escape req.body.tag (#559)
Co-authored-by: Markus Frindt <m.frindt@cognigy.com>
2023-12-05 10:06:04 -05:00
Hoan Luu Huu
4273196447 Fix/interdigit timeout (#554)
* fix interdtmf timeout

* wip

* wip

* wip

* remove all speech listener if dtmf is received
2023-12-04 08:16:42 -05:00
Dave Horton
e5b60ca9b0 trying to pass redis client to speech utils causing restart (#557) 2023-12-01 18:26:33 -05:00
Markus Frindt
86a14daf79 Update say task and add possibility to use elevenlabs options (#556)
* Update say task and add possibility to use elevenlabs options from synthesizer

* revert ms change

* fix contdition for alerting

---------

Co-authored-by: Markus Frindt <m.frindt@cognigy.com>
2023-12-01 09:51:22 -05:00
Hoan Luu Huu
c66ad39001 sending DTMF inband from updateCall/command (#536)
* sending DTMF inband from updateCall/command

* wip

* wip
2023-12-01 09:05:14 -05:00
Hoan Luu Huu
0a0cbd57ba support elevenlabs options (#553)
* support elevenlabs options

* elevenlabs options from synthezier

* wip

* fix
2023-11-30 09:28:12 -05:00
Hoan Luu Huu
eb2d90ffaa fix release freeswitch media properly (#550)
* fix release freeswitch media properly

* if a leg is opus, modify b leg offer opus first

* if a leg is opus, modify b leg offer opus first

* wip

* wip

* fix review comments

* fix review comments

* fix review comments
2023-11-29 10:17:15 -05:00
Anton Voylenko
454ff7d1b8 update envs (#551) 2023-11-28 14:54:39 -05:00
Markus Frindt
7e349fe4e5 Add a custom sanitization function for the "Tag" property (#546)
* Add a custom sanitization function for the tag property in create call body

* remove tag.* property from schema

* do not escape URLs but validate them

* apply suggestions from PR review

---------

Co-authored-by: Markus Frindt <m.frindt@cognigy.com>
2023-11-27 13:03:47 -05:00
Hoan Luu Huu
9478f3a1b8 fix disable gather timeout if interDigitTimeout enabled. (#538) 2023-11-21 19:09:09 -06:00
Hoan Luu Huu
a3c241b569 update drachtio-fsmrf new version for fixing amd and transcribe run parallel (#543) 2023-11-21 11:12:45 -06:00
Anton Voylenko
5a68563f96 live call control for tag (#539) 2023-11-17 08:48:17 -05:00
Hoan Luu Huu
1cdd0cf611 feat Audult call session should have its own requestor (#535)
* feat Audult call session should have its own requestor

* fix eslint

* fix eslint

* if user authenticate from http server instead of databse

* wip

* fix custom STT

* fix custom STT

* fix custom STT
2023-11-16 08:22:35 -05:00
Dave Horton
9ae4b04fc5 update badge 2023-11-14 10:27:16 -05:00
Anton Voylenko
170c3c7ec4 rest:dial: Support sipindialog hook (#531)
* support sipindialog hook

* rename sipindialog method
2023-11-14 09:16:08 -05:00
Dave Horton
7c36a08852 Feat/dial via sip proxy (#532)
* add support for dialing via proxy with target.proxy

* update verb-specifications with support for target.proxy
2023-11-14 09:06:43 -05:00
Hoan Luu Huu
633237da1b let realtimedb-help build configuration from env vars (#526)
* let realtimedb-help build configuration from env vars

* update speech-utils version
2023-11-14 08:57:50 -05:00
Dave Horton
708c2c661f 0.8.5 2023-11-09 12:36:04 -05:00
Hoan Luu Huu
87632c549e feat support Whisper TTS (#523)
* feat support openai

* update speech utils version
2023-11-09 09:51:20 -05:00
Hoan Luu Huu
31559cbb3b user restriction (#520) 2023-11-08 12:39:56 -05:00
Dave Horton
1156bae2de fix for #521 - allow pause in confirmHook applications (#522) 2023-11-07 09:46:41 -05:00
Dave Horton
c6c599ab99 fix dialogflow tts bug (tts not working due to 'default' being assigned to label) and update to drachtio-srf with fix for parsing sip:1234@feature-server (#518) 2023-11-06 15:05:17 -05:00
Dave Horton
4d0f0fe75f prevent exception referencing user.uri (#517) 2023-11-06 13:42:37 -05:00
Dave Horton
6d625d87ad Feat/assemblyai testing (#516)
* handle errors from assemblyai

* wip

* fix alert

* normalizeAssemblyai
2023-11-02 17:05:28 -04:00
Hoan Luu Huu
7fee2ba2dc feat assembly (#515)
* fix

* wip

* wip

* wip

* wip

* fix review comments
2023-11-02 09:25:04 -04:00
Hoan Luu Huu
4b3234f4e4 feat disable direct p2p call by env variable (#514)
* feat disable direct p2p call by env variable

* wip

* wip
2023-11-01 09:59:01 -04:00
Dave Horton
6b9f6a7d90 if dial.confirmHook returns empty array do not create a confirmSession (#513) 2023-11-01 08:47:41 -04:00
Dave Horton
3cdf568fb6 fix logging on child leg after REFER received on A leg (#512) 2023-10-31 20:55:39 -04:00
Hoan Luu Huu
e73bef4af0 google custom voice (#506)
* google custom voice

* fixed

* wip

* wip

* wip

* wip
2023-10-30 20:10:30 -04:00
Anton Voylenko
42d1069617 allow tag verb for waithook (#510) 2023-10-30 18:41:49 -04:00
Dave Horton
e5772d6b85 allow dial referHook to return application to execute on the other le… (#505)
* allow dial referHook to return application to execute on the other leg; fixes #504

* fix session tracking

* minor logging

* minor
2023-10-30 13:59:54 -04:00
Dave Horton
f43a5c1491 deepgram: rework continuous asr, and resolve on speech_final not is_f… (#501)
* deepgram: rework continuous asr, and resolve on speech_final not is_final (wip)

* wip

* deepgram: empty final transcript should trigger resolve with speech if we have buffered transcripts

* wip

* fixes for deepgram compiling multiple transcripts

* test deepgram utteranceEndMs

* more handling of utteranceEndMs

* wip

* better handling of digit strings collected over multiple deepgram responses

* wip

* add support for deepgramOptions.shortUtterance which triggers off of is_final instead of speech_final

* apply deepgram fixes to transcribe

* cleanup continnuous asr

* more continuous asr fixes for deepgram

* update to verb-specifications for handling SttTask properties

* set log level for tests back to error
2023-10-30 13:57:25 -04:00
Dave Horton
67f8f7181a #508 - add support for azureOptions.speechSegmentationSilenceTimeoutMs (#509)
* #508 - add support for azureOptions.speechSegmentationSilenceTimeoutMs

* update verb specs
2023-10-30 13:10:31 -04:00
Dave Horton
ddab4d7548 update to verb-specifications for handling SttTask properties 2023-10-28 14:20:26 -04:00
Dave Horton
916d988dbd add support for deepgram smart_format option (#500)
* add support for deepgram smart_format option

* handle nonexistent hints
2023-10-25 14:29:38 -04:00
Hoan Luu Huu
d6b74c3da8 Fix/dynamic apps (#499)
* refactor utilization jambonz app for queue and user

* refactor utilization jambonz app for queue and user

* refactor utilization jambonz app for queue and user

* refactor utilization jambonz app for queue and user

* refactor utilization jambonz app for queue and user
2023-10-24 20:16:01 -04:00
Hoan Luu Huu
3171b138f9 Feat/support device call (#495)
* support dequeue from registered user

* support dequeue from registered user

* wip

* wip

* wip

* wip

* fix review comments
2023-10-24 08:54:59 -04:00
Dave Horton
168b4dc051 fix #493: remove nulls from cerateCall payload in order not to trip up validation (#494) 2023-10-23 10:58:28 -04:00
Hoan Luu Huu
cf0f4d405f fix device call prioritize call to registered client first (#492) 2023-10-23 07:51:16 -04:00
Hoan Luu Huu
24fccbdae5 Feat/devices call other (#491)
* calls between clients on same domain

* wip

* wip

* wip

* wip
2023-10-20 13:45:13 +02:00
Hoan Luu Huu
7992bc6ca0 feat add http proxy to undicy (#487)
* feat add http proxy to undicy

* fix typo

* fix typo

* add http proxy testcase
2023-10-20 08:36:24 +02:00
Hoan Luu Huu
4b7b0e309b say verb support play audio in array (#482)
* say verb support play audio in array

* fixed review comments

* added separated testcase
2023-10-20 08:14:57 +02:00
dependabot[bot]
1ff4f01d64 Bump undici from 5.22.1 to 5.26.2 (#486)
Bumps [undici](https://github.com/nodejs/undici) from 5.22.1 to 5.26.2.
- [Release notes](https://github.com/nodejs/undici/releases)
- [Commits](https://github.com/nodejs/undici/compare/v5.22.1...v5.26.2)

---
updated-dependencies:
- dependency-name: undici
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-10-19 20:54:28 +02:00
dependabot[bot]
4a5dbb0115 Bump @babel/traverse from 7.22.5 to 7.23.2 (#490)
Bumps [@babel/traverse](https://github.com/babel/babel/tree/HEAD/packages/babel-traverse) from 7.22.5 to 7.23.2.
- [Release notes](https://github.com/babel/babel/releases)
- [Changelog](https://github.com/babel/babel/blob/main/CHANGELOG.md)
- [Commits](https://github.com/babel/babel/commits/v7.23.2/packages/babel-traverse)

---
updated-dependencies:
- dependency-name: "@babel/traverse"
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-10-19 20:45:37 +02:00
Markus Frindt
0a2808e64e Add schema validation to create-call req.body, validate app_json via verb-specifications (#488)
* Add schema for create-call, validate app_json via verb-specifications

* trigger new build

---------

Co-authored-by: Markus Frindt <m.frindt@cognigy.com>
2023-10-19 20:37:30 +02:00
Hoan Luu Huu
320baf4ac8 update speech version (#485)
* update speech version

* create elevenlabs credential for synthesis

* create elevenlabs credential for synthesis
2023-10-14 20:51:19 +02:00
Dave Horton
a92ea9c5da ensure recognizer.altLanguages is an empty array if not otherwise specified 2023-10-12 10:01:14 -04:00
Hoan Luu Huu
4ffa9f915b feat, add sip hedaer to sip info hook (#481) 2023-10-12 09:10:26 -04:00
Hoan Luu Huu
2285ec5329 fix transcribe credential (#480) 2023-10-10 21:06:46 -04:00
Hoan Luu Huu
09ae083c9a fix: transcribe 2 channels (#479)
* fix: transcribe 2 channels

* fix

* fix
2023-10-10 08:46:50 -04:00
Hoan Luu Huu
6a3e12e293 feat support refer teluri (#476)
* feat support refer teluri

* update drachtio source code
2023-10-10 08:42:52 -04:00
Hoan Luu Huu
48f2c57ae2 feat sip indialog actionHook (#477)
* feat sip indialog actionHook

* feat sip indialog actionHook

* feat sip indialog actionHook

* feat sip indialog actionHook

* update verb spec

* fix

* fix

* rename function as required _onRequestWithinDialog
2023-10-10 08:41:32 -04:00
Dave Horton
f651cfa0b7 prune unused function (#478) 2023-10-09 15:03:27 -04:00
Dave Horton
cb78627e66 tag is allowed in siprec (#475) 2023-10-06 08:53:13 -04:00
Hoan Luu Huu
ae9386791f fix cobalt model (#469) 2023-09-26 07:41:43 -04:00
Dave Horton
1aa0b07b8f fix: deepgram redact option 2023-09-22 08:13:54 -04:00
Hoan Luu Huu
4e916acf6c fixed clean dualEp for rest dial dual_stream=true (#468)
* fixed clean dualEp for rest dial dual_stream=true

* fix

* fix

* fix

* fix

* fix
2023-09-22 08:12:34 -04:00
Hoan Luu Huu
991fff3386 dual streams (#467)
* dual streams

* dual streams

* dual streams

* dual streams

* use sdp transform
2023-09-21 07:57:30 -04:00
Dave Horton
76cf4e527f minor: allow hints not to be specified 2023-09-18 10:32:07 -04:00
Dave Horton
d7affddd85 minor fix to create-call: pass cs param to restDial.kill task 2023-09-14 09:10:24 -04:00
Anton Voylenko
d42798e0b4 validate recording env (#466) 2023-09-14 07:40:32 -04:00
Dave Horton
6a8a2aa955 update to provide hostport and model on the command line not via env (#465) 2023-09-13 13:53:00 -04:00
Dave Horton
6587b1f758 include fallback speech selections, if any, in the initial webhook (#464) 2023-09-13 10:19:37 -04:00
Hoan Luu Huu
c29def92e8 feat fast recognition (#461)
* feat fast recognition

* don't use buffer transcript for fast recognition

* update verb specification

* fix merge conflict

---------

Co-authored-by: Dave Horton <daveh@beachdognet.com>
2023-09-13 09:59:12 -04:00
Dave Horton
a1793ac359 Speech vendor/cobalt (#463)
* initial changes for cobalt speech

* wip

* wip

* update to drachtio-fsmrf that supports cobalt

* update to verb-specifications with cobalt speech support

* more wip

* lint

* use node 18 with gh actions

* support for compiling cobalt hints

* fix bug in uuid_cobalt_compile_context

* update verb-specifications

* remove repeated code

* cobalt support for transcribe

* update to verb specs
2023-09-13 09:47:30 -04:00
Hoan Luu Huu
d220733dea add JAMBONZ_DISABLE_DIAL_PAI_HEADER env var (#460) 2023-09-08 20:02:20 -04:00
Hoan Luu Huu
a09605fc51 update speech util version (#458) 2023-09-08 09:58:09 -04:00
Paulo Telles
7f59bba634 issue #456 error on validating hints using microsoft (#454)
* error ono validating hints using microsoft

* fix lint

* fix lint 2

* remove try catch

---------

Co-authored-by: p.souza <p.souza@cognigy.com>
2023-09-08 09:21:44 -04:00
Hoan Luu Huu
1477605e66 fix custom stt transcribe (#457) 2023-09-08 08:09:03 -04:00
Dave Horton
4f0ab83f5f fix #445 (#452) 2023-08-31 08:03:45 -04:00
Hoan Luu Huu
2935574440 feat pause resume transcribe (#438)
* feat pause resume transcribe

* wip

* fix jslint

* update fsmrf
2023-08-30 22:43:50 -04:00
Hoan Luu Huu
c10c561ba1 update speech util 0.0.20 (#446) 2023-08-30 22:43:08 -04:00
Hoan Luu Huu
2ccd33e212 feat azure fromHost (#416)
* feat azure fromHost

* wip

* wip

* wip
2023-08-30 21:04:53 -04:00
Hoan Luu Huu
a03baa8461 update serviceUrl to the success port (#439)
* update serviceUrl to the success port

* correct log
2023-08-30 09:23:35 -04:00
Hoan Luu Huu
90df33a15c fix choose speech dedential by label (#441)
* fix choose speech dedential by label

* wip
2023-08-30 09:22:33 -04:00
two56
a15479e6dc parseInt JAMBONZ_MYSQL_REFRESH_TTL (#443)
Co-authored-by: Matt Preskett <matt.preskett@netcall.com>
2023-08-30 07:27:05 -04:00
Hoan Luu Huu
dd74cb2cc6 update vulnerable xml2j version (#440) 2023-08-28 18:58:11 -04:00
Hoan Luu Huu
7a02c36bad fix missing speech vendor in telemetry metric (#437) 2023-08-28 07:45:00 -04:00
Dave Horton
78fd4549af minor logging 2023-08-22 13:15:06 -04:00
Hoan Luu Huu
b1ecf069bf Feat/fallback speech 02 (#429)
* feat fallback speech

* feat fallback speech

* feat fallback speech

* gather verb

* fix

* wip

* wip

* wip

* wip

* wip

* transcribe

* transcribe

* fix

* wip

* wip

* wip

* wip

* fix

* wip

* fix
2023-08-22 09:22:01 -04:00
Hoan Luu Huu
6f0dbef433 feat moh (#423)
* feat moh

* feat moh

* fix typo

* fix typo

* fix

* fix

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* git commit -a -m wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* fix

* fix

* fix

* fix

* wip

* rebase

* fix

* fix
2023-08-22 08:09:18 -04:00
two56
32dcb2adfa Cancel the transaction instead of killing the task (#431)
Co-authored-by: Matt Preskett <matt.preskett@netcall.com>
Co-authored-by: Hoan Luu Huu <110280845+xquanluu@users.noreply.github.com>
2023-08-21 07:46:53 -04:00
Dave Horton
ee514f7459 update references from jambonz.us to jambonz.cloud (#433) 2023-08-19 14:42:35 -04:00
Anton Voylenko
4cfea0707a fix participant join options (#432)
* fix participant join options

* bump verb specifications
2023-08-19 14:18:13 -04:00
Hoan Luu Huu
f8c5abe9e9 feat: multi speech credential diff labels but same vendor (#426)
* feat: multi speech credential diff labels but same vendor

* update sql

* fix

* fix

* fix jslint

* fix review comment

* update verb spec version
2023-08-15 08:57:49 -04:00
Dave Horton
ad722a55ee generate trace id before outdial so we can include it in custom header (#418)
* generate trace id before outdial so we can include it in custom header

* logging

* logging

* fix #420 race condition on rest outdial when ws is used

* revert unnecessary logging change
2023-08-08 13:00:34 -04:00
Hoan Luu Huu
82939214a2 update stats-collector version (#421) 2023-08-07 21:22:10 -04:00
Dave Horton
043a171f41 remove log message 2023-08-07 15:22:03 -04:00
Dave Horton
c8e9b34b53 fix typo that caused record to fail on rest calls 2023-08-07 14:46:51 -04:00
Hoan Luu Huu
d7dcdb1d0c Continuos ASR for transcribe (#398)
* asrTimeout

* fix jslint

* change log

* fix interrim
2023-08-03 09:49:44 -04:00
Dave Horton
fbd0782258 #388 - support custom speech vendor in transcribe verb (#414)
Co-authored-by: Hoan Luu Huu <110280845+xquanluu@users.noreply.github.com>
2023-08-02 19:06:31 -04:00
Fábio Gomes
38f9329b12 When recordings are enabled, disable bidirectional audio on jambonz-session-record (#415) 2023-08-02 14:21:59 -04:00
Dave Horton
d4bfdf0916 #412 - dont delay sending call status when stopping background listen (#413) 2023-08-02 12:50:13 -04:00
Dave Horton
9203deef0f fix bug in prev commit 2023-08-02 10:27:50 -04:00
Dave Horton
48b182c891 Fix/rest outdial failure session hangs (#411)
* fix #410

* on rest outdial failure, if remote end closed gracefully don't wait for a reconnection
2023-08-01 12:59:30 -04:00
Dave Horton
e8e987cb9d Fix/snake case customer data issue 406 (#409)
* revert recent change on silence trimming

* fix issue with incorrectly snake-casing customer data (#406)
2023-07-27 22:31:43 -04:00
Dave Horton
38ea9e7411 update to speech-utils@0.0.18 which ignores trimming of silence on azure ssml audio 2023-07-25 07:51:46 -04:00
Hoan Luu Huu
7b11a56a53 feat siprec custom header (#400)
* feat siprec custom header

* wip

* update verb specification

* add newline to info siprec body

* add newline to info siprec body
2023-07-20 09:10:41 -04:00
Dave Horton
66305b5aea feature: optionally trim silence from azure tts (#399) 2023-07-19 10:36:24 -04:00
Dave Horton
6793bbf330 fix exception that appears in logs if session ends before last call status update 2023-07-18 13:20:53 -04:00
Hoan Luu Huu
d8543f73f2 execute status callback async (#394)
* execute status callback async

* fix review comment

* revert fix review comment
2023-07-18 12:40:57 -04:00
Hoan Luu Huu
e1dad569dc Fix/background listen tag (#391)
* fix background listen send customerData to api server

* test listen

* fix review comment
2023-07-11 16:03:32 +01:00
Hoan Luu Huu
643bee48c5 feat multi srs (#381) 2023-07-05 08:16:59 +01:00
Dave Horton
487bfd90d9 0.8.4 2023-06-28 09:23:40 +01:00
Hoan Luu Huu
810f6eb695 fix aws-sdk v3 (#387)
* fix aws-sdk v3

* fix jslint

* fix jslint

* fix aws response parser
2023-06-28 09:20:43 +01:00
Hoan Luu Huu
62bc6b4bac feat: add fs service url to sbc ping option (#383)
* feat multi srs

* add fs service URL to SBC ping option
2023-06-23 11:13:08 +01:00
two56
91fe3ceb06 Clear conference details in both Jambonz and FreeSWITCH (#350)
Co-authored-by: Matt Preskett <matt.preskett@netcall.com>
2023-06-14 15:35:04 -04:00
Dave Horton
a7d07ce7ae add channel to transcribe, gather, and dtmf spans (#376) 2023-06-13 09:12:26 -04:00
Dave Horton
7cd6c27f90 update deps (#375)
* update deps

* added fixes
2023-06-09 15:40:50 -04:00
Hoan Luu Huu
aad24744f3 feat: record all calls (#352)
* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* fix jslint

* fix

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* fix: add file ext

* fix: record format

* fix outbound

* update to drachtio-fsmrf with support for multiple recording streams on a call

* enable DTMF during background listen recording

* fix merge commit

---------

Co-authored-by: Dave Horton <daveh@beachdognet.com>
2023-06-09 14:54:53 -04:00
Dave Horton
ab0452879e add X-Application-Sid in outdials so it ends up in cdr (#374) 2023-06-09 12:57:35 -04:00
Dave Horton
ffdb7a0bb5 create transcribe and listen child spans properly for dial (#373)
* create transcribe and listen child spans properly for dial

* fix prev commit: proper time to start span is in dial exec
2023-06-08 13:57:10 -04:00
Hoan Luu Huu
354818b974 feat: sentinel configuraiton (#372)
* feat: sentinel configuraiton

* fixed

* fix jslint
2023-06-07 10:40:31 -04:00
Dave Horton
30beb9c093 transcribe: default hints and altLanguages (#371) 2023-06-06 13:41:31 -04:00
Dave Horton
b978b3bc2f add optional ws ping (#370) 2023-06-05 11:00:14 -04:00
Hoan Luu Huu
a1c38f8a2e fix: queue length in account event hook (#369) 2023-06-05 08:41:10 -04:00
Dave Horton
37f3668016 update to speech-utils 0.0.15 2023-06-03 09:19:26 -04:00
Dave Horton
55935e3f35 skip flaky test 2023-06-03 09:09:49 -04:00
Hoan Luu Huu
b7070121ee feat: advanced queues (#362)
* feat: advanced queues

* feat: advanced queues

* feat: advanced queues

* feat: advanced queues

* update verb specification

* add testcase

* add testcase

* add testcase

* updte testcase

* fixed

* update queue

* fix: fix waithook params

* fix: fix waithook params

* fix: performQueueWebhook with correct members length

* fix merge conflict

* debug log

* debug listen test

* debug listen test

* debug listen test

* debug listen test

* debug listen test

* debug listen issue

* feat: add tts on account level

---------

Co-authored-by: Dave Horton <daveh@beachdognet.com>
2023-06-03 08:16:05 -04:00
Dave Horton
01260ad054 transcribe: create otel child spans for each stt result that is returned in a long-running transcribe (#368) 2023-06-02 14:25:32 -04:00
Dave Horton
bd911c88f9 in verb transcribe, recognizer should be optional (default to application default) but transcriptionHook mudt be mandatory (#367) 2023-06-02 13:15:32 -04:00
Hoan Luu Huu
d96712a8d6 feat: add tts on account level (#366) 2023-06-02 09:31:28 -04:00
Dave Horton
fdd8f7e743 Fix/logger reference (#365)
* fix logger reference

* fix undefined logger reference
2023-06-01 09:46:08 -04:00
Hoan Luu Huu
bb852600c0 fix: app_json is used for outbound call (#358)
* fix: app_json is used for outbound call

* fix jslint

* fix: app_json setter in rest:dial task
2023-06-01 08:52:53 -04:00
Quan HL
210bbcbdbf forward inbound carrier sid to outbound dial 2023-05-29 09:10:33 -04:00
Dave Horton
5910dbf0d3 fix for carrier selection on dial based on calling number 2023-05-26 12:27:51 -04:00
Dave Horton
90468ffe48 handle missing callerId property for anonymous calls 2023-05-25 13:28:42 -04:00
Dave Horton
863c4dfa34 fix docker publish build gh action 2023-05-25 13:19:46 -04:00
Dave Horton
484be8442c fix for #359 - selection of outbound carrier based on calling number 2023-05-25 13:19:46 -04:00
Dave Horton
7393e3bcb7 standardize on passing .query args as array (#356) 2023-05-22 09:56:05 -04:00
Hoan Luu Huu
32a84b7b19 feat: rest:dial amd (#339)
Add support for sending 'amd' property in createCall REST API and also added support for using any of the speech vendors for STT
---------

Co-authored-by: Dave Horton <daveh@beachdognet.com>
2023-05-16 16:20:08 -04:00
Dave Horton
6933e82d46 fix docker build 2023-05-15 14:04:29 -04:00
71 changed files with 13076 additions and 3376 deletions


@@ -9,7 +9,7 @@ jobs:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 16
node-version: 18
- run: npm ci
- run: npm run jslint
- run: docker pull drachtio/sipp


@@ -2,6 +2,8 @@ name: Docker
on:
push:
branches:
- main
tags:
- '*'
@@ -18,7 +20,7 @@ jobs:
- name: prepare tag
id: prepare_tag
run: |
IMAGE_ID=$GITHUB_REPOSITORY
IMAGE_ID=jambonz/feature-server
# Strip git ref prefix from version
VERSION=$(echo "${{ github.ref }}" | sed -e 's,.*/\(.*\),\1,')


@@ -1,4 +1,4 @@
# jambones-feature-server ![Build Status](https://github.com/jambonz/jambonz-feature-server/workflows/CI/badge.svg)
# jambonz-feature-server [![CI](https://github.com/jambonz/jambonz-feature-server/actions/workflows/build.yml/badge.svg)](https://github.com/jambonz/jambonz-feature-server/actions/workflows/build.yml)
This application implements the core feature server of the jambones platform.
@@ -37,6 +37,11 @@ Configuration is provided via environment variables:
|STATS_PORT| listening port for metrics host|no|
|STATS_PROTOCOL| 'tcp' or 'udp'|no|
|STATS_TELEGRAF| if 1, metrics will be generated in telegraf format|no|
|JAMBONZ_RECORD_WS_BASE_URL| recording websocket URL to send the recording audio|no|
|JAMBONZ_RECORD_WS_USERNAME| recording websocket username|no|
|JAMBONZ_RECORD_WS_PASSWORD| recording websocket password|no|
|ANCHOR_MEDIA_ALWAYS| keep media on media server|no|
|JAMBONZ_DISABLE_DIAL_PAI_HEADER| control P-Asserted-Identity header on B-Leg|no|
### running under pm2
Typically, this application runs under [pm2](https://pm2.io) using an [ecosystem.config.js](https://pm2.keymetrics.io/docs/usage/application-declaration/) file similar to this:
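The README's actual example file is not included in this excerpt; what follows is a minimal illustrative sketch of an ecosystem.config.js, not the repository's own. The script path and all values are assumptions; only the environment variable names are taken from the table above and from the config changes later in this diff.

module.exports = {
  apps: [{
    name: 'jambonz-feature-server',
    script: 'app.js',                 // entry point assumed from the app.js diff below
    exec_mode: 'fork',
    instances: 1,
    autorestart: true,
    watch: false,
    env: {
      NODE_ENV: 'production',
      // env vars documented in the README table above (illustrative values)
      STATS_PORT: 8125,
      STATS_PROTOCOL: 'udp',
      STATS_TELEGRAF: 1,
      JAMBONZ_RECORD_WS_BASE_URL: 'wss://recording.example.com/audio',
      JAMBONZ_RECORD_WS_USERNAME: 'recorder',
      JAMBONZ_RECORD_WS_PASSWORD: 'changeme',
      ANCHOR_MEDIA_ALWAYS: 1,
      JAMBONZ_DISABLE_DIAL_PAI_HEADER: 1
    }
  }]
};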

app.js (11 changed lines)

@@ -20,7 +20,9 @@ const tracer = require('./tracer')(JAMBONES_OTEL_SERVICE_NAME);
const api = require('@opentelemetry/api');
srf.locals = {...srf.locals, otel: {tracer, api}};
const opts = {level: JAMBONES_LOGLEVEL};
const opts = {
level: JAMBONES_LOGLEVEL
};
const pino = require('pino');
const logger = pino(opts, pino.destination({sync: false}));
const {LifeCycleEvents, FS_UUID_SET_NAME} = require('./lib/utils/constants');
@@ -107,7 +109,7 @@ const disconnect = () => {
httpServer?.on('close', resolve);
httpServer?.close();
srf.disconnect();
srf.locals.mediaservers.forEach((ms) => ms.disconnect());
srf.locals.mediaservers?.forEach((ms) => ms.disconnect());
});
};
@@ -118,10 +120,15 @@ function handle(signal) {
srf.locals.disabled = true;
logger.info(`got signal ${signal}`);
const setName = `${(JAMBONES_CLUSTER_ID || 'default')}:active-fs`;
const fsServiceUrlSetName = `${(JAMBONES_CLUSTER_ID || 'default')}:fs-service-url`;
if (setName && srf.locals.localSipAddress) {
logger.info(`got signal ${signal}, removing ${srf.locals.localSipAddress} from set ${setName}`);
removeFromSet(setName, srf.locals.localSipAddress);
}
if (fsServiceUrlSetName && srf.locals.serviceUrl) {
logger.info(`got signal ${signal}, removing ${srf.locals.serviceUrl} from set ${fsServiceUrlSetName}`);
removeFromSet(fsServiceUrlSetName, srf.locals.serviceUrl);
}
removeFromSet(FS_UUID_SET_NAME, srf.locals.fsUUID);
if (K8S) {
srf.locals.lifecycleEmitter.operationalState = LifeCycleEvents.ScaleIn;

bin/k8s-pre-stop-hook.js (Executable file → Normal file, 0 changed lines)

@@ -45,7 +45,7 @@ The GCP credential is the JSON service key in stringified format.
#### Install Docker
The test suite ralso equires [Docker](https://www.docker.com/) and docker-compose to be installed on your laptop. Docker is used to set up a network with all of the elements required to test the jambonz-feature-server in a black-box type of fashion.
The test suite also requires [Docker](https://www.docker.com/) and docker-compose to be installed on your laptop. Docker is used to set up a network with all of the elements required to test the jambonz-feature-server in a black-box type of fashion.
Once you have docker installed, you can optionally make sure everything Docker-wise is working properly by running this command from the project folder:


@@ -8,7 +8,12 @@ const checkEnvs = () => {
assert.ok(process.env.DRACHTIO_PORT || process.env.DRACHTIO_HOST, 'missing DRACHTIO_PORT env var');
assert.ok(process.env.DRACHTIO_SECRET, 'missing DRACHTIO_SECRET env var');
assert.ok(process.env.JAMBONES_FREESWITCH, 'missing JAMBONES_FREESWITCH env var');
assert.ok(process.env.JAMBONES_REDIS_HOST, 'missing JAMBONES_REDIS_HOST env var');
if (process.env.JAMBONES_REDIS_SENTINELS) {
assert.ok(process.env.JAMBONES_REDIS_SENTINEL_MASTER_NAME,
'missing JAMBONES_REDIS_SENTINEL_MASTER_NAME env var, JAMBONES_REDIS_SENTINEL_PASSWORD env var is optional');
} else {
assert.ok(process.env.JAMBONES_REDIS_HOST, 'missing JAMBONES_REDIS_HOST env var');
}
assert.ok(process.env.JAMBONES_NETWORK_CIDR || process.env.K8S, 'missing JAMBONES_SUBNET env var');
};
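For context, either of the following environment configurations satisfies the Redis assertions above. This is a sketch: only the variable names come from this diff; the value formats (in particular the JAMBONES_REDIS_SENTINELS list) are assumptions.

// Option 1: a single Redis host
process.env.JAMBONES_REDIS_HOST = '10.0.0.5';

// Option 2: Redis Sentinel - once JAMBONES_REDIS_SENTINELS is set,
// JAMBONES_REDIS_SENTINEL_MASTER_NAME becomes mandatory and
// JAMBONES_REDIS_SENTINEL_PASSWORD remains optional
process.env.JAMBONES_REDIS_SENTINELS = 'sentinel-1:26379,sentinel-2:26379'; // value format assumed
process.env.JAMBONES_REDIS_SENTINEL_MASTER_NAME = 'mymaster';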
@@ -20,13 +25,9 @@ const JAMBONES_MYSQL_USER = process.env.JAMBONES_MYSQL_USER;
const JAMBONES_MYSQL_PASSWORD = process.env.JAMBONES_MYSQL_PASSWORD;
const JAMBONES_MYSQL_DATABASE = process.env.JAMBONES_MYSQL_DATABASE;
const JAMBONES_MYSQL_PORT = parseInt(process.env.JAMBONES_MYSQL_PORT, 10) || 3306;
const JAMBONES_MYSQL_REFRESH_TTL = process.env.JAMBONES_MYSQL_REFRESH_TTL;
const JAMBONES_MYSQL_REFRESH_TTL = parseInt(process.env.JAMBONES_MYSQL_REFRESH_TTL, 10) || 0;
const JAMBONES_MYSQL_CONNECTION_LIMIT = parseInt(process.env.JAMBONES_MYSQL_CONNECTION_LIMIT, 10) || 10;
/* redis */
const JAMBONES_REDIS_HOST = process.env.JAMBONES_REDIS_HOST;
const JAMBONES_REDIS_PORT = parseInt(process.env.JAMBONES_REDIS_PORT, 10) || 6379;
/* gather and hints */
const JAMBONES_GATHER_EARLY_HINTS_MATCH = process.env.JAMBONES_GATHER_EARLY_HINTS_MATCH;
const JAMBONZ_GATHER_EARLY_HINTS_MATCH = process.env.JAMBONZ_GATHER_EARLY_HINTS_MATCH;
@@ -51,7 +52,7 @@ const JAMBONES_SBCS = process.env.JAMBONES_SBCS;
/* websockets */
const JAMBONES_WS_HANDSHAKE_TIMEOUT_MS = parseInt(process.env.JAMBONES_WS_HANDSHAKE_TIMEOUT_MS, 10) || 1500;
const JAMBONES_WS_MAX_PAYLOAD = parseInt(process.env.JAMBONES_WS_MAX_PAYLOAD, 10) || 24 * 1024;
const JAMBONES_WS_PING_INTERVAL_MS = parseInt(process.env.JAMBONES_WS_PING_INTERVAL_MS, 10) || 0;
const MAX_RECONNECTS = 5;
const RESPONSE_TIMEOUT_MS = parseInt(process.env.JAMBONES_WS_API_MSG_RESPONSE_TIMEOUT, 10) || 5000;
@@ -116,9 +117,21 @@ const HTTP_POOL = process.env.HTTP_POOL && parseInt(process.env.HTTP_POOL);
const HTTP_POOLSIZE = parseInt(process.env.HTTP_POOLSIZE, 10) || 10;
const HTTP_PIPELINING = parseInt(process.env.HTTP_PIPELINING, 10) || 1;
const HTTP_TIMEOUT = 10000;
const HTTP_PROXY_IP = process.env.JAMBONES_HTTP_PROXY_IP;
const HTTP_PROXY_PORT = process.env.JAMBONES_HTTP_PROXY_PORT;
const HTTP_PROXY_PROTOCOL = process.env.JAMBONES_HTTP_PROXY_PROTOCOL || 'http';
const HTTP_USER_AGENT_HEADER = process.env.JAMBONES_HTTP_USER_AGENT_HEADER || 'jambonz';
const OPTIONS_PING_INTERVAL = parseInt(process.env.OPTIONS_PING_INTERVAL, 10) || 30000;
const JAMBONZ_RECORD_WS_BASE_URL = process.env.JAMBONZ_RECORD_WS_BASE_URL || process.env.JAMBONES_RECORD_WS_BASE_URL;
const JAMBONZ_RECORD_WS_USERNAME = process.env.JAMBONZ_RECORD_WS_USERNAME || process.env.JAMBONES_RECORD_WS_USERNAME;
const JAMBONZ_RECORD_WS_PASSWORD = process.env.JAMBONZ_RECORD_WS_PASSWORD || process.env.JAMBONES_RECORD_WS_PASSWORD;
const JAMBONZ_DISABLE_DIAL_PAI_HEADER = process.env.JAMBONZ_DISABLE_DIAL_PAI_HEADER || false;
const JAMBONES_DISABLE_DIRECT_P2P_CALL = process.env.JAMBONES_DISABLE_DIRECT_P2P_CALL || false;
const JAMBONES_EAGERLY_PRE_CACHE_AUDIO = process.env.JAMBONES_EAGERLY_PRE_CACHE_AUDIO;
module.exports = {
JAMBONES_MYSQL_HOST,
JAMBONES_MYSQL_USER,
@@ -136,13 +149,12 @@ module.exports = {
JAMBONZ_GATHER_EARLY_HINTS_MATCH,
JAMBONES_GATHER_CLEAR_GLOBAL_HINTS_ON_EMPTY_HINTS,
JAMBONES_FREESWITCH,
JAMBONES_REDIS_HOST,
JAMBONES_REDIS_PORT,
SMPP_URL,
JAMBONES_NETWORK_CIDR,
JAMBONES_API_BASE_URL,
JAMBONES_TIME_SERIES_HOST,
JAMBONES_INJECT_CONTENT,
JAMBONES_EAGERLY_PRE_CACHE_AUDIO,
JAMBONES_ESL_LISTEN_ADDRESS,
JAMBONES_SBCS,
JAMBONES_OTEL_ENABLED,
@@ -182,14 +194,24 @@ module.exports = {
HTTP_POOLSIZE,
HTTP_PIPELINING,
HTTP_TIMEOUT,
HTTP_PROXY_IP,
HTTP_PROXY_PORT,
HTTP_PROXY_PROTOCOL,
HTTP_USER_AGENT_HEADER,
OPTIONS_PING_INTERVAL,
RESPONSE_TIMEOUT_MS,
JAMBONES_WS_HANDSHAKE_TIMEOUT_MS,
JAMBONES_WS_MAX_PAYLOAD,
JAMBONES_WS_PING_INTERVAL_MS,
MAX_RECONNECTS,
GCP_JSON_KEY,
MICROSOFT_REGION,
MICROSOFT_API_KEY,
SONIOX_API_KEY,
DEEPGRAM_API_KEY
DEEPGRAM_API_KEY,
JAMBONZ_RECORD_WS_BASE_URL,
JAMBONZ_RECORD_WS_USERNAME,
JAMBONZ_RECORD_WS_PASSWORD,
JAMBONZ_DISABLE_DIAL_PAI_HEADER,
JAMBONES_DISABLE_DIRECT_P2P_CALL
};
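The newly added exports are consumed through the same require('./config') pattern that appears later in this diff. A small sketch; the proxy URL construction below is an illustration, not code from the repository.

const {
  HTTP_PROXY_IP,
  HTTP_PROXY_PORT,
  HTTP_PROXY_PROTOCOL,
  JAMBONZ_RECORD_WS_BASE_URL
} = require('./config');

// e.g. build a proxy base URL only when a proxy IP has been configured (illustrative)
const proxyUrl = HTTP_PROXY_IP
  ? `${HTTP_PROXY_PROTOCOL}://${HTTP_PROXY_IP}:${HTTP_PROXY_PORT || 80}`
  : null;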

lib/dynamic-apps.js (new file, 54 lines)

@@ -0,0 +1,54 @@
const appsMap = {
queue: {
// Dummy hook to follow later feature server logic.
call_hook: {
url: 'https://jambonz.org',
method: 'GET'
},
account_sid: '',
app_json: [{
verb: 'dequeue',
name: '',
timeout: 5
}]
},
user: {
// Dummy hook to follow later feature server logic.
call_hook: {
url: 'https://jambonz.org',
method: 'GET'
},
account_sid: '',
app_json: [{
verb: 'dial',
callerId: '',
answerOnBridge: true,
target: [
{
type: 'user',
name: ''
}
]
}]
}
};
const createJambonzApp = (type, {account_sid, name, caller_id}) => {
const app = {...appsMap[type]};
app.account_sid = account_sid;
switch (type) {
case 'queue':
app.app_json[0].name = name;
break;
case 'user':
app.app_json[0].callerId = caller_id;
app.app_json[0].target[0].name = name;
break;
}
app.app_json = JSON.stringify(app.app_json);
return app;
};
module.exports = {
createJambonzApp
};
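A brief usage sketch of the helper above. The require path matches the require('./dynamic-apps') seen later in this diff; the SIDs, queue name, user address and caller id are illustrative.

const { createJambonzApp } = require('./dynamic-apps');

// ephemeral app that dequeues a caller from the 'support' queue
const queueApp = createJambonzApp('queue', {
  account_sid: '38700987-c7a4-4685-a5bb-af378f9734de',   // illustrative SID
  name: 'support'
});
// queueApp.app_json is now the JSON string '[{"verb":"dequeue","name":"support","timeout":5}]'

// ephemeral app that dials a registered user
const userApp = createJambonzApp('user', {
  account_sid: '38700987-c7a4-4685-a5bb-af378f9734de',
  name: 'alice@example.sip.jambonz.cloud',               // illustrative user address
  caller_id: '15083084809'                               // illustrative caller id
});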


@@ -5,262 +5,337 @@ const CallInfo = require('../../session/call-info');
const {CallDirection, CallStatus} = require('../../utils/constants');
const uuidv4 = require('uuid-random');
const SipError = require('drachtio-srf').SipError;
const { validationResult, body } = require('express-validator');
const { validate } = require('@jambonz/verb-specifications');
const sysError = require('./error');
const HttpRequestor = require('../../utils/http-requestor');
const WsRequestor = require('../../utils/ws-requestor');
const RootSpan = require('../../utils/call-tracer');
const dbUtils = require('../../utils/db-utils');
const { mergeSdpMedia, extractSdpMedia } = require('../../utils/sdp-utils');
const { createCallSchema, customSanitizeFunction } = require('../schemas/create-call');
router.post('/', async(req, res) => {
const {logger} = req.app.locals;
const accountSid = req.body.account_sid;
const {srf} = require('../../..');
const removeNullProperties = (obj) => (Object.keys(obj).forEach((key) => obj[key] === null && delete obj[key]), obj);
const removeNulls = (req, res, next) => {
req.body = removeNullProperties(req.body);
next();
};
logger.debug({body: req.body}, 'got createCall request');
try {
let uri, cs, to;
const restDial = makeTask(logger, {'rest:dial': req.body});
const {lookupAccountDetails} = dbUtils(logger, srf);
const {getSBC, getFreeswitch} = srf.locals;
const sbcAddress = getSBC();
if (!sbcAddress) throw new Error('no available SBCs for outbound call creation');
const target = restDial.to;
const opts = {
callingNumber: restDial.from,
...(restDial.callerName && {callingName: restDial.callerName}),
headers: req.body.headers || {}
};
const {lookupTeamsByAccount, lookupAccountBySid} = srf.locals.dbHelpers;
const account = await lookupAccountBySid(req.body.account_sid);
const accountInfo = await lookupAccountDetails(req.body.account_sid);
const callSid = uuidv4();
opts.headers = {
...opts.headers,
'X-Jambonz-Routing': target.type,
'X-Jambonz-FS-UUID': srf.locals.fsUUID,
'X-Call-Sid': callSid,
'X-Account-Sid': accountSid,
...(restDial.fromHost && {'X-Preferred-From-Host': restDial.fromHost})
};
switch (target.type) {
case 'phone':
case 'teams':
uri = `sip:${target.number}@${sbcAddress}`;
to = target.number;
if ('teams' === target.type) {
const obj = await lookupTeamsByAccount(accountSid);
if (!obj) throw new Error('dial to ms teams not allowed; account must first be configured with teams info');
Object.assign(opts.headers, {
'X-MS-Teams-FQDN': obj.ms_teams_fqdn,
'X-MS-Teams-Tenant-FQDN': target.tenant || obj.tenant_fqdn
});
if (target.vmail === true) uri = `${uri};opaque=app:voicemail`;
}
break;
case 'user':
uri = `sip:${target.name}`;
to = target.name;
if (target.overrideTo) {
Object.assign(opts.headers, {
'X-Override-To': target.overrideTo
});
}
break;
case 'sip':
uri = target.sipUri;
to = uri;
break;
router.post('/',
removeNulls,
createCallSchema,
body('tag').custom((value) => {
if (value) {
customSanitizeFunction(value);
}
return true;
}),
async(req, res) => {
const {logger} = req.app.locals;
const errors = validationResult(req);
if (!errors.isEmpty()) {
logger.info({errors: errors.array()}, 'POST /Calls: validation errors');
return res.status(400).json({ errors: errors.array() });
}
const accountSid = req.body.account_sid;
const {srf} = require('../../..');
if (target.type === 'phone' && target.trunk) {
const {lookupCarrier} = dbUtils(this.logger, srf);
const voip_carrier_sid = await lookupCarrier(req.body.account_sid, target.trunk);
logger.info(
`createCall: selected ${voip_carrier_sid} for requested carrier: ${target.trunk || 'unspecified'})`);
if (voip_carrier_sid) {
opts.headers['X-Requested-Carrier-Sid'] = voip_carrier_sid;
const app_json = req.body['app_json'];
try {
// app_json is created only by api-server.
if (app_json) {
// if available, delete from req before creating task
delete req.body.app_json;
// validate possible app_json via verb-specifications
validate(logger, JSON.parse(app_json));
}
} catch (err) {
logger.debug({ err }, `invalid app_json: ${err.message}`);
}
/**
logger.debug({body: req.body}, 'got createCall request');
try {
let uri, cs, to;
const restDial = makeTask(logger, { 'rest:dial': req.body });
restDial.appJson = app_json;
const {lookupAccountDetails, lookupCarrierByPhoneNumber, lookupCarrier} = dbUtils(logger, srf);
const {
lookupAppBySid
} = srf.locals.dbHelpers;
const {getSBC, getFreeswitch} = srf.locals;
const sbcAddress = getSBC();
if (!sbcAddress) throw new Error('no available SBCs for outbound call creation');
const target = restDial.to;
const opts = {
callingNumber: restDial.from,
...(restDial.callerName && {callingName: restDial.callerName}),
headers: req.body.headers || {}
};
const {lookupTeamsByAccount, lookupAccountBySid} = srf.locals.dbHelpers;
const account = await lookupAccountBySid(req.body.account_sid);
const accountInfo = await lookupAccountDetails(req.body.account_sid);
const callSid = uuidv4();
const application = req.body.application_sid ? await lookupAppBySid(req.body.application_sid) : null;
const record_all_calls = account.record_all_calls || (application && application.record_all_calls);
const recordOutputFormat = account.record_format || 'mp3';
const rootSpan = new RootSpan('rest-call', {
callSid,
accountSid,
...(req.body?.application_sid && {'X-Application-Sid': req.body.application_sid})
});
opts.headers = {
...opts.headers,
'X-Jambonz-Routing': target.type,
'X-Jambonz-FS-UUID': srf.locals.fsUUID,
'X-Call-Sid': callSid,
'X-Account-Sid': accountSid,
'X-Trace-ID': rootSpan.traceId,
...(req.body?.application_sid && {'X-Application-Sid': req.body.application_sid}),
...(restDial.fromHost && {'X-Preferred-From-Host': restDial.fromHost}),
...(record_all_calls && {'X-Record-All-Calls': recordOutputFormat})
};
switch (target.type) {
case 'phone':
case 'teams':
uri = `sip:${target.number}@${sbcAddress}`;
to = target.number;
if ('teams' === target.type) {
const obj = await lookupTeamsByAccount(accountSid);
if (!obj) throw new Error('dial to ms teams not allowed; account must first be configured with teams info');
Object.assign(opts.headers, {
'X-MS-Teams-FQDN': obj.ms_teams_fqdn,
'X-MS-Teams-Tenant-FQDN': target.tenant || obj.tenant_fqdn
});
if (target.vmail === true) uri = `${uri};opaque=app:voicemail`;
}
break;
case 'user':
uri = `sip:${target.name}`;
to = target.name;
if (target.overrideTo) {
Object.assign(opts.headers, {
'X-Override-To': target.overrideTo
});
}
break;
case 'sip':
uri = target.sipUri;
to = uri;
break;
}
if (target.type === 'phone' && target.trunk) {
const voip_carrier_sid = await lookupCarrier(req.body.account_sid, target.trunk);
logger.info(
`createCall: selected ${voip_carrier_sid} for requested carrier: ${target.trunk || 'unspecified'})`);
if (voip_carrier_sid) {
opts.headers['X-Requested-Carrier-Sid'] = voip_carrier_sid;
}
}
/**
* trunk isn't specified,
* check if from-number matches any existing numbers on Jambonz
* */
if (target.type === 'phone' && !target.trunk) {
const {lookupCarrierByPhoneNumber} = dbUtils(this.logger, srf);
const voip_carrier_sid = await lookupCarrierByPhoneNumber(req.body.account_sid, restDial.from);
logger.info(
`createCall: selected ${voip_carrier_sid} for requested phone number: ${restDial.from || 'unspecified'})`);
if (voip_carrier_sid) {
opts.headers['X-Requested-Carrier-Sid'] = voip_carrier_sid;
if (target.type === 'phone' && !target.trunk) {
const str = restDial.from || '';
const callingNumber = str.startsWith('+') ? str.substring(1) : str;
const voip_carrier_sid = await lookupCarrierByPhoneNumber(req.body.account_sid, callingNumber);
logger.info(
`createCall: selected ${voip_carrier_sid} for requested phone number: ${callingNumber || 'unspecified'})`);
if (voip_carrier_sid) {
opts.headers['X-Requested-Carrier-Sid'] = voip_carrier_sid;
}
}
}
/* create endpoint for outdial */
const ms = getFreeswitch();
if (!ms) throw new Error('no available Freeswitch for outbound call creation');
const ep = await ms.createEndpoint();
logger.debug(`createCall: successfully allocated endpoint, sending INVITE to ${sbcAddress}`);
/* create endpoint for outdial */
const ms = getFreeswitch();
if (!ms) throw new Error('no available Freeswitch for outbound call creation');
const ep = await ms.createEndpoint();
logger.debug(`createCall: successfully allocated endpoint, sending INVITE to ${sbcAddress}`);
/* launch outdial */
let sdp, sipLogger;
const connectStream = async(remoteSdp) => {
if (remoteSdp !== sdp) {
ep.modify(sdp = remoteSdp);
return true;
/* launch outdial */
let sdp, sipLogger;
let dualEp;
let localSdp = ep.local.sdp;
if (req.body.dual_streams) {
dualEp = await ms.createEndpoint();
localSdp = mergeSdpMedia(localSdp, dualEp.local.sdp);
}
return false;
};
Object.assign(opts, {
proxy: `sip:${sbcAddress}`,
localSdp: ep.local.sdp
});
if (target.auth) opts.auth = target.auth;
const connectStream = async(remoteSdp) => {
if (remoteSdp !== sdp) {
sdp = remoteSdp;
if (req.body.dual_streams) {
const [sdpLegA, sdpLebB] = extractSdpMedia(remoteSdp);
await ep.modify(sdpLegA);
await dualEp.modify(sdpLebB);
await ep.bridge(dualEp);
} else {
ep.modify(sdp);
}
return true;
}
return false;
};
Object.assign(opts, {
proxy: `sip:${sbcAddress}`,
localSdp
});
if (target.auth) opts.auth = target.auth;
/**
/**
* create our application object -
* not from the database as per an inbound call,
* but from the provided params in the request
*/
const app = req.body;
const app = req.body;
/**
/**
* attach our requestor and notifier objects
* these will be used for all http requests we make during this call
*/
if ('WS' === app.call_hook?.method || /^wss?:/.test(app.call_hook.url)) {
logger.debug({call_hook: app.call_hook}, 'creating websocket for call hook');
app.requestor = new WsRequestor(logger, account.account_sid, app.call_hook, account.webhook_secret) ;
if (app.call_hook.url === app.call_status_hook.url || !app.call_status_hook?.url) {
logger.debug('reusing websocket for call status hook');
app.notifier = app.requestor;
if ('WS' === app.call_hook?.method || /^wss?:/.test(app.call_hook.url)) {
logger.debug({call_hook: app.call_hook}, 'creating websocket for call hook');
app.requestor = new WsRequestor(logger, account.account_sid, app.call_hook, account.webhook_secret) ;
if (app.call_hook.url === app.call_status_hook.url || !app.call_status_hook?.url) {
logger.debug('reusing websocket for call status hook');
app.notifier = app.requestor;
}
}
else {
logger.debug({call_hook: app.call_hook}, 'creating http client for call hook');
app.requestor = new HttpRequestor(logger, account.account_sid, app.call_hook, account.webhook_secret);
}
if (!app.notifier && app.call_status_hook) {
app.notifier = new HttpRequestor(logger, account.account_sid, app.call_status_hook, account.webhook_secret);
logger.debug({call_hook: app.call_hook}, 'creating http client for call status hook');
}
else if (!app.notifier) {
logger.debug('creating null call status hook');
app.notifier = {request: () => {}, close: () => {}};
}
}
else {
logger.debug({call_hook: app.call_hook}, 'creating http client for call hook');
app.requestor = new HttpRequestor(logger, account.account_sid, app.call_hook, account.webhook_secret);
}
if (!app.notifier && app.call_status_hook) {
app.notifier = new HttpRequestor(logger, account.account_sid, app.call_status_hook, account.webhook_secret);
logger.debug({call_hook: app.call_hook}, 'creating http client for call status hook');
}
else if (!app.notifier) {
logger.debug('creating null call status hook');
app.notifier = {request: () => {}, close: () => {}};
}
/* now launch the outdial */
try {
const dlg = await srf.createUAC(uri, {...opts, followRedirects: true, keepUriOnRedirect: true}, {
cbRequest: (err, inviteReq) => {
/* now launch the outdial */
try {
const dlg = await srf.createUAC(uri, {...opts, followRedirects: true, keepUriOnRedirect: true}, {
cbRequest: (err, inviteReq) => {
/* in case of 302 redirect, this gets called twice, ignore the second
except to update the req so that it can later be canceled if need be
*/
if (res.headersSent) {
logger.info(`create-call: got redirect, updating request to new call-id ${req.get('Call-ID')}`);
if (cs) cs.req = inviteReq;
return;
if (res.headersSent) {
logger.info(`create-call: got redirect, updating request to new call-id ${req.get('Call-ID')}`);
if (cs) cs.req = inviteReq;
return;
}
if (err) {
logger.error(err, 'createCall Error creating call');
res.status(500).send('Call Failure');
return;
}
inviteReq.srf = srf;
inviteReq.locals = {
...(inviteReq || {}),
callSid,
application_sid: app.application_sid
};
/* ok our outbound INVITE is in flight */
const tasks = [restDial];
sipLogger = logger.child({
callSid,
callId: inviteReq.get('Call-ID'),
accountSid,
traceId: rootSpan.traceId
});
app.requestor.logger = app.notifier.logger = sipLogger;
const callInfo = new CallInfo({
direction: CallDirection.Outbound,
req: inviteReq,
to,
tag: app.tag,
callSid,
accountSid: req.body.account_sid,
applicationSid: app.application_sid,
traceId: rootSpan.traceId
});
cs = new RestCallSession({
logger: sipLogger,
application: app,
srf,
req: inviteReq,
ep,
ep2: dualEp,
tasks,
callInfo,
accountInfo,
rootSpan
});
cs.exec(req);
res.status(201).json({sid: cs.callSid, callId: inviteReq.get('Call-ID')});
sipLogger.info({sid: cs.callSid, callId: inviteReq.get('Call-ID')},
`outbound REST call attempt to ${JSON.stringify(target)} has been sent`);
},
cbProvisional: (prov) => {
const callStatus = prov.body ? CallStatus.EarlyMedia : CallStatus.Ringing;
if ([180, 183].includes(prov.status) && prov.body) connectStream(prov.body);
restDial.emit('callStatus', prov.status, !!prov.body);
cs.emit('callStatusChange', {callStatus, sipStatus: prov.status});
}
if (err) {
logger.error(err, 'createCall Error creating call');
res.status(500).send('Call Failure');
return;
}
inviteReq.srf = srf;
inviteReq.locals = {
...(inviteReq || {}),
callSid,
application_sid: app.application_sid
};
/* ok our outbound INVITE is in flight */
const tasks = [restDial];
const rootSpan = new RootSpan('rest-call', inviteReq);
sipLogger = logger.child({
callSid,
callId: inviteReq.get('Call-ID'),
accountSid,
traceId: rootSpan.traceId
});
connectStream(dlg.remote.sdp);
cs.emit('callStatusChange', {
callStatus: CallStatus.InProgress,
sipStatus: 200,
sipReason: 'OK'
});
restDial.emit('callStatus', 200);
restDial.emit('connect', dlg);
}
catch (err) {
let callStatus = CallStatus.Failed;
if (err instanceof SipError) {
if ([486, 603].includes(err.status)) callStatus = CallStatus.Busy;
else if (487 === err.status) callStatus = CallStatus.NoAnswer;
if (sipLogger) sipLogger.info(`REST outdial failed with ${err.status}`);
else console.log(`REST outdial failed with ${err.status}`);
if (cs) cs.emit('callStatusChange', {
callStatus,
sipStatus: err.status,
sipReason: err.reason
});
app.requestor.logger = app.notifier.logger = sipLogger;
const callInfo = new CallInfo({
direction: CallDirection.Outbound,
req: inviteReq,
to,
tag: app.tag,
callSid,
accountSid: req.body.account_sid,
applicationSid: app.application_sid,
traceId: rootSpan.traceId
});
cs = new RestCallSession({
logger: sipLogger,
application: app,
srf,
req: inviteReq,
ep,
tasks,
callInfo,
accountInfo,
rootSpan
});
cs.exec(req);
res.status(201).json({sid: cs.callSid, callId: inviteReq.get('Call-ID')});
sipLogger.info({sid: cs.callSid, callId: inviteReq.get('Call-ID')},
`outbound REST call attempt to ${JSON.stringify(target)} has been sent`);
},
cbProvisional: (prov) => {
const callStatus = prov.body ? CallStatus.EarlyMedia : CallStatus.Ringing;
if ([180, 183].includes(prov.status) && prov.body) connectStream(prov.body);
restDial.emit('callStatus', prov.status, !!prov.body);
cs.emit('callStatusChange', {callStatus, sipStatus: prov.status});
cs.callGone = true;
}
});
connectStream(dlg.remote.sdp);
cs.emit('callStatusChange', {
callStatus: CallStatus.InProgress,
sipStatus: 200,
sipReason: 'OK'
});
restDial.emit('callStatus', 200);
restDial.emit('connect', dlg);
}
catch (err) {
let callStatus = CallStatus.Failed;
if (err instanceof SipError) {
if ([486, 603].includes(err.status)) callStatus = CallStatus.Busy;
else if (487 === err.status) callStatus = CallStatus.NoAnswer;
if (sipLogger) sipLogger.info(`REST outdial failed with ${err.status}`);
else console.log(`REST outdial failed with ${err.status}`);
if (cs) cs.emit('callStatusChange', {
callStatus,
sipStatus: err.status,
sipReason: err.reason
});
else {
if (cs) cs.emit('callStatusChange', {
callStatus,
sipStatus: 500,
sipReason: 'Internal Server Error'
});
if (sipLogger) sipLogger.error({err}, 'REST outdial failed');
else console.error(err);
}
ep.destroy();
if (dualEp) {
dualEp.destroy();
}
setTimeout(restDial.kill.bind(restDial, cs), 5000);
}
else {
if (cs) cs.emit('callStatusChange', {
callStatus,
sipStatus: 500,
sipReason: 'Internal Server Error'
});
if (sipLogger) sipLogger.error({err}, 'REST outdial failed');
else console.error(err);
}
ep.destroy();
setTimeout(restDial.kill.bind(restDial), 5000);
} catch (err) {
sysError(logger, res, err);
}
} catch (err) {
sysError(logger, res, err);
}
});
});
module.exports = router;


@@ -9,25 +9,29 @@ const {CallStatus, CallDirection} = require('../../utils/constants');
*/
function retrieveCallSession(callSid, opts) {
if (opts.call_status_hook && !opts.call_hook) {
throw new DbErrorBadRequest('call_status_hook can be updated only when call_hook is also being updated');
throw new DbErrorBadRequest(
`call_status_hook can be updated only when call_hook is also being updated for call_sid ${callSid}`);
}
const cs = sessionTracker.get(callSid);
if (!cs) {
throw new DbErrorUnprocessableRequest('call session is gone');
throw new DbErrorUnprocessableRequest(`call session is gone for call_sid ${callSid}`);
}
if (opts.call_status === CallStatus.Completed && !cs.hasStableDialog) {
throw new DbErrorUnprocessableRequest('current call state is incompatible with requested action');
throw new DbErrorUnprocessableRequest(
`current call state is incompatible with requested action for call_sid ${callSid}`);
}
else if (opts.call_status === CallStatus.NoAnswer) {
if (cs.direction === CallDirection.Outbound) {
if (!cs.isOutboundCallRinging) {
throw new DbErrorUnprocessableRequest('current call state is incompatible with requested action');
throw new DbErrorUnprocessableRequest(
`current call state is incompatible with requested action for call_sid ${callSid}`);
}
}
else {
if (cs.isInboundCallAnswered) {
throw new DbErrorUnprocessableRequest('current call state is incompatible with requested action');
throw new DbErrorUnprocessableRequest(
`current call state is incompatible with requested action for call_sid ${callSid}`);
}
}
}
@@ -41,7 +45,7 @@ function retrieveCallSession(callSid, opts) {
router.post('/:callSid', async(req, res) => {
const logger = req.app.locals.logger;
const callSid = req.params.callSid;
logger.debug({body: req.body}, 'got upateCall request');
logger.debug({body: req.body}, 'got updateCall request');
try {
const cs = retrieveCallSession(callSid, req.body);
if (!cs) {


@@ -0,0 +1,134 @@
const { checkSchema } = require('express-validator');
/**
* @path api-server {{base_url}}/v1/Accounts/:account_sid/Calls
* @see https://api.jambonz.org/#243a2edd-7999-41db-bd0d-08082bbab401
*/
const createCallSchema = checkSchema({
application_sid: {
isString: true,
optional: true,
isLength: { options: { min: 36, max: 36 } },
errorMessage: 'Invalid application_sid',
},
answerOnBridge: {
isBoolean: true,
optional: true,
errorMessage: 'Invalid answerOnBridge',
},
from: {
errorMessage: 'Invalid from',
isString: true,
isLength: {
options: { min: 1, max: 256 },
},
},
fromHost: {
isString: true,
optional: true,
errorMessage: 'Invalid fromHost',
},
to: {
errorMessage: 'Invalid to',
isObject: true,
},
callerName: {
isString: true,
optional: true,
errorMessage: 'Invalid callerName',
},
amd: {
isObject: true,
optional: true,
},
tag: {
isObject: true,
optional: true,
errorMessage: 'Invalid tag',
},
app_json: {
isString: true,
optional: true,
errorMessage: 'Invalid app_json',
},
account_sid: {
isString: true,
optional: true,
errorMessage: 'Invalid account_sid',
isLength: { options: { min: 36, max: 36 } },
},
timeout: {
isInt: true,
optional: true,
errorMessage: 'Invalid timeout',
},
timeLimit: {
isInt: true,
optional: true,
errorMessage: 'Invalid timeLimit',
},
call_hook: {
isObject: true,
optional: true,
errorMessage: 'Invalid call_hook',
},
call_status_hook: {
isObject: true,
optional: true,
errorMessage: 'Invalid call_status_hook',
},
speech_synthesis_vendor: {
isString: true,
optional: true,
errorMessage: 'Invalid speech_synthesis_vendor',
},
speech_synthesis_language: {
isString: true,
optional: true,
errorMessage: 'Invalid speech_synthesis_language',
},
speech_synthesis_voice: {
isString: true,
optional: true,
errorMessage: 'Invalid speech_synthesis_voice',
},
speech_recognizer_vendor: {
isString: true,
optional: true,
errorMessage: 'Invalid speech_recognizer_vendor',
},
speech_recognizer_language: {
isString: true,
optional: true,
errorMessage: 'Invalid speech_recognizer_language',
}
}, ['body']);
const customSanitizeFunction = (value) => {
try {
if (Array.isArray(value)) {
value = value.map((item) => customSanitizeFunction(item));
} else if (typeof value === 'object') {
Object.keys(value).forEach((key) => {
value[key] = customSanitizeFunction(value[key]);
});
} else if (typeof value === 'string') {
/* trims characters at the beginning and at the end of a string */
value = value.trim();
/* Verify strings including 'http' via new URL */
if (value.includes('http')) {
value = new URL(value).toString();
}
}
} catch (error) {
value = `Error: ${error.message}`;
}
return value;
};
module.exports = {
createCallSchema,
customSanitizeFunction
};
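
For reference, a minimal sketch of how this schema and sanitizer might be wired into an Express route (the route path, require path, and handler body are hypothetical; the actual route handler lives elsewhere in this repo, and express.json() is assumed to be mounted upstream):

const express = require('express');
const { validationResult } = require('express-validator');
// hypothetical require path for the module above
const { createCallSchema, customSanitizeFunction } = require('./call-validator');

const router = express.Router();
router.post('/Calls', createCallSchema, (req, res) => {
  const errors = validationResult(req);
  if (!errors.isEmpty()) return res.status(400).json({ errors: errors.array() });
  // recursively trim strings and normalize http(s) URLs before handing off
  const body = customSanitizeFunction(req.body);
  res.status(201).json({ received: body });
});
module.exports = router;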

View File

@@ -12,7 +12,9 @@ const RootSpan = require('./utils/call-tracer');
const listTaskNames = require('./utils/summarize-tasks');
const {
JAMBONES_MYSQL_REFRESH_TTL,
JAMBONES_DISABLE_DIRECT_P2P_CALL
} = require('./config');
const { createJambonzApp } = require('./dynamic-apps');
module.exports = function(srf, logger) {
const {
@@ -20,17 +22,21 @@ module.exports = function(srf, logger) {
lookupAppByRegex,
lookupAppBySid,
lookupAppByRealm,
lookupAppByTeamsTenant
lookupAppByTeamsTenant,
registrar,
lookupClientByAccountAndUsername
} = srf.locals.dbHelpers;
const {
writeAlerts,
AlertType
} = srf.locals;
const {lookupAccountDetails} = dbUtils(logger, srf);
const {lookupAccountDetails, lookupGoogleCustomVoice} = dbUtils(logger, srf);
function initLocals(req, res, next) {
async function initLocals(req, res, next) {
const callId = req.get('Call-ID');
const uri = parseUri(req.uri);
logger.info({
uri,
callId,
callingNumber: req.callingNumber,
calledNumber: req.calledNumber
@@ -42,12 +48,52 @@ module.exports = function(srf, logger) {
const callSid = req.has('X-Retain-Call-Sid') ? req.get('X-Retain-Call-Sid') : uuidv4();
const account_sid = req.get('X-Account-Sid');
req.locals = {callSid, account_sid, callId};
if (req.has('X-Application-Sid')) {
let clientDb = null;
if (req.has('X-Authenticated-User')) {
req.locals.originatingUser = req.get('X-Authenticated-User');
let clientSettings;
const arr = /^(.*)@(.*)/.exec(req.locals.originatingUser);
if (arr) {
[clientSettings] = await lookupClientByAccountAndUsername(account_sid, arr[1]);
}
clientDb = await registrar.query(req.locals.originatingUser);
clientDb = {
...clientDb,
...clientSettings,
};
}
// check for call to application
if (uri.user?.startsWith('app-') && req.locals.originatingUser && clientDb?.allow_direct_app_calling) {
const application_sid = uri.user.match(/app-(.*)/)[1];
logger.debug(`got application from Request URI header: ${application_sid}`);
req.locals.application_sid = application_sid;
} else if (req.has('X-Application-Sid')) {
const application_sid = req.get('X-Application-Sid');
logger.debug(`got application from X-Application-Sid header: ${application_sid}`);
req.locals.application_sid = application_sid;
}
if (req.has('X-Authenticated-User')) req.locals.originatingUser = req.get('X-Authenticated-User');
// check for call to queue
if (uri.user?.startsWith('queue-') && req.locals.originatingUser && clientDb?.allow_direct_queue_calling) {
const queue_name = uri.user.match(/queue-(.*)/)[1];
logger.debug(`got Queue from Request URI header: ${queue_name}`);
req.locals.queue_name = queue_name;
}
// check for call to registered user
if (!JAMBONES_DISABLE_DIRECT_P2P_CALL && req.locals.originatingUser && clientDb?.allow_direct_user_calling) {
const arr = /^(.*)@(.*)/.exec(req.locals.originatingUser);
if (arr) {
const sipRealm = arr[2];
const called_user = `${req.calledNumber}@${sipRealm}`;
const reg = await registrar.query(called_user);
if (reg) {
logger.debug(`got called Number is a registered user: ${called_user}`);
req.locals.called_user = called_user;
}
}
}
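// A hedged summary of the direct-dial conventions added above (domains and sids hypothetical;
// each branch is gated by the client's allow_direct_* settings, and user-to-user calls
// additionally require JAMBONES_DISABLE_DIRECT_P2P_CALL to be unset):
//   INVITE sip:app-0b7c0b2a-...@example.sip.jambonz.cloud  -> runs that application directly
//   INVITE sip:queue-support@example.sip.jambonz.cloud     -> generates a queue application
//   INVITE to 1234 from an authenticated user              -> dials 1234@<caller's sip realm>, if registered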
if (req.has('X-MS-Teams-Tenant-FQDN')) req.locals.msTeamsTenant = req.get('X-MS-Teams-Tenant-FQDN');
if (req.has('X-Cisco-Recording-Participant')) {
const ciscoParticipants = req.get('X-Cisco-Recording-Participant');
@@ -120,7 +166,7 @@ module.exports = function(srf, logger) {
};
logger.info({callId, metadata, sdp}, 'successfully parsed SIPREC payload');
} catch (err) {
logger.info({callId}, 'Error parsing multipart payload');
logger.info({err, callId}, 'Error parsing multipart payload');
return res.send(503);
}
}
@@ -184,15 +230,24 @@ module.exports = function(srf, logger) {
const {span} = rootSpan.startChildSpan('lookupApplication');
try {
let app;
if (req.locals.application_sid) app = await lookupAppBySid(req.locals.application_sid);
else if (req.locals.originatingUser) {
if (req.locals.queue_name) {
logger.debug(`calling to queue ${req.locals.queue_name}, generating queue app`);
app = createJambonzApp('queue', {account_sid, name: req.locals.queue_name});
} else if (req.locals.called_user) {
logger.debug(`calling to registered user ${req.locals.called_user}, generating dial app`);
app = createJambonzApp('user',
{account_sid, name: req.locals.called_user, caller_id: req.locals.callingNumber});
} else if (req.locals.application_sid) {
app = await lookupAppBySid(req.locals.application_sid);
} else if (req.locals.originatingUser) {
const arr = /^(.*)@(.*)/.exec(req.locals.originatingUser);
if (arr) {
const sipRealm = arr[2];
logger.debug(`looking for device calling app for realm ${sipRealm}`);
app = await lookupAppByRealm(sipRealm);
if (app) logger.debug({app}, `retrieved device calling app for realm ${sipRealm}`);
if (app) {
logger.debug({app}, `retrieved device calling app for realm ${sipRealm}`);
}
}
}
else if (req.locals.msTeamsTenant) {
@@ -257,7 +312,22 @@ module.exports = function(srf, logger) {
app2.requestor = new HttpRequestor(logger, account_sid, app.call_hook, accountInfo.account.webhook_secret);
if (app.call_status_hook) app2.notifier = new HttpRequestor(logger, account_sid, app.call_status_hook,
accountInfo.account.webhook_secret);
else app2.notifier = {request: () => {}};
else app2.notifier = {request: () => {}, close: () => {}};
}
// Resolve application.speech_synthesis_voice if it's custom voice
if (app2.speech_synthesis_vendor === 'google' && app2.speech_synthesis_voice.startsWith('custom_')) {
const arr = /custom_(.*)/.exec(app2.speech_synthesis_voice);
if (arr) {
const google_custom_voice_sid = arr[1];
const [custom_voice] = await lookupGoogleCustomVoice(google_custom_voice_sid);
if (custom_voice) {
app2.speech_synthesis_voice = {
reportedUsage: custom_voice.reported_usage,
model: custom_voice.model
};
}
}
}
req.locals.application = app2;
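
To illustrate the custom-voice resolution above (sid and values hypothetical): an application with speech_synthesis_vendor 'google' and speech_synthesis_voice 'custom_a1b2c3d4-...' has its voice swapped for an object built from the looked-up row, e.g.:

// hypothetical result of lookupGoogleCustomVoice
app2.speech_synthesis_voice = {
  reportedUsage: 'REALTIME',
  model: 'projects/my-project/locations/us-central1/models/my-custom-voice'
};
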
@@ -299,22 +369,30 @@ module.exports = function(srf, logger) {
if (app.app_json) {
json = JSON.parse(app.app_json);
} else {
const defaults = {
synthesizer: {
vendor: app.speech_synthesis_vendor,
...(app.speech_synthesis_label && {label: app.speech_synthesis_label}),
language: app.speech_synthesis_language,
voice: app.speech_synthesis_voice,
...(app.fallback_speech_synthesis_vendor && {fallback_vendor: app.fallback_speech_synthesis_vendor}),
...(app.fallback_speech_synthesis_label && {fallback_label: app.fallback_speech_synthesis_label}),
...(app.fallback_speech_synthesis_language && {fallback_language: app.fallback_speech_synthesis_language}),
...(app.fallback_speech_synthesis_voice && {fallback_voice: app.fallback_speech_synthesis_voice})
},
recognizer: {
vendor: app.speech_recognizer_vendor,
...(app.speech_recognizer_label && {label: app.speech_recognizer_label}),
language: app.speech_recognizer_language,
...(app.fallback_speech_recognizer_vendor && {fallback_vendor: app.fallback_speech_recognizer_vendor}),
...(app.fallback_speech_recognizer_label && {fallback_label: app.fallback_speech_recognizer_label}),
...(app.fallback_speech_recognizer_language && {fallback_language: app.fallback_speech_recognizer_language})
}
};
const params = Object.assign(['POST', 'WS'].includes(app.call_hook.method) ? { sip: req.msg } : {},
req.locals.callInfo,
{ service_provider_sid: req.locals.service_provider_sid },
{
defaults: {
synthesizer: {
vendor: app.speech_synthesis_vendor,
language: app.speech_synthesis_language,
voice: app.speech_synthesis_voice
},
recognizer: {
vendor: app.speech_recognizer_vendor,
language: app.speech_recognizer_language
}
}
});
{ defaults });
logger.debug({ params }, 'sending initial webhook');
const obj = rootSpan.startChildSpan('performAppWebhook');
span = obj.span;
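
For illustration, with fallback speech settings provisioned on the application, the defaults block included in the session:new payload takes a shape like the following (all values hypothetical):

const defaults = {
  synthesizer: {
    vendor: 'google',
    language: 'en-US',
    voice: 'en-US-Standard-C',
    fallback_vendor: 'aws',
    fallback_language: 'en-US',
    fallback_voice: 'Joanna'
  },
  recognizer: {
    vendor: 'google',
    language: 'en-US',
    fallback_vendor: 'deepgram',
    fallback_language: 'en-US'
  }
};
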
@@ -322,6 +400,7 @@ module.exports = function(srf, logger) {
const httpHeaders = b3 && { b3 };
json = await app.requestor.request('session:new', app.call_hook, params, httpHeaders);
}
app.tasks = normalizeJambones(logger, json).map((tdata) => makeTask(logger, tdata));
span?.setAttributes({
'http.statusCode': 200,

View File

@@ -1,4 +1,6 @@
const CallSession = require('./call-session');
const {CallStatus} = require('../utils/constants');
const moment = require('moment');
/**
* @classdesc Subclass of CallSession. Represents a CallSession
@@ -19,12 +21,14 @@ class AdultingCallSession extends CallSession {
rootSpan
});
this.sd = singleDialer;
this.req = callInfo.req;
this.sd.dlg.on('destroy', () => {
this.logger.info('AdultingCallSession: called party hung up');
this._callReleased();
});
this.sd.emit('adulting');
this.on('callStatusChange', this._notifyCallStatusChange.bind(this));
}
get dlg() {
@@ -49,6 +53,18 @@ class AdultingCallSession extends CallSession {
}
_callerHungup() {
if (this.dlg.connectTime) {
const duration = moment().diff(this.dlg.connectTime, 'seconds');
this.rootSpan.setAttributes({'call.termination': 'hangup by caller'});
this.callInfo.callTerminationBy = 'caller';
this.emit('callStatusChange', {
callStatus: CallStatus.Completed,
duration
});
}
this.logger.info('AdultingCallSession: caller hung up');
this._callReleased();
this.req.removeAllListeners('cancel');
}
}

View File

@@ -19,8 +19,10 @@ const HttpRequestor = require('../utils/http-requestor');
const WsRequestor = require('../utils/ws-requestor');
const {
JAMBONES_INJECT_CONTENT,
AWS_REGION
JAMBONES_EAGERLY_PRE_CACHE_AUDIO,
AWS_REGION,
} = require('../config');
const BackgroundTaskManager = require('../utils/background-task-manager');
const BADPRECONDITIONS = 'preconditions not met';
const CALLER_CANCELLED_ERR_MSG = 'Response not sent due to unknown transaction';
@@ -63,6 +65,11 @@ class CallSession extends Emitter {
this.callGone = false;
this.notifiedComplete = false;
this.rootSpan = rootSpan;
this.backgroundTaskManager = new BackgroundTaskManager({
cs: this,
logger,
rootSpan
});
this._origRecognizerSettings = {
vendor: this.application?.speech_recognizer_vendor,
@@ -132,6 +139,10 @@ class CallSession extends Emitter {
return this.callInfo.callStatus;
}
get isBackGroundListen() {
return this.backgroundTaskManager.isTaskRunning('listen');
}
/**
* SIP call-id for the call
*/
@@ -162,6 +173,18 @@ class CallSession extends Emitter {
return this.application.notifier;
}
/**
* synthesizer
*/
get synthesizer() {
return this._synthesizer;
}
set synthesizer(synth) {
this._synthesizer = synth;
}
/**
* default vendor to use for speech synthesis if not provided in the app
*/
@@ -171,6 +194,30 @@ class CallSession extends Emitter {
set speechSynthesisVendor(vendor) {
this.application.speech_synthesis_vendor = vendor;
}
get fallbackSpeechSynthesisVendor() {
return this.application.fallback_speech_synthesis_vendor;
}
set fallbackSpeechSynthesisVendor(vendor) {
this.application.fallback_speech_synthesis_vendor = vendor;
}
/**
* default label to use for speech synthesis if not provided in the app
*/
get speechSynthesisLabel() {
return this.application.speech_synthesis_label;
}
set speechSynthesisLabel(label) {
this.application.speech_synthesis_label = label;
}
get fallbackSpeechSynthesisLabel() {
return this.application.fallback_speech_synthesis_label;
}
set fallbackSpeechSynthesisLabel(label) {
this.application.fallback_speech_synthesis_label = label;
}
/**
* default voice to use for speech synthesis if not provided in the app
*/
@@ -180,6 +227,13 @@ class CallSession extends Emitter {
set speechSynthesisVoice(voice) {
this.application.speech_synthesis_voice = voice;
}
get fallbackSpeechSynthesisVoice() {
return this.application.fallback_speech_synthesis_voice;
}
set fallbackSpeechSynthesisVoice(voice) {
this.application.fallback_speech_synthesis_voice = voice;
}
/**
* default language to use for speech synthesis if not provided in the app
*/
@@ -190,6 +244,13 @@ class CallSession extends Emitter {
this.application.speech_synthesis_language = language;
}
get fallbackSpeechSynthesisLanguage() {
return this.application.fallback_speech_synthesis_language;
}
set fallbackSpeechSynthesisLanguage(language) {
this.application.fallback_speech_synthesis_language = language;
}
/**
* default vendor to use for speech recognition if not provided in the app
*/
@@ -199,6 +260,39 @@ class CallSession extends Emitter {
set speechRecognizerVendor(vendor) {
this.application.speech_recognizer_vendor = vendor;
}
get fallbackSpeechRecognizerVendor() {
return this.application.fallback_speech_recognizer_vendor;
}
set fallbackSpeechRecognizerVendor(vendor) {
this.application.fallback_speech_recognizer_vendor = vendor;
}
/**
* recognizer
*/
get recognizer() {
return this._recognizer;
}
set recognizer(rec) {
this._recognizer = rec;
}
/**
* default label to use for speech recognition if not provided in the app
*/
get speechRecognizerLabel() {
return this.application.speech_recognizer_label;
}
set speechRecognizerLabel(label) {
this.application.speech_recognizer_label = label;
}
get fallbackSpeechRecognizerLabel() {
return this.application.fallback_speech_recognizer_label;
}
set fallbackSpeechRecognizerLabel(label) {
this.application.fallback_speech_recognizer_label = label;
}
/**
* default language to use for speech recognition if not provided in the app
*/
@@ -209,6 +303,13 @@ class CallSession extends Emitter {
this.application.speech_recognizer_language = language;
}
get fallbackSpeechRecognizerLanguage() {
return this.application.fallback_speech_recognizer_language;
}
set fallbackSpeechRecognizerLanguage(language) {
this.application.fallback_speech_recognizer_language = language;
}
/**
* indicates whether the call currently in progress
*/
@@ -281,11 +382,11 @@ class CallSession extends Emitter {
}
get isBotModeEnabled() {
return this.backgroundGatherTask;
return this.backgroundTaskManager.isTaskRunning('bargeIn');
}
get isListenEnabled() {
return this.backgroundListenTask;
return this.backgroundTaskManager.isTaskRunning('listen');
}
get b3() {
@@ -330,6 +431,22 @@ class CallSession extends Emitter {
return this._globalSttPunctuation;
}
get onHoldMusic() {
return this._onHoldMusic;
}
set onHoldMusic(url) {
this._onHoldMusic = url;
}
get sipRequestWithinDialogHook() {
return this._sipRequestWithinDialogHook;
}
set sipRequestWithinDialogHook(url) {
this._sipRequestWithinDialogHook = url;
}
hasGlobalSttPunctuation() {
return this._globalSttPunctuation !== undefined;
}
@@ -415,7 +532,10 @@ class CallSession extends Emitter {
'X-Call-Sid': this.callSid,
'X-Account-Sid': this.accountSid,
'X-Application-Sid': this.applicationSid,
}
...(this.recordOptions.headers && {'Content-Type': 'application/json'})
},
// the SIPREC client is initiated from startCallRecording, so we just need to pass custom headers in startRecording
...(this.recordOptions.headers && {body: JSON.stringify(this.recordOptions.headers) + '\n'})
});
if (res.status === 200) {
this._recordState = RecordState.RecordingOn;
@@ -436,7 +556,7 @@ class CallSession extends Emitter {
const res = await this.dlg.request({
method: 'INFO',
headers: {
'X-Reason': 'stopCallRecording',
'X-Reason': 'stopCallRecording'
}
});
if (res.status === 200) {
@@ -458,7 +578,7 @@ class CallSession extends Emitter {
const res = await this.dlg.request({
method: 'INFO',
headers: {
'X-Reason': 'pauseCallRecording',
'X-Reason': 'pauseCallRecording'
}
});
if (res.status === 200) {
@@ -480,7 +600,7 @@ class CallSession extends Emitter {
const res = await this.dlg.request({
method: 'INFO',
headers: {
'X-Reason': 'resumeCallRecording',
'X-Reason': 'resumeCallRecording'
}
});
if (res.status === 200) {
@@ -495,114 +615,41 @@ class CallSession extends Emitter {
}
}
async startBackgroundListen(opts) {
if (this.isListenEnabled) {
this.logger.info('CallSession:startBackgroundListen - listen is already enabled, ignoring request');
return;
}
try {
this.logger.debug({opts}, 'CallSession:startBackgroundListen');
const t = normalizeJambones(this.logger, [opts]);
this.backgroundListenTask = makeTask(this.logger, t[0]);
const resources = await this._evaluatePreconditions(this.backgroundListenTask);
const {span, ctx} = this.rootSpan.startChildSpan(`background-gather:${this.backgroundListenTask.summary}`);
this.backgroundListenTask.span = span;
this.backgroundListenTask.ctx = ctx;
this.backgroundListenTask.exec(this, resources)
.then(() => {
this.logger.info('CallSession:startBackgroundListen: listen completed');
this.backgroundListenTask && this.backgroundListenTask.removeAllListeners();
this.backgroundListenTask && this.backgroundListenTask.span.end();
this.backgroundListenTask = null;
return;
})
.catch((err) => {
this.logger.info({err}, 'CallSession:startBackgroundListen: listen threw error');
this.backgroundListenTask && this.backgroundListenTask.removeAllListeners();
this.backgroundListenTask && this.backgroundListenTask.span.end();
this.backgroundListenTask = null;
});
} catch (err) {
this.logger.info({err, opts}, 'CallSession:startBackgroundListen - Error creating listen task');
}
}
async stopBackgroundListen() {
try {
if (this.backgroundListenTask) {
this.backgroundListenTask.removeAllListeners();
this.backgroundListenTask.kill().catch(() => {});
}
} catch (err) {
this.logger.info({err}, 'CallSession:stopBackgroundListen - Error stopping listen task');
}
this.backgroundListenTask = null;
}
async enableBotMode(gather, autoEnable) {
try {
const t = normalizeJambones(this.logger, [gather]);
const task = makeTask(this.logger, t[0]);
let task;
if (this.isBotModeEnabled) {
const currInput = this.backgroundGatherTask.input;
const newInput = task.input;
task = this.backgroundTaskManager.getTask('bargeIn');
const currInput = task.input;
const t = normalizeJambones(this.logger, [gather]);
const tmpTask = makeTask(this.logger, t[0]);
const newInput = tmpTask.input;
if (JSON.stringify(currInput) === JSON.stringify(newInput)) {
this.logger.info('CallSession:enableBotMode - bot mode currently enabled, ignoring request to start again');
return;
}
else {
} else {
this.logger.info({currInput, newInput},
'CallSession:enableBotMode - restarting background gather to apply new input type');
this.backgroundGatherTask.sticky = false;
'CallSession:enableBotMode - restarting background bargeIn to apply new input type');
task.sticky = false;
await this.disableBotMode();
}
}
this.backgroundGatherTask = task;
this._bargeInEnabled = true;
this.backgroundGatherTask
.once('dtmf', this._clearTasks.bind(this, this.backgroundGatherTask))
.once('vad', this._clearTasks.bind(this, this.backgroundGatherTask))
.once('transcription', this._clearTasks.bind(this, this.backgroundGatherTask))
.once('timeout', this._clearTasks.bind(this, this.backgroundGatherTask));
this.logger.info({gather}, 'CallSession:enableBotMode - starting background gather');
const resources = await this._evaluatePreconditions(this.backgroundGatherTask);
const {span, ctx} = this.rootSpan.startChildSpan(`background-gather:${this.backgroundGatherTask.summary}`);
this.backgroundGatherTask.span = span;
this.backgroundGatherTask.ctx = ctx;
this.backgroundGatherTask.sticky = autoEnable;
this.backgroundGatherTask.exec(this, resources)
.then(() => {
this.logger.info('CallSession:enableBotMode: gather completed');
this.backgroundGatherTask && this.backgroundGatherTask.removeAllListeners();
this.backgroundGatherTask && this.backgroundGatherTask.span.end();
const sticky = this.backgroundGatherTask?.sticky;
this.backgroundGatherTask = null;
if (sticky && !this.callGone && !this._stopping && this._bargeInEnabled) {
this.logger.info('CallSession:enableBotMode: restarting background gather');
setImmediate(() => this.enableBotMode(gather, true));
}
return;
})
.catch((err) => {
this.logger.info({err}, 'CallSession:enableBotMode: gather threw error');
this.backgroundGatherTask && this.backgroundGatherTask.removeAllListeners();
this.backgroundGatherTask && this.backgroundGatherTask.span.end();
this.backgroundGatherTask = null;
});
task = await this.backgroundTaskManager.newTask('bargeIn', gather);
task.sticky = autoEnable;
task.once('bargeIn-done', () => {
if (this.requestor instanceof WsRequestor) {
try {
this.kill(true);
} catch (err) {}
}
});
this.logger.info({gather}, 'CallSession:enableBotMode - starting background bargeIn');
} catch (err) {
this.logger.info({err, gather}, 'CallSession:enableBotMode - Error creating gather task');
this.logger.info({err, gather}, 'CallSession:enableBotMode - Error creating bargeIn task');
}
}
async disableBotMode() {
this._bargeInEnabled = false;
if (this.backgroundGatherTask) {
try {
this.backgroundGatherTask.removeAllListeners();
await this.backgroundGatherTask.kill();
} catch (err) {}
this.backgroundGatherTask = null;
}
this.backgroundTaskManager.stop('bargeIn');
}
setConferenceDetails(memberId, confName, confUuid) {
@@ -626,15 +673,28 @@ class CallSession extends Emitter {
* Check for speech credentials for the specified vendor
* @param {*} vendor - google or aws
*/
getSpeechCredentials(vendor, type) {
getSpeechCredentials(vendor, type, label = null) {
const {writeAlerts, AlertType} = this.srf.locals;
if (this.accountInfo.speech && this.accountInfo.speech.length > 0) {
const credential = this.accountInfo.speech.find((s) => s.vendor === vendor);
// first check whether the account level has the expected credential
let credential = this.accountInfo.speech.find((s) => s.vendor === vendor &&
s.label === label && s.account_sid);
if (!credential) {
// check if SP level has expected credential
credential = this.accountInfo.speech.find((s) => s.vendor === vendor &&
s.label === label && !s.account_sid);
}
if (credential && (
(type === 'tts' && credential.use_for_tts) ||
(type === 'stt' && credential.use_for_stt)
)) {
this.logger.info(
`Speech vendor: ${credential.vendor}${credential.label ? `, label: ${credential.label}` : ''} selected`);
if ('google' === vendor) {
if (type === 'tts' && !credential.tts_tested_ok ||
type === 'stt' && !credential.stt_tested_ok) {
return;
}
try {
const cred = JSON.parse(credential.service_key.replace(/\n/g, '\\n'));
return {
@@ -666,8 +726,10 @@ class CallSession extends Emitter {
region: credential.region,
use_custom_stt: credential.use_custom_stt,
custom_stt_endpoint: credential.custom_stt_endpoint,
custom_stt_endpoint_url: credential.custom_stt_endpoint_url,
use_custom_tts: credential.use_custom_tts,
custom_tts_endpoint: credential.custom_tts_endpoint
custom_tts_endpoint: credential.custom_tts_endpoint,
custom_tts_endpoint_url: credential.custom_tts_endpoint_url
};
}
else if ('wellsaid' === vendor) {
@@ -712,7 +774,28 @@ class CallSession extends Emitter {
riva_server_uri: credential.riva_server_uri
};
}
else if (vendor.startsWith('custom:')) {
else if ('cobalt' === vendor) {
return {
speech_credential_sid: credential.speech_credential_sid,
cobalt_server_uri: credential.cobalt_server_uri
};
} else if ('elevenlabs' === vendor) {
return {
api_key: credential.api_key,
model_id: credential.model_id,
options: credential.options
};
} else if ('assemblyai' === vendor) {
return {
speech_credential_sid: credential.speech_credential_sid,
api_key: credential.api_key
};
} else if ('whisper' === vendor) {
return {
api_key: credential.api_key,
model_id: credential.model_id
};
} else if (vendor.startsWith('custom:')) {
return {
speech_credential_sid: credential.speech_credential_sid,
auth_token: credential.auth_token,
@@ -750,7 +833,7 @@ class CallSession extends Emitter {
let skip = false;
this.currentTask = task;
if (TaskName.Gather === task.name && this.isBotModeEnabled) {
if (this.backgroundGatherTask.updateTaskInProgress(task) !== false) {
if (this.backgroundTaskManager.getTask('bargeIn').updateTaskInProgress(task) !== false) {
this.logger.info(`CallSession:exec skipping #${stackNum}:${taskNum}: ${task.name}`);
skip = true;
}
@@ -782,23 +865,16 @@ class CallSession extends Emitter {
}
}
if (0 === this.tasks.length && this.requestor instanceof WsRequestor && !this.callGone) {
//let span;
if (0 === this.tasks.length &&
this.requestor instanceof WsRequestor &&
!this.requestor.closedGracefully &&
!this.callGone &&
!this.isConfirmCallSession
) {
try {
//const {span} = this.rootSpan.startChildSpan('waiting for commands');
//const {reason, queue, command} = await this._awaitCommandsOrHangup();
/*
span.setAttributes({
'completion.reason': reason,
'async.request.queue': queue,
'async.request.command': command
});
span.end();
*/
await this._awaitCommandsOrHangup();
if (this.callGone) break;
} catch (err) {
//span.end();
this.logger.info(err, 'CallSession:exec - error waiting for new commands');
break;
}
@@ -978,6 +1054,39 @@ class CallSession extends Emitter {
listenTask.updateListen(opts.listen_status);
}
/**
* perform live call control -- change Transcribe status
* @param {object} opts
* @param {string} opts.transcribe_status - 'pause' or 'resume'
*/
async _lccTranscribeStatus(opts) {
const task = this.currentTask;
if (!task || ![TaskName.Dial, TaskName.Transcribe].includes(task.name)) {
return this.logger.info(`CallSession:_lccTranscribeStatus - invalid transcribe_status in task ${task?.name}`);
}
const transcribeTask = task.name === TaskName.Transcribe ? task : task.transcribeTask;
if (!transcribeTask) {
return this.logger
.info('CallSession:_lccTranscribeStatus - invalid transcribe_status: Dial does not have a Transcribe');
}
transcribeTask.updateTranscribe(opts.transcribe_status);
}
/**
* perform live call control -- update customer data
* @param {object} opts
* @param {object} opts.tag - customer data
*/
_lccTag(opts) {
const {tag} = opts;
if (typeof tag !== 'object' || Array.isArray(tag) || tag === null) {
this.logger.info('CallSession:_lccTag - invalid tag data');
return;
}
this.logger.debug({customerData: tag}, 'CallSession:_lccTag set customer data in callInfo');
this.callInfo.customerData = tag;
}
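/* As a hedged illustration of the new live-call-control options handled above
 * (values hypothetical), an updateCall request body or the equivalent WebSocket
 * command might look like:
 *
 *   // pause a transcribe running under the current Dial or Transcribe task
 *   const pauseTranscribe = { transcribe_status: 'pause' };
 *
 *   // attach customer data to the call's callInfo
 *   const tagCall = { tag: { department: 'support', priority: 'high' } };
 */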
async _lccMuteStatus(callSid, mute) {
// this whole thing requires us to be in a Dial or Conference verb
const task = this.currentTask;
@@ -1028,6 +1137,38 @@ class CallSession extends Emitter {
}
}
/**
* perform live call control - send RFC 2833 DTMF
* @param {object} opts
* @param {string} opts.dtmf.digit - DTMF digit
* @param {number} [opts.dtmf.duration] - optional duration (defaults to 250)
*/
async _lccDtmf(opts, callSid) {
const {dtmf} = opts;
const {digit, duration = 250} = dtmf;
if (!this.hasStableDialog) {
this.logger.info('CallSession:_lccDtmf - invalid command as we do not have a stable call');
return;
}
try {
const dlg = callSid === this.callSid ? this.dlg : this.currentTask.dlg;
const res = await dlg.request({
method: 'INFO',
headers: {
'Content-Type': 'application/dtmf',
'X-Reason': 'Dtmf'
},
body: `Signal=${digit}
Duration=${duration} `
});
this.logger.debug({res}, `CallSession:_lccDtmf
got response to INFO DTMF digit=${digit} and duration=${duration}`);
return res;
} catch (err) {
this.logger.error({err}, 'CallSession:_lccDtmf - error sending INFO RFC 2833 DTMF');
}
}
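/* A hedged sketch of the payload consumed by _lccDtmf (digit and duration hypothetical);
 * it produces the SIP INFO with Content-Type application/dtmf built above:
 *
 *   const sendDtmf = { dtmf: { digit: '5', duration: 250 } };
 */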
/**
* perform live call control -- whisper to one party or the other on a call
* @param {array} opts - array of play or say tasks
@@ -1082,7 +1223,7 @@ class CallSession extends Emitter {
/**
* perform live call control
* @param {object} opts - update instructions
* @param {string} callSid - identifies call toupdate
* @param {string} callSid - identifies call to update
*/
async updateCall(opts, callSid) {
this.logger.debug(opts, 'CallSession:updateCall');
@@ -1096,6 +1237,9 @@ class CallSession extends Emitter {
if (opts.listen_status) {
await this._lccListenStatus(opts);
}
if (opts.transcribe_status) {
await this._lccTranscribeStatus(opts);
}
else if (opts.mute_status) {
await this._lccMuteStatus(callSid, opts.mute_status === 'mute');
}
@@ -1108,13 +1252,18 @@ class CallSession extends Emitter {
else if (opts.sip_request) {
const res = await this._lccSipRequest(opts, callSid);
return {status: res.status, reason: res.reason};
} else if (opts.dtmf) {
await this._lccDtmf(opts, callSid);
}
else if (opts.record) {
await this.notifyRecordOptions(opts.record);
}
else if (opts.tag) {
return this._lccTag(opts);
}
// whisper may be the only thing we are asked to do, or it may that
// we are doing a whisper after having muted, paused reccording etc..
// we are doing a whisper after having muted, paused recording etc..
if (opts.whisper) {
return this._lccWhisper(opts, callSid);
}
@@ -1172,7 +1321,6 @@ class CallSession extends Emitter {
get the full transcription.
*/
delete t.bargeIn.enable;
this._bargeInEnabled = false;
this.logger.info('CallSession:kill - found bargein disabled in the stack, clearing to that point');
break;
}
@@ -1183,6 +1331,35 @@ class CallSession extends Emitter {
this.taskIdx = 0;
}
_preCacheAudio(newTasks) {
for (const task of newTasks) {
if (task.name === TaskName.Config && task.hasSynthesizer) {
/* if they change synthesizer settings don't try to precache */
break;
}
if (task.name === TaskName.Say) {
/* identify vendor language, voice, and label */
const vendor = task.synthesizer.vendor && task.synthesizer.vendor !== 'default' ?
task.synthesizer.vendor :
this.speechSynthesisVendor;
const language = task.synthesizer.language && task.synthesizer.language !== 'default' ?
task.synthesizer.language :
this.speechSynthesisLanguage;
const voice = task.synthesizer.voice && task.synthesizer.voice !== 'default' ?
task.synthesizer.voice :
this.speechSynthesisVoice;
const label = task.synthesizer.label && task.synthesizer.label !== 'default' ?
task.synthesizer.label :
this.speechSynthesisLabel;
this.logger.info({vendor, language, voice, label},
'CallSession:_preCacheAudio - precaching audio for future prompt');
task._synthesizeWithSpecificVendor(this, this.ep, {vendor, language, voice, label, preCache: true})
.catch((err) => this.logger.error(err, 'CallSession:_preCacheAudio - error precaching audio'));
}
}
}
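/* As a hedged example of the precedence applied above (text and settings hypothetical),
 * a say task arriving while JAMBONES_EAGERLY_PRE_CACHE_AUDIO is set is synthesized ahead
 * of execution, with any field left as 'default' resolved from the session:
 *
 *   const injected = [{
 *     verb: 'say',
 *     text: 'Please hold while we connect you.',
 *     synthesizer: { vendor: 'default', language: 'default', voice: 'en-GB-Wavenet-B' }
 *   }];
 *   // vendor and language resolve to cs.speechSynthesisVendor / cs.speechSynthesisLanguage;
 *   // the explicit voice is used as given.
 */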
/**
* Append tasks to the current execution stack UNLESS there is a gather in the stack.
* in that case, insert the tasks before the gather AND if the tasks include
@@ -1240,10 +1417,12 @@ class CallSession extends Emitter {
this.replaceApplication(t);
}
else if (JAMBONES_INJECT_CONTENT) {
if (JAMBONES_EAGERLY_PRE_CACHE_AUDIO) this._preCacheAudio(t);
this._injectTasks(t);
this.logger.info({tasks: listTaskNames(this.tasks)}, 'CallSession:_onCommand - updated task list');
}
else {
if (JAMBONES_EAGERLY_PRE_CACHE_AUDIO) this._preCacheAudio(t);
this.tasks.push(...t);
this.logger.info({tasks: listTaskNames(this.tasks)}, 'CallSession:_onCommand - updated task list');
}
@@ -1272,6 +1451,10 @@ class CallSession extends Emitter {
this._lccListenStatus(data);
break;
case 'transcribe:status':
this._lccTranscribeStatus(data);
break;
case 'whisper':
this._lccWhisper(data, call_sid);
break;
@@ -1283,6 +1466,13 @@ class CallSession extends Emitter {
});
break;
case 'dtmf':
this._lccDtmf(data, call_sid)
.catch((err) => {
this.logger.info({err, data}, 'CallSession:_onCommand - error sending RFC 2833 DTMF');
});
break;
default:
this.logger.info(`CallSession:_onCommand - invalid command ${command}`);
}
@@ -1341,7 +1531,10 @@ class CallSession extends Emitter {
}
// we are going from an early media connection to answer
await this.propagateAnswer();
if (this.direction === CallDirection.Inbound) {
// only do this for inbound calls.
await this.propagateAnswer();
}
return {
...resources,
...(this.isSipRecCallSession && {ep2: this.ep2})
@@ -1361,6 +1554,8 @@ class CallSession extends Emitter {
this.ep = ep;
this.logger.debug(`allocated endpoint ${ep.uuid}`);
this._configMsEndpoint();
this.ep.on('destroy', () => {
this.logger.debug(`endpoint was destroyed!! ${this.ep.uuid}`);
});
@@ -1431,6 +1626,7 @@ class CallSession extends Emitter {
return;
}
this.ep = await this.ms.createEndpoint({remoteSdp: this.dlg.remote.sdp});
this._configMsEndpoint();
await this.dlg.modify(this.ep.local.sdp);
this.logger.debug('CallSession:replaceEndpoint completed');
@@ -1461,6 +1657,8 @@ class CallSession extends Emitter {
this.notifier && this.notifier.close();
this.rootSpan && this.rootSpan.end();
// close all background tasks
this.backgroundTaskManager.stopAll();
}
/**
@@ -1512,11 +1710,32 @@ class CallSession extends Emitter {
}
this.dlg.on('modify', this._onReinvite.bind(this));
this.dlg.on('refer', this._onRefer.bind(this));
if (this.sipRequestWithinDialogHook) {
this.dlg.on('info', this._onRequestWithinDialog.bind(this));
this.dlg.on('message', this._onRequestWithinDialog.bind(this));
}
this.logger.debug(`CallSession:propagateAnswer - answered callSid ${this.callSid}`);
}
}
async _onRequestWithinDialog(req, res) {
if (!this.sipRequestWithinDialogHook) {
return;
}
const sip_method = req.method;
if (sip_method === 'INFO') {
res.send(200);
} else if (sip_method === 'MESSAGE') {
res.send(202);
} else {
this.logger.info(`CallSession:_onRequestWithinDialog unsupported method: ${req.method}`);
res.send(501);
return;
}
const params = {sip_method, sip_body: req.body, sip_headers: req.headers};
this.currentTask.performHook(this, this.sipRequestWithinDialogHook, params);
}
async _onReinvite(req, res) {
try {
if (this.ep) {
@@ -1525,9 +1744,14 @@ class CallSession extends Emitter {
res.send(200, {body: this.ep.local.sdp});
}
else {
const newSdp = await this.ep.modify(req.body);
res.send(200, {body: newSdp});
this.logger.info({offer: req.body, answer: newSdp}, 'handling reINVITE');
if (this.currentTask.name === TaskName.Dial && this.currentTask.isOnHold) {
this.logger.info('onholdMusic reINVITE after media has been released');
await this.currentTask.handleReinviteAfterMediaReleased(req, res);
} else {
const newSdp = await this.ep.modify(req.body);
res.send(200, {body: newSdp});
this.logger.info({offer: req.body, answer: newSdp}, 'handling reINVITE');
}
}
}
else if (this.currentTask && this.currentTask.name === TaskName.Dial) {
@@ -1574,6 +1798,7 @@ class CallSession extends Emitter {
}
if (!this.ep) {
this.ep = await this.ms.createEndpoint({remoteSdp: this.req.body});
this._configMsEndpoint();
}
return {ms: this.ms, ep: this.ep};
}
@@ -1587,7 +1812,7 @@ class CallSession extends Emitter {
const pp = this._pool.promise();
try {
this.logger.info({accountSid: this.accountSid}, 'performQueueWebhook: looking up account');
const [r] = await pp.query(sqlRetrieveQueueEventHook, this.accountSid);
const [r] = await pp.query(sqlRetrieveQueueEventHook, [this.accountSid]);
if (0 === r.length) {
this.logger.info({accountSid: this.accountSid}, 'performQueueWebhook: no webhook provisioned');
this.queueEventHookRequestor = null;
@@ -1690,7 +1915,7 @@ class CallSession extends Emitter {
wrapDialog(dlg) {
dlg.connectTime = moment();
const origDestroy = dlg.destroy.bind(dlg);
dlg.destroy = () => {
dlg.destroy = (opts) => {
if (dlg.connected) {
dlg.connected = false;
dlg.destroy = origDestroy;
@@ -1699,7 +1924,7 @@ class CallSession extends Emitter {
this.emit('callStatusChange', {callStatus: CallStatus.Completed, duration});
this.logger.debug('CallSession: call terminated by jambonz');
this.rootSpan.setAttributes({'call.termination': 'hangup by jambonz'});
origDestroy().catch((err) => this.logger.info({err}, 'CallSession - error destroying dialog'));
origDestroy(opts).catch((err) => this.logger.info({err}, 'CallSession - error destroying dialog'));
if (this.wakeupResolver) {
this.wakeupResolver({reason: 'session ended'});
this.wakeupResolver = null;
@@ -1728,6 +1953,7 @@ class CallSession extends Emitter {
'X-Reason': 'anchor-media'
}
});
this._configMsEndpoint();
}
async handleReinviteAfterMediaReleased(req, res) {
@@ -1749,6 +1975,16 @@ class CallSession extends Emitter {
async _notifyCallStatusChange({callStatus, sipStatus, sipReason, duration}) {
if (this.callMoved) return;
// manage the record-all-calls feature
if (callStatus === CallStatus.InProgress) {
if (this.accountInfo.account.record_all_calls ||
this.application.record_all_calls) {
this.backgroundTaskManager.newTask('record');
}
} else if (callStatus === CallStatus.Completed) {
this.backgroundTaskManager.stop('record');
}
/* race condition: we hang up at the same time as the caller */
if (callStatus === CallStatus.Completed) {
if (this.notifiedComplete) return;
@@ -1761,6 +1997,15 @@ class CallSession extends Emitter {
this.callInfo.updateCallStatus(callStatus, sipStatus, sipReason);
if (typeof duration === 'number') this.callInfo.duration = duration;
this.executeStatusCallback(callStatus, sipStatus);
// update calls db
//this.logger.debug(`updating redis with ${JSON.stringify(this.callInfo)}`);
this.updateCallStatus(Object.assign({}, this.callInfo.toJSON()), this.serviceUrl)
.catch((err) => this.logger.error(err, 'redis error'));
}
async executeStatusCallback(callStatus, sipStatus) {
const {span} = this.rootSpan.startChildSpan(`call-status:${this.callInfo.callStatus}`);
span.setAttributes(this.callInfo.toJSON());
try {
@@ -1772,11 +2017,12 @@ class CallSession extends Emitter {
span.end();
this.logger.info(err, `CallSession:_notifyCallStatusChange error sending ${callStatus} ${sipStatus}`);
}
}
// update calls db
//this.logger.debug(`updating redis with ${JSON.stringify(this.callInfo)}`);
this.updateCallStatus(Object.assign({}, this.callInfo.toJSON()), this.serviceUrl)
.catch((err) => this.logger.error(err, 'redis error'));
_configMsEndpoint() {
if (this.onHoldMusic) {
this.ep.set({hold_music: `shout://${this.onHoldMusic.replace(/^https?:\/\//, '')}`});
}
}
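/* A quick illustration of the rewrite above (URL hypothetical): an onHoldMusic value of
 * 'https://example.com/moh.mp3' results in
 *
 *   this.ep.set({ hold_music: 'shout://example.com/moh.mp3' });
 */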
/**
@@ -1815,6 +2061,18 @@ class CallSession extends Emitter {
} catch (err) {}
}
}
/**
* startBackgroundTask - Start background task
*/
async startBackgroundTask(type, opts) {
await this.backgroundTaskManager.newTask(type, opts);
}
stopBackgroundTask(type) {
this.backgroundTaskManager.stop(type);
}
}
module.exports = CallSession;

View File

@@ -8,7 +8,7 @@ const moment = require('moment');
* @extends CallSession
*/
class RestCallSession extends CallSession {
constructor({logger, application, srf, req, ep, tasks, callInfo, accountInfo, rootSpan}) {
constructor({logger, application, srf, req, ep, ep2, tasks, callInfo, accountInfo, rootSpan}) {
super({
logger,
application,
@@ -21,6 +21,11 @@ class RestCallSession extends CallSession {
});
this.req = req;
this.ep = ep;
this.ep2 = ep2;
// keep restDialTask reference for closing AMD
if (tasks.length) {
this.restDialTask = tasks[0];
}
this.on('callStatusChange', this._notifyCallStatusChange.bind(this));
this._notifyCallStatusChange({
@@ -44,6 +49,9 @@ class RestCallSession extends CallSession {
* This is invoked when the called party hangs up, in order to calculate the call duration.
*/
_callerHungup() {
if (this.restDialTask) {
this.restDialTask.turnOffAmd();
}
this.callInfo.callTerminationBy = 'caller';
const duration = moment().diff(this.dlg.connectTime, 'seconds');
this.emit('callStatusChange', {callStatus: CallStatus.Completed, duration});

View File

@@ -48,7 +48,7 @@ class Conference extends Task {
this.confName = this.data.name;
[
'beep', 'startConferenceOnEnter', 'endConferenceOnExit', 'joinMuted',
'maxParticipants', 'waitHook', 'statusHook', 'endHook', 'enterHook'
'maxParticipants', 'waitHook', 'statusHook', 'endHook', 'enterHook', 'endConferenceDuration'
].forEach((attr) => this[attr] = this.data[attr]);
this.record = this.data.record || {};
this.statusEvents = [];
@@ -114,7 +114,12 @@ class Conference extends Task {
}
this.emitter.emit('kill');
await this._doFinalMemberCheck(cs);
if (this.ep && this.ep.connected) this.ep.conn.removeAllListeners('esl::event::CUSTOM::*') ;
if (this.ep && this.ep.connected) {
this.ep.conn.removeAllListeners('esl::event::CUSTOM::*');
this.ep.api(`conference ${this.confName} kick ${this.memberId}`)
.catch((err) => this.logger.info({err}, 'Error kicking participant'));
}
cs.clearConferenceDetails();
this.notifyTaskDone();
}
@@ -339,9 +344,13 @@ class Conference extends Task {
}
const opts = {};
if (this.endConferenceOnExit) Object.assign(opts, {flags: {endconf: true}});
if (this.startConferenceOnEnter) Object.assign(opts, {flags: {moderator: true}});
if (this.joinMuted) Object.assign(opts, {flags: {mute: true}});
if (this.endConferenceOnExit || this.startConferenceOnEnter || this.joinMuted) {
Object.assign(opts, {flags: {
...(this.endConferenceOnExit && {endconf: true}),
...(this.startConferenceOnEnter && {moderator: true}),
...(this.joinMuted && {joinMuted: true}),
}});
}
try {
const {memberId, confUuid} = await this.ep.join(this.confName, opts);
@@ -384,6 +393,11 @@ class Conference extends Task {
this.ep.api('conference', `${this.confName} set max_members ${this.maxParticipants}`)
.catch((err) => this.logger.error(err, `Error setting max participants to ${this.maxParticipants}`));
}
if (typeof this.endConferenceDuration === 'number' && this.endConferenceDuration >= 0) {
this.ep.api('conference', `${this.confName} set endconference_grace_time ${this.endConferenceDuration}`)
.catch((err) => this.logger.error(err, `Error setting end conference time to ${this.endConferenceDuration}`));
}
}
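/* For illustration, a conference verb exercising the new endConferenceDuration option
 * (values hypothetical; the value is passed through to FreeSWITCH as endconference_grace_time):
 *
 *   const conferenceVerb = {
 *     verb: 'conference',
 *     name: 'support-room',
 *     startConferenceOnEnter: true,
 *     endConferenceOnExit: true,
 *     endConferenceDuration: 30
 *   };
 */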
/**

View File

@@ -9,7 +9,8 @@ class TaskConfig extends Task {
'recognizer',
'bargeIn',
'record',
'listen'
'listen',
'transcribe'
].forEach((k) => this[k] = this.data[k] || {});
if ('notifyEvents' in this.data) {
@@ -30,6 +31,13 @@ class TaskConfig extends Task {
if (this.bargeIn[k]) this.gatherOpts[k] = this.bargeIn[k];
});
}
if (this.transcribe?.enable) {
this.transcribeOpts = {
verb: 'transcribe',
...this.transcribe
};
delete this.transcribeOpts.enable;
}
if (this.data.reset) {
if (typeof this.data.reset === 'string') this.data.reset = [this.data.reset];
@@ -37,9 +45,15 @@ class TaskConfig extends Task {
else this.data.reset = [];
if (this.bargeIn.sticky) this.autoEnable = true;
this.preconditions = (this.bargeIn.enable || this.record?.action || this.listen?.url || this.data.amd) ?
this.preconditions = (this.bargeIn.enable ||
this.record?.action ||
this.listen?.url ||
this.data.amd ||
this.transcribe?.enable) ?
TaskPreconditions.Endpoint :
TaskPreconditions.None;
this.onHoldMusic = this.data.onHoldMusic;
}
get name() { return TaskName.Config; }
@@ -48,6 +62,7 @@ class TaskConfig extends Task {
get hasRecognizer() { return Object.keys(this.recognizer).length; }
get hasRecording() { return Object.keys(this.record).length; }
get hasListen() { return Object.keys(this.listen).length; }
get hasTranscribe() { return Object.keys(this.transcribe).length; }
get summary() {
const phrase = [];
@@ -70,8 +85,12 @@ class TaskConfig extends Task {
if (this.hasListen) {
phrase.push(this.listen.enable ? `listen ${this.listen.url}` : 'stop listen');
}
if (this.hasTranscribe) {
phrase.push(this.transcribe.enable ? `transcribe ${this.transcribe.transcriptionHook}` : 'stop transcribe');
}
if (this.data.amd) phrase.push('enable amd');
if (this.notifyEvents) phrase.push(`event notification ${this.notifyEvents ? 'on' : 'off'}`);
if (this.onHoldMusic) phrase.push(`onHoldMusic: ${this.onHoldMusic}`);
return `${this.name}{${phrase.join(',')}}`;
}
@@ -83,6 +102,10 @@ class TaskConfig extends Task {
cs.notifyEvents = !!this.data.notifyEvents;
}
if (this.onHoldMusic) {
cs.onHoldMusic = this.onHoldMusic;
}
if (this.data.amd) {
this.startAmd = cs.startAmd;
this.stopAmd = cs.stopAmd;
@@ -102,24 +125,58 @@ class TaskConfig extends Task {
});
if (this.hasSynthesizer) {
cs.synthesizer = this.synthesizer;
cs.speechSynthesisVendor = this.synthesizer.vendor !== 'default'
? this.synthesizer.vendor
: cs.speechSynthesisVendor;
cs.speechSynthesisLabel = this.synthesizer.label !== 'default'
? this.synthesizer.label
: cs.speechSynthesisLabel;
cs.speechSynthesisLanguage = this.synthesizer.language !== 'default'
? this.synthesizer.language
: cs.speechSynthesisLanguage;
cs.speechSynthesisVoice = this.synthesizer.voice !== 'default'
? this.synthesizer.voice
: cs.speechSynthesisVoice;
// fallback vendor
cs.fallbackSpeechSynthesisVendor = this.synthesizer.fallbackVendor !== 'default'
? this.synthesizer.fallbackVendor
: cs.fallbackSpeechSynthesisVendor;
cs.fallbackSpeechSynthesisLabel = this.synthesizer.fallbackLabel !== 'default'
? this.synthesizer.fallbackLabel
: cs.fallbackSpeechSynthesisLabel;
cs.fallbackSpeechSynthesisLanguage = this.synthesizer.fallbackLanguage !== 'default'
? this.synthesizer.fallbackLanguage
: cs.fallbackSpeechSynthesisLanguage;
cs.fallbackSpeechSynthesisVoice = this.synthesizer.fallbackVoice !== 'default'
? this.synthesizer.fallbackVoice
: cs.fallbackSpeechSynthesisVoice;
this.logger.info({synthesizer: this.synthesizer}, 'Config: updated synthesizer');
}
if (this.hasRecognizer) {
cs.recognizer = this.recognizer;
cs.speechRecognizerVendor = this.recognizer.vendor !== 'default'
? this.recognizer.vendor
: cs.speechRecognizerVendor;
cs.speechRecognizerLabel = this.recognizer.label !== 'default'
? this.recognizer.label
: cs.speechRecognizerLabel;
cs.speechRecognizerLanguage = this.recognizer.language !== 'default'
? this.recognizer.language
: cs.speechRecognizerLanguage;
//fallback
cs.fallbackSpeechRecognizerVendor = this.recognizer.fallbackVendor !== 'default'
? this.recognizer.fallbackVendor
: cs.fallbackSpeechRecognizerVendor;
cs.fallbackSpeechRecognizerLabel = this.recognizer.fallbackLabel !== 'default'
? this.recognizer.fallbackLabel
: cs.fallbackSpeechRecognizerLabel;
cs.fallbackSpeechRecognizerLanguage = this.recognizer.fallbackLanguage !== 'default'
? this.recognizer.fallbackLanguage
: cs.fallbackSpeechRecognizerLanguage;
cs.isContinuousAsr = typeof this.recognizer.asrTimeout === 'number' ? true : false;
if (cs.isContinuousAsr) {
cs.asrTimeout = this.recognizer.asrTimeout;
@@ -171,12 +228,30 @@ class TaskConfig extends Task {
const {enable, ...opts} = this.listen;
if (enable) {
this.logger.debug({opts}, 'Config: enabling listen');
cs.startBackgroundListen({verb: 'listen', ...opts});
cs.startBackgroundTask('listen', {verb: 'listen', ...opts});
} else {
this.logger.info('Config: disabling listen');
cs.stopBackgroundListen();
cs.stopBackgroundTask('listen');
}
}
if (this.hasTranscribe) {
if (this.transcribe.enable) {
this.transcribeOpts.recognizer = this.hasRecognizer ?
this.recognizer :
{
vendor: cs.speechRecognizerVendor,
language: cs.speechRecognizerLanguage
};
this.logger.debug(this.transcribeOpts, 'Config: enabling transcribe');
cs.startBackgroundTask('transcribe', this.transcribeOpts);
} else {
this.logger.info('Config: disabling transcribe');
cs.stopBackgroundTask('transcribe');
}
}
if (this.data.sipRequestWithinDialogHook) {
cs.sipRequestWithinDialogHook = this.data.sipRequestWithinDialogHook;
}
}
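/* A hedged sketch of the config verb addition handled above (hook URL hypothetical):
 * enabling a background transcribe that inherits the session recognizer when none is
 * supplied inline:
 *
 *   const configVerb = {
 *     verb: 'config',
 *     transcribe: {
 *       enable: true,
 *       transcriptionHook: 'https://example.com/transcripts'
 *     }
 *   };
 *   // with no recognizer on the verb, transcribeOpts.recognizer falls back to
 *   // {vendor: cs.speechRecognizerVendor, language: cs.speechRecognizerLanguage}
 */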
async kill(cs) {

View File

@@ -16,6 +16,7 @@ class TaskDequeue extends Task {
this.queueName = this.data.name;
this.timeout = this.data.timeout || 0;
this.beep = this.data.beep === true;
this.callSid = this.data.callSid;
this.emitter = new Emitter();
this.state = DequeueResults.Timeout;
@@ -53,7 +54,7 @@ class TaskDequeue extends Task {
}
_getMemberFromQueue(cs) {
const {popFront} = cs.srf.locals.dbHelpers;
const {retrieveFromSortedSet, retrieveByPatternSortedSet} = cs.srf.locals.dbHelpers;
return new Promise(async(resolve) => {
let timer;
@@ -70,7 +71,12 @@ class TaskDequeue extends Task {
do {
try {
const url = await popFront(this.queueName);
let url;
if (this.callSid) {
url = await retrieveByPatternSortedSet(this.queueName, `*${this.callSid}`);
} else {
url = await retrieveFromSortedSet(this.queueName);
}
if (url) {
found = true;
clearTimeout(timer);
@@ -78,7 +84,7 @@ class TaskDequeue extends Task {
resolve(url);
}
} catch (err) {
this.logger.debug({err}, 'TaskDequeue:_getMemberFromQueue error popFront');
this.logger.debug({err}, 'TaskDequeue:_getMemberFromQueue error retrieving from sorted set');
}
await sleepFor(5000);
} while (!this.killed && !timedout && !found);
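/* For illustration, a dequeue verb using the new callSid option to pull a specific member
 * from the queue rather than the head of the sorted set (sid hypothetical):
 *
 *   const dequeueVerb = {
 *     verb: 'dequeue',
 *     name: 'support',
 *     callSid: '8e26a3a2-1111-2222-3333-444455556666',
 *     timeout: 60
 *   };
 */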

View File

@@ -12,10 +12,13 @@ const assert = require('assert');
const placeCall = require('../utils/place-outdial');
const sessionTracker = require('../session/session-tracker');
const DtmfCollector = require('../utils/dtmf-collector');
const ConfirmCallSession = require('../session/confirm-call-session');
const dbUtils = require('../utils/db-utils');
const debug = require('debug')('jambonz:feature-server');
const {parseUri} = require('drachtio-srf');
const {ANCHOR_MEDIA_ALWAYS} = require('../config');
const {ANCHOR_MEDIA_ALWAYS, JAMBONZ_DISABLE_DIAL_PAI_HEADER} = require('../config');
const { isOnhold, isOpusFirst } = require('../utils/sdp-utils');
const { normalizeJambones } = require('@jambonz/verb-specifications');
function parseDtmfOptions(logger, dtmfCapture) {
let parentDtmfCollector, childDtmfCollector;
@@ -135,8 +138,13 @@ class TaskDial extends Task {
get name() { return TaskName.Dial; }
get isOnHold() {
return this.isIncomingLegHold || this.isOutgoingLegHold;
}
get canReleaseMedia() {
const keepAnchor = this.data.anchorMedia ||
this.cs.isBackGroundListen ||
ANCHOR_MEDIA_ALWAYS ||
this.listenTask ||
this.transcribeTask ||
@@ -166,6 +174,16 @@ class TaskDial extends Task {
async exec(cs) {
await super.exec(cs);
try {
if (this.listenTask) {
const {span, ctx} = this.startChildSpan(`nested:${this.listenTask.summary}`);
this.listenTask.span = span;
this.listenTask.ctx = ctx;
}
if (this.transcribeTask) {
const {span, ctx} = this.startChildSpan(`nested:${this.transcribeTask.summary}`);
this.transcribeTask.span = span;
this.transcribeTask.ctx = ctx;
}
if (this.data.amd) {
this.startAmd = cs.startAmd;
this.stopAmd = cs.stopAmd;
@@ -186,6 +204,7 @@ class TaskDial extends Task {
await this.performAction(this.results, this.killReason !== KillReason.Replaced);
this._removeDtmfDetection(cs.dlg);
this._removeDtmfDetection(this.dlg);
this._removeSipIndialogRequestListener(this.dlg);
} catch (err) {
this.logger.error({err}, 'TaskDial:exec terminating with error');
this.kill(cs);
@@ -214,7 +233,7 @@ class TaskDial extends Task {
}
this._removeDtmfDetection(cs.dlg);
this._removeDtmfDetection(this.dlg);
this._killOutdials();
await this._killOutdials();
if (this.sd) {
this.sd.kill();
this.sd.removeAllListeners();
@@ -223,10 +242,12 @@ class TaskDial extends Task {
if (this.callSid) sessionTracker.remove(this.callSid);
if (this.listenTask) {
await this.listenTask.kill(cs);
this.listenTask.span.end();
this.listenTask = null;
}
if (this.transcribeTask) {
await this.transcribeTask.kill(cs);
this.transcribeTask.span.end();
this.transcribeTask = null;
}
this.notifyTaskDone();
@@ -301,18 +322,41 @@ class TaskDial extends Task {
const to = parseUri(req.getParsedHeader('Refer-To').uri);
const by = parseUri(req.getParsedHeader('Referred-By').uri);
this.logger.info({to}, 'refer to parsed');
await cs.requestor.request('verb:hook', this.referHook, {
...callInfo,
const json = await cs.requestor.request('verb:hook', this.referHook, {
...(callInfo.toJSON()),
refer_details: {
sip_refer_to: req.get('Refer-To'),
sip_referred_by: req.get('Referred-By'),
sip_user_agent: req.get('User-Agent'),
refer_to_user: to.user,
referred_by_user: by.user,
refer_to_user: to.scheme === 'tel' ? to.number : to.user,
referred_by_user: by.scheme === 'tel' ? by.number : by.user,
referring_call_sid,
referred_call_sid
}
}, httpHeaders);
if (json && Array.isArray(json)) {
try {
const logger = isChild ? this.logger : this.sd.logger;
const tasks = normalizeJambones(logger, json).map((tdata) => makeTask(this.logger, tdata));
if (tasks && tasks.length > 0) {
const legs = isChild ? ['child', 'parent'] : ['parent', 'child'];
logger.info(`Dial:handleRefer received REFER on ${legs[0]} leg, setting new app on ${legs[1]} leg`);
if (isChild) this.redirect(cs, tasks);
else {
logger.info({tasks: json}, 'Dial:handleRefer - new application for child leg');
const adultingSession = await this.sd.doAdulting({
logger,
application: cs.application,
tasks
});
/* need to update the callSid of the child with its own (new) AdultingCallSession */
sessionTracker.add(adultingSession.callSid, adultingSession);
}
}
} catch (err) {
this.logger.info(err, 'Dial:handleRefer - error setting new application after receiving REFER');
}
}
res.send(202);
this.logger.info('DialTask:handleRefer - sent 202 Accepted');
} catch (err) {
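/* As a hedged illustration of the behavior added above, a referHook webhook may now
 * respond with an array of verbs to run on the surviving leg (verbs and values hypothetical):
 *
 *   const referHookResponse = [
 *     { verb: 'say', text: 'Transferring you now.' },
 *     { verb: 'dial', target: [{ type: 'phone', number: '15551234567' }] }
 *   ];
 */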
@@ -333,11 +377,16 @@ class TaskDial extends Task {
sd.removeAllListeners('callCreateFail');
}
_killOutdials() {
async _killOutdials() {
for (const [callSid, sd] of Array.from(this.dials)) {
this.logger.debug(`Dial:_killOutdials killing callSid ${callSid}`);
sd.kill().catch((err) => this.logger.info(err, `Dial:_killOutdials Error killing ${callSid}`));
try {
await sd.kill();
} catch (err) {
this.logger.info(err, `Dial:_killOutdials Error killing ${callSid}`);
}
this._removeHandlers(sd);
this.logger.debug(`Dial:_killOutdials killed callSid ${callSid}`);
}
this.dials.clear();
}
@@ -350,8 +399,14 @@ class TaskDial extends Task {
}
_onInfo(cs, dlg, req, res) {
// in-dialog SIP requests will be handled by another handler
if (cs.sipRequestWithinDialogHook) {
return;
}
res.send(200);
if (req.get('Content-Type') !== 'application/dtmf-relay') return;
if (req.get('Content-Type') !== 'application/dtmf-relay') {
return;
}
const dtmfDetector = dlg === cs.dlg ? this.parentDtmfCollector : this.childDtmfCollector;
if (!dtmfDetector) return;
@@ -380,6 +435,20 @@ class TaskDial extends Task {
}
}
_initSipIndialogRequestListener(cs, dlg) {
dlg.on('info', this._onRequestWithinDialog.bind(this, cs));
dlg.on('message', this._onRequestWithinDialog.bind(this, cs));
}
_removeSipIndialogRequestListener(dlg) {
dlg && dlg.removeAllListeners('message');
dlg && dlg.removeAllListeners('info');
}
async _onRequestWithinDialog(cs, req, res) {
cs._onRequestWithinDialog(req, res);
}
async _initializeInbound(cs) {
const {ep} = await cs._evalEndpointPrecondition(this);
this.epOther = ep;
@@ -408,7 +477,9 @@ class TaskDial extends Task {
this.headers = {
'X-Account-Sid': cs.accountSid,
...(req && req.has('X-CID') && {'X-CID': req.get('X-CID')}),
...(req && req.has('P-Asserted-Identity') && {'P-Asserted-Identity': req.get('P-Asserted-Identity')}),
...(req && req.has('P-Asserted-Identity') && !JAMBONZ_DISABLE_DIAL_PAI_HEADER &&
{'P-Asserted-Identity': req.get('P-Asserted-Identity')}),
...(req && req.has('X-Voip-Carrier-Sid') && {'X-Voip-Carrier-Sid': req.get('X-Voip-Carrier-Sid')}),
// Put headers at the end to make sure opt.headers override all default behavior.
...this.headers
};
@@ -417,7 +488,8 @@ class TaskDial extends Task {
headers: this.headers,
proxy: `sip:${sbcAddress}`,
callingNumber: this.callerId || req.callingNumber,
...(this.callerName && {callingName: this.callerName})
...(this.callerName && {callingName: this.callerName}),
opusFirst: isOpusFirst(this.cs.ep.remote.sdp)
};
const t = this.target.find((t) => t.type === 'teams');
@@ -428,10 +500,14 @@ class TaskDial extends Task {
}
const ms = await cs.getMS();
this.timerRing = setTimeout(() => {
this.timerRing = setTimeout(async() => {
this.logger.info(`Dial:_attemptCall: ring no answer timer ${this.timeout}s exceeded`);
this.timerRing = null;
this._killOutdials();
try {
await this._killOutdials();
} catch (err) {
this.logger.info(err, 'Dial:_attemptCall - error killing outdials');
}
this.result = {
dialCallStatus: CallStatus.NoAnswer,
dialSipStatus: 487
@@ -459,7 +535,7 @@ class TaskDial extends Task {
}
if (t.type === 'phone' && t.trunk) {
const voip_carrier_sid = await lookupCarrier(cs.accountSid, t.trunk);
this.logger.info(`Dial:_attemptCalls: selected ${voip_carrier_sid} for requested carrier: ${t.trunk})`);
this.logger.info(`Dial:_attemptCalls: selected ${voip_carrier_sid} for requested carrier: ${t.trunk}`);
if (voip_carrier_sid) {
opts.headers['X-Requested-Carrier-Sid'] = voip_carrier_sid;
}
@@ -470,9 +546,11 @@ class TaskDial extends Task {
* check if number matches any existing numbers
* */
if (t.type === 'phone' && !t.trunk) {
const voip_carrier_sid = await lookupCarrierByPhoneNumber(req.body.account_sid, t.number);
const str = this.callerId || req.callingNumber || '';
const callingNumber = str.startsWith('+') ? str.substring(1) : str;
const voip_carrier_sid = await lookupCarrierByPhoneNumber(cs.accountSid, callingNumber);
this.logger.info(
`Dial:_attemptCalls: selected ${voip_carrier_sid} for requested phone number: ${t.number})`);
`Dial:_attemptCalls: selected ${voip_carrier_sid} for requested phone number: ${callingNumber}`);
if (voip_carrier_sid) {
opts.headers['X-Requested-Carrier-Sid'] = voip_carrier_sid;
}
@@ -491,7 +569,8 @@ class TaskDial extends Task {
callInfo: cs.callInfo,
accountInfo: cs.accountInfo,
rootSpan: cs.rootSpan,
startSpan: this.startSpan.bind(this)
startSpan: this.startSpan.bind(this),
dialTask: this
});
this.dials.set(sd.callSid, sd);
@@ -507,7 +586,8 @@ class TaskDial extends Task {
}
})
.on('callStatusChange', (obj) => {
if (this.results.dialCallStatus !== CallStatus.Completed) {
if (this.results.dialCallStatus !== CallStatus.Completed &&
this.results.dialCallStatus !== CallStatus.NoAnswer) {
Object.assign(this.results, {
dialCallStatus: obj.callStatus,
dialSipStatus: obj.sipStatus,
@@ -560,11 +640,7 @@ class TaskDial extends Task {
}
})
.on('reinvite', (req, res) => {
try {
cs.handleReinviteAfterMediaReleased(req, res);
} catch (err) {
this.logger.error(err, 'Error in dial einvite from B leg');
}
this._onReinvite(req, res);
})
.on('refer', (callInfo, req, res) => {
@@ -600,6 +676,35 @@ class TaskDial extends Task {
this._killOutdials(); // NB: order is important
}
async _onReinvite(req, res) {
try {
let isHandled = false;
if (this.cs.onHoldMusic) {
if (isOnhold(req.body) && !this.epOther && !this.ep) {
await this.cs.handleReinviteAfterMediaReleased(req, res);
// Onhold but media is already released
// reconnect A Leg and Response B leg
await this.reAnchorMedia(this.cs, this.sd);
this.isOutgoingLegHold = true;
isHandled = true;
this._onHoldHook();
} else if (!isOnhold(req.body) && this.epOther && this.ep && this.isOutgoingLegHold && this.canReleaseMedia) {
// Offhold, time to release media
const newSdp = await this.ep.modify(req.body);
await res.send(200, {body: newSdp});
await this._releaseMedia(this.cs, this.sd);
isHandled = true;
this.isOutgoingLegHold = false;
}
}
if (!isHandled) {
this.cs.handleReinviteAfterMediaReleased(req, res);
}
} catch (err) {
this.logger.error(err, 'Error in dial reinvite from B leg');
}
}
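// Sketch of the isOnhold check used above (an assumption about its implementation: classic
// SIP hold is signalled by a sendonly/inactive attribute or a 0.0.0.0 connection address).
const isOnhold = (sdp) =>
  /^a=(?:sendonly|inactive)\s*$/m.test(sdp) || /^c=IN IP4 0\.0\.0\.0\s*$/m.test(sdp);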
_onMaxCallDuration(cs) {
this.logger.info(`Dial:_onMaxCallDuration tearing down call as it has reached ${this.timeLimit}s`);
this.ep && this.ep.unbridge();
@@ -652,8 +757,9 @@ class TaskDial extends Task {
if (this.parentDtmfCollector) this._installDtmfDetection(cs, cs.dlg);
if (this.childDtmfCollector) this._installDtmfDetection(cs, this.dlg);
if (cs.sipRequestWithinDialogHook) this._initSipIndialogRequestListener(cs, this.dlg);
if (this.transcribeTask) this.transcribeTask.exec(cs, {ep2: this.epOther, ep:this.ep});
if (this.transcribeTask) this.transcribeTask.exec(cs, {ep: this.epOther, ep2: this.ep});
if (this.listenTask) this.listenTask.exec(cs, {ep: this.epOther});
if (this.startAmd) {
try {
@@ -685,9 +791,11 @@ class TaskDial extends Task {
assert(cs.ep && sd.ep);
try {
// Wait until we get the new SDP from the B leg to offer to the A leg
const aLegSdp = cs.ep.remote.sdp;
await sd.releaseMediaToSBC(aLegSdp, cs.ep.local.sdp);
const bLegSdp = sd.dlg.remote.sdp;
await Promise.all[sd.releaseMediaToSBC(aLegSdp, cs.ep.local.sdp), cs.releaseMediaToSBC(bLegSdp)];
await cs.releaseMediaToSBC(bLegSdp);
this.epOther = null;
this.logger.info('Dial:_releaseMedia - successfully released media from freeswitch');
} catch (err) {
@@ -704,9 +812,29 @@ class TaskDial extends Task {
}
async handleReinviteAfterMediaReleased(req, res) {
const sdp = await this.dlg.modify(req.body);
this.logger.info({sdp}, 'Dial:handleReinviteAfterMediaReleased - sent reinvite to B leg');
res.send(200, {body: sdp});
let isHandled = false;
if (isOnhold(req.body) && !this.epOther && !this.ep) {
const sdp = await this.dlg.modify(req.body);
res.send(200, {body: sdp});
// Onhold but media is already released
await this.reAnchorMedia(this.cs, this.sd);
isHandled = true;
this.isIncomingLegHold = true;
this._onHoldHook();
} else if (!isOnhold(req.body) && this.epOther && this.ep && this.isIncomingLegHold && this.canReleaseMedia) {
// Offhold, time to release media
const newSdp = await this.epOther.modify(req.body);
await res.send(200, {body: newSdp});
await this._releaseMedia(this.cs, this.sd);
isHandled = true;
this.isIncomingLegHold = false;
}
if (!isHandled) {
const sdp = await this.dlg.modify(req.body);
this.logger.info({sdp}, 'Dial:handleReinviteAfterMediaReleased - sent reinvite to B leg');
res.send(200, {body: sdp});
}
}
_onAmdEvent(cs, evt) {
@@ -717,6 +845,48 @@ class TaskDial extends Task {
this.logger.error({err}, 'Dial:_onAmdEvent - error calling actionHook');
});
}
async _onHoldHook(allowed = [TaskName.Play, TaskName.Say, TaskName.Pause]) {
if (this.data.onHoldHook) {
// send silence to keep voice quality
await this.epOther.play('silence_stream://500');
let allowedTasks;
do {
try {
const b3 = this.getTracingPropagation();
const httpHeaders = b3 && {b3};
const json = await this.cs.application.requestor.
request('verb:hook', this.data.onHoldHook, this.cs.callInfo.toJSON(), httpHeaders);
const tasks = normalizeJambones(this.logger, json).map((tdata) => makeTask(this.logger, tdata));
allowedTasks = tasks.filter((t) => allowed.includes(t.name));
if (tasks.length !== allowedTasks.length) {
this.logger.debug({tasks, allowedTasks}, 'unsupported task');
throw new Error(`unsupported verb in dial onHoldHook: only ${JSON.stringify(allowed)} are allowed`);
}
this.logger.debug(`DialTask:_onHoldHook: executing ${tasks.length} tasks`);
if (tasks.length) {
this._playSession = new ConfirmCallSession({
logger: this.logger,
application: this.cs.application,
dlg: this.isIncomingLegHold ? this.dlg : this.cs.dlg,
ep: this.isIncomingLegHold ? this.ep : this.cs.ep,
callInfo: this.cs.callInfo,
accountInfo: this.cs.accountInfo,
tasks,
rootSpan: this.cs.rootSpan
});
await this._playSession.exec();
this._playSession = null;
}
} catch (error) {
this.logger.info(error, 'DialTask:_onHoldHook: failed retrieving onHoldHook');
this._playSession = null;
break;
}
} while (allowedTasks && allowedTasks.length > 0 && !this.killed && this.isOnHold);
this.logger.info('Finish onHoldHook');
}
}
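// Illustrative payload the onHoldHook is expected to return while a leg is on hold; only
// play, say and pause verbs are accepted here, and any other verb ends the hold loop:
//
// [
//   {"verb": "say", "text": "The other party has placed you on hold."},
//   {"verb": "play", "url": "https://example.com/hold-music.mp3"},
//   {"verb": "pause", "length": 10}
// ]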
}
module.exports = TaskDial;

View File

@@ -58,6 +58,13 @@ class Dialogflow extends Task {
this.vendor = this.data.tts.vendor || 'default';
this.language = this.data.tts.language || 'default';
this.voice = this.data.tts.voice || 'default';
this.speechSynthesisLabel = this.data.tts.label;
// fallback tts
this.fallbackVendor = this.data.tts.fallbackVendor || 'default';
this.fallbackLanguage = this.data.tts.fallbackLanguage || 'default';
this.fallbackVoice = this.data.tts.fallbackVoice || 'default';
this.fallbackLabel = this.data.tts.fallbackLabel;
}
this.bargein = this.data.bargein;
}
@@ -118,8 +125,15 @@ class Dialogflow extends Task {
this.vendor = cs.speechSynthesisVendor;
this.language = cs.speechSynthesisLanguage;
this.voice = cs.speechSynthesisVoice;
this.speechSynthesisLabel = cs.speechSynthesisLabel;
}
this.ttsCredentials = cs.getSpeechCredentials(this.vendor, 'tts');
if (this.fallbackVendor === 'default') {
this.fallbackVendor = cs.fallbackSpeechSynthesisVendor;
this.fallbackLanguage = cs.fallbackSpeechSynthesisLanguage;
this.fallbackVoice = cs.fallbackSpeechSynthesisVoice;
this.fallbackLabel = cs.fallbackSpeechSynthesisLabel;
}
this.ttsCredentials = cs.getSpeechCredentials(this.vendor, 'tts', this.speechSynthesisLabel);
this.ep.addCustomEventListener('dialogflow::intent', this._onIntent.bind(this, ep, cs));
this.ep.addCustomEventListener('dialogflow::transcription', this._onTranscription.bind(this, ep, cs));
@@ -221,18 +235,8 @@ class Dialogflow extends Task {
}
try {
const obj = {
text: intent.fulfillmentText,
vendor: this.vendor,
language: this.language,
voice: this.voice,
salt: cs.callSid,
credentials: this.ttsCredentials
};
this.logger.debug({obj}, 'Dialogflow:_onIntent - playing message via tts');
const {filePath, servedFromCache} = await synthAudio(stats, obj);
const {filePath} = await this._fallbackSynthAudio(cs, intent, stats, synthAudio);
if (filePath) cs.trackTmpFile(filePath);
if (!this.ttsCredentials && !servedFromCache) cs.billForTts(intent.fulfillmentText.length);
if (this.playInProgress) {
await ep.api('uuid_break', ep.uuid).catch((err) => this.logger.info(err, 'Error killing audio'));
@@ -276,6 +280,46 @@ class Dialogflow extends Task {
}
}
async _fallbackSynthAudio(cs, intent, stats, synthAudio) {
try {
const obj = {
account_sid: cs.accountSid,
text: intent.fulfillmentText,
vendor: this.vendor,
language: this.language,
voice: this.voice,
salt: cs.callSid,
credentials: this.ttsCredentials
};
this.logger.debug({obj}, 'Dialogflow:_onIntent - playing message via tts');
return await synthAudio(stats, obj);
} catch (error) {
this.logger.info({error}, 'Failed to synthesize audio from primary vendor');
try {
if (this.fallbackVendor) {
const credentials = cs.getSpeechCredentials(this.fallbackVendor, 'tts', this.fallbackLabel);
const obj = {
account_sid: cs.accountSid,
text: intent.fulfillmentText,
vendor: this.fallbackVendor,
language: this.fallbackLanguage,
voice: this.fallbackVoice,
salt: cs.callSid,
credentials
};
this.logger.debug({obj}, 'Dialogflow:_onIntent - playing message via fallback tts');
return await synthAudio(stats, obj);
}
} catch (err) {
this.logger.info({err}, 'Failed to synthesize audio from fallback vendor');
throw err;
}
throw error;
}
}
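// Illustrative verb snippet showing where the new fallback tts fields come from (property
// names mirror the constructor above; the project and voice values are placeholders):
//
// dialogflow: {
//   project: 'my-gcp-project',
//   tts: {
//     vendor: 'google', language: 'en-US', voice: 'en-US-Wavenet-C',
//     fallbackVendor: 'microsoft', fallbackLanguage: 'en-US',
//     fallbackVoice: 'en-US-JennyNeural', fallbackLabel: 'backup-creds'
//   }
// }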
/**
* A transcription - either interim or final - has been returned.
* If we are doing barge-in based on hotword detection, check for the hotword or phrase.

View File

@@ -18,6 +18,7 @@ class TaskEnqueue extends Task {
this.preconditions = TaskPreconditions.Endpoint;
this.queueName = this.data.name;
this.priority = this.data.priority;
this.waitHook = this.data.waitHook;
this.emitter = new Emitter();
@@ -70,12 +71,22 @@ class TaskEnqueue extends Task {
}
async _addToQueue(cs, dlg) {
const {pushBack} = cs.srf.locals.dbHelpers;
const {addToSortedSet, sortedSetLength} = cs.srf.locals.dbHelpers;
const url = getUrl(cs);
this.waitStartTime = Date.now();
this.logger.debug({queue: this.queueName, url}, 'pushing url onto queue');
const members = await pushBack(this.queueName, url);
this.logger.info(`TaskEnqueue:_addToQueue: added to queue, length now ${members}`);
if (this.priority < 0) {
this.logger.warn(`priority ${this.priority} is invalid, must be a non-negative integer;
999 will be used instead`);
}
let members = await addToSortedSet(this.queueName, url, this.priority);
if (members === 1) {
this.logger.info('TaskEnqueue:_addToQueue: added to queue');
} else {
this.logger.info('TaskEnqueue:_addToQueue: failed to add to queue');
}
members = await sortedSetLength(this.queueName);
this.notifyUrl = url;
/* invoke account-level webhook for queue event notifications */
@@ -90,9 +101,9 @@ class TaskEnqueue extends Task {
}
async _removeFromQueue(cs) {
const {removeFromList, lengthOfList} = cs.srf.locals.dbHelpers;
await removeFromList(this.queueName, getUrl(cs));
return await lengthOfList(this.queueName);
const {retrieveByPatternSortedSet, sortedSetLength} = cs.srf.locals.dbHelpers;
await retrieveByPatternSortedSet(this.queueName, `*${getUrl(cs)}`);
return await sortedSetLength(this.queueName);
}
async performAction() {
@@ -279,13 +290,13 @@ class TaskEnqueue extends Task {
this.emitter.emit('dequeue', opts);
try {
const {lengthOfList} = cs.srf.locals.dbHelpers;
const members = await lengthOfList(this.queueName);
const {sortedSetLength} = cs.srf.locals.dbHelpers;
const members = await sortedSetLength(this.queueName);
this.dequeued = true;
cs.performQueueWebhook({
event: 'leave',
queue: this.data.name,
length: Math.max(members - 1, 0),
length: Math.max(members, 0),
leaveReason: 'dequeued',
leaveTime: Date.now(),
dequeuer: opts.dequeuer
@@ -300,8 +311,9 @@ class TaskEnqueue extends Task {
}
}
async _playHook(cs, dlg, hook, allowed = [TaskName.Play, TaskName.Say, TaskName.Pause, TaskName.Leave]) {
const {lengthOfList, getListPosition} = cs.srf.locals.dbHelpers;
async _playHook(cs, dlg, hook,
allowed = [TaskName.Play, TaskName.Say, TaskName.Pause, TaskName.Leave, TaskName.Tag]) {
const {sortedSetLength, sortedSetPositionByPattern} = cs.srf.locals.dbHelpers;
const b3 = this.getTracingPropagation();
const httpHeaders = b3 && {b3};
@@ -313,9 +325,15 @@ class TaskEnqueue extends Task {
queueTime: getElapsedTime(this.waitStartTime)
};
try {
const queueSize = await lengthOfList(this.queueName);
const queuePosition = await getListPosition(this.queueName, this.notifyUrl);
Object.assign(params, {queueSize, queuePosition});
const queueSize = await sortedSetLength(this.queueName);
const queuePosition = await sortedSetPositionByPattern(this.queueName, `*${this.notifyUrl}`);
Object.assign(params, {
queueSize,
queuePosition: queuePosition.length ? queuePosition[0] : 0,
callSid: this.cs.callSid,
callId: this.cs.callId,
customerData: this.cs.callInfo.customerData
});
} catch (err) {
this.logger.error({err}, `TaskEnqueue:_playHook error retrieving list info for queue ${this.queueName}`);
}
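// Sketch of how the sorted-set helpers used above could be backed by Redis (an assumption:
// the real dbHelpers implementations may differ). Lower scores are served first, so a
// smaller priority number means an earlier position in the queue.
const Redis = require('ioredis');                                 // assumed client library

const client = new Redis();

const addToSortedSet = (queue, url, priority) =>
  client.zadd(queue, priority >= 0 ? priority : 999, url);        // resolves 1 if newly added

const sortedSetLength = (queue) => client.zcard(queue);

const sortedSetPositionByPattern = async(queue, pattern) => {
  // scan members matching the pattern and report their rank (0 = front of the queue)
  const [, elements] = await client.zscan(queue, 0, 'MATCH', pattern);
  const members = elements.filter((_, i) => i % 2 === 0);
  return Promise.all(members.map((m) => client.zrank(queue, m)));
};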

View File

@@ -1,17 +1,17 @@
const Task = require('./task');
const {
TaskName,
TaskPreconditions,
GoogleTranscriptionEvents,
NuanceTranscriptionEvents,
AwsTranscriptionEvents,
AzureTranscriptionEvents,
DeepgramTranscriptionEvents,
SonioxTranscriptionEvents,
CobaltTranscriptionEvents,
IbmTranscriptionEvents,
NvidiaTranscriptionEvents,
JambonzTranscriptionEvents
} = require('../utils/constants');
JambonzTranscriptionEvents,
AssemblyAiTranscriptionEvents
} = require('../utils/constants.json');
const {
JAMBONES_GATHER_EARLY_HINTS_MATCH,
JAMBONZ_GATHER_EARLY_HINTS_MATCH,
@@ -19,34 +19,11 @@ const {
} = require('../config');
const makeTask = require('./make_task');
const assert = require('assert');
const SttTask = require('./stt-task');
const compileTranscripts = (logger, evt, arr) => {
if (!Array.isArray(arr) || arr.length === 0) return;
let t = '';
for (const a of arr) {
t += ` ${a.alternatives[0].transcript}`;
}
t += ` ${evt.alternatives[0].transcript}`;
evt.alternatives[0].transcript = t.trim();
};
class TaskGather extends Task {
class TaskGather extends SttTask {
constructor(logger, opts, parentTask) {
super(logger, opts);
this.preconditions = TaskPreconditions.Endpoint;
const {
setChannelVarsForStt,
normalizeTranscription,
removeSpeechListeners,
setSpeechCredentialsAtRuntime,
compileSonioxTranscripts
} = require('../utils/transcription-utils')(logger);
this.setChannelVarsForStt = setChannelVarsForStt;
this.normalizeTranscription = normalizeTranscription;
this.removeSpeechListeners = removeSpeechListeners;
this.compileSonioxTranscripts = compileSonioxTranscripts;
super(logger, opts, parentTask);
[
'finishOnKey', 'input', 'numDigits', 'minDigits', 'maxDigits',
'interDigitTimeout', 'partialResultHook', 'bargein', 'dtmfBargein',
@@ -62,27 +39,23 @@ class TaskGather extends Task {
this.listenDuringPrompt = this.data.listenDuringPrompt === false ? false : true;
this.minBargeinWordCount = this.data.minBargeinWordCount || 1;
if (this.data.recognizer) {
const recognizer = this.data.recognizer;
this.vendor = recognizer.vendor;
this.language = recognizer.language;
/* let credentials be supplied in the recognizer object at runtime */
this.sttCredentials = setSpeechCredentialsAtRuntime(recognizer);
/* continuous ASR (i.e. compile transcripts until a special timeout or dtmf key) */
this.asrTimeout = typeof recognizer.asrTimeout === 'number' ? recognizer.asrTimeout * 1000 : 0;
if (this.asrTimeout > 0) this.asrDtmfTerminationDigit = recognizer.asrDtmfTerminationDigit;
this.isContinuousAsr = this.asrTimeout > 0;
this.asrTimeout = typeof this.data.recognizer.asrTimeout === 'number' ?
this.data.recognizer.asrTimeout * 1000 : 0;
if (this.asrTimeout > 0) {
this.isContinuousAsr = true;
this.asrDtmfTerminationDigit = this.data.recognizer.asrDtmfTerminationDigit;
}
if (Array.isArray(this.data.recognizer.hints) &&
0 == this.data.recognizer.hints.length && JAMBONES_GATHER_CLEAR_GLOBAL_HINTS_ON_EMPTY_HINTS) {
logger.debug('Gather: an empty hints array was supplied, so we will mask global hints');
this.maskGlobalSttHints = true;
}
this.data.recognizer.hints = this.data.recognizer.hints || [];
this.data.recognizer.altLanguages = this.data.recognizer.altLanguages || [];
// fast Recognition, fire event after a specified time after the last hypothesis.
this.fastRecognitionTimeout = typeof this.data.recognizer.fastRecognitionTimeout === 'number' ?
this.data.recognizer.fastRecognitionTimeout * 1000 : 0;
}
else this.data.recognizer = {hints: [], altLanguages: []};
this.digitBuffer = '';
this._earlyMedia = this.data.earlyMedia === true;
@@ -97,12 +70,8 @@ class TaskGather extends Task {
/* buffer speech for continuous asr */
this._bufferedTranscripts = [];
/* buffer for soniox transcripts */
this._sonioxTranscripts = [];
this.parentTask = parentTask;
this.partialTranscriptsCount = 0;
this.bugname_prefix = 'gather_';
}
get name() { return TaskName.Gather; }
@@ -135,13 +104,12 @@ class TaskGather extends Task {
async exec(cs, {ep}) {
this.logger.debug({options: this.data}, 'Gather:exec');
await super.exec(cs);
await super.exec(cs, {ep});
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
const {getNuanceAccessToken, getIbmAccessToken} = cs.srf.locals.dbHelpers;
if (cs.hasGlobalSttHints && !this.maskGlobalSttHints) {
const {hints, hintsBoost} = cs.globalSttHints;
const setOfHints = new Set(this.data.recognizer.hints
const setOfHints = new Set((this.data.recognizer.hints || [])
.concat(hints)
.filter((h) => typeof h === 'string' && h.length > 0));
this.data.recognizer.hints = [...setOfHints];
@@ -149,14 +117,6 @@ class TaskGather extends Task {
this.logger.debug({hints: this.data.recognizer.hints, hintsBoost: this.data.recognizer.hintsBoost},
'Gather:exec - applying global sttHints');
}
if (cs.hasAltLanguages) {
this.data.recognizer.altLanguages = this.data.recognizer.altLanguages.concat(cs.altLanguages);
this.logger.debug({altLanguages: this.altLanguages},
'Gather:exec - applying altLanguages');
}
if (cs.hasGlobalSttPunctuation && !this.data.recognizer.punctuation) {
this.data.recognizer.punctuation = cs.globalSttPunctuation;
}
if (!this.isContinuousAsr && cs.isContinuousAsr) {
this.isContinuousAsr = true;
this.asrTimeout = cs.asrTimeout * 1000;
@@ -174,67 +134,26 @@ class TaskGather extends Task {
this.interim = true;
this.logger.debug('Gather:exec - early hints match enabled');
}
this.ep = ep;
if ('default' === this.vendor || !this.vendor) {
this.vendor = cs.speechRecognizerVendor;
if (this.data.recognizer) this.data.recognizer.vendor = this.vendor;
}
if ('default' === this.language || !this.language) {
this.language = cs.speechRecognizerLanguage;
if (this.data.recognizer) this.data.recognizer.language = this.language;
}
if (!this.data.recognizer.vendor) {
this.data.recognizer.vendor = this.vendor;
}
if (this.needsStt && !this.sttCredentials) this.sttCredentials = cs.getSpeechCredentials(this.vendor, 'stt');
if (this.needsStt && !this.sttCredentials) {
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info(`TaskGather:exec - ERROR stt using ${this.vendor} requested but creds not supplied`);
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_NOT_PROVISIONED,
vendor: this.vendor
}).catch((err) => this.logger.info({err}, 'Error generating alert for no stt'));
// Notify application that STT vender is wrong.
this.notifyError({
msg: 'ASR error',
details: `No speech-to-text service credentials for ${this.vendor} have been configured`
});
this.notifyTaskDone();
throw new Error(`No speech-to-text service credentials for ${this.vendor} have been configured`);
}
if (this.vendor === 'nuance' && this.sttCredentials.client_id) {
/* get nuance access token */
const {client_id, secret} = this.sttCredentials;
const {access_token, servedFromCache} = await getNuanceAccessToken(client_id, secret, 'asr tts');
this.logger.debug({client_id}, `Gather:exec - got nuance access token ${servedFromCache ? 'from cache' : ''}`);
this.sttCredentials = {...this.sttCredentials, access_token};
}
else if (this.vendor == 'ibm' && this.sttCredentials.stt_api_key) {
/* get ibm access token */
const {stt_api_key, stt_region} = this.sttCredentials;
const {access_token, servedFromCache} = await getIbmAccessToken(stt_api_key);
this.logger.debug({stt_api_key}, `Gather:exec - got ibm access token ${servedFromCache ? 'from cache' : ''}`);
this.sttCredentials = {...this.sttCredentials, access_token, stt_region};
}
const startListening = (cs, ep) => {
const startListening = async(cs, ep) => {
this._startTimer();
if (this.isContinuousAsr && 0 === this.timeout) this._startAsrTimer();
if (this.input.includes('speech') && !this.listenDuringPrompt) {
this._initSpeech(cs, ep)
.then(() => {
if (this.killed) {
this.logger.info('Gather:exec - task was quickly killed so do not transcribe');
return;
}
this._startTranscribing(ep);
return updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid);
})
.catch((err) => {
this.logger.error({err}, 'error in initSpeech');
});
try {
await this._setSpeechHandlers(cs, ep);
if (this.killed) {
this.logger.info('Gather:exec - task was quickly killed so do not transcribe');
return;
}
this._startTranscribing(ep);
return updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid);
} catch (e) {
if (this.fallbackVendor && this.isHandledByPrimaryProvider) {
await this._fallback();
startListening(cs, ep);
} else {
this.logger.error({error: e}, 'error in initSpeech');
}
}
}
};
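// Sketch of what the _fallback() used above presumably does (an assumption: the real method
// lives on the SttTask base class and may differ) - switch this task over to the configured
// fallback recognizer so the speech handlers can be re-installed for the new vendor.
async _fallback() {
  this.isHandledByPrimaryProvider = false;
  this.vendor = this.fallbackVendor;
  this.language = this.fallbackLanguage || this.language;
  this.sttCredentials = this.cs.getSpeechCredentials(this.vendor, 'stt', this.fallbackLabel);
  this._speechHandlersSet = false;   // force _setSpeechHandlers to register listeners again
}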
@@ -288,7 +207,7 @@ class TaskGather extends Task {
}
if (this.input.includes('speech') && this.listenDuringPrompt) {
await this._initSpeech(cs, ep);
await this._setSpeechHandlers(cs, ep);
this._startTranscribing(ep);
updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid)
.catch(() => {/*already logged error */});
@@ -302,7 +221,7 @@ class TaskGather extends Task {
} catch (err) {
this.logger.error(err, 'TaskGather:exec error');
}
this.removeSpeechListeners(ep);
this.removeCustomEventListeners();
}
kill(cs) {
@@ -340,6 +259,16 @@ class TaskGather extends Task {
this._resolve('dtmf-terminator-key');
}
else if (this.input.includes('digits')) {
if (this.digitBuffer.length === 0 && this.needsStt) {
// DTMF is higher priority than STT.
this.removeCustomEventListeners();
ep.stopTranscription({
vendor: this.vendor,
bugname: this.bugname,
})
.catch((err) => this.logger.error({err},
`Received DTMF, error stopping transcription for vendor ${this.vendor}`));
}
this.digitBuffer += evt.dtmf;
const len = this.digitBuffer.length;
if (len === this.numDigits || len === this.maxDigits) {
@@ -362,38 +291,51 @@ class TaskGather extends Task {
}
}
async _initSpeech(cs, ep) {
async _setSpeechHandlers(cs, ep) {
if (this._speechHandlersSet) return;
this._speechHandlersSet = true;
/* some special deepgram logic */
if (this.vendor === 'deepgram') {
if (this.isContinuousAsr) this._doContinuousAsrWithDeepgram(this.asrTimeout);
if (this.data.recognizer?.deepgramOptions?.shortUtterance) this.shortUtterance = true;
}
const opts = this.setChannelVarsForStt(this, this.sttCredentials, this.data.recognizer);
switch (this.vendor) {
case 'google':
this.bugname = 'google_transcribe';
ep.addCustomEventListener(GoogleTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(GoogleTranscriptionEvents.EndOfUtterance, this._onEndOfUtterance.bind(this, cs, ep));
ep.addCustomEventListener(GoogleTranscriptionEvents.VadDetected, this._onVadDetected.bind(this, cs, ep));
this.bugname = `${this.bugname_prefix}google_transcribe`;
this.addCustomEventListener(
ep, GoogleTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
this.addCustomEventListener(
ep, GoogleTranscriptionEvents.EndOfUtterance, this._onEndOfUtterance.bind(this, cs, ep));
this.addCustomEventListener(
ep, GoogleTranscriptionEvents.VadDetected, this._onVadDetected.bind(this, cs, ep));
break;
case 'aws':
case 'polly':
this.bugname = 'aws_transcribe';
ep.addCustomEventListener(AwsTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(AwsTranscriptionEvents.VadDetected, this._onVadDetected.bind(this, cs, ep));
this.bugname = `${this.bugname_prefix}aws_transcribe`;
this.addCustomEventListener(ep, AwsTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
this.addCustomEventListener(ep, AwsTranscriptionEvents.VadDetected, this._onVadDetected.bind(this, cs, ep));
break;
case 'microsoft':
this.bugname = 'azure_transcribe';
ep.addCustomEventListener(AzureTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(AzureTranscriptionEvents.NoSpeechDetected,
this.bugname = `${this.bugname_prefix}azure_transcribe`;
this.addCustomEventListener(
ep, AzureTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
this.addCustomEventListener(ep, AzureTranscriptionEvents.NoSpeechDetected,
this._onNoSpeechDetected.bind(this, cs, ep));
ep.addCustomEventListener(AzureTranscriptionEvents.VadDetected, this._onVadDetected.bind(this, cs, ep));
this.addCustomEventListener(ep, AzureTranscriptionEvents.VadDetected, this._onVadDetected.bind(this, cs, ep));
break;
case 'nuance':
this.bugname = 'nuance_transcribe';
ep.addCustomEventListener(NuanceTranscriptionEvents.Transcription,
this.bugname = `${this.bugname_prefix}nuance_transcribe`;
this.addCustomEventListener(ep, NuanceTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(NuanceTranscriptionEvents.StartOfSpeech,
this.addCustomEventListener(ep, NuanceTranscriptionEvents.StartOfSpeech,
this._onStartOfSpeech.bind(this, cs, ep));
ep.addCustomEventListener(NuanceTranscriptionEvents.TranscriptionComplete,
this.addCustomEventListener(ep, NuanceTranscriptionEvents.TranscriptionComplete,
this._onTranscriptionComplete.bind(this, cs, ep));
ep.addCustomEventListener(NuanceTranscriptionEvents.VadDetected,
this.addCustomEventListener(ep, NuanceTranscriptionEvents.VadDetected,
this._onVadDetected.bind(this, cs, ep));
/* stall timers until prompt finishes playing */
@@ -403,35 +345,71 @@ class TaskGather extends Task {
break;
case 'deepgram':
this.bugname = 'deepgram_transcribe';
ep.addCustomEventListener(DeepgramTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(DeepgramTranscriptionEvents.Connect, this._onDeepgramConnect.bind(this, cs, ep));
ep.addCustomEventListener(DeepgramTranscriptionEvents.ConnectFailure,
this._onDeepGramConnectFailure.bind(this, cs, ep));
this.bugname = `${this.bugname_prefix}deepgram_transcribe`;
this.addCustomEventListener(
ep, DeepgramTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
this.addCustomEventListener(ep, DeepgramTranscriptionEvents.Connect, this._onVendorConnect.bind(this, cs, ep));
this.addCustomEventListener(ep, DeepgramTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep));
/* if app sets deepgramOptions.utteranceEndMs they essentially want continuous asr */
if (opts.DEEPGRAM_SPEECH_UTTERANCE_END_MS) this.isContinuousAsr = true;
break;
case 'soniox':
this.bugname = 'soniox_transcribe';
ep.addCustomEventListener(SonioxTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
this.bugname = `${this.bugname_prefix}soniox_transcribe`;
this.addCustomEventListener(
ep, SonioxTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
break;
case 'cobalt':
this.bugname = `${this.bugname_prefix}cobalt_transcribe`;
this.addCustomEventListener(
ep, CobaltTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
/* cobalt doesn't have a language setting; it has a model, which is required */
if (!this.data.recognizer.model) {
throw new Error('Cobalt requires a model to be specified');
}
this.language = this.data.recognizer.model;
/* special case: if using hints with cobalt we need to compile them */
this.hostport = opts.COBALT_SERVER_URI;
if (this.vendor === 'cobalt' && opts.COBALT_SPEECH_HINTS) {
try {
const context = await this.compileHintsForCobalt(
ep,
this.hostport,
this.data.recognizer.model,
opts.COBALT_CONTEXT_TOKEN,
opts.COBALT_SPEECH_HINTS
);
if (context) opts.COBALT_COMPILED_CONTEXT_DATA = context;
delete opts.COBALT_SPEECH_HINTS;
} catch (err) {
this.logger.error({err}, 'Error compiling hints for cobalt');
}
}
delete opts.COBALT_SERVER_URI;
break;
case 'ibm':
this.bugname = 'ibm_transcribe';
ep.addCustomEventListener(IbmTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(IbmTranscriptionEvents.Connect, this._onIbmConnect.bind(this, cs, ep));
ep.addCustomEventListener(IbmTranscriptionEvents.ConnectFailure,
this._onIbmConnectFailure.bind(this, cs, ep));
this.bugname = `${this.bugname_prefix}ibm_transcribe`;
this.addCustomEventListener(ep, IbmTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
this.addCustomEventListener(ep, IbmTranscriptionEvents.Connect, this._onVendorConnect.bind(this, cs, ep));
this.addCustomEventListener(ep, IbmTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep));
break;
case 'nvidia':
this.bugname = 'nvidia_transcribe';
ep.addCustomEventListener(NvidiaTranscriptionEvents.Transcription,
this.bugname = `${this.bugname_prefix}nvidia_transcribe`;
this.addCustomEventListener(ep, NvidiaTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(NvidiaTranscriptionEvents.StartOfSpeech,
this.addCustomEventListener(ep, NvidiaTranscriptionEvents.StartOfSpeech,
this._onStartOfSpeech.bind(this, cs, ep));
ep.addCustomEventListener(NvidiaTranscriptionEvents.TranscriptionComplete,
this.addCustomEventListener(ep, NvidiaTranscriptionEvents.TranscriptionComplete,
this._onTranscriptionComplete.bind(this, cs, ep));
ep.addCustomEventListener(NvidiaTranscriptionEvents.VadDetected,
this.addCustomEventListener(ep, NvidiaTranscriptionEvents.VadDetected,
this._onVadDetected.bind(this, cs, ep));
/* I think nvidia has this (??) - stall timers until prompt finishes playing */
@@ -440,13 +418,24 @@ class TaskGather extends Task {
}
break;
case 'assemblyai':
this.bugname = `${this.bugname_prefix}assemblyai_transcribe`;
this.addCustomEventListener(ep, AssemblyAiTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep));
this.addCustomEventListener(
ep, AssemblyAiTranscriptionEvents.Connect, this._onVendorConnect.bind(this, cs, ep));
this.addCustomEventListener(ep, AssemblyAiTranscriptionEvents.Error, this._onVendorError.bind(this, cs, ep));
this.addCustomEventListener(ep, AssemblyAiTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep));
break;
default:
if (this.vendor.startsWith('custom:')) {
this.bugname = `${this.vendor}_transcribe`;
ep.addCustomEventListener(JambonzTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(JambonzTranscriptionEvents.Connect, this._onJambonzConnect.bind(this, cs, ep));
ep.addCustomEventListener(JambonzTranscriptionEvents.ConnectFailure,
this._onJambonzConnectFailure.bind(this, cs, ep));
this.bugname = `${this.bugname_prefix}${this.vendor}_transcribe`;
this.addCustomEventListener(
ep, JambonzTranscriptionEvents.Transcription, this._onTranscription.bind(this, cs, ep));
this.addCustomEventListener(ep, JambonzTranscriptionEvents.Connect, this._onVendorConnect.bind(this, cs, ep));
this.addCustomEventListener(ep, JambonzTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep));
break;
}
else {
@@ -457,7 +446,7 @@ class TaskGather extends Task {
}
/* common handler for all stt engine errors */
ep.addCustomEventListener(JambonzTranscriptionEvents.Error, this._onJambonzError.bind(this, cs, ep));
this.addCustomEventListener(ep, JambonzTranscriptionEvents.Error, this._onJambonzError.bind(this, cs, ep));
await ep.set(opts)
.catch((err) => this.logger.info(err, 'Error setting channel variables'));
}
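// Sketch (assumption) of the listener bookkeeping the SttTask base class could provide for
// the addCustomEventListener/removeCustomEventListeners calls above, so that stopping a
// gather never strips handlers that belong to another task on the same endpoint:
addCustomEventListener(ep, event, handler) {
  this._customHandlers = this._customHandlers || [];
  this._customHandlers.push({ep, event});
  ep.addCustomEventListener(event, handler);
}
removeCustomEventListeners() {
  (this._customHandlers || []).forEach(({ep, event}) => ep.removeCustomEventListener(event));
  this._customHandlers = [];
}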
@@ -469,11 +458,18 @@ class TaskGather extends Task {
interim: this.interim,
bugname: this.bugname
}, 'Gather:_startTranscribing');
/**
* Note: we don't need to ask deepgram for interim results, because they
* already send us words as they are finalized (is_final=true) even before
* the utterance is finalized (speech_final=true)
*/
ep.startTranscription({
vendor: this.vendor,
locale: this.language,
interim: this.interim,
bugname: this.bugname,
hostport: this.hostport,
}).catch((err) => {
const {writeAlerts, AlertType} = this.cs.srf.locals;
this.logger.error(err, 'TaskGather:_startTranscribing error');
@@ -491,7 +487,11 @@ class TaskGather extends Task {
this._clearTimer();
this._timeoutTimer = setTimeout(() => {
if (this.isContinuousAsr) this._startAsrTimer();
else this._resolve(this.digitBuffer.length >= this.minDigits ? 'dtmf-num-digits' : 'timeout');
else if (this.interDigitTimeout <= 0 ||
this.digitBuffer.length < this.minDigits ||
this.needsStt && this.digitBuffer.length === 0) {
this._resolve(this.digitBuffer.length >= this.minDigits ? 'dtmf-num-digits' : 'timeout');
}
}, this.timeout);
}
@@ -503,11 +503,13 @@ class TaskGather extends Task {
}
_startAsrTimer() {
if (this.vendor === 'deepgram') return; // no need
assert(this.isContinuousAsr);
this._clearAsrTimer();
this._asrTimer = setTimeout(() => {
this.logger.debug('_startAsrTimer - asr timer went off');
this._resolve(this._bufferedTranscripts.length > 0 ? 'speech' : 'timeout');
const evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language);
this._resolve(this._bufferedTranscripts.length > 0 ? 'speech' : 'timeout', evt);
}, this.asrTimeout);
this.logger.debug(`_startAsrTimer: set for ${this.asrTimeout}ms`);
}
@@ -517,11 +519,28 @@ class TaskGather extends Task {
this._asrTimer = null;
}
_startFastRecognitionTimer(evt) {
assert(this.fastRecognitionTimeout > 0);
this._clearFastRecognitionTimer();
this._fastRecognitionTimer = setTimeout(() => {
evt.is_final = true;
this._resolve('speech', evt);
}, this.fastRecognitionTimeout);
}
_clearFastRecognitionTimer() {
if (this._fastRecognitionTimer) {
clearTimeout(this._fastRecognitionTimer);
}
this._fastRecognitionTimer = null;
}
_startFinalAsrTimer() {
this._clearFinalAsrTimer();
this._finalAsrTimer = setTimeout(() => {
this.logger.debug('_startFinalAsrTimer - final asr timer went off');
this._resolve(this._bufferedTranscripts.length > 0 ? 'speech' : 'timeout');
const evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language);
this._resolve(this._bufferedTranscripts.length > 0 ? 'speech' : 'timeout', evt);
}, 1000);
this.logger.debug('_startFinalAsrTimer: set for 1 second');
}
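// Sketch (assumption) of the consolidateTranscripts helper invoked above - presumably bound
// from ../utils/transcription-utils - which folds the buffered utterances into one final,
// normalized event; the real helper may carry more vendor detail.
const consolidateTranscripts = (buffered, channel, language) => {
  if (buffered.length === 1) return buffered[0];
  const transcript = buffered
    .map((evt) => evt.alternatives[0]?.transcript || '')
    .join(' ')
    .trim();
  return {
    language_code: language,
    channel_tag: channel,
    is_final: true,
    alternatives: [{transcript}]
  };
};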
@@ -560,11 +579,24 @@ class TaskGather extends Task {
this.logger.debug({evt, bugname, finished}, `Gather:_onTranscription for vendor ${this.vendor}`);
if (bugname && this.bugname !== bugname) return;
if (this.vendor === 'ibm') {
if (evt?.state === 'listening') return;
if (this.vendor === 'ibm' && evt?.state === 'listening') return;
if (this.vendor === 'deepgram' && evt.type === 'UtteranceEnd') {
/* we will only get this when we have set utterance_end_ms */
if (this._bufferedTranscripts.length === 0) {
this.logger.debug('Gather:_onTranscription - got UtteranceEnd event from deepgram but no buffered transcripts');
}
else {
this.logger.debug('Gather:_onTranscription - got UtteranceEnd event from deepgram, return buffered transcript');
evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language);
this._bufferedTranscripts = [];
this._resolve('speech', evt);
}
return;
}
evt = this.normalizeTranscription(evt, this.vendor, 1, this.language);
evt = this.normalizeTranscription(evt, this.vendor, 1, this.language,
this.shortUtterance, this.data.recognizer.punctuation);
if (evt.alternatives.length === 0) {
this.logger.info({evt}, 'TaskGather:_onTranscription - got empty transcript, continue listening');
return;
@@ -586,15 +618,27 @@ class TaskGather extends Task {
const bufferedWords = this._sonioxTranscripts.length +
this._bufferedTranscripts.reduce((count, e) => count + e.alternatives[0]?.transcript.split(' ').length, 0);
let emptyTranscript = false;
if (evt.is_final) {
if (evt.alternatives[0].transcript === '' && !this.callSession.callGone && !this.killed) {
emptyTranscript = true;
if (finished === 'true' && ['microsoft', 'deepgram'].includes(this.vendor)) {
this.logger.debug({evt}, 'TaskGather:_onTranscription - got empty transcript from old gather, disregarding');
return;
}
else {
else if (this.vendor !== 'deepgram') {
this.logger.info({evt}, 'TaskGather:_onTranscription - got empty transcript, continue listening');
return;
}
else if (this.isContinuousAsr) {
this.logger.info({evt},
'TaskGather:_onTranscription - got empty deepgram transcript during continuous asr, continue listening');
return;
}
else if (this.vendor === 'deepgram' && this._bufferedTranscripts.length > 0) {
this.logger.info({evt},
'TaskGather:_onTranscription - got empty transcript from deepgram, return the buffered transcripts');
}
return;
}
if (this.isContinuousAsr) {
@@ -606,14 +650,14 @@ class TaskGather extends Task {
this.logger.debug('TaskGather:_onTranscription - removing trailing punctuation');
evt.alternatives[0].transcript = t.slice(0, -1);
}
else this.logger.debug({t}, 'TaskGather:_onTranscription - no trailing punctuation');
}
this.logger.info({evt}, 'TaskGather:_onTranscription - got transcript during continuous asr');
this._bufferedTranscripts.push(evt);
this._clearTimer();
if (this._finalAsrTimer) {
this._clearFinalAsrTimer();
return this._resolve(this._bufferedTranscripts.length > 0 ? 'speech' : 'timeout');
const evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language);
return this._resolve(this._bufferedTranscripts.length > 0 ? 'speech' : 'timeout', evt);
}
this._startAsrTimer();
@@ -635,16 +679,25 @@ class TaskGather extends Task {
evt = this.compileSonioxTranscripts(this._sonioxTranscripts, 1, this.language);
this._sonioxTranscripts = [];
}
else if (this.vendor === 'deepgram') {
/* compile transcripts into one */
if (!emptyTranscript) this._bufferedTranscripts.push(evt);
if (this.data.recognizer?.deepgramOptions?.utteranceEndMs) {
this.logger.debug('TaskGather:_onTranscription - got speech_final waiting for UtteranceEnd event');
return;
}
this.logger.debug({evt}, 'TaskGather:_onTranscription - compiling deepgram transcripts');
evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language);
this._bufferedTranscripts = [];
this.logger.debug({evt}, 'TaskGather:_onTranscription - compiled deepgram transcripts');
}
/* here is where we return a final transcript */
this._resolve('speech', evt);
}
}
}
else {
/* google has a measure of stability:
https://cloud.google.com/speech-to-text/docs/basics#streaming_responses
others do not.
*/
//const isStableEnough = typeof evt.stability === 'undefined' || evt.stability > GATHER_STABILITY_THRESHOLD;
this._clearTimer();
this._startTimer();
if (this.bargein && (words + bufferedWords) >= this.minBargeinWordCount) {
@@ -654,6 +707,9 @@ class TaskGather extends Task {
}
this._killAudio(cs);
}
if (this.fastRecognitionTimeout) {
this._startFastRecognitionTimer(evt);
}
if (this.partialResultHook) {
const b3 = this.getTracingPropagation();
const httpHeaders = b3 && {b3};
@@ -667,6 +723,14 @@ class TaskGather extends Task {
this._sonioxTranscripts.push(evt.vendor.finalWords);
}
}
/* deepgram can send a non-final transcript but with words that are final, so we need to buffer */
if (this.vendor === 'deepgram') {
const originalEvent = evt.vendor.evt;
if (originalEvent.is_final && evt.alternatives[0].transcript !== '') {
this.logger.debug({evt}, 'Gather:_onTranscription - buffering a completed (partial) deepgram transcript');
this._bufferedTranscripts.push(evt);
}
}
}
}
_onEndOfUtterance(cs, ep) {
@@ -681,7 +745,7 @@ class TaskGather extends Task {
* getting a transcription. This can happen if someone coughs or mumbles.
* For that reason don't ask for a single utterance and we'll terminate the transcribe operation
* once we get a final transcript.
* However, if the usr has specified a singleUtterance, then we need to restart here
* However, if the user has specified a singleUtterance, then we need to restart here
* since we dont have a final transcript yet.
*/
if (!this.resolved && !this.killed && !this._bufferedTranscripts.length && this.wantsSingleUtterance) {
@@ -698,14 +762,25 @@ class TaskGather extends Task {
_onTranscriptionComplete(cs, ep) {
this.logger.debug('TaskGather:_onTranscriptionComplete');
}
_onDeepgramConnect(_cs, _ep) {
this.logger.debug('TaskGather:_onDeepgramConnect');
}
_onJambonzConnect(_cs, _ep) {
this.logger.debug('TaskGather:_onJambonzConnect');
}
_onJambonzError(cs, _ep, evt) {
async _onJambonzError(cs, ep, evt) {
this.logger.info({evt}, 'TaskGather:_onJambonzError');
if (this.isHandledByPrimaryProvider && this.fallbackVendor) {
ep.stopTranscription({
vendor: this.vendor,
bugname: this.bugname
})
.catch((err) => this.logger.error({err}, `Error stopping transcription for primary vendor ${this.vendor}`));
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
try {
await this._fallback();
await this._initSpeech(cs, ep);
this._startTranscribing(ep);
updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid);
return;
} catch (error) {
this.logger.info({error}, `Error falling back to ${this.fallbackVendor}`);
}
}
const {writeAlerts, AlertType} = cs.srf.locals;
if (this.vendor === 'nuance') {
@@ -723,54 +798,16 @@ class TaskGather extends Task {
this.notifyError({msg: 'ASR error', details:`Custom speech vendor ${this.vendor} error: ${evt.error}`});
}
_onDeepGramConnectFailure(cs, _ep, evt) {
const {reason} = evt;
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info({evt}, 'TaskGather:_onDeepgramConnectFailure');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Failed connecting to Deepgram speech recognizer: ${reason}`,
vendor: 'deepgram',
}).catch((err) => this.logger.info({err}, 'Error generating alert for deepgram connection failure'));
this.notifyError({msg: 'ASR error', details:`Failed connecting to speech vendor deepgram: ${reason}`});
this.notifyTaskDone();
}
_onJambonzConnectFailure(cs, _ep, evt) {
const {reason} = evt;
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info({evt}, 'TaskGather:_onJambonzConnectFailure');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Failed connecting to ${this.vendor} speech recognizer: ${reason}`,
vendor: this.vendor,
}).catch((err) => this.logger.info({err}, 'Error generating alert for jambonz custom connection failure'));
this.notifyError({msg: 'ASR error', details:`Failed connecting to speech vendor ${this.vendor}: ${reason}`});
_onVendorConnectFailure(cs, _ep, evt) {
super._onVendorConnectFailure(cs, _ep, evt);
this.notifyTaskDone();
}
_onIbmConnect(_cs, _ep) {
this.logger.debug('TaskGather:_onIbmConnect');
_onVendorError(cs, _ep, evt) {
super._onVendorError(cs, _ep, evt);
this._resolve('stt-error', evt);
}
_onIbmConnectFailure(cs, _ep, evt) {
const {reason} = evt;
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info({evt}, 'TaskGather:_onIbmConnectFailure');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Failed connecting to IBM watson speech recognizer: ${reason}`,
vendor: 'ibm',
}).catch((err) => this.logger.info({err}, 'Error generating alert for IBM connection failure'));
this.notifyError({msg: 'ASR error', details:`Failed connecting to speech vendor IBM: ${reason}`});
this.notifyTaskDone();
}
_onIbmError(cs, _ep, evt) {
this.logger.info({evt}, 'TaskGather:_onIbmError'); }
_onVadDetected(cs, ep) {
if (this.bargein && this.minBargeinWordCount === 0) {
this.logger.debug('TaskGather:_onVadDetected');
@@ -804,22 +841,18 @@ class TaskGather extends Task {
}
clearTimeout(this.interDigitTimer);
this._clearTimer();
this._clearFastRecognitionTimer();
if (this.isContinuousAsr && reason.startsWith('speech')) {
evt = {
is_final: true,
transcripts: this._bufferedTranscripts
};
this.logger.debug({evt}, 'TaskGather:resolve continuous asr');
}
else if (!this.isContinuousAsr && reason.startsWith('speech') && this._bufferedTranscripts.length) {
compileTranscripts(this.logger, evt, this._bufferedTranscripts);
this.logger.debug({evt}, 'TaskGather:resolve buffered results');
}
this.span.setAttributes({'stt.resolve': reason, 'stt.result': JSON.stringify(evt)});
this.span.setAttributes({
channel: 1,
'stt.resolve': reason,
'stt.result': JSON.stringify(evt)
});
if (this.needsStt && this.ep && this.ep.connected) {
this.ep.stopTranscription({vendor: this.vendor})
this.ep.stopTranscription({
vendor: this.vendor,
bugname: this.bugname
})
.catch((err) => this.logger.error({err}, 'Error stopping transcription'));
}
@@ -851,6 +884,13 @@ class TaskGather extends Task {
await this.performAction({reason: 'timeout'});
}
}
else if (reason.startsWith('stt-error')) {
if (this.parentTask) this.parentTask.emit('stt-error', evt);
else {
this.emit('stt-error', evt);
await this.performAction({reason: 'error', details: evt.error});
}
}
} catch (err) { /*already logged error*/ }
this.notifyTaskDone();
}

View File

@@ -25,6 +25,13 @@ class Lex extends Task {
this.vendor = this.data.tts.vendor || 'default';
this.language = this.data.tts.language || 'default';
this.voice = this.data.tts.voice || 'default';
this.speechCredentialLabel = this.data.tts.label || 'default';
// fallback tts
this.fallbackVendor = this.data.tts.fallbackVendor || 'default';
this.fallbackLanguage = this.data.tts.fallbackLanguage || 'default';
this.fallbackVoice = this.data.tts.fallbackVoice || 'default';
this.fallbackLabel = this.data.tts.fallbackLabel || 'default';
}
this.botName = `${this.bot}:${this.alias}:${this.region}`;
@@ -102,8 +109,16 @@ class Lex extends Task {
this.vendor = cs.speechSynthesisVendor;
this.language = cs.speechSynthesisLanguage;
this.voice = cs.speechSynthesisVoice;
this.speechCredentialLabel = cs.speechSynthesisLabel;
}
this.ttsCredentials = cs.getSpeechCredentials(this.vendor, 'tts');
if (this.fallbackVendor === 'default') {
this.fallbackVendor = cs.fallbackSpeechSynthesisVendor;
this.fallbackLanguage = cs.fallbackSpeechSynthesisLanguage;
this.fallbackVoice = cs.fallbackSpeechSynthesisVoice;
this.fallbackLabel = cs.fallbackSpeechSynthesisLabel;
}
this.ttsCredentials = cs.getSpeechCredentials(this.vendor, 'tts', this.speechCredentialLabel);
this.ep.addCustomEventListener('lex::intent', this._onIntent.bind(this, ep, cs));
this.ep.addCustomEventListener('lex::transcription', this._onTranscription.bind(this, ep, cs));
@@ -168,6 +183,41 @@ class Lex extends Task {
}
}
async _fallbackSynthAudio(cs, msg, stats, synthAudio) {
try {
const {filePath} = await synthAudio(stats, {
account_sid: cs.accountSid,
text: msg,
vendor: this.vendor,
language: this.language,
voice: this.voice,
salt: cs.callSid,
credentials: this.ttsCredentials
});
return filePath;
} catch (error) {
this.logger.info({error}, 'Failed to synthesize audio from primary vendor');
if (this.fallbackVendor) {
try {
const credential = cs.getSpeechCredentials(this.fallbackVendor, 'tts', this.fallbackLabel);
const {filePath} = await synthAudio(stats, {
account_sid: cs.accountSid,
text: msg,
vendor: this.fallbackVendor,
language: this.fallbackLanguage,
voice: this.fallbackVoice,
salt: cs.callSid,
credentials: credential
});
return filePath;
} catch (err) {
this.logger.info({err}, 'Failed to synthesize audio from fallback vendor');
}
}
}
}
/**
* @param {*} evt - event data
*/
@@ -187,15 +237,7 @@ class Lex extends Task {
try {
this.logger.debug(`tts with ${this.vendor} ${this.voice}`);
// eslint-disable-next-line no-unused-vars
const {filePath, servedFromCache} = await synthAudio(stats, {
text: msg,
vendor: this.vendor,
language: this.language,
voice: this.voice,
salt: cs.callSid,
credentials: this.ttsCredentials
});
const filePath = await this._fallbackSynthAudio(cs, msg, stats, synthAudio);
if (filePath) cs.trackTmpFile(filePath);
if (this.events.includes('start-play')) {

View File

@@ -3,10 +3,12 @@ const {TaskName, TaskPreconditions, ListenEvents, ListenStatus} = require('../ut
const makeTask = require('./make_task');
const moment = require('moment');
const MAX_PLAY_AUDIO_QUEUE_SIZE = 10;
const DTMF_SPAN_NAME = 'dtmf';
class TaskListen extends Task {
constructor(logger, opts, parentTask) {
super(logger, opts);
this.disableBidirectionalAudio = opts.disableBidirectionalAudio;
this.preconditions = TaskPreconditions.Endpoint;
[
@@ -29,6 +31,10 @@ class TaskListen extends Task {
get name() { return TaskName.Listen; }
set bugname(name) { this._bugname = name; }
set ignoreCustomerData(val) { this._ignoreCustomerData = val; }
async exec(cs, {ep}) {
await super.exec(cs);
this.ep = ep;
@@ -65,7 +71,8 @@ class TaskListen extends Task {
if (this.ep && this.ep.connected) {
this.logger.debug('TaskListen:kill closing websocket');
try {
await this.ep.forkAudioStop();
const args = this._bugname ? [this._bugname] : [];
await this.ep.forkAudioStop(...args);
this.logger.debug('TaskListen:kill successfully closed websocket');
} catch (err) {
this.logger.info(err, 'TaskListen:kill');
@@ -85,13 +92,16 @@ class TaskListen extends Task {
async updateListen(status) {
if (!this.killed && this.ep && this.ep.connected) {
const args = this._bugname ? [this._bugname] : [];
this.logger.info(`TaskListen:updateListen status ${status}`);
switch (status) {
case ListenStatus.Pause:
await this.ep.forkAudioPause().catch((err) => this.logger.info(err, 'TaskListen: error pausing audio'));
await this.ep.forkAudioPause(...args)
.catch((err) => this.logger.info(err, 'TaskListen: error pausing audio'));
break;
case ListenStatus.Resume:
await this.ep.forkAudioResume().catch((err) => this.logger.info(err, 'TaskListen: error resuming audio'));
await this.ep.forkAudioResume(...args)
.catch((err) => this.logger.info(err, 'TaskListen: error resuming audio'));
break;
}
}
@@ -104,9 +114,13 @@ class TaskListen extends Task {
async _startListening(cs, ep) {
this._initListeners(ep);
const ci = this.nested ? this.parentTask.sd.callInfo : cs.callInfo.toJSON();
if (this._ignoreCustomerData) {
delete ci.customerData;
}
const metadata = Object.assign(
{sampleRate: this.sampleRate, mixType: this.mixType},
this.nested ? this.parentTask.sd.callInfo : cs.callInfo.toJSON(),
ci,
this.metadata);
if (this.hook.auth) {
this.logger.debug({username: this.hook.auth.username, password: this.hook.auth.password},
@@ -120,6 +134,7 @@ class TaskListen extends Task {
wsUrl: this.hook.url,
mixType: this.mixType,
sampling: this.sampleRate,
...(this._bugname && {bugname: this._bugname}),
metadata
});
this.recordStartTime = moment();
@@ -140,7 +155,7 @@ class TaskListen extends Task {
}
/* support bi-directional audio */
if (!this.disableBiDirectionalAudio) {
if (!this.disableBidirectionalAudio) {
ep.addCustomEventListener(ListenEvents.PlayAudio, this._onPlayAudio.bind(this, ep));
}
ep.addCustomEventListener(ListenEvents.KillAudio, this._onKillAudio.bind(this, ep));
@@ -161,12 +176,25 @@ class TaskListen extends Task {
}
_onDtmf(ep, evt) {
this.logger.debug({evt}, `TaskListen:_onDtmf received dtmf ${evt.dtmf}`);
const {dtmf, duration} = evt;
this.logger.debug({evt}, `TaskListen:_onDtmf received dtmf ${dtmf}`);
if (this.passDtmf && this.ep?.connected) {
const obj = {event: 'dtmf', dtmf: evt.dtmf, duration: evt.duration};
this.ep.forkAudioSendText(obj)
const obj = {event: 'dtmf', dtmf, duration};
const args = this._bugname ? [this._bugname, obj] : [obj];
this.ep.forkAudioSendText(...args)
.catch((err) => this.logger.info({err}, 'TaskListen:_onDtmf error sending dtmf'));
}
/* add a child span for the dtmf event */
const msDuration = Math.floor((duration / 8000) * 1000);
const {span} = this.startChildSpan(`${DTMF_SPAN_NAME}:${dtmf}`);
span.setAttributes({
channel: 1,
dtmf,
duration: `${msDuration}ms`
});
span.end();
if (evt.dtmf === this.finishOnKey) {
this.logger.info(`TaskListen:_onDtmf terminating task due to dtmf ${evt.dtmf}`);
this.results.digits = evt.dtmf;
@@ -192,7 +220,15 @@ class TaskListen extends Task {
try {
const results = await ep.play(evt.file);
logger.debug(`Finished playing file, result: ${JSON.stringify(results)}`);
ep.forkAudioSendText({type: 'playDone', data: Object.assign({id: evt.id}, results)});
const obj = {
type: 'playDone',
data: {
id: evt.id,
...results
}
};
const args = this._bugname ? [this._bugname, obj] : [obj];
ep.forkAudioSendText(...args);
} catch (err) {
logger.error({err}, 'Error playing file');
}
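// Illustrative shapes of the JSON text frames sent back over the audio fork by the two
// branches above (field names follow the code; id and result values are placeholders):
const dtmfFrame = {event: 'dtmf', dtmf: '5', duration: 1600};     // duration is in 8kHz samples (~200ms)
const playDoneFrame = {type: 'playDone', data: {id: 'play-42' /* ...plus the results from ep.play() */}};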

View File

@@ -16,6 +16,7 @@ class TaskRestDial extends Task {
this.to = this.data.to;
this.call_hook = this.data.call_hook;
this.timeout = this.data.timeout || 60;
this.sipRequestWithinDialogHook = this.data.sipRequestWithinDialogHook;
this.on('connect', this._onConnect.bind(this));
this.on('callStatus', this._onCallStatus.bind(this));
@@ -23,6 +24,10 @@ class TaskRestDial extends Task {
get name() { return TaskName.RestDial; }
set appJson(app_json) {
this.app_json = app_json;
}
/**
* INVITE has just been sent at this point
*/
@@ -31,10 +36,20 @@ class TaskRestDial extends Task {
this.cs = cs;
this.canCancel = true;
if (this.data.amd) {
this.startAmd = cs.startAmd;
this.stopAmd = cs.stopAmd;
this.on('amd', this._onAmdEvent.bind(this, cs));
}
this._setCallTimer();
await this.awaitTaskDone();
}
turnOffAmd() {
if (this.callSession.ep && this.callSession.ep.amd) this.stopAmd(this.callSession.ep, this);
}
kill(cs) {
super.kill(cs);
this._clearCallTimer();
@@ -49,12 +64,13 @@ class TaskRestDial extends Task {
this.canCancel = false;
const cs = this.callSession;
cs.setDialog(dlg);
this.logger.debug('TaskRestDial:_onConnect - call connected');
if (this.sipRequestWithinDialogHook) this._initSipRequestWithinDialogHandler(cs, dlg);
try {
const b3 = this.getTracingPropagation();
const httpHeaders = b3 && {b3};
const params = {
...cs.callInfo,
...(cs.callInfo.toJSON()),
defaults: {
synthesizer: {
vendor: cs.speechSynthesisVendor,
@@ -67,7 +83,21 @@ class TaskRestDial extends Task {
}
}
};
const tasks = await cs.requestor.request('session:new', this.call_hook, params, httpHeaders);
if (this.startAmd) {
try {
this.startAmd(this.callSession, this.callSession.ep, this, this.data.amd);
} catch (err) {
this.logger.info({err}, 'Rest:dial:Call established - Error calling startAmd');
}
}
let tasks;
if (this.app_json) {
this.logger.debug('TaskRestDial: using app_json from task data');
tasks = JSON.parse(this.app_json);
} else {
this.logger.debug({call_hook: this.call_hook}, 'TaskRestDial: retrieving application');
tasks = await cs.requestor.request('session:new', this.call_hook, params, httpHeaders);
}
if (tasks && Array.isArray(tasks)) {
this.logger.debug({tasks: tasks}, `TaskRestDial: replacing application with ${tasks.length} tasks`);
cs.replaceApplication(normalizeJambones(this.logger, tasks).map((tdata) => makeTask(this.logger, tdata)));
@@ -99,7 +129,29 @@ class TaskRestDial extends Task {
_onCallTimeout() {
this.logger.debug('TaskRestDial: timeout expired without answer, killing task');
this.timer = null;
this.kill(this.cs);
if (this.canCancel) {
this.canCancel = false;
this.cs?.req?.cancel();
}
}
_onAmdEvent(cs, evt) {
this.logger.info({evt}, 'Rest:dial:_onAmdEvent');
const {actionHook} = this.data.amd;
this.performHook(cs, actionHook, evt)
.catch((err) => {
this.logger.error({err}, 'Rest:dial:_onAmdEvent - error calling actionHook');
});
}
_initSipRequestWithinDialogHandler(cs, dlg) {
cs.sipRequestWithinDialogHook = this.sipRequestWithinDialogHook;
dlg.on('info', this._onRequestWithinDialog.bind(this, cs));
dlg.on('message', this._onRequestWithinDialog.bind(this, cs));
}
async _onRequestWithinDialog(cs, req, res) {
cs._onRequestWithinDialog(req, res);
}
}
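// Because app_json is fed straight into JSON.parse above and must normalize to an array of
// verbs, an inline application would look like this (illustrative verbs):
const app_json = JSON.stringify([
  {verb: 'say', text: 'Hello from an inline application'},
  {verb: 'hangup'}
]);
// restDialTask.appJson = app_json;   // via the appJson setter added above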

View File

@@ -37,6 +37,7 @@ class TaskSay extends Task {
this.synthesizer = this.data.synthesizer || {};
this.disableTtsCache = this.data.disableTtsCache;
this.options = this.synthesizer.options || {};
this.isHandledByPrimaryProvider = true;
}
get name() { return TaskName.Say; }
@@ -49,26 +50,24 @@ class TaskSay extends Task {
return `${this.name}{${this.text[0]}}`;
}
async exec(cs, {ep}) {
await super.exec(cs);
_validateURL(urlString) {
try {
new URL(urlString);
return true;
} catch (e) {
return false;
}
}
async _synthesizeWithSpecificVendor(cs, ep, {vendor, language, voice, label, preCache = false}) {
const {srf} = cs;
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, srf);
const {writeAlerts, AlertType, stats} = srf.locals;
const {synthAudio} = srf.locals.dbHelpers;
const vendor = this.synthesizer.vendor && this.synthesizer.vendor !== 'default' ?
this.synthesizer.vendor :
cs.speechSynthesisVendor;
const language = this.synthesizer.language && this.synthesizer.language !== 'default' ?
this.synthesizer.language :
cs.speechSynthesisLanguage ;
let voice = this.synthesizer.voice && this.synthesizer.voice !== 'default' ?
this.synthesizer.voice :
cs.speechSynthesisVoice;
const engine = this.synthesizer.engine || 'standard';
const salt = cs.callSid;
let credentials = cs.getSpeechCredentials(vendor, 'tts');
let credentials = cs.getSpeechCredentials(vendor, 'tts', label);
/* parse Nuance voices into name and model */
let model;
if (vendor === 'nuance' && voice) {
@@ -77,6 +76,8 @@ class TaskSay extends Task {
voice = arr[1];
model = arr[2];
}
} else if (vendor === 'deepgram') {
model = voice;
}
/* allow for microsoft custom region voice and api_key to be specified as an override */
@@ -87,10 +88,16 @@ class TaskSay extends Task {
credentials.api_key = this.options.apiKey || credentials.apiKey;
credentials.region = this.options.region || credentials.region;
voice = this.options.voice || voice;
} else if (vendor === 'elevenlabs') {
credentials = credentials || {};
credentials.model_id = this.options.model_id || credentials.model_id;
credentials.voice_settings = this.options.voice_settings || {};
credentials.optimize_streaming_latency = this.options.optimize_streaming_latency
|| credentials.optimize_streaming_latency;
voice = this.options.voice_id || voice;
}
this.logger.info({vendor, language, voice, model}, 'TaskSay:exec');
this.ep = ep;
if (!preCache) this.logger.info({vendor, language, voice, model}, 'TaskSay:exec');
try {
if (!credentials) {
writeAlerts({
@@ -113,13 +120,18 @@ class TaskSay extends Task {
if (text.startsWith('silence_stream://')) return text;
/* otel: trace time for tts */
const {span} = this.startChildSpan('tts-generation', {
'tts.vendor': vendor,
'tts.language': language,
'tts.voice': voice
});
let otelSpan;
if (!preCache) {
const {span} = this.startChildSpan('tts-generation', {
'tts.vendor': vendor,
'tts.language': language,
'tts.voice': voice
});
otelSpan = span;
}
try {
const {filePath, servedFromCache, rtt} = await synthAudio(stats, {
account_sid: cs.accountSid,
text,
vendor,
language,
@@ -128,6 +140,7 @@ class TaskSay extends Task {
model,
salt,
credentials,
options: this.options,
disableTtsCache : this.disableTtsCache
});
this.logger.debug(`file ${filePath}, served from cache ${servedFromCache}`);
@@ -137,9 +150,9 @@ class TaskSay extends Task {
updateSpeechCredentialLastUsed(credentials.speech_credential_sid)
.catch(() => {/*already logged error */});
}
span.setAttributes({'tts.cached': servedFromCache});
span.end();
if (!servedFromCache && rtt) {
if (otelSpan) otelSpan.setAttributes({'tts.cached': servedFromCache});
if (otelSpan) otelSpan.end();
if (!servedFromCache && rtt && !preCache) {
this.notifyStatus({
event: 'synthesized-audio',
vendor,
@@ -151,7 +164,7 @@ class TaskSay extends Task {
return filePath;
} catch (err) {
this.logger.info({err}, 'Error synthesizing tts');
span.end();
if (otelSpan) otelSpan.end();
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.TTS_FAILURE,
@@ -159,31 +172,83 @@ class TaskSay extends Task {
detail: err.message
}).catch((err) => this.logger.info({err}, 'Error generating alert for tts failure'));
this.notifyError({msg: 'TTS error', details: err.message || err});
return;
throw err;
}
};
const arr = this.text.map((t) => generateAudio(t));
const filepath = (await Promise.all(arr)).filter((fp) => fp && fp.length);
this.notifyStatus({event: 'start-playback'});
while (!this.killed && (this.loop === 'forever' || this.loop--) && this.ep?.connected) {
let segment = 0;
while (!this.killed && segment < filepath.length) {
if (cs.isInConference) {
const {memberId, confName, confUuid} = cs;
await this.playToConfMember(this.ep, memberId, confName, confUuid, filepath[segment]);
}
else {
this.logger.debug(`Say:exec sending command to play file ${filepath[segment]}`);
await ep.play(filepath[segment]);
this.logger.debug(`Say:exec completed play file ${filepath[segment]}`);
}
segment++;
}
}
const arr = this.text.map((t) => (this._validateURL(t) ? t : generateAudio(t)));
return (await Promise.all(arr)).filter((fp) => fp && fp.length);
} catch (err) {
this.logger.info(err, 'TaskSay:exec error');
throw err;
}
}
async exec(cs, {ep}) {
await super.exec(cs);
this.ep = ep;
const vendor = this.synthesizer.vendor && this.synthesizer.vendor !== 'default' ?
this.synthesizer.vendor :
cs.speechSynthesisVendor;
const language = this.synthesizer.language && this.synthesizer.language !== 'default' ?
this.synthesizer.language :
cs.speechSynthesisLanguage;
const voice = this.synthesizer.voice && this.synthesizer.voice !== 'default' ?
this.synthesizer.voice :
cs.speechSynthesisVoice;
const label = this.synthesizer.label && this.synthesizer.label !== 'default' ?
this.synthesizer.label :
cs.speechSynthesisLabel;
const fallbackVendor = this.synthesizer.fallbackVendor && this.synthesizer.fallbackVendor !== 'default' ?
this.synthesizer.fallbackVendor :
cs.fallbackSpeechSynthesisVendor;
const fallbackLanguage = this.synthesizer.fallbackLanguage && this.synthesizer.fallbackLanguage !== 'default' ?
this.synthesizer.fallbackLanguage :
cs.fallbackSpeechSynthesisLanguage;
const fallbackVoice = this.synthesizer.fallbackVoice && this.synthesizer.fallbackVoice !== 'default' ?
this.synthesizer.fallbackVoice :
cs.fallbackSpeechSynthesisVoice;
const fallbackLabel = this.synthesizer.fallbackLabel && this.synthesizer.fallbackLabel !== 'default' ?
this.synthesizer.fallbackLabel :
cs.fallbackSpeechSynthesisLabel;
let filepath;
try {
filepath = await this._synthesizeWithSpecificVendor(cs, ep, {vendor, language, voice, label});
} catch (error) {
if (fallbackVendor && this.isHandledByPrimaryProvider) {
this.isHandledByPrimaryProvider = false;
this.logger.info(`Synthesize error, fallback to ${fallbackVendor}`);
filepath = await this._synthesizeWithSpecificVendor(cs, ep,
{
vendor: fallbackVendor,
language: fallbackLanguage,
voice: fallbackVoice,
label: fallbackLabel
});
} else {
throw error;
}
}
this.notifyStatus({event: 'start-playback'});
while (!this.killed && (this.loop === 'forever' || this.loop--) && this.ep?.connected) {
let segment = 0;
while (!this.killed && segment < filepath.length) {
if (cs.isInConference) {
const {memberId, confName, confUuid} = cs;
await this.playToConfMember(this.ep, memberId, confName, confUuid, filepath[segment]);
}
else {
this.logger.debug(`Say:exec sending command to play file ${filepath[segment]}`);
await ep.play(filepath[segment]);
this.logger.debug(`Say:exec completed play file ${filepath[segment]}`);
}
segment++;
}
}
this.emit('playDone');
}
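For context on the fallback flow introduced above: if the primary synthesis attempt throws, exec retries exactly once with the fallback synthesizer settings before giving up. A hedged sketch of a say verb that would exercise it (the field names mirror the synthesizer options read in this diff; the voice values are invented):

// Illustrative only: say verb with primary and fallback synthesizer settings.
const sayVerb = {
  verb: 'say',
  text: ['Thanks for calling.'],
  synthesizer: {
    vendor: 'google',
    language: 'en-US',
    voice: 'en-US-Standard-C',
    fallbackVendor: 'microsoft',        // used once if the google attempt throws
    fallbackLanguage: 'en-US',
    fallbackVoice: 'en-US-JennyNeural'
  }
};
// Entries in text that are valid URLs (see _validateURL) are played as-is
// rather than being sent to the TTS vendor.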

lib/tasks/stt-task.js (new file, 278 lines)
View File

@@ -0,0 +1,278 @@
const Task = require('./task');
const assert = require('assert');
const crypto = require('crypto');
const { TaskPreconditions, CobaltTranscriptionEvents } = require('../utils/constants');
class SttTask extends Task {
constructor(logger, data, parentTask) {
super(logger, data);
this.parentTask = parentTask;
this.preconditions = TaskPreconditions.Endpoint;
const {
setChannelVarsForStt,
normalizeTranscription,
setSpeechCredentialsAtRuntime,
compileSonioxTranscripts,
consolidateTranscripts
} = require('../utils/transcription-utils')(logger);
this.setChannelVarsForStt = setChannelVarsForStt;
this.normalizeTranscription = normalizeTranscription;
this.compileSonioxTranscripts = compileSonioxTranscripts;
this.consolidateTranscripts = consolidateTranscripts;
this.eventHandlers = [];
this.isHandledByPrimaryProvider = true;
if (this.data.recognizer) {
const recognizer = this.data.recognizer;
this.vendor = recognizer.vendor;
this.language = recognizer.language;
this.label = recognizer.label;
//fallback
this.fallbackVendor = recognizer.fallbackVendor || 'default';
this.fallbackLanguage = recognizer.fallbackLanguage || 'default';
this.fallbackLabel = recognizer.fallbackLabel || 'default';
/* let credentials be supplied in the recognizer object at runtime */
this.sttCredentials = setSpeechCredentialsAtRuntime(recognizer);
if (!Array.isArray(this.data.recognizer.altLanguages)) {
this.data.recognizer.altLanguages = [];
}
} else {
this.data.recognizer = {hints: [], altLanguages: []};
}
/* buffer for soniox transcripts */
this._sonioxTranscripts = [];
/* bug name prefix */
this.bugname_prefix = '';
}
async exec(cs, {ep, ep2}) {
super.exec(cs);
this.ep = ep;
this.ep2 = ep2;
// copy all values from the config verb to this object.
if (cs.recognizer) {
for (const k in cs.recognizer) {
if (Array.isArray(this.data.recognizer[k]) ||
Array.isArray(cs.recognizer[k])) {
this.data.recognizer[k] = [
...this.data.recognizer[k],
...cs.recognizer[k]
];
} else if (typeof this.data.recognizer[k] === 'object' ||
typeof cs.recognizer[k] === 'object'
) {
this.data.recognizer[k] = {
...this.data.recognizer[k],
...cs.recognizer[k]
};
} else {
this.data.recognizer[k] = cs.recognizer[k] || this.data.recognizer[k];
}
}
}
if ('default' === this.vendor || !this.vendor) {
this.vendor = cs.speechRecognizerVendor;
if (this.data.recognizer) this.data.recognizer.vendor = this.vendor;
}
if ('default' === this.language || !this.language) {
this.language = cs.speechRecognizerLanguage;
if (this.data.recognizer) this.data.recognizer.language = this.language;
}
if ('default' === this.label || !this.label) {
this.label = cs.speechRecognizerLabel;
if (this.data.recognizer) this.data.recognizer.label = this.label;
}
// Fallback options
if ('default' === this.fallbackVendor || !this.fallbackVendor) {
this.fallbackVendor = cs.fallbackSpeechRecognizerVendor;
if (this.data.recognizer) this.data.recognizer.fallbackVendor = this.fallbackVendor;
}
if ('default' === this.fallbackLanguage || !this.fallbackLanguage) {
this.fallbackLanguage = cs.fallbackSpeechRecognizerLanguage;
if (this.data.recognizer) this.data.recognizer.fallbackLanguage = this.fallbackLanguage;
}
if ('default' === this.fallbackLabel || !this.fallbackLabel) {
this.fallbackLabel = cs.fallbackSpeechRecognizerLabel;
if (this.data.recognizer) this.data.recognizer.fallbackLabel = this.fallbackLabel;
}
if (!this.data.recognizer.vendor) {
this.data.recognizer.vendor = this.vendor;
}
if (this.vendor === 'cobalt' && !this.data.recognizer.model) {
// By default, the application stores the cobalt model in the language field
this.data.recognizer.model = cs.speechRecognizerLanguage;
}
if (!this.sttCredentials) {
try {
this.sttCredentials = await this._initSpeechCredentials(this.cs, this.vendor, this.label);
} catch (error) {
if (this.fallbackVendor && this.isHandledByPrimaryProvider) {
await this._fallback();
} else {
throw error;
}
}
}
/* when using cobalt, a model is required */
if (this.vendor === 'cobalt' && !this.data.recognizer.model) {
this.notifyError({ msg: 'ASR error', details:'Cobalt requires a model to be specified'});
throw new Error('Cobalt requires a model to be specified');
}
if (cs.hasAltLanguages) {
this.data.recognizer.altLanguages = this.data.recognizer.altLanguages.concat(cs.altLanguages);
this.logger.debug({altLanguages: this.data.recognizer.altLanguages},
'STT:exec - applying altLanguages');
}
if (cs.hasGlobalSttPunctuation && !this.data.recognizer.punctuation) {
this.data.recognizer.punctuation = cs.globalSttPunctuation;
}
}
addCustomEventListener(ep, event, handler) {
this.eventHandlers.push({ep, event, handler});
ep.addCustomEventListener(event, handler);
}
removeCustomEventListeners() {
this.eventHandlers.forEach((h) => h.ep.removeCustomEventListener(h.event, h.handler));
}
async _initSpeechCredentials(cs, vendor, label) {
const {getNuanceAccessToken, getIbmAccessToken} = cs.srf.locals.dbHelpers;
let credentials = cs.getSpeechCredentials(vendor, 'stt', label);
if (!credentials) {
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info(`ERROR stt using ${vendor} requested but creds not supplied`);
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_NOT_PROVISIONED,
vendor
}).catch((err) => this.logger.info({err}, 'Error generating alert for no stt'));
// Notify the application that credentials for this STT vendor are not configured.
this.notifyError({
msg: 'ASR error',
details: `No speech-to-text service credentials for ${vendor} have been configured`
});
this.notifyTaskDone();
throw new Error(`No speech-to-text service credentials for ${vendor} have been configured`);
}
if (vendor === 'nuance' && credentials.client_id) {
/* get nuance access token */
const {client_id, secret} = credentials;
const {access_token, servedFromCache} = await getNuanceAccessToken(client_id, secret, 'asr tts');
this.logger.debug({client_id}, `got nuance access token ${servedFromCache ? 'from cache' : ''}`);
credentials = {...credentials, access_token};
}
else if (vendor == 'ibm' && credentials.stt_api_key) {
/* get ibm access token */
const {stt_api_key, stt_region} = credentials;
const {access_token, servedFromCache} = await getIbmAccessToken(stt_api_key);
this.logger.debug({stt_api_key}, `got ibm access token ${servedFromCache ? 'from cache' : ''}`);
credentials = {...credentials, access_token, stt_region};
}
return credentials;
}
async _fallback() {
assert(this.fallbackVendor, 'fallback failed without fallbackVendor configuration');
this.isHandledByPrimaryProvider = false;
this.logger.info(`Failed to use primary STT provider, fallback to ${this.fallbackVendor}`);
this.vendor = this.fallbackVendor;
this.language = this.fallbackLanguage;
this.label = this.fallbackLabel;
this.data.recognizer.vendor = this.vendor;
this.data.recognizer.language = this.language;
this.data.recognizer.label = this.label;
this.sttCredentials = await this._initSpeechCredentials(this.cs, this.vendor, this.label);
}
async compileHintsForCobalt(ep, hostport, model, token, hints) {
const {retrieveKey} = this.cs.srf.locals.dbHelpers;
const hash = crypto.createHash('sha1');
hash.update(`${model}:${hints}`);
const key = `cobalt:${hash.digest('hex')}`;
this.context = await retrieveKey(key);
if (this.context) {
this.logger.debug({model, hints}, 'found cached cobalt context for supplied hints');
return this.context;
}
this.logger.debug({model, hints}, 'compiling cobalt context for supplied hints');
return new Promise((resolve, reject) => {
this.cobaltCompileResolver = resolve;
ep.addCustomEventListener(CobaltTranscriptionEvents.CompileContext, this._onCompileContext.bind(this, ep, key));
ep.api('uuid_cobalt_compile_context', [ep.uuid, hostport, model, token, hints], (err, evt) => {
if (err || 0 !== evt.getBody().indexOf('+OK')) {
ep.removeCustomEventListener(CobaltTranscriptionEvents.CompileContext);
return reject(err);
}
});
});
}
_onCompileContext(ep, key, evt) {
const {addKey} = this.cs.srf.locals.dbHelpers;
this.logger.debug({evt}, `received cobalt compile context event, will cache under ${key}`);
this.cobaltCompileResolver(evt.compiled_context);
ep.removeCustomEventListener(CobaltTranscriptionEvents.CompileContext);
this.cobaltCompileResolver = null;
//cache the compiled context
addKey(key, evt.compiled_context, 3600 * 12)
.catch((err) => this.logger.info({err}, `Error caching cobalt context for ${key}`));
}
_doContinuousAsrWithDeepgram(asrTimeout) {
/* deepgram has an utterance_end_ms property that simplifies things */
assert(this.vendor === 'deepgram');
this.logger.debug(`_doContinuousAsrWithDeepgram - setting utterance_end_ms to ${asrTimeout}`);
const dgOptions = this.data.recognizer.deepgramOptions = this.data.recognizer.deepgramOptions || {};
dgOptions.utteranceEndMs = dgOptions.utteranceEndMs || asrTimeout;
}
_onVendorConnect(_cs, _ep) {
this.logger.debug(`TaskGather:_on${this.vendor}Connect`);
}
_onVendorError(cs, _ep, evt) {
this.logger.info({evt}, `${this.name}:_on${this.vendor}Error`);
const {writeAlerts, AlertType} = cs.srf.locals;
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: 'STT failure reported by vendor',
detail: evt.error,
vendor: this.vendor,
}).catch((err) => this.logger.info({err}, `Error generating alert for ${this.vendor} connection failure`));
this.notifyError({msg: 'ASR error', details:`Failed connecting to speech vendor ${this.vendor}: ${evt.error}`});
}
_onVendorConnectFailure(cs, _ep, evt) {
const {reason} = evt;
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info({evt}, `${this.name}:_on${this.vendor}ConnectFailure`);
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Failed connecting to ${this.vendor} speech recognizer: ${reason}`,
vendor: this.vendor,
}).catch((err) => this.logger.info({err}, `Error generating alert for ${this.vendor} connection failure`));
this.notifyError({msg: 'ASR error', details:`Failed connecting to speech vendor ${this.vendor}: ${reason}`});
}
}
module.exports = SttTask;
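The SttTask base class above centralizes what gather and transcribe previously duplicated: resolving 'default' recognizer fields from the call session, loading credentials, and falling back to a secondary vendor when the primary credentials cannot be loaded. A rough sketch of a recognizer block that drives this logic (values invented for illustration):

// Illustrative recognizer settings consumed by SttTask. 'default' entries are
// resolved at exec time from the call session (cs.speechRecognizer*), and the
// fallback* fields are used by _fallback() if the primary credentials fail.
const recognizer = {
  vendor: 'default',
  language: 'default',
  label: 'default',
  fallbackVendor: 'deepgram',
  fallbackLanguage: 'en-US',
  hints: [],
  altLanguages: []
};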

View File

@@ -12,7 +12,7 @@ class TaskTag extends Task {
async exec(cs) {
super.exec(cs);
cs.callInfo.customerData = this.data;
//this.logger.debug({callInfo: cs.callInfo.toJSON()}, 'TaskTag:exec set customer data in callInfo');
this.logger.debug({customerData: cs.callInfo.customerData}, 'TaskTag:exec set customer data in callInfo');
}
}

View File

@@ -1,62 +1,62 @@
const Task = require('./task');
const assert = require('assert');
const {
TaskName,
TaskPreconditions,
GoogleTranscriptionEvents,
NuanceTranscriptionEvents,
AwsTranscriptionEvents,
AzureTranscriptionEvents,
DeepgramTranscriptionEvents,
SonioxTranscriptionEvents,
CobaltTranscriptionEvents,
IbmTranscriptionEvents,
NvidiaTranscriptionEvents,
JambonzTranscriptionEvents
} = require('../utils/constants');
JambonzTranscriptionEvents,
TranscribeStatus,
AssemblyAiTranscriptionEvents
} = require('../utils/constants.json');
const { normalizeJambones } = require('@jambonz/verb-specifications');
const SttTask = require('./stt-task');
class TaskTranscribe extends Task {
const STT_LISTEN_SPAN_NAME = 'stt-listen';
class TaskTranscribe extends SttTask {
constructor(logger, opts, parentTask) {
super(logger, opts);
this.preconditions = TaskPreconditions.Endpoint;
this.parentTask = parentTask;
const {
setChannelVarsForStt,
normalizeTranscription,
removeSpeechListeners,
setSpeechCredentialsAtRuntime,
compileSonioxTranscripts
} = require('../utils/transcription-utils')(logger);
this.setChannelVarsForStt = setChannelVarsForStt;
this.normalizeTranscription = normalizeTranscription;
this.removeSpeechListeners = removeSpeechListeners;
this.compileSonioxTranscripts = compileSonioxTranscripts;
super(logger, opts, parentTask);
this.transcriptionHook = this.data.transcriptionHook;
this.earlyMedia = this.data.earlyMedia === true || (parentTask && parentTask.earlyMedia);
const recognizer = this.data.recognizer;
this.vendor = recognizer.vendor;
this.language = recognizer.language;
this.interim = !!recognizer.interim;
this.separateRecognitionPerChannel = recognizer.separateRecognitionPerChannel;
if (this.data.recognizer) {
this.interim = !!this.data.recognizer.interim;
this.separateRecognitionPerChannel = this.data.recognizer.separateRecognitionPerChannel;
}
/* let credentials be supplied in the recognizer object at runtime */
this.sttCredentials = setSpeechCredentialsAtRuntime(recognizer);
this.childSpan = [null, null];
/* buffer for soniox transcripts */
this._sonioxTranscripts = [];
recognizer.hints = recognizer.hints || [];
recognizer.altLanguages = recognizer.altLanguages || [];
// Continuous asr timeout
this.asrTimeout = typeof this.data.recognizer.asrTimeout === 'number' ? this.data.recognizer.asrTimeout * 1000 : 0;
if (this.asrTimeout > 0) {
this.isContinuousAsr = true;
}
/* buffer speech for continuous asr */
this._bufferedTranscripts = [];
this.bugname_prefix = 'transcribe_';
}
get name() { return TaskName.Transcribe; }
async exec(cs, {ep, ep2}) {
super.exec(cs);
await super.exec(cs, {ep, ep2});
if (this.data.recognizer.vendor === 'nuance') {
this.data.recognizer.nuanceOptions = {
// by default, nuance STT will recognize only the 1st utterance;
// setting utteranceDetectionMode to 'multiple' lets nuance detect all utterances
utteranceDetectionMode: 'multiple',
...this.data.recognizer.nuanceOptions
};
}
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
const {getNuanceAccessToken, getIbmAccessToken} = cs.srf.locals.dbHelpers;
if (cs.hasGlobalSttHints) {
const {hints, hintsBoost} = cs.globalSttHints;
@@ -65,57 +65,8 @@ class TaskTranscribe extends Task {
this.logger.debug({hints: this.data.recognizer.hints, hintsBoost: this.data.recognizer.hintsBoost},
'Transcribe:exec - applying global sttHints');
}
if (cs.hasAltLanguages) {
this.data.recognizer.altLanguages = this.data.recognizer.altLanguages.concat(cs.altLanguages);
this.logger.debug({altLanguages: this.altLanguages},
'Transcribe:exec - applying altLanguages');
}
if (cs.hasGlobalSttPunctuation && !this.data.recognizer.punctuation) {
this.data.recognizer.punctuation = cs.globalSttPunctuation;
}
this.ep = ep;
this.ep2 = ep2;
if ('default' === this.vendor || !this.vendor) {
this.vendor = cs.speechRecognizerVendor;
if (this.data.recognizer) this.data.recognizer.vendor = this.vendor;
}
if ('default' === this.language || !this.language) {
this.language = cs.speechRecognizerLanguage;
if (this.data.recognizer) this.data.recognizer.language = this.language;
}
if (!this.data.recognizer.vendor) {
this.data.recognizer.vendor = this.vendor;
}
if (!this.sttCredentials) this.sttCredentials = cs.getSpeechCredentials(this.vendor, 'stt');
try {
if (!this.sttCredentials) {
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info(`TaskTranscribe:exec - ERROR stt using ${this.vendor} requested but creds not supplied`);
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_NOT_PROVISIONED,
vendor: this.vendor
}).catch((err) => this.logger.info({err}, 'Error generating alert for no stt'));
throw new Error('no provisioned speech credentials for TTS');
}
if (this.vendor === 'nuance' && this.sttCredentials.client_id) {
/* get nuance access token */
const {client_id, secret} = this.sttCredentials;
const {access_token, servedFromCache} = await getNuanceAccessToken(client_id, secret, 'asr tts');
this.logger.debug({client_id},
`Transcribe:exec - got nuance access token ${servedFromCache ? 'from cache' : ''}`);
this.sttCredentials = {...this.sttCredentials, access_token};
}
else if (this.vendor == 'ibm' && this.sttCredentials.stt_api_key) {
/* get ibm access token */
const {stt_api_key, stt_region} = this.sttCredentials;
const {access_token, servedFromCache} = await getIbmAccessToken(stt_api_key);
this.logger.debug({stt_api_key}, `Gather:exec - got ibm access token ${servedFromCache ? 'from cache' : ''}`);
this.sttCredentials = {...this.sttCredentials, access_token, stt_region};
}
await this._startTranscribing(cs, ep, 1);
if (this.separateRecognitionPerChannel && ep2) {
await this._startTranscribing(cs, ep2, 2);
@@ -129,15 +80,17 @@ class TaskTranscribe extends Task {
this.logger.info(err, 'TaskTranscribe:exec - error');
this.parentTask && this.parentTask.emit('error', err);
}
this.removeSpeechListeners(ep);
this.removeCustomEventListeners();
}
async kill(cs) {
super.kill(cs);
async _stopTranscription() {
let stopTranscription = false;
if (this.ep?.connected) {
stopTranscription = true;
this.ep.stopTranscription({vendor: this.vendor})
this.ep.stopTranscription({
vendor: this.vendor,
bugname: this.bugname
})
.catch((err) => this.logger.info(err, 'Error TaskTranscribe:kill'));
}
if (this.separateRecognitionPerChannel && this.ep2 && this.ep2.connected) {
@@ -145,6 +98,13 @@ class TaskTranscribe extends Task {
this.ep2.stopTranscription({vendor: this.vendor})
.catch((err) => this.logger.info(err, 'Error TaskTranscribe:kill'));
}
return stopTranscription;
}
async kill(cs) {
super.kill(cs);
const stopTranscription = await this._stopTranscription();
// hangup after 1 sec if we don't get a final transcription
if (stopTranscription) this._timer = setTimeout(() => this.notifyTaskDone(), 1500);
else this.notifyTaskDone();
@@ -152,99 +112,183 @@ class TaskTranscribe extends Task {
await this.awaitTaskDone();
}
async _startTranscribing(cs, ep, channel) {
async updateTranscribe(status) {
if (!this.killed && this.ep && this.ep.connected) {
this.logger.info(`TaskTranscribe:updateTranscribe status ${status}`);
switch (status) {
case TranscribeStatus.Pause:
await this._stopTranscription();
break;
case TranscribeStatus.Resume:
await this._startTranscribing(this.cs, this.ep, 1);
if (this.separateRecognitionPerChannel && this.ep2) {
await this._startTranscribing(this.cs, this.ep2, 2);
}
break;
}
}
}
async _setSpeechHandlers(cs, ep, channel) {
if (this[`_speechHandlersSet_${channel}`]) return;
this[`_speechHandlersSet_${channel}`] = true;
/* some special deepgram logic */
if (this.vendor === 'deepgram') {
if (this.isContinuousAsr) this._doContinuousAsrWithDeepgram(this.asrTimeout);
}
const opts = this.setChannelVarsForStt(this, this.sttCredentials, this.data.recognizer);
switch (this.vendor) {
case 'google':
this.bugname = 'google_transcribe';
ep.addCustomEventListener(GoogleTranscriptionEvents.Transcription,
this.bugname = `${this.bugname_prefix}google_transcribe`;
this.addCustomEventListener(ep, GoogleTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(GoogleTranscriptionEvents.NoAudioDetected,
this.addCustomEventListener(ep, GoogleTranscriptionEvents.NoAudioDetected,
this._onNoAudio.bind(this, cs, ep, channel));
ep.addCustomEventListener(GoogleTranscriptionEvents.MaxDurationExceeded,
this.addCustomEventListener(ep, GoogleTranscriptionEvents.MaxDurationExceeded,
this._onMaxDurationExceeded.bind(this, cs, ep, channel));
break;
case 'aws':
case 'polly':
this.bugname = 'aws_transcribe';
ep.addCustomEventListener(AwsTranscriptionEvents.Transcription,
this.bugname = `${this.bugname_prefix}aws_transcribe`;
this.addCustomEventListener(ep, AwsTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(AwsTranscriptionEvents.NoAudioDetected,
this.addCustomEventListener(ep, AwsTranscriptionEvents.NoAudioDetected,
this._onNoAudio.bind(this, cs, ep, channel));
ep.addCustomEventListener(AwsTranscriptionEvents.MaxDurationExceeded,
this.addCustomEventListener(ep, AwsTranscriptionEvents.MaxDurationExceeded,
this._onMaxDurationExceeded.bind(this, cs, ep, channel));
break;
case 'microsoft':
this.bugname = 'azure_transcribe';
ep.addCustomEventListener(AzureTranscriptionEvents.Transcription,
this.bugname = `${this.bugname_prefix}azure_transcribe`;
this.addCustomEventListener(ep, AzureTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(AzureTranscriptionEvents.NoSpeechDetected,
this.addCustomEventListener(ep, AzureTranscriptionEvents.NoSpeechDetected,
this._onNoAudio.bind(this, cs, ep, channel));
break;
case 'nuance':
this.bugname = 'nuance_transcribe';
ep.addCustomEventListener(NuanceTranscriptionEvents.Transcription,
this.bugname = `${this.bugname_prefix}nuance_transcribe`;
this.addCustomEventListener(ep, NuanceTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(NuanceTranscriptionEvents.StartOfSpeech,
this._onStartOfSpeech.bind(this, cs, ep, channel));
ep.addCustomEventListener(NuanceTranscriptionEvents.TranscriptionComplete,
this._onTranscriptionComplete.bind(this, cs, ep, channel));
break;
case 'deepgram':
this.bugname = 'deepgram_transcribe';
ep.addCustomEventListener(DeepgramTranscriptionEvents.Transcription,
this.bugname = `${this.bugname_prefix}deepgram_transcribe`;
this.addCustomEventListener(ep, DeepgramTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(DeepgramTranscriptionEvents.Connect,
this._onDeepgramConnect.bind(this, cs, ep, channel));
ep.addCustomEventListener(DeepgramTranscriptionEvents.ConnectFailure,
this._onDeepGramConnectFailure.bind(this, cs, ep, channel));
this.addCustomEventListener(ep, DeepgramTranscriptionEvents.Connect,
this._onVendorConnect.bind(this, cs, ep));
this.addCustomEventListener(ep, DeepgramTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep, channel));
/* if app sets deepgramOptions.utteranceEndMs they essentially want continuous asr */
if (opts.DEEPGRAM_SPEECH_UTTERANCE_END_MS) this.isContinuousAsr = true;
break;
case 'soniox':
this.bugname = 'soniox_transcribe';
ep.addCustomEventListener(SonioxTranscriptionEvents.Transcription,
this.bugname = `${this.bugname_prefix}soniox_transcribe`;
this.addCustomEventListener(ep, SonioxTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
break;
case 'ibm':
this.bugname = 'ibm_transcribe';
ep.addCustomEventListener(IbmTranscriptionEvents.Transcription,
case 'cobalt':
this.bugname = `${this.bugname_prefix}cobalt_transcribe`;
this.addCustomEventListener(ep, CobaltTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
ep.addCustomEventListener(IbmTranscriptionEvents.Connect,
this._onIbmConnect.bind(this, cs, ep, channel));
ep.addCustomEventListener(IbmTranscriptionEvents.ConnectFailure,
this._onIbmConnectFailure.bind(this, cs, ep, channel));
/* cobalt doesn't have a language, it has a model, which is required */
if (!this.data.recognizer.model) {
throw new Error('Cobalt requires a model to be specified');
}
this.language = this.data.recognizer.model;
/* special case: if using hints with cobalt we need to compile them */
this.hostport = opts.COBALT_SERVER_URI;
if (this.vendor === 'cobalt' && opts.COBALT_SPEECH_HINTS) {
try {
const context = await this.compileHintsForCobalt(
ep,
opts.COBALT_SERVER_URI,
this.data.recognizer.model,
opts.COBALT_CONTEXT_TOKEN,
opts.COBALT_SPEECH_HINTS
);
if (context) opts.COBALT_COMPILED_CONTEXT_DATA = context;
delete opts.COBALT_SPEECH_HINTS;
} catch (err) {
this.logger.error({err}, 'Error compiling hints for cobalt');
}
}
break;
case 'ibm':
this.bugname = `${this.bugname_prefix}ibm_transcribe`;
this.addCustomEventListener(ep, IbmTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
this.addCustomEventListener(ep, IbmTranscriptionEvents.Connect,
this._onVendorConnect.bind(this, cs, ep));
this.addCustomEventListener(ep, IbmTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep, channel));
break;
case 'nvidia':
this.bugname = 'nvidia_transcribe';
ep.addCustomEventListener(NvidiaTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep));
ep.addCustomEventListener(NvidiaTranscriptionEvents.StartOfSpeech,
this._onStartOfSpeech.bind(this, cs, ep));
ep.addCustomEventListener(NvidiaTranscriptionEvents.TranscriptionComplete,
this._onTranscriptionComplete.bind(this, cs, ep));
ep.addCustomEventListener(NvidiaTranscriptionEvents.VadDetected,
this._onVadDetected.bind(this, cs, ep));
this.bugname = `${this.bugname_prefix}nvidia_transcribe`;
this.addCustomEventListener(ep, NvidiaTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
break;
case 'assemblyai':
this.bugname = `${this.bugname_prefix}assemblyai_transcribe`;
this.addCustomEventListener(ep, AssemblyAiTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
this.addCustomEventListener(ep,
AssemblyAiTranscriptionEvents.Connect, this._onVendorConnect.bind(this, cs, ep));
this.addCustomEventListener(ep, AssemblyAiTranscriptionEvents.Error, this._onVendorError.bind(this, cs, ep));
this.addCustomEventListener(ep, AssemblyAiTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep, channel));
break;
default:
throw new Error(`Invalid vendor ${this.vendor}`);
if (this.vendor.startsWith('custom:')) {
this.bugname = `${this.bugname_prefix}${this.vendor}_transcribe`;
this.addCustomEventListener(ep, JambonzTranscriptionEvents.Transcription,
this._onTranscription.bind(this, cs, ep, channel));
this.addCustomEventListener(ep, JambonzTranscriptionEvents.Connect, this._onVendorConnect.bind(this, cs, ep));
this.addCustomEventListener(ep, JambonzTranscriptionEvents.ConnectFailure,
this._onVendorConnectFailure.bind(this, cs, ep));
break;
}
else {
this.notifyError({ msg: 'ASR error', details:`Invalid vendor ${this.vendor}`});
this.notifyTaskDone();
throw new Error(`Invalid vendor ${this.vendor}`);
}
}
/* common handler for all stt engine errors */
ep.addCustomEventListener(JambonzTranscriptionEvents.Error, this._onJambonzError.bind(this, cs, ep));
this.addCustomEventListener(ep, JambonzTranscriptionEvents.Error, this._onJambonzError.bind(this, cs, ep));
await ep.set(opts)
.catch((err) => this.logger.info(err, 'Error setting channel variables'));
}
async _startTranscribing(cs, ep, channel) {
await this._setSpeechHandlers(cs, ep, channel);
await this._transcribe(ep);
/* start child span for this channel */
const {span, ctx} = this.startChildSpan(`${STT_LISTEN_SPAN_NAME}:${channel}`);
this.childSpan[channel - 1] = {span, ctx};
}
async _transcribe(ep) {
this.logger.debug(
`TaskTranscribe:_transcribe - starting transcription vendor ${this.vendor} bugname ${this.bugname}`);
await ep.startTranscription({
vendor: this.vendor,
interim: this.interim ? true : false,
locale: this.language,
channels: /*this.separateRecognitionPerChannel ? 2 : */ 1,
bugname: this.bugname
bugname: this.bugname,
hostport: this.hostport
});
}
@@ -253,12 +297,25 @@ class TaskTranscribe extends Task {
const bugname = fsEvent.getHeader('media-bugname');
if (bugname && this.bugname !== bugname) return;
if (this.vendor === 'ibm') {
if (evt?.state === 'listening') return;
if (this.vendor === 'ibm' && evt?.state === 'listening') return;
if (this.vendor === 'deepgram' && evt.type === 'UtteranceEnd') {
/* we will only get this when we have set utterance_end_ms */
if (this._bufferedTranscripts.length === 0) {
this.logger.debug('TaskTranscribe:_onTranscription - got UtteranceEnd event from deepgram but no buffered transcripts');
}
else {
this.logger.debug('TaskTranscribe:_onTranscription - got UtteranceEnd event from deepgram, returning buffered transcript');
evt = this.consolidateTranscripts(this._bufferedTranscripts, 1, this.language);
this._bufferedTranscripts = [];
this._resolve('speech', evt);
}
return;
}
this.logger.debug({evt}, 'TaskTranscribe:_onTranscription - before normalization');
evt = this.normalizeTranscription(evt, this.vendor, channel, this.language);
evt = this.normalizeTranscription(evt, this.vendor, channel, this.language, undefined,
this.data.recognizer.punctuation);
this.logger.debug({evt}, 'TaskTranscribe:_onTranscription');
if (evt.alternatives.length === 0) {
this.logger.info({evt}, 'TaskTranscribe:_onTranscription - got empty transcript, continue listening');
@@ -285,6 +342,25 @@ class TaskTranscribe extends Task {
}
}
if (this.isContinuousAsr && evt.is_final) {
this._bufferedTranscripts.push(evt);
this._startAsrTimer(channel);
} else {
await this._resolve(channel, evt);
}
}
async _resolve(channel, evt) {
/* we've got a transcript, so end the otel child span for this channel */
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
this.childSpan[channel - 1].span.setAttributes({
channel,
'stt.resolve': 'transcript',
'stt.result': JSON.stringify(evt)
});
this.childSpan[channel - 1].span.end();
}
if (this.transcriptionHook) {
const b3 = this.getTracingPropagation();
const httpHeaders = b3 && {b3};
@@ -315,16 +391,44 @@ class TaskTranscribe extends Task {
this._clearTimer();
this.notifyTaskDone();
}
else {
/* start another child span for this channel */
const {span, ctx} = this.startChildSpan(`${STT_LISTEN_SPAN_NAME}:${channel}`);
this.childSpan[channel - 1] = {span, ctx};
}
}
_onNoAudio(cs, ep, channel) {
this.logger.debug(`TaskTranscribe:_onNoAudio restarting transcription on channel ${channel}`);
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
this.childSpan[channel - 1].span.setAttributes({
channel,
'stt.resolve': 'timeout'
});
this.childSpan[channel - 1].span.end();
}
this._transcribe(ep);
/* start new child span for this channel */
const {span, ctx} = this.startChildSpan(`${STT_LISTEN_SPAN_NAME}:${channel}`);
this.childSpan[channel - 1] = {span, ctx};
}
_onMaxDurationExceeded(cs, ep, channel) {
this.logger.debug(`TaskTranscribe:_onMaxDurationExceeded restarting transcription on channel ${channel}`);
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
this.childSpan[channel - 1].span.setAttributes({
channel,
'stt.resolve': 'max duration exceeded'
});
this.childSpan[channel - 1].span.end();
}
this._transcribe(ep);
/* start new child span for this channel */
const {span, ctx} = this.startChildSpan(`${STT_LISTEN_SPAN_NAME}:${channel}`);
this.childSpan[channel - 1] = {span, ctx};
}
_clearTimer() {
@@ -333,64 +437,76 @@ class TaskTranscribe extends Task {
this._timer = null;
}
}
_onDeepgramConnect(_cs, _ep) {
this.logger.debug('TaskTranscribe:_onDeepgramConnect');
}
_onDeepGramConnectFailure(cs, _ep, _channel, evt) {
const {reason} = evt;
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info({evt}, 'TaskTranscribe:_onDeepgramConnectFailure');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Failed connecting to Deepgram speech recognizer: ${reason}`,
vendor: 'deepgram',
}).catch((err) => this.logger.info({err}, 'Error generating alert for deepgram connection failure'));
this.notifyError(`Failed connecting to speech vendor deepgram: ${reason}`);
this.notifyTaskDone();
}
_onIbmConnect(_cs, _ep) {
this.logger.debug('TaskTranscribe:_onIbmConnect');
}
_onIbmConnectFailure(cs, _ep, _channel, evt) {
const {reason} = evt;
const {writeAlerts, AlertType} = cs.srf.locals;
this.logger.info({evt}, 'TaskTranscribe:_onIbmConnectFailure');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Failed connecting to IBM watson speech recognizer: ${reason}`,
vendor: 'ibm',
}).catch((err) => this.logger.info({err}, 'Error generating alert for IBM connection failure'));
this.notifyError(`Failed connecting to speech vendor IBM: ${reason}`);
this.notifyTaskDone();
}
_onIbmError(cs, _ep, _channel, evt) {
this.logger.info({evt}, 'TaskTranscribe:_onIbmError');
}
_onJambonzError(cs, _ep, evt) {
async _onJambonzError(cs, _ep, evt) {
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError');
const {writeAlerts, AlertType} = cs.srf.locals;
if (this.isHandledByPrimaryProvider && this.fallbackVendor) {
_ep.stopTranscription({
vendor: this.vendor,
bugname: this.bugname
})
.catch((err) => this.logger.error({err}, `Error stopping transcription for primary vendor ${this.vendor}`));
const {updateSpeechCredentialLastUsed} = require('../utils/db-utils')(this.logger, cs.srf);
try {
await this._fallback();
let channel = 1;
if (this.ep !== _ep) {
channel = 2;
}
this._startTranscribing(cs, _ep, channel);
updateSpeechCredentialLastUsed(this.sttCredentials.speech_credential_sid);
return;
} catch (error) {
this.logger.info({error}, `Error while falling back to ${this.fallbackVendor}`);
}
} else {
const {writeAlerts, AlertType} = cs.srf.locals;
if (this.vendor === 'nuance') {
const {code, error} = evt;
if (code === 404 && error === 'No speech') return this._resolve('timeout');
if (code === 413 && error === 'Too much speech') return this._resolve('timeout');
if (this.vendor === 'nuance') {
const {code, error} = evt;
if (code === 404 && error === 'No speech') return this._resolve('timeout');
if (code === 413 && error === 'Too much speech') return this._resolve('timeout');
}
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Custom speech vendor ${this.vendor} error: ${evt.error}`,
vendor: this.vendor,
}).catch((err) => this.logger.info({err}, 'Error generating alert for jambonz custom connection failure'));
this.notifyError({msg: 'ASR error', details:`Custom speech vendor ${this.vendor} error: ${evt.error}`});
}
this.logger.info({evt}, 'TaskTranscribe:_onJambonzError');
writeAlerts({
account_sid: cs.accountSid,
alert_type: AlertType.STT_FAILURE,
message: `Custom speech vendor ${this.vendor} error: ${evt.error}`,
vendor: this.vendor,
}).catch((err) => this.logger.info({err}, 'Error generating alert for jambonz custom connection failure'));
this.notifyError({msg: 'ASR error', details:`Custom speech vendor ${this.vendor} error: ${evt.error}`});
}
_onVendorConnectFailure(cs, _ep, channel, evt) {
super._onVendorConnectFailure(cs, _ep, evt);
if (this.childSpan[channel - 1] && this.childSpan[channel - 1].span) {
this.childSpan[channel - 1].span.setAttributes({
channel,
'stt.resolve': 'connection failure'
});
this.childSpan[channel - 1].span.end();
}
this.notifyTaskDone();
}
_startAsrTimer(channel) {
if (this.vendor === 'deepgram') return; // no need
assert(this.isContinuousAsr);
this._clearAsrTimer(channel);
this._asrTimer = setTimeout(() => {
this.logger.debug(`TaskTranscribe:_startAsrTimer - asr timer went off for channel: ${channel}`);
const evt = this.consolidateTranscripts(this._bufferedTranscripts, channel, this.language);
this._bufferedTranscripts = [];
this._resolve(channel, evt);
}, this.asrTimeout);
this.logger.debug(`TaskTranscribe:_startAsrTimer: set for ${this.asrTimeout}ms for channel ${channel}`);
}
_clearAsrTimer(channel) {
if (this._asrTimer) clearTimeout(this._asrTimer);
this._asrTimer = null;
}
}
module.exports = TaskTranscribe;
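On the continuous-ASR handling added above: a non-zero recognizer.asrTimeout (seconds) buffers final transcripts and flushes them when the asr timer fires; for Deepgram the timeout is instead mapped to utteranceEndMs so the flush happens on the vendor's UtteranceEnd event. A hedged sketch of a transcribe verb using it (hook path and values are placeholders):

// Illustrative only: transcribe verb with continuous ASR enabled.
const transcribeVerb = {
  verb: 'transcribe',
  transcriptionHook: '/transcripts',   // placeholder webhook path
  recognizer: {
    vendor: 'deepgram',
    language: 'en-US',
    interim: false,
    asrTimeout: 2                      // seconds; > 0 turns on continuous asr
  }
};
// For deepgram this becomes deepgramOptions.utteranceEndMs = 2000 (see
// _doContinuousAsrWithDeepgram in stt-task.js), and _startAsrTimer is skipped
// because the UtteranceEnd event drives the flush instead.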

View File

@@ -1,9 +1,17 @@
const Emitter = require('events');
const {readFile} = require('fs');
const {
TaskName,
GoogleTranscriptionEvents,
AwsTranscriptionEvents,
AzureTranscriptionEvents,
NuanceTranscriptionEvents,
NvidiaTranscriptionEvents,
IbmTranscriptionEvents,
SonioxTranscriptionEvents,
CobaltTranscriptionEvents,
DeepgramTranscriptionEvents,
JambonzTranscriptionEvents,
AmdEvents,
AvmdEvents
} = require('./constants');
@@ -47,13 +55,19 @@ class Amd extends Emitter {
this.language = opts.recognizer?.language || cs.speechRecognizerLanguage;
if ('default' === this.language) this.language = cs.speechRecognizerLanguage;
this.sttCredentials = cs.getSpeechCredentials(this.vendor, 'stt');
this.sttCredentials = cs.getSpeechCredentials(this.vendor, 'stt',
opts.recognizer?.label || cs.speechRecognizerLabel);
if (!this.sttCredentials) throw new Error(`No speech credentials found for vendor ${this.vendor}`);
this.thresholdWordCount = opts.thresholdWordCount || 9;
const {normalizeTranscription} = require('./transcription-utils')(logger);
this.normalizeTranscription = normalizeTranscription;
const {getNuanceAccessToken, getIbmAccessToken} = cs.srf.locals.dbHelpers;
this.getNuanceAccessToken = getNuanceAccessToken;
this.getIbmAccessToken = getIbmAccessToken;
const {setChannelVarsForStt} = require('./transcription-utils')(logger);
this.setChannelVarsForStt = setChannelVarsForStt;
const {
noSpeechTimeoutMs = 5000,
@@ -229,51 +243,96 @@ module.exports = (logger) => {
const startAmd = async(cs, ep, task, opts) => {
const amd = ep.amd = new Amd(logger, cs, opts);
const {vendor, language, sttCredentials} = amd;
const sttOpts = {};
const {vendor, language} = amd;
let sttCredentials = amd.sttCredentials;
const hints = voicemailHints[language] || [];
if (vendor === 'nuance' && sttCredentials.client_id) {
/* get nuance access token */
const {getNuanceAccessToken} = amd;
const {client_id, secret} = sttCredentials;
const {access_token, servedFromCache} = await getNuanceAccessToken(client_id, secret, 'asr tts');
logger.debug({client_id}, `startAmd - got nuance access token ${servedFromCache ? 'from cache' : ''}`);
sttCredentials = {...sttCredentials, access_token};
}
else if (vendor == 'ibm' && sttCredentials.stt_api_key) {
/* get ibm access token */
const {getIbmAccessToken} = amd;
const {stt_api_key, stt_region} = sttCredentials;
const {access_token, servedFromCache} = await getIbmAccessToken(stt_api_key);
logger.debug({stt_api_key}, `startAmd - got ibm access token ${servedFromCache ? 'from cache' : ''}`);
sttCredentials = {...sttCredentials, access_token, stt_region};
}
/* set stt options */
logger.info(`starting amd for vendor ${vendor} and language ${language}`);
if ('google' === vendor) {
sttOpts.GOOGLE_APPLICATION_CREDENTIALS = JSON.stringify(sttCredentials.credentials);
sttOpts.GOOGLE_SPEECH_USE_ENHANCED = true;
sttOpts.GOOGLE_SPEECH_HINTS = hints.join(',');
if (opts.recognizer?.altLanguages) {
sttOpts.GOOGLE_SPEECH_ALTERNATIVE_LANGUAGE_CODES = opts.recognizer.altLanguages.join(',');
}
ep.addCustomEventListener(GoogleTranscriptionEvents.Transcription, onTranscription.bind(null, cs, ep, task));
ep.addCustomEventListener(GoogleTranscriptionEvents.EndOfUtterance, onEndOfUtterance.bind(null, cs, ep, task));
}
else if (['aws', 'polly'].includes(vendor)) {
Object.assign(sttOpts, {
AWS_ACCESS_KEY_ID: sttCredentials.accessKeyId,
AWS_SECRET_ACCESS_KEY: sttCredentials.secretAccessKey,
AWS_REGION: sttCredentials.region
});
ep.addCustomEventListener(AwsTranscriptionEvents.Transcription, onTranscription.bind(null, cs, ep, task));
}
else if ('microsoft' === vendor) {
Object.assign(sttOpts, {
'AZURE_SUBSCRIPTION_KEY': sttCredentials.api_key,
'AZURE_REGION': sttCredentials.region
});
sttOpts.AZURE_SPEECH_HINTS = hints.join(',');
if (opts.recognizer?.altLanguages) {
sttOpts.AZURE_SPEECH_ALTERNATIVE_LANGUAGE_CODES = opts.recognizer.altLanguages.join(',');
}
sttOpts.AZURE_INITIAL_SPEECH_TIMEOUT_MS = opts.resolveTimeoutMs || 20000;
const sttOpts = amd.setChannelVarsForStt({name: TaskName.Gather}, sttCredentials, {
vendor,
hints,
enhancedModel: true,
altLanguages: opts.recognizer?.altLanguages || [],
initialSpeechTimeoutMs: opts.resolveTimeoutMs,
});
ep.addCustomEventListener(AzureTranscriptionEvents.Transcription, onTranscription.bind(null, cs, ep, task));
ep.addCustomEventListener(AzureTranscriptionEvents.NoSpeechDetected, onNoSpeechDetected.bind(null, cs, ep, task));
}
await ep.set(sttOpts).catch((err) => logger.info(err, 'Error setting channel variables'));
amd.transcriptionHandler = onTranscription.bind(null, cs, ep, task);
amd.EndOfUtteranceHandler = onEndOfUtterance.bind(null, cs, ep, task);
amd.noSpeechHandler = onNoSpeechDetected.bind(null, cs, ep, task);
switch (vendor) {
case 'google':
ep.addCustomEventListener(GoogleTranscriptionEvents.Transcription, amd.transcriptionHandler);
ep.addCustomEventListener(GoogleTranscriptionEvents.EndOfUtterance, amd.EndOfUtteranceHandler);
break;
case 'aws':
case 'polly':
ep.addCustomEventListener(AwsTranscriptionEvents.Transcription, amd.transcriptionHandler);
break;
case 'microsoft':
ep.addCustomEventListener(AzureTranscriptionEvents.Transcription, amd.transcriptionHandler);
ep.addCustomEventListener(AzureTranscriptionEvents.NoSpeechDetected, amd.noSpeechHandler);
break;
case 'nuance':
ep.addCustomEventListener(NuanceTranscriptionEvents.Transcription, amd.transcriptionHandler);
break;
case 'deepgram':
ep.addCustomEventListener(DeepgramTranscriptionEvents.Transcription, amd.transcriptionHandler);
break;
case 'soniox':
amd.bugname = 'soniox_amd_transcribe';
ep.addCustomEventListener(SonioxTranscriptionEvents.Transcription, amd.transcriptionHandler);
break;
case 'ibm':
ep.addCustomEventListener(IbmTranscriptionEvents.Transcription, amd.transcriptionHandler);
break;
case 'nvidia':
ep.addCustomEventListener(NvidiaTranscriptionEvents.Transcription, amd.transcriptionHandler);
break;
case 'cobalt':
ep.addCustomEventListener(CobaltTranscriptionEvents.Transcription, amd.transcriptionHandler);
break;
default:
if (vendor.startsWith('custom:')) {
ep.addCustomEventListener(JambonzTranscriptionEvents.Transcription, amd.transcriptionHandler);
break;
}
else {
throw new Error(`Invalid vendor ${vendor}`);
}
}
amd
.on(AmdEvents.NoSpeechDetected, (evt) => {
task.emit('amd', {type: AmdEvents.NoSpeechDetected, ...evt});
try {
ep.connected && ep.stopTranscription({vendor, bugname});
stopAmd(ep, task);
} catch (err) {
logger.info({err}, 'Error stopping transcription');
}
@@ -281,7 +340,7 @@ module.exports = (logger) => {
.on(AmdEvents.HumanDetected, (evt) => {
task.emit('amd', {type: AmdEvents.HumanDetected, ...evt});
try {
ep.connected && ep.stopTranscription({vendor, bugname});
stopAmd(ep, task);
} catch (err) {
logger.info({err}, 'Error stopping transcription');
}
@@ -292,7 +351,7 @@ module.exports = (logger) => {
.on(AmdEvents.DecisionTimeout, (evt) => {
task.emit('amd', {type: AmdEvents.DecisionTimeout, ...evt});
try {
ep.connected && ep.stopTranscription({vendor, bugname});
stopAmd(ep, task);
} catch (err) {
logger.info({err}, 'Error stopping transcription');
}
@@ -300,7 +359,7 @@ module.exports = (logger) => {
.on(AmdEvents.ToneTimeout, (evt) => {
//task.emit('amd', {type: AmdEvents.ToneTimeout, ...evt});
try {
ep.connected && ep.execute('avmd_stop').catch((err) => logger.info(err, 'Error stopping avmd'));
stopAmd(ep, task);
} catch (err) {
logger.info({err}, 'Error stopping avmd');
}
@@ -308,7 +367,7 @@ module.exports = (logger) => {
.on(AmdEvents.MachineStoppedSpeaking, () => {
task.emit('amd', {type: AmdEvents.MachineStoppedSpeaking});
try {
ep.connected && ep.stopTranscription({vendor, bugname});
stopAmd(ep, task);
} catch (err) {
logger.info({err}, 'Error stopping transcription');
}
@@ -327,6 +386,19 @@ module.exports = (logger) => {
if (ep.amd) {
vendor = ep.amd.vendor;
ep.amd.stopAllTimers();
ep.removeListener(GoogleTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
ep.removeListener(GoogleTranscriptionEvents.EndOfUtterance, ep.amd.EndOfUtteranceHandler);
ep.removeListener(AwsTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
ep.removeListener(AzureTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
ep.removeListener(AzureTranscriptionEvents.NoSpeechDetected, ep.amd.noSpeechHandler);
ep.removeListener(NuanceTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
ep.removeListener(DeepgramTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
ep.removeListener(SonioxTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
ep.removeListener(IbmTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
ep.removeListener(NvidiaTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
ep.removeListener(JambonzTranscriptionEvents.Transcription, ep.amd.transcriptionHandler);
ep.amd = null;
}
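The handler bookkeeping above (amd.transcriptionHandler, amd.EndOfUtteranceHandler, amd.noSpeechHandler) exists so stopAmd can remove exactly the listeners that startAmd registered; an inline .bind() would create a new function each time and could never be removed. A minimal generic illustration of the pattern:

// Generic Node.js sketch of the keep-a-reference-to-remove-it pattern used here.
const {EventEmitter} = require('events');

const ep = new EventEmitter();
const amd = {};

// store the bound handler so the same reference can be removed later
amd.transcriptionHandler = (evt) => console.log('transcription', evt);
ep.on('transcription-event', amd.transcriptionHandler);

// ...later, when stopping:
ep.removeListener('transcription-event', amd.transcriptionHandler);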

View File

@@ -11,15 +11,20 @@ const {LifeCycleEvents} = require('./constants');
const express = require('express');
const app = express();
const getString = bent('string');
const AWS = require('aws-sdk');
const sns = new AWS.SNS({apiVersion: '2010-03-31'});
const autoscaling = new AWS.AutoScaling({apiVersion: '2011-01-01'});
const {
SNSClient,
SubscribeCommand,
UnsubscribeCommand } = require('@aws-sdk/client-sns');
const snsClient = new SNSClient({ region: AWS_REGION, apiVersion: '2010-03-31' });
const {
AutoScalingClient,
DescribeAutoScalingGroupsCommand,
CompleteLifecycleActionCommand } = require('@aws-sdk/client-auto-scaling');
const autoScalingClient = new AutoScalingClient({ region: AWS_REGION, apiVersion: '2011-01-01' });
const {Parser} = require('xml2js');
const parser = new Parser();
const {validatePayload} = require('verify-aws-sns-signature');
AWS.config.update({region: AWS_REGION});
class SnsNotifier extends Emitter {
constructor(logger) {
super();
@@ -69,7 +74,7 @@ class SnsNotifier extends Emitter {
subscriptionRequestId: this.subscriptionRequestId
}, 'response from SNS SubscribeURL');
const data = await this.describeInstance();
this.lifecycleState = data.AutoScalingInstances[0].LifecycleState;
this.lifecycleState = data.AutoScalingGroups[0].Instances[0].LifecycleState;
this.emit('SubscriptionConfirmation', {publicIp: this.publicIp});
break;
@@ -135,11 +140,12 @@ class SnsNotifier extends Emitter {
async subscribe() {
try {
const response = await sns.subscribe({
const params = {
Protocol: 'http',
TopicArn: AWS_SNS_TOPIC_ARM,
Endpoint: this.snsEndpoint
}).promise();
};
const response = await snsClient.send(new SubscribeCommand(params));
this.logger.info({response}, `response to SNS subscribe to ${AWS_SNS_TOPIC_ARM}`);
} catch (err) {
this.logger.error({err}, `Error subscribing to SNS topic arn ${AWS_SNS_TOPIC_ARM}`);
@@ -149,9 +155,10 @@ class SnsNotifier extends Emitter {
async unsubscribe() {
if (!this.subscriptionArn) throw new Error('SnsNotifier#unsubscribe called without an active subscription');
try {
const response = await sns.unsubscribe({
const params = {
SubscriptionArn: this.subscriptionArn
}).promise();
};
const response = await snsClient.send(new UnsubscribeCommand(params));
this.logger.info({response}, `response to SNS unsubscribe to ${AWS_SNS_TOPIC_ARM}`);
} catch (err) {
this.logger.error({err}, `Error unsubscribing to SNS topic arn ${AWS_SNS_TOPIC_ARM}`);
@@ -160,26 +167,29 @@ class SnsNotifier extends Emitter {
completeScaleIn() {
assert(this.scaleInParams);
autoscaling.completeLifecycleAction(this.scaleInParams, (err, response) => {
if (err) return this.logger.error({err}, 'Error completing scale-in');
this.logger.info({response}, 'Successfully completed scale-in action');
});
autoScalingClient.send(new CompleteLifecycleActionCommand(this.scaleInParams))
.then((data) => {
return this.logger.info({data}, 'Successfully completed scale-in action');
})
.catch((err) => {
this.logger.error({err}, 'Error completing scale-in');
});
}
describeInstance() {
return new Promise((resolve, reject) => {
if (!this.instanceId) return reject('instance-id unknown');
autoscaling.describeAutoScalingInstances({
autoScalingClient.send(new DescribeAutoScalingGroupsCommand({
InstanceIds: [this.instanceId]
}, (err, data) => {
if (err) {
}))
.then((data) => {
this.logger.info({data}, 'SnsNotifier: describeInstance');
return resolve(data);
})
.catch((err) => {
this.logger.error({err}, 'Error describing instances');
reject(err);
} else {
this.logger.info({data}, 'SnsNotifier: describeInstance');
resolve(data);
}
});
});
});
}
@@ -193,7 +203,7 @@ module.exports = async function(logger) {
process.on('SIGHUP', async() => {
try {
const data = await notifier.describeInstance();
const state = data.AutoScalingInstances[0].LifecycleState;
const state = data.AutoScalingGroups[0].Instances[0].LifecycleState;
if (state !== notifier.lifecycleState) {
notifier.lifecycleState = state;
switch (state) {

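The SNS and Auto Scaling changes above follow the standard AWS SDK v2-to-v3 migration: build a client once, then send() a command object per call instead of chaining .promise(). A compact sketch of the same pattern (region and parameter values are placeholders):

// AWS SDK v3 call pattern used in the diff above; values are placeholders.
const {SNSClient, SubscribeCommand} = require('@aws-sdk/client-sns');

const snsClient = new SNSClient({region: 'us-east-1'});

async function subscribeEndpoint(topicArn, endpoint) {
  // v2: sns.subscribe(params).promise()
  // v3: snsClient.send(new SubscribeCommand(params))
  const response = await snsClient.send(new SubscribeCommand({
    Protocol: 'http',
    TopicArn: topicArn,
    Endpoint: endpoint
  }));
  return response.SubscriptionArn;   // 'pending confirmation' until the endpoint confirms
}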
View File

@@ -0,0 +1,197 @@
const { normalizeJambones } = require('@jambonz/verb-specifications');
const makeTask = require('../tasks/make_task');
const { JAMBONZ_RECORD_WS_BASE_URL, JAMBONZ_RECORD_WS_USERNAME, JAMBONZ_RECORD_WS_PASSWORD } = require('../config');
const Emitter = require('events');
class BackgroundTaskManager extends Emitter {
constructor({cs, logger, rootSpan}) {
super();
this.tasks = new Map();
this.cs = cs;
this.logger = logger;
this.rootSpan = rootSpan;
}
isTaskRunning(type) {
return this.tasks.has(type);
}
getTask(type) {
if (this.tasks.has(type)) {
return this.tasks.get(type);
}
}
count() {
return this.tasks.size;
}
async newTask(type, taskOpts) {
this.logger.info({taskOpts}, `initiating Background task ${type}`);
if (this.tasks.has(type)) {
this.logger.info(`Background task ${type} is already running, skipped`);
return;
}
let task;
switch (type) {
case 'listen':
task = await this._initListen(taskOpts);
break;
case 'bargeIn':
task = await this._initBargeIn(taskOpts);
break;
case 'record':
task = await this._initRecord();
break;
case 'transcribe':
task = await this._initTranscribe(taskOpts);
break;
default:
break;
}
if (task) {
this.tasks.set(type, task);
}
return task;
}
stop(type) {
const task = this.getTask(type);
if (task) {
this.logger.info(`stopping background task: ${type}`);
task.removeAllListeners();
task.span.end();
task.kill();
// Remove task from managed List
this.tasks.delete(type);
} else {
this.logger.info(`stopping background task ${type} skipped: not running`);
}
}
stopAll() {
this.logger.info('BackgroundTaskManager:stopAll');
for (const key of this.tasks.keys()) {
this.stop(key);
}
}
// Initiate Task
// Initiate Listen
async _initListen(opts, bugname = 'jambonz-background-listen', ignoreCustomerData = false, type = 'listen') {
let task;
try {
const t = normalizeJambones(this.logger, [opts]);
task = makeTask(this.logger, t[0]);
task.bugname = bugname;
task.ignoreCustomerData = ignoreCustomerData;
const resources = await this.cs._evaluatePreconditions(task);
const {span, ctx} = this.rootSpan.startChildSpan(`background-${type}:${task.summary}`);
task.span = span;
task.ctx = ctx;
task.exec(this.cs, resources)
.then(this._taskCompleted.bind(this, type, task))
.catch(this._taskError.bind(this, type, task));
} catch (err) {
this.logger.info({err, opts}, `BackgroundTaskManager:_initListen - Error creating ${bugname} task`);
}
return task;
}
// Initiate Gather
async _initBargeIn(opts) {
let task;
try {
const t = normalizeJambones(this.logger, [opts]);
task = makeTask(this.logger, t[0]);
task
.once('dtmf', this._bargeInTaskCompleted.bind(this))
.once('vad', this._bargeInTaskCompleted.bind(this))
.once('transcription', this._bargeInTaskCompleted.bind(this))
.once('timeout', this._bargeInTaskCompleted.bind(this));
const resources = await this.cs._evaluatePreconditions(task);
const {span, ctx} = this.rootSpan.startChildSpan(`background-bargeIn:${task.summary}`);
task.span = span;
task.ctx = ctx;
task.bugname_prefix = 'background_bargeIn_';
task.exec(this.cs, resources)
.then(() => {
this._taskCompleted('bargeIn', task);
if (task.sticky && !this.cs.callGone && !this.cs._stopping) {
this.logger.info('BackgroundTaskManager:_initBargeIn: restarting background bargeIn');
this.newTask('bargeIn', opts);
}
return;
})
.catch(this._taskError.bind(this, 'bargeIn', task));
} catch (err) {
this.logger.info(err, 'BackgroundTaskManager:_initBargeIn - Error creating bargeIn task');
}
return task;
}
// Initiate Record
async _initRecord() {
if (this.cs.accountInfo.account.record_all_calls || this.cs.application.record_all_calls) {
if (!JAMBONZ_RECORD_WS_BASE_URL || !this.cs.accountInfo.account.bucket_credential) {
this.logger.error(`_initRecord: invalid configuration,
missing JAMBONZ_RECORD_WS_BASE_URL or bucket configuration`);
return undefined;
}
const listenOpts = {
url: `${JAMBONZ_RECORD_WS_BASE_URL}/record/${this.cs.accountInfo.account.bucket_credential.vendor}`,
disableBidirectionalAudio: true,
mixType : 'stereo',
passDtmf: true
};
if (JAMBONZ_RECORD_WS_USERNAME && JAMBONZ_RECORD_WS_PASSWORD) {
listenOpts.wsAuth = {
username: JAMBONZ_RECORD_WS_USERNAME,
password: JAMBONZ_RECORD_WS_PASSWORD
};
}
this.logger.debug({listenOpts}, '_initRecord: enabling listen');
return await this._initListen({verb: 'listen', ...listenOpts}, 'jambonz-session-record', true, 'record');
}
}
// Initiate Transcribe
async _initTranscribe(opts) {
let task;
try {
const t = normalizeJambones(this.logger, [opts]);
task = makeTask(this.logger, t[0]);
const resources = await this.cs._evaluatePreconditions(task);
const {span, ctx} = this.rootSpan.startChildSpan(`background-transcribe:${task.summary}`);
task.span = span;
task.ctx = ctx;
task.bugname_prefix = 'background_transcribe_';
task.exec(this.cs, resources)
.then(this._taskCompleted.bind(this, 'transcribe', task))
.catch(this._taskError.bind(this, 'transcribe', task));
} catch (err) {
this.logger.info(err, 'BackgroundTaskManager:_initTranscribe - Error creating transcribe task');
}
return task;
}
_taskCompleted(type, task) {
this.logger.info({type, task}, 'BackgroundTaskManager:_taskCompleted: task completed');
task.removeAllListeners();
task.span.end();
this.tasks.delete(type);
}
_taskError(type, task, error) {
this.logger.info({type, task, error}, 'BackgroundTaskManager:_taskError: task Error');
task.removeAllListeners();
task.span.end();
this.tasks.delete(type);
}
_bargeInTaskCompleted(evt) {
this.logger.info({evt}, 'BackgroundTaskManager:_bargeInTaskCompleted on event from background bargeIn');
this.emit('bargeIn-done', evt);
}
}
module.exports = BackgroundTaskManager;
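A minimal usage sketch of the class above; the module path, hook URL and recognizer settings are illustrative, while cs, logger and rootSpan stand for the live call session, logger and root span the constructor expects:

const BackgroundTaskManager = require('./background-task-manager'); // path assumed

const manager = new BackgroundTaskManager({cs, logger, rootSpan});
manager.on('bargeIn-done', (evt) => logger.info({evt}, 'background bargeIn completed'));

manager.newTask('transcribe', {
  verb: 'transcribe',
  transcriptionHook: '/transcription',                 // illustrative hook
  recognizer: {vendor: 'deepgram', language: 'en-US'}  // illustrative recognizer
})
  .then(() => logger.info(`running ${manager.count()} background task(s)`))
  .catch((err) => logger.error({err}, 'failed to start background transcribe'));

// later, e.g. at session teardown
if (manager.isTaskRunning('transcribe')) manager.stop('transcribe');
manager.stopAll();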


@@ -2,17 +2,24 @@ const {context, trace} = require('@opentelemetry/api');
const {Dialog} = require('drachtio-srf');
class RootSpan {
constructor(callType, req) {
let tracer, callSid, linkedSpanId;
const {srf} = require('../../');
const tracer = srf.locals.otel.tracer;
let callSid, accountSid, applicationSid, linkedSpanId;
if (req instanceof Dialog) {
const dlg = req;
tracer = dlg.srf.locals.otel.tracer;
callSid = dlg.callSid;
linkedSpanId = dlg.linkedSpanId;
}
else {
tracer = req.srf.locals.otel.tracer;
else if (req.srf) {
callSid = req.locals.callSid;
accountSid = req.get('X-Account-Sid'),
applicationSid = req.locals.application_sid;
}
else {
callSid = req.callSid;
accountSid = req.accountSid;
applicationSid = req.applicationSid;
}
this._span = tracer.startSpan(callType || 'incoming-call');
if (req instanceof Dialog) {
@@ -22,13 +29,20 @@ class RootSpan {
callId: dlg.sip.callId
});
}
else if (req.srf) {
this._span.setAttributes({
callSid,
accountSid,
applicationSid,
callId: req.get('Call-ID'),
externalCallId: req.get('X-CID')
});
}
else {
this._span.setAttributes({
callSid,
accountSid: req.get('X-Account-Sid'),
applicationSid: req.locals.application_sid,
callId: req.get('Call-ID'),
externalCallId: req.get('X-CID')
accountSid,
applicationSid
});
}


@@ -29,7 +29,7 @@
"Tag": "tag",
"Transcribe": "transcribe"
},
"AllowedSipRecVerbs": ["config", "gather", "transcribe", "listen"],
"AllowedSipRecVerbs": ["config", "gather", "transcribe", "listen", "tag"],
"CallStatus": {
"Trying": "trying",
"Ringing": "ringing",
@@ -51,6 +51,11 @@
"Silence": "silence",
"Resume": "resume"
},
"TranscribeStatus": {
"Pause": "pause",
"Silence": "silence",
"Resume": "resume"
},
"TaskPreconditions": {
"None": "none",
"Endpoint": "endpoint",
@@ -90,6 +95,11 @@
"Transcription": "soniox_transcribe::transcription",
"Error": "soniox_transcribe::error"
},
"CobaltTranscriptionEvents": {
"Transcription": "cobalt_speech::transcription",
"CompileContext": "cobalt_speech::compile_context_response",
"Error": "cobalt_speech::error"
},
"IbmTranscriptionEvents": {
"Transcription": "ibm_transcribe::transcription",
"ConnectFailure": "ibm_transcribe::connect_failed",
@@ -116,6 +126,12 @@
"Connect": "jambonz_transcribe::connect",
"Error": "jambonz_transcribe::error"
},
"AssemblyAiTranscriptionEvents": {
"Transcription": "assemblyai_transcribe::transcription",
"Error": "assemblyai_transcribe::error",
"ConnectFailure": "assemblyai_transcribe::connect_failed",
"Connect": "assemblyai_transcribe::connect"
},
"ListenEvents": {
"Connect": "mod_audio_fork::connect",
"ConnectFailure": "mod_audio_fork::connect_failed",


@@ -3,13 +3,10 @@ const {decrypt} = require('./encrypt-decrypt');
const sqlAccountDetails = `SELECT *
FROM accounts account
WHERE account.account_sid = ?`;
const sqlSpeechCredentials = `SELECT *
const sqlSpeechCredentialsForAccount = `SELECT *
FROM speech_credentials
WHERE account_sid = ? `;
const sqlSpeechCredentialsForSP = `SELECT *
FROM speech_credentials
WHERE service_provider_sid =
(SELECT service_provider_sid from accounts where account_sid = ?)`;
WHERE account_sid = ? OR (account_sid is NULL AND service_provider_sid =
(SELECT service_provider_sid from accounts where account_sid = ?))`;
const sqlQueryAccountCarrierByName = `SELECT voip_carrier_sid
FROM voip_carriers vc
WHERE vc.account_sid = ?
@@ -30,6 +27,9 @@ WHERE pn.account_sid IS NULL
AND pn.service_provider_sid =
(SELECT service_provider_sid from accounts where account_sid = ?)
AND pn.number = ?`;
const sqlQueryGoogleCustomVoices = `SELECT *
FROM google_custom_voices
WHERE google_custom_voice_sid = ?`;
const speechMapper = (cred) => {
const {credential, ...obj} = cred;
@@ -49,8 +49,10 @@ const speechMapper = (cred) => {
obj.region = o.region;
obj.use_custom_stt = o.use_custom_stt;
obj.custom_stt_endpoint = o.custom_stt_endpoint;
obj.custom_stt_endpoint_url = o.custom_stt_endpoint_url;
obj.use_custom_tts = o.use_custom_tts;
obj.custom_tts_endpoint = o.custom_tts_endpoint;
obj.custom_tts_endpoint_url = o.custom_tts_endpoint_url;
}
else if ('wellsaid' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
@@ -82,7 +84,22 @@ const speechMapper = (cred) => {
const o = JSON.parse(decrypt(credential));
obj.riva_server_uri = o.riva_server_uri;
}
else if (obj.vendor.startsWith('custom:')) {
else if ('cobalt' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.cobalt_server_uri = o.cobalt_server_uri;
} else if ('elevenlabs' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
obj.model_id = o.model_id;
obj.options = o.options;
} else if ('assemblyai' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
} else if ('whisper' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
obj.api_key = o.api_key;
obj.model_id = o.model_id;
} else if (obj.vendor.startsWith('custom:')) {
const o = JSON.parse(decrypt(credential));
obj.auth_token = o.auth_token;
obj.custom_stt_url = o.custom_stt_url;
@@ -94,27 +111,28 @@ const speechMapper = (cred) => {
return obj;
};
const bucketCredentialDecrypt = (account) => {
const { bucket_credential } = account.account;
if (!bucket_credential || bucket_credential.vendor) return;
account.account.bucket_credential = JSON.parse(decrypt(bucket_credential));
};
module.exports = (logger, srf) => {
const {pool} = srf.locals.dbHelpers;
const pp = pool.promise();
const lookupAccountDetails = async(account_sid) => {
const [r] = await pp.query({sql: sqlAccountDetails, nestTables: true}, account_sid);
const [r] = await pp.query({sql: sqlAccountDetails, nestTables: true}, [account_sid]);
if (0 === r.length) throw new Error(`invalid accountSid: ${account_sid}`);
const [r2] = await pp.query(sqlSpeechCredentials, account_sid);
const [r2] = await pp.query(sqlSpeechCredentialsForAccount, [account_sid, account_sid]);
const speech = r2.map(speechMapper);
/* add service provider creds unless we have that vendor at the account level */
const [r3] = await pp.query(sqlSpeechCredentialsForSP, account_sid);
r3.forEach((s) => {
if (!speech.find((s2) => s2.vendor === s.vendor)) {
speech.push(speechMapper(s));
}
});
const account = r[0];
bucketCredentialDecrypt(account);
return {
...r[0],
...account,
speech
};
};
@@ -154,10 +172,22 @@ module.exports = (logger, srf) => {
}
};
const lookupGoogleCustomVoice = async(google_custom_voice_sid) => {
const pp = pool.promise();
try {
const [r] = await pp.query(sqlQueryGoogleCustomVoices, [google_custom_voice_sid]);
return r;
} catch (err) {
logger.error({err}, `lookupGoogleCustomVoices: Error ${google_custom_voice_sid}`);
}
};
return {
lookupAccountDetails,
updateSpeechCredentialLastUsed,
lookupCarrier,
lookupCarrierByPhoneNumber
lookupCarrierByPhoneNumber,
lookupGoogleCustomVoice
};
};


@@ -6,7 +6,8 @@ const {PORT, HTTP_PORT_MAX} = require('../config');
const doListen = (logger, app, port, resolve) => {
const server = app.listen(port, () => {
const {srf} = app.locals;
logger.info(`listening for HTTP requests on port ${PORT}, serviceUrl is ${srf.locals.serviceUrl}`);
srf.locals.serviceUrl = `http://${srf.locals.ipv4}:${port}`;
logger.info(`listening for HTTP requests on port ${port}, serviceUrl is ${srf.locals.serviceUrl}`);
resolve({server, app});
});
return server;


@@ -1,4 +1,4 @@
const {Client, Pool} = require('undici');
const {request, getGlobalDispatcher, setGlobalDispatcher, Dispatcher, ProxyAgent, Client, Pool} = require('undici');
const parseUrl = require('parse-url');
const assert = require('assert');
const BaseRequestor = require('./base-requestor');
@@ -10,6 +10,11 @@ const {
HTTP_POOLSIZE,
HTTP_PIPELINING,
HTTP_TIMEOUT,
HTTP_PROXY_IP,
HTTP_PROXY_PORT,
HTTP_PROXY_PROTOCOL,
NODE_ENV,
HTTP_USER_AGENT_HEADER,
} = require('../config');
const toBase64 = (str) => Buffer.from(str || '', 'utf8').toString('base64');
@@ -21,6 +26,15 @@ function basicAuth(username, password) {
return {Authorization: header};
}
const defaultDispatcher = HTTP_PROXY_IP ?
new ProxyAgent(`${HTTP_PROXY_PROTOCOL}://${HTTP_PROXY_IP}${HTTP_PROXY_PORT ? `:${HTTP_PROXY_PORT}` : ''}`) :
getGlobalDispatcher();
setGlobalDispatcher(new class extends Dispatcher {
dispatch(options, handler) {
return defaultDispatcher.dispatch(options, handler);
}
}());
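// Illustrative aside (hypothetical proxy address): once a ProxyAgent is installed as the
// global dispatcher, as above, a plain undici request() is tunnelled through the proxy with
// no per-request configuration, roughly:
//   setGlobalDispatcher(new ProxyAgent('http://10.0.0.5:3128'));
//   request('https://example.com/hook')
//     .then(({statusCode}) => console.log(`proxied webhook returned ${statusCode}`));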
class HttpRequestor extends BaseRequestor {
constructor(logger, account_sid, hook, secret) {
@@ -60,6 +74,18 @@ class HttpRequestor extends BaseRequestor {
if (u.port) this.client = new Client(`${u.protocol}://${u.resource}:${u.port}`);
else this.client = new Client(`${u.protocol}://${u.resource}`);
}
if (NODE_ENV == 'test' && process.env.JAMBONES_HTTP_PROXY_IP) {
const defDispatcher =
new ProxyAgent(`${process.env.JAMBONES_HTTP_PROXY_PROTOCOL}://${process.env.JAMBONES_HTTP_PROXY_IP}${
process.env.JAMBONES_HTTP_PROXY_PORT ? `:${process.env.JAMBONES_HTTP_PROXY_PORT}` : ''}`);
setGlobalDispatcher(new class extends Dispatcher {
dispatch(options, handler) {
return defDispatcher.dispatch(options, handler);
}
}());
}
}
get baseUrl() {
@@ -91,6 +117,10 @@ class HttpRequestor extends BaseRequestor {
const url = hook.url || hook;
const method = hook.method || 'POST';
let buf = '';
httpHeaders = {
...httpHeaders,
...(HTTP_USER_AGENT_HEADER && {'user-agent' : HTTP_USER_AGENT_HEADER})
};
assert.ok(url, 'HttpRequestor:request url was not provided');
assert.ok(['GET', 'POST'].includes(method), `HttpRequestor:request method must be 'GET' or 'POST' not ${method}`);
@@ -139,7 +169,18 @@ class HttpRequestor extends BaseRequestor {
};
const absUrl = this._isRelativeUrl(url) ? `${this.baseUrl}${url}` : url;
this.logger.debug({url, absUrl, hdrs}, 'send webhook');
const {statusCode, headers, body} = await client.request({
const {statusCode, headers, body} = HTTP_PROXY_IP ? await request(
this.baseUrl,
{
path,
query,
method,
headers: hdrs,
...('POST' === method && {body: JSON.stringify(payload)}),
timeout: HTTP_TIMEOUT,
followRedirects: false
}
) : await client.request({
path,
query,
method,


@@ -8,14 +8,13 @@ const {
JAMBONES_MYSQL_CONNECTION_LIMIT,
JAMBONES_MYSQL_PORT,
JAMBONES_FREESWITCH,
JAMBONES_REDIS_HOST,
JAMBONES_REDIS_PORT,
SMPP_URL,
JAMBONES_TIME_SERIES_HOST,
JAMBONES_ESL_LISTEN_ADDRESS,
PORT,
NODE_ENV,
} = require('../config');
const Registrar = require('@jambonz/mw-registrar');
const assert = require('assert');
function initMS(logger, wrapper, ms) {
@@ -138,7 +137,8 @@ function installSrfLocals(srf, logger) {
lookupTeamsByAccount,
lookupAccountBySid,
lookupAccountCapacitiesBySid,
lookupSmppGateways
lookupSmppGateways,
lookupClientByAccountAndUsername
} = require('@jambonz/db-helpers')({
host: JAMBONES_MYSQL_HOST,
user: JAMBONES_MYSQL_USER,
@@ -146,7 +146,7 @@ function installSrfLocals(srf, logger) {
password: JAMBONES_MYSQL_PASSWORD,
database: JAMBONES_MYSQL_DATABASE,
connectionLimit: JAMBONES_MYSQL_CONNECTION_LIMIT || 10
}, logger, tracer);
}, logger);
const {
client,
updateCallStatus,
@@ -167,18 +167,18 @@ function installSrfLocals(srf, logger) {
removeFromList,
getListPosition,
lengthOfList,
} = require('@jambonz/realtimedb-helpers')({
host: JAMBONES_REDIS_HOST,
port: JAMBONES_REDIS_PORT || 6379
}, logger, tracer);
addToSortedSet,
retrieveFromSortedSet,
retrieveByPatternSortedSet,
sortedSetLength,
sortedSetPositionByPattern
} = require('@jambonz/realtimedb-helpers')({}, logger, tracer);
const registrar = new Registrar(logger, client);
const {
synthAudio,
getNuanceAccessToken,
getIbmAccessToken,
} = require('@jambonz/speech-utils')({
host: JAMBONES_REDIS_HOST,
port: JAMBONES_REDIS_PORT || 6379
}, logger, tracer);
} = require('@jambonz/speech-utils')({}, logger);
const {
writeAlerts,
AlertType
@@ -198,6 +198,7 @@ function installSrfLocals(srf, logger) {
srf.locals = {...srf.locals,
dbHelpers: {
client,
registrar,
pool,
lookupAppByPhoneNumber,
lookupAppByRegex,
@@ -208,6 +209,7 @@ function installSrfLocals(srf, logger) {
lookupAccountBySid,
lookupAccountCapacitiesBySid,
lookupSmppGateways,
lookupClientByAccountAndUsername,
updateCallStatus,
retrieveCall,
listCalls,
@@ -228,7 +230,12 @@ function installSrfLocals(srf, logger) {
lengthOfList,
getListPosition,
getNuanceAccessToken,
getIbmAccessToken
getIbmAccessToken,
addToSortedSet,
retrieveFromSortedSet,
retrieveByPatternSortedSet,
sortedSetLength,
sortedSetPositionByPattern
},
parentLogger: logger,
getSBC,


@@ -13,9 +13,12 @@ const moment = require('moment');
const stripCodecs = require('./strip-ancillary-codecs');
const RootSpan = require('./call-tracer');
const uuidv4 = require('uuid-random');
const HttpRequestor = require('./http-requestor');
const WsRequestor = require('./ws-requestor');
const {makeOpusFirst} = require('./sdp-utils');
class SingleDialer extends Emitter {
constructor({logger, sbcAddress, target, opts, application, callInfo, accountInfo, rootSpan, startSpan}) {
constructor({logger, sbcAddress, target, opts, application, callInfo, accountInfo, rootSpan, startSpan, dialTask}) {
super();
assert(target.type);
@@ -37,6 +40,7 @@ class SingleDialer extends Emitter {
this.callGone = false;
this.callSid = uuidv4();
this.dialTask = dialTask;
this.on('callStatusChange', this._notifyCallStatusChange.bind(this));
}
@@ -45,6 +49,10 @@ class SingleDialer extends Emitter {
return this.callInfo.callStatus;
}
get applicationSid() {
return this.application?.application_sid || this.callInfo?.applicationSid;
}
/**
* can be used for all http requests within this session
*/
@@ -71,7 +79,8 @@ class SingleDialer extends Emitter {
...(this.from.host && {'X-Preferred-From-Host': this.from.host}),
'X-Jambonz-Routing': this.target.type,
'X-Call-Sid': this.callSid,
...(this.applicationSid && {'X-Application-Sid': this.applicationSid})
...(this.applicationSid && {'X-Application-Sid': this.applicationSid}),
...(this.target.proxy && {'X-SIP-Proxy': this.target.proxy})
};
if (srf.locals.fsUUID) {
opts.headers = {
@@ -143,7 +152,7 @@ class SingleDialer extends Emitter {
Object.assign(opts, {
proxy: `sip:${this.sbcAddress}`,
localSdp: this.ep.local.sdp
localSdp: opts.opusFirst ? makeOpusFirst(this.ep.local.sdp) : this.ep.local.sdp
});
if (this.target.auth) opts.auth = this.target.auth;
inviteSpan = this.startSpan('invite', {
@@ -171,6 +180,7 @@ class SingleDialer extends Emitter {
* (a) create a logger for this call
*/
req.srf = srf;
this.req = req;
this.callInfo = new CallInfo({
direction: CallDirection.Outbound,
parentCallInfo: this.parentCallInfo,
@@ -243,9 +253,14 @@ class SingleDialer extends Emitter {
.on('modify', async(req, res) => {
try {
if (this.ep) {
const newSdp = await this.ep.modify(req.body);
res.send(200, {body: newSdp});
this.logger.info({offer: req.body, answer: newSdp}, 'SingleDialer:exec: handling reINVITE');
if (this.dialTask && this.dialTask.isOnHold) {
this.logger.info('dial is onhold, emit event');
this.emit('reinvite', req, res);
} else {
const newSdp = await this.ep.modify(req.body);
res.send(200, {body: newSdp});
this.logger.info({offer: req.body, answer: newSdp}, 'SingleDialer:exec: handling reINVITE');
}
}
else {
this.logger.info('SingleDialer:exec: handling reINVITE with released media, emit event');
@@ -316,10 +331,15 @@ class SingleDialer extends Emitter {
try {
// retrieve set of tasks
const json = await this.requestor.request('dial:confirm', confirmHook, this.callInfo.toJSON());
if (!json || (Array.isArray(json) && json.length === 0)) {
this.logger.info('SingleDialer:_executeApp: no tasks returned from confirm hook');
return;
}
const tasks = normalizeJambones(this.logger, json).map((tdata) => makeTask(this.logger, tdata));
// verify it contains only allowed verbs
const allowedTasks = tasks.filter((task) => {
return [
TaskPreconditions.None,
TaskPreconditions.StableCall,
TaskPreconditions.Endpoint
].includes(task.preconditions);
@@ -367,15 +387,35 @@ class SingleDialer extends Emitter {
this.dlg.linkedSpanId = this.rootSpan.traceId;
const rootSpan = new RootSpan('outbound-call', this.dlg);
const newLogger = logger.child({traceId: rootSpan.traceId});
//clone application from parent call with new requestor
//parent application will be closed in case the parent hangs up
const app = {...application};
if ('WS' === app.call_hook?.method ||
app.call_hook?.url.startsWith('ws://') || app.call_hook?.url.startsWith('wss://')) {
const requestor = new WsRequestor(logger, this.accountInfo.account.account_sid,
app.call_hook, this.accountInfo.account.webhook_secret);
app.requestor = requestor;
app.notifier = requestor;
app.call_hook.method = 'WS';
}
else {
app.requestor = new HttpRequestor(logger, this.accountInfo.account.account_sid,
app.call_hook, this.accountInfo.account.webhook_secret);
if (app.call_status_hook) app.notifier = new HttpRequestor(logger,
this.accountInfo.account.account_sid, app.call_status_hook,
this.accountInfo.account.webhook_secret);
else app.notifier = {request: () => {}, close: () => {}};
}
const cs = new AdultingCallSession({
logger: newLogger,
singleDialer: this,
application,
application: app,
callInfo: this.callInfo,
accountInfo: this.accountInfo,
tasks,
rootSpan
});
cs.req = this.req;
cs.exec().catch((err) => newLogger.error({err}, 'doAdulting: error executing session'));
return cs;
}
@@ -426,11 +466,11 @@ class SingleDialer extends Emitter {
}
function placeOutdial({
logger, srf, ms, sbcAddress, target, opts, application, callInfo, accountInfo, rootSpan, startSpan
logger, srf, ms, sbcAddress, target, opts, application, callInfo, accountInfo, rootSpan, startSpan, dialTask
}) {
const myOpts = deepcopy(opts);
const sd = new SingleDialer({
logger, sbcAddress, target, myOpts, application, callInfo, accountInfo, rootSpan, startSpan
logger, sbcAddress, target, myOpts, application, callInfo, accountInfo, rootSpan, startSpan, dialTask
});
sd.exec(srf, ms, myOpts);
return sd;


@@ -101,7 +101,8 @@ module.exports = (logger) => {
method: 'OPTIONS',
headers: {
'X-FS-Status': ms && !dryUpCalls ? 'open' : 'closed',
'X-FS-Calls': srf.locals.sessionTracker.count
'X-FS-Calls': srf.locals.sessionTracker.count,
'X-FS-ServiceUrl': srf.locals.serviceUrl
}
});
req.on('response', (res) => {

lib/utils/sdp-utils.js (new file, 58 lines)

@@ -0,0 +1,58 @@
const sdpTransform = require('sdp-transform');
const isOnhold = (sdp) => {
return sdp && (sdp.includes('a=sendonly') || sdp.includes('a=inactive'));
};
const mergeSdpMedia = (sdp1, sdp2) => {
const parsedSdp1 = sdpTransform.parse(sdp1);
const parsedSdp2 = sdpTransform.parse(sdp2);
parsedSdp1.media.push(...parsedSdp2.media);
return sdpTransform.write(parsedSdp1);
};
const getCodecPlacement = (parsedSdp, codec) => parsedSdp?.media[0]?.rtp?.findIndex((e) => e.codec === codec);
const isOpusFirst = (sdp) => {
return getCodecPlacement(sdpTransform.parse(sdp), 'opus') === 0;
};
const makeOpusFirst = (sdp) => {
const parsedSdp = sdpTransform.parse(sdp);
// Find the index of the OPUS codec
const opusIndex = getCodecPlacement(parsedSdp, 'opus');
// Move OPUS codec to the beginning
if (opusIndex > 0) {
const opusEntry = parsedSdp.media[0].rtp.splice(opusIndex, 1)[0];
parsedSdp.media[0].rtp.unshift(opusEntry);
// Also move the corresponding payload type in the "m" line
const opusPayloadType = parsedSdp.media[0].payloads.split(' ')[opusIndex];
const otherPayloadTypes = parsedSdp.media[0].payloads.split(' ').filter((pt) => pt != opusPayloadType);
parsedSdp.media[0].payloads = [opusPayloadType, ...otherPayloadTypes].join(' ');
}
return sdpTransform.write(parsedSdp);
};
const extractSdpMedia = (sdp) => {
const parsedSdp1 = sdpTransform.parse(sdp);
if (parsedSdp1.media.length > 1) {
parsedSdp1.media = [parsedSdp1.media[0]];
const parsedSdp2 = sdpTransform.parse(sdp);
parsedSdp2.media = [parsedSdp2.media[1]];
return [sdpTransform.write(parsedSdp1), sdpTransform.write(parsedSdp2)];
} else {
return [sdp, sdp];
}
};
module.exports = {
isOnhold,
mergeSdpMedia,
extractSdpMedia,
isOpusFirst,
makeOpusFirst
};
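A minimal sketch of how these helpers behave on a stripped-down audio SDP (values illustrative):

const {isOnhold, isOpusFirst, makeOpusFirst} = require('./sdp-utils');

const sdp = [
  'v=0',
  'o=- 1 1 IN IP4 127.0.0.1',
  's=-',
  'c=IN IP4 127.0.0.1',
  't=0 0',
  'm=audio 4000 RTP/AVP 0 111',
  'a=rtpmap:0 PCMU/8000',
  'a=rtpmap:111 opus/48000/2',
  'a=sendonly'
].join('\r\n') + '\r\n';

isOnhold(sdp);      // true: the offer contains a=sendonly
isOpusFirst(sdp);   // false: PCMU (payload 0) is listed ahead of opus
makeOpusFirst(sdp); // returns an SDP whose m= line reads "m=audio 4000 RTP/AVP 111 0"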


@@ -97,8 +97,12 @@ const parseSiprecPayload = (req, logger) => {
obj[`${prefix}participantstreamassoc`].forEach((ps) => {
const part = participants[ps.$.participant_id];
if (part) {
part.send = ps[`${prefix}send`][0];
part.recv = ps[`${prefix}recv`][0];
if (ps.hasOwnProperty(`${prefix}send`)) {
part.send = ps[`${prefix}send`][0];
}
if (ps.hasOwnProperty(`${prefix}recv`)) {
part.recv = ps[`${prefix}recv`][0];
}
}
});
}
@@ -109,9 +113,9 @@ const parseSiprecPayload = (req, logger) => {
obj[`${prefix}stream`].forEach((s) => {
const streamId = s.$.stream_id;
let sender;
for (const [k, v] of Object.entries(participants)) {
for (const v of Object.values(participants)) {
if (v.send === streamId) {
sender = k;
sender = v;
break;
}
}
@@ -121,9 +125,15 @@ const parseSiprecPayload = (req, logger) => {
sender.label = s[`${prefix}label`][0];
if (-1 !== ['1', 'a_leg', 'inbound'].indexOf(sender.label)) {
opts.caller.aor = sender.aor ;
if (-1 !== ['1', 'a_leg', 'inbound', '10'].indexOf(sender.label)) {
opts.caller.aor = sender.aor;
if (sender.name) opts.caller.name = sender.name;
// Remap the sdp stream base on sender label
if (!opts.sdp1.includes(`a=label:${sender.label}`)) {
const tmp = opts.sdp1;
opts.sdp1 = opts.sdp2;
opts.sdp2 = tmp;
}
}
else {
opts.callee.aor = sender.aor ;


@@ -1,14 +1,6 @@
const {
TaskName,
AzureTranscriptionEvents,
GoogleTranscriptionEvents,
AwsTranscriptionEvents,
NuanceTranscriptionEvents,
DeepgramTranscriptionEvents,
SonioxTranscriptionEvents,
NvidiaTranscriptionEvents,
JambonzTranscriptionEvents
} = require('./constants');
} = require('./constants.json');
const stickyVars = {
google: [
@@ -33,12 +25,14 @@ const stickyVars = {
'AZURE_SERVICE_ENDPOINT',
'AZURE_INITIAL_SPEECH_TIMEOUT_MS',
'AZURE_USE_OUTPUT_FORMAT_DETAILED',
'AZURE_SPEECH_SEGMENTATION_SILENCE_TIMEOUT_MS'
],
deepgram: [
'DEEPGRAM_SPEECH_KEYWORDS',
'DEEPGRAM_API_KEY',
'DEEPGRAM_SPEECH_TIER',
'DEEPGRAM_SPEECH_MODEL',
'DEEPGRAM_SPEECH_ENABLE_SMART_FORMAT',
'DEEPGRAM_SPEECH_ENABLE_AUTOMATIC_PUNCTUATION',
'DEEPGRAM_SPEECH_PROFANITY_FILTER',
'DEEPGRAM_SPEECH_REDACT',
@@ -49,6 +43,7 @@ const stickyVars = {
'DEEPGRAM_SPEECH_SEARCH',
'DEEPGRAM_SPEECH_REPLACE',
'DEEPGRAM_SPEECH_ENDPOINTING',
'DEEPGRAM_SPEECH_UTTERANCE_END_MS',
'DEEPGRAM_SPEECH_VAD_TURNOFF',
'DEEPGRAM_SPEECH_TAG'
],
@@ -92,12 +87,68 @@ const stickyVars = {
nvidia: [
'NVIDIA_HINTS'
],
cobalt: [
'COBALT_SPEECH_HINTS',
'COBALT_COMPILED_CONTEXT_DATA',
'COBALT_METADATA'
],
soniox: [
'SONIOX_PROFANITY_FILTER',
'SONIOX_MODEL'
],
assemblyai: [
'ASSEMBLYAI_API_KEY',
'ASSEMBLYAI_WORD_BOOST'
]
};
const consolidateTranscripts = (bufferedTranscripts, channel, language) => {
if (bufferedTranscripts.length === 1) return bufferedTranscripts[0];
let totalConfidence = 0;
const finalTranscript = bufferedTranscripts.reduce((acc, evt) => {
totalConfidence += evt.alternatives[0].confidence;
let newTranscript = evt.alternatives[0].transcript;
// If new transcript consists only of digits, spaces, and a trailing comma or period
if (newTranscript.match(/^[\d\s]+[,.]?$/)) {
newTranscript = newTranscript.replace(/\s/g, ''); // Remove all spaces
if (newTranscript.endsWith(',')) {
newTranscript = newTranscript.slice(0, -1); // Remove the trailing comma
} else if (newTranscript.endsWith('.')) {
newTranscript = newTranscript.slice(0, -1); // Remove the trailing period
}
}
const lastChar = acc.alternatives[0].transcript.slice(-1);
const firstChar = newTranscript.charAt(0);
if (lastChar.match(/\d/) && firstChar.match(/\d/)) {
acc.alternatives[0].transcript += newTranscript;
} else {
acc.alternatives[0].transcript += ` ${newTranscript}`;
}
return acc;
}, {
language_code: language,
channel_tag: channel,
is_final: true,
alternatives: [{
transcript: ''
}]
});
finalTranscript.alternatives[0].confidence = bufferedTranscripts.length === 1 ?
bufferedTranscripts[0].alternatives[0].confidence :
totalConfidence / bufferedTranscripts.length;
finalTranscript.alternatives[0].transcript = finalTranscript.alternatives[0].transcript.trim();
finalTranscript.vendor = {
name: 'deepgram',
evt: bufferedTranscripts
};
return finalTranscript;
};
const compileSonioxTranscripts = (finalWordChunks, channel, language) => {
const words = finalWordChunks.flat();
const transcript = words.reduce((acc, word) => {
@@ -155,7 +206,7 @@ const normalizeSoniox = (evt, channel, language) => {
};
};
const normalizeDeepgram = (evt, channel, language) => {
const normalizeDeepgram = (evt, channel, language, shortUtterance) => {
const copy = JSON.parse(JSON.stringify(evt));
const alternatives = (evt.channel?.alternatives || [])
.map((alt) => ({
@@ -163,10 +214,14 @@ const normalizeDeepgram = (evt, channel, language) => {
transcript: alt.transcript,
}));
/**
* note difference between is_final and speech_final in Deepgram:
* https://developers.deepgram.com/docs/understand-endpointing-interim-results
*/
return {
language_code: language,
channel_tag: channel,
is_final: evt.is_final,
is_final: shortUtterance ? evt.is_final : evt.speech_final,
alternatives: [alternatives[0]],
vendor: {
name: 'deepgram',
@@ -225,12 +280,37 @@ const normalizeGoogle = (evt, channel, language) => {
};
};
const normalizeCustom = (evt, channel, language) => {
const normalizeCobalt = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
const alternatives = (evt.alternatives || [])
.map((alt) => ({
confidence: alt.confidence,
transcript: alt.transcript_formatted,
}));
return {
language_code: language,
channel_tag: channel,
is_final: evt.is_final,
alternatives: [evt.alternatives[0]]
alternatives,
vendor: {
name: 'cobalt',
evt: copy
}
};
};
const normalizeCustom = (evt, channel, language, vendor) => {
const copy = JSON.parse(JSON.stringify(evt));
return {
language_code: language,
channel_tag: channel,
is_final: evt.is_final,
alternatives: [evt.alternatives[0]],
vendor: {
name: vendor,
evt: copy
}
};
};
@@ -248,19 +328,20 @@ const normalizeNuance = (evt, channel, language) => {
};
};
const normalizeMicrosoft = (evt, channel, language) => {
const normalizeMicrosoft = (evt, channel, language, punctuation = true) => {
const copy = JSON.parse(JSON.stringify(evt));
const nbest = evt.NBest;
const language_code = evt.PrimaryLanguage?.Language || language;
const alternatives = nbest ? nbest.map((n) => {
return {
confidence: n.Confidence,
transcript: n.Display
// remove all punctuation if needed
transcript: punctuation ? n.Display : n.Display.replace(/\p{P}/gu, '')
};
}) :
[
{
transcript: evt.DisplayText || evt.Text
transcript: punctuation ? evt.DisplayText || evt.Text : (evt.DisplayText || evt.Text).replace(/\p{P}/gu, '')
}
];
@@ -290,16 +371,34 @@ const normalizeAws = (evt, channel, language) => {
};
};
const normalizeAssemblyAi = (evt, channel, language) => {
const copy = JSON.parse(JSON.stringify(evt));
return {
language_code: language,
channel_tag: channel,
is_final: evt.message_type === 'FinalTranscript',
alternatives: [
{
confidence: evt.confidence,
transcript: evt.text,
}
],
vendor: {
name: 'ASSEMBLYAI',
evt: copy
}
};
};
module.exports = (logger) => {
const normalizeTranscription = (evt, vendor, channel, language) => {
const normalizeTranscription = (evt, vendor, channel, language, shortUtterance, punctuation) => {
//logger.debug({ evt, vendor, channel, language }, 'normalizeTranscription');
switch (vendor) {
case 'deepgram':
return normalizeDeepgram(evt, channel, language);
return normalizeDeepgram(evt, channel, language, shortUtterance);
case 'microsoft':
return normalizeMicrosoft(evt, channel, language);
return normalizeMicrosoft(evt, channel, language, punctuation);
case 'google':
return normalizeGoogle(evt, channel, language);
case 'aws':
@@ -312,9 +411,13 @@ module.exports = (logger) => {
return normalizeNvidia(evt, channel, language);
case 'soniox':
return normalizeSoniox(evt, channel, language);
case 'cobalt':
return normalizeCobalt(evt, channel, language);
case 'assemblyai':
return normalizeAssemblyAi(evt, channel, language, shortUtterance);
default:
if (vendor.startsWith('custom:')) {
return normalizeCustom(evt, channel, language);
return normalizeCustom(evt, channel, language, vendor);
}
logger.error(`Unknown vendor ${vendor}`);
return evt;
@@ -356,12 +459,13 @@ module.exports = (logger) => {
...(rOpts.punctuation === false && {GOOGLE_SPEECH_ENABLE_AUTOMATIC_PUNCTUATION: 0}),
...(rOpts.words == false && {GOOGLE_SPEECH_ENABLE_WORD_TIME_OFFSETS: 0}),
...(rOpts.diarization === false && {GOOGLE_SPEECH_SPEAKER_DIARIZATION: 0}),
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'string' &&
{GOOGLE_SPEECH_HINTS: rOpts.hints.join(',')}),
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'object' &&
{GOOGLE_SPEECH_HINTS: JSON.stringify(rOpts.hints)}),
...(typeof rOpts.hintsBoost === 'number' && {GOOGLE_SPEECH_HINTS_BOOST: rOpts.hintsBoost}),
...(rOpts.altLanguages.length > 0 &&
// When altLanguages is an empty list, we still have to send a value to freeswitch to clear the previous settings
...(rOpts.altLanguages &&
{GOOGLE_SPEECH_ALTERNATIVE_LANGUAGE_CODES: [...new Set(rOpts.altLanguages)].join(',')}),
...(rOpts.interactionType &&
{GOOGLE_SPEECH_METADATA_INTERACTION_TYPE: rOpts.interactionType}),
@@ -384,28 +488,34 @@ module.exports = (logger) => {
};
}
else if ('microsoft' === vendor) {
const {azureOptions = {}} = rOpts;
opts = {
...opts,
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'string' &&
{AZURE_SPEECH_HINTS: rOpts.hints.map((h) => h.trim()).join(',')}),
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'object' &&
{AZURE_SPEECH_HINTS: rOpts.hints.map((h) => h.phrase).join(',')}),
...(rOpts.altLanguages && rOpts.altLanguages.length > 0 &&
// When altLanguages is an empty list, we still have to send a value to freeswitch to clear the previous settings
...(rOpts.altLanguages &&
{AZURE_SPEECH_ALTERNATIVE_LANGUAGE_CODES: [...new Set(rOpts.altLanguages)].join(',')}),
...(rOpts.requestSnr && {AZURE_REQUEST_SNR: 1}),
...(rOpts.profanityOption && {AZURE_PROFANITY_OPTION: rOpts.profanityOption}),
...(sttCredentials.use_custom_stt && sttCredentials.custom_stt_endpoint_url &&
{AZURE_SERVICE_ENDPOINT: sttCredentials.custom_stt_endpoint_url}),
...(rOpts.azureServiceEndpoint && {AZURE_SERVICE_ENDPOINT: rOpts.azureServiceEndpoint}),
...(rOpts.initialSpeechTimeoutMs > 0 &&
{AZURE_INITIAL_SPEECH_TIMEOUT_MS: rOpts.initialSpeechTimeoutMs}),
...(rOpts.requestSnr && {AZURE_REQUEST_SNR: 1}),
...(rOpts.audioLogging && {AZURE_AUDIO_LOGGING: 1}),
...{AZURE_USE_OUTPUT_FORMAT_DETAILED: 1},
...(azureOptions.speechSegmentationSilenceTimeoutMs &&
{AZURE_SPEECH_SEGMENTATION_SILENCE_TIMEOUT_MS: azureOptions.speechSegmentationSilenceTimeoutMs}),
...(sttCredentials && {
AZURE_SUBSCRIPTION_KEY: sttCredentials.api_key,
AZURE_REGION: sttCredentials.region,
...(sttCredentials.api_key && {AZURE_SUBSCRIPTION_KEY: sttCredentials.api_key}),
...(sttCredentials.region && {AZURE_REGION: sttCredentials.region}),
}),
...(sttCredentials.use_custom_stt && sttCredentials.custom_stt_endpoint &&
{AZURE_SERVICE_ENDPOINT_ID: sttCredentials.custom_stt_endpoint})
{AZURE_SERVICE_ENDPOINT_ID: sttCredentials.custom_stt_endpoint}),
};
}
else if ('nuance' === vendor) {
@@ -468,10 +578,12 @@ module.exports = (logger) => {
{DEEPGRAM_SPEECH_MODEL: deepgramOptions.model},
...(deepgramOptions.punctuate) &&
{DEEPGRAM_SPEECH_ENABLE_AUTOMATIC_PUNCTUATION: 1},
...(deepgramOptions.smartFormatting) &&
{DEEPGRAM_SPEECH_ENABLE_SMART_FORMAT: 1},
...(deepgramOptions.profanityFilter) &&
{DEEPGRAM_SPEECH_PROFANITY_FILTER: 1},
...(deepgramOptions.redact) &&
{DEEPGRAM_SPEECH_REDACT: 1},
{DEEPGRAM_SPEECH_REDACT: deepgramOptions.redact},
...(deepgramOptions.diarize) &&
{DEEPGRAM_SPEECH_DIARIZE: 1},
...(deepgramOptions.diarizeVersion) &&
@@ -486,14 +598,16 @@ module.exports = (logger) => {
{DEEPGRAM_SPEECH_SEARCH: deepgramOptions.search.join(',')},
...(deepgramOptions.replace) &&
{DEEPGRAM_SPEECH_REPLACE: deepgramOptions.replace.join(',')},
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
...(rOpts.hints && rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
{DEEPGRAM_SPEECH_KEYWORDS: rOpts.hints.map((h) => h.trim()).join(',')}),
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
...(rOpts.hints && rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
{DEEPGRAM_SPEECH_KEYWORDS: rOpts.hints.map((h) => h.phrase).join(',')}),
...(deepgramOptions.keywords) &&
{DEEPGRAM_SPEECH_KEYWORDS: deepgramOptions.keywords.join(',')},
...('endpointing' in deepgramOptions) &&
{DEEPGRAM_SPEECH_ENDPOINTING: deepgramOptions.endpointing},
{DEEPGRAM_SPEECH_ENDPOINTING: deepgramOptions.endpointing === false ? 'false' : deepgramOptions.endpointing},
...(deepgramOptions.utteranceEndMs) &&
{DEEPGRAM_SPEECH_UTTERANCE_END_MS: deepgramOptions.utteranceEndMs},
...(deepgramOptions.vadTurnoff) &&
{DEEPGRAM_SPEECH_VAD_TURNOFF: deepgramOptions.vadTurnoff},
...(deepgramOptions.tag) &&
@@ -507,9 +621,9 @@ module.exports = (logger) => {
...opts,
...(sttCredentials.api_key) &&
{SONIOX_API_KEY: sttCredentials.api_key},
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'string' &&
{SONIOX_HINTS: rOpts.hints.join(',')}),
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'object' &&
{SONIOX_HINTS: JSON.stringify(rOpts.hints)}),
...(typeof rOpts.hintsBoost === 'number' &&
{SONIOX_HINTS_BOOST: rOpts.hintsBoost}),
@@ -567,9 +681,9 @@ module.exports = (logger) => {
...(rOpts.diarization && rOpts.diarizationMaxSpeakers > 0 &&
{NVIDIA_DIARIZATION_SPEAKER_COUNT: rOpts.diarizationMaxSpeakers}),
...(rOpts.separateRecognitionPerChannel && {NVIDIA_SEPARATE_RECOGNITION_PER_CHANNEL: 1}),
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'string' &&
{NVIDIA_HINTS: rOpts.hints.join(',')}),
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'object' &&
{NVIDIA_HINTS: JSON.stringify(rOpts.hints)}),
...(typeof rOpts.hintsBoost === 'number' &&
{NVIDIA_HINTS_BOOST: rOpts.hintsBoost}),
@@ -577,21 +691,49 @@ module.exports = (logger) => {
{NVIDIA_CUSTOM_CONFIGURATION: JSON.stringify(nvidiaOptions.customConfiguration)}),
};
}
else if ('cobalt' === vendor) {
const {cobaltOptions = {}} = rOpts;
const cobaltUri = cobaltOptions.serverUri || sttCredentials.cobalt_server_uri;
opts = {
...opts,
...(rOpts.words && {COBALT_WORD_TIME_OFFSETS: 1}),
...(!rOpts.words && {COBALT_WORD_TIME_OFFSETS: 0}),
...(rOpts.model && {COBALT_MODEL: rOpts.model}),
...(cobaltUri && {COBALT_SERVER_URI: cobaltUri}),
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'string' &&
{COBALT_SPEECH_HINTS: rOpts.hints.join(',')}),
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'object' &&
{COBALT_SPEECH_HINTS: JSON.stringify(rOpts.hints)}),
...(rOpts.hints?.length > 0 &&
{COBALT_CONTEXT_TOKEN: cobaltOptions.contextToken || 'unk:default'}),
...(cobaltOptions.metadata && {COBALT_METADATA: cobaltOptions.metadata}),
...(cobaltOptions.enableConfusionNetwork && {COBALT_ENABLE_CONFUSION_NETWORK: 1}),
...(cobaltOptions.compiledContextData && {COBALT_COMPILED_CONTEXT_DATA: cobaltOptions.compiledContextData}),
};
} else if ('assemblyai' === vendor) {
opts = {
...opts,
...(sttCredentials.api_key) &&
{ASSEMBLYAI_API_KEY: sttCredentials.api_key},
...(rOpts.hints?.length > 0 &&
{ASSEMBLYAI_WORD_BOOST: JSON.stringify(rOpts.hints)})
};
}
else if (vendor.startsWith('custom:')) {
let {options = {}} = rOpts;
const {auth_token, custom_stt_url} = sttCredentials;
options = {
...options,
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'string' &&
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'string' &&
{hints: rOpts.hints}),
...(rOpts.hints.length > 0 && typeof rOpts.hints[0] === 'object' &&
...(rOpts.hints?.length > 0 && typeof rOpts.hints[0] === 'object' &&
{hints: JSON.stringify(rOpts.hints)}),
...(typeof rOpts.hintsBoost === 'number' && {hintsBoost: rOpts.hintsBoost})
};
opts = {
...opts,
JAMBONZ_STT_API_KEY: auth_token,
...(auth_token && {JAMBONZ_STT_API_KEY: auth_token}),
JAMBONZ_STT_URL: custom_stt_url,
...(Object.keys(options).length > 0 && {JAMBONZ_STT_OPTIONS: JSON.stringify(options)}),
};
@@ -603,41 +745,6 @@ module.exports = (logger) => {
return opts;
};
const removeSpeechListeners = (ep) => {
ep.removeCustomEventListener(GoogleTranscriptionEvents.Transcription);
ep.removeCustomEventListener(GoogleTranscriptionEvents.EndOfUtterance);
ep.removeCustomEventListener(GoogleTranscriptionEvents.VadDetected);
ep.removeCustomEventListener(AwsTranscriptionEvents.Transcription);
ep.removeCustomEventListener(AwsTranscriptionEvents.VadDetected);
ep.removeCustomEventListener(AzureTranscriptionEvents.Transcription);
ep.removeCustomEventListener(AzureTranscriptionEvents.NoSpeechDetected);
ep.removeCustomEventListener(AzureTranscriptionEvents.VadDetected);
ep.removeCustomEventListener(NuanceTranscriptionEvents.Transcription);
ep.removeCustomEventListener(NuanceTranscriptionEvents.TranscriptionComplete);
ep.removeCustomEventListener(NuanceTranscriptionEvents.StartOfSpeech);
ep.removeCustomEventListener(NuanceTranscriptionEvents.VadDetected);
ep.removeCustomEventListener(DeepgramTranscriptionEvents.Transcription);
ep.removeCustomEventListener(DeepgramTranscriptionEvents.Connect);
ep.removeCustomEventListener(DeepgramTranscriptionEvents.ConnectFailure);
ep.removeCustomEventListener(SonioxTranscriptionEvents.Transcription);
ep.removeCustomEventListener(NvidiaTranscriptionEvents.Transcription);
ep.removeCustomEventListener(NvidiaTranscriptionEvents.TranscriptionComplete);
ep.removeCustomEventListener(NvidiaTranscriptionEvents.StartOfSpeech);
ep.removeCustomEventListener(NvidiaTranscriptionEvents.VadDetected);
ep.removeCustomEventListener(JambonzTranscriptionEvents.Transcription);
ep.removeCustomEventListener(JambonzTranscriptionEvents.Connect);
ep.removeCustomEventListener(JambonzTranscriptionEvents.ConnectFailure);
ep.removeCustomEventListener(JambonzTranscriptionEvents.Error);
};
const setSpeechCredentialsAtRuntime = (recognizer) => {
if (!recognizer) return;
if (recognizer.vendor === 'nuance') {
@@ -657,6 +764,10 @@ module.exports = (logger) => {
const {apiKey} = recognizer.sonioxOptions || {};
if (apiKey) return {api_key: apiKey};
}
else if (recognizer.vendor === 'cobalt') {
const {serverUri} = recognizer.cobaltOptions || {};
if (serverUri) return {cobalt_server_uri: serverUri};
}
else if (recognizer.vendor === 'ibm') {
const {ttsApiKey, ttsRegion, sttApiKey, sttRegion, instanceId} = recognizer.ibmOptions || {};
if (ttsApiKey || sttApiKey) return {
@@ -672,8 +783,8 @@ module.exports = (logger) => {
return {
normalizeTranscription,
setChannelVarsForStt,
removeSpeechListeners,
setSpeechCredentialsAtRuntime,
compileSonioxTranscripts
compileSonioxTranscripts,
consolidateTranscripts
};
};
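A short sketch of two of the exported helpers, using a hypothetical Deepgram event and a console stand-in for the logger (module path assumed):

const logger = console; // stand-in for the pino logger used elsewhere
const {normalizeTranscription, consolidateTranscripts} =
  require('./transcription-utils')(logger); // module path assumed

const evt = {
  is_final: true,
  speech_final: false,
  channel: {alternatives: [{confidence: 0.93, transcript: 'hello world'}]}
};
const unified = normalizeTranscription(evt, 'deepgram', 1, 'en-US', /*shortUtterance*/ false);
// unified.is_final === false: without shortUtterance, Deepgram's speech_final governs finality
// unified.alternatives[0].transcript === 'hello world'

const merged = consolidateTranscripts([
  {is_final: true, alternatives: [{confidence: 0.9, transcript: '1 2 3,'}]},
  {is_final: true, alternatives: [{confidence: 0.8, transcript: '4 5 6'}]}
], 1, 'en-US');
// digit-only chunks are joined without spaces: merged.alternatives[0].transcript === '123456'
// merged.alternatives[0].confidence === 0.85 (the average); merged.vendor.evt keeps the raw events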


@@ -6,9 +6,11 @@ const Websocket = require('ws');
const snakeCaseKeys = require('./snakecase-keys');
const {
RESPONSE_TIMEOUT_MS,
JAMBONES_WS_PING_INTERVAL_MS,
MAX_RECONNECTS,
JAMBONES_WS_HANDSHAKE_TIMEOUT_MS,
JAMBONES_WS_MAX_PAYLOAD
JAMBONES_WS_MAX_PAYLOAD,
HTTP_USER_AGENT_HEADER
} = require('../config');
class WsRequestor extends BaseRequestor {
@@ -42,6 +44,7 @@ class WsRequestor extends BaseRequestor {
async request(type, hook, params, httpHeaders = {}) {
assert(HookMsgTypes.includes(type));
const url = hook.url || hook;
const wantsAck = !['call:status', 'verb:status', 'jambonz:error'].includes(type);
if (this.maliciousClient) {
this.logger.info({url: this.url}, 'WsRequestor:request - discarding msg to malicious client');
@@ -72,11 +75,19 @@ class WsRequestor extends BaseRequestor {
if (this.connectInProgress) {
this.logger.debug(
`WsRequestor:request(${this.id}) - queueing ${type} message since we are connecting`);
this.queuedMsg.push({type, hook, params, httpHeaders});
if (wantsAck) {
const p = new Promise((resolve, reject) => {
this.queuedMsg.push({type, hook, params, httpHeaders, promise: {resolve, reject}});
});
return p;
}
else {
this.queuedMsg.push({type, hook, params, httpHeaders});
}
return;
}
this.connectInProgress = true;
this.logger.debug(`WsRequestor:request(${this.id}) - connecting since we do not have a connection`);
this.logger.debug(`WsRequestor:request(${this.id}) - connecting since we do not have a connection for ${type}`);
if (this.connections >= MAX_RECONNECTS) {
return Promise.reject(`max attempts connecting to ${this.url}`);
}
@@ -115,9 +126,14 @@ class WsRequestor extends BaseRequestor {
const sendQueuedMsgs = () => {
if (this.queuedMsg.length > 0) {
for (const {type, hook, params, httpHeaders} of this.queuedMsg) {
for (const {type, hook, params, httpHeaders, promise} of this.queuedMsg) {
this.logger.debug(`WsRequestor:request - preparing queued ${type} for sending`);
setImmediate(this.request.bind(this, type, hook, params, httpHeaders));
if (promise) {
this.request(type, hook, params, httpHeaders)
.then((res) => promise.resolve(res))
.catch((err) => promise.reject(err));
}
else setImmediate(this.request.bind(this, type, hook, params, httpHeaders));
}
this.queuedMsg.length = 0;
}
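// Standalone sketch (names hypothetical) of the deferred-promise queueing pattern used above:
// callers get a promise back immediately while the socket is still connecting; its resolve and
// reject are stashed with the queued message and settled when the queue is replayed.
const queuedMsgs = [];
const enqueue = (msg) => new Promise((resolve, reject) => {
  queuedMsgs.push({msg, promise: {resolve, reject}});
});
const replay = (send) => {
  for (const {msg, promise} of queuedMsgs.splice(0)) {
    send(msg).then(promise.resolve).catch(promise.reject);
  }
};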
@@ -136,8 +152,8 @@ class WsRequestor extends BaseRequestor {
}
/* simple notifications */
if (['call:status', 'verb:status', 'jambonz:error'].includes(type) || reconnectingWithoutAck) {
this.ws.send(JSON.stringify(obj), () => {
if (!wantsAck || reconnectingWithoutAck) {
this.ws?.send(JSON.stringify(obj), () => {
this.logger.debug({obj}, `WsRequestor:request websocket: sent (${url})`);
sendQueuedMsgs();
});
@@ -178,9 +194,17 @@ class WsRequestor extends BaseRequestor {
});
}
_stopPingTimer() {
if (this._pingTimer) {
clearInterval(this._pingTimer);
this._pingTimer = null;
}
}
close() {
this.closedGracefully = true;
this.logger.debug('WsRequestor:close closing socket');
this._stopPingTimer();
try {
if (this.ws) {
this.ws.close(1000);
@@ -195,6 +219,7 @@ class WsRequestor extends BaseRequestor {
_connect() {
assert(!this.ws);
this._stopPingTimer();
return new Promise((resolve, reject) => {
const handshakeTimeout = JAMBONES_WS_HANDSHAKE_TIMEOUT_MS ?
parseInt(JAMBONES_WS_HANDSHAKE_TIMEOUT_MS) :
@@ -204,6 +229,9 @@ class WsRequestor extends BaseRequestor {
maxRedirects: 2,
handshakeTimeout,
maxPayload: JAMBONES_WS_MAX_PAYLOAD ? parseInt(JAMBONES_WS_MAX_PAYLOAD) : 24 * 1024,
headers: {
...(HTTP_USER_AGENT_HEADER && {'user-agent' : HTTP_USER_AGENT_HEADER})
}
};
if (this.username && this.password) opts = {...opts, auth: `${this.username}:${this.password}`};
@@ -255,10 +283,15 @@ class WsRequestor extends BaseRequestor {
this.connectInProgress = false;
this.connections++;
this.emit('ready', ws);
if (JAMBONES_WS_PING_INTERVAL_MS > 15000) {
this._pingTimer = setInterval(() => this.ws?.ping(), JAMBONES_WS_PING_INTERVAL_MS);
}
}
_onClose(code) {
this.logger.info(`WsRequestor(${this.id}) - closed from far end ${code}`);
this._stopPingTimer();
if (this.connections > 0 && code !== 1000) {
this.logger.info({url: this.url}, 'WsRequestor - socket closed unexpectedly from remote side');
this.emit('socket-closed');
@@ -283,6 +316,7 @@ class WsRequestor extends BaseRequestor {
_onSocketClosed() {
this.ws = null;
this.emit('connection-dropped');
this._stopPingTimer();
if (this.connections > 0 && this.connections < MAX_RECONNECTS && !this.closedGracefully) {
if (!this._initMsgId) this._clearPendingMessages();
this.logger.debug(`WsRequestor:_onSocketClosed waiting ${this.backoffMs} to reconnect`);

package-lock.json (generated, 10014 lines): file diff suppressed because it is too large


@@ -1,6 +1,6 @@
{
"name": "jambonz-feature-server",
"version": "0.8.3",
"version": "0.8.5",
"main": "app.js",
"engines": {
"node": ">= 10.16.0"
@@ -19,19 +19,22 @@
"bugs": {},
"scripts": {
"start": "node app",
"test": "NODE_ENV=test JAMBONES_HOSTING=1 HTTP_POOL=1 ENCRYPTION_SECRET=foobar DRACHTIO_HOST=127.0.0.1 DRACHTIO_PORT=9060 DRACHTIO_SECRET=cymru JAMBONES_MYSQL_HOST=127.0.0.1 JAMBONES_MYSQL_PORT=3360 JAMBONES_MYSQL_USER=jambones_test JAMBONES_MYSQL_PASSWORD=jambones_test JAMBONES_MYSQL_DATABASE=jambones_test JAMBONES_REDIS_HOST=127.0.0.1 JAMBONES_REDIS_PORT=16379 JAMBONES_LOGLEVEL=error ENABLE_METRICS=0 HTTP_PORT=3000 JAMBONES_SBCS=172.38.0.10 JAMBONES_FREESWITCH=127.0.0.1:8022:JambonzR0ck$:docker-host JAMBONES_TIME_SERIES_HOST=127.0.0.1 JAMBONES_NETWORK_CIDR=172.38.0.0/16 node test/ ",
"test": "NODE_ENV=test JAMBONES_HOSTING=1 HTTP_POOL=1 JAMBONES_TTS_TRIM_SILENCE=1 ENCRYPTION_SECRET=foobar DRACHTIO_HOST=127.0.0.1 DRACHTIO_PORT=9060 DRACHTIO_SECRET=cymru JAMBONES_MYSQL_HOST=127.0.0.1 JAMBONES_MYSQL_PORT=3360 JAMBONES_MYSQL_USER=jambones_test JAMBONES_MYSQL_PASSWORD=jambones_test JAMBONES_MYSQL_DATABASE=jambones_test JAMBONES_REDIS_HOST=127.0.0.1 JAMBONES_REDIS_PORT=16379 JAMBONES_LOGLEVEL=error ENABLE_METRICS=0 HTTP_PORT=3000 JAMBONES_SBCS=172.38.0.10 JAMBONES_FREESWITCH=127.0.0.1:8022:JambonzR0ck$:docker-host JAMBONES_TIME_SERIES_HOST=127.0.0.1 JAMBONES_NETWORK_CIDR=172.38.0.0/16 node test/ ",
"coverage": "./node_modules/.bin/nyc --reporter html --report-dir ./coverage npm run test",
"jslint": "eslint app.js tracer.js lib",
"jslint:fix": "eslint app.js tracer.js lib --fix"
},
"dependencies": {
"@jambonz/db-helpers": "^0.7.10",
"@aws-sdk/client-auto-scaling": "^3.360.0",
"@aws-sdk/client-sns": "^3.360.0",
"@jambonz/db-helpers": "^0.9.1",
"@jambonz/http-health-check": "^0.0.1",
"@jambonz/realtimedb-helpers": "^0.7.2",
"@jambonz/speech-utils": "^0.0.13",
"@jambonz/stats-collector": "^0.1.8",
"@jambonz/time-series": "^0.2.5",
"@jambonz/verb-specifications": "^0.0.22",
"@jambonz/mw-registrar": "^0.2.4",
"@jambonz/realtimedb-helpers": "^0.8.7",
"@jambonz/speech-utils": "^0.0.33",
"@jambonz/stats-collector": "^0.1.9",
"@jambonz/time-series": "^0.2.8",
"@jambonz/verb-specifications": "^0.0.50",
"@opentelemetry/api": "^1.4.0",
"@opentelemetry/exporter-jaeger": "^1.9.0",
"@opentelemetry/exporter-trace-otlp-http": "^0.35.0",
@@ -41,13 +44,13 @@
"@opentelemetry/sdk-trace-base": "^1.9.0",
"@opentelemetry/sdk-trace-node": "^1.9.0",
"@opentelemetry/semantic-conventions": "^1.9.0",
"aws-sdk": "^2.1313.0",
"bent": "^7.3.12",
"debug": "^4.3.4",
"deepcopy": "^2.1.0",
"drachtio-fsmrf": "^3.0.21",
"drachtio-srf": "^4.5.23",
"drachtio-fsmrf": "^3.0.33",
"drachtio-srf": "^4.5.31",
"express": "^4.18.2",
"express-validator": "^7.0.1",
"ip": "^1.1.8",
"moment": "^2.29.4",
"parse-url": "^8.1.0",
@@ -58,11 +61,11 @@
"short-uuid": "^4.2.2",
"sinon": "^15.0.1",
"to-snake-case": "^1.0.0",
"undici": "^5.19.1",
"undici": "^5.26.2",
"uuid-random": "^1.3.2",
"verify-aws-sns-signature": "^0.1.0",
"ws": "^8.9.0",
"xml2js": "^0.5.0"
"xml2js": "^0.6.2"
},
"devDependencies": {
"clear-module": "^4.1.2",


@@ -39,7 +39,7 @@ test('\'config: listen\'', async(t) => {
}
];
provisionCallHook(from, verbs);
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success-send-bye.xml', '172.38.0.10', from);
@@ -86,7 +86,7 @@ test('\'config: listen - stop\'', async(t) => {
}
];
provisionCallHook(from, verbs);
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success-send-bye.xml', '172.38.0.10', from);


@@ -0,0 +1,86 @@
#
# Recommended minimum configuration:
#
# Example rule allowing access from your local networks.
# Adapt to list your (internal) IP networks from where browsing
# should be allowed
acl localnet src 0.0.0.1-0.255.255.255 # RFC 1122 "this" network (LAN)
acl localnet src 10.0.0.0/8 # RFC 1918 local private network (LAN)
acl localnet src 100.64.0.0/10 # RFC 6598 shared address space (CGN)
acl localnet src 169.254.0.0/16 # RFC 3927 link-local (directly plugged) machines
acl localnet src 172.16.0.0/12 # RFC 1918 local private network (LAN)
acl localnet src 172.38.0.0/12 # RFC 1918 local private network (LAN)
acl localnet src 192.168.0.0/16 # RFC 1918 local private network (LAN)
acl localnet src fc00::/7 # RFC 4193 local private network range
acl localnet src fe80::/10 # RFC 4291 link-local (directly plugged) machines
acl SSL_ports port 443
acl Safe_ports port 80 # http
acl Safe_ports port 21 # ftp
acl Safe_ports port 443 # https
acl Safe_ports port 70 # gopher
acl Safe_ports port 210 # wais
acl Safe_ports port 1025-65535 # unregistered ports
acl Safe_ports port 280 # http-mgmt
acl Safe_ports port 488 # gss-http
acl Safe_ports port 591 # filemaker
acl Safe_ports port 777 # multiling http
#
# Recommended minimum Access Permission configuration:
#
# Allow requests to all ports (the stock deny rule is relaxed for this test proxy)
http_access allow !Safe_ports
# Allow CONNECT even to ports other than the secure SSL ports
http_access allow CONNECT !SSL_ports
# Allow cachemgr access from localhost and elsewhere
http_access allow localhost manager
http_access allow manager
# This default configuration only allows localhost requests because a more
# permissive Squid installation could introduce new attack vectors into the
# network by proxying external TCP connections to unprotected services.
http_access allow localhost
# The stock configuration denies requests to localhost here; this test proxy allows
# them so that web applications running on the same server as Squid (which often
# assume that only local users can reach them at "localhost" ports) stay reachable
# through the proxy.
http_access allow to_localhost
# Protect cloud servers that provide local users with sensitive info about
# their server via certain well-known link-local (a.k.a. APIPA) addresses.
# http_access deny to_linklocal
#
# INSERT YOUR OWN RULE(S) HERE TO ALLOW ACCESS FROM YOUR CLIENTS
#
# For example, to allow access from your local networks, you may uncomment the
# following rule (and/or add rules that match your definition of "local"):
# http_access allow localnet
# And finally allow all other access to this test proxy
http_access allow all
# Squid normally listens to port 3128
http_port 3128
# Uncomment and adjust the following to add a disk cache directory.
#cache_dir ufs /usr/local/var/cache/squid 100 16 256
# Leave coredumps in the first cache dir
coredump_dir /usr/local/var/cache/squid
#
# Add any of your own refresh_pattern entries above these.
#
refresh_pattern ^ftp: 1440 20% 10080
refresh_pattern ^gopher: 1440 0% 1440
refresh_pattern -i (/cgi-bin/|\?) 0 0% 0
refresh_pattern . 0 20% 4320


@@ -37,7 +37,7 @@ test('test create-call timeout', async(t) => {
'account_sid':account_sid,
'timeout': 1,
"call_hook": {
"url": "https://public-apps.jambonz.us/hello-world",
"url": "https://public-apps.jambonz.cloud/hello-world",
"method": "POST"
},
"from": "15083718299",
@@ -88,17 +88,133 @@ test('test create-call call-hook basic authentication', async(t) => {
let verbs = [
{
"verb": "say",
"text": "hello"
"verb": "pause",
"length": 1
}
];
provisionCallHook(from, verbs);
await provisionCallHook(from, verbs);
//THEN
await p;
let obj = await getJSON(`http:127.0.0.1:3100/lastRequest/${from}`)
t.ok(obj.headers.Authorization === 'Basic dXNlcm5hbWU6cGFzc3dvcmQ=',
'create-call: call-hook contains basic authentication header');
t.ok(obj.headers['user-agent'] === 'jambonz',
'create-call: call-hook contains user-agent header');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('test create-call amd', async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let from = 'create-call-amd';
let account_sid = 'bb845d4b-83a9-4cde-a6e9-50f3743bab3f';
// Give UAS app time to come up
const p = sippUac('uas.xml', '172.38.0.10', from);
await waitFor(1000);
const post = bent('http://127.0.0.1:3000/', 'POST', 'json', 201);
post('v1/createCall', {
'account_sid':account_sid,
"call_hook": {
"url": "http://127.0.0.1:3100/",
"method": "POST",
"username": "username",
"password": "password"
},
"from": from,
"to": {
"type": "phone",
"number": "15583084809"
},
"amd": {
"actionHook": "/actionHook"
},
"speech_recognizer_vendor": "google",
"speech_recognizer_language": "en"
});
let verbs = [
{
"verb": "pause",
"length": 7
}
];
await provisionCallHook(from, verbs);
//THEN
await p;
let obj = await getJSON(`http:127.0.0.1:3100/lastRequest/${from}_actionHook`)
t.ok(obj.body.type === 'amd_no_speech_detected',
'create-call: AMD detected');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('test create-call app_json', async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let from = 'create-call-app-json';
let account_sid = 'bb845d4b-83a9-4cde-a6e9-50f3743bab3f';
// Give UAS app time to come up
const p = sippUac('uas.xml', '172.38.0.10', from);
await waitFor(1000);
const app_json = `[
{
"verb": "pause",
"length": 7
}
]`;
const post = bent('http://127.0.0.1:3000/', 'POST', 'json', 201);
post('v1/createCall', {
'account_sid':account_sid,
"call_hook": {
"url": "http://127.0.0.1:3100/",
"method": "POST",
"username": "username",
"password": "password"
},
app_json,
"from": from,
"to": {
"type": "phone",
"number": "15583084809"
},
"amd": {
"actionHook": "/actionHook"
},
"speech_recognizer_vendor": "google",
"speech_recognizer_language": "en"
});
//THEN
await p;
disconnect();
} catch (err) {
console.log(`error received: ${err}`);

File diff suppressed because it is too large


@@ -1,5 +1,4 @@
/* SQLEditor (MySQL (2))*/
SET FOREIGN_KEY_CHECKS=0;
DROP TABLE IF EXISTS account_static_ips;
@@ -14,8 +13,12 @@ DROP TABLE IF EXISTS beta_invite_codes;
DROP TABLE IF EXISTS call_routes;
DROP TABLE IF EXISTS clients;
DROP TABLE IF EXISTS dns_records;
DROP TABLE IF EXISTS lcr;
DROP TABLE IF EXISTS lcr_carrier_set_entry;
DROP TABLE IF EXISTS lcr_routes;
@@ -52,6 +55,8 @@ DROP TABLE IF EXISTS smpp_addresses;
DROP TABLE IF EXISTS speech_credentials;
DROP TABLE IF EXISTS system_information;
DROP TABLE IF EXISTS users;
DROP TABLE IF EXISTS smpp_gateways;
@@ -124,6 +129,16 @@ application_sid CHAR(36) NOT NULL,
PRIMARY KEY (call_route_sid)
) COMMENT='a regex-based pattern match for call routing';
CREATE TABLE clients
(
client_sid CHAR(36) NOT NULL UNIQUE ,
account_sid CHAR(36) NOT NULL,
is_active BOOLEAN NOT NULL DEFAULT 1,
username VARCHAR(64),
password VARCHAR(1024),
PRIMARY KEY (client_sid)
);
CREATE TABLE dns_records
(
dns_record_sid CHAR(36) NOT NULL UNIQUE ,
@@ -136,11 +151,23 @@ PRIMARY KEY (dns_record_sid)
CREATE TABLE lcr_routes
(
lcr_route_sid CHAR(36),
lcr_sid CHAR(36) NOT NULL,
regex VARCHAR(32) NOT NULL COMMENT 'regex-based pattern match against dialed number, used for LCR routing of PSTN calls',
description VARCHAR(1024),
priority INTEGER NOT NULL UNIQUE COMMENT 'lower priority routes are attempted first',
priority INTEGER NOT NULL COMMENT 'lower priority routes are attempted first',
PRIMARY KEY (lcr_route_sid)
) COMMENT='Least cost routing table';
) COMMENT='An ordered list of digit patterns in an LCR table. The patterns are tested in sequence until one matches';
CREATE TABLE lcr
(
lcr_sid CHAR(36) NOT NULL UNIQUE ,
name VARCHAR(64) COMMENT 'User-assigned name for this LCR table',
is_active BOOLEAN NOT NULL DEFAULT 1,
default_carrier_set_entry_sid CHAR(36) COMMENT 'default carrier/route to use when no digit match based results are found.',
service_provider_sid CHAR(36),
account_sid CHAR(36),
PRIMARY KEY (lcr_sid)
) COMMENT='An LCR (least cost routing) table that is used by a service provider or account to make decisions about routing outbound calls when multiple carriers are available.';
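-- Illustrative only (hypothetical sids and values, not part of this schema dump):
-- an account-level LCR table with two digit patterns tried in priority order,
-- falling back to the default carrier set entry when neither pattern matches.
-- INSERT INTO lcr (lcr_sid, name, account_sid) VALUES ('lcr-example', 'US outbound', 'acct-example');
-- INSERT INTO lcr_routes (lcr_route_sid, lcr_sid, regex, priority) VALUES ('route-1', 'lcr-example', '^1212', 1);
-- INSERT INTO lcr_routes (lcr_route_sid, lcr_sid, regex, priority) VALUES ('route-2', 'lcr-example', '^1', 2);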
CREATE TABLE password_settings
(
@@ -248,7 +275,10 @@ CREATE TABLE sbc_addresses
sbc_address_sid CHAR(36) NOT NULL UNIQUE ,
ipv4 VARCHAR(255) NOT NULL,
port INTEGER NOT NULL DEFAULT 5060,
tls_port INTEGER,
wss_port INTEGER,
service_provider_sid CHAR(36),
last_updated DATETIME,
PRIMARY KEY (sbc_address_sid)
);
@@ -304,9 +334,17 @@ last_tested DATETIME,
tts_tested_ok BOOLEAN,
stt_tested_ok BOOLEAN,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
label VARCHAR(64),
PRIMARY KEY (speech_credential_sid)
);
CREATE TABLE system_information
(
domain_name VARCHAR(255),
sip_domain_name VARCHAR(255),
monitoring_domain_name VARCHAR(255)
);
CREATE TABLE users
(
user_sid CHAR(36) NOT NULL UNIQUE ,
@@ -357,6 +395,7 @@ smpp_inbound_password VARCHAR(64),
register_from_user VARCHAR(128),
register_from_domain VARCHAR(255),
register_public_ip_in_contact BOOLEAN NOT NULL DEFAULT false,
register_status VARCHAR(4096),
PRIMARY KEY (voip_carrier_sid)
) COMMENT='A Carrier or customer PBX that can send or receive calls';
@@ -385,7 +424,7 @@ PRIMARY KEY (smpp_gateway_sid)
CREATE TABLE phone_numbers
(
phone_number_sid CHAR(36) UNIQUE ,
number VARCHAR(132) NOT NULL UNIQUE ,
number VARCHAR(132) NOT NULL,
voip_carrier_sid CHAR(36),
account_sid CHAR(36),
application_sid CHAR(36),
@@ -403,6 +442,7 @@ inbound BOOLEAN NOT NULL COMMENT 'if true, whitelist this IP to allow inbound ca
outbound BOOLEAN NOT NULL COMMENT 'if true, include in least-cost routing when placing calls to the PSTN',
voip_carrier_sid CHAR(36) NOT NULL,
is_active BOOLEAN NOT NULL DEFAULT 1,
protocol ENUM('udp','tcp','tls', 'tls/srtp') DEFAULT 'udp' COMMENT 'Outbound call protocol',
PRIMARY KEY (sip_gateway_sid)
) COMMENT='A whitelisted sip gateway used for origination/termination';
@@ -435,13 +475,16 @@ account_sid CHAR(36) COMMENT 'account that this application belongs to (if null,
call_hook_sid CHAR(36) COMMENT 'webhook to call for inbound calls ',
call_status_hook_sid CHAR(36) COMMENT 'webhook to call for call status events',
messaging_hook_sid CHAR(36) COMMENT 'webhook to call for inbound SMS/MMS ',
app_json VARCHAR(16384),
app_json TEXT,
speech_synthesis_vendor VARCHAR(64) NOT NULL DEFAULT 'google',
speech_synthesis_language VARCHAR(12) NOT NULL DEFAULT 'en-US',
speech_synthesis_voice VARCHAR(64),
speech_synthesis_label VARCHAR(64),
speech_recognizer_vendor VARCHAR(64) NOT NULL DEFAULT 'google',
speech_recognizer_language VARCHAR(64) NOT NULL DEFAULT 'en-US',
speech_recognizer_label VARCHAR(64),
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
record_all_calls BOOLEAN NOT NULL DEFAULT false,
PRIMARY KEY (application_sid)
) COMMENT='A defined set of behaviors to be applied to phone calls ';
@@ -479,6 +522,9 @@ subspace_client_secret VARCHAR(255),
subspace_sip_teleport_id VARCHAR(255),
subspace_sip_teleport_destinations VARCHAR(255),
siprec_hook_sid CHAR(36),
record_all_calls BOOLEAN NOT NULL DEFAULT false,
record_format VARCHAR(16) NOT NULL DEFAULT 'mp3',
bucket_credential VARCHAR(8192) COMMENT 'credential used to authenticate with storage service',
PRIMARY KEY (account_sid)
) COMMENT='An enterprise that uses the platform for comm services';
@@ -499,9 +545,20 @@ ALTER TABLE call_routes ADD FOREIGN KEY account_sid_idxfk_3 (account_sid) REFERE
ALTER TABLE call_routes ADD FOREIGN KEY application_sid_idxfk (application_sid) REFERENCES applications (application_sid);
CREATE INDEX client_sid_idx ON clients (client_sid);
ALTER TABLE clients ADD CONSTRAINT account_sid_idxfk_13 FOREIGN KEY account_sid_idxfk_13 (account_sid) REFERENCES accounts (account_sid);
CREATE INDEX dns_record_sid_idx ON dns_records (dns_record_sid);
ALTER TABLE dns_records ADD FOREIGN KEY account_sid_idxfk_4 (account_sid) REFERENCES accounts (account_sid);
CREATE INDEX lcr_sid_idx ON lcr_routes (lcr_sid);
ALTER TABLE lcr_routes ADD FOREIGN KEY lcr_sid_idxfk (lcr_sid) REFERENCES lcr (lcr_sid);
CREATE INDEX lcr_sid_idx ON lcr (lcr_sid);
ALTER TABLE lcr ADD FOREIGN KEY default_carrier_set_entry_sid_idxfk (default_carrier_set_entry_sid) REFERENCES lcr_carrier_set_entry (lcr_carrier_set_entry_sid);
CREATE INDEX service_provider_sid_idx ON lcr (service_provider_sid);
CREATE INDEX account_sid_idx ON lcr (account_sid);
CREATE INDEX permission_sid_idx ON permissions (permission_sid);
CREATE INDEX predefined_carrier_sid_idx ON predefined_carriers (predefined_carrier_sid);
CREATE INDEX predefined_sip_gateway_sid_idx ON predefined_sip_gateways (predefined_sip_gateway_sid);
@@ -555,8 +612,6 @@ CREATE INDEX smpp_address_sid_idx ON smpp_addresses (smpp_address_sid);
CREATE INDEX service_provider_sid_idx ON smpp_addresses (service_provider_sid);
ALTER TABLE smpp_addresses ADD FOREIGN KEY service_provider_sid_idxfk_4 (service_provider_sid) REFERENCES service_providers (service_provider_sid);
CREATE UNIQUE INDEX speech_credentials_idx_1 ON speech_credentials (vendor,account_sid);
CREATE INDEX speech_credential_sid_idx ON speech_credentials (speech_credential_sid);
CREATE INDEX service_provider_sid_idx ON speech_credentials (service_provider_sid);
ALTER TABLE speech_credentials ADD FOREIGN KEY service_provider_sid_idxfk_5 (service_provider_sid) REFERENCES service_providers (service_provider_sid);
@@ -593,6 +648,8 @@ CREATE INDEX smpp_gateway_sid_idx ON smpp_gateways (smpp_gateway_sid);
CREATE INDEX voip_carrier_sid_idx ON smpp_gateways (voip_carrier_sid);
ALTER TABLE smpp_gateways ADD FOREIGN KEY voip_carrier_sid_idxfk (voip_carrier_sid) REFERENCES voip_carriers (voip_carrier_sid);
CREATE UNIQUE INDEX phone_numbers_unique_idx_voip_carrier_number ON phone_numbers (number,voip_carrier_sid);
CREATE INDEX phone_number_sid_idx ON phone_numbers (phone_number_sid);
CREATE INDEX number_idx ON phone_numbers (number);
CREATE INDEX voip_carrier_sid_idx ON phone_numbers (voip_carrier_sid);
@@ -647,5 +704,4 @@ ALTER TABLE accounts ADD FOREIGN KEY queue_event_hook_sid_idxfk (queue_event_hoo
ALTER TABLE accounts ADD FOREIGN KEY device_calling_application_sid_idxfk (device_calling_application_sid) REFERENCES applications (application_sid);
ALTER TABLE accounts ADD FOREIGN KEY siprec_hook_sid_idxfk (siprec_hook_sid) REFERENCES applications (application_sid);
SET FOREIGN_KEY_CHECKS=1;

View File

@@ -5,6 +5,8 @@ const getJSON = bent('json')
const clearModule = require('clear-module');
const {provisionCallHook} = require('./utils')
const sleepFor = (ms) => new Promise((r) => setTimeout(r, ms));
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
});
@@ -43,10 +45,11 @@ test('\'dial-phone\'', async(t) => {
}
];
provisionCallHook(from, verbs);
await provisionCallHook(from, verbs);
// THEN
const p = sippUac('uas-dial.xml', '172.38.0.10', undefined, undefined, 2);
await sleepFor(1000);
let account_sid = '622f62e4-303a-49f2-bbe0-eb1e1714e37a';
let post = bent('http://127.0.0.1:3000/', 'POST', 'json', 201);
@@ -84,7 +87,7 @@ test('\'dial-sip\'', async(t) => {
try {
await connect(srf);
// wait for fs connected to drachtio server.
await new Promise(r => setTimeout(r, 1000));
await sleepFor(1000);
// GIVEN
const from = "dial_sip";
let verbs = [
@@ -102,7 +105,7 @@ test('\'dial-sip\'', async(t) => {
}
];
provisionCallHook(from, verbs);
await provisionCallHook(from, verbs);
// THEN
const p = sippUac('uas-dial.xml', '172.38.0.10', undefined, undefined, 2);
@@ -169,7 +172,7 @@ test('\'dial-user\'', async(t) => {
}
];
provisionCallHook(from, verbs);
await provisionCallHook(from, verbs);
// THEN
const p = sippUac('uas-dial.xml', '172.38.0.10', undefined, undefined, 2);

View File

@@ -42,7 +42,7 @@ services:
ipv4_address: 172.38.0.7
drachtio:
image: drachtio/drachtio-server:latest
image: drachtio/drachtio-server:0.8.24
restart: always
command: drachtio --contact "sip:*;transport=udp" --mtu 4096 --address 0.0.0.0 --port 9022
ports:
@@ -57,7 +57,7 @@ services:
condition: service_healthy
freeswitch:
image: drachtio/drachtio-freeswitch-mrf:0.4.18
image: drachtio/drachtio-freeswitch-mrf:0.6.1
restart: always
command: freeswitch --rtp-range-start 20000 --rtp-range-end 20100
environment:
@@ -92,3 +92,13 @@ services:
networks:
fs:
ipv4_address: 172.38.0.90
squid:
image: ubuntu/squid:edge
ports:
- "3128:3128"
volumes:
- ./configuration/squid.conf:/etc/squid/squid.conf
networks:
fs:
ipv4_address: 172.38.0.91

View File

@@ -50,7 +50,7 @@ test('\'gather\' test - google', async(t) => {
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
@@ -86,7 +86,7 @@ test('\'gather\' test - default (google)', async(t) => {
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
@@ -133,7 +133,7 @@ test('\'config\' test - reset to app defaults', async(t) => {
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
@@ -173,7 +173,7 @@ test('\'gather\' test - microsoft', async(t) => {
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
@@ -213,7 +213,7 @@ test('\'gather\' test - aws', async(t) => {
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
@@ -256,7 +256,7 @@ test('\'gather\' test - deepgram', async(t) => {
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
@@ -298,7 +298,7 @@ test('\'gather\' test - soniox', async(t) => {
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);

test/hangup-test.js (new file, 53 lines)
View File

@@ -0,0 +1,53 @@
const test = require('tape');
const { sippUac } = require('./sipp')('test_fs');
const clearModule = require('clear-module');
const {provisionCallHook, provisionCustomHook} = require('./utils')
const bent = require('bent');
const getJSON = bent('json')
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
});
function connect(connectable) {
return new Promise((resolve, reject) => {
connectable.on('connect', () => {
return resolve();
});
});
}
test('\'hangup\' custom headers', async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
const verbs = [
{
verb: 'play',
url: 'https://example.com/example.mp3'
},
{
"verb": "hangup",
"headers": {
"X-Reason" : "maximum call duration exceeded"
}
}
];
const from = 'hangup_custom_headers';
await provisionCallHook(from, verbs)
// THEN
await sippUac('uac-success-received-bye.xml', '172.38.0.10', from);
t.pass('hangup: succeeds with custom headers');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});

test/http-proxy-test.js (new file, 72 lines)
View File

@@ -0,0 +1,72 @@
const test = require('tape');
const { sippUac } = require('./sipp')('test_fs');
const clearModule = require('clear-module');
const {provisionCallHook, provisionCustomHook} = require('./utils')
const bent = require('bent');
const getJSON = bent('json')
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
});
function connect(connectable) {
return new Promise((resolve, reject) => {
connectable.on('connect', () => {
return resolve();
});
});
}
test('\'HTTP proxy\' test Info', async(t) => {
clearModule.all();
process.env.JAMBONES_HTTP_PROXY_IP = "127.0.0.1";
process.env.JAMBONES_HTTP_PROXY_PROTOCOL = "http";
process.env.JAMBONES_HTTP_PROXY_PORT = 3128;
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
const verbs = [
{
verb: 'config',
sipRequestWithinDialogHook: '/customHook'
},
{
verb: 'play',
url: 'silence_stream://5000',
}
];
const waitHookVerbs = [
{
verb: 'hangup'
}
];
const from = 'http_proxy_info';
await provisionCustomHook(from, waitHookVerbs)
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-success-info-received-bye.xml', '172.38.0.10', from, "16174000015");
t.pass('sip Info: success send Info');
// Make sure that sipRequestWithinDialogHook is called and success
const json = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_customHook`)
t.ok(json.body.sip_method === 'INFO', 'sipRequestWithinDialogHook contains sip_method')
t.ok(json.body.sip_body === 'hello jambonz\r\n', 'sipRequestWithinDialogHook contains sip_body')
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
} finally {
delete process.env.JAMBONES_HTTP_PROXY_IP;
delete process.env.JAMBONES_HTTP_PROXY_PROTOCOL;
delete process.env.JAMBONES_HTTP_PROXY_PORT;
}
});

test/in-dialog-test.js (new file, 65 lines)
View File

@@ -0,0 +1,65 @@
const test = require('tape');
const { sippUac } = require('./sipp')('test_fs');
const clearModule = require('clear-module');
const {provisionCallHook, provisionCustomHook} = require('./utils')
const bent = require('bent');
const getJSON = bent('json')
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
});
function connect(connectable) {
return new Promise((resolve, reject) => {
connectable.on('connect', () => {
return resolve();
});
});
}
test('\'sip Indialog\' test Info', async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
const verbs = [
{
verb: 'config',
sipRequestWithinDialogHook: '/customHook'
},
{
verb: 'play',
url: 'silence_stream://5000',
}
];
const waitHookVerbs = [
{
verb: 'hangup'
}
];
const from = 'sip_indialog_info';
await provisionCustomHook(from, waitHookVerbs)
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-success-info-received-bye.xml', '172.38.0.10', from);
t.pass('sip Info: success send Info');
// Make sure that sipRequestWithinDialogHook is called and success
const json = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_customHook`)
t.ok(json.body.sip_method === 'INFO', 'sipRequestWithinDialogHook contains sip_method')
t.ok(json.body.sip_body === 'hello jambonz\r\n', 'sipRequestWithinDialogHook contains sip_body')
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});

View File

@@ -14,5 +14,10 @@ require('./play-tests');
require('./sip-refer-tests');
require('./listen-tests');
require('./config-test');
require('./queue-test');
require('./in-dialog-test');
require('./hangup-test');
require('./sdp-utils-test');
require('./http-proxy-test');
require('./remove-test-db');
require('./docker_stop');

View File

@@ -35,7 +35,7 @@ test('\'listen-success\'', async(t) => {
}
];
provisionCallHook(from, verbs);
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success-send-bye.xml', '172.38.0.10', from);
@@ -57,7 +57,7 @@ test('\'listen-success\'', async(t) => {
}
});
test('\'listen-maxLength\'', async(t) => {
test.skip('\'listen-maxLength\'', async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');
try {
@@ -69,13 +69,13 @@ test('\'listen-maxLength\'', async(t) => {
{
"verb": "listen",
"url": `ws://172.38.0.60:3000/${from}`,
"mixType" : "mixed",
"mixType" : "stereo",
"timeout": 2,
"maxLength": 2
}
];
provisionCallHook(from, verbs);
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
@@ -109,7 +109,7 @@ test('\'listen-pause-resume\'', async(t) => {
}
];
provisionCallHook(from, verbs);
await provisionCallHook(from, verbs);
// THEN
const p = sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);

View File

@@ -33,7 +33,7 @@ test('\'play\' tests single link in plain text', async(t) => {
];
const from = 'play_single_link';
provisionCallHook(from, verbs)
await provisionCallHook(from, verbs)
// THEN
await sippUac('uac-success-received-bye.xml', '172.38.0.10', from);
@@ -62,7 +62,7 @@ test('\'play\' tests multi links in array', async(t) => {
];
const from = 'play_multi_links_in_array';
provisionCallHook(from, verbs)
await provisionCallHook(from, verbs)
// THEN
await sippUac('uac-success-received-bye.xml', '172.38.0.10', from);
@@ -100,8 +100,8 @@ test('\'play\' tests single link in conference', async(t) => {
waitHook: `/customHook`
}
];
provisionCustomHook(from, waitHookVerbs)
provisionCallHook(from, verbs)
await provisionCustomHook(from, waitHookVerbs)
await provisionCallHook(from, verbs)
// THEN
await sippUac('uac-success-send-bye.xml', '172.38.0.10', from);
@@ -141,8 +141,8 @@ test('\'play\' tests multi links in array in conference', async(t) => {
waitHook: `/customHook`
}
];
provisionCustomHook(from, waitHookVerbs)
provisionCallHook(from, verbs)
await provisionCustomHook(from, waitHookVerbs)
await provisionCallHook(from, verbs)
// THEN
await sippUac('uac-success-send-bye.xml', '172.38.0.10', from);
@@ -178,8 +178,8 @@ test('\'play\' tests with seekOffset and actionHook', async(t) => {
const waitHookVerbs = [];
const from = 'play_action_hook';
provisionCallHook(from, verbs)
provisionCustomHook(from, waitHookVerbs)
await provisionCallHook(from, verbs)
await provisionCustomHook(from, waitHookVerbs)
// THEN
await sippUac('uac-success-received-bye.xml', '172.38.0.10', from);
@@ -188,7 +188,7 @@ test('\'play\' tests with seekOffset and actionHook', async(t) => {
const seconds = parseInt(obj.body.playback_seconds);
const milliseconds = parseInt(obj.body.playback_milliseconds);
const lastOffsetPos = parseInt(obj.body.playback_last_offset_pos);
//console.log({obj}, 'lastRequest');
console.log({obj}, 'lastRequest');
t.ok(obj.body.reason === "playCompleted", "play: actionHook success received");
t.ok(seconds === 2, "playback_seconds: actionHook success received");
t.ok(milliseconds === 2048, "playback_milliseconds: actionHook success received");
@@ -218,7 +218,7 @@ test('\'play\' tests with earlymedia', async(t) => {
];
const from = 'play_early_media';
provisionCallHook(from, verbs)
await provisionCallHook(from, verbs)
// THEN
await sippUac('uac-invite-expect-183-cancel.xml', '172.38.0.10', from);

test/queue-test.js (new file, 127 lines)
View File

@@ -0,0 +1,127 @@
const test = require('tape');
const { sippUac } = require('./sipp')('test_fs');
const clearModule = require('clear-module');
const {provisionCallHook, provisionActionHook, provisionAnyHook} = require('./utils');
const bent = require('bent');
const getJSON = bent('json');
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
});
function connect(connectable) {
return new Promise((resolve, reject) => {
connectable.on('connect', () => {
return resolve();
});
});
}
const sleepFor = (ms) => new Promise((resolve) => setTimeout(() => resolve(), ms));
test('\'enqueue-dequeue\' tests', async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
const verbs = [
{
verb: 'enqueue',
name: 'support',
actionHook: '/actionHook'
}
];
const verbs2 = [
{
verb: 'dequeue',
name: 'support'
}
];
const actionVerbs = [
{
verb: 'play',
url: 'silence_stream://1000',
earlyMedia: true
}
];
const from = 'enqueue_success';
await provisionCallHook(from, verbs);
await provisionActionHook(from, actionVerbs)
const from2 = 'dequeue_success';
await provisionCallHook(from2, verbs2);
// THEN
const p1 = sippUac('uac-success-received-bye.xml', '172.38.0.10', from);
await sleepFor(1000);
const p2 = sippUac('uac-success-send-bye.xml', '172.38.0.11', from2);
await Promise.all([p1, p2]);
const obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
t.ok(obj.body.queue_result === 'bridged');
t.pass('enqueue-dequeue: succeeds connect');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'leave\' tests', async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
const verbs = [
{
verb: 'enqueue',
name: 'support1',
waitHook: '/anyHook/enqueue_success_leave',
actionHook: '/actionHook'
}
];
const anyHookVerbs = [
{
verb: 'leave'
}
];
const actionVerbs = [
{
verb: 'play',
url: 'silence_stream://1000',
earlyMedia: true
}
];
const from = 'enqueue_success_leave';
await provisionCallHook(from, verbs);
await provisionAnyHook(from, anyHookVerbs);
await provisionActionHook(from, actionVerbs)
// THEN
await sippUac('uac-success-received-bye.xml', '172.38.0.10', from);
const obj = await getJSON(`http://127.0.0.1:3100/lastRequest/enqueue_success_leave`);
t.ok(obj.body.queue_position === 0);
const obj1 = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
t.ok(obj1.body.queue_result === 'leave');
t.pass('enqueue-leave: caller successfully left the queue');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});

View File

@@ -31,7 +31,7 @@ test('\'say\' tests', async(t) => {
];
const from = 'say_test_success';
provisionCallHook(from, verbs)
await provisionCallHook(from, verbs)
// THEN
await sippUac('uac-success-received-bye.xml', '172.38.0.10', from);
@@ -71,7 +71,47 @@ test('\'config\' reset synthesizer tests', async(t) => {
];
const from = 'say_test_success';
provisionCallHook(from, verbs)
await provisionCallHook(from, verbs)
// THEN
await sippUac('uac-success-received-bye.xml', '172.38.0.10', from);
t.pass('say: succeeds when using account credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('Say verb array test', async(t) => {
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
const verbs = [
{
"verb": "config",
"synthesizer": {
"vendor": "microsft",
"voice": "foobar"
},
},
{
"verb": "config",
"reset": 'synthesizer',
},
{
verb: 'say',
text: ['hello', 'https://samplelib.com/lib/preview/mp3/sample-3s.mp3']
}
];
const from = 'say_test_success';
await provisionCallHook(from, verbs)
// THEN
await sippUac('uac-success-received-bye.xml', '172.38.0.10', from);
@@ -111,7 +151,7 @@ if (MICROSOFT_CUSTOM_API_KEY && MICROSOFT_DEPLOYMENT_ID && MICROSOFT_CUSTOM_REGI
];
const from = 'say_test_success';
provisionCallHook(from, verbs)
await provisionCallHook(from, verbs)
// THEN
await sippUac('uac-success-received-bye.xml', '172.38.0.10', from);

View File

@@ -0,0 +1,114 @@
<?xml version="1.0" encoding="ISO-8859-1" ?>
<!DOCTYPE scenario SYSTEM "sipp.dtd">
<scenario name="Basic Sipstone UAC">
<!-- In client mode (sipp placing calls), the Call-ID MUST be -->
<!-- generated by sipp. To do so, use [call_id] keyword. -->
<send retrans="500">
<![CDATA[
INVITE sip:[to]@[remote_ip]:[remote_port] SIP/2.0
Via: SIP/2.0/[transport] [local_ip]:[local_port];branch=[branch]
From: [from] <sip:[from]@[local_ip]:[local_port]>;tag=[pid]SIPpTag00[call_number]
To: <sip:[to]@[remote_ip]:[remote_port]>
Call-ID: [call_id]
CSeq: 1 INVITE
Contact: sip:[from]@[local_ip]:[local_port]
Max-Forwards: 70
X-Account-Sid: bb845d4b-83a9-4cde-a6e9-50f3743bab3f
Subject: uac-say
Content-Type: application/sdp
Content-Length: [len]
v=0
o=user1 53655765 2353687637 IN IP[local_ip_type] [local_ip]
s=-
c=IN IP[media_ip_type] [media_ip]
t=0 0
m=audio [media_port] RTP/AVP 0
a=rtpmap:0 PCMU/8000
]]>
</send>
<recv response="100"
optional="true">
</recv>
<recv response="180" optional="true">
</recv>
<recv response="183" optional="true">
</recv>
<!-- By adding rrs="true" (Record Route Sets), the route sets -->
<!-- are saved and used for following messages sent. Useful to test -->
<!-- against stateful SIP proxies/B2BUAs. -->
<recv response="200" rtd="true">
</recv>
<!-- Packet lost can be simulated in any send/recv message by -->
<!-- by adding the 'lost = "10"'. Value can be [1-100] percent. -->
<send>
<![CDATA[
ACK sip:[to]@[remote_ip]:[remote_port] SIP/2.0
Via: SIP/2.0/[transport] [local_ip]:[local_port];branch=[branch]
From: [from] <sip:[from]@[local_ip]:[local_port]>;tag=[pid]SIPpTag00[call_number]
To: [to] <sip:[to]@[remote_ip]:[remote_port]>[peer_tag_param]
Call-ID: [call_id]
CSeq: 1 ACK
Contact: sip:[from]@[local_ip]:[local_port]
Max-Forwards: 70
Subject: uac-say
Content-Length: 0
]]>
</send>
<pause milliseconds="2000"/>
<!-- Send an INFO message -->
<send>
<![CDATA[
INFO sip:[service]@[remote_ip]:[remote_port] SIP/2.0
Via: SIP/2.0/[transport] [local_ip]:[local_port];branch=[branch]
From: [from] <sip:[from]@[local_ip]:[local_port]>;tag=[pid]SIPpTag00[call_number]
To: [to] <sip:[to]@[remote_ip]:[remote_port]>[peer_tag_param]
Call-ID: [call_id]
CSeq: 2 INFO
Contact: sip:[from]@[local_ip]:[local_port]
Max-Forwards: 70
Subject: Performance Test
Content-Type: text/plain
Content-Length: [len]
hello jambonz
]]>
</send>
<!-- Receive 200 OK -->
<recv response="200">
</recv>
<recv request="BYE">
</recv>
<send>
<![CDATA[
SIP/2.0 200 OK
[last_Via:]
[last_From:]
[last_To:]
[last_Call-ID:]
[last_CSeq:]
Contact: <sip:[local_ip]:[local_port];transport=[transport]>
Content-Length: 0
]]>
</send>
</scenario>

test/sdp-utils-test.js (new file, 26 lines)
View File

@@ -0,0 +1,26 @@
const test = require('tape');
const {makeOpusFirst, isOpusFirst} = require('../lib/utils/sdp-utils');
const sdpTransform = require('sdp-transform');
test('test opus first', (t) => {
const sdp = 'v=0\r\no=- 3348584794228993675 2 IN IP4 127.0.0.1\r\ns=-\r\nt=0 0\r\na=group:BUNDLE 0\r\na=extmap-allow-mixed\r\na=msid-semantic: WMS caca8b77-5ae5-4e73-a4d5-de1fce930335\r\nm=audio 57088 UDP/TLS/RTP/SAVPF 111 63 9 0 8 13 110 126\r\nc=IN IP4 14.238.89.50\r\na=rtcp:9 IN IP4 0.0.0.0\r\na=candidate:1401281302 1 udp 2122260223 10.231.36.146 57088 typ host generation 0 network-id 1 network-cost 10\r\na=candidate:2173263513 1 udp 1686052607 14.238.89.50 57088 typ srflx raddr 10.231.36.146 rport 57088 generation 0 network-id 1 network-cost 10\r\na=ice-ufrag:k5nc\r\na=ice-pwd:J0qwMs6HrIcFNZbDG5m8Kqpk\r\na=ice-options:trickle\r\na=fingerprint:sha-256 66:DE:9A:76:CE:11:2D:65:C4:08:C7:87:B4:90:7E:F1:8D:07:B9:F4:FF:E3:81:D7:E7:7D:C6:56:47:01:6E:55\r\na=setup:actpass\r\na=mid:0\r\na=extmap:1 urn:ietf:params:rtp-hdrext:ssrc-audio-level\r\na=extmap:2 http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time\r\na=extmap:3 http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01\r\na=extmap:4 urn:ietf:params:rtp-hdrext:sdes:mid\r\na=sendrecv\r\na=msid:caca8b77-5ae5-4e73-a4d5-de1fce930335 52ad01f1-b1df-4b8e-a208-9201e98b6f7b\r\na=rtcp-mux\r\na=rtcp-fb:111 transport-cc\r\na=fmtp:111 minptime=10;useinbandfec=1\r\na=rtpmap:63 red/48000/2\r\na=fmtp:63 111/111\r\na=rtpmap:9 G722/8000\r\na=rtpmap:0 PCMU/8000\r\na=rtpmap:8 PCMA/8000\r\na=rtpmap:13 CN/8000\r\na=rtpmap:111 opus/48000/2\r\na=rtpmap:110 telephone-event/48000\r\na=rtpmap:126 telephone-event/8000\r\na=ssrc:3207459321 cname:4nyPJ6KXvseBUIhu\r\na=ssrc:3207459321 msid:caca8b77-5ae5-4e73-a4d5-de1fce930335 52ad01f1-b1df-4b8e-a208-9201e98b6f7b\r\n';
const opusSdp = makeOpusFirst(sdp);
const parsedSdp = sdpTransform.parse(opusSdp);
const opusIndex = parsedSdp.media[0].rtp.findIndex((entry) => entry.codec === 'opus');
t.ok(opusIndex === 0, 'successfully moves opus to be the first offer')
t.end();
});
test('test is opus first', (t) => {
const sdp = 'v=0\r\no=- 3348584794228993675 2 IN IP4 127.0.0.1\r\ns=-\r\nt=0 0\r\na=group:BUNDLE 0\r\na=extmap-allow-mixed\r\na=msid-semantic: WMS caca8b77-5ae5-4e73-a4d5-de1fce930335\r\nm=audio 57088 UDP/TLS/RTP/SAVPF 111 63 9 0 8 13 110 126\r\nc=IN IP4 14.238.89.50\r\na=rtcp:9 IN IP4 0.0.0.0\r\na=candidate:1401281302 1 udp 2122260223 10.231.36.146 57088 typ host generation 0 network-id 1 network-cost 10\r\na=candidate:2173263513 1 udp 1686052607 14.238.89.50 57088 typ srflx raddr 10.231.36.146 rport 57088 generation 0 network-id 1 network-cost 10\r\na=ice-ufrag:k5nc\r\na=ice-pwd:J0qwMs6HrIcFNZbDG5m8Kqpk\r\na=ice-options:trickle\r\na=fingerprint:sha-256 66:DE:9A:76:CE:11:2D:65:C4:08:C7:87:B4:90:7E:F1:8D:07:B9:F4:FF:E3:81:D7:E7:7D:C6:56:47:01:6E:55\r\na=setup:actpass\r\na=mid:0\r\na=extmap:1 urn:ietf:params:rtp-hdrext:ssrc-audio-level\r\na=extmap:2 http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time\r\na=extmap:3 http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01\r\na=extmap:4 urn:ietf:params:rtp-hdrext:sdes:mid\r\na=sendrecv\r\na=msid:caca8b77-5ae5-4e73-a4d5-de1fce930335 52ad01f1-b1df-4b8e-a208-9201e98b6f7b\r\na=rtcp-mux\r\na=rtpmap:111 opus/48000/2\r\na=rtcp-fb:111 transport-cc\r\na=fmtp:111 minptime=10;useinbandfec=1\r\na=rtpmap:63 red/48000/2\r\na=fmtp:63 111/111\r\na=rtpmap:9 G722/8000\r\na=rtpmap:0 PCMU/8000\r\na=rtpmap:8 PCMA/8000\r\na=rtpmap:13 CN/8000\r\na=rtpmap:110 telephone-event/48000\r\na=rtpmap:126 telephone-event/8000\r\na=ssrc:3207459321 cname:4nyPJ6KXvseBUIhu\r\na=ssrc:3207459321 msid:caca8b77-5ae5-4e73-a4d5-de1fce930335 52ad01f1-b1df-4b8e-a208-9201e98b6f7b\r\n';
t.ok(isOpusFirst(sdp), "opus is first offer");
const sdp2 = 'v=0\r\no=- 3348584794228993675 2 IN IP4 127.0.0.1\r\ns=-\r\nt=0 0\r\na=group:BUNDLE 0\r\na=extmap-allow-mixed\r\na=msid-semantic: WMS caca8b77-5ae5-4e73-a4d5-de1fce930335\r\nm=audio 57088 UDP/TLS/RTP/SAVPF 111 63 9 0 8 13 110 126\r\nc=IN IP4 14.238.89.50\r\na=rtcp:9 IN IP4 0.0.0.0\r\na=candidate:1401281302 1 udp 2122260223 10.231.36.146 57088 typ host generation 0 network-id 1 network-cost 10\r\na=candidate:2173263513 1 udp 1686052607 14.238.89.50 57088 typ srflx raddr 10.231.36.146 rport 57088 generation 0 network-id 1 network-cost 10\r\na=ice-ufrag:k5nc\r\na=ice-pwd:J0qwMs6HrIcFNZbDG5m8Kqpk\r\na=ice-options:trickle\r\na=fingerprint:sha-256 66:DE:9A:76:CE:11:2D:65:C4:08:C7:87:B4:90:7E:F1:8D:07:B9:F4:FF:E3:81:D7:E7:7D:C6:56:47:01:6E:55\r\na=setup:actpass\r\na=mid:0\r\na=extmap:1 urn:ietf:params:rtp-hdrext:ssrc-audio-level\r\na=extmap:2 http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time\r\na=extmap:3 http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01\r\na=extmap:4 urn:ietf:params:rtp-hdrext:sdes:mid\r\na=sendrecv\r\na=msid:caca8b77-5ae5-4e73-a4d5-de1fce930335 52ad01f1-b1df-4b8e-a208-9201e98b6f7b\r\na=rtcp-mux\r\na=rtcp-fb:111 transport-cc\r\na=fmtp:111 minptime=10;useinbandfec=1\r\na=rtpmap:63 red/48000/2\r\na=fmtp:63 111/111\r\na=rtpmap:9 G722/8000\r\na=rtpmap:0 PCMU/8000\r\na=rtpmap:8 PCMA/8000\r\na=rtpmap:13 CN/8000\r\na=rtpmap:111 opus/48000/2\r\na=rtpmap:110 telephone-event/48000\r\na=rtpmap:126 telephone-event/8000\r\na=ssrc:3207459321 cname:4nyPJ6KXvseBUIhu\r\na=ssrc:3207459321 msid:caca8b77-5ae5-4e73-a4d5-de1fce930335 52ad01f1-b1df-4b8e-a208-9201e98b6f7b\r\n';
t.ok(!isOpusFirst(sdp2), "opus is not first offer")
const sdp3 = 'v=0\r\no=xhoaluu2 1314 1504 IN IP4 192.168.1.4\r\ns=Talk\r\nc=IN IP4 192.168.1.4\r\nt=0 0\r\na=ice-pwd:397d063ea23fdc05164e3ee4\r\na=ice-ufrag:16c449a3\r\na=rtcp-xr:rcvr-rtt=all:10000 stat-summary=loss,dup,jitt,TTL voip-metrics\r\na=group:BUNDLE as\r\na=record:off\r\nm=audio 56542 RTP/AVPF 0 8\r\nc=IN IP4 14.226.233.151\r\na=rtcp-mux\r\na=mid:as\r\na=extmap:1 urn:ietf:params:rtp-hdrext:sdes:mid\r\na=rtcp:63076 IN IP4 192.168.1.4\r\na=candidate:1 1 UDP 2130706303 192.168.1.4 56542 typ host\r\na=candidate:1 2 UDP 2130706302 192.168.1.4 63076 typ host\r\na=candidate:2 1 UDP 2130706431 2001:ee0:d744:dcf0:c1d3:d73f:7a93:dc9f 56542 typ host\r\na=candidate:2 2 UDP 2130706430 2001:ee0:d744:dcf0:c1d3:d73f:7a93:dc9f 63076 typ host\r\na=candidate:3 1 UDP 2130706431 2001:ee0:d744:dcf0:15:6be3:8e6b:b736 56542 typ host\r\na=candidate:3 2 UDP 2130706430 2001:ee0:d744:dcf0:15:6be3:8e6b:b736 63076 typ host\r\na=candidate:4 1 UDP 1694498687 14.226.233.151 56542 typ srflx raddr 192.168.1.4 rport 56542\r\na=rtcp-fb:* trr-int 1000\r\na=rtcp-fb:* ccm tmmbr';
t.ok(!isOpusFirst(sdp3), "opus is not in the offer")
t.end();
});

View File

@@ -41,8 +41,8 @@ test('\'refer\' tests w/202 and NOTIFY', {timeout: 25000}, async(t) => {
const noVerbs = [];
const from = 'refer_with_notify';
provisionCallHook(from, verbs);
provisionActionHook(from, noVerbs)
await provisionCallHook(from, verbs);
await provisionActionHook(from, noVerbs)
// THEN
await sippUac('uac-refer-with-notify.xml', '172.38.0.10', from);
@@ -81,8 +81,8 @@ test('\'refer\' tests w/202 but no NOTIFY', {timeout: 25000}, async(t) => {
const noVerbs = [];
const from = 'refer_no_notify';
provisionCallHook(from, verbs);
provisionActionHook(from, noVerbs)
await provisionCallHook(from, verbs);
await provisionActionHook(from, noVerbs)
// THEN
await sippUac('uac-refer-no-notify.xml', '172.38.0.10', from);

View File

@@ -40,7 +40,7 @@ test('sending SIP in-dialog requests tests', async(t) => {
}
];
let from = "sip_indialog_test";
provisionCallHook(from, verbs);
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-send-info-during-dialog.xml', '172.38.0.10', from);
const obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);

View File

@@ -36,7 +36,7 @@ obj.sippUac = (file, bindAddress, from='sipp', to='16174000000', loop=1) => {
'-cid_str', `%u-%p@%s-${idx++}`,
'172.38.0.50',
'-key','from', from,
'-key','to', to, '-trace_msg'
'-key','to', to, '-trace_msg', '-trace_err'
];
if (bindAddress) args.splice(5, 0, '--ip', bindAddress);

View File

@@ -48,10 +48,11 @@ test('\'transcribe\' test - google', async(t) => {
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
//console.log(JSON.stringify(obj));
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'transcribe: succeeds when using google credentials');
@@ -85,10 +86,11 @@ test('\'transcribe\' test - microsoft', async(t) => {
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
//console.log(JSON.stringify(obj));
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'transcribe: succeeds when using microsoft credentials');
@@ -122,10 +124,11 @@ test('\'transcribe\' test - aws', async(t) => {
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
//console.log(JSON.stringify(obj));
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'transcribe: succeeds when using aws credentials');
@@ -137,6 +140,71 @@ test('\'transcribe\' test - aws', async(t) => {
}
});
test('\'transcribe\' test - deepgram config options', async(t) => {
if (!DEEPGRAM_API_KEY ) {
t.pass('skipping deepgram tests');
return t.end();
}
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let verbs = [
{
"verb": "config",
"recognizer": {
"vendor": "deepgram",
"language": "en-US",
"altLanguages": [
"en-US"
],
"deepgramOptions": {
"model": "2-ea",
"tier": "nova",
"numerals": true,
"ner": true,
"vadTurnoff": 10,
"keywords": [
"CPT"
]
}
}
},
{
"verb": "transcribe",
"transcriptionHook": "/transcriptionHook",
"recognizer": {
"vendor": "deepgram",
"altLanguages": [
"en-AU"
],
"hints": ["customer support", "sales", "human resources", "HR"],
"deepgramOptions": {
"apiKey": DEEPGRAM_API_KEY,
}
}
}
];
let from = "gather_success";
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
//console.log(JSON.stringify(obj));
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().includes('like to speak to customer support'),
'transcribe: succeeds when using deepgram credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'transcribe\' test - deepgram', async(t) => {
if (!DEEPGRAM_API_KEY ) {
t.pass('skipping deepgram tests');
@@ -162,10 +230,11 @@ test('\'transcribe\' test - deepgram', async(t) => {
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
//console.log(JSON.stringify(obj));
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().includes('like to speak to customer support'),
'transcribe: succeeds when using deepgram credentials');
@@ -202,7 +271,7 @@ test('\'transcribe\' test - soniox', async(t) => {
}
];
let from = "gather_success";
provisionCallHook(from, verbs);
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
@@ -210,6 +279,168 @@ test('\'transcribe\' test - soniox', async(t) => {
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'transcribe: succeeds when using soniox credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'transcribe\' test - google with asrTimeout', async(t) => {
if (!GCP_JSON_KEY) {
t.pass('skipping google tests');
return t.end();
}
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let verbs = [
{
"verb": "transcribe",
"recognizer": {
"vendor": "google",
"hints": ["customer support", "sales", "human resources", "HR"],
"asrTimeout": 4
},
"transcriptionHook": "/transcriptionHook"
}
];
let from = "gather_success";
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
//console.log(JSON.stringify(obj));
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().startsWith('i\'d like to speak to customer support'),
'transcribe: succeeds when using google credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'transcribe\' test - deepgram config options, no altLanguages on transcribe', async(t) => {
if (!DEEPGRAM_API_KEY ) {
t.pass('skipping deepgram tests');
return t.end();
}
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let verbs = [
{
"verb": "config",
"recognizer": {
"vendor": "deepgram",
"language": "en-US",
"altLanguages": [
"en-US"
],
"deepgramOptions": {
"model": "2-ea",
"tier": "nova",
"numerals": true,
"ner": true,
"vadTurnoff": 10,
"keywords": [
"CPT"
]
}
}
},
{
"verb": "transcribe",
"transcriptionHook": "/transcriptionHook",
"recognizer": {
"vendor": "deepgram",
"hints": ["customer support", "sales", "human resources", "HR"],
"deepgramOptions": {
"apiKey": DEEPGRAM_API_KEY,
}
}
}
];
let from = "gather_success_no_altLanguages";
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
//console.log(JSON.stringify(obj));
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().includes('like to speak to customer support'),
'transcribe: succeeds when using deepgram credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);
disconnect();
t.error(err);
}
});
test('\'transcribe\' test - deepgram config options, empty altLanguages on transcribe', async(t) => {
if (!DEEPGRAM_API_KEY ) {
t.pass('skipping deepgram tests');
return t.end();
}
clearModule.all();
const {srf, disconnect} = require('../app');
try {
await connect(srf);
// GIVEN
let verbs = [
{
"verb": "config",
"recognizer": {
"vendor": "deepgram",
"language": "en-US",
"altLanguages": [
"en-US"
],
"deepgramOptions": {
"model": "2-ea",
"tier": "nova",
"numerals": true,
"ner": true,
"vadTurnoff": 10,
"keywords": [
"CPT"
]
}
}
},
{
"verb": "transcribe",
"transcriptionHook": "/transcriptionHook",
"recognizer": {
"vendor": "deepgram",
"hints": ["customer support", "sales", "human resources", "HR"],
"altLanguages": [],
"deepgramOptions": {
"apiKey": DEEPGRAM_API_KEY,
}
}
}
];
let from = "gather_success_has_altLanguages";
await provisionCallHook(from, verbs);
// THEN
await sippUac('uac-gather-account-creds-success.xml', '172.38.0.10', from);
let obj = await getJSON(`http://127.0.0.1:3100/lastRequest/${from}_actionHook`);
//console.log(JSON.stringify(obj));
t.ok(obj.body.speech.alternatives[0].transcript.toLowerCase().includes('like to speak to customer support'),
'transcribe: succeeds when using deepgram credentials');
disconnect();
} catch (err) {
console.log(`error received: ${err}`);

View File

@@ -6,31 +6,40 @@ const bent = require('bent');
 * These helpers let a test case register the desired jambonz JSON response for an application call.
 * When an incoming call's From number matches a registered mapping, the registered jambonz JSON is returned.
*/
const provisionCallHook = (from, verbs) => {
const provisionCallHook = async (from, verbs) => {
const mapping = {
from,
data: JSON.stringify(verbs)
};
const post = bent('http://127.0.0.1:3100', 'POST', 'string', 200);
post('/appMapping', mapping);
await post('/appMapping', mapping);
}
const provisionCustomHook = (from, verbs) => {
const provisionCustomHook = async(from, verbs) => {
const mapping = {
from,
data: JSON.stringify(verbs)
};
const post = bent('http://127.0.0.1:3100', 'POST', 'string', 200);
post(`/customHookMapping`, mapping);
await post(`/customHookMapping`, mapping);
}
const provisionActionHook = (from, verbs) => {
const provisionActionHook = async(from, verbs) => {
const mapping = {
from,
data: JSON.stringify(verbs)
};
const post = bent('http://127.0.0.1:3100', 'POST', 'string', 200);
post(`/actionHook`, mapping);
await post(`/actionHook`, mapping);
}
module.exports = { provisionCallHook, provisionCustomHook, provisionActionHook}
const provisionAnyHook = async(key, verbs) => {
const mapping = {
key,
data: JSON.stringify(verbs)
};
const post = bent('http://127.0.0.1:3100', 'POST', 'string', 200);
await post(`/anyHookMapping`, mapping);
}
module.exports = { provisionCallHook, provisionCustomHook, provisionActionHook, provisionAnyHook}
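A minimal usage sketch of these helpers, assuming the mock hook server in this test suite is listening on 127.0.0.1:3100 (the From value and verb list below are illustrative only):

const { provisionCallHook, provisionAnyHook } = require('./utils');

(async() => {
  const from = 'example_from';
  // Verbs the mock server should return when a call arrives with this From value
  await provisionCallHook(from, [
    {verb: 'say', text: 'hello'},
    {verb: 'hangup'}
  ]);
  // Verbs returned for an arbitrary hook path, keyed by the :key segment of /anyHook/:key
  await provisionAnyHook(from, [
    {verb: 'leave'}
  ]);
})();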

View File

@@ -2,6 +2,7 @@ const express = require('express');
const app = express();
const Websocket = require('ws');
const listenPort = process.env.HTTP_PORT || 3000;
const any_hook_json_mapping = new Map();
let json_mapping = new Map();
let hook_mapping = new Map();
let ws_packet_count = new Map();
@@ -61,7 +62,7 @@ app.all('/', (req, res) => {
console.log(req.body, 'POST /');
const key = req.body.from
addRequestToMap(key, req, hook_mapping);
return getJsonFromMap(key, req, res);
return getJsonFromMap(json_mapping, key, req, res);
});
app.post('/appMapping', (req, res) => {
@@ -106,7 +107,7 @@ app.post('/actionHook', (req, res) => {
app.all('/customHook', (req, res) => {
let key = `${req.body.from}_customHook`;
console.log(req.body, `POST /customHook`);
return getJsonFromMap(key, req, res);
return getJsonFromMap(json_mapping, key, req, res);
});
app.post('/customHookMapping', (req, res) => {
@@ -116,6 +117,23 @@ app.post('/customHookMapping', (req, res) => {
return res.sendStatus(200);
});
/**
* Any Hook
*/
app.all('/anyHook/:key', (req, res) => {
let key = req.params.key;
console.log(req.body, `POST /anyHook/${key}`);
return getJsonFromMap(any_hook_json_mapping, key, req, res);
});
app.post('/anyHookMapping', (req, res) => {
let key = req.body.key;
console.log(req.body, `POST /anyHookMapping/${key}`);
any_hook_json_mapping.set(key, req.body.data);
return res.sendStatus(200);
});
// Fetch Requests
app.get('/requests/:key', (req, res) => {
let key = req.params.key;
@@ -162,9 +180,9 @@ app.get('/ws_metadata/:key', (req, res) => {
* private function
*/
function getJsonFromMap(key, req, res) {
if (!json_mapping.has(key)) return res.sendStatus(404);
const retData = JSON.parse(json_mapping.get(key));
function getJsonFromMap(map, key, req, res) {
if (!map.has(key)) return res.sendStatus(404);
const retData = JSON.parse(map.get(key));
console.log(retData, ` Response to ${req.method} ${req.url}`);
addRequestToMap(key, req, hook_mapping);
return res.json(retData);

View File

@@ -45,7 +45,7 @@ test('basic webhook tests', async(t) => {
];
const from = 'sip_decline_test_success';
provisionCallHook(from, verbs)
await provisionCallHook(from, verbs)
await sippUac('uac-expect-603.xml', '172.38.0.10', from);
t.pass('webhook successfully declines call');
@@ -73,7 +73,7 @@ test('invalid jambonz json create alert tests', async(t) => {
};
const from = 'invalid_json_create_alert';
provisionCallHook(from, verbs)
await provisionCallHook(from, verbs)
// THEN
await sippUac('uac-invite-expect-480.xml', '172.38.0.10', from);