Compare commits


25 Commits

Author SHA1 Message Date
Quan HL
ae62244904 fix jambones-sql.sql missing FOREIGN_KEY_CHECKS 2023-09-26 06:56:48 +07:00
Hoan Luu Huu
f4d6fd14b8 allow sip port is null (#232)
* allow sip port is null

* update upgrade script

* fix review comment
2023-09-25 19:54:43 -04:00
Anton Voylenko
b190334839 validate recording auth (#235) 2023-09-24 08:19:13 -04:00
Hoan Luu Huu
209a58ff51 add try catch for mp3 encoder (#234) 2023-09-20 22:31:17 -04:00
Dave Horton
f8720bab9f update to jambonz.cloud for saas 2023-09-20 20:56:18 -04:00
Dave Horton
77363d54d1 #230 - support for option to pad crypto on outdial using srtp (#231) 2023-09-15 13:34:03 -04:00
Markus Frindt
ad483ba0b7 Add try catch to getUpload to catch init errors with invalid credentials (#229)
* add try catch to getUpload to catch init errors with invalid credentials

* properly handle errors occured while streaming

---------

Co-authored-by: Markus Frindt <m.frindt@cognigy.com>
2023-09-13 07:52:28 -04:00
Anton Voylenko
02c9a951d4 S3 compatible storage (#228)
* compatible credential test

* support s3 compatible storages

* fix typo

* change logging

* add missing option
2023-09-12 12:25:06 -04:00
EgleH
d5f5e3a86f Filter phone numbers result (#227)
Co-authored-by: eglehelms <e.helms@cognigy.com>
Co-authored-by: Hoan Luu Huu <110280845+xquanluu@users.noreply.github.com>
2023-08-31 12:04:16 -04:00
Hoan Luu Huu
62cea3a9e9 update LCC transcribe status (#225) 2023-08-30 22:54:56 -04:00
Hoan Luu Huu
6d3bfd527e feat azure fromhost (#214)
* feat azure fromhost

* wip

* wip

* wip
2023-08-30 21:06:03 -04:00
Hoan Luu Huu
9002bacf8f fix account level get phone number (#217)
* fix account level get phone number

* fix account level get phone number
2023-08-30 09:24:29 -04:00
Hoan Luu Huu
92473454d6 support delete record (#224)
* support delete record

* wip

* wip
2023-08-23 12:51:49 -04:00
Hoan Luu Huu
1c2280af88 fix fallback init sql (#223) 2023-08-22 19:28:43 -04:00
Hoan Luu Huu
7d16bdd774 feat fallback speech vendors (#220)
* feat fallback speech vendors

* wip

* update verb specification
2023-08-22 09:22:39 -04:00
Hoan Luu Huu
79e1bc8d12 support moh (#219) 2023-08-22 08:05:09 -04:00
Hoan Luu Huu
9d24ef6238 Support azure storage (#221)
* azure storage

* azure uploader

* azure uploader

* azure uploader

* fix
2023-08-22 07:50:30 -04:00
Dave Horton
042ad9f629 update to jambonz.cloud 2023-08-18 08:41:17 -04:00
Hoan Luu Huu
7351f0ad68 feat support multi speech credential with diff labels and same vendor (#218)
* feat support multi speech credential with diff labels and same vendor

* fix review comment

* wip

* fix review comments

* update verb spec version
2023-08-15 08:53:16 -04:00
Dave Horton
de7b74f898 fix exception when receiving webhook with no type (#213) 2023-08-03 19:34:38 -04:00
Hoan Luu Huu
d361f1aeb1 fix record all call does not work on wav format (#211)
* fix #210

* fix throw error without new

* fix throw error without new
2023-08-01 07:53:58 -04:00
Hoan Luu Huu
f3d002cfca fix record format (#210)
* fix record format

* fix assert require

* fix assert require
2023-07-30 22:42:38 -04:00
Hoan Luu Huu
3121c2a197 fix hosted app, register by email (#196)
* fix hosted app, register by email

* update mailgun configuration

* update payment method when update card

* fix

* fix

* fix

* change free plan settings

* fix forgot password

* fix forgot password

* fix

* fix
2023-07-30 22:35:38 -04:00
Anton Voylenko
b7bdf300c6 fix sip request payload validation (#209) 2023-07-29 11:13:02 -04:00
Hoan Luu Huu
c96159268e feat google storage (#207)
* feat google storage

* feat google storage

* add google storage writablestream

* add google storage writablestream

* add google storage writablestream

* add metadata to google storage

* add metadata to google storage

* add metadata to google storage

* add tags to google storage

* fix

* fix

* fix

* fix
2023-07-28 12:04:40 -04:00
39 changed files with 1337 additions and 508 deletions


@@ -33,6 +33,8 @@ Configuration is provided via environment variables:
|K8S| service running as kubernetes service |no|
|K8S_FEATURE_SERVER_SERVICE_NAME| feature server name(required for K8S) |no|
|K8S_FEATURE_SERVER_SERVICE_PORT| feature server port(required for K8S) |no|
|JAMBONZ_RECORD_WS_USERNAME| recording websocket username|no|
|JAMBONZ_RECORD_WS_PASSWORD| recording websocket password|no|
#### Database dependency
A mysql database is used to store long-lived objects such as Accounts, Applications, etc. To create the database schema, use or review the scripts in the 'db' folder, particularly:

app.js

@@ -175,11 +175,22 @@ const server = app.listen(PORT);
const isValidWsKey = (hdr) => {
const username = process.env.JAMBONZ_RECORD_WS_USERNAME;
const password = process.env.JAMBONZ_RECORD_WS_PASSWORD;
const token = Buffer.from(`${username}:${password}`).toString('base64');
const arr = /^Basic (.*)$/.exec(hdr);
return arr[1] === token;
const username = process.env.JAMBONZ_RECORD_WS_USERNAME || process.env.JAMBONES_RECORD_WS_USERNAME;
const password = process.env.JAMBONZ_RECORD_WS_PASSWORD || process.env.JAMBONES_RECORD_WS_PASSWORD;
if (username && password) {
if (!hdr) {
// auth header is missing
return false;
}
const token = Buffer.from(`${username}:${password}`).toString('base64');
const arr = /^Basic (.*)$/.exec(hdr);
if (!Array.isArray(arr)) {
// malformed auth header
return false;
}
return arr[1] === token;
}
return true;
};
server.on('upgrade', (request, socket, head) => {
@@ -196,7 +207,7 @@ server.on('upgrade', (request, socket, head) => {
/* verify the api key */
if (!isValidWsKey(request.headers['authorization'])) {
logger.info(`invalid auth header: ${request.headers['authorization']}`);
logger.info(`invalid auth header: ${request.headers['authorization'] || 'authorization header missing'}`);
return socket.write('HTTP/1.1 403 Forbidden \r\n\r\n', () => socket.destroy());
}
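
The reworked isValidWsKey accepts either the JAMBONZ_ or JAMBONES_ prefix for the credential variables and only enforces authentication when both username and password are configured; a missing or malformed Authorization header is now rejected cleanly instead of throwing. A minimal client-side sketch of connecting to the recording websocket with matching credentials (the host, port and path are assumptions; only the Basic header format comes from the code above):

const WebSocket = require('ws');

const username = process.env.JAMBONZ_RECORD_WS_USERNAME;
const password = process.env.JAMBONZ_RECORD_WS_PASSWORD;
const token = Buffer.from(`${username}:${password}`).toString('base64');

// assumption: api server listening locally; the /record path is illustrative
const ws = new WebSocket('ws://localhost:3000/record', {
  headers: {Authorization: `Basic ${token}`}
});
ws.on('unexpected-response', (req, res) => console.error(`upgrade rejected: ${res.statusCode}`));
ws.on('open', () => console.log('recording socket connected'));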


@@ -334,6 +334,7 @@ last_tested DATETIME,
tts_tested_ok BOOLEAN,
stt_tested_ok BOOLEAN,
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
label VARCHAR(64),
PRIMARY KEY (speech_credential_sid)
);
@@ -436,11 +437,12 @@ CREATE TABLE sip_gateways
sip_gateway_sid CHAR(36),
ipv4 VARCHAR(128) NOT NULL COMMENT 'ip address or DNS name of the gateway. For gateways providing inbound calling service, ip address is required.',
netmask INTEGER NOT NULL DEFAULT 32,
port INTEGER NOT NULL DEFAULT 5060 COMMENT 'sip signaling port',
port INTEGER COMMENT 'sip signaling port',
inbound BOOLEAN NOT NULL COMMENT 'if true, whitelist this IP to allow inbound calls from the gateway',
outbound BOOLEAN NOT NULL COMMENT 'if true, include in least-cost routing when placing calls to the PSTN',
voip_carrier_sid CHAR(36) NOT NULL,
is_active BOOLEAN NOT NULL DEFAULT 1,
pad_crypto BOOLEAN NOT NULL DEFAULT 0,
protocol ENUM('udp','tcp','tls', 'tls/srtp') DEFAULT 'udp' COMMENT 'Outbound call protocol',
PRIMARY KEY (sip_gateway_sid)
) COMMENT='A whitelisted sip gateway used for origination/termination';
@@ -478,8 +480,18 @@ app_json TEXT,
speech_synthesis_vendor VARCHAR(64) NOT NULL DEFAULT 'google',
speech_synthesis_language VARCHAR(12) NOT NULL DEFAULT 'en-US',
speech_synthesis_voice VARCHAR(64),
speech_synthesis_label VARCHAR(64),
speech_recognizer_vendor VARCHAR(64) NOT NULL DEFAULT 'google',
speech_recognizer_language VARCHAR(64) NOT NULL DEFAULT 'en-US',
speech_recognizer_label VARCHAR(64),
use_for_fallback_speech BOOLEAN DEFAULT false,
fallback_speech_synthesis_vendor VARCHAR(64),
fallback_speech_synthesis_language VARCHAR(12),
fallback_speech_synthesis_voice VARCHAR(64),
fallback_speech_synthesis_label VARCHAR(64),
fallback_speech_recognizer_vendor VARCHAR(64),
fallback_speech_recognizer_language VARCHAR(64),
fallback_speech_recognizer_label VARCHAR(64),
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
record_all_calls BOOLEAN NOT NULL DEFAULT false,
PRIMARY KEY (application_sid)
@@ -609,8 +621,6 @@ CREATE INDEX smpp_address_sid_idx ON smpp_addresses (smpp_address_sid);
CREATE INDEX service_provider_sid_idx ON smpp_addresses (service_provider_sid);
ALTER TABLE smpp_addresses ADD FOREIGN KEY service_provider_sid_idxfk_4 (service_provider_sid) REFERENCES service_providers (service_provider_sid);
CREATE UNIQUE INDEX speech_credentials_idx_1 ON speech_credentials (vendor,account_sid);
CREATE INDEX speech_credential_sid_idx ON speech_credentials (speech_credential_sid);
CREATE INDEX service_provider_sid_idx ON speech_credentials (service_provider_sid);
ALTER TABLE speech_credentials ADD FOREIGN KEY service_provider_sid_idxfk_5 (service_provider_sid) REFERENCES service_providers (service_provider_sid);


@@ -884,7 +884,7 @@
<y>958.00</y>
</location>
<size>
<width>368.00</width>
<width>302.00</width>
<height>280.00</height>
</size>
<zorder>14</zorder>
@@ -981,24 +981,11 @@
<notNull><![CDATA[1]]></notNull>
<uid><![CDATA[8860648C-4790-4A01-9E2E-60DC52A287FA]]></uid>
</SQLField>
<SQLIndex>
<name><![CDATA[speech_credentials_idx_1]]></name>
<fieldName><![CDATA[vendor]]></fieldName>
<fieldName><![CDATA[account_sid]]></fieldName>
<SQLIndexEntry>
<name><![CDATA[vendor]]></name>
<prefixSize><![CDATA[]]></prefixSize>
<fieldUid><![CDATA[9D8FCF55-D68E-44D3-90DF-27B5ABD1D0BE]]></fieldUid>
</SQLIndexEntry>
<SQLIndexEntry>
<name><![CDATA[account_sid]]></name>
<prefixSize><![CDATA[]]></prefixSize>
<fieldUid><![CDATA[7E964ED2-EC2E-4BCB-8DEC-C455B87FAC07]]></fieldUid>
</SQLIndexEntry>
<indexNamePrefix><![CDATA[speech_credentials]]></indexNamePrefix>
<indexType><![CDATA[UNIQUE]]></indexType>
<uid><![CDATA[554ABEC2-3E1B-41B1-BF07-25F403D5E3B4]]></uid>
</SQLIndex>
<SQLField>
<name><![CDATA[label]]></name>
<type><![CDATA[VARCHAR(64)]]></type>
<uid><![CDATA[0D42A22C-DF14-42A1-BDE2-A53AC8B0D8D6]]></uid>
</SQLField>
<labelWindowIndex><![CDATA[21]]></labelWindowIndex>
<ui.treeExpanded><![CDATA[1]]></ui.treeExpanded>
<uid><![CDATA[49A68E1C-DEE2-446C-A4EB-9850E16155CC]]></uid>
@@ -2031,8 +2018,8 @@
<name><![CDATA[dns_records]]></name>
<schema><![CDATA[]]></schema>
<location>
<x>947.00</x>
<y>1303.00</y>
<x>1270.00</x>
<y>1425.00</y>
</location>
<size>
<width>262.00</width>
@@ -2197,8 +2184,8 @@
<name><![CDATA[clients]]></name>
<schema><![CDATA[]]></schema>
<location>
<x>916.00</x>
<y>1447.00</y>
<x>974.00</x>
<y>1496.00</y>
</location>
<size>
<width>228.00</width>
@@ -2261,7 +2248,7 @@
</location>
<size>
<width>281.00</width>
<height>220.00</height>
<height>240.00</height>
</size>
<zorder>7</zorder>
<SQLField>
@@ -2287,8 +2274,7 @@
<SQLField>
<name><![CDATA[port]]></name>
<type><![CDATA[INTEGER]]></type>
<defaultValue><![CDATA[5060]]></defaultValue>
<notNull><![CDATA[1]]></notNull>
<notNull><![CDATA[0]]></notNull>
<objectComment><![CDATA[sip signaling port]]></objectComment>
<uid><![CDATA[26B20F1E-4DB0-48C0-90F7-CA90A06A1070]]></uid>
</SQLField>
@@ -2329,6 +2315,13 @@
<notNull><![CDATA[1]]></notNull>
<uid><![CDATA[27D4A5BD-8093-4ADD-B5B5-D546844206F9]]></uid>
</SQLField>
<SQLField>
<name><![CDATA[pad_crypto]]></name>
<type><![CDATA[BOOLEAN]]></type>
<defaultValue><![CDATA[0]]></defaultValue>
<notNull><![CDATA[1]]></notNull>
<uid><![CDATA[C5C0043B-100A-4476-BF01-BE0777AE27C0]]></uid>
</SQLField>
<SQLIndex>
<name><![CDATA[sip_gateway_idx_hostport]]></name>
<fieldName><![CDATA[ipv4]]></fieldName>
@@ -2368,7 +2361,7 @@
</location>
<size>
<width>345.00</width>
<height>340.00</height>
<height>540.00</height>
</size>
<zorder>0</zorder>
<SQLField>
@@ -2487,6 +2480,12 @@
<notNull><![CDATA[0]]></notNull>
<uid><![CDATA[929D66F0-64B9-4D7C-AB4B-24F131E1178F]]></uid>
</SQLField>
<SQLField>
<name><![CDATA[speech_synthesis_label]]></name>
<type><![CDATA[VARCHAR(64)]]></type>
<notNull><![CDATA[0]]></notNull>
<uid><![CDATA[BFA24DF2-9CF5-47B0-848D-8B685B7C6750]]></uid>
</SQLField>
<SQLField>
<name><![CDATA[speech_recognizer_vendor]]></name>
<type><![CDATA[VARCHAR(64)]]></type>
@@ -2501,10 +2500,65 @@
<notNull><![CDATA[1]]></notNull>
<uid><![CDATA[A03AFB7B-492F-48E3-AE3C-B1416D5B6B12]]></uid>
</SQLField>
<SQLField>
<name><![CDATA[speech_recognizer_label]]></name>
<type><![CDATA[VARCHAR(64)]]></type>
<notNull><![CDATA[0]]></notNull>
<uid><![CDATA[A247A784-CCD6-40B4-9D0A-2F0EF8F8AFD2]]></uid>
</SQLField>
<SQLField>
<name><![CDATA[use_for_fallback_speech]]></name>
<type><![CDATA[BOOLEAN]]></type>
<defaultValue><![CDATA[false]]></defaultValue>
<uid><![CDATA[DDA48DD6-4B0F-4AD5-9B32-D508BBA1A8EE]]></uid>
</SQLField>
<SQLField>
<name><![CDATA[fallback_speech_synthesis_vendor]]></name>
<type><![CDATA[VARCHAR(64)]]></type>
<notNull><![CDATA[0]]></notNull>
<uid><![CDATA[26BBDEEF-E179-4280-9917-6F2BD6367459]]></uid>
</SQLField>
<SQLField>
<name><![CDATA[fallback_speech_synthesis_language]]></name>
<type><![CDATA[VARCHAR(12)]]></type>
<notNull><![CDATA[0]]></notNull>
<uid><![CDATA[E008D6D7-9BB7-4372-8B46-F92C0EB15082]]></uid>
</SQLField>
<SQLField>
<name><![CDATA[fallback_speech_synthesis_voice]]></name>
<type><![CDATA[VARCHAR(64)]]></type>
<notNull><![CDATA[0]]></notNull>
<uid><![CDATA[6A0E92C9-32B9-4179-A893-3DADF5DD7728]]></uid>
</SQLField>
<SQLField>
<name><![CDATA[fallback_speech_synthesis_label]]></name>
<type><![CDATA[VARCHAR(64)]]></type>
<notNull><![CDATA[0]]></notNull>
<uid><![CDATA[8576DEF6-D81A-4D4D-8980-00580779D164]]></uid>
</SQLField>
<SQLField>
<name><![CDATA[fallback_speech_recognizer_vendor]]></name>
<type><![CDATA[VARCHAR(64)]]></type>
<notNull><![CDATA[0]]></notNull>
<uid><![CDATA[14ECF5EA-81C5-4EAE-9575-9785CEB672E6]]></uid>
</SQLField>
<SQLField>
<name><![CDATA[fallback_speech_recognizer_language]]></name>
<type><![CDATA[VARCHAR(64)]]></type>
<notNull><![CDATA[0]]></notNull>
<uid><![CDATA[EC792500-6B2B-4E54-AA89-43E7A0FD8642]]></uid>
</SQLField>
<SQLField>
<name><![CDATA[fallback_speech_recognizer_label]]></name>
<type><![CDATA[VARCHAR(64)]]></type>
<notNull><![CDATA[0]]></notNull>
<uid><![CDATA[65AA5173-6523-49F7-9D95-78C4B3A7C7E6]]></uid>
</SQLField>
<SQLField>
<name><![CDATA[created_at]]></name>
<type><![CDATA[DATETIME]]></type>
<defaultValue><![CDATA[CURRENT_TIMESTAMP]]></defaultValue>
<forcedUnique><![CDATA[0]]></forcedUnique>
<noQuoteDefault><![CDATA[1]]></noQuoteDefault>
<notNull><![CDATA[1]]></notNull>
<uid><![CDATA[C09B1BDB-8390-4B8A-B70A-642EC5E12899]]></uid>
@@ -2959,7 +3013,7 @@
<overviewPanelHidden><![CDATA[0]]></overviewPanelHidden>
<pageBoundariesVisible><![CDATA[0]]></pageBoundariesVisible>
<PageGridVisible><![CDATA[0]]></PageGridVisible>
<RightSidebarWidth><![CDATA[1681.000000]]></RightSidebarWidth>
<RightSidebarWidth><![CDATA[1235.000000]]></RightSidebarWidth>
<sidebarIndex><![CDATA[2]]></sidebarIndex>
<snapToGrid><![CDATA[0]]></snapToGrid>
<SourceSidebarWidth><![CDATA[0.000000]]></SourceSidebarWidth>
@@ -2968,7 +3022,7 @@
<windowHeight><![CDATA[1055.000000]]></windowHeight>
<windowLocationX><![CDATA[0.000000]]></windowLocationX>
<windowLocationY><![CDATA[24.000000]]></windowLocationY>
<windowScrollOrigin><![CDATA[{157, 832}]]></windowScrollOrigin>
<windowScrollOrigin><![CDATA[{90, 0}]]></windowScrollOrigin>
<windowWidth><![CDATA[1682.000000]]></windowWidth>
</SQLDocumentInfo>
<AllowsIndexRenamingOnInsert><![CDATA[1]]></AllowsIndexRenamingOnInsert>


@@ -22,7 +22,7 @@ values ('3f35518f-5a0d-4c2e-90a5-2407bb3b36f0', '38700987-c7a4-4685-a5bb-af378f9
-- create one service provider and one account
insert into service_providers (service_provider_sid, name, root_domain)
values ('2708b1b3-2736-40ea-b502-c53d8396247f', 'default service provider', 'sip.jambonz.us');
values ('2708b1b3-2736-40ea-b502-c53d8396247f', 'default service provider', 'sip.jambonz.cloud');
insert into accounts (account_sid, service_provider_sid, name, webhook_secret)
values ('9351f46a-678c-43f5-b8a6-d4eb58d131af','2708b1b3-2736-40ea-b502-c53d8396247f', 'default account', 'wh_secret_cJqgtMDPzDhhnjmaJH6Mtk');
@@ -38,9 +38,9 @@ values ('3f35518f-5a0d-4c2e-90a5-2407bb3b36fs', '38700987-c7a4-4685-a5bb-af378f9
-- create two applications
insert into webhooks(webhook_sid, url, method)
values
('84e3db00-b172-4e46-b54b-a503fdb19e4a', 'https://public-apps.jambonz.us/call-status', 'POST'),
('d31568d0-b193-4a05-8ff6-778369bc6efe', 'https://public-apps.jambonz.us/hello-world', 'POST'),
('81844b05-714d-4295-8bf3-3b0640a4bf02', 'https://public-apps.jambonz.us/dial-time', 'POST');
('84e3db00-b172-4e46-b54b-a503fdb19e4a', 'https://public-apps.jambonz.cloud/call-status', 'POST'),
('d31568d0-b193-4a05-8ff6-778369bc6efe', 'https://public-apps.jambonz.cloud/hello-world', 'POST'),
('81844b05-714d-4295-8bf3-3b0640a4bf02', 'https://public-apps.jambonz.cloud/dial-time', 'POST');
insert into applications (application_sid, account_sid, name, call_hook_sid, call_status_hook_sid, speech_synthesis_vendor, speech_synthesis_language, speech_synthesis_voice, speech_recognizer_vendor, speech_recognizer_language)
VALUES


@@ -24,10 +24,9 @@ values ('09e92f3c-9d73-4303-b63f-3668574862ce', '1cf2f4f4-64c4-4249-9a3e-5bb4cb5
-- create two applications
insert into webhooks(webhook_sid, url, method)
values
('84e3db00-b172-4e46-b54b-a503fdb19e4a', 'https://public-apps.jambonz.us/call-status', 'POST'),
('d31568d0-b193-4a05-8ff6-778369bc6efe', 'https://public-apps.jambonz.us/hello-world', 'POST'),
('81844b05-714d-4295-8bf3-3b0640a4bf02', 'https://public-apps.jambonz.us/dial-time', 'POST');
('84e3db00-b172-4e46-b54b-a503fdb19e4a', 'https://public-apps.jambonz.cloud/call-status', 'POST'),
('d31568d0-b193-4a05-8ff6-778369bc6efe', 'https://public-apps.jambonz.cloud/hello-world', 'POST'),
('81844b05-714d-4295-8bf3-3b0640a4bf02', 'https://public-apps.jambonz.cloud/dial-time', 'POST');
insert into applications (application_sid, account_sid, name, call_hook_sid, call_status_hook_sid, speech_synthesis_vendor, speech_synthesis_language, speech_synthesis_voice, speech_recognizer_vendor, speech_recognizer_language)
VALUES
('7087fe50-8acb-4f3b-b820-97b573723aab', '9351f46a-678c-43f5-b8a6-d4eb58d131af', 'hello world', 'd31568d0-b193-4a05-8ff6-778369bc6efe', '84e3db00-b172-4e46-b54b-a503fdb19e4a', 'google', 'en-US', 'en-US-Wavenet-C', 'google', 'en-US'),


@@ -159,6 +159,22 @@ const sql = {
'CREATE INDEX client_sid_idx ON clients (client_sid)',
'ALTER TABLE clients ADD CONSTRAINT account_sid_idxfk_13 FOREIGN KEY account_sid_idxfk_13 (account_sid) REFERENCES accounts (account_sid)',
'ALTER TABLE sip_gateways ADD COLUMN protocol ENUM(\'udp\',\'tcp\',\'tls\', \'tls/srtp\') DEFAULT \'udp\''
],
8005: [
'DROP INDEX speech_credentials_idx_1 ON speech_credentials',
'ALTER TABLE speech_credentials ADD COLUMN label VARCHAR(64)',
'ALTER TABLE applications ADD COLUMN speech_synthesis_label VARCHAR(64)',
'ALTER TABLE applications ADD COLUMN speech_recognizer_label VARCHAR(64)',
'ALTER TABLE applications ADD COLUMN use_for_fallback_speech BOOLEAN DEFAULT false',
'ALTER TABLE applications ADD COLUMN fallback_speech_synthesis_vendor VARCHAR(64)',
'ALTER TABLE applications ADD COLUMN fallback_speech_synthesis_language VARCHAR(12)',
'ALTER TABLE applications ADD COLUMN fallback_speech_synthesis_voice VARCHAR(64)',
'ALTER TABLE applications ADD COLUMN fallback_speech_synthesis_label VARCHAR(64)',
'ALTER TABLE applications ADD COLUMN fallback_speech_recognizer_vendor VARCHAR(64)',
'ALTER TABLE applications ADD COLUMN fallback_speech_recognizer_language VARCHAR(64)',
'ALTER TABLE applications ADD COLUMN fallback_speech_recognizer_label VARCHAR(64)',
'ALTER TABLE sip_gateways ADD COLUMN pad_crypto BOOLEAN NOT NULL DEFAULT 0',
'ALTER TABLE sip_gateways MODIFY port INTEGER'
]
};
@@ -190,6 +206,7 @@ const doIt = async() => {
if (val < 8000) upgrades.push(...sql['8000']);
if (val < 8003) upgrades.push(...sql['8003']);
if (val < 8004) upgrades.push(...sql['8004']);
if (val < 8005) upgrades.push(...sql['8005']);
// perform all upgrades
logger.info({upgrades}, 'applying schema upgrades..');


@@ -2,7 +2,7 @@ SET FOREIGN_KEY_CHECKS=0;
-- create one service provider
insert into service_providers (service_provider_sid, name, description, root_domain)
values ('2708b1b3-2736-40ea-b502-c53d8396247f', 'jambonz.us', 'jambonz.us service provider', 'sip.yakeeda.com');
values ('2708b1b3-2736-40ea-b502-c53d8396247f', 'jambonz.cloud', 'jambonz.cloud service provider', 'sip.yakeeda.com');
insert into api_keys (api_key_sid, token)
values ('3f35518f-5a0d-4c2e-90a5-2407bb3b36f0', '38700987-c7a4-4685-a5bb-af378f9734de');
@@ -19,8 +19,8 @@ insert into sip_gateways (sip_gateway_sid, voip_carrier_sid, ipv4, port, inbound
values ('46b727eb-c7dc-44fa-b063-96e48d408e4a', '5145b436-2f38-4029-8d4c-fd8c67831c7a', '3.3.3.3', 5060, 1, 1, 1);
-- create the test application and test phone number
insert into webhooks (webhook_sid, url, method) values ('d9c205c6-a129-443e-a9c0-d1bb437d4bb7', 'https://flows.jambonz.us/testCall', 'POST');
insert into webhooks (webhook_sid, url, method) values ('6ac36aeb-6bd0-428a-80a1-aed95640a296', 'https://flows.jambonz.us/callStatus', 'POST');
insert into webhooks (webhook_sid, url, method) values ('d9c205c6-a129-443e-a9c0-d1bb437d4bb7', 'https://flows.jambonz.cloud/testCall', 'POST');
insert into webhooks (webhook_sid, url, method) values ('6ac36aeb-6bd0-428a-80a1-aed95640a296', 'https://flows.jambonz.cloud/callStatus', 'POST');
insert into applications (application_sid, name, service_provider_sid, call_hook_sid, call_status_hook_sid,
speech_synthesis_vendor, speech_synthesis_language, speech_synthesis_voice, speech_recognizer_vendor, speech_recognizer_language)
values ('7a489343-02ed-471e-8df0-fc5e1b98ce8f', 'Test application', '2708b1b3-2736-40ea-b502-c53d8396247f',


@@ -34,7 +34,7 @@ AND effective_end_date IS NULL
AND pending=0`;
const updatePaymentInfoSql = `UPDATE account_subscriptions
SET last4 = ?, exp_month = ?, exp_year = ?, card_type = ?
SET last4 = ?, stripe_payment_method_id=?, exp_month = ?, exp_year = ?, card_type = ?
WHERE account_sid = ?
AND effective_end_date IS NULL`;
@@ -206,10 +206,10 @@ class Account extends Model {
}
static async updatePaymentInfo(logger, account_sid, pm) {
const {card} = pm;
const {id, card} = pm;
const last4_encrypted = encrypt(card.last4);
await promisePool.execute(updatePaymentInfoSql,
[last4_encrypted, card.exp_month, card.exp_year, card.brand, account_sid]);
[last4_encrypted, id, card.exp_month, card.exp_year, card.brand, account_sid]);
}
static async provisionPendingSubscription(logger, account_sid, products, payment_method, subscription_id) {


@@ -16,7 +16,7 @@ class PhoneNumber extends Model {
}
static async retrieveAll(account_sid) {
if (!account_sid) return super.retrieveAll();
if (!account_sid) return await super.retrieveAll();
const [rows] = await promisePool.query(sql, account_sid);
return rows;
}


@@ -51,6 +51,10 @@ SipGateway.fields = [
name: 'is_active',
type: 'number'
},
{
name: 'pad_crypto',
type: 'number'
},
{
name: 'account_sid',
type: 'string'


@@ -20,6 +20,17 @@ class SpeechCredential extends Model {
return rows;
}
static async isAvailableVendorAndLabel(service_provider_sid, account_sid, vendor, label) {
let sql;
if (account_sid) {
sql = 'SELECT * FROM speech_credentials WHERE account_sid = ? AND vendor = ? AND label = ?';
} else {
sql = 'SELECT * FROM speech_credentials WHERE service_provider_sid = ? AND vendor = ? AND label = ?';
}
const [rows] = await promisePool.query(sql, [account_sid ? account_sid : service_provider_sid, vendor, label]);
return rows;
}
static async disableStt(account_sid) {
await promisePool.execute('UPDATE speech_credentials SET use_for_stt = 0 WHERE account_sid = ?', [account_sid]);
}
@@ -86,6 +97,10 @@ SpeechCredential.fields = [
{
name: 'last_tested',
type: 'date'
},
{
name: 'label',
type: 'string'
}
];


@@ -0,0 +1,41 @@
const { Writable } = require('stream');
const { BlobServiceClient } = require('@azure/storage-blob');
const { v4: uuidv4 } = require('uuid');
class AzureStorageUploadStream extends Writable {
constructor(logger, opts) {
super(opts);
const blobServiceClient = BlobServiceClient.fromConnectionString(opts.connection_string);
this.blockBlobClient = blobServiceClient.getContainerClient(opts.bucketName).getBlockBlobClient(opts.Key);
this.metadata = opts.metadata;
this.blocks = [];
}
async _write(chunk, encoding, callback) {
const blockID = uuidv4().replace(/-/g, '');
this.blocks.push(blockID);
try {
await this.blockBlobClient.stageBlock(blockID, chunk, chunk.length);
callback();
} catch (error) {
callback(error);
}
}
async _final(callback) {
try {
await this.blockBlobClient.commitBlockList(this.blocks);
// remove all null/undefined props
const filteredObj = Object.entries(this.metadata).reduce((acc, [key, val]) => {
if (val !== undefined && val !== null) acc[key] = val;
return acc;
}, {});
await this.blockBlobClient.setMetadata(filteredObj);
callback();
} catch (error) {
callback(error);
}
}
}
module.exports = AzureStorageUploadStream;
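
AzureStorageUploadStream stages each incoming chunk as a block and, when the stream ends, commits the block list and attaches whatever metadata properties are non-null. A usage sketch with illustrative values (in the server these come from the account's decrypted bucket_credential via getUploader):

const fs = require('fs');
const AzureStorageUploadStream = require('./azure-storage');

// stub logger for the sketch; the server passes its own logger here
const logger = {info: console.log, error: console.error, debug: console.log};

const uploadStream = new AzureStorageUploadStream(logger, {
  connection_string: process.env.AZURE_STORAGE_CONNECTION_STRING, // assumed to be set for this sketch
  bucketName: 'recordings',                                       // the Azure container name
  Key: '2023/09/26/<call_sid>.mp3',                               // illustrative object key
  metadata: {callSid: '<call_sid>', direction: 'inbound'}
});

fs.createReadStream('/tmp/sample.mp3')                            // any byte source; in the server the encoder pipes in here
  .pipe(uploadStream)
  .on('finish', () => console.log('block list committed'))
  .on('error', (err) => console.error(err));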


@@ -2,7 +2,7 @@ const { Transform } = require('stream');
const lamejs = require('@jambonz/lamejs');
class PCMToMP3Encoder extends Transform {
constructor(options) {
constructor(options, logger) {
super(options);
const channels = options.channels || 1;
@@ -11,33 +11,40 @@ class PCMToMP3Encoder extends Transform {
this.encoder = new lamejs.Mp3Encoder(channels, sampleRate, bitRate);
this.channels = channels;
this.logger = logger;
}
_transform(chunk, encoding, callback) {
// Convert chunk buffer into Int16Array for lamejs
const samples = new Int16Array(chunk.buffer, chunk.byteOffset, chunk.length / 2);
try {
// Convert chunk buffer into Int16Array for lamejs
const samples = new Int16Array(chunk.buffer, chunk.byteOffset, chunk.length / 2);
// Split input samples into left and right channel arrays if stereo
let leftChannel, rightChannel;
if (this.channels === 2) {
leftChannel = new Int16Array(samples.length / 2);
rightChannel = new Int16Array(samples.length / 2);
// Split input samples into left and right channel arrays if stereo
let leftChannel, rightChannel;
if (this.channels === 2) {
leftChannel = new Int16Array(samples.length / 2);
rightChannel = new Int16Array(samples.length / 2);
for (let i = 0; i < samples.length; i += 2) {
leftChannel[i / 2] = samples[i];
rightChannel[i / 2] = samples[i + 1];
for (let i = 0; i < samples.length; i += 2) {
leftChannel[i / 2] = samples[i];
rightChannel[i / 2] = samples[i + 1];
}
} else {
leftChannel = samples;
}
} else {
leftChannel = samples;
}
// Encode the input data
const mp3Data = this.encoder.encodeBuffer(leftChannel, rightChannel);
// Encode the input data
const mp3Data = this.encoder.encodeBuffer(leftChannel, rightChannel);
if (mp3Data.length > 0) {
this.push(Buffer.from(mp3Data));
if (mp3Data.length > 0) {
this.push(Buffer.from(mp3Data));
}
callback();
} catch (err) {
this.logger.error(
{ err },
'Error while mp3 transform');
}
callback();
}
_flush(callback) {
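
The encoder now receives a logger and wraps the transform step in try/catch so a malformed chunk is logged rather than crashing the recording pipeline. A small usage sketch, assuming 16-bit interleaved stereo PCM at 8000 Hz and mirroring how upload.js constructs the encoder:

const fs = require('fs');
const PCMToMP3Encoder = require('./encoder');

const logger = {info: console.log, error: console.error, debug: () => {}};   // stub logger for the sketch
const encoder = new PCMToMP3Encoder({channels: 2, sampleRate: 8000, bitrate: 128}, logger);

fs.createReadStream('/tmp/stereo-8k-s16le.raw')     // illustrative raw PCM capture
  .pipe(encoder)
  .pipe(fs.createWriteStream('/tmp/recording.mp3'))
  .on('finish', () => console.log('mp3 written'));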


@@ -0,0 +1,41 @@
const { Storage } = require('@google-cloud/storage');
const { Writable } = require('stream');
class GoogleStorageUploadStream extends Writable {
constructor(logger, opts) {
super(opts);
this.logger = logger;
this.metadata = opts.metadata;
const storage = new Storage(opts.bucketCredential);
this.gcsFile = storage.bucket(opts.bucketName).file(opts.Key);
this.writeStream = this.gcsFile.createWriteStream();
this.writeStream.on('error', (err) => this.logger.error(err));
this.writeStream.on('finish', () => {
this.logger.info('google storage Upload completed.');
this._addMetadata();
});
}
_write(chunk, encoding, callback) {
this.writeStream.write(chunk, encoding, callback);
}
_final(callback) {
this.writeStream.end();
this.writeStream.once('finish', callback);
}
async _addMetadata() {
try {
await this.gcsFile.setMetadata({metadata: this.metadata});
this.logger.info('Google storage Upload and metadata setting completed.');
} catch (err) {
this.logger.error(err, 'Google storage An error occurred while setting metadata');
}
}
}
module.exports = GoogleStorageUploadStream;
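
GoogleStorageUploadStream wraps a GCS createWriteStream and sets object metadata once the upload finishes. A usage sketch with illustrative credentials (in the server, bucketCredential is built from the decrypted service key by getUploader):

const GoogleStorageUploadStream = require('./google-storage');

const logger = {info: console.log, error: console.error};
const uploadStream = new GoogleStorageUploadStream(logger, {
  bucketName: 'my-recordings-bucket',                  // illustrative bucket
  Key: '2023/09/26/<call_sid>.mp3',
  metadata: {callSid: '<call_sid>', direction: 'inbound'},
  bucketCredential: {                                  // shape matches what getUploader builds from the service key
    projectId: 'my-project',
    credentials: {
      client_email: 'recorder@my-project.iam.gserviceaccount.com',
      private_key: '<PEM private key>'
    }
  }
});

process.stdin.pipe(uploadStream);                      // any readable byte stream works as the source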


@@ -1,17 +1,6 @@
const path = require('node:path');
async function record(logger, socket, url) {
const p = path.basename(url);
const idx = p.lastIndexOf('/');
const vendor = p.substring(idx + 1);
switch (vendor) {
case 'aws_s3':
return require('./s3')(logger, socket);
default:
logger.info(`unknown bucket vendor: ${vendor}`);
socket.send(`unknown bucket vendor: ${vendor}`);
socket.close();
}
async function record(logger, socket) {
return require('./upload')(logger, socket);
}
module.exports = record;


@@ -11,7 +11,7 @@ class S3MultipartUploadStream extends Writable {
super(opts);
this.logger = logger;
this.bucketName = opts.bucketName;
this.objectKey = opts.Key;
this.objectKey = opts.objectKey;
this.uploadId = null;
this.partNumber = 1;
this.multipartETags = [];


@@ -1,11 +1,10 @@
const Account = require('../models/account');
const Websocket = require('ws');
const PCMToMP3Encoder = require('./encoder');
const S3MultipartUploadStream = require('./s3-multipart-upload-stream');
const wav = require('wav');
const { getUploader } = require('./utils');
async function upload(logger, socket) {
socket._recvInitialMetadata = false;
socket.on('message', async function(data, isBinary) {
try {
@@ -13,9 +12,9 @@ async function upload(logger, socket) {
socket._recvInitialMetadata = true;
logger.debug(`initial metadata: ${data}`);
const obj = JSON.parse(data.toString());
logger.info({obj}, 'received JSON message from jambonz');
const {sampleRate, accountSid, callSid, direction, from, to,
callId, applicationSid, originatingSipIp, originatingSipTrunkName} = obj;
logger.info({ obj }, 'received JSON message from jambonz');
const { sampleRate, accountSid, callSid, direction, from, to,
callId, applicationSid, originatingSipIp, originatingSipTrunkName } = obj;
const account = await Account.retrieve(accountSid);
if (account && account.length && account[0].bucket_credential) {
const obj = account[0].bucket_credential;
@@ -39,27 +38,19 @@ async function upload(logger, socket) {
}
// create S3 path
const day = new Date();
let Key = `${day.getFullYear()}/${(day.getMonth() + 1).toString().padStart(2, '0')}`;
Key += `/${day.getDate().toString().padStart(2, '0')}/${callSid}.${account[0].record_format}`;
let key = `${day.getFullYear()}/${(day.getMonth() + 1).toString().padStart(2, '0')}`;
key += `/${day.getDate().toString().padStart(2, '0')}/${callSid}.${account[0].record_format}`;
// Uploader
const uploaderOpts = {
bucketName: obj.name,
Key,
metadata,
bucketCredential: {
credentials: {
accessKeyId: obj.access_key_id,
secretAccessKey: obj.secret_access_key,
},
region: obj.region || 'us-east-1'
}
};
const uploadStream = new S3MultipartUploadStream(logger, uploaderOpts);
const uploadStream = getUploader(key, metadata, obj, logger);
if (!uploadStream) {
logger.info('There is no available record uploader, close the socket.');
socket.close();
}
/**encoder */
let encoder;
if (obj.output_format === 'wav') {
if (account[0].record_format === 'wav') {
encoder = new wav.Writer({ channels: 2, sampleRate, bitDepth: 16 });
} else {
// default is mp3
@@ -67,28 +58,41 @@ async function upload(logger, socket) {
channels: 2,
sampleRate: sampleRate,
bitrate: 128
});
}, logger);
}
const handleError = (err, streamType) => {
logger.error(
{ err },
`Error while streaming for vendor: ${obj.vendor}, pipe: ${streamType}: ${err.message}`
);
};
/* start streaming data */
const duplex = Websocket.createWebSocketStream(socket);
duplex.pipe(encoder).pipe(uploadStream);
duplex
.on('error', (err) => handleError(err, 'duplex'))
.pipe(encoder)
.on('error', (err) => handleError(err, 'encoder'))
.pipe(uploadStream)
.on('error', (err) => handleError(err, 'uploadStream'));
} else {
logger.info(`account ${accountSid} does not have any bucket credential, close the socket`);
socket.close();
}
}
} catch (err) {
logger.error({err, data}, 'error parsing message during connection');
logger.error({ err, data }, 'error parsing message during connection');
}
});
socket.on('error', function(err) {
logger.error({err}, 'aws upload: error');
logger.error({ err }, 'record upload: error');
});
socket.on('close', (data) => {
logger.info({data}, 'aws_s3: close');
logger.info({ data }, 'record upload: close');
});
socket.on('end', function(err) {
logger.error({err}, 'aws upload: socket closed from jambonz');
logger.error({ err }, 'record upload: socket closed from jambonz');
});
}
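
upload() waits for an initial JSON frame before it looks up the account's bucket credential, builds an uploader via getUploader and starts piping audio through the wav or mp3 encoder. A sketch of what that first frame might contain, based on the fields destructured above (all values are illustrative):

const initialMetadata = {
  sampleRate: 8000,
  accountSid: '9351f46a-678c-43f5-b8a6-d4eb58d131af',   // seed account sid used elsewhere in this diff
  callSid: '<call_sid>',
  direction: 'inbound',
  from: '+15551230001',
  to: '+15551230002',
  callId: '<sip-call-id>',
  applicationSid: '<application_sid>',
  originatingSipIp: '192.0.2.10',
  originatingSipTrunkName: 'my-carrier'
};
// the first websocket frame carries JSON.stringify(initialMetadata);
// every subsequent frame carries raw audio that flows through the encoder and uploader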

lib/record/utils.js

@@ -0,0 +1,58 @@
const AzureStorageUploadStream = require('./azure-storage');
const GoogleStorageUploadStream = require('./google-storage');
const S3MultipartUploadStream = require('./s3-multipart-upload-stream');
const getUploader = (key, metadata, bucket_credential, logger) => {
const uploaderOpts = {
bucketName: bucket_credential.name,
objectKey: key,
metadata
};
try {
switch (bucket_credential.vendor) {
case 'aws_s3':
uploaderOpts.bucketCredential = {
credentials: {
accessKeyId: bucket_credential.access_key_id,
secretAccessKey: bucket_credential.secret_access_key,
},
region: bucket_credential.region || 'us-east-1'
};
return new S3MultipartUploadStream(logger, uploaderOpts);
case 's3_compatible':
uploaderOpts.bucketCredential = {
endpoint: bucket_credential.endpoint,
credentials: {
accessKeyId: bucket_credential.access_key_id,
secretAccessKey: bucket_credential.secret_access_key,
},
region: 'us-east-1',
forcePathStyle: true
};
return new S3MultipartUploadStream(logger, uploaderOpts);
case 'google':
const serviceKey = JSON.parse(bucket_credential.service_key);
uploaderOpts.bucketCredential = {
projectId: serviceKey.project_id,
credentials: {
client_email: serviceKey.client_email,
private_key: serviceKey.private_key
}
};
return new GoogleStorageUploadStream(logger, uploaderOpts);
case 'azure':
uploaderOpts.connection_string = bucket_credential.connection_string;
return new AzureStorageUploadStream(logger, uploaderOpts);
default:
logger.error(`unknown bucket vendor: ${bucket_credential.vendor}`);
break;
}
} catch (err) {
logger.error(`Error creating uploader, vendor: ${bucket_credential.vendor}, reason: ${err.message}`);
}
return null;
};
module.exports = {
getUploader
};
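
getUploader centralizes the vendor switch: it builds an S3, S3-compatible, Google or Azure upload stream from the decrypted bucket credential, and returns null when the vendor is unknown or the constructor throws (for example on invalid credentials). A usage sketch with an illustrative aws_s3 credential:

const {getUploader} = require('./utils');

const logger = {info: console.log, error: console.error};
const bucketCredential = {                      // illustrative decrypted bucket_credential
  vendor: 'aws_s3',
  name: 'my-recordings-bucket',
  access_key_id: '<AWS_ACCESS_KEY_ID>',
  secret_access_key: '<AWS_SECRET_ACCESS_KEY>',
  region: 'us-west-2'
};
const uploadStream = getUploader('2023/09/26/<call_sid>.mp3', {callSid: '<call_sid>'}, bucketCredential, logger);
if (!uploadStream) logger.error('no uploader available for this vendor');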


@@ -1,7 +1,7 @@
const router = require('express').Router();
const assert = require('assert');
const request = require('request');
const {DbErrorBadRequest, DbErrorForbidden, DbErrorUnprocessableRequest, DbError} = require('../../utils/errors');
const {DbErrorBadRequest, DbErrorForbidden, DbErrorUnprocessableRequest} = require('../../utils/errors');
const Account = require('../../models/account');
const Application = require('../../models/application');
const Webhook = require('../../models/webhook');
@@ -23,8 +23,8 @@ const {
} = require('./utils');
const short = require('short-uuid');
const VoipCarrier = require('../../models/voip-carrier');
const { encrypt, decrypt } = require('../../utils/encrypt-decrypt');
const { testAwsS3 } = require('../../utils/storage-utils');
const { encrypt } = require('../../utils/encrypt-decrypt');
const { testS3Storage, testGoogleStorage, testAzureStorage } = require('../../utils/storage-utils');
const translator = short();
let idx = 0;
@@ -176,6 +176,7 @@ function validateUpdateCall(opts) {
'child_call_hook',
'call_status',
'listen_status',
'transcribe_status',
'conf_hold_status',
'conf_mute_status',
'mute_status',
@@ -216,8 +217,8 @@ function validateUpdateCall(opts) {
throw new DbErrorBadRequest('invalid conf_mute_status');
}
if (opts.sip_request &&
(!opts.sip_request.method && !opts.sip_request.content_type || !opts.sip_request.content_type)) {
throw new DbErrorBadRequest('sip_request requires content_type and content properties');
(!opts.sip_request.method || !opts.sip_request.content_type || !opts.sip_request.content)) {
throw new DbErrorBadRequest('sip_request requires method, content_type and content properties');
}
if (opts.record && !opts.record.action) {
throw new DbErrorBadRequest('record requires action property');
@@ -541,7 +542,10 @@ function encryptBucketCredential(obj) {
name,
access_key_id,
secret_access_key,
tags
tags,
service_key,
connection_string,
endpoint
} = obj.bucket_credential;
switch (vendor) {
@@ -554,11 +558,31 @@ function encryptBucketCredential(obj) {
secret_access_key, tags});
obj.bucket_credential = encrypt(awsData);
break;
case 's3_compatible':
assert(access_key_id, 'invalid aws S3 bucket credential: access_key_id is required');
assert(secret_access_key, 'invalid aws S3 bucket credential: secret_access_key is required');
assert(name, 'invalid aws bucket name: name is required');
assert(endpoint, 'invalid endpoint uri: endpoint is required');
const s3Data = JSON.stringify({vendor, endpoint, name, access_key_id,
secret_access_key, tags});
obj.bucket_credential = encrypt(s3Data);
break;
case 'google':
assert(service_key, 'invalid google cloud storage credential: service_key is required');
const googleData = JSON.stringify({vendor, name, service_key, tags});
obj.bucket_credential = encrypt(googleData);
break;
case 'azure':
assert(name, 'invalid azure container name: name is required');
assert(connection_string, 'invalid azure cloud storage credential: connection_string is required');
const azureData = JSON.stringify({vendor, name, connection_string, tags});
obj.bucket_credential = encrypt(azureData);
break;
case 'none':
obj.bucket_credential = null;
break;
default:
throw DbErrorBadRequest(`unknow storage vendor: ${vendor}`);
throw new DbErrorBadRequest(`unknown storage vendor: ${vendor}`);
}
}
@@ -708,33 +732,26 @@ router.post('/:sid/BucketCredentialTest', async(req, res) => {
try {
const account_sid = parseAccountSid(req);
await validateRequest(req, account_sid);
let {vendor, name, region, access_key_id, secret_access_key} = req.body;
const {vendor, name, region, access_key_id, secret_access_key, service_key, connection_string, endpoint} = req.body;
const ret = {
status: 'not tested'
};
if (secret_access_key.endsWith('XXXXXX')) {
// this is when the password already saved in account
const service_provider_sid = req.user.hasServiceProviderAuth ? req.user.service_provider_sid : null;
const results = await Account.retrieve(account_sid, service_provider_sid);
if (results.length === 0) throw new DbError('Invalid Account Sid');
const {bucket_credential} = results[0];
if (bucket_credential) {
const o = JSON.parse(decrypt(bucket_credential));
vendor = o.vendor;
switch (vendor) {
case 'aws_s3':
name = o.name;
region = o.region;
access_key_id = o.access_key_id;
secret_access_key = o.secret_access_key;
break;
}
}
}
switch (vendor) {
case 'aws_s3':
await testAwsS3(logger, {vendor, name, region, access_key_id, secret_access_key});
await testS3Storage(logger, {vendor, name, region, access_key_id, secret_access_key});
ret.status = 'ok';
break;
case 's3_compatible':
await testS3Storage(logger, {vendor, name, endpoint, access_key_id, secret_access_key});
ret.status = 'ok';
break;
case 'google':
await testGoogleStorage(logger, {vendor, name, service_key});
ret.status = 'ok';
break;
case 'azure':
await testAzureStorage(logger, {vendor, name, connection_string});
ret.status = 'ok';
break;
default:
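
The BucketCredentialTest route now accepts s3_compatible, google and azure vendors in addition to aws_s3, running a small test upload against the supplied credential. A sketch of calling it for an S3-compatible store (the base URL, /v1 prefix and Bearer token are assumptions; the body fields follow the destructuring above):

const baseUrl = 'http://localhost:3000/v1';                     // assumption: local api server
const accountSid = '9351f46a-678c-43f5-b8a6-d4eb58d131af';      // seed account from this diff
const apiToken = '38700987-c7a4-4685-a5bb-af378f9734de';        // seed api key from this diff

async function testBucketCredential() {
  const res = await fetch(`${baseUrl}/Accounts/${accountSid}/BucketCredentialTest`, {
    method: 'POST',
    headers: {Authorization: `Bearer ${apiToken}`, 'Content-Type': 'application/json'},
    body: JSON.stringify({
      vendor: 's3_compatible',
      name: 'recordings',
      endpoint: 'https://minio.example.com',                    // illustrative S3-compatible endpoint
      access_key_id: '<ACCESS_KEY>',
      secret_access_key: '<SECRET_KEY>'
    })
  });
  console.log(await res.json());                                // status 'ok' when the test upload succeeds
}
testBucketCredential();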


@@ -6,6 +6,7 @@ const {validateEmail, emailSimpleText} = require('../../utils/email-utils');
const {promisePool} = require('../../db');
const {cacheClient} = require('../../helpers');
const sysError = require('../error');
const assert = require('assert');
const sql = `SELECT * from users user
LEFT JOIN accounts AS acc
ON acc.account_sid = user.account_sid
@@ -26,7 +27,8 @@ function createOauthEmailText(provider) {
}
function createResetEmailText(link) {
const baseUrl = 'http://localhost:3001';
assert(process.env.JAMBONZ_BASE_URL, 'process.env.JAMBONZ_BASE_URL is missing');
const baseUrl = process.env.JAMBONZ_BASE_URL;
return `Hi there!


@@ -94,9 +94,9 @@ decorate(router, PhoneNumber, ['add', 'update', 'delete'], preconditions);
router.get('/', async(req, res) => {
const logger = req.app.locals.logger;
try {
const results = req.user.hasAdminAuth ?
await PhoneNumber.retrieveAll(req.user.hasAccountAuth ? req.user.account_sid : null) :
await PhoneNumber.retrieveAllForSP(req.user.service_provider_sid);
const results = req.user.hasServiceProviderAuth ?
await PhoneNumber.retrieveAllForSP(req.user.service_provider_sid) :
await PhoneNumber.retrieveAll(req.user.hasAccountAuth ? req.user.account_sid : null);
res.status(200).json(results);
} catch (err) {
sysError(logger, res, err);
@@ -120,6 +120,9 @@ router.get('/:sid', async(req, res) => {
throw new DbErrorBadRequest('insufficient privileges');
}
}
if (req.user.hasAccountAuth && results.length > 1) {
return res.status(200).json(results.filter((r) => r.phone_number_sid === sid)[0]);
}
return res.status(200).json(results[0]);
}
catch (err) {


@@ -4,7 +4,14 @@ const {DbErrorBadRequest} = require('../../utils/errors');
const {getHomerApiKey, getHomerSipTrace, getHomerPcap} = require('../../utils/homer-utils');
const {getJaegerTrace} = require('../../utils/jaeger-utils');
const Account = require('../../models/account');
const { getS3Object } = require('../../utils/storage-utils');
const {
getS3Object,
getGoogleStorageObject,
getAzureStorageObject,
deleteS3Object,
deleteGoogleStorageObject,
deleteAzureStorageObject
} = require('../../utils/storage-utils');
const parseAccountSid = (url) => {
const arr = /Accounts\/([^\/]*)/.exec(url);
@@ -124,25 +131,75 @@ router.get('/:call_sid/record/:year/:month/:day/:format', async(req, res) => {
const r = await Account.retrieve(account_sid);
if (r.length === 0 || !r[0].bucket_credential) return res.sendStatus(404);
const {bucket_credential} = r[0];
const getOptions = {
...bucket_credential,
key: `${year}/${month}/${day}/${call_sid}.${format || 'mp3'}`
};
let stream;
switch (bucket_credential.vendor) {
case 'aws_s3':
const getS3Options = {
...bucket_credential,
key: `${year}/${month}/${day}/${call_sid}.${format || 'mp3'}`
};
const stream = await getS3Object(logger, getS3Options);
res.set({
'Content-Type': `audio/${format || 'mp3'}`
});
stream.pipe(res);
case 's3_compatible':
stream = await getS3Object(logger, getOptions);
break;
case 'google':
stream = await getGoogleStorageObject(logger, getOptions);
break;
case 'azure':
stream = await getAzureStorageObject(logger, getOptions);
break;
default:
logger.error(`There is no handler for fetching record from ${bucket_credential.vendor}`);
return res.sendStatus(500);
}
res.set({
'Content-Type': `audio/${format || 'mp3'}`
});
if (stream) {
stream.pipe(res);
} else {
return res.sendStatus(404);
}
} catch (err) {
logger.error({err}, ` error retrieving recording ${call_sid}`);
res.sendStatus(404);
}
});
router.delete('/:call_sid/record/:year/:month/:day/:format', async(req, res) => {
const {logger} = req.app.locals;
const {call_sid, year, month, day, format} = req.params;
try {
const account_sid = parseAccountSid(req.originalUrl);
const r = await Account.retrieve(account_sid);
if (r.length === 0 || !r[0].bucket_credential) return res.sendStatus(404);
const {bucket_credential} = r[0];
const deleteOptions = {
...bucket_credential,
key: `${year}/${month}/${day}/${call_sid}.${format || 'mp3'}`
};
switch (bucket_credential.vendor) {
case 'aws_s3':
case 's3_compatible':
await deleteS3Object(logger, deleteOptions);
break;
case 'google':
await deleteGoogleStorageObject(logger, deleteOptions);
break;
case 'azure':
await deleteAzureStorageObject(logger, deleteOptions);
break;
default:
logger.error(`There is no handler for deleting record from ${bucket_credential.vendor}`);
return res.sendStatus(500);
}
res.sendStatus(204);
} catch (err) {
logger.error({err}, ` error deleting recording ${call_sid}`);
res.sendStatus(404);
}
});
module.exports = router;
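
Recordings can now be fetched from, and deleted on, any of the supported storage vendors through the same route shape. A sketch of fetching and then deleting one recording (the base URL, /v1 prefix, RecentCalls mount point and Bearer token are assumptions; the /record/:year/:month/:day/:format segments follow the routes above):

const baseUrl = 'http://localhost:3000/v1';        // assumption: local api server
const recordingPath = `${baseUrl}/Accounts/<account_sid>/RecentCalls/<call_sid>/record/2023/09/26/mp3`;
const headers = {Authorization: 'Bearer <api_key_token>'};

async function fetchThenDeleteRecording() {
  const audio = await fetch(recordingPath, {headers});           // 200 with an audio/mp3 body, 404 if not found
  console.log(audio.status, audio.headers.get('content-type'));
  const del = await fetch(recordingPath, {method: 'DELETE', headers});
  console.log(del.status);                                       // 204 on successful delete
}
fetchThenDeleteRecording();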


@@ -16,8 +16,9 @@ const insertUserSql = `INSERT into users
(user_sid, account_sid, name, email, provider, provider_userid, email_validated)
values (?, ?, ?, ?, ?, ?, 1)`;
const insertUserLocalSql = `INSERT into users
(user_sid, account_sid, name, email, email_activation_code, email_validated, provider, hashed_password)
values (?, ?, ?, ?, ?, 0, 'local', ?)`;
(user_sid, account_sid, name, email, email_activation_code, email_validated, provider,
hashed_password, service_provider_sid)
values (?, ?, ?, ?, ?, 0, 'local', ?, ?)`;
const insertAccountSql = `INSERT into accounts
(account_sid, service_provider_sid, name, is_active, webhook_secret, trial_end_date)
values (?, ?, ?, ?, ?, CURDATE() + INTERVAL 21 DAY)`;
@@ -36,7 +37,7 @@ const insertSignupHistorySql = `INSERT into signup_history
values (?, ?)`;
const addLocalUser = async(logger, user_sid, account_sid,
name, email, email_activation_code, passwordHash) => {
name, email, email_activation_code, passwordHash, service_provider_sid) => {
const [r] = await promisePool.execute(insertUserLocalSql,
[
user_sid,
@@ -44,7 +45,8 @@ const addLocalUser = async(logger, user_sid, account_sid,
name,
email,
email_activation_code,
passwordHash
passwordHash,
service_provider_sid
]);
debug({r}, 'Result from adding user');
};
@@ -145,7 +147,7 @@ router.post('/', async(req, res) => {
const user = await doGithubAuth(logger, req.body);
logger.info({user}, 'retrieved user details from github');
Object.assign(userProfile, {
name: user.name,
name: user.email,
email: user.email,
email_validated: user.email_validated,
avatar_url: user.avatar_url,
@@ -157,7 +159,7 @@ router.post('/', async(req, res) => {
const user = await doGoogleAuth(logger, req.body);
logger.info({user}, 'retrieved user details from google');
Object.assign(userProfile, {
name: user.name || user.email,
name: user.email || user.email,
email: user.email,
email_validated: user.verified_email,
picture: user.picture,
@@ -170,7 +172,7 @@ router.post('/', async(req, res) => {
logger.info({user}, 'retrieved user details for local provider');
debug({user}, 'retrieved user details for local provider');
Object.assign(userProfile, {
name: user.name,
name: user.email,
email: user.email,
provider: 'local',
email_activation_code: user.email_activation_code
@@ -280,7 +282,8 @@ router.post('/', async(req, res) => {
const passwordHash = await generateHashedPassword(req.body.password);
debug(`hashed password: ${passwordHash}`);
await addLocalUser(logger, userProfile.user_sid, userProfile.account_sid,
userProfile.name, userProfile.email, userProfile.email_activation_code, passwordHash);
userProfile.name, userProfile.email, userProfile.email_activation_code,
passwordHash, req.body.service_provider_sid);
debug('added local user');
}
else {
@@ -293,17 +296,25 @@ router.post('/', async(req, res) => {
const callStatusSid = uuid();
const helloWordSid = uuid();
const dialTimeSid = uuid();
const echoSid = uuid();
/* 3 webhooks */
await promisePool.execute(insertWebookSql, [callStatusSid, 'https://public-apps.jambonz.us/call-status', 'POST']);
await promisePool.execute(insertWebookSql, [helloWordSid, 'https://public-apps.jambonz.us/hello-world', 'POST']);
await promisePool.execute(insertWebookSql, [dialTimeSid, 'https://public-apps.jambonz.us/dial-time', 'POST']);
/* 4 webhooks */
await promisePool.execute(insertWebookSql,
[callStatusSid, 'https://public-apps.jambonz.cloud/call-status', 'POST']);
await promisePool.execute(insertWebookSql,
[helloWordSid, 'https://public-apps.jambonz.cloud/hello-world', 'POST']);
await promisePool.execute(insertWebookSql,
[dialTimeSid, 'https://public-apps.jambonz.cloud/dial-time', 'POST']);
await promisePool.execute(insertWebookSql,
[echoSid, 'https://public-apps.jambonz.cloud/echo', 'POST']);
/* 2 applications */
await promisePool.execute(insertApplicationSql, [uuid(), userProfile.account_sid, 'hello world',
helloWordSid, callStatusSid, 'google', 'en-US', 'en-US-Wavenet-C', 'google', 'en-US']);
await promisePool.execute(insertApplicationSql, [uuid(), userProfile.account_sid, 'dial time clock',
dialTimeSid, callStatusSid, 'google', 'en-US', 'en-US-Wavenet-C', 'google', 'en-US']);
await promisePool.execute(insertApplicationSql, [uuid(), userProfile.account_sid, 'simple echo test',
echoSid, callStatusSid, 'google', 'en-US', 'en-US-Wavenet-C', 'google', 'en-US']);
Object.assign(userProfile, {
pristine: true,
@@ -327,7 +338,7 @@ router.post('/', async(req, res) => {
await addLocalUser(logger, userProfile.user_sid, userProfile.account_sid,
userProfile.name, userProfile.email, userProfile.email_activation_code,
passwordHash);
passwordHash, req.body.service_provider_sid);
/* note: we deactivate the old user once the new email is validated */
}
@@ -349,6 +360,8 @@ router.post('/', async(req, res) => {
const token = jwt.sign({
user_sid: userProfile.user_sid,
account_sid: userProfile.account_sid,
service_provider_sid: req.body.service_provider_sid,
scope: 'account',
email: userProfile.email,
name: userProfile.name
}, process.env.JWT_SECRET, { expiresIn });


@@ -114,8 +114,10 @@ const encryptCredential = (obj) => {
nuance_stt_uri,
use_custom_tts,
custom_tts_endpoint,
custom_tts_endpoint_url,
use_custom_stt,
custom_stt_endpoint,
custom_stt_endpoint_url,
tts_api_key,
tts_region,
stt_api_key,
@@ -147,15 +149,19 @@ const encryptCredential = (obj) => {
return encrypt(awsData);
case 'microsoft':
assert(region, 'invalid azure speech credential: region is required');
assert(api_key, 'invalid azure speech credential: api_key is required');
if (!custom_tts_endpoint_url && !custom_stt_endpoint_url) {
assert(region, 'invalid azure speech credential: region is required');
assert(api_key, 'invalid azure speech credential: api_key is required');
}
const azureData = JSON.stringify({
region,
api_key,
...(region && {region}),
...(api_key && {api_key}),
use_custom_tts,
custom_tts_endpoint,
custom_tts_endpoint_url,
use_custom_stt,
custom_stt_endpoint
custom_stt_endpoint,
custom_stt_endpoint_url
});
return encrypt(azureData);
@@ -207,6 +213,7 @@ router.post('/', async(req, res) => {
use_for_stt,
use_for_tts,
vendor,
label
} = req.body;
const account_sid = req.user.account_sid || req.body.account_sid;
const service_provider_sid = req.user.service_provider_sid ||
@@ -221,11 +228,21 @@ router.post('/', async(req, res) => {
}
}
// Check if vendor and label is already used for account or SP
if (label) {
const existingSpeech = await SpeechCredential.isAvailableVendorAndLabel(
service_provider_sid, account_sid, vendor, label);
if (existingSpeech.length > 0) {
throw new DbErrorUnprocessableRequest(`Label ${label} is already in use for another speech credential`);
}
}
const encrypted_credential = encryptCredential(req.body);
const uuid = await SpeechCredential.make({
account_sid,
service_provider_sid,
vendor,
label,
use_for_tts,
use_for_stt,
credential: encrypted_credential
@@ -284,8 +301,10 @@ router.get('/', async(req, res) => {
obj.region = o.region;
obj.use_custom_tts = o.use_custom_tts;
obj.custom_tts_endpoint = o.custom_tts_endpoint;
obj.custom_tts_endpoint_url = o.custom_tts_endpoint_url;
obj.use_custom_stt = o.use_custom_stt;
obj.custom_stt_endpoint = o.custom_stt_endpoint;
obj.custom_stt_endpoint_url = o.custom_stt_endpoint_url;
logger.info({obj, o}, 'retrieving azure speech credential');
}
else if ('wellsaid' === obj.vendor) {
@@ -372,8 +391,10 @@ router.get('/:sid', async(req, res) => {
obj.region = o.region;
obj.use_custom_tts = o.use_custom_tts;
obj.custom_tts_endpoint = o.custom_tts_endpoint;
obj.custom_tts_endpoint_url = o.custom_tts_endpoint_url;
obj.use_custom_stt = o.use_custom_stt;
obj.custom_stt_endpoint = o.custom_stt_endpoint;
obj.custom_stt_endpoint_url = o.custom_stt_endpoint_url;
}
else if ('wellsaid' === obj.vendor) {
const o = JSON.parse(decrypt(credential));
@@ -477,8 +498,10 @@ router.put('/:sid', async(req, res) => {
const {
use_custom_tts,
custom_tts_endpoint,
custom_tts_endpoint_url,
use_custom_stt,
custom_stt_endpoint,
custom_stt_endpoint_url,
custom_stt_url,
custom_tts_url
} = req.body;
@@ -490,8 +513,10 @@ router.put('/:sid', async(req, res) => {
aws_region,
use_custom_tts,
custom_tts_endpoint,
custom_tts_endpoint_url,
use_custom_stt,
custom_stt_endpoint,
custom_stt_endpoint_url,
stt_region,
tts_region,
riva_server_uri,
@@ -611,8 +636,10 @@ router.get('/:sid/test', async(req, res) => {
region,
use_custom_tts,
custom_tts_endpoint,
custom_tts_endpoint_url,
use_custom_stt,
custom_stt_endpoint
custom_stt_endpoint,
custom_stt_endpoint_url
} = credential;
if (cred.use_for_tts) {
try {
@@ -621,8 +648,10 @@ router.get('/:sid/test', async(req, res) => {
region,
use_custom_tts,
custom_tts_endpoint,
custom_tts_endpoint_url,
use_custom_stt,
custom_stt_endpoint
custom_stt_endpoint,
custom_stt_endpoint_url
});
results.tts.status = 'ok';
SpeechCredential.ttsTestResult(sid, true);
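
Speech credentials can now carry a label, so the same vendor can be added more than once per account or service provider, and Microsoft credentials may supply custom TTS/STT endpoint URLs instead of region plus api_key. A sketch of a request body exercising both (field names follow the code above; the label and URLs are illustrative, and posting a duplicate vendor+label is rejected):

const speechCredentialBody = {
  vendor: 'microsoft',
  label: 'emea-backup',                             // must be unique per vendor within the account or SP
  use_for_tts: 1,
  use_for_stt: 1,
  use_custom_tts: 1,
  custom_tts_endpoint_url: 'https://my-tts.example.com/cognitiveservices/v1',     // illustrative
  use_custom_stt: 1,
  custom_stt_endpoint_url: 'wss://my-stt.example.com/speech/recognition'          // illustrative
};
// POSTed to the SpeechCredentials route for the account; with custom endpoint URLs present,
// region and api_key are no longer required for the microsoft vendor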


@@ -338,8 +338,8 @@ router.put('/:user_sid', async(req, res) => {
//if (req.user.user_sid && req.user.user_sid !== user_sid) return res.sendStatus(403);
if (!hasAdminAuth &&
!(hasAccountAuth && req.user.account_sid === user[0].account_sid) &&
!(hasServiceProviderAuth && req.user.service_provider_sid === user[0].service_provider_sid) &&
!(hasAccountAuth && user[0] && req.user.account_sid === user[0].account_sid) &&
!(hasServiceProviderAuth && user[0] && req.user.service_provider_sid === user[0].service_provider_sid) &&
(req.user.user_sid && req.user.user_sid !== user_sid)) {
return res.sendStatus(403);
}


@@ -61,8 +61,7 @@ router.post('/', express.raw({type: 'application/json'}), async(req, res) => {
}
/* process event */
logger.info(`received webhook: ${evt.type}`);
if (evt.type.startsWith('invoice.')) handleInvoiceEvents(logger, evt);
if (evt?.type?.startsWith('invoice.')) handleInvoiceEvents(logger, evt);
else {
logger.debug(evt, 'unhandled stripe webook');
}


@@ -786,7 +786,7 @@ paths:
required: true
schema:
type: string
example: mycorp.sip.jambonz.us
example: mycorp.sip.jambonz.cloud
responses:
200:
description: indicates whether value is already in use


@@ -47,13 +47,15 @@ const sendEmailByCustomVendor = async(logger, from, to, subject, text) => {
};
const sendEmailByMailgun = async(logger, from, to, subject, text) => {
const mg = mailgun.client({
username: 'api',
key: process.env.MAILGUN_API_KEY
});
if (!process.env.MAILGUN_API_KEY) throw new Error('MAILGUN_API_KEY env variable is not defined!');
if (!process.env.MAILGUN_DOMAIN) throw new Error('MAILGUN_DOMAIN env variable is not defined!');
const mg = mailgun.client({
username: 'api',
key: process.env.MAILGUN_API_KEY,
...(process.env.MAILGUN_URL && {url: process.env.MAILGUN_URL})
});
try {
const res = await mg.messages.create(process.env.MAILGUN_DOMAIN, {
from,


@@ -2,7 +2,7 @@
"trial": [
{
"category": "voice_call_session",
"quantity": 20
"quantity": 5
},
{
"category": "device",


@@ -0,0 +1 @@
Hello From Jambonz. This file was created because Record all call bucket credential test.


@@ -1,42 +1,134 @@
const { S3Client, PutObjectCommand, GetObjectCommand } = require('@aws-sdk/client-s3');
const { S3Client, PutObjectCommand, GetObjectCommand, DeleteObjectCommand } = require('@aws-sdk/client-s3');
const {Storage} = require('@google-cloud/storage');
const fs = require('fs');
const { BlobServiceClient } = require('@azure/storage-blob');
async function testAwsS3(logger, opts) {
const s3 = new S3Client({
// Azure
async function testAzureStorage(logger, opts) {
const blobServiceClient = BlobServiceClient.fromConnectionString(opts.connection_string);
const containerClient = blobServiceClient.getContainerClient(opts.name);
const blockBlobClient = containerClient.getBlockBlobClient('jambonz-sample.text');
await blockBlobClient.uploadFile(`${__dirname}/jambonz-sample.text`);
}
async function getAzureStorageObject(logger, opts) {
const blobServiceClient = BlobServiceClient.fromConnectionString(opts.connection_string);
const containerClient = blobServiceClient.getContainerClient(opts.name);
const blockBlobClient = containerClient.getBlockBlobClient(opts.key);
const response = await blockBlobClient.download(0);
return response.readableStreamBody;
}
async function deleteAzureStorageObject(logger, opts) {
const blobServiceClient = BlobServiceClient.fromConnectionString(opts.connection_string);
const containerClient = blobServiceClient.getContainerClient(opts.name);
const blockBlobClient = containerClient.getBlockBlobClient(opts.key);
await blockBlobClient.delete();
}
// Google
function _initGoogleClient(opts) {
const serviceKey = JSON.parse(opts.service_key);
return new Storage({
projectId: serviceKey.project_id,
credentials: {
client_email: serviceKey.client_email,
private_key: serviceKey.private_key
},
});
}
async function testGoogleStorage(logger, opts) {
return new Promise((resolve, reject) => {
const storage = _initGoogleClient(opts);
const blob = storage.bucket(opts.name).file('jambonz-sample.text');
fs.createReadStream(`${__dirname}/jambonz-sample.text`)
.pipe(blob.createWriteStream())
.on('error', (err) => reject(err))
.on('finish', () => resolve());
});
}
async function getGoogleStorageObject(logger, opts) {
const storage = _initGoogleClient(opts);
const bucket = storage.bucket(opts.name);
const file = bucket.file(opts.key);
const [exists] = await file.exists();
if (exists) {
return file.createReadStream();
}
}
async function deleteGoogleStorageObject(logger, opts) {
const storage = _initGoogleClient(opts);
const bucket = storage.bucket(opts.name);
const file = bucket.file(opts.key);
await file.delete();
}
// S3
function _initS3Client(opts) {
return new S3Client({
credentials: {
accessKeyId: opts.access_key_id,
secretAccessKey: opts.secret_access_key,
},
region: opts.region || 'us-east-1'
region: opts.region || 'us-east-1',
...(opts.vendor === 's3_compatible' && { endpoint: opts.endpoint, forcePathStyle: true })
});
}
async function testS3Storage(logger, opts) {
const s3 = _initS3Client(opts);
const input = {
'Body': 'Hello From Jambonz',
'Bucket': opts.name,
'Key': 'jambonz-sample.text'
};
const command = new PutObjectCommand(input);
await s3.send(command);
}
async function getS3Object(logger, opts) {
const s3 = new S3Client({
credentials: {
accessKeyId: opts.access_key_id,
secretAccessKey: opts.secret_access_key,
},
region: opts.region || 'us-east-1'
});
const command = new GetObjectCommand({
Bucket: opts.name,
Key: opts.key
});
const s3 = _initS3Client(opts);
const command = new GetObjectCommand(
{
Bucket: opts.name,
Key: opts.key
}
);
const res = await s3.send(command);
return res.Body;
}
async function deleteS3Object(logger, opts) {
const s3 = _initS3Client(opts);
const command = new DeleteObjectCommand(
{
Bucket: opts.name,
Key: opts.key
}
);
await s3.send(command);
}
module.exports = {
testAwsS3,
getS3Object
testS3Storage,
getS3Object,
deleteS3Object,
testGoogleStorage,
getGoogleStorageObject,
deleteGoogleStorageObject,
testAzureStorage,
getAzureStorageObject,
deleteAzureStorageObject
};
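The rewritten storage helper exposes a test/get/delete trio per vendor, each taking a logger and an opts object whose fields mirror the bucket-credential record. A sketch of fetching a stored recording from each backend, to be run inside an async function; the require path and every credential value below are hypothetical:

const {getS3Object, getAzureStorageObject, getGoogleStorageObject} = require('./storage-helper');

// S3 or S3-compatible storage: vendor 's3_compatible' adds the custom endpoint and forcePathStyle
const s3Body = await getS3Object(logger, {
  vendor: 's3_compatible',
  access_key_id: 'AKIAEXAMPLE',
  secret_access_key: 'example-secret',
  endpoint: 'https://minio.example.com',
  name: 'recordings',              // bucket name
  key: 'accountSid/callSid.mp3'    // object key
});

// Azure blob storage: container name plus blob key, authenticated by connection string
const azureStream = await getAzureStorageObject(logger, {
  connection_string: 'DefaultEndpointsProtocol=https;AccountName=acct;AccountKey=key;EndpointSuffix=core.windows.net',
  name: 'recordings',
  key: 'accountSid/callSid.mp3'
});

// Google cloud storage: JSON service key string plus bucket name and object key
const gcsStream = await getGoogleStorageObject(logger, {
  service_key: JSON.stringify({project_id: 'p', client_email: 'e@p.iam.gserviceaccount.com', private_key: '...'}),
  name: 'recordings',
  key: 'accountSid/callSid.mp3'
});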

package-lock.json generated (910 changed lines)

File diff suppressed because it is too large

View File

@@ -27,7 +27,7 @@
"@jambonz/realtimedb-helpers": "^0.8.6",
"@jambonz/speech-utils": "^0.0.15",
"@jambonz/time-series": "^0.2.8",
"@jambonz/verb-specifications": "^0.0.26",
"@jambonz/verb-specifications": "^0.0.29",
"@jambonz/lamejs": "^1.2.2",
"@soniox/soniox-node": "^1.1.1",
"argon2": "^0.30.3",
@@ -40,7 +40,7 @@
"helmet": "^5.1.0",
"ibm-watson": "^7.1.2",
"jsonwebtoken": "^9.0.0",
"mailgun.js": "^3.7.3",
"mailgun.js": "^9.1.2",
"microsoft-cognitiveservices-speech-sdk": "^1.24.1",
"mysql2": "^2.3.3",
"nocache": "3.0.4",
@@ -53,7 +53,9 @@
"uuid": "^8.3.2",
"yamljs": "^0.3.0",
"ws": "^8.12.1",
"wav": "^1.0.2"
"wav": "^1.0.2",
"@google-cloud/storage": "^6.12.0",
"@azure/storage-blob": "^12.15.0"
},
"devDependencies": {
"eslint": "^8.39.0",

View File

@@ -53,7 +53,7 @@ test('application tests', async(t) => {
]'
}
});
t.ok(result.statusCode === 400, 'Cant create application with invalid app_josn');
t.ok(result.statusCode === 400, 'Cant create application with invalid app_json');
/* add an application */
result = await request.post('/Applications', {
@@ -81,7 +81,15 @@ test('application tests', async(t) => {
"seekOffset": 8000,\
"actionHook": "/play/action"\
}\
]'
]',
use_for_fallback_speech: 1,
fallback_speech_synthesis_vendor: 'google',
fallback_speech_synthesis_language: 'en-US',
fallback_speech_synthesis_voice: 'man',
fallback_speech_synthesis_label: 'label1',
fallback_speech_recognizer_vendor: 'google',
fallback_speech_recognizer_language: 'en-US',
fallback_speech_recognizer_label: 'label1'
}
});
t.ok(result.statusCode === 201, 'successfully created application');
@@ -102,6 +110,14 @@ test('application tests', async(t) => {
});
t.ok(result.name === 'daveh' , 'successfully retrieved application by sid');
t.ok(result.messaging_hook.url === 'http://example.com/sms' , 'successfully retrieved messaging_hook from application');
t.ok(result.use_for_fallback_speech === 1, 'successfully create use_for_fallback_speech');
t.ok(result.fallback_speech_synthesis_vendor === 'google', 'successfully create fallback_speech_synthesis_vendor');
t.ok(result.fallback_speech_synthesis_language === 'en-US', 'successfully create fallback_speech_synthesis_language');
t.ok(result.fallback_speech_synthesis_voice === 'man', 'successfully create fallback_speech_synthesis_voice');
t.ok(result.fallback_speech_synthesis_label === 'label1', 'successfully create fallback_speech_synthesis_label');
t.ok(result.fallback_speech_recognizer_vendor === 'google', 'successfully create fallback_speech_recognizer_vendor');
t.ok(result.fallback_speech_recognizer_language === 'en-US', 'successfully create fallback_speech_recognizer_language');
t.ok(result.fallback_speech_recognizer_label === 'label1', 'successfully create fallback_speech_recognizer_label');
let app_json = JSON.parse(result.app_json);
t.ok(app_json[0].verb === 'play', 'successfully retrieved app_json from application')
@@ -126,7 +142,15 @@ test('application tests', async(t) => {
}\
}\
]',
record_all_calls: true
record_all_calls: true,
use_for_fallback_speech: 0,
fallback_speech_synthesis_vendor: 'microsoft',
fallback_speech_synthesis_language: 'en-US',
fallback_speech_synthesis_voice: 'woman',
fallback_speech_synthesis_label: 'label2',
fallback_speech_recognizer_vendor: 'microsoft',
fallback_speech_recognizer_language: 'en-US',
fallback_speech_recognizer_label: 'label2'
}
});
t.ok(result.statusCode === 204, 'successfully updated application');
@@ -140,6 +164,14 @@ test('application tests', async(t) => {
app_json = JSON.parse(result.app_json);
t.ok(app_json[0].verb === 'hangup', 'successfully updated app_json from application')
t.ok(result.record_all_calls === 1, 'successfully updated record_all_calls from application')
t.ok(result.use_for_fallback_speech === 0, 'successfully update use_for_fallback_speech');
t.ok(result.fallback_speech_synthesis_vendor === 'microsoft', 'successfully update fallback_speech_synthesis_vendor');
t.ok(result.fallback_speech_synthesis_language === 'en-US', 'successfully update fallback_speech_synthesis_language');
t.ok(result.fallback_speech_synthesis_voice === 'woman', 'successfully update fallback_speech_synthesis_voice');
t.ok(result.fallback_speech_synthesis_label === 'label2', 'successfully update fallback_speech_synthesis_label');
t.ok(result.fallback_speech_recognizer_vendor === 'microsoft', 'successfully update fallback_speech_recognizer_vendor');
t.ok(result.fallback_speech_recognizer_language === 'en-US', 'successfully update fallback_speech_recognizer_language');
t.ok(result.fallback_speech_recognizer_label === 'label2', 'successfully update fallback_speech_recognizer_label');
/* remove applications app_json*/
result = await request.put(`/Applications/${sid}`, {

View File
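The application tests above exercise the new fallback speech columns as a group; condensed into one object for reference, using the same values as the create case (these keys are accepted on POST /Applications and PUT /Applications/:sid and echoed back on GET):

const fallbackSpeech = {
  use_for_fallback_speech: 1,
  fallback_speech_synthesis_vendor: 'google',
  fallback_speech_synthesis_language: 'en-US',
  fallback_speech_synthesis_voice: 'man',
  fallback_speech_synthesis_label: 'label1',
  fallback_speech_recognizer_vendor: 'google',
  fallback_speech_recognizer_language: 'en-US',
  fallback_speech_recognizer_label: 'label1'
};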

@@ -31,8 +31,8 @@ test('Create Call Success With Synthesizer in Payload', async (t) => {
auth: authUser,
json: true,
body: {
call_hook: "https://public-apps.jambonz.us/hello-world",
call_status_hook: "https://public-apps.jambonz.us/call-status",
call_hook: "https://public-apps.jambonz.cloud/hello-world",
call_status_hook: "https://public-apps.jambonz.cloud/call-status",
from: "15083778299",
to: {
type: "phone",
@@ -73,8 +73,8 @@ test('Create Call Success Without Synthesizer in Payload', async (t) => {
auth: authUser,
json: true,
body: {
call_hook: "https://public-apps.jambonz.us/hello-world",
call_status_hook: "https://public-apps.jambonz.us/call-status",
call_hook: "https://public-apps.jambonz.cloud/hello-world",
call_status_hook: "https://public-apps.jambonz.cloud/call-status",
from: "15083778299",
to: {
type: "phone",

View File

@@ -7,7 +7,7 @@ const test = async() => {
headers: {
Authorization: `Bearer ${process.env.GH_CODE}`,
Accept: 'application/json',
'User-Agent': 'jambonz.us'
'User-Agent': 'jambonz.cloud'
}
}, (err, response, body) => {
if (err) console.log(err);

View File

@@ -84,6 +84,7 @@ test('speech credentials tests', async(t) => {
json: true,
body: {
vendor: 'google',
label: 'label1',
service_key: jsonKey,
use_for_tts: true,
use_for_stt: true
@@ -111,6 +112,7 @@ test('speech credentials tests', async(t) => {
json: true,
});
t.ok(result.vendor === 'google' , 'successfully retrieved speech credential by sid');
t.ok(result.label === 'label1' , 'label is successfully created');
/* query all credentials */
result = await request.get(`/Accounts/${account_sid}/SpeechCredentials`, {

View File
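The label column is what allows one account to hold several credentials for the same vendor. A short sketch of creating a second google credential that differs only by label, following the request style of the test above (jsonKey2 and the request options are placeholders):

result = await request.post(`/Accounts/${account_sid}/SpeechCredentials`, {
  resolveWithFullResponse: true,
  auth: authUser,
  json: true,
  body: {
    vendor: 'google',
    label: 'label2',
    service_key: jsonKey2,
    use_for_tts: true,
    use_for_stt: true
  }
});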

@@ -192,7 +192,7 @@ test('webapp tests', async(t) => {
t.ok(result.statusCode === 200 && result.body.available === true, 'indicates when email is available');
/* check if a subdomain is available */
result = await request.get('/Availability?type=subdomain&value=mycompany.sip.jambonz.us', {
result = await request.get('/Availability?type=subdomain&value=mycompany.sip.jambonz.cloud', {
resolveWithFullResponse: true,
auth: authUser,
json: true,