mirror of
https://github.com/jambonz/jambonz-api-server.git
synced 2026-01-25 02:08:24 +00:00
Compare commits
43 Commits
feat/metad
...
v0.8.4-4
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c96159268e | ||
|
|
8e200251ca | ||
|
|
898f3aec4a | ||
|
|
6f85752352 | ||
|
|
fe7cc9ad58 | ||
|
|
1ffdfebdb2 | ||
|
|
dcf1895920 | ||
|
|
c509b9d277 | ||
|
|
eff8474997 | ||
|
|
b4237beeeb | ||
|
|
0406e42c19 | ||
|
|
533cd2f47d | ||
|
|
742884cc72 | ||
|
|
9fccfa2a73 | ||
|
|
3356b7302a | ||
|
|
9f533ed17c | ||
|
|
a0797a3a4c | ||
|
|
0b33ef0c2c | ||
|
|
71ecf453f8 | ||
|
|
494f1cf784 | ||
|
|
da74e2526a | ||
|
|
e35a03c7ad | ||
|
|
46fb9b8875 | ||
|
|
f9df2b3028 | ||
|
|
32ff023b14 | ||
|
|
f3d3afee73 | ||
|
|
3c8cbd97c5 | ||
|
|
eba9c98412 | ||
|
|
c2065ef787 | ||
|
|
307787526d | ||
|
|
3141646dfd | ||
|
|
cac6e2117d | ||
|
|
6d34d6f886 | ||
|
|
964afc1660 | ||
|
|
d09dca47b9 | ||
|
|
f3ec847474 | ||
|
|
cf7ce675f5 | ||
|
|
34895daf4f | ||
|
|
b06032b5f0 | ||
|
|
3486ff958c | ||
|
|
f79f96b884 | ||
|
|
2aa3d40268 | ||
|
|
148fc49f06 |
60
.github/workflows/docker-publish-dbcreate.yml
vendored
60
.github/workflows/docker-publish-dbcreate.yml
vendored
@@ -2,16 +2,10 @@ name: Docker
|
||||
|
||||
on:
|
||||
push:
|
||||
# Publish `main` as Docker `latest` image.
|
||||
branches:
|
||||
- main
|
||||
|
||||
# Publish `v1.2.3` tags as releases.
|
||||
tags:
|
||||
- v*
|
||||
|
||||
env:
|
||||
IMAGE_NAME: db-create
|
||||
- '*'
|
||||
|
||||
jobs:
|
||||
push:
|
||||
@@ -20,32 +14,42 @@ jobs:
|
||||
if: github.event_name == 'push'
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Build image
|
||||
run: docker build . --file Dockerfile.db-create --tag $IMAGE_NAME
|
||||
|
||||
- name: Log into registry
|
||||
run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
|
||||
|
||||
- name: Push image
|
||||
- name: prepare tag
|
||||
id: prepare_tag
|
||||
run: |
|
||||
IMAGE_ID=ghcr.io/${{ github.repository_owner }}/$IMAGE_NAME
|
||||
IMAGE_ID=jambonz/db-create
|
||||
|
||||
# Change all uppercase to lowercase
|
||||
IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
|
||||
# Strip git ref prefix from version
|
||||
VERSION=$(echo "${{ github.ref }}" | sed -e 's,.*/\(.*\),\1,')
|
||||
|
||||
# Strip git ref prefix from version
|
||||
VERSION=$(echo "${{ github.ref }}" | sed -e 's,.*/\(.*\),\1,')
|
||||
# Strip "v" prefix from tag name
|
||||
[[ "${{ github.ref }}" == "refs/tags/"* ]] && VERSION=$(echo $VERSION | sed -e 's/^v//')
|
||||
|
||||
# Strip "v" prefix from tag name
|
||||
[[ "${{ github.ref }}" == "refs/tags/"* ]] && VERSION=$(echo $VERSION | sed -e 's/^v//')
|
||||
# Use Docker `latest` tag convention
|
||||
[ "$VERSION" == "main" ] && VERSION=latest
|
||||
|
||||
# Use Docker `latest` tag convention
|
||||
[ "$VERSION" == "main" ] && VERSION=latest
|
||||
echo IMAGE_ID=$IMAGE_ID
|
||||
echo VERSION=$VERSION
|
||||
|
||||
echo IMAGE_ID=$IMAGE_ID
|
||||
echo VERSION=$VERSION
|
||||
echo "image_id=$IMAGE_ID" >> $GITHUB_OUTPUT
|
||||
echo "version=$VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
docker tag $IMAGE_NAME $IMAGE_ID:$VERSION
|
||||
docker push $IMAGE_ID:$VERSION
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@v4
|
||||
with:
|
||||
context: .
|
||||
file: ./Dockerfile.db-create
|
||||
push: true
|
||||
tags: ${{ steps.prepare_tag.outputs.image_id }}:${{ steps.prepare_tag.outputs.version }}
|
||||
build-args: |
|
||||
GITHUB_REPOSITORY=$GITHUB_REPOSITORY
|
||||
GITHUB_REF=$GITHUB_REF
|
||||
|
||||
4
.github/workflows/docker-publish.yml
vendored
4
.github/workflows/docker-publish.yml
vendored
@@ -2,6 +2,8 @@ name: Docker
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
tags:
|
||||
- '*'
|
||||
|
||||
@@ -18,7 +20,7 @@ jobs:
|
||||
- name: prepare tag
|
||||
id: prepare_tag
|
||||
run: |
|
||||
IMAGE_ID=$GITHUB_REPOSITORY
|
||||
IMAGE_ID=jambonz/api-server
|
||||
|
||||
# Strip git ref prefix from version
|
||||
VERSION=$(echo "${{ github.ref }}" | sed -e 's,.*/\(.*\),\1,')
|
||||
|
||||
65
app.js
65
app.js
@@ -13,7 +13,12 @@ assert.ok(process.env.JAMBONES_MYSQL_HOST &&
|
||||
process.env.JAMBONES_MYSQL_USER &&
|
||||
process.env.JAMBONES_MYSQL_PASSWORD &&
|
||||
process.env.JAMBONES_MYSQL_DATABASE, 'missing JAMBONES_MYSQL_XXX env vars');
|
||||
assert.ok(process.env.JAMBONES_REDIS_HOST, 'missing JAMBONES_REDIS_HOST env var');
|
||||
if (process.env.JAMBONES_REDIS_SENTINELS) {
|
||||
assert.ok(process.env.JAMBONES_REDIS_SENTINEL_MASTER_NAME,
|
||||
'missing JAMBONES_REDIS_SENTINEL_MASTER_NAME env var, JAMBONES_REDIS_SENTINEL_PASSWORD env var is optional');
|
||||
} else {
|
||||
assert.ok(process.env.JAMBONES_REDIS_HOST, 'missing JAMBONES_REDIS_HOST env var');
|
||||
}
|
||||
assert.ok(process.env.JAMBONES_TIME_SERIES_HOST, 'missing JAMBONES_TIME_SERIES_HOST env var');
|
||||
assert.ok(process.env.ENCRYPTION_SECRET || process.env.JWT_SECRET, 'missing ENCRYPTION_SECRET env var');
|
||||
assert.ok(process.env.JWT_SECRET, 'missing JWT_SECRET env var');
|
||||
@@ -33,17 +38,20 @@ const {
|
||||
retrieveCall,
|
||||
deleteCall,
|
||||
listCalls,
|
||||
listQueues,
|
||||
listSortedSets,
|
||||
purgeCalls,
|
||||
retrieveSet,
|
||||
addKey,
|
||||
retrieveKey,
|
||||
deleteKey,
|
||||
incrKey
|
||||
incrKey,
|
||||
JAMBONES_REDIS_SENTINELS
|
||||
} = require('./lib/helpers/realtimedb-helpers');
|
||||
const {
|
||||
getTtsVoices
|
||||
} = require('@jambonz/speech-utils')({
|
||||
getTtsVoices,
|
||||
getTtsSize,
|
||||
purgeTtsCache
|
||||
} = require('@jambonz/speech-utils')(JAMBONES_REDIS_SENTINELS || {
|
||||
host: process.env.JAMBONES_REDIS_HOST,
|
||||
port: process.env.JAMBONES_REDIS_PORT || 6379
|
||||
}, logger);
|
||||
@@ -66,6 +74,7 @@ const {
|
||||
const PORT = process.env.HTTP_PORT || 3000;
|
||||
const authStrategy = require('./lib/auth')(logger, retrieveKey);
|
||||
const {delayLoginMiddleware} = require('./lib/middleware');
|
||||
const Websocket = require('ws');
|
||||
|
||||
passport.use(authStrategy);
|
||||
|
||||
@@ -76,7 +85,7 @@ app.locals = {
|
||||
retrieveCall,
|
||||
deleteCall,
|
||||
listCalls,
|
||||
listQueues,
|
||||
listSortedSets,
|
||||
purgeCalls,
|
||||
retrieveSet,
|
||||
addKey,
|
||||
@@ -84,6 +93,8 @@ app.locals = {
|
||||
retrieveKey,
|
||||
deleteKey,
|
||||
getTtsVoices,
|
||||
getTtsSize,
|
||||
purgeTtsCache,
|
||||
lookupAppBySid,
|
||||
lookupAccountBySid,
|
||||
lookupAccountByPhoneNumber,
|
||||
@@ -114,6 +125,12 @@ const limiter = rateLimit({
|
||||
legacyHeaders: false, // Disable the `X-RateLimit-*` headers
|
||||
});
|
||||
|
||||
// Setup websocket for recording audio
|
||||
const recordWsServer = require('./lib/record');
|
||||
const wsServer = new Websocket.Server({ noServer: true });
|
||||
wsServer.setMaxListeners(0);
|
||||
wsServer.on('connection', recordWsServer.bind(null, logger));
|
||||
|
||||
if (process.env.JAMBONES_TRUST_PROXY) {
|
||||
const proxyCount = parseInt(process.env.JAMBONES_TRUST_PROXY);
|
||||
if (!isNaN(proxyCount) && proxyCount > 0) {
|
||||
@@ -154,7 +171,41 @@ app.use((err, req, res, next) => {
|
||||
});
|
||||
});
|
||||
logger.info(`listening for HTTP traffic on port ${PORT}`);
|
||||
app.listen(PORT);
|
||||
const server = app.listen(PORT);
|
||||
|
||||
|
||||
const isValidWsKey = (hdr) => {
|
||||
const username = process.env.JAMBONZ_RECORD_WS_USERNAME;
|
||||
const password = process.env.JAMBONZ_RECORD_WS_PASSWORD;
|
||||
const token = Buffer.from(`${username}:${password}`).toString('base64');
|
||||
const arr = /^Basic (.*)$/.exec(hdr);
|
||||
return arr[1] === token;
|
||||
};
|
||||
|
||||
server.on('upgrade', (request, socket, head) => {
|
||||
logger.debug({
|
||||
url: request.url,
|
||||
headers: request.headers,
|
||||
}, 'received upgrade request');
|
||||
|
||||
/* verify the path starts with /transcribe */
|
||||
if (!request.url.includes('/record/')) {
|
||||
logger.info(`unhandled path: ${request.url}`);
|
||||
return socket.write('HTTP/1.1 404 Not Found \r\n\r\n', () => socket.destroy());
|
||||
}
|
||||
|
||||
/* verify the api key */
|
||||
if (!isValidWsKey(request.headers['authorization'])) {
|
||||
logger.info(`invalid auth header: ${request.headers['authorization']}`);
|
||||
return socket.write('HTTP/1.1 403 Forbidden \r\n\r\n', () => socket.destroy());
|
||||
}
|
||||
|
||||
/* complete the upgrade */
|
||||
wsServer.handleUpgrade(request, socket, head, (ws) => {
|
||||
logger.info(`upgraded to websocket, url: ${request.url}`);
|
||||
wsServer.emit('connection', ws, request.url);
|
||||
});
|
||||
});
|
||||
|
||||
// purge old calls from active call set every 10 mins
|
||||
async function purge() {
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
/* SQLEditor (MySQL (2))*/
|
||||
|
||||
SET FOREIGN_KEY_CHECKS=0;
|
||||
|
||||
DROP TABLE IF EXISTS account_static_ips;
|
||||
@@ -14,6 +13,8 @@ DROP TABLE IF EXISTS beta_invite_codes;
|
||||
|
||||
DROP TABLE IF EXISTS call_routes;
|
||||
|
||||
DROP TABLE IF EXISTS clients;
|
||||
|
||||
DROP TABLE IF EXISTS dns_records;
|
||||
|
||||
DROP TABLE IF EXISTS lcr;
|
||||
@@ -128,6 +129,16 @@ application_sid CHAR(36) NOT NULL,
|
||||
PRIMARY KEY (call_route_sid)
|
||||
) COMMENT='a regex-based pattern match for call routing';
|
||||
|
||||
CREATE TABLE clients
|
||||
(
|
||||
client_sid CHAR(36) NOT NULL UNIQUE ,
|
||||
account_sid CHAR(36) NOT NULL,
|
||||
is_active BOOLEAN NOT NULL DEFAULT 1,
|
||||
username VARCHAR(64),
|
||||
password VARCHAR(1024),
|
||||
PRIMARY KEY (client_sid)
|
||||
);
|
||||
|
||||
CREATE TABLE dns_records
|
||||
(
|
||||
dns_record_sid CHAR(36) NOT NULL UNIQUE ,
|
||||
@@ -145,7 +156,7 @@ regex VARCHAR(32) NOT NULL COMMENT 'regex-based pattern match against dialed num
|
||||
description VARCHAR(1024),
|
||||
priority INTEGER NOT NULL COMMENT 'lower priority routes are attempted first',
|
||||
PRIMARY KEY (lcr_route_sid)
|
||||
) COMMENT='An ordered list of digit patterns in an LCR table. The pat';
|
||||
) COMMENT='An ordered list of digit patterns in an LCR table. The patterns are tested in sequence until one matches';
|
||||
|
||||
CREATE TABLE lcr
|
||||
(
|
||||
@@ -156,7 +167,7 @@ default_carrier_set_entry_sid CHAR(36) COMMENT 'default carrier/route to use whe
|
||||
service_provider_sid CHAR(36),
|
||||
account_sid CHAR(36),
|
||||
PRIMARY KEY (lcr_sid)
|
||||
) COMMENT='An LCR (least cost routing) table that is used by a service ';
|
||||
) COMMENT='An LCR (least cost routing) table that is used by a service provider or account to make decisions about routing outbound calls when multiple carriers are available.';
|
||||
|
||||
CREATE TABLE password_settings
|
||||
(
|
||||
@@ -412,7 +423,7 @@ PRIMARY KEY (smpp_gateway_sid)
|
||||
CREATE TABLE phone_numbers
|
||||
(
|
||||
phone_number_sid CHAR(36) UNIQUE ,
|
||||
number VARCHAR(132) NOT NULL UNIQUE ,
|
||||
number VARCHAR(132) NOT NULL,
|
||||
voip_carrier_sid CHAR(36),
|
||||
account_sid CHAR(36),
|
||||
application_sid CHAR(36),
|
||||
@@ -430,6 +441,7 @@ inbound BOOLEAN NOT NULL COMMENT 'if true, whitelist this IP to allow inbound ca
|
||||
outbound BOOLEAN NOT NULL COMMENT 'if true, include in least-cost routing when placing calls to the PSTN',
|
||||
voip_carrier_sid CHAR(36) NOT NULL,
|
||||
is_active BOOLEAN NOT NULL DEFAULT 1,
|
||||
protocol ENUM('udp','tcp','tls', 'tls/srtp') DEFAULT 'udp' COMMENT 'Outbound call protocol',
|
||||
PRIMARY KEY (sip_gateway_sid)
|
||||
) COMMENT='A whitelisted sip gateway used for origination/termination';
|
||||
|
||||
@@ -469,6 +481,7 @@ speech_synthesis_voice VARCHAR(64),
|
||||
speech_recognizer_vendor VARCHAR(64) NOT NULL DEFAULT 'google',
|
||||
speech_recognizer_language VARCHAR(64) NOT NULL DEFAULT 'en-US',
|
||||
created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
record_all_calls BOOLEAN NOT NULL DEFAULT false,
|
||||
PRIMARY KEY (application_sid)
|
||||
) COMMENT='A defined set of behaviors to be applied to phone calls ';
|
||||
|
||||
@@ -506,6 +519,9 @@ subspace_client_secret VARCHAR(255),
|
||||
subspace_sip_teleport_id VARCHAR(255),
|
||||
subspace_sip_teleport_destinations VARCHAR(255),
|
||||
siprec_hook_sid CHAR(36),
|
||||
record_all_calls BOOLEAN NOT NULL DEFAULT false,
|
||||
record_format VARCHAR(16) NOT NULL DEFAULT 'mp3',
|
||||
bucket_credential VARCHAR(8192) COMMENT 'credential used to authenticate with storage service',
|
||||
PRIMARY KEY (account_sid)
|
||||
) COMMENT='An enterprise that uses the platform for comm services';
|
||||
|
||||
@@ -526,6 +542,9 @@ ALTER TABLE call_routes ADD FOREIGN KEY account_sid_idxfk_3 (account_sid) REFERE
|
||||
|
||||
ALTER TABLE call_routes ADD FOREIGN KEY application_sid_idxfk (application_sid) REFERENCES applications (application_sid);
|
||||
|
||||
CREATE INDEX client_sid_idx ON clients (client_sid);
|
||||
ALTER TABLE clients ADD CONSTRAINT account_sid_idxfk_13 FOREIGN KEY account_sid_idxfk_13 (account_sid) REFERENCES accounts (account_sid);
|
||||
|
||||
CREATE INDEX dns_record_sid_idx ON dns_records (dns_record_sid);
|
||||
ALTER TABLE dns_records ADD FOREIGN KEY account_sid_idxfk_4 (account_sid) REFERENCES accounts (account_sid);
|
||||
|
||||
@@ -628,6 +647,8 @@ CREATE INDEX smpp_gateway_sid_idx ON smpp_gateways (smpp_gateway_sid);
|
||||
CREATE INDEX voip_carrier_sid_idx ON smpp_gateways (voip_carrier_sid);
|
||||
ALTER TABLE smpp_gateways ADD FOREIGN KEY voip_carrier_sid_idxfk (voip_carrier_sid) REFERENCES voip_carriers (voip_carrier_sid);
|
||||
|
||||
CREATE UNIQUE INDEX phone_numbers_unique_idx_voip_carrier_number ON phone_numbers (number,voip_carrier_sid);
|
||||
|
||||
CREATE INDEX phone_number_sid_idx ON phone_numbers (phone_number_sid);
|
||||
CREATE INDEX number_idx ON phone_numbers (number);
|
||||
CREATE INDEX voip_carrier_sid_idx ON phone_numbers (voip_carrier_sid);
|
||||
@@ -682,5 +703,4 @@ ALTER TABLE accounts ADD FOREIGN KEY queue_event_hook_sid_idxfk (queue_event_hoo
|
||||
ALTER TABLE accounts ADD FOREIGN KEY device_calling_application_sid_idxfk (device_calling_application_sid) REFERENCES applications (application_sid);
|
||||
|
||||
ALTER TABLE accounts ADD FOREIGN KEY siprec_hook_sid_idxfk (siprec_hook_sid) REFERENCES applications (application_sid);
|
||||
|
||||
SET FOREIGN_KEY_CHECKS=1;
|
||||
SET FOREIGN_KEY_CHECKS=1;
|
||||
219
db/jambones.sqs
219
db/jambones.sqs
File diff suppressed because one or more lines are too long
@@ -68,9 +68,6 @@ VALUES
|
||||
('d2ccfcb1-9198-4fe9-a0ca-6e49395837c4', '7d509a18-bbff-4c5d-b21e-b99bf8f8c49a', '54.172.60.0', 30, 5060, 1, 0),
|
||||
('6b1d0032-4430-41f1-87c6-f22233d394ef', '7d509a18-bbff-4c5d-b21e-b99bf8f8c49a', '54.244.51.0', 30, 5060, 1, 0),
|
||||
('0de40217-8bd5-4aa8-a9fd-1994282953c6', '7d509a18-bbff-4c5d-b21e-b99bf8f8c49a', '54.171.127.192', 30, 5060, 1, 0),
|
||||
('48b108e3-1ce7-4f18-a4cb-e41e63688bdf', '7d509a18-bbff-4c5d-b21e-b99bf8f8c49a', '54.171.127.193', 30, 5060, 1, 0),
|
||||
('d9131a69-fe44-4c2a-ba82-4adc81f628dd', '7d509a18-bbff-4c5d-b21e-b99bf8f8c49a', '54.171.127.194', 30, 5060, 1, 0),
|
||||
('34a6a311-4bd6-49ca-aa77-edd3cb92c6e1', '7d509a18-bbff-4c5d-b21e-b99bf8f8c49a', '54.171.127.195', 30, 5060, 1, 0),
|
||||
('37bc0b20-b53c-4c31-95a6-f82b1c3713e3', '7d509a18-bbff-4c5d-b21e-b99bf8f8c49a', '35.156.191.128', 30, 5060, 1, 0),
|
||||
('39791f4e-b612-4882-a37e-e92711a39f3f', '7d509a18-bbff-4c5d-b21e-b99bf8f8c49a', '54.65.63.192', 30, 5060, 1, 0),
|
||||
('81a0c8cb-a33e-42da-8f20-99083da6f02f', '7d509a18-bbff-4c5d-b21e-b99bf8f8c49a', '54.252.254.64', 30, 5060, 1, 0),
|
||||
|
||||
@@ -89,6 +89,7 @@ const sql = {
|
||||
'ALTER TABLE `users` ADD COLUMN `is_active` BOOLEAN NOT NULL default true',
|
||||
],
|
||||
8003: [
|
||||
'SET FOREIGN_KEY_CHECKS=0',
|
||||
'ALTER TABLE `voip_carriers` ADD COLUMN `register_status` VARCHAR(4096)',
|
||||
'ALTER TABLE `sbc_addresses` ADD COLUMN `last_updated` DATETIME',
|
||||
'ALTER TABLE `sbc_addresses` ADD COLUMN `tls_port` INTEGER',
|
||||
@@ -99,6 +100,65 @@ const sql = {
|
||||
sip_domain_name VARCHAR(255),
|
||||
monitoring_domain_name VARCHAR(255)
|
||||
)`,
|
||||
'DROP TABLE IF EXISTS `lcr_routes`',
|
||||
'DROP TABLE IF EXISTS `lcr_carrier_set_entry`',
|
||||
`CREATE TABLE lcr_routes
|
||||
(
|
||||
lcr_route_sid CHAR(36),
|
||||
lcr_sid CHAR(36) NOT NULL,
|
||||
regex VARCHAR(32) NOT NULL COMMENT 'regex-based pattern match against dialed number, used for LCR routing of PSTN calls',
|
||||
description VARCHAR(1024),
|
||||
priority INTEGER NOT NULL COMMENT 'lower priority routes are attempted first',
|
||||
PRIMARY KEY (lcr_route_sid)
|
||||
)`,
|
||||
`CREATE TABLE lcr
|
||||
(
|
||||
lcr_sid CHAR(36) NOT NULL UNIQUE ,
|
||||
name VARCHAR(64) COMMENT 'User-assigned name for this LCR table',
|
||||
is_active BOOLEAN NOT NULL DEFAULT 1,
|
||||
default_carrier_set_entry_sid CHAR(36) COMMENT 'default carrier/route to use when no digit match based results are found.',
|
||||
service_provider_sid CHAR(36),
|
||||
account_sid CHAR(36),
|
||||
PRIMARY KEY (lcr_sid)
|
||||
)`,
|
||||
`CREATE TABLE lcr_carrier_set_entry
|
||||
(
|
||||
lcr_carrier_set_entry_sid CHAR(36),
|
||||
workload INTEGER NOT NULL DEFAULT 1 COMMENT 'represents a proportion of traffic to send through the associated carrier; can be used for load balancing traffic across carriers with a common priority for a destination',
|
||||
lcr_route_sid CHAR(36) NOT NULL,
|
||||
voip_carrier_sid CHAR(36) NOT NULL,
|
||||
priority INTEGER NOT NULL DEFAULT 0 COMMENT 'lower priority carriers are attempted first',
|
||||
PRIMARY KEY (lcr_carrier_set_entry_sid)
|
||||
)`,
|
||||
'CREATE INDEX lcr_sid_idx ON lcr_routes (lcr_sid)',
|
||||
'ALTER TABLE lcr_routes ADD FOREIGN KEY lcr_sid_idxfk (lcr_sid) REFERENCES lcr (lcr_sid)',
|
||||
'CREATE INDEX lcr_sid_idx ON lcr (lcr_sid)',
|
||||
'ALTER TABLE lcr ADD FOREIGN KEY default_carrier_set_entry_sid_idxfk (default_carrier_set_entry_sid) REFERENCES lcr_carrier_set_entry (lcr_carrier_set_entry_sid)',
|
||||
'CREATE INDEX service_provider_sid_idx ON lcr (service_provider_sid)',
|
||||
'CREATE INDEX account_sid_idx ON lcr (account_sid)',
|
||||
'ALTER TABLE lcr_carrier_set_entry ADD FOREIGN KEY lcr_route_sid_idxfk (lcr_route_sid) REFERENCES lcr_routes (lcr_route_sid)',
|
||||
'ALTER TABLE lcr_carrier_set_entry ADD FOREIGN KEY voip_carrier_sid_idxfk_3 (voip_carrier_sid) REFERENCES voip_carriers (voip_carrier_sid)',
|
||||
'SET FOREIGN_KEY_CHECKS=1',
|
||||
],
|
||||
8004: [
|
||||
'alter table accounts add column record_all_calls BOOLEAN NOT NULL DEFAULT false',
|
||||
'alter table accounts add column bucket_credential VARCHAR(8192)',
|
||||
'alter table accounts add column record_format VARCHAR(16) NOT NULL DEFAULT \'mp3\'',
|
||||
'alter table applications add column record_all_calls BOOLEAN NOT NULL DEFAULT false',
|
||||
'alter table phone_numbers DROP INDEX number',
|
||||
'create unique index phone_numbers_unique_idx_voip_carrier_number ON phone_numbers (number,voip_carrier_sid)',
|
||||
`CREATE TABLE clients
|
||||
(
|
||||
client_sid CHAR(36) NOT NULL UNIQUE ,
|
||||
account_sid CHAR(36) NOT NULL,
|
||||
is_active BOOLEAN NOT NULL DEFAULT 1,
|
||||
username VARCHAR(64),
|
||||
password VARCHAR(1024),
|
||||
PRIMARY KEY (client_sid)
|
||||
)`,
|
||||
'CREATE INDEX client_sid_idx ON clients (client_sid)',
|
||||
'ALTER TABLE clients ADD CONSTRAINT account_sid_idxfk_13 FOREIGN KEY account_sid_idxfk_13 (account_sid) REFERENCES accounts (account_sid)',
|
||||
'ALTER TABLE sip_gateways ADD COLUMN protocol ENUM(\'udp\',\'tcp\',\'tls\', \'tls/srtp\') DEFAULT \'udp\''
|
||||
]
|
||||
};
|
||||
|
||||
@@ -129,6 +189,7 @@ const doIt = async() => {
|
||||
if (val < 7007) upgrades.push(...sql['7007']);
|
||||
if (val < 8000) upgrades.push(...sql['8000']);
|
||||
if (val < 8003) upgrades.push(...sql['8003']);
|
||||
if (val < 8004) upgrades.push(...sql['8004']);
|
||||
|
||||
// perform all upgrades
|
||||
logger.info({upgrades}, 'applying schema upgrades..');
|
||||
|
||||
@@ -1,10 +1,31 @@
|
||||
const logger = require('../logger');
|
||||
|
||||
const JAMBONES_REDIS_SENTINELS = process.env.JAMBONES_REDIS_SENTINELS ? {
|
||||
sentinels: process.env.JAMBONES_REDIS_SENTINELS.split(',').map((sentinel) => {
|
||||
let host, port = 26379;
|
||||
if (sentinel.includes(':')) {
|
||||
const arr = sentinel.split(':');
|
||||
host = arr[0];
|
||||
port = parseInt(arr[1], 10);
|
||||
} else {
|
||||
host = sentinel;
|
||||
}
|
||||
return {host, port};
|
||||
}),
|
||||
name: process.env.JAMBONES_REDIS_SENTINEL_MASTER_NAME,
|
||||
...(process.env.JAMBONES_REDIS_SENTINEL_PASSWORD && {
|
||||
password: process.env.JAMBONES_REDIS_SENTINEL_PASSWORD
|
||||
}),
|
||||
...(process.env.JAMBONES_REDIS_SENTINEL_USERNAME && {
|
||||
username: process.env.JAMBONES_REDIS_SENTINEL_USERNAME
|
||||
})
|
||||
} : null;
|
||||
|
||||
const {
|
||||
retrieveCall,
|
||||
deleteCall,
|
||||
listCalls,
|
||||
listQueues,
|
||||
listSortedSets,
|
||||
purgeCalls,
|
||||
retrieveSet,
|
||||
addKey,
|
||||
@@ -12,7 +33,7 @@ const {
|
||||
deleteKey,
|
||||
incrKey,
|
||||
client: redisClient,
|
||||
} = require('@jambonz/realtimedb-helpers')({
|
||||
} = require('@jambonz/realtimedb-helpers')(JAMBONES_REDIS_SENTINELS || {
|
||||
host: process.env.JAMBONES_REDIS_HOST || 'localhost',
|
||||
port: process.env.JAMBONES_REDIS_PORT || 6379
|
||||
}, logger);
|
||||
@@ -21,12 +42,13 @@ module.exports = {
|
||||
retrieveCall,
|
||||
deleteCall,
|
||||
listCalls,
|
||||
listQueues,
|
||||
listSortedSets,
|
||||
purgeCalls,
|
||||
retrieveSet,
|
||||
addKey,
|
||||
retrieveKey,
|
||||
deleteKey,
|
||||
redisClient,
|
||||
incrKey
|
||||
incrKey,
|
||||
JAMBONES_REDIS_SENTINELS
|
||||
};
|
||||
|
||||
@@ -1,11 +1,7 @@
|
||||
const opts = Object.assign({
|
||||
timestamp: () => {
|
||||
return `, "time": "${new Date().toISOString()}"`;
|
||||
}
|
||||
}, {
|
||||
const opts = {
|
||||
level: process.env.JAMBONES_LOGLEVEL || 'info'
|
||||
});
|
||||
|
||||
const logger = require('pino')(opts);
|
||||
};
|
||||
const pino = require('pino');
|
||||
const logger = pino(opts, pino.destination(1, {sync: false}));
|
||||
|
||||
module.exports = logger;
|
||||
|
||||
@@ -4,7 +4,7 @@ const {getMysqlConnection} = require('../db');
|
||||
const {promisePool} = require('../db');
|
||||
const { v4: uuid } = require('uuid');
|
||||
|
||||
const {encrypt} = require('../utils/encrypt-decrypt');
|
||||
const {encrypt, decrypt} = require('../utils/encrypt-decrypt');
|
||||
|
||||
const retrieveSql = `SELECT * from accounts acc
|
||||
LEFT JOIN webhooks AS rh
|
||||
@@ -55,6 +55,13 @@ WHERE account_sid = ?
|
||||
AND effective_end_date IS NULL
|
||||
AND pending = 0`;
|
||||
|
||||
const extractBucketCredential = (obj) => {
|
||||
const {bucket_credential} = obj;
|
||||
if (bucket_credential) {
|
||||
obj.bucket_credential = JSON.parse(decrypt(bucket_credential));
|
||||
}
|
||||
};
|
||||
|
||||
function transmogrifyResults(results) {
|
||||
return results.map((row) => {
|
||||
const obj = row.acc;
|
||||
@@ -75,6 +82,8 @@ function transmogrifyResults(results) {
|
||||
else obj.queue_event_hook = null;
|
||||
delete obj.queue_event_hook_sid;
|
||||
|
||||
extractBucketCredential(obj);
|
||||
|
||||
return obj;
|
||||
});
|
||||
}
|
||||
@@ -238,7 +247,6 @@ class Account extends Model {
|
||||
}));
|
||||
return account_subscription_sid;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
Account.table = 'accounts';
|
||||
@@ -319,7 +327,15 @@ Account.fields = [
|
||||
type: 'string',
|
||||
},
|
||||
{
|
||||
name: 'lcr_sid',
|
||||
name: 'record_all_calls',
|
||||
type: 'number'
|
||||
},
|
||||
{
|
||||
name: 'record_format',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
name: 'bucket_credential',
|
||||
type: 'string'
|
||||
}
|
||||
];
|
||||
|
||||
@@ -120,6 +120,10 @@ Application.fields = [
|
||||
{
|
||||
name: 'messaging_hook_sid',
|
||||
type: 'string',
|
||||
},
|
||||
{
|
||||
name: 'record_all_calls',
|
||||
type: 'number',
|
||||
}
|
||||
];
|
||||
|
||||
|
||||
58
lib/models/client.js
Normal file
58
lib/models/client.js
Normal file
@@ -0,0 +1,58 @@
|
||||
const Model = require('./model');
|
||||
const {promisePool} = require('../db');
|
||||
|
||||
class Client extends Model {
|
||||
constructor() {
|
||||
super();
|
||||
}
|
||||
|
||||
static async retrieveAllByAccountSid(account_sid) {
|
||||
const sql = `SELECT * FROM ${this.table} WHERE account_sid = ?`;
|
||||
const [rows] = await promisePool.query(sql, account_sid);
|
||||
return rows;
|
||||
}
|
||||
|
||||
static async retrieveAllByServiceProviderSid(service_provider_sid) {
|
||||
const sql = `SELECT c.client_sid, c.account_sid, c.is_active, c.username, c.hashed_password
|
||||
FROM ${this.table} AS c LEFT JOIN accounts AS acc ON c.account_sid = acc.account_sid
|
||||
LEFT JOIN service_providers AS sp ON sp.service_provider_sid = accs.service_provider_sid
|
||||
WHERE sp.service_provider_sid = ?`;
|
||||
const [rows] = await promisePool.query(sql, service_provider_sid);
|
||||
return rows;
|
||||
}
|
||||
|
||||
static async retrieveByAccountSidAndUserName(account_sid, username) {
|
||||
const sql = `SELECT * FROM ${this.table} WHERE account_sid = ? AND username = ?`;
|
||||
const [rows] = await promisePool.query(sql, [account_sid, username]);
|
||||
return rows;
|
||||
}
|
||||
}
|
||||
|
||||
Client.table = 'clients';
|
||||
Client.fields = [
|
||||
{
|
||||
name: 'client_sid',
|
||||
type: 'string',
|
||||
primaryKey: true
|
||||
},
|
||||
{
|
||||
name: 'account_sid',
|
||||
type: 'string',
|
||||
required: true
|
||||
},
|
||||
{
|
||||
name: 'is_active',
|
||||
type: 'number'
|
||||
},
|
||||
{
|
||||
name: 'username',
|
||||
type: 'string',
|
||||
required: true
|
||||
},
|
||||
{
|
||||
name: 'password',
|
||||
type: 'string'
|
||||
}
|
||||
];
|
||||
|
||||
module.exports = Client;
|
||||
@@ -1,6 +1,6 @@
|
||||
const Model = require('./model');
|
||||
const {promisePool} = require('../db');
|
||||
const sql = 'SELECT * from phone_numbers WHERE account_sid = ?';
|
||||
const sql = 'SELECT * from phone_numbers WHERE account_sid = ? ORDER BY number';
|
||||
const sqlSP = `SELECT *
|
||||
FROM phone_numbers
|
||||
WHERE account_sid IN
|
||||
@@ -8,7 +8,7 @@ WHERE account_sid IN
|
||||
SELECT account_sid
|
||||
FROM accounts
|
||||
WHERE service_provider_sid = ?
|
||||
)`;
|
||||
) ORDER BY number`;
|
||||
|
||||
class PhoneNumber extends Model {
|
||||
constructor() {
|
||||
|
||||
@@ -58,6 +58,10 @@ SipGateway.fields = [
|
||||
{
|
||||
name: 'application_sid',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
name: 'protocol',
|
||||
type: 'string'
|
||||
}
|
||||
];
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
const Model = require('./model');
|
||||
const {promisePool} = require('../db');
|
||||
const retrieveSql = 'SELECT * from speech_credentials WHERE account_sid = ?';
|
||||
const retrieveSqlForSP = 'SELECT * from speech_credentials WHERE service_provider_sid = ?';
|
||||
const retrieveSqlForSP = 'SELECT * from speech_credentials WHERE service_provider_sid = ? and account_sid is null';
|
||||
|
||||
class SpeechCredential extends Model {
|
||||
constructor() {
|
||||
|
||||
54
lib/record/encoder.js
Normal file
54
lib/record/encoder.js
Normal file
@@ -0,0 +1,54 @@
|
||||
const { Transform } = require('stream');
|
||||
const lamejs = require('@jambonz/lamejs');
|
||||
|
||||
class PCMToMP3Encoder extends Transform {
|
||||
constructor(options) {
|
||||
super(options);
|
||||
|
||||
const channels = options.channels || 1;
|
||||
const sampleRate = options.sampleRate || 8000;
|
||||
const bitRate = options.bitRate || 128;
|
||||
|
||||
this.encoder = new lamejs.Mp3Encoder(channels, sampleRate, bitRate);
|
||||
this.channels = channels;
|
||||
}
|
||||
|
||||
_transform(chunk, encoding, callback) {
|
||||
// Convert chunk buffer into Int16Array for lamejs
|
||||
const samples = new Int16Array(chunk.buffer, chunk.byteOffset, chunk.length / 2);
|
||||
|
||||
// Split input samples into left and right channel arrays if stereo
|
||||
let leftChannel, rightChannel;
|
||||
if (this.channels === 2) {
|
||||
leftChannel = new Int16Array(samples.length / 2);
|
||||
rightChannel = new Int16Array(samples.length / 2);
|
||||
|
||||
for (let i = 0; i < samples.length; i += 2) {
|
||||
leftChannel[i / 2] = samples[i];
|
||||
rightChannel[i / 2] = samples[i + 1];
|
||||
}
|
||||
} else {
|
||||
leftChannel = samples;
|
||||
}
|
||||
|
||||
// Encode the input data
|
||||
const mp3Data = this.encoder.encodeBuffer(leftChannel, rightChannel);
|
||||
|
||||
if (mp3Data.length > 0) {
|
||||
this.push(Buffer.from(mp3Data));
|
||||
}
|
||||
callback();
|
||||
}
|
||||
|
||||
_flush(callback) {
|
||||
// Finalize encoding and flush the internal buffers
|
||||
const mp3Data = this.encoder.flush();
|
||||
|
||||
if (mp3Data.length > 0) {
|
||||
this.push(Buffer.from(mp3Data));
|
||||
}
|
||||
callback();
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = PCMToMP3Encoder;
|
||||
41
lib/record/google-storage.js
Normal file
41
lib/record/google-storage.js
Normal file
@@ -0,0 +1,41 @@
|
||||
const { Storage } = require('@google-cloud/storage');
|
||||
const { Writable } = require('stream');
|
||||
|
||||
class GoogleStorageUploadStream extends Writable {
|
||||
|
||||
constructor(logger, opts) {
|
||||
super(opts);
|
||||
this.logger = logger;
|
||||
this.metadata = opts.metadata;
|
||||
|
||||
const storage = new Storage(opts.bucketCredential);
|
||||
this.gcsFile = storage.bucket(opts.bucketName).file(opts.Key);
|
||||
this.writeStream = this.gcsFile.createWriteStream();
|
||||
|
||||
this.writeStream.on('error', (err) => this.logger.error(err));
|
||||
this.writeStream.on('finish', () => {
|
||||
this.logger.info('google storage Upload completed.');
|
||||
this._addMetadata();
|
||||
});
|
||||
}
|
||||
|
||||
_write(chunk, encoding, callback) {
|
||||
this.writeStream.write(chunk, encoding, callback);
|
||||
}
|
||||
|
||||
_final(callback) {
|
||||
this.writeStream.end();
|
||||
this.writeStream.once('finish', callback);
|
||||
}
|
||||
|
||||
async _addMetadata() {
|
||||
try {
|
||||
await this.gcsFile.setMetadata({metadata: this.metadata});
|
||||
this.logger.info('Google storage Upload and metadata setting completed.');
|
||||
} catch (err) {
|
||||
this.logger.error(err, 'Google storage An error occurred while setting metadata');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = GoogleStorageUploadStream;
|
||||
6
lib/record/index.js
Normal file
6
lib/record/index.js
Normal file
@@ -0,0 +1,6 @@
|
||||
|
||||
async function record(logger, socket) {
|
||||
return require('./upload')(logger, socket);
|
||||
}
|
||||
|
||||
module.exports = record;
|
||||
103
lib/record/s3-multipart-upload-stream.js
Normal file
103
lib/record/s3-multipart-upload-stream.js
Normal file
@@ -0,0 +1,103 @@
|
||||
const { Writable } = require('stream');
|
||||
const {
|
||||
S3Client,
|
||||
CreateMultipartUploadCommand,
|
||||
UploadPartCommand,
|
||||
CompleteMultipartUploadCommand,
|
||||
} = require('@aws-sdk/client-s3');
|
||||
|
||||
class S3MultipartUploadStream extends Writable {
|
||||
constructor(logger, opts) {
|
||||
super(opts);
|
||||
this.logger = logger;
|
||||
this.bucketName = opts.bucketName;
|
||||
this.objectKey = opts.Key;
|
||||
this.uploadId = null;
|
||||
this.partNumber = 1;
|
||||
this.multipartETags = [];
|
||||
this.buffer = Buffer.alloc(0);
|
||||
this.minPartSize = 5 * 1024 * 1024; // 5 MB
|
||||
this.s3 = new S3Client(opts.bucketCredential);
|
||||
this.metadata = opts.metadata;
|
||||
}
|
||||
|
||||
async _initMultipartUpload() {
|
||||
const command = new CreateMultipartUploadCommand({
|
||||
Bucket: this.bucketName,
|
||||
Key: this.objectKey,
|
||||
Metadata: this.metadata
|
||||
});
|
||||
const response = await this.s3.send(command);
|
||||
return response.UploadId;
|
||||
}
|
||||
|
||||
async _uploadBuffer() {
|
||||
const uploadPartCommand = new UploadPartCommand({
|
||||
Bucket: this.bucketName,
|
||||
Key: this.objectKey,
|
||||
PartNumber: this.partNumber,
|
||||
UploadId: this.uploadId,
|
||||
Body: this.buffer,
|
||||
});
|
||||
|
||||
const uploadPartResponse = await this.s3.send(uploadPartCommand);
|
||||
this.multipartETags.push({
|
||||
ETag: uploadPartResponse.ETag,
|
||||
PartNumber: this.partNumber,
|
||||
});
|
||||
this.partNumber += 1;
|
||||
}
|
||||
|
||||
async _write(chunk, encoding, callback) {
|
||||
try {
|
||||
if (!this.uploadId) {
|
||||
this.uploadId = await this._initMultipartUpload();
|
||||
}
|
||||
|
||||
this.buffer = Buffer.concat([this.buffer, chunk]);
|
||||
|
||||
if (this.buffer.length >= this.minPartSize) {
|
||||
await this._uploadBuffer();
|
||||
this.buffer = Buffer.alloc(0);
|
||||
}
|
||||
|
||||
callback(null);
|
||||
} catch (error) {
|
||||
callback(error);
|
||||
}
|
||||
}
|
||||
|
||||
async _finalize(err) {
|
||||
try {
|
||||
if (this.buffer.length > 0) {
|
||||
await this._uploadBuffer();
|
||||
}
|
||||
|
||||
const completeMultipartUploadCommand = new CompleteMultipartUploadCommand({
|
||||
Bucket: this.bucketName,
|
||||
Key: this.objectKey,
|
||||
MultipartUpload: {
|
||||
Parts: this.multipartETags.sort((a, b) => a.PartNumber - b.PartNumber),
|
||||
},
|
||||
UploadId: this.uploadId,
|
||||
});
|
||||
|
||||
await this.s3.send(completeMultipartUploadCommand);
|
||||
this.logger.info('Finished upload to S3');
|
||||
} catch (error) {
|
||||
this.logger.error('Error completing multipart upload:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async _final(callback) {
|
||||
try {
|
||||
await this._finalize();
|
||||
callback(null);
|
||||
} catch (error) {
|
||||
callback(error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = S3MultipartUploadStream;
|
||||
87
lib/record/upload.js
Normal file
87
lib/record/upload.js
Normal file
@@ -0,0 +1,87 @@
|
||||
const Account = require('../models/account');
|
||||
const Websocket = require('ws');
|
||||
const PCMToMP3Encoder = require('./encoder');
|
||||
const wav = require('wav');
|
||||
const { getUploader } = require('./utils');
|
||||
|
||||
async function upload(logger, socket) {
|
||||
|
||||
socket._recvInitialMetadata = false;
|
||||
socket.on('message', async function(data, isBinary) {
|
||||
try {
|
||||
if (!isBinary && !socket._recvInitialMetadata) {
|
||||
socket._recvInitialMetadata = true;
|
||||
logger.debug(`initial metadata: ${data}`);
|
||||
const obj = JSON.parse(data.toString());
|
||||
logger.info({obj}, 'received JSON message from jambonz');
|
||||
const {sampleRate, accountSid, callSid, direction, from, to,
|
||||
callId, applicationSid, originatingSipIp, originatingSipTrunkName} = obj;
|
||||
const account = await Account.retrieve(accountSid);
|
||||
if (account && account.length && account[0].bucket_credential) {
|
||||
const obj = account[0].bucket_credential;
|
||||
// add tags to metadata
|
||||
const metadata = {
|
||||
accountSid,
|
||||
callSid,
|
||||
direction,
|
||||
from,
|
||||
to,
|
||||
callId,
|
||||
applicationSid,
|
||||
originatingSipIp,
|
||||
originatingSipTrunkName,
|
||||
sampleRate: `${sampleRate}`
|
||||
};
|
||||
if (obj.tags && obj.tags.length) {
|
||||
obj.tags.forEach((tag) => {
|
||||
metadata[tag.Key] = tag.Value;
|
||||
});
|
||||
}
|
||||
// create S3 path
|
||||
const day = new Date();
|
||||
let Key = `${day.getFullYear()}/${(day.getMonth() + 1).toString().padStart(2, '0')}`;
|
||||
Key += `/${day.getDate().toString().padStart(2, '0')}/${callSid}.${account[0].record_format}`;
|
||||
|
||||
// Uploader
|
||||
const uploadStream = getUploader(Key, metadata, obj, logger);
|
||||
if (!uploadStream) {
|
||||
logger.info('There is no available record uploader, close the socket.');
|
||||
socket.close();
|
||||
}
|
||||
|
||||
/**encoder */
|
||||
let encoder;
|
||||
if (obj.output_format === 'wav') {
|
||||
encoder = new wav.Writer({ channels: 2, sampleRate, bitDepth: 16 });
|
||||
} else {
|
||||
// default is mp3
|
||||
encoder = new PCMToMP3Encoder({
|
||||
channels: 2,
|
||||
sampleRate: sampleRate,
|
||||
bitrate: 128
|
||||
});
|
||||
}
|
||||
/* start streaming data */
|
||||
const duplex = Websocket.createWebSocketStream(socket);
|
||||
duplex.pipe(encoder).pipe(uploadStream);
|
||||
} else {
|
||||
logger.info(`account ${accountSid} does not have any bucket credential, close the socket`);
|
||||
socket.close();
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error({err, data}, 'error parsing message during connection');
|
||||
}
|
||||
});
|
||||
socket.on('error', function(err) {
|
||||
logger.error({err}, 'aws upload: error');
|
||||
});
|
||||
socket.on('close', (data) => {
|
||||
logger.info({data}, 'aws_s3: close');
|
||||
});
|
||||
socket.on('end', function(err) {
|
||||
logger.error({err}, 'aws upload: socket closed from jambonz');
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = upload;
|
||||
40
lib/record/utils.js
Normal file
40
lib/record/utils.js
Normal file
@@ -0,0 +1,40 @@
|
||||
const GoogleStorageUploadStream = require('./google-storage');
|
||||
const S3MultipartUploadStream = require('./s3-multipart-upload-stream');
|
||||
|
||||
const getUploader = (Key, metadata, bucket_credential, logger) => {
|
||||
const uploaderOpts = {
|
||||
bucketName: bucket_credential.name,
|
||||
Key,
|
||||
metadata
|
||||
};
|
||||
switch (bucket_credential.vendor) {
|
||||
case 'aws_s3':
|
||||
uploaderOpts.bucketCredential = {
|
||||
credentials: {
|
||||
accessKeyId: bucket_credential.access_key_id,
|
||||
secretAccessKey: bucket_credential.secret_access_key,
|
||||
},
|
||||
region: bucket_credential.region || 'us-east-1'
|
||||
};
|
||||
return new S3MultipartUploadStream(logger, uploaderOpts);
|
||||
case 'google':
|
||||
const serviceKey = JSON.parse(bucket_credential.service_key);
|
||||
uploaderOpts.bucketCredential = {
|
||||
projectId: serviceKey.project_id,
|
||||
credentials: {
|
||||
client_email: serviceKey.client_email,
|
||||
private_key: serviceKey.private_key
|
||||
}
|
||||
};
|
||||
return new GoogleStorageUploadStream(logger, uploaderOpts);
|
||||
|
||||
default:
|
||||
logger.error(`unknown bucket vendor: ${bucket_credential.vendor}`);
|
||||
break;
|
||||
}
|
||||
return null;
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
getUploader
|
||||
};
|
||||
@@ -1,4 +1,5 @@
|
||||
const router = require('express').Router();
|
||||
const assert = require('assert');
|
||||
const request = require('request');
|
||||
const {DbErrorBadRequest, DbErrorForbidden, DbErrorUnprocessableRequest} = require('../../utils/errors');
|
||||
const Account = require('../../models/account');
|
||||
@@ -22,6 +23,8 @@ const {
|
||||
} = require('./utils');
|
||||
const short = require('short-uuid');
|
||||
const VoipCarrier = require('../../models/voip-carrier');
|
||||
const { encrypt } = require('../../utils/encrypt-decrypt');
|
||||
const { testAwsS3, testGoogleStorage } = require('../../utils/storage-utils');
|
||||
const translator = short();
|
||||
|
||||
let idx = 0;
|
||||
@@ -38,20 +41,14 @@ const getFsUrl = async(logger, retrieveSet, setName) => {
|
||||
logger.info('No available feature servers to handle createCall API request');
|
||||
return ;
|
||||
}
|
||||
const ip = stripPort(fs[idx++ % fs.length]);
|
||||
logger.info({fs}, `feature servers available for createCall API request, selecting ${ip}`);
|
||||
return `http://${ip}:3000/v1/createCall`;
|
||||
const f = fs[idx++ % fs.length];
|
||||
logger.info({fs}, `feature servers available for createCall API request, selecting ${f}`);
|
||||
return `${f}/v1/createCall`;
|
||||
} catch (err) {
|
||||
logger.error({err}, 'getFsUrl: error retreving feature servers from redis');
|
||||
}
|
||||
};
|
||||
|
||||
const stripPort = (hostport) => {
|
||||
const arr = /^(.*):(.*)$/.exec(hostport);
|
||||
if (arr) return arr[1];
|
||||
return hostport;
|
||||
};
|
||||
|
||||
const validateRequest = async(req, account_sid) => {
|
||||
try {
|
||||
if (req.user.hasScope('admin')) {
|
||||
@@ -89,6 +86,7 @@ router.use('/:sid/Charges', hasAccountPermissions, require('./charges'));
|
||||
router.use('/:sid/SipRealms', hasAccountPermissions, require('./sip-realm'));
|
||||
router.use('/:sid/PredefinedCarriers', hasAccountPermissions, require('./add-from-predefined-carrier'));
|
||||
router.use('/:sid/Limits', hasAccountPermissions, require('./limits'));
|
||||
router.use('/:sid/TtsCache', hasAccountPermissions, require('./tts-cache'));
|
||||
router.get('/:sid/Applications', async(req, res) => {
|
||||
const logger = req.app.locals.logger;
|
||||
try {
|
||||
@@ -263,6 +261,7 @@ async function validateCreateCall(logger, sid, req) {
|
||||
const application = await lookupAppBySid(obj.application_sid);
|
||||
Object.assign(obj, {
|
||||
call_hook: application.call_hook,
|
||||
app_json: application.app_json,
|
||||
call_status_hook: application.call_status_hook,
|
||||
speech_synthesis_vendor: application.speech_synthesis_vendor,
|
||||
speech_synthesis_language: application.speech_synthesis_language,
|
||||
@@ -449,7 +448,6 @@ router.get('/:sid', async(req, res) => {
|
||||
try {
|
||||
const account_sid = parseAccountSid(req);
|
||||
await validateRequest(req, account_sid);
|
||||
|
||||
const service_provider_sid = req.user.hasServiceProviderAuth ? req.user.service_provider_sid : null;
|
||||
const results = await Account.retrieve(account_sid, service_provider_sid);
|
||||
if (results.length === 0) return res.status(404).end();
|
||||
@@ -535,6 +533,41 @@ router.delete('/:sid/SubspaceTeleport', async(req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
function encryptBucketCredential(obj) {
|
||||
if (!obj.bucket_credential) return;
|
||||
const {
|
||||
vendor,
|
||||
region,
|
||||
name,
|
||||
access_key_id,
|
||||
secret_access_key,
|
||||
tags,
|
||||
service_key
|
||||
} = obj.bucket_credential;
|
||||
|
||||
switch (vendor) {
|
||||
case 'aws_s3':
|
||||
assert(access_key_id, 'invalid aws S3 bucket credential: access_key_id is required');
|
||||
assert(secret_access_key, 'invalid aws S3 bucket credential: secret_access_key is required');
|
||||
assert(name, 'invalid aws bucket name: name is required');
|
||||
assert(region, 'invalid aws bucket region: region is required');
|
||||
const awsData = JSON.stringify({vendor, region, name, access_key_id,
|
||||
secret_access_key, tags});
|
||||
obj.bucket_credential = encrypt(awsData);
|
||||
break;
|
||||
case 'google':
|
||||
assert(service_key, 'invalid aws S3 bucket credential: service_key is required');
|
||||
const googleData = JSON.stringify({vendor, name, service_key, tags});
|
||||
obj.bucket_credential = encrypt(googleData);
|
||||
break;
|
||||
case 'none':
|
||||
obj.bucket_credential = null;
|
||||
break;
|
||||
default:
|
||||
throw DbErrorBadRequest(`unknow storage vendor: ${vendor}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* update
|
||||
*/
|
||||
@@ -581,6 +614,8 @@ router.put('/:sid', async(req, res) => {
|
||||
delete obj.registration_hook;
|
||||
delete obj.queue_event_hook;
|
||||
|
||||
encryptBucketCredential(obj);
|
||||
|
||||
const rowsAffected = await Account.update(sid, obj);
|
||||
if (rowsAffected === 0) {
|
||||
return res.status(404).end();
|
||||
@@ -673,6 +708,36 @@ account_subscriptions WHERE account_sid = ?)
|
||||
}
|
||||
});
|
||||
|
||||
/* Test Bucket credential Keys */
|
||||
router.post('/:sid/BucketCredentialTest', async(req, res) => {
|
||||
const logger = req.app.locals.logger;
|
||||
try {
|
||||
const account_sid = parseAccountSid(req);
|
||||
await validateRequest(req, account_sid);
|
||||
const {vendor, name, region, access_key_id, secret_access_key, service_key} = req.body;
|
||||
const ret = {
|
||||
status: 'not tested'
|
||||
};
|
||||
|
||||
switch (vendor) {
|
||||
case 'aws_s3':
|
||||
await testAwsS3(logger, {vendor, name, region, access_key_id, secret_access_key});
|
||||
ret.status = 'ok';
|
||||
break;
|
||||
case 'google':
|
||||
await testGoogleStorage(logger, {vendor, name, service_key});
|
||||
ret.status = 'ok';
|
||||
break;
|
||||
default:
|
||||
throw new DbErrorBadRequest(`Does not support test for ${vendor}`);
|
||||
}
|
||||
return res.status(200).json(ret);
|
||||
}
|
||||
catch (err) {
|
||||
return res.status(200).json({status: 'failed', reason: err.message});
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* retrieve account level api keys
|
||||
*/
|
||||
@@ -696,7 +761,7 @@ router.get('/:sid/ApiKeys', async(req, res) => {
|
||||
*/
|
||||
router.post('/:sid/Calls', async(req, res) => {
|
||||
const {retrieveSet, logger} = req.app.locals;
|
||||
const setName = `${(process.env.JAMBONES_CLUSTER_ID || 'default')}:active-fs`;
|
||||
const setName = `${(process.env.JAMBONES_CLUSTER_ID || 'default')}:fs-service-url`;
|
||||
const serviceUrl = await getFsUrl(logger, retrieveSet, setName);
|
||||
|
||||
if (!serviceUrl) {
|
||||
@@ -857,7 +922,7 @@ router.post('/:sid/Messages', async(req, res) => {
|
||||
const account_sid = parseAccountSid(req);
|
||||
await validateRequest(req, account_sid);
|
||||
|
||||
const setName = `${(process.env.JAMBONES_CLUSTER_ID || 'default')}:active-fs`;
|
||||
const setName = `${(process.env.JAMBONES_CLUSTER_ID || 'default')}:fs-service-url`;
|
||||
const serviceUrl = await getFsUrl(logger, retrieveSet, setName);
|
||||
if (!serviceUrl) res.json({msg: 'no available feature servers at this time'}).status(480);
|
||||
await validateCreateMessage(logger, account_sid, req);
|
||||
@@ -894,12 +959,12 @@ router.post('/:sid/Messages', async(req, res) => {
|
||||
* retrieve info for a group of queues under an account
|
||||
*/
|
||||
router.get('/:sid/Queues', async(req, res) => {
|
||||
const {logger, listQueues} = req.app.locals;
|
||||
const {logger, listSortedSets} = req.app.locals;
|
||||
const { search } = req.query || {};
|
||||
try {
|
||||
const accountSid = parseAccountSid(req);
|
||||
await validateRequest(req, accountSid);
|
||||
const queues = search ? await listQueues(accountSid, search) : await listQueues(accountSid);
|
||||
const queues = search ? await listSortedSets(accountSid, search) : await listSortedSets(accountSid);
|
||||
logger.debug(`retrieved ${queues.length} queues for account sid ${accountSid}`);
|
||||
res.status(200).json(queues);
|
||||
updateLastUsed(logger, accountSid, req).catch((err) => {});
|
||||
|
||||
88
lib/routes/api/clients.js
Normal file
88
lib/routes/api/clients.js
Normal file
@@ -0,0 +1,88 @@
|
||||
const router = require('express').Router();
|
||||
const decorate = require('./decorate');
|
||||
const sysError = require('../error');
|
||||
const Client = require('../../models/client');
|
||||
const Account = require('../../models/account');
|
||||
const { DbErrorBadRequest, DbErrorForbidden } = require('../../utils/errors');
|
||||
const { encrypt, decrypt, obscureKey } = require('../../utils/encrypt-decrypt');
|
||||
|
||||
const commonCheck = async(req) => {
|
||||
if (req.user.hasAccountAuth) {
|
||||
req.body.account_sid = req.user.account_sid;
|
||||
} else if (req.user.hasServiceProviderAuth && req.body.account_sid) {
|
||||
const accounts = await Account.retrieve(req.body.account_sid, req.user.service_provider_sid);
|
||||
if (accounts.length === 0) {
|
||||
throw new DbErrorForbidden('insufficient permissions');
|
||||
}
|
||||
}
|
||||
|
||||
if (req.body.password) {
|
||||
req.body.password = encrypt(req.body.password);
|
||||
}
|
||||
};
|
||||
|
||||
const validateAdd = async(req) => {
|
||||
await commonCheck(req);
|
||||
|
||||
const clients = await Client.retrieveByAccountSidAndUserName(req.body.account_sid, req.body.username);
|
||||
if (clients.length) {
|
||||
throw new DbErrorBadRequest('the client\'s username already exists');
|
||||
}
|
||||
};
|
||||
|
||||
const validateUpdate = async(req, sid) => {
|
||||
await commonCheck(req);
|
||||
|
||||
const clients = await Client.retrieveByAccountSidAndUserName(req.body.account_sid, req.body.username);
|
||||
if (clients.length && clients[0].client_sid !== sid) {
|
||||
throw new DbErrorBadRequest('the client\'s username already exists');
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
const preconditions = {
|
||||
add: validateAdd,
|
||||
update: validateUpdate,
|
||||
};
|
||||
|
||||
decorate(router, Client, ['add', 'update', 'delete'], preconditions);
|
||||
|
||||
router.get('/', async(req, res) => {
|
||||
const logger = req.app.locals.logger;
|
||||
try {
|
||||
const results = req.user.hasAdminAuth ?
|
||||
await Client.retrieveAll() : req.user.hasAccountAuth ?
|
||||
await Client.retrieveAllByAccountSid(req.user.hasAccountAuth ? req.user.account_sid : null) :
|
||||
await Client.retrieveAllByServiceProviderSid(req.user.service_provider_sid);
|
||||
const ret = results.map((c) => {
|
||||
c.password = obscureKey(decrypt(c.password), 1);
|
||||
return c;
|
||||
});
|
||||
res.status(200).json(ret);
|
||||
} catch (err) {
|
||||
sysError(logger, res, err);
|
||||
}
|
||||
});
|
||||
|
||||
router.get('/:sid', async(req, res) => {
|
||||
const logger = req.app.locals.logger;
|
||||
try {
|
||||
const results = await Client.retrieve(req.params.sid);
|
||||
if (results.length === 0) return res.sendStatus(404);
|
||||
const client = results[0];
|
||||
client.password = obscureKey(decrypt(client.password), 1);
|
||||
if (req.user.hasAccountAuth && client.account_sid !== req.user.account_sid) {
|
||||
return res.sendStatus(404);
|
||||
} else if (req.user.hasServiceProviderAuth) {
|
||||
const accounts = await Account.retrieve(client.account_sid, req.user.service_provider_sid);
|
||||
if (!accounts.length) {
|
||||
return res.sendStatus(404);
|
||||
}
|
||||
}
|
||||
return res.status(200).json(client);
|
||||
} catch (err) {
|
||||
sysError(logger, res, err);
|
||||
}
|
||||
});
|
||||
|
||||
module.exports = router;
|
||||
@@ -17,6 +17,7 @@ const isAdminScope = (req, res, next) => {
|
||||
|
||||
api.use('/BetaInviteCodes', isAdminScope, require('./beta-invite-codes'));
|
||||
api.use('/SystemInformation', isAdminScope, require('./system-information'));
|
||||
api.use('/TtsCache', isAdminScope, require('./tts-cache'));
|
||||
api.use('/ServiceProviders', require('./service-providers'));
|
||||
api.use('/VoipCarriers', require('./voip-carriers'));
|
||||
api.use('/Webhooks', require('./webhooks'));
|
||||
@@ -50,6 +51,7 @@ api.use('/PasswordSettings', require('./password-settings'));
|
||||
api.use('/Lcrs', require('./lcrs'));
|
||||
api.use('/LcrRoutes', require('./lcr-routes'));
|
||||
api.use('/LcrCarrierSetEntries', require('./lcr-carrier-set-entries'));
|
||||
api.use('/Clients', require('./clients'));
|
||||
|
||||
// messaging
|
||||
api.use('/Smpps', require('./smpps')); // our smpp server info
|
||||
|
||||
@@ -3,6 +3,8 @@ const sysError = require('../error');
|
||||
const {DbErrorBadRequest} = require('../../utils/errors');
|
||||
const {getHomerApiKey, getHomerSipTrace, getHomerPcap} = require('../../utils/homer-utils');
|
||||
const {getJaegerTrace} = require('../../utils/jaeger-utils');
|
||||
const Account = require('../../models/account');
|
||||
const { getS3Object, getGoogleStorageObject } = require('../../utils/storage-utils');
|
||||
|
||||
const parseAccountSid = (url) => {
|
||||
const arr = /Accounts\/([^\/]*)/.exec(url);
|
||||
@@ -20,7 +22,7 @@ router.get('/', async(req, res) => {
|
||||
logger.debug({opts: req.query}, 'GET /RecentCalls');
|
||||
const account_sid = parseAccountSid(req.originalUrl);
|
||||
const service_provider_sid = account_sid ? null : parseServiceProviderSid(req.originalUrl);
|
||||
const {page, count, trunk, direction, days, answered, start, end} = req.query || {};
|
||||
const {page, count, trunk, direction, days, answered, start, end, filter} = req.query || {};
|
||||
if (!page || page < 1) throw new DbErrorBadRequest('missing or invalid "page" query arg');
|
||||
if (!count || count < 25 || count > 500) throw new DbErrorBadRequest('missing or invalid "count" query arg');
|
||||
|
||||
@@ -35,6 +37,7 @@ router.get('/', async(req, res) => {
|
||||
answered,
|
||||
start: days ? undefined : start,
|
||||
end: days ? undefined : end,
|
||||
filter
|
||||
});
|
||||
res.status(200).json(data);
|
||||
}
|
||||
@@ -49,6 +52,7 @@ router.get('/', async(req, res) => {
|
||||
answered,
|
||||
start: days ? undefined : start,
|
||||
end: days ? undefined : end,
|
||||
filter
|
||||
});
|
||||
res.status(200).json(data);
|
||||
}
|
||||
@@ -74,12 +78,12 @@ router.get('/:call_id', async(req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
router.get('/:call_id/pcap', async(req, res) => {
|
||||
router.get('/:call_id/:method/pcap', async(req, res) => {
|
||||
const {logger} = req.app.locals;
|
||||
try {
|
||||
const token = await getHomerApiKey(logger);
|
||||
if (!token) return res.sendStatus(400, {msg: 'getHomerApiKey: Failed to get Homer API token; check server config'});
|
||||
const stream = await getHomerPcap(logger, token, [req.params.call_id]);
|
||||
const stream = await getHomerPcap(logger, token, [req.params.call_id], req.params.method);
|
||||
if (!stream) {
|
||||
logger.info(`getHomerApiKey: unable to get sip traces from Homer for ${req.params.call_id}`);
|
||||
return res.sendStatus(404);
|
||||
@@ -111,4 +115,38 @@ router.get('/trace/:trace_id', async(req, res) => {
|
||||
}
|
||||
});
|
||||
|
||||
router.get('/:call_sid/record/:year/:month/:day/:format', async(req, res) => {
|
||||
const {logger} = req.app.locals;
|
||||
const {call_sid, year, month, day, format} = req.params;
|
||||
|
||||
try {
|
||||
const account_sid = parseAccountSid(req.originalUrl);
|
||||
const r = await Account.retrieve(account_sid);
|
||||
if (r.length === 0 || !r[0].bucket_credential) return res.sendStatus(404);
|
||||
const {bucket_credential} = r[0];
|
||||
const getOptions = {
|
||||
...bucket_credential,
|
||||
key: `${year}/${month}/${day}/${call_sid}.${format || 'mp3'}`
|
||||
};
|
||||
let stream;
|
||||
switch (bucket_credential.vendor) {
|
||||
case 'aws_s3':
|
||||
stream = await getS3Object(logger, getOptions);
|
||||
break;
|
||||
case 'google':
|
||||
stream = await getGoogleStorageObject(logger, getOptions);
|
||||
break;
|
||||
default:
|
||||
logger.error(`There is no handler for fetching record from ${bucket_credential.vendor}`);
|
||||
return res.sendStatus(500);
|
||||
}
|
||||
res.set({
|
||||
'Content-Type': `audio/${format || 'mp3'}`
|
||||
});
|
||||
stream.pipe(res);
|
||||
} catch (err) {
|
||||
logger.error({err}, ` error retrieving recording ${call_sid}`);
|
||||
res.sendStatus(404);
|
||||
}
|
||||
});
|
||||
module.exports = router;
|
||||
|
||||
@@ -28,20 +28,13 @@ router.get('/', async(req, res) => {
|
||||
|
||||
if (req.user.hasAccountAuth) {
|
||||
const [r] = await promisePool.query('SELECT * from accounts WHERE account_sid = ?', req.user.account_sid);
|
||||
if (0 === r.length) throw new Error('invalid account_sid');
|
||||
if (0 === r.length) throw new DbErrorBadRequest('invalid account_sid');
|
||||
|
||||
service_provider_sid = r[0].service_provider_sid;
|
||||
}
|
||||
|
||||
if (req.user.hasServiceProviderAuth) {
|
||||
const [r] = await promisePool.query(
|
||||
'SELECT * from service_providers where service_provider_sid = ?',
|
||||
service_provider_sid);
|
||||
if (0 === r.length) throw new Error('invalid account_sid');
|
||||
|
||||
service_provider_sid = r[0].service_provider_sid;
|
||||
|
||||
if (!service_provider_sid) throw new DbErrorBadRequest('missing service_provider_sid in query');
|
||||
service_provider_sid = req.user.service_provider_sid;
|
||||
}
|
||||
|
||||
/** generally, we have a global set of SBCs that all accounts use.
|
||||
|
||||
@@ -14,20 +14,14 @@ const getFsUrl = async(logger, retrieveSet, setName, provider) => {
      logger.info('No available feature servers to handle createCall API request');
      return ;
    }
    const ip = stripPort(fs[idx++ % fs.length]);
    logger.info({fs}, `feature servers available for createCall API request, selecting ${ip}`);
    return `http://${ip}:3000/v1/messaging/${provider}`;
    const f = fs[idx++ % fs.length];
    logger.info({fs}, `feature servers available for createCall API request, selecting ${f}`);
    return `${f}/v1/messaging/${provider}`;
  } catch (err) {
    logger.error({err}, 'getFsUrl: error retreving feature servers from redis');
  }
};

const stripPort = (hostport) => {
  const arr = /^(.*):(.*)$/.exec(hostport);
  if (arr) return arr[1];
  return hostport;
};

const doSendResponse = async(res, respondFn, body) => {
  if (typeof respondFn === 'number') res.sendStatus(respondFn);
  else if (typeof respondFn !== 'function') res.sendStatus(200);
@@ -44,7 +38,7 @@ router.post('/:provider', async(req, res) => {
    lookupAppByPhoneNumber,
    logger
  } = req.app.locals;
  const setName = `${process.env.JAMBONES_CLUSTER_ID || 'default'}:active-fs`;
  const setName = `${process.env.JAMBONES_CLUSTER_ID || 'default'}:fs-service-url`;
  logger.debug({path: req.path, body: req.body}, 'incomingSMS from carrier');

  // search for provider module
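Editor's note: the hunks above change getFsUrl so that members of the feature-server set are treated as complete base URLs (the set name also moves from active-fs to fs-service-url), rather than host:port entries from which an http://<ip>:3000 prefix was synthesized. A minimal sketch of the round-robin pick this relies on; the set contents and provider name are invented for illustration only.

let idx = 0;
const pick = (fs) => fs[idx++ % fs.length];   // simple round-robin over set members

const fs = ['http://172.16.1.10:3000', 'http://172.16.1.11:3000'];   // hypothetical fs-service-url members
const webhookBase = `${pick(fs)}/v1/messaging/some-provider`;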
@@ -3,7 +3,7 @@ const assert = require('assert');
const Account = require('../../models/account');
const SpeechCredential = require('../../models/speech-credential');
const sysError = require('../error');
const {decrypt, encrypt} = require('../../utils/encrypt-decrypt');
const {decrypt, encrypt, obscureKey} = require('../../utils/encrypt-decrypt');
const {parseAccountSid, parseServiceProviderSid, parseSpeechCredentialSid} = require('./utils');
const {DbErrorUnprocessableRequest, DbErrorForbidden} = require('../../utils/errors');
const {
@@ -99,17 +99,6 @@ const validateTest = async(req, speech_credentials) => {
  }
};

const obscureKey = (key) => {
  const key_spoiler_length = 6;
  const key_spoiler_char = 'X';

  if (key.length <= key_spoiler_length) {
    return key;
  }

  return `${key.slice(0, key_spoiler_length)}${key_spoiler_char.repeat(key.length - key_spoiler_length)}`;
};

const encryptCredential = (obj) => {
  const {
    vendor,
@@ -489,7 +478,9 @@ router.put('/:sid', async(req, res) => {
      use_custom_tts,
      custom_tts_endpoint,
      use_custom_stt,
      custom_stt_endpoint
      custom_stt_endpoint,
      custom_stt_url,
      custom_tts_url
    } = req.body;

    const newCred = {
@@ -505,7 +496,9 @@ router.put('/:sid', async(req, res) => {
      tts_region,
      riva_server_uri,
      nuance_stt_uri,
      nuance_tts_uri
      nuance_tts_uri,
      custom_stt_url,
      custom_tts_url
    };
    logger.info({o, newCred}, 'updating speech credential with this new credential');
    obj.credential = encryptCredential(newCred);
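Editor's note: the two hunks above add custom_stt_url and custom_tts_url to the fields accepted by the PUT handler and folded into the encrypted credential. A hedged sketch of an update request carrying the new fields; the path and token follow the conventions used in the test files later in this diff, but are not confirmed by this hunk, and the URLs are placeholders.

// `request` is request-promise-native, as configured in the tests below
await request.put(`/Accounts/${account_sid}/SpeechCredentials/${speech_credential_sid}`, {
  auth: {bearer: token},
  json: true,
  resolveWithFullResponse: true,
  body: {
    use_custom_stt: true,
    custom_stt_url: 'https://stt.example.com/v1/recognize',
    use_custom_tts: true,
    custom_tts_url: 'https://tts.example.com/v1/synthesize'
  }
});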
@@ -8,7 +8,7 @@ router.post('/', async(req, res) => {

router.get('/', async(req, res) => {
  const [sysInfo] = await SystemInformation.retrieveAll();
  res.status(200).json(sysInfo);
  res.status(200).json(sysInfo || {});
});

module.exports = router;
29 lib/routes/api/tts-cache.js Normal file
@@ -0,0 +1,29 @@
const router = require('express').Router();
const {
  parseAccountSid
} = require('./utils');

router.delete('/', async(req, res) => {
  const {purgeTtsCache} = req.app.locals;
  const account_sid = parseAccountSid(req);
  if (account_sid) {
    await purgeTtsCache({account_sid});
  } else {
    await purgeTtsCache();
  }
  res.sendStatus(204);
});

router.get('/', async(req, res) => {
  const {getTtsSize} = req.app.locals;
  const account_sid = parseAccountSid(req);
  let size = 0;
  if (account_sid) {
    size = await getTtsSize(`tts:${account_sid}:*`);
  } else {
    size = await getTtsSize();
  }
  res.status(200).json({size});
});

module.exports = router;
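Editor's note: a quick sketch of exercising the new TtsCache routes. The /v1/TtsCache path and admin bearer token mirror test/tts-cache.js further down in this diff; the host value and adminApiKey are placeholders.

// Requires Node 18+ for global fetch; run inside an async function.
const baseUrl = 'http://127.0.0.1:3000/v1';
const headers = {Authorization: `Bearer ${adminApiKey}`};

const {size} = await (await fetch(`${baseUrl}/TtsCache`, {headers})).json();   // -> {size: N}
await fetch(`${baseUrl}/TtsCache`, {method: 'DELETE', headers});               // -> 204, cache purged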
@@ -3046,6 +3046,12 @@ paths:
            enum:
              - inbound
              - outbound
        - in: query
          name: filter
          required: false
          schema:
            type: string
          description: Filter value can be caller ID, callee ID or call Sid
      get:
        tags:
          - Accounts
@@ -3245,6 +3251,18 @@ paths:
            enum:
              - inbound
              - outbound
        - in: query
          name: from
          required: false
          schema:
            type: string
          description: calling number to retrieve
        - in: query
          name: to
          required: false
          schema:
            type: string
          description: called number to retrieve
      get:
        tags:
          - Service Providers
@@ -23,7 +23,18 @@ const decrypt = (data) => {
  return decrpyted.toString();
};

const obscureKey = (key, key_spoiler_length = 6) => {
  const key_spoiler_char = 'X';

  if (!key || key.length <= key_spoiler_length) {
    return key;
  }

  return `${key.slice(0, key_spoiler_length)}${key_spoiler_char.repeat(key.length - key_spoiler_length)}`;
};

module.exports = {
  encrypt,
  decrypt
  decrypt,
  obscureKey
};
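Editor's note: obscureKey is now exported from the shared encrypt-decrypt utility (and dropped from the speech-credentials route above), gaining a configurable spoiler length and a guard for falsy or short keys. A quick illustration of its behavior; the input values are made up and the require path is shown relative to the repo root.

const {obscureKey} = require('./lib/utils/encrypt-decrypt');

obscureKey('abcd1234efgh');      // 'abcd12XXXXXX'  - first 6 chars kept, rest masked
obscureKey('abc123');            // 'abc123'        - at or below the spoiler length, returned unchanged
obscureKey('abcd1234efgh', 2);   // 'abXXXXXXXXXX'  - custom spoiler length
obscureKey(undefined);           // undefined       - falsy keys pass straight through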
@@ -64,7 +64,7 @@ const getHomerSipTrace = async(logger, apiKey, callId) => {
  }
};

const getHomerPcap = async(logger, apiKey, callIds) => {
const getHomerPcap = async(logger, apiKey, callIds, method) => {
  if (!process.env.HOMER_BASE_URL || !process.env.HOMER_USERNAME || !process.env.HOMER_PASSWORD) {
    logger.debug('getHomerPcap: Homer integration not installed');
  }
@@ -73,18 +73,23 @@ const getHomerPcap = async(logger, apiKey, callIds) => {
    const stream = await postPcap('/api/v3/export/call/messages/pcap', {
      param: {
        transaction: {
          call: true,
          registration: true,
          call: method === 'invite',
          registration: method === 'register',
          rest: false
        },
        orlogic: true,
        search: {
          '1_call': {
            callid: callIds
          },
          '1_registration': {
            callid: callIds
          }
          ...(method === 'invite' && {
            '1_call': {
              callid: callIds
            }
          })
          ,
          ...(method === 'register' && {
            '1_registration': {
              callid: callIds
            }
          })
        },
      },
      timestamp: {
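Editor's note: getHomerPcap now takes a method argument and builds the pcap export query for either call or registration transactions, instead of always requesting both. The call sites are not shown in this hunk, so the invocations below are only illustrative; the call-id values are placeholders.

// export the SIP pcap for an INVITE dialog
const pcapStream = await getHomerPcap(logger, apiKey, ['abc123@10.0.0.1'], 'invite');

// export the pcap for a REGISTER transaction instead
const regStream = await getHomerPcap(logger, apiKey, ['reg-xyz@10.0.0.1'], 'register');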
@@ -9,7 +9,8 @@ const getJaegerTrace = async(logger, traceId) => {
  try {
    return await getJSON(`/api/v3/traces/${traceId}`);
  } catch (err) {
    logger.error({err}, `getJaegerTrace: Error retrieving spans for traceId ${traceId}`);
    const url = `${process.env.JAEGER_BASE_URL}/api/traces/${traceId}`;
    logger.error({err, traceId}, `getJaegerTrace: Error retrieving spans from ${url}`);
  }
};
1 lib/utils/jambonz-sample.text Normal file
@@ -0,0 +1 @@
Hello From Jambonz. This file was created because Record all call bucket credential test.
82 lib/utils/storage-utils.js Normal file
@@ -0,0 +1,82 @@
const { S3Client, PutObjectCommand, GetObjectCommand } = require('@aws-sdk/client-s3');
const {Storage} = require('@google-cloud/storage');
const fs = require('fs');

function testGoogleStorage(logger, opts) {
  return new Promise((resolve, reject) => {
    const serviceKey = JSON.parse(opts.service_key);
    const storage = new Storage({
      projectId: serviceKey.project_id,
      credentials: {
        client_email: serviceKey.client_email,
        private_key: serviceKey.private_key
      },
    });

    const blob = storage.bucket(opts.name).file('jambonz-sample.text');

    fs.createReadStream(`${__dirname}/jambonz-sample.text`)
      .pipe(blob.createWriteStream())
      .on('error', (err) => reject(err))
      .on('finish', () => resolve());
  });
}

async function getGoogleStorageObject(logger, opts) {
  const serviceKey = JSON.parse(opts.service_key);
  const storage = new Storage({
    projectId: serviceKey.project_id,
    credentials: {
      client_email: serviceKey.client_email,
      private_key: serviceKey.private_key
    },
  });

  const bucket = storage.bucket(opts.name);
  const file = bucket.file(opts.key);

  return file.createReadStream();
}

async function testAwsS3(logger, opts) {
  const s3 = new S3Client({
    credentials: {
      accessKeyId: opts.access_key_id,
      secretAccessKey: opts.secret_access_key,
    },
    region: opts.region || 'us-east-1'
  });

  const input = {
    'Body': 'Hello From Jambonz',
    'Bucket': opts.name,
    'Key': 'jambonz-sample.text'
  };

  const command = new PutObjectCommand(input);

  await s3.send(command);
}

async function getS3Object(logger, opts) {
  const s3 = new S3Client({
    credentials: {
      accessKeyId: opts.access_key_id,
      secretAccessKey: opts.secret_access_key,
    },
    region: opts.region || 'us-east-1'
  });
  const command = new GetObjectCommand({
    Bucket: opts.name,
    Key: opts.key
  });
  const res = await s3.send(command);
  return res.Body;
}

module.exports = {
  testAwsS3,
  getS3Object,
  testGoogleStorage,
  getGoogleStorageObject
};
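Editor's note: a small sketch of how the new storage helpers above might be wired together to fetch a stored recording. The opts shape mirrors the bucket_credential object used elsewhere in this diff, but the values, the logger, and the output path are placeholders.

const fs = require('fs');
const {getS3Object, getGoogleStorageObject} = require('./lib/utils/storage-utils');

// run inside an async function; `logger` is assumed to be a pino-style logger
const opts = {
  vendor: 'aws_s3',
  name: 'recordings',                       // bucket name
  region: 'us-east-1',
  access_key_id: '<key id>',
  secret_access_key: '<secret>',
  key: '2023/07/01/<call_sid>.mp3'          // object key: year/month/day/call_sid.format
};

const stream = opts.vendor === 'aws_s3'
  ? await getS3Object(logger, opts)
  : await getGoogleStorageObject(logger, opts);
stream.pipe(fs.createWriteStream('/tmp/recording.mp3'));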
4026 package-lock.json generated
File diff suppressed because it is too large
27 package.json
@@ -1,6 +1,6 @@
{
  "name": "jambonz-api-server",
  "version": "v0.8.2",
  "version": "0.8.4",
  "description": "",
  "main": "app.js",
  "scripts": {
@@ -19,15 +19,17 @@
    "url": "https://github.com/jambonz/jambonz-api-server.git"
  },
  "dependencies": {
    "@aws-sdk/client-transcribe": "^3.290.0",
    "@deepgram/sdk": "^1.10.2",
    "@google-cloud/speech": "^5.1.0",
    "@jambonz/db-helpers": "^0.7.8",
    "@jambonz/realtimedb-helpers": "^0.7.1",
    "@jambonz/speech-utils": "^0.0.12",
    "@jambonz/time-series": "^0.2.5",
    "@jambonz/verb-specifications": "^0.0.17",
    "@soniox/soniox-node": "^1.1.0",
    "@aws-sdk/client-transcribe": "^3.363.0",
    "@aws-sdk/client-s3": "^3.363.0",
    "@deepgram/sdk": "^1.21.0",
    "@google-cloud/speech": "^5.2.0",
    "@jambonz/db-helpers": "^0.9.0",
    "@jambonz/realtimedb-helpers": "^0.8.6",
    "@jambonz/speech-utils": "^0.0.15",
    "@jambonz/time-series": "^0.2.8",
    "@jambonz/verb-specifications": "^0.0.26",
    "@jambonz/lamejs": "^1.2.2",
    "@soniox/soniox-node": "^1.1.1",
    "argon2": "^0.30.3",
    "bent": "^7.3.12",
    "cors": "^2.8.5",
@@ -49,7 +51,10 @@
    "stripe": "^8.222.0",
    "swagger-ui-express": "^4.4.0",
    "uuid": "^8.3.2",
    "yamljs": "^0.3.0"
    "yamljs": "^0.3.0",
    "ws": "^8.12.1",
    "wav": "^1.0.2",
    "@google-cloud/storage" : "^6.12.0"
  },
  "devDependencies": {
    "eslint": "^8.39.0",
@@ -14,7 +14,7 @@ const {
  createPhoneNumber,
  deleteObjectBySid} = require('./utils');
const logger = require('../lib/logger');
const { pushBack } = require('@jambonz/realtimedb-helpers')({
const { addToSortedSet } = require('@jambonz/realtimedb-helpers')({
  host: process.env.JAMBONES_REDIS_HOST,
  port: process.env.JAMBONES_REDIS_PORT || 6379
}, logger);
@@ -168,11 +168,33 @@ test('account tests', async(t) => {
        queue_event_hook: {
          url: 'http://example.com/q',
          method: 'post'
        },
        record_all_calls: true,
        record_format: 'wav',
        bucket_credential: {
          vendor: 'aws_s3',
          region: 'us-east-1',
          name: 'recordings',
          access_key_id: 'access_key_id',
          secret_access_key: 'secret access key'
        }

      }
    });
    t.ok(result.statusCode === 204, 'successfully updated account using account level token');

    /* verify that bucket credential is updated*/
    result = await request.get(`/Accounts/${sid}`, {
      auth: {bearer: accountLevelToken},
      json: true,
    });

    t.ok(result.bucket_credential.vendor === 'aws_s3', 'bucket_vendor was updated');
    t.ok(result.bucket_credential.name === 'recordings', 'bucket_name was updated');
    t.ok(result.bucket_credential.access_key_id === 'access_key_id', 'bucket_access_key_id was updated');
    t.ok(result.record_all_calls === 1, 'record_all_calls was updated');
    t.ok(result.record_format === 'wav', 'record_format was updated');

    /* verify that account level api key last_used was updated*/
    result = await request.get(`/Accounts/${sid}/ApiKeys`, {
      auth: {bearer: accountLevelToken},
@@ -268,8 +290,8 @@ test('account tests', async(t) => {
    t.ok(result.statusCode === 204, 'successfully deleted a call session limit for an account');

    /* query account queues */
    await pushBack(`queue:${sid}:test`, 'url1');
    await pushBack(`queue:${sid}:dummy`, 'url2');
    await addToSortedSet(`queue:${sid}:test`, 'url1');
    await addToSortedSet(`queue:${sid}:dummy`, 'url2');

    result = await request.get(`/Accounts/${sid}/Queues`, {
      auth: authAdmin,
@@ -125,7 +125,8 @@ test('application tests', async(t) => {
            "X-Reason" : "maximum call duration exceeded"\
          }\
        }\
      ]'
      ]',
      record_all_calls: true
    }
  });
  t.ok(result.statusCode === 204, 'successfully updated application');
@@ -138,6 +139,7 @@ test('application tests', async(t) => {
  t.ok(result.messaging_hook.url === 'http://example2.com/mms' , 'successfully updated messaging_hook');
  app_json = JSON.parse(result.app_json);
  t.ok(app_json[0].verb === 'hangup', 'successfully updated app_json from application')
  t.ok(result.record_all_calls === 1, 'successfully updated record_all_calls from application')

  /* remove applications app_json*/
  result = await request.put(`/Applications/${sid}`, {
@@ -83,4 +83,74 @@ test('Create Call Success Without Synthesizer in Payload', async (t) => {
    }
  }).then(data => { t.ok(false, 'Create Call should not be success') })
    .catch(error => { t.ok(error.response.statusCode === 400, 'Call failed for no synthesizer') });
});

test("Create call with application sid and app_json", async(t) => {
  const app = require('../app');

  const service_provider_sid = await createServiceProvider(request, 'account3_has_synthesizer');
  const account_sid = await createAccount(request, service_provider_sid, 'account3_has_synthesizer');

  const token = jwt.sign({
    account_sid,
    scope: "account",
    permissions: ["PROVISION_USERS", "PROVISION_SERVICES", "VIEW_ONLY"]
  }, process.env.JWT_SECRET, { expiresIn: '1h' });
  const authUser = { bearer: token };
  const speech_sid = await createGoogleSpeechCredentials(request, account_sid, null, authUser, true, true);

  // GIVEN
  /* add an application */

  const app_json = '[\
    {\
      "verb": "play",\
      "url": "https://example.com/example.mp3",\
      "timeoutSecs": 10,\
      "seekOffset": 8000,\
      "actionHook": "/play/action"\
    }\
  ]';
  let result = await request.post('/Applications', {
    resolveWithFullResponse: true,
    auth: authUser,
    json: true,
    body: {
      name: 'daveh',
      account_sid,
      call_hook: {
        url: 'http://example.com'
      },
      call_status_hook: {
        url: 'http://example.com/status',
        method: 'POST'
      },
      messaging_hook: {
        url: 'http://example.com/sms'
      },
      app_json
    }
  });
  t.ok(result.statusCode === 201, 'successfully created application');
  const sid = result.body.sid;

  // WHEN
  result = await request.post(`/Accounts/${account_sid}/Calls`, {
    resolveWithFullResponse: true,
    auth: authUser,
    json: true,
    body: {
      application_sid: sid,
      from: "15083778299",
      to: {
        type: "phone",
        number: "15089084809"
      },
    }
  });
  // THEN
  t.ok(result.statusCode === 201, 'successfully created Call without Synthesizer && application_sid');
  const fs_request = await getLastRequestFromFeatureServer('15083778299_createCall');
  const obj = JSON.parse(fs_request);
  t.ok(obj.body.app_json == app_json, 'app_json successfully added')
});
119 test/clients.js Normal file
@@ -0,0 +1,119 @@
const test = require('tape') ;
const ADMIN_TOKEN = '38700987-c7a4-4685-a5bb-af378f9734de';
const authAdmin = {bearer: ADMIN_TOKEN};
const request = require('request-promise-native').defaults({
  baseUrl: 'http://127.0.0.1:3000/v1'
});

process.on('unhandledRejection', (reason, p) => {
  console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
});

test('client test', async(t) => {
  const app = require('../app');

  try {
    let result;
    /* add a service provider */
    result = await request.post('/ServiceProviders', {
      resolveWithFullResponse: true,
      auth: authAdmin,
      json: true,
      body: {
        name: 'client_sp',
      }
    });
    t.ok(result.statusCode === 201, 'successfully created client service provider');
    const sp_sid = result.body.sid;

    /* add an account */
    result = await request.post('/Accounts', {
      resolveWithFullResponse: true,
      auth: authAdmin,
      json: true,
      body: {
        name: 'sample_account',
        service_provider_sid: sp_sid,
        registration_hook: {
          url: 'http://example.com/reg',
          method: 'get'
        },
        webhook_secret: 'foobar'
      }
    });
    t.ok(result.statusCode === 201, 'successfully created account');
    const account_sid = result.body.sid;

    /* add new entity */
    result = await request.post('/Clients', {
      resolveWithFullResponse: true,
      auth: authAdmin,
      json: true,
      body: {
        account_sid,
        username: 'client1',
        password: 'sdf12412',
        is_active: 1
      }
    });
    t.ok(result.statusCode === 201, 'successfully created Client');
    const sid = result.body.sid;

    /* query all entity */
    result = await request.get('/Clients', {
      auth: authAdmin,
      json: true,
    });
    t.ok(result.length === 1 , 'successfully queried all Clients');

    /* query one entity */
    result = await request.get(`/Clients/${sid}`, {
      auth: authAdmin,
      json: true,
    });
    t.ok(result.account_sid === account_sid , 'successfully retrieved Client by sid');
    t.ok(result.client_sid, 'successfully retrieved Client by sid');
    t.ok(result.username === 'client1', 'successfully retrieved Client by sid');
    t.ok(result.is_active === 1 , 'successfully retrieved Client by sid');
    t.ok(result.password === 'sXXXXXXX' , 'successfully retrieved Client by sid');

    /* update the entity */
    result = await request.put(`/Clients/${sid}`, {
      auth: authAdmin,
      json: true,
      resolveWithFullResponse: true,
      body: {
        is_active: 0
      }
    });
    t.ok(result.statusCode === 204, 'successfully updated Client');
    /* query one entity */
    result = await request.get(`/Clients/${sid}`, {
      auth: authAdmin,
      json: true,
    });
    t.ok(result.is_active === 0 , 'successfully updated Client');
    t.ok(result.password === 'sXXXXXXX' , 'successfully retrieved Client by sid');

    /* delete Client */
    result = await request.delete(`/Clients/${sid}`, {
      resolveWithFullResponse: true,
      simple: false,
      json: true,
      auth: authAdmin
    });
    t.ok(result.statusCode === 204, 'successfully deleted Clients');

    /* query all entity */
    result = await request.get('/Clients', {
      auth: authAdmin,
      json: true,
    });
    t.ok(result.length === 0 , 'successfully queried all Clients');

  } catch (err) {
    console.error(err);
    t.end(err);
  }
})
@@ -23,4 +23,6 @@ require('./system-information');
require('./lcr-carriers-set-entries');
require('./lcr-routes');
require('./lcrs');
require('./tts-cache');
require('./clients');
require('./docker_stop');
@@ -75,8 +75,29 @@ test('recent calls tests', async(t) => {
      auth: authUser,
      json: true,
    });
    console.log(JSON.stringify(result));
    t.ok(result.data.length === 5, 'retrieved 5 recent calls by account');
    //console.log({data: result.data}, 'Account recent calls');

    result = await request.get(`/Accounts/${account_sid}/RecentCalls?page=1&count=25&filter=1508`, {
      auth: authUser,
      json: true,
    });
    console.log(JSON.stringify(result));
    t.ok(result.data.length === 5, 'retrieved 5 recent calls by account and from');

    result = await request.get(`/Accounts/${account_sid}/RecentCalls?page=1&count=25&filter=15080`, {
      auth: authUser,
      json: true,
    });
    console.log(JSON.stringify(result));
    t.ok(result.data.length === 0, 'retrieved 0 recent calls by account and non-matching from');

    result = await request.get(`/Accounts/${account_sid}/RecentCalls?page=1&count=25&filter=1888`, {
      auth: authUser,
      json: true,
    });
    console.log(JSON.stringify(result));
    t.ok(result.data.length === 5, 'retrieved 5 recent calls by account and to');

    /* query last 7 days by service provider */
    result = await request.get(`/ServiceProviders/${service_provider_sid}/RecentCalls?page=1&count=25`, {
@@ -84,6 +105,19 @@ test('recent calls tests', async(t) => {
      json: true,
    });
    t.ok(result.data.length === 5, 'retrieved 5 recent calls by service provider');

    result = await request.get(`/ServiceProviders/${service_provider_sid}/RecentCalls?page=1&count=25&filter=1508`, {
      auth: authAdmin,
      json: true,
    });
    t.ok(result.data.length === 5, 'retrieved 5 recent calls by service provider and from');

    result = await request.get(`/ServiceProviders/${service_provider_sid}/RecentCalls?page=1&count=25&filter=1888`, {
      auth: authAdmin,
      json: true,
    });
    t.ok(result.data.length === 5, 'retrieved 5 recent calls by service provider and to');

    //console.log({data: result.data}, 'SP recent calls');

    /* pull sip traces and pcap from homer */
45 test/sbcs.js
@@ -17,6 +17,37 @@ test('sbc_addresses tests', async(t) => {
    let result;
    const service_provider_sid = await createServiceProvider(request);

    /* add service_provider user */
    const sp_name = 'sbc_service_provider';
    const sp_password = 'password';
    result = await request.post(`/Users`, {
      resolveWithFullResponse: true,
      json: true,
      auth: authAdmin,
      body: {
        name: sp_name,
        email: 'sbc_sp@jambonz.com',
        is_active: true,
        force_change: false,
        initial_password: sp_password,
        service_provider_sid,
      }
    });
    t.ok(result.statusCode === 201 && result.body.user_sid, 'SBC service_provider scope user created');
    const sbc_sp_user_sid = result.body.user_sid;

    result = await request.post('/login', {
      resolveWithFullResponse: true,
      json: true,
      body: {
        username: sp_name,
        password: sp_password,
      }
    });
    t.ok(result.statusCode === 200 && result.body.token, 'successfully logged in as sbc user');
    const authSbcSp = {bearer: result.body.token};


    /* add a service provider sbc */
    result = await request.post('/Sbcs', {
      resolveWithFullResponse: true,
@@ -38,6 +69,20 @@ test('sbc_addresses tests', async(t) => {
    //console.log(result.body)
    t.ok(result.body.length === 1 && result.body[0].ipv4 === '192.168.1.4', 'successfully retrieved service provider sbc');

    result = await request.get('/Sbcs', {
      resolveWithFullResponse: true,
      auth: authSbcSp,
      json: true
    });
    //console.log(result.body)
    t.ok(result.body.length === 1 && result.body[0].ipv4 === '192.168.1.4', 'successfully retrieved service provider sbc');

    await request.delete(`/Users/${sbc_sp_user_sid}`, {
      resolveWithFullResponse: true,
      json: true,
      auth: authAdmin,
    });

    await deleteObjectBySid(request, '/Sbcs', sid);
    await deleteObjectBySid(request, '/ServiceProviders', service_provider_sid);
@@ -27,7 +27,8 @@ test('sip gateway tests', async(t) => {
        ipv4: '192.168.1.1',
        netmask: 32,
        inbound: true,
        outbound: true
        outbound: true,
        protocol: 'tcp'
      }
    });
    t.ok(result.statusCode === 201, 'successfully created sip gateway ');
@@ -48,6 +49,7 @@ test('sip gateway tests', async(t) => {
    });
    //console.log(`result: ${JSON.stringify(result)}`);
    t.ok(result.ipv4 === '192.168.1.1' , 'successfully retrieved voip carrier by sid');
    t.ok(result.protocol === 'tcp' , 'successfully retrieved voip carrier by sid');


    /* update sip gateway */
@@ -58,7 +60,8 @@ test('sip gateway tests', async(t) => {
      body: {
        port: 5061,
        netmask:24,
        outbound: false
        outbound: false,
        protocol: 'udp'
      }
    });
    t.ok(result.statusCode === 204, 'successfully updated voip carrier');
56 test/tts-cache.js Normal file
@@ -0,0 +1,56 @@
const test = require('tape') ;
const jwt = require('jsonwebtoken');
const ADMIN_TOKEN = '38700987-c7a4-4685-a5bb-af378f9734de';
const authAdmin = {bearer: ADMIN_TOKEN};
const request = require('request-promise-native').defaults({
  baseUrl: 'http://127.0.0.1:3000/v1'
});
const crypto = require('crypto');
const logger = require('../lib/logger');

const {
  client,
} = require('@jambonz/speech-utils')({
  host: process.env.JAMBONES_REDIS_HOST,
  port: process.env.JAMBONES_REDIS_PORT || 6379
}, logger);

function makeSynthKey({account_sid = '', vendor, language, voice, engine = '', text}) {
  const hash = crypto.createHash('sha1');
  hash.update(`${language}:${vendor}:${voice}:${engine}:${text}`);
  return `tts${account_sid ? (':' + account_sid) : ''}:${hash.digest('hex')}`;
}

test('tts-cache', async(t) => {
  const app = require('../app');
  try {
    // create caches
    const minRecords = 8;
    for (const i in Array(minRecords).fill(0)) {
      await client.set(makeSynthKey({vendor: i, language: i, voice: i, engine: i, text: i}), i);
    }

    let result = await request.get('/TtsCache', {
      auth: authAdmin,
      json: true,
    });

    t.ok(result.size === minRecords, 'get cache correctly');

    result = await request.delete('/TtsCache', {
      auth: authAdmin,
      resolveWithFullResponse: true,
    });
    t.ok(result.statusCode === 204, 'successfully deleted application after removing phone number');

    result = await request.get('/TtsCache', {
      auth: authAdmin,
      json: true,
    });

    t.ok(result.size === 0, 'deleted cache successfully');
  } catch(err) {
    console.error(err);
    t.end(err);
  }
});