Compare commits


29 Commits

Author SHA1 Message Date
Quan HL
66388617e6 fix 2023-07-28 21:14:07 +07:00
Quan HL
9211c9461c fix 2023-07-28 20:51:27 +07:00
Quan HL
35435340c1 fix 2023-07-28 20:33:10 +07:00
Quan HL
8c9f377671 fix 2023-07-28 20:29:27 +07:00
Quan HL
6e7154a31b add tags to google storage 2023-07-28 19:39:06 +07:00
Quan HL
1f54b10d72 add metadata to google storage 2023-07-28 17:56:59 +07:00
Quan HL
7e1f85ec14 add metadata to google storage 2023-07-28 17:55:33 +07:00
Quan HL
e173c12d43 add metadata to google storage 2023-07-28 17:54:47 +07:00
Quan HL
a507f67fbc add google storage writablestream 2023-07-28 17:51:22 +07:00
Quan HL
f2a0c93ba8 add google storage writablestream 2023-07-28 17:48:44 +07:00
Quan HL
cb483a74c2 add google storage writablestream 2023-07-28 17:48:35 +07:00
Quan HL
1372f8fc4c feat google storage 2023-07-28 15:19:33 +07:00
Quan HL
e02269c004 feat google storage 2023-07-28 15:17:00 +07:00
Dave Horton
8e200251ca slight change to pino logger construction (#206)
* slight change to pino logger construction

* remove console.log in test

* added test logging back in

* test
2023-07-23 11:26:57 -04:00
Hoan Luu Huu
898f3aec4a update verb specification (#204) 2023-07-20 09:00:18 -04:00
Hoan Luu Huu
6f85752352 fix custom speech cannot update urls (#199) 2023-07-17 19:15:04 -04:00
dependabot[bot]
fe7cc9ad58 Bump fast-xml-parser, @aws-sdk/client-transcribe, @aws-sdk/client-s3 and @aws-sdk/client-polly (#192)
Bumps [fast-xml-parser](https://github.com/NaturalIntelligence/fast-xml-parser) to 4.2.5 and updates ancestor dependencies [fast-xml-parser](https://github.com/NaturalIntelligence/fast-xml-parser), [@aws-sdk/client-transcribe](https://github.com/aws/aws-sdk-js-v3/tree/HEAD/clients/client-transcribe), [@aws-sdk/client-s3](https://github.com/aws/aws-sdk-js-v3/tree/HEAD/clients/client-s3) and [@aws-sdk/client-polly](https://github.com/aws/aws-sdk-js-v3/tree/HEAD/clients/client-polly). These dependencies need to be updated together.


Updates `fast-xml-parser` from 4.2.4 to 4.2.5
- [Release notes](https://github.com/NaturalIntelligence/fast-xml-parser/releases)
- [Changelog](https://github.com/NaturalIntelligence/fast-xml-parser/blob/master/CHANGELOG.md)
- [Commits](https://github.com/NaturalIntelligence/fast-xml-parser/compare/v4.2.4...v4.2.5)

Updates `@aws-sdk/client-transcribe` from 3.348.0 to 3.359.0
- [Release notes](https://github.com/aws/aws-sdk-js-v3/releases)
- [Changelog](https://github.com/aws/aws-sdk-js-v3/blob/main/clients/client-transcribe/CHANGELOG.md)
- [Commits](https://github.com/aws/aws-sdk-js-v3/commits/v3.359.0/clients/client-transcribe)

Updates `@aws-sdk/client-s3` from 3.348.0 to 3.359.0
- [Release notes](https://github.com/aws/aws-sdk-js-v3/releases)
- [Changelog](https://github.com/aws/aws-sdk-js-v3/blob/main/clients/client-s3/CHANGELOG.md)
- [Commits](https://github.com/aws/aws-sdk-js-v3/commits/v3.359.0/clients/client-s3)

Updates `@aws-sdk/client-polly` from 3.348.0 to 3.359.0
- [Release notes](https://github.com/aws/aws-sdk-js-v3/releases)
- [Changelog](https://github.com/aws/aws-sdk-js-v3/blob/main/clients/client-polly/CHANGELOG.md)
- [Commits](https://github.com/aws/aws-sdk-js-v3/commits/v3.359.0/clients/client-polly)

---
updated-dependencies:
- dependency-name: fast-xml-parser
  dependency-type: indirect
- dependency-name: "@aws-sdk/client-transcribe"
  dependency-type: direct:production
- dependency-name: "@aws-sdk/client-s3"
  dependency-type: direct:production
- dependency-name: "@aws-sdk/client-polly"
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-07-05 10:44:54 +01:00
Hoan Luu Huu
1ffdfebdb2 multi srs (#189) 2023-07-05 08:15:46 +01:00
Dave Horton
dcf1895920 db_upgrade: add missing schema change to add sip_gateways.protocol 2023-07-03 13:38:17 +01:00
Hoan Luu Huu
c509b9d277 feat add recent call filter (#197)
* feat add recent call filter

* update timeseries

* add filter to swagger recent call
2023-07-03 08:25:05 +01:00
Hoan Luu Huu
eff8474997 fix sp user cannot fetch sbcs (#195) 2023-06-29 11:06:59 +01:00
Dave Horton
b4237beeeb minor logging 2023-06-28 09:22:17 +01:00
Dave Horton
0406e42c19 logging 2023-06-25 14:08:15 +01:00
Dave Horton
533cd2f47d minor logging 2023-06-25 14:01:30 +01:00
Dave Horton
742884cc72 fix parens in upgrade script 2023-06-25 13:07:42 +01:00
Dave Horton
9fccfa2a73 bugfix: 0.8.4 schema upgrades were not being applied 2023-06-25 12:58:21 +01:00
Dave Horton
3356b7302a 0.8.4 2023-06-24 20:23:28 +01:00
Hoan Luu Huu
9f533ed17c use fs-service-url redis cache set (#191) 2023-06-23 14:26:33 +01:00
Hoan Luu Huu
a0797a3a4c encrypt client password and fix upgrade db script (#188)
* encrypt client password and fix upgrade db script

* encrypt client password and fix upgrade db script

* obfuscate client password
2023-06-15 20:46:22 -04:00
19 changed files with 2169 additions and 1837 deletions

View File

@@ -143,7 +143,7 @@ const sql = {
8004: [
'alter table accounts add column record_all_calls BOOLEAN NOT NULL DEFAULT false',
'alter table accounts add column bucket_credential VARCHAR(8192)',
'alter table accounts add column record_format VARCHAR(16) NOT NULL DEFAULT `mp3`',
'alter table accounts add column record_format VARCHAR(16) NOT NULL DEFAULT \'mp3\'',
'alter table applications add column record_all_calls BOOLEAN NOT NULL DEFAULT false',
'alter table phone_numbers DROP INDEX number',
'create unique index phone_numbers_unique_idx_voip_carrier_number ON phone_numbers (number,voip_carrier_sid)',
@@ -157,7 +157,8 @@ const sql = {
PRIMARY KEY (client_sid)
)`,
'CREATE INDEX client_sid_idx ON clients (client_sid)',
'ALTER TABLE clients ADD CONSTRAINT account_sid_idxfk_13 FOREIGN KEY account_sid_idxfk_13 (account_sid) REFERENCES accounts (account_sid)'
'ALTER TABLE clients ADD CONSTRAINT account_sid_idxfk_13 FOREIGN KEY account_sid_idxfk_13 (account_sid) REFERENCES accounts (account_sid)',
'ALTER TABLE sip_gateways ADD COLUMN protocol ENUM(\'udp\',\'tcp\',\'tls\', \'tls/srtp\') DEFAULT \'udp\''
]
};
@@ -188,6 +189,7 @@ const doIt = async() => {
if (val < 7007) upgrades.push(...sql['7007']);
if (val < 8000) upgrades.push(...sql['8000']);
if (val < 8003) upgrades.push(...sql['8003']);
if (val < 8004) upgrades.push(...sql['8004']);
// perform all upgrades
logger.info({upgrades}, 'applying schema upgrades..');

View File

@@ -1,11 +1,7 @@
const opts = Object.assign({
timestamp: () => {
return `, "time": "${new Date().toISOString()}"`;
}
}, {
const opts = {
level: process.env.JAMBONES_LOGLEVEL || 'info'
});
const logger = require('pino')(opts);
};
const pino = require('pino');
const logger = pino(opts, pino.destination(1, {sync: false}));
module.exports = logger;

View File

@@ -0,0 +1,41 @@
const { Storage } = require('@google-cloud/storage');
const { Writable } = require('stream');
class GoogleStorageUploadStream extends Writable {
  constructor(logger, opts) {
    super(opts);
    this.logger = logger;
    this.metadata = opts.metadata;
    const storage = new Storage(opts.bucketCredential);
    this.gcsFile = storage.bucket(opts.bucketName).file(opts.Key);
    this.writeStream = this.gcsFile.createWriteStream();
    this.writeStream.on('error', (err) => this.logger.error(err));
    this.writeStream.on('finish', () => {
      this.logger.info('google storage Upload completed.');
      this._addMetadata();
    });
  }
  _write(chunk, encoding, callback) {
    this.writeStream.write(chunk, encoding, callback);
  }
  _final(callback) {
    this.writeStream.end();
    this.writeStream.once('finish', callback);
  }
  async _addMetadata() {
    try {
      await this.gcsFile.setMetadata({metadata: this.metadata});
      this.logger.info('Google storage Upload and metadata setting completed.');
    } catch (err) {
      this.logger.error(err, 'Google storage An error occurred while setting metadata');
    }
  }
}
module.exports = GoogleStorageUploadStream;
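For orientation, a minimal usage sketch of the new stream follows; the bucket name, key, metadata and credential values are placeholders, and in the real code path these options are assembled by getUploader in lib/record/utils.js (shown further down).

const pino = require('pino');
const GoogleStorageUploadStream = require('./google-storage');

const logger = pino();
const uploadStream = new GoogleStorageUploadStream(logger, {
  bucketName: 'example-recordings',                // placeholder bucket name
  Key: '2023/07/28/some-call-sid.mp3',             // object key in the year/month/day layout used by upload.js
  metadata: {accountSid: 'AC...', callSid: 'some-call-sid'},
  bucketCredential: {
    projectId: 'example-project',                  // normally taken from the parsed service-account key
    credentials: {client_email: 'svc@example-project.iam.gserviceaccount.com', private_key: '...'}
  }
});

// any readable stream of encoded audio can be piped in; in this changeset it is the PCM-to-MP3 encoder
require('fs').createReadStream('/tmp/audio.mp3').pipe(uploadStream);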

View File

@@ -1,17 +1,6 @@
const path = require('node:path');
async function record(logger, socket, url) {
const p = path.basename(url);
const idx = p.lastIndexOf('/');
const vendor = p.substring(idx + 1);
switch (vendor) {
case 'aws_s3':
return require('./s3')(logger, socket);
default:
logger.info(`unknown bucket vendor: ${vendor}`);
socket.send(`unknown bucket vendor: ${vendor}`);
socket.close();
}
async function record(logger, socket) {
return require('./upload')(logger, socket);
}
module.exports = record;

View File

@@ -1,8 +1,8 @@
const Account = require('../models/account');
const Websocket = require('ws');
const PCMToMP3Encoder = require('./encoder');
const S3MultipartUploadStream = require('./s3-multipart-upload-stream');
const wav = require('wav');
const { getUploader } = require('./utils');
async function upload(logger, socket) {
@@ -11,6 +11,7 @@ async function upload(logger, socket) {
try {
if (!isBinary && !socket._recvInitialMetadata) {
socket._recvInitialMetadata = true;
logger.debug(`initial metadata: ${data}`);
const obj = JSON.parse(data.toString());
logger.info({obj}, 'received JSON message from jambonz');
const {sampleRate, accountSid, callSid, direction, from, to,
@@ -42,19 +43,11 @@ async function upload(logger, socket) {
Key += `/${day.getDate().toString().padStart(2, '0')}/${callSid}.${account[0].record_format}`;
// Uploader
const uploaderOpts = {
bucketName: obj.name,
Key,
metadata,
bucketCredential: {
credentials: {
accessKeyId: obj.access_key_id,
secretAccessKey: obj.secret_access_key,
},
region: obj.region || 'us-east-1'
}
};
const uploadStream = new S3MultipartUploadStream(logger, uploaderOpts);
const uploadStream = getUploader(Key, metadata, obj, logger);
if (!uploadStream) {
logger.info('There is no available record uploader, close the socket.');
socket.close();
}
/**encoder */
let encoder;
@@ -77,7 +70,7 @@ async function upload(logger, socket) {
}
}
} catch (err) {
logger.error({err}, 'error parsing message during connection');
logger.error({err, data}, 'error parsing message during connection');
}
});
socket.on('error', function(err) {

40
lib/record/utils.js Normal file
View File

@@ -0,0 +1,40 @@
const GoogleStorageUploadStream = require('./google-storage');
const S3MultipartUploadStream = require('./s3-multipart-upload-stream');
const getUploader = (Key, metadata, bucket_credential, logger) => {
  const uploaderOpts = {
    bucketName: bucket_credential.name,
    Key,
    metadata
  };
  switch (bucket_credential.vendor) {
    case 'aws_s3':
      uploaderOpts.bucketCredential = {
        credentials: {
          accessKeyId: bucket_credential.access_key_id,
          secretAccessKey: bucket_credential.secret_access_key,
        },
        region: bucket_credential.region || 'us-east-1'
      };
      return new S3MultipartUploadStream(logger, uploaderOpts);
    case 'google':
      const serviceKey = JSON.parse(bucket_credential.service_key);
      uploaderOpts.bucketCredential = {
        projectId: serviceKey.project_id,
        credentials: {
          client_email: serviceKey.client_email,
          private_key: serviceKey.private_key
        }
      };
      return new GoogleStorageUploadStream(logger, uploaderOpts);
    default:
      logger.error(`unknown bucket vendor: ${bucket_credential.vendor}`);
      break;
  }
  return null;
};
module.exports = {
getUploader
};
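As a hedged illustration of the new 'google' branch, the decrypted bucket_credential handed to getUploader looks roughly like the object below; only the field names come from the code above, the values are invented.

const {getUploader} = require('./utils');
const pino = require('pino');

const bucket_credential = {
  vendor: 'google',
  name: 'example-recordings',
  service_key: JSON.stringify({
    project_id: 'example-project',
    client_email: 'svc@example-project.iam.gserviceaccount.com',
    private_key: '-----BEGIN PRIVATE KEY-----\n...'
  })
};

const uploadStream = getUploader('2023/07/28/some-call-sid.mp3', {callSid: 'some-call-sid'},
  bucket_credential, pino());
// returns a GoogleStorageUploadStream here, an S3MultipartUploadStream for 'aws_s3', or null for an unknown vendor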

View File

@@ -1,7 +1,7 @@
const router = require('express').Router();
const assert = require('assert');
const request = require('request');
const {DbErrorBadRequest, DbErrorForbidden, DbErrorUnprocessableRequest, DbError} = require('../../utils/errors');
const {DbErrorBadRequest, DbErrorForbidden, DbErrorUnprocessableRequest} = require('../../utils/errors');
const Account = require('../../models/account');
const Application = require('../../models/application');
const Webhook = require('../../models/webhook');
@@ -23,8 +23,8 @@ const {
} = require('./utils');
const short = require('short-uuid');
const VoipCarrier = require('../../models/voip-carrier');
const { encrypt, decrypt } = require('../../utils/encrypt-decrypt');
const { testAwsS3 } = require('../../utils/storage-utils');
const { encrypt } = require('../../utils/encrypt-decrypt');
const { testAwsS3, testGoogleStorage } = require('../../utils/storage-utils');
const translator = short();
let idx = 0;
@@ -41,20 +41,14 @@ const getFsUrl = async(logger, retrieveSet, setName) => {
logger.info('No available feature servers to handle createCall API request');
return ;
}
const ip = stripPort(fs[idx++ % fs.length]);
logger.info({fs}, `feature servers available for createCall API request, selecting ${ip}`);
return `http://${ip}:3000/v1/createCall`;
const f = fs[idx++ % fs.length];
logger.info({fs}, `feature servers available for createCall API request, selecting ${f}`);
return `${f}/v1/createCall`;
} catch (err) {
logger.error({err}, 'getFsUrl: error retrieving feature servers from redis');
}
};
const stripPort = (hostport) => {
const arr = /^(.*):(.*)$/.exec(hostport);
if (arr) return arr[1];
return hostport;
};
const validateRequest = async(req, account_sid) => {
try {
if (req.user.hasScope('admin')) {
@@ -547,7 +541,8 @@ function encryptBucketCredential(obj) {
name,
access_key_id,
secret_access_key,
tags
tags,
service_key
} = obj.bucket_credential;
switch (vendor) {
@@ -560,6 +555,11 @@ function encryptBucketCredential(obj) {
secret_access_key, tags});
obj.bucket_credential = encrypt(awsData);
break;
case 'google':
assert(service_key, 'invalid google storage bucket credential: service_key is required');
const googleData = JSON.stringify({vendor, name, service_key, tags});
obj.bucket_credential = encrypt(googleData);
break;
case 'none':
obj.bucket_credential = null;
break;
@@ -714,35 +714,20 @@ router.post('/:sid/BucketCredentialTest', async(req, res) => {
try {
const account_sid = parseAccountSid(req);
await validateRequest(req, account_sid);
let {vendor, name, region, access_key_id, secret_access_key} = req.body;
const {vendor, name, region, access_key_id, secret_access_key, service_key} = req.body;
const ret = {
status: 'not tested'
};
if (secret_access_key.endsWith('XXXXXX')) {
// this is when the password already saved in account
const service_provider_sid = req.user.hasServiceProviderAuth ? req.user.service_provider_sid : null;
const results = await Account.retrieve(account_sid, service_provider_sid);
if (results.length === 0) throw new DbError('Invalid Account Sid');
const {bucket_credential} = results[0];
if (bucket_credential) {
const o = JSON.parse(decrypt(bucket_credential));
vendor = o.vendor;
switch (vendor) {
case 'aws_s3':
name = o.name;
region = o.region;
access_key_id = o.access_key_id;
secret_access_key = o.secret_access_key;
break;
}
}
}
switch (vendor) {
case 'aws_s3':
await testAwsS3(logger, {vendor, name, region, access_key_id, secret_access_key});
ret.status = 'ok';
break;
case 'google':
await testGoogleStorage(logger, {vendor, name, service_key});
ret.status = 'ok';
break;
default:
throw new DbErrorBadRequest(`Does not support test for ${vendor}`);
}
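A hedged sketch of how a client might exercise the updated BucketCredentialTest route with the new vendor; the base URL, JWT and service key are placeholders, and global fetch (Node 18+) is assumed.

// Illustrative call to POST /Accounts/:sid/BucketCredentialTest for a google bucket.
const API_BASE = process.env.API_BASE_URL;        // assumed env var pointing at the API server
const testGoogleBucket = async(accountSid, jwt, serviceKeyJson) => {
  const res = await fetch(`${API_BASE}/Accounts/${accountSid}/BucketCredentialTest`, {
    method: 'POST',
    headers: {'Authorization': `Bearer ${jwt}`, 'Content-Type': 'application/json'},
    body: JSON.stringify({vendor: 'google', name: 'example-recordings', service_key: serviceKeyJson})
  });
  return res.json();                              // {status: 'ok'} when the credential works
};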
@@ -776,7 +761,7 @@ router.get('/:sid/ApiKeys', async(req, res) => {
*/
router.post('/:sid/Calls', async(req, res) => {
const {retrieveSet, logger} = req.app.locals;
const setName = `${(process.env.JAMBONES_CLUSTER_ID || 'default')}:active-fs`;
const setName = `${(process.env.JAMBONES_CLUSTER_ID || 'default')}:fs-service-url`;
const serviceUrl = await getFsUrl(logger, retrieveSet, setName);
if (!serviceUrl) {
@@ -937,7 +922,7 @@ router.post('/:sid/Messages', async(req, res) => {
const account_sid = parseAccountSid(req);
await validateRequest(req, account_sid);
const setName = `${(process.env.JAMBONES_CLUSTER_ID || 'default')}:active-fs`;
const setName = `${(process.env.JAMBONES_CLUSTER_ID || 'default')}:fs-service-url`;
const serviceUrl = await getFsUrl(logger, retrieveSet, setName);
if (!serviceUrl) res.json({msg: 'no available feature servers at this time'}).status(480);
await validateCreateMessage(logger, account_sid, req);

View File

@@ -4,7 +4,7 @@ const {DbErrorBadRequest} = require('../../utils/errors');
const {getHomerApiKey, getHomerSipTrace, getHomerPcap} = require('../../utils/homer-utils');
const {getJaegerTrace} = require('../../utils/jaeger-utils');
const Account = require('../../models/account');
const { getS3Object } = require('../../utils/storage-utils');
const { getS3Object, getGoogleStorageObject } = require('../../utils/storage-utils');
const parseAccountSid = (url) => {
const arr = /Accounts\/([^\/]*)/.exec(url);
@@ -22,7 +22,7 @@ router.get('/', async(req, res) => {
logger.debug({opts: req.query}, 'GET /RecentCalls');
const account_sid = parseAccountSid(req.originalUrl);
const service_provider_sid = account_sid ? null : parseServiceProviderSid(req.originalUrl);
const {page, count, trunk, direction, days, answered, start, end, from, to} = req.query || {};
const {page, count, trunk, direction, days, answered, start, end, filter} = req.query || {};
if (!page || page < 1) throw new DbErrorBadRequest('missing or invalid "page" query arg');
if (!count || count < 25 || count > 500) throw new DbErrorBadRequest('missing or invalid "count" query arg');
@@ -37,8 +37,7 @@ router.get('/', async(req, res) => {
answered,
start: days ? undefined : start,
end: days ? undefined : end,
from,
to
filter
});
res.status(200).json(data);
}
@@ -53,8 +52,7 @@ router.get('/', async(req, res) => {
answered,
start: days ? undefined : start,
end: days ? undefined : end,
from,
to
filter
});
res.status(200).json(data);
}
@@ -126,22 +124,26 @@ router.get('/:call_sid/record/:year/:month/:day/:format', async(req, res) => {
const r = await Account.retrieve(account_sid);
if (r.length === 0 || !r[0].bucket_credential) return res.sendStatus(404);
const {bucket_credential} = r[0];
const getOptions = {
...bucket_credential,
key: `${year}/${month}/${day}/${call_sid}.${format || 'mp3'}`
};
let stream;
switch (bucket_credential.vendor) {
case 'aws_s3':
const getS3Options = {
...bucket_credential,
key: `${year}/${month}/${day}/${call_sid}.${format || 'mp3'}`
};
const stream = await getS3Object(logger, getS3Options);
res.set({
'Content-Type': `audio/${format || 'mp3'}`
});
stream.pipe(res);
stream = await getS3Object(logger, getOptions);
break;
case 'google':
stream = await getGoogleStorageObject(logger, getOptions);
break;
default:
logger.error(`There is no handler for fetching record from ${bucket_credential.vendor}`);
return res.sendStatus(500);
}
res.set({
'Content-Type': `audio/${format || 'mp3'}`
});
stream.pipe(res);
} catch (err) {
logger.error({err}, ` error retrieving recording ${call_sid}`);
res.sendStatus(404);

View File

@@ -28,20 +28,13 @@ router.get('/', async(req, res) => {
if (req.user.hasAccountAuth) {
const [r] = await promisePool.query('SELECT * from accounts WHERE account_sid = ?', req.user.account_sid);
if (0 === r.length) throw new Error('invalid account_sid');
if (0 === r.length) throw new DbErrorBadRequest('invalid account_sid');
service_provider_sid = r[0].service_provider_sid;
}
if (req.user.hasServiceProviderAuth) {
const [r] = await promisePool.query(
'SELECT * from service_providers where service_provider_sid = ?',
service_provider_sid);
if (0 === r.length) throw new Error('invalid account_sid');
service_provider_sid = r[0].service_provider_sid;
if (!service_provider_sid) throw new DbErrorBadRequest('missing service_provider_sid in query');
service_provider_sid = req.user.service_provider_sid;
}
/** generally, we have a global set of SBCs that all accounts use.

View File

@@ -14,20 +14,14 @@ const getFsUrl = async(logger, retrieveSet, setName, provider) => {
logger.info('No available feature servers to handle createCall API request');
return ;
}
const ip = stripPort(fs[idx++ % fs.length]);
logger.info({fs}, `feature servers available for createCall API request, selecting ${ip}`);
return `http://${ip}:3000/v1/messaging/${provider}`;
const f = fs[idx++ % fs.length];
logger.info({fs}, `feature servers available for createCall API request, selecting ${f}`);
return `${f}/v1/messaging/${provider}`;
} catch (err) {
logger.error({err}, 'getFsUrl: error retrieving feature servers from redis');
}
};
const stripPort = (hostport) => {
const arr = /^(.*):(.*)$/.exec(hostport);
if (arr) return arr[1];
return hostport;
};
const doSendResponse = async(res, respondFn, body) => {
if (typeof respondFn === 'number') res.sendStatus(respondFn);
else if (typeof respondFn !== 'function') res.sendStatus(200);
@@ -44,7 +38,7 @@ router.post('/:provider', async(req, res) => {
lookupAppByPhoneNumber,
logger
} = req.app.locals;
const setName = `${process.env.JAMBONES_CLUSTER_ID || 'default'}:active-fs`;
const setName = `${process.env.JAMBONES_CLUSTER_ID || 'default'}:fs-service-url`;
logger.debug({path: req.path, body: req.body}, 'incomingSMS from carrier');
// search for provider module

View File

@@ -478,7 +478,9 @@ router.put('/:sid', async(req, res) => {
use_custom_tts,
custom_tts_endpoint,
use_custom_stt,
custom_stt_endpoint
custom_stt_endpoint,
custom_stt_url,
custom_tts_url
} = req.body;
const newCred = {
@@ -494,7 +496,9 @@ router.put('/:sid', async(req, res) => {
tts_region,
riva_server_uri,
nuance_stt_uri,
nuance_tts_uri
nuance_tts_uri,
custom_stt_url,
custom_tts_url
};
logger.info({o, newCred}, 'updating speech credential with this new credential');
obj.credential = encryptCredential(newCred);

View File

@@ -3047,17 +3047,11 @@ paths:
- inbound
- outbound
- in: query
name: from
name: filter
required: false
schema:
type: string
description: calling number to retrieve
- in: query
name: to
required: false
schema:
type: string
description: called number to retrieve
description: Filter value can be caller ID, callee ID or call Sid
get:
tags:
- Accounts

View File

@@ -9,7 +9,8 @@ const getJaegerTrace = async(logger, traceId) => {
try {
return await getJSON(`/api/v3/traces/${traceId}`);
} catch (err) {
logger.error({err}, `getJaegerTrace: Error retrieving spans for traceId ${traceId}`);
const url = `${process.env.JAEGER_BASE_URL}/api/traces/${traceId}`;
logger.error({err, traceId}, `getJaegerTrace: Error retrieving spans from ${url}`);
}
};

View File

@@ -0,0 +1 @@
Hello from Jambonz. This file was created by the Record All Calls bucket credential test.

View File

@@ -1,4 +1,42 @@
const { S3Client, PutObjectCommand, GetObjectCommand } = require('@aws-sdk/client-s3');
const {Storage} = require('@google-cloud/storage');
const fs = require('fs');
function testGoogleStorage(logger, opts) {
  return new Promise((resolve, reject) => {
    const serviceKey = JSON.parse(opts.service_key);
    const storage = new Storage({
      projectId: serviceKey.project_id,
      credentials: {
        client_email: serviceKey.client_email,
        private_key: serviceKey.private_key
      },
    });
    const blob = storage.bucket(opts.name).file('jambonz-sample.text');
    fs.createReadStream(`${__dirname}/jambonz-sample.text`)
      .pipe(blob.createWriteStream())
      .on('error', (err) => reject(err))
      .on('finish', () => resolve());
  });
}
async function getGoogleStorageObject(logger, opts) {
  const serviceKey = JSON.parse(opts.service_key);
  const storage = new Storage({
    projectId: serviceKey.project_id,
    credentials: {
      client_email: serviceKey.client_email,
      private_key: serviceKey.private_key
    },
  });
  const bucket = storage.bucket(opts.name);
  const file = bucket.file(opts.key);
  return file.createReadStream();
}
async function testAwsS3(logger, opts) {
const s3 = new S3Client({
@@ -38,5 +76,7 @@ async function getS3Object(logger, opts) {
module.exports = {
testAwsS3,
getS3Object
getS3Object,
testGoogleStorage,
getGoogleStorageObject
};
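A brief, hedged sketch of the two new helpers together: testGoogleStorage uploads the jambonz-sample.text fixture to verify the credential, and getGoogleStorageObject streams a stored recording back, as the RecentCalls route above does. Bucket name, key and service key are placeholders, and the require path assumes this file lives in lib/utils.

const pino = require('pino');
const {testGoogleStorage, getGoogleStorageObject} = require('./storage-utils');

const logger = pino();
const opts = {
  name: 'example-recordings',                     // bucket name (placeholder)
  service_key: process.env.GOOGLE_SERVICE_KEY,    // JSON service-account key (assumed env var)
  key: '2023/07/28/some-call-sid.mp3'             // only used by getGoogleStorageObject
};

(async() => {
  await testGoogleStorage(logger, opts);          // rejects if the bucket or credential is bad
  const stream = await getGoogleStorageObject(logger, opts);
  stream.pipe(process.stdout);                    // the route pipes this into the HTTP response instead
})();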

3599
package-lock.json generated

File diff suppressed because it is too large.

View File

@@ -1,6 +1,6 @@
{
"name": "jambonz-api-server",
"version": "0.8.3",
"version": "0.8.4",
"description": "",
"main": "app.js",
"scripts": {
@@ -19,15 +19,15 @@
"url": "https://github.com/jambonz/jambonz-api-server.git"
},
"dependencies": {
"@aws-sdk/client-transcribe": "^3.348.0",
"@aws-sdk/client-s3": "^3.348.0",
"@aws-sdk/client-transcribe": "^3.363.0",
"@aws-sdk/client-s3": "^3.363.0",
"@deepgram/sdk": "^1.21.0",
"@google-cloud/speech": "^5.2.0",
"@jambonz/db-helpers": "^0.9.0",
"@jambonz/realtimedb-helpers": "^0.8.6",
"@jambonz/speech-utils": "^0.0.15",
"@jambonz/time-series": "^0.2.7",
"@jambonz/verb-specifications": "^0.0.24",
"@jambonz/time-series": "^0.2.8",
"@jambonz/verb-specifications": "^0.0.26",
"@jambonz/lamejs": "^1.2.2",
"@soniox/soniox-node": "^1.1.1",
"argon2": "^0.30.3",
@@ -53,7 +53,8 @@
"uuid": "^8.3.2",
"yamljs": "^0.3.0",
"ws": "^8.12.1",
"wav": "^1.0.2"
"wav": "^1.0.2",
"@google-cloud/storage" : "^6.12.0"
},
"devDependencies": {
"eslint": "^8.39.0",

View File

@@ -75,32 +75,29 @@ test('recent calls tests', async(t) => {
auth: authUser,
json: true,
});
console.log(JSON.stringify(result));
t.ok(result.data.length === 5, 'retrieved 5 recent calls by account');
result = await request.get(`/Accounts/${account_sid}/RecentCalls?page=1&count=25&from=16`, {
auth: authUser,
json: true,
});
t.ok(result.data.length === 0, 'retrieved 5 recent calls by account and from');
result = await request.get(`/Accounts/${account_sid}/RecentCalls?page=1&count=25&from=15`, {
result = await request.get(`/Accounts/${account_sid}/RecentCalls?page=1&count=25&filter=1508`, {
auth: authUser,
json: true,
});
console.log(JSON.stringify(result));
t.ok(result.data.length === 5, 'retrieved 5 recent calls by account and from');
result = await request.get(`/Accounts/${account_sid}/RecentCalls?page=1&count=25&to=19`, {
result = await request.get(`/Accounts/${account_sid}/RecentCalls?page=1&count=25&filter=15080`, {
auth: authUser,
json: true,
});
t.ok(result.data.length === 0, 'retrieved 5 recent calls by account and to');
console.log(JSON.stringify(result));
t.ok(result.data.length === 0, 'retrieved 0 recent calls by account and non-matching from');
result = await request.get(`/Accounts/${account_sid}/RecentCalls?page=1&count=25&to=18`, {
result = await request.get(`/Accounts/${account_sid}/RecentCalls?page=1&count=25&filter=1888`, {
auth: authUser,
json: true,
});
console.log(JSON.stringify(result));
t.ok(result.data.length === 5, 'retrieved 5 recent calls by account and to');
//console.log({data: result.data}, 'Account recent calls');
/* query last 7 days by service provider */
result = await request.get(`/ServiceProviders/${service_provider_sid}/RecentCalls?page=1&count=25`, {
@@ -109,29 +106,18 @@ test('recent calls tests', async(t) => {
});
t.ok(result.data.length === 5, 'retrieved 5 recent calls by service provider');
result = await request.get(`/ServiceProviders/${service_provider_sid}/RecentCalls?page=1&count=25&from=16`, {
auth: authAdmin,
json: true,
});
t.ok(result.data.length === 0, 'retrieved 5 recent calls by service provider and from');
result = await request.get(`/ServiceProviders/${service_provider_sid}/RecentCalls?page=1&count=25&from=15`, {
result = await request.get(`/ServiceProviders/${service_provider_sid}/RecentCalls?page=1&count=25&filter=1508`, {
auth: authAdmin,
json: true,
});
t.ok(result.data.length === 5, 'retrieved 5 recent calls by service provider and from');
result = await request.get(`/ServiceProviders/${service_provider_sid}/RecentCalls?page=1&count=25&to=19`, {
auth: authAdmin,
json: true,
});
t.ok(result.data.length === 0, 'retrieved 5 recent calls by service provider and to');
result = await request.get(`/ServiceProviders/${service_provider_sid}/RecentCalls?page=1&count=25&to=18`, {
result = await request.get(`/ServiceProviders/${service_provider_sid}/RecentCalls?page=1&count=25&filter=1888`, {
auth: authAdmin,
json: true,
});
t.ok(result.data.length === 5, 'retrieved 5 recent calls by service provider and to');
//console.log({data: result.data}, 'SP recent calls');
/* pull sip traces and pcap from homer */

View File

@@ -17,6 +17,37 @@ test('sbc_addresses tests', async(t) => {
let result;
const service_provider_sid = await createServiceProvider(request);
/* add service_provider user */
const sp_name = 'sbc_service_provider';
const sp_password = 'password';
result = await request.post(`/Users`, {
resolveWithFullResponse: true,
json: true,
auth: authAdmin,
body: {
name: sp_name,
email: 'sbc_sp@jambonz.com',
is_active: true,
force_change: false,
initial_password: sp_password,
service_provider_sid,
}
});
t.ok(result.statusCode === 201 && result.body.user_sid, 'SBC service_provider scope user created');
const sbc_sp_user_sid = result.body.user_sid;
result = await request.post('/login', {
resolveWithFullResponse: true,
json: true,
body: {
username: sp_name,
password: sp_password,
}
});
t.ok(result.statusCode === 200 && result.body.token, 'successfully logged in as sbc user');
const authSbcSp = {bearer: result.body.token};
/* add a service provider sbc */
result = await request.post('/Sbcs', {
resolveWithFullResponse: true,
@@ -38,6 +69,20 @@ test('sbc_addresses tests', async(t) => {
//console.log(result.body)
t.ok(result.body.length === 1 && result.body[0].ipv4 === '192.168.1.4', 'successfully retrieved service provider sbc');
result = await request.get('/Sbcs', {
resolveWithFullResponse: true,
auth: authSbcSp,
json: true
});
//console.log(result.body)
t.ok(result.body.length === 1 && result.body[0].ipv4 === '192.168.1.4', 'successfully retrieved service provider sbc');
await request.delete(`/Users/${sbc_sp_user_sid}`, {
resolveWithFullResponse: true,
json: true,
auth: authAdmin,
});
await deleteObjectBySid(request, '/Sbcs', sid);
await deleteObjectBySid(request, '/ServiceProviders', service_provider_sid);