Mirror of https://github.com/jambonz/jambonz-api-server.git (synced 2026-01-25 02:08:24 +00:00)

Compare commits: fix/client ... feat/googl (29 commits)
| SHA1 |
|---|
| 66388617e6 |
| 9211c9461c |
| 35435340c1 |
| 8c9f377671 |
| 6e7154a31b |
| 1f54b10d72 |
| 7e1f85ec14 |
| e173c12d43 |
| a507f67fbc |
| f2a0c93ba8 |
| cb483a74c2 |
| 1372f8fc4c |
| e02269c004 |
| 8e200251ca |
| 898f3aec4a |
| 6f85752352 |
| fe7cc9ad58 |
| 1ffdfebdb2 |
| dcf1895920 |
| c509b9d277 |
| eff8474997 |
| b4237beeeb |
| 0406e42c19 |
| 533cd2f47d |
| 742884cc72 |
| 9fccfa2a73 |
| 3356b7302a |
| 9f533ed17c |
| a0797a3a4c |
@@ -143,7 +143,7 @@ const sql = {
   8004: [
     'alter table accounts add column record_all_calls BOOLEAN NOT NULL DEFAULT false',
     'alter table accounts add column bucket_credential VARCHAR(8192)',
-    'alter table accounts add column record_format VARCHAR(16) NOT NULL DEFAULT `mp3`',
+    'alter table accounts add column record_format VARCHAR(16) NOT NULL DEFAULT \'mp3\'',
     'alter table applications add column record_all_calls BOOLEAN NOT NULL DEFAULT false',
     'alter table phone_numbers DROP INDEX number',
     'create unique index phone_numbers_unique_idx_voip_carrier_number ON phone_numbers (number,voip_carrier_sid)',
@@ -157,7 +157,8 @@ const sql = {
       PRIMARY KEY (client_sid)
     )`,
     'CREATE INDEX client_sid_idx ON clients (client_sid)',
-    'ALTER TABLE clients ADD CONSTRAINT account_sid_idxfk_13 FOREIGN KEY account_sid_idxfk_13 (account_sid) REFERENCES accounts (account_sid)'
+    'ALTER TABLE clients ADD CONSTRAINT account_sid_idxfk_13 FOREIGN KEY account_sid_idxfk_13 (account_sid) REFERENCES accounts (account_sid)',
+    'ALTER TABLE sip_gateways ADD COLUMN protocol ENUM(\'udp\',\'tcp\',\'tls\', \'tls/srtp\') DEFAULT \'udp\''
   ]
 };
 
@@ -188,6 +189,7 @@ const doIt = async() => {
   if (val < 7007) upgrades.push(...sql['7007']);
   if (val < 8000) upgrades.push(...sql['8000']);
   if (val < 8003) upgrades.push(...sql['8003']);
+  if (val < 8004) upgrades.push(...sql['8004']);
 
   // perform all upgrades
   logger.info({upgrades}, 'applying schema upgrades..');
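The hunks above add an `8004` upgrade set and gate it on the stored schema version. A minimal sketch of that pattern, assuming hypothetical `getCurrentVersion` and `execSql` helpers (the repo's own wiring differs):

```js
// Illustrative only: getCurrentVersion and execSql are assumed helpers, not the
// repo's actual functions. Statements are grouped by target schema version and
// applied when the stored version is older.
const sql = {
  8004: [
    'alter table accounts add column record_all_calls BOOLEAN NOT NULL DEFAULT false',
    'alter table accounts add column bucket_credential VARCHAR(8192)'
  ]
};

const doUpgrade = async(getCurrentVersion, execSql) => {
  const val = await getCurrentVersion();            // e.g. read from a schema-version table
  const upgrades = [];
  if (val < 8004) upgrades.push(...sql['8004']);    // the gate added in this diff
  for (const stmt of upgrades) await execSql(stmt); // apply in order
};
```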
@@ -1,11 +1,7 @@
-const opts = Object.assign({
-  timestamp: () => {
-    return `, "time": "${new Date().toISOString()}"`;
-  }
-}, {
+const opts = {
   level: process.env.JAMBONES_LOGLEVEL || 'info'
-});
-
-const logger = require('pino')(opts);
+};
+const pino = require('pino');
+const logger = pino(opts, pino.destination(1, {sync: false}));
 
 module.exports = logger;
lib/record/google-storage.js (new file, 41 lines)
@@ -0,0 +1,41 @@
+const { Storage } = require('@google-cloud/storage');
+const { Writable } = require('stream');
+
+class GoogleStorageUploadStream extends Writable {
+
+  constructor(logger, opts) {
+    super(opts);
+    this.logger = logger;
+    this.metadata = opts.metadata;
+
+    const storage = new Storage(opts.bucketCredential);
+    this.gcsFile = storage.bucket(opts.bucketName).file(opts.Key);
+    this.writeStream = this.gcsFile.createWriteStream();
+
+    this.writeStream.on('error', (err) => this.logger.error(err));
+    this.writeStream.on('finish', () => {
+      this.logger.info('google storage Upload completed.');
+      this._addMetadata();
+    });
+  }
+
+  _write(chunk, encoding, callback) {
+    this.writeStream.write(chunk, encoding, callback);
+  }
+
+  _final(callback) {
+    this.writeStream.end();
+    this.writeStream.once('finish', callback);
+  }
+
+  async _addMetadata() {
+    try {
+      await this.gcsFile.setMetadata({metadata: this.metadata});
+      this.logger.info('Google storage Upload and metadata setting completed.');
+    } catch (err) {
+      this.logger.error(err, 'Google storage An error occurred while setting metadata');
+    }
+  }
+}
+
+module.exports = GoogleStorageUploadStream;
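A quick usage sketch of the new upload stream: any readable stream of encoded audio can be piped into it, and it writes the chunks to the GCS object and sets the metadata when the upload finishes. The bucket name, key, file path, and service-account values below are placeholders, not values from the repo.

```js
// Illustrative only: bucket name, key, and credentials are placeholders.
const fs = require('fs');
const pino = require('pino');
const GoogleStorageUploadStream = require('./lib/record/google-storage');

const logger = pino();
const uploadStream = new GoogleStorageUploadStream(logger, {
  bucketName: 'my-recordings-bucket',
  Key: '2023/07/01/some-call-sid.mp3',
  metadata: {callSid: 'some-call-sid', direction: 'inbound'},
  bucketCredential: {
    projectId: 'my-gcp-project',
    credentials: {
      client_email: 'uploader@my-gcp-project.iam.gserviceaccount.com',
      private_key: '-----BEGIN PRIVATE KEY-----\n...'
    }
  }
});

// Pipe a locally encoded recording into the bucket.
fs.createReadStream('/tmp/some-call-sid.mp3').pipe(uploadStream);
```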
@@ -1,17 +1,6 @@
 
-const path = require('node:path');
-async function record(logger, socket, url) {
-  const p = path.basename(url);
-  const idx = p.lastIndexOf('/');
-  const vendor = p.substring(idx + 1);
-  switch (vendor) {
-    case 'aws_s3':
-      return require('./s3')(logger, socket);
-    default:
-      logger.info(`unknown bucket vendor: ${vendor}`);
-      socket.send(`unknown bucket vendor: ${vendor}`);
-      socket.close();
-  }
-}
+async function record(logger, socket) {
+  return require('./upload')(logger, socket);
+}
 
 module.exports = record;
@@ -1,8 +1,8 @@
 const Account = require('../models/account');
 const Websocket = require('ws');
 const PCMToMP3Encoder = require('./encoder');
-const S3MultipartUploadStream = require('./s3-multipart-upload-stream');
 const wav = require('wav');
+const { getUploader } = require('./utils');
 
 async function upload(logger, socket) {
@@ -11,6 +11,7 @@ async function upload(logger, socket) {
     try {
       if (!isBinary && !socket._recvInitialMetadata) {
         socket._recvInitialMetadata = true;
+        logger.debug(`initial metadata: ${data}`);
         const obj = JSON.parse(data.toString());
         logger.info({obj}, 'received JSON message from jambonz');
         const {sampleRate, accountSid, callSid, direction, from, to,
@@ -42,19 +43,11 @@ async function upload(logger, socket) {
         Key += `/${day.getDate().toString().padStart(2, '0')}/${callSid}.${account[0].record_format}`;
 
         // Uploader
-        const uploaderOpts = {
-          bucketName: obj.name,
-          Key,
-          metadata,
-          bucketCredential: {
-            credentials: {
-              accessKeyId: obj.access_key_id,
-              secretAccessKey: obj.secret_access_key,
-            },
-            region: obj.region || 'us-east-1'
-          }
-        };
-        const uploadStream = new S3MultipartUploadStream(logger, uploaderOpts);
+        const uploadStream = getUploader(Key, metadata, obj, logger);
+        if (!uploadStream) {
+          logger.info('There is no available record uploader, close the socket.');
+          socket.close();
+        }
 
         /**encoder */
         let encoder;
@@ -77,7 +70,7 @@ async function upload(logger, socket) {
         }
       }
     } catch (err) {
-      logger.error({err}, 'error parsing message during connection');
+      logger.error({err, data}, 'error parsing message during connection');
     }
   });
   socket.on('error', function(err) {
lib/record/utils.js (new file, 40 lines)
@@ -0,0 +1,40 @@
+const GoogleStorageUploadStream = require('./google-storage');
+const S3MultipartUploadStream = require('./s3-multipart-upload-stream');
+
+const getUploader = (Key, metadata, bucket_credential, logger) => {
+  const uploaderOpts = {
+    bucketName: bucket_credential.name,
+    Key,
+    metadata
+  };
+  switch (bucket_credential.vendor) {
+    case 'aws_s3':
+      uploaderOpts.bucketCredential = {
+        credentials: {
+          accessKeyId: bucket_credential.access_key_id,
+          secretAccessKey: bucket_credential.secret_access_key,
+        },
+        region: bucket_credential.region || 'us-east-1'
+      };
+      return new S3MultipartUploadStream(logger, uploaderOpts);
+    case 'google':
+      const serviceKey = JSON.parse(bucket_credential.service_key);
+      uploaderOpts.bucketCredential = {
+        projectId: serviceKey.project_id,
+        credentials: {
+          client_email: serviceKey.client_email,
+          private_key: serviceKey.private_key
+        }
+      };
+      return new GoogleStorageUploadStream(logger, uploaderOpts);
+
+    default:
+      logger.error(`unknown bucket vendor: ${bucket_credential.vendor}`);
+      break;
+  }
+  return null;
+};
+
+module.exports = {
+  getUploader
+};
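A hedged usage sketch of `getUploader`: the third argument is the decrypted bucket_credential object stored on the account, so the bucket name, key, and service-account values below are placeholders, and the bare pino logger stands in for the app's configured logger.

```js
// Illustrative only: credential values are placeholders.
const pino = require('pino');
const { getUploader } = require('./lib/record/utils');

const logger = pino();
const bucketCredential = {
  vendor: 'google',                      // or 'aws_s3'
  name: 'my-recordings-bucket',
  service_key: JSON.stringify({          // Google service-account key JSON, stored encrypted on the account
    project_id: 'my-gcp-project',
    client_email: 'uploader@my-gcp-project.iam.gserviceaccount.com',
    private_key: '-----BEGIN PRIVATE KEY-----\n...'
  })
};

const uploadStream = getUploader('2023/07/01/some-call-sid.mp3',
  {callSid: 'some-call-sid'}, bucketCredential, logger);
if (!uploadStream) {
  logger.error('no uploader available for this vendor');
}
```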
@@ -1,7 +1,7 @@
 const router = require('express').Router();
 const assert = require('assert');
 const request = require('request');
-const {DbErrorBadRequest, DbErrorForbidden, DbErrorUnprocessableRequest, DbError} = require('../../utils/errors');
+const {DbErrorBadRequest, DbErrorForbidden, DbErrorUnprocessableRequest} = require('../../utils/errors');
 const Account = require('../../models/account');
 const Application = require('../../models/application');
 const Webhook = require('../../models/webhook');
@@ -23,8 +23,8 @@ const {
 } = require('./utils');
 const short = require('short-uuid');
 const VoipCarrier = require('../../models/voip-carrier');
-const { encrypt, decrypt } = require('../../utils/encrypt-decrypt');
-const { testAwsS3 } = require('../../utils/storage-utils');
+const { encrypt } = require('../../utils/encrypt-decrypt');
+const { testAwsS3, testGoogleStorage } = require('../../utils/storage-utils');
 const translator = short();
 
 let idx = 0;
@@ -41,20 +41,14 @@ const getFsUrl = async(logger, retrieveSet, setName) => {
       logger.info('No available feature servers to handle createCall API request');
       return ;
     }
-    const ip = stripPort(fs[idx++ % fs.length]);
-    logger.info({fs}, `feature servers available for createCall API request, selecting ${ip}`);
-    return `http://${ip}:3000/v1/createCall`;
+    const f = fs[idx++ % fs.length];
+    logger.info({fs}, `feature servers available for createCall API request, selecting ${f}`);
+    return `${f}/v1/createCall`;
   } catch (err) {
     logger.error({err}, 'getFsUrl: error retreving feature servers from redis');
   }
 };
 
-const stripPort = (hostport) => {
-  const arr = /^(.*):(.*)$/.exec(hostport);
-  if (arr) return arr[1];
-  return hostport;
-};
-
 const validateRequest = async(req, account_sid) => {
   try {
     if (req.user.hasScope('admin')) {
@@ -547,7 +541,8 @@ function encryptBucketCredential(obj) {
     name,
     access_key_id,
     secret_access_key,
-    tags
+    tags,
+    service_key
   } = obj.bucket_credential;
 
   switch (vendor) {
@@ -560,6 +555,11 @@ function encryptBucketCredential(obj) {
         secret_access_key, tags});
       obj.bucket_credential = encrypt(awsData);
       break;
+    case 'google':
+      assert(service_key, 'invalid aws S3 bucket credential: service_key is required');
+      const googleData = JSON.stringify({vendor, name, service_key, tags});
+      obj.bucket_credential = encrypt(googleData);
+      break;
     case 'none':
      obj.bucket_credential = null;
      break;
@@ -714,35 +714,20 @@ router.post('/:sid/BucketCredentialTest', async(req, res) => {
   try {
     const account_sid = parseAccountSid(req);
     await validateRequest(req, account_sid);
-    let {vendor, name, region, access_key_id, secret_access_key} = req.body;
+    const {vendor, name, region, access_key_id, secret_access_key, service_key} = req.body;
     const ret = {
       status: 'not tested'
     };
 
-    if (secret_access_key.endsWith('XXXXXX')) {
-      // this is when the password already saved in account
-      const service_provider_sid = req.user.hasServiceProviderAuth ? req.user.service_provider_sid : null;
-      const results = await Account.retrieve(account_sid, service_provider_sid);
-      if (results.length === 0) throw new DbError('Invalid Account Sid');
-      const {bucket_credential} = results[0];
-      if (bucket_credential) {
-        const o = JSON.parse(decrypt(bucket_credential));
-        vendor = o.vendor;
-        switch (vendor) {
-          case 'aws_s3':
-            name = o.name;
-            region = o.region;
-            access_key_id = o.access_key_id;
-            secret_access_key = o.secret_access_key;
-            break;
-        }
-      }
-    }
     switch (vendor) {
       case 'aws_s3':
         await testAwsS3(logger, {vendor, name, region, access_key_id, secret_access_key});
         ret.status = 'ok';
         break;
+      case 'google':
+        await testGoogleStorage(logger, {vendor, name, service_key});
+        ret.status = 'ok';
+        break;
       default:
         throw new DbErrorBadRequest(`Does not support test for ${vendor}`);
     }
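With the `google` case in place, the credential test endpoint accepts a service-account key in the request body. A sketch of such a call; the base URL, mount path, bearer token, account SID, and key values are placeholders and assume the route is mounted under /Accounts as elsewhere in this API:

```js
// Illustrative request to the BucketCredentialTest endpoint; all values are placeholders.
const baseUrl = 'http://localhost:3000/v1';
const accountSid = 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee';

const body = {
  vendor: 'google',
  name: 'my-recordings-bucket',
  service_key: JSON.stringify({
    project_id: 'my-gcp-project',
    client_email: 'uploader@my-gcp-project.iam.gserviceaccount.com',
    private_key: '-----BEGIN PRIVATE KEY-----\n...'
  })
};

fetch(`${baseUrl}/Accounts/${accountSid}/BucketCredentialTest`, {
  method: 'POST',
  headers: {
    'Authorization': 'Bearer <api-token>',
    'Content-Type': 'application/json'
  },
  body: JSON.stringify(body)
}).then(async(res) => console.log(res.status, await res.json())); // expect {status: 'ok'} on success
```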
@@ -776,7 +761,7 @@ router.get('/:sid/ApiKeys', async(req, res) => {
 */
 router.post('/:sid/Calls', async(req, res) => {
   const {retrieveSet, logger} = req.app.locals;
-  const setName = `${(process.env.JAMBONES_CLUSTER_ID || 'default')}:active-fs`;
+  const setName = `${(process.env.JAMBONES_CLUSTER_ID || 'default')}:fs-service-url`;
   const serviceUrl = await getFsUrl(logger, retrieveSet, setName);
 
   if (!serviceUrl) {
@@ -937,7 +922,7 @@ router.post('/:sid/Messages', async(req, res) => {
     const account_sid = parseAccountSid(req);
     await validateRequest(req, account_sid);
 
-    const setName = `${(process.env.JAMBONES_CLUSTER_ID || 'default')}:active-fs`;
+    const setName = `${(process.env.JAMBONES_CLUSTER_ID || 'default')}:fs-service-url`;
     const serviceUrl = await getFsUrl(logger, retrieveSet, setName);
     if (!serviceUrl) res.json({msg: 'no available feature servers at this time'}).status(480);
     await validateCreateMessage(logger, account_sid, req);
@@ -4,7 +4,7 @@ const {DbErrorBadRequest} = require('../../utils/errors');
 const {getHomerApiKey, getHomerSipTrace, getHomerPcap} = require('../../utils/homer-utils');
 const {getJaegerTrace} = require('../../utils/jaeger-utils');
 const Account = require('../../models/account');
-const { getS3Object } = require('../../utils/storage-utils');
+const { getS3Object, getGoogleStorageObject } = require('../../utils/storage-utils');
 
 const parseAccountSid = (url) => {
   const arr = /Accounts\/([^\/]*)/.exec(url);
@@ -22,7 +22,7 @@ router.get('/', async(req, res) => {
   logger.debug({opts: req.query}, 'GET /RecentCalls');
   const account_sid = parseAccountSid(req.originalUrl);
   const service_provider_sid = account_sid ? null : parseServiceProviderSid(req.originalUrl);
-  const {page, count, trunk, direction, days, answered, start, end, from, to} = req.query || {};
+  const {page, count, trunk, direction, days, answered, start, end, filter} = req.query || {};
   if (!page || page < 1) throw new DbErrorBadRequest('missing or invalid "page" query arg');
   if (!count || count < 25 || count > 500) throw new DbErrorBadRequest('missing or invalid "count" query arg');
 
@@ -37,8 +37,7 @@ router.get('/', async(req, res) => {
       answered,
       start: days ? undefined : start,
       end: days ? undefined : end,
-      from,
-      to
+      filter
     });
     res.status(200).json(data);
   }
@@ -53,8 +52,7 @@ router.get('/', async(req, res) => {
       answered,
       start: days ? undefined : start,
       end: days ? undefined : end,
-      from,
-      to
+      filter
     });
     res.status(200).json(data);
   }
@@ -126,22 +124,26 @@ router.get('/:call_sid/record/:year/:month/:day/:format', async(req, res) => {
     const r = await Account.retrieve(account_sid);
     if (r.length === 0 || !r[0].bucket_credential) return res.sendStatus(404);
     const {bucket_credential} = r[0];
-    const getS3Options = {
-      ...bucket_credential,
-      key: `${year}/${month}/${day}/${call_sid}.${format || 'mp3'}`
-    };
-    const stream = await getS3Object(logger, getS3Options);
-    res.set({
-      'Content-Type': `audio/${format || 'mp3'}`
-    });
-    stream.pipe(res);
+    const getOptions = {
+      ...bucket_credential,
+      key: `${year}/${month}/${day}/${call_sid}.${format || 'mp3'}`
+    };
+    let stream;
+    switch (bucket_credential.vendor) {
+      case 'aws_s3':
+        stream = await getS3Object(logger, getOptions);
+        break;
+      case 'google':
+        stream = await getGoogleStorageObject(logger, getOptions);
+        break;
+      default:
+        logger.error(`There is no handler for fetching record from ${bucket_credential.vendor}`);
+        return res.sendStatus(500);
+    }
+    res.set({
+      'Content-Type': `audio/${format || 'mp3'}`
+    });
+    stream.pipe(res);
   } catch (err) {
     logger.error({err}, ` error retrieving recording ${call_sid}`);
     res.sendStatus(404);
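The RecentCalls query now takes a single `filter` value, matched against caller ID, callee ID, or call SID, instead of separate `from`/`to` args. A sketch of the updated query from a client; the base URL, token, and account SID are placeholders:

```js
// Illustrative only: base URL, bearer token, and account SID are placeholders.
const baseUrl = 'http://localhost:3000/v1';
const accountSid = 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee';

// previously: ...RecentCalls?page=1&count=25&from=1508 (with a separate "to" arg);
// now a single filter matched against caller ID, callee ID, or call SID:
const url = `${baseUrl}/Accounts/${accountSid}/RecentCalls?page=1&count=25&filter=1508`;

fetch(url, {headers: {'Authorization': 'Bearer <api-token>'}})
  .then((res) => res.json())
  .then((body) => console.log(`${body.data.length} matching calls`));
```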
@@ -28,20 +28,13 @@ router.get('/', async(req, res) => {
 
   if (req.user.hasAccountAuth) {
     const [r] = await promisePool.query('SELECT * from accounts WHERE account_sid = ?', req.user.account_sid);
-    if (0 === r.length) throw new Error('invalid account_sid');
+    if (0 === r.length) throw new DbErrorBadRequest('invalid account_sid');
 
     service_provider_sid = r[0].service_provider_sid;
   }
 
   if (req.user.hasServiceProviderAuth) {
-    const [r] = await promisePool.query(
-      'SELECT * from service_providers where service_provider_sid = ?',
-      service_provider_sid);
-    if (0 === r.length) throw new Error('invalid account_sid');
-
-    service_provider_sid = r[0].service_provider_sid;
-
-    if (!service_provider_sid) throw new DbErrorBadRequest('missing service_provider_sid in query');
+    service_provider_sid = req.user.service_provider_sid;
   }
 
   /** generally, we have a global set of SBCs that all accounts use.
@@ -14,20 +14,14 @@ const getFsUrl = async(logger, retrieveSet, setName, provider) => {
       logger.info('No available feature servers to handle createCall API request');
       return ;
     }
-    const ip = stripPort(fs[idx++ % fs.length]);
-    logger.info({fs}, `feature servers available for createCall API request, selecting ${ip}`);
-    return `http://${ip}:3000/v1/messaging/${provider}`;
+    const f = fs[idx++ % fs.length];
+    logger.info({fs}, `feature servers available for createCall API request, selecting ${f}`);
+    return `${f}/v1/messaging/${provider}`;
   } catch (err) {
     logger.error({err}, 'getFsUrl: error retreving feature servers from redis');
   }
 };
 
-const stripPort = (hostport) => {
-  const arr = /^(.*):(.*)$/.exec(hostport);
-  if (arr) return arr[1];
-  return hostport;
-};
-
 const doSendResponse = async(res, respondFn, body) => {
   if (typeof respondFn === 'number') res.sendStatus(respondFn);
   else if (typeof respondFn !== 'function') res.sendStatus(200);
@@ -44,7 +38,7 @@ router.post('/:provider', async(req, res) => {
     lookupAppByPhoneNumber,
     logger
   } = req.app.locals;
-  const setName = `${process.env.JAMBONES_CLUSTER_ID || 'default'}:active-fs`;
+  const setName = `${process.env.JAMBONES_CLUSTER_ID || 'default'}:fs-service-url`;
   logger.debug({path: req.path, body: req.body}, 'incomingSMS from carrier');
 
   // search for provider module
@@ -478,7 +478,9 @@ router.put('/:sid', async(req, res) => {
     use_custom_tts,
     custom_tts_endpoint,
     use_custom_stt,
-    custom_stt_endpoint
+    custom_stt_endpoint,
+    custom_stt_url,
+    custom_tts_url
   } = req.body;
 
   const newCred = {
@@ -494,7 +496,9 @@ router.put('/:sid', async(req, res) => {
     tts_region,
     riva_server_uri,
     nuance_stt_uri,
-    nuance_tts_uri
+    nuance_tts_uri,
+    custom_stt_url,
+    custom_tts_url
   };
   logger.info({o, newCred}, 'updating speech credential with this new credential');
   obj.credential = encryptCredential(newCred);
@@ -3047,17 +3047,11 @@ paths:
             - inbound
             - outbound
         - in: query
-          name: from
+          name: filter
          required: false
           schema:
             type: string
-          description: calling number to retrieve
-        - in: query
-          name: to
-          required: false
-          schema:
-            type: string
-          description: called number to retrieve
+          description: Filter value can be caller ID, callee ID or call Sid
       get:
         tags:
           - Accounts
@@ -9,7 +9,8 @@ const getJaegerTrace = async(logger, traceId) => {
   try {
     return await getJSON(`/api/v3/traces/${traceId}`);
   } catch (err) {
-    logger.error({err}, `getJaegerTrace: Error retrieving spans for traceId ${traceId}`);
+    const url = `${process.env.JAEGER_BASE_URL}/api/traces/${traceId}`;
+    logger.error({err, traceId}, `getJaegerTrace: Error retrieving spans from ${url}`);
   }
 };
lib/utils/jambonz-sample.text (new file, 1 line)
@@ -0,0 +1 @@
+Hello From Jambonz. This file was created because Record all call bucket credential test.
@@ -1,4 +1,42 @@
 const { S3Client, PutObjectCommand, GetObjectCommand } = require('@aws-sdk/client-s3');
+const {Storage} = require('@google-cloud/storage');
 const fs = require('fs');
 
+function testGoogleStorage(logger, opts) {
+  return new Promise((resolve, reject) => {
+    const serviceKey = JSON.parse(opts.service_key);
+    const storage = new Storage({
+      projectId: serviceKey.project_id,
+      credentials: {
+        client_email: serviceKey.client_email,
+        private_key: serviceKey.private_key
+      },
+    });
+
+    const blob = storage.bucket(opts.name).file('jambonz-sample.text');
+
+    fs.createReadStream(`${__dirname}/jambonz-sample.text`)
+      .pipe(blob.createWriteStream())
+      .on('error', (err) => reject(err))
+      .on('finish', () => resolve());
+  });
+}
+
+async function getGoogleStorageObject(logger, opts) {
+  const serviceKey = JSON.parse(opts.service_key);
+  const storage = new Storage({
+    projectId: serviceKey.project_id,
+    credentials: {
+      client_email: serviceKey.client_email,
+      private_key: serviceKey.private_key
+    },
+  });
+
+  const bucket = storage.bucket(opts.name);
+  const file = bucket.file(opts.key);
+
+  return file.createReadStream();
+}
+
 async function testAwsS3(logger, opts) {
   const s3 = new S3Client({
@@ -38,5 +76,7 @@ async function getS3Object(logger, opts) {
 
 module.exports = {
   testAwsS3,
-  getS3Object
+  getS3Object,
+  testGoogleStorage,
+  getGoogleStorageObject
 };
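A brief usage sketch of the two new helpers: `opts` mirrors the decrypted bucket credential (bucket name plus service-account key) with an added object key, as the RecentCalls route builds it. The credential and key values here are placeholders.

```js
// Illustrative only: the service-account key and bucket name are placeholders.
const pino = require('pino');
const { testGoogleStorage, getGoogleStorageObject } = require('./lib/utils/storage-utils');

const logger = pino();
const opts = {
  name: 'my-recordings-bucket',
  service_key: JSON.stringify({
    project_id: 'my-gcp-project',
    client_email: 'uploader@my-gcp-project.iam.gserviceaccount.com',
    private_key: '-----BEGIN PRIVATE KEY-----\n...'
  })
};

(async() => {
  // verifies the credential by writing lib/utils/jambonz-sample.text into the bucket
  await testGoogleStorage(logger, opts);

  // streams a stored recording back, e.g. into an HTTP response
  const stream = await getGoogleStorageObject(logger, {...opts, key: '2023/07/01/some-call-sid.mp3'});
  stream.pipe(process.stdout);
})();
```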
package-lock.json (generated, 3599 changed lines; file diff suppressed because it is too large)

package.json (13 changed lines)
@@ -1,6 +1,6 @@
 {
   "name": "jambonz-api-server",
-  "version": "0.8.3",
+  "version": "0.8.4",
   "description": "",
   "main": "app.js",
   "scripts": {
@@ -19,15 +19,15 @@
     "url": "https://github.com/jambonz/jambonz-api-server.git"
   },
   "dependencies": {
-    "@aws-sdk/client-transcribe": "^3.348.0",
-    "@aws-sdk/client-s3": "^3.348.0",
+    "@aws-sdk/client-transcribe": "^3.363.0",
+    "@aws-sdk/client-s3": "^3.363.0",
     "@deepgram/sdk": "^1.21.0",
     "@google-cloud/speech": "^5.2.0",
     "@jambonz/db-helpers": "^0.9.0",
     "@jambonz/realtimedb-helpers": "^0.8.6",
     "@jambonz/speech-utils": "^0.0.15",
-    "@jambonz/time-series": "^0.2.7",
-    "@jambonz/verb-specifications": "^0.0.24",
+    "@jambonz/time-series": "^0.2.8",
+    "@jambonz/verb-specifications": "^0.0.26",
     "@jambonz/lamejs": "^1.2.2",
     "@soniox/soniox-node": "^1.1.1",
     "argon2": "^0.30.3",
@@ -53,7 +53,8 @@
     "uuid": "^8.3.2",
     "yamljs": "^0.3.0",
     "ws": "^8.12.1",
-    "wav": "^1.0.2"
+    "wav": "^1.0.2",
+    "@google-cloud/storage" : "^6.12.0"
   },
   "devDependencies": {
     "eslint": "^8.39.0",
@@ -75,32 +75,29 @@ test('recent calls tests', async(t) => {
     auth: authUser,
     json: true,
   });
+  console.log(JSON.stringify(result));
   t.ok(result.data.length === 5, 'retrieved 5 recent calls by account');
 
-  result = await request.get(`/Accounts/${account_sid}/RecentCalls?page=1&count=25&from=16`, {
-    auth: authUser,
-    json: true,
-  });
-  t.ok(result.data.length === 0, 'retrieved 5 recent calls by account and from');
-
-  result = await request.get(`/Accounts/${account_sid}/RecentCalls?page=1&count=25&from=15`, {
+  result = await request.get(`/Accounts/${account_sid}/RecentCalls?page=1&count=25&filter=1508`, {
     auth: authUser,
     json: true,
   });
+  console.log(JSON.stringify(result));
   t.ok(result.data.length === 5, 'retrieved 5 recent calls by account and from');
 
-  result = await request.get(`/Accounts/${account_sid}/RecentCalls?page=1&count=25&to=19`, {
+  result = await request.get(`/Accounts/${account_sid}/RecentCalls?page=1&count=25&filter=15080`, {
     auth: authUser,
     json: true,
   });
-  t.ok(result.data.length === 0, 'retrieved 5 recent calls by account and to');
+  console.log(JSON.stringify(result));
+  t.ok(result.data.length === 0, 'retrieved 0 recent calls by account and non-matching from');
 
-  result = await request.get(`/Accounts/${account_sid}/RecentCalls?page=1&count=25&to=18`, {
+  result = await request.get(`/Accounts/${account_sid}/RecentCalls?page=1&count=25&filter=1888`, {
     auth: authUser,
     json: true,
   });
+  console.log(JSON.stringify(result));
   t.ok(result.data.length === 5, 'retrieved 5 recent calls by account and to');
   //console.log({data: result.data}, 'Account recent calls');
 
   /* query last 7 days by service provider */
   result = await request.get(`/ServiceProviders/${service_provider_sid}/RecentCalls?page=1&count=25`, {
@@ -109,29 +106,18 @@ test('recent calls tests', async(t) => {
   });
   t.ok(result.data.length === 5, 'retrieved 5 recent calls by service provider');
 
-  result = await request.get(`/ServiceProviders/${service_provider_sid}/RecentCalls?page=1&count=25&from=16`, {
-    auth: authAdmin,
-    json: true,
-  });
-  t.ok(result.data.length === 0, 'retrieved 5 recent calls by service provider and from');
-
-  result = await request.get(`/ServiceProviders/${service_provider_sid}/RecentCalls?page=1&count=25&from=15`, {
+  result = await request.get(`/ServiceProviders/${service_provider_sid}/RecentCalls?page=1&count=25&filter=1508`, {
     auth: authAdmin,
     json: true,
   });
   t.ok(result.data.length === 5, 'retrieved 5 recent calls by service provider and from');
 
-  result = await request.get(`/ServiceProviders/${service_provider_sid}/RecentCalls?page=1&count=25&to=19`, {
-    auth: authAdmin,
-    json: true,
-  });
-  t.ok(result.data.length === 0, 'retrieved 5 recent calls by service provider and to');
-
-  result = await request.get(`/ServiceProviders/${service_provider_sid}/RecentCalls?page=1&count=25&to=18`, {
+  result = await request.get(`/ServiceProviders/${service_provider_sid}/RecentCalls?page=1&count=25&filter=1888`, {
     auth: authAdmin,
     json: true,
   });
   t.ok(result.data.length === 5, 'retrieved 5 recent calls by service provider and to');
 
   //console.log({data: result.data}, 'SP recent calls');
 
   /* pull sip traces and pcap from homer */
test/sbcs.js (45 changed lines)
@@ -17,6 +17,37 @@ test('sbc_addresses tests', async(t) => {
   let result;
   const service_provider_sid = await createServiceProvider(request);
 
+  /* add service_provider user */
+  const sp_name = 'sbc_service_provider';
+  const sp_password = 'password';
+  result = await request.post(`/Users`, {
+    resolveWithFullResponse: true,
+    json: true,
+    auth: authAdmin,
+    body: {
+      name: sp_name,
+      email: 'sbc_sp@jambonz.com',
+      is_active: true,
+      force_change: false,
+      initial_password: sp_password,
+      service_provider_sid,
+    }
+  });
+  t.ok(result.statusCode === 201 && result.body.user_sid, 'SBC service_provider scope user created');
+  const sbc_sp_user_sid = result.body.user_sid;
+
+  result = await request.post('/login', {
+    resolveWithFullResponse: true,
+    json: true,
+    body: {
+      username: sp_name,
+      password: sp_password,
+    }
+  });
+  t.ok(result.statusCode === 200 && result.body.token, 'successfully logged in as sbc user');
+  const authSbcSp = {bearer: result.body.token};
+
+
   /* add a service provider sbc */
   result = await request.post('/Sbcs', {
     resolveWithFullResponse: true,
@@ -38,6 +69,20 @@ test('sbc_addresses tests', async(t) => {
   //console.log(result.body)
   t.ok(result.body.length === 1 && result.body[0].ipv4 === '192.168.1.4', 'successfully retrieved service provider sbc');
 
+  result = await request.get('/Sbcs', {
+    resolveWithFullResponse: true,
+    auth: authSbcSp,
+    json: true
+  });
+  //console.log(result.body)
+  t.ok(result.body.length === 1 && result.body[0].ipv4 === '192.168.1.4', 'successfully retrieved service provider sbc');
+
+  await request.delete(`/Users/${sbc_sp_user_sid}`, {
+    resolveWithFullResponse: true,
+    json: true,
+    auth: authAdmin,
+  });
+
   await deleteObjectBySid(request, '/Sbcs', sid);
   await deleteObjectBySid(request, '/ServiceProviders', service_provider_sid);