feat: Record all calls (#169)

* feat: schema change

* feat: record all calls

* add bucket test for S3

* wip: add S3 upload stream implementation

* wip

* wip: add ws server

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip: modify subfolder

* wip: add record endpoint

* wip: add record endpoint

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* wip

* fix: failing testcase

* bucket credentials with tags

* add tagging

* wip

* wip

* wip

* wip

* wip

* wip

* fix: phone numbers were not returned in order

* add schema changes to upgrade script

* use aws-sdk v3

* jambonz lamejs

* jambonz lamejs

* add back wav encoder

* wip: add record format to schema

* add record_format

* fix: record file ext

* fix: record file ext

* fix: record file ext

* fix: record file ext

* fix download audio

* bugfix: dtmf metadata was causing the websocket to close

* fix: add extra data to S3 metadata

* upgrade db script

* bugfix: region was being ignored in test s3 upload

---------

Co-authored-by: Dave Horton <daveh@beachdognet.com>
Author: Hoan Luu Huu
Date: 2023-06-10 01:57:06 +07:00
Committed-by: GitHub
Parent: 46fb9b8875
Commit: e35a03c7ad
20 changed files with 3374 additions and 44 deletions

lib/models/account.js

@@ -4,7 +4,7 @@ const {getMysqlConnection} = require('../db');
const {promisePool} = require('../db');
const { v4: uuid } = require('uuid');
-const {encrypt} = require('../utils/encrypt-decrypt');
+const {encrypt, decrypt} = require('../utils/encrypt-decrypt');
const retrieveSql = `SELECT * from accounts acc
LEFT JOIN webhooks AS rh
@@ -55,6 +55,13 @@ WHERE account_sid = ?
AND effective_end_date IS NULL
AND pending = 0`;
const extractBucketCredential = (obj) => {
const {bucket_credential} = obj;
if (bucket_credential) {
obj.bucket_credential = JSON.parse(decrypt(bucket_credential));
}
};
function transmogrifyResults(results) {
return results.map((row) => {
const obj = row.acc;
@@ -75,6 +82,8 @@ function transmogrifyResults(results) {
else obj.queue_event_hook = null;
delete obj.queue_event_hook_sid;
extractBucketCredential(obj);
return obj;
});
}
@@ -238,7 +247,6 @@ class Account extends Model {
}));
return account_subscription_sid;
}
}
Account.table = 'accounts';
@@ -319,7 +327,15 @@ Account.fields = [
type: 'string',
},
{
-name: 'lcr_sid',
+name: 'record_all_calls',
type: 'number'
},
{
name: 'record_format',
type: 'string'
},
{
name: 'bucket_credential',
type: 'string'
}
];

lib/models/application.js

@@ -120,6 +120,10 @@ Application.fields = [
{
name: 'messaging_hook_sid',
type: 'string',
},
{
name: 'record_all_calls',
type: 'number',
}
];

lib/models/phone-number.js

@@ -1,6 +1,6 @@
const Model = require('./model');
const {promisePool} = require('../db');
-const sql = 'SELECT * from phone_numbers WHERE account_sid = ?';
+const sql = 'SELECT * from phone_numbers WHERE account_sid = ? ORDER BY number';
const sqlSP = `SELECT *
FROM phone_numbers
WHERE account_sid IN
@@ -8,7 +8,7 @@ WHERE account_sid IN
SELECT account_sid
FROM accounts
WHERE service_provider_sid = ?
-)`;
+) ORDER BY number`;
class PhoneNumber extends Model {
constructor() {

lib/record/encoder.js (new file, 54 lines)

@@ -0,0 +1,54 @@
const { Transform } = require('stream');
const lamejs = require('@jambonz/lamejs');
class PCMToMP3Encoder extends Transform {
constructor(options) {
super(options);
const channels = options.channels || 1;
const sampleRate = options.sampleRate || 8000;
const bitRate = options.bitRate || 128;
this.encoder = new lamejs.Mp3Encoder(channels, sampleRate, bitRate);
this.channels = channels;
}
_transform(chunk, encoding, callback) {
// Convert chunk buffer into Int16Array for lamejs
const samples = new Int16Array(chunk.buffer, chunk.byteOffset, chunk.length / 2);
// Split input samples into left and right channel arrays if stereo
let leftChannel, rightChannel;
if (this.channels === 2) {
leftChannel = new Int16Array(samples.length / 2);
rightChannel = new Int16Array(samples.length / 2);
for (let i = 0; i < samples.length; i += 2) {
leftChannel[i / 2] = samples[i];
rightChannel[i / 2] = samples[i + 1];
}
} else {
leftChannel = samples;
}
// Encode the input data
const mp3Data = this.encoder.encodeBuffer(leftChannel, rightChannel);
if (mp3Data.length > 0) {
this.push(Buffer.from(mp3Data));
}
callback();
}
_flush(callback) {
// Finalize encoding and flush the internal buffers
const mp3Data = this.encoder.flush();
if (mp3Data.length > 0) {
this.push(Buffer.from(mp3Data));
}
callback();
}
}
module.exports = PCMToMP3Encoder;
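For context, a minimal usage sketch (not part of the commit; file names and parameters are illustrative) showing how the Transform slots into an ordinary Node pipeline:

// hypothetical example: encode a raw PCM capture to MP3
// 'audio.raw' is assumed to hold signed 16-bit little-endian PCM at 8 kHz
const fs = require('fs');
const PCMToMP3Encoder = require('./encoder');

const encoder = new PCMToMP3Encoder({ channels: 1, sampleRate: 8000, bitRate: 128 });
fs.createReadStream('audio.raw')
  .pipe(encoder)
  .pipe(fs.createWriteStream('audio.mp3'))
  .on('finish', () => console.log('wrote audio.mp3'));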

lib/record/index.js (new file, 17 lines)

@@ -0,0 +1,17 @@
const path = require('node:path');
async function record(logger, socket, url) {
  // the bucket vendor is the last path segment of the websocket URL
  const vendor = path.basename(url);
switch (vendor) {
case 'aws_s3':
return require('./s3')(logger, socket);
default:
logger.info(`unknown bucket vendor: ${vendor}`);
socket.send(`unknown bucket vendor: ${vendor}`);
socket.close();
}
}
module.exports = record;
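A sketch of how the dispatcher might be attached to a websocket server; the port and pino logger are assumptions for illustration, not taken from this commit:

const pino = require('pino');
const Websocket = require('ws');
const record = require('./lib/record');

const logger = pino();
const wss = new Websocket.Server({ port: 3001 });
wss.on('connection', (socket, req) => {
  // the last path segment selects the vendor, e.g. ws://host:3001/record/aws_s3
  record(logger, socket, req.url);
});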

lib/record/s3-multipart-upload-stream.js (new file, 103 lines)

@@ -0,0 +1,103 @@
const { Writable } = require('stream');
const {
S3Client,
CreateMultipartUploadCommand,
UploadPartCommand,
CompleteMultipartUploadCommand,
} = require('@aws-sdk/client-s3');
class S3MultipartUploadStream extends Writable {
constructor(logger, opts) {
super(opts);
this.logger = logger;
this.bucketName = opts.bucketName;
this.objectKey = opts.Key;
this.uploadId = null;
this.partNumber = 1;
this.multipartETags = [];
this.buffer = Buffer.alloc(0);
this.minPartSize = 5 * 1024 * 1024; // 5 MB
this.s3 = new S3Client(opts.bucketCredential);
this.metadata = opts.metadata;
}
async _initMultipartUpload() {
const command = new CreateMultipartUploadCommand({
Bucket: this.bucketName,
Key: this.objectKey,
Metadata: this.metadata
});
const response = await this.s3.send(command);
return response.UploadId;
}
async _uploadBuffer() {
const uploadPartCommand = new UploadPartCommand({
Bucket: this.bucketName,
Key: this.objectKey,
PartNumber: this.partNumber,
UploadId: this.uploadId,
Body: this.buffer,
});
const uploadPartResponse = await this.s3.send(uploadPartCommand);
this.multipartETags.push({
ETag: uploadPartResponse.ETag,
PartNumber: this.partNumber,
});
this.partNumber += 1;
}
async _write(chunk, encoding, callback) {
try {
if (!this.uploadId) {
this.uploadId = await this._initMultipartUpload();
}
this.buffer = Buffer.concat([this.buffer, chunk]);
if (this.buffer.length >= this.minPartSize) {
await this._uploadBuffer();
this.buffer = Buffer.alloc(0);
}
callback(null);
} catch (error) {
callback(error);
}
}
async _finalize() {
try {
if (this.buffer.length > 0) {
await this._uploadBuffer();
}
const completeMultipartUploadCommand = new CompleteMultipartUploadCommand({
Bucket: this.bucketName,
Key: this.objectKey,
MultipartUpload: {
Parts: this.multipartETags.sort((a, b) => a.PartNumber - b.PartNumber),
},
UploadId: this.uploadId,
});
await this.s3.send(completeMultipartUploadCommand);
this.logger.info('Finished upload to S3');
} catch (error) {
this.logger.error({error}, 'Error completing multipart upload');
throw error;
}
}
async _final(callback) {
try {
await this._finalize();
callback(null);
} catch (error) {
callback(error);
}
}
}
module.exports = S3MultipartUploadStream;
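S3 requires every part of a multipart upload except the last to be at least 5 MB, which is why the stream buffers up to minPartSize before sending a part. An illustrative use, with placeholder bucket, key, and credentials:

const fs = require('fs');
const pino = require('pino');
const S3MultipartUploadStream = require('./s3-multipart-upload-stream');

const uploadStream = new S3MultipartUploadStream(pino(), {
  bucketName: 'my-bucket',            // placeholder
  Key: '2023/06/10/example.mp3',      // placeholder
  metadata: { callSid: 'example' },
  bucketCredential: {
    credentials: { accessKeyId: 'AKIA...', secretAccessKey: '...' },
    region: 'us-east-1'
  }
});
fs.createReadStream('example.mp3')
  .pipe(uploadStream)
  .on('finish', () => console.log('upload complete'));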

lib/record/s3.js (new file, 94 lines)

@@ -0,0 +1,94 @@
const Account = require('../models/account');
const Websocket = require('ws');
const PCMToMP3Encoder = require('./encoder');
const S3MultipartUploadStream = require('./s3-multipart-upload-stream');
const wav = require('wav');
async function upload(logger, socket) {
socket._recvInitialMetadata = false;
socket.on('message', async function(data, isBinary) {
try {
if (!isBinary && !socket._recvInitialMetadata) {
socket._recvInitialMetadata = true;
const obj = JSON.parse(data.toString());
logger.info({obj}, 'received JSON message from jambonz');
const {sampleRate, accountSid, callSid, direction, from, to,
callId, applicationSid, originatingSipIp, originatingSipTrunkName} = obj;
const account = await Account.retrieve(accountSid);
if (account && account.length && account[0].bucket_credential) {
  const bucketCredential = account[0].bucket_credential;
  // add tags to metadata
  const metadata = {
    accountSid,
    callSid,
    direction,
    from,
    to,
    callId,
    applicationSid,
    originatingSipIp,
    originatingSipTrunkName,
    sampleRate: `${sampleRate}`
  };
  if (bucketCredential.tags && bucketCredential.tags.length) {
    bucketCredential.tags.forEach((tag) => {
      metadata[tag.Key] = tag.Value;
    });
  }
  // create S3 path: YYYY/MM/DD/<callSid>.<record_format>
  const day = new Date();
  let Key = `${day.getFullYear()}/${(day.getMonth() + 1).toString().padStart(2, '0')}`;
  Key += `/${day.getDate().toString().padStart(2, '0')}/${callSid}.${account[0].record_format}`;
  // Uploader
  const uploaderOpts = {
    bucketName: bucketCredential.name,
    Key,
    metadata,
    bucketCredential: {
      credentials: {
        accessKeyId: bucketCredential.access_key_id,
        secretAccessKey: bucketCredential.secret_access_key,
      },
      region: bucketCredential.region || 'us-east-1'
    }
  };
  const uploadStream = new S3MultipartUploadStream(logger, uploaderOpts);
  /* encoder: choose by the account's record_format (mp3 is the default) */
  let encoder;
  if (account[0].record_format === 'wav') {
    encoder = new wav.Writer({ channels: 2, sampleRate, bitDepth: 16 });
  } else {
    // default is mp3
    encoder = new PCMToMP3Encoder({
      channels: 2,
      sampleRate,
      bitRate: 128
    });
  }
  /* start streaming data */
  const duplex = Websocket.createWebSocketStream(socket);
  duplex.pipe(encoder).pipe(uploadStream);
} else {
logger.info(`account ${accountSid} has no bucket credential, closing the socket`);
socket.close();
}
}
} catch (err) {
logger.error({err}, 'error parsing message during connection');
}
});
socket.on('error', function(err) {
logger.error({err}, 'aws upload: error');
});
socket.on('close', (data) => {
logger.info({data}, 'aws_s3: close');
});
socket.on('end', function(err) {
logger.error({err}, 'aws upload: socket closed from jambonz');
});
}
module.exports = upload;
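For reference, the initial text frame the handler expects could look like the following; the field names come from the destructuring above, while all values are invented for illustration:

socket.send(JSON.stringify({
  sampleRate: 8000,
  accountSid: 'account-sid-here',
  callSid: 'call-sid-here',
  direction: 'inbound',
  from: '15083084000',
  to: '15083084001',
  callId: 'abcdef@192.0.2.10',
  applicationSid: 'application-sid-here',
  originatingSipIp: '192.0.2.10',
  originatingSipTrunkName: 'my-trunk'
}));
// every subsequent frame is binary PCM audio, which is piped through the
// encoder and on to the S3 upload stream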

lib/routes/api/accounts.js

@@ -1,6 +1,7 @@
const router = require('express').Router();
const assert = require('assert');
const request = require('request');
-const {DbErrorBadRequest, DbErrorForbidden, DbErrorUnprocessableRequest} = require('../../utils/errors');
+const {DbErrorBadRequest, DbErrorForbidden, DbErrorUnprocessableRequest, DbError} = require('../../utils/errors');
const Account = require('../../models/account');
const Application = require('../../models/application');
const Webhook = require('../../models/webhook');
@@ -22,6 +23,8 @@ const {
} = require('./utils');
const short = require('short-uuid');
const VoipCarrier = require('../../models/voip-carrier');
const { encrypt, decrypt } = require('../../utils/encrypt-decrypt');
const { testAwsS3 } = require('../../utils/storage-utils');
const translator = short();
let idx = 0;
@@ -451,7 +454,6 @@ router.get('/:sid', async(req, res) => {
try {
const account_sid = parseAccountSid(req);
await validateRequest(req, account_sid);
const service_provider_sid = req.user.hasServiceProviderAuth ? req.user.service_provider_sid : null;
const results = await Account.retrieve(account_sid, service_provider_sid);
if (results.length === 0) return res.status(404).end();
@@ -537,6 +539,35 @@ router.delete('/:sid/SubspaceTeleport', async(req, res) => {
}
});
function encryptBucketCredential(obj) {
if (!obj.bucket_credential) return;
const {
vendor,
region,
name,
access_key_id,
secret_access_key,
tags
} = obj.bucket_credential;
switch (vendor) {
case 'aws_s3':
assert(access_key_id, 'invalid aws S3 bucket credential: access_key_id is required');
assert(secret_access_key, 'invalid aws S3 bucket credential: secret_access_key is required');
assert(name, 'invalid aws bucket name: name is required');
assert(region, 'invalid aws bucket region: region is required');
const awsData = JSON.stringify({vendor, region, name, access_key_id,
secret_access_key, tags});
obj.bucket_credential = encrypt(awsData);
break;
case 'none':
obj.bucket_credential = null;
break;
default:
throw new DbErrorBadRequest(`unknown storage vendor: ${vendor}`);
}
}
/**
* update
*/
@@ -583,6 +614,8 @@ router.put('/:sid', async(req, res) => {
delete obj.registration_hook;
delete obj.queue_event_hook;
encryptBucketCredential(obj);
const rowsAffected = await Account.update(sid, obj);
if (rowsAffected === 0) {
return res.status(404).end();
@@ -675,6 +708,51 @@ account_subscriptions WHERE account_sid = ?)
}
});
/* Test Bucket credential Keys */
router.post('/:sid/BucketCredentialTest', async(req, res) => {
const logger = req.app.locals.logger;
try {
const account_sid = parseAccountSid(req);
await validateRequest(req, account_sid);
let {vendor, name, region, access_key_id, secret_access_key} = req.body;
const ret = {
status: 'not tested'
};
if (secret_access_key && secret_access_key.endsWith('XXXXXX')) {
  // the secret is obscured, meaning the credential is already saved on the
  // account; fetch the stored values before testing
const service_provider_sid = req.user.hasServiceProviderAuth ? req.user.service_provider_sid : null;
const results = await Account.retrieve(account_sid, service_provider_sid);
if (results.length === 0) throw new DbError('Invalid Account Sid');
const {bucket_credential} = results[0];
if (bucket_credential) {
const o = JSON.parse(decrypt(bucket_credential));
vendor = o.vendor;
switch (vendor) {
case 'aws_s3':
name = o.name;
region = o.region;
access_key_id = o.access_key_id;
secret_access_key = o.secret_access_key;
break;
}
}
}
switch (vendor) {
case 'aws_s3':
await testAwsS3(logger, {vendor, name, region, access_key_id, secret_access_key});
ret.status = 'ok';
break;
default:
throw new DbErrorBadRequest(`testing is not supported for vendor ${vendor}`);
}
return res.status(200).json(ret);
}
catch (err) {
return res.status(200).json({status: 'failed', reason: err.message});
}
});
/**
* retrieve account level api keys
*/
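As a sketch, the bucket_credential payload accepted by encryptBucketCredential on the account PUT has this shape (all values are placeholders):

const body = {
  bucket_credential: {
    vendor: 'aws_s3',
    region: 'us-east-1',
    name: 'my-recordings-bucket',           // S3 bucket name
    access_key_id: 'AKIA...',               // placeholder
    secret_access_key: '...',               // placeholder
    tags: [{ Key: 'env', Value: 'prod' }]   // optional; copied into S3 object metadata
  }
};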

lib/routes/api/recent-calls.js

@@ -3,6 +3,8 @@ const sysError = require('../error');
const {DbErrorBadRequest} = require('../../utils/errors');
const {getHomerApiKey, getHomerSipTrace, getHomerPcap} = require('../../utils/homer-utils');
const {getJaegerTrace} = require('../../utils/jaeger-utils');
const Account = require('../../models/account');
const { getS3Object } = require('../../utils/storage-utils');
const parseAccountSid = (url) => {
const arr = /Accounts\/([^\/]*)/.exec(url);
@@ -115,4 +117,34 @@ router.get('/trace/:trace_id', async(req, res) => {
}
});
router.get('/:call_sid/record/:year/:month/:day/:format', async(req, res) => {
const {logger} = req.app.locals;
const {call_sid, year, month, day, format} = req.params;
try {
const account_sid = parseAccountSid(req.originalUrl);
const r = await Account.retrieve(account_sid);
if (r.length === 0 || !r[0].bucket_credential) return res.sendStatus(404);
const {bucket_credential} = r[0];
switch (bucket_credential.vendor) {
case 'aws_s3':
const getS3Options = {
...bucket_credential,
key: `${year}/${month}/${day}/${call_sid}.${format || 'mp3'}`
};
const stream = await getS3Object(logger, getS3Options);
res.set({
'Content-Type': `audio/${format || 'mp3'}`
});
stream.pipe(res);
break;
default:
logger.error(`There is no handler for fetching record from ${bucket_credential.vendor}`);
return res.sendStatus(500);
}
} catch (err) {
logger.error({err}, `error retrieving recording ${call_sid}`);
res.sendStatus(404);
}
});
module.exports = router;
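A hypothetical client call, assuming the router is mounted under an account-scoped path such as Accounts/:account_sid/RecentCalls (base URL, path, and token are placeholders):

// inside an async function
const res = await fetch(
  'https://api.example.com/v1/Accounts/<account_sid>/RecentCalls/<call_sid>/record/2023/06/10/mp3',
  { headers: { Authorization: 'Bearer <token>' } }
);
// on success the response streams audio/mp3; a missing recording yields a 404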

lib/routes/api/speech-credentials.js

@@ -3,7 +3,7 @@ const assert = require('assert');
const Account = require('../../models/account');
const SpeechCredential = require('../../models/speech-credential');
const sysError = require('../error');
-const {decrypt, encrypt} = require('../../utils/encrypt-decrypt');
+const {decrypt, encrypt, obscureKey} = require('../../utils/encrypt-decrypt');
const {parseAccountSid, parseServiceProviderSid, parseSpeechCredentialSid} = require('./utils');
const {DbErrorUnprocessableRequest, DbErrorForbidden} = require('../../utils/errors');
const {
@@ -99,17 +99,6 @@ const validateTest = async(req, speech_credentials) => {
}
};
-const obscureKey = (key) => {
-  const key_spoiler_length = 6;
-  const key_spoiler_char = 'X';
-  if (!key || key.length <= key_spoiler_length) {
-    return key;
-  }
-  return `${key.slice(0, key_spoiler_length)}${key_spoiler_char.repeat(key.length - key_spoiler_length)}`;
-};
const encryptCredential = (obj) => {
const {
vendor,

lib/utils/encrypt-decrypt.js

@@ -23,7 +23,19 @@ const decrypt = (data) => {
return decrpyted.toString();
};
const obscureKey = (key) => {
const key_spoiler_length = 6;
const key_spoiler_char = 'X';
if (!key || key.length <= key_spoiler_length) {
return key;
}
return `${key.slice(0, key_spoiler_length)}${key_spoiler_char.repeat(key.length - key_spoiler_length)}`;
};
module.exports = {
encrypt,
-  decrypt
+  decrypt,
+  obscureKey
};
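obscureKey keeps the first six characters of a key and masks the rest, which is what the BucketCredentialTest route above relies on when it sees a secret ending in 'XXXXXX'. For example (using the well-known AWS documentation sample key):

const { obscureKey } = require('./lib/utils/encrypt-decrypt');
console.log(obscureKey('AKIAIOSFODNN7EXAMPLE'));
// -> 'AKIAIOXXXXXXXXXXXXXX'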

lib/utils/storage-utils.js (new file, 42 lines)

@@ -0,0 +1,42 @@
const { S3Client, PutObjectCommand, GetObjectCommand } = require('@aws-sdk/client-s3');
async function testAwsS3(logger, opts) {
const s3 = new S3Client({
credentials: {
accessKeyId: opts.access_key_id,
secretAccessKey: opts.secret_access_key,
},
region: opts.region || 'us-east-1'
});
const input = {
'Body': 'Hello From Jambonz',
'Bucket': opts.name,
'Key': 'jambonz-sample.text'
};
const command = new PutObjectCommand(input);
await s3.send(command);
}
async function getS3Object(logger, opts) {
const s3 = new S3Client({
credentials: {
accessKeyId: opts.access_key_id,
secretAccessKey: opts.secret_access_key,
},
region: opts.region || 'us-east-1'
});
const command = new GetObjectCommand({
Bucket: opts.name,
Key: opts.key
});
const res = await s3.send(command);
return res.Body;
}
module.exports = {
testAwsS3,
getS3Object
};
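testAwsS3 verifies write access by putting a small sample object into the bucket. A minimal invocation, with placeholder credentials, might look like:

const pino = require('pino');
const { testAwsS3 } = require('./lib/utils/storage-utils');

testAwsS3(pino(), {
  name: 'my-recordings-bucket',
  region: 'us-east-1',
  access_key_id: 'AKIA...',
  secret_access_key: '...'
}).then(() => console.log('bucket is writable'))
  .catch((err) => console.error('bucket test failed:', err.message));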