Compare commits

...

6 Commits

Author SHA1 Message Date
Hoan Luu Huu
c96159268e feat google storage (#207)
* feat google storage

* feat google storage

* add google storage writablestream

* add google storage writablestream

* add google storage writablestream

* add metadata to google storage

* add metadata to google storage

* add metadata to google storage

* add tags to google storage

* fix

* fix

* fix

* fix
2023-07-28 12:04:40 -04:00
Dave Horton
8e200251ca slight change to pino logger construction (#206)
* slight change to pino logger construction

* remove console.log in test

* added test logging back in

* test
2023-07-23 11:26:57 -04:00
Hoan Luu Huu
898f3aec4a update verb specification (#204) 2023-07-20 09:00:18 -04:00
Hoan Luu Huu
6f85752352 fix custom speech cannot update urls (#199) 2023-07-17 19:15:04 -04:00
dependabot[bot]
fe7cc9ad58 Bump fast-xml-parser, @aws-sdk/client-transcribe, @aws-sdk/client-s3 and @aws-sdk/client-polly (#192)
Bumps [fast-xml-parser](https://github.com/NaturalIntelligence/fast-xml-parser) to 4.2.5 and updates ancestor dependencies [fast-xml-parser](https://github.com/NaturalIntelligence/fast-xml-parser), [@aws-sdk/client-transcribe](https://github.com/aws/aws-sdk-js-v3/tree/HEAD/clients/client-transcribe), [@aws-sdk/client-s3](https://github.com/aws/aws-sdk-js-v3/tree/HEAD/clients/client-s3) and [@aws-sdk/client-polly](https://github.com/aws/aws-sdk-js-v3/tree/HEAD/clients/client-polly). These dependencies need to be updated together.


Updates `fast-xml-parser` from 4.2.4 to 4.2.5
- [Release notes](https://github.com/NaturalIntelligence/fast-xml-parser/releases)
- [Changelog](https://github.com/NaturalIntelligence/fast-xml-parser/blob/master/CHANGELOG.md)
- [Commits](https://github.com/NaturalIntelligence/fast-xml-parser/compare/v4.2.4...v4.2.5)

Updates `@aws-sdk/client-transcribe` from 3.348.0 to 3.359.0
- [Release notes](https://github.com/aws/aws-sdk-js-v3/releases)
- [Changelog](https://github.com/aws/aws-sdk-js-v3/blob/main/clients/client-transcribe/CHANGELOG.md)
- [Commits](https://github.com/aws/aws-sdk-js-v3/commits/v3.359.0/clients/client-transcribe)

Updates `@aws-sdk/client-s3` from 3.348.0 to 3.359.0
- [Release notes](https://github.com/aws/aws-sdk-js-v3/releases)
- [Changelog](https://github.com/aws/aws-sdk-js-v3/blob/main/clients/client-s3/CHANGELOG.md)
- [Commits](https://github.com/aws/aws-sdk-js-v3/commits/v3.359.0/clients/client-s3)

Updates `@aws-sdk/client-polly` from 3.348.0 to 3.359.0
- [Release notes](https://github.com/aws/aws-sdk-js-v3/releases)
- [Changelog](https://github.com/aws/aws-sdk-js-v3/blob/main/clients/client-polly/CHANGELOG.md)
- [Commits](https://github.com/aws/aws-sdk-js-v3/commits/v3.359.0/clients/client-polly)

---
updated-dependencies:
- dependency-name: fast-xml-parser
  dependency-type: indirect
- dependency-name: "@aws-sdk/client-transcribe"
  dependency-type: direct:production
- dependency-name: "@aws-sdk/client-s3"
  dependency-type: direct:production
- dependency-name: "@aws-sdk/client-polly"
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-07-05 10:44:54 +01:00
Hoan Luu Huu
1ffdfebdb2 multi srs (#189) 2023-07-05 08:15:46 +01:00
13 changed files with 2089 additions and 1779 deletions

View File

@@ -1,11 +1,7 @@
const opts = Object.assign({
timestamp: () => {
return `, "time": "${new Date().toISOString()}"`;
}
}, {
const opts = {
level: process.env.JAMBONES_LOGLEVEL || 'info'
});
const logger = require('pino')(opts);
};
const pino = require('pino');
const logger = pino(opts, pino.destination(1, {sync: false}));
module.exports = logger;
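For context, a minimal sketch of how the exported logger is consumed elsewhere in the codebase (the require path below is an assumption, not part of this change):

// illustrative only: consuming the shared pino logger (require path is an assumption)
const logger = require('./lib/utils/logger');
logger.info('server started');
logger.info({call_sid: 'example-call-sid'}, 'recording upload started'); // pino signature: info(mergeObject, message)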

View File

@@ -0,0 +1,41 @@
const { Storage } = require('@google-cloud/storage');
const { Writable } = require('stream');
class GoogleStorageUploadStream extends Writable {
constructor(logger, opts) {
super(opts);
this.logger = logger;
this.metadata = opts.metadata;
const storage = new Storage(opts.bucketCredential);
this.gcsFile = storage.bucket(opts.bucketName).file(opts.Key);
this.writeStream = this.gcsFile.createWriteStream();
this.writeStream.on('error', (err) => this.logger.error(err));
this.writeStream.on('finish', () => {
this.logger.info('Google storage upload completed.');
this._addMetadata();
});
}
_write(chunk, encoding, callback) {
this.writeStream.write(chunk, encoding, callback);
}
_final(callback) {
this.writeStream.end();
this.writeStream.once('finish', callback);
}
async _addMetadata() {
try {
await this.gcsFile.setMetadata({metadata: this.metadata});
this.logger.info('Google storage upload and metadata setting completed.');
} catch (err) {
this.logger.error(err, 'Google storage: an error occurred while setting metadata');
}
}
}
module.exports = GoogleStorageUploadStream;
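As a rough usage sketch (bucket name, object key, file path and the environment variable are illustrative assumptions), the new class is a standard Writable that can be piped to, using the same option shape that the getUploader helper added later in this diff builds:

// illustrative sketch only; names and paths are assumptions
const fs = require('fs');
const pino = require('pino');
const GoogleStorageUploadStream = require('./lib/record/google-storage');

const serviceKey = JSON.parse(process.env.GCP_SERVICE_KEY); // assumed env var holding the service account key JSON
const uploadStream = new GoogleStorageUploadStream(pino(), {
  bucketName: 'example-recordings-bucket',
  Key: '2023/07/28/example-call-sid.mp3',
  metadata: {call_sid: 'example-call-sid'},
  bucketCredential: {
    projectId: serviceKey.project_id,
    credentials: {
      client_email: serviceKey.client_email,
      private_key: serviceKey.private_key
    }
  }
});
fs.createReadStream('/tmp/example.mp3').pipe(uploadStream); // metadata is set once the upload finishes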

View File

@@ -1,17 +1,6 @@
const path = require('node:path');
async function record(logger, socket, url) {
const p = path.basename(url);
const idx = p.lastIndexOf('/');
const vendor = p.substring(idx + 1);
switch (vendor) {
case 'aws_s3':
return require('./s3')(logger, socket);
default:
logger.info(`unknown bucket vendor: ${vendor}`);
socket.send(`unknown bucket vendor: ${vendor}`);
socket.close();
}
async function record(logger, socket) {
return require('./upload')(logger, socket);
}
module.exports = record;

View File

@@ -1,8 +1,8 @@
const Account = require('../models/account');
const Websocket = require('ws');
const PCMToMP3Encoder = require('./encoder');
const S3MultipartUploadStream = require('./s3-multipart-upload-stream');
const wav = require('wav');
const { getUploader } = require('./utils');
async function upload(logger, socket) {
@@ -43,19 +43,11 @@ async function upload(logger, socket) {
Key += `/${day.getDate().toString().padStart(2, '0')}/${callSid}.${account[0].record_format}`;
// Uploader
const uploaderOpts = {
bucketName: obj.name,
Key,
metadata,
bucketCredential: {
credentials: {
accessKeyId: obj.access_key_id,
secretAccessKey: obj.secret_access_key,
},
region: obj.region || 'us-east-1'
}
};
const uploadStream = new S3MultipartUploadStream(logger, uploaderOpts);
const uploadStream = getUploader(Key, metadata, obj, logger);
if (!uploadStream) {
logger.info('No record uploader is available, closing the socket.');
socket.close();
}
/**encoder */
let encoder;

lib/record/utils.js (new file, 40 lines)
View File

@@ -0,0 +1,40 @@
const GoogleStorageUploadStream = require('./google-storage');
const S3MultipartUploadStream = require('./s3-multipart-upload-stream');
const getUploader = (Key, metadata, bucket_credential, logger) => {
const uploaderOpts = {
bucketName: bucket_credential.name,
Key,
metadata
};
switch (bucket_credential.vendor) {
case 'aws_s3':
uploaderOpts.bucketCredential = {
credentials: {
accessKeyId: bucket_credential.access_key_id,
secretAccessKey: bucket_credential.secret_access_key,
},
region: bucket_credential.region || 'us-east-1'
};
return new S3MultipartUploadStream(logger, uploaderOpts);
case 'google':
const serviceKey = JSON.parse(bucket_credential.service_key);
uploaderOpts.bucketCredential = {
projectId: serviceKey.project_id,
credentials: {
client_email: serviceKey.client_email,
private_key: serviceKey.private_key
}
};
return new GoogleStorageUploadStream(logger, uploaderOpts);
default:
logger.error(`unknown bucket vendor: ${bucket_credential.vendor}`);
break;
}
return null;
};
module.exports = {
getUploader
};
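A hedged sketch of how upload.js calls the new helper; the decrypted bucket-credential shape shown here is an assumption based on the two cases handled above:

// illustrative only: vendor dispatch via getUploader
const pino = require('pino');
const { getUploader } = require('./lib/record/utils');

const bucketCredential = {                 // assumed decrypted shape
  vendor: 'google',
  name: 'example-recordings-bucket',
  service_key: process.env.GCP_SERVICE_KEY // service account key as a JSON string
};
const uploadStream = getUploader(
  '2023/07/28/example-call-sid.mp3',       // Key
  {call_sid: 'example-call-sid'},          // metadata
  bucketCredential,
  pino()
);
if (!uploadStream) {
  // unknown vendor: upload.js logs and closes the websocket in this case
}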

View File

@@ -1,7 +1,7 @@
const router = require('express').Router();
const assert = require('assert');
const request = require('request');
const {DbErrorBadRequest, DbErrorForbidden, DbErrorUnprocessableRequest, DbError} = require('../../utils/errors');
const {DbErrorBadRequest, DbErrorForbidden, DbErrorUnprocessableRequest} = require('../../utils/errors');
const Account = require('../../models/account');
const Application = require('../../models/application');
const Webhook = require('../../models/webhook');
@@ -23,8 +23,8 @@ const {
} = require('./utils');
const short = require('short-uuid');
const VoipCarrier = require('../../models/voip-carrier');
const { encrypt, decrypt } = require('../../utils/encrypt-decrypt');
const { testAwsS3 } = require('../../utils/storage-utils');
const { encrypt } = require('../../utils/encrypt-decrypt');
const { testAwsS3, testGoogleStorage } = require('../../utils/storage-utils');
const translator = short();
let idx = 0;
@@ -541,7 +541,8 @@ function encryptBucketCredential(obj) {
name,
access_key_id,
secret_access_key,
tags
tags,
service_key
} = obj.bucket_credential;
switch (vendor) {
@@ -554,6 +555,11 @@ function encryptBucketCredential(obj) {
secret_access_key, tags});
obj.bucket_credential = encrypt(awsData);
break;
case 'google':
assert(service_key, 'invalid google storage bucket credential: service_key is required');
const googleData = JSON.stringify({vendor, name, service_key, tags});
obj.bucket_credential = encrypt(googleData);
break;
case 'none':
obj.bucket_credential = null;
break;
@@ -708,35 +714,20 @@ router.post('/:sid/BucketCredentialTest', async(req, res) => {
try {
const account_sid = parseAccountSid(req);
await validateRequest(req, account_sid);
let {vendor, name, region, access_key_id, secret_access_key} = req.body;
const {vendor, name, region, access_key_id, secret_access_key, service_key} = req.body;
const ret = {
status: 'not tested'
};
if (secret_access_key.endsWith('XXXXXX')) {
// this is when the password already saved in account
const service_provider_sid = req.user.hasServiceProviderAuth ? req.user.service_provider_sid : null;
const results = await Account.retrieve(account_sid, service_provider_sid);
if (results.length === 0) throw new DbError('Invalid Account Sid');
const {bucket_credential} = results[0];
if (bucket_credential) {
const o = JSON.parse(decrypt(bucket_credential));
vendor = o.vendor;
switch (vendor) {
case 'aws_s3':
name = o.name;
region = o.region;
access_key_id = o.access_key_id;
secret_access_key = o.secret_access_key;
break;
}
}
}
switch (vendor) {
case 'aws_s3':
await testAwsS3(logger, {vendor, name, region, access_key_id, secret_access_key});
ret.status = 'ok';
break;
case 'google':
await testGoogleStorage(logger, {vendor, name, service_key});
ret.status = 'ok';
break;
default:
throw new DbErrorBadRequest(`Does not support test for ${vendor}`);
}
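As an illustration of the new 'google' branch, a request to the test endpoint might look like the sketch below; the host, path prefix, sid and token are assumptions:

// illustrative request only; host, path prefix and token are assumptions (Node 18+ global fetch)
(async() => {
  const res = await fetch('https://api.example.com/v1/Accounts/<account_sid>/BucketCredentialTest', {
    method: 'POST',
    headers: {
      'Authorization': 'Bearer <api_token>',
      'Content-Type': 'application/json'
    },
    body: JSON.stringify({
      vendor: 'google',
      name: 'example-recordings-bucket',
      service_key: '<service account key JSON as a string>'
    })
  });
  console.log(await res.json()); // {status: 'ok'} when the test upload to the bucket succeeds
})();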

View File

@@ -4,7 +4,7 @@ const {DbErrorBadRequest} = require('../../utils/errors');
const {getHomerApiKey, getHomerSipTrace, getHomerPcap} = require('../../utils/homer-utils');
const {getJaegerTrace} = require('../../utils/jaeger-utils');
const Account = require('../../models/account');
const { getS3Object } = require('../../utils/storage-utils');
const { getS3Object, getGoogleStorageObject } = require('../../utils/storage-utils');
const parseAccountSid = (url) => {
const arr = /Accounts\/([^\/]*)/.exec(url);
@@ -124,22 +124,26 @@ router.get('/:call_sid/record/:year/:month/:day/:format', async(req, res) => {
const r = await Account.retrieve(account_sid);
if (r.length === 0 || !r[0].bucket_credential) return res.sendStatus(404);
const {bucket_credential} = r[0];
const getOptions = {
...bucket_credential,
key: `${year}/${month}/${day}/${call_sid}.${format || 'mp3'}`
};
let stream;
switch (bucket_credential.vendor) {
case 'aws_s3':
const getS3Options = {
...bucket_credential,
key: `${year}/${month}/${day}/${call_sid}.${format || 'mp3'}`
};
const stream = await getS3Object(logger, getS3Options);
res.set({
'Content-Type': `audio/${format || 'mp3'}`
});
stream.pipe(res);
stream = await getS3Object(logger, getOptions);
break;
case 'google':
stream = await getGoogleStorageObject(logger, getOptions);
break;
default:
logger.error(`No handler for fetching recordings from ${bucket_credential.vendor}`);
return res.sendStatus(500);
}
res.set({
'Content-Type': `audio/${format || 'mp3'}`
});
stream.pipe(res);
} catch (err) {
logger.error({err}, ` error retrieving recording ${call_sid}`);
res.sendStatus(404);
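Assuming this router is mounted under the RecentCalls API (the mount path, sids and token below are assumptions), fetching a stored recording now works the same way for either vendor:

// illustrative only; mount path, sids and token are assumptions (Node 18+ global fetch)
(async() => {
  const res = await fetch(
    'https://api.example.com/v1/Accounts/<account_sid>/RecentCalls/<call_sid>/record/2023/07/28/mp3',
    {headers: {'Authorization': 'Bearer <api_token>'}}
  );
  console.log(res.headers.get('content-type')); // audio/mp3, streamed from S3 or Google Cloud Storage
})();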

View File

@@ -478,7 +478,9 @@ router.put('/:sid', async(req, res) => {
use_custom_tts,
custom_tts_endpoint,
use_custom_stt,
custom_stt_endpoint
custom_stt_endpoint,
custom_stt_url,
custom_tts_url
} = req.body;
const newCred = {
@@ -494,7 +496,9 @@ router.put('/:sid', async(req, res) => {
tts_region,
riva_server_uri,
nuance_stt_uri,
nuance_tts_uri
nuance_tts_uri,
custom_stt_url,
custom_tts_url
};
logger.info({o, newCred}, 'updating speech credential with this new credential');
obj.credential = encryptCredential(newCred);
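For context, a hedged example of a PUT body carrying the two new fields; the endpoint path and field values are assumptions:

// illustrative PUT body only; values are assumptions
// PUT /v1/Accounts/<account_sid>/SpeechCredentials/<speech_credential_sid>
const body = {
  use_custom_tts: 1,
  custom_tts_url: 'https://tts.example.com/synthesize',
  use_custom_stt: 1,
  custom_stt_url: 'https://stt.example.com/transcribe'
};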

View File

@@ -0,0 +1 @@
Hello from Jambonz. This file was created by the record-all-calls bucket credential test.

View File

@@ -1,4 +1,42 @@
const { S3Client, PutObjectCommand, GetObjectCommand } = require('@aws-sdk/client-s3');
const {Storage} = require('@google-cloud/storage');
const fs = require('fs');
function testGoogleStorage(logger, opts) {
return new Promise((resolve, reject) => {
const serviceKey = JSON.parse(opts.service_key);
const storage = new Storage({
projectId: serviceKey.project_id,
credentials: {
client_email: serviceKey.client_email,
private_key: serviceKey.private_key
},
});
const blob = storage.bucket(opts.name).file('jambonz-sample.text');
fs.createReadStream(`${__dirname}/jambonz-sample.text`)
.pipe(blob.createWriteStream())
.on('error', (err) => reject(err))
.on('finish', () => resolve());
});
}
async function getGoogleStorageObject(logger, opts) {
const serviceKey = JSON.parse(opts.service_key);
const storage = new Storage({
projectId: serviceKey.project_id,
credentials: {
client_email: serviceKey.client_email,
private_key: serviceKey.private_key
},
});
const bucket = storage.bucket(opts.name);
const file = bucket.file(opts.key);
return file.createReadStream();
}
async function testAwsS3(logger, opts) {
const s3 = new S3Client({
@@ -38,5 +76,7 @@ async function getS3Object(logger, opts) {
module.exports = {
testAwsS3,
getS3Object
getS3Object,
testGoogleStorage,
getGoogleStorageObject
};
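A minimal sketch of exercising the two new helpers on their own; the require path, bucket name, key and environment variable are assumptions:

// illustrative only; names and paths are assumptions
const pino = require('pino');
const { testGoogleStorage, getGoogleStorageObject } = require('./lib/utils/storage-utils');

(async() => {
  const logger = pino();
  const opts = {
    name: 'example-recordings-bucket',
    service_key: process.env.GCP_SERVICE_KEY,  // service account key as a JSON string
    key: '2023/07/28/example-call-sid.mp3'
  };
  await testGoogleStorage(logger, opts);       // uploads jambonz-sample.text as a write test
  const stream = await getGoogleStorageObject(logger, opts);
  stream.pipe(process.stdout);                 // or pipe to an HTTP response, as the recent-calls route does
})();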

package-lock.json (generated, 3581 lines changed)

File diff suppressed because it is too large.

View File

@@ -19,15 +19,15 @@
"url": "https://github.com/jambonz/jambonz-api-server.git"
},
"dependencies": {
"@aws-sdk/client-transcribe": "^3.348.0",
"@aws-sdk/client-s3": "^3.348.0",
"@aws-sdk/client-transcribe": "^3.363.0",
"@aws-sdk/client-s3": "^3.363.0",
"@deepgram/sdk": "^1.21.0",
"@google-cloud/speech": "^5.2.0",
"@jambonz/db-helpers": "^0.9.0",
"@jambonz/realtimedb-helpers": "^0.8.6",
"@jambonz/speech-utils": "^0.0.15",
"@jambonz/time-series": "^0.2.8",
"@jambonz/verb-specifications": "^0.0.24",
"@jambonz/verb-specifications": "^0.0.26",
"@jambonz/lamejs": "^1.2.2",
"@soniox/soniox-node": "^1.1.1",
"argon2": "^0.30.3",
@@ -53,7 +53,8 @@
"uuid": "^8.3.2",
"yamljs": "^0.3.0",
"ws": "^8.12.1",
"wav": "^1.0.2"
"wav": "^1.0.2",
"@google-cloud/storage" : "^6.12.0"
},
"devDependencies": {
"eslint": "^8.39.0",

View File

@@ -75,32 +75,29 @@ test('recent calls tests', async(t) => {
auth: authUser,
json: true,
});
console.log(JSON.stringify(result));
t.ok(result.data.length === 5, 'retrieved 5 recent calls by account');
result = await request.get(`/Accounts/${account_sid}/RecentCalls?page=1&count=25&filter=16`, {
auth: authUser,
json: true,
});
t.ok(result.data.length === 0, 'retrieved 0 recent calls by account and non-matching from');
result = await request.get(`/Accounts/${account_sid}/RecentCalls?page=1&count=25&filter=15`, {
result = await request.get(`/Accounts/${account_sid}/RecentCalls?page=1&count=25&filter=1508`, {
auth: authUser,
json: true,
});
console.log(JSON.stringify(result));
t.ok(result.data.length === 5, 'retrieved 5 recent calls by account and from');
result = await request.get(`/Accounts/${account_sid}/RecentCalls?page=1&count=25&filter=19`, {
result = await request.get(`/Accounts/${account_sid}/RecentCalls?page=1&count=25&filter=15080`, {
auth: authUser,
json: true,
});
t.ok(result.data.length === 0, 'retrieved 5 recent calls by account and to');
console.log(JSON.stringify(result));
t.ok(result.data.length === 0, 'retrieved 0 recent calls by account and non-matching from');
result = await request.get(`/Accounts/${account_sid}/RecentCalls?page=1&count=25&filter=18`, {
result = await request.get(`/Accounts/${account_sid}/RecentCalls?page=1&count=25&filter=1888`, {
auth: authUser,
json: true,
});
console.log(JSON.stringify(result));
t.ok(result.data.length === 5, 'retrieved 5 recent calls by account and to');
//console.log({data: result.data}, 'Account recent calls');
/* query last 7 days by service provider */
result = await request.get(`/ServiceProviders/${service_provider_sid}/RecentCalls?page=1&count=25`, {
@@ -109,29 +106,18 @@ test('recent calls tests', async(t) => {
});
t.ok(result.data.length === 5, 'retrieved 5 recent calls by service provider');
result = await request.get(`/ServiceProviders/${service_provider_sid}/RecentCalls?page=1&count=25&filter=16`, {
auth: authAdmin,
json: true,
});
t.ok(result.data.length === 0, 'retrieved 0 recent calls by service provider and non-matching from');
result = await request.get(`/ServiceProviders/${service_provider_sid}/RecentCalls?page=1&count=25&filter=15`, {
result = await request.get(`/ServiceProviders/${service_provider_sid}/RecentCalls?page=1&count=25&filter=1508`, {
auth: authAdmin,
json: true,
});
t.ok(result.data.length === 5, 'retrieved 5 recent calls by service provider and from');
result = await request.get(`/ServiceProviders/${service_provider_sid}/RecentCalls?page=1&count=25&filter=19`, {
auth: authAdmin,
json: true,
});
t.ok(result.data.length === 0, 'retrieved 5 recent calls by service provider and to');
result = await request.get(`/ServiceProviders/${service_provider_sid}/RecentCalls?page=1&count=25&filter=18`, {
result = await request.get(`/ServiceProviders/${service_provider_sid}/RecentCalls?page=1&count=25&filter=1888`, {
auth: authAdmin,
json: true,
});
t.ok(result.data.length === 5, 'retrieved 5 recent calls by service provider and to');
//console.log({data: result.data}, 'SP recent calls');
/* pull sip traces and pcap from homer */