Mirror of https://github.com/jambonz/jambonz-api-server.git (synced 2026-01-25 02:08:24 +00:00)
Add try catch to getUpload to catch init errors with invalid credentials (#229)
* add try catch to getUpload to catch init errors with invalid credentials
* properly handle errors that occur while streaming

Co-authored-by: Markus Frindt <m.frindt@cognigy.com>
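Note on the streaming change below: in Node.js, `.pipe()` does not forward 'error' events to the next stream, and an 'error' event with no listener attached is thrown as an uncaught exception. That is why the second hunk attaches a handler to every stage of the pipeline instead of relying on a single one at the end. A minimal standalone sketch of the pattern, using PassThrough streams as illustrative stand-ins for the real websocket duplex, encoder, and upload stream:

    const { PassThrough } = require('stream');

    // Illustrative stand-ins for the websocket duplex, the encoder,
    // and the storage upload stream used in the real code.
    const duplex = new PassThrough();
    const encoder = new PassThrough();
    const uploadStream = new PassThrough();

    const handleError = (err, streamType) =>
      console.error(`Error in pipe: ${streamType}: ${err.message}`);

    // .on() returns the stream it was called on and .pipe() returns its
    // destination, so each .on('error', ...) below binds to a different
    // stage: duplex, then encoder, then uploadStream.
    duplex
      .on('error', (err) => handleError(err, 'duplex'))
      .pipe(encoder)
      .on('error', (err) => handleError(err, 'encoder'))
      .pipe(uploadStream)
      .on('error', (err) => handleError(err, 'uploadStream'));

    // Without the per-stage listeners this emit would crash the process;
    // with them the error is logged and handled.
    encoder.emit('error', new Error('boom'));

Node's built-in `stream.pipeline()` would be an alternative that propagates errors and tears the chain down automatically; the commit instead keeps the explicit `.pipe()` chain and adds per-stage handlers.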
@@ -12,9 +12,9 @@ async function upload(logger, socket) {
       socket._recvInitialMetadata = true;
       logger.debug(`initial metadata: ${data}`);
       const obj = JSON.parse(data.toString());
-      logger.info({obj}, 'received JSON message from jambonz');
-      const {sampleRate, accountSid, callSid, direction, from, to,
-        callId, applicationSid, originatingSipIp, originatingSipTrunkName} = obj;
+      logger.info({ obj }, 'received JSON message from jambonz');
+      const { sampleRate, accountSid, callSid, direction, from, to,
+        callId, applicationSid, originatingSipIp, originatingSipTrunkName } = obj;
       const account = await Account.retrieve(accountSid);
       if (account && account.length && account[0].bucket_credential) {
         const obj = account[0].bucket_credential;
@@ -60,26 +60,39 @@ async function upload(logger, socket) {
           bitrate: 128
         });
       }
+      const handleError = (err, streamType) => {
+        logger.error(
+          { err },
+          `Error while streaming for vendor: ${obj.vendor}, pipe: ${streamType}: ${err.message}`
+        );
+      };
 
       /* start streaming data */
       const duplex = Websocket.createWebSocketStream(socket);
-      duplex.pipe(encoder).pipe(uploadStream);
+      duplex
+        .on('error', (err) => handleError(err, 'duplex'))
+        .pipe(encoder)
+        .on('error', (err) => handleError(err, 'encoder'))
+        .pipe(uploadStream)
+        .on('error', (err) => handleError(err, 'uploadStream'));
+
     } else {
       logger.info(`account ${accountSid} does not have any bucket credential, close the socket`);
       socket.close();
     }
   }
 } catch (err) {
-  logger.error({err, data}, 'error parsing message during connection');
+  logger.error({ err, data }, 'error parsing message during connection');
 }
 });
 socket.on('error', function(err) {
-  logger.error({err}, 'record upload: error');
+  logger.error({ err }, 'record upload: error');
 });
 socket.on('close', (data) => {
-  logger.info({data}, 'record upload: close');
+  logger.info({ data }, 'record upload: close');
 });
 socket.on('end', function(err) {
-  logger.error({err}, 'record upload: socket closed from jambonz');
+  logger.error({ err }, 'record upload: socket closed from jambonz');
 });
 }
@@ -8,43 +8,47 @@ const getUploader = (key, metadata, bucket_credential, logger) => {
     objectKey: key,
     metadata
   };
-  switch (bucket_credential.vendor) {
-    case 'aws_s3':
-      uploaderOpts.bucketCredential = {
-        credentials: {
-          accessKeyId: bucket_credential.access_key_id,
-          secretAccessKey: bucket_credential.secret_access_key,
-        },
-        region: bucket_credential.region || 'us-east-1'
-      };
-      return new S3MultipartUploadStream(logger, uploaderOpts);
-    case 's3_compatible':
-      uploaderOpts.bucketCredential = {
-        endpoint: bucket_credential.endpoint,
-        credentials: {
-          accessKeyId: bucket_credential.access_key_id,
-          secretAccessKey: bucket_credential.secret_access_key,
-        },
-        region: 'us-east-1',
-        forcePathStyle: true
-      };
-      return new S3MultipartUploadStream(logger, uploaderOpts);
-    case 'google':
-      const serviceKey = JSON.parse(bucket_credential.service_key);
-      uploaderOpts.bucketCredential = {
-        projectId: serviceKey.project_id,
-        credentials: {
-          client_email: serviceKey.client_email,
-          private_key: serviceKey.private_key
-        }
-      };
-      return new GoogleStorageUploadStream(logger, uploaderOpts);
-    case 'azure':
-      uploaderOpts.connection_string = bucket_credential.connection_string;
-      return new AzureStorageUploadStream(logger, uploaderOpts);
-    default:
-      logger.error(`unknown bucket vendor: ${bucket_credential.vendor}`);
-      break;
-  }
+  try {
+    switch (bucket_credential.vendor) {
+      case 'aws_s3':
+        uploaderOpts.bucketCredential = {
+          credentials: {
+            accessKeyId: bucket_credential.access_key_id,
+            secretAccessKey: bucket_credential.secret_access_key,
+          },
+          region: bucket_credential.region || 'us-east-1'
+        };
+        return new S3MultipartUploadStream(logger, uploaderOpts);
+      case 's3_compatible':
+        uploaderOpts.bucketCredential = {
+          endpoint: bucket_credential.endpoint,
+          credentials: {
+            accessKeyId: bucket_credential.access_key_id,
+            secretAccessKey: bucket_credential.secret_access_key,
+          },
+          region: 'us-east-1',
+          forcePathStyle: true
+        };
+        return new S3MultipartUploadStream(logger, uploaderOpts);
+      case 'google':
+        const serviceKey = JSON.parse(bucket_credential.service_key);
+        uploaderOpts.bucketCredential = {
+          projectId: serviceKey.project_id,
+          credentials: {
+            client_email: serviceKey.client_email,
+            private_key: serviceKey.private_key
+          }
+        };
+        return new GoogleStorageUploadStream(logger, uploaderOpts);
+      case 'azure':
+        uploaderOpts.connection_string = bucket_credential.connection_string;
+        return new AzureStorageUploadStream(logger, uploaderOpts);
+      default:
+        logger.error(`unknown bucket vendor: ${bucket_credential.vendor}`);
+        break;
+    }
+  } catch (err) {
+    logger.error(`Error creating uploader, vendor: ${bucket_credential.vendor}, reason: ${err.message}`);
+  }
   return null;
 };
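Why the try/catch matters here: with invalid stored credentials (for example a `google` row whose `service_key` column is not valid JSON), `JSON.parse` throws synchronously, and the vendor upload-stream constructors may likewise throw on malformed input; previously that exception escaped `getUploader` to its caller. A simplified sketch of the failure mode and the new behavior (the stand-in below is illustrative, not the repo's actual uploader classes):

    // Simplified stand-in for getUploader's 'google' branch, showing how
    // an invalid service_key now yields a logged error and null instead
    // of an uncaught exception.
    const getUploaderSketch = (bucket_credential, logger) => {
      try {
        const serviceKey = JSON.parse(bucket_credential.service_key); // throws on bad JSON
        return { projectId: serviceKey.project_id }; // placeholder for the real upload stream
      } catch (err) {
        logger.error(`Error creating uploader, vendor: ${bucket_credential.vendor}, reason: ${err.message}`);
        return null;
      }
    };

    // Invalid credentials as they might be stored in bucket_credential.
    const bad = { vendor: 'google', service_key: '{not-json' };
    console.log(getUploaderSketch(bad, console)); // logs the error, prints null

Because `getUploader` now returns null on any construction failure, the caller (not shown in this diff) is expected to check for a null uploader before wiring it into the recording pipeline.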