feat google storage (#207)
* feat google storage

* feat google storage

* add google storage writablestream

* add google storage writablestream

* add google storage writablestream

* add metadata to google storage

* add metadata to google storage

* add metadata to google storage

* add tags to google storage

* fix

* fix

* fix

* fix
xquanluu authored Jul 28, 2023
1 parent 8e20025 commit c961592
Showing 10 changed files with 326 additions and 66 deletions.
41 changes: 41 additions & 0 deletions lib/record/google-storage.js
@@ -0,0 +1,41 @@
const { Storage } = require('@google-cloud/storage');
const { Writable } = require('stream');

class GoogleStorageUploadStream extends Writable {

  constructor(logger, opts) {
    super(opts);
    this.logger = logger;
    this.metadata = opts.metadata;

    const storage = new Storage(opts.bucketCredential);
    this.gcsFile = storage.bucket(opts.bucketName).file(opts.Key);
    this.writeStream = this.gcsFile.createWriteStream();

    this.writeStream.on('error', (err) => this.logger.error(err));
    this.writeStream.on('finish', () => {
      this.logger.info('Google storage upload completed.');
      this._addMetadata();
    });
  }

  _write(chunk, encoding, callback) {
    this.writeStream.write(chunk, encoding, callback);
  }

  _final(callback) {
    this.writeStream.end();
    this.writeStream.once('finish', callback);
  }

  async _addMetadata() {
    try {
      await this.gcsFile.setMetadata({metadata: this.metadata});
      this.logger.info('Google storage upload and metadata setting completed.');
    } catch (err) {
      this.logger.error(err, 'Google storage: an error occurred while setting metadata');
    }
  }
}

module.exports = GoogleStorageUploadStream;
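
A minimal usage sketch for this writable stream (hypothetical, not part of the diff): it assumes a stand-in logger, a local audio file, a GCP_SERVICE_KEY environment variable holding the service-account key JSON, and placeholder bucket and key values; the bucketCredential shape mirrors what getUploader in lib/record/utils.js builds for the google vendor.

// Hypothetical usage sketch with placeholder values.
const fs = require('fs');
const GoogleStorageUploadStream = require('./google-storage');

const logger = {info: console.log, error: console.error};   // stand-in logger
const serviceKey = JSON.parse(process.env.GCP_SERVICE_KEY);  // assumed: service-account key JSON string

const uploadStream = new GoogleStorageUploadStream(logger, {
  bucketName: 'example-recordings-bucket',                   // placeholder bucket
  Key: '2023/07/28/CALL_SID.mp3',                            // placeholder object key
  metadata: {callSid: 'CALL_SID'},                           // set on the object after the upload finishes
  bucketCredential: {
    projectId: serviceKey.project_id,
    credentials: {
      client_email: serviceKey.client_email,
      private_key: serviceKey.private_key
    }
  }
});

// Pipe any readable audio source into the writable stream.
fs.createReadStream('./recording.mp3').pipe(uploadStream);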
15 changes: 2 additions & 13 deletions lib/record/index.js
@@ -1,17 +1,6 @@

const path = require('node:path');
async function record(logger, socket, url) {
  const p = path.basename(url);
  const idx = p.lastIndexOf('/');
  const vendor = p.substring(idx + 1);
  switch (vendor) {
    case 'aws_s3':
      return require('./s3')(logger, socket);
    default:
      logger.info(`unknown bucket vendor: ${vendor}`);
      socket.send(`unknown bucket vendor: ${vendor}`);
      socket.close();
  }
async function record(logger, socket) {
  return require('./upload')(logger, socket);
}

module.exports = record;
20 changes: 6 additions & 14 deletions lib/record/s3.js → lib/record/upload.js
@@ -1,8 +1,8 @@
const Account = require('../models/account');
const Websocket = require('ws');
const PCMToMP3Encoder = require('./encoder');
const S3MultipartUploadStream = require('./s3-multipart-upload-stream');
const wav = require('wav');
const { getUploader } = require('./utils');

async function upload(logger, socket) {

@@ -43,19 +43,11 @@ async function upload(logger, socket) {
  Key += `/${day.getDate().toString().padStart(2, '0')}/${callSid}.${account[0].record_format}`;

  // Uploader
  const uploaderOpts = {
    bucketName: obj.name,
    Key,
    metadata,
    bucketCredential: {
      credentials: {
        accessKeyId: obj.access_key_id,
        secretAccessKey: obj.secret_access_key,
      },
      region: obj.region || 'us-east-1'
    }
  };
  const uploadStream = new S3MultipartUploadStream(logger, uploaderOpts);
  const uploadStream = getUploader(Key, metadata, obj, logger);
  if (!uploadStream) {
    logger.info('There is no available record uploader, close the socket.');
    socket.close();
  }

  /**encoder */
  let encoder;
40 changes: 40 additions & 0 deletions lib/record/utils.js
@@ -0,0 +1,40 @@
const GoogleStorageUploadStream = require('./google-storage');
const S3MultipartUploadStream = require('./s3-multipart-upload-stream');

const getUploader = (Key, metadata, bucket_credential, logger) => {
  const uploaderOpts = {
    bucketName: bucket_credential.name,
    Key,
    metadata
  };
  switch (bucket_credential.vendor) {
    case 'aws_s3':
      uploaderOpts.bucketCredential = {
        credentials: {
          accessKeyId: bucket_credential.access_key_id,
          secretAccessKey: bucket_credential.secret_access_key,
        },
        region: bucket_credential.region || 'us-east-1'
      };
      return new S3MultipartUploadStream(logger, uploaderOpts);
    case 'google':
      const serviceKey = JSON.parse(bucket_credential.service_key);
      uploaderOpts.bucketCredential = {
        projectId: serviceKey.project_id,
        credentials: {
          client_email: serviceKey.client_email,
          private_key: serviceKey.private_key
        }
      };
      return new GoogleStorageUploadStream(logger, uploaderOpts);

    default:
      logger.error(`unknown bucket vendor: ${bucket_credential.vendor}`);
      break;
  }
  return null;
};

module.exports = {
  getUploader
};
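
For illustration, a hedged sketch of a call site with placeholder credential values; in this commit the real caller is lib/record/upload.js, which passes the decrypted account bucket credential as the third argument.

// Hypothetical call with placeholder values.
const {getUploader} = require('./utils');

const logger = {info: console.log, error: console.error};   // stand-in logger
const bucket_credential = {
  vendor: 'google',
  name: 'example-recordings-bucket',                         // placeholder bucket name
  service_key: JSON.stringify({                              // placeholder service-account key
    project_id: 'example-project',
    client_email: 'uploader@example-project.iam.gserviceaccount.com',
    private_key: '-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----\n'
  })
};

const uploadStream = getUploader(
  '2023/07/28/CALL_SID.mp3',                                 // Key
  {callSid: 'CALL_SID'},                                     // metadata
  bucket_credential,
  logger
);
if (!uploadStream) {
  // unknown vendor: getUploader logged an error and returned null
}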
39 changes: 15 additions & 24 deletions lib/routes/api/accounts.js
@@ -1,7 +1,7 @@
const router = require('express').Router();
const assert = require('assert');
const request = require('request');
const {DbErrorBadRequest, DbErrorForbidden, DbErrorUnprocessableRequest, DbError} = require('../../utils/errors');
const {DbErrorBadRequest, DbErrorForbidden, DbErrorUnprocessableRequest} = require('../../utils/errors');
const Account = require('../../models/account');
const Application = require('../../models/application');
const Webhook = require('../../models/webhook');
@@ -23,8 +23,8 @@ const {
} = require('./utils');
const short = require('short-uuid');
const VoipCarrier = require('../../models/voip-carrier');
const { encrypt, decrypt } = require('../../utils/encrypt-decrypt');
const { testAwsS3 } = require('../../utils/storage-utils');
const { encrypt } = require('../../utils/encrypt-decrypt');
const { testAwsS3, testGoogleStorage } = require('../../utils/storage-utils');
const translator = short();

let idx = 0;
@@ -541,7 +541,8 @@ function encryptBucketCredential(obj) {
    name,
    access_key_id,
    secret_access_key,
    tags
    tags,
    service_key
  } = obj.bucket_credential;

  switch (vendor) {
@@ -554,6 +555,11 @@
        secret_access_key, tags});
      obj.bucket_credential = encrypt(awsData);
      break;
    case 'google':
      assert(service_key, 'invalid google storage bucket credential: service_key is required');
      const googleData = JSON.stringify({vendor, name, service_key, tags});
      obj.bucket_credential = encrypt(googleData);
      break;
    case 'none':
      obj.bucket_credential = null;
      break;
@@ -708,35 +714,20 @@ router.post('/:sid/BucketCredentialTest', async(req, res) => {
  try {
    const account_sid = parseAccountSid(req);
    await validateRequest(req, account_sid);
    let {vendor, name, region, access_key_id, secret_access_key} = req.body;
    const {vendor, name, region, access_key_id, secret_access_key, service_key} = req.body;
    const ret = {
      status: 'not tested'
    };

    if (secret_access_key.endsWith('XXXXXX')) {
      // this is when the password is already saved in the account
      const service_provider_sid = req.user.hasServiceProviderAuth ? req.user.service_provider_sid : null;
      const results = await Account.retrieve(account_sid, service_provider_sid);
      if (results.length === 0) throw new DbError('Invalid Account Sid');
      const {bucket_credential} = results[0];
      if (bucket_credential) {
        const o = JSON.parse(decrypt(bucket_credential));
        vendor = o.vendor;
        switch (vendor) {
          case 'aws_s3':
            name = o.name;
            region = o.region;
            access_key_id = o.access_key_id;
            secret_access_key = o.secret_access_key;
            break;
        }
      }
    }
    switch (vendor) {
      case 'aws_s3':
        await testAwsS3(logger, {vendor, name, region, access_key_id, secret_access_key});
        ret.status = 'ok';
        break;
      case 'google':
        await testGoogleStorage(logger, {vendor, name, service_key});
        ret.status = 'ok';
        break;
      default:
        throw new DbErrorBadRequest(`Does not support test for ${vendor}`);
    }
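
For reference, a hedged sketch of the request body this route now accepts for the google vendor (all values are placeholders); only vendor, name, and service_key are used in the google branch.

// Hypothetical body for POST .../Accounts/:sid/BucketCredentialTest (placeholder values).
const body = {
  vendor: 'google',
  name: 'example-recordings-bucket',                         // GCS bucket name
  service_key: JSON.stringify({                              // service-account key as a JSON string
    project_id: 'example-project',
    client_email: 'uploader@example-project.iam.gserviceaccount.com',
    private_key: '-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----\n'
  })
};
// On success the route sets ret.status to 'ok'.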
24 changes: 14 additions & 10 deletions lib/routes/api/recent-calls.js
@@ -4,7 +4,7 @@ const {DbErrorBadRequest} = require('../../utils/errors');
const {getHomerApiKey, getHomerSipTrace, getHomerPcap} = require('../../utils/homer-utils');
const {getJaegerTrace} = require('../../utils/jaeger-utils');
const Account = require('../../models/account');
const { getS3Object } = require('../../utils/storage-utils');
const { getS3Object, getGoogleStorageObject } = require('../../utils/storage-utils');

const parseAccountSid = (url) => {
  const arr = /Accounts\/([^\/]*)/.exec(url);
@@ -124,22 +124,26 @@ router.get('/:call_sid/record/:year/:month/:day/:format', async(req, res) => {
    const r = await Account.retrieve(account_sid);
    if (r.length === 0 || !r[0].bucket_credential) return res.sendStatus(404);
    const {bucket_credential} = r[0];
    const getOptions = {
      ...bucket_credential,
      key: `${year}/${month}/${day}/${call_sid}.${format || 'mp3'}`
    };
    let stream;
    switch (bucket_credential.vendor) {
      case 'aws_s3':
        const getS3Options = {
          ...bucket_credential,
          key: `${year}/${month}/${day}/${call_sid}.${format || 'mp3'}`
        };
        const stream = await getS3Object(logger, getS3Options);
        res.set({
          'Content-Type': `audio/${format || 'mp3'}`
        });
        stream.pipe(res);
        stream = await getS3Object(logger, getOptions);
        break;
      case 'google':
        stream = await getGoogleStorageObject(logger, getOptions);
        break;
      default:
        logger.error(`There is no handler for fetching record from ${bucket_credential.vendor}`);
        return res.sendStatus(500);
    }
    res.set({
      'Content-Type': `audio/${format || 'mp3'}`
    });
    stream.pipe(res);
  } catch (err) {
    logger.error({err}, ` error retrieving recording ${call_sid}`);
    res.sendStatus(404);
1 change: 1 addition & 0 deletions lib/utils/jambonz-sample.text
@@ -0,0 +1 @@
Hello From Jambonz. This file was created by the record-all-calls bucket credential test.
42 changes: 41 additions & 1 deletion lib/utils/storage-utils.js
@@ -1,4 +1,42 @@
const { S3Client, PutObjectCommand, GetObjectCommand } = require('@aws-sdk/client-s3');
const {Storage} = require('@google-cloud/storage');
const fs = require('fs');

function testGoogleStorage(logger, opts) {
  return new Promise((resolve, reject) => {
    const serviceKey = JSON.parse(opts.service_key);
    const storage = new Storage({
      projectId: serviceKey.project_id,
      credentials: {
        client_email: serviceKey.client_email,
        private_key: serviceKey.private_key
      },
    });

    const blob = storage.bucket(opts.name).file('jambonz-sample.text');

    fs.createReadStream(`${__dirname}/jambonz-sample.text`)
      .pipe(blob.createWriteStream())
      .on('error', (err) => reject(err))
      .on('finish', () => resolve());
  });
}

async function getGoogleStorageObject(logger, opts) {
  const serviceKey = JSON.parse(opts.service_key);
  const storage = new Storage({
    projectId: serviceKey.project_id,
    credentials: {
      client_email: serviceKey.client_email,
      private_key: serviceKey.private_key
    },
  });

  const bucket = storage.bucket(opts.name);
  const file = bucket.file(opts.key);

  return file.createReadStream();
}

async function testAwsS3(logger, opts) {
  const s3 = new S3Client({
@@ -38,5 +76,7 @@ async function getS3Object(logger, opts) {

module.exports = {
  testAwsS3,
  getS3Object
  getS3Object,
  testGoogleStorage,
  getGoogleStorageObject
};
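
And a hedged sketch of exercising the two new helpers directly; the bucket name and object key are placeholders, and GCP_SERVICE_KEY is an assumed environment variable holding the service-account key JSON.

// Hypothetical direct usage of the new Google storage helpers.
const {testGoogleStorage, getGoogleStorageObject} = require('./storage-utils');

const logger = {info: console.log, error: console.error};   // stand-in logger
const opts = {
  name: 'example-recordings-bucket',                         // placeholder bucket
  service_key: process.env.GCP_SERVICE_KEY                   // assumed env var with the key JSON
};

(async() => {
  // uploads lib/utils/jambonz-sample.text to verify write access
  await testGoogleStorage(logger, opts);

  // streams a stored recording; the key layout matches recent-calls.js
  const stream = await getGoogleStorageObject(logger, {
    ...opts,
    key: '2023/07/28/CALL_SID.mp3'                           // placeholder object key
  });
  stream.pipe(process.stdout);
})().catch((err) => logger.error(err));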