How can I set a custom bucket name when using a read stream - node.js

I want to upload large files to MongoDB with GridFS, but the bucket collections are named fs.files/fs.chunks. Is there any way I can change this to a custom name?
const mongodb = require('mongodb');
const assert = require('assert');
const fs = require('fs');

const uri = 'mongodb://localhost:27017';
const dbName = 'registration';
const client = new mongodb.MongoClient(uri);

client.connect(function(error) {
  assert.ifError(error);
  const db = client.db(dbName);
  var bucket = new mongodb.GridFSBucket(db);

  // UPLOAD FILE TO DB THROUGH STREAMING
  fs.createReadStream('./uploads/' + filename + '.html').
    pipe(bucket.openUploadStream(filename + '.html')).
    on('error', function(error) {
      assert.ifError(error);
    }).
    on('finish', function(res) {
      //process.exit(0);
    });
});
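For reference, the GridFSBucket constructor accepts an options object with a bucketName field, which replaces the default fs prefix so the driver writes to <name>.files and <name>.chunks instead of fs.files and fs.chunks. A minimal sketch (the name myUploads is only an example):

const bucket = new mongodb.GridFSBucket(db, {
  bucketName: 'myUploads',       // collections become myUploads.files / myUploads.chunks
  chunkSizeBytes: 1024 * 1024    // optional chunk size
});

fs.createReadStream('./uploads/' + filename + '.html')
  .pipe(bucket.openUploadStream(filename + '.html'))
  .on('error', function(error) { assert.ifError(error); })
  .on('finish', function() { console.log('upload finished'); });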

Related

Download an Excel file from Azure blob and process its data without needing to save the file to a local directory

I want to download an Excel file from an Azure blob and process its data using the 'xlsx' npm module.
I have achieved this by saving the file to a local directory on my node.js server.
But I have to implement this without saving the file locally on the server.
How do I achieve this?
The following is my js file using the download-to-local-directory method.
const xlsx = require('xlsx');
const path = require('path');
const azureStorage = require('azure-storage');

const azureStorageConfig = {
  accountName: "",
  accountKey: "",
  blobURL: "",
  containerName: "test-container"
};

let fileName = "test_blob.xlsx";

const downloadBlob = async (blobName, downloadFilePath) => {
  return new Promise((resolve, reject) => {
    const name = path.basename(blobName);
    const blobService = azureStorage.createBlobService(azureStorageConfig.accountName, azureStorageConfig.accountKey);
    blobService.getBlobToLocalFile(azureStorageConfig.containerName, blobName, `${downloadFilePath}${name}`, function(error, serverBlob) {
      if (error) {
        reject(error);
      } else {
        resolve(downloadFilePath);
      }
    });
  });
};

downloadBlob(fileName, './local_dir/').then((downloadFilePath) => {
  parseExcel(downloadFilePath + fileName);
});

const parseExcel = function(downloaded_file_path) {
  let workbook = xlsx.readFile(downloaded_file_path);
  // Parse further
};
How will this code change if I follow a process that does not require saving the file to a local directory?
For reference, here is my idea with sample code for your needs, as below.
Generate a blob URL with a SAS token:
var azure = require('azure-storage');

var accountName = '<your account name>';
var accountKey = '<your account key>';
var blobService = azure.createBlobService(accountName, accountKey);

var containerName = 'test-container';
var blobName = 'test_blob.xlsx';

var startDate = new Date();
var expiryDate = new Date(startDate);
expiryDate.setMinutes(startDate.getMinutes() + 100);
startDate.setMinutes(startDate.getMinutes() - 100);

var sharedAccessPolicy = {
  AccessPolicy: {
    Permissions: azure.BlobUtilities.SharedAccessPermissions.READ,
    Start: startDate,
    Expiry: expiryDate
  }
};

var token = blobService.generateSharedAccessSignature(containerName, blobName, sharedAccessPolicy);
var sasUrl = blobService.getUrl(containerName, blobName, token);
Read the blob body bytes via request, then use XLSX.read(data, read_opts) to parse the body as a buffer:
var request = require('request');
var XLSX = require('xlsx');

request(sasUrl, { encoding: null }, function(error, response, body) {
  var workbook = XLSX.read(body, { type: "buffer" });
  console.log(workbook.Sheets.Sheet1);
});
Hope it helps.
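As an aside, the SAS URL and the request dependency can be skipped entirely: azure-storage exposes blobService.createReadStream, so the workbook can be built from an in-memory buffer. A rough sketch, assuming the same account credentials, container and blob names as above:

var azure = require('azure-storage');
var XLSX = require('xlsx');

var blobService = azure.createBlobService(accountName, accountKey);
var chunks = [];

// Stream the blob into memory and parse the concatenated buffer with xlsx.
blobService.createReadStream(containerName, blobName)
  .on('data', function(chunk) { chunks.push(chunk); })
  .on('error', function(error) { console.error(error); })
  .on('end', function() {
    var workbook = XLSX.read(Buffer.concat(chunks), { type: 'buffer' });
    console.log(workbook.SheetNames);
  });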

Download a file from S3 to Local machine

I am trying to download an audio (mp3) file from AWS S3 to the local computer. It works fine when I execute it on localhost, but after deploying the same code onto AWS it downloads files to the server machine instead of the user's local machine.
I tried these two versions. Both behave the same way.
Version 1:
const knox = require('knox');
const os = require('os');
const config = require('../../config/environment');
const fs = require('fs');

const key = track.audio_transcode_filename.substring(20);
var s3Client = knox.createClient(envConfig.S3_BUCKET_TRACKS);
const downloadPath = os.homedir().toString();

var filePath = downloadPath + "\\Downloads\\" + track.formatted_title + ".mp3";
if (fs.existsSync(filePath)) {
  var date = new Date();
  var timestamp = date.getTime();
  filePath = downloadPath + "\\Downloads\\" + track.formatted_title + "_" + timestamp + ".mp3";
}

const file = fs.createWriteStream(filePath);
s3Client.getFile(key, function(err, res) {
  res.on('data', function(data) { file.write(data); });
  res.on('end', function(chunk) { file.end(); });
});
Version 2:
var audioStream = '';
s3Client.getFile(key, function(err, res) {
  res.on('data', function(chunk) { audioStream += chunk; });
  res.on('end', function() { fs.writeFile(filePath + track.formatted_title + ".mp3", audioStream, 'binary'); });
});
Thanks,
Kanth
Instead of getting the file and sending it to the client again, how about getting the URL of the file and redirecting the client?
Something like:
s3Client.getResourceUrl(key, function(err, resourceUrl) {
  res.redirect(resourceUrl);
});
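If the client is allowed to fetch straight from S3, a time-limited signed URL does the same job. A rough sketch using the official aws-sdk rather than knox; the route path and bucket name below are placeholders, not taken from the question:

// Sketch with aws-sdk v2: redirect the browser to a signed URL so the
// download never passes through the server.
const AWS = require('aws-sdk');
const s3 = new AWS.S3();

app.get('/tracks/:key/download', (req, res) => {
  const url = s3.getSignedUrl('getObject', {
    Bucket: 'YOUR_BUCKET_NAME',   // placeholder bucket name
    Key: req.params.key,
    Expires: 60 * 5               // link valid for 5 minutes
  });
  res.redirect(url);
});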
You'll need to send it to the user. Since you presumably have an Express app, the user can get the file through your API endpoint.
After everything you have done in your question, you will need to send it to the user:
res.sendFile('/path/to/downloaded/s3/object')
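Alternatively, rather than saving to disk first, the knox response can be piped straight into the Express response so the browser receives the file. A rough sketch; the route path and headers are assumptions, not from the question:

app.get('/tracks/:key/download', (req, res) => {
  s3Client.getFile(req.params.key, function(err, s3res) {
    if (err || s3res.statusCode !== 200) {
      return res.status(500).send('Download failed');
    }
    res.setHeader('Content-Type', 'audio/mpeg');
    res.setHeader('Content-Disposition', 'attachment; filename="track.mp3"');
    s3res.pipe(res);   // knox hands back a readable HTTP response
  });
});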
Thank you both @Rashomon and @Martin do santos.
I had to add a client-side script to read the response stream and download the file in the following way:
downloadTrack(track).then((result) => {
  //var convertedBuffer = new Uint8Array(result.data);
  const url = window.URL.createObjectURL(new Blob([result.data], { type: 'audio/mpeg' }));
  const link = document.createElement('a');
  link.href = url;
  link.setAttribute('download', track.formatted_title + '.mp3');
  document.body.appendChild(link);
  link.click();
}, (error) => {
  console.error(error);
});

minizip-asm extract function takes forever to execute

I am trying to fetch an AES-encrypted, password-protected zip file from Google Cloud Storage and extract a csv file from it. I am using Google Cloud Functions for it with Node 6.
I've tried using the minizip-asm.js library to extract the file. It works intermittently. I am a newbie when it comes to Node, so I would really appreciate some help :).
Here's the relevant piece of code which might help. Could someone help me figure out what's going wrong here?
exports.processFile = (event, callback) => {
  const file = event.data;
  const filename = file.name;
  const projectId = "abc1234";
  const bucketName = "abc_reports";

  const Storage = require('@google-cloud/storage');
  const storage = Storage({
    projectId: projectId
  });
  const folder = storage.bucket(bucketName);
  const minizip = require('minizip-asm.js');

  if (file.metageneration === '1' && filename.match(".zip") != null) {
    // metageneration attribute is updated on metadata changes.
    // on create value is 1
    console.log(`File ${file.name} uploaded.`);

    folder.file(filename).download().then(function(data) {
      console.log('Download of file complete');

      // create csv file
      var csvName = filename.split(".zip")[0] + ".csv";
      var mz = new minizip(data[0]);
      console.log(data[0]);
      console.log(mz.list());

      var extract = mz.extract(mz.list()[0].filepath, {
        password: 'ABC#123'
      });
      console.log("extracted");

      // write unzipped contents to file
      folder.file(csvName).save(extract, function(err) {
        if (!err) {
          console.log("unzipped csv");
        }
        else console.log("Error in saving csv : " + err);
      });
    });
  }

  callback(null, 'Success!');
};
Thanks for the help.
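One thing that may explain the intermittent behaviour: callback(null, 'Success!') is invoked synchronously, before the asynchronous download/extract/save chain finishes, so the background function can be terminated before the extraction completes. A minimal, untested sketch of the same flow with the callback moved to the end of the promise chain (same project id, bucket and password as in the question):

exports.processFile = (event, callback) => {
  const file = event.data;
  const filename = file.name;
  const Storage = require('@google-cloud/storage');
  const storage = Storage({ projectId: 'abc1234' });
  const folder = storage.bucket('abc_reports');
  const Minizip = require('minizip-asm.js');

  if (file.metageneration !== '1' || filename.match(".zip") == null) {
    return callback(null, 'Skipped');
  }

  folder.file(filename).download()
    .then(function(data) {
      const mz = new Minizip(data[0]);
      const extract = mz.extract(mz.list()[0].filepath, { password: 'ABC#123' });
      const csvName = filename.split(".zip")[0] + ".csv";
      // save() returns a promise when no callback is passed
      return folder.file(csvName).save(extract);
    })
    .then(function() { callback(null, 'Success!'); })
    .catch(function(err) { callback(err); });
};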

Save node canvas image to firebase server

I'm trying to create an image on the Google Firebase server with node-canvas and store it in Firebase Storage.
const functions = require('firebase-functions');
const admin = require('firebase-admin');
const gcs = require('@google-cloud/storage')();
const path = require('path');
const Canvas = require('canvas-prebuilt');
const env = require('dotenv').config();

try { admin.initializeApp(functions.config().firebase); } catch(e) {}

// Trigger on creation of a new post
exports.default = functions.database.ref('/posts/{postId}').onCreate(event => {
  // Get the postID
  const postId = event.params.postId;
  console.log('We have a new post ' + postId);

  // Get data from the postid
  return admin.database().ref('/posts/' + postId).once('value').then(function(snapshot) {
    const text = snapshot.val().text;

    const canvas = new Canvas(1024, 512);
    const ctx = canvas.getContext('2d');

    // Draw Background
    ctx.fillStyle = '#000';
    ctx.fillRect(0, 0, 1024, 512);

    // Draw Text
    ctx.font = 'bold 66px Arial';
    ctx.textAlign = 'center';
    ctx.fillStyle = '#fff';
    ctx.fillText(text, 120, 256, 784);

    // Use the postID to name the file Ex : -L1rVJUAtSbc_FampT0D.png
    var filename = postId + '.png';

    // Create the file metadata
    var metadata = {
      contentType: 'image/png'
    };

    const bucket = gcs.bucket('images');
    const filePath = 'images/' + filename;

    return canvas.toDataURL('image/png', function(err, png) {
      // Save on Firebase Storage
      return bucket.upload(png, {
        destination: filePath,
        metadata: metadata
      }).then(() => {
        console.log('Image uploaded to Storage at ', filePath);
      });
    });
  });
});
But when I try to save it with toDataURL I get this error:
ENAMETOOLONG: name too long, stat 'data:image/png;base64,iVBORw0 ...'
And when I try with toBuffer I get this one:
TypeError: Path must be a string. Received
    at assertPath (path.js:7:11)
    at Object.basename (path.js:1362:5)
    at Bucket.upload (/user_code/node_modules/@google-cloud/storage/src/bucket.js:2259:43)
    at /user_code/node_modules/@google-cloud/storage/node_modules/@google-cloud/common/src/util.js:777:22
    at Bucket.wrapper [as upload] (/user_code/node_modules/@google-cloud/storage/node_modules/@google-cloud/common/src/util.js:761:12)
    at /user_code/sendTweet.js:107:21
I also tried toBlob, but that function doesn't exist server-side with node-canvas.
Does anyone know how I should save the image server-side before transferring it to Firebase Storage?
Thanks!
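One possible approach, sketched as an assumption rather than a tested answer: bucket.upload() expects a path to a local file, which is why it chokes on both the data URL and the Buffer. node-canvas can render the PNG to a Buffer with canvas.toBuffer(), and the Cloud Storage File#save method accepts a Buffer directly:

// Hedged sketch: write the canvas as a Buffer straight to Cloud Storage
// instead of going through bucket.upload().
const bucket = gcs.bucket('images');
const file = bucket.file('images/' + postId + '.png');

const buffer = canvas.toBuffer();               // synchronous PNG buffer in node-canvas
return file.save(buffer, {
  metadata: { contentType: 'image/png' }        // same metadata as in the question
}).then(() => {
  console.log('Image uploaded to Storage at', file.name);
});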

Google cloud function download file and redirect to bucket storage

I am trying to use a Google Cloud Function in Node.js to download a file from the WordPress repo and then send the file into a Google Cloud bucket. I have the WordPress file downloading, but it fails to write to the Google bucket.
function writeToBucket(jsonObject) {
  /*
   * Google API authentication
   */
  var gcs = require('@google-cloud/storage')({
    projectId: 'wp-media-cdn',
    keyFilename: 'wp-media-cdn-d9d7c61bfad9.json'
  });

  /*
   * rename image file with image size, format: size X size imgName
   */
  var pluginUrl = "https://downloads.wordpress.org/plugin/bbpress.2.5.14.zip";
  newPluginName = "bbpress";

  /*
   * Read image into stream, upload image to bucket
   */
  var request = require('request');
  var fs = require('fs'); // used for createWriteString()
  var myBucket = gcs.bucket('test_buckyy'); // PUT BUCKET NAME HERE
  var file = myBucket.file(nnewPluginName);

  // file.exists() returns true if file already in bucket, then returns file url, exits function
  if (file.exists()) {
    return 'https://storage.googleapis.com/${test_buckyy}/${file}';
  }

  // pipes image data into fileStream
  var fileStream = myBucket.file(newImageName).createWriteStream();
  request(imgUrl).pipe(fileStream)
    .on('error', function(err) {
      console.log('upload failed');
    })
    .on('finish', function() {
      console.log('file uploaded');
    });

  /*
   * return image url
   * use getSignedUrl
   */
  return 'https://storage.googleapis.com/${test_buckyy}/${file}';
}
I just replicated your use case and successfully downloaded the file into the temporary folder of a Cloud Function, and from there I copied the file into a bucket.
In order to achieve this, I downloaded the file using createWriteStream into the /tmp folder, since it is the only folder where we can store files in a Cloud Function, as stated in the Cloud Functions Execution Environment documentation.
After that, I just copied the file to a bucket following the Cloud Storage Uploading Objects documentation.
You can take a look at my sample function.
index.js
const {Storage} = require('@google-cloud/storage');

exports.writeToBucket = (req, res) => {
  const http = require('http');
  const fs = require('fs');

  const file = fs.createWriteStream("/tmp/yourfile.jpg");
  const request = http.get("YOUR_URL_TO_DOWNLOAD_A_FILE", function(response) {
    response.pipe(file);
  });
  console.log('file downloaded');

  // Imports the Google Cloud client library
  const {Storage} = require('@google-cloud/storage');

  // Creates a client
  const storage = new Storage();

  const bucketName = 'YOUR_BUCKET_NAME';
  const filename = '/tmp/yourfile.jpg';

  // Uploads a local file to the bucket
  storage.bucket(bucketName).upload(filename, {
    gzip: true,
    metadata: {
      cacheControl: 'no-cache',
    },
  });

  res.status(200).send(`${filename} uploaded to ${bucketName}.`);
};
package.json
{
"name": "sample-http",
"version": "0.0.1",
"dependencies": {
"#google-cloud/storage": "^3.0.3"
}
}
Using Chris32's answer, I've created a similar version that avoids downloading the image to the tmp folder. Hope it's useful!
'use strict';

const http = require('http');
const {Storage} = require('@google-cloud/storage');

exports.http = (request, response) => {
  const imageUrl = request.body.url;
  const fileName = imageUrl.substring(imageUrl.lastIndexOf('/') + 1);

  const storage = new Storage({ keyFilename: "keyfile.json" });
  const bucket = storage.bucket('MY_BUCKET_NAME');
  const file = bucket.file(fileName);

  console.log('Uploading image');
  http.get(imageUrl, function(res) {
    res.pipe(
      file.createWriteStream({
        resumable: false,
        public: true,
        metadata: {
          contentType: res.headers["content-type"]
        }
      })
    );
  });
  console.log('Image uploaded');
  response.status(201).send('Image successful uploaded!');
};

exports.event = (event, callback) => {
  callback();
};
