Uploading a buffer to Google Cloud Storage - Node.js

I'm trying to save a Buffer (of a file uploaded from a form) to Google Cloud Storage, but it seems like the Google Node SDK only allows files with a given path to be uploaded (read/write streams).
This is what I have used for AWS (S3) - is there anything similar in the Google Node SDK?
var fileContents = new Buffer('buffer');
var params = {
  Bucket: //bucket name
  Key: //file name
  ContentType: // Set mimetype
  Body: fileContents
};
s3.putObject(params, function(err, data) {
  // Do something
});
The only way I have found to do it so far is to write the buffer to disk, upload the file using the SDK (specifying the path to the new file), and then delete the file once it's uploaded successfully. The downside is that the whole process is significantly slower, to the point where it seems unfeasible to use Google Cloud Storage. Is there any workaround / way to upload a buffer?
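For reference, a minimal sketch of the disk-based workaround described above, assuming a bucket obtained from @google-cloud/storage and Node's built-in fs, os and path modules (the answers below avoid the temp file entirely):

const fs = require('fs');
const os = require('os');
const path = require('path');

// Workaround: write the buffer to a temp file, upload by path, then clean up
async function uploadBufferViaTempFile(bucket, buffer, destName) {
  const tmpPath = path.join(os.tmpdir(), path.basename(destName));
  await fs.promises.writeFile(tmpPath, buffer);
  await bucket.upload(tmpPath, { destination: destName });
  await fs.promises.unlink(tmpPath);
}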

.save to save the day! Some code below where I save my "pdf" that I created.
https://googleapis.dev/nodejs/storage/latest/File.html#save
const path = require("path");
const { Storage } = require("@google-cloud/storage");

const gc = new Storage({
  keyFilename: path.join(__dirname, "./path to your service account .json"),
  projectId: "your project id",
});

const file = gc.bucket(bucketName).file("tester.pdf");

file.save(pdf, (err) => {
  if (!err) {
    console.log("cool");
  } else {
    console.log("error " + err);
  }
});
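If you prefer promises, file.save also works without a callback (it returns a promise, per the File#save docs linked above); a short sketch reusing bucketName and pdf from the snippet above, and borrowing the contentType option shown in a later answer on this page:

const file = gc.bucket(bucketName).file("tester.pdf");
await file.save(pdf, { contentType: "application/pdf" });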

This is actually easy:
const stream = require('stream'); // Node's built-in stream module

let remotePath = 'some/key/to/store.json';
let localReadStream = new stream.PassThrough();
localReadStream.end(JSON.stringify(someObject, null, ' '));

let remoteWriteStream = bucket.file(remotePath).createWriteStream({
  metadata: {
    contentType: 'application/json'
  }
});

localReadStream.pipe(remoteWriteStream)
  .on('error', err => {
    return callback(err);
  })
  .on('finish', () => {
    return callback();
  });
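A small aside (not from the original answer): on Node 12+ you can skip the PassThrough and build a readable stream straight from in-memory data with stream.Readable.from; a sketch reusing remoteWriteStream and callback from above:

const { Readable } = require('stream');

// Wrap the buffer in an array so it is emitted as a single chunk
Readable.from([Buffer.from(JSON.stringify(someObject))])
  .pipe(remoteWriteStream)
  .on('error', err => callback(err))
  .on('finish', () => callback());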

We have an issue about supporting this more easily: https://github.com/GoogleCloudPlatform/gcloud-node/issues/1179
But for now, you can try:
file.createWriteStream()
  .on('error', function(err) {})
  .on('finish', function() {})
  .end(fileContents);
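If you'd rather not juggle event callbacks, a hedged sketch of the same pattern wrapped in a promise (file and fileContents as in the snippets above):

function saveBuffer(file, fileContents) {
  return new Promise((resolve, reject) => {
    const writeStream = file.createWriteStream();
    writeStream.on('error', reject);
    writeStream.on('finish', resolve);
    writeStream.end(fileContents);
  });
}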

The following snippet is from a Google example. The example assumes you have used multer, or something similar, and can access the file at req.file. You can stream the file to Cloud Storage using middleware that resembles the following:
function sendUploadToGCS (req, res, next) {
  if (!req.file) {
    return next();
  }

  const gcsname = Date.now() + req.file.originalname;
  const file = bucket.file(gcsname);

  const stream = file.createWriteStream({
    metadata: {
      contentType: req.file.mimetype
    },
    resumable: false
  });

  stream.on('error', (err) => {
    req.file.cloudStorageError = err;
    next(err);
  });

  stream.on('finish', () => {
    req.file.cloudStorageObject = gcsname;
    file.makePublic().then(() => {
      req.file.cloudStoragePublicUrl = getPublicUrl(gcsname);
      next();
    });
  });

  stream.end(req.file.buffer);
}
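For context, a hedged sketch of how such middleware is typically wired up, assuming multer with in-memory storage (so req.file.buffer is populated) and a hypothetical getPublicUrl helper matching the call above:

const multer = require('multer');

const m = multer({ storage: multer.memoryStorage() });

// Hypothetical helper matching the getPublicUrl call in the middleware
function getPublicUrl(filename) {
  return `https://storage.googleapis.com/${bucket.name}/${filename}`;
}

app.post('/upload', m.single('file'), sendUploadToGCS, (req, res) => {
  res.json({ url: req.file.cloudStoragePublicUrl });
});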

I have this approach working for me:
const destFileName = `someFolder/${file.name}`;
const fileCloud = this.storage.bucket(bucketName).file(destFileName);

fileCloud.save(file.buffer, {
  contentType: file.mimetype
}, (err) => {
  if (err) {
    console.log("error");
  }
});

Related

How to make express download link with GridFsBucket?

As the title says, how do you make a direct download link for a file from MongoDB (GridFSBucket) using Express?
The file should be downloadable from memory, as I don't want to save it temporarily on the server.
I have this method:
async function downloadFileFromDB(fileId) {
  var gridfsbucket = new mongoose.mongo.GridFSBucket(mongoose.connection.db, {
    chunkSizeBytes: 1024,
    bucketName: 'filesBucket'
  });
  try {
    const stream = gridfsbucket.openDownloadStream(fileId)
    const fileBuffer = Buffer.from(stream)
    return fileBuffer
  } catch (err) {
    stream.on('error', () => {
      console.log("Some error occurred in download:" + error);
    })
    console.log(err);
  }
}
And this route:
router.get('/download-file', async (req, res) => {
  const fileId = req.query.fileId
  const ObjectFileId = new ObjectId(fileId)
  const fileBuffer = await fileFacade.downloadFileFromDB(ObjectFileId)
  res.download(fileBuffer)
})
But res.download wants a path and not a buffer. Also, I'm not sure I can make a buffer directly from the openDownloadStream method.
Can anyone help?
I believe you need to write the data to your res object. I accomplished this like so:
const readStream = gridfs.openDownloadStreamByName(filename);

readStream.on("data", (chunk) => {
  res.write(chunk);
});

readStream.on("end", () => {
  res.status(200).end();
  mongoClient.close();
});

readStream.on("error", (err) => {
  console.log(err);
  res.status(500).send(err);
});
So, you may just have to do:
res.write(fileBuffer);
res.end();
// Instead of doing:
// res.download(fileBuffer);
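Alternatively (a common pattern, not from the original answers), you can skip buffering entirely and pipe the GridFS download stream straight into the response; a sketch reusing the gridfsbucket and fileId from the question:

router.get('/download-file', (req, res) => {
  const fileId = new ObjectId(req.query.fileId);

  // Hint the browser to download rather than render inline
  res.set('Content-Disposition', 'attachment');
  gridfsbucket.openDownloadStream(fileId)
    .on('error', (err) => res.status(500).send(err.message))
    .pipe(res);
});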

Express Router - uploading to S3 and MongoDb

I'm trying to do the following in Node.js using Express Router, multer-s3, multer, AWS and MongoDB.
I want to:
1: Check if the filetype is an image, the price is a number, etc. (some kind of quality check)
2: If the above is true, upload the image to S3 to get the image URL
3: If the image URL was generated, upload to MongoDB, including the generated image URL.
I'm trying with the code below but can only get one of these to work at the same time.
const express = require("express");
const router = express.Router();
const shopController = require("../controllers/shop");

router.post(
  "/shop/create/:shopId",
  shopController.creatingShop,
  shopController.createShopItem
);

const ShopItem = require("../models/shopitem"); //Mongoose Schema
const multer = require("multer");
const fileview = multer().single("file1"); //Trying to use this to view file before uploading to S3
const uploader = require("../services/file-upload");
const singleUpload = uploader.single("file1"); //Using this to upload to S3

exports.createShopItem = (req, res, next) => {
  fileview(req, res, function (err) {
    const file = req.file;
    const title = req.body.title;
    const price = req.body.price;
    const description = req.body.description;
    const location = req.body.location;
    const user = "OrreSnorre";

    if (
      file.mimetype != "image/jpeg" &&
      file.mimetype != "image/jpg" &&
      file.mimetype != "image/png"
    ) {
      return next(new Error("invalid file type"));
    }

    if (file.size > 2500000) {
      return next(new Error("Your image is too big. Maximum 2.5mb"));
    }

    next();
    console.log(
      "Here I want to add upload text to mongoDb... including URL from S3 after it is generated"
    );
  });
};

exports.creatingShop = (req, res, next) => {
  singleUpload(req, res, function (err) {
    console.log(req.file);
    // res.json({ "image-url": req.file.location });
  });
  next();
};
Anyone got ideas? Or examples that work?
Best regards,
Oscar
There are 2 ways to do this: you can use only multer, or multer-s3.
For simplicity, I will show you the way using only multer.
The flow of processing is as follows:
1: Multer processes the file and saves it to local disk.
2: You read the file from local disk and upload it to S3 using the S3 SDK (you should explore how to remove the file after upload as well, but I won't clutter this with that logic here).
3: If the upload is successful, you retrieve the URL and pass it to your MongoDB.
// Make "temp" directory as multer.diskStorage wont create folder
fs.mkdir('./temp', { recursive: true }, (err) => {
if (err) throw err;
});
const PORT = parseInt(process.argv[2]) || parseInt(process.env.PORT) || 3000;
// Multer
const storage = multer.diskStorage({
destination: function (req, file, cb) {
cb(null, './temp');
},
filename: function (req, file, cb) {
let extArray = file.mimetype.split('/');
let extension = extArray[extArray.length - 1];
cb(null, new Date().getTime() + '.' + extension);
},
});
const upload = multer({ storage: storage });
const endpoint = new AWS.Endpoint(AWS_S3_HOSTNAME);
const s3 = new AWS.S3({
endpoint,
accessKeyId: AWS_S3_ACCESSKEY_ID,
secretAccessKey: AWS_S3_SECRET_ACCESSKEY,
});
// Get the uploaded file in local here
const readFile = (path) =>
new Promise((resolve, reject) =>
fs.readFile(path, (err, buff) => {
if (null != err) reject(err);
else resolve(buff);
})
// Upload to AWS S3 here
const putObject = (file, buff, s3) =>
new Promise((resolve, reject) => {
const params = {
Bucket: AWS_S3_BUCKET_NAME,
Key: file.filename,
Body: buff,
ACL: 'public-read',
ContentType: file.mimetype,
ContentLength: file.size,
};
s3.putObject(params, (err, result) => {
if (null != err) reject(err);
else resolve(file.filename);
});
});
);
const mongoClient = new MongoClient(MONGO_URL, {
useNewUrlParser: true,
useUnifiedTopology: true,
});
app.post('/api/post', upload.single('imageFile'), async (req, res) => {
readFile(req.file.path)
.then((buff) =>
// Insert Image to S3 upon succesful read
putObject(req.file, buff, s3)
)
.then((results) => {
// build url of the resource upon successful insertion
const resourceURL = `https://${AWS_S3_BUCKET_NAME}.${AWS_S3_HOSTNAME}/${results}`;
const doc = {
comments,
title,
ts: new Date(),
image: resourceURL, // Your URL reference to image here
};
// Insert to your mongoDB
mongoClient
.db(MONGO_DB)
.collection(MONGO_COLLECTION)
.insertOne(doc)
.then((results) => {
// delete the temp file when no error from MONGO & AWS S3
fs.unlink(req.file.path, () => {});
// return the inserted object
res.status(200).json(results.ops[0]);
})
.catch((error) => {
console.error('Mongo insert error: ', error);
res.status(500);
res.json({ error });
});
})
.catch((error) => {
console.error('insert error: ', error);
res.status(500);
res.json({ error });
});
}
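The answer above mentions multer-s3 as the other option. For reference, a hedged sketch of that route (not part of the original answer): multer-s3 streams the upload straight to S3 and exposes the object URL on req.file.location, so no temp folder is needed. The s3 instance, bucket name and field name reuse the placeholders from above:

const multer = require('multer');
const multerS3 = require('multer-s3');

const uploadS3 = multer({
  storage: multerS3({
    s3,
    bucket: AWS_S3_BUCKET_NAME,
    acl: 'public-read',
    contentType: multerS3.AUTO_CONTENT_TYPE,
    key: (req, file, cb) => cb(null, Date.now() + '-' + file.originalname),
  }),
});

app.post('/api/post-s3', uploadS3.single('imageFile'), (req, res) => {
  // multer-s3 sets req.file.location to the uploaded object's URL
  res.json({ image: req.file.location });
});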

Uploading multiple images to S3 with Node - map problem

I'm struggling to find a solution to upload two files to S3. I can upload one file with multer and I have learnt how to do it, but when I try to map over all the files in the form data and upload each file, I push each location URL (the one I save in my database) into an array. Then I try to print each URL, but to my surprise they are printed inside the if statement and not when I save them to the database outside the if. Could it be an asynchronous problem?
Thanks.
tournamentsCtrl.createTournament = async (req, res) => {
  var files_upload = []

  if (req.files) {
    aws.config.setPromisesDependency();
    aws.config.update({
      accessKeyId: process.env.AWS_ACCESS_KEY_ID,
      secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
      //region: process.env.REGION
    });
    const s3 = new aws.S3();

    req.files.map((item) => {
      var params = {
        ACL: 'public-read',
        Bucket: process.env.AWS_BUCKET_NAME,
        Body: fs.createReadStream(item.path),
        Key: `tournament_img/${uuidv4()/* +req.file.originalname */}`
      };

      await s3.upload(params, (err, data) => {
        if (err) {
          console.log('Error occured while trying to upload to S3 bucket', err);
        }
        if (data) {
          fs.unlinkSync(item.path); // Empty temp folder
          const locationUrl = data.Location;
          files_upload.push(locationUrl);
          console.log(files_upload)
        }
      });
    });
  }

  console.log(files_upload)

  const new_data = { ...JSON.parse(req.body.values), img_source: files_upload[0], info_url: files_upload[1] }
  console.log(new_data)

  const newUser = new Tournaments(new_data);
  newUser
    .save()
    .then(user => {
      res.json({ message: 'User created successfully', user });
    })
    .catch(err => {
      console.log('Error occured while trying to save to DB');
    });
};
If you look at the docs for upload, it does not return a promise, so you should not call await on it. The default map method is not compatible with async code in this form. You need to either use async.map or wrap the async code in a promise, like:
return await new Promise((resolve, reject) => {
  ...
  if (data) {
    fs.unlinkSync(item.path);
    resolve(data.Location);
  }
});
Your other code has some issues as well. A map function should return a value; if you don't want to return anything, you should use forEach.
This is a bad place to ask for code advice, but something like the following should work:
function uploadFile(s3, element) {
  return new Promise((resolve, reject) => {
    let folder;
    if (element.fieldname.includes('img')) {
      folder = 'club_images'
    } else if (element.fieldname.includes('poster')) {
      folder = 'poster_tournament'
    } else {
      folder = 'info_tournament'
    }

    const params = {
      ACL: 'public-read',
      Bucket: process.env.AWS_BUCKET_NAME,
      Body: fs.createReadStream(element.path),
      Key: `${folder + '/' + uuidv4() + element.fieldname}`
    };

    s3.upload(params, (err, data) => {
      if (err) {
        return reject(err);
      }
      if (data) {
        return fs.unlink(element.path, err => {
          if (err) {
            console.error("Failed to unlink file", element.path);
          }
          return resolve({ [element.fieldname]: data.Location });
        }); // Empty temp folder
      }
      return resolve();
    });
  })
}

tournamentsCtrl.createTournament = async (req, res) => {
  aws.config.setPromisesDependency();
  aws.config.update({
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
    //region: process.env.REGION
  });
  const s3 = new aws.S3();

  let returnData;
  try {
    const uploadData = await Promise.all(req.files.map(element => uploadFile(s3, element)));
    returnData = Object.assign({}, ...uploadData);
    console.log(Object.assign(JSON.parse(req.body.values), returnData));
  } catch (e) {
    console.error('Failed to upload file', e);
    return res.sendStatus(500);
  }

  const newUser = new Tournaments(Object.assign(JSON.parse(req.body.values), returnData));
  console.log(newUser)

  try {
    const user = await newUser.save()
    res.json({ message: 'User created successfully', user });
  } catch (e) {
    console.error('Error occured while trying to save to DB');
    return res.sendStatus(500);
  }
};

Image with 0 bytes in Firebase Storage upload from Node.js

I'm trying to upload an image to Firebase Storage from Node.js.
I followed the flow that Firebase gives in their docs and everything runs fine, but when the image is in Storage its size is 0 bytes and you cannot see the preview.
This is my code:
const uploadImageToStorage = (file, filename) => {
  let prom = new Promise((resolve, reject) => {
    if (!file) {
      reject('No image file');
    }
    let newFileName = `${file.originalname}_${Date.now()}`;
    let fileUpload = bucket.file(newFileName);

    const blobStream = fileUpload.createWriteStream({
      metadata: {
        contentType: file.mimetype
      }
    });

    blobStream.on('error', (error) => {
      reject('Something is wrong! Unable to upload at the moment.');
    });

    blobStream.on('finish', () => {
      // The public URL can be used to directly access the file via HTTP.
      const url = `https://storage.googleapis.com/${bucket.name}/${fileUpload.name}`;
      resolve(url);
    });

    blobStream.end(file.buffer);
  });
  return prom;
}
This is my app.post method:
app.post('/Upload_img', multer.single("file"), function (req, res) {
  console.log("Upload Imagennes");
  let url = "";
  let file = req.file;
  if (file) {
    uploadImageToStorage(file, file.name).then((success) => {
      url = success;
      res.status(200).send(url);
    }).catch((error) => {
      console.error(error);
    });
  }
});
The Storage: (screenshot of the Storage console showing the 0-byte file omitted)
I managed to upload a png file using your uploadImageToStorage function. I was calling it directly, without using multer:
const fs = require('fs');

(async () => {
  const buffer = fs.readFileSync('./android.png');
  const file = {
    originalname: 'android.png',
    mimetype: 'image/png',
    buffer,
  }
  try {
    return await uploadImageToStorage(file, file.originalname);
  } catch (err) {
    console.log(err);
  }
})();
This works as expected. So the problem is probably in your Express.js code or in multer. Try logging req.file and req.file.buffer in the server code, and see if it has the expected data.
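One common cause worth checking (my assumption, not stated in the original answer): if multer is configured with disk storage, req.file.buffer is undefined, so blobStream.end(undefined) closes the stream with no data and leaves a 0-byte object. Configuring multer with in-memory storage populates the buffer:

const multer = require('multer');

// Keep uploads in memory so req.file.buffer is populated
const upload = multer({ storage: multer.memoryStorage() });

app.post('/Upload_img', upload.single('file'), (req, res) => {
  console.log(req.file && req.file.buffer ? req.file.buffer.length : 'no buffer');
  // ...then call uploadImageToStorage(req.file, req.file.originalname) as above
});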

Creating a GIF from remote stream in graphicsmagick

I am currently creating a GIF from remote files in Node by downloading each image to the file system into a tmp folder.
I want to bypass saving the image to a tmp folder and save in memory instead. Is this possible?
As you can see, I have a download function in my AWS class which saves to a tmp folder:
download(key) {
  return new Promise((resolve, reject) => {
    request.head(`${this.base_url}/${this.bucket}/${key}`, (err, res, body) => {
      request(`${this.base_url}/${this.bucket}/${key}`)
        .pipe(fs.createWriteStream(`tmp/${key}`)).on('close', resolve)
    })
  })
};
Once they have all downloaded, I have a createGif function in my GifService class which adds each file path as a custom argument of gm, adds a delay of 50ms, resizes, then outputs as a buffer which I am then uploading to AWS S3.
import gm from 'gm';

...

constructor() {
  this.gm = gm()
}

generateGif(images, prefix) {
  return new Promise((resolve, reject) => {
    // for each image we want in array, we pass to gm
    images.forEach(image => {
      this.gm.in(`tmp/${image.Key}`)
    })

    // Now we create the gif with 50sec delay between images, sized to 600px x 2
    this.gm
      .delay(50)
      .resize(600, 600)
      .toBuffer('gif', async (err, buffer) => {
        if (err) reject(err)

        const params = {
          ACL: 'public-read',
          Bucket: config.aws_bucket,
          ContentType: 'image/gif',
          Key: `${prefix}/${uuid()}.gif`,
          Body: buffer
        }
        try {
          // upload to S3
          const upload = await this.aws.upload(params)
          // resolve s3 URL
          resolve(upload)
        } catch (err) {
          console.log('err', err)
          reject(err)
        }
      });
  })
}
Ideally I would like to pass a remote file stream as a custom argument, or pass a buffer in as a custom argument, as opposed to how I am currently passing in the tmp file path:
images.forEach(image => {
  this.gm.in(`tmp/${image.Key}`)
})
I managed to make it work using only streams, by first converting the images to MIFF and concatenating them into a single stream. Then passing the buffer or the stream into gm again with a delay does the trick.
You will need to install the concat-stream npm package for this to work.
Sorry for the mixed ES5 code.
import gm from 'gm';
var concat = require('concat-stream');

...

constructor() {
  this.gm = gm()
}

start() {
  return getYourReadAbleStreamsSomehow().then(streams => {
    return this.generateGif(streams, 50);
  }).then(gifBuffer => {
    return this.uploadToAWS(gifBuffer, prefix);
  }).catch(err => {
    console.log(err)
  })
}

async uploadToAWS(buffer, prefix) {
  const params = {
    ACL: 'public-read',
    Bucket: config.aws_bucket,
    ContentType: 'image/gif',
    Key: `${prefix}/${uuid()}.gif`,
    Body: buffer
  }
  try {
    // upload to S3
    const upload = await this.aws.upload(params)
    // return the s3 URL
    return upload
  } catch (err) {
    console.log('err', err)
    throw err
  }
}

generateGif(imageStreams, delay) {
  return new Promise((resolve, reject) => {
    var write = concat(function(buffer) {
      gm(buffer)
        .delay(delay)
        .toBuffer('gif', function(err, buffer) {
          if (err)
            reject(err);
          resolve(buffer);
        })
    })

    // Convert each image to miff and concat the streams into one
    var i = 0;
    var streamHandler = function() {
      gm(imageStreams[i])
        .resize('600', '600')
        .stream('miff', function(err, stdout, stderr) {
          if (err)
            reject(err)
          var lastOne = i === imageStreams.length - 1;
          if (!lastOne)
            stdout.once('end', streamHandler)
          stdout.pipe(write, {
            end: lastOne
          });
          i++;
        });
    }
    streamHandler();
  })
}
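For the "remote files" part of the question, a hedged sketch of one way to implement getYourReadAbleStreamsSomehow directly against S3 with the AWS SDK v2, so nothing is written to the tmp folder (the bucket name and keys are placeholders):

const AWS = require('aws-sdk');
const s3 = new AWS.S3();

// Returns an array of readable streams, one per S3 object, without touching disk
function getYourReadAbleStreamsSomehow(keys) {
  return Promise.resolve(
    keys.map(Key =>
      s3.getObject({ Bucket: 'your-bucket', Key }).createReadStream()
    )
  );
}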
