Problem sending base64 data in a GET request - node.js

Hi, I am facing issues sending base64 data in a GET request.
I was able to convert the image into base64 data and insert it into receivedFile,
but in the response the attachments come back as an empty array, while the rest of the data, i.e. user_id, flows through successfully.
Could you please help me resolve this issue?
Below is the code:
router.js
router.get('/users/data/expand/:nid', async (req, res) => {
  var idselected = req.params.nid;
  var dir = '\images';
  var receivedFile = [];
  try {
    const checkData = await user.find({ "user_id": idselected });
    await checkData[0].attachments.forEach(element => {
      fs.readdir(dir, function (err, files) {
        if (err) {
          console.log(err)
        } else {
          files.forEach((filename) => {
            filename = element;
            fs.readFile(filename, 'base64', (err, base64Data) => {
              if (err) {
                console.log(err);
              }
              receivedFile.push(base64Data);
            })
          })
        }
      })
    })
    // issue is here: attachments comes back empty instead of holding the base64 data
    const returnUser = new User({
      user_id: checkData.user_id,
      attachments: receivedFile
    })
    res.status(201).send(returnUser);
  }
  catch (e) {
    res.status(500).send(e)
  }
})

Well, it's always good to create helper functions and to promisify callbacks so you can use async/await syntax.
I have changed your code. I haven't tested it, but I think it should work:
router.get("/users/data/expand/:nid", async (req, res) => {
var idselected = req.params.nid;
var dir = "images";
try {
const checkData = await user.findOne({ user_id: idselected });
let receivedFile = await Promise.all(
checkData.attachments.flatMap(async element => {
let files = await readDirectory(dir);
return await Promise.all(
files.map(filename => {
filename = element;
return readFile(filename)
})
);
})
);
const returnUser = new User({
user_id: checkData.user_id,
attachments: receivedFile
});
let savedUser = await returnUser.save();
res.status(201).send(savedUser);
} catch (e) {
res.status(500).send(e);
}
});
function readDirectory(dir) {
return new Promise((res, rej) => {
fs.readdir(dir, function(err, files) {
if (err) {
rej(err);
} else {
res(files);
}
});
});
}
function readFile(filename) {
return new Promise((res, rej) => {
fs.readFile(filename, "base64", (err, base64Data) => {
if (err) {
rej(err);
}
res(base64Data);
});
});
}
I guess you are using mongoose. user.find returns an array, so checkData.user_id on its result is undefined; there is a method called findOne that returns a single document.
You also forgot to save your model with returnUser.save().
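As a side note, Node's built-in fs.promises API can replace hand-rolled wrappers like readDirectory and readFile entirely; a minimal sketch (Node 10+, assuming the same fs import):
const fs = require("fs");
// promise-based equivalents of the helpers above
const readDirectory = (dir) => fs.promises.readdir(dir);
const readFile = (filename) => fs.promises.readFile(filename, "base64");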

Related

Issue when sending multiple images in a GET method in Node.js

I am facing an issue with sending images in a GET method in node.js. As you can see in the code below, the files are saved in a folder called images, and the path of each file is stored in the object.
I tried searching for the file in the directory and then converting it to base64 to send the images, but instead all the files in the directory are being converted to base64 and sent. Could you please help me resolve this issue?
Please let me know if you require any further information from my end.
Router.js
router.get("/users/data/expand/:nid", async (req, res) => {
var idselected = req.params.nid;
var dir = "images";
try {
const checkData = await user.findOne({ user_id: idselected });
let receivedFile = await Promise.all(
checkData.attachments.flatMap(async element => {
let files = await readDirectory(dir);
return await Promise.all(
files.map(filename => {
filename = element;
return readFile(filename)
})
);
})
);
const returnUser = new User({
user_id: checkData.user_id,
attachments: receivedFile
});
let savedUser = await returnUser.save();
res.status(201).send(savedUser);
} catch (e) {
res.status(500).send(e);
}
});
function readDirectory(dir) {
return new Promise((res, rej) => {
fs.readdir(dir, function(err, files) {
if (err) {
rej(err);
} else {
res(files);
}
});
});
}
function readFile(filename) {
return new Promise((res, rej) => {
fs.readFile(filename, "base64", (err, base64Data) => {
if (err) {
rej(err);
}
res(base64Data);
});
});
}

When trying to write an image file, execution moves ahead and the write completes late

What I am trying to do here: I am writing an image file after resizing it with node, and after that I will upload it. But when I write it, my upload code starts while the file is still not written.
async function getdataFromJimpSmall(image, filePath, FILE_NAME) {
  try {
    var isSucess = false;
    const im = await Jimp.read(image);
    return new Promise((resolve, reject) => {
      try {
        im.resize(150, 215, Jimp.RESIZE_BEZIER, function (err) {
          if (err) throw err;
        });
        im.write(filePath + FILE_NAME)
        resolve(true)
      }
      catch (err) {
        console.log(err);
        reject(false);
      }
    })
  }
  catch (err) {
    console.log('getdataFromJimpSmall Err====>' + err)
    return false
  }
}
I tried to use this, but the code after it runs ahead of the write.
It is called from here:
isThumbnailUrlavailable = await jimpController.imageCoverJimpAndUploadToS3small(getISBNBook, "-#gmail.com", "SaleIsbn")
isThumbnailUrlavailableMedium = await jimpController.imageCoverJimpAndUploadToS3medium(getISBNBook, "-#gmail.com", "SaleIsbn")
And the first function, imageCoverJimpAndUploadToS3small(), is this:
exports.imageCoverJimpAndUploadToS3small = async (getContent, email, file) => {
  var tempdata = false; // declared here so the catch block below can see it
  try {
    var elementType = 15;
    var filePath = path.join(__dirname, '../../uploads/t/')
    var FILE_NAME = `${getContent.contentMetaDataRef.isbn}_s.jpg`
    var image = getContent.thumbnail
    tempdata = await getdataFromJimpSmall(image, filePath, FILE_NAME)
    console.log('temp - ', tempdata);
    if (tempdata == true) {
      var data = await commonController.uploadthumbnailTOPublicS3(filePath, FILE_NAME)
      console.log('s3', data);
      requestImageSize(data.url).then(size => {
        console.log(size);
        if (size.height > 0 && size.width > 0) {
          tempdata = true;
          const getContentElement = ContentElement.findOne({ contentId: getContent._id, elementType, elementData: data.keyName }).lean()
          if (getContentElement) {
            ContentElement.findByIdAndUpdate(getContentElement._id, {
              createdOn: Date(), modifiedOn: Date(),
            }, { new: true })
          } else {
            return tempdata;
          }
        } else {
          tempdata = false;
          return tempdata;
        }
      }).catch(err => {
        console.error(err);
        tempdata = false;
        return tempdata;
      });
    }
  }
  catch (error) {
    console.log(error)
    tempdata = false;
    return tempdata;
  }
}
But it is not working.
Again, I call imageCoverJimpAndUploadToS3small(), and the code after it starts before the file is written.
As per the docs (https://www.npmjs.com/package/jimp#writing-to-files-and-buffers), .write runs asynchronously and takes a callback, which is why the promise resolves before the image is written.
So you need to use the callback and resolve from there:
im.write(filePath + FILE_NAME, (err) => {
  if (err) throw err;
  resolve(true);
});
Or, as the docs say, you can use and await .writeAsync, which is promise-based:
await im.writeAsync(filePath + FILE_NAME);
resolve(true);
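For completeness, the question's helper could then drop the manually constructed Promise entirely; a sketch, untested, assuming jimp's synchronous, chainable resize and promise-based writeAsync:
async function getdataFromJimpSmall(image, filePath, FILE_NAME) {
  try {
    const im = await Jimp.read(image);
    im.resize(150, 215, Jimp.RESIZE_BEZIER); // resize mutates the image in place
    await im.writeAsync(filePath + FILE_NAME); // resolves only once the file is on disk
    return true;
  } catch (err) {
    console.log('getdataFromJimpSmall Err====>' + err);
    return false;
  }
}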

Upload files to AWS S3 from Node.js

I am using the aws-sdk to upload user-input images, get each image's link back from AWS, and store the link in MongoDB. The problem is that s3.upload() is asynchronous.
const imgSRC = [];
for (let img of image) {
  console.log(img);
  const params = {
    Bucket: process.env.AWS_BUCKET,
    Key: `${img.originalname}_${userID}`,
    Body: img.buffer,
  };
  s3.upload(params, (error, data) => {
    if (error) {
      console.log(error);
      res.status(500).json({ msg: "server error" });
    }
    imgSRC.push(data.Location);
    console.log(imgSRC);
  });
}
const newPost = new Post({
  userID: userID,
  contentID: contentID,
  posts: [
    {
      caption: caption,
      data: imgSRC,
    },
  ],
});
const post = await newPost.save();
So when the .save to MongoDB runs, there are no image links from AWS yet. How can I fix this?
I've already tried async and it didn't work.
You need to use Promise.all(), in this manner:
const uploadImage = (obj) => {
  return new Promise((resolve, reject) => {
    const params = {
      Bucket: process.env.AWS_BUCKET,
      Key: obj.key,
      Body: obj.body,
    };
    s3.upload(params, (error, data) => {
      if (error) {
        console.log(error);
        return reject(error);
      }
      return resolve(data); // resolve with the upload result, not a bare return
    });
  });
};

const mainFunction = async () => {
  const promises = [];
  for (let img of image) {
    const options = {
      key: `${img.originalname}_${userID}`,
      body: img.buffer
    };
    promises.push(uploadImage(options));
  }
  const result = await Promise.all(promises);
  const imgSRC = result.map((r) => r.Location);
  return imgSRC;
};
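With that in place, the route code can await mainFunction() before building the post, so the links exist when .save() runs; a sketch, assuming the surrounding variables from the question:
const imgSRC = await mainFunction();
const newPost = new Post({
  userID: userID,
  contentID: contentID,
  posts: [{ caption: caption, data: imgSRC }],
});
const post = await newPost.save();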
If you want to use await with the s3.upload method, you should remove the callback; with the v2 AWS SDK, s3.upload(params) returns a managed upload, so call .promise() on it to get an awaitable:
try {
  const data = await s3.upload(params).promise();
  imgSRC.push(data.Location);
  console.log(imgSRC);
} catch (e) {
  console.log(e);
  res.status(500).json({ msg: "server error" });
}
Let me know if it works.

Axios POSTed FormData has empty body on server side

This is the client-side code. Data is not empty, and the file is uploaded correctly.
export function addGame(data) {
  return dispatch => {
    const formData = new FormData();
    formData.append("game.cover", data.gameCover[0]);
    formData.append("game.title", data.gameTitle);
    formData.append("game.price", data.gamePrice);
    formData.append("game.description", data.description);
    return axios.post(apiUrl + "/games/add", formData).then(res => {
      dispatch({ type: ADD_GAME, payload: res.data.game });
    });
  };
}
And this is the server side:
router.post("/add", auth, async (req, res) => {
const body = await req.body;
console.log(body);
let formErrors = [];
if (!body.gameTitle) formErrors.push("Game title is required.");
if (!body.description) formErrors.push("Description is required.");
if (!body.gamePrice) formErrors.push("Price is required.");
if (formErrors.length) res.status(400).send({ success: false, formErrors });
else {
let gameCoverFileName;
if (!fileUpload(req, gameCoverFileName))
formErrors.push("Failed to upload file");
const result = await gameModel.create({
title: body.gameTitle,
cover: gameCoverFileName,
price: body.gamePrice,
description: body.description
});
if (result)
res.status(201).send({
success: true,
game: {
gameTitle: result.title,
gameCover: gameCoverFileName,
gamePrice: result.price,
description: result.description
}
});
} });
And I'm getting an empty body.
You need to additionally process the multipart form-data. For example with multiparty:
const multiparty = require("multiparty");

router.post("/add", auth, async (req, res) => {
  try {
    const parse = function (req) {
      return new Promise(function (resolve, reject) {
        const form = new multiparty.Form();
        form.parse(req, function (err, fields, files) {
          !err ? resolve([fields, files]) : reject(err);
        });
      });
    };
    const [body] = await parse(req);
    console.log(body);
  } catch (err) {
    console.log(err);
  }
  res.json("ok");
});
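One thing to keep in mind: multiparty returns every field as an array of values, so the validations from the question need to index into those arrays. A small sketch, assuming a field actually posted under the name gameTitle:
const [fields] = await parse(req);
const gameTitle = fields.gameTitle && fields.gameTitle[0]; // multiparty wraps each value in an array
if (!gameTitle) formErrors.push("Game title is required.");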

Mongoose remove and create in GET route

I have a small issue with mongoose. I am getting data from online RSS feeds, parsing it, and pushing it into an array, from which I populate a mongoose model, and all of this happens in the GET route. What I want to accomplish is to delete all the data from the model first and then populate it with the fresh data, but it either deletes everything (since the parser iterates a few times) or deletes nothing, and the data just keeps being added to the model.
Here's my code
'use strict';
const Promise = require('bluebird');
const request = require('request');
const FeedParser = require('feedparser');
const express = require('express');
const router = express.Router();
const xray = require('x-ray')();
var Post = require('../models/post');
var dataArray = [];

router.get('/', function (req, res) {
  const fetch = (url) => {
    return new Promise((resolve, reject) => {
      if (!url) {
        return reject(new Error(`Bad URL (url: ${url})`));
      }
      const feedparser = new FeedParser();
      const items = [];
      feedparser.on('error', (e) => {
        return reject(e);
      }).on('readable', () => {
        // This is where the action is!
        var item;
        console.time('loading')
        while (item = feedparser.read()) {
          items.push(item);
        }
      }).on('end', () => {
        resolve({
          meta: feedparser.meta,
          records: items
        });
      });
      request({
        method: 'GET',
        url: url
      }, (e, res, body) => {
        if (e) {
          return reject(e);
        } else if (res.statusCode != 200) {
          return reject(new Error(`Bad status code (status: ${res.statusCode}, url: ${url})`));
        }
        feedparser.end(body);
        feedparser.on('end', function () {
          console.log('Done');
        });
      });
    });
  };

  Promise.map([
    'url',
    'url',
    'url',
    'url'], (url) => fetch(url), { concurrency: 4 }) // note the concurrency limit
    .then((feeds) => {
      feeds.forEach(feed => {
        feed.records.forEach(record => {
          dataArray.push(record);
        });
      });
    }).catch(function (error) {
      console.log(error);
    });

  Post.remove({}, function (err) {
    if (err) {
      console.log(err);
    } else {
      console.log('collection removed');
    }
  });

  dataArray.forEach(post => {
    Post.create({
      title: post.title,
      content: post.description,
      created: post.date,
      image: post['rss:image']['#'],
      link: post.link
    }, function (err, newPost) {
      console.log(newPost.title);
    });
  });

  Post.find({}, function (err, posts) {
    if (err) {
      console.log(err);
    } else {
      res.render('index/home', {
        posts: posts
      });
    }
  });
});

module.exports = router;
None of this is going to run synchronously. You can do something like this:
'use strict';
const Promise = require('bluebird');
const request = require('request');
const FeedParser = require('feedparser');
const express = require('express');
const router = express.Router();
const xray = require('x-ray')();
var Post = require('../models/post');
var dataArray = [];
let fetch; // `const fetch;` is a syntax error: const needs an initializer

router.get('/', function (req, res) {
  Post.remove({}, function (err) {
    if (err) {
      console.log(err);
    } else {
      console.log('collection removed. Starting to fetch Posts from Service');
      fetch = (url) => {
        return new Promise((resolve, reject) => {
          if (!url) {
            return reject(new Error(`Bad URL (url: ${url})`));
          }
          const feedparser = new FeedParser();
          const items = [];
          feedparser.on('error', (e) => {
            return reject(e);
          }).on('readable', () => {
            // This is where the action is!
            var item;
            console.time('loading')
            while (item = feedparser.read()) {
              items.push(item);
            }
          }).on('end', () => {
            resolve({
              meta: feedparser.meta,
              records: items
            });
          });
          request({
            method: 'GET',
            url: url
          }, (e, res, body) => {
            if (e) {
              return reject(e);
            } else if (res.statusCode != 200) {
              return reject(new Error(`Bad status code (status: ${res.statusCode}, url: ${url})`));
            }
            feedparser.end(body);
            feedparser.on('end', function () {
              console.log('Done');
            });
          });
        });
      };
    }
  });

  Promise.map([
    'url',
    'url',
    'url',
    'url'], (url) => fetch(url), { concurrency: 4 }) // note the concurrency limit
    .then((feeds) => {
      feeds.forEach(feed => {
        dataArray = dataArray.concat(feed.records);
      });
      console.log('inserting posts in the collection');
      dataArray.forEach(post => {
        Post.create({
          title: post.title,
          content: post.description,
          created: post.date,
          image: post['rss:image']['#'],
          link: post.link
        }, function (err, newPost) {
          console.log(newPost.title);
        });
      });
      console.log("Fetching posts from the collection");
      Post.find({}, function (err, posts) {
        if (err) {
          console.log(err);
        } else {
          res.render('index/home', {
            posts: posts
          });
        }
      });
    }).catch(function (error) {
      console.log(error);
    });
});

module.exports = router;
I haven't tested this. Please test it on your end. Let me know if there's an error or something.
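One caveat: the Post.create calls above are still fire-and-forget inside forEach, so Post.find can run before the inserts finish. A tighter sequence, untested, using mongoose's promise-returning insertMany:
Post.insertMany(dataArray.map(post => ({
  title: post.title,
  content: post.description,
  created: post.date,
  image: post['rss:image']['#'],
  link: post.link
})))
  .then(() => Post.find({}))
  .then(posts => res.render('index/home', { posts: posts }))
  .catch(err => console.log(err));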
