Node.js formidable async mongoose await issue

I'm trying to run OCR on an image.
Everything works well, but I have a problem with mongoose and asynchrony.
I cannot use "await" on the mongoose find call because the enclosing function is not async. How do I solve that?
Here is my code:
// post processImage
router.post('/', async (req, res) => {
  try {
    var baseUrl;
    const form = formidable({ multiples: true });
    form.parse(req, function (error, fields, files) {
      var imatgeAProcessar = files.image.path;
      var extname = path.extname(files.image.name);
      getTextFromImage(imatgeAProcessar) // OCR process of the image
        .then(res => { // note: this "res" shadows the Express "res" parameter
          const boss_name_req = res.boss_name;
          const boss = Boses.findOne({ "name": boss_name_req }).exec();
          // ERROR HERE // returns nothing although it exists in the database (no await?)
          console.log(JSON.stringify(boss)); // writes "{}"
          const processedImage = {
            "success": true,
            "boss_name": boss.name,
            "boss_image": baseUrl + 'images/' + boss.num + ".png"
          };
          res.json(processedImage);
        });
    });
  } catch (err) {
    res.json({ message: err });
  }
});

*edited
// post processImage
router.post('/', async (req, res) => {
  try {
    var baseUrl;
    const form = formidable({ multiples: true });
    var formfields = await new Promise(function (resolve, reject) {
      form.parse(req, function (err, fields, files) {
        if (err) {
          reject(err);
          return;
        }
        resolve(files);
      }); // form.parse
    });
    var imatgeAProcessar = formfields.image.path;
    var extname = path.extname(formfields.image.name);
    // note: this "res" shadows Express's res, which is why res.json below fails
    const res = await getTextFromImage(imatgeAProcessar);
    const boss_name_req = res.boss_name;
    const boss = await Boses.findOne({ "name": boss_name_req }).limit(4).skip(0).exec();
    const processedImage = {
      "success": true,
      "boss_name": boss.name,
      "boss_image": baseUrl + 'images/' + boss.num + ".png"
    };
    res.json(processedImage);
  } catch (err) {
    res.json({ message: err });
  }
});

Finally I found the way... I wrote a callback on the findOne call, as:
const boss = Boss.findOne({ "name": boss_name_req })
  .then(resMongoose => {
    try {
      const processedImage = {
        "success": true,
        "gym": resOCR.gym,
        "boss_name": resMongoose.name,
      };
      res.json(processedImage);
    } catch (err) {
      res.json({ message: err });
    }
  });
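
For reference, the reason JSON.stringify(boss) printed "{}" in the first snippet is that findOne(...).exec() returns a Promise, and stringifying a pending Promise yields an empty object. A minimal sketch of an alternative fix (model and field names taken from the question): mark the .then callback itself async, so await works inside the non-async form.parse callback:

getTextFromImage(imatgeAProcessar)
  .then(async (ocr) => {
    // awaiting here resolves the query to an actual document
    const boss = await Boses.findOne({ name: ocr.boss_name }).exec();
    res.json({ success: true, boss_name: boss.name });
  });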

Related

problem in sending base64 data in GET request

Hi, I am facing issues sending base64 data in a GET request.
I was successful in converting the image into base64 data and inserting it in receivedFile,
but in the response the attachments come back as an empty array, while the rest of the data (i.e. user_id) flows through successfully.
Could you please help me resolve this issue?
Below is the code:
router.js
router.get('/users/data/expand/:nid', async (req, res) => {
  var idselected = req.params.nid;
  var dir = '\images';
  var receivedFile = [];
  try {
    const checkData = await user.find({ "user_id": idselected });
    await checkData[0].attachments.forEach(element => {
      fs.readdir(dir, function (err, files) {
        if (err) {
          console.log(err);
        } else {
          files.forEach((filename) => {
            filename = element;
            fs.readFile(filename, 'base64', (err, base64Data) => {
              if (err) {
                console.log(err);
              }
              receivedFile.push(base64Data);
            });
          });
        }
      });
    });
    // issue is here: the attachments come back empty instead of base64 data
    const returnUser = new User({
      user_id: checkData.user_id,
      attachments: receivedFile
    });
    res.status(201).send(returnUser);
  } catch (e) {
    res.status(500).send(e);
  }
});
Well, it's always good to create helper functions and to promisify things so you can use async/await syntax.
I have changed your code. I didn't test it, but I think it should work:
router.get("/users/data/expand/:nid", async (req, res) => {
var idselected = req.params.nid;
var dir = "images";
try {
const checkData = await user.findOne({ user_id: idselected });
let receivedFile = await Promise.all(
checkData.attachments.flatMap(async element => {
let files = await readDirectory(dir);
return await Promise.all(
files.map(filename => {
filename = element;
return readFile(filename)
})
);
})
);
const returnUser = new User({
user_id: checkData.user_id,
attachments: receivedFile
});
let savedUser = await returnUser.save();
res.status(201).send(savedUser);
} catch (e) {
res.status(500).send(e);
}
});
function readDirectory(dir) {
return new Promise((res, rej) => {
fs.readdir(dir, function(err, files) {
if (err) {
rej(err);
} else {
res(files);
}
});
});
}
function readFile(filename) {
return new Promise((res, rej) => {
fs.readFile(filename, "base64", (err, base64Data) => {
if (err) {
rej(err);
}
res(base64Data);
});
});
}
I guess you use mongoose.
There is a method called findOne, and you also forgot to save your model with returnUser.save().
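
As a side note, recent Node versions already ship promisified fs APIs, so the hand-rolled readDirectory/readFile helpers above can be dropped entirely. A minimal sketch using the built-in fs.promises module:

const fsp = require('fs').promises;

async function readAttachment(filename) {
  // readFile with an encoding returns the contents as a string directly
  return fsp.readFile(filename, 'base64');
}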

Return async function to express router

This may sound dumb, but what I am stuck on here is:
1: returning the converted JSON file
2: getting the returned object into the route
routes.js
const express = require('express');
const router = express.Router();
const routings = require('./src/services/routings');
router.get('/routings', async (req, res) => {
  const routesRes = await routings.getRoutings();
  res.end(JSON.stringify(routesRes, null, " ")).catch(function (err) {
    console.log(err);
  });
});
module.exports = router;
routings.js
const parseXml = require('xml2js')
let data = `<?xml version="1.0" encoding="UTF-8"?>...`;
getRoutings = async () => {
  await parseXml.parseStringPromise(data).then(function (result) {
    console.log('Done');
    return result;
  })
  .catch(function (err) {
    console.log(err);
  });
}
module.exports = {getRoutings}
Your getRoutings() function does not have a return value. Therefore when you do this:
const routesRes = await routings.getRoutings();
routesRes will always be undefined.
I would suggest this:
getRoutings = () => {
  return parseXml.parseStringPromise(data).then(function (result) {
    console.log('Done');
    return result;
  }).catch(function (err) {
    console.log(err);
    throw err; // make sure the error is propagated
  });
}
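
Equivalently, a sketch of the same fix in async/await style, since the route already uses await:

getRoutings = async () => {
  try {
    const result = await parseXml.parseStringPromise(data);
    console.log('Done');
    return result; // the async function resolves with this value
  } catch (err) {
    console.log(err);
    throw err; // make sure the error is propagated
  }
};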

Axios POSTed FormData has empty body on serverside

This is the client side code. The data is not empty, and the file is uploaded correctly.
export function addGame(data) {
  return dispatch => {
    const formData = new FormData();
    formData.append("game.cover", data.gameCover[0]);
    formData.append("game.title", data.gameTitle);
    formData.append("game.price", data.gamePrice);
    formData.append("game.description", data.description);
    return axios.post(apiUrl + "/games/add", formData).then(res => {
      dispatch({ type: ADD_GAME, payload: res.data.game });
    });
  };
}
And this is the server side:
router.post("/add", auth, async (req, res) => {
const body = await req.body;
console.log(body);
let formErrors = [];
if (!body.gameTitle) formErrors.push("Game title is required.");
if (!body.description) formErrors.push("Description is required.");
if (!body.gamePrice) formErrors.push("Price is required.");
if (formErrors.length) res.status(400).send({ success: false, formErrors });
else {
let gameCoverFileName;
if (!fileUpload(req, gameCoverFileName))
formErrors.push("Failed to upload file");
const result = await gameModel.create({
title: body.gameTitle,
cover: gameCoverFileName,
price: body.gamePrice,
description: body.description
});
if (result)
res.status(201).send({
success: true,
game: {
gameTitle: result.title,
gameCover: gameCoverFileName,
gamePrice: result.price,
description: result.description
}
});
} });
And I'm getting an empty body.
You need to additionally process the multipart form data, for example with multiparty:
const multiparty = require("multiparty");
router.post("/add", auth, async (req, res) => {
  try {
    const parse = function (req) {
      return new Promise(function (resolve, reject) {
        const form = new multiparty.Form();
        form.parse(req, function (err, fields, files) {
          !err ? resolve([fields, files]) : reject(err);
        });
      });
    };
    const [body] = await parse(req);
    console.log(body);
  } catch (err) {
    console.log(err);
  }
  res.json("ok");
});
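
Another common option is multer, which parses the multipart body as Express middleware. A minimal sketch (the field name is taken from the client code above, the dest path is an assumption):

const multer = require("multer");
const upload = multer({ dest: "uploads/" });

// upload.single populates req.file with the uploaded cover
// and req.body with the remaining text fields
router.post("/add", auth, upload.single("game.cover"), (req, res) => {
  console.log(req.body, req.file);
  res.json("ok");
});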

Avoiding callback hell

I have this code that serves every markdown file in the './markdown' folder at '/api/markdown/filename'.
var apiRouter = express.Router();
var markdownFolder = './markdown/';
apiRouter.get('/:markdown_file_noext', function (req, res) {
  fs.readdir(markdownFolder, function (err, markdown) {
    if (err) throw err;
    markdown.forEach(function (file) {
      fs.readFile(markdownFolder + file, 'utf8', function (err, file_content) {
        if (err) throw err;
        var fileNoExtension = file.slice(0, file.indexOf('.'));
        if (req.params.markdown_file_noext == fileNoExtension) {
          res.json({
            'title': fileNoExtension,
            'markdown': marked(file_content)
          });
        }
      });
    });
  });
});
But I end up with a ton of callbacks due to the nature of the 'fs' methods. How do I avoid this?
Using Q as the promise library:
const Q = require('q');
const fs = require('fs');
const markdownFolder = './markdown/';
const readdir = Q.nfbind(fs.readdir);
const readFile = Q.nfbind(fs.readFile);

readdir(markdownFolder).then(markdown => {
  const promises = [];
  markdown.forEach(file => promises.push(readFile(markdownFolder + file, 'utf8')));
  return Q.all(promises);
}).then(files => {
  // Do your magic.
}).catch(error => {
  // Do something with the error.
});
You have different options.
Use named functions instead of anonymous functions. It would make the code a little more readable, but you would still be using callbacks.
Use Promises, but you will need a library like bluebird to wrap the fs module.
For a more advanced option, you can use generators and Promises to make your code look more synchronous. Take a look at co or bluebird.coroutine.
With Promises you could do it like this:
const path = require('path');
const fs = require('fs');
var apiRouter = express.Router();
var markdownFolder = './markdown/';
apiRouter.get('/:markdown_file_noext', function (req, res) {
  readdir(markdownFolder)
    .then((files) => {
      const tasks = files.map((file) => {
        const filePath = path.resolve(markdownFolder, file);
        // Keep the filename together with its content, so it is still
        // available in the next .then
        return readFile(filePath).then((content) => ({ file, content }));
      });
      return Promise.all(tasks); // Read all files
    })
    .then((results) => {
      // It's better to aggregate all results in one array and return it,
      // instead of calling res.json for each result
      const matches = results
        .filter(({ file }) => file.slice(0, file.indexOf('.')) === req.params.markdown_file_noext)
        .map(({ file, content }) => ({
          'title': file.slice(0, file.indexOf('.')),
          'markdown': marked(content)
        }));
      res.json(matches);
    })
    .catch((err) => {
      // All errors are caught here
      console.log(err);
    });
});
function readdir(folderPath) {
  return new Promise((resolve, reject) => {
    fs.readdir(folderPath, (err, files) => {
      if (err) {
        return reject(err);
      }
      resolve(files);
    });
  });
}
function readFile(filePath) {
  return new Promise((resolve, reject) => {
    fs.readFile(filePath, 'utf8', (err, file_content) => {
      if (err) {
        return reject(err);
      }
      resolve(file_content);
    });
  });
}
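
On newer Node versions the same route can be written without hand-rolled wrappers at all. A minimal sketch using the built-in fs.promises API with async/await (apiRouter, markdownFolder, and marked as defined above):

const fsp = require('fs').promises;
const path = require('path');

apiRouter.get('/:markdown_file_noext', async function (req, res) {
  try {
    const files = await fsp.readdir(markdownFolder);
    // Only read the file whose name (without extension) matches the request
    const match = files.find(f => f.slice(0, f.indexOf('.')) === req.params.markdown_file_noext);
    if (!match) return res.status(404).json({ error: 'not found' });
    const content = await fsp.readFile(path.resolve(markdownFolder, match), 'utf8');
    res.json({ title: req.params.markdown_file_noext, markdown: marked(content) });
  } catch (err) {
    console.log(err);
    res.status(500).end();
  }
});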

Mongoose remove and create in get route

I have a small issue with mongoose. I get data from online RSS feeds, parse it, and push it into an array, which I then use to populate a mongoose model, and all of this happens in the GET route. What I want is to first delete all the data from the model and then populate it with the new data. Instead, it either deletes everything (since the parser iterates a few times), or it deletes nothing and the data just keeps being added to the collection.
Here's my code
'use strict';
const Promise = require('bluebird');
const request = require('request');
const FeedParser = require('feedparser');
const express = require('express');
const router = express.Router();
const xray = require('x-ray')();
var Post = require('../models/post');
var dataArray = [];

router.get('/', function (req, res) {
  const fetch = (url) => {
    return new Promise((resolve, reject) => {
      if (!url) {
        return reject(new Error(`Bad URL (url: ${url})`));
      }
      const feedparser = new FeedParser();
      const items = [];
      feedparser.on('error', (e) => {
        return reject(e);
      }).on('readable', () => {
        // This is where the action is!
        var item;
        console.time('loading');
        while (item = feedparser.read()) {
          items.push(item);
        }
      }).on('end', () => {
        resolve({
          meta: feedparser.meta,
          records: items
        });
      });
      request({
        method: 'GET',
        url: url
      }, (e, res, body) => {
        if (e) {
          return reject(e);
        } else if (res.statusCode != 200) {
          return reject(new Error(`Bad status code (status: ${res.statusCode}, url: ${url})`));
        }
        feedparser.end(body);
        feedparser.on('end', function () {
          console.log('Done');
        });
      });
    });
  };

  Promise.map([
    'url',
    'url',
    'url',
    'url'], (url) => fetch(url), { concurrency: 4 }) // note the concurrency limit
    .then((feeds) => {
      feeds.forEach(feed => {
        feed.records.forEach(record => {
          dataArray.push(record);
        });
      });
    }).catch(function (error) {
      console.log(error);
    });

  Post.remove({}, function (err) {
    if (err) {
      console.log(err);
    } else {
      console.log('collection removed');
    }
  });

  dataArray.forEach(post => {
    Post.create({
      title: post.title,
      content: post.description,
      created: post.date,
      image: post['rss:image']['#'],
      link: post.link
    }, function (err, newPost) {
      console.log(newPost.title);
    });
  });

  Post.find({}, function (err, posts) {
    if (err) {
      console.log(err);
    } else {
      res.render('index/home', {
        posts: posts
      });
    }
  });
});
module.exports = router;
None of this is going to run synchronously. You can do something like this:
'use strict';
const Promise = require('bluebird');
const request = require('request');
const FeedParser = require('feedparser');
const express = require('express');
const router = express.Router();
const xray = require('x-ray')();
var Post = require('../models/post');

const fetch = (url) => {
  return new Promise((resolve, reject) => {
    if (!url) {
      return reject(new Error(`Bad URL (url: ${url})`));
    }
    const feedparser = new FeedParser();
    const items = [];
    feedparser.on('error', (e) => {
      return reject(e);
    }).on('readable', () => {
      // This is where the action is!
      var item;
      console.time('loading');
      while (item = feedparser.read()) {
        items.push(item);
      }
    }).on('end', () => {
      resolve({
        meta: feedparser.meta,
        records: items
      });
    });
    request({
      method: 'GET',
      url: url
    }, (e, res, body) => {
      if (e) {
        return reject(e);
      } else if (res.statusCode != 200) {
        return reject(new Error(`Bad status code (status: ${res.statusCode}, url: ${url})`));
      }
      feedparser.end(body);
      feedparser.on('end', function () {
        console.log('Done');
      });
    });
  });
};

router.get('/', function (req, res) {
  // declare per request, so records do not pile up across requests
  var dataArray = [];
  // remove first, then fetch, insert, and query, strictly in that order
  Post.remove({}).exec()
    .then(() => {
      console.log('collection removed. Starting to fetch posts from the feeds');
      return Promise.map([
        'url',
        'url',
        'url',
        'url'], (url) => fetch(url), { concurrency: 4 }); // note the concurrency limit
    })
    .then((feeds) => {
      feeds.forEach(feed => {
        dataArray = dataArray.concat(feed.records);
      });
      console.log('inserting posts into the collection');
      // without a callback, Post.create returns a promise per document
      return Promise.all(dataArray.map(post => Post.create({
        title: post.title,
        content: post.description,
        created: post.date,
        image: post['rss:image']['#'],
        link: post.link
      })));
    })
    .then(() => {
      console.log('Fetching posts from the collection');
      return Post.find({}).exec();
    })
    .then((posts) => {
      res.render('index/home', {
        posts: posts
      });
    })
    .catch(function (error) {
      console.log(error);
    });
});
module.exports = router;
I haven't tested this. Please test it on your end. Let me know if there's an error or something.
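
As a small refinement (assuming a mongoose version that supports it), Post.insertMany can replace the per-document create step above with a single batched insert:

// one round trip instead of one create() per post
return Post.insertMany(dataArray.map(post => ({
  title: post.title,
  content: post.description,
  created: post.date,
  image: post['rss:image']['#'],
  link: post.link
})));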
