I've written a function that should query everything in the files collection of my MongoDB database using Mongoose, but it doesn't. While each element is in fact being read, files.push() doesn't seem to have any effect on the array: the array is still empty afterwards.
I know console.log() is not a rigorous way to debug, but since Express also fails to render the contents of the array, it really is not being populated.
Yes, the function is being called as getAllFiles(Image).
Code below:
const Image = module.exports = mongoose.model('files', imageSchema);

function getAllFiles(collection) {
    let files = [];
    collection.find({}, (err, buns) => {
        buns.forEach((bun) => {
            let fin = bun.path.replace("public/", "");
            files.push(fin);
            console.log(fin);
        });
    });
    console.log(files); // runs before the query callback has fired
    return files;
}
Terminal output (ignore extraneous outputs):
wildflower :: src/bunnydb » node app.js
(node:23296) DeprecationWarning: current URL string parser is deprecated, and will be removed in a future version. To use the new parser, pass option { useNewUrlParser: true } to MongoClient.connect.
running on port 3000
[]
uploads/9160d961-3d9b-4dea-a39c-f79b86647408.jpg
I was able to fix it by passing in a callback, since the query runs asynchronously:
function getAllFiles(collection, cb) {
    let files = [];
    collection.find({}, (err, buns) => {
        console.log('err: ' + err);
        console.log('buns: ' + buns);
        buns.forEach((bun) => {
            let fin = bun.path.replace("public/", "");
            files.push(fin);
            console.log('data: ' + fin);
        });
        cb(files);
    });
    console.log('arr: ' + files); // still logs [] -- only the callback sees the populated array
    return files;
}
On invocation, the callback argument can then be used to do something with the files.
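For instance, a minimal usage sketch (the Express route and template name are illustrative assumptions, not part of the original code):

// hypothetical route using the callback-based getAllFiles
app.get('/files', (req, res) => {
    getAllFiles(Image, (files) => {
        // runs only once the query callback has populated the array
        res.render('gallery', { files: files });
    });
});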
I'm running NodeJS (v8.16.2) locally on the command line. I scrape an e-commerce website, gather the relevant information into a data structure, and then try to write it manually into a plain-text CSV file (my records don't have a fixed set of fields) by creating a write stream. This last step isn't working.
// Other stuff
const exitHandler = function(options, exitCode) {
    if (exitCode || exitCode !== 0) console.log(exitCode);
    // Other stuff
    writeToCsv();
    if (options.exit) process.exit();
}

const writeToCsv = function() {
    let ws = fs.createWriteStream('./final-data.csv');
    const crlf = '\r\n'; // CRLF is carriage return, then line feed
    // Please ignore the weird layout
    for (let seller in finalData.sellers) {
        ws.write('Seller:,' + seller + crlf + ',Brands:');
        for (let brand of finalData.sellers[seller].brands) {
            ws.write(',' + brand);
        }
        ws.write(crlf + ',Addresses:');
        for (let addr of finalData.sellers[seller].addrs) {
            ws.write(',"' + addr + '"');
        }
        ws.write(crlf);
    }
    ws.on('finish', () => {
        console.log('Wrote all data'); // never prints this
    });
    ws.end();
}

process.on('exit', exitHandler.bind(null, { cleanup: true }));
I suspect this is because NodeJS exits before the data has been flushed to disk, but I can't figure out a way to make NodeJS flush the data synchronously.
PS: I'm new to NodeJS.
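For reference, one workaround is to stop writing from inside the 'exit' handler entirely, since Node cannot wait for asynchronous work there, and instead exit from the stream's 'finish' handler. A sketch (the function name is a hypothetical variant of the writeToCsv above):

const writeToCsvThenExit = function() {
    let ws = fs.createWriteStream('./final-data.csv');
    ws.on('finish', () => {
        console.log('Wrote all data'); // fires once everything has been flushed
        process.exit();                // only now is it safe to exit
    });
    // ... the same ws.write(...) loops as above ...
    ws.end();
}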
Please check out the example below and integrate it. As per your comment, I updated the code:
// from the 'csv-writer' package
const createCsvWriter = require('csv-writer').createObjectCsvWriter;

async function writeDataInCSV(filePath, dynamicHeader, data) {
    const csvWriter = createCsvWriter({
        path: filePath,
        header: dynamicHeader
    });
    await csvWriter.writeRecords(data);
    console.log('The CSV file was written successfully');
}
writeDataInCSV('out.csv', dynamicHeader, data);
Here, set up the array of headers, build the data, and pass both to the writeDataInCSV method.
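For concreteness, a sketch of what the header array and records might look like with the csv-writer package (the field names are invented for illustration):

// each header entry maps a record key (id) to a column title
const dynamicHeader = [
    { id: 'seller', title: 'Seller' },
    { id: 'brand', title: 'Brand' },
    { id: 'address', title: 'Address' }
];
// records are plain objects keyed by the header ids
const data = [
    { seller: 'acme', brand: 'Foo', address: '1 Example St' },
    { seller: 'acme', brand: 'Bar', address: '2 Example Ave' }
];
writeDataInCSV('out.csv', dynamicHeader, data);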
I think the rendering takes place before the string has been searched for in the files. I have tried different methods but can't seem to get this working; any help would be appreciated. I'm a noob at Node.js. I'm trying to get the ID of the user, query and get all their data, then check whether they are in any of the given lists, and finally render the page.
const j = [];
let name = '';
const filename = [];
var ext = '';

module.exports = function(app, express) {
    app.use(bodyParser.urlencoded({ extended: false }));
    app.use(bodyParser.json());

    app.post('/cusdetails', isLoggedIn, function (req, res) {
        var cusid = req.body.cusid;
        var insertQuerys = "SELECT * FROM customer WHERE cusid=? ORDER BY rowid DESC LIMIT 1";
        connection.query(insertQuerys, [cusid], function(err, rows) {
            rows.forEach((row) => {
                name = row.fncus;
            });
            fs.readdir('./views/iplist', function(err, files) {
                if (err)
                    throw err;
                for (var index in files) {
                    j.push(files[index]);
                }
                j.forEach(function(value) {
                    var k = require('path').resolve(__dirname, '../views/iplist/', value);
                    fs.exists(k, function(fileok) {
                        if (fileok) {
                            fs.readFile(k, function(err, content) {
                                if (err) throw err;
                                if (content.indexOf(name) > -1) {
                                    ext = path.extname(k);
                                    filename.push(path.basename(k, ext));
                                }
                            });
                        }
                        else {
                            console.log(" FileNotExist ");
                        }
                    });
                });
            });
            console.log(filename);
            res.render('cusdetails.ejs', { rows: rows, user: req.user, aml: filename });
        });
    });
};
You can create a simple Promise wrapper and then use it inside an async/await function to pause execution until it resolves.
// use the mysql2 package, as it provides promises; less work than writing promise wrappers
const mysql = require('mysql2/promise');
// create the connection pool (note: with mysql2/promise, createConnection returns
// a promise, so a pool, which can be created synchronously, is used here instead)
const connection = mysql.createPool({
    host: 'localhost',
    user: 'root',
    database: 'test'
});

// sample wrapper
function some(k) {
    // more advisable to use local variables -- and why does this need to be an array?
    var filename = [];
    return new Promise((resolve, reject) => {
        // doing this is also not recommended; check the Node.js documentation on fs.exists for more info
        fs.exists(k, function(fileok) {
            if (fileok) {
                fs.readFile(k, function(err, content) {
                    if (err) return reject(err);
                    if (content.indexOf(name) > -1) {
                        ext = path.extname(k);
                        filename.push(path.basename(k, ext));
                        resolve(filename);
                    }
                });
            }
            else {
                // reject(new Error("FileNotExist"))
                console.log(" FileNotExist ");
            }
        });
    });
}

// note the use of async
app.post('/cusdetails', isLoggedIn, async function (req, res) {
    var cusid = req.body.cusid;
    var insertQuerys = "SELECT * FROM customer WHERE cusid=? ORDER BY rowid DESC LIMIT 1";
    // using await to pause execution; waits till the query is finished
    const [rows] = await connection.query(insertQuerys, [cusid]);
    rows.forEach((row) => {
        name = row.fncus;
    });
    // then you can
    var result = await some(k)
    ...
Note, however, that this way you lose the advantage of concurrent execution, as it's kind of blocking. If the result of one call is not used in another, you can start both in parallel and await the results to achieve sequencing, like:
const rowsPromise = connection.query(insertQuerys, [cusid]); // starts the query
const filesPromise = some(k);                                // starts the file check in parallel
const [rows] = await rowsPromise;   // now wait for each result in turn
const result = await filesPromise;
console.log(rows);   // do something
console.log(result); // do something
JavaScript is asynchronous. This means that if you have a function with a callback (i.e. your query), the callback will be called asynchronously, at an unknown time, while the other code executes.
You need to look up some tutorials on how to deal with callbacks to get a proper understanding of them. Another approach is to use async/await and/or promises.
Basically, if you take the following code:
console.log("this will print first");
setTimeout(function () {
console.log("this will print last");
}, 1000);
console.log("this will print second");
If you run the code above, the top level is executed synchronously: it first calls console.log, then it executes setTimeout. The setTimeout call itself is synchronous; it schedules a timer, then says "I'm ready", and execution continues to the next console.log. After 1 second (1000 milliseconds), the callback passed to setTimeout is executed, and only then is that console.log called. You cannot make the rest of the code wait this way; you need to restructure your code or read up on promises.
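For instance, a minimal sketch of the promise version of that example (names are illustrative):

// wrap the timer in a promise so it can be awaited
function delay(ms) {
    return new Promise((resolve) => setTimeout(resolve, ms));
}

async function main() {
    console.log("this will print first");
    await delay(1000); // main() pauses here; nothing else is blocked
    console.log("this will now print second");
}

main();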
I updated the function to create the CSV file but now I'm getting an error:
In upload function
internal/streams/legacy.js:57
throw er; // Unhandled stream error in pipe.
^
Error: ENOENT: no such file or directory, open 'C:\Users\shiv\WebstormProjects\slackAPIProject\billingData\CSV\1548963844106output.csv'
var csvFilePath = '';
var JSONFilePath = '';

function sendBillingData() {
    var message = '';
    axios.get(url, {
        params: {
            token: myToken
        }
    }).then(function (response) {
        message = response.data;
        fields = billingDataFields;
        // saveFiles(message, fields, 'billingData/');
        saveFilesNew(message, fields, 'billingData/');
        var file = fs.createReadStream(__dirname + '/' + csvFilePath); // <-- make sure this path is correct
        console.log(__dirname + '/' + csvFilePath);
        uploadFile(file);
    })
    .catch(function (error) {
        console.log(error);
    });
}
The saveFilesNew function is:
function saveFilesNew(message, options, folder) {
    try {
        const passedData = message;
        var relevantData = '';
        if (folder == 'accessLogs/') {
            const loginsJSON = message.logins;
            relevantData = loginsJSON;
            console.log(loginsJSON);
        }
        if (folder == 'billingData/') {
            relevantData = passedData.members;
            const profile = passedData.members[0].profile;
        }
        // Save JSON to the output folder
        var date = Date.now();
        var directoryPath = folder + 'JSON/' + date + "output";
        JSONFilePath = directoryPath + '.json';
        fs.writeFileSync(JSONFilePath, JSON.stringify(message, null, 4), function(err) {
            if (err) {
                console.log(err);
            }
        });
        // parse JSON onto the CSV
        const json2csvParser = new Json2csvParser({ fields });
        const csv = json2csvParser.parse(relevantData);
        // console.log(csv);
        // function to process the CSV onto the file
        var directoryPath = folder + 'CSV/' + date + "output";
        csvFilePath = directoryPath + '.csv';
        let data = [];
        let columns = {
            real_name: 'real_name',
            display_name: 'display_name',
            email: 'email',
            account_type: 'account_type'
        };
        var id = passedData.members[0].real_name;
        console.log(id);
        console.log("messageLength is " + Object.keys(message.members).length);
        for (var i = 0; i < Object.keys(message.members).length; i++) {
            console.log("value of i is " + i);
            var display_name = passedData.members[i].profile.display_name;
            var real_name = passedData.members[i].profile.real_name_normalized;
            var email = passedData.members[i].profile.email;
            var account_type = 'undefined';
            console.log("name: " + real_name);
            if (passedData.members[i].is_owner) {
                account_type = 'Org Owner';
            }
            else if (passedData.members[i].is_admin) {
                account_type = 'Org Admin';
            }
            else if (passedData.members[i].is_bot) {
                account_type = 'Bot';
            }
            else account_type = 'User';
            data.push([real_name, display_name, email, account_type]);
        }
        console.log(data);
        stringify(data, { header: true, columns: columns }, (err, output) => {
            if (err) throw err;
            fs.writeFileSync(csvFilePath, output, function(err) {
                console.log(output);
                if (err) {
                    console.log(err);
                }
                console.log('my.csv saved.');
            });
        });
    } catch (err) {
        console.error(err);
    }
}
The uploadFile function is:
function uploadFile(file) {
    console.log("In upload function");
    const form = new FormData();
    form.append('token', botToken);
    form.append('channels', 'testing');
    form.append('file', file);
    axios.post('https://slack.com/api/files.upload', form, {
        headers: form.getHeaders()
    }).then(function (response) {
        var serverMessage = response.data;
        console.log(serverMessage);
    });
}
So I think the error is caused because Node is trying to upload the file before it has been created. I feel like this has something to do with the asynchronous nature of Node.js, but I fail to comprehend how to rectify the code. Please let me know how to correct this, and mention any improvements to the code structure/design too.
Thanks!
You don't wait for the callback provided to stringify to be executed, and that's where you create the file. (Assuming this stringify function really does accept a callback.)
Using callbacks (you can make this cleaner with promises and these neat async/await controls, but let's just stick to callbacks here), it should be more like:
function sendBillingData() {
    ...
    // this callback we'll use to know when the file writing is done, and to get the file path
    saveFilesNew(message, fields, 'billingData/', function(err, csvFilePathArgument) {
        // this we will execute when saveFilesNew calls it, not when saveFilesNew returns, see below
        uploadFile(fs.createReadStream(__dirname + '/' + csvFilePathArgument));
    });
}

// let's name this callback... "callback".
function saveFilesNew(message, options, folder, callback) {
    ...
    var csvFilePath = ...; // local variable only, instead of your global
    ...
    stringify(data, { header: true, columns: columns }, (err, output) => {
        if (err) throw err; // or return callback(err);
        fs.writeFile(csvFilePath, output, function(err) { // NOT writeFileSync, or no callback needed
            console.log(output);
            if (err) {
                console.log(err);
                // callback(err); may be a useful approach for error-handling at a higher level
            }
            console.log('my.csv saved.'); // yes, NOW the CSV is saved, not before this executes! Hence:
            callback(null, csvFilePath); // no error, clean process, pass the file path
        });
    });
    console.log("This line is executed before stringify's callback is called!");
    return; // implicit anyway, yet still synchronous, and that's why your version crashes
}
Using callbacks that are called only when the expected events happen (a file is done writing, a buffer/string is done transforming...) allows JS to keep executing code in the meantime. And it does keep executing code, so when you need data from async code, you have to tell JS that this data must be ready before your piece executes.
Also, since you can pass data when calling back (it's just a function), here I could avoid relying on a global csvFilePath. Relying on higher-level variables makes things monolithic: for example, you could not move saveFilesNew into a dedicated file where you keep your toolkit of file-related functions.
Finally, if your global process is like:
function aDayAtTheOffice() {
    sendBillingData();
    getCoffee();
}
then you don't need to wait for the billing data to be processed before starting making coffee. However, if your boss told you that you could NOT get a coffee until the billing data was settled, then your process would look like:
function aDayAtTheOffice() {
    sendBillingData(function (err) {
        // if (err) let's do nothing here: you wanted a coffee anyway, right?
        getCoffee();
    });
}
(Note that having the potential error as the first argument and data as the second is a convention for callbacks, not something mandatory.)
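A tiny sketch of that convention (the function is made up for illustration):

// error-first callback: cb(err, result)
function half(n, cb) {
    if (n % 2 !== 0) return cb(new Error('odd number'));
    cb(null, n / 2); // no error, so the first argument is null
}

half(8, (err, result) => {
    if (err) return console.error(err);
    console.log(result); // 4
});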
IMHO you should read about scope (the callback argument can be accessed at a time when the call to saveFilesNew is already done and forgotten!), and about the asynchronous nature of No... JavaScript. ;) (Sorry, probably not the best links, but they contain the meaningful keywords, and then Google is your buddy, your friend, your Big Brother.)
I'm trying to get data from a MongoDB collection and then save it to a global object. Later I need to pass it to an HTML template.
Here is my code:
When a user logs into his profile, we need to get his projects, and here we call the findprojects() function:
usrRouter.route('/profile')
    .all(function (req, res, next) {
        if (!req.user) {
            res.redirect('/');
        }
        next();
    })
    .get(function (req, res, userObj) {
        // var proj = findprojects();
        userObj = req.user;
        var pro = {};
        pro = findprojects(userObj);
        res.render('index', { name: userObj.username, email: userObj.email });
        //res.sendFile('profile.html',{root:path.join(__dirname,'../public'),},{name:userObj.username});
    });
Here is the findprojects function code:
var findprojects = function(obj) {
    var usern = obj.username;
    mongodb.connect(url, function(err, db) {
        if (err) throw err;
        var collection = db.collection('projects');
        //console.log(usern);
        collection.find({ 'pusername': usern });
        cursor = db.collection('projects').find({ 'pusername': usern }).toArray(function(err, items) {
            //console.log(items);
            var i;
            for (i = 0; i < items.length;) {
                userProjects.createdBy = items[i].pusername;
                userProjects.proName = items[i].projectName;
                userProjects.proType = items[i].projectType;
                userProjects.proDesc = items[i].projectDesc;
                //return userProjects;
                i = i + 1;
            }
        });
        console.log(userProjects);
    });
};
I have declared global object at the top like:
userProjects = {
createdBy:'',
proName:'',
proType:'',
proDesc:''
};
But when I console userprojects object after calling the findeprojects() function it displays empty values.
Why don't you use Mongoose to model your stuff? It's more intuitive, and you don't need to declare the global object or do the mapping in the for loop that you are doing.
Also, your approach is a bit wrong: when you iterate through the results, aren't you overwriting?
Say you have two documents where pusername is abdul. In your case you lose the first object, which gets overwritten by the second one. I see that you commented out a return statement, but even that won't work properly.
From a design point of view, your approach is not efficient.
In Mongoose you can do:
var userProjectSchema = new mongoose.Schema({
    pusername: { type: String }
  , projectName: String
  , projectType: String
  , projectDesc: String
});
// queries run on the model, not on the schema itself
var UserProject = mongoose.model('Project', userProjectSchema);

// Find a single document by username.
UserProject.findOne({ pusername: 'abdul' }, function(err, resDoc) {
    if (err) return console.error(err);
    // do your html stuff here
});

// Find all documents.
UserProject.find(function(err, results) {
    if (err) return console.error(err);
    // do your html stuff here
});
(Please note this is not a duplicate of two similarly titled questions; those use Mongoose, and their answers apply to Mongoose queries only.)
I have a list of directories, and each of these directories contains a file. I want to return a JSON list with the contents of each of these files. I can load the files no problem, but because I'm looping over the array with forEach, my empty response is sent before I've actually loaded the contents of the files:
function getInputDirectories() {
    return fs.readdirSync(src_path).filter(function(file) {
        return fs.statSync(path.join(src_path, file)).isDirectory();
    });
}

router.get('/list', function(req, res, next) {
    var modules = [];
    var input_dirs = getInputDirectories();
    input_dirs.forEach(function(dir) {
        path = __dirname + '/../../modules/input/' + dir + '/module.json';
        fs.readFile(path, 'utf8', function(err, data) {
            modules.push(data);
        });
    });
    res.status(200).json(modules);
});
How can I make sure that I only send down the modules array once it's fully loaded, i.e. once the forEach is done?
Since fs.readFile is asynchronous, the behaviour you are seeing is the expected one.
What you need to do is return your modules only when all of them have been read. You can do this inside the fs.readFile callback.
You can obtain the total number of directories through input_dirs.length (since getInputDirectories() returns an array). Now you need some kind of counter that tells you whether you have read the last directory or not, and if so, you return your modules. Something like this should work:
router.get('/list', function(req, res, next) {
    var modules = [];
    var input_dirs = getInputDirectories();
    var c = 0;
    input_dirs.forEach(function(dir) {
        var filePath = __dirname + '/../../modules/input/' + dir + '/module.json'; // local var, to avoid clobbering the path module
        fs.readFile(filePath, 'utf8', function(err, data) {
            c++;
            modules.push(data);
            if (c == input_dirs.length) {
                return res.status(200).json(modules);
            }
        });
    });
});
I suggest you use Promises, example:
var Promise = require('bluebird');
router.get('/list', function(req, res, next) {
    var input_dirs = getInputDirectories();
    // 'map' will try to fulfill all promises and resolves with their values; if one
    // fails, it returns a failed promise. (Promise.each would resolve with the
    // original input array rather than the file contents, so map is the right tool here.)
    return Promise.map(input_dirs, function(dir) {
        var filePath = __dirname + '/../../modules/input/' + dir + '/module.json';
        return new Promise(function(resolve, reject) {
            fs.readFile(filePath, 'utf8', function(err, data) {
                if (err) return reject(err);
                return resolve(data);
            });
        });
    }).then(function(modules) {
        return res.status(200).json(modules);
    })
    .catch(function(err) {
        if (err) {
            //handle error
        }
    });
});
This way you move on once your promises have been fulfilled.
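As an aside, on Node 10+ the same route works without Bluebird, using the built-in fs.promises API and Promise.all (a sketch under that assumption):

const fsp = require('fs').promises;

router.get('/list', function(req, res, next) {
    var input_dirs = getInputDirectories();
    Promise.all(input_dirs.map(function(dir) {
        return fsp.readFile(__dirname + '/../../modules/input/' + dir + '/module.json', 'utf8');
    })).then(function(modules) {
        res.status(200).json(modules);
    }).catch(next); // forward read errors to the Express error handler
});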
Instead of fs.readFile, use fs.readFileSync.
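That is, something like this (a sketch; readFileSync blocks the event loop while reading, which is the trade-off):

router.get('/list', function(req, res, next) {
    var modules = getInputDirectories().map(function(dir) {
        return fs.readFileSync(__dirname + '/../../modules/input/' + dir + '/module.json', 'utf8');
    });
    res.status(200).json(modules);
});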