My requirement is like below:
I have 3 big files: big_file_1, big_file_2 and big_file_3. I want to read these 3 big files asynchronously and process the rest of my code only after the reading of all the files has completed.
// Question code: the three reads run concurrently and each callback fires at
// an unpredictable later time, so code placed after these calls cannot assume
// content_1/2/3 are set yet — that is exactly the problem being asked about.
fs.readFile('big_file_1', function (err, data) {
if (err) throw err;
// NOTE(review): content_1/2/3 are never declared — assignment creates implicit globals.
content_1 = data;
});
fs.readFile('big_file_2', function (err, data) {
if (err) throw err;
content_2 = data;
});
fs.readFile('big_file_3', function (err, data) {
if (err) throw err;
content_3 = data;
});
// Do something with content_1, content_2 and content_3
How can I achieve this in Node.js?
You can do it using the parallel function of the async library:
// async.parallel starts all three reads concurrently and invokes the final
// callback once every task has finished, or immediately on the first error.
// Each task is fs.readFile with the filename pre-bound; results are Buffers.
async.parallel([
  fs.readFile.bind(fs, 'big_file_1'),
  fs.readFile.bind(fs, 'big_file_2'),
  fs.readFile.bind(fs, 'big_file_3')
], function (err, results) {
  if (err) throw err;
  // Results arrive in task order, regardless of which read finished first.
  content_1 = results[0];
  content_2 = results[1];
  content_3 = results[2];
  /* TODO do some cool stuff */
});
Alternatively, you can do it manually:
// FIX: `int` is not a JavaScript keyword — declare the counter with `var`.
// Each callback increments the shared counter; the last one to finish
// (order is unpredictable) triggers the next step exactly once.
var steps_done = 0;
fs.readFile('big_file_1', function (err, data) {
  if (err) throw err;
  content_1 = data;
  if (++steps_done === 3) do_next_step();
});
fs.readFile('big_file_2', function (err, data) {
  if (err) throw err;
  content_2 = data;
  if (++steps_done === 3) do_next_step();
});
fs.readFile('big_file_3', function (err, data) {
  if (err) throw err;
  content_3 = data;
  if (++steps_done === 3) do_next_step();
});
Related
stackoverflow!
I am fairly new to node.js, and this seems to be node-specific. I keep defining a variable and I get errors that fileName is undefined. I have no idea why this is happening because, from my perspective, I'm just assigning a global variable inside a function, and that should work in all other programming languages I've worked in. Does the argument function in fs.readFile() somehow differ from a normal function? I honestly have no idea. Anyways, this is my full code:
var fs = require('fs');
// Timestamp captured once at module load; used to build the log file name.
var dateObject = new Date();
// Assigned only inside Start()'s readFile callback — undefined until then.
var fileName;
// Reads a run counter from counter.txt, increments and rewrites it, then
// builds the log file name from today's date plus the counter and creates
// the log file with an initial line.
function Start() {
fs.readFile('./counter.txt', function (err, data) {
if (err) throw err;
var current = parseInt(data);
current++;
fs.writeFile('./counter.txt', current.toString(), function(err) {
if (err) throw err;
console.log("Written!");
});
var fullDate = `${dateObject.getDate()}-${dateObject.getMonth() + 1}-${dateObject.getFullYear()}`;
// NOTE(review): fileName is assigned only when this async callback runs, so a
// caller that invokes PAL() immediately after Start() can still see it undefined
// — this is the bug the question describes.
fileName = `./logs/${fullDate} ${current}.txt`;
console.log(fileName);
fs.appendFile(fileName, "Logger Initiated!", function(err){
if (err) throw err;
})
});
}
// Appends `text` to the current log file. If the log file name has not been
// initialised yet (Start() sets it asynchronously), logs a placeholder and
// returns "500" instead of writing.
function PAL (text) {
  if (fileName !== undefined) {
    console.log(fileName);
    fs.appendFile(fileName, text, (err) => {
      if (err) throw err;
    });
    return;
  }
  console.log("...");
  return "500";
}
module.exports = {Start, PAL};
Another file:
const logger = require('./logger')
// ....
app.listen(port, () => {
logger.Start();
logger.PAL("test");
})
You've run into an asynchronous trap in your code: fs.readFile is an asynchronous function, so when you call .PAL you are expecting the .Start function to have finished — but that isn't guaranteed. Here is a rewritten Start function:
// Promisified Start: reads and increments the run counter, derives the log
// file name, and creates the log file. Resolves once the log file has been
// initialised, so callers can safely chain PAL() afterwards; rejects on any
// fs error. (The `async` keyword was dropped — the function already returns
// an explicit Promise, so wrapping it again was redundant.)
function Start() {
  return new Promise((resolve, reject) => {
    fs.readFile('./counter.txt', function (err, data) {
      // FIX: was `throw reject(err)` — reject() returns undefined, so that
      // statement threw `undefined` inside the callback. Reject and stop.
      if (err) return reject(err);
      var current = parseInt(data, 10); // explicit radix
      current++;
      fs.writeFile('./counter.txt', current.toString(), function (err) {
        // FIX: added `return` — without it execution continued after reject.
        if (err) return reject(err);
        console.log("Written!");
        var fullDate = `${dateObject.getDate()}-${dateObject.getMonth() + 1}-${dateObject.getFullYear()}`;
        fileName = `./logs/${fullDate} ${current}.txt`;
        console.log("FILENAME: ", fileName);
        fs.appendFile(fileName, "Logger Initiated!", function (err) {
          if (err) return reject(err);
          resolve();
        });
      });
    });
  });
}
Call: logger.Start().then(() => logger.PAL("test"));
I'm trying to convert the following code to use fs.readFile. I'm running this on a Node server and need it to be asynchronous. I read the Node documentation but found it very difficult to follow.
I'm reading a file and setting it to a variable, im then saving it to the database (mongo). I can't find a way to asynchronously use fs.readFile and set it to a variable.
Here is my current undesired synchronous code:
// Synchronous version: readFileSync blocks the event loop until the whole
// image is in memory, then the document is saved to Mongo.
a.img.data = fs.readFileSync(path.resolve(__dirname + '/imgTest/image.png'));
a.img.contentType = 'image/jpg';
a._id = accountId;
a.save(function (err, a) {
if (err) throw err;
console.log('saved img to mongo');
})
Here is my attempt but it doesn't work:
// NOTE(review): this mixes styles — the callback-based fs.readFile returns
// undefined, so `await` here does not wait for the read. Execution falls
// straight through to a.save() before the callback has set a.img.data,
// which is why this attempt does not work.
await fs.readFile(path.resolve(__dirname + '/imgTest/image.jpg'), function (err, data) {
if (err) return console.error(err);
a.img.data = data;
});
a.img.contentType = 'image/jpg';
a._id = accountId;
a.save(function (err, a) {
if (err) throw err;
console.log('saved img to mongo');
})
You are mixing syntaxes. The normal fs library does not deal in promises so you can't use await, and you do all the work in the callback:
// Callback version: everything that depends on the file contents happens
// inside the readFile callback, so the save cannot run before the data is set.
fs.readFile(path.resolve(__dirname + '/imgTest/image.jpg'), (err, data) => {
  if (err) return console.error(err);
  a.img.data = data;
  a.img.contentType = 'image/jpg';
  a._id = accountId;
  a.save((err, a) => {
    if (err) throw err;
    console.log('saved img to mongo');
  });
});
or you go the Promises route
// Promise version: fs/promises lets us await the read and keep the code flat.
const fsp = require("fs/promises");
try {
  const imageData = await fsp.readFile(path.resolve(__dirname + '/imgTest/image.jpg'));
  a._id = accountId;
  a.img.contentType = 'image/jpg';
  a.img.data = imageData;
  a.save((err, a) => {
    if (err) throw err;
    console.log('saved img to mongo');
  });
} catch (err) {
  console.log(err);
}
I have a multer multi-upload form, then i process the images with the Cloud Vision api, do some process on the OCR result and i want to redirect to another route (/next2) after ALL the files are processed.
I edited my code with async.forEach but I got a
TypeError: Cannot read property '0' of undefined
What did I get wrong?
// Question code (known-buggy): every connection.query below is asynchronous,
// but several results are read outside the callbacks that set them — see the
// answer that follows for the specific failures.
app.post('/vision/upload', upload.array("photos", 10), function(req, res) {
async.forEach(req.files, function (file, cb) {
var post = {url: file.location};
connection.query('SET FOREIGN_KEY_CHECKS=0;', function (err) {
if (err) throw err;
});
connection.query('SELECT * FROM documents WHERE documents.url = ?', file.location, function (err, res54) {
var o2 = isEmpty(res54);
var m9 = {};
if (o2) {
connection.query('INSERT INTO documents SET ?', post, function (err, res5) {
if (err) throw err;
DocumentsNextPage.push(res5.insertId);
});
} else {
connection.query('SELECT * FROM documents WHERE documents.url = ?', file.location, function (err, res9) {
// NOTE(review): m9 is assigned here, asynchronously...
m9 = res9;
});
connection.query('UPDATE documents SET ? WHERE ?', [{url: file.location}, {url: file.location}], function (err) {
if (err) throw err;
// NOTE(review): ...but read here before the SELECT callback above has run,
// so m9 is still the empty object and m9[0] is undefined — the reported TypeError.
DocumentsNextPage.push(m9[0].id);
});
}
if (err) throw err;
});
const req2 = new vision.Request({
image: new vision.Image({
url: file.location
}),
features: [
new vision.Feature('DOCUMENT_TEXT_DETECTION', 10),
]
});
// NOTE(review): clears the shared array while the queries above may still push into it.
DocumentsNextPage.length = 0;
vision.annotate(req2).then((res2) => {
p1 = JSON.stringify(res2.responses);
p1up = p1.toUpperCase();
x7 = res2.responses[0].textAnnotations;
console.log(x7);
})
// NOTE(review): p1up and x7 are assigned inside the .then above; at this
// point they are still undefined (or stale from a previous file).
occurrencesText = new Occurrences(p1up, {ignored: arrayIgnoredWords});
var tt1 = occurrencesText.getSorted();
var oc1 = toArray(tt1);
var oc2 = unique(oc1);
for (var i = 0; i < 10; i++) {
occurencesResults.push(oc2[i][0]);
var postOccu = {name: oc2[i][0], active: 0, isOccurenceMeta: 1, url: file.location};
connection.query('REPLACE INTO metas SET ?', postOccu, function (err) {
if (err) throw err;
});
}
connection.query(queryString, function (err, rows, fields) {
if (err) throw err;
for (var i in rows) {
var fuse = new Fuse(x7, options);
var result = fuse.search(rows[i].name);
var t1 = isEmpty(result);
if (t1) {
} else {
arrayResults.push(rows[i].name);
var posTag0 = {name: [rows[i].name], active: 0, isOccurenceMeta: 0, url: file.location};
connection.query('INSERT INTO metas SET ?', posTag0, function (err) {
if (err) throw err;
});
}
}
connection.query('SELECT * FROM documents INNER JOIN metas ON documents.url = metas.url WHERE metas.url = ? GROUP BY metas.name ORDER BY documents.url DESC', file.location, function (err, res99) {
if (err) throw err;
for (var i in res99) {
if (res99[i].id != undefined) {
resultMetasDocs[i] = {'documents_id': res99[i].id, 'metas_id': res99[i].id_meta};
}
}
});
for (var i in resultMetasDocs) {
var documentHasMetas = resultMetasDocs[i];
connection.query('REPLACE INTO documents_has_metas SET ?', documentHasMetas, function (err) {
if (err) throw err;
});
}
})
})
// NOTE(review): check the bracket nesting here — the `})` above appears to
// already close async.forEach, which would leave this cb() outside the
// per-file iterator and make the function below an extra app.post handler
// rather than forEach's completion callback.
cb();
}, function () {
res.redirect('/next2');
});
Several Issues:
var m9 = {};
You have defined m9 to be an object, but you later try to access its members as if it were an array. Ensure that m9 has the property you are trying to access and is of the correct type.
// The two queries below run concurrently: the UPDATE's callback reads m9
// before the SELECT's callback has assigned it, so m9[0] is still undefined.
connection.query('SELECT * FROM documents WHERE documents.url = ?', file.location, function (err, res9) {
  m9 = res9;
});
connection.query('UPDATE documents SET ? WHERE ?', [{url: file.location}, {url: file.location}], function (err) {
  if (err) throw err;
  DocumentsNextPage.push(m9[0].id);
}); // FIX: removed a stray trailing backtick (markup artifact) that broke the snippet
You are probably trying to access the result of one asynchronous operation inside the callback of another operation, which has no knowledge of whether the first operation has completed yet.
// Nest the dependent UPDATE inside the SELECT's callback so m9 is guaranteed
// to be populated before it is read.
connection.query('SELECT * FROM documents WHERE documents.url = ?', file.location, function (err, res9) {
  m9 = res9;
  connection.query('UPDATE documents SET ? WHERE ?', [{url: file.location}, {url: file.location}], function (err) {
    if (err) throw err;
    /*
    because this callback is inside the callback of the former
    operation, we can be sure that we will have access to the
    results from that operation `m9` in this case
    */
    DocumentsNextPage.push(m9[0].id);
  });
});
I need to read a file line by line and write newlines to same file while reading, if each line satisfy certain set of conditions. What could be the best way.
// Reads `file`, splits it into lines, reopens the file for writing, and
// rewrites the lines that satisfy the condition; invokes `callback(err)`
// when done. (FIX: the original `fs.readFile(file, (err, 'utf8', data)` was
// a syntax error — 'utf8' is the encoding argument and must come second;
// the anonymous function declaration was also invalid, so it is named now.)
function rewriteMatchingLines(file, callback) {
  fs.readFile(file, 'utf8', (err, data) => {
    if (err) return callback(err);
    var lines = data.split('\n');
    // 'w' truncates the file, so every line you want to keep must be written back.
    fs.open(file, 'w', (err, fd) => {
      if (err) return callback(err);
      lines.forEach((line) => {
        if (line === 'meet your condition') {
          // do your write using fs.write(fd, )
        }
      });
      callback();
    });
  });
}
Use the Node fs module; with fs you can perform operations asynchronously as well as synchronously. Below is an example of the asynchronous approach:
// Asynchronous copy: read srcPath as UTF-8, (optionally) transform the data,
// then write the result to savPath. Logs 'complete' when the write finishes.
function readWriteData(savPath, srcPath) {
  fs.readFile(srcPath, 'utf8', (err, contents) => {
    if (err) throw err;
    //Do your processing, MD5, send a satellite to the moon or can add conditions , etc.
    fs.writeFile(savPath, contents, (err) => {
      if (err) throw err;
      console.log('complete');
    });
  });
}
Example of sequencing the operations with callbacks (note that fs.readFile is still asynchronous — this is sequenced, not truly synchronous, code):
// Reads srcPath as UTF-8 and hands the contents to `callback`.
// Note: read errors are thrown, not passed to the callback.
function readFileContent(srcPath, callback) {
  fs.readFile(srcPath, 'utf8', (err, contents) => {
    if (err) throw err;
    callback(contents);
  });
}
// Copies srcPath to savPath by sequencing the read and the write through
// callbacks; logs 'complete' once the write has finished.
function writeFileContent(savPath, srcPath) {
  readFileContent(srcPath, (contents) => {
    fs.writeFile(savPath, contents, (err) => {
      if (err) throw err;
      console.log('complete');
    });
  });
}
Lets say, for example, I want to write a nodejs program where I have two or three independent parts like fs.readdir, fs.copy, etc. on different locations, but the result all three actions is to be sent to a json file like this:
// Question code: the three fs operations run concurrently, and fs.writeJson
// at the bottom is issued immediately — before any of their callbacks have
// updated jsd — which is exactly the ordering problem being asked about.
var fs = require('fs-extra');
var jsd = {
"act1" : false,
"act2" : false,
"act3" : false
}
fs.readdir(path1, function (err, files) {
// NOTE(review): on error this stores err in jsd.act1, but the line after the
// loop unconditionally overwrites it with true.
if (err) jsd.act1 = err;
for (x in files) console.log(files[x]);
jsd.act1 = true;
});
fs.copy(path2, path3, function (err) {
if (err) jsd.act2 = err;
jsd.act2 = true;
});
fs.remove(path4, function (err) {
if (err) jsd.act3 = err;
jsd.act3 = true;
});
// all three of the above actions are independent, so it makes sense that all of them are executed asynchronously.
// Now we write jsd object to a json file; jsd's contents are dependent on the above actions though
fs.writeJson("./data.json", jsd, function (err, files) {
if (err) return console.error(err);
});
How do I make sure that the correct data is entered into the file data.json, i.e fs.writeJson executes after the actions previous to it are executed first?
I know one way is to nest all of them, i.e,
readdir() {
copy() {
remove() {
writeJson();
}
}
}
But this may result in callback hell, so is there a better way to do this?
You can use Promises or the async module.
If you use Promises, first convert each callback-style function into one that returns a Promise, like this:
// Promise wrapper around fs.readdir: logs each directory entry and resolves
// with true; rejects with the fs error on failure. (Name kept as `reddir`
// because the Promise.all example below refers to it.)
const reddir = function(path) {
  return new Promise((resolve, reject) => {
    fs.readdir(path, (err, files) => {
      if (err) return reject(err);
      // FIX: was `for (x in files)` — an index loop over an implicit global x;
      // iterate the entries directly instead (same values are printed).
      for (const file of files) console.log(file);
      resolve(true);
    });
  });
};
then you can use
// Run all three operations concurrently; write the JSON only after every
// one of them has fulfilled. Any rejection falls through to .catch.
Promise.all([reddir(path1), copy(path2, path3), remove(path4)])
  // .spread is a bluebird-only feature; native promises destructure the
  // results array instead.
  .then(([act1, act2, act3]) => {
    // FIX: the path must be a string literal — `writeJson(./data.json)` was a syntax error.
    return writeJson("./data.json");
  })
  .catch(e => {
    // all errors can be handled in this one place
  });
If you use the async module, you can write it like this:
async.parallel({
act1: function(cb){
fs.reddir(path1, (err, files) => {
if (err) return cb(err);
for (x in files) console.log(files[x]);
cb(true);
})
},
act2: ...
},(err, jsd) => { // jsd will be {act1: true, act2: ...}
if (err) return console.error(err); // handle all above error here;
fs.writeJson("./data.json", jsd, function (err, files) {
if (err) return console.error(err);
});
})