How to convert readFileSync code to readFile? - node.js

I'm trying to convert the following code to use fs.readFile. I'm running this on a Node server and need it to be async. I read the Node documentation and found it very difficult to follow.
I'm reading a file and setting it to a variable, and I'm then saving it to the database (Mongo). I can't find a way to asynchronously use fs.readFile and set its result to a variable.
Here is my current undesired synchronous code:
// Synchronous read: blocks the event loop until the whole file is in memory.
a.img.data = fs.readFileSync(path.resolve(__dirname + '/imgTest/image.png'));
// NOTE(review): reads a .png but labels it image/jpg — content type looks wrong.
a.img.contentType = 'image/jpg';
a._id = accountId;
a.save(function (err, a) {
if (err) throw err;
console.log('saved img to mongo');
})
Here is my attempt but it doesn't work:
// BUG (as posted): `await` on the callback-style fs.readFile — it returns
// undefined, so `await` does not wait for the read to finish.
await fs.readFile(path.resolve(__dirname + '/imgTest/image.jpg'), function (err, data) {
if (err) return console.error(err);
a.img.data = data;
});
// These lines run before the callback above has fired, so a.img.data is
// still unset when save() executes.
a.img.contentType = 'image/jpg';
a._id = accountId;
a.save(function (err, a) {
if (err) throw err;
console.log('saved img to mongo');
})

You are mixing syntaxes. The normal fs library does not deal in promises so you can't use await, and you do all the work in the callback:
// Callback style: everything that needs the file contents lives inside the
// readFile callback, so nothing runs before the data is available.
fs.readFile(path.resolve(__dirname + '/imgTest/image.jpg'), function (err, contents) {
  if (err) return console.error(err);
  a.img.data = contents;
  a.img.contentType = 'image/jpg';
  a._id = accountId;
  a.save(function (err, a) {
    if (err) throw err;
    console.log('saved img to mongo');
  });
});
or you go the Promises route
// Promise style: fs/promises + await, with all failures funneled into one catch.
const fsPromises = require("fs/promises");
try {
  // readFile resolves with a Buffer of the whole file.
  a.img.data = await fsPromises.readFile(path.resolve(__dirname + '/imgTest/image.jpg'));
  a.img.contentType = 'image/jpg';
  a._id = accountId;
  a.save(function (err, a) {
    if (err) throw err;
    console.log('saved img to mongo');
  });
} catch(err) {
  console.log(err);
}

Related

Node.js - undefined variable after assignment in fs functions

stackoverflow!
I am fairly new to node.js, and this seems to be node-specific. I keep defining a variable and I get errors that fileName is undefined. I have no idea why this is happening because, from my perspective, I'm just assigning a global variable inside a function, and that should work in all other programming languages I've worked in. Does the argument function in fs.readFile() somehow differ from a normal function? I honestly have no idea. Anyways, this is my full code:
var fs = require('fs');
var dateObject = new Date();
// Shared between Start and PAL; only assigned inside the async readFile
// callback in Start, which is why it can still be undefined when PAL runs.
var fileName;
function Start() {
// Async: this callback runs later, after Start() has already returned.
fs.readFile('./counter.txt', function (err, data) {
if (err) throw err;
var current = parseInt(data);
current++;
fs.writeFile('./counter.txt', current.toString(), function(err) {
if (err) throw err;
console.log("Written!");
});
var fullDate = `${dateObject.getDate()}-${dateObject.getMonth() + 1}-${dateObject.getFullYear()}`;
fileName = `./logs/${fullDate} ${current}.txt`;
console.log(fileName);
fs.appendFile(fileName, "Logger Initiated!", function(err){
if (err) throw err;
})
});
}
function PAL (text) {
// Guard: fileName is set asynchronously, so calling PAL right after Start()
// hits this branch — the exact symptom described in the question.
if (fileName === undefined) {
console.log("...");
return "500";
}
console.log(fileName);
fs.appendFile(fileName, text, function(err) {
if (err) throw err;
})
}
module.exports = {Start, PAL};
Another file:
const logger = require('./logger')
// ....
app.listen(port, () => {
logger.Start();
logger.PAL("test");
})
You've got an asynchronous trap in this code: fs.readFile is an async function, so when you call .PAL you're expecting the .Start function to have finished — but that isn't true. Start function:
// Promisified Start: the returned promise resolves only after the counter
// has been read, incremented, written back, and the new log file created —
// so callers can safely sequence on it (logger.Start().then(...)).
// BUG FIX: the original had `if (err) throw reject(err);` — reject() returns
// undefined, so this threw `undefined` inside the readFile callback instead
// of rejecting. Also added `return` after each reject so later steps do not
// keep running on a settled promise, and a radix for parseInt.
async function Start() {
  return new Promise((resolve, reject) => {
    fs.readFile('./counter.txt', function (err, data) {
      if (err) return reject(err);
      var current = parseInt(data, 10);
      current++;
      fs.writeFile('./counter.txt', current.toString(), function(err) {
        if (err) return reject(err);
        console.log("Written!");
        var fullDate = `${dateObject.getDate()}-${dateObject.getMonth() + 1}-${dateObject.getFullYear()}`;
        fileName = `./logs/${fullDate} ${current}.txt`;
        console.log("FILENAME: ", fileName);
        fs.appendFile(fileName, "Logger Initiated!", function(err){
          if (err) return reject(err);
          resolve();
        })
      });
    });
  })
}
Call: logger.Start().then(() => logger.PAL("test"));

NodeJS request not returned

Swagger Inspector
This is the source code
API.js
// The route never calls a method on `res` (send/json/end), so the HTTP
// request hangs — this is the problem the question is about.
router.post('/curriculum2_section/', (req,res)=> {
curriculum2_section.CREATE(req.body.curr2_section,req.body.curr2_id,req.body.curr2_section_student_amount)
})
curriculum2_section.js
// NOTE(review): request values are interpolated straight into the SQL string
// — this is injectable; a parameterized query should be used instead.
// NOTE(review): a connection is checked out via getConnection, but the query
// is issued on `pool`, so `conn` is only ever used for release().
module.exports.CREATE = function CREATE(curr2_section,curr2_id,curr2_section_student_amount) {
var sqlString = `INSERT INTO curriculum2_section(curr2_id,curr2_section,curr2_section_student_amount)VALUES('${curr2_id}',${curr2_section},${curr2_section_student_amount})`
pool.getConnection((err, conn) => {
if(err) throw err
pool.query(sqlString, function (err, rows) {
console.log('Query String:\n '+this.sql);
if(err) throw err
console.log('Data Insert: '+curr2_section+','+curr2_id+','+curr2_section_student_amount)
conn.release();
})
})
}
I've tried using the callback but it doesn't work. I think it's because I don't understand how to use it.
Please help me
You need to pass the data you want to output to a method on res in order to get output from Express.
Given what you have here, your best bet might be to add a callback as the last parameter of curriculum2_section.CREATE and use that callback to pass data to res.
https://expressjs.com/en/4x/api.html#res
// Route handler: delegates to CREATE and replies through the callback.
// BUG FIX: the original ignored the callback's `err` argument and replied
// as if the insert succeeded; failures now produce a 500 response.
router.post('/curriculum2_section/', (req,res)=> {
  curriculum2_section.CREATE(req.body.curr2_section, req.body.curr2_id, req.body.curr2_section_student_amount, (err, data) => {
    if (err) return res.status(500).send(err.message);
    res.send(data);
  });
});
// Inserts a curriculum section row, then reports the outcome through `cb`
// (error-first callback). Review fixes applied:
//  - parameterized query instead of string-built SQL (injection-safe)
//  - the query runs on the checked-out `conn` (the original queried `pool`,
//    which made getConnection pointless)
//  - errors are passed to `cb` instead of thrown inside async callbacks,
//    and the connection is released on both success and failure.
module.exports.CREATE = function CREATE(curr2_section, curr2_id, curr2_section_student_amount, cb) {
  var sqlString = 'INSERT INTO curriculum2_section(curr2_id,curr2_section,curr2_section_student_amount)VALUES(?,?,?)';
  pool.getConnection((err, conn) => {
    if (err) return cb(err);
    conn.query(sqlString, [curr2_id, curr2_section, curr2_section_student_amount], function (err, rows) {
      conn.release();
      if (err) return cb(err);
      console.log('Data Insert: '+curr2_section+','+curr2_id+','+curr2_section_student_amount);
      cb(null, 'Data Insert: '+curr2_section+','+curr2_id+','+curr2_section_student_amount);
    });
  });
};

node script just stops

I have a very large multi section script with a LOT of loops and some recursion in it. When I run it on a Very Large dataset, the script will simply stop running. It stops with a 0 exit code. It VERY clearly does not actually finish running...it just...stops.
// Waterfall: each step receives the previous step's results plus a callback.
// If any step never invokes its callback, the chain silently stalls and —
// once the event loop drains — the process exits with code 0, which matches
// the symptom described above.
asyncLib.waterfall([
getPronghornToken,
saveSchedulers,
saveServices,
populateServRefs,
saveServiceGroups,
saveNetworks,
populateNetRefs, //never actually gets out of this function. Just exits with code 0
saveNetworkGroups,
saveRuleGroups,
fetchRuleGroupIds,
populateRules,
saveRules,
getPolicyId,
linkRuleGroup
], function (err, result) {
if (err){
console.error("Something bad happened. Please try again");
process.exit(1);
}
console.log("done");
});
What I'm looking for: Why would a script just stop mid loop and exit with a 0 code?
Note: Alternate code.
// Hand-rolled equivalent of the waterfall above: each step runs inside the
// previous step's callback, threading `token` through. It has the same
// stalling hazard — if any step never calls its callback, the chain stops
// with no error and the process can exit cleanly with code 0.
getPronghornToken((err, token) => {
if(err) {
console.log("Error occured getPronghornToken");
throw err;
}
saveSchedulers(token, (err, token) => {
if(err) {
console.log("Error occured saveSchedulers");
throw err;
}
saveServices(token, (err, token) => {
if(err) {
console.log("Error occured saveServices");
throw err;
}
populateServRefs(token, (err, token) => {
if(err) {
console.log("Error occured populateServRefs");
throw err;
}
saveServiceGroups(token, (err, token) => {
if(err) {
console.log("Error occured saveServiceGroups");
throw err;
}
saveNetworks(token, (err, token) => {
if(err) {
console.log("Error occured saveNetworks");
throw err;
}
populateNetRefs(token, (err, token) => {
if(err) {
console.log("Error occured populateNetRefs");
throw err;
}
saveNetworkGroups(token, (err, token) => {
if(err) {
console.log("Error occured saveNetworkGroups");
throw err;
}
saveRuleGroups(token, (err, token) => {
if(err) {
console.log("Error occured saveRuleGroups");
throw err;
}
fetchRuleGroupIds(token, (err, token) => {
if(err) {
console.log("Error occured fetchRuleGroupIds");
throw err;
}
populateRules(token, (err, token) => {
if(err) {
console.log("Error occured populateRules");
throw err;
}
saveRules(token, (err, token) => {
if(err) {
console.log("Error occured saveRules");
throw err;
}
getPolicyId(token, (err, token) => {
if(err) {
console.log("Error occured getPolicyId");
throw err;
}
linkRuleGroup(token, (err, token) => {
if(err) {
console.log("Error occured linkRuleGroup");
throw err;
}
console.log("Successfully installed all files");
});
});
});
});
});
});
});
});
});
});
});
});
});
});
No errors thrown. Does NOT print out the innermost message. Callback pattern verified.
Last Function running looks like this:
// Resolves each networkGroup's _netRefs via fetchNetId, then signals
// completion through the callback.
// BUG FIX: the original placed `callback(null, token)` inside the loop
// behind `if (index >= networkGroups.length)` — a condition the `for` guard
// makes unreachable — so the callback was NEVER invoked, the waterfall
// stalled, and the process exited with code 0 ("script just stops").
// Completion now happens after the loop finishes.
async function populateNetRefs(token, callback) {
  for(let index = 0; index < networkGroups.length; index++) {
    let networkGroup = networkGroups[index];
    try {
      console.log(`fetching network number: ${index+1} / ${networkGroups.length}`);
      let newNetRefs = await fetchNetId(token, networkGroup._netRefs);
      networkGroup._netRefs = newNetRefs;
    } catch (err) {
      // Deliberate best-effort: log the failure and continue with the next group.
      console.log(`An error occurrent fetching the network id for index ${index+1} / ${networkGroups.length}: ${err}`);
    }
  }
  console.log("Net Refs Finished")
  return callback(null, token);
}
The Inner Function:
// Fetches ids for each _netRef over HTTPS; resolves with the collected refs
// once the count of completed responses equals _netRefs.length.
// BUG (confirmed by the accepted fix further down): if _netRefs is empty,
// forEach never runs, the completion condition is never met, and the
// promise never settles — the awaiting caller hangs forever.
function fetchNetId(token, _netRefs) {
let fetchFinished = 0;
let newNetRefs = [];
let errCount = 1;
console.log("ZZ Fetchid Start ZZ");
return new Promise((resolve, reject) => {
_netRefs.forEach(function(_netRef) {
let options = {
//Required to be hidden
};
let req = https.request(options, (res) => {
let reply = [];
res.setEncoding('utf8');
res.on('data', (chunk) => {
console.log("YY GET DATA CHUNK YY");
reply.push(chunk);
});
res.on('end', () => {
fetchFinished++;
console.log("Reply is : " + reply.join());
//There is some logic in this spot. Not for you.
console.log("fetchFinished is: " + fetchFinished + ", size is: " + _netRefs.length);
if (fetchFinished === _netRefs.length) {
resolve(newNetRefs);
}
});
});
// NOTE(review): request errors are only logged — reject is commented out —
// so a failed request also leaves the promise permanently unsettled.
req.on('error', (e) => {
console.error(`problem with request ${errCount++}: ${e.message}`);
//reject(e);
});
let body = JSON.stringify({
"options" : {
"start": 0,
"limit": 5,
"sort": {
},
"filter": {
"name":{"$eq":_netRef}
}
}
});
console.log("XX Sending Request XX");
req.write(body);
req.end();
});
});
}
BEHAVIOR UPDATE - More Console Logs
Here's the end of the console log:
fetching network number: 49 / 711
ZZ Fetchid Start ZZ
XX Sending Request XX
XX Sending Request XX
YY GET DATA CHUNK YY
Reply is : {hidden from you}
TroubleShootingDias: some guid
fetchFinished is: 1, size is: 2
YY GET DATA CHUNK YY
Reply is : {hidden from you}
TroubleShootingDias: some guid
fetchFinished is: 2, size is: 2
fetch success
fetching network number: 50 / 711
ZZ Fetchid Start ZZ
[vagrant#vag-host1 space-parser]$
I highly recommend you look at async.waterfall to help structure code like this, as it can be a nightmare to debug and read. There is a lot to grasp in your code above, but it could be helpful to wrap the following in a try/catch. While you are handling req errors, those are only request errors — something else (including a possibly malformed URL) could throw, and in that case the promise never settles.
// Fragment from the answer: wraps the request construction in try/catch so
// synchronous throws (e.g. malformed `options`) also reject the surrounding
// promise instead of escaping silently. Relies on `options`, `resolve`,
// `reject`, `fetchFinished`, `newNetRefs`, `errCount` and `_netRefs` from
// the enclosing closure.
try {
let req = https.request(options, (res) => {
let reply = [];
res.setEncoding('utf8');
res.on('data', (chunk) => {
console.log("YY GET DATA CHUNK YY");
reply.push(chunk);
});
res.on('end', () => {
fetchFinished++;
console.log("Reply is : " + reply.join());
//There is some logic in this spot. Not for you.
console.log("fetchFinished is: " + fetchFinished + ", size is: " + _netRefs.length);
if (fetchFinished === _netRefs.length) {
resolve(newNetRefs);
}
});
});
// Unlike the question's version, request errors now reject the promise.
req.on('error', (err) => {
console.error(`problem with request ${errCount++}: ${err.message}`);
return reject(err);
});
}
catch(err) {
console.error(`problem with request ${err.message}`);
return reject(err);
}
Actual solution to the problem: If the array is empty, then the promise never resolves. Added an empty check to the very top, before the loop.
// The accepted fix: resolve immediately for an empty _netRefs array so the
// promise always settles. (This snippet is truncated in the original post —
// the trailing `//........` stands in for the rest of the loop body shown
// in the earlier listing.)
function fetchNetId(token, _netRefs) {
let fetchFinished = 0;
let newNetRefs = [];
let errCount = 1;
console.log("ZZ Fetchid Start ZZ");
return new Promise((resolve, reject) => {
if(_netRefs.length === 0) return resolve([]) // <==============
_netRefs.forEach(function(_netRef) {
let options = {
//Required to be hidden
};
let req = https.request(options, (res) => {
let reply = [];
res.setEncoding('utf8');
res.on('data', (chunk) => {
console.log("YY GET DATA CHUNK YY");
reply.push(chunk);
});
//........

Calculate a file hash and save the file

Users upload files into my express app. I need to calc hash of the uploaded file and then write file to disk using calculated hash as a filename. I try to do it using the following code:
// Question code: hashes the upload, then tries to write the same stream to
// disk.
// BUG (the question's problem): createFileHash pipes `file` into the hash
// and waits for 'end' — by then the stream is fully consumed, so the later
// file.pipe(stream) writes nothing and the file on disk is 0 bytes.
function storeFileStream(file, next) {
createFileHash(file, function(err, hash) {
if (err) {
return next(err);
}
var fileName = path.join(config.storagePath, hash),
stream = fs.createWriteStream(fileName);
stream.on('error', function(err) {
return next(err);
});
stream.on('finish', function() {
return next();
});
file.pipe(stream);
});
}
// Streams `file` through a SHA-1 hash and calls next(null, hexDigest) on end.
function createFileHash(file, next) {
var hash = crypto.createHash('sha1');
hash.setEncoding('hex');
file.on('error', function(err) {
return next(err);
});
file.on('end', function(data) {
hash.end();
return next(null, hash.read());
});
file.pipe(hash);
}
The problem is that after I calculate the file hash, the written file's size is 0. What is the best way to solve this task?
Update
Following @poke's suggestion, I tried to duplicate my stream. Now my code is:
// Updated attempt: tee `file` into two PassThrough streams (s1 feeds the
// hash, s2 is written to disk).
// BUG (still, per the question text): the end/finish events do not fire with
// both pipes attached this way; the working fixes below forward chunks
// manually or resume() the branches.
function storeFileStream(file, next) {
var s1 = new pass;
var s2 = new pass;
file.pipe(s1);
file.pipe(s2);
createFileHash(s1, function(err, hash) {
if (err) {
return next(err);
}
var fileName = path.join(config.storagePath, hash),
stream = fs.createWriteStream(fileName);
stream.on('error', function(err) {
return next(err);
});
stream.on('finish', function() {
return next();
});
s2.pipe(stream);
});
}
function createFileHash(file, next) {
var hash = crypto.createHash('sha1');
hash.setEncoding('hex');
file.on('error', function(err) {
return next(err);
});
file.on('end', function(data) {
hash.end();
return next(null, hash.read());
});
file.pipe(hash);
}
The problem with this code is that the end and finish events are not emitted. If I comment out file.pipe(s2);, the events are emitted, but then I'm back to my original problem.
This code fix the problem:
// Working fix: manually forward every chunk to both PassThrough streams and
// end both when the source ends, so the hashing branch and the writing
// branch each see the complete upload.
var s1 = new passThrough,
s2 = new passThrough;
file.on('data', function(data) {
s1.write(data);
s2.write(data);
});
file.on('end', function() {
s1.end();
s2.end();
});
The correct and simple way should be as follow:
we need to resume the passed-through streams
// Tees the upload into two PassThrough branches: one feeds the hash, the
// other is written to disk once the hash (and therefore the file name) is
// known. resume() switches both branches into flowing mode so neither side
// stalls (see https://nodejs.org/api/stream.html#stream_three_states).
function storeFileStream(file, directory, version, reject, resolve) {
  const hashBranch = new PassThrough();
  const diskBranch = new PassThrough();
  file.pipe(hashBranch);
  file.pipe(diskBranch);
  // Key point: without resume() the paused branches never receive data.
  hashBranch.resume();
  diskBranch.resume();
  createFileHash(hashBranch, function(err, hash) {
    if (err) {
      return reject(err);
    }
    const targetName = path.join(directory, version + '_' + hash.slice(0, 8) + '.zip');
    const out = fs.createWriteStream(targetName);
    out.on('error', function(err) {
      return reject(err);
    });
    out.on('finish', function() {
      return resolve();
    });
    diskBranch.pipe(out);
  });
}
// Streams `readStream` through a SHA-1 hash and calls `next(null, hexDigest)`
// when hashing finishes, or `next(err)` on failure.
// BUG FIX: pipe() does not forward source errors, so a failing readStream
// previously left `next` uncalled forever; we now listen on the source too,
// and guard against calling `next` more than once.
function createFileHash(readStream, next) {
  const hash = crypto.createHash('sha1');
  hash.setEncoding('hex');
  let settled = false;
  // Invoke `next` at most once, whichever event fires first.
  const finish = function(err, digest) {
    if (settled) return;
    settled = true;
    next(err, digest);
  };
  readStream.on('error', function(err) {
    return finish(err);
  });
  hash.on('error', function(err) {
    return finish(err);
  });
  hash.on('finish', function() {
    return finish(null, hash.read());
  });
  readStream.pipe(hash);
}
You could use the async module (not tested but should work):
// Waterfall: task 1 hashes the upload, task 2 writes it to disk under the
// hash-derived name.
// BUG FIX: the original passed `done(null, hash.read)` — the read function
// itself, not the digest; the hash is now ended and its hex string read.
async.waterfall([
  function(done) {
    var hash = crypto.createHash('sha1');
    hash.setEncoding('hex');
    file.on('error', function(err) {
      done(err);
    });
    file.on('end', function(data) {
      hash.end();
      done(null, hash.read());
    });
    file.pipe(hash);
  },
  function(hash, done) {
    var fileName = path.join(config.storagePath, hash),
    stream = fs.createWriteStream(fileName);
    stream.on('error', function(err) {
      done(err);
    });
    stream.on('finish', function() {
      done(null);
    });
    // NOTE(review): `file` has already been fully consumed by task 1, so this
    // pipe writes nothing — the stream must be teed (PassThrough pair, as in
    // the earlier snippet) for the written file to have content.
    file.pipe(stream);
  }
], function (err) {
  console.log("Everything is done!");
});

Handle timeout callback event in readFile method of Node.js

My requirement is like below:
I have 3 big files: big_file_1, big_file_2 and big_file_3. I want to read these 3 big files asynchronously and process the rest of my code only after the reading of all the files has completed.
// Question code: three independent async reads with no coordination — any
// code placed after these calls runs before the content_N variables are
// assigned.
fs.readFile('big_file_1', function (err, data) {
if (err) throw err;
content_1 = data;
});
fs.readFile('big_file_2', function (err, data) {
if (err) throw err;
content_2 = data;
});
fs.readFile('big_file_3', function (err, data) {
if (err) throw err;
content_3 = data;
});
// Do something with content_1, content_2 and content_3
How can I achieve this in Node.JS ?
You can do it using the parallel function of the async library:
// Run the three reads concurrently; async.parallel collects each file's
// buffer in order and fires the final callback once all reads complete.
async.parallel([
  fs.readFile.bind(fs, 'big_file_1'),
  fs.readFile.bind(fs, 'big_file_2'),
  fs.readFile.bind(fs, 'big_file_3')
], function (err, results) {
  if (err) throw err;
  // results arrive in the same order as the tasks above.
  [content_1, content_2, content_3] = results;
  /* TODO do some cool stuff */
})
Alternatively, you can do it manually:
// Manual join: each callback increments a shared counter; whichever read
// finishes third triggers the next step.
// BUG FIX: `int steps_done = 0` is not JavaScript (C-style declaration) —
// declared with `let`; statements also terminated with semicolons.
let steps_done = 0;
fs.readFile('big_file_1', function(err, data) {
  if (err) throw err;
  content_1 = data;
  if (++steps_done === 3) do_next_step();
});
fs.readFile('big_file_2', function(err, data) {
  if (err) throw err;
  content_2 = data;
  if (++steps_done === 3) do_next_step();
});
fs.readFile('big_file_3', function(err, data) {
  if (err) throw err;
  content_3 = data;
  if (++steps_done === 3) do_next_step();
});

Resources