Node.js writing data to file throws an error - node.js

Got this error when I run my program
TypeError: Callback is not a function
// Update data from a new file
lib.update = function(dir,file,callback){
//Open the file for writing
fs.open(lib.baseDir+dir+'/'+'.json','r+',function(err,fileDescriptor){
if(!err && fileDescriptor){
var stringData= JSON.stringify(data);
//Truncate the file before writing
fs.truncate(fileDescriptor,function(err){
if(!err){
//Write to the file and close it
fs.writeFile(fileDescriptor,stringData,function(err){
if(!err){
fs.close(fileDescriptor,function(err){
if(!err){
callback(false);
}else {
callback('Error closing existing file!')
}
})
}else {
callback('Error writing to existing file')
}
});
}else {
callback('Error Truncating file')
}
});
}else {
callback('Could not open file for updating! May not exist yet')
}
});
}

I propose you to refactor your code to be a little more clear about what you are doing. Something like this, it will help you find where errors come. Using promises better than use a lot of callbacks.
Just my contribution.
lib.update = function(dir, file, data) {
const updateP = new CustomPromise();
openFile()
.then(truncateFile)
.then(writeFile.bind(this, JSON.stringify(data)))
.then(closeFile)
.then(updateP.resolve) // at this point all the functions was successful
.catch((errorType) =>
errorType !== ERROR_TYPE.ERROR_CLOSING_FILE // something fail try to close the file
? closeFile().finally(() => updateP.reject(errorType))
: updateP.reject(errorType),
);
return updateP.promise;
};
// Constants
const ERROR_TYPE = Object.freeze({
ERROR_OPEN_FILE: 'error-open-file',
ERROR_TRUNCATING_FILE: 'error-truncating-file',
ERROR_WRITING_FILE: 'error-writing-file',
ERROR_CLOSING_FILE: 'error-closing-file',
});
// Private functions
function CustomPromise() {
this.promise = new Promise(function(resolve, reject) {
this.resolve = resolve;
this.reject = reject;
});
}
function openFile() {
const openP = new CustomPromise();
fs.open(lib.baseDir + dir + '/' + '.json', 'r+', function(err, fileDescriptor) {
(err || !fileDescriptor) && openP.reject(ERROR_TYPE.ERROR_OPEN_FILE);
openP.resolve(fileDescriptor);
});
return openP.promise;
}
function truncateFile(fileDescriptor) {
const truncateP = new CustomPromise();
fs.truncate(fileDescriptor, function(err) {
err && truncateP.reject(ERROR_TYPE.ERROR_TRUNCATING_FILE);
truncateP.resolve(fileDescriptor);
});
return truncateP.promise;
}
function writeFile(data, fileDescriptor) {
const writeFileP = new CustomPromise();
fs.writeFile(fileDescriptor, data, function(err) {
err && writeFileP.reject(ERROR_TYPE.ERROR_WRITING_FILE);
writeFileP.resolve(fileDescriptor);
});
return writeFileP.promise;
}
function closeFile(fileDescriptor) {
const closeP = new CustomPromise();
fs.close(fileDescriptor, function(err) {
err && closeP.reject(ERROR_TYPE.ERROR_CLOSING_FILE);
closeP.resolve();
});
return closeP.promise;
}

Related

Error: NJS-012: encountered invalid bind data type in parameter 2

Even though I have searched for the solution of this error and i found some answers but none of them helped me fix this error, Error: NJS-012: encountered invalid bind data type in parameter 2.Maybe, one error can occur in a different scenarios.
Stored procedure definition
create or replace PROCEDURE SP_MEAL_GETMEALTYPES
(
p_DataSource OUT Sys_RefCursor
)
AS
BEGIN
OPEN p_DataSource FOR
select mealtypeid,description from mealtypes;
END;
File name: menusStoredProc.js
"use strict";
var dbParams = require('../../oracle/dbParams');
function storedProcs() {
this.SP_USER_GETMENUS = {
name: 'sp_meal_getmealtypes',
params: {
dataSource: {val: null, type: dbParams.CURSOR, dir: dbParams.BIND_OUT}
},
resultSetColumns: ['mealTypeId','description']
}
}
module.exports = storedProcs;
File name: menus.js
var express = require('express');
var MenusStoreProc = require('../storedProcedures/menusStoredProc');
var oraDbAssist = require('../../oracle/oracleDbAssist');
var router = express.Router();
router.get('/getmenus', (req, res, next) => {
var sp = new MenusStoreProc().SP_USER_GETMENUS;
oraDbAssist.getConnection(function (err, conn) {
if (err)
return console.log('Connecting to db failed - ' + err);
oraDbAssist.executeSqlWithConn(sp, false, conn, function (err, menus) {
if (err)
return console.log('Executing ' + sp.name + ' failed - ' + err);
res.status(200).json(JSON.stringify(menus));
});
});
});
module.exports = router;
Function definition added - executeSqlWithConn
function executeSqlWithConn(sp, autoCommit, connection, next) {
var sql = createProcedureSqlString(sp.name, sp.params);
var params = buildParams(sp.params);
connection.execute(sql, params, {autoCommit: autoCommit}, function(err, result) {
if (err) {
next(err, null);
return;
}
var allRows = [];
var numRows = 50; // number of rows to return from each call to getRows()
for(var attributeName in result.outBinds) {
if(result.outBinds[attributeName] && result.outBinds[attributeName].metaData) { // db response is a result set
function fetchRowsFromResultSet(pResultSet, pNumRows) {
pResultSet.getRows(pNumRows, function(readErr, rows) {
if(err) {
pResultSet.close(function (err) { // always close the result set
next(readErr);
});
return;
}
allRows.push(rows);
if (rows.length === pNumRows) {
fetchRowsFromResultSet(result.outBinds[attributeName], numRows);
return;
}
var allRowsResult = Array.prototype.concat.apply([], allRows);
generateJsonFromDbResultSet(pResultSet.metaData, allRowsResult, sp, function(resultSet) {
pResultSet.close(function (err) { // always close the result set
next(null, resultSet);
});
});
});
}
fetchRowsFromResultSet(result.outBinds[attributeName], numRows);
return;
}
}
next(null, result.outBinds);
});
}
Function definition added - buildParams
function buildParams(params) {
for(var attributeName in params) {
params[attributeName].val = typeof params[attributeName].val === 'undefined' ? null : params[attributeName].val;
if(params[attributeName].type.is(dbParams.DATE))
params[attributeName].val = params[attributeName].val ? new Date(params[attributeName].val) : null;
params[attributeName].type = params[attributeName].type.value;
params[attributeName].dir = params[attributeName].dir.value;
}
return params;
}
Any help, dear members ?

How to get code to execute in order in node.js

I am trying to finish my script, but for some reason i don't know, it refuses to execute in the order i put it in.
I've tried placing a 'wait' function between the JoinRequest update function and the following code, but when run, it acts as if the function call and wait function were the other way round, countering the point of the wait().
const Roblox = require('noblox.js')
var fs = require('fs');
var joinRequests = []
...
function wait(ms) {
var d = new Date();
var d2 = null;
do { d2 = new Date(); }
while(d2-d < ms*1000);
};
...
function updateJReqs() {
Roblox.getJoinRequests(4745601).then((array) => {
var i;
var final = [];
for(i = 0; i < array.length; i++) {
final.push(array[i].username);
};
if(final === '') {
final = '-None';
};
joinRequests = final
console.log('Updated join requests.')
});
}
function check() {
setTimeout(() => {
fs.readFile('Request.txt',encoding = 'utf-8', function(err, data) {
if (err) {
check();
} else {
updateJReqs(); //for some reason this function is executed alongside the below, not before it.
// Tried putting wait(x) in here.
console.log('Request received: ' + data)
var solution = joinRequests
console.log('Fuffiling request with ' + solution)
fufillRequest(solution)
fs.unlink('Request.txt', function(err) {
if(err) throw err;
});
check();
}
});
}, 400)
}
check();
The script is supposed to wait until a file is created (accomplished), update the list of join requests (accomplished) and then create a new file with the list of join requests in(not accomplished).
if I understand your code you work with async code, you need to return a promise in updateJReqs and add a condition of leaving from the function because you have an infinite recursion
function updateJReqs() {
return new Promise(resolve => {
Roblox.getJoinRequests(4745601).then((array) => {
var i;
var final = [];
for(i = 0; i < array.length; i++) {
final.push(array[i].username);
};
if(final === '') {
final = '-None';
};
joinRequests = final
console.log('Updated join requests.')
resolve();
});
}
}
async function check() {
setTimeout(() => {
fs.readFile('Request.txt',encoding = 'utf-8', function(err, data) {
if (err) {
await check();
} else {
await updateJReqs();
// Tried putting wait(x) in here.
console.log('Request received: ' + data)
var solution = joinRequests
console.log('Fuffiling request with ' + solution)
fufillRequest(solution)
fs.unlink('Request.txt', function(err) {
if(err) throw err;
});
// you dont have an exit from your function check();
return 'Success';
}
});
}, 400)
}
check().then(res => console.log(res));

Node js Promises with recursive function

I want to read the all (text) files from a specific directory and it's all subdirecoty recursively.. I am able to read the file and append the result to a global variable. but i want to access the variable at the end of all operation. I am trying with promises but i am unable to access it. please help
var file_path = `C:\\Users\\HP\\Desktop\\test_folder`;
const fs = require('fs');
var final_array = [];
let getFolderTree = function(file_path) {
return new Promise(function(resolve, reject) {
fs.readdir(file_path, function(err, folders) {
if (err) {
console.log("error reading folder :: " + err);
} else {
if (folders.length !== 0) {
for (let i = 0; i < folders.length; i++) {
if (folders[i].endsWith("txt")) {
let text_file_path = file_path + `\\` + folders[i];
fs.readFile(text_file_path, function(error_read, data) {
if (error_read) {
console.log("error reading " + error_read);
} else {
return resolve(final_array.push(data));// want to access final_array at the end of all operations
}
});
} else {
let current_path = file_path + `\\` + folders[i];
getFolderTree(current_path);
}
}
}
}
});
});
}
getFolderTree(file_path).then(function() {
console.log(final_array); // this is not working
});
I think i have found the solution but I am still confused about how it works.
I took reference from another code and able to figure out some how.
var fs = require('fs');
var path = require('path');
let root_path = "C:\\Users\\HP\\Desktop\\test_folder";
function getAllDirectoriesPath(current_path) {
var results = [];
return new Promise(function (resolve, reject) {
fs.readdir(current_path, function (erro, sub_dirs) {
if (erro) {
console.log(error);
} else {
let no_of_subdir = sub_dirs.length;
if (!no_of_subdir) {
return resolve(results);
} else {
sub_dirs.forEach(function (dir) {
dir = path.resolve(current_path, dir);
fs.stat(dir, function (err, stat) {
if (stat && stat.isDirectory()) {
getAllDirectoriesPath(dir).then(function (res) {
results = results.concat(res);
if (!--no_of_subdir) {
resolve(results);
}
});
} else {
fs.readFile(dir, function (err, data) {
results.push(data.toString());
if (!--no_of_subdir) {
resolve(results);
}
});
}
});
});
}
}
});
});
}
getAllDirectoriesPath(root_path).then(function (results) {
console.log(results);
});

Node stream hangs when emitting error

I have a stream that's checking a CSV. It works fine except when emitting an error it hangs even after I send the response back.
export function ValidateCSV(options) {
let opt = options;
if (!(this instanceof ValidateCSV)) return new ValidateCSV(opt);
if (!opt) opt = {};
opt.objectMode = true;
opt.highWaterMark = 1000000;
Transform.call(this, opt);
}
util.inherits(ValidateCSV, Transform);
ValidateCSV.prototype.destroy = function () {
this.readable = false;
this.writable = false;
this.emit('end');
};
ValidateCSV.prototype._transform = function (chunk, encoding, done) {
// Do some stuff to the chunk
// Emit error
if (required.length > 0) {
this.emit('error', `The following columns are required: ${required.join(', ')}`);
}
done();
};
I was able to fix it by adding a destroy method but it is still slow and hangs for a few seconds with it. Is there a better way to end/destroy a Transform stream?
ValidateCSV.prototype.destroy = function () {
this.readable = false;
this.writable = false;
this.emit('end');
};
EDIT:
Here is how I'm using the stream with busboy:
function processMultipart(req, res) {
const userId = req.query._userId;
const busboy = new Busboy({ headers: req.headers, limits: { files: 1 } });
const updateId = req.params.id;
// Transform stream to validate the csv
const validateCSV = new ValidateCSV();
validateCSV
.on('finish', () => {
// Process the csv
})
.on('error', (er) => {
//Do some logging
res.status(500).json(er).end();
});
// Multipart upload handler
busboy
.on('file', (fieldname, file, filename) => {
dataset.name = fieldname.length > 0 ?
fieldname : filename.substr(0, filename.indexOf('.csv'));
file
.on('error', (er) => {
//Send Error
})
.on('end', () => {
// Save dataset to mongo
if (dataset._update) {
res.status(200).json(dataset).end();
} else {
Dataset.create(dataset, (er) => {
if (er) {
res.status(500).json(er).end();
} else {
res.status(200).json(dataset).end();
}
});
}
}).pipe(validateCSV);
});
req.pipe(busboy);
}

Maximum open cursors exceeded in Node.js

I'm working on some code to process a csv in Node.js and store it in an Oracle database. So far, things are going well, but with large amounts of rows in the csv, I get "ORA-01000: maximum open cursors exceeded." I am connecting to Oracle once at the beginning of the script. For each record in the csv, I am preforming multiple SELECTs, INSERTs, and DELETEs and then moving on to the next entry to process all using the same connection. At the end, I close the connection. One thought I had was to get a new connection each time from a pool, but I read other posts saying I should use one connection. Perhaps I need to set a special setting to handle all these queries on one connection?
The script is kind of long, so I will post the important parts...I can post more if needed. Using Q, csvtojson, and oracledb.
...
var conn = null;
function connect() {
var deferred = Q.defer();
oracledb.outFormat = oracledb.OBJECT;
oracledb.getConnection(
{
user : 'foo',
password : 'bar',
connectString : 'foo.bar/bar',
},
function(err, c) {
if(err) deferred.reject(new Error(err));
// set global connection
conn = c;
deferred.resolve();
}
);
return deferred.promise;
}
function closeConnection(conn) {
var deferred = Q.defer();
conn.release(function(err){
if(err) deferred.reject(err);
else return deferred.resolve();
});
return deferred.promise;
}
/* Process All Data, Promise Loop */
function process(data) {
return processEntry(data.shift()).then(function(){
console.log('Finished processing entry.');
return data.length > 0 ? process(data) : true;
});
}
/* Process an Entry */
function processEntry(entry) {
var deferred = Q.defer();
var data = {};
entryExists(entry)
.then(function(result) {
if(result) return entryLogicError(null, 'Entry exists. Skipping.');
else return getUserFromReleaseCode(entry);
})
.then(function(result) {
if(typeof result != 'undefined' && result.length > 0) {
data.user = result[0];
return getPanelCode(entry);
}
else return entryLogicError(entry, 'No valid release code.');
})
.then(function(result){
if(typeof result != 'undefined' && result.length > 0) {
return createHeader(result[0].foo, result[0].bar);
}
else return entryLogicError(entry, 'No valid panel code.');
})
... More of the same kind of statements processing the entry ...
.then(function() {
return logEntry(entry);
})
.catch(function(error) { console.log("DATA ERROR: " + error) })
.done(function(){
deferred.resolve();
});
return deferred.promise;
}
function entryLogicError() {
// logs entry to be corrected, return a rejected promise to go to the next entry
}
/* Check if record has been processed */
function entryExists(entry) {
var deferred = Q.defer();
var foo = entry[ENTRY_CONST.FOO];
var bar = entry[ENTRY_CONST.BAR];
conn.execute(
'SELECT * FROM TBL_FOO ' +
'WHERE FOO = :foo AND ' +
'BAR = :bar',
[foo, bar],
function(err, result) {
if(err) deferred.reject(err);
else {
deferred.resolve(result.rows.length > 0);
}
});
return deferred.promise;
}
/* Get User from Release Code */
function getUserFromReleaseCode(entry) {
var deferred = Q.defer();
var foo = entry[ENTRY_CONST.FOO];
conn.execute(
'SELECT * FROM TBL_BAR ' +
'WHERE FOO = :foo',
[foo],
function(err, result) {
if(err) deferred.reject(err);
else {
deferred.resolve(result.rows);
}
});
return deferred.promise;
}
/* Create Header */
function createHeader(foo, bar) {
var deferred = Q.defer();
conn.execute(
'BEGIN INSERT INTO TBL_DR_FOO VALUES (NULL,:foo, :bar,' +
'1,NULL,1,NULL,NULL,NULL,NULL) RETURNING DR_FOO_ID INTO :DR_FOO_ID; COMMIT; END;',
{ foo: foo,
bar: bar,
DR_FOO_ID: { dir: oracledb.BIND_OUT, type: oracledb.NUMBER }
},
function(err, result) {
if(err) deferred.reject(err);
else deferred.resolve(result.outBinds);
});
return deferred.promise;
}
function cleanHistory() {
// statement that deletes records from a certain date using conn.execute(..)
}
/* Main */
connect().then(function(){
var converter = new Converter({ noheader: false });
converter.on('end_parsed', function(data) {
process(data).then(function(){
return cleanHistory();
})
.then(function(){
return closeConnection();
}).done();
});
fs.createReadStream(batch).pipe(converter);
}, function(err){
return console.error(err);
});

Resources