Trying to add data in unsupported state [duplicate] - node.js

This question already has answers here:
Trying to add data in unsupported state at Cipher.update
(3 answers)
Closed 2 years ago.
This is the code from my site. The first time I ran it, it worked and showed the data, but every run after that shows the error "Trying to add data in unsupported state". Please help me.
var connection = require("./dbconnection");
var keys = require('./keys');
var mysql = require('mysql');
var crypto = require('crypto');
var algoritm = "aes-256-ctr";
var nameCipher = crypto.createCipher(algoritm, keys.name);
var passwordCipher = crypto.createCipher(algoritm, keys.password);
var nameDecipher = crypto.createDecipher(algoritm, keys.name);
var passwordDecipher = crypto.createDecipher(algoritm, keys.password);
function encrypt(text, type) {
if (type === 1) {
var nameC = nameCipher.update(text, "utf8", "hex");
nameC += nameCipher.final("hex");
return nameC;
} else if (type === 2) {
var passwordC = passwordCipher.update(text, "utf8", "hex");
passwordC += passwordCipher.final("hex");
return passwordC;
}
}
function decrypt(text, type) {
if (type === 1) {
var nameDec = nameDecipher.update(text, "hex", "utf8");
nameDec += nameDecipher.final("utf8");
return nameDec;
} else if (type === 2) {
var passwordDec = passwordDecipher.update(text, "hex", "utf8");
passwordDec += passwordDecipher.final("utf8");
return passwordDec;
}
}
function logIn(name, password) {
var eName = encrypt(name, 1);
var ePassword = encrypt(password, 2);
var dData;
console.dir(eName);
console.dir(ePassword);
connection.getConnection(function (err, connection) {
connection.query("SELECT * FROM students WHERE name=\"" + eName + "\" AND password=\"" + ePassword + "\"",
function (err, data) {
if (err) throw err;
console.dir(data);
dData = data;
})
})
}
exports.logIn = logIn;

A cipher object is single-use: once final() has been called it cannot accept any more data, which is why only your first run works. Always create a new cipher before updating. You should create them inside the encrypt and decrypt functions.
function encrypt(text, type) {
var nameCipher = crypto.createCipher(algoritm, keys.name); //<---- here
var passwordCipher = crypto.createCipher(algoritm, keys.password); //<---- here
if (type === 1) {
var nameC = nameCipher.update(text, "utf8", "hex");
nameC += nameCipher.final("hex");
return nameC;
} else if (type === 2) {
var passwordC = passwordCipher.update(text, "utf8", "hex");
passwordC += passwordCipher.final("hex");
return passwordC;
}
}
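The same fix applies on the decrypt side: create the deciphers inside decrypt as well. A minimal sketch of the matching change (note also that crypto.createCipher/createDecipher are deprecated in modern Node in favor of the IV-based createCipheriv/createDecipheriv):
function decrypt(text, type) {
    // Fresh decipher objects on every call, mirroring the encrypt fix.
    var nameDecipher = crypto.createDecipher(algoritm, keys.name);
    var passwordDecipher = crypto.createDecipher(algoritm, keys.password);
    if (type === 1) {
        var nameDec = nameDecipher.update(text, "hex", "utf8");
        nameDec += nameDecipher.final("utf8");
        return nameDec;
    } else if (type === 2) {
        var passwordDec = passwordDecipher.update(text, "hex", "utf8");
        passwordDec += passwordDecipher.final("utf8");
        return passwordDec;
    }
}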

Related

Why doesn't my async function return any result?

I wrote this small program to fetch data. However, this happens asynchronously. Since I nonetheless need to use the function holeVertretung(kzl) as a function in another module, I'd like to get a return value which I can eventually pass on.
Excuse my spaghetti code (I usually prettify the code when I am done with my task ...).
Credentials are stored in a separate file and are therefore not found in this one.
I'd like to end up with "vertreter" as the return value.
Thank you in advance.
const node = require("deasync");
const DSB = require('dsbapi');
const tabletojson = require('tabletojson');
const https = require('https');
const cred = require("./vertrCred");
const dsb = new DSB(cred["dsb"]["user"], cred["dsb"]["passw"]); //Sanitized - no Credentials here
//Stackoverflow 2332811
String.prototype.capitalize = function(lower) {
return (lower ? this.toLowerCase() : this).replace(/(?:^|\s)\S/g, function(a) { return a.toUpperCase(); });
};
function holePlan(kuerzel) {
dsb.fetch()
.then(data => {
const timetables = DSB.findMethodInData('timetable', data);
const tiles = DSB.findMethodInData('tiles', data);
var tilesStr = JSON.stringify(tiles["data"][0]["url"]);
var url = JSON.parse(tilesStr);
https.get(url, (resp) => {
let data = '';
resp.on('data', (chunk) => {
data += chunk;
});
resp.on('end',() => {
var tableasjson = tabletojson.convert(data);
var erstetab = tableasjson[0];
var zweitetab = tableasjson[1];
var drittetab = tableasjson[2];
var viertetab = tableasjson[3];
var fuenftetab = tableasjson[4];
var sechstetab = tableasjson[5];
var siebtetab = tableasjson[6];
var achtetab = tableasjson[7];
if (typeof kuerzel === "undefined")
{
var regenechse = '(Aaa|Aaa[A-Za-z?]|[A-Za-z?]Aaa)';
}
else {
var name = kuerzel.capitalize(true);
var regenechse = '('+name+'|'+name+'[A-Za-z?]|[A-Za-z?]'+name+')';
}
const regex = new RegExp(regenechse,'g');
var sammel = Object.assign(drittetab,fuenftetab);
var z = 0;
var vertreter = {};
var y = JSON.parse(JSON.stringify(sammel));
for (var i = 0; i < y.length; i++) {
if (typeof y[i].Vertreter != 'undefined' && y[i].Vertreter.match(regex)) {
z += 1;
vertreter[z] = y[i];
}
}
if (z == 0) {
// console.log("Es gibt nichts zu vertreten");
}
else {
//console.log("Es werden "+z+" Stunden vertreten");
return (vertreter);
}
});
})
})
.catch(e => {
// An error occurred :(
console.log(e);
});
}
//Stackoverflow
function warte(promise) {
var done = 0;
var result = null;
promise.then(
function (value) {
done = 1;
result = value;
return (value);
},
function (reason) {
done = 1;
throw reason;
}
);
while (!done)
node.runLoopOnce();
return (result);
}
function holeVertretung(kzl) {
var aufgabe = new Promise((resolve,reject) => {
setTimeout(resolve,1000,holePlan(kzl));
});
var ergebnis = warte(aufgabe);
if (typeof ergebnis === "undefined") {
console.log("Mist");
}
else {
console.log(ergebnis);
}
return ergebnis;
}
holeVertretung("Aaa");
That's not the right way to work with promises. A busy-wait loop like that defeats the whole purpose of using promises. Instead, return the value from the promise, and use async/await like this:
function warte(promise) {
var done = 0;
var result = null;
return promise.then(
...
}
async function holeVertretung(kzl) {
var aufgabe = new Promise((resolve, reject) => {
setTimeout(resolve, 1000, holePlan(kzl));
});
var ergebnis = await warte(aufgabe);
...
If async/await does not work for some reason, use a then clause:
warte(aufgabe).then(value => {
var ergebnis = value;
});
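For the question's concrete code, the cleanest fix is to make holePlan itself return its promise chain and to wrap the https.get callback in a Promise, so there is nothing to busy-wait on at all. A minimal sketch, reusing the dsb, https and tabletojson objects from the question (the row-filtering loop is elided):
function holePlan(kuerzel) {
    return dsb.fetch()
        .then(data => {
            const tiles = DSB.findMethodInData('tiles', data);
            const url = JSON.parse(JSON.stringify(tiles["data"][0]["url"]));
            // Wrap the callback-style download in a Promise so the HTML
            // travels down the chain instead of dying in a callback.
            return new Promise((resolve, reject) => {
                https.get(url, resp => {
                    let html = '';
                    resp.on('data', chunk => { html += chunk; });
                    resp.on('end', () => resolve(html));
                }).on('error', reject);
            });
        })
        .then(html => {
            const vertreter = {};
            // ... fill vertreter from tabletojson.convert(html) exactly as in the question ...
            return vertreter;
        });
}

async function holeVertretung(kzl) {
    // await works now because holePlan returns a promise.
    const ergebnis = await holePlan(kzl);
    console.log(ergebnis);
    return ergebnis;
}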

How to handle the node server side with react

I'm working on a React and Node project. I'm new to both technologies, and I developed a system for my internship. But I feel like I didn't handle the server side (the Node part) properly. This is my server-side file. It contains almost 700 lines. Do I have to break this file into several files? If so, how exactly should I do it? Can someone give me a suggestion, please? The application works as expected; I just want to clean up the code on the server side. I used the Express framework with Node.
var express = require('express');
var mysql = require('mysql');
var _ = require('underscore');
var crypto = require('crypto');
var app = express();
var connections = [];
var title = 'Fishery Logistics';
var flds = [];
var currentFld = '';
var fldDetails = [];
var lfrSaved = false;
var userSiteInfoSaved = false;
var userList = [];
var userTypes = [];
var validUserName = false;
var fldNumbers = [];
var productCodes = [];
var containerTypes = [];
var areaCodes = [];
var fldRows = 0;
var fldInfo = {};
var productInfo = {};
var weighInList = {};
var weighInSaved = false;
var weighInNumbers = [];
var weighInWeights = [];
var fldHeaderInfo = [];
var weighInHeaderInfo = [];
var fldWeights = [];
var userDeleted = false;
// From where express should access our files
app.use(express.static('./public'));
app.use(express.static('./node_modules/bootstrap/dist'));
var server = app.listen(process.env.PORT || 3000);
// Creating a socket server which is also listening to localhost:<port>
var io = require('socket.io').listen(server);
var connection = mysql.createConnection({
host: "localhost",
user: "root",
password: '',
database: 'fish_landing'
});
io.sockets.on('connection', function(socket) {
// Load fld list relevant to a LFR
socket.on('lfrFldListLoad', function(payload) {
var lfr_id = payload.lfrId;
var sql_lfr_fld_list = 'SELECT F.fld_id, F.fld_number, DATE_FORMAT(F.landed_date, "%Y-%m-%d") AS "landed_date", F.vessel_name, F.port_name, F.transport_company_name, F.driver_name, truck_number, FS.status ' +
'FROM fld F, fld_status FS, lfr L ' +
'WHERE F.status_id = FS.status_id ' +
'AND F.lfr_id = L.lfr_id ' +
'AND L.lfr_id = ' + lfr_id +
' ORDER BY F.fld_id DESC';
connection.query(sql_lfr_fld_list, function(error, result) {
if (error) {
console.log(error);
}
if (result.length !== 0) {
flds = result;
io.sockets.emit('lfrFldListLoaded', flds);
} else {
console.log('No Records Found')
}
});
});
// Load fld with all the details
socket.on('fldViewLoad', function(payload) {
var fld_id = payload.fldId;
var sql_fld_by_id =
'SELECT F.fld_id, F.fld_number, DATE_FORMAT(F.landed_date, "%Y-%m-%d") AS "landed_date", DATE_FORMAT(F.landed_date, "%T") AS "landed_time", ' +
'F.port_name, F.vessel_name, F.skipper_name, F.transport_company_name, F.truck_number, F.driver_name, F.driver_license, F.vehicle_clean, ' +
'F.vehicle_refrigerated, F.containers_clean, F.containers_iced, F.skipper_signature, F.supervisor_signature, F.lfr_staff_signature, ' +
'F.skipper_signature_time, F.supervisor_signature_time, F.lfr_staff_signature_time, F.comment, FS.status, CONCAT(U.first_name, " ", U.last_name) AS "full_name", ' +
'DATE_FORMAT(F.fld_created_time, "%Y-%m-%d") AS "fld_created_date", DATE_FORMAT(F.fld_created_time, "%T") AS "fld_created_time" ' +
'FROM fld F, fld_status FS, user_personal_info U ' +
'WHERE F.status_id = FS.status_id ' +
'AND F.fld_created_user_id = U.user_id ' +
'AND F.fld_id = "' + fld_id + '"';
var sql_fld_detail_list =
'SELECT FD.fld_detail_id, SP.species, PS.state, C.container, FD.no_of_containers, FD.fld_id ' +
'FROM fld_details FD, species SP, processed_state PS, container C, fld F ' +
'WHERE F.fld_id = FD.fld_id ' +
'AND SP.species_id = FD.species_id ' +
'AND PS.state_id = FD.state_id ' +
'AND C.container_id = FD.container_id ' +
'AND F.fld_id = "' + fld_id + '"';
connection.query(sql_fld_by_id, function(errorFld, resultFld) {
if (errorFld) {
console.log(errorFld);
}
if (resultFld.length !== 0) {
currentFld = resultFld;
connection.query(sql_fld_detail_list, function(errorFldDetails, resultFldDetails) {
if (errorFldDetails) {
console.log(errorFldDetails);
} else {
fldDetails = resultFldDetails;
io.sockets.emit('fldViewLoaded', currentFld, fldDetails);
}
});
} else {
console.log('Fld Length Error')
}
});
});
// Save company info
socket.on('saveCompanyInfo', function(payload) {
var companyName = payload.companyName;
var registrationNo = payload.registrationNo;
var landlineNo = payload.landlineNo;
var mobileNo = payload.mobileNo;
var emailAddress = payload.emailAddress;
var companyLogo = payload.companyLogo;
var jsonFile = payload.jsonFile;
var sql_save_company_info =
`INSERT INTO lfr (lfr_name, registration_number, landline_number, mobile_number, email_address, lfr_logo, json_file)
VALUES ('${companyName}', '${registrationNo}', '${landlineNo}', '${mobileNo}', '${emailAddress}', '${companyLogo}', '${jsonFile}')`;
connection.query(sql_save_company_info, function(errorLfrInfo, resultLfrInfo) {
if (errorLfrInfo) {
lfrSaved = false;
console.log(errorLfrInfo);
}
if (resultLfrInfo) {
lfrSaved = true;
} else {
lfrSaved = false;
}
io.sockets.emit('companyInfoSaved', lfrSaved);
});
});
// Load user list for lfr
socket.on('userListLoad', function(payload) {
var lfrId = payload.lfrId;
var load_user_list =
`SELECT USI.user_id, USI.user_name, UT.user_type, USI.email_address, USI.passcord, US.status_type
FROM user_site_info USI, user_types UT, user_status US
WHERE USI.user_type_id = UT.user_type_id
AND USI.user_status_id = US.user_status_id
AND lfr_id = ${lfrId}`;
connection.query(load_user_list, function(errorUserList, resultUserList) {
if (errorUserList) {
console.log(errorUserList);
} else {
userList = resultUserList;
io.sockets.emit('userListLoaded', userList);
}
});
});
// Load organization form
socket.on('loadOrganization', function() {
io.sockets.emit('organizationLoaded');
});
// Load main form
socket.on('loadMain', function() {
io.sockets.emit('mainLoaded');
});
// Delete user
socket.on('deleteUser', function(payload) {
var lfrId = payload.lfrId;
var userId = payload.userId;
var delete_user =
`UPDATE user_site_info
SET user_status_id = '2'
WHERE user_id = ${userId}`;
var load_user_list =
`SELECT USI.user_id, USI.user_name, UT.user_type, USI.email_address, USI.passcord, US.status_type
FROM user_site_info USI, user_types UT, user_status US
WHERE USI.user_type_id = UT.user_type_id
AND USI.user_status_id = US.user_status_id
AND lfr_id = ${lfrId}`;
connection.query(delete_user, function(error, result) {
if (error) {
console.log(error);
}
if (result) {
connection.query(load_user_list, function(errorUserList, resultUserList) {
if (errorUserList) {
console.log(errorUserList);
} else {
userDeleted = true;
userList = resultUserList;
io.sockets.emit('userDeleted', userDeleted, userList);
}
});
} else {
userDeleted = false;
}
});
});
// Delete weigh in
socket.on('deleteWeighIn', function(payload) {
var weighInId = payload.weighInId;
var sql_delete_weigh_in =
`DELETE FROM weigh_in
WHERE weigh_in_id = ${weighInId}`;
var sql_delete_weigh_in_details =
`DELETE FROM weigh_in_details
WHERE weigh_in_id = ${weighInId}`;
connection.query(sql_delete_weigh_in, function(errorDeleteWightIn, resultDeleteWightIn) {
if (errorDeleteWightIn) {
console.log(errorDeleteWightIn);
}
connection.query(sql_delete_weigh_in_details, function(errorDeleteWightInDetails, resultDeleteWightInDetails) {
if (errorDeleteWightInDetails) {
console.log(errorDeleteWightInDetails);
}
if (resultDeleteWightInDetails) {
io.sockets.emit('weighInDeleted');
} else {
console.log('Weigh-In Deletion Error');
}
});
});
});
// Reset weigh-in list
socket.on('resetWeighInList', function() {
io.sockets.emit('weighInListReset');
});
// Save user site info
socket.on('saveUserSiteInfo', function(payload) {
var userName = payload.userName;
var userTypeId = payload.userType;
var emailAddress = payload.emailAddress;
var passcord = crypto.createHash('sha1').update(payload.passcord).digest("hex");
var userStatusId = 1;
var lfrId = payload.lfrId;
var sql_user_site_info =
`INSERT INTO user_site_info (user_name, user_type_id, email_address, passcord, user_status_id, lfr_id)
VALUES ('${userName}','${userTypeId}', '${emailAddress}', '${(passcord)}','${userStatusId}', '${lfrId}')`;
var load_user_list =
`SELECT USI.user_id, USI.user_name, UT.user_type, USI.email_address, USI.passcord, US.status_type
FROM user_site_info USI, user_types UT, user_status US
WHERE USI.user_type_id = UT.user_type_id
AND USI.user_status_id = US.user_status_id
AND lfr_id = ${lfrId}`;
connection.query(sql_user_site_info, function(errorUserInfo, resultUserInfo) {
if (errorUserInfo) {
userSiteInfoSaved = false;
console.log(errorUserInfo);
}
if (resultUserInfo) {
userSiteInfoSaved = true;
connection.query(load_user_list, function(errorUserList, resultUserList) {
if (errorUserList) {
console.log(errorUserList);
} else {
userList = resultUserList;
io.sockets.emit('userSiteInfoSaved', userSiteInfoSaved, userList);
}
});
} else {
console.log('User Info Saving Error')
}
});
});
// Save weigh in info
socket.on('saveWeighInRecord', function(payload) {
var fldId = payload.fldId;
var weighInId = payload.fldId;
var productId = payload.productId;
var containerId = payload.containerId;
var amount = payload.amount;
var netWeight = payload.netWeight;
var areaId = payload.areaId;
var userId = payload.userId;
// Check whether this record is the first for the weigh-in id
var sql_check_weigh_in =
`SELECT * FROM weigh_in
WHERE weigh_in_id = '${weighInId}'`;
connection.query(sql_check_weigh_in, function(errorCheck, resultCheck) {
if (errorCheck) {
console.log(errorCheck);
}
// If there is no record related to the weigh-in id, create the weigh-in id
if (resultCheck.length === 0) {
var sql_weigh_in_header =
`INSERT INTO weigh_in (weigh_in_id, fld_id, logged_user_id, created_time)
VALUES('${weighInId}', '${fldId}', '${userId}', NOW())`;
connection.query(sql_weigh_in_header, function(errorHeader, resultHeader) {
if (errorHeader) {
console.log(errorHeader);
}
});
}
});
var sql_weigh_in_record =
`INSERT INTO weigh_in_details (product_id, container_id, number_of_containers, net_weight, area_id, weigh_in_id)
VALUES ('${productId}', '${containerId}','${amount}','${netWeight}','${areaId}','${weighInId}')`;
var sql_load_records =
`SELECT P.product_code, C.container, WD.number_of_containers, WD.net_weight, S.species_code, PS.state_code, G.grade, A.area_code
FROM product P, container C, species S, processed_state PS, grade G, area A, weigh_in_details WD
WHERE WD.product_id = P.product_id
AND WD.container_id = C.container_id
AND WD.area_id = A.area_id
AND P.species_id = S.species_id
AND P.state_id = PS.state_id
AND P.grade_id = G.grade_id
AND weigh_in_id = '${weighInId}'
ORDER BY weigh_in_detail_id ASC`;
connection.query(sql_weigh_in_record, function(errorRecord, resultRecord) {
if (errorRecord) {
console.log(errorRecord);
}
if (resultRecord) {
connection.query(sql_load_records, function(errorList, resultList) {
if (errorList) {
console.log(errorList);
} else {
weighInList = resultList;
io.sockets.emit('weighInRecordSaved', weighInList);
}
});
} else {
console.log('Weigh In Saving Error')
}
});
});
// Load user types
socket.on('loadUserTypes', function() {
var sql_user_types =
`SELECT user_type_id, user_type FROM user_types
ORDER BY user_type ASC`;
connection.query(sql_user_types, function(error, result) {
if (error) {
console.log(error);
}
if (result.length !== 0) {
userTypes = result;
io.sockets.emit('userTypesLoaded', userTypes);
} else {
console.log('User Type Error')
}
});
});
// Load weigh-in numbers
socket.on('loadWeighInNumbers', function(payload) {
var lfrId = payload.lfrId
var sql_load_weigh_in =
`SELECT W.weigh_in_id
FROM weigh_in W, fld F
WHERE W.weigh_in_id = F.fld_id
AND F.lfr_id = ${lfrId}`;
connection.query(sql_load_weigh_in, function(error, result) {
if (error) {
console.log(error);
}
if (result.length !== 0) {
weighInNumbers = result;
io.sockets.emit('weighInNumbersLoaded', weighInNumbers);
} else {
console.log('Weigh-In Error')
}
});
});
// Load fld, weigh-in weights
socket.on('loadWeighInWeights', function(payload) {
// The weigh_in table's weigh_in_id and the fld table's fld_id are the same
var weighInId = payload.weighInId;
var sql_load_fld_weights =
`SELECT S.species, S.species_code, SUM(FD.no_of_containers * C.content_weight) AS 'fld_weight'
FROM fld_details FD, species S, container C
WHERE S.species_id = FD.species_id
AND FD.container_id = C.container_id
AND FD.fld_id = '${weighInId}'
GROUP BY S.species_code, 'fld_weight'
ORDER BY S.species_code`;
var sql_load_weigh_in_weights =
`SELECT S.species, S.species_code, SUM(WD.net_weight) AS 'weigh_in_weight'
FROM weigh_in_details WD, species S, product P, container C
WHERE P.species_id = S.species_id
AND WD.product_id = P.product_id
AND WD.container_id = C.container_id
AND WD.weigh_in_id = '${weighInId}'
GROUP BY S.species_code, 'weigh_in_weight'
ORDER BY S.species_code`;
var sql_load_fld_info =
`SELECT DATE_FORMAT(F.fld_created_time, "%Y-%m-%d") AS "fld_created_date", CONCAT(U.first_name, " ", U.last_name) AS "fld_created_by", COUNT(FD.fld_id) AS "fld_records"
FROM fld F, user_personal_info U, fld_details FD
WHERE F.fld_created_user_id = U.user_id
AND F.fld_id = FD.fld_id
AND F.fld_id = '${weighInId}'`;
var sql_load_weigh_info =
`SELECT DATE_FORMAT(created_time, "%Y-%m-%d") AS "weigh_in_created_date", CONCAT(U.first_name, " ", U.last_name) AS "weigh_in_created_by", COUNT(WD.weigh_in_id) AS "weigh_in_records"
FROM weigh_in W, user_personal_info U, weigh_in_details WD
WHERE W.logged_user_id = U.user_id
AND W.weigh_in_id = WD.weigh_in_id
AND W.weigh_in_id = '${weighInId}'`;
connection.query(sql_load_fld_weights, function(errorFldWeights, resultFldWeights) {
if (errorFldWeights) {
console.log(errorFldWeights);
}
if (resultFldWeights.length !== 0) {
connection.query(sql_load_weigh_in_weights, function(errorweighInWeights, resultWeighInWeights) {
if (errorweighInWeights) {
console.log(errorweighInWeights);
}
if (resultWeighInWeights.length !== 0) {
connection.query(sql_load_fld_info, function(errorFldInfo, resultFldInfo) {
connection.query(sql_load_weigh_info, function(errorWeighInInfo, resultWeighInInfo) {
fldWeights = resultFldWeights;
weighInWeights = resultWeighInWeights;
fldHeaderInfo = resultFldInfo;
weighInHeaderInfo = resultWeighInInfo;
io.sockets.emit('weighInWeightsLoaded', fldWeights, weighInWeights, fldHeaderInfo, weighInHeaderInfo);
});
});
} else {
console.log('Weigh-In Weights Error')
}
});
}
});
});
// Load weigh in combo boxes
socket.on('loadWeighInComboBoxes', function(payload) {
var lfr_id = payload.lfr_id;
var sql_load_fld_numbers = `
SELECT fld_id, fld_number FROM fld WHERE lfr_id = '${lfr_id}'
ORDER BY fld_number DESC `;
var sql_load_product_codes = `
SELECT product_id, product_code FROM product ORDER BY product_code ASC `;
var sql_load_containers = `
SELECT container_id, container FROM container WHERE lfr_id = ${lfr_id} ORDER BY container ASC`;
var sql_load_area_codes = `
SELECT area_id, area_code FROM area ORDER BY area_code ASC `;
connection.query(sql_load_fld_numbers, function(errorFld, resultFld) {
if (errorFld) {
console.log(errorFld);
}
connection.query(sql_load_product_codes, function(errorProducts, resultProducts) {
if (errorProducts) {
console.log(errorProducts);
}
connection.query(sql_load_containers, function(errorContainer, resultContainer) {
if (errorContainer) {
console.log(errorContainer);
}
connection.query(sql_load_area_codes, function(errorArea, resultArea) {
if (errorArea) {
console.log(errorArea);
}
fldNumbers = resultFld;
productCodes = resultProducts;
containerTypes = resultContainer;
areaCodes = resultArea;
io.sockets.emit('weighInComboBoxesLoaded', fldNumbers, productCodes, containerTypes, areaCodes);
});
});
});
});
});
// Get fld info and weigh-in records that are relevant to weighInId
socket.on('loadFldWeighInInfo', function(payload) {
var fldId = payload.fldId;
var weighInId = payload.fldId;
var sql_get_fld_count =
`SELECT COUNT( * ) AS 'fld_rows'
FROM fld_details WHERE fld_id = '${fldId}'`;
var sql_get_fld_info =
`SELECT DATE_FORMAT(F.fld_created_time, "%Y-%m-%d") AS "fld_created_date", DATE_FORMAT(F.fld_created_time, "%T") AS "fld_created_time", CONCAT(U.first_name, " ", U.last_name) AS "created_by"
FROM fld F, user_personal_info U
WHERE F.fld_created_user_id = U.user_id
AND F.fld_id = '${fldId}'`;
var sql_load_records =
`SELECT P.product_code, C.container, WD.number_of_containers, WD.net_weight, S.species_code, PS.state_code, G.grade, A.area_code
FROM product P, container C, species S, processed_state PS, grade G, area A, weigh_in_details WD
WHERE WD.product_id = P.product_id
AND WD.container_id = C.container_id
AND WD.area_id = A.area_id
AND P.species_id = S.species_id
AND P.state_id = PS.state_id
AND P.grade_id = G.grade_id
AND weigh_in_id = '${weighInId}'
ORDER BY weigh_in_detail_id ASC `;
connection.query(sql_get_fld_count, function(errorFldCount, resultFldCount) {
if (errorFldCount) {
console.log(errorFldCount);
}
connection.query(sql_get_fld_info, function(errorFldInfo, resultFldInfo) {
if (errorFldInfo) {
console.log(errorFldInfo);
}
connection.query(sql_load_records, function(errorList, resultList) {
if (errorList) {
console.log(errorList);
} else {
fldRows = resultFldCount;
fldInfo = resultFldInfo;
weighInList = resultList;
io.sockets.emit('fldWeighInInfoLoaded', fldRows, fldInfo, weighInList);
}
});
});
});
});
// Get product info
socket.on('loadProductInfo', function(payload) {
var productId = payload.productId;
var sql_get_product_info =
`SELECT P.product_code, S.species_code, PS.state_code, G.grade
FROM product P, species S, processed_state PS, grade G
WHERE P.species_id = S.species_id
AND P.state_id = PS.state_id
AND P.grade_id = G.grade_id
AND P.product_id = '${productId}'`;
connection.query(sql_get_product_info, function(error, result) {
if (error) {
console.log(error);
}
if (result.length !== 0) {
productInfo = result;
io.sockets.emit('productInfoLoaded', productInfo);
} else {
console.log('No Records Found')
}
});
});
// Check whether a user name is already taken
socket.on('checkUserName', function(payload) {
var userName = payload.userName;
var sql_check_user_name =
`SELECT * from user_site_info
WHERE user_name = '${userName}'`;
connection.query(sql_check_user_name, function(error, result) {
if (error) {
console.log(error);
}
if (result.length !== 0) {
validUserName = false;
} else {
validUserName = true;
}
io.sockets.emit('userNameChecked', validUserName);
});
});
// socket.emit => emit events that are handled by the client
socket.emit('welcome', {
title: title,
flds: flds,
currentFld: currentFld,
fldDetails: fldDetails,
lfrSaved: lfrSaved,
userSiteInfoSaved: userSiteInfoSaved,
userList: userList,
userTypes: userTypes,
fldNumbers: fldNumbers,
productCodes: productCodes,
containerTypes: containerTypes,
areaCodes: areaCodes,
fldRows: fldRows,
fldInfo: fldInfo,
productInfo: productInfo,
weighInList: weighInList,
weighInNumbers: weighInNumbers,
weighInWeights: weighInWeights,
fldHeaderInfo: fldHeaderInfo,
weighInHeaderInfo: weighInHeaderInfo,
fldWeights: fldWeights,
userDeleted: userDeleted
});
connections.push(socket);
});
console.log(`Fishery Logistics is running at port ${ server.address().port }`);
You should definitely break this into several modules. A rule of thumb is to separate controller modules (the socket handlers) from models (the database queries).
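A minimal sketch of one possible layout, using the lfrFldListLoad handler from the question (the file names and the shared db module are illustrative assumptions, not a prescribed structure). Moving SQL into a model is also a good moment to switch to the mysql driver's ? placeholders, which escape values safely instead of concatenating them into the query string:
// models/fld.js -- all SQL for the fld table lives here (illustrative path)
var connection = require('../db'); // hypothetical module exporting the mysql connection

exports.listForLfr = function(lfrId, cb) {
    var sql =
        'SELECT F.fld_id, F.fld_number, DATE_FORMAT(F.landed_date, "%Y-%m-%d") AS "landed_date", ' +
        'F.vessel_name, F.port_name, F.transport_company_name, F.driver_name, truck_number, FS.status ' +
        'FROM fld F, fld_status FS ' +
        'WHERE F.status_id = FS.status_id AND F.lfr_id = ? ' +
        'ORDER BY F.fld_id DESC';
    connection.query(sql, [lfrId], cb); // the ? placeholder escapes lfrId
};

// controllers/fld.js -- socket handlers only, no SQL
var fldModel = require('../models/fld');

module.exports = function(io, socket) {
    socket.on('lfrFldListLoad', function(payload) {
        fldModel.listForLfr(payload.lfrId, function(error, result) {
            if (error) return console.log(error);
            io.sockets.emit('lfrFldListLoaded', result);
        });
    });
};

// server.js -- one line per controller inside the connection handler
io.sockets.on('connection', function(socket) {
    require('./controllers/fld')(io, socket);
    // ... require('./controllers/users')(io, socket); etc.
});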

how to parse data from two csv in node.js

I am very new to node.js. I have one script that parses a csv file and generates the required output file. Now I want to fetch some column data from another csv at the same time and add those values to the output file.
Script :
var csv = require('csv');
var fs = require('fs');
var progress = require('progress-stream');
var date = require('date-and-time');
var indexStat = 0;
var header = [];
var headerLine = '$lang=en\n\nINSERT_UPDATE Customer;uid;name;address;phoneno'
var delimeter = ',';
var semicolon = ';';
var inputFile = __dirname+'/project/customer.csv';
var outputFile = __dirname+'/project/customer.impex';
var inputFileName = 'customer.csv';
var outputFileName = 'customer.impex';
function generateRecord(json) {
var record = semicolon + json.uid + semicolon + json.name + semicolon + json.address;
return record;
}
var writeStream = fs.createWriteStream(outputFile);
var parser = csv.parse({
delimiter: delimeter
}, function (err, data) {
if (err) {
console.log(err);
}
});
var transformer = csv.transform(function (data) {
var line = '';
if (indexStat == 0) {
header = data;
var line = headerLine;
} else {
var line = generateRecord(generateRecordObject(data));
}
indexStat++;
writeStream.write(line + '\r\n');
});
function stringSplitter(dataRow) {
var str = dataRow.toString();
return str.split(delimeter);
}
function generateRecordObject(dataRow) {
var record = {};
dataRow.forEach(function (value, index) {
if (header[index] != '') {
record[header[index].toLowerCase()] = value;
}
});
return record;
}
var stat = fs.statSync(inputFile);
var str = progress({
length: stat.size,
time: 100
});
str.on('progress', function (progress) {
writeCompletedPercentageForRead(progress.percentage, progress.eta, progress.runtime, progress.speed);
});
function removeLineBreaks(obj) {
obj = obj.replace(/\\N/g, '');
obj = obj.replace(/&/g, '&amp;');
return obj;
}
function writeCompletedPercentageForRead(p, e, r, s) {
process.stdout.clearLine();
process.stdout.cursorTo(0);
process.stdout.write(`${inputFileName} read in progress to write ${outputFileName} ... Completed:${parseInt(p, 10)} %, ETA:${e} seconds, Elapsed:${r} seconds, Rate:${parseInt(s/1024, 10)} KBps`);
};
fs.createReadStream(inputFile).pipe(str).pipe(parser).pipe(transformer);
customer.csv ->
uid,name,address
1234,manish,bangalore
The above script works fine and generates the customer.impex file shown below:
INSERT_UPDATE Customer;uid;name;address;phoneno
;1234;manish;bangalore
Now I want to populate phoneno as well, but the phoneno field is defined in another csv file, let's say 'customer_phone.csv'.
customer_phone.csv -
uid,phoneno
1234,98777767467
I want to match the uid column of customer.csv with customer_phone.csv and get the phoneno from that csv. Finally, I want to add phoneno to the customer.impex output file.
I have no idea how to parse two csv files here and generate the file. Any help?
var Converter = require("csvtojson").Converter;
var fs = require("fs");
var pathToMainCSV = "./customer.csv";
var pathToContactsCSV = "./contact.csv";
var customerConverter = new Converter({});
var contactConverter = new Converter({});
var contanierObj = {};
function processContacts() {
fs.createReadStream(pathToContactsCSV).pipe(contactConverter);
}
function createImpexFile() {
var headerLine = '$lang=en\n\nINSERT_UPDATE Customer;uid;name;address;phoneno;\n';
Object.keys(contanierObj).forEach(obj => {
Object.keys(contanierObj[obj]).forEach(data => {
headerLine += contanierObj[obj][data] + ';';
});
headerLine += '\n';
});
fs.writeFile("./new.impex", headerLine, function(err) {
if (err) {
return console.log(err);
}
console.log("The file was saved!");
});
}
customerConverter.on("end_parsed", function(jsonArray) {
jsonArray.forEach(v => {
contanierObj[v.uid] = v;
});
processContacts();
});
contactConverter.on("end_parsed", function(jsonArray) {
jsonArray.forEach(v => {
contanierObj[v.uid].contact = v.phoneno;
});
createImpexFile();
});
fs.createReadStream(pathToMainCSV).pipe(customerConverter);
Kindly use something like I have done above, and format the string according to your needs.
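To get phoneno into its own column rather than relying on property order, it is safer to store it under phoneno during the merge and build each impex line explicitly. A small sketch (the field names uid/name/address/phoneno come from the question's files):
// Build the impex text with every field in a fixed column.
function buildImpex(customersByUid) {
    var lines = ['$lang=en', '', 'INSERT_UPDATE Customer;uid;name;address;phoneno'];
    Object.keys(customersByUid).forEach(function (uid) {
        var c = customersByUid[uid];
        lines.push(';' + c.uid + ';' + c.name + ';' + c.address + ';' + (c.phoneno || ''));
    });
    return lines.join('\n');
}

// Usage with the merge above: in the contactConverter handler set
// contanierObj[v.uid].phoneno = v.phoneno, then
// fs.writeFile('./customer.impex', buildImpex(contanierObj), callback).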

Deserialize strange querystring with Node.js

I have the following string and want to deserialize it into an object (or objects):
responseEnvelope.timestamp=2015-07-06T15%3A20%3A49.534-07%3A00
&responseEnvelope.ack=Failure&responseEnvelope.correlationId=a58a06c1000d8
&responseEnvelope.build=15743565&error(0).errorId=520003&error(0).domain=PLATFORM
&error(0).subdomain=Application&error(0).severity=Error&error(0).category=Application
(BTW - It is the response from PayPal Adaptive Payments using the Classic API)
https://developer.paypal.com/webapps/developer/docs/classic/api/adaptive-payments/Pay_API_Operation/
Using Node.js I can parse it into a flat map of dotted keys using
var qs = require('querystring');
var response = the-response-from-paypal;
var paypalResponse = qs.parse(response.toString());
console.log(paypalResponse["responseEnvelope.ack"])
//Renders: Failure
but I want to convert it into a nested object that can be accessed like:
var ack = deserializedResponse.responseEnvelope.ack;
//etc
Or as JSON:
{
responseEnvelope: {
ack: "Failure",
timestamp: blah,
correlationId: "a58a06c1000d8",
...
},
error: [
{
errorId: 520003,
domain: "Platform",
...
}
]
}
Is there some handy tool to do this?
This builds on my answer to your other SO question. You would just need to wrap the recursive function in a preprocessing step that builds the entire object for you.
var arrayRegExp = /^(\w+)\((\d+)\)$/;
var input = 'responseEnvelope.timestamp=2015-07-06T15%3A20%3A49.534-07%3A00&responseEnvelope.ack=Failure&responseEnvelope.correlationId=a58a06c1000d8&responseEnvelope.build=15743565&error(0).errorId=520003&error(0).domain=PLATFORM&error(0).subdomain=Application&error(0).severity=Error&error(0).category=Application';

function setObjVal(obj, paths, val) {
    var path;
    var arrayInfo;
    if (paths.length === 0) {
        return val;
    }
    obj = obj || {};
    path = paths.shift();
    arrayInfo = path.match(arrayRegExp);
    if (arrayInfo) {
        path = arrayInfo[1];
        if (!Array.isArray(obj[path])) {
            obj[path] = [];
        }
        obj[path][arrayInfo[2]] = setObjVal(obj[path][arrayInfo[2]], paths, val);
    }
    else {
        obj[path] = setObjVal(obj[path], paths, val);
    }
    return obj;
}

var newObj = input.split('&').reduce(function (o, str) {
    var pair = str.split('=');
    return setObjVal(o, pair[0].split('.'), pair[1]);
}, {});

console.log(JSON.stringify(newObj, null, 4));
Alternatively, if you want to use lodash as in the other answer, you could use _.set():
var _ = require('lodash');
var input = 'responseEnvelope.timestamp=2015-07-06T15%3A20%3A49.534-07%3A00&responseEnvelope.ack=Failure&responseEnvelope.correlationId=a58a06c1000d8&responseEnvelope.build=15743565&error(0).errorId=520003&error(0).domain=PLATFORM&error(0).subdomain=Application&error(0).severity=Error&error(0).category=Application';

var newObj = input.split('&').reduce(function (o, str) {
    var pair = str.split('=');
    return _.set(o, pair[0].replace(/\(/g, '[').replace(/\)/g, ']'), pair[1]);
}, {});

console.log(JSON.stringify(newObj, null, 4));
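One caveat with both variants: the values are still percent-encoded (note the %3A sequences in the timestamp). Running each value through decodeURIComponent during the reduce fixes that; a small sketch against the lodash version:
var newObj = input.split('&').reduce(function (o, str) {
    var pair = str.split('=');
    // decodeURIComponent turns "15%3A20%3A49" back into "15:20:49"
    return _.set(o, pair[0].replace(/\(/g, '[').replace(/\)/g, ']'), decodeURIComponent(pair[1]));
}, {});
// newObj.responseEnvelope.timestamp === "2015-07-06T15:20:49.534-07:00"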

Nodejs streams writablestream drain event not firing

I am trying to read in a large file, do some computation, and then write out a much bigger file. To prevent excessive memory consumption, I am using streams. The problem I am facing is that the write stream is not firing the "drain" event, which signals that the writes have been flushed to disk. To prevent back-pressure, I wait for the drain event to fire before I start writing to the buffer again. While debugging, I found that after a .write() call returns false and the line fvfileStream.once('drain', test) is executed, the program just stops and does not do anything.
Here is the code:
var fs = require('fs');
//a test function I created to see if the callback is called after drain.
var test = function(){
console.log("Done Draining");
}
fs.readFile('/another/file/to/be/read', {
encoding: "utf8"
}, function(err, data) {
if (err) throw err;
//Make an array containing tags.
var tags = data.split('\n');
//create a write stream.
var fvfileStream = fs.createWriteStream('/path/TagFeatureVectors.csv');
//read in the question posts
var qfileStream = fs.createReadStream('/Big/file/QuestionsWithTags.csv', {
encoding: "utf8"
});
var partialRow = null;
var writable = true;
var count = 0;
var doRead = function() {
var qData = qfileStream.read();
var questions = qData.split('\n');
if (partialRow != null) {
questions[0] = partialRow + questions[0];
partialRow = null;
}
var lastRow = questions[questions.length - 1];
if (lastRow.charAt(lastRow.length - 1) != '\n') {
partialRow = lastRow;
}
questions.forEach(function(row, index, array) {
count++;
var fields = row.split(',');
console.log("Processing question number: " + count + " id: " + fields[0]);
var tagString = fields[1];
var regex = new RegExp(/<([^>]+)>/g);
tags.forEach(function(tag, index, array) {
var found = false;
var questionTags;
while ((questionTags = regex.exec(tagString)) != null) {
var currentTag = questionTags[1]
if (currentTag === tag) {
found = true;
break;
}
};
//This is where the writestream is written to
if (found) {
writable = fvfileStream.write("1,", "utf8");
}else {
writable = fvfileStream.write("0,","utf8");
}
});
});
fvfileStream.write("\n");
}
qfileStream.on('readable', function() {
if (writable) {
doRead();
} else {
//Waiting for drain event.
fvfileStream.once('drain', test);
}
});
qfileStream.on('end', function() {
fvfileStream.end();
});
});
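For reference, the standard back-pressure pattern is: stop writing as soon as write() returns false, and register a one-time 'drain' listener that re-enters the same write loop. In the code above, the drain listener only calls test, so reading never resumes and writable is never reset to true. A minimal self-contained sketch of the resuming shape (the file path is illustrative):
const fs = require('fs');

const out = fs.createWriteStream('/tmp/out.txt');

function writeChunks(chunks, done) {
    let i = 0;
    (function writeNext() {
        while (i < chunks.length) {
            if (!out.write(chunks[i++])) {
                // Stream buffer is full: wait for 'drain', then
                // continue the loop from where we stopped.
                out.once('drain', writeNext);
                return;
            }
        }
        done();
    })();
}

writeChunks(['a', 'b', 'c'], () => out.end());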
Updated
Based on advice provided by @loganfsmyth, I implemented transform streams, but still ran into the same issue. Here is my updated code:
var fs = require('fs');
var stream = require('stream');
var util = require('util');
var Transform = stream.Transform;
function FVCreator(options) {
// allow use without new
if (!(this instanceof FVCreator)) {
return new FVCreator(options);
}
// init Transform
Transform.call(this, options);
}
util.inherits(FVCreator, Transform);
var partialRow = null;
var count = 0;
var tags;
FVCreator.prototype._transform = function(chunk, enc, cb) {
var that = this;
var questions = chunk.toString().split('\n');
if (partialRow != null) {
questions[0] = partialRow + questions[0];
partialRow = null;
}
var lastRow = questions[questions.length - 1];
if (lastRow.charAt(lastRow.length - 1) != '\n') {
partialRow = lastRow;
questions.splice(questions.length - 1, 1);
}
questions.forEach(function(row, index, array) {
count++;
var fields = row.split(',');
console.log("Processing question number: " + count + " id: " + fields[0]);
var tagString = fields[1];
var regex = new RegExp(/<([^>]+)>/g);
tags.forEach(function(tag, index, array) {
var found = false;
var questionTags;
while ((questionTags = regex.exec(tagString)) != null) {
var currentTag = questionTags[1]
if (currentTag === tag) {
found = true;
break;
}
};
if (found) {
that.push("1,", "utf8");
} else {
that.push("0,", "utf8");
}
});
});
this.push("\n", "utf8");
cb();
};
fs.readFile('/another/file/to/be/read', {
encoding: "utf8"
}, function(err, data) {
if (err) throw err;
//Make an array containing tags.
tags = data.split('\n');
//write to a file.
var fvfileStream = fs.createWriteStream('/path/TagFeatureVectors.csv');
//read in the question posts
var qfileStream = fs.createReadStream('/large/file/to/be/read', {
encoding: "utf8"
});
var fvc = new FVCreator();
qfileStream.pipe(fvc).pipe(fvfileStream);
});
I am running this on OSX Yosemite.
