Can't get variable value in Node.js

I tried to make the function async, but when I print attacks it prints {} with nothing in it. However, if I print the values right after adding them to attacks, they are there. Why is that, and how can I use the values?
var fs = require('fs');
var http = require('http');

var attacks = {};
var phase_name;
var directory = 'cti-master\\enterprise-attack\\attack-pattern\\';

// getting all file names.
async function getData(directory) {
    fs.readdir(directory, (err, files) => {
        if (err) { return; }
        var fileNum = 0;
        // opening all the files and sorting the data in them.
        while (fileNum < files.length - 1) {
            fs.readFile(directory + files[fileNum], 'utf8', (err, data) => {
                // parsing the data from json.
                var fileData = JSON.parse(data);
                // sometimes there is no phase name.
                if (fileData['objects'][0]['kill_chain_phases'] == undefined) {
                    phase_name = undefined;
                } else {
                    phase_name = fileData['objects'][0]['kill_chain_phases'][0]['phase_name'];
                }
                // sorting data by name to make it easier later.
                attacks[fileData['objects'][0]['name']] = {
                    id: fileData['objects'][0]['id'],
                    type: fileData['objects'][0]['type'],
                    description: fileData['objects'][0]['description'],
                    x_mitre_platforms: fileData['objects'][0]['x_mitre_platforms'],
                    x_mitre_detection: fileData['objects'][0]['x_mitre_detection'],
                    phase_name: phase_name
                };
            });
            fileNum += 1;
        }
    });
    var keys = Object.keys(attacks);
    console.log(attacks);
}

getData(directory);
getData(directory);

The log is empty because Node does not wait for the file reads started in the while loop to finish before your console.log runs, hence the empty object. The better fix is to rewrite this code with async/await.
But if you want to stick with this structure, I suggest the following logic: wrap your log in an if block whose condition is "only print once the expected file count has been reached".
For example:
if (fileNum === files.length) {
    var keys = Object.keys(attacks);
    console.log(attacks);
}
Now the log prints only when that condition is satisfied, which means after the loop has gone through every file.
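As a minimal sketch of the async/await approach (illustrative only; it assumes fs.promises, which is available in modern Node, and reuses the field names from the question):

const fs = require('fs').promises;

const directory = 'cti-master\\enterprise-attack\\attack-pattern\\';

async function getData(directory) {
    const attacks = {};
    const files = await fs.readdir(directory);
    for (const file of files) {
        const data = await fs.readFile(directory + file, 'utf8');
        const obj = JSON.parse(data)['objects'][0];
        // kill_chain_phases is sometimes missing
        const phase_name = obj['kill_chain_phases'] ? obj['kill_chain_phases'][0]['phase_name'] : undefined;
        attacks[obj['name']] = {
            id: obj['id'],
            type: obj['type'],
            description: obj['description'],
            x_mitre_platforms: obj['x_mitre_platforms'],
            x_mitre_detection: obj['x_mitre_detection'],
            phase_name: phase_name
        };
    }
    return attacks;
}

// attacks is only available once the returned promise resolves
getData(directory).then((attacks) => console.log(attacks));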

Related

Node.js Accessing a JSON File

I'm still new to JSON. While I've searched the net and written a function that creates an init file if none exists, I'm coming up blank on how to search and retrieve data from the existing file, and on how to add or update entries.
So far I can read the file and print the results in a console.log, so I know the assignment works. It's a global variable, so the data should persist outside of the readFile callback, but when I try to access it later to build the local array I'll pull data from (and use for updating later), it reads as undefined.
fs.readFile(path, 'utf8', (error, data) => {
    if (error) {
        console.log(error);
        return;
    }
    //console.log(JSON.parse(data));
    JSONData = JSON.parse(data);
    for (let i = 0; i < JSONData.length; i++) {
        console.log(i + ": [" + JSONData[i].unique + "] " + JSONData[i].name);
    }
}); //fs.readFile

var playerKey = "KuroTO";
playerKey = playerKey.toLowerCase();

for (let i = 0; i < JSONData.length; i++) {
    if (JSONData[i].unique.toLowerCase() == playerKey) {
        console.log("[" + i + "] " + JSONData[i].unique.toLowerCase() + ": " + playerKey);
        PlayerCard1.push(JSONData[i].userid);       //0
        PlayerCard1.push(JSONData[i].username);     //1
        PlayerCard1.push(JSONData[i].unique);       //2
        PlayerCard1.push(JSONData[i].name);         //3
        PlayerCard1.push(JSONData[i].avatarurl);    //4
        PlayerCard1.push(JSONData[i].level);        //5
        PlayerCard1.push(JSONData[i].Rank);         //6
        PlayerCard1.push(JSONData[i].henshined);    //7
        PlayerCard1.push(JSONData[i].Strength);     //8
        PlayerCard1.push(JSONData[i].Perception);   //9
        PlayerCard1.push(JSONData[i].Endurance);    //10
        PlayerCard1.push(JSONData[i].Wisdom);       //11
        PlayerCard1.push(JSONData[i].Intelligence); //12
        PlayerCard1.push(JSONData[i].Luck);         //13
        PlayerCard1.push(JSONData[i].Agility);      //14
        PlayerCard1.push(JSONData[i].Flexability);  //15
        PlayerCard1.push(JSONData[i].RatedSpeed);   //16
    } //if unique matches
} //for
This is the pseudocode concept I'm trying to do:
if (JSONData.stringify.unique == {SearchUID}) { toonname = JSONData.stringify.name; }
As I understand it, you can't really append; you have to rewrite the whole file again with the new data. I think I can figure that part out on my own once I can work out how to read the file into an array I can search like the above.
To read JSON, simply require the file.
JSON:
{
"key": "H"
}
JS:
let jsonFile = require("./path/to/json");
console.log(jsonFile.key); // H
Editing is just as simple.
let jsonFile = require("./path/to/json");
jsonFile.key = "A"
console.log(jsonFile.key) // A
Saving edits requires the fs module:
const fs = require("fs");
let jsonFile = require("./path/to/json");
jsonFile.key = "A";
// first argument is the file path
// second argument is the JSON to write - writeFile overwrites the whole file,
// so just JSON.stringify() the edited object from above
// third argument is an error callback
fs.writeFile("./path/to/jsonFile", JSON.stringify(jsonFile), (err) => {
    if (err) throw new Error(err);
});
This can also be used to slightly clean up your current init function if you wanted, but that's up to you of course.
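On the original symptom of the data reading as undefined outside the callback: the readFile callback runs later, so anything that depends on JSONData has to happen inside it (or after it has run). A minimal sketch of the lookup moved into the callback, assuming path and PlayerCard1 are defined as in the question:

const fs = require('fs');

fs.readFile(path, 'utf8', (error, data) => {
    if (error) {
        console.log(error);
        return;
    }
    const JSONData = JSON.parse(data);
    const playerKey = "KuroTO".toLowerCase();
    // the parsed data only exists once this callback runs, so search it here
    for (let i = 0; i < JSONData.length; i++) {
        if (JSONData[i].unique.toLowerCase() == playerKey) {
            PlayerCard1.push(JSONData[i].userid);
            PlayerCard1.push(JSONData[i].username);
            // ...and so on for the remaining fields
        }
    }
});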

Array.push overriding the existing value instead of pushing new value

I am receiving data from the socket server and pushing it into an array. I am also checking whether the data I received from the socket already exists in the array: if it exists, ignore it; if it doesn't, push it into the existing array. For some reason, though, the socketarray.push part seems to be overwriting previous data in the socketarray variable. I have no idea what I am doing wrong here.
var socketarray = [];

socket.on('data', async data => {
    var server_data = data.toString();
    var parsed_result = m.parsingData(data.toString());
    if (parsed_result) {
        var car_id = parsed_result.car_id;
        var find_string = socketarray.findIndex(x => x.car_id == car_id);
        if (find_string === -1) {
            length = socketarray.push({ "car_id": car_id });
        } else {
            console.log("already present");
            console.log(socketarray);
        }
    }
});
But there is also one weird thing happening. If I add this line
var length = socketarray.push({ "car_id": car_id });
before the line var find_string = socketarray.findIndex(x => x.car_id == car_id); (which is the condition meant to prevent duplicate data being saved), then multiple items are pushed successfully; the only issue is the duplicates. So I have no idea why the code works before the conditional line and not after it.

Correct way to organise this process in Node

I need some advice on how to structure this function, as at the moment things are not happening in the correct order due to Node being asynchronous.
This is the flow I want to achieve. I don't need help with the code itself, but with the order of operations to achieve the end result, plus any suggestions on how to make it efficient:
1. Node routes a GET request to my controller.
2. The controller reads a .csv file on the local system and opens a read stream using the fs module.
3. It then uses the csv-parse module to convert that to an array line by line (many hundreds of thousands of lines).
4. Start a try/catch block.
5. With the current row from the CSV, take a value and try to find it in MongoDB.
6. If found, take the ID and store the line from the CSV, with this ID as a foreign ID, in a separate database.
7. If not found, create an entry in the DB, take the new ID, and then do step 6.
8. Print to the terminal the row number being worked on (ideally, at some point, I would like to be able to send this value to the page and have it update like a progress bar as the rows are completed).
Here is a small part of the code structure that I am currently using:
const fs = require('fs');
const parse = require('csv-parse');

function addDataOne(req, id) {
    const modelOneInstance = new InstanceOne({ ...code });
    const resultOne = modelOneInstance.save();
    return resultOne;
}

function addDataTwo(req, id) {
    const modelTwoInstance = new InstanceTwo({ ...code });
    const resultTwo = modelTwoInstance.save();
    return resultTwo;
}

exports.add_data = (req, res) => {
    const fileSys = 'public/data/';
    const parsedData = [];
    let i = 0;
    fs.createReadStream(`${fileSys}${req.query.file}`)
        .pipe(parse({}))
        .on('data', (dataRow) => {
            let RowObj = {
                one: dataRow[0],
                two: dataRow[1],
                three: dataRow[2],
                etc,
                etc
            };
            try {
                ModelOne.find(
                    { propertyone: RowObj.one, propertytwo: RowObj.two },
                    '_id, foreign_id'
                ).exec((err, searchProp) => {
                    if (err) {
                        console.log(err);
                    } else {
                        if (searchProp.length > 1) {
                            console.log('too many returned from find function');
                        }
                        if (searchProp.length === 1) {
                            addDataOne(RowObj, searchProp[0]).then((result) => {
                                searchProp[0].foreign_id.push(result._id);
                                searchProp[0].save();
                            });
                        }
                        if (searchProp.length === 0) {
                            let resultAddProp = null;
                            addDataTwo(RowObj).then((result) => {
                                resultAddProp = result;
                                addDataOne(req, resultAddProp._id).then((result) => {
                                    resultAddProp.foreign_id.push(result._id);
                                    resultAddProp.save();
                                });
                            });
                        }
                    }
                });
            } catch (error) {
                console.log(error);
            }
            i++;
            let iString = i.toString();
            process.stdout.clearLine();
            process.stdout.cursorTo(0);
            process.stdout.write(iString);
        })
        .on('end', () => {
            res.send('added');
        });
};
I have tried to make the functions use async/await, but it seems to conflict with the fs.createReadStream and csv-parse functionality, probably due to my inexperience and incorrect use of the code.
I appreciate that this is a long question about the fundamentals of the code, but some tips/advice/pointers on how to get this going would be appreciated. I had it working when the data was sent one record at a time via a POST request from Postman, but I can't implement the next stage, which is to read from the CSV file containing many records.
First of all, you can collapse the following two checks into one query:
if (searchProp.length === 1) {
if (searchProp.length === 0) {
Use the upsert option of MongoDB's findOneAndUpdate query so a single call either updates the existing document or inserts a new one.
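With Mongoose, that could look roughly like the sketch below; the model and field names are taken from the question's snippet, and the exact update payload is only illustrative:

ModelOne.findOneAndUpdate(
    { propertyone: RowObj.one, propertytwo: RowObj.two }, // search criteria
    { propertyone: RowObj.one, propertytwo: RowObj.two }, // values to set when inserting
    { upsert: true, new: true }                           // insert if missing, return the resulting doc
).exec((err, doc) => {
    if (err) return console.log(err);
    // doc is either the existing document or the newly created one
    addDataOne(RowObj, doc).then((result) => {
        doc.foreign_id.push(result._id);
        doc.save();
    });
});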
Secondly, don't do this work inline in the request handler. Use a queue mechanism; it will be much more efficient.
The queue I personally use is Bull:
https://github.com/OptimalBits/bull#basic-usage
It also provides the progress-reporting functionality you need.
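A bare-bones sketch of that pattern with Bull follows; the queue name, the Redis defaults, and the two helper functions (loadAndParseCsv, upsertRow) are placeholders, not something from the question:

const Queue = require('bull');

// assumes a local Redis instance; Bull stores its jobs in Redis
const csvQueue = new Queue('csv-import');

// worker: processes an import job outside the request/response cycle
csvQueue.process(async (job) => {
    const rows = await loadAndParseCsv(job.data.file); // placeholder for the CSV reading step
    for (let i = 0; i < rows.length; i++) {
        await upsertRow(rows[i]);                      // placeholder for the findOneAndUpdate logic
        job.progress(Math.round(((i + 1) / rows.length) * 100));
    }
});

// controller: just enqueue the job and respond immediately
exports.add_data = (req, res) => {
    csvQueue.add({ file: req.query.file });
    res.send('import queued');
};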
Also, regarding using async/await with a read stream, plenty of examples can be found online, such as: https://humanwhocodes.com/snippets/2019/05/nodejs-read-stream-promise/
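For instance, since Node readable streams are async iterables, the row handling can be written with for await...of, which makes it easy to finish each row before moving to the next. This is only a sketch, assuming the same fs/csv-parse setup as the question and a placeholder upsertRow helper:

exports.add_data = async (req, res) => {
    const fileSys = 'public/data/';
    const parser = fs.createReadStream(`${fileSys}${req.query.file}`).pipe(parse({}));
    let i = 0;
    for await (const dataRow of parser) {
        // each row is fully processed before the next one is read
        await upsertRow(dataRow); // placeholder for the find/create logic above
        i++;
    }
    res.send(`added ${i} rows`);
};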

Node.js read data from file and create objects

I am trying to write a Node.js program. I have a file data.json that contains JSON objects. For every object I have to add another key, review, whose value is the contents of a text file. Below, I wrote code that reads the data from the JSON file, inserts the key/value pair into every object, and stores the results in an array named matter. I used a callback to pass the data back to the calling function, but the callback executes before the for loop in Fetchdata has finished. How can I call the callback after the for loop?
var fs = require('fs');
var jf = require('jsonfile');
var file = '../data/data.json';

function readfile(str, callback) {
    fs.readFile(str, function (err, data) {
        callback && callback(data.toString());
    });
}

function Fetchdata(callback) {
    var matter = [];
    jf.readFile(file, function (err, jsonData) {
        var j = 0;
        for (var i = 0; i < jsonData.length; ++i) {
            obj = jsonData[i];
            var purchase_url = obj["purchase_url"];
            if (purchase_url.indexOf("flipkart") > -1) {
                var ss = purchase_url.split("pid=");
                if (ss[1]) {
                    var s2 = ss[1].split('&');
                    readfile(s2[0], function (some) {
                        "use strict";
                        obj["review"] = some;
                        matter.push(obj);
                    });
                }
            }
        }
        callback && callback(matter);
    });
}

Fetchdata(function (some) {
    console.log(some[0]);
});
You can use the sync version of readFile:
function readfile(filename, callback) {
    callback(fs.readFileSync(filename).toString());
}
The sync version of readFile does not move on to the next line until it has read the file; it returns the content directly, so every review has been pushed into matter before the for loop ends.
You can also do it in one line with fs.readFileSync(filename, 'utf-8') instead of using toString().
More info
readFileSync API
Synchronous vs Asynchronous code with Node.js
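If you would rather keep the reads asynchronous, another common pattern (not part of the answer above, just a sketch reusing the question's readfile helper) is to count the outstanding reads and invoke the callback once the last one finishes:

function Fetchdata(callback) {
    var matter = [];
    jf.readFile(file, function (err, jsonData) {
        var pending = 0;
        jsonData.forEach(function (obj) {
            var purchase_url = obj["purchase_url"];
            if (purchase_url.indexOf("flipkart") > -1) {
                var ss = purchase_url.split("pid=");
                if (ss[1]) {
                    pending++;
                    readfile(ss[1].split('&')[0], function (some) {
                        obj["review"] = some;
                        matter.push(obj);
                        // fire the callback only after the last pending read completes
                        if (--pending === 0) callback && callback(matter);
                    });
                }
            }
        });
        // no matching entries at all: call back with the empty result
        if (pending === 0) callback && callback(matter);
    });
}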

Node.js while loop return deferred result

What I'm trying to do is this: I have two users, an attacker and a defender. I want to call the hit() function until one of them runs out of health. hit() should be called in turns, first attacker, then defender, then attacker, and so on until one reaches 0 hp.
My idea was to do it with a while loop, but with the current code all I get is the console.log from hit() in an infinite loop. The data from hit() is returned correctly if it's not inside a loop. I could simply do the work inside the while loop and not use the hit() function, but that would mean repeating a lot of code, since there will be a few things to consider before a hit actually happens.
If you have an alternative to the while loop, I'm open to ideas. I should also mention I'm new to Node, so keep it as simple as possible, please. Thank you.
This is the relevant part of the code:
var prepareAttack = function (attacker, defender) {
    connectdb().done(function (connection) {
        query(connection, 'SELECT * FROM members WHERE name = ?', attacker).done(function (result) {
            var attackerData = result[0][0];
            query(connection, 'SELECT * FROM members WHERE name = ?', defender).done(function (result) {
                var defenderData = result[0][0];
                var attackerHp = attackerData.hp;
                var defenderHp = defenderData.hp;
                while (attackerHp > 0 && defenderHp > 0) {
                    hit(attackerData, defenderData).done(function (result) {
                        defenderHp = result;
                        console.log(defenderHp);
                    });
                    hit(defenderData, attackerData).done(function (result) {
                        attackerHp = result;
                        console.log(attackerHp);
                    });
                }
            });
        });
    });
};

var hit = function (attackerData, defenderData) { // simplified code, just the relevant parts inside.
    console.log('hitting');
    var defer = Q.defer();
    var newHp = 0;
    defer.resolve(newHp);
    return defer.promise;
};
