I am receiving data from a socket server and pushing it into an array. I also check whether the data I received from the socket already exists in the array: if it exists, ignore it; if it doesn't, push it into the existing array. For some reason, though, the socketarray.push part seems to be overwriting previous data in the socketarray variable. No idea what I am doing wrong here.
var socketarray = [];
socket.on('data', async data => {
    var server_data = data.toString();
    var parsed_result = m.parsingData(data.toString());
    if (parsed_result) {
        var car_id = parsed_result.car_id;
        var find_string = socketarray.findIndex(x => x.car_id == car_id);
        if (find_string === -1) {
            var length = socketarray.push({ "car_id": car_id });
        } else {
            console.log("already present");
            console.log(socketarray);
        }
    }
});
But there is also one weird thing happening. If I add this line
var length = socketarray.push({ "car_id": car_id });
before the line var find_string = socketarray.findIndex(x => x.car_id == car_id); — which is the condition that prevents saving duplicate data — then multiple entries are pushed successfully, and the only issue is the duplication. So I have no idea why the push works before the conditional line and not after it.
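For reference, a minimal sketch of the intended dedupe pattern with the array declared once at module scope. One thing worth checking (an assumption, since the connection setup isn't shown) is that socketarray isn't being re-declared inside a per-connection handler, which would reset it for every new connection; ./parser here is a hypothetical module standing in for m.parsingData, and the port is a placeholder.
var net = require('net');
var m = require('./parser'); // hypothetical module providing parsingData()

// Declared once at module scope so every connection shares the same array.
var socketarray = [];

var server = net.createServer(socket => {
    socket.on('data', data => {
        var parsed_result = m.parsingData(data.toString());
        if (!parsed_result) return;

        var car_id = parsed_result.car_id;
        // Only push when no entry with this car_id exists yet.
        if (!socketarray.some(x => x.car_id === car_id)) {
            socketarray.push({ car_id: car_id });
        } else {
            console.log("already present");
        }
    });
});

server.listen(3000); // placeholder port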
I'm still new to JSON, and while I've searched the net and created a function to create an init file if none exists, I'm coming up blank on how to search and retrieve the data from the existing file, or how to add or update entries.
So far I can read the file and print the results to the console, so I know the assignment works. It's a global variable, so the data should persist outside of the readFile loop, but when I try to access it later to build the local array I'll pull data from and use for updating, it reads as undefined.
fs.readFile(path, 'utf8', (error, data) => {
    if (error) {
        console.log(error);
        return;
    }
    //console.log(JSON.parse(data));
    JSONData = JSON.parse(data);
    for (let i = 0; i < JSONData.length; i++) {
        console.log(i+": ["+JSONData[i].unique+"] "+JSONData[i].name);
    }
});//fs.readFile
var playerKey = "KuroTO";
playerKey = playerKey.toLowerCase();
for (let i = 0; i < JSONData.length; i++) {
    if (JSONData[i].unique.toLowerCase() == playerKey) {
        console.log("["+i+"] "+JSONData[i].unique.toLowerCase()+": "+playerKey);
        PlayerCard1.push(JSONData[i].userid);//0
        PlayerCard1.push(JSONData[i].username);//1
        PlayerCard1.push(JSONData[i].unique);//2
        PlayerCard1.push(JSONData[i].name);//3
        PlayerCard1.push(JSONData[i].avatarurl);//4
        PlayerCard1.push(JSONData[i].level);//5
        PlayerCard1.push(JSONData[i].Rank);//6
        PlayerCard1.push(JSONData[i].henshined);//7
        PlayerCard1.push(JSONData[i].Strength);//8
        PlayerCard1.push(JSONData[i].Perception);//9
        PlayerCard1.push(JSONData[i].Endurance);//10
        PlayerCard1.push(JSONData[i].Wisdom);//11
        PlayerCard1.push(JSONData[i].Intelligence);//12
        PlayerCard1.push(JSONData[i].Luck);//13
        PlayerCard1.push(JSONData[i].Agility);//14
        PlayerCard1.push(JSONData[i].Flexability);//15
        PlayerCard1.push(JSONData[i].RatedSpeed);//16
    }//if unique matches
}//for
This is the pseudocode concept I'm trying to do:
if (JSONData.stringify.unique == {SearchUID}){toonname = JSONData.stringify.name;}
As I understand it, you can't really append; you just rewrite the whole file again with the new data. I think I can figure that part out on my own once I can figure out how to read the file into an array I can search like above.
To read JSON, simply require the file.
JSON:
{
"key": "H"
}
JS:
let jsonFile = require("./path/to/json");
console.log(jsonFile.key); // H
Editing is just as simple.
let jsonFile = require("./path/to/json");
jsonFile.key = "A"
console.log(jsonFile.key) // A
Saving edits requires use of FileSystem:
const fs = require("fs")
let jsonFile = require("./path/to/json");
jsonFile.key = "A"
// first argument is the file path
// second argument is the data to write - the file is overwritten,
// so just JSON.stringify() the modified object.
// third argument is a callback that receives an error, if any
fs.writeFile("./path/to/jsonFile", JSON.stringify(jsonFile), (err) => {
    if (err) throw err;
});
This can also be used to slightly clean up your current init function if you wanted, but that's up to you of course.
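One more note on the undefined JSONData from the question: fs.readFile is asynchronous, so the search loop placed after it runs before the callback has assigned JSONData. A minimal sketch of one way around that, using the promise-based fs API so the lookup only runs after the file has been read (the path here is a placeholder):
const fs = require("fs").promises;

async function findPlayer(path, playerKey) {
    // Wait for the file to be read before touching the data.
    const data = await fs.readFile(path, "utf8");
    const JSONData = JSON.parse(data);
    // The search is safe here because JSONData is definitely populated.
    return JSONData.find(
        entry => entry.unique.toLowerCase() === playerKey.toLowerCase()
    );
}

findPlayer("./playerdata.json", "KuroTO")
    .then(player => console.log(player ? player.name : "not found"))
    .catch(console.error);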
I tried to make the function async, but when I print attacks it prints out {} with nothing in it; yet when I print the values right after adding them to attacks, they print fine. Why is that, and how can I use the values?
var fs = require('fs');
var http = require('http');
var attacks = {};
var phase_name;
var directory = 'cti-master\\enterprise-attack\\attack-pattern\\';
// getting all files names.
async function getData(directory){
fs.readdir(directory, (err, files) => {
if(err) { return;}
var fileNum = 0;
// opening all the files and sorting the data in them.
while (fileNum < files.length - 1)
{
fs.readFile(directory + files[fileNum], 'utf8', (err, data) =>
{
// parsing the data from json.
var fileData = JSON.parse(data);
// sometimes there is no phase name.
if(fileData['objects'][0]['kill_chain_phases'] == undefined){phase_name = undefined;}
else{phase_name = fileData['objects'][0]['kill_chain_phases'][0]['phase_name'];}
// sorting data by name to make it easier later.
attacks[fileData['objects'][0]['name']] = {
id: fileData['objects'][0]['id'],
type: fileData['objects'][0]['type'],
description: fileData['objects'][0]['description'],
x_mitre_platforms: fileData['objects'][0]['x_mitre_platforms'],
x_mitre_detection: fileData['objects'][0]['x_mitre_detection'],
phase_name: phase_name};
});
fileNum += 1;
};
});
var keys = Object.keys(attacks);
console.log(attacks);
}
getData(directory);
The reason for the empty log is that Node does not wait for the asynchronous file reads inside the while loop to finish, hence you are getting an empty object. Basically, you can improve this code by using async/await.
But if you want to stick with this code, I suggest this logic:
Just put your log inside an if block whose condition is "print only if the expected file count has been reached".
For example:
if(fileNum === files.length) {
var keys = Object.keys(attacks);
console.log(attacks);
}
Now the log prints only when this condition is satisfied, which means after all the file reads have completed.
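For reference, here is a rough sketch of the async/await version mentioned above, using the promise-based fs API; it assumes the same file layout as the question (objects[0] holding the attack pattern) and only the fields already used in the original code:
const fs = require('fs').promises;

async function getData(directory) {
    const attacks = {};
    const files = await fs.readdir(directory);

    for (const file of files) {
        const data = await fs.readFile(directory + file, 'utf8');
        const obj = JSON.parse(data)['objects'][0];

        // Some entries have no kill_chain_phases.
        const phase_name = obj['kill_chain_phases']
            ? obj['kill_chain_phases'][0]['phase_name']
            : undefined;

        attacks[obj['name']] = {
            id: obj['id'],
            type: obj['type'],
            description: obj['description'],
            x_mitre_platforms: obj['x_mitre_platforms'],
            x_mitre_detection: obj['x_mitre_detection'],
            phase_name: phase_name
        };
    }

    return attacks;
}

getData('cti-master\\enterprise-attack\\attack-pattern\\')
    .then(attacks => console.log(attacks))
    .catch(console.error);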
Okay, I'm confused as to the best way to do this.
The following pieces are in play: a Node.js server, a client-side React app (with Redux), and a MySQL DB.
In the client app I have lists (many, but for this issue assume one) that I want to be able to reorder by drag and drop.
In the MySQL DB the items are stored to represent a linked list (with a nextKey, lastKey, and productionKey (primary), along with the data fields):
//mysql column [productionKey, lastKey,nextKey, ...(other data)]
The current issue I'm having is a render issue: it stutters after every change.
I'm using these two functions to get the initial order and to reorder:
function SortLinkedList(linkedList)
{
var sortedList = [];
var map = new Map();
var currentID = null;
for(var i = 0; i < linkedList.length; i++)
{
var item = linkedList[i];
if(item?.lastKey === null)
{
currentID = item?.productionKey;
sortedList.push(item);
}
else
{
map.set(item?.lastKey, i);
}
}
while(sortedList.length < linkedList.length)
{
var nextItem = linkedList[map.get(currentID)];
sortedList.push(nextItem);
currentID = nextItem?.productionKey;
}
const filteredSafe = sortedList.filter(x => x !== undefined);
//undefined entries appear because the server has not fully updated yet, so the linked list is broken
//nothing will render without this
return filteredSafe;
}
const reorder = (list, startIndex, endIndex) => {
const result = Array.from(list);
const [removed] = result.splice(startIndex, 1);
result.splice(endIndex, 0, removed);
const adjustedResult = result.map((x,i,arr)=>{
if(i==0){
x.lastKey=null;
}else{
x.lastKey=arr[i-1].productionKey;
}
if(i==arr.length-1){
x.nextKey=null;
}else{
x.nextKey=arr[i+1].productionKey;
}
return x;
})
return adjustedResult;
};
I've got this function to get the items
const getItems = (list,jobList) =>
{
return list.map((x,i)=>{
const jobName=jobList.find(y=>y.jobsessionkey==x.attachedJobKey)?.JobName;
return {
id:`ProductionCardM${x.machineID}ID${x.productionKey}`,
attachedJobKey: x.attachedJobKey,
lastKey: x.lastKey,
machineID: x.machineID,
nextKey: x.nextKey,
productionKey: x.productionKey,
content:jobName
}
})
}
my onDragEnd
const onDragEnd=(result)=> {
if (!result.destination) {
return;
}
// dropped outside the list
const items = reorder(
state.items,
result.source.index,
result.destination.index,
);
dispatch(sendAdjustments(items));
//sends update to server
//server updates mysql
//server sends back update events from mysql in packets
//props sent to DnD component are updated
}
So the actual bug looks like the graphics are glitching - as things get temporarily filtered in the SortLinkedList function - resulting in jumpy divs. Is there a smoother way to handle this client->server->DB->server->client data flow that results in consistent handling in DnD?
UPDATE:
Still trying to solve this. I've currently implemented a lock pattern.
useEffect(()=>{
if(productionLock){
setState({
items: SortLinkedList(getItems(data,jobList)),
droppables: [{ id: "Original: not Dynamic" }]
})
setLoading(false);
}else{
console.log("locking first");
setLoading(true);
}
},[productionLock])
where productionLock is set to true and false by triggers on the server...
Basically: the app sends the data to the server, the server processes the request and sends new data back, and when it's finished the server sends the unlock signal.
That should trigger this update exactly once, but it does not; the component still re-renders on each state update sent to the app from the server.
What’s the code for sendAdjustments()?
You should update locally first, otherwise DnD pulls the item back to its original position while you wait for the backend to finish, which makes it appear glitchy. E.g. (see the sketch after this list):
Set the newly reordered list locally as your state
Send network request
If it fails, reverse local list state back to the original list
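A minimal sketch of that flow in the onDragEnd handler, assuming sendAdjustments is a thunk that resolves or rejects when the server call completes, and a hypothetical setItems action for the local list state:
const onDragEnd = (result) => {
    // dropped outside the list
    if (!result.destination) return;

    const previousItems = state.items;
    const items = reorder(
        state.items,
        result.source.index,
        result.destination.index,
    );

    // 1. Optimistically update local state so DnD settles in place immediately.
    dispatch(setItems(items));

    // 2. Persist in the background; assumes the thunk returns a promise.
    dispatch(sendAdjustments(items))
        .catch(() => {
            // 3. On failure, roll back to the original order.
            dispatch(setItems(previousItems));
        });
};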
I'm setting up a Google Cloud Functions (GCF) function that gets triggered often enough that there are multiple instances running at the same time.
I am getting errors from a readStream saying that the source file of the stream does not exist, but at this point in my program I've actually just created it.
I've made sure the file exists before the start of the stream by console.log()-ing the file JSON, so the file does actually exist. I've also made sure that the file I'm trying to access has finished being written by a previous stream with an await, but no dice.
EDIT: The code now contains the entire script. The section that seems to be throwing the error is the function columnDelete().
var parse = require('fast-csv');
var Storage = require('@google-cloud/storage');
var Transform = require('readable-stream').Transform;
var storage = new Storage();
var bucket = storage.bucket('<BUCKET>');
const DMSs = ['PBS','CDK','One_Eighty','InfoBahn'];
class DeleteColumns extends Transform{
constructor(){
super({objectMode:true})
}
_transform(row, enc, done){
//create an array 2 elements shorter than received
let newRow = new Array(row.length - 2);
//write all data but the first two columns
for(let i = 0; i < newRow.length; i++){
newRow[i] = row[i+2];
}
this.push(newRow.toString() + '\n');
done();
}
}
function rename(file, originalFile, DMS){
return new Promise((resolve, reject) => {
var dealer;
var date;
var header = true;
var parser = parse({delimiter : ",", quote:'\\'});
//for each row of data
var stream = originalFile.createReadStream();
stream.pipe(parser)
.on('data', (row)=>{
//if this is the first line do nothing
if(header){
header = false;
}
//otherwise record the contents of the first two columns and then destroy the stream
else {
dealer = row[0].toString().replace('"', '').replace('"', '');
date = row[1].toString().replace('"', '').replace('"', '');
stream.end();
}
})
.on('finish', function(){
var newName = dealer + ' ' + date + '_' + DMS + 'temp.csv';
//if this was not triggered by the renaming of a file
if(!file.name.includes(dealer)&&!file.name.includes(':')){
console.log('Renamed ' + file.name);
originalFile.copy(newName);
originalFile.copy(newName.replace('temp',''));
}else{
newName = 'Not Renamed';
console.log('Oops, triggered by the rename');
}
resolve(newName);
});
});
}
function columnDelete(fileName){
return new Promise((resolve, reject) =>{
console.log('Deleting Columns...');
console.log(bucket.file(fileName));
var parser = parse({delimiter : ",", quote:'\\'});
var del = new DeleteColumns();
var temp = bucket.file(fileName);
var final = bucket.file(fileName.replace('temp', ''));
//for each row of data
temp.createReadStream()
//parse the csv
.pipe(parser)
//delete first two columns
.pipe(del)
//write to new file
.pipe(final.createWriteStream()
.on('finish', function(){
console.log('Columns Deleted');
temp.delete();
resolve();
})
);
});
}
exports.triggerRename = async(data, context) => {
var DMS = 'Triple';
var file = data;
//if not a temporary file
if(!file.name.includes('temp')){
//create a new File object from the name of the data passed
const originalFile = bucket.file(file.name);
//identify which database this data is from
DMSs.forEach(function(database){
if(file.name.includes(database)){
DMS = database;
}
});
//rename the file
var tempName = await rename(file, originalFile, DMS);
//if it was renamed, delete the extra columns
if (!tempName.includes('Not Renamed')){
await columnDelete(tempName);
}
} else if(file.name.includes('undefined')){
console.log(file.name + ' is invalid. Deleted.');
bucket.file(file.name).delete();
}
else {
console.log( file.name + ' is a temporary file. Did not rename.');
}
};
What I expect to be output is as below:
Deleting Columns...
Columns Deleted
Nice and simple, letting us know when it has started and finished.
However, I get this instead:
Deleting Columns...
ApiError: No such object: <file> at at Object.parseHttpRespMessage(......)
finished with status: 'crash'
Which is not wanted for obvious reasons. My next thought is to make sure that the file hasn't been deleted by another instance of the script midway through, but to do that I would have to check to see if the file is being used by another stream, which is, to my knowledge, not possible.
Any ideas out there?
When I was creating the file, I called the asynchronous function copy() and moved on, meaning that when I tried to access the file it had not finished copying. Unknown to me, the File object is a reference variable and does not actually contain the file itself. While the file was copying, the pointer was present but it was pointing to an unfinished file.
Thus, "No Such Object". To fix this, I simply used a callback to make sure that the copying was finished before I was accessing the file.
Thanks to Doug Stevenson for letting me know about the pointer!
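For reference, a minimal sketch of that fix, relying on the promise form of File#copy() from @google-cloud/storage so the copies are finished before anything tries to read the new objects (the bucket name stays a placeholder, as above):
var Storage = require('@google-cloud/storage');
var storage = new Storage();
var bucket = storage.bucket('<BUCKET>');

// Wait for both copies to finish before anything reads the new objects.
async function copyWhenDone(originalFile, newName) {
    // copy() returns a promise when no callback is given.
    await originalFile.copy(newName);
    await originalFile.copy(newName.replace('temp', ''));
    return newName;
}

// Inside rename(), awaiting copyWhenDone(originalFile, newName) before
// resolve(newName) guarantees columnDelete() never sees a half-written file.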
I have a log file with about 14.000 aircraft position datapoints captured from a system called Flarm, it looks like this:
{"addr":"A","time":1531919658.578100,"dist":902.98,"alt":385,"vs":-8}
{"addr":"A","time":1531919658.987861,"dist":914.47,"alt":384,"vs":-7}
{"addr":"A","time":1531919660.217471,"dist":925.26,"alt":383,"vs":-7}
{"addr":"A","time":1531919660.623466,"dist":925.26,"alt":383,"vs":-7}
What I need to do is find a way to 'play' this file back in real time (as if it were occurring right now, even though it's pre-recorded), and emit an event whenever a log entry 'occurs'. The file is not being added to; it's pre-recorded, and the playback would happen at a later stage.
The reason for doing this is that I don't have access to the receiving equipment when I'm developing.
The only way I can think to do it is to set a timeout for every log entry, but that doesn't seem like the right way to do it. Also, this process would have to scale to longer recordings (this one was only an hour long).
Are there other ways of doing this?
If you want to "play them back" with the actual time difference, a setTimeout is pretty much what you have to do.
const processEntry = (entry, index) => {
index++;
const nextEntry = getEntry(index);
if (nextEntry == null) return;
const timeDiff = nextEntry.time - entry.time;
emitEntryEvent(entry);
setTimeout(processEntry, timeDiff, nextEntry, index);
};
processEntry(getEntry(0), 0);
This emits the current entry and then sets a timeout based on the difference until the next entry.
getEntry could either fetch lines from a prefilled array or fetch lines individually based on the index. In the latter case, only two lines of data would be in memory at the same time.
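For instance, a minimal getEntry backed by a prefilled array, assuming the log file from the question with one JSON object per line. Note that the time fields in the sample data are in seconds, so the computed difference would need to be multiplied by 1000 before being passed to setTimeout.
const fs = require('fs');

// Read the whole log once and parse each line into an object.
const entries = fs.readFileSync('./data/2018-07-18_1509log.json', 'utf8')
    .split('\n')
    .filter(line => line.trim() !== '')
    .map(line => JSON.parse(line));

const getEntry = (index) => entries[index] || null;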
Got it working in the end! setTimeout turned out to be the answer, and combined with the input of Lucas S. this is what I ended up with:
const EventEmitter = require('events');
const fs = require('fs');
const readable = fs.createReadStream("./data/2018-07-18_1509log.json", {
encoding: 'utf8',
fd: null
});
function read_next_line() {
var chunk;
var line = '';
// While this is a thing we can do, assign chunk
while ((chunk = readable.read(1)) !== null) {
// If chunk is a newline character, return the line
if (chunk === '\n'){
return JSON.parse(line);
} else {
line += chunk;
}
}
return false;
}
var lines = [];
var nextline;
const processEntry = () => {
// If lines is empty, read a line
if (lines.length === 0) lines.push(read_next_line());
// Quit here if we've reached the last line
if ((nextline = read_next_line()) == false) return true;
// Else push the just read line into our array
lines.push(nextline);
// Get the time difference in milliseconds
var delay = Number(lines[1].time - lines[0].time) * 1000;
// Remove the first line
lines.shift();
module.exports.emit('data', lines[0]);
// Repeat after the calculated delay
setTimeout(processEntry, delay);
}
var ready_to_start = false;
// When the stream becomes readable, allow starting
readable.on('readable', function() {
ready_to_start = true;
});
module.exports = new EventEmitter;
module.exports.start = function() {
if (ready_to_start) processEntry();
if (!ready_to_start) return false;
}
Assuming you want to visualize the flight logs, you can use fs.watch() as below to watch the log file for changes:
fs.watch('somefile', function (event, filename) {
console.log('event is: ' + event);
if (filename) {
console.log('filename provided: ' + filename);
} else {
console.log('filename not provided');
}
});
Code excerpt is from here. For more information on fs.watch() check out here
Then, for seamless updates on the frontend, you can set up a WebSocket to your server, where you watch the log file and send each newly added row over that socket to the frontend.
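A rough sketch of that idea using the ws package (the port and file path are placeholders); it tracks the file size so that only the newly appended bytes are read and broadcast to connected clients:
const fs = require('fs');
const WebSocket = require('ws');

const LOG_PATH = './data/2018-07-18_1509log.json'; // placeholder path
const wss = new WebSocket.Server({ port: 8080 });  // placeholder port

let lastSize = fs.statSync(LOG_PATH).size;

fs.watch(LOG_PATH, () => {
    const { size } = fs.statSync(LOG_PATH);
    if (size <= lastSize) return; // nothing new appended

    // Read only the bytes added since the last event.
    const stream = fs.createReadStream(LOG_PATH, { start: lastSize, end: size - 1, encoding: 'utf8' });
    lastSize = size;

    let added = '';
    stream.on('data', chunk => { added += chunk; });
    stream.on('end', () => {
        // Broadcast each complete new line to every connected client.
        for (const line of added.split('\n')) {
            if (line.trim() === '') continue;
            wss.clients.forEach(client => {
                if (client.readyState === WebSocket.OPEN) client.send(line);
            });
        }
    });
});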
After you get the data on the frontend you can visualize it there. While I haven't done a flight-visualization project before, I've used D3.js to visualize other things (sound, numerical data, metric analyses, etc.) a couple of times and it did the job every time.