Node.js debugging Coderbyte task - node.js

Node.js debugging JSON data in a JavaScript file: you have a program that performs a GET request on the route https://coderbyte.com/api/challenges/json/age-counting, which returns a data key whose value is a string containing items in the format key=STRING, age=INTEGER. Your goal is to print out the key IDs between indices 10 and 15 in string format (e.g. q3kg6,mgqpf,tg2vm,...). The program provided already parses the data and loops through each item, but you need to figure out how to fix it so that it correctly populates keyArray with only the key IDs.
Here is the code given by Coderbyte.
const https = require('https');

https.get('https://coderbyte.com/api/challenges/json/age-counting', (resp) => {
    let content = '';

    // push data character by character to "content" variable
    resp.on('data', (c) => content += c);

    // when finished reading all data, parse it for what we need now
    resp.on('end', () => {
        let jsonContent = JSON.parse(content);
        let jsonContentArray = jsonContent.data.split(',');
        let keyArray = [];

        for (let i = 0; i < jsonContentArray.length; i++) {
            let keySplit = jsonContentArray[i].trim().split('=');
            keyArray.push(keySplit[1]);
        }

        console.log(keyArray.toString());
    });
});

const https = require('https');

https.get('https://coderbyte.com/api/challenges/json/age-counting', (resp) => {
    let content = '';

    // push data character by character to "content" variable
    resp.on('data', (c) => content += c);

    // when finished reading all data, parse it for what we need now
    resp.on('end', () => {
        let jsonContent = JSON.parse(content);
        let jsonContentArray = jsonContent.data.split(',');
        let keyArray = [];
        console.log(jsonContentArray.length);

        // The entries alternate key/age, so step by 2 to visit only the key entries
        // and skip the ages.
        for (let i = 0; i < jsonContentArray.length; i += 2) {
            // Extract the key ID by trimming whitespace, splitting the entry on "="
            // and taking the second element (the value after "key=").
            let key = jsonContentArray[i].trim().split('=')[1];
            // Because keys sit at even array indices, key index k lives at array index 2*k,
            // so the wanted key indices map to array indices 20..28.
            // Example of the JSON considered:
            // data":"key=IAfpK, age=58, key=WNVdi, age=64, key=jp9zt, age=47, key=0Sr4C, age=68, key=CGEqo, age=76, key=IxKVQ, age=79, key=eD221, age=29, key=XZbHV, age=32, key=k1SN5, age=88, .....etc
            if (i >= 20 && i < 30) {
                keyArray.push(key);
            }
        }

        console.log(keyArray);
    });
});
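An alternative, purely illustrative sketch of the same idea: filter the entries down to the key IDs first, then slice out the wanted range. The slice bounds below are an assumption about how "between indices 10 and 15" should be read (10..15 inclusive); adjust them if the expected output differs.

const https = require('https');

https.get('https://coderbyte.com/api/challenges/json/age-counting', (resp) => {
    let content = '';
    resp.on('data', (c) => content += c);
    resp.on('end', () => {
        const items = JSON.parse(content).data.split(',').map((s) => s.trim());
        // Keep only the "key=..." entries and drop the "key=" prefix.
        const keyArray = items
            .filter((item) => item.startsWith('key='))
            .map((item) => item.split('=')[1]);
        // Assumption: "between indices 10 and 15" means key indices 10..15 inclusive.
        console.log(keyArray.slice(10, 16).join(','));
    });
});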

Related

Can't get variable value in Node.js

I tried to make the function async, but when I print attacks it prints {} with nothing in it, even though I can print the values right after adding them to attacks. Why is that, and how can I use the value?
var fs = require('fs');
var http = require('http');

var attacks = {};
var phase_name;
var directory = 'cti-master\\enterprise-attack\\attack-pattern\\';

// getting all file names.
async function getData(directory){
    fs.readdir(directory, (err, files) => {
        if(err) { return; }
        var fileNum = 0;
        // opening all the files and sorting the data in them.
        while (fileNum < files.length - 1)
        {
            fs.readFile(directory + files[fileNum], 'utf8', (err, data) =>
            {
                // parsing the data from json.
                var fileData = JSON.parse(data);
                // sometimes there is no phase name.
                if(fileData['objects'][0]['kill_chain_phases'] == undefined){ phase_name = undefined; }
                else{ phase_name = fileData['objects'][0]['kill_chain_phases'][0]['phase_name']; }
                // sorting data by name to make it easier later.
                attacks[fileData['objects'][0]['name']] = {
                    id: fileData['objects'][0]['id'],
                    type: fileData['objects'][0]['type'],
                    description: fileData['objects'][0]['description'],
                    x_mitre_platforms: fileData['objects'][0]['x_mitre_platforms'],
                    x_mitre_detection: fileData['objects'][0]['x_mitre_detection'],
                    phase_name: phase_name
                };
            });
            fileNum += 1;
        };
    });

    var keys = Object.keys(attacks);
    console.log(attacks);
}

getData(directory);
The reason for the empty log is that Node does not wait for the while loop (or the readFile callbacks) to finish, hence the empty output. You can improve this code by using the async/await approach.
But if you want to stick with this code, the logic I'd suggest is this: move your log inside an if block whose condition is "print only once the expected file count has been reached".
For example:

if (fileNum === files.length) {
    var keys = Object.keys(attacks);
    console.log(attacks);
}

Now the log prints only when this condition is satisfied, which means after all files have been processed.
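For reference, here is a minimal sketch of the async/await version suggested above, using fs.promises so that every file has been read before the log runs. It assumes the same directory layout and JSON structure as the question; the field names are copied from it.

const fs = require('fs').promises;

const attacks = {};
const directory = 'cti-master\\enterprise-attack\\attack-pattern\\';

async function getData(directory) {
    const files = await fs.readdir(directory);
    for (const file of files) {
        const data = await fs.readFile(directory + file, 'utf8');
        const obj = JSON.parse(data)['objects'][0];
        // kill_chain_phases is sometimes missing.
        const phase_name = obj['kill_chain_phases']
            ? obj['kill_chain_phases'][0]['phase_name']
            : undefined;
        attacks[obj['name']] = {
            id: obj['id'],
            type: obj['type'],
            description: obj['description'],
            x_mitre_platforms: obj['x_mitre_platforms'],
            x_mitre_detection: obj['x_mitre_detection'],
            phase_name: phase_name
        };
    }
    // Every await above has resolved, so attacks is fully populated here.
    console.log(attacks);
}

getData(directory);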

SSE returned data is x50 larger than the file size

We have a hapi.js server which returns an SSE stream containing rows from a readable SQL Server (not MySQL) stored procedure.
We observed 50,000 rows returned in chunks of 1,000 events. Both Chrome and Firefox indicated that the size of the data returned in the request was just under 250 MB. We copied the returned data (JSON) into a .txt file and noticed that it was only 20 MB.
The content encoding is set to identity. We implemented SUSIE and are using it to return the event source.
let result = await server.methods.services.worklist.getData(request.query);
let rowsToProcess = [];
let chunkSizeSmall = 50;    // #todo move this to the config
let chunkSizeLarge = 1000;  // #todo move this to the config
let chunkSize = chunkSizeSmall;

result.on('recordset', columns => {
    h.event({ event: 'columns', data: columns });
});

result.on('row', row => {
    rowsToProcess.push(row);
    if (rowsToProcess.length >= chunkSize) {
        chunkSize = chunkSizeLarge;
        result.pause();
        processRows();
    }
});

result.on('done', () => {
    processRows();
    h.event(null);
});

function processRows() {
    // process rows
    h.event({ event: 'data', data: rowsToProcess });
    rowsToProcess = [];
    result.resume();
}

return h.event({});
We were expecting the output to be the same as, or close to, the file size of the JSON.

Node - Readable stream pipe() overwrites previous streams in a for loop

I am trying to stream a data collection to multiple files with the code below:
for (var key in data) {
    // skip if collection length is 0
    if (data[key].length > 0) {
        // Use the key and jobId to open file for appending
        let filePath = folderPath + '/' + key + '_' + jobId + '.txt';
        // Using stream to append the data output to file, which should perform better when file gets big
        let rs = new Readable();
        let n = data[key].length;
        let i = 0;
        rs._read = function () {
            rs.push(data[key][i++]);
            if (i === n) {
                rs.push(null);
            }
        };
        rs.pipe(fs.createWriteStream(filePath, {flags: 'a', encoding: 'utf-8'}));
    }
}
However, I end up with all files populated with the same data, which is the array for the last key in the data object. It seems the readable stream is overwritten on each loop iteration, and the pipe() to the writable stream doesn't start until the for loop has finished. How is that possible?
The reason your code is probably not working is that the rs._read method is called asynchronously, and your key variable is function-scoped (because of the var keyword).
Every rs stream you create closes over the same key variable, so by the end of the main loop each of those callbacks sees the same value.
If you change var to let, a new key variable is created on each iteration and the problem goes away: each _read function gets its own copy of key instead of a shared one.
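In practice the minimal fix is the one-keyword change in the loop header; a sketch of just the relevant part:

// "let" gives each iteration its own "key" binding, so every rs._read
// closure reads its own key instead of the shared, final one.
for (let key in data) {
    // ... same body as before: build filePath, create rs, define rs._read, pipe to the write stream
}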
This is happening because the key you're defining in the loop statement is not block-scoped. This is not a problem at first, but when you create a closure over it inside the rs._read function, all subsequent stream reads use the last known value, which is the last key in the data object.
And while we're at it, I can propose a bit of refactoring to make the code cleaner and more reusable:
const writeStream = (folderPath, key, jobId) => {
    const filePath = `${folderPath}/${key}_${jobId}.txt`;
    return fs.createWriteStream(filePath, {
        flags: 'a', encoding: 'utf-8'
    });
};

// data is an object keyed by collection name, so iterate over its entries.
Object.entries(data).forEach(([key, value]) => {
    const n = value.length;
    if (n > 0) {
        const rs = new Readable();
        let i = 0;
        rs._read = () => {
            rs.push(value[i++]);
            if (i === n) rs.push(null);
        };
        rs.pipe(writeStream(folderPath, key, jobId));
    }
});

Why is my for-loop only counting to one object in the JSON-response?

I'm trying to make a map with map annotations generated from a REST JSON response. I've succeeded in making one, but the JSON response contains two objects, so why is only one printed out?
I'm using RestSharp and Xamarin.iOS.
Here's a Gist clone of the original response.
The function that grabs the data, used later to put map annotations on our map:
Action getAllMarkers = () => {
    var client = new RestClient("http://www.example.com/");
    var request = new RestRequest(String.Format("api/?function=searchByName&key=&name=Sundsvall"));

    client.ExecuteAsync (request, response => {
        JsonValue data = JsonValue.Parse(response.Content);

        for (var i = 0; i < data.Count; i++){
            Double lat = data["result"][i]["lat"];
            Double lng = data["result"][i]["lng"];
            String name = data["result"][i]["title"];
            String adress = data["result"][i]["adress"];

            var store = new BasicMapAnnotation (new CLLocationCoordinate2D(lat, lng), name, adress);
            Console.WriteLine(response.Content);

            InvokeOnMainThread ( () => {
                // manipulate UI controls
                map.AddAnnotation(store);
            });
        }
    });
};

getAllMarkers();
data.Count is 1, because there is one top-level "result" node in your JSON. Use data["result"].Count instead.
Your result is the main array that contains the rest of the data, so use data["result"].Count instead of data.Count.

Splitting a URL parameter in Node.js?

I have the following URL parameter (a sample value; the actual value contains many more parameters):
var data = "browserName=MSIE&cookies=Rgn=%7CCode%3DMUMBAI%7Ctext%3DMumbai%7C; NREUM=s=1376355292394&r=220970&p=2080092;cs_uuid=209712058942163; cs_si=209712058942163.1&javascriptEnabled=true";
The following function is used to get a particular parameter:
// Generic function to get a particular parameter
getParameterValue : function(data, parameter) {
    var value = null;
    if (data.length > 0) {
        var paramArray = data.split("&");
        for (var i = 0, len = paramArray.length; i < len; i++) {
            var param = paramArray[i];
            var splitter = parameter + "=";
            if (param.indexOf(splitter) > -1) {
                value = param.substring(param.indexOf(splitter) + splitter.length, param.length);
                break;
            }
        }
    }
    return value;
}
Example
getParameterValue(data, "browserName");
The output is MSIE // correct
The problem is:
getParameterValue(data, "cookies");
Output is
Rgn=%7CCode%3DMUMBAI%7Ctext%3DMumbai%7C; NREUM=s=1376355292394
But the required output is:
Rgn=%7CCode%3DMUMBAI%7Ctext%3DMumbai%7C; NREUM=s=1376355292394&r=220970&p=2080092;cs_uuid=209712058942163; cs_si=209712058942163.1
To know:
1. The URL parameter is encoded (client-side) before being sent to the Node server, and then decoded.
2. NREUM is not encoded, so the getParameterValue method only splits up to 1376355292394.
Any help improving the getParameterValue function is appreciated.
I'm ready to explain more.
Well, getParameterValue() does seem to be parsing data correctly, but data is not encoded properly to distinguish between delimiters and value characters.
The value of cookies, for example:
cookies=Rgn=%7CCode%3DMUMBAI%7Ctext%3DMumbai%7C; NREUM=s=1376355292394&r=220970&p=2080092;cs_uuid=209712058942163; cs_si=209712058942163.1
Should itself be encoded (on top of any encoding used for the cookie values themselves).
cookies=Rgn%3D%257CCode%253DMUMBAI%257Ctext%253DMumbai%257C%3B%20NREUM%3Ds%3D1376355292394%26r%3D220970%26p%3D2080092%3Bcs_uuid%3D209712058942163%3B%20cs_si%3D209712058942163.1
And, it's rather late to try to "fix" this server-side because of the ambiguity. It'll need to be encoded client-side, which can be done with encodeURIComponent().
'cookies=' + encodeURIComponent(cookies)
If you happen to be using jQuery, you can use jQuery.param() to help ensure proper encoding.
var data = {
    browserName: 'MSIE',
    cookies: document.cookie,
    javascriptEnabled: true
};
data = $.param(data); // browserName=MSIE&cookies=Rgn%3D%257CCode...
Or, you can use a light-weight recreation, at least for the "traditional" format.
function urlParam(params) {
    var res = [];
    for (var name in params) {
        res.push(encodeURIComponent(name) + '=' + encodeURIComponent(params[name]));
    }
    return res.join('&');
}

var data = urlParam(data);
Example: http://jsfiddle.net/GQpTB/
Also, are you aware that Node.js has querystring.parse()?
// with: var qs = require('querystring');
var query = qs.parse(data);
console.log(query.cookies); // ...
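To see the whole flow, here is a small illustrative sketch (the values below are made up for the example): once the cookies value is wrapped in encodeURIComponent() on the client, querystring.parse() on the Node side gives back the entire cookie string as a single value, embedded & and = included.

var qs = require('querystring');

// Query string as the client should build it, with encodeURIComponent() around the cookie value
// (illustrative value only):
var data = 'browserName=MSIE&cookies=' +
    encodeURIComponent('Rgn=%7CCode%3DMUMBAI%7C; NREUM=s=1376355292394&r=220970&p=2080092') +
    '&javascriptEnabled=true';

var query = qs.parse(data);
console.log(query.cookies);
// -> Rgn=%7CCode%3DMUMBAI%7C; NREUM=s=1376355292394&r=220970&p=2080092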
