Variable inside readline.createInterface({}).on({}) is not updating - node.js
Trying to construct a request body with values from a .csv file, but my points array is not getting updated. I've done console.log() for the points array inside the on() handler and at the end of it, and the values are there as expected. But when I console.log outside of the on(), it says the array is empty.
I know this is a scope issue, but I'm not that familiar with Node.js, so I can't figure it out. Thanks.
// Detect anomalies in your spreadsheet
var points = []
var dict = {}
// Read the .csv file, convert date to Date object and number to float.
readline.createInterface({
input: fs.createReadStream(CSV_FILE),
terminal: false
}).on('line', function(line) {
var row = line.split(",")
dict[new Date(row[0])] = parseFloat(row[1])
points.push(dict)
});
// Create request body for API call
let body = { series: points, granularity: 'hourly' }
// Make the call
anomalyDetectorClient.entireDetect(body)
.then((response) => {
for (item in response.isAnomaly) {
if (item) {
console.log("An anomaly was detected from the series.")
console.log("Value: " + response.expectedValues[response.indexOf(item)])
}
}
}).catch((error) => {
console.log(error)
})
UPDATE:
I'm trying to move the async function into the on(), so that points[] has values in it... but now there is a bad body in the request: my points array is pushing the same accumulating dictionary each time instead of a new dictionary. That needs to be fixed first, before I can look again at the un-updated points[] outside of the last on().
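A minimal sketch of why that accumulation happens (illustrative only): push stores a reference, so every element in the array points at the same shared object unless a new one is created per line.

var shared = {};
var out = [];
['a', 'b'].forEach(function(key, i) {
    shared[key] = i;
    out.push(shared); // pushes a reference to the same object each time
});
console.log(out); // [ { a: 0, b: 1 }, { a: 0, b: 1 } ] (both entries are the one accumulated object)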
Current code (I moved the API call into the last on() handler; previously it was outside of it, with an await before it):
async function main() {
let CSV_FILE = './request-data.csv'
// Detect anomalies in your spreadsheet
var points = []
// Read the .csv file
await readline.createInterface({
input: fs.createReadStream(CSV_FILE),
terminal: false
}).on('line', function(line) {
var row = line.split(",")
// Convert date to Date object and number to float.
var date = new Date(row[0])
var dict = {}
dict[date] = parseFloat(row[1])
points.push(dict)
console.log(points)
}).on('close', function() {
// Create request body for API call
let body = { series: points, granularity: 'hourly' }
console.log("Body series: " + body.series.timestamp + "Body granularity: " + body.granularity)
// Make the call
anomalyDetectorClient.entireDetect(body)
.then((response) => {
for (item in response.isAnomaly) {
if (item) {
console.log("An anomaly was detected from the series.")
console.log("Value: " + response.expectedValues[response.indexOf(item)])
}
}
}).catch((error) => {
console.log(error)
})
})
}
UPDATE 2:
Using the new code from the answer, there is a new error:
Body series: undefinedBody granularity: hourly
{ Error: Time points should be uniformly spaced in time in hourly granularity with 1 gran as interval, ratio of missing points should be less than 10%, between 2018-03-01 00:00:00 and 2018-04-16 00:00:00 there should be at least 994 points, but got 47 points.
What does this mean?
I'm confused because the body object looks good; it was printed with the error:
{ Error: Time points should be uniformly spaced in time in hourly granularity with 1 gran as interval, ratio of missing points should be less than 10%, between 2018-03-01 00:00:00 and 2018-04-16 00:00:00 there should be at least 994 points, but got 47 points.
at new RestError (C:\Users\v-wiazur\Desktop\Anomaly Detector\node_modules\@azure\ms-rest-js\dist\msRest.node.js:1399:28)
at C:\Users\v-wiazur\Desktop\Anomaly Detector\node_modules\@azure\ms-rest-js\dist\msRest.node.js:2494:37
at process._tickCallback (internal/process/next_tick.js:68:7)
code: 'InvalidSeries',
statusCode: 400,
request:
WebResource {
streamResponseBody: false,
url:
'https://winonaanomalydetector.cognitiveservices.azure.com/anomalydetector/v1.0/timeseries/entire/detect',
method: 'POST',
headers: HttpHeaders { _headersMap: [Object] },
body:
'{"series":[{"timestamp":"2018-03-01T00:00:00.000Z","value":32858923},{"timestamp":"2018-03-02T00:00:00.000Z","value":29615278},{"timestamp":"2018-03-03T00:00:00.000Z","value":22839355},{"timestamp":"2018-03-04T00:00:00.000Z","value":25948736},{"timestamp":"2018-03-05T00:00:00.000Z","value":34139159},{"timestamp":"2018-03-06T00:00:00.000Z","value":33843985},{"timestamp":"2018-03-07T00:00:00.000Z","value":33637661},{"timestamp":"2018-03-08T00:00:00.000Z","value":32627350},{"timestamp":"2018-03-09T00:00:00.000Z","value":29881076},{"timestamp":"2018-03-10T00:00:00.000Z","value":22681575},{"timestamp":"2018-03-11T00:00:00.000Z","value":24629393},{"timestamp":"2018-03-12T00:00:00.000Z","value":34010679},{"timestamp":"2018-03-13T00:00:00.000Z","value":33893888},{"timestamp":"2018-03-14T00:00:00.000Z","value":33760076},{"timestamp":"2018-03-15T00:00:00.000Z","value":33093515},{"timestamp":"2018-03-16T00:00:00.000Z","value":29945555},{"timestamp":"2018-03-17T00:00:00.000Z","value":22676212},{"timestamp":"2018-03-18T00:00:00.000Z","value":25262514},{"timestamp":"2018-03-19T00:00:00.000Z","value":33631649},{"timestamp":"2018-03-20T00:00:00.000Z","value":34468310},{"timestamp":"2018-03-21T00:00:00.000Z","value":34212281},{"timestamp":"2018-03-22T00:00:00.000Z","value":38144434},{"timestamp":"2018-03-23T00:00:00.000Z","value":34662949},{"timestamp":"2018-03-24T00:00:00.000Z","value":24623684},{"timestamp":"2018-03-25T00:00:00.000Z","value":26530491},{"timestamp":"2018-03-26T00:00:00.000Z","value":35445003},{"timestamp":"2018-03-27T00:00:00.000Z","value":34250789},{"timestamp":"2018-03-28T00:00:00.000Z","value":33423012},{"timestamp":"2018-03-29T00:00:00.000Z","value":30744783},{"timestamp":"2018-03-30T00:00:00.000Z","value":25825128},{"timestamp":"2018-03-31T00:00:00.000Z","value":21244209},{"timestamp":"2018-04-01T00:00:00.000Z","value":22576956},{"timestamp":"2018-04-02T00:00:00.000Z","value":31957221},{"timestamp":"2018-04-03T00:00:00.000Z","value":33841228},{"timestamp":"2018-04-04T00:00:00.000Z","value":33554483},{"timestamp":"2018-04-05T00:00:00.000Z","value":32383350},{"timestamp":"2018-04-06T00:00:00.000Z","value":29494850},{"timestamp":"2018-04-07T00:00:00.000Z","value":22815534},{"timestamp":"2018-04-08T00:00:00.000Z","value":25557267},{"timestamp":"2018-04-09T00:00:00.000Z","value":34858252},{"timestamp":"2018-04-10T00:00:00.000Z","value":34750597},{"timestamp":"2018-04-11T00:00:00.000Z","value":34717956},{"timestamp":"2018-04-12T00:00:00.000Z","value":34132534},{"timestamp":"2018-04-13T00:00:00.000Z","value":30762236},{"timestamp":"2018-04-14T00:00:00.000Z","value":22504059},{"timestamp":"2018-04-15T00:00:00.000Z","value":26149060},{"timestamp":"2018-04-16T00:00:00.000Z","value":35250105}],"granularity":"hourly"}',
query: undefined,
formData: undefined,
withCredentials: false,
abortSignal: undefined,
timeout: 0,
onUploadProgress: undefined,
onDownloadProgress: undefined,
proxySettings: undefined,
operationSpec:
{ httpMethod: 'POST',
path: 'timeseries/entire/detect',
urlParameters: [Array],
requestBody: [Object],
responses: [Object],
serializer: [Serializer] } },
response:
{ body:
'{"code":"InvalidSeries","message":"Time points should be uniformly spaced in time in hourly granularity with 1 gran as interval, ratio of missing points should be less than 10%, between 2018-03-01 00:00:00 and 2018-04-16 00:00:00 there should be at least 994 points, but got 47 points."}\n',
headers: HttpHeaders { _headersMap: [Object] },
status: 400 },
body:
{ code: 'InvalidSeries',
message:
'Time points should be uniformly spaced in time in hourly granularity with 1 gran as interval, ratio of missing points should be less than 10%, between 2018-03-01 00:00:00 and 2018-04-16 00:00:00 there should be at least 994 points, but got 47 points.' } }
Your array is getting updated. If you do console.log(points) inside your line event handler, you will see it accumulate data each time.
The issue is that you are looking at the array in the wrong place, before it has been populated. The readline interface is asynchronous. So, you call readline.createInterface(), set your event handler for the line event, and then your code just keeps on running. You are calling anomalyDetectorClient.entireDetect() immediately, before the readline operation has done its job.
The solution is to wait until the readline interface has finished reading the whole file and your points array is fully populated before you try to use the points array.
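A minimal sketch of that timing, just to illustrate (assumes a local file named data.csv):

const readline = require('readline');
const fs = require('fs');

const lines = [];
readline.createInterface({ input: fs.createReadStream('data.csv'), terminal: false })
    .on('line', (line) => lines.push(line));

// This line runs before a single 'line' event has fired:
console.log(lines.length); // 0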
Here's one way to do that, where you process the points array in the close event handler for the readline interface:
// Detect anomalies in your spreadsheet
var points = [];
// Read the .csv file, convert date to Date object and number to float.
readline.createInterface({
input: fs.createReadStream(CSV_FILE),
terminal: false
}).on('line', function(line) {
var row = line.split(",");
let point = {
timestamp: new Date(row[0]),
value: parseFloat(row[1])
};
points.push(point);
console.log(points);
}).on('close', function() {
// now we're done reading the file
console.log(points); // all the points are here
// now process the points
// Create request body for API call
let body = { series: points, granularity: 'hourly' };
// Make the call
anomalyDetectorClient.entireDetect(body).then((response) => {
for (let item in response.isAnomaly) {
if (item) {
console.log("An anomaly was detected from the series.")
console.log("Value: " + response.expectedValues[response.indexOf(item)])
}
}
}).catch((error) => {
console.log(error)
});
}).on('error', function(err) {
// handle errors here
console.log(err);
});
FYI, are you really sure you should be using item in response.isAnomaly? in is NOT for arrays; it's for properties of objects. It sorta, kinda works for arrays, but it can also fail in lots of cases. for/of is built specifically for arrays, as in for (let item of response.isAnomaly) {...}. Also, for an array, in gets you the index of the value, not the array value; of gets you the array value, which is usually what you want.
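A minimal illustration of the difference (note that an array index arrives as a string, so even "0" is truthy and if (item) is always true with for/in):

const flags = [false, true, false];
for (const i in flags) console.log(i); // "0", "1", "2" (string indexes, all truthy)
for (const v of flags) console.log(v); // false, true, false (the actual values)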
Related
Why does the Steam Web API return an undefined JSON value in Node.js
Hello, this code makes a request to the Steam Web API:

const request = require("request");
request({
    url: "https://api.steampowered.com/IEconService/GetTradeOffers/v1/?key=MYAPIKEY&get_sent_offers=1&active_only=1&format=json",
    json: true
}, (err, responser, body) => {
    console.log(body.response['trade_offers_sent']);
});

And this is what it returns:

{ trade_offers_sent:
   [ { tradeofferid: '3974708687',
       accountid_other: 82613664,
       message: 'test message',
       expiration_time: 1587017229,
       trade_offer_state: 9,
       items_to_give: [Array],
       is_our_offer: true,
       time_created: 1585807629,
       time_updated: 1585807629,
       from_real_time_trade: false,
       escrow_end_date: 0,
       confirmation_method: 2 } ] }

But when I try to get the value of accountid_other this way:

console.log(body.response['trade_offers_sent'].accountid_other);

it returns undefined.
The square brackets [ ... ] within trade_offers_sent indicate that it holds an array, not just a single item (it just happens to be a single item in the array). Thus, to obtain the value you are interested in, you need to specify the index of the item you want, or use a loop to go through it. In this case you want the first item, and you can use ['trade_offers_sent'][0] to obtain it.
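For example, with the response shown above:

console.log(body.response['trade_offers_sent'][0].accountid_other); // 82613664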
How to query batch by batch from Elasticsearch in Node.js
I'm trying to get data from Elasticsearch in my node application. My index has 1 million records, so the whole set can't be sent to another service in one shot. That's why I want to fetch a batch of records per request, as in this example:

const getCodesFromElasticSearch = async (batch) => {
    let startingCount = 0;
    if (batch > 1) {
        startingCount = (batch * 1000);
    } else if (batch === 1) {
        startingCount = 0;
    }
    return await esClient.search({
        index: `myIndex`,
        type: 'codes',
        _source: ['column1', 'column2', 'column3'],
        body: {
            from: startingCount,
            size: 1000,
            query: {
                bool: {
                    must: [ .... ],
                    filter: { .... }
                }
            },
            sort: {
                sequence: {
                    order: "asc"
                }
            }
        }
    }).then(data => data.hits.hits.map(esObject => esObject._source));
}

It still works when batch=1. But when it goes to batch=2, I hit the problem that from should not be larger than 10,000, as per the documentation. And I don't want to change max_records either. Please let me know an alternate way to get the records 10,000 by 10,000.
The scroll API can be used to retrieve large numbers of results (or even all results) from a single search request, in much the same way as you would use a cursor on a traditional database. So you can use the scroll API to get your whole 1M dataset, something like below, without using from: a normal Elasticsearch search has a limit of 10k records per request, so when you try to use from with a greater value it returns an error. That's why scrolling is a good solution for this kind of scenario.

let allRecords = [];
// first we do a search, and specify a scroll timeout
var { _scroll_id, hits } = await esclient.search({
    index: 'myIndex',
    type: 'codes',
    scroll: '30s',
    body: {
        query: { "match_all": {} },
        _source: ["column1", "column2", "column3"]
    }
})
while (hits && hits.hits.length) {
    // Append all new hits
    allRecords.push(...hits.hits)
    console.log(`${allRecords.length} of ${hits.total}`)
    var { _scroll_id, hits } = await esclient.scroll({
        scrollId: _scroll_id,
        scroll: '30s'
    })
}
console.log(`Complete: ${allRecords.length} records retrieved`)

You can also add your query and sort to these existing code snippets.

As per the comment:

Step 1: Do a normal esclient.search and get the hits and _scroll_id. Here you need to send the hits data to your other service and keep the _scroll_id for the next batch of data.

Step 2: Use the _scroll_id from the first batch and use a while loop with esclient.scroll until you get all your 1M records. Keep in mind that you don't need to wait for all 1M records; within the while loop, when you get a response back, just send it to your service batch by batch.

See Scroll API: https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/scroll_examples.html
See Search After: https://www.elastic.co/guide/en/elasticsearch/reference/5.2/search-request-search-after.html
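A minimal sketch of that batch-by-batch flow (illustrative only; sendToService is a hypothetical stand-in for the call to your other service):

// Hypothetical helper representing your downstream service call
async function sendToService(batch) { /* POST the batch somewhere */ }

let { _scroll_id, hits } = await esclient.search({
    index: 'myIndex',
    type: 'codes',
    scroll: '30s',
    body: { query: { match_all: {} }, _source: ['column1', 'column2', 'column3'] }
});
while (hits && hits.hits.length) {
    // forward each batch as it arrives instead of accumulating all 1M in memory
    await sendToService(hits.hits.map(h => h._source));
    ({ _scroll_id, hits } = await esclient.scroll({ scrollId: _scroll_id, scroll: '30s' }));
}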
How to get valid address through @google/maps?
In a Node.js microservice I'm using "@google/maps": "^0.5.5":

googleMapsClient.geocode({address: '160 Market St, Paterson, NJ 07505'})
    .asPromise()
    .then((response) => {
        console.log("result: " + JSON.stringify(response.json));
    })
    .catch((err) => {
        console.log("error: " + err);
    });

In the response I get "location_type":"ROOFTOP" and "types":["street_address"], which means the address is valid.

If I try to validate the same address but with an invalid state, e.g. 'NO', it still returns "location_type":"ROOFTOP" and "types":["street_address"]. I assume this is because the Google API reformats it, as can be seen in the response: "formatted_address":"160 Market St, Paterson, NJ 07505, USA". Sometimes the Google API returns "location_type":"ROOFTOP" and "types":["premise"].

Sure, I can filter the results by location_type and types, but I really want to consider an address valid only if it can be found in the @types/googlemaps Autocomplete. This is what I'm using in the UI (Angular) with "@types/googlemaps": "3.30.16":

const autocomplete = new google.maps.places.Autocomplete(e.target, { types: ['address'] });
var place = google.maps.places.PlaceResult = autocomplete.getPlace();

Even though it's defined as types: ['address'] in Autocomplete, an address can come back as "types":["street_address"] or "types":["premise"] in "@google/maps". So how do I make Node.js return only addresses that can be found in Autocomplete?
Since the Places API is also supported by the @google/maps library, it could be accomplished like this:

//1. query predictions
googleMapsClient.placesQueryAutoComplete(
    { input: "160 Market St, Paterson, NJ 07505" },
    function(err, response) {
        if (!err) {
            if (response.json.predictions.length === 0) {
                console.log("Place not found");
            } else {
                var prediction = response.json.predictions[0]; //select first prediction
                //2. query place by prediction
                googleMapsClient.place(
                    { placeid: prediction.place_id },
                    function(err, response) {
                        if (!err) {
                            console.log(response.json.result); //print place
                        }
                    }
                );
            }
        }
    }
);

Explanation: the placesQueryAutoComplete function is utilized first; it returns an array of query predictions based on the query. The place function then returns the place details, given the placeId parameter extracted from the previous response.
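The same flow could likely be flattened with the library's .asPromise() helper, a sketch under the assumption that placesQueryAutoComplete and place support it the way geocode does in the question:

googleMapsClient.placesQueryAutoComplete({ input: "160 Market St, Paterson, NJ 07505" })
    .asPromise()
    .then((response) => {
        if (response.json.predictions.length === 0) throw new Error("Place not found");
        // query place details for the first prediction
        return googleMapsClient.place({ placeid: response.json.predictions[0].place_id }).asPromise();
    })
    .then((response) => console.log(response.json.result))
    .catch((err) => console.log(err));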
How to track iteration progress using Rally's Web Services API
I am writing a custom app to track iteration progress by day. Is there a built-in way in Rally to get the number of user stories that are in the "Accepted" state on a specific date, and the number of points, or do I have to get all user stories and parse their revision histories?
There is an IterationCumulativeFlowData object in the WS API, which is populated at midnight of the Workspace timezone when the data collection runs, on the workdays specified in the Workspace Setup screen. Data is stored for each day of the iteration and a corresponding state. There is a CumulativeFlowData object for Day 1 of the iteration for everything in a Defined state, Day 1 of the release for everything in an In-Progress state, etc. The CumulativeFlowData object also stores CardEstimateTotal, which is the sum of the estimates of cards in every state.

Here is an example of an app written with rally-node that returns iteration data for a specific state (Accepted) as of the last day of the iteration. In this example the CreationDate of the last result is 2013-08-27T06:00:00.000Z, while the EndDate of the iteration in question was 2013-08-27 11:59:59 PM America/Denver (which is 2013-08-28T05:59:59.000Z), so I had to manipulate a date in order to make this query condition return the data for the last day of the iteration:

query = query.and('CreationDate', '>', endDateMinusOneDay);

Here is the full js file of the example:

var rally = require('rally'),
    queryUtils = rally.util.query,
    restApi = rally({
        user: 'user@co.com',
        pass: 'secret',
        apiVersion: 'v2.0',
        server: 'https://rally1.rallydev.com',
        requestOptions: {
            headers: {
                'X-RallyIntegrationName': 'My cool node.js program',
                'X-RallyIntegrationVendor': 'My company',
                'X-RallyIntegrationVersion': '1.0'
            },
        }
    });

function findIteration() {
    return restApi.query({
        type: 'Iteration',
        start: 1,
        pageSize: 2,
        limit: 10,
        fetch: ['ObjectID', 'EndDate'],
        scope: {
            project: '/project/12352608219',
            up: false,
            down: false
        },
        query: queryUtils.where('Name', '=', 'i777')
    });
}

function queryIterationData(result) {
    var endDate = result.Results[0].EndDate,
        oid = result.Results[0].ObjectID;
    console.log('endDate', endDate);
    var date1 = new Date(endDate);
    var ms = date1.getTime() - 86400000; //86400000 is the number of milliseconds in a day
    var date2 = new Date(ms);
    var endDateMinusOneDay = date2.toISOString();
    console.log('date2 ISO', date2.toISOString());
    var query = queryUtils.where('IterationObjectID', '=', oid);
    query = query.and('CardState', '=', 'Accepted');
    query = query.and('CreationDate', '>', endDateMinusOneDay);
    return restApi.query({
        type: 'IterationCumulativeFlowData',
        fetch: ['CardCount', 'CardEstimateTotal', 'CardState', 'CreationDate'],
        query: query,
    });
}

function onSuccess(result) {
    console.log('Success!', result);
}

function onError(errors) {
    console.log('Failure!', errors);
}

findIteration()
    .then(queryIterationData)
    .then(onSuccess)
    .fail(onError);

It returns:
jQuery UI Autocomplete: only one character is displayed per list item
I'm using jquery-1.4.2.min and jquery-ui-1.8.6.custom to get autocomplete data on a JSP page. Here is the code snippet:

$(document).ready(function() {
    $("input#airportfrom").autocomplete({
        minLength: 3,
        source: function(request, response) {
            $.ajax({
                url: "MLocationLookupSearchPopUpAuto.action?LANGUAGE=${param.LANGUAGE}&SITE=${param.SITE}&RESULT_FILTER=3",
                dataType: "text/html",
                data: { MATCH: $("#airportfrom").val() },
                success: function(data) {
                    response(data);
                }
            });
        }
    });
});

The result returned is correct, as I have used alert(data); inside the success function and it gave the correct result, but the list is showing one character or one alphabet per line. Hence, if I want to get LONDON, it is displayed as:

l
o
n
d
o
n

Any idea why this is happening? Do we have to give the data as JSON only, since here I'm getting the data from a JSP?
Try to split the response data into lines using "\n":

$("#tags").autocomplete({
    source: function(request, response) {
        $.ajax({
            dataType: "text",
            url: 'yourscript.php?extraParam=foo',
            data: { term: request.term },
            success: function(data) {
                var lines = data.split("\n");
                response(lines);
            }
        });
    }
});
I had the same problem and it was down to serializing the object twice (by mistake) on the server side. The JSON data returned to the client was being de-serialized to a string rather than an array.
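A minimal illustration of that double-serialization mistake (plain JavaScript; assumes the server builds its reply with JSON.stringify):

const suggestions = ["london", "leeds"];
JSON.stringify(suggestions);                 // '["london","leeds"]' (client parses this to an array)
JSON.stringify(JSON.stringify(suggestions)); // '"[\"london\",\"leeds\"]"' (client parses this to a string)
// jQuery UI iterates a string character by character, which yields one letter per list item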