writeFileSync not writing all variables to a JSON file - Node.js

I have this code:
const fs = require('fs');

const circular = () => { // fix circular references for JSON.stringify
  const seen = new WeakSet();
  return (key, value) => {
    if (typeof value === 'object' && value !== null) {
      if (seen.has(value)) {
        return;
      }
      seen.add(value);
    }
    return value;
  };
};

var gameon = 1;
var fighter1 = {"userid":"97","username":"john","items":{},"ailments":{}};
var fighter2 = {"userid":"91","username":"james","items":{},"ailments":{}};
var resume = 30;
var all = {gameon:gameon,fighter1:fighter1,fighter2:fighter2,resume:resume,inturn:fighter1,outturn:fighter2};
fs.writeFileSync(file, JSON.stringify(all, circular()), {encoding:'utf8', flag:'w'});
I expect the following output to be written to the file:
{
  "gameon":1,
  "fighter1":{
    "userid":"97",
    "username":"john",
    "items":{},
    "ailments":{}
  },
  "fighter2":{
    "userid":"91",
    "username":"james",
    "items":{},
    "ailments":{}
  },
  "resume":"",
  "inturn":{
    "userid":"97",
    "username":"john",
    "items":{},
    "ailments":{}
  },
  "outturn":{
    "userid":"91",
    "username":"james",
    "items":{},
    "ailments":{}
  }
}
but this is what I get instead:
{
  "gameon":1,
  "fighter1":{
    "userid":"97",
    "username":"john",
    "items":{},
    "ailments":{}
  },
  "fighter2":{
    "userid":"91",
    "username":"james",
    "items":{},
    "ailments":{}
  },
  "resume":""
}
Notice how the output is truncated after "resume", as if it couldn't serialize fighter1 and fighter2 again, even though it did so the first time.
Why is that?
Thank you.
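For what it's worth, that replacer drops any object it has already added to the WeakSet, even when the repeated reference is not actually circular, so JSON.stringify omits the keys whose values were serialized earlier. A minimal standalone sketch of that behaviour (plain Node.js, nothing assumed beyond the replacer above):
const circular = () => {
  const seen = new WeakSet();
  return (key, value) => {
    if (typeof value === 'object' && value !== null) {
      if (seen.has(value)) {
        return; // returning undefined makes JSON.stringify drop the key
      }
      seen.add(value);
    }
    return value;
  };
};

const shared = { name: 'john' };
// "second" is the same object as "first", so the replacer has already
// seen it and the key is omitted, just like inturn/outturn above.
console.log(JSON.stringify({ first: shared, second: shared }, circular()));
// -> {"first":{"name":"john"}}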

Related

nodejs - convert one obj to some objects

I have an object that I received from the client:
const record = {
  "date":"2021-09-20",
  "startAt":"10:00",
  "endAt":"16:00",
  "employeeId": [5,2],  // <==
  "projectId":[50,48],  // <==
  "notes":"blahhh"
}
I want to convert it into several objects with the same details, one per employee/project combination.
For example, employee 5 worked on two projects, 50 and 48, so I want two objects with the same details where employeeId is 5 but the projectId differs: the first object with 50 and the second with 48:
{
  ....
  "employeeId": [5], // first employee
  "projectId": [50], // first project
  ....
}
{
  ....
  "employeeId": [5], // first employee
  "projectId": [48], // second project
  ...
}
{
  ....
  "employeeId": [2], // second employee
  "projectId": [50], // first project
  ....
}
{
  ....
  "employeeId": [2], // second employee
  "projectId": [48], // second project
  ....
}
Thank you for helping me.
For example, you can do:
const record = {
  date: "2021-09-20",
  startAt: "10:00",
  endAt: "16:00",
  employeeId: [5, 2],
  projectId: [50, 48],
  notes: "blahhh",
};

const records = record.employeeId
  .map((employeeId) =>
    record.projectId.map((projectId) => ({
      ...record,
      employeeId: [employeeId],
      projectId: [projectId]
    }))
  )
  .flat();

console.log(records);
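As a side note, the same map-then-flatten step can be written with Array.prototype.flatMap (available since Node 11); a minimal equivalent sketch reusing the record constant above:
// One entry per employee/project pair, flattened in a single pass.
const recordsViaFlatMap = record.employeeId.flatMap((employeeId) =>
  record.projectId.map((projectId) => ({
    ...record,
    employeeId: [employeeId],
    projectId: [projectId],
  }))
);

console.log(recordsViaFlatMap.length); // 4: two employees x two projects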
1- Extract all the keys that have array values:
let arrayKeys = Object.entries(record).filter(([key, value]) => typeof value === 'object' && Array.isArray(value))
2- Create your default object with the non-array keys:
let defaultObj = Object.entries(record).reduce((obj, [key, value]) => {
  if (!Array.isArray(value)) {
    obj[key] = value
  }
  return obj;
}, {})
3- Create a function that recursively fills an array with your final objects:
function addKeys(array, obj, keys) {
  if (!keys.length) {
    array.push(obj);
    return;
  }
  let [key, values] = keys.pop();
  values.forEach(val => {
    obj[key] = [val];
    addKeys(array, {...obj}, [...keys])
  });
}
Full code:
const record = {
  "date":"2021-09-20",
  "startAt":"10:00",
  "endAt":"16:00",
  "employeeId": [5,2],
  "projectId":[50, 48, 60, 70],
  "notes":"blahhh",
  "extraArrayKey": ['a', 'b']
}

let arrayKeys = Object.entries(record).filter(([key, value]) => typeof value === 'object' && Array.isArray(value))

let defaultObj = Object.entries(record).reduce((obj, [key, value]) => {
  if (!Array.isArray(value)) {
    obj[key] = value
  }
  return obj;
}, {})

function addKeys(array, obj, keys) {
  if (!keys.length) {
    array.push(obj);
    return;
  }
  let [key, values] = keys.pop();
  values.forEach(val => {
    obj[key] = [val];
    addKeys(array, {...obj}, [...keys])
  });
}

let output = [];
addKeys(output, defaultObj, arrayKeys)
console.log(output)

Unable to write item(s) to DynamoDB table utilizing DocumentClient - Nodejs

I'm absolutely brand new to DynamoDB and I'm trying to simply write an object from a Node.js Lambda. Based on what I've read and researched, I should probably be using DocumentClient from the aws-sdk. I also found the following question here regarding issues with DocumentClient, but it doesn't seem to address my specific issue... which I can't really find/pinpoint, unfortunately. I've set up a debugger to help with SAM local development, but it appears to only be providing some of the errors.
The code's implementation is shown here.
var params = {
  TableName: "March-Madness-Teams",
  Item: {
    "Id": {"S": randstring.generate(9)},
    "School": {"S": team_name},
    "Seed": {"S": seed},
    "ESPN_Id": {"S": espn_id}
  }
}

console.log(JSON.stringify(params))

dynamodb.put(params, (error, data) => {
  if (error) {
    console.log("Error ", error)
  } else {
    console.log("Success! ", data)
  }
})
Basically I'm scraping a website using the cheerio library, cherry-picking values from the DOM, and saving them into the JSON object shown below.
{
  "TableName": "March-Madness-Teams",
  "Item": {
    "Id": {
      "S": "ED311Oi3N"
    },
    "School": {
      "S": "BAYLOR"
    },
    "Seed": {
      "S": "1"
    },
    "ESPN_Id": {
      "S": "239"
    }
  }
}
When I attempt to push this JSON object to DynamoDB, I get an error saying:
Error MultipleValidationErrors: There were 2 validation errors:
* MissingRequiredParameter: Missing required key 'TableName' in params
* MissingRequiredParameter: Missing required key 'Item' in params
The above error is all well and good... I assume it didn't like the fact that I had wrapped those two keys in strings, so I removed the quotes and sent the following:
{
  TableName: "March-Madness-Teams",
  Item: {
    "Id": {
      "S": "ED311Oi3N"
    },
    "School": {
      "S": "BAYLOR"
    },
    "Seed": {
      "S": "1"
    },
    "ESPN_Id": {
      "S": "239"
    }
  }
}
However, when I do that... I get nothing at all.
Here is a larger code snippet.
return new Promise((resolve, reject) => {
  axios.get('http://www.espn.com/mens-college-basketball/bracketology')
    .then(html => {
      const dynamodb = new aws.DynamoDB.DocumentClient()
      let $ = cheerio.load(html.data)
      $('.region').each(async function (index, element) {
        var preregion = $(element).children('h3,b').text()
        var region = preregion.substr(0, preregion.indexOf('(') - 1)
        $(element).find('a').each(async function (index2, element2) {
          var seed = $(element2).siblings('span.rank').text()
          if (seed.length > 2) {
            seed = $(element2).siblings('span.rank').text().substring(0, 2)
          }
          var espn_id = $(element2).attr('href').split('/').slice(-2)[0]
          var team_name = $(element2).text()
          var params = {
            TableName: "March-Madness-Teams",
            Item: {
              "Id": randstring.generate(9),
              "School": team_name,
              "Seed": seed,
              "ESPN_Id": espn_id
            }
          }
          console.log(JSON.stringify(params))
          // dynamodb.put(params)
          //   .then(function(data) {
          //     console.log(`Success`, data)
          //   })
        })
      })
    })
})
Can you try without the type?
Instead of
"School":{"S": team_name},
for example, use
"School": team_name,
From your code, I can see the missing promise on the dynamodb request. Try changing these lines:
dynamodb.put(params).then(function(data) {
  console.log(`Success`, data)
})
to:
dynamodb.put(params).promise().then(function(data) {
  console.log(`Success`, data)
})
You can combine it with await too:
await dynamodb.put(params).promise().then(function(data) {
  console.log(`Success`, data)
})
exports.lambdaHandler = async (event, context) => {
  const html = await axios.get('http://www.espn.com/mens-college-basketball/bracketology')
  let $ = cheerio.load(html.data)
  const schools = buildCompleteSchoolObject(html, $)
  try {
    await writeSchoolsToDynamo(schools)
    return { statusCode: 200 }
  } catch (error) {
    return { statusCode: 400, message: error.message }
  }
}

const writeSchoolsToDynamo = async (schools) => {
  const promises = schools.map(async school => {
    await dynamodb.put(school).promise()
  })
  await Promise.all(promises)
}

const buildCompleteSchoolObject = (html, $) => {
  const schools = []
  $('.region').each(loopThroughSubRegions(schools, $))
  return schools
}

const loopThroughSubRegions = (schools, $) => {
  return (index, element) => {
    var preregion = $(element).children('h3,b').text()
    var region = preregion.substr(0, preregion.indexOf('(') - 1)
    $(element).find('a').each(populateSchoolObjects(schools, $))
  }
}

const populateSchoolObjects = (schools, $) => {
  return (index, element) => {
    var seed = $(element).siblings('span.rank').text()
    if (seed.length > 2) {
      seed = $(element).siblings('span.rank').text().substring(0, 2)
    }
    var espn_id = $(element).attr('href').split('/').slice(-2)[0]
    var team_name = $(element).text()
    schools.push({
      TableName: "March-Madness-Teams",
      Item: {
        "Id": randstring.generate(9),
        "School": team_name,
        "Seed": seed,
        "ESPN_Id": espn_id
      }
    })
  }
}
I know this is drastically different from what I started with, but I did some more digging and kind of worked my way to this... I'm not sure if this is the best way, but I seemed to get it to work. Let me know if something should change!
Oh, I understand what you want.
The code above works, but there is one concept you should improve here around async/await and promises, especially in a Lambda function.
I have some notes on your code above that might help you improve your Lambda:
Using await for every single promise in a Lambda is not the best approach because of the Lambda time limit, although sometimes it is fine for other cases.
Maybe you can change the dynamodb.put calls to a single batchWrite call (the DocumentClient wrapper for BatchWriteItem):
The BatchWriteItem operation puts or deletes multiple items in one or more tables.
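A rough sketch of what that could look like here, assuming the same DocumentClient instance (dynamodb) and the schools array of { TableName, Item } objects built above. One BatchWriteItem call accepts at most 25 requests, so the items are chunked; for brevity, retrying UnprocessedItems is left out:
// Sketch only: batchWrite is the DocumentClient wrapper for BatchWriteItem.
const writeSchoolsToDynamo = async (schools) => {
  const putRequests = schools.map(school => ({
    PutRequest: { Item: school.Item }
  }));

  // BatchWriteItem accepts at most 25 put/delete requests per call.
  const batches = [];
  for (let i = 0; i < putRequests.length; i += 25) {
    batches.push(putRequests.slice(i, i + 25));
  }

  await Promise.all(batches.map(batch =>
    dynamodb.batchWrite({
      RequestItems: { "March-Madness-Teams": batch }
    }).promise()
  ));
};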
Or, if you have to use dynamodb.put instead, try to improve the code like so:
const writeSchoolsToDynamo = async (schools) => {
  const promises = schools.map(school => dynamodb.put(school).promise())
  return Promise.all(promises)
}

Array full of regex, match to string, return path

Still very much a beginner; just editing to make a little more sense.
Here is an example payload.
{
  "Status":
  {
    "result":[
      {
        "LastUpdate":"2017-09-07 06:47:09",
        "Type":"2",
        "Value":"' s the inside temperature",
        "idx":"4"
      }
    ],
    "status":"OK",
    "title":"GetUserVariable"
  },
  "Devices":
  {
    "28":
    {
      "rid":"28",
      "regex":"(the )?(AC|(Air( )?(Con)?(ditioner)?))( Power)?( )?$/i"
    },
    "71":
    {
      "rid":"71",
      "regex":"(the )?inside temp/i"
    }
  }
}
I want to filter the "Devices" list down to the entries whose regex matches Status.result[0].Value.
I have the following code working, but it works in reverse: it returns the matching string rather than the filtered devices, and I'm just not sure how to turn it around.
var devices = msg.payload.Devices;
var request = [ msg.payload.Status.result[0].Value ];

var matches = request.filter(function (text) {
  return devices.some(function (regex) {
    var realregex = new RegExp(regex, "i");
    return realregex.test(text);
  });
});

msg = { topic:"Inputs", payload: { devices:devices, request:request } };
msg2 = { topic:"Output", payload: { matches:matches } };
return [ [ msg, msg2 ] ];
Thanks,
Wob
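One way to invert that check would look roughly like the following sketch, assuming Devices stays keyed by device id as in the payload above and that Object.values is available in the Node.js version in use:
var value = msg.payload.Status.result[0].Value;

// Keep only the device entries whose regex matches the request text.
var matchingDevices = Object.values(msg.payload.Devices).filter(function (device) {
  // The stored pattern ends in "/i", so strip that suffix before compiling.
  var pattern = device.regex.replace(/\/i$/, "");
  return new RegExp(pattern, "i").test(value);
});

msg.payload = { matches: matchingDevices };
return msg;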

Firebase Flashlight (ElasticSearch) filtering, sorting, pagination

I am using the Flashlight Firebase plugin.
I am using this example and it's working fine.
In the example you can see that the example.js file has a method for querying, as below:
// display search results
function doSearch(index, type, query) {
  var ref = database.ref().child(PATH);
  var key = ref.child('request').push( { index: index, type: type, query: query } ).key;
  ref.child('response/'+key).on('value', showResults);
}
The above function returns results when I pass values like the following JSON:
{ index: index, type: type, query: query }
It returns nothing when I try to pass values like the following JSON:
{ index: index, type: type, query: { "from": 1, "size": 5, "query": query } }
but the following ElasticSearch API call returns the result:
http://localhost:9200/firebase/user/_search?q=*mani*&pretty&size=5&from=1
And how do I apply filtering to the query with Flashlight, like the following?
{
  "query": {
    "filtered": {
      "query": {
        "query_string": {
          "query": "drama"
        }
      },
      "filter": {
        // Filter to apply to the query
      }
    }
  }
}
I am using the following security rules:
{
  "rules": {
    ".read": false,
    ".write": false,
    "search": {
      "request": {
        "$recid": {
          // I can only read records assigned to me
          ".read": "auth.id === data.child('id').val() || auth.uid === data.child('id').val()",
          // I can only write new records that don't exist yet
          ".write": "!data.exists() && (newData.child('id').val() === auth.id || newData.child('id').val() === auth.uid)",
          ".validate": "newData.hasChildren(['query', 'index', 'type'])",
          "index": {
            // accepts arrays or strings
            ".validate": "(newData.isString() && newData.val().length < 1000) || newData.hasChildren()",
            "$child": {
              ".validate": "newData.isString() && newData.val().length < 1000"
            }
          },
          "type": {
            // accepts arrays or strings
            ".validate": "(newData.isString() && newData.val().length < 1000) || newData.hasChildren()",
            "$child": {
              ".validate": "newData.isString() && newData.val().length < 1000"
            }
          },
          "query": {
            // structure of the query object is pretty open-ended
            ".validate": "newData.isString() || newData.hasChildren()"
          },
          "$other": {
            ".validate": false
          }
        }
      },
      "response": {
        "$recid": {
          // I can only read/write records assigned to me
          ".read": "auth.id === data.child('id').val() || auth.uid === data.child('id').val()",
          ".write": "auth.id === data.child('id').val() || auth.uid === data.child('id').val()",
          // Assumes that Flashlight will be writing the records using a secret or a token that has admin: true
          // The only thing a logged in user needs to do is delete results after reading them
          ".validate": false
        }
      }
    }
  }
}
Please let me know how to perform complex queries and filtering with Flashlight.
Finally, I did it myself. Here is the solution.
You need to update SearchQueue.js:
_process: function (snap) {
  var dat = snap.val();
  var key = snap.key;
  if (this._assertValidSearch(key, dat)) {
    // get your query string
    var q = dat.query.query;
    console.log('search', "test", JSON.stringify(dat, null, 2));
    // build your ES query
    //var q1 = {"query":{"match":{"_all":q}}};
    // Perform (a very simple) ElasticSearch query
    this.esc.search({
      index: dat.index,
      type: dat.type,
      // add options
      from: dat.query.from,
      size: dat.query.size,
      // add ES Query
      //body : q1
      q: dat.query.query
    }, function (error, response) {
      if (error) {
        this._reply(key, {error: error, total: 0});
      } else {
        this._reply(key, response);
      }
    }.bind(this));
  }
}
and update Example.js
// display search results
function doSearch(index, type, query) {
  var ref = database.ref().child(PATH);
  var jsonOBJ = {
    index: index,
    type: type,
    query: { size: 1, from: 0, query: query },
  };
  var key = ref.child('request').push(jsonOBJ).key;
  console.log('search', key, JSON.stringify(jsonOBJ, null, 2));
  ref.child('response/'+key).on('value', showResults);
}

RequireJS - jQuery UI autocomplete does not work

I'm getting the following error when I try to use jQuery autocomplete together with RequireJS:
'cannot call methods on autocomplete prior to initialization; attempted to call method '/recruiter/temp-search/api/locations/get''
I have a module which initializes my autocomplete:
define(['jqueryUi'], function ($) {
  function Location() {
    this.autocompleteUrl = "/recruiter/temp-search/api/locations/get";
  };

  Location.prototype.initAutocomplete = function ($txtTown, onSuccessDelegate, countryId, regionId, ignoredInputHandler, includeCountries) {
    ///<param name="$txtTown" type="jQuery">input text element to decorate with autocomplete</param>
    ///<param name="onSuccessDelegate" type="function">Invoked upon item selection with the selected value passed as a parameter</param>
    ///<param name="regionId" type="int">Region constraint. Defaults to null</param>
    ///<param name="countryId" type="int">Country Id. Defaults to UK id</param>
    ///<param name="ignoredInputHandler" type="function">
    /// function(string term, function success(string term, {data, result, value}[] data), function failure(string term)) delegate
    /// that is invoked on autocomplete requests before the user has input at least 2 chars
    ///</param>
    var cId = countryId ? countryId : null;
    var rId = regionId ? regionId : null;
    var inclCountries = includeCountries === undefined ? false : includeCountries;
    var onSuccess = onSuccessDelegate ? onSuccessDelegate : function () {};

    $txtTown.autocomplete(this.autocompleteUrl, {
      dataType: 'json',
      parse: function (data) {
        /* validation Location*/
        /*To remove error field check on the parentsearch.js self.elements.searchLocation.on('keyup'*/
        if ($txtTown.selector === "#Location") {
          if (data.Locations.length == 0 && !data.IsPostcode && $txtTown.val().length > 0) {
            var locationError = "We couldn't find the location you entered";
            jQuery("[data-valmsg-for = 'Location']").text(locationError);
            $('#Location').closest('.search-row').find('.search-error').show();
          } else {
            jQuery("[data-valmsg-for = 'Location']").text("");
          }
        }
        /**/
        var rows = [];
        if ($.isArray(data.Locations)) {
          var locations = data.Locations;
          if (locations !== null) {
            for (var i = 0; i < locations.length; i++) {
              rows[i] = { data: locations[i], value: locations[i], result: locations[i] };
            }
          }
        } else {
          if (data.IsPostcode) {
            onSuccess(data.Location, data.Postcode);
          }
        }
        return rows;
      },
      extraParams: { countryId: cId, regionId: rId, includeCountries: inclCountries },
      formatItem: function (row) { return row; },
      width: 'auto',
      minChars: 2,
      delay: 100,
      max: 10,
      ignoredInputHandler: ignoredInputHandler,
      selectFirst: false,
      cacheLength: 1,
      scroll: false
    }).result(function (event, row) {
      onSuccess(row);
    });
  };

  return new Location();
});
It is being called like this:
location.initAutocomplete(this.elements.searchLocation, onSuccessAutocomplete, undefined, undefined, undefined, true);
Here is my config file:
require.config({
  paths: {
    "JqueryAutocomplete": "../scripts/jquery/plugins/jquery.autocomplete",
    "jqueryValidate": "../scripts/jquery/plugins/jquery.validate",
    "jqueryUi": "../scripts/jquery/plugins/jquery-ui-1.10.3.custom",
    "jquery": "../scripts/jquery/jquery-1.9.1",
    "knockout": "../scripts/knockout/knockout-2.3.0",
    "ko": "common/knockout-extensions"
  },
  shim: {
    "JqueryAutocomplete": {
      exports: "$",
      deps: ['jquery']
    },
    "jqueryValidate": {
      exports: "$",
      deps: ['jquery']
    },
    "jqueryUi": {
      exports: "$",
      deps: ['jquery']
    },
    "knockout-extensions": {
      exports: "knockout",
      deps: ['knockout']
    }
  }
});
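For comparison, jQuery UI's autocomplete widget takes a single options object; passing a string as the first argument is treated as a method-name call, which is exactly what produces the "cannot call methods on autocomplete prior to initialization" message. A minimal sketch of the jQuery UI call shape, reusing $txtTown and onSuccess from the module above and moving the URL into the source option:
// Sketch only: jQuery UI autocomplete expects (options), not (url, options).
// "source" may be a URL string returning JSON, an array, or a function(request, response).
$txtTown.autocomplete({
  source: "/recruiter/temp-search/api/locations/get",
  minLength: 2,
  delay: 100,
  select: function (event, ui) {
    onSuccess(ui.item.value);
  }
});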
