Issue while merging multiple JSON Objects - node.js

I'm trying to auto-generate a JSON object for creating a dynamic interactive list message that will be sent to users on WhatsApp.
I need a JSON object in the following format:
"sections": [
{
"title": "Main Title",
"rows": [
{
"title": "row 1 title",
},
{
"title": "row 2 title",
}
]
}
]
The above code will generate a list like this
But I don't want to hard-code the row objects {"title": "row 1 title"}, {"title": "row 2 title"}.
I tried the method below, where I fetch the title value from an array and merge the objects using spread syntax, but it only returns undefined and doesn't combine all the objects.
method.js
async function genJSON() {
  var arr = ['row 1', 'row 2', 'row3']
  let data1, data2, d = {}
  let i = 0
  try {
    while (i < arr.length) {
      data1 = JSON.parse(`{"title": "${arr[i]}"}`)
      data2 = JSON.parse(`{"title": "${arr[i + 1]}"}`)
      i++
      d = { ...data1, ...data2 }
    }
    console.log(d)
  } catch (e) {
    console.log(e)
  }
}
genJSON()
OUTPUT: only the last merged object is logged, not the combined list.
(Reference: Interactive List messages: https://developers.facebook.com/docs/whatsapp/guides/interactive-messages/)
How can I achieve the following output: {"title": "row 1"}, {"title": "row 2"}, ...? Any help or advice is appreciated.

There were two issues with your code:
You were merging two objects that have the same key, so the earlier value gets overwritten. Instead, you should build an array and append to it.
On the last iteration (i = arr.length - 1), arr[i + 1] is undefined, so data2 ends up as { title: "undefined" }.
I've fixed both errors in the code snippet below:
function genJSON() {
  var arr = ['row 1', 'row 2', 'row3']
  try {
    let d = []
    for (const row of arr) {
      d.push({ title: row })
    }
    console.log(d)
  } catch (e) {
    console.log(e)
  }
}
genJSON()
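If you then need the rows wrapped in the "sections" structure from the question, the same idea extends naturally. The sketch below is only an illustration (genSections and the default section title are made-up names, not part of the WhatsApp API):

function genSections(rowTitles, sectionTitle = 'Main Title') {
  // build one section whose rows are generated from the array of titles
  return [
    {
      title: sectionTitle,
      rows: rowTitles.map(title => ({ title }))
    }
  ];
}

console.log(JSON.stringify({ sections: genSections(['row 1', 'row 2', 'row 3']) }, null, 2));

This prints the "sections" array in the shape shown at the top of the question, with one row object per entry in the input array.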

Related

Iterate object tree with arrays and objects to clean out null/undefined or empty string using node.js

So, I've found a ton of good suggestions on how to iterate over an object "tree" and filter out null, undefined, etc., which all work fine for plain objects and attributes.
I am trying to clean an object tree that has a ton of arrays:
"cac:Delivery": [
{
"cac:DeliveryLocation": [
{
"cbc:Name": [
{
"_": "Company Name"
}
],
"cac:Address": [
{
"cbc:StreetName": [
{
"_": "The Street 123"
}
],
"cbc:AdditionalStreetName": [
{
"_": null
}
],
"cbc:CityName": [
{
"_": "The City"
}
],
"cbc:PostalZone": [
{
"_": ""
}
],
"cac:Country": [
{
"cbc:IdentificationCode": [
{
"_": ""
}
]
}
]
}
]
}
]
}
]
The above sample is a snippet of the full message; it looks like this because the original is a UBL XML message that we run through xml2js to transform it from XML to JSON.
We then need to run some other mappings and extraction of values on the tree. The next step of the flow won't accept null, undefined or empty strings as values.
The problem I have is that I can't figure out any nifty way of traversing the tree, with all its arrays, and cleaning out the "empty" attributes.
In the sample, cleaning out cbc:IdentificationCode will of course make cac:Country empty in turn...
There are hundreds of "groups" that I need to clean, so I need something dynamic, and it is also important that the order of the attributes is kept...
The above should result in:
"cac:Delivery": [
{
"cac:DeliveryLocation": [
{
"cbc:Name": [
{
"_": "Company Name"
}
],
"cac:Address": [
{
"cbc:StreetName": [
{
"_": "The Street 123"
}
],
"cbc:CityName": [
{
"_": "The City"
}
]
}
]
}
]
}
]
EDIT:
The data is based upon UBL JSON representation: https://docs.oasis-open.org/ubl/UBL-2.1-JSON/v2.0/cnd01/UBL-2.1-JSON-v2.0-cnd01.html
Various samples can be found here: https://docs.oasis-open.org/ubl/UBL-2.1-JSON/v2.0/cnd01/json/
(I am currently working on the "Invoice" message)
EDIT2:
Figured I'd share the "iterator" I came up with; it traverses the entire tree and handles each Object/Array/Attribute:
function iterate(obj) {
  Object.entries(obj).forEach(([key, value]) => {
    console.log('KEY:', key);
    if (Array.isArray(obj[key])) {
      console.log('ARRAY');
      obj[key].forEach((k) => iterate(k));
    } else if (typeof obj[key] === 'object') {
      console.log('OBJECT');
      if (obj[key]) {
        iterate(obj[key]);
      }
    } else {
      console.log('Key / Value:', key, value);
      if (!value) {
        console.log('THIS IS AN EMPTY VALUE!');
      }
    }
  });
}
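For example, running it against a small fragment of the sample above just logs what it visits (this is only a usage sketch of the function as posted):

iterate({
  "cbc:Name": [{ "_": "Company Name" }],
  "cbc:PostalZone": [{ "_": "" }]
});
// logs each key, marks arrays and nested objects, and flags the empty "" value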
Well, maybe not the most elegant of solutions, but after a few hours of pondering I solved it using lodash _.set(), which can create an object tree from paths... I called my function deepClean(), and it returns an object without any nullish values (but keeping 0's):
function iterate(obj, arrObj, path = '') {
  Object.entries(obj).forEach(([key, value]) => {
    // Construct the path to the object/array that we are currently looking at
    const newPath = `${path}${path ? '.' : ''}['${key}']`;
    if (Array.isArray(obj[key])) {
      // If it is an array we have to add the [index] to the path as well, then iterate over the array
      obj[key].forEach((k, index) => iterate(k, arrObj, `${newPath}[${index}]`));
    } else if (typeof obj[key] === 'object') {
      // If it's an object, and the object has any child/value, iterate again
      if (obj[key]) {
        iterate(obj[key], arrObj, newPath);
      }
    } else {
      // If this is a "value", push it to the array as [path, value]
      arrObj.push([`${path}.${key}`, value]);
    }
  });
}

const deepClean = (obj) => {
  // We need to clean the whole object tree and remove any array/object/key that is "nullish",
  // however, we must keep values that are zero (number) as e.g. VAT can be zero
  const arrObj = [];
  const newObj = {};
  // Call iterate() to loop through the entire tree recursively and build an array of [path-to-value, value]
  iterate(obj, arrObj);
  arrObj.forEach((o) => {
    // Build the new, cleaned object from the kept values/paths using lodash _.set()
    if (typeof o[1] === 'number' || o[1]) {
      _.set(newObj, o[0], o[1]);
    }
  });
  return newObj;
};
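Usage is then just a matter of requiring lodash and passing the parsed xml2js output through deepClean(). The snippet below is a sketch using a small stand-in object, not the full Invoice message:

const _ = require('lodash'); // deepClean() relies on _.set()

// a small stand-in for the xml2js output from the question
const invoice = {
  "cac:Delivery": [{
    "cac:DeliveryLocation": [{
      "cbc:Name": [{ "_": "Company Name" }],
      "cac:Address": [{
        "cbc:PostalZone": [{ "_": "" }],
        "cbc:CityName": [{ "_": "The City" }]
      }]
    }]
  }]
};

const cleaned = deepClean(invoice);
// keeps cbc:Name and cbc:CityName but drops the empty cbc:PostalZone entirely
console.log(JSON.stringify(cleaned, null, 2));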
ElasticObject is a wrapper for objects that adds array methods to objects. I would use that and then its filter method.

NodeJS Replace all values of a defined key in a json file

I'm trying to go from this:
{
  "Level": "A",
  "Group LVL2": {
    "Level": "B",
    "Group LVL3": {
      "Level": "C"
    }
  }
}
To this:
{
  "Level": "C",
  "Group LVL2": {
    "Level": "C",
    "Group LVL3": {
      "Level": "C"
    }
  }
}
So I basically want to replace all values of a given JSON key so that they are the same.
This is part of the code I'm using:
const fs = require('fs');
const fileName = './' + Profile + '.json';
const file = require(fileName);
const key = Root;
file[Root] = Value;
fs.writeFile(fileName, JSON.stringify(file, null, 2), function writeJSON(error) {
  if (error) return console.log(error);
});
But it only replaces the Level value of the first (top-level) group:
{
  "Level": "THIS WILL BE REPLACED",
  "Group LVL2": {
    "Level": "THIS WILL NOT BE REPLACED",
    "Group LVL3": {
      "Level": "THIS WILL NOT BE REPLACED"
    }
  }
}
Hope I can find a solution to this, I'm counting on you!
(There doesn't seem to be any beginner-friendly solution online.)
Below is a general solution for this kind of problem; you may be able to make it more efficient depending on your use case.
It checks the current object for the key, updates it if present, and then recurses into every nested object in the JSON.
function replaceValues(obj, key, value) {
  // replace the value if this object has the key itself
  if (obj.hasOwnProperty(key))
    obj[key] = value;
  // then recurse into every nested object
  for (let property of Object.keys(obj)) {
    if (typeof obj[property] === "object" && obj[property] !== null) {
      replaceValues(obj[property], key, value);
    }
  }
}
Tested on the data you provided with Value = "D"
{
  "Level": "D",
  "Group LVL2": {
    "Level": "D",
    "Group LVL3": {
      "Level": "D"
    }
  }
}
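For completeness, here is a sketch of how the function could be wired into the original file-reading code from the question (Profile, Root and Value are assumed to be defined elsewhere, exactly as in the question):

const fs = require('fs');
const fileName = './' + Profile + '.json';
const file = require(fileName);

// update every nested occurrence of the key, not just the top-level one
replaceValues(file, Root, Value);

fs.writeFile(fileName, JSON.stringify(file, null, 2), function writeJSON(error) {
  if (error) return console.log(error);
});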

Parsing JSON files in nodejs

I want to parse this JSON data into a file like this:
JSON
{
  "nodes": [
    {"id": 1, "name": "a", "radius": 0.5, "angle": 2.64159265359},
    {"id": 2, "name": "b", "radius": 0.6, "angle": 3.64159265359}
  ],
  "links": [
    {"source": "a", "target": "b", "delay": "10ms"}
  ]
}
File :
[nodes]
a: _ radius=0.5 angle=2.64159265359
b: _ radius=0.6 angle=3.64159265359
[links]
a:b delay=10ms
So far, my code just reads the JSON file:
const fs = require('fs');
data = JSON.parse(fs.readFileSync("topo.json", "utf-8"));
for (node in data) {
  for (link in data[node]) {
    console.log(data[node][link]);
  }
}
How can I get those values saved and create a new file having those values in them ?
You did not specify what file type you want to use, so I went with a simple .txt file.
The only small issue with the code you posted is that these are arrays, so you have to loop over them a bit differently. Apart from that, simply write the data into placeholders in your format string and append it to a string that will be written to the file. Then you should be good to go, like this:
const fs = require('fs');
const data = JSON.parse(fs.readFileSync('topo.json', 'utf-8'));

let textFileContent = '';

textFileContent += '[nodes]\n';
data.nodes.forEach(node => {
  textFileContent += `${node.name}: _ radius=${node.radius} angle=${node.angle}\n`;
});

textFileContent += '[links]\n';
data.links.forEach(link => {
  textFileContent += `${link.source}:${link.target} delay=${link.delay}\n`;
});

fs.writeFile('parsed.txt', textFileContent, function(err) {
  if (err) {
    return console.log(err);
  }
});
Note that this could probably be done more elegantly, and it won't scale very well if your JSON is more complex than what's shown in the sample...
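If you prefer promises over callbacks, a roughly equivalent sketch using fs.promises (available since Node 10) could look like this; it builds the same [nodes]/[links] text and writes it in one go:

const fsp = require('fs').promises;

async function writeTopology() {
  const data = JSON.parse(await fsp.readFile('topo.json', 'utf-8'));
  const lines = ['[nodes]'];
  for (const node of data.nodes) {
    lines.push(`${node.name}: _ radius=${node.radius} angle=${node.angle}`);
  }
  lines.push('[links]');
  for (const link of data.links) {
    lines.push(`${link.source}:${link.target} delay=${link.delay}`);
  }
  await fsp.writeFile('parsed.txt', lines.join('\n') + '\n');
}

writeTopology().catch(console.error);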

Updating mongoose nested array of mixed types

I have a mongoose schema of mixed types like so:
const user = mongoose.Schema({
...
"links" : []
...
After populating this schema, I ended up with data like so:
[
  [
    {
      "step1": "post-url-google", // This field is unique for each entry
      "step2": {
        "title": "Heading 1",
        "likes": 4
      }
    }
  ],
  [
    {
      "step1": "post-url-microsoft",
      "step2": {
        "title": "Heading 1",
        "likes": 1
      }
    },
    {
      "step1": "post-url-apple",
      "step2": {
        "title": "Heading 2",
        "likes": 6 // I want to update this to 7
      }
    }
  ]
]
What I want to achieve is to update the entry whose "step1" is "post-url-apple" so that its likes go from 6 to 7.
So I tried using the User.save() function like so:
let user = await User.findOne({"_id" : "some_id"})
user.links[1].some(object => {
if (object.step1 === "post-url-apple") {
object.step2.likes = 7
(async function (){
user.save() // I also did error handling
})()
return
}
})
This method works fine and the user gets updated, but it keeps throwing ParallelSaveError, possibly because I am calling save() in parallel on the same user instance in some other parts of my code.
So I decided to use User.findOneAndUpdate() instead, but my queries keep failing when using the filtered positional operator $[<identifier>], obviously because I don't know how to use it properly.
Like so:
let update = {
  "$set": {
    "links.$[index1].$[index2].step2.likes": 7,
  }
}
let conditions = {
  arrayFilters: [
    { "index1": 1 },
    { "index2.step1": "post-url-apple" }
  ]
}
try {
  let result = await Users.findOneAndUpdate({ "_id": "some_id" }, update, conditions)
  console.log(result)
} catch (err) {
  console.log(err)
}
Strangely, I'm not hitting the catch block, but the update is equally unsuccessful.
How do I update the likes of the entry with "step1": "post-url-apple" to 7 using findOneAndUpdate?
Thank you.
In arrayFilters you define the conditions to be applied to the array elements, not their index.
If you are sure you always want to update the second element (index = 1) of the outer array, you can use dot notation for the outer array, and for the inner array use an array filter to match the element that has step1 = 'post-url-apple'.
Your code may look something like this:
let update = {
  "$set": {
    'links.1.$[item].step2.likes': 7 // links.1 accesses the second element of the outer array
  }
}
let conditions = {
  arrayFilters: [
    { 'item.step1': 'post-url-apple' } // item matches the inner-array element with step1 = post-url-apple
  ]
}
Then do your update query as before.
Hope it helps.
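Putting that together with the existing try/catch, the full call could look something like the sketch below (new: true simply tells mongoose to return the updated document; the field names are the ones from the sample data):

try {
  const result = await Users.findOneAndUpdate(
    { "_id": "some_id" },
    { $set: { 'links.1.$[item].step2.likes': 7 } },
    {
      arrayFilters: [{ 'item.step1': 'post-url-apple' }], // match the inner element by step1
      new: true // return the document after the update
    }
  );
  console.log(result);
} catch (err) {
  console.log(err);
}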

How to pull a range of objects from an array of objects and re-insert them at a new position in the array?

Desired Behaviour
Pull a range of objects from an array of objects and push them back to the array at a new index.
For example, pull objects from the array where their index is between 0 and 2, and push them back to the array at position 6.
For reference, in jQuery, the desired behaviour can be achieved with:
if (before_or_after === "before") {
$("li").eq(new_position).before($("li").slice(range_start, range_end + 1));
} else if (before_or_after === "after") {
$("li").eq(new_position).after($("li").slice(range_start, range_end + 1));
}
jsFiddle demonstration
Schema
{
  "_id": ObjectId("*********"),
  "title": "title text",
  "description": "description text",
  "statements": [
    {
      "text": "string",
      "id": "********"
    },
    {
      "text": "string",
      "id": "********"
    },
    {
      "text": "string",
      "id": "********"
    },
    {
      "text": "string",
      "id": "********"
    },
    {
      "text": "string",
      "id": "********"
    }
  ]
}
What I've Tried
I am able to reposition a single object in an array of objects with the code below.
It uses pull to remove the object from the array and push to add it back to the array at a new position.
In order to do the same for a range of objects, I think I just need to modify the $pull and $push variables but:
I can't figure out how to use $slice in this context, either as a projection or an aggregation, in a $pull operation
Because I can't figure out the first bit, I don't know how to attempt the second bit - the $push operation
// define the topic_id to search for
var topic_id = request_body.topic_id;
// make it usable as a search query
var o_id = new ObjectID(topic_id);
// define the statement_id to search for
var statement_id = request_body.statement_id;
// define new position
var new_position = Number(request_body.new_position);
// define old position
var old_position = Number(request_body.old_position);
// define before or after (this will be relevant later)
// var before_or_after = request_body.before_or_after;

// define the filter
var filter = { _id: o_id };
// define the pull update - to remove the object from the array of objects
var pull_update = {
  $pull: {
    statements: { id: statement_id } // <----- how do I pull a range of objects here?
  }
};
// define the projection so that only the 'statements' array is returned
var options = { projection: { statements: 1 } };

try {
  // perform the pull update
  var topic = await collection.findOneAndUpdate(filter, pull_update, options);
  // get the returned statement object so that it can be inserted at the desired index
  var returned_statement = topic.value.statements[old_position];
  // define the push update - to add the object back to the array at the desired position
  var push_update = {
    $push: {
      statements: {
        $each: [returned_statement],
        $position: new_position
      }
    } // <----- how do I push the range of objects back into the array here?
  };
  // perform the push update
  var topic = await collection.findOneAndUpdate(filter, push_update);
} catch (err) {
  console.log(err);
}
Environments
##### local
$ mongod --version
db version v4.0.3
$ npm view mongodb version
3.5.9
$ node -v
v10.16.3
$ systeminfo
OS Name: Microsoft Windows 10 Home
OS Version: 10.0.18363 N/A Build 18363
##### production
$ mongod --version
db version v3.6.3
$ npm view mongodb version
3.5.9
$ node -v
v8.11.4
RedHat OpenShift Online, Linux
Edit
Gradually figuring out parts of the problem, I think:
Using the example here, the following returns the objects from the array with index 0-2 (i.e. 3 objects):
db.topics.aggregate([
  { "$match": { "_id": ObjectId("********") } },
  { "$project": { "statements": { "$slice": ["$statements", 0, 3] }, _id: 0 } }
])
Not sure how to use that in a pull yet...
I also looked into using $in (even though I would prefer to just grab a range of objects rather than having to specify each object's id), but realised it does not preserve the order of the array values provided in the results returned:
Does MongoDB's $in clause guarantee order
Here is one solution to re-ordering results from $in in Node:
https://stackoverflow.com/a/34751295
Here is an example with the mongodb Node.js driver 3.5:
const mongo = require('mongodb')

;(async function () {
  const client = await mongo.connect('mongodb://localhost:27017')
  const coll = client.db('test').collection('test')

  const from0to99 = Array(100).fill('0').map((_, i) => String(i))
  const from5To28 = Array(24).fill('0').map((_, i) => String(i + 5))
  const insert = { statements: from0to99.map(_ => ({ id: _ })) }
  await coll.insertOne(insert)

  const all100ElementsRead = await coll.findOneAndUpdate(
    { _id: insert._id },
    {
      $pull: {
        statements: {
          id: { $in: from5To28 }
        }
      }
    },
    { returnOriginal: true }
  )

  /**
   * It shows the object with the desired _id BEFORE doing the $pull
   * You can process all the old elements as you wish
   */
  console.log(all100ElementsRead.value.statements)

  // I use the object read from the database to push back;
  // since I know the $in condition, I must filter the array returned
  const pushBack = all100ElementsRead.value.statements.filter(_ => from5To28.includes(_.id))

  // push back the 5-28 range at position 72
  const pushed = await coll.findOneAndUpdate(
    { _id: insert._id },
    {
      $push: {
        statements: {
          $each: pushBack,
          $position: 72 // 0-indexed
        }
      }
    },
    { returnOriginal: false }
  )

  console.log(pushed.value.statements) // show all the 100 elements
  client.close()
})()
This old issue helped
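If you would rather drive the whole thing purely by index range (as in the original attempt) instead of writing out an id list by hand, the same two-step idea can be sketched like this, reusing range_start, range_end, new_position, collection and o_id from the question:

async function moveRange(collection, o_id, range_start, range_end, new_position) {
  // read the current statements once so we know which objects fall in the range
  const topic = await collection.findOne({ _id: o_id }, { projection: { statements: 1 } });
  const range = topic.statements.slice(range_start, range_end + 1); // objects at range_start..range_end
  const rangeIds = range.map(s => s.id);

  // pull the whole range by id, then push it back at the new position
  await collection.updateOne({ _id: o_id }, { $pull: { statements: { id: { $in: rangeIds } } } });
  await collection.updateOne(
    { _id: o_id },
    // note: $position is relative to the array *after* the pull
    { $push: { statements: { $each: range, $position: new_position } } }
  );
}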
If you want the "desired behaviour" when mutating arrays, add these to your checklist:
- array.length must be at least 7 if you want to splice at index 6
- concat creates a new array
- array.push, splice, or a[a.length] = 'apple' mutate the original
Use slice() to select the elements between index1 and index2, or run a native for loop to select a few elements of the array, or apply an array.filter() function.
Once you have selected the elements that need to be manipulated, you mentioned you want to add them at the end, so here is how to do that.
About adding elements at the end:
CONCAT EXAMPLE
const original = ['🦊']; // const does not mean it's immutable, just that it can't be reassigned
let newArray;
newArray = original.concat('🦄');
newArray = [...original, '🦄'];
// Result
newArray; // ['🦊', '🦄']
original; // ['🦊']
SPLICE EXAMPLE
const zoo = ['🦊', '🐮'];
zoo.splice(
  zoo.length, // we want to add at the END of the array
  0,          // we do NOT want to remove any items
  '🐧', '🐦', '🐤' // these are the items we want to add
);
console.log(zoo); // ['🦊', '🐮', '🐧', '🐦', '🐤']
