When I try to convert an XML file (requested from an external server) to JSON, it seems that xml2json does convert it, but not to valid JSON. Is there something that needs to be adjusted? I seem to be missing the quotes around the keys.
This is my current code
app.get('/api/convertabstract/:id', async (req, res, next) => {
  // Raw XML response body, accumulated chunk by chunk.
  var data = '';
  // The parsed result serialized as a JSON string.
  var finaldata = '';

  // Fetches the PubMed XML and resolves once it has been parsed into JSON.
  function vertaaldata() {
    return new Promise((resolve, reject) => {
      // TODO(review): the ids are hard-coded; req.params.id is unused — confirm intent.
      https.get('https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi?db=pubmed&id=' + ' 11748933,11700088' + '&retmode=xml', function (response) {
        // `response` (not `res`) so the Express response is not shadowed.
        if (response.statusCode >= 200 && response.statusCode < 400) {
          response.on('data', function (chunk) { data += chunk.toString(); });
          response.on('end', function () {
            parser.parseString(data, function (err, result) {
              if (err) return reject(err);
              // BUG FIX: util.inspect() emits JavaScript debug notation
              // (unquoted keys, ANSI colour codes) — that is why the output
              // was "not correct JSON". JSON.stringify() emits real JSON.
              finaldata = JSON.stringify(result, null, 2);
              // Resolve only after parsing finished, not alongside it.
              resolve('klaar');
            });
          });
        } else {
          // Previously a non-2xx/3xx status left the promise pending forever.
          reject(new Error('efetch request failed with status ' + response.statusCode));
        }
      });
    });
  }

  async function calltranslator() {
    await vertaaldata();
    res.send('dit is de data:' + finaldata);
  }

  calltranslator();
});
JSON output:
{ PubmedArticleSet: {
PubmedArticle: [
{
MedlineCitation: [
{
'$': { Status: 'MEDLINE', Owner: 'NLM' },
PMID: [ { _: '11748933', '$': { Version: '1' } } ],
DateCompleted: [ { Year: [ '2002' ], Month: [ '03' ], Day: [ '04' ] } ],
DateRevised: [ { Year: [ '2006' ], Month: [ '11' ], Day: [ '15' ] } ],
Article: [
{
'$': { PubModel: 'Print' },
Journal: [
{
ISSN: [ { _: '0011-2240', '$': { IssnType: 'Print' } } ],
JournalIssue: [....etc....
As far as I can tell, xml2js and xml2json are completely unrelated libraries. Which are you actually using - you mention both! xml2js doesn't claim to generate JSON, it claims to generate Javascript.
I am new to NodeJS - I am doing this in a AWS lambda function and have the below JSON object
{
"subnetsToUse": [
{
"subnetId": "subnet-0g6e5ad78f2841dc9" },
{
"subnetId": "subnet-089e0d4de075664b3" },
{
"subnetId": "subnet-06778539a55adc513" }
]
}
I need to return a list of subnetIds from this.
subnet-0g6e5ad78f2841dc9,subnet-089e0d4de075664b3,subnet-06778539a55adc513
Here is what I have tried so far
var objectKeysArray = Object.keys(subnetsToUse)
objectKeysArray.forEach(function(subnetId)
{ var objValue = subnetsToUse[subnetId] })
How do I achieve this in NodeJS.
You can use the Array.map or Array.reduce to iterate over the object values and push them into an array for example.
// Sample payload: each member of subnetsToUse carries the subnetId we want.
const data = {
  "subnetsToUse": [
    {
      "subnetId": "subnet-0g6e5ad78f2841dc9",
      "availabilityZone": "us-west-2c"
    },
    {
      "subnetId": "subnet-089e0d4de075664b3",
      "availabilityZone": "us-west-2b"
    },
    {
      "subnetId": "subnet-06778539a55adc513",
      "availabilityZone": "us-west-2a"
    }
  ]
};

// Option 1: map each entry straight to its subnetId (destructured).
const mapRes = data.subnetsToUse.map(({ subnetId }) => subnetId);
console.log("mapRes", mapRes)

// Option 2: reduce, pushing each subnetId onto an accumulator array.
const reduceRes = data.subnetsToUse.reduce((acc, { subnetId }) => {
  acc.push(subnetId);
  return acc;
}, []);
console.log("reduceRes",reduceRes)
I am developing a web-app in MEAN stack (my back-end code runs in NodeJS server) for registering the assets of my network. I have the data collection inside my database, which comprises of some entries. One of the fields of each entry is ip field. My final goal is to sort by IP.
ip field values are strings. If I use the sort({ ip : 1 }), I will have the following problem: the entry with ip 192.168.1.145 will appear before entry with ip 192.168.1.2, because comparing becomes digit by digit.
So, to overcome this problem, I thought to transform each IP in a form like 192.168.001.145 and 192.168.001.002. I want this transformation to become only during the sorting process and to avoid adding another field or just save the data in that form in the IP field.
So far, I have:
//------------ Get all entries, sorted by IP ------------
router.get('/getEntriesSortedByIP', (req, res, next) => {
db.get().collection('data')
.find()
// NOTE(review): the MongoDB Node driver's cursor.sort() expects a sort
// specification document such as { ip: 1 } — NOT a JavaScript comparator.
// The function below transforms each document but is never used for
// ordering, which matches the reported symptom ("the sorting is not
// happening"). Sorting must be done server-side (e.g. an aggregation
// pipeline) or in JS after .toArray().
.sort(function(u) {
if (u.ip != '') {
// Zero-pad every octet to three digits (192.168.1.2 -> 192.168.001.002)
// so that a plain string comparison would agree with numeric IP order.
let ip_part = u.ip.split('.');
while (ip_part[0].length < 3) {
ip_part[0] = '0' + ip_part[0];
}
while (ip_part[1].length < 3) {
ip_part[1] = '0' + ip_part[1];
}
while (ip_part[2].length < 3) {
ip_part[2] = '0' + ip_part[2];
}
while (ip_part[3].length < 3) {
ip_part[3] = '0' + ip_part[3];
}
// Reassemble the padded octets into the ip field (mutates the document).
u.ip = ip_part[0] + '.' + ip_part[1] + '.' + ip_part[2] + '.' + ip_part[3];
}
return u;
})
.toArray((err, data) => {
if (err) { return res.send(err) }
res.json(data);
});
});
The tranformation is successful. The only problem is that the sorting is not happening.
Any ideas? Thanks!
How about a MongoDB solution:
Split the ip field into 4 temporary fields, converting each String octet into an Integer. We then sort by those temporary fields. At the end, $unset removes the temporary fields from the result.
// Sort documents numerically by dotted-quad IP entirely inside MongoDB:
//   Stage 1 ($addFields): split "$ip" on "." and convert each of the four
//     octets to an integer in temporary fields ip1..ip4.
//   Stage 2 ($sort): order by the numeric octets, most significant first.
//   Stage 3 ($unset): drop the temporary fields from the output.
// NOTE(review): $toInt and the $unset pipeline stage are newer aggregation
// features — confirm the target MongoDB server version supports them.
db.collection.aggregate([
{
$addFields: {
// First octet, e.g. "192" from "192.168.1.2".
ip1: {
$toInt: {
$arrayElemAt: [
{
$split: [
"$ip",
"."
]
},
0
]
}
},
// Second octet.
ip2: {
$toInt: {
$arrayElemAt: [
{
$split: [
"$ip",
"."
]
},
1
]
}
},
// Third octet.
ip3: {
$toInt: {
$arrayElemAt: [
{
$split: [
"$ip",
"."
]
},
2
]
}
},
// Fourth octet.
ip4: {
$toInt: {
$arrayElemAt: [
{
$split: [
"$ip",
"."
]
},
3
]
}
}
}
},
// Numeric sort: compare octet by octet, left to right.
{
$sort: {
ip1: 1,
ip2: 1,
ip3: 1,
ip4: 1
}
},
// Remove the helper fields so the output documents match the originals.
{
$unset: [
"ip1",
"ip2",
"ip3",
"ip4"
]
}
])
MongoPlayground
Can anyone suggest how I can trigger a recursive function after executing async.each? I have implemented the logic as follows:
// NOTE(review): parameter is named `aggs` but the recursive call passes
// `args`, and `datas` is not defined in this snippet — presumably captured
// from an enclosing scope; confirm all three against the real code.
function recursive(aggs, callback) {
async.each(datas, function(data, asynccallback) {
// after some processing, signal this item is done.
// NOTE(review): async.each's per-item callback takes only an error
// argument; the `true` here is ignored.
asynccallback(null, true);
}, function(err, resultData) {
// After async.each completes we need to do the following.
// NOTE(review): async.each's final callback receives only (err) —
// `resultData` is undefined here, so `resultData.flage` throws, and the
// per-item `asynccallback` being invoked again during the recursion is a
// likely source of the "callback was already called" error.
if (resultData.flage) {
// flag true: recurse.
recursive(args, callback);
} else {
// flag false: hand the result back to the caller.
// NOTE(review): `data` is not in scope here (it was the per-item
// parameter above); confirm which value should be returned.
callback(null, data)
}
});
}
When I try the above way I got
error: callback was already called.
Can anyone guide me on how to achieve this? Thanks in advance.
Maybe try Lodash-recursive :
https://www.npmjs.com/package/lodash-recursive
They provided you an example :
// Walk a tree of { value, children } nodes with lodash-recursive's map:
// descend into children first, then map each node, marking the node whose
// value is 'beta' as valid.
const assert = require('assert')
const recursive = require('lodash-recursive')

const tree = [
  {
    value: 'alpha',
    children: [
      {
        value: 'beta'
      }
    ]
  },
  {
    value: 'gamma'
  }
]

const mapped = recursive.map(
  tree,
  function (node, recurse, map) {
    // Recurse into any children before mapping this node.
    if (node.children) recurse(node.children)
    return map(node)
  },
  function (node) {
    if (node.value == 'beta') node.valid = true
    return node
  }
)

const expected = [
  {
    "value": "alpha",
    "children": [
      {
        "value": "beta",
        "valid": true
      }
    ]
  },
  {
    "value": "gamma"
  }
]

assert.deepEqual(expected, mapped)
I have an array 'pets': [{'fido': ['abc']}] that is an embedded document. When I add a pet to the array, how can I check whether that pet already exists? For instance, if I added fido again... how can I check that fido already exists and not add it? I was hoping I could use $addToSet, but I only want to check part of the set (the pet's name).
User.prototype.updatePetArray = function(userId, petName) {
// Reuse the parameter as the query filter document.
// NOTE(review): reassigning the userId parameter works but obscures intent;
// a separate variable would be clearer.
userId = { _id: ObjectId(userId) };
// $addToSet deduplicates on the WHOLE array member. { fido: [] } and
// { fido: ['abc'] } are different subdocuments, so adding "fido" again
// still appends — which is exactly the duplicate shown in the result below.
return this.collection.findOneAndUpdate(userId,
{ $addToSet: { pets: { [petName]: [] } } },
{ returnOriginal: false,
maxTimeMS: QUERY_TIME });
// NOTE(review): the function's closing `};` appears truncated in this snippet.
Result of adding fido twice:
{u'lastErrorObject': {u'updatedExisting': True, u'n': 1}, u'ok': 1, u'value': {u'username': u'bob123', u'_id': u'56d5fc8381c9c28b3056f794', u'location': u'AT', u'pets': [{u'fido': []}]}}
{u'lastErrorObject': {u'updatedExisting': True, u'n': 1}, u'ok': 1, u'value': {u'username': u'bob123', u'_id': u'56d5fc8381c9c28b3056f794', u'location': u'AT', u'pets': [{u'fido': [u'abc']}, {u'fido': []}]}}
If there is always going to be "variable" content within each member of the "pets" array ( i.e petName as the key ) then $addToSet is not for you. At least not not at the array level where you are looking to apply it.
Instead you basically need an $exists test on the "key" of the document being contained in the array, then either $addToSet to the "contained" array of that matched key with the positional $ operator, or where the "key" was not matched then $push directly to the "pets" array, with the new inner content directly as the sole array member.
So if you can live with not returning the modified document, then "Bulk" operations are for you. In modern drivers with bulkWrite():
User.prototype.updatePetArray = function(userId, petName, content) {
  // Adds `content` to the named pet's array, creating the pet entry when it
  // does not exist yet. The two updates below have complementary $exists
  // filters, so exactly one of them can match — a single bulkWrite round
  // trip covers both cases.
  // Returns a promise of the BulkWriteResult (NOT the modified document).
  var petPath = "pets." + petName;
  return this.collection.bulkWrite([
    // Pet key already present: $addToSet into its array via the positional $.
    { "updateOne": {
      "filter": { "_id": ObjectId(userId), [petPath]: { "$exists": true } },
      "update": { "$addToSet": { ["pets.$." + petName]: content } }
    }},
    // Pet key absent: $push a brand-new { petName: [content] } subdocument.
    { "updateOne": {
      "filter": { "_id": ObjectId(userId), [petPath]: { "$exists": false } },
      "update": { "$push": { "pets": { [petName]: [content] } } }
    }}
  ]);
};
If you must return the modified document, then you are going to need to resolve each call and return the one that actually matched something:
User.prototype.updatePetArray = function(userId, petName, content) {
  // Adds `content` to the named pet's array (creating the entry when absent)
  // and resolves with the modified document.
  //
  // BUG FIX: the original wrapped the work in `new Promise(function () {...})`;
  // inside that plain function `this` no longer refers to the User instance,
  // so `this.collection` was not the collection at all. Building the
  // operations before any callback, and returning the Promise.all chain
  // directly, fixes the `this` binding and removes the redundant
  // promise-constructor wrapper (rejections now propagate naturally).
  var petPath = "pets." + petName;
  var filter1 = { "_id": ObjectId(userId), [petPath]: { "$exists": true } };
  var filter2 = { "_id": ObjectId(userId), [petPath]: { "$exists": false } };
  var update1 = { "$addToSet": { ["pets.$." + petName]: content } };
  var update2 = { "$push": { "pets": { [petName]: [content] } } };
  var operations = [
    this.collection.findOneAndUpdate(filter1, update1, { "returnOriginal": false }),
    this.collection.findOneAndUpdate(filter2, update2, { "returnOriginal": false })
  ];
  // Only one of the two calls matches a document; keep the non-null result.
  return Promise.all(operations).then(function(result) {
    return result.filter(function(el) { return el.value != null })[0].value;
  });
};
In either case this requires "two" update attempts where only "one" will actually succeed and modify the document, since only one of the $exists tests is going to be true.
So as an example of that first case, the "query" and "update" are resolving after interpolation as:
{
"_id": ObjectId("56d7b759e955e2812c6c8c1b"),
"pets.fido": { "$exists": true }
},
{ "$addToSet": { "pets.$.fido": "ccc" } }
And the second update as:
{
"_id": ObjectId("56d7b759e955e2812c6c8c1b"),
"pets.fido": { "$exists": false }
},
{ "$push": { "pets": { "fido": ["ccc"] } } }
Given variables of:
userId = "56d7b759e955e2812c6c8c1b",
petName = "fido",
content = "ccc";
Personally I would not be naming keys like this, but rather change the structure to:
{
"_id": ObjectId("56d7b759e955e2812c6c8c1b"),
"pets": [{ "name": "fido", "data": ["abc"] }]
}
That makes the update statements easier, and without the need for variable interpolation into the key names. For example:
{
"_id": ObjectId(userId),
"pets.name": petName
},
{ "$addToSet": { "pets.$.data": content } }
and:
{
"_id": ObjectId(userId),
"pets.name": { "$ne": petName }
},
{ "$push": { "pets": { "name": petName, "data": [content] } } }
Which feels a whole lot cleaner and can actually use an "index" for matching, which of course $exists simply cannot.
There is of course more overhead if using .findOneAndUpdate(), since this is afterall "two" actual calls to the server for which you need to await a response as opposed to the Bulk method which is just "one".
But if you need the returned document ( option is the default in the driver anyway ) then either do that or similarly await the Promise resolve from the .bulkWrite() and then fetch the document via .findOne() after completion. Albeit that doing it via .findOne() after the modification would not truly be "atomic" and could possibly return the document "after" another similar modification was made, and not only in the state of that particular change.
N.B Also assuming that apart from the keys of the subdocuments in "pets" as a "set" that your other intention for the array contained was adding to that "set" as well via the additional content supplied to the function. If you just wanted to overwrite a value, then just apply $set instead of $addToSet and similarly wrap as an array.
But it sounds reasonable that the former was what you were asking.
BTW, please clean up my horrible setup code for the query and update objects in this example before using it in your actual code :)
As a self contained listing to demonstrate:
// Self-contained demonstration of the two approaches: the complementary
// $exists updates applied once via bulkWrite() and once via Promise.all of
// two findOneAndUpdate() calls, against a freshly reset sample collection.
var async = require('async'),
mongodb = require('mongodb'),
MongoClient = mongodb.MongoClient;
MongoClient.connect('mongodb://localhost/test',function(err,db) {
var coll = db.collection('pettest');
// Sample inputs interpolated into the filters/updates built below.
var petName = "fido",
content = "bbb";
// filter1/update1 target a document that already has the pet key
// (positional $ + $addToSet); filter2/update2 target one that does not
// ($push a brand-new subdocument). Only one of each pair can match.
var filter1 = { "_id": 1 },
filter2 = { "_id": 1 },
update1 = { "$addToSet": {} },
update2 = { "$push": { "pets": {} } };
filter1["pets." + petName] = { "$exists": true };
filter2["pets." + petName] = { "$exists": false };
var setter1 = {};
setter1["pets.$." + petName] = content;
update1["$addToSet"] = setter1;
var setter2 = {};
setter2[petName] = [content];
update2["$push"]["pets"] = setter2;
// Show the fully interpolated update documents.
console.log(JSON.stringify(update1,undefined,2));
console.log(JSON.stringify(update2,undefined,2));
// Resets the collection to a single known document before each run.
function CleanInsert(callback) {
async.series(
[
// Clean data
function(callback) {
coll.deleteMany({},callback);
},
// Insert sample
function(callback) {
coll.insert({ "_id": 1, "pets": [{ "fido": ["abc"] }] },callback);
}
],
callback
);
}
async.series(
[
CleanInsert,
// Modify via bulkWrite: one round trip, then fetch to display the result.
function(callback) {
coll.bulkWrite([
{ "updateOne": {
"filter": filter1,
"update": update1
}},
{ "updateOne": {
"filter": filter2,
"update": update2
}}
]).then(function(res) {
console.log(JSON.stringify(res,undefined,2));
coll.findOne({ "_id": 1 }).then(function(res) {
console.log(JSON.stringify(res,undefined,2));
callback();
});
},callback);
},
CleanInsert,
// Modify via Promise.all: both findOneAndUpdate calls run; keep the
// result whose value is non-null (the call that actually matched).
function(callback) {
var operations = [
coll.findOneAndUpdate(filter1,update1,{ "returnOriginal": false }),
coll.findOneAndUpdate(filter2,update2,{ "returnOriginal": false })
];
Promise.all(operations).then(function(res) {
//console.log(JSON.stringify(res,undefined,2));
console.log(
JSON.stringify(
res.filter(function(el) { return el.value != null })[0].value
)
);
callback();
},callback);
}
],
// Final async.series callback: surface any error and close the connection.
function(err) {
if (err) throw err;
db.close();
}
);
});
And the output:
{
"$addToSet": {
"pets.$.fido": "bbb"
}
}
{
"$push": {
"pets": {
"fido": [
"bbb"
]
}
}
}
{
"ok": 1,
"writeErrors": [],
"writeConcernErrors": [],
"insertedIds": [],
"nInserted": 0,
"nUpserted": 0,
"nMatched": 1,
"nModified": 1,
"nRemoved": 0,
"upserted": []
}
{
"_id": 1,
"pets": [
{
"fido": [
"abc",
"bbb"
]
}
]
}
{"_id":1,"pets":[{"fido":["abc","bbb"]}]}
Feel free to change to different values to see how different "sets" are applied.
Please try this one with string template, here is one example running under mongo shell
> var name = 'fido';
> var t = `pets.${name}`; // template string, which interpolates the name variable
> db.pets.find()
{ "_id" : ObjectId("56d7b5019ed174b9eae2b9c5"), "pets" : [ { "fido" : [ "abc" ]} ] }
With the following update command, it will not update it if the same pet name exists.
> db.pets.update({[t]: {$exists: false}}, {$addToSet: {pets: {[name]: []}}})
WriteResult({ "nMatched" : 0, "nUpserted" : 0, "nModified" : 0 })
If the pets document is
> db.pets.find()
{ "_id" : ObjectId("56d7b7149ed174b9eae2b9c6"), "pets" : [ { "fi" : [ "abc" ] } ] }
After update with
> db.pets.update({[t]: {$exists: false}}, {$addToSet: {pets: {[name]: []}}})
WriteResult({ "nMatched" : 1, "nUpserted" : 0, "nModified" : 1 })
The result shows that the pet name is added only if it does not already exist.
> db.pets.find()
{ "_id" : ObjectId("56d7b7149ed174b9eae2b9c6"), "pets" : [ { "fi" : [ "abc" ] }, { "fido" : [ ] } ] }