Node Ecommerce Cart NaN Issue - node.js

Working on calculating a total for my cart which it seems that I'm running into a number issue. I'm not really sure where I am going wrong with this function. Please disregard if I haven't calculated the totals correctly as I just coded it, but with the JSON I am passing numbers and not strings.
JSON
{
"id": "611afa8b9069c9126cff3357",
"discount": {
"title": "None",
"type": "None",
"percent": 0
},
"items": [
{
"sku": 1000,
"qty": 2,
"price": 10.99
},
{
"sku": 1001,
"qty": 2,
"price": 16.99
},
{
"sku": 1003,
"qty": 1,
"price": 15.99
}
]
}
const calculateTotal = (items, discount) => {
let total = 0;
let discountAmt = 0;
console.log(discount);
console.log(items);
console.log(items.length);
for (let i = 0; (j = items.length), i < j; i++) {
discountAmt = 0;
if (discount.type == "Item" && discount.itemNum == j.sku) {
discountAmt = j.price * (discount.percent / 100);
total = total - discountAmt;
} else {
total = total + j.price * j.qty;
}
}
if (discount.type == "Order") {
discountAmt = 0;
discountAmt = total * (discount.percent / 100);
total = total - discountAmt;
}
console.log(total);
return total;
};
Console Returning
{ title: 'None', type: 'None', percent: 0 }
[
{ sku: 1000, qty: 2, price: 10.99 },
{ sku: 1001, qty: 2, price: 16.99 },
{ sku: 1003, qty: 1, price: 15.99 }
]
3
NaN
NaN

If you want to loop through your items, then your loop is not correct.
You can do something like this:
for (let i = 0; i < items.length; i++) { // i as index
let j = items[i]; // j will have the object based on the index
discountAmt = 0;
if (discount.type == "Item" && discount.itemNum == j.sku) {
discountAmt = j.price * (discount.percent / 100);
total = total - discountAmt;
} else {
total = total + j.price * j.qty;
}
}
or much simpler:
for (let j of items) {
discountAmt = 0;
if (discount.type == "Item" && discount.itemNum == j.sku) {
discountAmt = j.price * (discount.percent / 100);
total = total - discountAmt;
} else {
total = total + j.price * j.qty;
}
}

Related

CouchDB display distinct values

I have a document like below,
{
"id": "7d9fdc2f4846544d62da3421bf011b31",
"al": [
{ "id16": "0x1d42",
"pos": {
"x": 10.32,
"y": 11.13,
"z": 1.22
},
"resultTime": "2020-06-01T20:45:34.976Z"
},
{ "id16": "0x1342",
"pos": {
"x": 0.32,
"y": 1.13,
"z": 13.22
},
"resultTime": "2021-06-01T20:45:34.976Z"
}
.
.
.
],
"Timestamp": 272179,
"Oid": "Onion1",
}
and Design document is like below
{
"id": "_design/GetALwithAnchorID",
"key": "_design/GetALwithAnchorID",
"value": {
"rev": "32-6db6c4e105336d47a6c8e7e8458ee345"
},
"doc": {
"_id": "_design/GetALwithAnchorID",
"_rev": "32-6db6c4e105336d47a6c8e7e8458ee345",
"views": {
"GetALwithAnchorID": {
"map": "function (doc) {\n\n for (var i=0; i<doc.al.length; i++) { \n emit(doc.al[i].id16, doc.al[i].pos);\n }\n \n}\n\n",
"reduce": "_approx_count_distinct"
}
},
"language": "javascript"
}
}
when I query the view like
http://127.0.0.1:5984/rtls/_design/GetALwithAnchorID/_view/GetALwithAnchorID?group_level=1&key=%220x1d42%22
I get the results as below
{"rows":[
{"key":"0x1d42","value":1}
]}
But I want distinct values of id16 and pos of id16. and to sort these distinct values by time and display the values of pos instead of "value":1 when Iquery?
thank you in advance.
OK so not quite the same as this similar answer. Anyone coming across this Q/A, I recommend reading over that answer.
Consider the following emit for your given doc structure:
doc.al.forEach(e => emit(
[e.pos.x, e.pos.y, e.pos.z, e.resultTime], // key
[e.id16, e.pos, e.resultTime]) // value
));
The emit's complex key visualized in the index (loosely not verbatim):
[-3,-2,-1,"2017-10-28T22:56:58.852Z"]
[-3,-2,-1,"2019-01-23T03:33:20.958Z"] **
. . .
[0,0,0,"2016-05-27T01:38:36.305Z"]
[0,0,0,"2016-12-27T05:17:02.255Z"] **
. . .
[1,2,3,"2016-11-14T17:31:59.468Z"]
[1,2,3,"2017-07-17T07:52:38.180Z"] **
Where each ** the last item in the pos group and significantly the most recent resultTime. All due to CouchDB's collation.
Working with CouchDB demands understanding the B-tree, and it's documentation has a great rundown of it in its Reduce/Rereduce documentation.
Now consider this reduce function:
function(keys,values,rereduce) {
return values[0];
}
It doesn't look terribly impressive, but further consider calling the view with these parameters:
{
reduce: true,
group_level: 1,
descending: true
}
By reversing the order of the index scan with descending the reduce function is guaranteed to return the most recent row with respect to resultTime of any given pos group.
Here's a simple demo using pouchDB. It generates 6 documents with random resultTime's and randomly selects pos from a pool of 3. Have a look at the design doc.
async function showReduceDocs(view) {
let result = await db.query(view, {
reduce: true,
group_level: 1,
descending: true
});
// show
debugger;
gel('view_reduce').innerText = result.rows.map(row => `${JSON.stringify(row.value)}`.split(',').join(', ')).join('\n');
return result;
}
async function showViewDocs(view) {
let result = await db.query(view, {
reduce: false,
include_docs: false
});
//show
gel('view_docs').innerText = result.rows.map(row => JSON.stringify(row.key))
.join('\n');
}
function getDocsToInstall(count) {
// design document
const ddoc = {
"_id": "_design/SO-66231293",
"views": {
"id16": {
"map": `function (doc) {
doc.al.forEach((e) => emit([e.pos.x, e.pos.y, e.pos.z, e.resultTime],[e.id16, e.pos, e.resultTime]));
}`,
"reduce": `function(keys,values,rereduce) {
return values[0];
}`
}
}
};
// create a set of random documents.
let docs = new Array(count);
let docId = 65;
const posSeed = [{
x: 0,
y: 0,
z: 0
},
{
x: 1,
y: 2,
z: 3
},
{
x: -3,
y: -2,
z: -1
}
];
const dateSeed = [new Date(2000, 0, 1), new Date(), 0, 24];
while (count--) {
let n = 6;
let doc = {
_id: String.fromCharCode(docId++),
al: new Array(n)
};
while (n-- > 0) {
doc.al[n] = {
"id16": "0x000" + n,
"pos": posSeed[Math.floor(Math.random() * 100) % 3],
"resultTime": randomDate(...dateSeed).toISOString()
};
}
docs[count] = doc;
}
docs.push(ddoc);
return docs;
}
const db = new PouchDB('SO-66231293', {
adapter: 'memory'
});
(async() => {
// install docs and show view in various forms.
await db.bulkDocs(getDocsToInstall(6));
gel('content').classList.remove('hide')
showReduceDocs('SO-66231293/id16');
showViewDocs('SO-66231293/id16');
})();
const gel = id => document.getElementById(id);
/*
https://stackoverflow.com/questions/31378526/generate-random-date-between-two-dates-and-times-in-javascript/31379050#31379050
*/
function randomDate(start, end, startHour, endHour) {
var date = new Date(+start + Math.random() * (end - start));
var hour = startHour + Math.random() * (endHour - startHour) | 0;
date.setHours(hour);
return date;
}
<script src="https://cdn.jsdelivr.net/npm/pouchdb#7.1.1/dist/pouchdb.min.js"></script>
<script src="https://github.com/pouchdb/pouchdb/releases/download/7.1.1/pouchdb.memory.min.js"></script>
<div id='content' class='hide'>
<div>View: reduce</div>
<pre id='view_reduce'></pre>
<hr/>
<div>View: complex key</div>
<pre id='view_docs'></pre>
</div>
Edit
Amended the demo snippet according to OP's comments.

MongoDB optimized way to select document count change timeline

I want to select the progress of changing the number of documents for each day of the current week.
Users collection (very schematically): { _id, username (str), social (obj, default: null), createdAt (int timestamp) }
I need to select this data:
// Count of the new documents per each day. Example of the data I need:
{
all: { // Social field is null
timeline: [Monday: 1, Tuesday: 0, Wednesday: 4, Thursday: 26, Friday: 24, Saturday: 30, Sunday: 47]
},
social: { // Social field is not null
timeline: [Monday: 0, Tuesday: 0, Wednesday: 2, Thursday: 8, Friday: 5, Saturday: 16, Sunday: 9]
}
}
Better way will be if this was aggregator.
Here is my not optimized code:
let obj = { all: { timeline: [] }, social: { timeline: [] } }
const mondayStartTime = getMonday(new Date()).getTime();
const weekDaysStartTime = getWeekDaysStartTime(mondayStartTime);
const this_week_registered = await (
await users_collection.find({ createdAt: { $gte: mondayStartTime } })
).toArray();
for (let i = 0; i < weekDaysStartTime.length; i++) {
obj.all.timeline.push(
this_week_registered.filter((obj) =>
obj.createdAt >= weekDaysStartTime[i] && weekDaysStartTime[i + 1]
? obj.createdAt < weekDaysStartTime[i + 1]
: true,
).length,
);
obj.social.timeline.push(
this_week_registered.filter((obj) =>
obj.social && obj.createdAt >= weekDaysStartTime[i] && weekDaysStartTime[i + 1]
? obj.createdAt < weekDaysStartTime[i + 1]
: true,
).length,
);
}
I need to make this in one query.

How to get all groups from tabulator table

I need an array of "group" objects from a multi-level-grouped tabulator table where
group = { field: String, name: String }
but when I try to parse the payload of tabulator.getGroups(), I'm missing one of the groups. Is there a tabulator api function for retrieving all the groups from a table? tabulator.getGroups() was the only function I could find in the docs.
https://codepen.io/awcastellano/pen/vMpydW
let data = [
{
"partType": "Disc Brake Pad",
"vehicle": "2019 chevy silverado",
"brand": "Cardone",
"partApplication": "Left Front/MKD794"
},
{
"partType": "Disc Brake Pad",
"vehicle": "2019 ford F150",
"brand": "STS",
"partApplication": "Left Front/MKD794"
},
{
"partType": "Disc Brake Rotor",
"vehicle": "2019 chevy silverado",
"brand": "Cardone",
"partApplication": "Left Front/MKD795"
},
{
"partType": "Disc Brake Pad",
"vehicle": "2019 chevy silverado",
"brand": "Cardone",
"partApplication": "Left Front/MKD795"
},
{
"partType": "Disc Brake Caliper",
"vehicle": "2019 chevy silverado",
"brand": "Cardone",
"partApplication": "Left Front/MKD796"
}
]
let columns = [{
title: "Part",
field: "partApplication"
}, ]
let table = new Tabulator('#tabulator', {
data: data,
columns: columns,
groupBy: ["vehicle", "partType", "brand"]
})
function groups() {
let groups = []
let groupComponents = table.getGroups()
if (groupComponents.length == 0) return groups
for (var i = 0; i < groupComponents.length; i++) {
groups.push({ field: groupComponents[i]._group.field, name: groupComponents[i]._group.key })
}
let groupList = groupComponents[0]._group.groupList
while (groupList.length != 0) {
for (var i = 0; i < groupList.length; i++) {
groups.push({ field: groupList[i].field, name: groupList[i].key })
}
groupList = groupList[0].groupList
}
return groups
}
Call groups() in the codepen console. { field: "brand", name: "STS" } is missing from the array, yet is displayed in the table. I tried to paste the result here but couldn't figure out how to copy the result from codepen.
Thanks in advance!
Check Console
https://codepen.io/dota2pro/pen/NmVzmv
const getAllGroups = function(input){
for(let i=0; i< input.length; i++){
console.log(allRows[i].getGroup());
// Use this or
console.log(allRows[i].getGroup()._group.key
,allRows[i].getGroup()._group.field);
// Use this
console.log('field: ', input[i]._row.modules.group.field,'key:',input[i]._row.modules.group.key );
}}

postman POST request is populating the database correctly, but is getting null values when writing to file

I have been having a weird issue with one of my node controllers. For context, on a POST request for this specific controller, I store a an object in a mongo database, and also write the necessary parts of the object to a file. There is a website that already exists, which interfaces directly with the server, but I am writing a REST api for customers who would like a custom interface. (I did not write the node server or the website.)
My issue is that for some reason, the values being written to the file in this case are coming through as Null after a "post" like so:
{"legs":[{"ptu":{"tilt":{},"pan":{}}},{"audio":{"mute":false,"vol":0},"ptu":{"tilt":{"abs":null},"pan":{"abs":null}}},{"audio":{"mute":true,"vol":0},"ptu":{"tilt":{"abs":null},"pan":{"abs":null}}}]}
however, the forms on the website populate correctly, and if I press "save" from the website, the file is correctly updated. i.e.
{"legs":[{"ptu":{"tilt":{"abs":0},"pan":{"abs":0}}},{"audio":{"track":"/home/rahd/ult0316-p002/resources/tracks/Maid with the Flaxen Hair.mp3","vol":0,"mute":false},"ptu":{"tilt":{"abs":10},"pan":{"abs":10}}},{"audio":{"track":null,"vol":0,"mute":true},"ptu":{"tilt":{"abs":10},"pan":{"abs":10}}}]}
here is my postman request which is being sent as raw JSON:
{
"name": "NicksCoolTour3",
"location": "/home/rahd/ult0316-p002/resources/tours/5982374cb492c516c20c40d0.json",
"legs": [
{
"audio": {
"mute": true,
"volPercent": 0,
"vol": -120,
"track": null
},
"ptu": {
"poi": "59823726b492c516c20c40cd",
"tilt": {
"vel": 5,
"rel": 0,
"abs": 0
},
"pan": {
"vel": 5,
"rel": 0,
"abs": 0
},
"direction": "quickest"
},
"time": 0,
"velMode": "time",
"ptuMode": "poi"
},
{
"_id": "5982374cb492c516c20c40d2",
"audio": {
"mute": false,
"volPercent": 100,
"vol": -120,
"track": "5983222d79930a1dbd4d94ac"
},
"ptu": {
"tilt": {
"vel": 5,
"rel": 10,
"abs": 0
},
"pan": {
"vel": 5,
"rel": 10,
"abs": 0
},
"direction": "quickest"
},
"time": 0,
"velMode": "time",
"ptuMode": "rel"
},
{
"_id": "5982374cb492c516c20c40d1",
"audio": {
"mute": true,
"volPercent": 100,
"vol": -120,
"track": "59823711b492c516c20c40cc"
},
"ptu": {
"tilt": {
"vel": 5,
"rel": 0,
"abs": 0
},
"pan": {
"vel": 5,
"rel": 0,
"abs": 0
},
"direction": "quickest"
},
"time": 0,
"velMode": "time",
"ptuMode": "rel"
}
]
}
and here is my POST controller :
router.post('/',function (req, res, next){
var new_tour = new Tour(req.body);
new_tour._id = new mongoose.Types.ObjectId;
new_tour.save( function(err, tour) {
if (err) return next(err);
res.json({ message: "tours database sucessfully updated" });
});
});
I am not sure what could be causing this, it seems that the database is getting the correct values, but the function that writes a request to the file is not behaving properly.
here is the schema which handles the file writing:
var mongoose = require("mongoose")
, fs = require('fs')
, path = require('path')
, resources = require(path.join(__dirname, '..', '..', 'config', 'resources'));
var schema = new mongoose.Schema({
name: { type: String, default: '', unique: true, required: true },
location: { type: String },
legs: [{
ptuMode: { type: String, default: 'abs' }, // abs || rel || poi
velMode: { type: String, default: 'vel' }, // vel || time
time: { type: Number, default: 0 }, // vel || time
ptu: {
direction: { type: String, default: 'cw' }, // cw || ccw
pan: {
rel: { type: Number },
abs: { type: Number },
vel: { type: Number },
},
tilt: {
rel: { type: Number },
abs: { type: Number },
vel: { type: Number },
},
poi: {
type: mongoose.Schema.Types.ObjectId,
ref: 'POI'
},
},
audio: {
mute: { type: Boolean },
vol: { type: Number },
volPercent: { type: Number },
track: {
type: mongoose.Schema.Types.ObjectId,
ref: 'Track'
},
}
}]
},
{
timestamps: true
});
schema.pre('save', function (next) {
var tour = this;
if (!tour.location || tour.location.length < 1) {
tour.location = path.join(resources.tours, tour._id + '.json');
}
tour.writeToFile(function (err) {
console.error(err);
});
next();
});
schema.post('remove', function (tour) {
if (tour.location && tour.location.length > 0) {
fs.exists(tour.location, function (exists) {
if (exists) {
fs.unlink(tour.location);
console.log('Deleted Tour: ' + tour.location);
} else {
console.log('Tour not found, so not deleting file.');
}
});
}
});
schema.methods.writeToFile = function (callback) {
function saveToFile(tour, callback) {
var filePath = tour.location;
var name = tour.name;
delete tour.location;
delete tour.name;
delete tour.createdAt;
delete tour.updatedAt;
delete tour._id;
delete tour.__v;
for (li in tour.legs) {
var leg = tour.legs[li];
var index = li;
if (typeof index === 'string') {
index = parseInt(li);
}
delete leg._id;
// Delete rel, force abs
delete leg.ptu.tilt.rel;
delete leg.ptu.pan.rel;
if (leg.audio.hasOwnProperty("volPercent")) {
var x = leg.audio.volPercent;
delete leg.audio.volPercent;
var n = -120;
if (x > 0) {
var val = Math.pow((x / 100), 4);
n = Math.max(20 * Math.log10(val), -120)
}
leg.audio.vol = n;
}
if (index == 0) {
delete leg.ptu.pan.vel;
delete leg.ptu.tilt.vel;
} else {
if (leg.ptu.pan.vel == 0) {
leg.ptu.pan.vel = 50;
}
if (leg.ptu.tilt.vel == 0) {
leg.ptu.tilt.vel = 50;
}
if (leg.ptu.direction === 'ccw') {
leg.ptu.pan.vel = -(Math.abs(leg.ptu.pan.vel));
}
}
if (leg.ptu.direction === 'quickest') {
delete leg.ptu.tilt.vel;
delete leg.ptu.pan.vel;
}
if (typeof (leg.audio.track) === 'object' && leg.audio.track !== null) {
leg.audio.track = leg.audio.track.location;
}
// Handle Delay
if (leg.ptuMode == 'delay') {
delete leg.ptu.pan;
delete leg.ptu.tilt;
} else {
delete leg.ptu.delay;
}
delete leg.ptu.poi;
delete leg.time;
delete leg.ptu.direction;
delete leg.ptuMode;
delete leg.velMode;
if (index == 0) {
delete leg.audio;
}
}
if (filePath && filePath.length > 0) {
fs.writeFile(filePath, JSON.stringify(tour), function (err) {
if (err) {
if (callback) callback(err);
return console.error(err);
}
console.log("Tour Written: " + name);
});
} else {
console.error("Tour location empty: " + name);
}
}
var tour = this.prepareExport();
saveToFile(tour, callback);
};
schema.methods.prepareExport = function () {
// TODO: Ensure Track and POI are loaded
var tour = this.toObject();
var prevLeg = false;
// Calculate proper abs positions before prepare for export
for (li in tour.legs) {
var leg = tour.legs[li];
if (leg.ptuMode == 'poi') {
leg.ptu.pan.abs = leg.ptu.poi.pan;
leg.ptu.tilt.abs = leg.ptu.poi.tilt;
} else if (leg.ptuMode == 'rel' && prevLeg) {
leg.ptu.pan.abs = prevLeg.ptu.pan.abs + leg.ptu.pan.rel;
leg.ptu.tilt.abs = prevLeg.ptu.tilt.abs + leg.ptu.tilt.rel;
}
if (leg.ptuMode !== 'delay') {
prevLeg = leg;
}
}
// Calulcate degrees per second for each leg
prevLeg = false;
for (li in tour.legs) {
var leg = tour.legs[li];
if (prevLeg && leg.velMode == 'time') {
var time = Math.abs(leg.time) || 0;
if (time > 0) {
if (leg.ptuMode == 'delay') {
leg.ptu.delay = time;
} else if (leg.ptuMode == 'rel') {
leg.ptu.pan.vel = leg.ptu.pan.rel / time;
leg.ptu.tilt.vel = leg.ptu.tilt.rel / time;
} else {
leg.ptu.pan.vel = (leg.ptu.pan.abs - prevLeg.ptu.pan.abs) / time;
leg.ptu.tilt.vel = (leg.ptu.tilt.abs - prevLeg.ptu.tilt.abs) / time;
}
} else {
leg.ptu.pan.vel = 0;
leg.ptu.tilt.vel = 0;
}
leg.ptu.pan.vel = Math.abs(leg.ptu.pan.vel);
leg.ptu.tilt.vel = Math.abs(leg.ptu.tilt.vel);
if (leg.ptu.direction === 'ccw') {
leg.ptu.pan.vel = -leg.ptu.pan.vel;
}
// Vel bounds
if (leg.ptu.pan.vel > 50) {
leg.ptu.pan.vel = 50;
} else if (leg.ptu.pan.vel < 5 && leg.ptu.pan.vel > 0) {
leg.ptu.pan.vel = 5;
}
if (leg.ptu.tilt.vel > 50) {
leg.ptu.tilt.vel = 50;
} else if (leg.ptu.tilt.vel < 5 && leg.ptu.tilt.vel > 0) {
leg.ptu.tilt.vel = 5;
}
// Quickest was using 50ยบ/s, but should be omitted
if (leg.ptu.direction === 'quickest' && time === 0) {
delete leg.ptu.pan.vel;
delete leg.ptu.tilt.vel;
}
// Remove invalid tilt velocity when tilt diff is 0 so vector vel on control server is calculated correctly
if (prevLeg.ptu.tilt.abs - leg.ptu.tilt.abs == 0) {
delete leg.ptu.tilt.vel;
}
}
prevLeg = leg;
}
return tour;
};
/* bounds logic
if (leg.ptu.pan.abs > 180) {
leg.ptu.pan.abs = leg.ptu.pan.abs - 360;
} else if (leg.ptu.pan.abs < -180) {
leg.ptu.pan.abs = leg.ptu.pan.abs + 360;
}
if (leg.ptu.tilt.abs > 90) {
leg.ptu.tilt.abs = 90;
} else if (leg.ptu.tilt.abs < -90) {
leg.ptu.tilt.abs = -90;
}
*/
var Tour = mongoose.model("Tour", schema);
module.exports = Tour;
There's error on your async functions :
schema.pre('save', function (next), next must me in the callback function
tour.writeToFile(function (err) {
console.error(err);
next();
});
schema.methods.writeToFile = function (callback) : you must return callback on success to and not only on error
if (filePath && filePath.length > 0) {
fs.writeFile(filePath, JSON.stringify(tour), function (err) {
if (err) {
if (callback) callback(err);
return console.error(err);
}
else
{
console.log("Tour Written: " + name);
if (callback) callback(err);
}
});
} else {
console.error("Tour location empty: " + name);
if (callback) callback(err);
}

Update age based on dob in a collection in Mongodb

I have 1000 Documents in one of the collection.
{ "_id": ObjectId("56d97671f6ad671b7d1c3d76"), "parseId":
"TdKxj9FFPY", "phone": "6643545645", "dob": "15-06-87", "age": 121
"createdAt": ISODate("2016-03-01T16:39:00.947Z"), "updatedAt":
ISODate("2016-03-01T16:39:00.947Z"), "__v": 0 }
{ "_id":ObjectId("56d97671f6ad671b7d1c3d76"), "parseId": "TdKxj9FFPY",
"phone": "9847523654", "dob": "15-06-93", "age": 100 "createdAt":
ISODate("2016-03-01T16:39:00.947Z"), "updatedAt":
ISODate("2016-03-01T16:39:00.947Z"), "__v": 0 }
{ "_id":ObjectId("56d97671f6ad671b7d1c3d76"), "parseId": "TdKxj9FFPY",
"phone": "4564646646", "dob": "15-06-43", "age": 152 "createdAt":
ISODate("2016-03-01T16:39:00.947Z"), "updatedAt":
ISODate("2016-03-01T16:39:00.947Z"), "__v": 0 }
...................
...................
But some of the values of age are wrong.The Values of dob are right.So i need to update the values of age based on the dob in a single query manually?
Finally i found a solution.I just export the collection in to a json file and updated all the documents by using js function and import the collections in to the db.
html
<html>
<head>
<script type="text/javascript" src="https://rawgit.com/eligrey/FileSaver.js/master/FileSaver.js"></script>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.12.4/jquery.min.js"></script>
</head>
<body>
<button id="save">save</button>
</body>
<script type="text/javascript">
function getAge(dateString) {
var today = new Date();
var birthDate = new Date(dateString);
var age = today.getFullYear() - birthDate.getFullYear();
var m = today.getMonth() - birthDate.getMonth();
if (m < 0 || (m === 0 && today.getDate() < birthDate.getDate())) {
age--;
}
return age;
}
var exportData;
$.getJSON("input.json", function(data) {
exportData = data;
exportData.forEach(function(item, index) {
// console.log('item dob', item.dob);
var dobInfo = item.dob.split('-');
var dd = dobInfo[0];
if (dd < 10) {
dd = '0' + dd;
}
var mm = dobInfo[1];
if (mm < 10) {
mm = '0' + mm;
}
var yy = dobInfo[2];
yy = (yy < 17) ? '20' + yy : '19' + yy;
// console.log('dd', dd);
// console.log('mm', mm);
// console.log('yy', yy);
var newdate = mm + '-' + dd + '-' + yy;
// console.log('newdate',newdate);
console.log('index[' + index + ']', item.dob);
var age = getAge(newdate);
console.log('age--->', age);
exportData[index].age = age;
});
});
document.getElementById('save').onclick = function() {
var textToSave = JSON.stringify(exportData),
filename = 'output.json',
blob = new Blob([textToSave], {
type: "text/plain;charset=utf-8"
});
saveAs(blob, filename);
}
</script>
</html>
EDITED: my bad about the first one.
Export your data to a JSON and run it through this and reimport it. Your only dependency is fs and momentjs.
var moment = require('moment'),
fs = require('fs'),
json = [
{ "_id":"56d97671f6ad671b7d1c3d76", "parseId": "TdKxj9FFPY", "phone": "6643545645", "dob": "15-06-87", "age": 121, "createdAt": "2016-03-01T16:39:00.947Z", "updatedAt": "2016-03-01T16:39:00.947Z", "__v": 0 },
{ "_id":"56d97671f6ad671b7d1c3d76", "parseId": "TdKxj9FFPY", "phone": "9847523654", "dob": "15-06-93", "age": 100, "createdAt": "2016-03-01T16:39:00.947Z", "updatedAt": "2016-03-01T16:39:00.947Z", "__v": 0 },
{ "_id":"56d97671f6ad671b7d1c3d76", "parseId": "TdKxj9FFPY", "phone": "4564646646", "dob": "15-06-43", "age": 152, "createdAt": "2016-03-01T16:39:00.947Z", "updatedAt": "2016-03-01T16:39:00.947Z", "__v": 0 }
];
Object.keys(json).forEach(function(key) {
age = moment().diff(moment(json[key].dob,"DD/MM/YY"),'years');
//
//dates before 1970 are negative
//
if (parseInt(age) < 0) {
age += 100;
}
json[key].age = age;
});
fs.writeFile('data.json', JSON.stringify(json), function (err) {
if (err) return console.log(err);
console.log('compeleted');
});
age outputs = [29, 23, 74];

Resources