MongoDB Fetch check if data exists - node.js

I am trying to find the best way to write this code. I fetch locations from a remote resource and need to check whether any new locations are present in the data. If there are, I should add them to my database; if they are not new, I just want to update them.
const http = require('http');
const timeout = 5000; // 5 seconds
const MongoClient = require('mongodb').MongoClient;

// Database Name
const dbName = 'weatherApp';
const url = 'mongodb://localhost:27017';

// Connect using MongoClient
MongoClient.connect(url, function(err, client) {
    if (err) {
        console.log(err);
        return;
    }
    const locationsCollection = client.db(dbName).collection('locations');

    (function fetchTemperatureLoop() {
        console.log('Started http request..');
        http.get('remote url..', function(resp) {
            var data = '';
            resp.on('data', (chunk) => {
                data += chunk;
            });
            resp.on('end', () => {
                if (data.isJson()) {
                    var locations = JSON.parse(data).toArray();
                    (function locationsLoop() {
                        var location = locations.pop();
                        locationsCollection.findOne({location: location.location}, function(err, result) {
                            if (err) {
                                console.log(err);
                                return;
                            }
                            if (result) {
                                // Exists
                                var measurements = result.measurements;
                                measurements.push({timestamp: +new Date, temperature: location.temperature});
                                locationsCollection.update({location: location.location}, {$set: {measurements: measurements}}, function(err) {
                                    if (err) {
                                        console.log(err);
                                        return;
                                    }
                                    console.log('Added new temperature for location: ' + location.location);
                                    continueLocationsLoop();
                                });
                            } else {
                                // Doesn't exist
                                location.measurements = [];
                                location.measurements.push({timestamp: +new Date, temperature: location.temperature});
                                locationsCollection.insert(location, function(err) {
                                    if (err) {
                                        console.log(err);
                                        return;
                                    }
                                    console.log('Created new location: ' + location.location);
                                    continueLocationsLoop();
                                });
                            }
                        });

                        function continueLocationsLoop() {
                            if (locations.length) {
                                locationsLoop();
                            } else {
                                setTimeout(fetchTemperatureLoop, timeout);
                            }
                        }
                    })();
                }
            });
        }).on("error", (err) => {
            console.log("Error: " + err.message);
            console.log("Continue anyways..");
            setTimeout(fetchTemperatureLoop, timeout);
        });
    })();
});

String.prototype.isJson = function() {
    try {
        JSON.parse(this);
    } catch (e) {
        return false;
    }
    return true;
};

Object.prototype.toArray = function() {
    var arr = [];
    for (var key in this) {
        if (this.hasOwnProperty(key)) {
            arr.push(this[key]);
        }
    }
    return arr;
};
I really want to avoid using so many closures, but I don't want to repeat myself either. Any help rewriting this code in an optimal way is much appreciated.
My main problem was iterating through the locations and making the calls to the database.
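For reference, a minimal sketch of how the same loop could be flattened with async/await and an upsert, so new locations are inserted and existing ones just get a new measurement appended in a single call. It reuses the URL, field names, and collection from the code above, but it is an illustration under those assumptions rather than a drop-in replacement:

const http = require('http');
const MongoClient = require('mongodb').MongoClient;

const url = 'mongodb://localhost:27017';
const dbName = 'weatherApp';
const timeout = 5000; // 5 seconds

// Wrap http.get in a promise so it can be awaited below.
function fetchJson(requestUrl) {
    return new Promise((resolve, reject) => {
        http.get(requestUrl, (resp) => {
            let data = '';
            resp.on('data', (chunk) => { data += chunk; });
            resp.on('end', () => {
                try {
                    resolve(JSON.parse(data));
                } catch (e) {
                    reject(e);
                }
            });
        }).on('error', reject);
    });
}

async function main() {
    const client = await MongoClient.connect(url);
    const locationsCollection = client.db(dbName).collection('locations');

    while (true) {
        try {
            const payload = await fetchJson('remote url..');
            for (const location of Object.values(payload)) {
                // upsert: insert the location if it is new, otherwise just
                // append the latest measurement to the existing document.
                await locationsCollection.updateOne(
                    { location: location.location },
                    {
                        $setOnInsert: { temperature: location.temperature },
                        $push: { measurements: { timestamp: Date.now(), temperature: location.temperature } }
                    },
                    { upsert: true }
                );
            }
        } catch (err) {
            console.log(err);
        }
        // Wait before the next fetch, mirroring the setTimeout loop above.
        await new Promise((resolve) => setTimeout(resolve, timeout));
    }
}

main().catch(console.log);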

Related

Streaming data from oracle db to browser with node.js

I am learning node.js and databases. I am trying to stream heavy data, about 7,700,000 rows and 96 columns, from Oracle to the client. Later I use that data for a virtual table. But the client shows only one row, and then the node command prompt displays the error "Cannot set headers after they are sent to the client". How can I stream the data to the client? Please help.
var oracledb = require('oracledb');
const cors = require('cors');
var express = require('express');
var app = express();
app.use(cors());
oracledb.outFormat = oracledb.ARRAY;
oracledb.getConnection({
user: 'user',
password: 'password',
connectString: 'some string'
},
(err, connection) => {
if (err) {
console.error(err.message);
return;
}
var rowsProcessed = 0;
var startTime = Date.now();
var dataSize = 0;
var stream = connection.queryStream(
'SELECT * FROM table',
);
// stream.on('data', function (data) {
// rowsProcessed++;
// // console.log(JSON.stringify(data));
// // console.log(data);
// dataSize = dataSize + data.length;
// // oracleData.push(data);
// // console.log("pushing");
// // console.log(oracleData);
// // app.get('/data', (req, res) => {
// // res.send(data);
// // })
// // console.log(data);
// });
app.get('/data', (req, res) => {
stream.on('data', (data) => {
rowsProcessed++;
dataSize = dataSize + data.length;
res.send(JSON.stringify(data));
})
})
stream.on('end', function () {
var t = ((Date.now() - startTime) / 1000);
console.log('queryStream(): rows: ' + rowsProcessed +
', seconds: ' + t);
// console.log(dataSize + ' bytes');
connection.close(
function (err) {
if (err) {
console.error(err.message);
} else {
console.log("connection closed")
}
}
)
})
}
);
app.listen(5000, () => {
console.log('Listening at 5000')
})
I tried the above approach, but it is failing. How can I achieve the desired output?
The browser freezes if I output the entire data set at once, which is why I am trying to use streaming; the node command prompt also reports an out-of-memory error if I load everything at once.
Thank you.
The first thing you'll want to do is organize your app a little better. Separation of concerns is important; you should have a connection pool, etc. Have a look at this series for some ideas: https://jsao.io/2018/03/creating-a-rest-api-with-node-js-and-oracle-database/
Once you get the organization figured out, incorporate this example of streaming a large result set out.
const oracledb = require('oracledb');
async function get(req, res, next) {
try {
const conn = await oracledb.getConnection();
const stream = await conn.queryStream('select * from employees', [], {outFormat: oracledb.OBJECT});
res.writeHead(200, {'Content-Type': 'application/json'});
res.write('[');
let firstRow = true;
stream.on('data', (row) => {
// Write the separator before every row except the first so the
// response is valid JSON (no trailing comma before the closing ']').
if (!firstRow) {
res.write(',');
}
firstRow = false;
res.write(JSON.stringify(row));
});
stream.on('end', () => {
res.end(']');
});
stream.on('close', async () => {
try {
await conn.close();
} catch (err) {
console.log(err);
}
});
stream.on('error', async (err) => {
next(err);
try {
await conn.close();
} catch (err) {
console.log(err);
}
});
} catch (err) {
next(err);
}
}
module.exports.get = get;
If you find you're doing this a lot, simplify things by creating a reusable transform stream:
const oracledb = require('oracledb');
const { Transform } = require('stream');
class ToJSONArray extends Transform {
constructor() {
super({objectMode: true});
this.push('[');
}
_transform (row, encoding, callback) {
if (this._prevRow) {
this.push(JSON.stringify(this._prevRow));
this.push(',');
}
this._prevRow = row;
callback(null);
}
_flush (done) {
if (this._prevRow) {
this.push(JSON.stringify(this._prevRow));
}
this.push(']');
delete this._prevRow;
done();
}
}
async function get(req, res, next) {
try {
const toJSONArray = new ToJSONArray();
const conn = await oracledb.getConnection();
const stream = await conn.queryStream('select * from employees', [], {outFormat: oracledb.OBJECT});
res.writeHead(200, {'Content-Type': 'application/json'});
stream.pipe(toJSONArray).pipe(res);
stream.on('close', async () => {
try {
await conn.close();
} catch (err) {
console.log(err);
}
});
stream.on('error', async (err) => {
next(err);
try {
await conn.close();
} catch (err) {
console.log(err);
}
});
} catch (err) {
next(err);
}
}
module.exports.get = get;
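For completeness, a rough sketch of how either handler might be wired into an Express app with a connection pool, in the spirit of the series linked above; the module path and route here are hypothetical:

const express = require('express');
const oracledb = require('oracledb');
const employees = require('./employees'); // hypothetical module exporting the get() handler above

async function startup() {
    // Create the default pool so oracledb.getConnection() inside the
    // handler borrows a pooled connection instead of opening a new one.
    await oracledb.createPool({
        user: 'user',
        password: 'password',
        connectString: 'some string'
    });

    const app = express();
    app.get('/data', employees.get);

    // Error handler so next(err) in the route produces a 500 response.
    app.use((err, req, res, next) => {
        console.error(err);
        res.status(500).end();
    });

    app.listen(5000, () => console.log('Listening at 5000'));
}

startup().catch(console.error);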

async.eachSeries runs only once with async.waterfall inside for each iteration

I am new to the async library. I have used async.eachSeries with async.waterfall inside each iteration. I see that async.waterfall runs only once.
Here is my code:
var fs = require('fs'),
async = require('async'),
Client = require('node-rest-client').Client;
// REST API Call and output in jsonOutput.results
console.log(jsonOutput.results.length); // jsonOutput.results has 124 records.
async.eachSeries(jsonOutput.results, function(account, callback) {
var dataObject = {};
dataObject.updatetime = new Date();
var setAccountInfoURL = ""; // Data Update REST API Request
async.waterfall([
function setAccountInfo(updateCallback) {
// client.get(setAccountInfoURL, function (data, response) {
// var jsonOutput = JSON.parse(data.toString('utf8'));
updateCallback(null, "output", account)
// });
},
function saveAccountInfo(jsonOutput, account, updateCallback) {
var debuglog = JSON.stringify(account) + "\n" + jsonOutput;
fs.appendFile("debuginfo.json", debuglog + "\n", function (err) {
if(err) {
console.log(err);
}
console.log("JSON saved to " + "debuginfo.json");
updateCallback(null);
});
}
],function asyncComplete(err) {
if (err) {
console.warn('Error setting account info.', err);
}
console.log('async completed');
});
}, function(err){
if (err) {
console.log('error in loop');
}
console.log('loop completed');
});
Output:
124
JSON saved to debuginfo.json
async completed
Any help is really appreciated.
I found my mistake. I had missed calling the eachSeries callback at the end of each iteration, just after the waterfall completes.
var fs = require('fs'),
async = require('async'),
Client = require('node-rest-client').Client;
// REST API Call and output in jsonOutput.results
console.log(jsonOutput.results.length); // jsonOutput.results has 124 records.
async.eachSeries(jsonOutput.results, function(account, callback) {
var dataObject = {};
dataObject.updatetime = new Date();
var setAccountInfoURL = ""; // Data Update REST API Request
async.waterfall([
function setAccountInfo(updateCallback) {
// client.get(setAccountInfoURL, function (data, response) {
// var jsonOutput = JSON.parse(data.toString('utf8'));
updateCallback(null, "output", account)
// });
},
function saveAccountInfo(jsonOutput, account, updateCallback) {
var debuglog = JSON.stringify(account) + "\n" + jsonOutput;
fs.appendFile("debuginfo.json", debuglog + "\n", function (err) {
if(err) {
console.log(err);
}
console.log("JSON saved to " + "debuginfo.json");
updateCallback(null);
});
}
],function asyncComplete(err) {
if (err) {
console.warn('Error setting account info.', err);
}
console.log('async completed');
callback(null); // this is the change.
});
}, function(err){
if (err) {
console.log('error in loop');
}
console.log('loop completed');
});
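As an aside, the same record-by-record sequencing can be written without the callback bookkeeping by using async/await with fs.promises. A minimal sketch, assuming jsonOutput.results is already populated and keeping the commented-out REST call as a placeholder:

const fs = require('fs').promises;

async function saveAccounts(jsonOutput) {
    // Processes one account at a time, like async.eachSeries.
    for (const account of jsonOutput.results) {
        // Placeholder for the setAccountInfo REST call above.
        const output = 'output';
        const debuglog = JSON.stringify(account) + '\n' + output;
        await fs.appendFile('debuginfo.json', debuglog + '\n');
        console.log('JSON saved to debuginfo.json');
    }
    console.log('loop completed');
}

saveAccounts(jsonOutput).catch(console.error);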

Not able to display image in the browser for a simple express code

I have my express code:
var express=require('express');
var fs=require('fs');
var http=require('http');
var crypto=require('crypto');
var mysql=require('mysql');
var async=require('async');
var app=express();
var connection=mysql.createConnection({
host: 'localhost',
user: '************',
password: '************',
database: 'ImagePostingDB'
});
connection.connect(function(err) {
if ( !err ) {
console.log("Connected to MySQL");
} else if ( err )
{
console.log(err);
} });
app.set('port',process.env.PORT||7004);
app.use('/Details',express.static(__dirname+'/public/images'));
app.use(express.bodyParser());
app.get('/DescriptionSortedPrice/',function(request,response){
connection.query('SELECT * FROM ImagePostingtable ORDER BY Sl_no', function(err, rows, fields) {
if (err) {
return response.send(500, err.message);
}
console.log('Found results:', rows);
response.json({
'restaurants' : rows
});
});
});
app.post('/Details/',function(req,res,next) {
var file_name=req.files.key.originalFilename;
var file_name1=req.body.key1;
var name;
console.log(file_name);
console.log(file_name1);
async.series( [
// Get the first table contents
function ( callback ) {
crypto.randomBytes(8, function(ex, buf) {
var array = req.files.key.originalFilename.split('.');
var type = array[array.length - 1];
name = buf.toString('hex') + '.' + type;
fs.rename(req.files.key.path, './public/images/' + name, function(e) {
if (e) {
res.send(500, e.message);
} else {
res.send("I got the message - This i confirm");
}
return callback(null);
});
});
},
// Updating the database
function ( callback ) {
connection.query('INSERT INTO ImagePostingtable (Image_Name,Person_Name) VALUES (?,?)', [name,file_name1], function (err, rows, fields) {
console.log('Connection result error ' + err);
return callback(null);
});
}
]);
});
app.get('/Details/',function(req,res){
res.send("Image displayed");
});
http.createServer(app).listen(app.get('port'),function(){
console.log('Express server listening on port'+app.get('port'));
});
The snapshot shows I am connected to MySQL and to the server.
I already have a few images in /public/images, as shown in the snapshot below.
Now if I test one of the images in my browser:
http://54.218.73.244:7004/c92beeaf5ba50e65.jpg
I get the error below and the image is not displayed in the browser:
Cannot GET /c92beeaf5ba50e65.jpg
How can I resolve this?
Hoping this might help someone.
With the help of Andrew in one of the answers, I resolved this.
I just changed the line of code to:
app.use(express.static(__dirname+'/public/images'));
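For anyone running into the same thing: with the original app.use('/Details', ...) line, express.static was mounted under the /Details prefix, so the file was being served at /Details/c92beeaf5ba50e65.jpg rather than at the root. Both variants below work; they only differ in the URL prefix (a sketch using the same directory as above):

// Serves http://54.218.73.244:7004/c92beeaf5ba50e65.jpg
app.use(express.static(__dirname + '/public/images'));

// Serves http://54.218.73.244:7004/Details/c92beeaf5ba50e65.jpg
app.use('/Details', express.static(__dirname + '/public/images'));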

Event-driven asynchronous callbacks in Node js

Is there a way to block the asynchronous callback behaviour of node.js?
Please advise...
For example,
var express = require('express');
var app = express();
var MongoClient = require('mongodb').MongoClient,
format = require('util').format;
var cors = require('cors');
app.get('/gantt', cors(), function (request, response) {
MongoClient.connect('mongodb://127.0.0.1:27017/test', function (err, db) {
if (err) throw err;
var collection = db.collection('ganttdata');
collection.find({}, {
"_id": 0
}).toArray(function (err, results) {
var jsonString = JSON.stringify(results);
response.setHeader('Content-Type', 'text/plain');
response.send('{\"data\":' + jsonString + '}');
});
});
});
app.listen(3000);
console.log('Listening on port 3000...');
Even though Node.js prints the console statement first, I want app.get() to be executed.
My scenario is the same as the one above.
This is my scenario:
var ganttresult = new Array();
app.get('/get', cors(), function (request, response) {
console.log('hello');
connection.query("SELECT distinct id FROM ganttdata", function (err, rows) {
if (err) {
console.log('error in fetching ' + err);
} else {
var all_id = rows;
for (var j = 0; j < all_id.length; j++) {
console.log('hello1');
connection.query("SELECT id,tailName FROM ganttdata where id= '" + all_id[j].id + "'", function (err, rows) {
if (err) {
console.log('error in fetching ' + err);
} else {
var jsonString1 = rows;
var set_id = jsonString1[0].id;
connection.query("SELECT item_id,name,start,end FROM ganttdata where id= '" + set_id + "'", function (err, rows) {
if (err) {
console.log('error in fetching ' + err);
} else {
var jsonString2 = rows;
var gantt1 = new Object();
gantt1.id = jsonString1[0].id;
gantt1.tailName = jsonString1[0].tailName;
var series = new Array();
for (var i = 0; i < jsonString2.length; i++) {
var gantt2 = new Object();
gantt2.item = jsonString2[i];
series.push(gantt2);
gantt1.series = series;
}
//console.log(gantt1);
console.log('hi');
ganttresult.push(gantt1);
console.log(ganttresult);
}
});
}
});
}
var result = JSON.stringify(ganttresult);
console.log(result);
response.send('{\"data\":' + result + '}');
response.end();
}
});
});
When I run this code,
I get an empty result set, and when I re-run it I get the result.
I guess it is due to the asynchronous callback nature of node.js.
Please advise...
Thanks
I have tried the async.waterfall method as given below:
app.get('/get',cors(), function(request,response) {
async.waterfall([
function(result) {
connection.query("SELECT id FROM Gantt",function(err, rows) {
if (err) {
console.log('error in fetching ' + err);
}
else{
var all_id=rows;
for(var j=0;j<all_id.length;j++){
connection.query("SELECT id,tailName FROM Gantt where id= '"+all_id[j].id+"'",function(err, rows) {
if (err) {
console.log('error in fetching ' + err);
}
else{
var jsonString1=rows;
var set_id=jsonString1[0].id;
connection.query("SELECT item_id,name,start,end FROM GanttFlight where id= '"+set_id+"'",function(err, rows) {
if (err) {
console.log('error in fetching ' + err);
}
else{
var jsonString2=rows;
var gantt1=new Object();
gantt1.id=jsonString1[0].id;
gantt1.name=jsonString1[0].tailName;
var series = new Array();
series=[];
for(var i=0;i<jsonString2.length;i++){
var gantt2=new Object();
gantt2.item=jsonString2[i];
series.push(gantt2);
gantt1.series=series;
}
ganttresult.push(gantt1);
}
});
}
});
}
var result= JSON.stringify(ganttresult);
console.log(ganttresult);
response.send(ganttresult);
ganttresult=[];
//response.send('{\"data\":'+result+'}');
response.end();
}
});
}
], function(err, status) {
console.log(status);
});
});
app.listen(3000);
console.log('Listening on port 3000...');
I am getting an empty result first, and when I refresh the browser I get the required result.
Please advise.
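No accepted answer is recorded here, but the root cause is that response.send runs before the nested connection.query callbacks have pushed anything into ganttresult. One way to wait for every query before responding is to promisify connection.query and use async/await; a minimal sketch, assuming the same mysql connection and table columns as above:

const util = require('util');
const query = util.promisify(connection.query).bind(connection);

app.get('/get', cors(), async function (request, response) {
    try {
        const ids = await query('SELECT DISTINCT id FROM ganttdata');
        const ganttresult = [];
        for (const row of ids) {
            // Each await finishes before the next query starts.
            const heads = await query('SELECT id, tailName FROM ganttdata WHERE id = ?', [row.id]);
            const items = await query('SELECT item_id, name, start, end FROM ganttdata WHERE id = ?', [row.id]);
            ganttresult.push({
                id: heads[0].id,
                tailName: heads[0].tailName,
                series: items.map(function (item) { return { item: item }; })
            });
        }
        // Every query has completed, so the full result can be sent once.
        response.send({ data: ganttresult });
    } catch (err) {
        console.log('error in fetching ' + err);
        response.status(500).end();
    }
});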

How to pass changes from middleware to socket.io?

I am using node.js with socket.io to push real-time notifications to users. Currently, I am just running a query in my socket.io code and sending the result back to the client, but I need to let the socket know about changes as they occur, and either push those changes or re-query the db for the new number and send that to the client.
For example, if a user gets a friend request, the notification count will change, and I want socket.io to push the new notification count to the user.
Here is my socket.io code in my app.js file:
io.on('connection', function(socket) {
var sessionID = socket.handshake.sessionID,
session = new connect.middleware.session.Session({ sessionStore: sessionStore }, socket.handshake.session)
console.log('socket: new ' + sessionID)
socket.broadcast.emit('arpNewConn', session.passport.user)
var intervalID = setInterval(function() {
socket.handshake.session.reload(function() {
socket.handshake.session.touch().save()
})
socket.emit('pulse', { heartbeat: new Date().toString(), timestamp: new Date().getTime() })
}, 300 * 1000)
socket.on('disconnect', function() {
console.log('socket: dump ' + sessionID)
socket.broadcast.emit('arpLostConn', session.passport.user)
clearInterval(intervalID)
})
socket.emit('entrance', {message: 'Message works'});
dbnotif.findOne(userID, function (err, user) {
if(err) throw err;
notify = user.notifications;
socket.emit('notify', {notific: notify});
});
});
Here is the client side:
div#CheckSocket
script(src='http://localhost:3000/socket.io/socket.io.js')
script.
$(document).ready(function () {
console.log('socket');
var socket = io.connect('http://localhost:3000/');
console.log('entered1');
socket.on('entrance', function (data) {
console.log('entered');
console.log(data.message);
});
socket.on('notify', function (data) {
console.log('noting');
console.log(data.notific);
if(data.notific !== 0)
$('.notifications').html(data.notific);
});
socket.on('reconnecting', function(data) {
setStatus('reconnecting');
console.log('entered2');
});
function setStatus(msg) {
console.log('connection status: ' + msg);
console.log('entered5');
}
});
Here is the example of adding a friend in the route file:
exports.addContactPost = function(req, res, err) {
async.waterfall([
function(callback) {
var success;
var newFriend = new Friend ({
userId: req.signedCookies.userid,
friend_id: mongoose.Types.ObjectId(req.body.otherUser),
friend_status: 1
});
newFriend.save(function(err){
if(err) {
console.log(err);
} else {
console.log("saved it");
success = true;
}
});
callback(null, success)
},
function(success, callback) {
//if(success === true) {
var success2;
var newFriend2 = new Friend ({
userId: mongoose.Types.ObjectId(req.body.otherUser),
friend_id: req.signedCookies.userid,
friend_status: 2
});
newFriend2.save(function(err){
if(err) {
res.send("request not received");
} else {
success2 = true;
}
});
callback(null, success2);
//} else {
// res.send("error with request sent");
//}
},
function(success2, callback) {
console.log('callback3');
//if(success2 === true) {
var success3;
Notification.findOneAndUpdate({userId: mongoose.Types.ObjectId(req.body.otherUser)}, {
$inc: {notifications: 1}
}, function(err, notify) {
if(err) {
res.send(err);
} else {
console.log(notify);
if(notify.added_notifications === true) {
// enable mail and include general u have got a new request... do not include name because not storing it
}
}
success3 = true;
callback(null, success3);
});
}],
function(err, results) {
res.json({response: true});
console.log("Add successful");
});
};
Notes: dbnotif is a mongoose model; userID is a global variable available in the file.
I helped him solve this question offline, but we ended up using an EventEmitter as a proxy.
// main.js
var EventEmitter = require('events').EventEmitter;
var emitter = new EventEmitter();
Then add it to each request as middleware:
// elsewhere in main.js
app.use(function(req, res, next) {
req.emitter = emitter;
next();
});
Then in the external routes file:
// routes.js
exports.addContactPost = function(req, res, err) {
req.emitter.emit( 'some-key', whatever, data, you, want );
};
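To close the loop on the socket.io side, the same emitter can be subscribed to alongside the io.on('connection', ...) handler so that a friend request immediately pushes a fresh count to clients. A rough sketch reusing the 'some-key' event name and the dbnotif/userID variables described above; a real app would target only the affected user's socket rather than broadcasting:

// app.js
emitter.on('some-key', function() {
    // Re-query the current notification count and push it to clients.
    dbnotif.findOne(userID, function(err, user) {
        if (err) throw err;
        io.emit('notify', { notific: user.notifications });
    });
});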
