Node.js: Why is the session not saved?

Here's the code:
app.get('/vklogin', function(request, response) {
    // assumes: var url = require('url'), querystring = require('querystring'),
    // https = require('https'), pg = require('pg'); dbconfig and the session
    // middleware are configured elsewhere
    console.log('Authorization via the "VKontakte" social network'.green);
    var url_parts = url.parse(request.url, true);
    var query = url_parts.query;
    var data = querystring.stringify({
        client_id: '4836170',
        client_secret: 'cPkR53zhon0lU7TAiz9f',
        code: query.code,
        redirect_uri: 'http://' + request.headers.host + '/vklogin'
    });
    var options = {
        host: 'oauth.vk.com',
        port: 443,
        path: '/access_token?' + data,
        method: 'GET'
    };
    var httpsreq = https.request(options, function(response) {
        response.setEncoding('utf8');
        response.on('data', function(chunk) {
            var chunk = JSON.parse(chunk);
            pg.connect(dbconfig, function(err, client, done) {
                if (err) {
                    return console.error('Database connection error', err);
                }
                client.query('select * from users where vk = $1', [chunk.user_id], function(err, result) {
                    done();
                    if (err) {
                        console.error('Error reading data from the database', err);
                    } else {
                        if (result.rows[0]) {
                            console.log(result.rows[0]);
                            request.session.authorized = true;
                            request.session.userid = result.rows[0].id;
                        } else {
                            console.log('Trying to create a new user.');
                            // parameterized to avoid SQL injection
                            client.query('insert into users (email, vk) values ($1, $2) returning id', [chunk.email, chunk.user_id], function(err, result) {
                                done();
                                if (err) {
                                    console.error('Error writing data to the database', err);
                                } else {
                                    request.session.authorized = true;
                                    request.session.userid = result.rows[0].id;
                                    console.log('Added new user # ' + result.rows[0].id);
                                }
                            });
                        }
                    }
                    client.end();
                });
                console.log('User id: ' + request.session.userid);
            });
        });
    });
    httpsreq.end();
    if (request.session.authorized) {
        response.writeHead(301, {
            Location: 'http://' + request.headers.host + '/cabinet'
        });
    } else {
        response.writeHead(301, {
            Location: 'http://' + request.headers.host
        });
    }
    response.end();
});
So why is the session not saved outside the callbacks? What is wrong in my code?
Inside the callback everything is fine; outside it, the session values are undefined.
By my logic, once the session is set it should persist and be available everywhere afterwards, shouldn't it?
I also tried declaring a variable for the session, but that did not work either, and no error is thrown, so I don't even know where to dig.
var sess;
app.get('/vklogin', function(request, response) {
sess = request.session;
// other code...
});
UPD:
My problem comes from a lack of understanding of how to control asynchronous flow. I cannot work out how to make things happen in order: first the database queries, then saving the information into the session, and only then checking the session variables and redirecting to the right page.
If you know how to get the correct execution order, please write an answer.

OK, I found that I need an async control-flow pattern. Look here: http://book.mixu.net/node/ch7.html
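The gist of the fix, as a rough sketch (reusing the route above and only showing the lookup branch): the redirect has to be sent from inside the innermost callback, after request.session has actually been set, instead of at the bottom of the route handler.

// Sketch: a helper that redirects only once the session flags are known.
function finishLogin(request, response) {
    var target = request.session.authorized ? '/cabinet' : '';
    response.writeHead(301, {
        Location: 'http://' + request.headers.host + target
    });
    response.end();
}

// Inside the https.request data handler, call it from the query callback:
client.query('select * from users where vk = $1', [chunk.user_id], function(err, result) {
    done();
    if (err || !result.rows[0]) return finishLogin(request, response); // still unauthorized
    request.session.authorized = true;
    request.session.userid = result.rows[0].id;
    finishLogin(request, response); // the redirect happens only now
});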

Related

twitter.stream is not a function in node.js?

I am not able to monitor Twitter. I followed the procedure to do sentiment analysis (Twitter) in Node.js code. It verified my Twitter account correctly, but it says stream is not a function. I have enclosed the code; can anyone solve this issue? Thanks in advance.
app.get('/watchTwitter', function (req, res) {
const twitter = new twitterAPI({
consumerKey: "asas",
consumerSecret: "sdcscs"
});
const accessToken = "cdccd";
const accessTokenSecret = "csdcs";
var stream;
var testTweetCount = 0;
var phrase = 'bieber';
twitter.verifyCredentials(accessToken, accessTokenSecret, params, function (error, data, response) {
if (error) {
console.log(error);
} else {
console.log(data["screen_name"]);
stream = twitter.stream('statuses/filter',
{
'track': phrase
}, function (stream) {
res.send("Monitoring Twitter for \'" + phrase + "\'... Logging Twitter traffic.");
stream.on('data', function (data)
{
testTweetCount++;
if (testTweetCount % 50 === 0)
{
console.log("Tweet #" + testTweetCount + ": " + data.text);
}
});
});
}
});
});
app.listen(8086,function()
{
console.log("port is listen on 8086");
});
You are referring to the Twitter npm package's documentation, but you are using node-twitter-api. See its documentation. For statuses, you need to use the following method.
twitter.statuses("update", {
status: "Hello world!"
},
accessToken,
accessTokenSecret,
function(error, data, response) {
if (error) {
console.log(error);
} else {
console.log(data);
}
}
);
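If the intention was actually to use the Twitter npm package (the one whose docs show a stream() method), a minimal sketch looks roughly like this; the keys and the track phrase are placeholders:

var Twitter = require('twitter');

var client = new Twitter({
    consumer_key: 'xxx',
    consumer_secret: 'xxx',
    access_token_key: 'xxx',
    access_token_secret: 'xxx'
});

// statuses/filter streams tweets matching the track phrase
client.stream('statuses/filter', { track: 'bieber' }, function(stream) {
    stream.on('data', function(tweet) {
        console.log(tweet.text);
    });
    stream.on('error', function(error) {
        console.error(error);
    });
});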

Converting AWS Lambda function to use promises?

I am writing a simple HTTP 'ping' function that is being periodically executed using AWS Lambda. It uses four asynchronous functions: http.get, S3.getObject, S3.putObject, and nodemailer.sendMail. Each seems to have a slightly different callback model.
After reading about promises, I spent way too much time trying to convert the following code to use Q promises and failed miserably.
For my own education and hopefully that of others, I was hoping someone could help me convert this to using promises (doesn't have to be Q):
'use strict';
var http = require('http');
var nodemailer = require('nodemailer');
var AWS = require('aws-sdk');
var s3 = new AWS.S3( { params: { Bucket: 'my-bucket' } } );
exports.handler = (event, context, callback) => {
var lastStatus;
var options = {
host: event.server.host,
port: event.server.port ? event.server.port : 80,
path: event.server.path ? event.server.path : '',
method: event.server.method ? event.server.method : 'HEAD',
timeout: 5000
};
var transporter = nodemailer.createTransport({
host: event.mail.host,
port: event.mail.port ? event.mail.port : 587,
auth: {
user: event.mail.user,
pass: event.mail.pass
}
});
var d = new Date();
var UTCstring = d.toUTCString();
// email templates
var downMail = {
from: event.mail.from,
to: event.mail.to,
subject: 'Lambda DOWN alert: SITE (' + event.server.host + ') is DOWN',
text: 'LambdaAlert DOWN:\r\nSITE (' + event.server.host + ') is DOWN as at ' + UTCstring + '.'
};
var upMail = {
from: event.mail.from,
to: event.mail.to,
subject: 'Lambda UP alert: SITE (' + event.server.host + ') is UP',
text: 'LambdaAlert UP:\r\nSITE (' + event.server.host + ') is UP as at ' + UTCstring + '.'
};
// Run async chain to ensure that S3 calls execute in proper order
s3.getObject( { Key: 'lastPingStatus' }, (err, data) => {
// get last status from S3
if (err) { lastStatus = "UP"; } else {
lastStatus = data.Body.toString();
console.log("Last observed status: " + lastStatus);
}
http_request(options, lastStatus);
});
function http_request(requestOptions, lastStatus) {
var req = http.request(requestOptions, function(res) {
if (res.statusCode == 200) {
if (lastStatus == "DOWN") {
console.log('Email up notice sending...');
transporter.sendMail(upMail, function(error, info) {
if (error) {
console.log("ERROR: " + error);
callback(null, "ERROR: " + error);
} else {
console.log('No further details available.');
callback(null, 'Up message sent');
}
});
}
s3.putObject({ Key: 'lastPingStatus', Body: 'UP', ContentType: 'text/plain' }, (error, data) => { console.log("Saved last state as UP"); });
callback(null, 'Website is OK.');
}
});
req.on('error', function(e) {
if (lastStatus == "UP") {
console.log('Email down notice sending...');
transporter.sendMail(downMail, function(error, info) {
if (error) {
console.log("ERROR: " + error);
callback(null, "ERROR: " + error);
} else {
console.log('No further details available.');
callback(null, 'Down message sent');
}
});
s3.putObject({ Key: 'lastPingStatus', Body: 'DOWN', ContentType: 'text/plain' }, (error, data) => { console.log("Saved last state as DOWN"); });
callback(null, 'Website is DOWN.');
}
});
req.end();
}
};
EDIT: First attempt at writing using promises:
'use strict';
var http = require('http');
var nodemailer = require('nodemailer');
var AWS = require('aws-sdk');
var s3 = new AWS.S3( { params: { Bucket: 'lambda-key-storage' } } );
exports.handler = (event, context, callback) => {
var lastStatus;
var options = {
host: event.server.host,
port: event.server.port ? event.server.port : 80,
path: event.server.path ? event.server.path : '',
method: event.server.method ? event.server.method : 'HEAD',
timeout: 5000
};
var transporter = nodemailer.createTransport({
host: event.mail.host,
port: event.mail.port ? event.mail.port : 587,
auth: {
user: event.mail.user,
pass: event.mail.pass
}
});
var d = new Date();
var UTCstring = d.toUTCString();
// email templates
var downMail = {
from: event.mail.from,
to: event.mail.to,
subject: 'Lambda DOWN alert: SITE (' + event.server.host + ') is DOWN',
text: 'LambdaAlert DOWN:\r\nSITE (' + event.server.host + ') is DOWN as at ' + UTCstring + '.'
};
var upMail = {
from: event.mail.from,
to: event.mail.to,
subject: 'Lambda UP alert: SITE (' + event.server.host + ') is UP',
text: 'LambdaAlert UP:\r\nSITE (' + event.server.host + ') is UP as at ' + UTCstring + '.'
};
var myProm = new Promise(function(resolve, reject) {
console.log("called 1");
s3.getObject( { Key: 'lastPingStatus' }, (err, data) => {
// get last status from S3
if (err) {
resolve("UP");
} else {
resolve(data.Body.toString());
}
});
})
.then(function(lastStatus) {
console.log("called 2");
console.log("Last observed status: " + lastStatus);
var req = http.request(options, function(res) {
resolve(res.statusCode);
});
req.on('error', function(e) {
reject(e);
});
req.end();
return "??";
})
.then(function(statusCode) {
console.log("called 3");
if (statusCode == 200) {
if (lastStatus == "DOWN") {
console.log('Email up notice sending...');
resolve("upTrigger");
} else {
resolve("upNoTrigger");
}
s3.putObject({ Key: 'lastPingStatus', Body: 'UP', ContentType: 'text/plain' }, (err, data) => { console.log("Saved last state as UP"); });
callback(null, 'Website is OK.');
}
})
.catch(function(err){
console.log("called 3 - error");
// Send mail notifying of error
if (lastStatus == "UP") {
console.log('Email down notice sending...');
resolve("downTrigger");
s3.putObject({ Key: 'lastPingStatus', Body: 'DOWN', ContentType: 'text/plain' }, (error, data) => { console.log("Saved last state as DOWN"); });
callback(null, 'Website is DOWN.');
return("downTrigger");
} else {
return "downNoTrigger";
}
})
.then(function(trigger) {
console.log("called 4");
if (trigger == "upTrigger") {
transporter.sendMail(upMail, (error, info) => {
if (error) {
console.log("ERROR: " + error);
callback(null, "ERROR: " + error);
} else {
console.log('Up message sent.');
callback(null, 'Up message sent');
}
});
} else if (trigger == "downTrigger") {
transporter.sendMail(downMail, (error, info) => {
if (error) {
console.log("ERROR: " + error);
callback(null, "ERROR: " + error);
} else {
console.log('Down message sent.');
callback(null, 'Down message sent');
}
});
}
console.log("Outcome of ping was: ", trigger);
});
};
This doesn't quite work. The result logs are:
called 1
called 2
Last observed status: UP
called 3
called 4
Outcome of ping was: undefined
ReferenceError: resolve is not defined
Converting your typical async function to a promise is pretty straightforward. I'd rather demonstrate how to convert it than just write the code for you, as you wouldn't learn anything from that.
Usually with node you'll have something that looks similar to this:
doSomethingAsync(callback);
function doSomethingAsync(callback){
var err, result;
// Do some work
...
callback(err, result);
}
function callback(err, result){
if(err){
// Handle error
} else{
// Success so do something with result
}
}
A promise wrapping an async function generally looks something like this:
var myProm = new Promise(function(resolve, reject){
doSomethingAsync(function(err, result){
if(err){
reject(err);
} else{
resolve(result)
}
});
})
.then(function(result){
// Success so do something with result
console.log("Success:", result)
})
.catch(function(err){
// Handle error
console.log("Error: ", err);
})
.then(function(result){
// Where's my result? - result == undefined as we didn't return anything up the chain
console.log("I always execute but result is gone", result)
})
To pass the result down the chain to our "always then" method we need to return a promise or a value:
var myProm = new Promise(function(resolve, reject){
doSomethingAsync(function(err, result){
if(err){
reject(err);
} else{
resolve(result)
}
});
})
.then(function(result){
// Success so do something with result
console.log("Success:", result)
return result;
})
.catch(function(err){
// Handle error
console.log("Error: ", err);
return err;
})
.then(function(result){
// The err/result now gets passed down the chain :)
console.log("Oh there it is", result)
})
I think the above patterns should cater to most of the async methods and events in your code example. If any particular ones are giving you trouble, drop a comment and I'll try to cover those specific examples.
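For the specific ReferenceError in the edit above: resolve and reject only exist as parameters of the new Promise(function(resolve, reject) {...}) executor, so they are undefined inside the .then handlers. Instead of calling them there, return a new promise that wraps the HTTP request and let its value flow down the chain. A hedged sketch, reusing the options object from the question:

// Wrap the ping in its own promise so the status code can be returned from .then
function pingSite(requestOptions) {
    return new Promise(function(resolve, reject) {
        var req = http.request(requestOptions, function(res) {
            resolve(res.statusCode);
        });
        req.on('error', reject);
        req.end();
    });
}

// ...inside the chain:
// .then(function(lastStatus) {
//     return pingSite(options).then(function(code) {
//         return { statusCode: code, lastStatus: lastStatus };
//     });
// })
// .then(function(result) { /* result.statusCode and result.lastStatus are both available here */ })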
Here's an attempt at converting it over to promises - I'm pretty tired so apologies about any mess or mistakes - also there's still plenty of cleanup that could be done.
Essentially what I've done is try to break down the code into tasks and wrap each of those tasks in a promise. That way we can resolve/reject and chain them as needed.
'use strict';
var http = require('http');
var nodemailer = require('nodemailer');
var AWS = require('aws-sdk');
var s3 = new AWS.S3( { params: { Bucket: 'my-bucket' } } );
exports.handler = function (event, context, callback) {
var lastStatus;
var options = {
host: event.server.host,
port: event.server.port ? event.server.port : 80,
path: event.server.path ? event.server.path : '',
method: event.server.method ? event.server.method : 'HEAD',
timeout: 5000
};
var transporter = nodemailer.createTransport({
host: event.mail.host,
port: event.mail.port ? event.mail.port : 587,
auth: {
user: event.mail.user,
pass: event.mail.pass
}
});
var d = new Date();
var UTCstring = d.toUTCString();
// email templates
var downMail = {
from: event.mail.from,
to: event.mail.to,
subject: 'Lambda DOWN alert: SITE (' + event.server.host + ') is DOWN',
text: 'LambdaAlert DOWN:\r\nSITE (' + event.server.host + ') is DOWN as at ' + UTCstring + '.'
};
var upMail = {
from: event.mail.from,
to: event.mail.to,
subject: 'Lambda UP alert: SITE (' + event.server.host + ') is UP',
text: 'LambdaAlert UP:\r\nSITE (' + event.server.host + ') is UP as at ' + UTCstring + '.'
};
// Run async chain to ensure that S3 calls execute in proper order
function getLastPingStatus(){
return new Promise(function(resolve, reject){
s3.getObject( { Key: 'lastPingStatus' }, function(err, data) {
// get last status from S3
if (err) {
lastStatus = "UP";
reject(lastStatus)
} else {
lastStatus = data.Body.toString();
resolve(lastStatus);
console.log("Last observed status: " + lastStatus);
}
});
})
}
getLastPingStatus()
.then(httpRequest)
.catch(httpRequest); // Otherwise a reject will throw an error
function sendMail(mail, status){ // status = "up" or "down" -
return new Promise(function(resolve, reject){
transporter.sendMail(mail, function(error, info) {
if (error) {
console.log("ERROR: " + error);
reject(null, "ERROR: " + error);
} else {
console.log('No further details available.');
resolve(null, status + ' message sent');
}
});
});
}
function saveStatus(up) {
return new Promise(function (resolve, reject) {
var saveOptions,
message;
// I didn't bother refactoring these as promises as they do the same thing regardless of outcome
if(up){
saveOptions = [{ Key: 'lastPingStatus', Body: 'UP', ContentType: 'text/plain' }, function(error, data) { console.log("Saved last state as UP"); }];
message = 'Website is OK.';
} else{
saveOptions = [{ Key: 'lastPingStatus', Body: 'DOWN', ContentType: 'text/plain' }, function(error, data) { console.log("Saved last state as DOWN"); }];
message = 'Website is DOWN.';
}
s3.putObject.apply(this, saveOptions);
callback(null, message);
});
}
function httpRequest(lastStatus) {
var requestOptions = options;
return new Promise (function (resolve, reject){
var req = http.request(requestOptions, function(res) {
if (res.statusCode == 200) {
if (lastStatus == "DOWN") {
console.log('Email up notice sending...');
sendMail(upMail, "Up")
.then(resolve, reject)
.then(saveStatus(true))
.then(callback)
}
}
});
req.on('error', function(e) {
if (lastStatus == "UP") {
console.log('Email down notice sending...');
sendmail(downMail, "Down")
.then(resolve, reject)
.then(saveStatus(false))
.then(callback)
}
});
req.end();
});
}
};
The AWS SDK supports native promises for all services. Some need additional parameters to return properly, like Lambda.invoke().
You would essentially do
s3.putObject({ Key: 'key', Bucket: 'bucket' }).promise()
.then(data => {
// this is the same as the data callback parameter
})
.catch(error => {
// handle your error
})
Or, you could use async/await:
const file = await s3.getObject(params).promise()
// do things with the result
For quick access to the actual file (and not metadata):
const file = JSON.parse(await s3.getObject(params).promise().then(res => res.Body));
https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/using-promises.html
In order to "promisify" callback function, imho, the easiest and cleaner way is to use bluebird. You just don't want to write glue code in order to simplify your code, it's counter productive (and it's error prone).
From the doc :
var Promise = require("bluebird");
var readFile = Promise.promisify(require("fs").readFile);
readFile("myfile.js", "utf8").then(function(contents) {
return eval(contents);
}).then(function(result) {
console.log("The result of evaluating myfile.js", result);
}).catch(SyntaxError, function(e) {
console.log("File had syntax error", e);
//Catch any other error
}).catch(function(e) {
console.log("Error reading file", e);
});
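Applied to the question above, the same idea could wrap the nodemailer call; a sketch, where the transport options and addresses are placeholders:

var Promise = require("bluebird");
var nodemailer = require("nodemailer");

var transporter = nodemailer.createTransport({
    host: "smtp.example.com",
    port: 587,
    auth: { user: "user", pass: "pass" }
});

// promisify sendMail, binding it so `this` stays the transporter
var sendMailAsync = Promise.promisify(transporter.sendMail.bind(transporter));

sendMailAsync({ from: "a@example.com", to: "b@example.com", subject: "Lambda UP alert", text: "Site is UP" })
    .then(function(info) { console.log("Up message sent"); })
    .catch(function(err) { console.log("ERROR: " + err); });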
After reading slaughtr's answer I decided to do it like this, to save some data when the AWS IoT button is pressed:
var AWS = require("aws-sdk");
var iot = new AWS.Iot();
exports.handler = (event, context, callback) => {
iot.listThings({
attributeName: 'dsn',
attributeValue: event.serialNumber,
maxResults: 1
})
.promise()
.then(response => {
return iot.listThingGroupsForThing({thingName: response.things[0].thingName}).promise();
})
.then(groupsList => insertRecordIntoDDB(date, serialNumber, groupsList.thingGroups[0].groupName))
.catch(err => console.log(err))
};
and shortly after I decided to compress it even further with async/await
exports.handler = async (event, context, callback) => {
var eventText = JSON.stringify(event, null, 2);
var thingsList = await iot.listThings({
attributeName: 'dsn',
attributeValue: event.serialNumber,
maxResults: 1
}).promise()
var groupsList = await iot.listThingGroupsForThing({
'thingName': thingsList.things[0].thingName
}).promise();
insertRecordIntoDDB(date, serialNumber, groupsList.thingGroups[0].groupName)
};
I'm still fairly new to this async programming stuff, so I'm not sure which I like the most. Promise chaining can get a bit spaghetti-like, while async/await just masks all of that into something that's easier to comprehend.
Using the Node https module, promisified, to call an external API in an AWS Lambda:
// the endpoint is on port 443, so the https module is needed here
const https = require('https');

exports.handler = async (event) => {
    return httprequest().then((data) => {
        const response = {
            statusCode: 200,
            body: JSON.stringify(data),
        };
        return response;
    });
};

function httprequest() {
    return new Promise((resolve, reject) => {
        const options = {
            host: 'jsonplaceholder.typicode.com',
            path: '/todos',
            port: 443,
            method: 'GET'
        };
        const req = https.request(options, (res) => {
            if (res.statusCode < 200 || res.statusCode >= 300) {
                return reject(new Error('statusCode=' + res.statusCode));
            }
            var body = [];
            res.on('data', function(chunk) {
                body.push(chunk);
            });
            res.on('end', function() {
                try {
                    body = JSON.parse(Buffer.concat(body).toString());
                } catch (e) {
                    reject(e);
                }
                resolve(body);
            });
        });
        req.on('error', (e) => {
            reject(e.message);
        });
        // send the request
        req.end();
    });
}

How to write a setup for an NPM package

I am writing my first Node.js package to help make a REST API easier to use, and I am having trouble structuring the package so that the person using it can do the following in their application:
var Water= require("water-reservation");
var water = Water({username: myusername, password: mypassword});
// Problem here because Water({}) needs to do a REST call to get a bearer
// token to use the API I am trying to simplify. Thus, it tries to
// get_water(callback) before the bearer_token var is set in my package.
water.get_water(function(err, result){
if(err){
console.log(err);
console.log("----------------------------------------------");
}
else{
console.log(result);
console.log("----------------------------------------------");
}
});
In my current setup I take the user's username and password and pass them to a REST endpoint to get a bearer token, which I then use in all of the package's REST calls. Because of the async nature, get_water is called in the user's file before I have finished setting the bearer_token variable in my package.
Here is what my package looks like:
var request = require('request');
var bearer_token = "";
var api_url = "";
var Water = function(credentials){
api_url = credentials.api_url;
var username = credentials.username;
var password = credentials.password;
get_bearer_token(username, password, function(err, access_token){
bearer_token = access_token;
});
};
function get_bearer_token(username, password, callback){
var request_options = {
url: api_url + "/auth",
method: "GET",
'auth': {
'user': username,
'pass': password
}
};
request(request_options, function(err, res, body){
if(err) {
return callback("Water Initialization Error: " + err, null);
}
else {
return callback(null, body);
}
});
}
// Get water
Water.prototype.get_water = function(callback) {
var request_options = {
url: api_url + "/water",
method: "GET",
'auth': {
'bearer': bearer_token
}
};
request(request_options, function(err, res, body){
if(err) {
return callback(err, null);
}
else{
return callback(null, body);
}
});
};
// more package functions...
module.exports = Water;
I am trying to find a way to set this up so the user can use the package as described above. I am not set on that style, but it seems the easiest for a user to understand and use.
The only way I could think of to fix this is to add a callback to Water({}), but that gets messy, since the user would have to wrap all of their water-related code in the callback. I know it can be done, looking at the Twitter package for example, but I still couldn't wrap my mind around how they did it. Any help would be appreciated, thanks.
You should delegate the responsibility of getting the token to the moment a request is made. That way the package initialises instantly. To make it more efficient you can cache the token the first time it is fetched. I've made an example here, but you could tidy it up using something like async:
var request = require('request');
var Water = function (credentials) {
this.api_url = credentials.api_url;
this.username = credentials.username;
this.password = credentials.password;
return this;
};
Water.prototype.get_bearer_token = function (callback) {
// We already have the bearer token, so return straight away
if (this.bearer_token) return callback(null, this.bearer_token);
var self = this;
var request_options = {
url: this.api_url + "/auth",
method: "GET",
'auth': {
'user': this.username,
'pass': this.password
}
};
request(request_options, function(err, res, body){
if(err) {
return callback("Water Initialization Error: " + err, null);
}
else {
self.bearer_token = body;
return callback(null, body);
}
});
}
// Get water
Water.prototype.get_water = function(callback) {
var self = this;
this.get_bearer_token(function (err, token) {
if (err) return callback(err);
var request_options = {
url: self.api_url + "/water",
method: "GET",
'auth': {
'bearer': token
}
};
request(request_options, function(err, res, body){
if(err) {
return callback(err, null);
}
else{
return callback(null, body);
}
});
});
};
// more package functions..
module.exports = Water;
To use it:
var Water = require('water-reservation');
var water = new Water({
username: 'username',
password: 'pass',
api_url: 'http://example.com'
});
water.get_water(function (err, res) {
});

Error Handling middleware in ExpressJS for spawn of multiple child_process

I have written a nice little error-reporting middleware that sits after all the GET and POST handling (after app.use(app.router);). See below.
This works great for simple quick GET and POST that goes to the PostGIS database etc.
But I have one POST request that is designed to create a bunch of directories and a number of files, and then spawn 1 to 8 child_process tasks:
childProcess.execFile(job.config.FMEPath, ["PARAMETER_FILE", job.fmeParamFile], { cwd: job.root },
All that setup does not take much time (less than a second), and it is all async (I use the async library at one point to sequence 5 steps; see below).
My issue is error handling. Right now I return a response immediately, before creating all the files and doing all the steps. This means that next(err) is not working as expected. What is a good paradigm for reporting the errors back? I am using WINSTON to log errors [logger.log()], but should I just log the errors on the server, or should I also report them back to the original request? Here is the current POST handler (and remember, I would have to keep the res, req and next objects around for quite a while to be able to call next(err)).
exports.build = function (req, res, next) {
var config = global.app.settings.config;
var jobBatch = groupJobs(req.body.FrameList);
var ticket = tools.newGuid("", true);
var fileCount = req.body.FrameList.length * nitfMultiplier;
var ts = timespan.fromSeconds(fileCount / config.TileRate);
var estimate = ts.hours + ":" + tools.pad(ts.minutes, 2) + ":" + tools.pad(ts.seconds, 2);
res.set({ 'Content-Type': 'application/json; charset=utf-8' });
res.send({ ticket: ticket, maxTiles: fileCount, timeEstimate: estimate, tileRate: config.TileRate, wwwURL: config.WWWUrl });
jobBatchRoot(req, res, jobBatch, config, ticket, next);
};
jobBatchRoot() then goes off and does a lot of processing; I did not include all that code here. Below is the error-reporting middleware:
exports.bugs = function (err, req, res, next) {
global.app.settings.stats.errors += 1;
if (err.code == undefined) {
err.code = 500;
err.message = "Server Error";
}
res.status(err.code);
logger.log('error', '%s url: %s status: %d \n', req.method, req.url, err.code, { query: req.query, body: req.body, message: err.message, stack: err.stack });
var desc = req.method + " " + req.url;
var body = util.format("%j", req.body);
var query = util.format("%j", req.query);
var stack = err.stack.split('\n');
res.format({
text: function () {
res.send(util.format("%j", { title: err.message, code: err.code, desc: desc, query: query, message: err.message, stack: err.stack, body: body}));
},
html: function () {
query = tools.pretty(req.query);
res.render('error', { title: err.message, code: err.code, desc: desc, query: query, message: err.message, stack: stack, body: body });
},
json: function () {
res.send({ title: err.message, code: err.code, desc: desc, query: query, message: err.message, stack: err.stack, body: body });
}
});
};
Perhaps I should be refactoring this (maybe object-oriented). Anyway, I thought I would post the full module here; all I am looking for is a few tips on structure and best practices.
var util = require('util');
var query = require("pg-query");
var timespan = require('timespan');
var _ = require('lodash');
var path = require('path');
var fs = require('fs');
var query = require("pg-query");
var async = require("async");
var childProcess = require("child_process");
var tools = require("../tools/tools");
var nitfMultiplier = 99;
var manifestVersionID = 5;
exports.setup = function (app) {
};
exports.estimate = function (req, res, next) {
var config = global.app.settings.config;
var fileCount = req.body.FrameList.length * nitfMultiplier;
var ts = timespan.fromSeconds(fileCount / config.TileRate);
var estimate = ts.hours + ":" + tools.pad(ts.minutes, 2) + ":" + tools.pad(ts.seconds, 2);
res.set({ 'Content-Type': 'application/json; charset=utf-8' });
res.send({ ticket: "Estimate", maxTiles: fileCount, timeEstimate: estimate, tileRate: config.TileRate, wwwURL: config.WWWUrl });
};
exports.build = function (req, res, next) {
var config = global.app.settings.config;
var jobBatch = groupJobs(req.body.FrameList);
var ticket = tools.newGuid("", true);
var fileCount = req.body.FrameList.length * nitfMultiplier;
var ts = timespan.fromSeconds(fileCount / config.TileRate);
var estimate = ts.hours + ":" + tools.pad(ts.minutes, 2) + ":" + tools.pad(ts.seconds, 2);
res.set({ 'Content-Type': 'application/json; charset=utf-8' });
res.send({ ticket: ticket, maxTiles: fileCount, timeEstimate: estimate, tileRate: config.TileRate, wwwURL: config.WWWUrl });
jobBatchRoot(req, res, jobBatch, config, ticket, next);
};
function groupJobs(list) {
var jobBatch = {};
_.forEach(list, function (obj) {
if (jobBatch[obj.type] == undefined) {
jobBatch[obj.type] = [];
}
jobBatch[obj.type].push(obj);
});
return jobBatch;
};
function jobBatchRoot(req, res, jobBatch, config, ticket, next) {
var batchRoot = path.join(config.JobsPath, ticket);
fs.mkdir(batchRoot, function (err) {
if (err) return next(err);
var mapInfoFile = path.join(batchRoot, "MapInfo.json");
var mapInfo = {
Date: (new Date()).toISOString(),
Version: manifestVersionID,
Zoom: req.body.Zoom,
CenterLat: req.body.CenterLat,
CenterLon: req.body.CenterLon
};
fs.writeFile(mapInfoFile, tools.pretty(mapInfo), function (err) {
if (err) return next(err);
spawnJobs(req, res, batchRoot, mapInfo, config, ticket, jobBatch, next);
});
});
};
function spawnJobs(req, res, root, mapInfo, config, ticket, jobBatch, next) {
_.forEach(jobBatch, function (files, key) {
var job = {
req: req,
res: res,
type: key,
files: files,
batchRoot: root,
mapInfo: mapInfo,
config: config,
ticket: ticket,
missingFiles: [],
run: true,
next: next
};
setup(job);
});
};
function setup(job) {
job.root = path.join(job.batchRoot, job.type);
job.fmeParamFile = path.join(job.root, "fmeParameters.txt");
job.fmeWorkSpace = path.join(job.config.LibrarianPath, "TileBuilder.fmw");
job.fmeLogFile = path.join(job.root, "jobLog.log");
job.errorLog = path.join(job.root, "errorLog.log");
job.jobFile = path.join(job.root, "jobFile.json");
job.manifestFile = path.join(job.root, "manifest.json");
async.series({
one: function (callback) {
maxZoom(job, callback);
},
two: function (callback) {
fs.mkdir(job.root, function (err) {
if (err) return job.next(err);
callback(null, "Job Root Created");
});
},
three: function (callback) {
makeParamFile(job, callback);
},
four: function (callback) {
delete job.req;
delete job.res;
fs.writeFile(job.jobFile, tools.pretty(job), function (err) {
if (err) return job.next(err);
callback(null, "Wrote Job File");
});
},
five: function (callback) {
runJob(job, callback);
},
six: function (callback) {
tileList(job, callback);
},
seven: function (callback) {
finish(job, callback);
},
},
function (err, results) {
if (err) return job.next(err);
console.log(tools.pretty(results));
});
}
function maxZoom(job, callback) {
var qString = util.format('SELECT type, "maxZoom" FROM portal.m_type WHERE type=\'%s\'', job.type);
query(qString, function (err, rows, result) {
if (err) {
var err = new Error(queryName);
err.message = err.message + " - " + qString;
err.code = 400;
return job.next(err);
}
job.maxZoom = rows[0].maxZoom - 1; // kludge for 2x1 root layer in leaflet
return callback(null, "Got MaxZoom");
});
}
function makeParamFile(job, callback) {
var text = util.format("%s\n", job.fmeWorkSpace);
text += util.format("--OutputDir %s\n", job.root);
text += util.format("--LogFile %s\n", job.fmeLogFile);
var source = "";
_.forEach(job.files, function (file) {
var path = ('development' == process.env.NODE_ENV) ? file.path.replace(job.config.SourceRootRaw, job.config.SourceRoot) : file.path;
if (fs.existsSync(path)) {
source += wrap(path, '\\"') + " ";
}
else {
job.missingFiles.push(path);
}
});
source = wrap(wrap(source, '\\"'), '"');
text += "--Sources " + source;
if (job.missingFiles.length == job.files.length) job.run = false;
fs.writeFile(job.fmeParamFile, text, function (err) {
if (err) return job.next(err);
return callback(null, "Wrote Paramaters File");
})
};
function wrap(content, edge) {
return edge+content+edge;
}
function runJob(job, callback) {
if (!job.run) return callback(null, "Skipped JOB, no files");
childProcess.execFile(job.config.FMEPath, ["PARAMETER_FILE", job.fmeParamFile], { cwd: job.root },
function (err, stdout, stderr) {
if (err) return job.next(err);
job.stdout = stdout;
job.stderr = stderr;
var bar = "\n--------------------------------------------------------------------------------------------------------\n";
var results = util.format("%s STDOUT: \n %s%s STDERR: \n %s", bar, job.stdout, bar, job.stderr);
fs.appendFile(job.fmeLogFile, results, function (err) {
return callback(err, "FME JOB " + job.type + " run completed");
});
});
}
function tileList(job, callback) {
var tiles = [];
var byteCount = 0;
fs.readdir(job.root, function (err, files) {
if (err) {
logger.log('error', 'tileList directory read: %s \n', job.root, { message: err.message, stack: err.stack });
return job.next(err);
}
async.each(files, function (file, done) {
var fileName = file.split(".");
fs.lstat(job.root + "\\" + file, function (err, stats) {
if (!err && stats.isFile() && (fileName[1] == "png")) {
tiles.push({ id: fileName[0], size: stats.size });
byteCount += stats.size;
};
done(null);
});
}, function (err) {
job.tileList = tiles;
job.byteCount = byteCount;
return callback(null, "got tile list");}
);
});
}
function finish(job, callback) {
var manifest = {
Date: (new Date()).toISOString(),
Version: manifestVersionID,
MaxZoom: job.maxZoom,
Class: "OVERLAY",
FileCount: job.tileList.length,
Size: job.byteCount / (1024 * 1024), // Mbytes
files: job.tileList
};
fs.writeFile(job.manifestFile, tools.pretty(manifest), function (err) {
if (err) {
logger.log('error', 'manifest write: %s \n', job.manifestFile, { message: err.message, stack: err.stack });
return job.next(err);
}
return callback(null, "JOB " + job.type + " completed");
});
}
I went and refactored this. I created a module with module.exports = function(..) {...}
and then added lots of state and methods to make a class that contains the Job definition. So now I create the top-level directories, return a response, and spawn the sub-jobs. They all run async after the Express response. They should not normally hit errors, and if they do, I use WINSTON to log them on the server, and I also return job-done information to the user when all the builds are done.
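A bare-bones sketch of that shape (hypothetical names; winston assumed to be configured as in the module above): the job object owns its state, runs after the response has already been sent, and logs failures instead of calling next(err).

// job.js - hedged sketch of the refactored Job "class"
var winston = require('winston');

function Job(ticket, config) {
    this.ticket = ticket;
    this.config = config;
    this.results = [];
}

Job.prototype.run = function(onDone) {
    var self = this;
    // placeholder for the real async work (mkdir, write files, execFile, ...)
    setImmediate(function() {
        var err = null; // set by any failing step
        if (err) {
            // the HTTP response is long gone, so log server-side instead of next(err)
            winston.log('error', 'job %s failed', self.ticket, { message: err.message, stack: err.stack });
        }
        onDone(err, self.results);
    });
};

module.exports = Job;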

Node.js Async | insert into postgresql database results from api

I am quite a newbie with node.js. What I am trying to achieve is the following:
Connect to my PostgreSQL database and get the info of a place (id, coordinates).
Call a weather API and get the info for that spot using the coordinates obtained in the previous step.
Insert the returned JSON into the database. I get 8 hourly objects, with the weather info every 3 hours (0, 3, 6, 9, 12, 15, 18, 21). I need to iterate through these objects and store them as 8 records in the database.
I wrote the following code:
app.get('/getapi', function(req, res){
var json_bbdd;
//------------ BBDD CONNECTION----------------
var pg = require('pg');
var conString = "postgres://postgres:postgres2#localhost/places";
var client = new pg.Client(conString);
client.connect(function(err) {
if(err) {
console.log('could not connect to postgres');
}
client.query('SELECT * from places where id=3276', function(err, result) {
if(err) {
console.log('error running query');
}
json_bbdd=result.rows[0];
var coords = JSON.parse(json_bbdd.json).coordinates;
var id = json_bbdd.id;
var input = {
query: coords[1] + ',' + coords[0] ,
format: 'JSON',
fx: '',
callback: 'MarineWeatherCallback'
};
var url = _PremiumApiBaseURL + "marine.ashx?q=" + input.query + "&format=" + input.format + "&fx=" + input.fx + "&key=" + _PremiumApiKey + "&tide=yes";
$.ajax({
type: 'GET',
url: url,
async: false,
contentType: "application/json",
dataType: 'jsonp',
success: function (json) {
var date= json.data.weather[0].date;
for (var i=0; i < 8; i++){
var hourly = json.data.weather[0].hourly[i];
var time= hourly.time;
client.query('INSERT into parte (id, date, time) VALUES($1, $2, $3)', [id, date, time],
function(err, result) {
if (err) {
console.log(err);
} else {
console.log('row inserted: ' + id + ' ' + time);
}
});
} // FOR
},
error: function (e) {
console.log(e.message);
}
});
client.end();
});
});
});
Steps 1 and 2 work perfectly. The third step, on the other hand, does nothing and doesn't even throw an error.
I read in this post: node-postgres will not insert data, but doesn't throw errors either that using the async module could help, but I have no idea how to rewrite the code. I need some help.
Regards,
Aitor
I didn't test your snippet; I can only point out the things that look wrong to my eyes.
It is better not to use jQuery on a Node server. There is an excellent library called request for making remote HTTP requests.
You should handle database errors better, because in your example the code continues after a DB error.
You are calling client.end() too early; by the time you try to insert data into the database the connection is already closed. You have to move client.end() to the end of the success and error callbacks and wait until all callbacks are done.
I think it is also better to use a connection pool instead of Client.
You could possibly use the JSON type in PostgreSQL to avoid serializing/deserializing JSON data in your code.
Here is a revised example (untested). I didn't replace jQuery here; only some minor tweaking is included.
var pg = require('pg');
var conString = "postgres://postgres:postgres2#localhost/places";
app.get('/getapi', function(req, res, next){
var json_bbdd;
//------------ BBDD CONNECTION----------------
pg.connect(conString, function(err, client, done) {
if(err) {
// example how can you handle errors
console.error('could not connect to postgres');
return next(new Error('Database error'));
}
client.query('SELECT * from places where id=3276', function(err, result) {
if(err) {
console.error('error running query');
done();
return next(new Error('Database error'));
}
json_bbdd = result.rows[0];
var coords = JSON.parse(json_bbdd.json).coordinates;
var id = json_bbdd.id;
var input = {
query: coords[1] + ',' + coords[0] ,
format: 'JSON',
fx: '',
callback: 'MarineWeatherCallback'
};
var url = _PremiumApiBaseURL + "marine.ashx?q=" +
input.query + "&format=" + input.format +
"&fx=" + input.fx + "&key=" +
_PremiumApiKey + "&tide=yes";
$.ajax({
type: 'GET',
url: url,
async: false,
contentType: "application/json",
dataType: 'jsonp',
success: function (json) {
var date = json.data.weather[0].date;
var callbacks = 0;
for (var i=0; i < 8; i++) {
var hourly = json.data.weather[0].hourly[i];
var time= hourly.time;
client.query(
'INSERT into parte (id, date, time) VALUES($1, $2, $3)',
[id, date, time],
function(err, result) {
if (err) {
console.log(err);
} else {
console.log('row inserted: ' + id + ' ' + time);
}
callbacks++;
if (callbacks === 8) {
console.log('All callbacks done!');
done(); // done(); is rough equivalent of client.end();
}
});
} // FOR
},
error: function (e) {
console.error(e.message);
done(); // done(); is rough equivalent of client.end();
return next(new Error('Http error'));
}
});
});
});
});
OK, now another problem came up... I was unsure whether to create a new post, but I think it may be related to the previous one.
The aim is to read 3 places from the database instead of one and do the same process as before for each of them.
The code is as follows (with the changes proposed by ivoszz):
app.get('/getapi', function(req, res, next){
//------------ BBDD CONNECTION----------------
pg.connect(conString, function(err, client, done) {
if(err) {
// example how can you handle errors
console.error('could not connect to postgres',err);
return next(new Error('Database error'));
}
client.query('SELECT * from places where id>3274 and id<3278', function(err, result) {
if(err) {
console.error('error running query',err);
done();
return next(new Error('Database error'));
}
var first_callback = 0;
for (var y=0; y<result.rows.length; y++) {
var coords = JSON.parse(result.rows[y].json).coordinates;
var id = result.rows[y].id;
var input = {
query: coords[1] + ',' + coords[0] ,
format: 'JSON',
fx: ''
};
var url = _PremiumApiBaseURL + "marine.ashx?q=" + input.query + "&format=" + input.format + "&fx=" + input.fx + "&key=" + _PremiumApiKey;
request(url, function(err, resp, body) {
body = JSON.parse(body);
if (!err && resp.statusCode == 200) {
var date = body.data.weather[0].date;
var callbacks = 0;
for (var i=0; i < 8; i++) {
var hourly = body.data.weather[0].hourly[i];
client.query(
'INSERT into parte (id, date, time) VALUES($1, $2, $3)',
[id, date, hourly.time],
function(err, result) {
if (err) {
console.log(err);
} else {
console.log('row inserted: ' + id + ' iteration ' + i);
}
callbacks++;
if (callbacks === 8) {
console.log('All callbacks done!from id '+id);
//done(); // done(); is rough equivalent of client.end();
//res.send("done");
}
});
} // FOR
}
else { // if the API http request throws an error
console.error(err);
done(); // done(); is rough equivalent of client.end();
return next(new Error('Http API error'));
}
}); // REQUEST API URL
first_callback++;
if (first_callback === result.rows.length-1) {
console.log('All global callbacks done!');
done(); // done(); is rough equivalent of client.end();
res.send("done");
}}
}); // SELECT from pg
}); // CONNECT to pg
}); // app.get
I don't know why it tries to insert id=3277 three times instead of inserting id=3275, id=3276 and then id=3277... What it actually does is insert the first 8 records OK (for id=3277), but then it throws an error saying those records already exist (primary key = id, date, time) for id 3277...
It seems it first runs the 3 iterations of the outer FOR and only then the iterations of the inner FOR, but always with the data of the last iteration (place). I can't understand it very well...
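(This is the classic var-in-a-loop problem: the outer for loop finishes and fires all three HTTP requests before any request callback runs, and because id and coords are declared with var in the shared scope, every callback sees the values of the last row, id=3277. A hedged sketch of one fix, replacing the outer for loop in the query callback above so each row gets its own scope; the 8 hourly INSERTs would go where the log line is:)

// each callback now closes over its own `id` and `coords`
result.rows.forEach(function(row) {
    var coords = JSON.parse(row.json).coordinates;
    var id = row.id;
    var url = _PremiumApiBaseURL + "marine.ashx?q=" + coords[1] + ',' + coords[0] +
              "&format=JSON&fx=&key=" + _PremiumApiKey;
    request(url, function(err, resp, body) {
        if (err) return console.error(err);
        console.log('got weather for place ' + id); // the id of this row, not the last one
        // ...insert the 8 hourly rows for this id here, as in the code above...
    });
});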
