How to synchronize sqlite3 update with command execution - node.js

I am working on a Node.js project where I need to update a table and then restart a service. Unfortunately, the service restarts before the table has been updated, so I assume this is normal async behaviour.
How do I synchronize this?
var sqlite3 = require('sqlite3').verbose();
var express = require('express');
var exec = require('child_process').exec;
var app = express();
var router = express.Router();
var db = new sqlite3.Database('/home/test/testApp.db', sqlite3.OPEN_READWRITE);

router.route('/')
  .get(function(req, res) {
    res.render('index', { data: dbConfigRow });
  })
  .post(function(req, res) {
    // console.log(req.body);
    db.serialize(function() {
      for (var key in req.body) {
        db.run("UPDATE config SET " + key + "='" + req.body[key] + "'");
      }
      // this runs before the UPDATE statements have finished
      exec('systemctl restart demoApp');
    });
    res.json(200);
  });

You should check out Async or any one of the popular promise libraries (When.js, Q.js, Bluebird).
Any of these should solve your problem. In Async it might look something like this using series:
.post(function(req, res) {
  async.series([
    function(callback) {
      db.serialize(function() {
        for (var key in req.body) {
          db.run("UPDATE config SET " + key + "='" + req.body[key] + "'");
        }
        callback();
      });
    },
    function(callback) {
      exec('systemctl restart demoApp'); // Assuming this is synchronous
      callback();
    }
  ],
  function(error, results) { // Using the optional callback
    res.send(200);
  });
});
This assumes db.run has queued its statements before callback() fires; note that db.run itself is asynchronous, and db.serialize only guarantees the statements execute in order, so for strict sequencing you can also pass a completion callback to the last db.run.
All this said, it looks like your current implementation was returning 200 before it finished all the db/restarting tasks. You might try moving the response after the exec. That may also work. Let me know if this solves your issue.
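If you would rather not pull in a library at all, the sqlite3 driver itself tells you when each statement finishes: db.run accepts a completion callback. Here is a minimal sketch of the same handler (assuming the same config table and demoApp service) that restarts the service and responds only after the last UPDATE has completed:
.post(function(req, res) {
  var keys = Object.keys(req.body);
  var remaining = keys.length;
  if (remaining === 0) return res.json(200);
  db.serialize(function() {
    keys.forEach(function(key) {
      // the value is passed as a bound parameter to avoid quoting problems
      db.run("UPDATE config SET " + key + " = ?", [req.body[key]], function(err) {
        if (err) console.log(err);
        if (--remaining === 0) {
          // every UPDATE has finished; now it is safe to restart and respond
          exec('systemctl restart demoApp', function() {
            res.json(200);
          });
        }
      });
    });
  });
});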

Related

Node.Js Retrieve specific data from SQL Server and encode it into JSON Array

I know this question has many duplicates, but I have already wasted too much time searching for the right solution.
First, take a look at my Node.js code:
var express = require('express');
var app = express();

app.get('/', function (req, res) {
  var sql = require("mssql");
  // config for your database
  var config = {
    user: 'myuser',
    password: 'mypass',
    server: 'myip',
    database: 'mydatabase'
  };
  sql.close();
  // connect to your database
  sql.connect(config, function (err) {
    if (err) console.log(err);
    var dataqu = '';
    // create Request object
    var request = new sql.Request();
    // query to the database and get the records
    request.query("select * from AR_Invoices", function (err, recordset) {
      if (err) console.log(err);
      res.json(recordset);
      sql.close();
    });
  });
});

var server = app.listen(5000, '0.0.0.0', function () {
  console.log('Server is running..');
});
This code runs fine, but the JSON result structure looks like this:
{"recordsets":[[{"Tipe":"Invoices","InvoiceID":411891,"InvoiceNumber":"SR.1701.0001"}]],"recordset":[{"Tipe":"Invoices","InvoiceID":411891,"InvoiceNumber":"SR.1701.0001"}],"output":{},"rowsAffected":[1]}
I don't know why, but for some reason the rows always come out duplicated (once under recordsets and once under recordset).
And how do I select just InvoiceID and InvoiceNumber?
I already tried recordset.InvoiceID and recordset[0].InvoiceID, but in vain; the result is still duplicated.
Can anyone explain how to do this properly?
I want the final result to look like this:
[
{ "InvoiceID":"1", "InvoiceNumber":"mynumber" }
]
For future reference, I finally figured out how to do this. Here is my full code:
var express = require('express');
var app = express();
var dateFormat = require('dateformat');

app.get('/', function (req, res) {
  var sql = require("mssql");
  // config for your database
  var config = {
    user: 'myuser',
    password: 'mypassword',
    server: 'myip',
    database: 'mydb'
  };
  sql.close();
  // connect to your database
  sql.connect(config, function (err) {
    if (err) console.log(err);
    // create Request object
    var request = new sql.Request();
    // query to the database and get the records
    request.query("select top 2 'Invoices' as Tipe, InvoiceID, InvoiceNumber, InvoiceDate, (select top 1 DriverPicture from dbDigitalApp.dbo.tbdriver) as Blob from AR_Invoices", function (err, result) {
      if (err) console.log(err);
      var myarr = new Array();
      for (var i = 0; i < result.recordset.length; ++i) {
        var InvoiceNumber = result.recordset[i].InvoiceNumber;
        var InvoiceDate = dateFormat(result.recordset[i].InvoiceDate, "dd mmmm yyyy");
        var Blob = result.recordset[i].Blob;
        myarr.push({ 'InvoiceNumber': InvoiceNumber, 'InvoiceDate': InvoiceDate, 'Blob': Buffer.from(Blob).toString('base64') });
      }
      res.json(myarr);
      sql.close();
    });
  });
});

var server = app.listen(5000, '0.0.0.0', function () {
  console.log('Server is running..');
});
The above code produces the plain array of selected fields I wanted.
With the above code you can select specific fields only and do whatever you want with them, such as changing the date format or encoding to base64.
I don't know if this is the cleanest way to do it, since res.json can send all the retrieved fields without looping through them.
But at least this is my solution; I hope it will be helpful to future people wondering about the same thing.
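For what it is worth, the loop above can also be written with Array.prototype.map; this is just a compact sketch of the same idea using the same result.recordset array:
request.query("select top 2 InvoiceID, InvoiceNumber, InvoiceDate from AR_Invoices", function (err, result) {
  if (err) return console.log(err);
  // result.recordset is the plain array of rows; keep only the fields you need
  var rows = result.recordset.map(function (row) {
    return {
      InvoiceID: row.InvoiceID,
      InvoiceNumber: row.InvoiceNumber,
      InvoiceDate: dateFormat(row.InvoiceDate, "dd mmmm yyyy")
    };
  });
  res.json(rows);
  sql.close();
});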

How could I improve my Node.js program

I used promises, callbacks and an external API for the first time, but I'm not sure that's the best way to use them.
My program translates words from one language to another using a pivot language and the systran.io API.
The translate function translates a word and sends the response via a callback.
Then, in the POST request, I use promises to chain the tasks.
var express = require('express');
var request = require('request');
var router = express.Router();

router.post("/", function(req, res) {
  console.log
  var resultat
  var promise = new Promise((resolve, reject) => {
    translate(req.query.source, "en", req.query.content, function(resa) {
      resolve(resa);
    })
  }).then(function(resolve) {
    console.log(resolve);
    translate("en", req.query.target, resolve, function(resa2) {
      console.log(resa2);
    })
  });
});
});
function translate(source, target, content, callback) {
  let result;
  result = request("https://api-platform.systran.net/translation/text/translate?input=" + content + "&source=" + source + "&target=" + target + "&key=xxxxxxxx-783f-4f90-aea4-7fb357016647", function(err, data, body) {
    body = JSON.parse(body);
    console.log(body);
    callback(body.outputs[0].output)
  })
}
module.exports = router;
Is there a better way to write my program, which is already working?
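One way to tidy this up is to have translate itself return a promise, so the two calls chain without the manual new Promise wrapper in the route. A rough sketch under that assumption (the API key is a placeholder):
function translate(source, target, content) {
  return new Promise(function (resolve, reject) {
    request("https://api-platform.systran.net/translation/text/translate?input=" + content +
            "&source=" + source + "&target=" + target + "&key=YOUR_KEY",
      function (err, data, body) {
        if (err) return reject(err);
        resolve(JSON.parse(body).outputs[0].output);
      });
  });
}

router.post("/", function (req, res) {
  translate(req.query.source, "en", req.query.content)
    .then(function (english) {
      // pivot through English, then translate to the target language
      return translate("en", req.query.target, english);
    })
    .then(function (result) {
      res.json(result);
    })
    .catch(function (err) {
      res.status(500).json({ error: err.message });
    });
});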

lwip.open doesn't work after mongoose findOne

So I have mongoose, multer and lwip (they are required at the top):
var express = require('express');
var router = express.Router();
var mongoose = require('mongoose');
var jwt = require('jsonwebtoken');
var Users = require('../models/users.js');
var multer = require('multer');
var mime = require('mime');
var lwip = require('lwip');
If I comment the Users.findOne part, the image is cropped as I want it to be cropped. But if I uncomment it the lwip part stops working, though no errors are thrown. It just doesn't enter lwip.open().
router.post('/image', upload.single('file'), function (req, res) {
  // This part works always.
  Users.findOne({userid: req.body.userid}, function (err, user) {
    var imgpath = req.file.path.split("public\\")[1];
    user.user_photos.push(imgpath);
    user.save(function (err) {
      if (err)
        console.log('error');
      else
        console.log('success');
    });
  });
  // If I remove the top part, this part will work too.
  lwip.open(req.file.path, function(err, image) {
    image.batch()
      .crop(200, 200)
      .writeFile(req.file.path, function(err) {
        if (err)
          console.log('error');
        else
          console.log('success');
      });
  });
});
You might need my multer config too, so here it is:
var storage = multer.diskStorage({
  destination: function (req, file, cb) {
    cb(null, './public/uploads/')
  },
  filename: function (req, file, cb) {
    var extension;
    if (mime.extension(file.mimetype) == "jpeg") {
      extension = "jpg";
    }
    else {
      extension = mime.extension(file.mimetype);
    }
    cb(null, file.fieldname + '-' + Date.now() + '.' + extension);
  }
});
Can you help me to figure out what the problem is here?
They're both async functions. How can you guarantee both are done before the function exits? I recommend using a promise library like Bluebird. With it you can run multiple async functions at the same time and specify what happens when they both return.
Don't forget to 'promisify' any libraries you want to treat as promises. Your code will look something like:
router.post('/image', function (req, res) {
  return Promise.all([myfunct1(), myfunct2()])
    .then((retval) => ({ f1val: retval[0], f2val: retval[1] }));
});
I know some asshat is going to come along and take my answer and write out the code for you so that all you have to do is copy paste it, but I really do hope that you take the time to learn WHY and HOW it works if you do not already know.
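To make that concrete, here is a rough sketch of what the promisified version could look like with Bluebird. The helper names (cropImage, saveUserPhoto) are made up for illustration, and the sketch assumes a mongoose version whose exec() and save() return promises:
var Promise = require('bluebird');

// hypothetical helper: wraps the lwip open/crop/write chain in a promise
function cropImage(path) {
  return new Promise(function (resolve, reject) {
    lwip.open(path, function (err, image) {
      if (err) return reject(err);
      image.batch().crop(200, 200).writeFile(path, function (err) {
        if (err) reject(err);
        else resolve(path);
      });
    });
  });
}

// hypothetical helper: wraps the mongoose findOne/save pair
function saveUserPhoto(userid, filePath) {
  return Users.findOne({ userid: userid }).exec().then(function (user) {
    user.user_photos.push(filePath.split("public\\")[1]);
    return user.save();
  });
}

router.post('/image', upload.single('file'), function (req, res) {
  Promise.all([saveUserPhoto(req.body.userid, req.file.path), cropImage(req.file.path)])
    .then(function (results) {
      res.json({ user: results[0], image: results[1] });
    })
    .catch(function (err) {
      res.status(500).send(err.message);
    });
});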

exporting a module in a callback function?

Is there a way to export some variables that are set inside a callback function? For example, if I need to use room.room_id in another file, what should I do? I tried module.exports.roomId = room.room_id, but roomId turned out to be undefined in the other file. Thanks!
var Room = require('../models/database').Room;

exports.create = function (req, res) {
  Room
    .create({
      room_name: req.body.roomName
    })
    .complete(function () {
      Room
        .find({where: {room_name: req.body.roomName}})
        .success(function (room) {
          // if(err) console.log(err);
          res.redirect('rooms/videochat/' + req.body.roomName + '/' + room.room_id);
          console.log("room_id: " + room.room_id);
          module.exports.roomId = room.room_id;
        })
    })
};
You can't do it like that, because modules are evaluated synchronously and you're mutating module.exports some time in the future. What you need to do is supply a callback and either pass the value into it or use the callback as a signal that it is now safe to read the exported property.
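In other words, instead of exporting the value, export a function that accepts a callback and hands over room.room_id once the query has finished. A rough sketch of that idea, reusing the same Room model and its find/success API (getRoomId is a name made up for illustration):
// rooms.js
var Room = require('../models/database').Room;

exports.getRoomId = function (roomName, callback) {
  Room.find({where: {room_name: roomName}})
    .success(function (room) {
      // hand the id to the caller instead of mutating module.exports
      callback(null, room.room_id);
    });
};

// some-other-file.js
var rooms = require('./rooms');
rooms.getRoomId('some room', function (err, roomId) {
  console.log('room id:', roomId);
});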
This is not the best way to solve the problem, because modules are read once synchronously and then cached, while your code handles requests and responses.
You would rather want to export something like this:
var rooms = {};

exports.create = function (req, res, next) {
  Room.create({
    room_name: req.body.roomName
  }).complete(function () {
    Room.find({where: {room_name: req.body.roomName}})
      .success(function (room) {
        res.redirect('rooms/videochat/' + req.body.roomName + '/' + room.room_id);
        rooms[req.body.roomName] = room.room_id;
      });
  });
};

exports.rooms = rooms;
If you are using Express.js, you can register in another place a route like this:
var roomsManager = require('./path/to/the/module');
//handle the create room endpoint
app.post('/room', roomsManager.create);
//get the room_id given a room name:
console.log('the room id of "some room" is:', roomsManager.rooms["some room"]);

How to use events to keep mongodb logic out of node.js request handlers

I'm looking for a package (or pattern) to handle events from mongodb so I can avoid nested callbacks and keep mongodb logic out of my request handlers.
Right now I've got code that looks like this:
start-express.js (server)
var express = require('express');
var Resource = require('express-resource');
var app = express.createServer();
// create express-resource handler which essentially does app.get('things', ...)
var things = app.resource('things', require('./things.js'));
app.listen(port);
things.js (express-resource request handler)
require('./things-provider');
// handle request 'http://example.com/things'
exports.index = function(request, response) {
  sendThings(db, response);
};
things-provider.js (handles mongodb queries)
var mongodb = require('mongodb')
// create database connection
var server = new mongodb.Server(host, port, {auto_reconnect: true});
var db = new mongodb.Db(dbName, server);
db.open(function (err, db) {
  if (err) { }
  // auto_reconnect will reopen connection when needed
});

function sendThings(db, response) {
  db.collection('things', function(err, collection) {
    collection.find(function(err, cursor) {
      cursor.toArray(function(err, things) {
        response.send(things);
      });
    });
  });
}
module.exports.sendThings = sendThings;
I'd like to avoid passing my http response object to my database handler or (worse) handling my db request in my http response handler.
I recently realized that what I want to do is create an event handler that registers an http request/response and waits for a response (event) from database before processing and sending the http response.
That sounds like a lot of duplication of what node.js already does though. Is there an existing framework that handles this use case?
Here's the solution I've come up with.
I used mongojs, which greatly simplifies the mongodb interface (at the cost of some flexibility in configuration) and hides the nested callbacks the mongodb driver requires. It also makes the syntax much more like the mongo client.
I then wrap the HTTP Response object in a closure and pass this closure to the mongodb query method in a callback.
var MongoProvider = require('./MongoProvider');
MongoProvider.setCollection('things');

exports.index = function(request, response) {
  function sendResponse(err, data) {
    if (err) {
      return response.send(500, err);
    }
    response.send(data);
  }
  MongoProvider.fetchAll('things', sendResponse);
};
It is still essentially just passing the response object to the database provider, but by wrapping it in a closure that knows how to handle the response, it keeps that logic out of my database module.
A slight improvement is to use a function to create a response handler closure outside my request handler:
function makeSendResponse(response) {
  return function sendResponse(err, data) {
    if (err) {
      console.warn(err);
      response.send(500, {error: err});
      return;
    }
    response.send(data);
  };
}
So now my request handler just looks like this:
exports.index = function(request, response) {
  MongoProvider.fetchAll('things', makeSendResponse(response));
}
And my MongoProvider looks like this:
var mongojs = require('mongojs');

var MongoProvider = function(config) {
  this.configure(config);
  this.db = mongojs.connect(this.url, this.collections);
};

MongoProvider.prototype.configure = function(config) {
  this.url = config.host + "/" + config.name;
  this.collections = config.collections;
};

MongoProvider.prototype.connect = function(url, collections) {
  return mongojs.connect(this.url, this.collections);
};

MongoProvider.prototype.fetchAll = function fetchAll(collection, callback) {
  this.db(collection).find(callback);
};

MongoProvider.prototype.fetchById = function fetchById(id, collection, callback) {
  var objectId = collection.db.bson_serializer.ObjectID.createFromHexString(id.toString());
  this.db(collection).findOne({ "_id": objectId }, callback);
};

MongoProvider.prototype.fetchMatches = function fetchMatches(json, collection, callback) {
  this.db(collection).find(JSON.parse(json), callback);
};

module.exports = MongoProvider;
I can also extend MongoProvider for specific collections to simplify the API and do additional validation:
var ThingsProvider = function(config) {
  this.collection = 'things';
  this.mongoProvider = new MongoProvider(config);
  things = this.mongoProvider.db.collection('things');
};

ThingsProvider.prototype.fetchAll = function(callback) {
  things.fetchAll(callback);
};

// etc...

module.exports = ThingsProvider;
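With a wrapper like that, the route handler only ever touches the provider's callback API. A quick sketch of how the things route could use it (the config values are placeholders matching the shape read by configure above):
var ThingsProvider = require('./ThingsProvider');
var thingsProvider = new ThingsProvider({
  host: 'localhost',        // placeholder values, not real config
  name: 'mydb',
  collections: ['things']
});

exports.index = function(request, response) {
  thingsProvider.fetchAll(makeSendResponse(response));
};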
Well, first off I find Mongoose somewhat easier to use in a well-structured app than straight mongo. So that might help you.
Second, I think what you're trying to do could easily be accomplished through middleware (app level or route level), since you're using express already. Alternatively, parameter filtering, if your query will vary based on params. A pattern I've seen for the latter looks like this:
var User = mongoose.model('user'); // assumes your schema is previously defined

app.param('user_id', function(req, res, next, id) {
  User.findById(id, function(err, user) {
    if (err) next(err);
    else {
      req.user = user;
      next();
    }
  });
});
It still has some nesting, but not nearly so bad as your example, much more manageable. Then, let's say you have a '/profile' endpoint, you can just do:
app.get('/profile/:user_id', function(req, res) {
  res.render('profile', req.user);
});
