I'm writing a web application using Express. It has a RESTful API. The authentication schemes are different for the web interface and the RESTful API. Right now I have something like the following (the auth mechanisms are stripped out):
problem.js
// Web
exports.list = function(req, res) {
    listProblems(function(err, problems) {
        if (err) {
            res.send(err);
        } else {
            res.render('problems', { title: 'Problems', problems: problems });
        }
    });
};

// REST API
exports.apiList = function(req, res) {
    listProblems(function(err, problems) {
        if (err) {
            res.send(err);
        } else {
            res.json(problems);
        }
    });
};

// Named function to query MongoDB, common to both Web and REST
function listProblems(callback) {
    var query = Problem.find().select(DEFAULT_FIELDS);
    query.exec(callback);
}
Routes in app.js
app.get('/problems', problem.list);
app.get('/api/v1/problems', problem.apiList);
I'm not sure if I'm doing the right thing. Please let me know. Many thanks in advance.
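For comparison, here is a minimal sketch of one way the remaining duplication could be collapsed, assuming the same listProblems helper; the two handlers would then differ only in the send function passed to a small factory:

// Sketch only: build each handler from a "send" strategy
function makeListHandler(send) {
    return function(req, res) {
        listProblems(function(err, problems) {
            if (err) {
                res.send(err);
            } else {
                send(res, problems);
            }
        });
    };
}

// Web
exports.list = makeListHandler(function(res, problems) {
    res.render('problems', { title: 'Problems', problems: problems });
});

// REST API
exports.apiList = makeListHandler(function(res, problems) {
    res.json(problems);
});

Whether that is clearer than two explicit handlers is a matter of taste; the routes in app.js stay exactly the same.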
First of all, this is one of my first projects in Node.js so I'm very new to it.
I have a project I want to make that is a SOAP (I know, SOAP... backwards compatibility, huh?) interface that connects to an Oracle database.
So I have a WSDL describing what these functions look like (validation for addresses and stuff) and I have a connection to the database.
Now, when using the soap npm module, you need to create a server and listen using a service object that responds to requests. I have a separate file that contains my SOAP service, but this service should query the database to get its results.
How would I go about sort of 'injecting' my database service into my SOAP service so that whenever a SOAP call is done, it orchestrates this to the correct method in my database service?
This is what my code looks like:
databaseconnection.js
var oracledb = require('oracledb');
var dbConfig = require('../../config/development');

var setup = exports.setup = (callback) => {
    oracledb.createPool({
        user: dbConfig.user,
        password: dbConfig.password,
        connectString: dbConfig.connectString
    }, function(err, pool) {
        if (err) { console.error(err.message); return; }

        pool.getConnection(function(err, connection) {
            if (err) {
                console.error(err.message);
                return callback(null);
            }
            return callback(connection);
        });
    });
};
databaseservice.js
var DatabaseService = function(connection) {
    this.database = connection;
};

function doSomething(callback) {
    if (!this.database) { console.log('Database not available.'); return; }

    this.database.execute('SELECT * FROM HELP', function(err, result) {
        callback(result);
    });
}

module.exports = {
    DatabaseService: DatabaseService,
    doSomething: doSomething
};
soapservice.js
var myService = {
    CVService: {
        CVServicePort: {
            countryvalidation: function(args, cb, soapHeader) {
                console.log('Validating Country');
                cb({
                    name: args
                });
            }
        }
    }
};
server.js
app.use(bodyParser.raw({ type: function() { return true; }, limit: '5mb' }));

app.listen(8001, function() {
    databaseconnection.setup((callback) => {
        var temp = databaseservice.DatabaseService(callback);
        soapservice.Init(temp);
        var server = soap.listen(app, '/soapapi/*', soapservice.myService, xml);
        databaseservice.doSomething((result) => {
            console.log(result.rows.length, ' results.');
        });
    });
    console.log('Server started');
});
How would I go about adding the databaseservice.doSomething() to the countryvalidation soap method instead of 'name: args'?
Also: I feel like the structure of my code is very, very messy. I tried finding some good examples online on how to structure the code, but as for services and database connections (and combining them) I didn't find much. Any comments on this structure are very welcome; I'm here to learn, after all.
Thank you
Dieter
The first thing I see that looks a little off is databaseconnection.js. It should be creating the pool, but that's it. Generally speaking, a connection should be obtained from the pool when a request comes in and released when you're done using it to service that request.
Have a look at this post: https://jsao.io/2015/02/real-time-data-with-node-js-socket-io-and-oracle-database/ There are some sample apps you could have a look at that might help. Between the two demos, the "employees-cqn-demo" app is better organized.
Keep in mind that the post is a little dated now; we've made enhancements to the driver that make it easier to use. It's on my list to do a post on how to build a RESTful API with Node.js and Oracle Database, but I haven't had a chance to do it yet.
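Roughly, that division of responsibility could look like the sketch below (reusing the config object from the question; error handling trimmed, and SELECT * FROM HELP is just the example query from databaseservice.js):

// databaseconnection.js (sketch): create the pool once at startup
var oracledb = require('oracledb');

exports.createPool = function(dbConfig, callback) {
    oracledb.createPool({
        user: dbConfig.user,
        password: dbConfig.password,
        connectString: dbConfig.connectString
    }, callback);
};

// databaseservice.js (sketch): get a connection per request, release it when done
exports.doSomething = function(pool, callback) {
    pool.getConnection(function(err, connection) {
        if (err) { return callback(err); }

        connection.execute('SELECT * FROM HELP', function(err, result) {
            // always give the connection back to the pool, even on error
            connection.release(function() {});
            callback(err, result);
        });
    });
};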
Yesterday I ran into some unusual behavior with MongoDB.
So... I store countries and languages (with their codes) in collections, and when the client-side application needs this data, it sends GET requests to fetch it. The two requests happen simultaneously:
function init() {
    helperService
        .getCountries()
        .then(success)
        .catch(commonService.handleError);

    function success(res) {
        self.countries = res.data;
    }
}

function init() {
    helperService
        .getLanguages()
        .then(success)
        .catch(commonService.handleError);

    function success(res) {
        self.languages = res.data;
    }
}
I send these requests from an Angular component's $onInit.
The backend code looks pretty simple:
var country = require('countryModel');
var language = require('languageModel');

function getCountries(req, res, next) {
    return country
        .find({})
        .then(success)
        .catch(next);

    function success(data) {
        res.json(data);
    }
}

function getLanguages(req, res, next) {
    return language
        .find({})
        .then(success)
        .catch(next);

    function success(data) {
        res.json(data);
    }
}
Locally everything works as expected, but after deploying the application to a Linux server I often see 404 errors: 'Cannot GET /api/language' and 'Cannot GET /api/country'. Sometimes I get the data, but more often I get one or both of these errors.
Could anybody give me an idea of what is wrong?
It seems to me that you have a problem with how your routes are registered. Please check that.
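For example (just a sketch, using the handler names from your code and assuming an Express app), the API routes need to be registered before any catch-all or error middleware:

// routes sketch: mount the API handlers before any catch-all / 404 handler
var express = require('express');
var router = express.Router();

router.get('/country', getCountries);
router.get('/language', getLanguages);

app.use('/api', router);

// anything like app.use(function(req, res) { res.sendStatus(404); })
// or a static/catch-all route must come after the app.use('/api', ...) line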
I am relatively new to web development and am trying to figure out how to publish the server environment (dev, test, prod, etc) to the front-end.
I am using Node.js with Express, but the following code is closer to pseudo-code because this is more of a design pattern question.
One way to publish the server environment with Express is to add this to the middleware:
app.use(function(req, res, next) {
    res.locals.env = {}; // we add the property env to locals
    next();              // without this the request would hang
});
so now in a front-end template we can access the 'env' locals variable:
<div>
<% app.env = env %> //hopefully 'env' will be available in a front-end template
</div>
I am not sure if the above is standard, but I feel like it certainly isn't ideal.
So I was thinking, perhaps we could do this instead, for either the first HTTP request, first socket request, or all requests(?):
// pseudo-code below
app.get('/', function(req, res, next) {
    if (req.isAuthenticated()) {
        socket.on('received-env-info', function(err, msg) {
            res.render('index', { auth: true });
        });
        socket.emit('send-env-info', env);
    } else {
        res.render('index', { auth: false });
    }
});
In this way we can be assured that the client knows what the environment is (or any other server variables) before any HTML is sent to the client. (We assume there is some socket.io handler on the client that subsequently sets a global env on the client to the value that was sent from the server.)
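For reference, that client-side handler might look roughly like this (a sketch using the event names from the snippet above; storing the value on window is just for illustration):

// client side (sketch): receive the env, expose it globally, then acknowledge
socket.on('send-env-info', function(env) {
    window.env = env;                    // make it available to client code
    socket.emit('received-env-info');    // tells the server it can render now
});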
is this a good design pattern, or a bad one?
extra credit: we could take the same idea, and use it for authentication too:
app.get('/', function(req, res, next) {
    var infoForClient = { auth: null, env: env }; // should probably default auth to false for security, but using 'null' for illustration purposes
    if (req.isAuthenticated()) {
        infoForClient.auth = true;
    } else {
        infoForClient.auth = false;
    }
    socket.on('received-info-from-client', function(msg) {
        if (msg === 'success') { // some success message or another
            res.render('index', infoForClient);
        } else {
            infoForClient.auth = false;
            res.render('index', infoForClient);
        }
    });
    socket.emit('send-info-to-client', infoForClient);
});
I just need to know if this is a sane approach or not.
This would work, but what's the gain vs
// B
app.get('/', function(req, res) {
    res.json({
        info: info,
        html: template.render(info)
    });
});
or
// C
socket.on('get-index', function() {
    socket.emit('index', {
        info: info,
        html: template.render(info)
    });
});
or with the template moved client side, simply return the info.
Once an intelligent client is in the picture, rendering the template server-side is no longer needed in the first place.
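In that case the route itself shrinks to just returning the data, for example:

// with the template moved client side, the server only returns the info
app.get('/', function(req, res) {
    res.json({ info: info });
});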
So looking at the client code
// A
socket.on('send-info-to-client', function(infoForClient) {
    handleInfo(infoForClient);
    socket.emit('received-info-from-client', true);
});

request('/', function(err, res, body) {
    if (err) { return handleErr(err); }
    handleHtml(body);
});
handleInfo and handleHtml end up split in two.
// B
request('/', function(err, res, body) {
    if (err) { return handleErr(err); }
    handleInfo(body.info);
    handleHtml(body.html);
});

// C
socket.emit('get-index');
socket.on('index', function(msg) {
    handleInfo(msg.info);
    handleHtml(msg.html);
});
the handlers can be unified, and in the final case (with the template moved to the client side):
// D
request('/', function(err, res, body) {
    if (err) { return handleErr(err); }
    handleInfo(body.info);
    handleHtml(template.render(body.info));
});
I'm converting an MS Access database to a web app. I'm using AngularJS, Node.js with the Express framework, and MySQL as the database.
In MS Access you don't have an explicit edit/save step: when you edit something, the database changes instantly. I like this; it feels smooth. So I want the same behavior in the web app. My question is: will there be any problems with this approach in my web app?
This is a piece of my node js code which updates the database with a restcall:
/*
  Post /api/products/ HTTP/1.1
*/
exports.editProduct = function(req, res) {
    console.log(req.body);
    var post = [{ title_en: req.body.title_en }, req.params.id];

    if (connection) {
        connection.query("UPDATE products SET ? WHERE id = ?", post, function(err, rows, fields) {
            if (err) throw err;
            res.contentType('application/json');
            res.write(JSON.stringify(rows));
            res.end();
        });
    }
};
And on the client side I use the $resource object:
$scope.save = function() {
    $scope.product.$save(function() {
        console.log('Save successful');
    });
};
And in the view I simply have inputs with ng-change:
<input ng-model="product.title_en" ng-change="save()">
Will this work well in production with a couple hundred users? What are the chances of blocking/crashing, etc.?
The only thing I see is if (err) throw err;
If there is an error, the server crashes, so replace it with a JSON response with a 500 status.
By the way, Express has a built-in way to output JSON.
It's also better to validate title_en and id (see the sketch after the code below).
exports.editProduct = function(req, res) {
    console.log(req.body);
    var post = [{ title_en: req.body.title_en }, req.params.id];

    if (connection) {
        connection.query("UPDATE products SET ? WHERE id = ?", post, function(err, rows, fields) {
            if (err) {
                return res.status(500).json({ error: 'Cannot update the product' });
            }
            res.json(rows);
        });
    }
};
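As for validating title_en and id, a minimal guard before touching the database could look like this (a sketch; adapt the rules to your schema):

exports.editProduct = function(req, res) {
    var title = req.body.title_en;
    var id = parseInt(req.params.id, 10);

    // reject obviously bad input before running the UPDATE
    if (typeof title !== 'string' || title.length === 0 || isNaN(id)) {
        return res.status(400).json({ error: 'Invalid title_en or id' });
    }

    // ... run the UPDATE query as above, using [{ title_en: title }, id]
};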
One other thing: try restangular instead of $resource, it's a lot of fun :)
I am new to AngularJS and the Node.js/Express framework. I am working on a small application which uses the Angular and Express frameworks. I have an Express app running with a couple of endpoints: one for a POST action and one for a GET action. I am using the node-mysql module to store and fetch from a MySQL database.
This application is running on my laptop.
angular.js client:
controller
function ItemController($scope, storageService) {
    $scope.savedItems = storageService.savedItems();
    alert($scope.savedItems);
}
service
myApp.service('storageService', function($resource) {
    var Item = $resource('http://localhost\\:3000/item/:id',
        {
            id: '@id'
        },
        {
            query: {
                method: 'GET',
                isArray: true
            }
        });

    this.savedItems = function() {
        Item.query(function(data) {
            //alert(data);
            return data;
        });
    };
});
Express server with mysql database:
...
app.get('/item', item.list);
...
items.js
---------
exports.list = function(req, res) {
    var sql = 'select * from item';

    connect: function() {
        var mysql = require('mysql');
        var connection = mysql.createConnection({
            host: 'localhost',
            user: 'admin',
            database: 'test'
        });
        return connection;
    },

    query: function(sql) {
        var connection = this.connect();
        return connection.query(sql, function(err, results) {
            if (err) throw err;
            return results;
        });
    },

    res.send(results);
};
When I send a static array of items (JSON) from the server, $scope.savedItems gets populated.
But when I fetch the items from the database, even though the server is returning them, $scope.savedItems on the client is empty. Using $http directly did not help either.
I read about the async nature of $resource and $http in the AngularJS documentation, and I am still missing something or doing something wrong.
Thanks in advance and appreciate your help.
This has to do with the async nature of angular $resource.
$scope.savedItems = storageService.savedItems();
It returns an empty array immediately, which is populated after the data comes back. Your alert($scope.savedItems); will therefore show only an empty array. If you look at $scope.savedItems a little bit later, you will see that it has been populated. If you would like to use the data right after it has been returned, you can use a callback:
$scope.savedItems = storageService.savedItems(function(result) {alert(result); });
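On the service side that just means passing the callback through to Item.query, for example (a sketch using the same Item resource):

this.savedItems = function(callback) {
    // Item.query returns an empty array immediately and fills it in place;
    // the callback fires once the data has actually arrived from the server
    return Item.query(callback);
};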
Just as a quick note, you could also watch savedItems:
function ItemController($scope, storageService) {
    $scope.savedItems = storageService.savedItems();

    $scope.$watch(function() {
        return $scope.savedItems;
    }, function(newValue, oldValue) {
        if (typeof newValue !== 'undefined') {
            // Do something cool
        }
    }, true);
}
I suspect Node is not returning the MySQL results. The fact that it works for the static array and not for MySQL rules out issues with Angular. Can you add Firebug logs for the HTTP call, or Chrome developer tools logs? That could shed more light on the matter.