I built a Node.js server to act as an adapter: upon receiving a POST request containing some data, it extracts the data from the request body and forwards it to a few other external servers. Finally, my server sends back a response consisting of the result (success/fail) from each external server.
If there's only one endpoint to forward to, it seems fairly straightforward. However, when I have to forward to more than one server, I have to rely on something like Promise.all(), which has fail-fast behaviour. That means if one promise rejects (an external server is down), the combined promise rejects immediately and I can no longer collect the responses from the remaining servers.
Maybe this isn't the exact solution, but what I am posting could be a workaround for your problem.
A few days back I had the same problem, as I wanted to implement API versioning. Here is the solution I implemented; please have a look.
Architecture Diagram
Let me explain this diagram.
The diagram shows the usual initial configuration for the server. All API requests that come in are passed to the index.js file inside the release directory.
index.js (in release directory)
const express = require('express');
const fid = require('./core/file.helper');
const router = express.Router();
fid.getFiles(__dirname, './release').then(releases => {
  releases.forEach(release => {
    // release = release.replace(/.js/g,'');
    router.use(`/${release}`, require(`./release/${release}/index`));
  });
});
module.exports = router
Code snippet for core/file.helper.js:
// requiring path and fs modules
const path = require('path');
const fs = require('fs');

module.exports = {
  getFiles: (presentDirectory, directoryName) => {
    return new Promise((resolve, reject) => {
      // joining path of directory
      const directoryPath = path.join(presentDirectory, directoryName);
      // passing directoryPath and callback function
      fs.readdir(directoryPath, function (err, files) {
        // handling error
        if (err) {
          console.log('Unable to scan directory: ' + err);
          return reject(err);
        }
        // resolve with the list of file names
        resolve(files);
      });
    });
  }
};
Now, from this index file, the index.js inside each version folder is mounted.
Here is the code below for the index.js inside v1, v2, ...
const express = require('express');
const mongoose = require('mongoose');
const fid = require('../../core/file.helper');
const dbconf = require('./config/datastore');

const router = express.Router();

// const connection_string = `mongodb+srv://${dbconf.atlas.username}:${dbconf.atlas.password}@${dbconf.atlas.host}/${dbconf.atlas.database}`;
const connection_string = `mongodb://${dbconf.default.username}:${dbconf.default.password}@${dbconf.default.host}:${dbconf.default.port}/${dbconf.default.database}`;

mongoose.connect(connection_string, {
  useCreateIndex: true,
  useNewUrlParser: true
}).then(status => {
  console.log(`Database connected to mongodb://${dbconf.default.username}@${dbconf.default.host}/${dbconf.default.database}`);
  fid.getFiles(__dirname, './endpoints').then(files => {
    files.forEach(file => {
      file = file.replace(/\.js$/, '');
      router.use(`/${file}`, require(`./endpoints/${file}`));
    });
  });
}).catch(err => {
  console.log(`Error connecting database ${err}`);
});

module.exports = router;
Each of these index.js files inside a version folder is in turn mapped to the endpoints inside its endpoints folder.
Code for one of the endpoints is given below.
const express = require('express');
const router = express.Router();
const userCtrl = require('../controllers/users');
router.post('/signup', userCtrl.signup);
router.post('/login', userCtrl.login);
module.exports = router;
In this file we connect the endpoints to their controllers.
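The controllers themselves are not shown here; a minimal sketch of what ../controllers/users.js might contain follows. The User model and the plain-text password check are only placeholders for illustration, not part of the original setup.
// controllers/users.js (hypothetical sketch)
const User = require('../models/user'); // assumed Mongoose model

exports.signup = (req, res) => {
  User.create({ email: req.body.email, password: req.body.password })
    .then(user => res.status(201).json({ id: user._id }))
    .catch(err => res.status(500).json({ error: err.message }));
};

exports.login = (req, res) => {
  User.findOne({ email: req.body.email })
    .then(user => {
      if (!user || user.password !== req.body.password) { // plain-text check, for illustration only
        return res.status(401).json({ error: 'Invalid credentials' });
      }
      res.json({ message: 'Logged in' });
    })
    .catch(err => res.status(500).json({ error: err.message }));
};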
var config = {
  'targets': [
    'https://abc.api.xxx',
    'https://xyz.abc',
    'https://stackoverflow.net'
  ]
};

relay(req, resp, config);

function relay(req, resp, config) {
  doRelay(req, resp, config['targets'], relayOne);
}

function doRelay(req, resp, servers, relayOne) {
  var finalresponses = [];
  if (servers.length > 0) {
    var loop = function (servers, index, relayOne, done) {
      relayOne(req, servers[index], function (response) {
        finalresponses.push(response);
        if (++index < servers.length) {
          setTimeout(function () {
            loop(servers, index, relayOne, done);
          }, 0);
        } else {
          done(resp, finalresponses);
        }
      });
    };
    loop(servers, 0, relayOne, done);
  } else {
    done(resp, finalresponses);
  }
}

function relayOne(req, targetserver, relaydone) {
  // call the target server and return the response data
  /* return relaydone(response data); */
}

function done(resp, finalresponses) {
  console.log('ended');
  resp.writeHead(200, 'OK', {
    'Content-Type': 'text/plain'
  });
  resp.end(finalresponses.join('\n'));
}
It sounds like you are trying to design a reverse proxy. If you are struggling to get custom code to work, there is a free npm library which is very robust.
I would recommend node-http-proxy.
I have posted a link below, which will lead you directly to the "modify a response" section, since you mentioned modifying the API format in your question. Be sure to read the entire page, though.
https://github.com/http-party/node-http-proxy#modify-a-response-from-a-proxied-server
Note: this library is also very good because it supports SSL and can proxy both to localhost (servers on the same machine) and to servers on other machines (remote).
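A minimal sketch, roughly following the node-http-proxy README; the target URL and ports are placeholders:
const http = require('http');
const httpProxy = require('http-proxy');

// selfHandleResponse lets us modify the proxied response before sending it back
const proxy = httpProxy.createProxyServer({ selfHandleResponse: true });

proxy.on('proxyRes', (proxyRes, req, res) => {
  const body = [];
  proxyRes.on('data', chunk => body.push(chunk));
  proxyRes.on('end', () => {
    const original = Buffer.concat(body).toString();
    res.end(original.toUpperCase()); // modify the response however you need
  });
});

http.createServer((req, res) => {
  proxy.web(req, res, { target: 'http://localhost:9000' });
}).listen(8000);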
Promise.all() from MDN
It rejects with the reason of the first promise that rejects.
To overcome the problem, you'll need to catch() each request you've made.
e.g.
Promise.all([
  request('<url 1>').catch(err => ({ err })),
  request('<url 2>').catch(err => ({ err })),
  request('<url 3>').catch(err => ({ err }))
])
.then(([result1, result2, result3]) => {
  if (result1.err) { /* handle failure of url 1 */ }
  if (result2.err) { /* handle failure of url 2 */ }
  if (result3.err) { /* handle failure of url 3 */ }
})
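On newer Node versions (12.9+), Promise.allSettled() gives the same effect without a per-request catch(). A minimal sketch, assuming request() returns a promise:
Promise.allSettled([
  request('<url 1>'),
  request('<url 2>'),
  request('<url 3>')
]).then(results => {
  results.forEach((result, i) => {
    if (result.status === 'fulfilled') {
      // result.value is the response from server i
    } else {
      // result.reason is the error for server i
    }
  });
});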
I'm developing a RESTful API with Node and Express; my database is PostgreSQL, and I need to use the postgres package pg-promise.
I know that I need to connect my app to the database in the app.js file, but my question is: how should I use this connection in my endpoints?
I have routes and I am using controllers.
For example
app.js
// in this file, supposedly, I have to make the connection
const db = pgp('postgres://john:pass123@localhost:5432/products');
app.use('/products', productsRoute);
products.js (route)
router.get('/', ProductsController.get_all_products);
products.js (controller)
exports.get_all_products = (req, res, next) => {
  // Here I want to use the database connection to query for all
  // products in the database
}
How do I get access to the connection to do something like
db.any('SELECT * FROM products WHERE active = $1', [true])
  .then(function (data) {
    // success;
  })
  .catch(function (error) {
    // error;
  });
From the controller.
Update
OK, I'm now using node-postgres (pg). I saw it is better; thanks for the advice, people.
I want to create the db instance once and call it anywhere, specifically in the controllers.
Could I use app.locals to save my client? Connect, do a query and then close it, and do this from anywhere?
I haven't used pg-promise.
If it helps, you can use the PostgreSQL client for Node.js (pg). You can also use async/await with it.
Instead of a router, you can use Express middleware straight away, as follows.
// app.js:
const express = require('express')
const bodyParser = require('body-parser')
const app = express()
const port = 1234
const db = require('./dbconnector')
// ...omitted for brevity
// 'db' is exported from a file such as
// dbconnector.js.
app.get('/products', db.getProducts)
// In dbconnector.js:
const Pool = require('pg').Pool
const pool = new Pool({
  user: 'postgres',
  host: 'localhost',
  database: 'mydb',
  password: 'mypwd',
  port: 5432,
})

const getProducts = (request, response) => {
  pool.query('SELECT * FROM products ORDER BY id ASC', (error, results) => {
    if (error) {
      throw error
    }
    response.status(200).json(results.rows)
  })
}
// ...omitted for brevity
module.exports = {
getProducts
}
For modular design, please use a separate file (not app.js/index.js/server.js) for db connections as a best practice, and require that in your main app.js.
Here is help on the pg module.
Here's an example of how to use it:
// mydb.js
// 'db' is assumed to be the database instance created elsewhere (e.g. in app.js or a shared module)
const db = require('./db')

async function someDbQuery() {
  let result;
  try {
    result = await db.any('SELECT * FROM products WHERE active = $1', [true])
  } catch (e) {
    throw e
  }
  return result;
}

module.exports = { someDbQuery };
// in your controller after importing
const { someDbQuery } = require('./mydb.js')
exports.get_all_products = async (req, res, next) => {
  // Here I want to use the database connection to query for all
  // products in the database
  try {
    const result = await someDbQuery();
    // use result here
  } catch (e) {
    // handle error
    console.error(e)
  }
}
Side note:
From the pg-promise docs:
Built on top of node-postgres
node-postgres now supports promises too.
You do not need to do anything, pg-promise manages connections automatically. It will be allocated for the query and released right after. See examples.
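For the "create the db instance once and use it anywhere" part, a minimal sketch with pg-promise could look like this; the file names and the reuse of the connection string from your question are assumptions:
// db.js - create the pg-promise instance once and export it
const pgp = require('pg-promise')();
const db = pgp('postgres://john:pass123@localhost:5432/products'); // connection string from the question
module.exports = db;

// products.js (controller)
const db = require('../db'); // hypothetical path to db.js
exports.get_all_products = (req, res, next) => {
  db.any('SELECT * FROM products WHERE active = $1', [true])
    .then(data => res.json(data))
    .catch(error => res.status(500).json({ error: error.message }));
};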
I have Node.js code which works perfectly locally (127.0.0.1:CUSTOM_PORT). But now I would like to set it up to run on Google Cloud Functions.
This is the code I'm using to run it locally:
function connect_to_server() {
  const PORT = process.env.PORT || 8080;
  app.listen(PORT, '127.0.0.1', function () {
    console.log('---> SERVER IS RUNNING <---')
  })
}
Does someone know the way to set up a running server with Google Cloud Functions?
What port and URL should I use inside the Node.js code? Or do I not need to use them at all, since GCF already sets up a server for me?
GCF provides a trigger URL which can be hit, but it still does not work.
Full function without app.listen():
// CONFIGURATION
const express = require('express')
const app = express()
const config = require('./config')
const bodyParser = require('body-parser')
const moment = require('moment')
const sql = require("mssql")
const jwt = require('jwt-simple')
const compression = require('compression')

function token(token) {
  var secret = Buffer.from('xxx', 'hex')
  return jwt.decode(token, secret)
}

function sql_puller(res, req) {
  sql.connect(config, function (err) {
    if (err) {
      console.log(err)
      res.send(err.code)
    }
    const request = new sql.PreparedStatement()
    const {
      x
    } = req.body
    let newProps = {}
    x.forEach(filters => {
      newProps[filters.x] = filters.x
    })
    const isValidInput = validateInput(x, x, x, res)
    if (!isValidInput) {
      return
    }
    request.input('1', sql.VarChar(1))
    request.input('2', sql.VarChar(1))
    request.input('3', sql.VarChar(1))
    const sqlQuery = `XXXXXX`
    request.prepare(sqlQuery, err => {
      if (err) {
        console.log(err)
        res.send(err.code)
        return
      }
      request.execute({
        iso: x,
        start: x,
        end: x
      }, (err, recordset) => {
        request.unprepare(err => {
          if (err) {
            console.log(err)
            res.send(err.code)
            return
          }
        })
        if (err) {
          console.log(err)
          res.send(err.code)
          return
        }
        res.json(recordset)
        sql.close()
      })
    })
  })

  sql.on('close', function (err) {
    console.log('SQL Connection Closed.', err)
  })
  sql.on('error', function (err) {
    sql.close()
    console.log('SQL error occurred.', err)
  })
}

exports.main = function main() {
  app.use(compression())
  app.use(bodyParser.json())
  app.post('/', function (req, res) {
    try {
      res.setHeader('Cache-Control', 'public, max-age=3600')
      var decodedToken = token(req.body['Token'])
      console.log(req.body)
      console.log('Successfully connected - token accepted')
      // connect to your database
      if (decodedToken == "XXXXXX") {
        sql_puller(res, req)
      } else {
        console.log('Incorrect Token')
      }
    } catch (err) {
      if (err) {
        console.log(err)
        res.send('Invalid Token')
        return
      }
    }
  })
}
You cannot do it the way you have designed it. Google Cloud Functions has a maximum runtime, after which the function is terminated; as of today this limit is 540 seconds. Cloud Functions are invoked by an outside process; they do not wait for someone to connect to them (i.e. they don't listen, and they are not asleep). The exception is the HTTP trigger, which is not usable for presenting a website but can be used for actions.
There are companies that run their entire website using Cloud Functions, Cloud Datastore and Cloud Storage. The magic is using an API gateway product. An API gateway provides the URL, www.example.com, that customers go to. The API gateway then invokes Cloud Functions to handle the request. You create similar mappings for each page of your serverless website to Cloud Functions.
Many developers use Google App Engine to accomplish what you are trying to do: very low cost and very easy to develop for. Another excellent Google product for you to consider is Google Firebase. Google also has many products that are not serverless, such as containers on Compute Engine and Kubernetes.
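For the HTTP trigger case specifically, you don't call app.listen() at all; one common pattern is to export the Express app itself as the function handler. A minimal sketch, where the exported name and the deploy command details are assumptions to adapt to your setup:
// index.js for an HTTP-triggered Cloud Function
const express = require('express')
const bodyParser = require('body-parser')

const app = express()
app.use(bodyParser.json())

app.post('/', (req, res) => {
  // the token check and sql_puller(res, req) from the question would go here
  res.json({ ok: true })
})

// No app.listen(): Cloud Functions provides the HTTP server and port.
// Deploy with something like: gcloud functions deploy main --trigger-http --runtime nodejs18
exports.main = app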
I have successfully completed fetching data from MongoDB using an Express API and Node.js, and binding it to an Angular ng2-smart-table.
On click of the table's add button, I call the Express API, but my POST method in api.js is not called; I tried printing console.log inside the POST handler but it is not executed.
For better understanding, here is the code:
Component.ts
onCreateConfirm(event, _dataService): void {
  this._dataService.postMutualFunds(event.newData);
  event.confirm.resolve(event.newData);
}
data.service.ts
postMutualFunds(parameterValue: any) {
  return this._http.post('/add_mutual_funds', { params: parameterValue })
    .map(result => this.result = result.json().data);
}
api.js
var express = require('express');
var router = express.Router();
var MongoClient = require('mongodb').MongoClient;
var ObjectID = require('mongodb').ObjectID;
var bodyParser = require("body-parser");
const connection = (closure) => {
  return MongoClient.connect('mongodb://xxx.com:63898/xx', (err, db) => {
    if (err) {
      return console.log(err);
    }
    closure(db);
  });
}
// Get call
router.get('/mutual_funds', (req, res) => {
  console.log("test get"); // This is executed
  connection((db) => {
    var projection = { _id: 0 };
    db.collection('mutual_funds').find().project(projection).toArray().then((mutual_funds) => {
      response.data = mutual_funds;
      res.json(response);
    })
  })
})
// Post call
router.post('/mutual_funds', (req, res) => {
  console.log(req.body.params);
  console.log("test post");
  db.collection('mutual_funds').insertOne(req.body.params)
})
module.exports = router;
Now I am able to call api.js but not able to insert the data into MongoDB; please see the screenshot of the api.js execution output.
Why am I not able to post the data? (I updated my POST code above.) I'm encountering error 500.
You didn't subscribe to the service
onCreateConfirm(event, _dataService): void {
  this._dataService.postMutualFunds(event.newData).subscribe(
    (data) => {
      console.log(data)
    },
    (err) => {
      console.log(err)
    }
  )
  event.confirm.resolve(event.newData);
}
The service method returns an Observable of configuration
data, the component subscribes to the method's return value. The
subscription callback copies the data fields into the component's
config object, which is data-bound in the component template for
display.
Why are you not subscribing to the service function call?
this._dataService.postMutualFunds(event.newData).subscribe(
  (data) => { console.log(data) },
  (err) => console.log(err),
  () => console.log('Observable complete')
);
I am trying to check if an image exists in a folder.
If it exists I want to pipe its stream to res (I'm using Express)
If it does not exist I want to do another thing.
I created an async function that is supposed to either return the image's stream if it exists or false if it doesn't.
I do get a stream, but the browser loads forever, as if there were an issue with the stream.
Here is the minimal reproduction I could put together:
Link to runnable code
const express = require('express');
const path = require('path');
const fs = require('fs');

const app = express();

app.get('/', async (req, res) => {
  // Check if the image is already converted by returning a stream or false
  const ext = 'jpg';
  const imageConvertedStream = await imageAlreadyConverted(
    './foo',
    1,
    '100x100',
    80,
    ext
  );

  // Image already converted, we send it back
  if (imageConvertedStream) {
    console.log('image exists');
    res.type(`image/${ext}`);
    imageConvertedStream.pipe(res);
    return;
  } else {
    console.log('Image not found');
  }
});

app.listen(3000, () => {
  console.log('Server started on port 3000');
});

async function imageAlreadyConverted(
  basePath,
  id,
  size,
  quality,
  extWanted
) {
  return new Promise(resolve => {
    // If we know the wanted extension, we check if it exists
    let imagePath;
    if (extWanted) {
      imagePath = path.join(
        basePath,
        size,
        `img_${id}_${quality}.${extWanted}`
      );
    } else {
      imagePath = path.join(basePath, size, `img_${id}_${quality}.jpg`);
    }

    console.log(imagePath);

    const readStream = fs.createReadStream(imagePath);

    readStream.on('error', () => {
      console.log('error');
      resolve(false);
    });

    readStream.on('readable', () => {
      console.log('readable');
      resolve(readStream);
    });
  });
}
95% of my images will be available, and I need performance; I suppose checking with fs.stat and then creating the stream takes longer than just trying to create the stream and handling the error.
The issue was with the "readable" event. Once I switched to the "open" event, everything was fine.
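For reference, a minimal sketch of that change inside imageAlreadyConverted, resolving on 'open' instead of 'readable' so the stream is handed over before any data is consumed:
const readStream = fs.createReadStream(imagePath);
readStream.on('error', () => resolve(false));
// 'open' fires once the file descriptor is ready, without reading any data
readStream.on('open', () => resolve(readStream));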
I want to use gridfs-stream in a Node.js application.
A simple example is given in the documentation:
var mongoose = require('mongoose');
var Grid = require('gridfs-stream');
Grid.mongo = mongoose.mongo;

mongoose.connect('mongodb://localhost:27017/test');

// make sure the db instance is open before passing into `Grid`
mongoose.connection.once('open', function () {
  var gfs = Grid(mongoose.connection);
  // all set!
})
My problem is described by the comment:
make sure the db instance is open before passing into Grid
I try to use gfs in a POST request. Now, when the code gets initialized, the gfs variable is not defined yet.
api.post('/upload', function (req, res) {
  req.pipe(gfs.createWriteStream({
    filename: 'test'
  }).on('close', function (savedFile) {
    console.log('file saved', savedFile);
    return res.json({ file: savedFile });
  }));
})
Initializing my route from a callback seems kind of odd.
I read in this post (Asynchronous initialization of Node.js module) that require('') is performed synchronously, and since I rely on the connection being established, I'm kind of forced to wait.
Basically, I'm not sure if I should use an async pattern on startup now, or if I'm just missing a more elegant way to solve this.
I have a very similar problem with my server. In my case I am reading the HTTPS certs asynchronously and the software version from git asynchronously, and I want to make sure I have it all together by the time the user comes to log in, so I can pass the software version back as a reply to the login.
The solution is to use promises. Create the promises at startup for each activity. Then, in the code where you want to be sure everything is ready, just call then() on either the promise itself or on Promise.all(array of promises).
Here is an example of what I am doing to read the SSL certs to start the server:
// fs and path are required at the top of the file
const fs = require('fs');
const path = require('path');

class Web {
  constructor(manager, logger) {
    var self = this;
    this.server = false;
    this.logger = logger;

    var key = new Promise((resolve, reject) => {
      fs.readFile(path.resolve(__dirname, 'key.pem'), (err, data) => {
        if (err) {
          reject(err);
        } else {
          resolve(data);
        }
      });
    });

    var cert = new Promise((resolve, reject) => {
      fs.readFile(path.resolve(__dirname, 'certificate.pem'), (err, data) => {
        if (err) {
          reject(err);
        } else {
          resolve(data);
        }
      });
    });

    Promise.all([key, cert]).then(values => {
      var certs = {
        key: values[0],
        cert: values[1],
      };
      return certs;
    }).then(certs => {
      self.server = require('http2').createServer(certs, (req, res) => {
        // NOW started and can do the rest of the stuff
      });
      self.server.listen(...);
    });
  }
}