Unable to get variable from module exports - node.js

I have a connector.js file from which I want to export a dbResult object.
(function(){
    var Massive = require("massive");
    var connectionString = "postgres://postgres:postgres#localhost/postgres";
    var db = Massive.connectSync({connectionString : connectionString});
    var dbResult;
    db.query("Select * from company", function (err, data) {
        dbResult = data;
        console.log(data);
    });
})(module.exports);
Now in another file I am trying to get dbResult and display the data:
var express = require("express");
var app = express();
var connectorObject = require("./Connector.js");
var Massive = require("massive");

app.get("/api/Steves", function(req, res){
    res.set("Content-Type", "application/json");
    res.send(connectorObject.dbResult);
});

app.listen(3000);
console.log("Server Started on port 3000...");
But when I hit the URL, I am not able to see any response. Am I missing anything here?

What you want to do is export a function that can be evaluated later for the result:
var Massive = require("massive");
var connectionString = "postgres://postgres:postgres#localhost/postgres";
var db = Massive.connectSync({connectionString : connectionString});

module.exports.getCompanies = function(callback) {
    db.query("Select * from company", callback);
};
Then you can access it from your other files as:
var connector = require('./Connector');

connector.getCompanies(function(err, data) {
    if (err) return console.error(err);
    console.log(data);
});
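Applied to the Express app from the question, the route can then send the response from inside the callback once the query has finished. A minimal sketch (route path, port and content type are taken from the question; the error handling is an assumption):
var express = require("express");
var connector = require("./Connector");
var app = express();

app.get("/api/Steves", function (req, res) {
    connector.getCompanies(function (err, data) {
        // The response is sent only after the query has returned
        if (err) return res.status(500).send(err.message);
        res.set("Content-Type", "application/json");
        res.send(data);
    });
});

app.listen(3000);
console.log("Server Started on port 3000...");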

Related

Data does not display on first try only upon refresh

I am new to node, express and mongo so I need some guidance. I set up a mongo database with 3 objects. I created an application with a route so that http://localhost:3000/Employeeid will lead the page to display all 3 objects in the database, which it does. However, on each subsequent refresh the same data gets displayed multiple times, growing with the number of refreshes (i.e. after the first refresh the data is duplicated once, after the second refresh it is duplicated twice). Does anyone know what may be wrong?
var express = require('express')
var app = express()
var MongoClient = require('mongodb').MongoClient
var url = 'mongodb://localhost:27017'
var str = ''

app.route('/Employeeid').get(function(req, res) {
    MongoClient.connect(url, { useNewUrlParser: true }, function(err, client) {
        var db = client.db('EmployeeDB')
        var cursor = db.collection('Employee').find()
        cursor.forEach(function(item) {
            if (item != null) {
                str = str + ' Employee id&nbsp ' + item.Employeeid + '</br>'
            }
        })
        res.send(str)
        client.close()
    })
})

var server = app.listen(3000, function() {})
You are appending to str every time the /Employeeid route is called. To fix this, declare str inside the callback:
app.route('/Employeeid').get(function(req, res) {
    MongoClient.connect(url, { useNewUrlParser: true }, function(err, client) {
        var str = ''
        var db = client.db('EmployeeDB')
        var cursor = db.collection('Employee').find()
        cursor.forEach(function(item) {
            if (item != null) {
                str = str + ' Employee id&nbsp ' + item.Employeeid + '</br>'
            }
        })
        res.send(str)
        client.close()
    })
})
Side note - you should reuse the Mongo connection instead of calling connect and close on every request.
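A minimal sketch of that suggestion (connect once at startup, keep the db handle in a variable, and only read from it inside the route); it assumes the connection is ready before traffic arrives:
var express = require('express')
var app = express()
var MongoClient = require('mongodb').MongoClient
var url = 'mongodb://localhost:27017'
var db = null

// Connect once at startup and reuse the handle for every request
MongoClient.connect(url, { useNewUrlParser: true }, function(err, client) {
    if (err) throw err
    db = client.db('EmployeeDB')
})

app.route('/Employeeid').get(function(req, res) {
    // Requests that arrive before the connection is ready would need extra handling
    db.collection('Employee').find().toArray(function(err, items) {
        if (err) return res.send('')
        var str = ''
        items.forEach(function(item) {
            str = str + ' Employee id&nbsp ' + item.Employeeid + '</br>'
        })
        res.send(str)
    })
})

app.listen(3000)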
The reason why you're getting duplicate results is that you are not resetting str after sending the result: str is a global variable, so it keeps growing with every request. Your code can also be modified for performance, like this:
var express = require('express')
var app = express()
var MongoClient = require('mongodb').MongoClient
var url = 'mongodb://localhost:27017'
var str = ''
var db = null

MongoClient.connect(url, { useNewUrlParser: true }, function(err, client) {
    if (err) {
        throw new Error(err)
    }
    db = client.db('EmployeeDB') // we are storing the db reference in the global variable db
})

app.route('/Employeeid').get(function(req, res) {
    db.collection('Employee').find().toArray(function(err, cursor) {
        if (err) {
            res.send('')
            throw new Error(err)
        }
        cursor.forEach(function(item) {
            if (item != null) {
                str = str + ' Employee id&nbsp ' + item.Employeeid + '</br>'
            }
        })
        res.send(str)
        str = ''
    })
})
var server = app.listen(3000, function() {})
If you don't move res.send() into the callback function, you may send the result before getting the data from the db, due to the async nature of Node.

Multiple queries in documentdb-q-promises for Nodejs

I want to render a page with the results of two different queries against Cosmos DB using documentdb.
I have 2 queries:
var FirstQuery = {
    query: 'SELECT * FROM FactoryData',
};
var SecondQuery = {
    query: 'SELECT * FROM StoreData',
};
And I have this to get the data:
docDbClient.queryDocuments(collLink, FirstQuery).toArray(function (err, results) {
    value1 = results;
});
docDbClient.queryDocuments(collLink, SecondQuery).toArray(function (err, results) {
    value2 = results;
});
Then I want to render the view with those results, but I can't get it rendering from outside of these functions.
res.render('view.html', {"value1" : value1 , "value2" : value2});
I know that this code will not work; I was trying to implement promises but didn't know how to do it with documentdb-q-promises.
I have already read a lot of documentation about Q promises but I don't get it.
Can someone explain to me how I can do it? I'm a beginner.
Based on your requirements, I followed the npm docs and the test code on GitHub to test the following code in my local Express project. Please refer to it.
var express = require('express');
var router = express.Router();
var DocumentClient = require('documentdb-q-promises').DocumentClientWrapper;

var host = 'https://***.documents.azure.com:443/'; // Add your endpoint
var masterKey = '***'; // Add the master key of the endpoint
var client = new DocumentClient(host, {masterKey: masterKey});

var collLink1 = 'dbs/db/colls/import';
var FirstQuery = 'select c.id,c.name from c';
var collLink2 = 'dbs/db/colls/item';
var returnArray = [];

client.queryDocuments(collLink1, FirstQuery).toArrayAsync()
    .then(function(response) {
        console.log(response.feed);
        var map = {};
        map['value1'] = response.feed;
        returnArray.push(map);
        return client.queryDocuments(collLink2, FirstQuery).toArrayAsync();
    })
    .then(function(response) {
        console.log(response.feed);
        var map = {};
        map['value2'] = response.feed;
        returnArray.push(map);
    })
    .fail(function(error) {
        console.log("An error occurred", error);
    });

router.get('/', function(req, res, next) {
    res.send(returnArray);
});

module.exports = router;
Hope it helps you.
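If you need the two result sets fetched on each request and passed to res.render as in the original snippet, a minimal sketch along the same lines would be the following (it assumes a SecondQuery string defined like FirstQuery, and a view.html template as in the question):
var SecondQuery = 'select c.id,c.name from c'; // assumed: a second query string like FirstQuery

router.get('/', function(req, res, next) {
    var value1;
    client.queryDocuments(collLink1, FirstQuery).toArrayAsync()
        .then(function(response) {
            value1 = response.feed;
            // Start the second query only after the first one has finished
            return client.queryDocuments(collLink2, SecondQuery).toArrayAsync();
        })
        .then(function(response) {
            // Both result sets are available here, so it is safe to render
            res.render('view.html', { "value1": value1, "value2": response.feed });
        })
        .fail(function(error) {
            next(error);
        });
});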

NodeJS Mongoose not saving data

My system creates around 30 collections every hour. My server gets thousands of requests per hour. I have big data and multiple collections, and I use MongoDB with Node.js for saving the data.
In the modelControl function of the pariteModel object in modelparite.js (see the code below) I check whether the schema has been created before, and I either create the schema or use the already created one.
The first collections are created and data is saved to MongoDB, but when another collection should be created it doesn't work. For example:
The EURJPY_20160107_16 collection is created, but the EURJPY_20160107_17 collection is not. I have checked mongoose.models in modelparite.js: EURJPY_20160107_17 is created, but the instance created from the EURJPY_20160107_17 schema is not saved to the database.
My server files are like this. The app.js file is my bootstrap file:
var http = require('http'),
    dispatcher = require('httpdispatcher');

require('./mongo.js');

function handleRequest(request, response){
    try {
        dispatcher.dispatch(request, response);
    } catch(err) {
        console.log(err);
    }
}

var server = http.createServer(handleRequest);
server.listen(3663, function(){
    console.log('listening the port: ' + 3663);
});
This is mongo.js, which I require in app.js. This file is used to save data to MongoDB:
var dispatcher = require('httpdispatcher'),
    url = require('url'),
    moment = require('moment'),
    md = moment().format("YYYYMMDD_HH"),
    myModel = require('./modelparite.js');

dispatcher.onGet('/mongo', function(req, res){
    var url_parts = url.parse(req.url, true);
    // This is the collection name. Output like this: USDTRY_20160107_16
    var colName = url_parts.query.symbol + '_' + md;
    var tarih = new Date();
    var pariteModel = myModel.returnModel(colName);
    var yeniParite = new pariteModel({
        symbol: url_parts.query.symbol,
        bid: url_parts.query.bid,
        ask: url_parts.query.ask,
        timeup: moment(tarih).format("YYYY-MM-DD HH:mm:ss")
    });
    yeniParite.save(function (err, data) {
        if (err) console.log(err);
        console.dir(data, true);
    });
    res.writeHead(200, {'Content-Type': 'text/html'});
    res.end();
});
And this is my model file modelparite.js, which I require in mongo.js. This file is used to create the schema with Mongoose:
var mongoose = require('mongoose'),
    helper = require('./helper.js');

require('mongoose-double')(mongoose);
mongoose.connect('mongodb://localhost:27017/forex');

var pariteModel = {
    pariteSchema: "",
    initSchema: function(){
        var Schema = mongoose.Schema;
        this.pariteSchema = new Schema({
            symbol: String,
            bid: mongoose.Schema.Types.Double,
            ask: mongoose.Schema.Types.Double,
            timeup: Date
        });
    },
    modelControl: function(modelName){
        if(mongoose.models[modelName]){
            return true;
        }
        return false;
    },
    returnModel: function(modelName){
        modelName = helper.whichParity(modelName);
        //console.log(modelName);
        if(this.modelControl(modelName)){
            //console.log(mongoose.model(modelName));
            return mongoose.model(modelName);
        }else{
            this.initSchema();
            return mongoose.model(modelName, this.pariteSchema);
        }
    },
}

module.exports = pariteModel;

mocha test sends `test` as variable to node app

When writing the tests for my entry file, index.js, I run into the problem that the command mocha test passes test as an argument to index.js, since index.js uses process.argv to receive parameters when running in a development environment. I had thought that using something like minimist to name the parameters would fix this; however, the problem still remains when running the tests. As a result my tests do not use the event object provided in my test suites, as shown in the following code.
How do I get around this, so that when running my tests, it uses the event object I provide in my test set-up and not the command mocha test?
index.js
'use strict';

var _ = require("underscore");
var async = require('async');
var argv = require("minimist")(process.argv.slice(2));
var getprotocol = require("./getProtocol");
var _getprotocol = getprotocol.getProtocol;
var S3rs = require("./S3resizer");
var s3resizer = S3rs.rs;
var objCr = require("./objectCreator");
var createObj = objCr.creator;
var fileRs = require("./fileResizer");
var fileResizer = fileRs.rs;
var configs = require("./configs.json");
var mkDir = require("./makeDir");
var makeDir = mkDir.handler;

exports.imageRs = function (event, context) {
    var _path = argv.x || event.path; // argv.x used to be process.argv[2]
    console.log("Path, %s", _path);
    var _dir = argv.y; // used to be process.argv[3]
    console.log(_dir);
    var parts = _getprotocol(_path);
    var imgName = parts.pathname.split("/").pop();
    console.log("imgName: %s", imgName);
    var s3Bucket = parts.hostname;
    var s3Key = imgName;
    var _protocol = parts.protocol;
    console.log(_protocol);
    // RegExp to check for image type
    var imageTypeRegExp = /(?:(jpg)|(png)|(jpeg))$/;
    var sizesConfigs = configs.sizes;
    var obj = createObj(_path);
    // Check if file has a supported image extension
    var imgExt = imageTypeRegExp.exec(s3Key);
    if (imgExt === null) {
        console.error('unable to infer the image type for key %s', s3Key);
        context.done(new Error('unable to infer the image type for key %s' + s3Key));
        return;
    }
    var imageType = imgExt[1] || imgExt[2];
    // Do more stuff here
};

if (!process.env.LAMBDA_TASK_ROOT) {
    exports.imageRs();
}
test.js
describe("imgeRs", function () {
var getprotocol = require("../getProtocol");
var S3rs = require("../S3resizer");
var objCr = require("../objectCreator");
var mkDir = require("../makeDir");
var fileResizer = require("../fileResizer");
describe("Calling S3", function () {
describe("Success call", function () {
var testedModule, eventObj, contextDoneSpy, S3resizerStub, objCreatorStub, getProtocolStub, fakeResults, mkDirStub, fileResizerStub;
before(function (done) {
contextDoneSpy = sinon.spy();
S3resizerStub = sinon.stub(S3rs, "rs");
objCreatorStub = sinon.stub(objCr, 'creator');
getProtocolStub = sinon.stub(getprotocol, "getProtocol");
mkDirStub = sinon.stub(mkDir, "handler");
fileResizerStub = sinon.stub(fileResizer, "rs");
eventObj = {"path": "s3://theBucket/image.jpeg"};
fakeResults = ["resized"];
testedModule = proxyquire("../index", {
'./getProtocol': {
'getProtocol': getProtocolStub
},
'./S3resizer': {
'rs': S3resizerStub
},
'./objectCreator': {
'creator': objCreatorStub
},
'./makeDir': {
'handler': mkDirStub
},
'./fileResizer': {
'rs': fileResizerStub
}
});
S3resizerStub.callsArgWith(5, null, fakeResults);
testedModule.imageRs(eventObj, {done: function (error) {
contextDoneSpy.apply(null, arguments);
done();
}});
});
after(function () {
S3rs.rs.restore();
objCr.creator.restore();
getprotocol.getProtocol.restore();
mkDir.handler.restore();
fileResizer.rs.restore();
});
it("calls context.done with no error", function () {
expect(contextDoneSpy).has.been.called;
});
});
});
});

Get model from mongoose db

I'm currently looking into building a small REST-based service with which I can POST some data into a Mongoose db and GET the data back.
Here's my main.js file:
var http = require("http");
var DAO = require("./DAO");
var express = require("express");
var util = require('util');

var app = express();
app.use(express.bodyParser());

app.post('/postIsles', function(req, res){
    DAO[req.method](req.body);
    res.send("body" + req.body.name);
});

app.get('/getIsles', function(req, res){
    var isleVar = DAO[req.method](req);
    res.send(isleVar);
});

app.listen("3000");
console.log("\nApp available at http://127.0.0.1:3000\n");
And DAO.js:
var mongoose = require('mongoose');

//Connect to database
mongoose.connect('mongodb://127.0.0.1:27017/library_database');

//Schemas
var Isle = new mongoose.Schema({
    name: String,
    description: String,
    lastStocked: Date
});

//Models
var IsleModel = mongoose.model('Isle', Isle);

function POST(request) {
    var name = request.name;
    var description = request.description;
    var lastStocked = request.lastStocked;
    console.log("POST REQ +" + request);
    var isle = new IsleModel({
        name: name,
        description: description,
        lastStocked: lastStocked
    });
    isle.save(function(err) {
        if (!err) {
            return console.log('created');
        } else {
            return console.log(err);
        }
    });
}

function GET(request) {
    return IsleModel.find(function(err, islesT) {
        if (!err) {
            console.log("isles :" + islesT);
            return islesT;
        } else {
            return console.log(err);
        }
    });
}

exports.POST = POST;
exports.GET = GET;
When I try to run the GET, I get the following error:
TypeError: Converting circular structure to JSON
at Object.stringify (native)
I'm a bit unsure how to overcome this.
Remember when using Node.js: any operation that involves IO will be asynchronous.
Model#find is an asynchronous method, so isleVar is not set to the result you're expecting. Your result will only be available inside the anonymous function that you pass into IsleModel.find.
To fix your GET method, you'll need to modify your code to take into account the asynchronicity of the DB request and only send the response once your app has had a chance to retrieve data.
Below is an example of one possible solution to fix /getIsles:
In main.js, modify your get route to pass in res (so it can be handled asynchronously):
app.get('/getIsles', function(req, res){
    return DAO[req.method](req, res);
});
In DAO.js, have response send the data inside of your callback to IsleModel.find:
function GET(request, response) {
    IsleModel.find(function(err, islesT) {
        if (!err) {
            console.log("isles :" + islesT);
            response.send(islesT);
        } else {
            return console.log(err);
        }
    });
}
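As a variation on the same idea, if you'd rather keep Express objects out of DAO.js, GET can accept a callback instead of the response object. This is just a sketch of that design choice (the error handling shown is an assumption, not part of the original answer):
// DAO.js
function GET(callback) {
    IsleModel.find(function(err, islesT) {
        callback(err, islesT);
    });
}
exports.GET = GET;

// main.js
app.get('/getIsles', function(req, res) {
    DAO.GET(function(err, isles) {
        if (err) return res.status(500).send(err.message);
        res.send(isles);
    });
});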
