I have a problem with mongoosastic: it does not update Elasticsearch on the first update. When I update a document once, nothing happens; when I update it a second time, Elasticsearch applies the change from the first update...
Here is my code:
var User = system.mongoose.model('User', userSchema, 'user');

User.createMapping(function (err, mapping) {
  if (err) {
    console.log("error creating mapping");
    console.log(err);
  } else {
    console.log("Mapping created");
    console.log(mapping);
  }
});

//{$in: ['doctor','nurse']}
var stream = User.synchronize();
var count = 0;

stream.on('data', function (data) {
  count++;
});
stream.on('close', function () {
  console.log("Indexed " + count + " documents");
});
stream.on('error', function (err) {
  console.log(err);
});
Does anyone know what the problem is?
It is not clear from your question what you expected, and what you got.
But I am going to guess that you are experiencing a race condition because you started User.synchronize() before User.createMapping() finished.
If so, this might work better:
var User = system.mongoose.model('User', userSchema, 'user');

User.createMapping(function (err, mapping) {
  if (err) {
    console.log("error creating mapping");
    console.log(err);
  } else {
    console.log("Mapping created");
    console.log(mapping);
    doTheNextThing();
  }
});

function doTheNextThing () {
  //{$in: ['doctor','nurse']}
  var stream = User.synchronize();
  var count = 0;

  stream.on('data', function (data) {
    count++;
  });
  stream.on('close', function () {
    console.log("Indexed " + count + " documents");
  });
  stream.on('error', function (err) {
    console.log(err);
  });
}
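As a side note, on Node 8+ the same ordering can be expressed without nesting by promisifying createMapping with util.promisify. A minimal sketch, assuming the (err, mapping) callback signature shown above:
const { promisify } = require('util');

async function setupSearchIndex() {
  // promisify assumes the standard (err, mapping) callback used above
  const createMapping = promisify(User.createMapping.bind(User));
  const mapping = await createMapping();
  console.log("Mapping created", mapping);

  // start synchronize() only after the mapping exists
  const stream = User.synchronize();
  let count = 0;
  stream.on('data', () => count++);
  stream.on('close', () => console.log("Indexed " + count + " documents"));
  stream.on('error', (err) => console.log(err));
}

setupSearchIndex().catch((err) => console.log("error creating mapping", err));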
I am trying to call a function inside my POST API handler. I have multiple queries and want to wait for the function to complete before executing the next ones. The issue: the function doesn't finish its execution before the rest of the API handler runs. How can I wait for it to complete? I have searched but couldn't find anything that fits my code.
Here's the code:
function verifyEmail(mailToUpper) {
  var emailResult;
  db2.open(mydbConnection, (err, conn) => {
    if (!err) {
      console.log("Connected Successfully");
    } else {
      console.log("Error occurred while connecting to the database " + err.message);
    }
    conn.query(checkEmailQuery, [mailToUpper], (err, results) => {
      if (!err) {
        if (results.length > 0) {
          // res.write("Email already exists");
          emailResult = 0;
        } else {
          emailResult = 1;
        }
      }
      conn.close((err) => {
        if (!err) {
          console.log("Connection closed with the database");
        } else {
          console.log("Error occurred while trying to close the connection with the database " + err.message);
        }
      });
    });
  });
  return emailResult;
}
router.post('/api/postData', (req, res) => {
  //some stuff
  var waitingForResult;
  setTimeout(() => {
    waitingForResult = verifyEmail(mailToUpper);
  }, 2000);
  console.log(waitingForResult); //throwing an error of undefined
  if (waitingForResult === 1) { //not executing this
    //my other queries
  } else { //always executes this
    res.send("Email already exists");
  }
});
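The query runs asynchronously, so verifyEmail returns before emailResult is ever assigned, and a setTimeout only delays the call, it does not wait for it. One fix is to wrap the callback-based query in a Promise and await it in the route handler: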
function verifyEmail(mailToUpper) {
  return new Promise((resolve, reject) => {
    db2.open(mydbConnection, (err, conn) => {
      if (!err) {
        console.log("Connected Successfully");
      } else {
        console.log("Error occurred while connecting to the database " + err.message);
      }
      conn.query(checkEmailQuery, [mailToUpper], (err, results) => {
        if (!err) {
          if (results.length > 0) {
            // res.write("Email already exists");
            resolve(0);
          } else {
            resolve(1);
          }
        }
        conn.close((err) => {
          if (!err) {
            console.log("Connection closed with the database");
          } else {
            console.log("Error occurred while trying to close the connection with the database " + err.message);
          }
        });
      });
    });
  });
}
router.post('/api/postData', async (req, res) => {
  const waitingForResult = await verifyEmail(mailToUpper);
  if (waitingForResult === 1) {
    //my other queries
  } else {
    res.send("Email already exists");
  }
});
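One caveat: the Promise above only ever resolves. If db2.open or conn.query fails, reject is never called and the await hangs forever. A more defensive sketch (same assumed db2 API) rejects on both errors and always closes the connection:
function verifyEmail(mailToUpper) {
  return new Promise((resolve, reject) => {
    db2.open(mydbConnection, (err, conn) => {
      if (err) {
        return reject(err); // connection failure
      }
      conn.query(checkEmailQuery, [mailToUpper], (err, results) => {
        // close the connection regardless of the query outcome
        conn.close(() => {});
        if (err) {
          return reject(err); // query failure
        }
        resolve(results.length > 0 ? 0 : 1);
      });
    });
  });
}
With a rejecting Promise, wrap the await in try/catch so a database error returns a 500 instead of crashing the request.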
I am trying to find the best way to write this code. I fetch locations from a remote resource and need to check whether any new locations are present in the data; if there are, I should add them to my database, and if they are not new I just want to update them.
const http = require('http');
const timeout = 5000; //5 seconds
const MongoClient = require('mongodb').MongoClient;

// Database Name
const dbName = 'weatherApp';
const url = 'mongodb://localhost:27017';

// Connect using MongoClient
MongoClient.connect(url, function(err, client) {
  if (err) {
    console.log(err);
    return;
  }
  const locationsCollection = client.db(dbName).collection('locations');
  (function fetchTemperatureLoop() {
    console.log('Started http request..');
    http.get('remote url..', function(resp) {
      var data = '';
      resp.on('data', (chunk) => {
        data += chunk;
      });
      resp.on('end', () => {
        if (data.isJson()) {
          var locations = JSON.parse(data).toArray();
          (function locationsLoop() {
            var location = locations.pop();
            locationsCollection.findOne({location: location.location}, function(err, result) {
              if (err) {
                console.log(err);
                return;
              }
              if (result) {
                //Exists
                var measurements = result.measurements;
                measurements.push({timestamp: +new Date, temperature: location.temperature});
                locationsCollection.update({location: location.location}, {$set: {measurements: measurements}}, function(err) {
                  if (err) {
                    console.log(err);
                    return;
                  }
                  console.log('Added new temperature for location: ' + location.location);
                  continueLocationsLoop();
                });
              } else {
                //Doesnt exist
                location.measurements = [];
                location.measurements.push({timestamp: +new Date, temperature: location.temperature});
                locationsCollection.insert(location, function(err) {
                  if (err) {
                    console.log(err);
                    return;
                  }
                  console.log('Created new location: ' + location.location);
                  continueLocationsLoop();
                });
              }
            });
            function continueLocationsLoop() {
              if (locations.length) {
                locationsLoop();
              } else {
                setTimeout(fetchTemperatureLoop, timeout);
              }
            }
          })();
        }
      });
    }).on("error", (err) => {
      console.log("Error: " + err.message);
      console.log("Continue anyways..");
      setTimeout(fetchTemperatureLoop, timeout);
    });
  })();
});

String.prototype.isJson = function() {
  try {
    JSON.parse(this);
  } catch (e) {
    return false;
  }
  return true;
};

Object.prototype.toArray = function() {
  var arr = [];
  for (var key in this) {
    if (this.hasOwnProperty(key)) {
      arr.push(this[key]);
    }
  }
  return arr;
};
I really want to avoid using so many closures, but I don't want to repeat myself either. Any help rewriting this code in an optimal way is much appreciated.
My main problem was iterating through the locations and making the calls to the database.
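For reference, MongoDB can collapse the find/insert/update branches into a single upsert per location. A minimal sketch using the same field names as above (untested against your data; note the upserted document will only contain the location and its measurements, so extra fields from the remote payload would need an explicit $setOnInsert):
// One round trip per location: $push the new measurement and let
// upsert create the document when the location does not exist yet.
function saveMeasurement(locationsCollection, location, callback) {
  locationsCollection.update(
    {location: location.location},
    {$push: {measurements: {timestamp: Date.now(), temperature: location.temperature}}},
    {upsert: true},
    callback
  );
}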
Users upload files into my Express app. I need to calculate the hash of the uploaded file and then write the file to disk, using the calculated hash as the filename. I am trying to do it with the following code:
function storeFileStream(file, next) {
  createFileHash(file, function(err, hash) {
    if (err) {
      return next(err);
    }
    var fileName = path.join(config.storagePath, hash),
        stream = fs.createWriteStream(fileName);
    stream.on('error', function(err) {
      return next(err);
    });
    stream.on('finish', function() {
      return next();
    });
    file.pipe(stream);
  });
}
function createFileHash(file, next) {
  var hash = crypto.createHash('sha1');
  hash.setEncoding('hex');
  file.on('error', function(err) {
    return next(err);
  });
  file.on('end', function(data) {
    hash.end();
    return next(null, hash.read());
  });
  file.pipe(hash);
}
The problem is that after I calculate the file hash, the written file's size is 0. What is the best way to solve this?
Update
Following @poke's suggestion, I tried duplicating my stream. Now my code is:
var pass = require('stream').PassThrough; // "pass" is assumed to be stream.PassThrough

function storeFileStream(file, next) {
  var s1 = new pass();
  var s2 = new pass();
  file.pipe(s1);
  file.pipe(s2);
  createFileHash(s1, function(err, hash) {
    if (err) {
      return next(err);
    }
    var fileName = path.join(config.storagePath, hash),
        stream = fs.createWriteStream(fileName);
    stream.on('error', function(err) {
      return next(err);
    });
    stream.on('finish', function() {
      return next();
    });
    s2.pipe(stream);
  });
}
function createFileHash(file, next) {
  var hash = crypto.createHash('sha1');
  hash.setEncoding('hex');
  file.on('error', function(err) {
    return next(err);
  });
  file.on('end', function(data) {
    hash.end();
    return next(null, hash.read());
  });
  file.pipe(hash);
}
The problem with this code is that the end and finish events are not emitted. If I comment out file.pipe(s2);, the events are emitted, but then I get my original problem again.
This code fixed the problem:
var passThrough = require('stream').PassThrough; // assumed alias

var s1 = new passThrough(),
    s2 = new passThrough();

file.on('data', function(data) {
  s1.write(data);
  s2.write(data);
});
file.on('end', function() {
  s1.end();
  s2.end();
});
The correct and simple way is as follows: resume the piped PassThrough streams.
const { PassThrough } = require('stream');
const crypto = require('crypto');
const fs = require('fs');
const path = require('path');

function storeFileStream(file, directory, version, reject, resolve) {
  const fileHashSource = new PassThrough();
  const writeSource = new PassThrough();
  file.pipe(fileHashSource);
  file.pipe(writeSource);

  // this is the key point, see https://nodejs.org/api/stream.html#stream_three_states
  fileHashSource.resume();
  writeSource.resume();

  createFileHash(fileHashSource, function(err, hash) {
    if (err) {
      return reject(err);
    }
    const fileName = path.join(directory, version + '_' + hash.slice(0, 8) + '.zip');
    const writeStream = fs.createWriteStream(fileName);
    writeStream.on('error', function(err) {
      return reject(err);
    });
    writeStream.on('finish', function() {
      return resolve();
    });
    writeSource.pipe(writeStream);
  });
}

function createFileHash(readStream, next) {
  const hash = crypto.createHash('sha1');
  hash.setEncoding('hex');
  hash.on('error', function(err) {
    return next(err);
  });
  hash.on('finish', function(data) {
    return next(null, hash.read());
  });
  readStream.pipe(hash);
}
You could use the async module (not tested but should work):
async.waterfall([
  function(done) {
    var hash = crypto.createHash('sha1');
    hash.setEncoding('hex');
    file.on('error', function(err) {
      done(err);
    });
    file.on('end', function(data) {
      hash.end(); // flush the hash before reading it
      done(null, hash.read());
    });
    file.pipe(hash);
  },
  function(hash, done) {
    var fileName = path.join(config.storagePath, hash),
        stream = fs.createWriteStream(fileName);
    stream.on('error', function(err) {
      done(err);
    });
    stream.on('finish', function() {
      done(null);
    });
    file.pipe(stream);
  }
], function (err) {
  console.log("Everything is done!");
});
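One caveat: by the time the second waterfall step runs, file has already been fully consumed by the hash pipe, so piping it again writes an empty file. The PassThrough tee shown in the answer above is still needed to feed both steps.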
At the moment I'm running this task:
var skip = 0;
var limit = 5;

gulp.task('add coordinates to visits', function(done) {
  (function recurse() {
    Visit.find({})
      .skip(skip)
      .limit(limit)
      .populate('zone')
      .exec(function cb(err, visits) {
        if (err) {
          throw err;
        }
        if (visits.length === 0) {
          return;
        }
        async.each(visits, function iterateEvents(visit, next) {
          if (!visit.zone) {
            return next();
          } else if (!visit.coordinates.lat || !visit.coordinates.lng) {
            visit.coordinates = {
              lat: visit.zone.geo.coordinates.lat,
              lng: visit.zone.geo.coordinates.lng
            };
          }
          visit.save(next);
        }, function cb(err) {
          if (err) {
            throw err;
          }
          skip += limit;
          setTimeout(recurse, 1000);
        });
      });
  })();
});
But I'm sure there must be a more elegant and optimal method than using skip, limit, and setTimeout. Is there some Mongo or Mongoose method for running update tasks like this?
Based on our conversation in the comments, it seems like Mongoose's query stream might be what you are looking for:
var stream = Visit.find().populate('zone').stream();

stream.on('data', function processDoc(visit) {
  var self = this;
  if (visit.zone && (!visit.coordinates.lat || !visit.coordinates.lng)) {
    self.pause();
    visit.update({
      coordinates: {
        lat: visit.zone.geo.coordinates.lat,
        lng: visit.zone.geo.coordinates.lng
      }
    }, function(err, result) {
      if (err) { console.log(err); }
      self.resume();
    });
  }
});
stream.on('error', function(err) {
  console.log('error', err);
});
stream.on('close', function() {
  console.log('closed');
});
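For what it's worth, newer Mongoose versions deprecate Query#stream() in favor of Query#cursor(). An equivalent sketch using the cursor's eachAsync, which waits for each returned promise before reading the next document (depending on your Mongoose version, visit.update may need to be visit.updateOne):
Visit.find().populate('zone').cursor()
  .eachAsync(function(visit) {
    if (visit.zone && (!visit.coordinates.lat || !visit.coordinates.lng)) {
      // returning the promise makes eachAsync wait before moving on
      return visit.update({
        coordinates: {
          lat: visit.zone.geo.coordinates.lat,
          lng: visit.zone.geo.coordinates.lng
        }
      }).exec();
    }
  })
  .then(function() { console.log('closed'); })
  .catch(function(err) { console.log('error', err); });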
Hi, here is some Node.js code:
console.log("Start", new Date().getTime());
var fs = require('fs');
for (var i = 1; i < 10; i++) {
fs.readFile("file1.zip",function(err, data) {
if(err)
console.log("read error: ", err);
else {
fs.writeFile(__dirname + "/file2.zip", data , function(err) {
if(err) {
console.log("write error: ", err);
}
});
}
});
};
console.log("Finished", new Date().getTime());
I want to log the start time and the finish time when all the work is done, but the second log seems to fire too early.
Instead of console.log("Finished", new Date().getTime()); you might want to try:
process.on('exit', function () {
  console.log("Finished", new Date().getTime());
});
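Note that the 'exit' handler must be synchronous: it runs once the event loop has drained, which is exactly why it fires only after all the pending reads and writes have finished.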
The second log function is called too early because of node's async behavior. You have to put it inside the last callback:
console.log("Start", new Date().getTime());
var fs = require('fs');
for (var i = 1; i < 10; i++) {
fs.readFile("file1.zip",function(err, data) {
if(err) {
console.log("read error: ", err);
} else {
fs.writeFile(__dirname + "/file2.zip", data , function(err) {
if(err) {
console.log("write error: ", err);
}
// Put it here
console.log("Finished", new Date().getTime());
});
}
});
};
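Keep in mind the loop runs nine times, so the line above logs "Finished" once per write. To log it only once, after the last callback completes, a simple counter works; a sketch using the same file names as above:
console.log("Start", new Date().getTime());
var fs = require('fs');

var pending = 9; // one per loop iteration
function done() {
  // only the last callback to finish prints the timestamp
  if (--pending === 0) {
    console.log("Finished", new Date().getTime());
  }
}

for (var i = 1; i < 10; i++) {
  fs.readFile("file1.zip", function(err, data) {
    if (err) {
      console.log("read error: ", err);
      return done();
    }
    fs.writeFile(__dirname + "/file2.zip", data, function(err) {
      if (err) {
        console.log("write error: ", err);
      }
      done();
    });
  });
}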