CouchDB throws conflict error after immediate update - couchdb

I'm using CouchDB 1.5 and trying to fix some values in documents with a rather simple request. I simply get a document, modify a value in it and then put it back immediately. Given that my database has low usage, I don't expect this simple operation to produce a conflict. And yet, the 85 documents belonging to 85 different users all fail to update with conflict errors, with no apparent reasons.
Here's the code I'm using:
var _ = require('lodash');
var PouchDB = require('pouchdb');
var couchdbUrl = 'https://USER:PASS#DOMAIN.COM';
var usersDb = new PouchDB(`${couchdbUrl}/_users`, {
skip_setup: true
});
usersDb.query('faulty_users/object_username', {
include_docs: true
})
.then((userDocs) => {
userDocs.rows
.forEach(function(userDoc) {
userDb = new PouchDB(`${couchdbUrl}/user%2F${userDoc.doc.hoodieId}`);
userDb.get('accountvalues/default', {
conflicts: true
})
.then((doc) => {
console.log(doc._id, doc._rev, doc._conflicts);
doc.values.accountValues.username = userDoc.doc.name.replace(/^user\//, '');
userDb.put(doc)
.catch((e) => {
console.log(userDoc.doc.hoodieId, e);
});
});
});
});
I've read and read again the documentation of PouchDB and I can't find what I'm doing wrong. I hope the error will be pretty obvious to someone here :-)

Related

Mocha: Promises timing out no matter the time limit

I'm working on an interview project for which I have to add an endpoint that lets me POST an array of products (listings) and it should create them (MongoDB + Mongoose) or update them accordingly. The problem is I'm clearly not dealing with Promises properly and I'm getting a timeout on my test.
Here's the spec:
it.only('should create listings or update them if they already exist, incrementing the quantity with the difference ' +
'between the current sold_quantity and the initial_sold_quantity', (done) => {
var iPhone = { ... };
var samsung = { ... };
request(app).post('/listings/myEndpoint').send([iPhone, samsung]).expect(200, {
created: 1,
updated: 1
}, done);
});
exports.myEndpoint = (req, res) => {
var listings = req.body;
var created, updated = 0;
listings.forEach(reqListing => {
Listing.findOne({ listing_id: reqListing.listing_id })
.then(foundListing => {
if (!foundListing) {
var newListing = reqListing;
newListing.initial_sold_quantity = newListing.sold_quantity;
Listing.create(newListing);
created++;
} else {
var newQuantity = reqListing.sold_quantity - foundListing._doc.initial_sold_quantity;
if (foundListing._doc.quantity != newQuantity) {
foundListing._doc.quantity = newQuantity;
foundListing.save();
updated++;
}
}
});
return {
created: created,
updated: updated
};
});
};
THINGS I'VE TRIED:
Giving it more time. I tried changing the default timeout for Mocha tests but it doesn't really matter if it's 2 seconds or 20, it'll still timeout.
Isolating the update vs the creation. Really doesn't matter either if I'm only updating a product or if I'm only creating one, it'll still timeout.
Removing the logic. As far as I've checked it doesn't really matter what happens inside the if/else blocks because it'll still give me a timeout. So even if the code looks like this:
exports.myEndpoint = (req, res) => {
var listings = req.body;
var created, updated = 0;
listings.forEach(reqListing => {
Listing.findOne({ listing_id: reqListing.listing_id })
.then(foundListing => {
if (!foundListing) {
console.log("creating");
} else {
console.log("updating");
}
});
return {
created: created,
updated: updated
};
});
};
it'll still timeout.
After asking some questions in the Nodeiflux Discord server I managed to find a solution, maybe not the prettiest because it doesn't make use of async/await but I'm not supposed to change the style of the project too much so I'll leave it without async/await.
First, the silly problem which came after fixing the post's question:
var created = 0, updated = 0;
instead of not initializing created.
Second, making use of forEach with Promises inside didn't make too much sense because it would discard whatever was returning inside so I put the return outside the forEach clause and changed the forEach iteration for a map instead. I also made use of Promise.all() to get all promises to resolve before returning:
exports.upsert = (req, res) => {
var listings = req.body;
var created = 0, updated = 0;
var allowedArrayLength = 50;
return Promise.all(listings.map(reqListing =>
Listing.findOne({
listing_id: reqListing.listing_id
})
.then(foundListing => {
if (!foundListing) {
createListing(reqListing);
created++;
} else {
var prevQuantity = foundListing.quantity;
if (updateListing(reqListing, foundListing).quantity > prevQuantity) {
updated++;
}
}
})
)).then(() => ({ created: created, updated: updated }));
};

MongoError: pool destroyed when fetching all data without conditions

I am new to mongoDb, as I am trying to query from different collection and in order to do that, when I am fetching data from category collection I mean when I am running select * from collection it is throwing error, MongoError: pool destroyed.
As per my understanding it is because of some find({}) is creating a pool and that is being destroyed.
The code which I am using inside model is below,
const MongoClient = require('mongodb').MongoClient;
const dbConfig = require('../configurations/database.config.js');
export const getAllCategoriesApi = (req, res, next) => {
return new Promise((resolve, reject ) => {
let finalCategory = []
const client = new MongoClient(dbConfig.url, { useNewUrlParser: true });
client.connect(err => {
const collection = client.db(dbConfig.db).collection("categories");
debugger
if (err) throw err;
let query = { CAT_PARENT: { $eq: '0' } };
collection.find(query).toArray(function(err, data) {
if(err) return next(err);
finalCategory.push(data);
resolve(finalCategory);
// db.close();
});
client.close();
});
});
}
When my finding here is when I am using
let query = { CAT_PARENT: { $eq: '0' } };
collection.find(query).toArray(function(err, data) {})
When I am using find(query) it is returning data but with {} or $gte/gt it is throwing Pool error.
The code which I have written in controller is below,
import { getAllCategoriesListApi } from '../models/fetchAllCategory';
const redis = require("redis");
const client = redis.createClient(process.env.REDIS_PORT);
export const getAllCategoriesListData = (req, res, next, query) => {
// Try fetching the result from Redis first in case we have it cached
return client.get(`allstorescategory:${query}`, (err, result) => {
// If that key exist in Redis store
if (false) {
res.send(result)
} else {
// Key does not exist in Redis store
getAllCategoriesListApi(req, res, next).then( function ( data ) {
const responseJSON = data;
// Save the Wikipedia API response in Redis store
client.setex(`allstorescategory:${query}`, 3600, JSON.stringify({ source: 'Redis Cache', responseJSON }));
res.send(responseJSON)
}).catch(function (err) {
console.log(err)
})
}
});
}
Can any one tell me what mistake I am doing here. How I can fix pool issue.
Thanking you in advance.
I assume that toArray is asynchronous (i.e. it invokes the callback passed in as results become available, i.e. read from the network).
If this is true the client.close(); call is going to get executed prior to results having been read, hence likely yielding your error.
The close call needs to be done after you have finished iterating the results.
Separately from this, you should probably not be creating the client instance in the request handler like this. Client instances are expensive to create (they must talk to all of the servers in the deployment before they can actually perform queries) and generally should be created per running process rather than per request.

MongoDB & NodeJS: the options [servers] is not supported && the options [caseTranslate] is not supported

Here, I am currently learning how to use MongoDB with NodeJS, but here is 1h or 2h blocked on internet looking now for a solution with 2 errors that I have and impossible to find the problem here is my code (Sorry, I am French, in the console.log, the sentences are in fr):
var MongoClient = require('mongodb').MongoClient
const mongodbUrl = "mongodb://localhost:27017/"
let client = new MongoClient(mongodbUrl, {useNewUrlParser: true})
let essai
var randomstring = '';
test()
function test(){
var chars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXTZabcdefghiklmnopqrstuvwxyz-*/+.,?;:!§ù%$*£¤¨^=+})°]à#ç^_\è`-|(['{#é~&²";
var string_length = 14;
randomstring = '';
for (var i=0; i<string_length; i++) {
var rnum = Math.floor(Math.random() * chars.length);
randomstring += chars.substring(rnum,rnum+1);
}
//console.log(randomstring)
CheckBDD(randomstring)
//setTimeout(1000)
}
function CheckBDD(randomstring){
client.connect((err, client) => {
if (err) throw err
//console.log("Connexion à la bdd avec succès")
const dbName = "wordlist"
let db = client.db("wordlist")
var collection = 'wordlist';
db.collection('wordlist').findOne({wordlist: randomstring}, (err, essai) => {
if (err) throw err
//console.log(essai)
if (essai == null)
{
db.collection('wordlist').insertOne( { wordlist: randomstring} );
console.log("Valeur inconnue:" + randomstring)
} else {
console.log("Valeur déjà connue:" + randomstring)
}
test()
});
});
}
Errors:
the options [servers] is not supported
the options [caseTranslate] is not supported
From what I found, it will come from the connection options but... There you go. I hope you can help me thank you very much in advance,
Owzlaa
I just started having the exact same "errors" appearing in some of my database requests. It isn't all of the time though. I had exactly the same call in two different places in my code: one inside my server's connection callback and another in a module used inside of a route controller. The one inside my server's connection callback did not trigger the error whereas the other one did.
Doing a console log on the client object showed exactly the same options. So what was the difference? The server connection callback was only a "read operation" whereas the one in the other module eventually calls a method to perform an update on the collection itself.
I modified the code to have a conditional in it so when one case is true it modifies the collection and when the other case is true it does not. Guess what? The second case, where there's no modification to the collection, does not trigger the error. It would seem that doing some sort of write causes this thing to trigger.
I am not sure I want to call it an error. It's more like a warning because it is non-blocking (at least in my case). It also doesn't seem to trigger any of my error handling.
I still don't have a solution for making it go away other than perhaps we have to make copies of the database object... I haven't tried it yet though.
// Express code : NOTE - client is the MongoClient exported from somewhere else
let db;
client.connect()
.then(c => {
if (c) {
db = c.db(DATABASE_NAME);
req.client = c;
return verifiers.verifyForFiles(db);
} else {
req.errStatus = 500;
throw new Error('Database error');
}
})
.then(checkResult => {
const { hasCollection, hasSchema } = checkResult;
if (!hasCollection) {
/** triggers errors */
return collections.createFilesCollection(db);
} else {
if (hasSchema) {
/** does not trigger errors */
return;
} else {
/** triggers errors */
return collections.updateFilesCollection(db);
}
}
})
.then(() => next())
.catch(next);
// more Express stuff

Yelp API - Too Many Request Per Second in Node.js

Experts,
It seems that yelp recently changed their REST API to limit the amount of requests you can make per second. I've tried using setTimeout and various sleep functions with no success. I believe it has to do with setTimeout although. I only get a few responses back and a slew of TOO_Many_Requests_Per_Second. Also, I'm using the Node.js Fusion API Client. Any help would be appreciated. Thanks in advance.
Here is the code below as I'm getting the Yelp URL from my Parse Server, and I want to get the Yelp Business Name response:
'use strict';
var Parse = require('parse/node');
Parse.initialize("ServerName");
Parse.serverURL = 'ParseServerURL';
const yelp = require('yelp-fusion');
const client = yelp.client('Key');
var object;
var Business = Parse.Object.extend("Business");
var query = new Parse.Query(Business);
query.notEqualTo("YelpURL", "Bus");
query.find({
success: function(results) {
for (var i = 0; i < results.length; i++) {
object = results[i];
//I belive a setTimeout block needs to come somewhere in here. Tried many places but with no success.
client.business(object.get('YelpURL')).then(response => {
console.log(response.jsonBody.name);
}).catch(e => {
console.log(e);
});
}
},
error: function(error) {
alert("Error" + error.code + " " + error.message);
}
});
Use query each, which will iterate over each object and perform the requests in a sequence rather than all more or less at once:
query.each(
function(object) {
return client.business(object.get('YelpURL')).then(response => {
console.log(response.jsonBody.name);
});
}
).catch( e => {
res.json('error');
});
One cool thing about this is that it'll automatically propagate the error from client.bussiness() call if there is one to the catch block at the bottom. It will iterate over the objects one at a time, and since we "return" the results of the client.business() call, it's not going to move on to the next object until you've gotten the response. query.each() will also iterate over every object in a collection that meets your query criteria, so you don't have to worry about limits.
Im not quite sure if this is what your looking for, but you can retrieve up to 50 records per request, in the example below will return 20 business names within that zip code, or you can tweak it a little to return all that data for those businesses, does this help:
app.get('/:id', (req, res) => {
let zipcode = req.params.id;
let names = [];
let searchRequest = {
term: 'Business', // or for ex. food
limit: 20, //set the number of responses you want up to 50
radius: 20000, // 20 miles
location: zipcode
};
client.search(searchRequest)
.then(response => {
response.jsonBody.businesses.map(elem => {
names.push(elem.name);
})
res.json(names); // business names only
//or
//res.json(response.jsonBody.businesses) //all details included with business name
}).catch(e => {
res.json('error');
});
})

MongoDB table records are not getting persisted with ES6 Promise.ALL

I am new to MongoDB and working on small tutorial to create associations between tables in MongoDB.
As per the tutorial, we need to create association between three tables.
1. User table
2. BlogPost table
3. Comment table
User may have multiple blogpost and a blogpost may have list of comments and comment is also mapped with user.
User -> BlogPost -> Comment
I have written following test case to create three records and to test the association :
const assert = require('assert');
const User = require('../src/users');
const BlogPost = require('../src/blogPost');
const Comment = require('../src/comment');
describe('Association', () => {
let ascUer, blogPost, comment;
beforeEach( (done) => {
ascUer = new User({name:'associationUser'});
blogPost = new BlogPost({title:'JS is great',content:'Yep, It is !!'});
comment = new Comment({content:'Congratulation for the great poost !!!'});
ascUer.blogPosts.push(blogPost);
blogPost.comments.push(comment);
comment.user = ascUer;
/*
// THIS IS NOT WORKING
Promise.all([ascUer.save(),blogPost.save(),comment.save()])
.then(() => done());
*/
ascUer.save()
.then(() => {
console.log('***********Association user saved ***************');
blogPost.save()
.then(() => {
console.log('***********Blogpost saved ***************');
comment.save()
.then(() => {
console.log('***********Comment saved ***************');
done();
});
});
});
});
it.only('Saves a relation between a user and a blogpost', (done) => {
User.findOne({name:'associationUser'})
.populate('blogPosts')
.then((user) => {
console.log(user);
done();
});
});
});
I am facing weird behavior while running the test case with help of mocha and nodeJs. The test case is getting executed successfully but only "user" table was created while using "Promise.All" feature of ES6. I have commented out "Promise.All" in above code snippet and one by one saving each of three records.
Below image show result of test case execution:
Below image shows snap of RoboMongo tool where only "user" table is present :
Updated : I also have created one more test case file "connection_helper.js" where I have written "beforeEach" block to make a DB connection and to drop all the schema before execution of any test cases.
Below image shows the project directory structure :
And below is the code written in "connection_helper" js file :
const mongoose = require('mongoose');
let isSchemaDropped = false;
mongoose.Promise = global.Promise;
before((done) => {
mongoose.connect('mongodb://localhost:27017/users_test');
mongoose.connection
.once('open', () => {
console.log('Connected to Mongose DB !!');
done();
})
.on('error', (error) => {
console.warn('Error while connecting to Mongose DB !!',error);
});
});
beforeEach((done) => {
if(!isSchemaDropped){
isSchemaDropped = true;
console.log("Dropping database schema !!!");
try{
const {users,comments,blogposts,employees} = mongoose.connection.collections;
users.drop(() => {
comments.drop(() => {
blogposts.drop(() => {
employees.drop(() => {
console.log("**********************************************");
console.log("**********************************************");
console.log("******Dropped All Schema******");
console.log("**********************************************");
console.log("**********************************************");
});
});
});
});
}catch(e){
console.log(e);
}
}else{
//console.log("Database schema is already dropped before !!!");
}
done();
});
Why ES6 "Promise.All" is not working properly in my system ? Any suggestions appreciated. You may also refer my code over GitHub : https://github.com/shahgunjan07/MongoDBTutorial.git for further details.
I think your issue is related to your beforeEach, which isn't properly waiting for all collections to be dropped before continuing with the tests. This could result in collections being dropped while your tests are running, causing unexpected behaviour.
Here's an alternative implementation:
beforeEach(() => {
if (isSchemaDropped) return;
isSchemaDropped = true;
console.log("Dropping database schema !!!");
const {
users,
comments,
blogposts,
employees
} = mongoose.connection.collections;
return Promise.all([
users.drop(),
comments.drop(),
blogposts.drop(),
employees.drop()
]);
});

Resources