Etherpad plugin development: Document update conflict when using db.findKeys() - CouchDB

I am currently developing an Etherpad Lite plugin, using Etherpad Lite (1.8.14) with a CouchDB (3.2.2) database. As a first step I wrote a small test function and connected it to the server-side hook 'expressCreateServer':
const db = require('ep_etherpad-lite/node/db/DB');

function expressCreateServer(hook, args, cb) {
  args.app.get(`/test`, async (req, res) => {
    await db.init();
    const keys = await db.findKeys(`pad:*:revs:*`, null);
    res.json(keys);
  });
  return cb();
}

exports.expressCreateServer = expressCreateServer;
I can use db.get() and db.set() without any problems, but as soon as I call db.findKeys(), Etherpad terminates and throws the following error:
[2022-08-31 08:41:43.480] [ERROR] server - Error: Error: Document update conflict.
at handleError (/opt/etherpad-lite/src/node_modules/ueberdb2/databases/couch_db.js:26:17)
at /opt/etherpad-lite/src/node_modules/ueberdb2/databases/couch_db.js:127:9
at Request._callback (/opt/etherpad-lite/src/node_modules/nano/lib/nano.js:174:9)
at Request.self.callback (/opt/etherpad-lite/src/node_modules/request/request.js:185:22)
at Request.emit (events.js:375:28)
at Request.emit (domain.js:470:12)
at Request.<anonymous> (/opt/etherpad-lite/src/node_modules/request/request.js:1154:10)
at Request.emit (events.js:375:28)
at Request.emit (domain.js:470:12)
at IncomingMessage.<anonymous> (/opt/etherpad-lite/src/node_modules/request/request.js:1076:12)
This error always occurs exactly once, the first time Etherpad and the CouchDB database are used together.
Does anyone have any idea what the problem is?
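For reference, a get/set round trip in the same hook works fine for me. A minimal sketch (the route path and the scratch key 'ep_myplugin:test' are placeholders I made up for testing):
// Sketch of the calls that do work without issues; the route path and the
// scratch key 'ep_myplugin:test' are placeholders, not real pad keys.
args.app.get('/test-getset', async (req, res) => {
  await db.set('ep_myplugin:test', {hello: 'world'});
  const value = await db.get('ep_myplugin:test');
  res.json(value);
});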

Related

Getting the widths of a lot of images causes ENOTFOUND in Node.js

I have several sources of lists of images (Flickr, images stored on S3, Imgur, etc.).
I want to get the dimensions of these images.
I use Node and https://github.com/nodeca/probe-image-size to go over each URL, get the width of each image, and count how many images there are at each width, via the following code:
const _ = require('lodash');
const probe = require('probe-image-size');

// image_urls: array of image URL strings, gathered from the various sources.
// The code below runs inside an async function.
const probes = [];
_.forEach(image_urls, url => {
  probes.push(probe(url));
});
const results = await Promise.all(probes);

// Tally image widths into 10px buckets.
const widths = {};
_.forEach(results, result_of_image => {
  const width = Math.floor(result_of_image.width / 10) * 10;
  if (!widths[width]) {
    widths[width] = 1;
  } else {
    widths[width]++;
  }
});
Even though all URLs are accessible, I sometimes get getaddrinfo ENOTFOUND with the following stack trace:
at ClientRequest.req.once.err (/image_script/node_modules/got/index.js:73:21)
at Object.onceWrapper (events.js:293:19)
at emitOne (events.js:101:20)
at ClientRequest.emit (events.js:191:7)
at TLSSocket.socketErrorListener (_http_client.js:358:9)
at emitOne (events.js:96:13)
at TLSSocket.emit (events.js:191:7)
at connectErrorNT (net.js:1031:8)
at _combinedTickCallback (internal/process/next_tick.js:80:11)
at process._tickDomainCallback (internal/process/next_tick.js:128:9)
I suspect that because the URL list is very large (in the thousands), Node ends up consuming all of the system's resources and things stop working properly (this is a guess).
Is there a better way to do the above, or a way to give Node some kind of connection pool?
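One pattern that may help is to cap the number of probes in flight instead of firing them all at once. A minimal sketch, assuming the p-limit package (a CommonJS version, e.g. v2/v3) and a concurrency cap of 20 picked arbitrarily:
const pLimit = require('p-limit');
const probe = require('probe-image-size');

// Allow at most 20 probe requests in flight at any time; the exact number
// is a guess and should be tuned to what the host and remote servers tolerate.
const limit = pLimit(20);

async function countWidths(imageUrls) {
  const results = await Promise.all(
    imageUrls.map(url => limit(() => probe(url)))
  );
  const widths = {};
  for (const info of results) {
    const bucket = Math.floor(info.width / 10) * 10;
    widths[bucket] = (widths[bucket] || 0) + 1;
  }
  return widths;
}
Keeping the concurrency bounded avoids opening thousands of sockets and DNS lookups at once, which is a common cause of spurious ENOTFOUND errors under load.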

Neo4j's NPM component blows up without error

I'm using the thingdom/node-neo4j module like this:
var neo = require('neo4j');
var db = new neo.GraphDatabase(...);
but when I call the cypher method with some invalid script, instead of returning an error (allowing me to handle it), it blows up:
db.cypher('// invalid cypher script', (err, res) => {
  if (err) console.log('ERROR: ' + err);
  console.log(res);
});
What I get is a stack trace that looks like this:
/Users/ekkis/dev/test/inc/node_modules/neo4j/lib-new/errors.js:20
Error.captureStackTrace(this, this.constructor);
^
TypeError: Error.captureStackTrace is not a function
at ClientError.Error [as constructor] (/Users/ekkis/dev/test/inc/node_modules/neo4j/lib-new/errors.js:20:13)
at new ClientError (/Users/ekkis/dev/test/inc/node_modules/neo4j/lib-new/errors.js:81:48)
at Function.__dirname.Error.Error._fromObject (/Users/ekkis/dev/test/inc/node_modules/neo4j/lib-new/errors.js:70:14)
at /Users/ekkis/dev/test/inc/node_modules/neo4j/lib-new/GraphDatabase.js:302:25
at Request._callback (/Users/ekkis/dev/test/inc/node_modules/neo4j/lib-new/GraphDatabase.js:92:20)
at Request.self.callback (/Users/ekkis/dev/test/inc/node_modules/request/request.js:187:22)
at emitTwo (events.js:106:13)
at Request.emit (events.js:191:7)
at Request.<anonymous> (/Users/ekkis/dev/test/inc/node_modules/request/request.js:969:12)
at emitNone (events.js:91:20)
at IncomingMessage.emit (events.js:185:7)
at endReadableNT (_stream_readable.js:974:12)
at _combinedTickCallback (internal/process/next_tick.js:74:11)
at process._tickCallback (internal/process/next_tick.js:98:9)
I'm using Node v7.3.0 and have posted an issue with the maintainer here: https://github.com/thingdom/node-neo4j/issues/214 but so far there is no solution. Is anyone else running into this? Any solutions?
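Not an answer to the captureStackTrace crash itself, but for completeness, the v2 ("lib-new") branch of node-neo4j also accepts an object form of the cypher call (to the best of my knowledge of its README); a minimal sketch, with a made-up query and params:
// Sketch of the object form of db.cypher as I understand the v2 API;
// the query and params here are placeholders, not from the question above.
db.cypher({
  query: 'MATCH (u:User {email: {email}}) RETURN u',
  params: {email: 'alice@example.com'},
}, function (err, results) {
  if (err) {
    console.error('cypher error:', err);
    return;
  }
  console.log(results);
});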

gcloud error: ApiError: Not Found at new util.ApiError

Anyone familiar with this gcloud exception:
ApiError: Not Found
at new util.ApiError (/site/node_modules/gcloud/lib/common/util.js:128:10)
at Object.parseHttpRespBody (/site/node_modules/gcloud/lib/common/util.js:206:30)
at Object.handleResp (/site/node_modules/gcloud/lib/common/util.js:146:18)
at /site/node_modules/gcloud/lib/common/util.js:447:12
at Request.onResponse [as _callback] (/site/node_modules/gcloud/node_modules/retry-request/index.js:120:7)
at Request.self.callback (/site/node_modules/request/request.js:187:22)
at Request.emit (events.js:98:17)
at Request.<anonymous> (/site/node_modules/request/request.js:1044:10)
at Request.emit (events.js:95:17)
at IncomingMessage.<anonymous> (/site/node_modules/request/request.js:965:12)
at IncomingMessage.emit (events.js:117:20)
at _stream_readable.js:944:16
at process._tickDomainCallback (node.js:492:13)
It appears only in production (of course), and currently it appears consistently. It used to appear only periodically, and the assumption was that it was a glitch on the gCloud side, since it could not be reproduced locally. It is related to the part of the code that uses the simplest gcloud library method, bucket.upload, with no parameters other than the file ... Here is the current function which does it:
function uploadToGoogleCloud(filePath, makePublic) {
  var gstorage = gcloud.storage({
    projectId: EXAMPLE_projectId,
    credentials: EXAMPLE_credentials,
  });
  var spBucket = Promise.promisifyAll(gstorage.bucket(EXAMPLE_bucket));
  return spBucket.uploadAsync(filePath).then(function(file) {
    if (makePublic) {
      var fileAsync = Promise.promisifyAll(file);
      return fileAsync.makePublicAsync().then(function() {
        return file;
      });
    }
    return file;
  });
}
Any feedback is greatly appreciated.
The error is a bit obscure, but it was accurate in my case. I got this error when, in some cases, I was trying to write to a BigQuery table that did not exist (I accidentally passed "undefined" as the table name); I suspect there is some 404 error coercion going on under the hood.
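Building on that, one way to surface the 404 case instead of letting it crash opaquely is to inspect the error when the upload promise rejects. A minimal sketch (the file path is made up, and it assumes the gcloud ApiError exposes the HTTP status as err.code, which it did in the versions I have looked at):
// Hypothetical error handling around the promisified upload above; the
// assumption that err.code carries the HTTP status (404 = Not Found) is mine.
uploadToGoogleCloud('./example-report.pdf', true)
  .then(function(file) {
    console.log('uploaded as', file.name);
  })
  .catch(function(err) {
    if (err && err.code === 404) {
      // Most likely the bucket (or, as in the answer above, a table name
      // that ended up as "undefined") does not exist.
      console.error('Not Found - check the bucket/object name:', err.message);
    } else {
      throw err;
    }
  });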

window.addEventListener does not work for empty windows

JSDOM version : 0.6.5
Nitrous.io Cloud IDE environment
Issue observed:
Creating an empty window and then attaching an event handler fails with the following exception trace:
TypeError: Cannot read property 'Node' of undefined
at Object.DOMWindow.addEventListener (/home/action/workspace/Projects/nodejs/node_modules/jsdom/lib/jsdom/browser/index.js:180:10)
at Request._callback (/home/action/workspace/Projects/nodejs/page_test_demo.js:22:14)
at Request.self.callback (/home/action/workspace/Projects/nodejs/node_modules/request/index.js:148:22)
at Request.EventEmitter.emit (events.js:98:17)
at Request.<anonymous> (/home/action/workspace/Projects/nodejs/node_modules/request/index.js:886:14)
at Request.EventEmitter.emit (events.js:117:20)
at IncomingMessage.<anonymous> (/home/action/workspace/Projects/nodejs/node_modules/request/index.js:837:12)
at IncomingMessage.EventEmitter.emit (events.js:117:20)
at _stream_readable.js:910:16
at process._tickCallback (node.js:415:13)
Here is the code snippet in question:
var window = jsdom.createWindow();
console.log(window);
window.addEventListener('load', function() {
  console.log('Load event completed');
});
window.document = jsdom.jsdom(body);
Can anyone tell me what I am missing here?
The issue you mention in your comment was fixed 7 months ago, so as long as you have a recent version of jsdom, you should be able to do this:
var jsdom = require('jsdom');
var body = '<html><body></body></html>';
var document = jsdom.jsdom(body);
var window = document.createWindow();
window.addEventListener('load', function() {
  console.log('Load event completed');
});

Zombie.js "assert is not defined" for multiple visits?

I'm setting up integration testing with Zombie.js and Mocha, and running into the puzzling problem that only the first browser.visit() call seems to succeed. My specs look like this:
browser = new Browser site: "http://localhost:101010"

describe '/docs', ->
  ['app', 'server', 'timetable', 'util'].forEach (file) ->
    describe "/#{file}.html", -> it "documents #{file}.coffee", (done) ->
      browser.visit "/docs/#{file}.html", ->
        browser.text('title').should.equal "#{file}.coffee"
        do done
The first of those tests, which loads /docs/app.html, passes without incident. However, all subsequent tests fail, producing a stack trace like the following:
ReferenceError: assert is not defined
at Object.HTML5Parser.phases.inBody.startTagBody (/home/$USER/projects/timetable/node_modules/zombie/node_modules/html5/lib/html5/parser.js:1828:4)
at Object.HTML5Parser.phases.base.processStartTag (/home/$USER/projects/timetable/node_modules/zombie/node_modules/html5/lib/html5/parser.js:65:40)
at EventEmitter.Parser.do_token (/home/$USER/projects/timetable/node_modules/zombie/node_modules/html5/lib/html5/parser.js:2436:21)
at EventEmitter.<anonymous> (/home/$USER/projects/timetable/node_modules/zombie/node_modules/html5/lib/html5/parser.js:2457:30)
at EventEmitter.emit (events.js:117:20)
at EventEmitter.emitToken (/home/$USER/projects/timetable/node_modules/zombie/node_modules/html5/lib/html5/tokenizer.js:99:9)
at emit_current_token (/home/$USER/projects/timetable/node_modules/zombie/node_modules/html5/lib/html5/tokenizer.js:873:3)
at tag_name_state (/home/$USER/projects/timetable/node_modules/zombie/node_modules/html5/lib/html5/tokenizer.js:400:4)
at EventEmitter.<anonymous> (/home/$USER/projects/timetable/node_modules/zombie/node_modules/html5/lib/html5/tokenizer.js:941:10)
at EventEmitter.emit (events.js:95:17)
at EventEmitter.HTML5Tokenizer.pump (/home/$USER/projects/timetable/node_modules/zombie/node_modules/html5/lib/html5/tokenizer.js:932:11)
at EventEmitter.HTML5Tokenizer.tokenize (/home/$USER/projects/timetable/node_modules/zombie/node_modules/html5/lib/html5/tokenizer.js:89:23)
at EventEmitter.Parser.parse (/home/$USER/projects/timetable/node_modules/zombie/node_modules/html5/lib/html5/parser.js:2391:17)
at HtmlToDom.appendHtmlToElement (/home/$USER/projects/timetable/node_modules/zombie/node_modules/jsdom/lib/jsdom/browser/htmltodom.js:91:50)
at Object.innerHTML (/home/$USER/projects/timetable/node_modules/zombie/node_modules/jsdom/lib/jsdom/browser/index.js:481:17)
at Object.core.HTMLDocument.write (/home/$USER/projects/timetable/node_modules/zombie/node_modules/jsdom/lib/jsdom/level2/html.js:406:22)
at Object.exports.jsdom (/home/$USER/projects/timetable/node_modules/zombie/node_modules/jsdom/lib/jsdom.js:70:9)
at History._createDocument (/home/$USER/projects/timetable/node_modules/zombie/lib/zombie/history.js:174:22)
at /home/$USER/projects/timetable/node_modules/zombie/lib/zombie/history.js:126:30
at /home/$USER/projects/timetable/node_modules/zombie/lib/zombie/resources.js:147:16
at Request._callback (/home/$USER/projects/timetable/node_modules/zombie/lib/zombie/resources.js:335:16)
at Request.self.callback (/home/$USER/projects/timetable/node_modules/zombie/node_modules/request/main.js:120:22)
at Request.EventEmitter.emit (events.js:98:17)
at Request.<anonymous> (/home/$USER/projects/timetable/node_modules/zombie/node_modules/request/main.js:633:16)
at Request.EventEmitter.emit (events.js:95:17)
at IncomingMessage.<anonymous> (/home/$USER/projects/timetable/node_modules/zombie/node_modules/request/main.js:595:14)
at IncomingMessage.EventEmitter.emit (events.js:117:20)
at _stream_readable.js:872:14
at process._tickCallback (node.js:415:13)
Is Zombie's browser.visit() not intended to be called more than once, or is there some other issue here?
Editing to note versions:
Node.js v0.10.1
Mocha v1.8.1
Chai v1.5.0
Zombie.js v1.4.1
It seems this is a compatibility issue arising in Node versions >=0.10, as mentioned here: https://github.com/assaf/zombie/issues/487
The workaround mentioned at https://github.com/assaf/zombie/issues/487#issuecomment-15548684 solves my particular problem. I added the following code to my testi/common.coffee file immediately before importing Zombie:
do patchEventEmitterToHideMaxListenerWarning = ->
  return if global.eventEmitterPatched
  global.eventEmitterPatched = true

  events = require 'events'
  Old = events.EventEmitter
  events.EventEmitter = ->
    this.setMaxListeners(0)
  events.EventEmitter.prototype = Old.prototype
Having done so, all my tests pass without complaint. I expect that this issue will be patched up in future Zombie revisions, but for now the above hack renders it workable.
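For anyone not using CoffeeScript, a plain-JavaScript translation of the same monkey patch (same assumptions: run it once, before requiring Zombie) would look roughly like this:
// Rough JavaScript equivalent of the CoffeeScript patch above; it makes
// every EventEmitter allow an unlimited number of listeners.
(function patchEventEmitterToHideMaxListenerWarning() {
  if (global.eventEmitterPatched) return;
  global.eventEmitterPatched = true;

  var events = require('events');
  var Old = events.EventEmitter;
  events.EventEmitter = function () {
    this.setMaxListeners(0);
  };
  events.EventEmitter.prototype = Old.prototype;
})();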
