MarkLogic 9 + Roxy: can't connect to created database using Node.js

I'm trying out the Roxy deployer. The Roxy app was created using the default app-type. I set up a new MarkLogic 9 database and ran "ml local bootstrap" using the default ports (8040 and 8041).
Then I set up a Node application and tried the following (sample code from https://docs.marklogic.com/jsdoc/index.html):
var marklogic = require('marklogic');
var conn = {
  host: '192.168.33.10',
  port: 8040,
  user: 'admin',
  password: 'admin',
  authType: 'DIGEST'
};
var db = marklogic.createDatabaseClient(conn);
db.createCollection(
  '/books',
  {author: 'Beryl Markham'},
  {author: 'WG Sebald'}
)
.result(function(response) {
  console.log(JSON.stringify(response, null, 2));
}, function (error) {
  console.log(JSON.stringify(error, null, 2));
});
Running the script gave me an error like:
$ node test.js
{
  "message": "write document list: cannot process response with 500 status",
  "statusCode": 500,
  "body": "<error:error xsi:schemaLocation=\"http://marklogic.com/xdmp/error error.xsd\" xmlns:error=\"http://marklogic.com/xdmp/error\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n <error:code>XDMP-IMPMODNS</error:code>\n <error:name>err:XQST0059</error:name>\n <error:xquery-version>1.0-ml</error:xquery-version>\n <error:message>Import module namespace mismatch</error:message>\n <error:format-string>XDMP-IMPMODNS: (err:XQST0059) Import module namespace http://marklogic.com/rest-api/endpoints/config does not match target namespace http://marklogic.com/rest-api/endpoints/config_DELETE_IF_UNUSED of imported module /MarkLogic/rest-api/endpoints/config.xqy</error:format-string>\n <error:retryable>false</error:retryable>\n <error:expr/>\n <error:data>\n <error:datum>http://marklogic.com/rest-api/endpoints/config</error:datum>\n <error:datum>http://marklogic.com/rest-api/endpoints/config_DELETE_IF_UNUSED</error:datum>\n <error:datum>/MarkLogic/rest-api/endpoints/config.xqy</error:datum>\n </error:data>\n <error:stack>\n <error:frame>\n <error:uri>/roxy/lib/rewriter-lib.xqy</error:uri>\n <error:line>5</error:line>\n <error:column>0</error:column>\n <error:xquery-version>1.0-ml</error:xquery-version>\n </error:frame>\n </error:stack>\n</error:error>\n"
}
If I change the port to 8000 (the default app server, which inserts into the Documents database), the Node function executes correctly as expected. I'm not sure if I need to configure anything else on the Roxy-created app server so that it works with the Node.js application.
I'm also not sure where the "DELETE_IF_UNUSED" part of the error message is coming from. There doesn't seem to be any such text in the configuration files generated by Roxy.
Edit: When accessing 192.168.33.10:8040 via the browser, I get an XML response with a similar error:
<error:error xsi:schemaLocation="http://marklogic.com/xdmp/error error.xsd" xmlns:error="http://marklogic.com/xdmp/error" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
  <error:code>XDMP-IMPMODNS</error:code>
  <error:name>err:XQST0059</error:name>
  <error:xquery-version>1.0-ml</error:xquery-version>
  <error:message>Import module namespace mismatch</error:message>
  <error:format-string>XDMP-IMPMODNS: (err:XQST0059) Import module namespace http://marklogic.com/rest-api/endpoints/config does not match target namespace http://marklogic.com/rest-api/endpoints/config_DELETE_IF_UNUSED of imported module /MarkLogic/rest-api/endpoints/config.xqy</error:format-string>
  <error:retryable>false</error:retryable>
  <error:expr/>
  <error:data>
    <error:datum>http://marklogic.com/rest-api/endpoints/config</error:datum>
    <error:datum>http://marklogic.com/rest-api/endpoints/config_DELETE_IF_UNUSED</error:datum>
    <error:datum>/MarkLogic/rest-api/endpoints/config.xqy</error:datum>
  </error:data>
  <error:stack>
    <error:frame>
      <error:uri>/roxy/lib/rewriter-lib.xqy</error:uri>
      <error:line>5</error:line>
      <error:column>0</error:column>
      <error:xquery-version>1.0-ml</error:xquery-version>
    </error:frame>
  </error:stack>
</error:error>
If it matters, the MarkLogic version is 9.0-3.1, on a fresh install.
Any advice?

Based on the comments, it looks like the problem is that the Node.js Client API expects to talk to a REST API endpoint, but the default Roxy configuration is an MVC application. If you haven't already done anything major with your Roxy app, I'd remove it and create a new one with --app-type=rest.
$ ml new my-app --app-type=rest --server-version=9
$ ml local bootstrap
$ ml local deploy modules
Then try your Node app.
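As a quick sanity check (my suggestion, not part of the original answer), you can hit the REST API's ping endpoint before re-running the Node script; a REST-type app server should answer /v1/ping with a 2xx status rather than the XDMP-IMPMODNS error:
$ curl --anyauth -u admin:admin -i http://192.168.33.10:8040/v1/ping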

Related

Node modules with sub-directories: "Error parsing triggers: Cannot find module 'ibm-watson'"

I have a Firebase Cloud Function that calls IBM Watson to get a token. I'm updating it from the old username/password auth to the current IAM auth.
Here's the code from the IBM documentation:
const watson = require('ibm-watson');
const { IamAuthenticator } = require('ibm-watson/auth');

// to get an IAM Access Token
const authorization = new watson.AuthorizationV1({
  authenticator: new IamAuthenticator({ apikey: 'fakekey-1234' }),
});

authorization.getToken(function (err, token) {
  if (!token) {
    console.log('error: ', err);
  } else {
    // Use your token here
  }
});
When I run firebase deploy --only functions I get this error:
Error: Error parsing triggers: Cannot find module 'ibm-watson'
Require stack:
- /Users/TDK/LanguageTwo/functions/index.js
- /Users/TDK/.nvm/versions/node/v13.10.1/lib/node_modules/firebase-tools/lib/triggerParser.js
ibm-watson is installed in my /functions/node_modules directory.
I reinstalled ibm-watson, and for good measure I ran npm install in my functions directory. Plus I ran npm-check and updated all my node modules.
The specific line that causes the error is:
const watson = require('ibm-watson');
When I comment out that line the functions deploy without error. Unfortunately, the function doesn't run. :-)
This line does not cause the deploy error:
const { IamAuthenticator } = require('ibm-watson/auth');
I use IBM Watson in other Firebase Cloud Functions in the same index.js file. These lines from other functions don't cause deploy errors:
let TextToSpeechV1 = require('ibm-watson/text-to-speech/v1');
...
var LanguageTranslatorV3 = require('ibm-watson/language-translator/v3');
The problem seems to be that requiring the parent directory ibm-watson fails, but requiring the subdirectories of the parent directory works. Any suggestions?
This is as expected. If you take a look at the GitHub repo for ibm-watson - https://github.com/watson-developer-cloud/node-sdk - you will notice that there is no example requiring the top-level library. This stops you from pulling in the full library when you only need a small sub-component.
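To illustrate (a minimal sketch, not from the original answer), the fix is to require only the sub-modules you need, as the question's other functions already do:
// Require the specific service module instead of the top-level package.
const TextToSpeechV1 = require('ibm-watson/text-to-speech/v1');
const { IamAuthenticator } = require('ibm-watson/auth');

const textToSpeech = new TextToSpeechV1({
  // 'fakekey-1234' is the placeholder key from the question
  authenticator: new IamAuthenticator({ apikey: 'fakekey-1234' }),
});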

Creating sub connections with azure-mobile-apps and NodeJS

I'm trying to create an API using Node.js, Express, and azure-mobile-apps to do some data synchronisation between an Ionic 3 mobile app (which uses an SQLite local database) and a Microsoft SQL database.
My API has to create a synchronisation connection for each mobile application. Each application will be linked to a remote database. For example, if user_01 wants to synchronise his data, he's going to be linked to his client_01 database. So each time it's needed, the API will create a new process running on a different port.
Here is an example: https://zupimages.net/up/19/36/szhu.png
The problem is that I'm not able to create more than one connection with azure-mobile-apps. The first one always works, but the second, third, etc. still use the first connection that I instantiated. I've looked into the app stack and everything seems fine.
Is this an issue with azure-mobile-apps, or did I misunderstand something about Express?
Thanks for your responses!
var azureMobileApps = require('azure-mobile-apps');
var express = require('express');

module.exports = {
  async createConnection(client) {
    try {
      let app = express();
      let mobileApp = azureMobileApps({
        homePage: true,
        swagger: true,
        data: {
          server: '****',
          user: client.User,
          password: client.Password,
          port: '1443',
          database: client.Database,
          provider: 'mssql',
          dynamicSchema: false,
          options: {
            encrypt: false
          }
        }
      });
      await mobileApp.tables.import('./tables');
      await mobileApp.tables.initialize();
      app.listen(global.portCounter);
      app.use(mobileApp);
      console.log(app._router.stack);
      console.log('Listening on port ', global.portCounter);
      global.portCounter++;
    } catch (error) {
      console.log(error);
    }
  }
};
It's working now. The thing is, it's impossible to create multiple connections with the azure-mobile-apps SDK for Node.js.
I had to use worker threads (worker_threads), which seem to isolate the memory in a sub-process.
Hope it can help somebody one day.
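For illustration, here is a minimal sketch of that approach (the file name connection-worker.js and the client/port shapes are my assumptions, not the author's actual code). The main process spawns one worker per client, and each worker builds its own Express app and azure-mobile-apps instance in its own isolated module registry:
// main.js: spawn one isolated worker per client connection
const { Worker } = require('worker_threads');

function startClientConnection(client, port) {
  const worker = new Worker('./connection-worker.js', {
    workerData: { client, port } // assumed shape: client = { User, Password, Database }
  });
  worker.on('error', (err) => console.error('worker failed:', err));
  return worker;
}

// connection-worker.js: each worker gets a fresh azure-mobile-apps instance
const { workerData } = require('worker_threads');
const express = require('express');
const azureMobileApps = require('azure-mobile-apps');

const app = express();
const mobileApp = azureMobileApps({
  data: {
    server: '****',
    user: workerData.client.User,
    password: workerData.client.Password,
    database: workerData.client.Database,
    provider: 'mssql'
  }
});

mobileApp.tables.import('./tables');
mobileApp.tables.initialize().then(() => {
  app.use(mobileApp);
  app.listen(workerData.port);
});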

Error when attempting knex seed:run after successful knex migrate:latest for remote database

I'm running into the following error when attempting to run knex seed:run against my remote Postgres database (not localhost): Knex:Error Pool2 - Error: connect ECONNREFUSED 127.0.0.1:5432.
I am able to run knex migrate:latest successfully and can see that the tables are created on my Postgres server, but when I try to seed I get that error. I've run the same migrations and seed file against my local configuration without a problem, but seeding my Heroku Postgres instance throws this error. (I'm not running my local pg service while seeding the new db, which is likely why the refused connection surfaces.)
Any thoughts on why it is attempting to connect to localhost instead of the specified db? A sample of my file is provided below:
var User = require("./models/User");
var Project = require("./models/Project");

exports.seed = function(knex, Promise) {
  console.log(knex.client.config.connection); // This returns the correct db info.
  return knex('user').del()
    .then(function() {
      return knex('project').del();
    }).then(function() {
      return new User({id: 1, firstName: "James", lastName: "Lee", phone: "123-456-2000", email: "test@test.com"}).save(null, {method: "insert"});
    }).then(function() {
      return new Project({id: 1, name: "Test"}).save(null, {method: "insert"});
    });
};
This turned out to be due to how I was setting up the migrations and seeds. The configurations were actually pulling from two different places: one had the correct SSL settings in place, the other (the seed file's) did not. Adding the correct settings in both places resolved the issue.
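For reference, a minimal sketch of a single shared configuration (the file layout and environment variable are assumptions, not the asker's actual setup), so that migrations and seeds read the same connection and SSL settings:
// knexfile.js
module.exports = {
  production: {
    client: 'pg',
    connection: {
      connectionString: process.env.DATABASE_URL, // e.g. the Heroku Postgres URL
      ssl: { rejectUnauthorized: false }          // SSL settings defined once, used by both
    },
    migrations: { directory: './migrations' },
    seeds: { directory: './seeds' }
  }
};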

ArangoDB-Foxx with Relay Framework

Can anyone tell me, or point me to a link explaining, how to use ArangoDB-Foxx along with the Relay framework (specifically relay-fullstack)? I've looked everywhere with no luck.
I have a Relay project using relay-fullstack and I want to make it work with ArangoDB-Foxx (currently I'm using a schema from the Relay framework's tutorial). As far as I know, ArangoDB-Foxx uses graphql-sync instead of graphql, which breaks relay-fullstack's build process.
Any help would be very appreciated. Thanks.. :)
I don't know much about relay-fullstack, but if your only problem is generating the schema file provided in the tutorial, just check how it is generated: https://github.com/relayjs/relay-starter-kit/blob/master/scripts/updateSchema.js
With the latest version of graphql (or graphql-sync), the introspectionQuery and printSchema utilities are exported from the package directly.
You can emulate the updateSchema script in Foxx by creating a new Foxx script called update-schema:
First add the script to your manifest:
"scripts": {
"update-schema": "scripts/updateSchema.js"
}
Then implement the script itself as scripts/updateSchema.js like so (assuming your GraphQL schema lives in data/schema.js):
'use strict'
const fs = require('fs')
const path = require('path')
const Schema = require('../data/schema')
const gql = require('graphql')

const result = gql.graphql(Schema, gql.introspectionQuery)
if (result.errors) {
  console.error(
    'ERROR introspecting schema: ',
    JSON.stringify(result.errors.map((err) => gql.formatError(err)), null, 2)
  )
} else {
  fs.writeFileSync(
    path.join(__dirname, '../data/schema.json'),
    JSON.stringify(result, null, 2)
  )
}
fs.writeFileSync(
  path.join(__dirname, '../data/schema.graphql'),
  gql.printSchema(Schema)
)
You can now run the script from the web interface by going to the settings tab of your service and picking it from the dropdown (you don't need to pass any arguments; just press OK). The script should generate the JSON and GraphQL schema files, just as the starter kit script does.

Hapi.js Catbox Redis returning "server.cache is not a function"

So I'm like 99% sure I'm just screwing up something dumb here.
I'm trying to set up catbox to cache objects to Redis. I have Redis up and running and I can hit it with RDM (a SQL Pro-like utility for Redis), but Hapi is not cooperating.
I register the redis catbox cache like so:
const server = new Hapi.Server({
  cache: [
    {
      name: 'redisCache',
      engine: require('catbox-redis'),
      host: 'redis',
      partition: 'cache',
      password: 'devpassword'
    }
  ]
});
I am doing this in server.js. After this block of code I go on to register some more plugins and start the server. I also export the server at the end of the file:
module.exports = server;
Then in my routes file, I am attempting to set up a testing route like so:
{
  method: 'GET',
  path: '/cacheSet/{key}/{value}',
  config: { auth: false },
  handler: function(req, res) {
    const testCache = server.cache({
      cache: 'redisCache',
      expireIn: 1000
    });
    testCache.set(req.params.key, req.params.value, 1000, function(e) {
      console.log(e);
      res(Boom.create(e.http_code, e.message));
    });
    res(req.params.key + " " + req.params.value);
  }
},
Note: My routes are in an external file, and are imported into server.js where I register them.
If I comment out all the cache stuff on this route, the route runs fine and returns my params.
If I run this with the cache stuff, at first I got "server not defined". So I then added
const server = require('./../server.js');
to import the server.
Now when I run this, I get "server.cache is not a function" and a 500 error.
I don't understand what I'm doing wrong. My guess is that the server object I'm importing doesn't have all the configs set yet, so the .cache method is unavailable. However, this seems wrong, because .cache should exist by default (backed by the default memory cache), so even if my cache registration isn't active yet, server.cache should theoretically still be a method.
I know it has to be something basic I'm messing up, but what?
I was correct. I was doing something stupid. It had to do with how I was exporting my server. I modified my structure to pull out the initial server creation and make it more modular. Now I am simply exporting JUST the server like so:
'use strict';
const Hapi = require('hapi');

const server = new Hapi.Server({
  cache: [
    {
      name: 'redisCache',
      engine: require('catbox-redis'),
      host: 'redis',
      partition: 'cache',
      password: 'devpassword'
    }
  ]
});

module.exports = server;
I then import that into my main server file (now index.js, previously server.js) and everything runs fine. I can also import it into any other file (in this case my routes file) and access the server's methods.
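For illustration, a minimal sketch of what that index.js might look like (the file names, port, and Hapi v16-style connection/start calls are assumptions based on the question's code, not the author's exact files):
// index.js: import the exported server, attach routes, and start it
'use strict';
const server = require('./server');
const routes = require('./routes'); // assumed to export an array of route objects

server.connection({ port: 3000 }); // assumed port
server.route(routes);

server.start((err) => {
  if (err) throw err;
  console.log('Server running at:', server.info.uri);
});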
Redis is happily storing keys and Hapi is happily not giving me errors.
Leaving this here in case anyone else runs into a similarly dumb mistake.
