I am currently trying to attach a global Mongoose plugin at runtime, with no luck. My plugin requires a few dependencies and options generated during my app's bootstrapping, so I need to add it sequentially. Mongoose seems to ignore everything wrapped within a closure.
const mongoose = require('mongoose');
const config = {};
const { DB_CONNECT } = process.env;

// Global plugin: adds an instance method to every schema it is applied to.
// NOTE(review): global plugins only apply to schemas compiled into models
// AFTER mongoose.plugin() runs; any model registered before that will not
// receive the plugin (see the corrected snippet below).
const myPlugin = (schema) => {
  console.log('done'); // not logged when models were compiled before plugin registration
  schema.methods.mymethod = () => {};
};

// Connects first, then registers the plugin globally.
const connectAndAddPlugins = async () => {
  await mongoose.connect(DB_CONNECT, { ...config });
  mongoose.plugin(myPlugin);
};

// BUG FIX: the original left this promise floating; an unhandled connect
// rejection crashes newer Node versions. Attach a rejection handler.
connectAndAddPlugins().catch(console.error);
Any help will be highly appreciated.
Apparently, once a model has been compiled and loaded, Mongoose no longer attaches global plugins to it; models should therefore be registered after the plugins are added:
const mongoose = require('mongoose');
const config = {};
const { DB_CONNECT } = process.env;

// Global plugin: adds an instance method to every schema it is applied to.
const myPlugin = (schema) => {
  console.log('done');
  schema.methods.mymethod = () => {};
};

// Connects, then registers the plugin globally.
const connectAndAddPlugins = async () => {
  await mongoose.connect(DB_CONNECT, { ...config });
  mongoose.plugin(myPlugin);
};

// Compiles models; must run only after the global plugin is registered.
const loadModels = () => {
  mongoose.model('Cat', { name: String });
};

// BUG FIX: the original called loadModels() synchronously right after
// connectAndAddPlugins(), so the model was compiled BEFORE the async
// connect/plugin sequence finished — exactly the problem this snippet is
// meant to fix. Chain it so the plugin is in place first.
connectAndAddPlugins()
  .then(loadModels)
  .catch(console.error);
Related
I'm writing a Node.js cli in which I've to read from one Mongo Atlas DB and write to another Mongo Atlas DB. I'll be reading documents from one db and writing equivalent documents in the other db, one document at a time. I've two separate connection files like this:
ReadDB.js:
require('dotenv').config();
const mongoose = require('mongoose');

const read_db_url = process.env.READDB_URI;

// Opens the default Mongoose connection against the read (production) database.
// Connection errors are logged rather than thrown.
async function readDB() {
  const connectOptions = {
    useNewUrlParser: true,
    useUnifiedTopology: true,
    dbName: 'dbProd',
  };
  try {
    await mongoose.connect(read_db_url, connectOptions);
  } catch (err) {
    console.error(err);
  }
}

module.exports = readDB;
WriteDB.js:
require('dotenv').config();
const mongoose = require('mongoose');

const write_db_url = process.env.WRITEDB_URI;

// Opens a connection to the write (QA) database; errors are logged, not thrown.
// NOTE(review): this calls mongoose.connect() on the same global mongoose
// instance as ReadDB.js, so both files target the single default connection.
// For two simultaneous databases, mongoose.createConnection() is needed —
// confirm against the multi-connection answer below.
const writeDB = async () => {
  try {
    await mongoose.connect(write_db_url, {
      useNewUrlParser: true,
      useUnifiedTopology: true,
      dbName: 'dbQA',
    });
  } catch (err) {
    console.error(err);
  }
};

module.exports = writeDB;
This what I've so far for the main application (cli.js):
cli.js:
require('dotenv').config();
const mongoose = require('mongoose');
const connectReadDB = require('./ReadDB.js');
const connectWriteDB = require('./WriteDB.js');

connectReadDB();
connectWriteDB();

// Looks up a single product by id and returns the result array.
// NOTE(review): `Products` is not defined in this file — it must be a model
// registered elsewhere; confirm where it is meant to be imported from.
const findProduct = async (productId) => {
  // BUG FIX: `products` was an implicit global (no const/let) and the
  // result was discarded; declare it and return it to the caller.
  const products = await Products.find({ _id: productId });
  return products;
};
I guess my confusion is how Node.js will know which db to read from to begin with? Will I need separate set of models, one for read and one for write? How can I establish two simultaneous connections in the same Node.js app?
Mongoose handles connections via a connection pool: http://mongoosejs.com/docs/connections.html
You can use the server: {poolSize: 5} option to increase or decrease the pool size (the number of parallel connections).
If you need connections to different databases look here Mongoose and multiple database in single node.js project
Example of multiple connections:
const mongoose = require('mongoose');

// Two independent connections; each has its own model registry.
const connection = mongoose.createConnection('mongodb://localhost/db1');
const connection2 = mongoose.createConnection('mongodb://localhost/db2');

const Schema = new mongoose.Schema({});

// A model is bound to the connection it was registered on.
const model1 = connection.model('User', Schema);
const model2 = connection2.model('Item', Schema);

model1.find({}, () => {
  console.log("this will print out last");
});

model2.find({}, () => {
  console.log("this will print out first");
});
I'm writing tests with jest and sequelize and I need to keep my database clean for every test, so I want to set a transaction for every test and then rollback at the end.
This is what I've got, but it won't pass the transaction to my tests:
// Start an unmanaged transaction before each test and roll it back after,
// intending to leave the database untouched between tests.
// NOTE(review): in CommonJS, `this` inside these top-level arrows is the
// module context, so `this.transaction` is shared between hooks — but with
// cls-hooked, only MANAGED transactions (the `sequelize.transaction(cb)`
// callback form) are placed on the namespace automatically. An unmanaged
// transaction like this one is not picked up by `db.role.create`, so the
// create likely runs outside the transaction — confirm against Sequelize's
// useCLS documentation.
beforeEach(async () => {
this.transaction = await db.sequelize.transaction();
});
test('Database should be clean', async () => {
const role = await db.role.create({
name: 'someName',
});
expect(role.id).toBe(1);
});
afterEach(async () => {
// Rolls back whatever the hook above started.
await this.transaction.rollback();
});
Sequelize is already set up to use CLS:
// Register the cls-hooked namespace with Sequelize before constructing the
// instance (as done here), so queries participate in the namespace.
const Sequelize = require('sequelize');
const config = require('../../config/config.js');
const cls = require('cls-hooked');
const namespace = cls.createNamespace('testing-namespace');
// Static call: applies to all Sequelize instances created afterwards.
Sequelize.useCLS(namespace);
const sequelize = new Sequelize(config);
...
It would be really helpful if someone could explain to me how to use unmanaged transactions with cls-hooked.
I managed to keep my database clean by using umzug to run seeds programmatically; this is my code:
const { db } = require('../models');
const Umzug = require('umzug');
const path = require('path');

// Runs the project's seeders through umzug so every test starts from a
// freshly seeded database.
const umzug = new Umzug({
  migrations: {
    path: path.join(__dirname, './../../seeders'),
    params: [db.sequelize.getQueryInterface()],
  },
  logging: false,
});

beforeEach(async () => {
  // Revert all seeds, then re-apply them. The original bound the results
  // to unused variables (`reverted`, `executed`); just await the calls.
  await umzug.down({ to: 0 });
  await umzug.up();
});
I tried to use umzug v3 (beta), but it didn't work, so I used the stable version 2. This approach isn't as performant as I'd like, but gets the job done.
I have been working on this project for 2 years now, and I'm thinking this was caused by the recent update, but am wondering if there are any kind, intelligent, Mongoose/NoSQL DBA, souls out there who would do the awesome service of helping me either track-down, and/or resolve this issue.
So, as you can see below, this is a simple mongoose find query over express to MongoDB. This is rather evident, at a high-level, and for most devs, the interactions will be natural, as any Mongo, Express, Node Stack using Mongoose.
The issue is that, when I send this query, regardless of environment (a production project), it does not resolve.
The "data" seems to get lost somewhere, and therefore, the query simply never resolves.
It's a simple setup, really a test endpoint, so help out, run it through, and send some feedback.
Greatly Appreciated!
Model.js
const mongoose = require('mongoose');
const mongoosePaginate = require('mongoose-paginate');

const { Schema } = mongoose;

// Minimal schema: one required (non-unique) string field, with
// createdAt/updatedAt timestamps maintained automatically.
const fields = {
  data: {
    type: String,
    unique: false,
    required: true,
  },
};

const TestSchema = new Schema(fields, { timestamps: true });

// Adds paginate() support to the compiled model.
TestSchema.plugin(mongoosePaginate);

module.exports = mongoose.model('Test', TestSchema);
Constructor.js
// Data-access wrapper around the Test model.
class Constructor {
  constructor() {}

  // Returns a promise-like query resolving to all Test documents.
  // BUG FIX: the original passed an empty callback AND returned the query;
  // the callback form executes the query and swallows the results, so
  // callers awaiting the return value risked re-execution and never got a
  // cleanly resolved value. Return the bare query and let callers
  // await/then it.
  getAll() {
    return TestSchema.find({});
  }
}

module.exports = Constructor;
db.js
const mongoose = require('mongoose');

// Connect to db, then invoke whatever callback was registered via onConnect().
mongoose.connect(
  'mongodb://localhost:27017/test',
  { useNewUrlParser: true, useUnifiedTopology: true },
  (err) => {
    if (err) {
      // BUG FIX: the original logged a fixed string via console.log and
      // discarded the actual error; surface it on stderr.
      return console.error('Cannot connect to DB', err);
    }
    connectionCallback();
    console.log('DB Connected');
  }
);

// Defaults to a no-op so a connection established before registration
// doesn't crash; replaced via onConnect() below.
let connectionCallback = () => {};

// Lets the app register code to run once the DB connection is up.
module.exports.onConnect = (cb) => {
  connectionCallback = cb;
};
App.js
const express = require('express');
const app = express();
const ip = require('ip');
const db = require('./db');
const router = express.Router();
const port = 8888;
const http = require('http').createServer(app);

let ipAddress = 'localhost'; // fallback: only reachable from the local host

try {
  // Resolve the machine's LAN address so the server is reachable from the network.
  ipAddress = ip.address();
} catch (err) {
  // BUG FIX: the original called console.err(), which does not exist and
  // would itself throw inside this catch block.
  console.error(err);
}

http.listen(port, ipAddress, () => {
  console.log(`Server is running at ${ipAddress}:${port}`);
});

db.onConnect(() => {
  const Constructor = require('./pathTo/Constructor');
  const construct = new Constructor();
  // BUG FIX: the original handler called construct.getAll() and never sent
  // a response, so GET /api/test hung forever — this is why the query
  // "never resolves" from the client's point of view. Resolve the query
  // and reply with its result (or the error).
  router.get('/test', (req, res) => {
    construct
      .getAll()
      .then((tests) => res.json(tests))
      .catch((err) => res.status(500).json({ error: err.message }));
  });
  app.use('/api', router);
});
Your problem is with the constructor.js getAll function: since you both return the query and pass a callback, the promise will never be resolved. You should either resolve the promise or return the response from the callback.
Resolve Promise:
// Promise-returning variant: callers await getAll() for the documents.
class Constructor {
  constructor() {}

  // Returns a promise resolving to all Test documents.
  // `return await` outside a try/catch is redundant — returning the
  // promise directly is equivalent and avoids an extra microtask tick.
  async getAll() {
    return TestSchema.find({});
  }
}

module.exports = Constructor;
Return from callback:
// Callback variant.
// NOTE(review): returning from inside the callback does NOT make getAll()
// itself return the documents — getAll() still returns undefined to its
// caller. The promise-returning variant is the usable one for callers that
// need the result.
class Constructor {
  constructor() {}

  getAll() {
    TestSchema.find({}, function (err, tests) {
      // BUG FIX: the original line was a syntax error (`return tests.`).
      return tests;
    });
  }
}

module.exports = Constructor;
I ended up just scaling the project for production. I put the connectionCallback in a class and called it with the createConnection mongoose function.
Looks like this:
mongoose.Promise = global.Promise;

const url = 'mongodb://localhost/db';

// Dedicated connection instance (createConnection instead of the global
// mongoose.connect) so models can be bound to it explicitly.
const connection = mongoose.createConnection(url, options);

// Load models, then run the registered connection callback.
require('/models').connectionCallback();

// BUG FIX: the original wrote `modules.export = connectionInstance;` —
// `modules` is a typo for `module.exports`, and `connectionInstance` is
// never defined. Export the connection that was actually created.
module.exports = connection;
Please note, I am no longer using express!
I've been testing LoopBack for a couple of hours now and everything works fine when I create models manually and modify the generated model.json to match my Oracle DB column names.
But i'm getting stuck when i want to get a model from my oracle db to avoid to write the 50 columns manually..
I made a test with a table called "atest" and contains a column "name" and "id".
It creates atest.json and adds this to model-config.json:
"Atest": {
"dataSource": "oracledb",
"public": true
}
But in my atest.json there is just a "undefined"..
My discover-models.js file :
'use strict';

const loopback = require('loopback');
const { promisify } = require('util');
const fs = require('fs');

const writeFile = promisify(fs.writeFile);
const readFile = promisify(fs.readFile);
const mkdirp = promisify(require('mkdirp'));

const DATASOURCE_NAME = 'oracledb';
const dataSourceConfig = require('./server/datasources.json');
const db = new loopback.DataSource(dataSourceConfig[DATASOURCE_NAME]);

// Kick off discovery: exit 0 on success, log and exit 1 on failure.
discover().then(
  () => process.exit(),
  (error) => {
    console.error('UNHANDLED ERROR:\n', error);
    process.exit(1);
  },
);
/**
 * Discovers the ATEST table's schema from the Oracle datasource, writes the
 * model definition to common/models/atest.json, and registers the model in
 * server/model-config.json so it is exposed over REST.
 */
async function discover() {
  // It's important to pass the same "options" object to all calls of
  // dataSource.discoverSchemas(); it allows the method to cache discovered
  // related models.
  const options = { relations: false };

  // Discover models and relations.
  const atestSchemas = await db.discoverSchemas('ATEST', options);

  // BUG FIX: discoverSchemas keys its result as 'SCHEMA_NAME.ATEST', not
  // plain 'ATEST', so indexing with 'ATEST' yielded undefined and the JSON
  // file contained just "undefined". Locate the qualified key instead.
  const schemaKey =
    Object.keys(atestSchemas).find((k) => k.endsWith('.ATEST')) || 'ATEST';

  // Create the model definition file.
  await mkdirp('common/models');
  await writeFile(
    'common/models/atest.json',
    JSON.stringify(atestSchemas[schemaKey], null, 2)
  );

  // Expose the model via the REST API.
  const configJson = await readFile('server/model-config.json', 'utf-8');
  console.log('MODEL CONFIG', configJson);
  const config = JSON.parse(configJson);
  config.Atest = { dataSource: DATASOURCE_NAME, public: true };
  await writeFile(
    'server/model-config.json',
    JSON.stringify(config, null, 2)
  );
}
My oracle connection is working fine, i don't get it, any idea?
Add a console.log after you invoke discoverSchemas:
// Discover models and relations
const atestSchemas = await db.discoverSchemas('ATEST', options);
// Inspect the returned keys: they are qualified as 'SCHEMA_NAME.ATEST',
// not plain 'ATEST', which is why atestSchemas['ATEST'] is undefined.
console.log(atestSchemas);
You should see that the key is not just 'ATEST', as referenced later with atestSchemas['ATEST']. The key is 'SCHEMA_NAME.ATEST' (SCHEMA_NAME will vary as per your environment).
If you target the appropriate key, you should get what you're looking for.
I am currently working on some code in NodeJS.
My code looks like this:
const AWS = require('aws-sdk');

// Request only the account's max-instances attribute.
const PARAMS_ACCOUNT = {
  AttributeNames: [
    "max-instances"
  ]
};

const ec2 = new AWS.EC2();

// The API call is kicked off once at module load; maxInstances() only
// unwraps the value from the already-pending promise.
const getAccountAttributes = ec2.describeAccountAttributes(PARAMS_ACCOUNT).promise();

// Resolves to the account's maximum EC2 instance count.
async function maxInstances() {
  const data = await getAccountAttributes;
  return data.AccountAttributes[0].AttributeValues[0].AttributeValue;
}
I've tried to use aws-sdk-mock, along with chai and mocha, but I'm not sure how to mock the .promise() of the aws-sdk.
My attempt looks like the following:
const chai = require('chai');
const mocha = require('mocha');
const awsSDK = require('aws-sdk');
const aws = require('aws-sdk-mock');
const server = require('../instance_limit.js');
const expect = chai.expect;
// NOTE(review): this mock cannot take effect — requiring
// '../instance_limit.js' above already created the real EC2 client and
// started describeAccountAttributes().promise() at module load, before
// aws.mock() runs. Additionally, maxInstances() returns a promise, so
// expect(instances).to.equal(10) compares a Promise to 10 and can never
// pass; the assertion would need to await the resolved value.
describe('Sample test', () => {
it('this is also a test', () => {
aws.mock('EC2','describeAccountAttributes',function (params, callback){
callback(null, {AccountAttributes:[{AttributeValues:[{AttributeValue: 10}]}]});
});
// Unused: constructing a fresh client here does not affect the one
// already created inside instance_limit.js.
var awsEC2 = new awsSDK.EC2();
const instances = server.maxInstances();
expect(instances).to.equal(10);
});
});
But with this I am not able to mock the aws-sdk call.
Thanks for your help.
I was able to solve the problem by using dependency injection, which avoids mocking the AWS SDK in this case; for that it was necessary to adjust my function a little and set a default value.
It now looks like this:
const AWS = require('aws-sdk');

// Request only the account's max-instances attribute.
const PARAMS_ACCOUNT = {
  AttributeNames: [
    "max-instances"
  ]
};

const ec2 = new AWS.EC2();

// BUG FIX: this must be a FUNCTION for the default parameter below to call.
// The original assigned the promise itself
// (ec2.describeAccountAttributes(...).promise()) and then invoked it as
// getAccountAttributes(), which throws "getAccountAttributes is not a
// function". Wrapping it also defers the API call until first use.
const getAccountAttributes = () =>
  ec2.describeAccountAttributes(PARAMS_ACCOUNT).promise();

// Dependency injection: tests pass their own promise; production callers
// get the real EC2 lookup by default.
function maxInstances(accountAttributes = getAccountAttributes()) {
  return accountAttributes.then(data => {
    return data.AccountAttributes[0].AttributeValues[0].AttributeValue;
  });
}
This now allows me to provide a sample value during the tests, like this:
const chai = require('chai');
const mocha = require('mocha');
const server = require('../instance_limit.js');

const expect = chai.expect;

describe('data calculation based on ec2 values', () => {
  it(' expects to return max Instances value equal 10', () => {
    // Inject a pre-resolved promise shaped like the real EC2 response.
    const stubResponse = {AccountAttributes:[{AttributeValues:[{AttributeValue: 10}]}]};
    const instances = server.maxInstances(Promise.resolve(stubResponse));
    // Return the assertion chain so mocha awaits the async expectation.
    return instances.then((data) => {
      expect(data).to.equal(10);
    });
  });
})