Knex pool full on migration - node.js

I'm trying to get started with knex.js and I can't get migrations to work. Knex works fine for my API calls. Here's my setup:
knexfile.js
const env = process.env;

module.exports = {
  client: 'mysql',
  connection: {
    host: env.DB_HOST,
    database: env.DB_NAME,
    user: env.DB_USER,
    password: env.DB_PASSWORD,
    port: env.PORT
  },
  pool: {
    min: 0,
    max: 50
  },
  migrations: {
    directory: './db/migrations',
    tableName: 'knex_migrations'
  },
  seeds: {
    directory: './db/seeds'
  }
};
knex.js
const config = require('../knexfile.js');
module.exports = require('knex')(config);
events.js
const express = require('express');
const router = express.Router();
const knex = require('../../db/knex.js');
// GET api/events
router.get('/', (req, res) => {
  knex('events')
    .then(events => { res.send(events); })
    .catch(err => { console.log(err); });
});
module.exports = router;
and then I have a file in the migrations folder with:
exports.up = function(knex) {
  return knex.schema.createTable('users', function (t) {
    t.increments('id').primary();
    t.string('username').notNullable();
    t.string('password').notNullable();
    t.timestamps(false, true);
  })
    .then(() => { console.log('created users table') })
    .catch((err) => { throw err })
    .finally(() => { knex.destroy() });
};

exports.down = function(knex) {
  return knex.schema.dropTableIfExists('users');
};
When I run knex migrate:latest I get TimeoutError: Knex: Timeout acquiring a connection. The pool is probably full. Are you missing a .transacting(trx) call?
I know similar questions have been asked before, but I can't seem to find any that shed light on my particular situation. I've tried adding a knex.destroy() to the end of my GET request but that doesn't seem to help (it just makes the connection unusable if I add other request handlers below).
I did try checking the knex.client.pool in a finally clause at the end of the GET request. numUsed was 0, numFree was 1, numPendingAcquires and numPendingCreates were both 0. I do find it odd that numFree was only 1 given that my knexfile specifies max 50. Any advice greatly appreciated.
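For reference, a minimal sketch of how those stats can be read (assuming a knex version that uses the tarn pool, where they are exposed as methods; the route name is hypothetical):

// hypothetical diagnostics route; knex is the instance from db/knex.js above
router.get('/pool-stats', (req, res) => {
  const pool = knex.client.pool;
  res.json({
    numUsed: pool.numUsed(),
    numFree: pool.numFree(),
    numPendingAcquires: pool.numPendingAcquires(),
    numPendingCreates: pool.numPendingCreates()
  });
});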

Following @technogeek1995's comment, the answer turned out to be adding require('dotenv').config({path: '../.env'}); to knexfile.js (in retrospect, this part seems obvious), and running the CLI from the same directory. Hope this helps someone else.
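In code, the fix looks like this (the .env path is the one from my project layout; adjust to where your .env actually lives):

knexfile.js
// load environment variables before they are read below;
// without this, knex migrate:latest sees undefined connection settings
require('dotenv').config({ path: '../.env' });

const env = process.env;
module.exports = {
  // ...same config as above
};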


What is the correct way to make MongoDB Atlas connection available in middleware functions in Node.js/Express?

Environment
Node.js (18 LTS) / Express (^4.18.2)
MongoDB Native Driver (^4.12.0)
MongoDB Atlas (5.0.14)
Application Structure
.github
config
- mongodb_client.js
dist
middleware
node_modules
routes
src
views
.env
.gitignore
app.js
package.json
README.md
Connection Code
As a sanity check, this is the connection code that is provided in the MongoDB Atlas interface:
As code:
const { MongoClient, ServerApiVersion } = require('mongodb');
const uri = "mongodb+srv://admin:<password>@cluster0.******.mongodb.net/?retryWrites=true&w=majority";
const client = new MongoClient(uri, { useNewUrlParser: true, useUnifiedTopology: true, serverApi: ServerApiVersion.v1 });

client.connect(err => {
  const collection = client.db("test").collection("devices");
  // perform actions on the collection object
  client.close();
});
Desired Behaviour
The code snippet provided in the MongoDB Atlas interface performs the connection and subsequent database calls in one file.
However, I would like to:
Create a file that contains the MongoDB Atlas connection (e.g mongodb_client.js)
Export the connection so that it can be used in middleware files (e.g my_middleware_01.js)
So, in pseudo code, I imagine it would look something like this:
config / mongodb_client.js
import { MongoClient, ServerApiVersion } from 'mongodb';

const connection_string = process.env.MONGODB_CONNECTION_STRING;
const mongodb_client = new MongoClient(connection_string, { useNewUrlParser: true, useUnifiedTopology: true, serverApi: ServerApiVersion.v1 });

// export the connection somehow
export { mongodb_client };
middleware / my_middleware_01.js
import { mongodb_client } from '../config/mongodb_client.js';
const api_myResource_get = async (req, res) => {
  mongodb_client.open();
  let collection = mongodb_client.db('myDatabase').collection('myCollection');
  let result = await collection.findOne(query, options);
  res.json({ result: result });
  mongodb_client.close();
};
export { api_myResource_get };
What I've Tried
It seems I was grappling with this dynamic over a year ago and posted my solution here:
https://stackoverflow.com/a/70135909
However, I think that conventions have changed since then.
For example, when instantiating the client, the current method seems to be:
const client = new MongoClient(connection_string, { useNewUrlParser: true, useUnifiedTopology: true, serverApi: ServerApiVersion.v1 });
whereas previously it was something like:
await MongoClient.connect(connection_string);
I've Google searched:
how to make mongodb connection available in node.js middleware?
But all the results seem to reference this older convention and I'd like to ensure I am using best practice (and most recent conventions).
Related Questions and Resources
Passing Mongo DB Object DB to Express Middleware
What is best way to handle global connection of Mongodb in NodeJs
How do I manage MongoDB connections in a Node.js web application?
What is the difference between MongoClient and the client object which we get in the callback of MongoClient.connect() method
How to properly reuse connection to Mongodb across NodeJs application and modules
MongoDB Driver Connection Documentation
EDIT 01:
Below is one attempt which is returning the error:
TypeError: Cannot read properties of null (reading 'db')
config / mongodb_connection.js
import { MongoClient, ServerApiVersion } from 'mongodb';
const connection_string = process.env.MONGODB_CONNECTION_STRING;
class mongodb_connection {
  static async open() {
    if (this.conn) return this.conn;
    this.conn = await MongoClient.connect(connection_string, { useNewUrlParser: true, useUnifiedTopology: true, serverApi: ServerApiVersion.v1 });
    return this.conn;
  }
}

mongodb_connection.conn = null;
mongodb_connection.url = connection_string;
export { mongodb_connection };
middleware / api_mongodb_get.js
import { mongodb_connection } from '../../config/mongodb_connection.js';
const api_mongodb_get = async (req, res) => {
  try {
    // note: open() is async but not awaited here, so conn can still be null on the next line
    mongodb_connection.open();
    const collection = mongodb_connection.conn.db('pages').collection('pages');
    const result = await collection.findOne({ "my_key": "my value" });
    res.json({ data: result });
    mongodb_connection.close();
  } catch (error) {
    console.error(error);
    res.status(500).send(error);
  }
};
export { api_mongodb_get };
EDIT 02:
The following 'works' but I don't know if it is best practice or not.
In other words, I don't know if I am overlooking something that will cause undesired behavior.
config / mongodb_connection.js
import { MongoClient, ServerApiVersion } from 'mongodb';
const connection_string = process.env.MONGODB_CONNECTION_STRING;
const mongodb_connection = new MongoClient(connection_string, { useNewUrlParser: true, useUnifiedTopology: true, serverApi: ServerApiVersion.v1 });
export { mongodb_connection };
middleware / api_mongodb_get.js
import { mongodb_connection } from '../../config/mongodb_connection.js';
const api_mongodb_get = async (req, res) => {
  try {
    mongodb_connection.connect(async err => {
      const collection = mongodb_connection.db('pages').collection('pages');
      const result = await collection.findOne({ "my_key": "my value" });
      res.json({ data: result });
      mongodb_connection.close();
    });
  } catch (error) {
    console.error(error);
    res.status(500).send(error);
  }
};
export { api_mongodb_get };
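For reference, a variation I am considering (my own sketch, assuming Node with ES modules and top-level await; this is not from the Atlas docs) is to connect once at startup and export the already-connected client, so the middleware never connects or closes per request:

config / mongodb_connection.js
import { MongoClient, ServerApiVersion } from 'mongodb';

const connection_string = process.env.MONGODB_CONNECTION_STRING;
const mongodb_connection = new MongoClient(connection_string, { serverApi: ServerApiVersion.v1 });

// connect once at module load; every importer reuses the same connected client
await mongodb_connection.connect();

export { mongodb_connection };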
Instead of using mongodb, use the mongoose library to establish the connection.
Here is an example of establishing the connection with a MongoDB cluster:
connectDb.js:
const dotenv = require('dotenv').config();
const DB_URL = process.env.DB_URL;
const mongoose = require('mongoose');

const connectDb = async () => {
  try {
    const connection = await mongoose.connect(DB_URL);
    console.log(`Connected to database Successfully: ${connection}`);
  } catch (error) {
    console.log(error);
  }
};

module.exports = connectDb;
And I think I don't need to mention that DB_URL is the connection string provided by the MongoDB cluster.
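A minimal usage sketch (the app.js wiring and the port are my assumptions, not part of the answer):

app.js
const express = require('express');
const connectDb = require('./connectDb');

const app = express();

// establish the database connection before accepting traffic
connectDb().then(() => {
  app.listen(3000, () => console.log('Server running on port 3000'));
});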

MongooseError: Operation `orders.deleteMany()` buffering timed out after 10000ms

When I run my app with npm run seeder, I face this error.
I have checked my database connection carefully and it's OK.
I have also checked my orderModel file and it's OK. I have used MongoDB Compass and there is no problem there. I don't know why it is showing a buffering timeout.
MongooseError: Operation `orders.deleteMany()` buffering timed out after 10000ms
seeder.js
import mongoose from "mongoose";
import dotenv from "dotenv";
import colors from "colors";
import users from "./data/users.js";
import products from "./data/products.js";
import User from "./models/userModel.js";
import Product from "./models/productModel.js";
import Order from "./models/orderModel.js";
import connectDB from "./config/db.js";
dotenv.config();
connectDB();
const importData = async () => {
  try {
    await Order.deleteMany();
    await Product.deleteMany();
    await User.deleteMany();

    const createUsers = await User.insertMany(users);
    const adminUser = createUsers[0]._id;
    const sampleProducts = products.map((product) => {
      return { ...product, user: adminUser };
    });

    await Product.insertMany(sampleProducts);
    console.log("Data Imported".green.inverse);
    process.exit();
  } catch (error) {
    console.error(`${error}`.red.inverse);
    process.exit(1);
  }
};

const DeleteData = async () => {
  try {
    await Order.deleteMany();
    await Product.deleteMany();
    await User.deleteMany();
    console.log("Data Deleted".red.inverse);
    process.exit();
  } catch (error) {
    console.error(`${error}`.red.inverse);
    process.exit(1);
  }
};

if (process.argv[2] === "-d") {
  DeleteData();
} else {
  importData();
}
I had the same issue. After some research, I found that the code is trying to execute Order.deleteMany() before the database is connected.
Just put an await before connectDB():
await connectDB();
const importData = async () => {
  try {
    await connectDB();
    await Category.collection.deleteMany({});
    await Category.insertMany(categoryData);
    console.log('Success ');
  } catch (error) {
    console.log('error in processing data ', error);
  }
};

importData();
Use the following code to connect to MongoDB:
const mongoose = require('mongoose')
mongoose.connect(url, { useNewUrlParser: true, useUnifiedTopology: true, useCreateIndex: true })
After that, you should remove the node_modules folder and the package-lock.json file, reinstall mongoose, and run npm update.
I have faced the same issue, but I managed to resolve it.
So in package.json, in the scripts section I've added:
"data:import": "node backend/seeder",
"data:destroy": "node backend/seeder -d",
Then:
npm run data:import
After trying a couple of times, I got the Data Imported message in the console.
Note: I haven't uninstalled any node_modules.
Add useFindAndModify: true in mongoose.connect(DB, {}), because by default it is false and you are trying to delete something from the database.
So, write it like this:
mongoose.connect(DB, {
  useNewUrlParser: true,
  useCreateIndex: true,
  useFindAndModify: true,
  useUnifiedTopology: true
})
You can move connectDB() into importData() and deleteData(), and add await before connectDB(), like this:
const importData = async () => {
  try {
    await connectDB();
    //...
  } catch (error) {
    console.log(error);
  }
};
and it worked for me.

import runs all code from module instead of just the imported function

This is my index.js file, located in the ./src directory:
import { MongoClient } from "mongodb";
import CharacterDAO from "./dao/character";
import GearDAO from "./dao/gear";
import { startServer } from "./server";
import { seedData } from "./dataSeed";
// connect mongoDb, seed data if needed, run fastify server
export const runServer = async ({ dbUrl, dbName, environment, port }) => {
  // test seed data when starting server if running a test suite
  if (environment === "test") {
    await seedData({
      hostUrl: dbUrl,
      databaseName: dbName
    });
  }
  await MongoClient.connect(dbUrl, {
    poolSize: 50,
    useNewUrlParser: true,
    useUnifiedTopology: true,
    wtimeout: 2500
  })
    .then(async conn => {
      const database = await conn.db(dbName);
      // inject database connection into DAO objects
      CharacterDAO.injectDB(database);
      GearDAO.injectDB(database);
      // start the fastify server
      startServer(port);
    })
    .catch(err => {
      console.log(err.stack);
      // process.exit(1);
    });
};

const serverArguments = process.argv.slice(2).map(arg => {
  return arg.split("=")[1];
});

const serverOptions = {
  dbUrl: serverArguments[0],
  dbName: serverArguments[1],
  environment: serverArguments[2],
  port: serverArguments[3]
};

runServer({
  ...serverOptions
});
jestconfig.json
{
  "transform": {
    "^.+\\.(t|j)sx?$": "ts-jest"
  },
  "testEnvironment": "node",
  "testRegex": "(/__tests__/.*|(\\.|/)(test|spec))\\.(jsx?|tsx?)$",
  "moduleFileExtensions": ["ts", "tsx", "js", "jsx", "json", "node"]
}
Test script from package.json used to run the test (db credentials are omitted)
"test": "dbUrl=mongodb+srv://sdaw-dsawdad-dsadsawd#cluster0-jopi5.mongodb.net dbName=untitled-combat-game-test environment=test port=4000 jest --config jestconfig.json"
My test file:
import { runServer } from "../index";
beforeAll(async () => {
  const serverOptions = {
    dbUrl: process.env.dbUrl,
    dbName: process.env.dbName,
    environment: process.env.environment,
    port: process.env.port
  };
  console.log(serverOptions);
  await runServer({
    ...serverOptions
  });
});

describe("mock test", () => {
  it("should run a basic test", () => {
    expect(true).toBe(true);
  });
});
What happens when I run the test:
the test script runs runServer
the index.js file runs runServer
This causes an invalid URI error (since the process.argv referenced in index.js does not include a valid MongoDB URI). I double-checked this by commenting out the runServer call at the bottom of my index.js file - and everything runs fine.
Moving the runServer function to a different file and importing it from there also solves the issue. So importing in both index.js and the test file does not result in multiple calls.
What am I doing wrong?
Importing/requiring a file evaluates the code inside of it (read: runs the code inside of it). You're not technically doing anything wrong, but for the purpose of your tests the code as you have written it won't work.
In your index.js file you are executing runServer(). Whenever that file is imported/required, that function call is also run.
Having a start.js file or similar which will actually start your server is a common pattern. This will help you avoid the issue you're experiencing.
I would split the definition of your server and the invocation of your server into two different files, say server.js and index.js. I will leave the fixing up of the imports to you, but this is the idea:
server.js
// connect mongoDb, seed data if needed, run fastify server
export const runServer = async ({ dbUrl, dbName, environment, port }) => {
  // test seed data when starting server if running a test suite
  if (environment === "test") {
    await seedData({
      hostUrl: dbUrl,
      databaseName: dbName
    });
  }
  await MongoClient.connect(dbUrl, {
    poolSize: 50,
    useNewUrlParser: true,
    useUnifiedTopology: true,
    wtimeout: 2500
  })
    .then(async conn => {
      const database = await conn.db(dbName);
      // inject database connection into DAO objects
      CharacterDAO.injectDB(database);
      GearDAO.injectDB(database);
      // start the fastify server
      startServer(port);
    })
    .catch(err => {
      console.log(err.stack);
      // process.exit(1);
    });
};
index.js
import { runServer } from './server';
const serverArguments = process.argv.slice(2).map(arg => {
  return arg.split("=")[1];
});

const serverOptions = {
  dbUrl: serverArguments[0],
  dbName: serverArguments[1],
  environment: serverArguments[2],
  port: serverArguments[3]
};

runServer({
  ...serverOptions
});
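As an alternative sketch (my own suggestion, not part of the split-file approach above): if you prefer to keep everything in one file, you can guard the invocation so it only runs when the file is executed directly. In CommonJS that check is require.main === module; for an ES module, one common approximation compares import.meta.url with the script path:

// only start the server when this file is run directly, not when imported by a test
import { pathToFileURL } from "url";

if (import.meta.url === pathToFileURL(process.argv[1]).href) {
  runServer({
    ...serverOptions
  });
}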

vsphere-connect lib of nodejs

I am trying to use the same vSphere client instance in many functions.
Here is the lib vsphere-connect
const vSphereClient = vSphereConnect.createClient({
  host: '',
  username: '',
  password: '',
  ignoreSSL: true,
  autoLogin: true,
  exclusive: true,
})
Currently I have this:
exports.getVCenterInfo = function getVCenterInfo(req, res) {
  // I would like to use the client instance here
}
I tried
vSphereClient.retrieve({ /* some code here */ })
in my function, but it doesn't work.
Can someone help me, please?
As I understand from the lib link, you can't use it like this: const vSphereClient = vSphereConnect.createClient({...}), because vSphereClient will hold a pending promise.
Instead, you could write a module named vsphereLib like this:
const connect = require('vsphere-connect');

let clientInstance;

connect.createClient({
  host: 'vcenter.mydomain.com',
  username: 'administrator@vsphere.local',
  password: 'vmware1',
  ignoreSSL: true,
  autoLogin: true
})
  .then(function (client) {
    clientInstance = client;
  });

module.exports = {
  getClient: () => clientInstance,
};
Usage will be:
const client = require('./vsphereLib').getClient;

exports.getVCenterInfo = function getVCenterInfo(req, res) {
  client()
    .retrieve({
      type: 'VirtualMachine',
      id: ['vm-1234', 'vm-5678'],
      properties: ['name', 'config.version']
    })
    .then(function (results) {
      console.log(results);
    })
    .caught(function (err) {
      console.log(err);
    });
}
The only problem is that if the connection fails, your code will fail as well, so maybe you should extend vsphereLib with an init function and run it before the server starts.
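A sketch of that idea (the init name and the bootstrap wiring are my assumptions):

const connect = require('vsphere-connect');

let clientInstance;

// resolves once the client is connected; call this before the server starts
const init = () =>
  connect.createClient({
    host: 'vcenter.mydomain.com',
    username: 'administrator@vsphere.local',
    password: 'vmware1',
    ignoreSSL: true,
    autoLogin: true
  })
    .then(function (client) {
      clientInstance = client;
      return client;
    });

module.exports = {
  init: init,
  getClient: () => clientInstance,
};

Then in your server bootstrap: require('./vsphereLib').init().then(() => { /* start the server */ });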

How to set another database for test on knexfile?

I have 2 databases; the first for development and production, and the second for tests.
knexfile.js:
const config = require('config');
const knexConfig = config.get('knexConfig');
module.exports = {
  development: {
    client: knexConfig.client,
    connection: {
      host: knexConfig.host,
      database: knexConfig.database,
      user: knexConfig.user,
      password: knexConfig.password,
    },
    pool: {
      min: knexConfig.min,
      max: knexConfig.max,
    },
  },
  test: {
    client: 'sqlite3',
    connection: {
      filename: './file.db',
    },
  },
};
route.test.js:
const Model = require('objection').Model;
const provider = require('../../../server/models/provider');
const Knex = require('knex');
const knexConfig = require('../../../knexfile');

const knex = Knex(knexConfig.test);
Model.knex(knex);

describe('Should test provider Model', () => {
  test('should return provider', () => {
    provider
      .query()
      .then((providers) => {
        expect(providers).toBe('array');
      });
  });
});
I got this error:
Test suite failed to run
ProcessTerminatedError: cancel after 2 retries!
at Farm.<anonymous> (node_modules/worker-farm/lib/farm.js:87:25)
at Array.forEach (<anonymous>)
at Farm.<anonymous> (node_modules/worker-farm/lib/farm.js:81:36)
at ontimeout (timers.js:469:11)
at tryOnTimeout (timers.js:304:5)
at Timer.listOnTimeout (timers.js:264:5)
A worker process has quit unexpectedly! Most likely this is an initialization error.
I want to connect to the sqlite database for tests, but I can't get that to work. How can I fix this?
Your config seems to be fine, since the following test works.
https://runkit.com/embed/3w0umojslatc
require('sqlite3');

var knex = require("knex")({
  client: 'sqlite3',
  connection: {
    filename: './db.db'
  }
});

const { Model } = require('objection');
Model.knex(knex);

await Model.query().select('1');
It looks like you are mixing BDD and TDD syntax, and the test is not waiting for the async result before returning.
Try this first:
const Model = require('objection').Model;
const provider = require('../../../server/models/provider');
const Knex = require('knex');
const knexConfig = require('../../../knexfile');

const knex = Knex(knexConfig.test);
Model.knex(knex);

describe('Should test provider Model', () => {
  it('should return provider', () => {
    return Model.query().from('providers')
      .then((providers) => {
        expect(providers).toBe('array');
      });
  });
});
If that works, then try to do the query with your own provider model, which you hadn't included in the question.
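As a side note, a common pattern (my own sketch, not something from the question) is to select the knexfile section from NODE_ENV so the test runner picks the sqlite config automatically:

// db.js — choose the config section by environment; defaults to development
const Knex = require('knex');
const knexConfig = require('../../../knexfile');

const knex = Knex(knexConfig[process.env.NODE_ENV || 'development']);

module.exports = knex;

Jest sets NODE_ENV to 'test' by default, so tests would use the sqlite config without any extra wiring.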
