How to use cls-hooked unmanaged transactions? - jestjs

I'm writing tests with Jest and Sequelize and I need to keep my database clean for every test, so I want to open a transaction for every test and then roll it back at the end.
This is what I've got, but it won't pass the transaction to my tests:
beforeEach(async () => {
  this.transaction = await db.sequelize.transaction();
});

test('Database should be clean', async () => {
  const role = await db.role.create({
    name: 'someName',
  });
  expect(role.id).toBe(1);
});

afterEach(async () => {
  await this.transaction.rollback();
});
Sequelize is already set up to use CLS:
const Sequelize = require('sequelize');
const config = require('../../config/config.js');
const cls = require('cls-hooked');
const namespace = cls.createNamespace('testing-namespace');
Sequelize.useCLS(namespace);
const sequelize = new Sequelize(config);
...
It would be really helpful if someone could explain to me how to use unmanaged transactions with cls-hooked.

I managed to keep my database clean by using umzug to run the seeds programmatically; this is my code:
const { db } = require('../models');
const Umzug = require('umzug');
const path = require('path');

const umzug = new Umzug({
  migrations: {
    path: path.join(__dirname, './../../seeders'),
    params: [db.sequelize.getQueryInterface()],
  },
  logging: false,
});

beforeEach(async () => {
  await umzug.down({ to: 0 });
  await umzug.up();
});
I tried to use umzug v3 (beta), but it didn't work, so I used the stable version 2. This approach isn't as performant as I'd like, but gets the job done.
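For the original transaction question: if I understand Sequelize's CLS integration correctly, it only propagates a transaction to queries made inside a managed transaction callback (sequelize.transaction(async () => { ... })); an unmanaged transaction opened in beforeEach still has to be passed explicitly to every query. A rollback-per-test sketch that stays within managed transactions might work instead; ROLLBACK and inRolledBackTransaction below are hypothetical helper names, and db is the CLS-enabled instance from the question:

// Sketch: run each test body inside a managed transaction and force a rollback.
// With Sequelize.useCLS enabled, queries inside the callback join the transaction
// automatically, so nothing the test creates gets committed.
const ROLLBACK = new Error('rollback-after-test'); // sentinel, hypothetical name

const inRolledBackTransaction = (testFn) =>
  db.sequelize
    .transaction(async () => {
      await testFn();
      throw ROLLBACK; // any throw makes Sequelize roll the transaction back
    })
    .catch((err) => {
      if (err !== ROLLBACK) throw err; // real test failures still surface
    });

test('Database should be clean', () =>
  inRolledBackTransaction(async () => {
    const role = await db.role.create({ name: 'someName' });
    // Auto-increment sequences are not rolled back, so don't assert id === 1.
    expect(role.id).toBeDefined();
  }));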

Related

node-media-server: session.reject() not working

I am trying to create an RTMP server with the npm package http://github.com/illuspas/Node-Media-Server. The server works fine, but I need to implement authentication in it. I am checking the authentication on the "prePublish" event: I query the database and retrieve the user, and if the user is found I want to let them stream, otherwise the session should be rejected. But the problem is that the session isn't rejected; instead it disconnects, the stream automatically reconnects, it disconnects again, and the loop goes on. How do I fix this problem?
Here is the code for the event:
const NodeMediaServer = require('node-media-server');
const config = require('./config').rtmp_server;
const db = require('./db');

const nms = new NodeMediaServer(config);

const getStreamKeyFromStreamPath = (path) => {
  const parts = path.split('/');
  return parts[parts.length - 1];
};

nms.on('prePublish', async (id, StreamPath, args) => {
  const session = nms.getSession(id);
  try {
    const streamKey = getStreamKeyFromStreamPath(StreamPath);
    const validStream = (
      await db.query('SELECT * FROM public."People" WHERE stream_key = $1', [streamKey])
    ).rows[0];
    console.log(validStream);

    if (validStream) {
      // do stuff
    } else {
      session.reject((reason) => {
        console.log(reason);
      });
    }

    console.log(
      '[NodeEvent on prePublish]',
      `id=${id} StreamPath=${StreamPath} args=${JSON.stringify(args)}`
    );
  } catch (err) {
    session.reject();
  }
});

module.exports = nms;
Here is the code of the entry point of the server:
require("dotenv").config();
const db = require("./db");
const nms = require("./nms");
// database connection
db.connect()
.then(() => {
console.log("Connected to database");
// start the rtmp server
nms.run();
})
.catch((err) => console.log(err.message));
Here is the db file:
const { Pool } = require('pg');

const connectionString = process.env.PG_CONNECTION_STRING;
const poolOptions = {
  host: process.env.PG_HOST,
  user: process.env.PG_USER,
  port: process.env.PG_PORT,
  password: process.env.PG_PASSWORD,
  database: process.env.PG_DATABASE,
};

// Pool expects a config object, so wrap the connection string instead of passing it directly
const pool = new Pool(
  process.env.NODE_ENV === 'production' ? { connectionString } : poolOptions
);

module.exports = pool;
What I have tried to solve the problem:
Instead of the async function, I tried handling the database query with a callback, but it didn't work.
Before, I was calling session.reject() with no arguments; now I am passing a callback to it, but the behavior is still the same.
If you have any solution for that, please let me know.
Thanks in advance
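For reference, the authentication example in node-media-server's README calls session.reject() with no arguments and then simply returns from the handler. A sketch of the prePublish handler restructured that way follows; I'm not certain this alone stops the reconnect loop, since encoders such as OBS retry rejected publishes on their own:

nms.on('prePublish', async (id, StreamPath, args) => {
  const session = nms.getSession(id);
  try {
    const streamKey = getStreamKeyFromStreamPath(StreamPath);
    const result = await db.query(
      'SELECT * FROM public."People" WHERE stream_key = $1',
      [streamKey]
    );
    if (!result.rows[0]) {
      session.reject(); // reject() takes no callback in the documented usage
      return;           // stop here so no further publish handling runs
    }
    console.log('[NodeEvent on prePublish]', `id=${id} StreamPath=${StreamPath}`);
  } catch (err) {
    console.error(err);
    session.reject();
  }
});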

How to properly connect to MongoDB using Cloud functions?

I would like to connect to my Atlas cluster only once per instance running Cloud Functions.
Here is my code for an instance:
const MongoClient = require("mongodb").MongoClient;

const client = new MongoClient("myUrl", {
  useNewUrlParser: true,
  useUnifiedTopology: true,
});

exports.myHttpMethod = functions
  .region("europe-west1")
  .runWith({
    memory: "128MB",
    timeoutSeconds: 20,
  })
  .https.onCall((data, context) => {
    console.log("Data is: ", data);
    client.connect(() => {
      const testCollection = client.db("myDB").collection("test");
      testCollection.insertOne(data);
    });
  });
I would like to avoid calling client.connect() on every function invocation, which seems like far too much. I would like to do something like this:
const MongoClient = require("mongodb").MongoClient;

const client = await MongoClient.connect("myUrl", {
  useNewUrlParser: true,
  useUnifiedTopology: true,
});
const db = client.db("myDB");

exports.myHttpMethod = functions
  .region("europe-west1")
  .runWith({
    memory: "128MB",
    timeoutSeconds: 20,
  })
  .https.onCall((data, context) => {
    console.log("Data is: ", data);
    const testCollection = db.collection("test");
    testCollection.insertOne(data);
  });
But I can't use await at the top level like this.
In my AWS Lambda functions (written in Python) I don't have this issue and I'm able to connect only once per instance, so I guess there is an equivalent, but I don't know much JS / Node.js.
You can store your database client as a global variable. From the documentation,
Cloud Functions often recycles the execution environment of a previous invocation. If you declare a variable in global scope, its value can be reused in subsequent invocations without having to be recomputed.
Try refactoring the code as shown below:
import * as functions from "firebase-functions";
import { MongoClient } from "mongodb";

let client: MongoClient | null;

const getClient = async () => {
  if (!client) {
    const mClient = new MongoClient("[MONGODB_URI]", {});
    client = await mClient.connect();
    functions.logger.log("Connected to MongoDB");
  } else {
    functions.logger.log("Using existing MongoDB connection");
  }
  functions.logger.log("Returning client");
  return client;
};

export const helloWorld = functions.https.onRequest(
  async (request, response) => {
    const db = (await getClient()).db("[DATABASE]");
    const result = await db.collection("[COLLECTION]").findOne({});
    response.send("Hello from Firebase!");
  }
);
This should reuse the connection for that instance.
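Applied to the original onCall function from the question (back in plain JavaScript, reusing the getClient() helper from the answer), the handler would also await the insert so the invocation isn't terminated before the write completes. A sketch:

const functions = require("firebase-functions");
// getClient() is assumed to be the cached-connection helper shown above

exports.myHttpMethod = functions
  .region("europe-west1")
  .runWith({ memory: "128MB", timeoutSeconds: 20 })
  .https.onCall(async (data, context) => {
    const db = (await getClient()).db("myDB");
    // Await the write so the function doesn't finish before it completes
    await db.collection("test").insertOne(data);
    return { inserted: true };
  });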

Loopback get model from datasource with discoverSchemas

I've been testing LoopBack for a couple of hours now and everything works fine when I create models manually and edit the generated model.json to match my Oracle DB column names.
But I'm getting stuck when I want to discover a model from my Oracle DB, to avoid writing the 50 columns by hand.
I made a test with a table called "atest" that contains the columns "name" and "id".
It creates atest.json and adds this to model-config.json:
"Atest": {
"dataSource": "oracledb",
"public": true
}
But my atest.json contains nothing but "undefined".
My discover-models.js file:
'use strict';

const loopback = require('loopback');
const promisify = require('util').promisify;
const fs = require('fs');
const writeFile = promisify(fs.writeFile);
const readFile = promisify(fs.readFile);
const mkdirp = promisify(require('mkdirp'));

const DATASOURCE_NAME = 'oracledb';
const dataSourceConfig = require('./server/datasources.json');
const db = new loopback.DataSource(dataSourceConfig[DATASOURCE_NAME]);

discover().then(
  success => process.exit(),
  error => { console.error('UNHANDLED ERROR:\n', error); process.exit(1); },
);

async function discover() {
  // It's important to pass the same "options" object to all calls
  // of dataSource.discoverSchemas(); it allows the method to cache
  // discovered related models
  const options = { relations: false };

  // Discover models and relations
  const atestSchemas = await db.discoverSchemas('ATEST', options);

  // Create model definition files
  await mkdirp('common/models');
  const response = await writeFile(
    'common/models/atest.json',
    JSON.stringify(atestSchemas['ATEST'], null, 2)
  );
  console.log(response);

  // Expose models via REST API
  const configJson = await readFile('server/model-config.json', 'utf-8');
  console.log('MODEL CONFIG', configJson);
  const config = JSON.parse(configJson);
  config.Atest = { dataSource: DATASOURCE_NAME, public: true };
  await writeFile(
    'server/model-config.json',
    JSON.stringify(config, null, 2)
  );
}
My Oracle connection is working fine, so I don't get it. Any idea?
Add a console.log after you invoke discoverSchemas:
// Discover models and relations
const atestSchemas = await db.discoverSchemas('ATEST', options);
console.log(atestSchemas);
You should see that the key is not just 'ATEST', as referenced later with atestSchemas['ATEST']. The key is 'SCHEMA_NAME.ATEST' (SCHEMA_NAME will vary as per your environment).
If you target the appropriate key, you should get what you're looking for.
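As a concrete sketch, the write in discover() could key off whatever discoverSchemas actually returned instead of hard-coding 'ATEST' (assuming the call yields a single top-level entry):

const atestSchemas = await db.discoverSchemas('ATEST', options);

// Use the qualified key (e.g. 'MYSCHEMA.ATEST') that discoverSchemas actually returned
const [qualifiedKey] = Object.keys(atestSchemas);
await writeFile(
  'common/models/atest.json',
  JSON.stringify(atestSchemas[qualifiedKey], null, 2)
);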

Add a global plugin for Mongoose at runtime

I am currently trying to attach a global Mongoose plugin at runtime, with no luck. My plugin requires a few dependencies and options generated while my app bootstraps, so I need to add it sequentially. Mongoose seems to ignore everything wrapped within a closure.
const mongoose = require('mongoose');

const config = {};
const { DB_CONNECT } = process.env;

const myPlugin = schema => {
  console.log('done'); // this line is not logged at all
  schema.methods.mymethod = () => {};
};

const connectAndAddPlugins = async () => {
  await mongoose.connect(
    DB_CONNECT,
    { ...config }
  );
  mongoose.plugin(myPlugin);
};

connectAndAddPlugins();
Any help will be highly appreciated.
Apparently, once a model has been compiled and loaded, Mongoose no longer attaches global plugins to it, so models should be registered after the plugin is added:
const mongoose = require('mongoose');

const config = {};
const { DB_CONNECT } = process.env;

const myPlugin = schema => {
  console.log('done'); // logged once the plugin is registered before the model is compiled
  schema.methods.mymethod = () => {};
};

const connectAndAddPlugins = async () => {
  await mongoose.connect(
    DB_CONNECT,
    { ...config }
  );
  mongoose.plugin(myPlugin);
};

const loadModels = () => {
  const Cat = mongoose.model('Cat', { name: String });
};

// Register the global plugin first, then compile the models so they pick it up
connectAndAddPlugins().then(loadModels);

Custom service in feathersjs

I'm trying to write a custom service, but it doesn't work at all. I post a request that should run two update queries on the collection, but nothing happens.
This is my code:
// Initializes the `bedrijven` service on path `/bedrijven`
const createService = require('feathers-mongoose');
const createModel = require('../../models/bedrijven.model');
const hooks = require('./bedrijven.hooks');
const filters = require('./bedrijven.filters');

module.exports = function () {
  const app = this;
  const Model = createModel(app);
  const paginate = app.get('paginate');

  const options = {
    name: 'bedrijven',
    Model,
    paginate
  };

  // Initialize our service with any options it requires
  app.post('/bedrijven/setfavo', function (req, res) {
    Promise.all([
      app.service('bedrijven').update({
        owner: req.body.userid
      }, {
        favo: false
      }),
      app.service('bedrijven').update(req.body._id, {
        favo: true
      })
    ]);
  });

  app.use('/bedrijven', createService(options));

  // Get our initialized service so that we can register hooks and filters
  const service = app.service('bedrijven');
  service.hooks(hooks);

  if (service.filter) {
    service.filter(filters);
  }
};
Make sure this file is included in your main app.js file.
Something like:
const bedrijven = require('./bedrijven/bedrijven.service.js');
app.configure(bedrijven);
Is there a reason you don't want to use feathers generate service? It would take care of these questions for you.
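If you do keep the custom route, here is a sketch of how it could be rewritten so the request actually sends a response and the service calls follow the (id, data, params) signature. Using patch with a null id and a query is the documented way to change multiple records (multi-record patching may need to be enabled on the service); the setfavo path and field names are taken from your question:

app.post('/bedrijven/setfavo', async (req, res, next) => {
  try {
    // Clear the current favourite(s) for this owner, then set the new one
    await app.service('bedrijven').patch(null, { favo: false }, {
      query: { owner: req.body.userid }
    });
    await app.service('bedrijven').patch(req.body._id, { favo: true });
    res.json({ ok: true });
  } catch (err) {
    next(err);
  }
});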
