How to insert initial data using sequelize migrations/seeds? - node.js

I'm trying to create my initial migration to populate the test database but I can't get it working. This is what I have in my migration:
'use strict';
module.exports = {
  up: function (queryInterface, Sequelize) {
    return [
      queryInterface.bulkInsert('Users', [
        { username: "user1" },
        { username: "user2" }
      ])];
  },
  down: function (queryInterface, Sequelize) {
    return queryInterface.dropTable('Users');
  }
};
And I get this error:
== 20151024144833-create-conjugation: migrating =======
{ [SequelizeUniqueConstraintError: Validation error]
name: 'SequelizeUniqueConstraintError',
message: 'Validation error',
errors: [],
fields: [] }
There must be an easier way to do this. I've checked other SO questions, but the syntax has changed in the current version of sequelize.
UPDATE
Ok, I realized my mistake: I was assuming that sequelize would take care of the timestamps. This fixes the problem:
up: function (queryInterface, Sequelize) {
  return [
    queryInterface.bulkInsert('Users', [
      { username: "user1", createdAt: Date.now(), updatedAt: Date.now() },
      { username: "user2", createdAt: Date.now(), updatedAt: Date.now() }
    ])
  ];
}
But I'm still wondering if this is the right way to seed my database. Is there a way to do it using User.create({})?

new Date() is also required for MySQL, e.g.:
return queryInterface.bulkInsert('users', [
  {
    "Forename": "A",
    "Surname": "User",
    "UserType": "1",
    "Email": "auser#gmail.com",
    "Password": "password",
    "LastLogin": 0,
    "Tokens": JSON.stringify({"tokens": []}),
    "CreatedBy": 999,
    "CreatedAt": new Date(),
    "UpdatedAt": new Date()
  }]);

You can use the following:
const City = sequelize.define('city', {
  name: { type: Sequelize.STRING },
  order_: { type: Sequelize.INTEGER }
});
City.sync().then(() => {
  City.create({
    name: 'Neuquen',
    order_: 0
  });
  City.create({
    name: 'General Roca',
    order_: 1
  });
});
Or read about "migrations" at http://docs.sequelizejs.com/en/latest/docs/migrations/
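To answer the User.create({}) part of the question: you can also require the model inside a seeder and go through it instead of queryInterface. A minimal sketch, assuming your models are exported from ../models and the seeder is run with sequelize-cli (adjust the path and model name to your project):
'use strict';
// Hypothetical seeder that goes through the model rather than queryInterface.
const { User } = require('../models');

module.exports = {
  up: function () {
    // bulkCreate fills createdAt/updatedAt automatically, unlike queryInterface.bulkInsert
    return User.bulkCreate([
      { username: 'user1' },
      { username: 'user2' }
    ]);
  },
  down: function () {
    return User.destroy({ where: { username: ['user1', 'user2'] } });
  }
};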

An alternative could be to use sequelize-fixtures: you can initialize your tables with default data declared in a JSON file or another format.
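For what it's worth, sequelize-fixtures usage looks roughly like this (a sketch only; the fixture file name and models path are placeholders, check the library's README for details):
const fixtures = require('sequelize-fixtures');
const models = require('./models');

// fixtures/users.json would contain entries such as:
// [{ "model": "User", "data": { "username": "user1" } }]
fixtures.loadFile('fixtures/users.json', models)
  .then(() => console.log('fixtures loaded'));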

For a quick and easy way (without seeds or migrations) on sequelize v6:
I modified my sequelize.sync() call:
import { Sequelize } from 'sequelize';
// This will create an in-memory sqlite db
const sequelize = new Sequelize('sqlite::memory:', {
  logging: sequelizeLogger
});
await sequelize
  .sync({ force: true })
  .then(() => {
    // seed db
    Users.create({ username: 'user1' })
  });
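If the seeded row has to exist before the first request is handled, the create can be awaited as well; a small variation on the snippet above (Users stands for whatever model is defined on this sequelize instance):
await sequelize.sync({ force: true });
// seed db and wait for the row to actually be written
await Users.create({ username: 'user1' });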

Related

NodeJS + Sequelize / How to perform triple model query?

I come from this topic:
NodeJS & Sequelize: How can I join 3 models?
I'm coming to you now because I need someone who can teach me how to get started with junction models in NodeJS and Sequelize.
What I'm trying to do: I have 2 main models, for example Employee and Office, and a 3rd model called EmployeeOffice that connects the two main models and has its own field called "employee_chair". When I call a method such as Office.findAll, this is what I get:
{
"id": 1,
"office_name": "Mars"
"office_color": "Red",
"createdAt": "2021-09-30T18:53:38.000Z",
"updatedAt": "2021-09-30T18:53:38.000Z",
"employees": [
{
"id": 1,
"employee_name": "Albert",
"employee_mail": "qalbert443#gmail.com",
"createdAt": "2021-09-30T18:53:45.000Z",
"updatedAt": "2021-09-30T18:53:45.000Z",
"employee_office": {
"createdAt": "2021-09-30T18:53:45.000Z",
"updatedAt": "2021-09-30T18:53:45.000Z"
}
}
]
}
NOW. What I need is for my own model to be used instead of the junction table that is created automatically. I can join the 3 tables, but the 3rd table has the "employee_chair" field that I mentioned earlier. The desired response would look like this:
{
"id": 1,
"office_name": "Mars"
"office_color": "Red",
"createdAt": "2021-09-30T18:53:38.000Z",
"updatedAt": "2021-09-30T18:53:38.000Z",
"employees": [
{
"id": 1,
"employee_name": "Albert",
"employee_mail": "qalbert443#gmail.com",
"createdAt": "2021-09-30T18:53:45.000Z",
"updatedAt": "2021-09-30T18:53:45.000Z",
"employee_office": {
"employee_chair": 3,
"createdAt": "2021-09-30T18:53:45.000Z",
"updatedAt": "2021-09-30T18:53:45.000Z"
}
}
]
}
How can I make (or force) Sequelize to build the relations through my model and not through the automatically created table?
Hope you guys can help me; I'm stuck and I don't know where else to ask.
@cupid22 Here is my index.js, my UserProject model, and the controller function I'm calling:
index.js:
const dbConfig = require("../config/db.config.js");
const Sequelize = require("sequelize");
const sequelize = new Sequelize(dbConfig.DB, dbConfig.USER, dbConfig.PASSWORD, {
  host: dbConfig.HOST,
  dialect: dbConfig.dialect,
  operatorsAliases: false,
  pool: {
    max: dbConfig.pool.max,
    min: dbConfig.pool.min,
    acquire: dbConfig.pool.acquire,
    idle: dbConfig.pool.idle
  }
});
const db = {};
db.Sequelize = Sequelize;
db.sequelize = sequelize;
db.projects = require("./project.model.js")(sequelize, Sequelize);
db.users = require("./user.model.js")(sequelize, Sequelize);
db.projects.belongsToMany(db.users, {
  through: "users_projects",
  as: "users",
  foreignKey: "user_id",
});
db.users.belongsToMany(db.projects, {
  through: "users_projects",
  as: "projects",
  foreignKey: "project_id",
});
module.exports = db;
Controller function:
// Retrieve all Projects from the database.
exports.findAll = (req, res) => {
  const title = req.query.title;
  var condition = title ? { title: { [Op.like]: `%${title}%` } } : null;
  Project.findAll({
    include: [
      {
        model: User,
        as: "users",
      }]
  })
    .then(data => {
      res.send(data);
    })
    .catch(err => {
      res.status(500).send({
        message:
          err.message || "Some error occurred while retrieving projects."
      });
    });
};
UserProject Model:
module.exports = (sequelize, Sequelize) => {
  const UserProject = sequelize.define('user_project', {
    user_id: {
      type: Sequelize.INTEGER,
      references: {
        model: 'user',
        key: 'id'
      }
    },
    project_id: {
      type: Sequelize.INTEGER,
      references: {
        model: 'project',
        key: 'id'
      }
    },
    user_type: {
      type: Sequelize.INTEGER,
    }
  }, {
    timestamps: false,
    tableName: 'users_projects'
  });
  db.users.belongsToMany(db.projects, { through: User_Profile });
  db.projects.belongsToMany(db.users, { through: User_Profile });
  return UserProject;
};
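For reference, the usual way to get the junction model's own columns back in the include is to pass the model itself to through instead of the table-name string, and then request its attributes in the query. This is only a sketch against the models above (the file name and the foreign keys in particular may need adjusting), not a drop-in fix:
// index.js: register the junction model and associate through it
db.userProjects = require("./userproject.model.js")(sequelize, Sequelize);

db.projects.belongsToMany(db.users, {
  through: db.userProjects, // the model, not the "users_projects" string
  as: "users",
  foreignKey: "project_id",
});
db.users.belongsToMany(db.projects, {
  through: db.userProjects,
  as: "projects",
  foreignKey: "user_id",
});

// controller: ask for the junction attributes explicitly
Project.findAll({
  include: [{ model: User, as: "users", through: { attributes: ["user_type"] } }]
});
With through pointing at the model, the duplicated belongsToMany calls inside the model file itself should no longer be needed.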

mongoose delete from array

I need to remove the user's id from all objects in the collection except the one that was passed; in my example that is value: 'Тата'. Can you tell me how to write such a request?
console.log(result)
[
{
_id: 5fa702b2f18e5723b4c00d9f,
value: 'Тата',
vote: { '36e7da32-f818-4771-bb5e-1807b2954b5f': [Array] },
date: 2020-11-07T20:25:22.611Z,
__v: 0
}
]
console.log(req.body)
{ value: 'Тата', habalkaId: '36e7da32-f818-4771-bb5e-1807b2954b5f' }
console.log(req.user._id)
5f63a251f17f1f38bc92bdab
This is all I could do so far, just the find:
router.post('/', passport.authenticate('jwt', {session: false}), (req, res) => {
  FirstName.find({value: req.body.value})
    .then(result => {
      if (result.length) {
        console.log(result)
        console.log(req.body)
        console.log(req.user._id)
        FirstName.find({value: {$ne: 'Слоник'}}, function (err, arr) {
          arr.map(e => {
            if (e.vote[req.body.habalkaId].length) {
              if (e.vote[req.body.habalkaId].includes(String(req.user._id))) {
                console.log(e.vote[req.body.habalkaId])
              }
            }
          })
        })
      } else {
        new FirstName({
          value: req.body.value,
          vote: {[req.body.habalkaId]: [String(req.user._id)]}
        }).save();
      }
    })
  // res.json({res: req.body})
})
FirstName.js
const mongoose = require('mongoose');
const Schema = mongoose.Schema;
// Create Schema
const FirstNameSchema = new Schema({
  value: {
    type: String
  },
  vote: {
    type: Object
  },
  date: {
    type: Date,
    default: Date.now
  }
});
module.exports = FirstName = mongoose.model('firstname', FirstNameSchema);
If I've understood correctly, you want something like this:
db.collection.update({
  "value": {
    "$ne": "tata"
  }
},
{
  "$pull": {
    "vote.array_name": "id_value"
  }
},
{
  multi: true
})
First of all, find all documents whose value does not match the given one. Then, for each document found, delete the user's id from the array, using $pull where the given id matches.
Example here
Please check the playground and verify that I've used the correct schema and that it shows the expected output.
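Translated to Mongoose against the schema in the question, that would look roughly like this (the computed key assumes vote is keyed by habalkaId, as in the posted documents):
// Sketch only: pull the current user's id from every document except the one that was voted for
FirstName.updateMany(
  { value: { $ne: req.body.value } },
  { $pull: { [`vote.${req.body.habalkaId}`]: String(req.user._id) } }
).then(result => console.log(result));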

Cannot read property 'ClientSession' of undefined [duplicate]

I am using MongoDB Atlas cloud (https://cloud.mongodb.com/) and the Mongoose library.
I tried to create multiple documents using the transaction concept, but it is not working.
I am not getting any error, but it seems the rollback is not working properly.
app.js
//*** more code here
var app = express();
require('./models/db');
//*** more code here
models/db.js
var mongoose = require( 'mongoose' );
// Build the connection string
var dbURI = 'mongodb+srv://mydb:pass#cluster0-****.mongodb.net/mydb?retryWrites=true';
// Create the database connection
mongoose.connect(dbURI, {
  useCreateIndex: true,
  useNewUrlParser: true,
});
// Get Mongoose to use the global promise library
mongoose.Promise = global.Promise;
models/user.js
const mongoose = require("mongoose");
const UserSchema = new mongoose.Schema({
userName: {
type: String,
required: true
},
pass: {
type: String,
select: false
}
});
module.exports = mongoose.model("User", UserSchema, "user");
myroute.js
const db = require("mongoose");
const User = require("./models/user");
router.post("/addusers", async (req, res, next) => {
const SESSION = await db.startSession();
await SESSION.startTransaction();
try {
const newUser = new User({
//*** data for user ***
});
await newUser.save();
//*** for test purpose, trigger some error ***
throw new Error("some error");
await SESSION.commitTransaction();
//*** return data
} catch (error) {
await SESSION.abortTransaction();
} finally {
SESSION.endSession();
}
});
The above code runs without error, but it still creates the user in the DB. It is supposed to roll back the created user, and the collection should be empty.
I don't know what I have missed here. Can anyone please let me know what's wrong?
app, models, schema and router are in different files.
You need to include the session within the options for all read/write operations which are active during a transaction. Only then are they actually applied to the transaction scope where you are able to roll them back.
As a bit more complete listing, and just using the more classic Order/OrderItems modelling which should be pretty familiar to most people with some relational transactions experience:
const { Schema } = mongoose = require('mongoose');
// URI including the name of the replicaSet connecting to
const uri = 'mongodb://localhost:27017/trandemo?replicaSet=fresh';
const opts = { useNewUrlParser: true };
// sensible defaults
mongoose.Promise = global.Promise;
mongoose.set('debug', true);
mongoose.set('useFindAndModify', false);
mongoose.set('useCreateIndex', true);
// schema defs
const orderSchema = new Schema({
  name: String
});
const orderItemsSchema = new Schema({
  order: { type: Schema.Types.ObjectId, ref: 'Order' },
  itemName: String,
  price: Number
});
const Order = mongoose.model('Order', orderSchema);
const OrderItems = mongoose.model('OrderItems', orderItemsSchema);
// log helper
const log = data => console.log(JSON.stringify(data, undefined, 2));
// main
(async function() {
  try {
    const conn = await mongoose.connect(uri, opts);
    // clean models
    await Promise.all(
      Object.entries(conn.models).map(([k,m]) => m.deleteMany())
    )
    let session = await conn.startSession();
    session.startTransaction();
    // Collections must exist in transactions
    await Promise.all(
      Object.entries(conn.models).map(([k,m]) => m.createCollection())
    );
    let [order, other] = await Order.insertMany([
      { name: 'Bill' },
      { name: 'Ted' }
    ], { session });
    let fred = new Order({ name: 'Fred' });
    await fred.save({ session });
    let items = await OrderItems.insertMany(
      [
        { order: order._id, itemName: 'Cheese', price: 1 },
        { order: order._id, itemName: 'Bread', price: 2 },
        { order: order._id, itemName: 'Milk', price: 3 }
      ],
      { session }
    );
    // update an item
    let result1 = await OrderItems.updateOne(
      { order: order._id, itemName: 'Milk' },
      { $inc: { price: 1 } },
      { session }
    );
    log(result1);
    // commit
    await session.commitTransaction();
    // start another
    session.startTransaction();
    // Update and abort
    let result2 = await OrderItems.findOneAndUpdate(
      { order: order._id, itemName: 'Milk' },
      { $inc: { price: 1 } },
      { 'new': true, session }
    );
    log(result2);
    await session.abortTransaction();
    /*
     * $lookup join - expect Milk to be price: 4
     *
     */
    let joined = await Order.aggregate([
      { '$match': { _id: order._id } },
      { '$lookup': {
        'from': OrderItems.collection.name,
        'foreignField': 'order',
        'localField': '_id',
        'as': 'orderitems'
      }}
    ]);
    log(joined);
  } catch(e) {
    console.error(e)
  } finally {
    mongoose.disconnect()
  }
})()
So I would generally recommend calling the variable session in lowercase, since this is the name of the key for the "options" object where it is required on all operations. Keeping this in the lowercase convention allows for using things like the ES6 Object assignment as well:
const conn = await mongoose.connect(uri, opts);
...
let session = await conn.startSession();
session.startTransaction();
Also the mongoose documentation on transactions is a little misleading, or at least it could be more descriptive. What it refers to as db in the examples is actually the Mongoose Connection instance, and not the underlying Db or even the mongoose global import as some may misinterpret this. Note in the listing and above excerpt this is obtained from mongoose.connect() and should be kept within your code as something you can access from a shared import.
Alternately you can even grab this in modular code via the mongoose.connection property, at any time after a connection has been established. This is usually safe inside things such as server route handlers and the like since there will be a database connection by the time that code is called.
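In other words, something along these lines (a sketch only), whichever handle on the connection you have available:
// After connecting, either of these works for starting a session:
const conn = await mongoose.connect(uri, opts);
let session = await conn.startSession();
// ...or, inside route handlers once a connection already exists:
let session2 = await mongoose.connection.startSession();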
The code also demonstrates the session usage in the different model methods:
let [order, other] = await Order.insertMany([
  { name: 'Bill' },
  { name: 'Ted' }
], { session });
let fred = new Order({ name: 'Fred' });
await fred.save({ session });
All the find() based methods, as well as the update(), insert() and delete() based methods, have a final "options block" where this session key and value are expected. The save() method's only argument is this options block. This is what tells MongoDB to apply these actions to the current transaction on that referenced session.
In much the same way, before a transaction is committed any requests for a find() or similar which do not specify that session option do not see the state of the data whilst that transaction is in progress. The modified data state is only available to other operations once the transaction completes. Note this has effects on writes as covered in the documentation.
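A tiny illustration of that visibility rule, reusing the models from the listing (sketch only):
// While the transaction is open, only reads on the same session see the uncommitted write
let insideTxn = await OrderItems.findOne({ itemName: 'Milk' }).session(session); // sees the $inc
let outsideTxn = await OrderItems.findOne({ itemName: 'Milk' });                 // still sees the old price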
When an "abort" is issued:
// Update and abort
let result2 = await OrderItems.findOneAndUpdate(
  { order: order._id, itemName: 'Milk' },
  { $inc: { price: 1 } },
  { 'new': true, session }
);
log(result2);
await session.abortTransaction();
Any operations on the active transaction are removed from state and are not applied. As such they are not visible to resulting operations afterwards. In the example here the value in the document is incremented and will show a retrieved value of 5 on the current session. However after session.abortTransaction() the previous state of the document is reverted. Note that any global context which was not reading data on the same session, does not see that state change unless committed.
That should give the general overview. There is more complexity that can be added to handle varying levels of write failure and retries, but that is already extensively covered in documentation and many samples, or can be answered to a more specific question.
Output
For reference, the output of the included listing is shown here:
Mongoose: orders.deleteMany({}, {})
Mongoose: orderitems.deleteMany({}, {})
Mongoose: orders.insertMany([ { _id: 5bf775986c7c1a61d12137dd, name: 'Bill', __v: 0 }, { _id: 5bf775986c7c1a61d12137de, name: 'Ted', __v: 0 } ], { session: ClientSession("80f827fe077044c8b6c0547b34605cb2") })
Mongoose: orders.insertOne({ _id: ObjectId("5bf775986c7c1a61d12137df"), name: 'Fred', __v: 0 }, { session: ClientSession("80f827fe077044c8b6c0547b34605cb2") })
Mongoose: orderitems.insertMany([ { _id: 5bf775986c7c1a61d12137e0, order: 5bf775986c7c1a61d12137dd, itemName: 'Cheese', price: 1, __v: 0 }, { _id: 5bf775986c7c1a61d12137e1, order: 5bf775986c7c1a61d12137dd, itemName: 'Bread', price: 2, __v: 0 }, { _id: 5bf775986c7c1a61d12137e2, order: 5bf775986c7c1a61d12137dd, itemName: 'Milk', price: 3, __v: 0 } ], { session: ClientSession("80f827fe077044c8b6c0547b34605cb2") })
Mongoose: orderitems.updateOne({ order: ObjectId("5bf775986c7c1a61d12137dd"), itemName: 'Milk' }, { '$inc': { price: 1 } }, { session: ClientSession("80f827fe077044c8b6c0547b34605cb2") })
{
"n": 1,
"nModified": 1,
"opTime": {
"ts": "6626894672394452998",
"t": 139
},
"electionId": "7fffffff000000000000008b",
"ok": 1,
"operationTime": "6626894672394452998",
"$clusterTime": {
"clusterTime": "6626894672394452998",
"signature": {
"hash": "AAAAAAAAAAAAAAAAAAAAAAAAAAA=",
"keyId": 0
}
}
}
Mongoose: orderitems.findOneAndUpdate({ order: ObjectId("5bf775986c7c1a61d12137dd"), itemName: 'Milk' }, { '$inc': { price: 1 } }, { session: ClientSession("80f827fe077044c8b6c0547b34605cb2"), upsert: false, remove: false, projection: {}, returnOriginal: false })
{
"_id": "5bf775986c7c1a61d12137e2",
"order": "5bf775986c7c1a61d12137dd",
"itemName": "Milk",
"price": 5,
"__v": 0
}
Mongoose: orders.aggregate([ { '$match': { _id: 5bf775986c7c1a61d12137dd } }, { '$lookup': { from: 'orderitems', foreignField: 'order', localField: '_id', as: 'orderitems' } } ], {})
[
{
"_id": "5bf775986c7c1a61d12137dd",
"name": "Bill",
"__v": 0,
"orderitems": [
{
"_id": "5bf775986c7c1a61d12137e0",
"order": "5bf775986c7c1a61d12137dd",
"itemName": "Cheese",
"price": 1,
"__v": 0
},
{
"_id": "5bf775986c7c1a61d12137e1",
"order": "5bf775986c7c1a61d12137dd",
"itemName": "Bread",
"price": 2,
"__v": 0
},
{
"_id": "5bf775986c7c1a61d12137e2",
"order": "5bf775986c7c1a61d12137dd",
"itemName": "Milk",
"price": 4,
"__v": 0
}
]
}
]
I think this is the quickest way to start performing transactions with Mongoose:
const mongoose = require("mongoose");
// starting session on mongoose default connection
const session = await mongoose.startSession();
mongoose.connection.transaction(async function executor(session) {
try {
// creating 3 collections in isolation with atomicity
const price = new Price(priceSchema);
const variant = new Variant(variantSchema);
const item = new Item(itemSchema);
await price.save({ session });
await variant.save({ session });
// throw new Error("opps some error in transaction");
return await item.save({ session });
} catch (err) {
console.log(err);
}
});

Mongoose $push keeps adding two entries

Here are my user and product schemas:
const productSchema = new Schema({
  //...
  addedBy: {
    type: mongoose.Schema.Types.ObjectId,
    ref: "users"
  }
});
const userSchema = new Schema({
  //...
  addedItems: [{
    type: mongoose.Schema.ObjectId,
    ref: "products"
  }]
});
mongoose.model("products", productSchema);
mongoose.model("users", userSchema);
In my Node back end route I do this query:
User.findOneAndUpdate(
  { _id: req.body.id },
  { $push: { addedItems: newProduct._id } },
  { upsert: true, new: true },
  function(err, doc) {
    console.log(err, doc);
  }
);
The console.log prints out this:
{
//...
addedItems: [ 5ab0223118599214f4dd7803 ]
}
Everything looks good. Then I go to actually look at the data using the front-end website for my Mongo DB (I'm using mlab.com), and this is what it shows:
{
//...
"addedItems": [
{
"$oid": "5ab0223118599214f4dd7803"
},
{
"$oid": "5ab0223118599214f4dd7803"
}
]
}
Question: What the heck happened? Why does it add an additional entry into addedItems ?! Even though my console.log only showed one.
Note:
I tested to see if the backend route was being called more than once. It is not.
It seems to be a problem with $push because if I just have { addedItems: newProduct._id } then only one entry goes in, but it overwrites the entire array.
Edit:
Made a test project to produce the same results: https://github.com/philliprognerud/test-mcve-stackoverflow
Can anyone figure out what's going on?
The problem is caused by your mixed use of promises (via async/await) and callbacks with the findOneAndUpdate call, which ends up executing the command twice.
To fix the problem:
const updatedUser = await User.findOneAndUpdate(
  { id: userID },
  { $push: { addedItems: newProduct.id } },
  { upsert: true, new: true }
);
console.log(updatedUser);
Future readers note that the use of await isn't shown here in the question, but is in the MCVE.
I was facing a similar issue and landed on this page. I find that the previous answer is not very descriptive, so I am posting this:
export const updateUserHandler = async (req, res) => {
  const request = req.body;
  await User.findOneAndUpdate( //<== remove await
    { _id: request.id },
    { $push: { addedItems: newProduct._id } },
    { upsert: true, new: true },
    (findErr, findRes) => {
      if (findErr) {
        res.status(500).send({
          message: 'Failed: to update user',
          IsSuccess: false,
          result: findErr
        });
      } else {
        res.status(200).send({
          message: 'Success: to update user',
          IsSuccess: true,
          result: findRes
        });
      }
    }
  );
}
Here there are two asynchronous executions: the await and the callback. Because of this, two entries end up in the document. Just remove the await from await User.findOneAndUpdate and it will work perfectly.
Thanks!!
When you await a Query you are using its promise-like interface, specifically the .then() and .catch() of Query. Passing a callback as well will result in the behavior you're describing: awaiting the Query and triggering its .then() simultaneously makes the query execute twice.
use:
await Model.findOneAndUpdate(query, doc, options)
OR
Model.findOneAndUpdate(query, doc, options, callback)
This code kept adding two entries with $push:
const ali = { "_id": "5eaa39a18e7719140e3f4430" };
// return await customerModel.findOneAndUpdate(
//   ali,
//   {
//     "$push": {
//       "address": [objAdr],
//     },
//   },
//   function (error: any, success: any) {
//     if (error) {
//       console.log(error);
//     } else {
//       console.log(success);
//     }
//   }
// );
My solution that works correctly:
return await customerModel
  .findOneAndUpdate(
    { _id: ids },
    { $push: { "address": objAdr } }
  )
  .catch((err: string | undefined) => new Error(err));

sequelize seed unable to insert the data in SQLite

I am trying to insert dummy data using this sequelize-cli command:
sequelize db:seed --seed seeders/20170212081140-subject_tags.js
Here is my config file:
{
  "development": {
    "username": "root",
    "password": null,
    "database": "database_development",
    "host": "127.0.0.1",
    "dialect": "sqlite",
    "seederStorage": "sequelize",
    "storage": "./test"
  }
}
and here is my seeder file:
'use strict';
module.exports = {
  up: function(queryInterface, Sequelize) {
    return
      queryInterface.bulkUpdate('subject_tags', [
        {
          tag: 'agricultural-sciences',
          tag_description: '',
          subject_category: 'biological_&_medical_sciences',
          createdAt: new Date(),
          updatedAt: new Date()
        }, {
          tag: 'biochemistry',
          tag_description: '',
          subject_category: 'biological_&_medical_sciences',
          createdAt: new Date(),
          updatedAt: new Date()
        }, {
          tag: 'bioinformatics',
          tag_description: '',
          subject_category: 'biological_&_medical_sciences',
          createdAt: new Date(),
          updatedAt: new Date()
        }
      ], {});
  },
  down: function(queryInterface, Sequelize) {
    return
      queryInterface.bulkDelete('subject_tags', null, {});
  }
};
Though I am getting this status:
Using environment "development".
== 20170212081140-subject_tags: migrating =======
== 20170212081140-subject_tags: migrated (0.053s)
I tried bulkCreate and bulkInsert in the seed file; all of them run successfully, but the data does not get inserted into the table.
Am I doing something wrong?
It turned out to be an issue in the seeder file itself: after the return statement, the newline means the expression on the next line is never evaluated.
module.exports = {
  up: function(queryInterface, Sequelize) {
    //old code
    //return
    //  queryInterface.bulkUpdate('subject_tags', [
    //new code
    return queryInterface.bulkUpdate('subject_tags', [
    //.........
JavaScript automatically adds a semicolon after a dangling return statement, so execution never reaches the bulkUpdate code.
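A quick way to see that automatic semicolon insertion in action, independent of Sequelize:
// The parser turns the dangling return into `return;`, so the object literal below it is dead code
function broken() {
  return
    { status: 'never returned' };
}
function fixed() {
  return { status: 'returned' };
}
console.log(broken()); // undefined
console.log(fixed());  // { status: 'returned' }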
