I have a MongoDB instance to which I want to add entries, and I am using the mongoose library for this.
import mongoose from 'mongoose'
const personSchema = new mongoose.Schema({
name: String,
number: String
})
const Person = mongoose.model('Person', personSchema)
// addToPhonebook adds an entry to the phonebook
//
const addToPhonebook = (name, number) => {
const person = new Person({
name: name,
number: number
})
return person.save()
.then(_result => {
console.log(`added ${name} number ${number} to phonebook`)
mongoose.connection.close()
},
error => console.log(`nothing happened: ${error}`))
}
// Do some initialization
const url = 'mongodb+srv://...'
await mongoose.connect(url, {
useNewUrlParser: true,
useUnifiedTopology: true,
useFindAndModify: false,
useCreateIndex: true,
}).then(
_result => {
console.log('logged in to phonebook')
},
error => {
console.log(`couldn't log in to phonebook: ${error}`)
process.exit(2)
}
)
await addToPhonebook('Some name', '1-800-xxx-xxxx')
The code above works as expected. But, if I were to rearrange addToPhonebook such that mongoose.connection.close() is called after the first .then:
const addToPhonebook = (name, number) => {
const person = new Person({
name: name,
number: number
})
return person.save()
.then(_result => console.log(`added ${name} number ${number} to phonebook`),
error => console.log(`nothing happened: ${error}`))
.then(mongoose.connection.close())
}
It now always logs nothing happened: MongoError: server is closed.
Am I doing something wrong or is my knowledge about promises faulty?
You're using your second .then() incorrectly. You have to change it to this:
.then(() => mongoose.connection.close())
Explanation:
.then() needs a function as its parameter. If you call the function yourself, like this:
.then(mongoose.connection.close());
... you're passing the return value of .close() as the parameter. The call runs immediately, while the chain is still being set up, so the connection is already closed by the time save() talks to the server, which is why you see MongoError: server is closed.
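A minimal sketch of the difference, with close standing in for mongoose.connection.close:
const close = () => console.log('closed')

const p = new Promise(resolve => setTimeout(resolve, 1000))
p.then(close)    // passes the function; close runs after p settles, ~1s from now
p.then(close())  // calls close() immediately and passes its return value (undefined) to .then()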
mongoose.connection.close() is deprecated; use mongoose.disconnect() instead.
I would also suggest not closing the connection after every query; maybe you need that for this one-off script, but try to make it more robust.
A better approach is to close the connection only when your Node process is about to exit, or when something else is about to restart your application.
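For example, a minimal sketch of that pattern, disconnecting only when the process is shut down (SIGINT is what Ctrl+C sends):
process.on('SIGINT', async () => {
  await mongoose.disconnect()
  process.exit(0)
})
And here is your function using mongoose.disconnect():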
const addToPhonebook = (name, number) => {
const person = new Person({
name: name,
number: number
})
return person.save()
.then(_result => console.log(`added ${name} number ${number} to phonebook`),
error => console.log(`nothing happened: ${error}`))
.then(mongoose.disconnect) // passes the function itself, not its result; it runs once the save has settled
}
I'm making a Discord bot to scrape prices from Amazon. I'm using a MongoDB database to store links users give to the bot, to track the price of the item each link leads to.
My issue is that when I run my code and use the add command, my console reads:
Starting...
Online! Logged in as Amazon Price Tracker#6927
Connected to Database
null
MongooseError: document must have an _id before saving
at C:\Users\logic\Documents\Disc Bot\node_modules\mongoose\lib\model.js:291:18
at processTicksAndRejections (node:internal/process/task_queues:78:11)
Disconnected from Database
I've read the docs and my understanding is that mongoose generates a unique id automatically. I am aware that you can override this by defining an id in your schema, but I haven't done that, so I don't know why console.log(a) prints null and the .save() errors out.
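As far as I can tell, constructing a document the normal way gives it an id immediately, before saving. For example (example values):
const doc = new Link({ user: "me", link: "https://example.com", price: "N/A" })
console.log(doc._id) // an ObjectId, assigned at construction time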
My add.js file
//add function using mongoose for mongodb
const { SlashCommandBuilder } = require("@discordjs/builders");
const mongoose = require("mongoose");
const { MongoDBurl } = require("../config.json");
const Link = require("../Schemas/Link.js");
module.exports = {
//Build the slash command
data: new SlashCommandBuilder()
.setName("add")
.setDescription("add a url to watch list")
.addStringOption(option =>
option.setName("url")
.setDescription("url to add to watch list")
.setRequired(true),
),
//Function that runs when the command is used
async execute (interaction) {
const URL = interaction.options.getString("url");
const user = interaction.user.username;
await interaction.reply(`On it! Adding ${URL} to your watch list`)
//Connect to the database, throws an error if it can't connect
await mongoose.connect(MongoDBurl)
.then( () => console.log("Connected to Database"))
.catch(err => console.log(err));
//Check if the link is already in the database
var exists = await Link.exists({ link: URL}).exec()
.catch(err => console.log(err))
if (exists) {
console.log("This Document Already Exists")
interaction.editReply(`Oops! That link is already in my database.`)
} else {
//If the link doesn't exist, create a document and save it to the database
var newLink = new Link({ user: user }, { link: URL }, { price: "N/A" })
// Debuging variable
var a = newLink.id;
console.log(a)
await newLink.save()
.then( () => {
console.log("Document Saved")
interaction.editReply(`All done! I have saved ${URL} to your watch list.`)
})
.catch(err => {
console.log(err)
interaction.editReply("Oops! Something went wrong, I wasen't able to save this link.")
})
}
//Close the connection when we finish
await mongoose.connection.close()
.then( () => console.log("Disconnected from Database"))
}
};
My Link.js file
const mongoose = require("mongoose");
const Schema = mongoose.Schema;
const LinkSchema = new Schema({
user: {
type: String,
requiered: true
},
link: {
type: String,
requiered: true
},
price: {
type: String,
requiered: true
},
})
module.exports = mongoose.model("Link", LinkSchema);
When creating a new model instance, all of the fields must be within the same pair of curly braces (a single object). When updating, they are separate objects, since the filter and the update are different arguments.
That's why the error was occurring. You have already shared a working piece of code, so I'm guessing you no longer need one.
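To illustrate the difference (the values are just examples):
// creating: every field goes into one object
const newLink = new Link({ user: "someUser", link: "https://example.com", price: "N/A" })

// updating: the filter and the update are separate objects
await Link.updateOne({ link: "https://example.com" }, { price: "$9.99" })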
So I found my issue. I changed this line
var newLink = new Link({ user: user }, { link: URL }, { price: "N/A" })
To
const newLink = new Link({ user: user, link: URL, price: "N/A" });
I don't know why this fixed it. I don't think it's because I changed var -> const, and looking at the documentation I thought the first line was the correct way to do this.
The line I originally used from the documentation:
Tank.updateOne({ size: 'large' }, { name: 'T-90' }, function(err, res) {
// Updated at most one doc, `res.nModified` contains the number
// of docs that MongoDB updated
});
Is this an error in the documentation, or a possible bug? Either way, the issue is now resolved.
I have to read a really large CSV file, so I searched Google and got to know about createReadStream. I am using a program that reads the CSV file's data and inserts it into MongoDB.
The process I am following:
Process the data using createReadStream (I think it reads the file line by line).
Store the data in an array.
Insert the data into MongoDB using insertMany.
Now the problem is that the whole file is first stored in an array and only then inserted into the database.
But I think the better approach would be to store only the first 500 lines/rows in an array, insert them into the DB, and then repeat the same step for the next 500 records.
Is it possible to achieve this?
And is it the right way to do it?
My program:
// assumed imports for this worker file: fs, a CSV parser such as csv-parse, and worker_threads
const fs = require('fs')
const { parse } = require('csv-parse')
const { workerData, parentPort } = require('worker_threads')
// authorName, companyName and the various *Data arrays are declared elsewhere in this file
const test = async () => {
const stream = fs.createReadStream(workerData)
.pipe(parse())
.on('data', async function(csvrow) {
try{
stream.pause()
if(!authorName.includes(csvrow.author)) {
const author = new Author({author: csvrow.author})
authorId = author._id
authorName.push(author.author)
authorData.push(author)
}
if(!companyName.includes(csvrow.company_name)) {
const company = new Company({companyName: csvrow.company_name})
companyID = company._id
companyName.push(company.companyName)
companyData.push(company)
}
users = new User({
name: csvrow.firstname,
dob: csvrow.dob,
address: csvrow.address,
phone: csvrow.phone,
state: csvrow.state,
zip: csvrow.zip,
email: csvrow.email,
gender: csvrow.gender,
userType: csvrow.userType
})
userData.push(users)
book = new Book({
book_number: csvrow.book_number,
book_name: csvrow.book_name,
book_desc: csvrow.book_desc,
user_id: users._id,
author_id: authorId
})
bookData.push(book)
relationalData.push({
username: users.name,
author_id: authorId,
book_id: book._id,
company_id: companyID
})
}finally {
stream.resume()
}
})
.on('end', async function() {
try {
Author.insertMany(authorData)
User.insertMany(userData)
Book.insertMany(bookData)
Company.insertMany(companyData)
await Relational.insertMany(relationalData)
parentPort.postMessage("true")
}catch(e){
console.log(e)
parentPort.postMessage("false")
}
})
}
test()
This program works fine and inserts the data into the DB, but I am looking for something like this:
const stream = fs.createReadStream(workerData)
.pipe(parse())
.on('data', async function(csvrow, maxLineToRead: 500) {
// whole code/logic of insert data into DB
})
maxLineToRead is an imaginary option of mine.
Basically, my point is that I want to process 500 rows at a time, insert them into the DB, and repeat that process until the end.
You can create a higher scoped array variable where you accumulate rows of data as they arrive on the data event. When you get to 500 rows, fire off your database operation to insert them. If not yet at 500 rows, then just add the next one to the array and wait for more data events to come.
Then, in the end event, insert any remaining rows still in the higher scoped array.
In this way, you will insert 500 at a time and then however many are left at the end. This has an advantage over inserting them all at the end: you spread the database load out over the time you spend parsing.
Here's an attempt to implement that type of processing. There are some unknowns (documented with comments) based on an incomplete description of exactly what you're trying to accomplish in some circumstances:
const test = () => {
return new Promise((resolve, reject) => {
const accumulatedRows = [];
async function processRows(rows) {
// initialize data arrays that we will insert
const authorData = [],
companyData = [],
userData = [],
bookData = [],
relationalData = [];
// this code still has a problem that I don't have enough context
// to know how to solve
// If authorName contains csvrow.author, then the variable
// authorId is not initialized, but is used later in the code
// This is a problem that needs to be fixed.
// The same issue occurs for companyID
for (let csvrow of rows) {
let authorId, companyID;
if (!authorName.includes(csvrow.author)) {
const author = new Author({ author: csvrow.author })
authorId = author._id
authorName.push(author.author)
authorData.push(author)
}
if (!companyName.includes(csvrow.company_name)) {
const company = new Company({ companyName: csvrow.company_name })
companyID = company._id
companyName.push(company.companyName)
companyData.push(company)
}
let users = new User({
name: csvrow.firstname,
dob: csvrow.dob,
address: csvrow.address,
phone: csvrow.phone,
state: csvrow.state,
zip: csvrow.zip,
email: csvrow.email,
gender: csvrow.gender,
userType: csvrow.userType
});
userData.push(users)
let book = new Book({
book_number: csvrow.book_number,
book_name: csvrow.book_name,
book_desc: csvrow.book_desc,
user_id: users._id,
author_id: authorId
});
bookData.push(book)
relationalData.push({
username: users.name,
author_id: authorId,
book_id: book._id,
company_id: companyID
});
}
// all local arrays of data are populated now for this batch
// so add this data to the database
await Author.insertMany(authorData);
await User.insertMany(userData);
await Book.insertMany(bookData);
await Company.insertMany(companyData);
await Relational.insertMany(relationalData);
}
const batchSize = 500;
const stream = fs.createReadStream(workerData)
.pipe(parse())
.on('data', async function(csvrow) {
try {
accumulatedRows.push(csvrow);
if (accumulatedRows.length >= batchSize) {
stream.pause();
await processRows(accumulatedRows);
// clear out the rows we just processed
accumulatedRows.length = 0;
stream.resume();
}
} catch (e) {
// calling destroy(e) will prevent leaking a stream
// and will trigger the error event to be called with that error
stream.destroy(e);
}
}).on('end', async function() {
try {
await processRows(accumulatedRows);
resolve();
} catch (e) {
reject(e);
}
}).on('error', (e) => {
reject(e);
});
});
}
test().then(() => {
parentPort.postMessage("true");
}).catch(err => {
console.log(err);
parentPort.postMessage("false");
});
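As a side note, the same batching could also be written with the stream's async iterator, which avoids the manual pause()/resume() bookkeeping. A sketch, reusing processRows and batchSize from above:
const testWithAsyncIteration = async () => {
    const batch = [];
    // the parsed CSV stream is a readable stream, so it can be consumed with for await...of;
    // iteration applies backpressure automatically while an insert is awaited
    for await (const csvrow of fs.createReadStream(workerData).pipe(parse())) {
        batch.push(csvrow);
        if (batch.length >= batchSize) {
            await processRows(batch);
            batch.length = 0; // clear the batch we just inserted
        }
    }
    // insert whatever rows are left over
    if (batch.length) {
        await processRows(batch);
    }
};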
MongoDB 4.2.2 and Mongoose 5.8.3 (latest) and NodeJS 13.3.0 (Windows x64)
If I create a schema and model, then create an instance of the model and add some data, then run validate(), then save(): even if validate() fails, the data is saved into the collection, without throwing an additional validation error.
Is this a bug, or am I doing something wrong?
Here's the test code:
var mongoose = require('mongoose')
mongoose.connect("mongodb://user:pass#localhost/mydb")
db = mongoose.connection
var Schema = mongoose.Schema
var PartSchema = new Schema({
name: {
type: String,
required: true,
validate: {
validator: (v) => v !== 'asdf' // Don't allow name to be 'asdf'
}
},
number: {
type: String,
required: true,
validate: {
validator: (v) => !v.includes(' ') // Don't allow spaces in part number.
}
}
})
var ProductSchema = new Schema({
name: String,
parts: [PartSchema]
})
var Part = mongoose.model('Part', PartSchema)
var Product = mongoose.model('Product', ProductSchema)
var p1 = new Product({name:"Baseball Bat", parts:[ new Part({name:"First part", number: "003344"}), new Part({name: "Second part", number: "554422"}) ]})
p1.parts.push(new Part({name: "No number, so invalid"})) // this one is invalid because no part number is specified (required)
p1.parts.push(new Part({name: 'asdf', number: 'zzzzzaaaa'}))
p1.parts.push(new Part({name: 'bbbb', number: 'with a space'})) // This one is invalid because number has spaces.
p1.validate()
.then(() => {console.log('Validation successful')})
.catch((err) => { console.log("Validation failed.")})
p1.save()
.then(()=>{ console.log("Saved successfully")})
.catch((err)=>{console.log("Save ERROR", err)})
Running this code yields the following:
Validation failed.
Saved successfully
And the new document appears in the database.
However, if I remove the p1.validate() before calling save(), the save function's catch() block triggers and the item is not saved:
Save ERROR Error [ValidationError]: Product validation failed: parts.2.number: Path `number` is required., parts.3.name: Validator failed for path `name` with value `asdf`, parts.4.number: Validator failed for path `number` with value `with a space`
at ValidationError.inspect
... snipped
Maybe you need to use p1.save() inside the promise chain:
p1.validate()
.then(res => {
console.log("Validation successful");
})
.then(() => {
return p1.save();
})
.then(res => {
console.log("saved success ", res);
})
.catch(err => {
console.log("Some error.", err);
});
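In your original code the two calls were started back to back and ran independently of each other. Chained this way, if validate() rejects, the chain skips straight to the catch() and p1.save() is never called. The same flow as a sketch with async/await:
const run = async () => {
    try {
        await p1.validate();
        console.log("Validation successful");
        const res = await p1.save();
        console.log("saved success ", res);
    } catch (err) {
        console.log("Some error.", err);
    }
};
run();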
When starting a transaction, how can some statements be kept out of the transaction?
See the following example: the first create statement doesn't get the transaction option, so it should be outside the transaction, and when rolling back it should remain in the database. However, both run in the transaction and both are rolled back. Any comments?
describe.only('sequelzie transaction test', () => {
const sequelize = new Sequelize('test', null, null, {
dialect: 'sqlite',
})
const UserModel = sequelize.define('user', {
name: {
type: Sequelize.STRING,
primaryKey: true,
},
})
beforeEach (async () => {
await sequelize.sync({force: true})
})
it('partial rollback transaction should work', async () => {
const transaction = await sequelize.transaction()
try {
await UserModel.create({name: 'ron'} )
await UserModel.create({name: 'ron', {transaction}})
await transaction.commit()
}
catch (e) {
await transaction.rollback()
}
const users = await UserModel.findAll()
expect(users).to.have.length(1) // failed here, it was 0, both are rolled back
})
})
One problem I did find in your code was that the transaction was passed incorrectly to the create function:
await UserModel.create({name: 'ron'}, {transaction})
The transaction is part of the options object, which is the second argument. Also try enabling query logging (the logging option) to check what queries are being executed; you will be able to discern the problem.
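A sketch of the corrected statements, assuming the same setup as in the test:
const transaction = await sequelize.transaction()
try {
    await UserModel.create({name: 'ron'})                 // options omitted: meant to run outside the transaction
    await UserModel.create({name: 'ron'}, {transaction})  // transaction passed in the options (second) argument
    await transaction.commit()
}
catch (e) {
    await transaction.rollback()
}
And query logging can be switched on in the constructor:
const sequelize = new Sequelize('test', null, null, {
    dialect: 'sqlite',
    logging: console.log, // print every SQL statement that runs
})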
The bookshelf documentation indicates that I should be able to pass an array of models into collection.attach(), and they demonstrate this with the following code:
var admin1 = new Admin({username: 'user1', password: 'test'});
var admin2 = new Admin({username: 'user2', password: 'test'});
Promise.all([admin1.save(), admin2.save()])
.then(function() {
return Promise.all([
new Site({id: 1}).admins().attach([admin1, admin2]),
new Site({id: 2}).admins().attach(admin2)
]);
})
It doesn't seem to work in my case, however. My save operation works fine if I pass in an array of ids:
export function create({ blocks, tags, ...rest }) {
const attributes = {
blocks: JSON.stringify(blocks),
...rest
}
return Post.forge(attributes).save().then(post => {
return post.tags().attach(tags.map(t => t.id)).then(() => post.refresh())
})
}
However, if I try to pass in the tags instead, like this:
return post.tags().attach(tags).then(() => post.refresh())
Then I receive an error:
Unhandled rejection error: column "id" of relation "posts_tags" does not exist
Am I misreading the documentation? Should I not be able to do this?