I have a large dataset that I want to insert into a Postgres database. I can achieve this using pg-promise like this:
function batchUpload(req, res, next) {
    var data = req.body.data;
    // ColumnSet is a class, so it needs `new`
    var cs = new pgp.helpers.ColumnSet(['firstname', 'lastname', 'email'], { table: 'customer' });
    var query = pgp.helpers.insert(data, cs);
    db.none(query)
        .then(data => {
            // success;
        })
        .catch(error => {
            // error;
            return next(error);
        });
}
The dataset is an array of objects like this:
[
    {
        firstname: 'Lola',
        lastname: 'Solo',
        email: 'mail@solo.com',
    },
    {
        firstname: 'hello',
        lastname: 'world',
        email: 'mail@example.com',
    },
    {
        firstname: 'mami',
        lastname: 'water',
        email: 'mami@example.com',
    }
]
The challenge is that I have a column added_at which isn't included in the dataset and cannot be null. How do I add a timestamp for each record insertion to the query?
As per the ColumnConfig syntax:
const col = {
    name: 'added_at',
    def: () => new Date() // default to the current Date/Time
};

const cs = new pgp.helpers.ColumnSet(['firstname', 'lastname', 'email', col], { table: 'customer' });
Alternatively, you can define it in a number of other ways, as ColumnConfig is very flexible.
Example:
const col = {
    name: 'added_at',
    mod: ':raw', // use raw-text modifier, to inject the string directly
    def: 'now()' // use now() for the column
};
Or you can use property init to set the value dynamically:
const col = {
    name: 'added_at',
    mod: ':raw', // use raw-text modifier, to inject the string directly
    init: () => {
        return 'now()';
    }
};
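Putting it together with the original handler, a minimal usage sketch (assuming the same data array and db object from the question):

// minimal sketch: the extra column rides along with the batch insert
const cs = new pgp.helpers.ColumnSet(
    ['firstname', 'lastname', 'email', col],
    { table: 'customer' }
);
const query = pgp.helpers.insert(data, cs);
//=> INSERT INTO "customer"("firstname","lastname","email","added_at") VALUES(...),(...),(...)
db.none(query)
    .then(() => { /* success */ })
    .catch(next);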
See the ColumnConfig syntax for details.
P.S. I'm the author of pg-promise.
I want to sum a column in a Bookshelf.js relationship. I have my query set up as
return this.hasMany('MutualFundPortfolio').query().sum('balance');
But I am getting this error: TypeError: Cannot read property 'parentFk' of undefined. Does anybody have a clue how to solve this? It seems Bookshelf doesn't support sum.
const moment = require('moment');
const Bookshelf = require('../bookshelf');

require('./wishlist');
require('./kyc');
require('./wallet');

const User = Bookshelf.Model.extend({
    tableName: 'users',
    hasTimestamps: true,
    hidden: ['code', 'password'],
    toJSON(...args) {
        const attrs = Bookshelf.Model.prototype.toJSON.apply(this, args);
        attrs.created_at = moment(this.get('created_at')).add(1, 'hour').format('YYYY-MM-DD HH:mm:ss');
        attrs.updated_at = moment(this.get('updated_at')).add(1, 'hour').format('YYYY-MM-DD HH:mm:ss');
        return attrs;
    },
    local_wallet() {
        return this.hasMany('LocalWallet').query((qb) => {
            qb.orderBy('id', 'DESC').limit(1);
        });
    },
    mutual_fund_portfolio() {
        return this.hasMany('MutualFundPortfolio').query().sum('balance');
    },
    global_wallet() {
        return this.hasMany('GlobalWallet').query((qb) => {
            qb.orderBy('id', 'DESC').limit(1);
        });
    },
    local_gift_card_wallet() {
        return this.hasMany('LocalGiftCardWallet').query((qb) => {
            qb.orderBy('id', 'DESC').limit(1);
        });
    },
    global_gift_card_wallet() {
        return this.hasMany('GlobalGiftCardWallet').query((qb) => {
            qb.orderBy('id', 'DESC').limit(1);
        });
    }
});

module.exports = Bookshelf.model('User', User);
Above is the full user model. I am then getting the value as
return User.where({ id })
    .orderBy('id', 'DESC')
    .fetch({
        withRelated: [
            'mutual_fund_portfolio',
            'local_wallet',
            'global_wallet',
            'local_gift_card_wallet',
            'global_gift_card_wallet'
        ]
    });
The mutual_fund_portfolio comes out as an empty array.
hasMany performs a simple SQL join on a key. I believe the TypeError: Cannot read property 'parentFk' of undefined error means that the MutualFundPortfolio table you are referencing does not share a key with the table behind the model you are calling it from.
It's not visible in the above sample, but I'm assuming it's something like:
const User = bookshelf.model('User', {
    tableName: 'users',
    books() {
        return this.hasMany('MutualFundPortfolio').query().sum('balance');
    }
})
In my hypothetical example, the users table has a primary key column userId that should also be present in MutualFundPortfolio as a foreign key. My guess is that the error occurs because MutualFundPortfolio does not have that column/foreign key.
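If the linking column simply has a non-default name, Bookshelf lets you pass it explicitly as the second argument to hasMany. A sketch, assuming the foreign key column is called user_id (an assumption; use whatever column actually exists):

mutual_fund_portfolio() {
    // 'user_id' is hypothetical; point this at the column that
    // actually links MutualFundPortfolio rows back to users
    return this.hasMany('MutualFundPortfolio', 'user_id');
}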
I have a Schema that looks like this:
const RefSchema = {
    active: Boolean,
    items: [{}],
};

const TopLevelSchema = new mongoose.Schema({
    refs: [RefSchema],
    ...
}, { timestamps: true });
I'm making an API call to update one of the refs using its id (rid below) and some data that's passed in the API call:
async function updateRef(id, rid, data) {
    // First get the TopLevelSchema document by its ID - this is OK
    const instance = await this.findById(id).exec();
    // Prepare the data:
    const $set = _.mapKeys(data, (v, k) => `refs.$.${k}`);
    // Update the data
    await instance.update(
        { 'refs.id': rid },
        { $set },
    );
}
What's happening is that the data (e.g. when I pass { active: true }) is not updated.
What am I doing wrong?
There is no need to first fetch the TopLevelSchema document. You can update the child directly, like this:
async function updateRef(rid, data) {
    let $set = _.mapKeys(data, (v, k) => `refs.$.${k}`);
    await TopLevelSchema.updateOne(
        { 'refs._id': mongoose.Types.ObjectId(rid) },
        { $set });
}
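Since the positional $ operator resolves to the array element matched by the query filter, the filter must reference refs._id. A hypothetical call (the id value is purely illustrative):

// hypothetical usage; resolves to:
//   updateOne({ 'refs._id': ObjectId(rid) }, { $set: { 'refs.$.active': true } })
await updateRef('507f191e810c19729de860ea', { active: true });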
Are you using custom ids? Because you should do { 'refs._id': rid } instead of { 'refs.id': rid }.
I'm trying to add a subdocument to a parent schema with Mongoose and MongoDB; however, I'm being thrown the following error:
TypeError: User is not a constructor
This is based on Mongoose's documentation on subdocuments and I think everything is the same. How can I debug this further?
Router
// Add a destination to the DB
router.post('/add', function(req, res, next) {
    let airport = req.body.destination
    let month = req.body.month
    let id = (req.user.id)
    User.findById(id, function (err, User) {
        if (err) return handleError(err)
        function addToCart (airport, month, id) {
            var user = new User({
                destinations: [(
                    airport = '',
                    month = ''
                )]
            })
            dog.destinations[0].airport = airport
            dog.destinations[0].month = month
            dog.save(callback)
            res.status(200).send('added')
        }
        addToCart()
    })
    console.log(airport)
})
Schema
var destinationSchema = new Schema({
    airport: String,
    month: String
})

// Define the schema
var User = new Schema({
    firstName: {
        type: String,
        index: true
    },
    lastName: {
        type: String,
        index: true
    },
    email: {
        type: String,
        index: true
    },
    homeAirport: {
        type: String,
        index: true
    },
    destinations: [destinationSchema]
})

User.plugin(passportLocalMongoose)

module.exports = mongoose.model('User', User)
JavaScript is case-sensitive about variable names, and here you have the User model and the User query result under the same name: the callback parameter shadows the model, so new User() is called on the fetched document rather than on the model constructor.
Your code will work with the following change:
User.findById(id, function (err, user) {
    /*                         ^ use small `u` */
    if (err) return handleError(err)
    /* rest of your code */
Also keep in mind that further down in your code you declare another variable named user. You will need to rename that one to something different.
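Putting both fixes together, a minimal sketch of the route with the shadowing removed; since the parent document already exists, it pushes a subdocument rather than constructing a new User:

router.post('/add', function (req, res, next) {
    User.findById(req.user.id, function (err, user) { // lowercase `user`
        if (err) return next(err)
        // push a new subdocument onto the existing array
        user.destinations.push({
            airport: req.body.destination,
            month: req.body.month
        })
        user.save(function (err) {
            if (err) return next(err)
            res.status(200).send('added')
        })
    })
})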
I'd like to pass dictionaries with column names as keys, thus avoiding declaring the column names within the query itself (typing them directly).
Assume I have a table User with 2 column names:
idUser(INT)
fullName(VARCHAR)
To create a record using node-postgres, I need to declare the column names within the query, like so:
var idUser = 2;
var fullName = "John Doe";

var query = 'INSERT INTO User(idUser, fullName) VALUES ($1, $2)';
database.query(query, [idUser, fullName], function(error, result) {
    callback(error, result.rows);
    database.end();
});
I'd prefer it if there was a way to just pass a dictionary and have it infer the column names from the keys - if there's an easy trick, I'd like to hear it.
E.g. something like this:
var values = {
    idUser: 2,
    fullName: "John Doe"
};

var query = 'INSERT INTO User VALUES ($1)';
database.query(query, [values], function(error, result) {
    callback(error, result.rows);
    database.end();
});
A complete example of doing it with pg-promise:
const pgp = require('pg-promise')(/*options*/);
const cn = 'postgres://username:password@host:port/database';
const db = pgp(cn);

const values = {
    idUser: 2,
    fullName: 'John Doe'
};

// generating the insert query:
const query = pgp.helpers.insert(values, null, 'User');
//=> INSERT INTO "User"("idUser","fullName") VALUES(2,'John Doe')

db.none(query)
    .then(data => {
        // success;
    })
    .catch(error => {
        // error;
    });
And with a focus on high performance, it would change to this:
// generating a set of columns from the object (only once):
const cs = new pgp.helpers.ColumnSet(values, { table: 'User' });

// generating the insert query:
const query = pgp.helpers.insert(values, cs);
//=> INSERT INTO "User"("idUser","fullName") VALUES(2,'John Doe')
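The same ColumnSet can then be reused for every insert against that table, including multi-row inserts, by passing an array of objects (the second row here is illustrative):

// reusing the ColumnSet for a multi-row insert
const users = [
    { idUser: 2, fullName: 'John Doe' },
    { idUser: 3, fullName: 'Jane Roe' } // illustrative second row
];
const multiQuery = pgp.helpers.insert(users, cs);
//=> INSERT INTO "User"("idUser","fullName") VALUES(2,'John Doe'),(3,'Jane Roe')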
There's no support for key-value pairs in the INSERT statement itself, so it cannot be done with plain SQL.
However, the node-postgres extras page mentions multiple SQL generation tools, and for example Squel.js parameters can be used to construct SQL in a way very close to what you're looking for:
squel.insert()
    .into("User")
    .setFieldsRows([
        { idUser: 2, fullName: "John Doe" }
    ])
    .toParam()

// => { text: 'INSERT INTO User (idUser, fullName) VALUES (?, ?)',
//      values: [ 2, 'John Doe' ] }
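The resulting text/values pair plugs straight into node-postgres, though Postgres expects $1-style placeholders rather than ?. A sketch, assuming Squel's postgres flavour (which produces numbered parameters):

// a sketch using squel's postgres flavour for $1-style placeholders
const squel = require('squel').useFlavour('postgres');
const { text, values } = squel.insert()
    .into('User')
    .setFieldsRows([{ idUser: 2, fullName: 'John Doe' }])
    .toParam();
// => text: 'INSERT INTO User (idUser, fullName) VALUES ($1, $2)'
database.query(text, values, function (error, result) {
    callback(error, result.rows);
});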
My case was a bit special, as I had a field named order in the JSON object, which is a reserved keyword in SQL. Therefore I had to wrap all column names in double quotes using a JSONify() function.
Also note the numberedParameters argument, as well as the double quotes around the 'Messages' string.
import { pool } from './connection';
import * as squel from 'squel';

// assumed import: the postgres flavour supplies $1-style numbered parameters
const { insert } = squel.useFlavour('postgres');

// `Record<string, any>` rather than `Map`, since the loop below uses for..in
function JSONify(obj: Record<string, any>) {
    const o: Record<string, any> = {};
    for (const i in obj) {
        o['"' + i + '"'] = obj[i]; // wrap each key in double quotes
    }
    return o;
}
// I have a table named "Messages" with the columns order and name
// I also supply the createdAt and updatedAt timestamps just in case
const messages = [
    {
        order: 0,
        name: 'Message with index 0',
        createdAt: new Date().toISOString(),
        updatedAt: new Date().toISOString(),
    }
];

// Create the insert statement
const insertStatement = insert({ numberedParameters: true })
    .into('"Messages"')
    .setFieldsRows(messages.map((message) => JSONify(message)))
    .toParam();

console.log(insertStatement);
// Notice the quotes wrapping the table and column names
// => { text: 'INSERT INTO "Messages" ("order", "name", "createdAt", "updatedAt") VALUES ($1, $2, $3, $4)',
//      values: [ 0, 'Message with index 0', '2022-07-22T13:51:27.679Z', '2022-07-22T13:51:27.679Z' ] }
// Create
await pool.query(insertStatement.text, insertStatement.values);
See the Squel documentation for more details.
And this is how I create the pool object if anyone is curious.
import { Pool } from 'pg';
import { DB_CONFIG } from './config';

export const pool = new Pool({
    user: DB_CONFIG[process.env.NODE_ENV].username,
    host: DB_CONFIG[process.env.NODE_ENV].host,
    database: DB_CONFIG[process.env.NODE_ENV].database,
    password: DB_CONFIG[process.env.NODE_ENV].password,
    port: DB_CONFIG[process.env.NODE_ENV].port,
});
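Usage is then just pool.query; a quick sanity check might look like:

// quick sanity check that the pool can reach the database
const { rows } = await pool.query('SELECT NOW() AS now');
console.log(rows[0].now);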
I'm sending query params in JSON format in req.query.p from my front-end MVC framework. The point is that the key and the value can be dynamic, for example:
req.query.p = {nombre : 'juan'}
or
req.query.p = {pais : 'chile'}
So I need the key and the value in order to put them in the where statement, something like this:
exports.select = function(req, res) {
    console.log('=> GET | Obtener peliculas'.bold.get);
    db.Pelicula
        .findAndCountAll({
            limit: req.query.limit,
            offset: req.query.offset,
            where: req.query.p ? [req.query.p.KEY + " = ?", req.query.p.VAL] : null
        })
        .success(function(resp) {
            console.log(JSON.stringify(resp.rows, null, 4).bold.get);
            res.json({ peliculas: resp.rows, meta: { total: resp.count } });
        });
}
The where parameter can be an object, so you can just pass where: req.query.p
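In context that's just (a minimal sketch reusing the question's own query):

// minimal sketch: pass the parsed query object straight through
db.Pelicula
    .findAndCountAll({
        limit: req.query.limit,
        offset: req.query.offset,
        where: req.query.p || null // e.g. { nombre: 'juan' } becomes WHERE nombre = 'juan'
    });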
Usually I pass the entire object, so if it comes in empty, the query works normally, as if there were no conditional WHERE.
You don't need to wrap it in {} in the where clause, because the object that comes from req.query already is one.
const filter = req.query;

example = await ModelExample.findAndCountAll({
    where: filter
});
With ES6 and computed property keys, I'd do it like this:
const { Op } = require("sequelize");

const from = new Date();
// const to = new Date().setMinutes(40)
const to = null;

let where = {
    timestamp: {
        [Op.or]: {}
    }
};

if (from) {
    where.timestamp[Op.or][Op.gte] = new Date(from);
}
if (to) {
    where.timestamp[Op.or][Op.lte] = new Date(to);
}

console.log(where);

Model.find({ where });
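For reference, with only from set this renders as a plain lower-bound comparison; with both bounds set it becomes an OR of the two comparisons. Roughly:

// with only `from`:      WHERE "timestamp" >= <from>
// with `from` and `to`:  WHERE ("timestamp" >= <from> OR "timestamp" <= <to>)
// findAll is the usual finder in current Sequelize versions:
Model.findAll({ where });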