I'm creating a ColumnSet object with pg-promise, as follows:
const cs = new pgp.helpers.ColumnSet([
    {name: 'Id', prop: 'Id'},
    {name: 'Lat', prop: 'Lat'},
    {name: 'Lng', prop: 'Lng'},
    {name: 'CreationDateTime', prop: 'CreationDateTime'},
    {name: 'Topic', prop: 'Topic'},
    {name: 'UserId', prop: 'UserId'},
    {name: 'shape', mod: ':raw', prop: 'shape', def: 'point'},
    {name: 'UserName', prop: 'UserName'},
    {name: 'appName', prop: 'appName'},
    {name: 'appVersion', prop: 'appVersion'}
], {
    table: 'Location'
});
Here def: 'point' refers to point, a method for converting into a geometry value. Is this treated as a plain value, or how can I run the point method and bind its result to this column (shape)?
I also wrote this method for bulk inserting:
async function insertMany(values) {
    try {
        let results = await db.none(pgp.helpers.insert(values, cs));
    } catch (error) {
        console.log(error);
    }
}
For converting lat and lng I wrote this method:
const point = (lat, lng) => ({
    toPostgres: () => pgp.as.format('ST_SetSRID(ST_MakePoint($1, $2), 4326)', [Lag, Lng]),
    rawType: true
});
But I got this error:
TypeError: Values null/undefined cannot be used as raw text
According to this page:
Raw-text variables end with :raw or symbol ^, and prevent escaping the text. Such variables are not allowed to be null or undefined, or the method will throw TypeError = Values null/undefined cannot be used as raw text.
When the point method is not executed, the shape field is of course null.
First, you are misusing the prop option, which, as documented, is only needed when the destination property name differs from the column name, which is not your case.
And def, as also documented, represents the value to use when the property is missing. When the property is present but set to null or undefined, the value of def isn't used.
You are trying to override the resulting value, which means you need to use the init property.
Another issue: the variable names inside your point implementation don't match its parameters (you declare lat and lng, but reference Lag and Lng).
In all, your code should look something like this:
const getPoint = col => {
    const p = col.value;
    // we assume that when not null, the property is an object of {lat, lng},
    // otherwise we will insert NULL.
    return p ? pgp.as.format('ST_SetSRID(ST_MakePoint(${lat}, ${lng}), 4326)', p) : 'NULL';
};
const cs = new pgp.helpers.ColumnSet([
    'Id',
    'Lat',
    'Lng',
    'CreationDateTime',
    'Topic',
    'UserId',
    {name: 'shape', mod: ':raw', init: getPoint},
    'UserName',
    'appName',
    'appVersion'
], {
    table: 'Location'
});
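For illustration, a minimal usage sketch of feeding this ColumnSet through the insertMany function from the question; the sample rows and their values are assumptions, not part of the original code:
// sample data is made up, purely for illustration
const rows = [
    {Id: 1, Lat: 52.1, Lng: 21.0, CreationDateTime: new Date(), Topic: 'a', UserId: 7,
        shape: {lat: 52.1, lng: 21.0}, UserName: 'u1', appName: 'demo', appVersion: '1.0'},
    {Id: 2, Lat: null, Lng: null, CreationDateTime: new Date(), Topic: 'b', UserId: 8,
        shape: null, UserName: 'u2', appName: 'demo', appVersion: '1.0'} // shape renders as NULL
];
// pgp.helpers.insert(rows, cs) generates a single multi-row INSERT for the Location table,
// so the insertMany(values) function from the question can simply be called as:
insertMany(rows);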
And a version that uses Custom Type Formatting would look like this:
const getPoint = col => {
    const p = col.value;
    if (p) {
        return {
            toPostgres: () => pgp.as.format('ST_SetSRID(ST_MakePoint(${lat}, ${lng}), 4326)', p),
            rawType: true
        };
    }
    // otherwise, we return nothing, which will result in NULL automatically
};
const cs = new pgp.helpers.ColumnSet([
    'Id',
    'Lat',
    'Lng',
    'CreationDateTime',
    'Topic',
    'UserId',
    {name: 'shape', init: getPoint},
    'UserName',
    'appName',
    'appVersion'
], {
    table: 'Location'
});
I want to sum a column in a Bookshelfjs relationship. I have my query set up as
return this.hasMany('MutualFundPortfolio').query().sum('balance');
But I am getting this error: TypeError: Cannot read property 'parentFk' of undefined. Does anybody have a clue how to solve this? It seems Bookshelf doesn't support sum.
const moment = require('moment');
const Bookshelf = require('../bookshelf');
require('./wishlist');
require('./kyc');
require('./wallet');
const User = Bookshelf.Model.extend({
    tableName: 'users',
    hasTimestamps: true,
    hidden: ['code', 'password'],
    toJSON(...args) {
        const attrs = Bookshelf.Model.prototype.toJSON.apply(this, args);
        attrs.created_at = moment(this.get('created_at')).add(1, 'hour').format('YYYY-MM-DD HH:mm:ss');
        attrs.updated_at = moment(this.get('updated_at')).add(1, 'hour').format('YYYY-MM-DD HH:mm:ss');
        return attrs;
    },
    local_wallet() {
        return this.hasMany('LocalWallet').query((qb) => {
            qb.orderBy('id', 'DESC').limit(1);
        });
    },
    mutual_fund_portfolio() {
        return this.hasMany('MutualFundPortfolio').query().sum('balance');
    },
    global_wallet() {
        return this.hasMany('GlobalWallet').query((qb) => {
            qb.orderBy('id', 'DESC').limit(1);
        });
    },
    local_gift_card_wallet() {
        return this.hasMany('LocalGiftCardWallet').query((qb) => {
            qb.orderBy('id', 'DESC').limit(1);
        });
    },
    global_gift_card_wallet() {
        return this.hasMany('GlobalGiftCardWallet').query((qb) => {
            qb.orderBy('id', 'DESC').limit(1);
        });
    }
});
module.exports = Bookshelf.model('User', User);
Above is the full user model. I am then getting the value as
return User.where({ id })
    .orderBy('id', 'DESC')
    .fetch({
        withRelated: [
            'mutual_fund_portfolio',
            'local_wallet',
            'global_wallet',
            'local_gift_card_wallet',
            'global_gift_card_wallet'
        ]
    })
The mutual_fund_portfolio comes out as an empty array.
hasMany performs a simple SQL join on a key. I believe the TypeError: Cannot read property 'parentFk' of undefined error means that the MutualFundPortfolio table you are referencing does not share a key with the table of the model you are using.
It's not visible in the sample above, but I'm assuming it's something like:
const User = bookshelf.model('User', {
    tableName: 'users',
    books() {
        return this.hasMany('MutualFundPortfolio').query().sum('balance');
    }
})
In my hypothetical example the users table has a primary key column userId that is also present in MutualFundPortfolio as a foreign key. My guess is that the error occurs because MutualFundPortfolio does not have that column/foreign key.
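If the foreign key column does exist but just isn't being inferred by Bookshelf, here is a hedged sketch of naming it explicitly in the relation; the column name user_id is an assumption:
// Bookshelf's hasMany accepts the foreign key name as the second argument
mutual_fund_portfolio() {
    // 'user_id' is assumed; use whatever column in MutualFundPortfolio actually references users
    return this.hasMany('MutualFundPortfolio', 'user_id');
}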
I have query results from MongoDB as an array of documents with nested subdocuments and arrays of subdocuments.
[
    {
        RecordID: 9000,
        RecordType: 'Item',
        Location: {
            _id: 5d0699326e310a6fde926a08,
            LocationName: 'Example Location A'
        },
        Items: [
            {
                Title: 'Example Title A',
                Format: {
                    _id: 5d0699326e310a6fde926a01,
                    FormatName: 'Example Format A'
                }
            },
            {
                Title: 'Example Title B',
                Format: {
                    _id: 5d0699326e310a6fde926a01,
                    FormatName: 'Example Format B'
                }
            }
        ]
    },
    {
        RecordID: 9001,
        RecordType: 'Item',
        Location: {
            _id: 5d0699326e310a6fde926a08,
            LocationName: 'Example Location C'
        },
        Items: [
            {
                Title: 'Example Title C',
                Format: {
                    _id: 5d0699326e310a6fde926a01,
                    FormatName: 'Example Format C'
                }
            }
        ]
    }
]
Problem
I need to export the results to XLSX in column order. The XLSX library is working to export the top-level properties (such as RecordID and RecordType) only. I also need to export the nested objects and arrays of objects. Given a list of property names e.g. RecordID, RecordType, Location.LocationName, Items.Title, Items.Format.FormatName the properties must be exported to XLSX columns in the specified order.
Desired result
Here is the desired 'flattened' structure (or something similar) that I think I should be able to convert to XLSX columns.
[
    {
        'RecordID': 9000,
        'RecordType': 'Item',
        'Location.LocationName': 'Example Location A',
        'Items.Title': 'Example Title A, Example Title B',
        'Items.Format.FormatName': 'Example Format A, Example Format B'
    },
    {
        'RecordID': 9001,
        'RecordType': 'Item',
        'Location.LocationName': 'Example Location C',
        'Items.Title': 'Example Title C',
        'Items.Format.FormatName': 'Example Format C'
    }
]
I am using the XLSX library to convert the query results to XLSX which works for top-level properties only.
const worksheet: XLSX.WorkSheet = XLSX.utils.json_to_sheet(results.data);
const workbook: XLSX.WorkBook = { Sheets: { 'data': worksheet }, SheetNames: ['data'] };
const excelBuffer: any = XLSX.write(workbook, { bookType: 'xlsx', type: 'array' });
const data: Blob = new Blob([excelBuffer], { type: EXCEL_TYPE });
FileSaver.saveAs(data, new Date().getTime());
POSSIBLE OPTIONS
I am guessing I need to 'flatten' the structure either using aggregation in the query or by performing post-processing when the query is returned.
Option 1: Build the logic in the MongoDB query to flatten the results.
$replaceRoot might work, since it is able to "promote an existing embedded document to the top level". I am not sure if this will solve the problem exactly; I do not want to modify the documents in place, I just need to flatten the results for exporting.
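If the $replaceRoot route is pursued, a hedged sketch of the idea using $mergeObjects (available since MongoDB 3.6; field names are taken from the sample documents above):
db.records.aggregate([
    {
        $replaceRoot: {
            newRoot: {
                // keep the original fields and copy the nested name up to the top level
                $mergeObjects: ['$$ROOT', { LocationName: '$Location.LocationName' }]
            }
        }
    }
])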
Here is the MongoDB query I am using to produce the results:
records.find({ '$and': [ { RecordID: { '$gt': 9000 } } ]},
{ skip: 0, limit: 10, projection: { RecordID: 1, RecordType: 1, 'Items.Title': 1, 'Items.Location': 1 }});
Option 2: Iterate and flatten the results on the Node server
This is likely not the most performant option, but might be the easiest if I can't find a way to do so within the MongoDB query.
UPDATE:
I may be able to use MongoDB aggregate $project to 'flatten' the results. For example, this aggregate query effectively 'flattens' the results by 'renaming' the properties. I just need to figure out how to implement the query conditions within the aggregate operation.
db.records.aggregate({
    $project: {
        RecordID: 1,
        RecordType: 1,
        Title: '$Items.Title',
        Format: '$Items.Format'
    }
})
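To fold the original find() conditions into the aggregation (the open question in the update above), a hedged sketch that adds a $match stage ahead of the $project; the filter, skip and limit values are copied from the earlier query:
db.records.aggregate([
    // equivalent of the original find() filter
    { $match: { RecordID: { $gt: 9000 } } },
    {
        $project: {
            RecordID: 1,
            RecordType: 1,
            Title: '$Items.Title',
            Format: '$Items.Format'
        }
    },
    // pagination options from the original query, if still needed
    { $skip: 0 },
    { $limit: 10 }
])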
UPDATE 2:
I have abandoned the $project solution because I would need to change the entire API to support aggregation. Also, I would need to find a solution for populate, because aggregate does not support it; it uses $lookup instead, which is possible but time-consuming since I would need to write the queries dynamically. I am going back to looking into how to flatten the object by creating a function that iterates the array of objects recursively.
Below is a solution for transforming the Mongo data on the server via a function flattenObject which recursively flattens nested objects and returns a 'dot-type' key for nested paths.
Note that the snippet below also contains a function that renders an editable table as a preview; however, the important part you want (downloading the file) is triggered when you run the snippet and click the 'Download' button.
const flattenObject = (obj, prefix = '') =>
    Object.keys(obj).reduce((acc, k) => {
        const pre = prefix.length ? prefix + '.' : '';
        if (typeof obj[k] === 'object') Object.assign(acc, flattenObject(obj[k], pre + k));
        else acc[pre + k] = obj[k];
        return acc;
    }, {});
var data = [{
        RecordID: 9000,
        RecordType: "Item",
        Location: {
            _id: "5d0699326e310a6fde926a08",
            LocationName: "Example Location A"
        },
        Items: [{
                Title: "Example Title A",
                Format: {
                    _id: "5d0699326e310a6fde926a01",
                    FormatName: "Example Format A"
                }
            },
            {
                Title: "Example Title B",
                Format: {
                    _id: "5d0699326e310a6fde926a01",
                    FormatName: "Example Format B"
                }
            }
        ]
    },
    {
        RecordID: 9001,
        RecordType: "Item",
        Location: {
            _id: "5d0699326e310a6fde926a08",
            LocationName: "Example Location C"
        },
        Items: [{
            Title: "Example Title C",
            Format: {
                _id: "5d0699326e310a6fde926a01",
                FormatName: "Example Format C"
            }
        }]
    }
];
const EXCEL_MIME_TYPE = `application/vnd.ms-excel`;
const flattened = data.map(e => flattenObject(e));
const ws_default_header = XLSX.utils.json_to_sheet(flattened);
const ws_custom_header = XLSX.utils.json_to_sheet(flattened, {
    header: ['Items.Title', 'RecordID', 'RecordType', 'Location.LocationName', 'Items.Format.FormatName']
});
const def_workbook = {
    Sheets: {
        'data': ws_default_header
    },
    SheetNames: ['data']
};
const custom_workbook = {
    Sheets: {
        'data': ws_custom_header
    },
    SheetNames: ['data']
};
const def_excelBuffer = XLSX.write(def_workbook, {
    bookType: 'xlsx',
    type: 'array'
});
const custom_excelBuffer = XLSX.write(custom_workbook, {
    bookType: 'xlsx',
    type: 'array'
});
const def_blob = new Blob([def_excelBuffer], {
    type: EXCEL_MIME_TYPE
});
const custom_blob = new Blob([custom_excelBuffer], {
    type: EXCEL_MIME_TYPE
});
const def_button = document.getElementById('dl-def');
/* trigger browser to download file */
def_button.onclick = e => {
    e.preventDefault();
    saveAs(def_blob, `${new Date().getTime()}.xlsx`);
};
const custom_button = document.getElementById('dl-cus');
/* trigger browser to download file */
custom_button.onclick = e => {
    e.preventDefault();
    saveAs(custom_blob, `${new Date().getTime()}.xlsx`);
};
/*
render editable table to preview (for SO convenience)
*/
const html_string_default = XLSX.utils.sheet_to_html(ws_default_header, {
    id: "data-table",
    editable: true
});
const html_string_custom = XLSX.utils.sheet_to_html(ws_custom_header, {
    id: "data-table",
    editable: true
});
document.getElementById("container").innerHTML = html_string_default;
document.getElementById("container-2").innerHTML = html_string_custom;
<script src="https://cdnjs.cloudflare.com/ajax/libs/xlsx/0.14.3/xlsx.full.min.js"></script>
<head>
    <title>Excel file generation from JSON</title>
    <meta charset="utf-8" />
    <style>
        .xport,
        .btn {
            display: inline;
            text-align: center;
        }
        a {
            text-decoration: none
        }
        #data-table,
        #data-table th,
        #data-table td {
            border: 1px solid black
        }
    </style>
</head>
<script>
    function render(type, fn, dl) {
        var elt = document.getElementById('data-table');
        var wb = XLSX.utils.table_to_book(elt, {
            sheet: "Sheet JS"
        });
        return dl ?
            XLSX.write(wb, {
                bookType: type,
                bookSST: true,
                type: 'array'
            }) :
            XLSX.writeFile(wb, fn || ('SheetJSTableExport.' + (type || 'xlsx')));
    }
</script>
<div>Default Header</div>
<div id="container"></div>
<br/>
<div>Custom Header</div>
<div id="container-2"></div>
<br/>
<table id="xport"></table>
<button type="button" id="dl-def">Download Default Header Config</button>
<button type="button" id="dl-cus">Download Custom Header Config</button>
<script src="https://cdnjs.cloudflare.com/ajax/libs/FileSaver.js/1.3.8/FileSaver.min.js"></script>
I wrote a function to iterate all objects in the results array and create new flattened objects recursively. The flattenObject function shown here is similar to the one in the previous answer, and I took additional inspiration from this related answer.
The '_id' properties are specifically excluded from being added to the flattened object, since ObjectIds are still being returned as bson types even though I have the lean() option set.
I still need to figure out how to sort the objects so that they are in the given order, e.g. RecordID, RecordType, Items.Title. I believe that might be easiest to achieve by creating a separate function to iterate the flattened results, although it is not necessarily the most performant approach. Let me know if anyone has suggestions on how to sort the objects by a given order, or any improvements to the solution.
const apiCtrl = {};
/**
 * Async array iterator
 */
apiCtrl.asyncForEach = async (array, callback) => {
    for (let index = 0; index < array.length; index++) {
        await callback(array[index], index, array);
    }
};
// Check if a value is an object
const isObject = (val) => {
    return typeof val == 'object' && val instanceof Object && !(val instanceof Array);
};
// Check if a value is a date object
const isDateObject = (val) => {
    return Object.prototype.toString.call(val) === '[object Date]';
};
/**
 * Iterate object properties recursively and flatten all values to top-level properties
 * @param {object} obj Object to flatten
 * @param {string} prefix A string to hold the property name
 * @param {object} res A temp object to store the current iteration
 * Returns a new object with all properties on the top level only
 */
const flattenObject = (obj, prefix = '', res = {}) =>
    Object.entries(obj).reduce((acc, [key, val]) => {
        const k = `${prefix}${key}`;
        // Skip _ids since they are returned as bson values
        if (k.indexOf('_id') === -1) {
            // Check if value is an object
            if (isObject(val) && !isDateObject(val)) {
                flattenObject(val, `${k}.`, acc);
            // Check if value is an array
            } else if (Array.isArray(val)) {
                // Iterate each array value and call function recursively
                val.map(element => {
                    flattenObject(element, `${k}.`, acc);
                });
            // If value is not an object or an array
            } else if (val !== null && val !== undefined) {
                // Check if property has a value already
                if (res[k]) {
                    // Check for duplicate values
                    if (typeof res[k] === 'string' && res[k].indexOf(val) === -1) {
                        // Append value with a separator character at the beginning
                        res[k] += '; ' + val;
                    }
                } else {
                    // Set value
                    res[k] = val;
                }
            }
        }
        return acc;
    }, res);
/**
 * Convert DB query results to an array of flattened objects
 * Required to build a format that is exportable to csv, xlsx, etc.
 * @param {array} results Results of DB query
 * Returns a new array of objects with all properties on the top level only
 */
apiCtrl.buildExportColumns = async (results) => {
    const data = results.data;
    let exportColumns = [];
    if (data && data.length > 0) {
        try {
            // Iterate all records in results data array
            await apiCtrl.asyncForEach(data, async (record) => {
                // Convert the multi-level object to a flattened object
                const flattenedObject = flattenObject(record);
                // Push flattened object to array
                exportColumns.push(flattenedObject);
            });
        } catch (e) {
            console.error(e);
        }
    }
    return exportColumns;
};
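Regarding the open question about ordering the columns: one hedged option, building on the header option already used in the snippet answer above, is to pass the desired order straight to json_to_sheet instead of re-sorting the flattened objects. The column list below is an assumed example:
// hypothetical column order for the export
const columnOrder = ['RecordID', 'RecordType', 'Location.LocationName', 'Items.Title', 'Items.Format.FormatName'];
// json_to_sheet lays out the listed headers first, in the given order
const worksheet = XLSX.utils.json_to_sheet(exportColumns, { header: columnOrder });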
I have a large dataset that I want to insert into a postgres db, I can achieve this using pg-promise like this
function batchUpload (req, res, next) {
    var data = req.body.data;
    var cs = pgp.helpers.ColumnSet(['firstname', 'lastname', 'email'], { table: 'customer' });
    var query = pgp.helpers.insert(data, cs);
    db.none(query)
        .then(data => {
            // success;
        })
        .catch(error => {
            // error;
            return next(error);
        });
}
The dataset is an array of objects like this:
[
    {
        firstname: 'Lola',
        lastname: 'Solo',
        email: 'mail@solo.com'
    },
    {
        firstname: 'hello',
        lastname: 'world',
        email: 'mail@example.com'
    },
    {
        firstname: 'mami',
        lastname: 'water',
        email: 'mami@example.com'
    }
]
The challenge is that I have a column added_at which isn't included in the dataset and cannot be null. How do I add a timestamp for each record insertion to the query?
As per the ColumnConfig syntax:
const col = {
    name: 'added_at',
    def: () => new Date() // default to the current Date/Time
};
const cs = pgp.helpers.ColumnSet(['firstname', 'lastname', 'email', col], { table: 'customer' });
Alternatively, you can define it in a number of other ways, as ColumnConfig is very flexible.
Example:
const col = {
    name: 'added_at',
    mod: ':raw', // use raw-text modifier, to inject the string directly
    def: 'now()' // use now() for the column
};
or you can use property init to set the value dynamically:
const col = {
    name: 'added_at',
    mod: ':raw', // use raw-text modifier, to inject the string directly
    init: () => {
        return 'now()';
    }
};
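For illustration, a hedged sketch of what the generated query would look like with the raw-text now() variant; the row values are taken from the question's dataset:
// assumes `col` is one of the added_at definitions shown above
const cs = pgp.helpers.ColumnSet(['firstname', 'lastname', 'email', col], { table: 'customer' });
const query = pgp.helpers.insert({ firstname: 'Lola', lastname: 'Solo', email: 'mail@solo.com' }, cs);
//=> INSERT INTO "customer"("firstname","lastname","email","added_at")
//   VALUES('Lola','Solo','mail@solo.com',now())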
See the ColumnConfig syntax for details.
P.S. I'm the author of pg-promise.
I'd like to pass dictionaries with column names as keys, thus avoiding declaring the column names within the query itself (typing them directly).
Assume I have a table User with 2 column names:
idUser(INT)
fullName(VARCHAR)
To create a record using node-postgres, I'll need to declare within the query the column names like so:
var idUser = 2;
var fullName = "John Doe";
var query = 'INSERT INTO User(idUser, fullName) VALUES ($1, $2)';
database.query(query, [idUser, fullName], function(error, result) {
    callback(error, result.rows);
    database.end();
});
I'd prefer if there was a way to just pass a dictionary & have it infer the column names from the keys - If there's an easy trick I'd like to hear it.
E.g something like this:
var values = {
    idUser: 2,
    fullName: "John Doe"
};
var query = 'INSERT INTO User VALUES ($1)';
database.query(query, [values], function(error, result) {
    callback(error, result.rows);
    database.end();
});
A complete example of doing it with pg-promise:
const pgp = require('pg-promise')(/*options*/);
const cn = 'postgres://username:password@host:port/database';
const db = pgp(cn);
const values = {
    idUser: 2,
    fullName: 'John Doe'
};
// generating the insert query:
const query = pgp.helpers.insert(values, null, 'User');
//=> INSERT INTO "User"("idUser","fullName") VALUES(2,'John Doe')
db.none(query)
    .then(data => {
        // success;
    })
    .catch(error => {
        // error;
    });
And with focus on high performance it would change to this:
// generating a set of columns from the object (only once):
const cs = new pgp.helpers.ColumnSet(values, {table: 'User'});
// generating the insert query:
const query = pgp.helpers.insert(values, cs);
//=> INSERT INTO "User"("idUser","fullName") VALUES(2,'John Doe')
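Since the ColumnSet object is reusable, the same cs also covers multi-row inserts; a minimal sketch, with the second row made up purely for illustration:
const rows = [
    { idUser: 2, fullName: 'John Doe' },
    { idUser: 3, fullName: 'Jane Roe' } // hypothetical extra row
];
const multiInsert = pgp.helpers.insert(rows, cs);
//=> INSERT INTO "User"("idUser","fullName") VALUES(2,'John Doe'),(3,'Jane Roe')
db.none(multiInsert);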
There's no support for key-value values in the insert statement, so it cannot be done with native SQL.
However, the node-postgres extras page mentions multiple SQL generation tools, and, for example, Squel.js parameters can be used to construct SQL in a way very close to what you're looking for:
squel.insert()
    .into("User")
    .setFieldsRows([
        { idUser: 2, fullName: "John Doe" }
    ])
    .toParam()
// => { text: 'INSERT INTO User (idUser, fullName) VALUES (?, ?)',
//    values: [ 2, 'John Doe' ] }
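One caveat worth hedging: node-postgres expects $1-style placeholders rather than ?, which squel's postgres flavour produces. A sketch, assuming a squel version that bundles the flavour:
const squel = require('squel').useFlavour('postgres');
const q = squel.insert()
    .into('User')
    .setFieldsRows([{ idUser: 2, fullName: 'John Doe' }])
    .toParam();
// q.text   => 'INSERT INTO User (idUser, fullName) VALUES ($1, $2)'
// q.values => [ 2, 'John Doe' ]
// which can then be passed straight to node-postgres:
// client.query(q.text, q.values, callback);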
My case was a bit special as I had a field named order in the JSON object which is a keyword in SQL. Therefore I had to wrap everything in quotes using a JSONify() function.
Also note the numberedParameters argument as well as the double quotes around the 'Messages' string.
import { pool } from './connection';
function JSONify(obj: Record<string, any>) {
    var o = {};
    for (var i in obj) {
        o['"' + i + '"'] = obj[i]; // wrap each key in double quotes
    }
    return o;
}

// I have a table named "Messages" with the columns order and name
// I also supply the createdAt and updatedAt timestamps just in case
const messages = [
    {
        order: 0,
        name: 'Message with index 0',
        createdAt: new Date().toISOString(),
        updatedAt: new Date().toISOString(),
    }
];

// Create the insert statement (`insert` is squel's insert query builder, imported from the squel package)
const insertStatement = insert({ numberedParameters: true })
    .into('"Messages"')
    .setFieldsRows(messages.map((message) => JSONify(message)))
    .toParam();

console.log(insertStatement);
// Notice the quotes wrapping the table and column names
// => { text: 'INSERT INTO "Messages" ("order", "name", "createdAt", "updatedAt") VALUES ($1, $2, $3, $4)',
//    values: [ 0, 'Message with index 0', '2022-07-22T13:51:27.679Z', '2022-07-22T13:51:27.679Z' ] }

// Create
await pool.query(insertStatement.text, insertStatement.values);
See the Squel documentation for more details.
And this is how I create the pool object if anyone is curious.
import { Pool } from 'pg';
import { DB_CONFIG } from './config';

export const pool = new Pool({
    user: DB_CONFIG[process.env.NODE_ENV].username,
    host: DB_CONFIG[process.env.NODE_ENV].host,
    database: DB_CONFIG[process.env.NODE_ENV].database,
    password: DB_CONFIG[process.env.NODE_ENV].password,
    port: DB_CONFIG[process.env.NODE_ENV].port,
});
How can I find a value in the collection "inputs" based on this schema:
var inputJournalSchema = new Schema({
    createdAt: { type: Date, default: Date.now },
    inputs: [{ key: String, value: String }]
});
Basically, I'd like to check whether there is an element with a specific key and, if so, at which index position it is.
But how do I do that?
Once an object has been created with the schema:
object.inputs[0].key;
Here is a simple way to find a specific key, assuming the objects are stored in an array. If the objects are not stored in an array, the outer loop is not needed.
var findThis = 1;
var objects = [{}, {}, {}, {}];
objects.forEach(function (object) {
    object.inputs.forEach(function (input, index) {
        if (input.key == findThis) {
            // logic for once found here
            console.log(index);
        }
    });
});
This code can also be wrapped in a function that returns the index, using the key as an argument. Note that plain loops are used here, because a return inside a forEach callback would only exit the callback, not the outer function:
function findIndex(objects, key) {
    for (var i = 0; i < objects.length; i++) {
        for (var index = 0; index < objects[i].inputs.length; index++) {
            if (objects[i].inputs[index].key == key) {
                // logic for once found here
                return index;
            }
        }
    }
    return -1; // not found
}
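As a hedged aside, the same lookup on a single document can be done with the native Array.prototype.findIndex, which returns -1 when no element matches; doc and 'someKey' below are placeholders:
// returns the index of the first input whose key matches, or -1 if none exists
function indexOfInputKey(doc, key) {
    return doc.inputs.findIndex(function (input) {
        return input.key === key;
    });
}

// usage with a document created from inputJournalSchema:
var idx = indexOfInputKey(doc, 'someKey');
if (idx !== -1) {
    console.log('found at index', idx);
}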