I am trying to create an array of JSON objects from a SQL Server query using NodeJS. Calling JSON.stringify on each row gives me the results I am looking for as individual JSON objects, but not in an array. Ideally I just want to write these results to a file as a JSON array. Any ideas appreciated.
I tried the following:
const sql = require('mssql');
const fs = require('fs');

const config = {
    // Creds removed
};

sql.connect(config, err => {
    console.log(err);

    const request = new sql.Request();
    request.stream = true; // You can set streaming differently for each request
    request.query(fs.readFileSync('./new-query.sql').toString()); // or request.execute(procedure)

    request.on('row', row => {
        console.log(JSON.stringify(row));
    });

    request.on('error', err => {
        console.log(err);
        // May be emitted multiple times
    });

    request.on('done', result => {
        // console.log('done emitted', result);
        sql.close();
    });
});
As always, reading the library notes in more detail led me to a solution. Using the mssql library you can actually work on the entire returned recordset as opposed to every row. This code works as I wanted:
const sql = require('mssql');
const fs = require('fs');

const config = {
    // cred removed
};

sql.connect(config).then(() => {
    return sql.query(fs.readFileSync('./new-query.sql').toString());
}).then(result => {
    console.log(JSON.stringify(result.recordsets[0]));
}).catch(err => {
    // ... error checks
});

sql.on('error', err => {
    // ... error handler
});
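Since the original goal was a file, the stringified recordset can be written out directly. A minimal sketch, assuming the same config and query file (the results.json file name is my own choice):

sql.connect(config).then(() => {
    return sql.query(fs.readFileSync('./new-query.sql').toString());
}).then(result => {
    // result.recordsets[0] is already an array of row objects
    fs.writeFileSync('./results.json', JSON.stringify(result.recordsets[0], null, 2));
}).catch(err => {
    console.log(err);
});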
How can I return results with a parameterized query in Node.js?
The query runs fine if I remove RETURNING*.
Right now, the server returns this error:
error: syntax error at or near "RETURNING"
server.js
const text = "UPDATE users SET info = JSONB_SET(info, '{geometry,coordinates}', '" + coords + "') WHERE id=$1 RETURNING*";
const values = [id];

pool.query(text, values, (err, res) => {
    if (err) {
        // log errors
        console.log(err.stack);
        // return error to client
    } else {
        // success
        // console.log(res.rows);
    }
});
To use postgres parameterized queries with node, the proper way is to use the query-config and promise syntax below, passing every value as a parameter instead of concatenating it into the query string. A similar issue was raised on Github here and here.
It seems that the problem with the syntax in my question is that pg reads the $1 as part of the literal string instead of as a placeholder, because it is wrapped in quotes.
The promise syntax seems to fix this issue. At least, it works for me.
UPDATE Query
const queryOpts = {
    text: "UPDATE users SET info = jsonb_set(info, '{geometry,coordinates}', $1) WHERE id = $2",
    values: [coords, userid]
}

pool.query(queryOpts).then(response => {
    console.log(response.rows)
}).catch(error => {
    console.log(error)
})
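The RETURNING * from the original question also works once the value is parameterized; a sketch with the same pool and variables:

const queryOpts = {
    text: "UPDATE users SET info = jsonb_set(info, '{geometry,coordinates}', $1) WHERE id = $2 RETURNING *",
    values: [coords, userid]
}

pool.query(queryOpts).then(response => {
    console.log(response.rows) // the updated row(s)
}).catch(error => {
    console.log(error)
})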
SELECT Query
var email = JSON.stringify(logins.email);

const queryOpts = {
    text: "SELECT * FROM users WHERE info -> 'email' = $1",
    values: [email]
}

pool.query(queryOpts).then(response => {
    console.log(response.rows)
}).catch(error => {
    console.log(error)
})
I am new to MongoDB. I am trying to query from different collections, and when I fetch data from the category collection (effectively a select * on the collection) it throws the error MongoError: pool destroyed.
My understanding is that some find({}) call is using a connection pool that has already been destroyed.
The code I am using inside the model is below:
const MongoClient = require('mongodb').MongoClient;
const dbConfig = require('../configurations/database.config.js');

export const getAllCategoriesApi = (req, res, next) => {
    return new Promise((resolve, reject) => {
        let finalCategory = [];
        const client = new MongoClient(dbConfig.url, { useNewUrlParser: true });
        client.connect(err => {
            const collection = client.db(dbConfig.db).collection("categories");
            debugger
            if (err) throw err;
            let query = { CAT_PARENT: { $eq: '0' } };
            collection.find(query).toArray(function(err, data) {
                if (err) return next(err);
                finalCategory.push(data);
                resolve(finalCategory);
                // db.close();
            });
            client.close();
        });
    });
}
My finding here is that when I am using
let query = { CAT_PARENT: { $eq: '0' } };
collection.find(query).toArray(function(err, data) {})
find(query) returns data, but with {} or $gte/$gt it throws the pool error.
The code I have written in the controller is below:
import { getAllCategoriesListApi } from '../models/fetchAllCategory';
const redis = require("redis");
const client = redis.createClient(process.env.REDIS_PORT);

export const getAllCategoriesListData = (req, res, next, query) => {
    // Try fetching the result from Redis first in case we have it cached
    return client.get(`allstorescategory:${query}`, (err, result) => {
        if (result) {
            // The key exists in the Redis store
            res.send(result);
        } else {
            // Key does not exist in the Redis store
            getAllCategoriesListApi(req, res, next).then(function (data) {
                const responseJSON = data;
                // Save the API response in the Redis store
                client.setex(`allstorescategory:${query}`, 3600, JSON.stringify({ source: 'Redis Cache', responseJSON }));
                res.send(responseJSON);
            }).catch(function (err) {
                console.log(err);
            });
        }
    });
}
Can anyone tell me what mistake I am making here, and how I can fix the pool issue?
Thanks in advance.
I assume that toArray is asynchronous, i.e. it invokes the callback passed in as results become available (read from the network).
If this is true, the client.close() call is going to execute before the results have been read, which likely yields your error.
The close call needs to happen after you have finished iterating the results.
Separately from this, you should probably not be creating the client instance in the request handler like this. Client instances are expensive to create (they must talk to all of the servers in the deployment before they can actually perform queries) and should generally be created once per running process rather than per request.
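A minimal sketch of both fixes, assuming the MongoDB 3.x Node driver and the same database.config.js module:

const MongoClient = require('mongodb').MongoClient;
const dbConfig = require('../configurations/database.config.js');

// Create the client once per process, not per request
const client = new MongoClient(dbConfig.url, { useNewUrlParser: true });
const clientReady = client.connect(); // resolves once connected

export const getAllCategoriesApi = (req, res, next) => {
    return clientReady.then(() => {
        const collection = client.db(dbConfig.db).collection("categories");
        const query = { CAT_PARENT: { $eq: '0' } };
        // toArray() returns a promise; the client stays open for later requests
        return collection.find(query).toArray();
    });
};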
I am trying to pass a URL parameter into the SQL query. I have a column called "puppy_id" and one of the values is puppy1.
I want to call this URL: localhost:3000/api/puppies/puppy1
and it should execute the query SELECT * FROM puppytable WHERE puppy_id='puppy1' in the database and return the output.
I have no problem connecting to the database, but no data is returned. I think I am doing something wrong in executing the query.
My code:
index.js
var express = require('express');
var router = express.Router();
var db = require('../queries');
router.get('/api/puppies/:puppy_id', db.getPuppyStatus);
module.exports = router;
queries.js
module.exports = {
    getPuppyStatus: getPuppyStatus
};

function getPuppyStatus(req, res, next) {
    var puppyID = parseInt(req.params.puppy_id);
    db.any('select * from puppytable where puppy_id = $1', puppyID)
        .then(function (data) {
            res.status(200)
                .json({
                    status: 'success',
                    data: data,
                    message: 'Retrieved puppies'
                });
        })
        .catch(function (err) {
            return next(err);
        });
}
queries.js is in the root of the project directory.
It is required from index.js like this:
var db = require('../queries');
This is my output:
{"status":"success","data":[],"message":"Retrieved puppies"}
When I debug with console.log(puppyID);, it gives me NaN.
What is the recommended way to do this?
The problem is the parseInt call: req.params.puppy_id is a string like 'puppy1', and parseInt turns it into NaN, which will not match anything in your database. Use the raw string, as below:
function getPuppyStatus(req, res, next) {
    // puppy_id is a string like 'puppy1'; it can't be turned into an integer
    var puppyID = req.params.puppy_id;
    db.any('select * from puppytable where puppy_id = $1', puppyID)
        .then(function (data) {
            res.status(200)
                .json({
                    status: 'success',
                    data: data,
                    message: 'Retrieved puppies'
                });
        })
        .catch(function (err) {
            return next(err);
        });
}
You're converting the string "puppy1" to a number; this is why you're getting NaN.
I don't know the type of the id column in your table.
You have two options:
id as number: send a number instead of a string and your code should be fine.
id as string: remove the parseInt.
var puppyID = req.params.puppy_id;
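For the numeric option, parseInt is fine as long as the route is called with numeric ids (a hypothetical /api/puppies/42, for example):
var puppyID = parseInt(req.params.puppy_id, 10); // only if puppy_id values are numeric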
Does Mongoose v3.6+ support batch inserts now? I've searched for a few minutes but anything matching this query is a couple of years old and the answer was an unequivocal no.
Edit:
For future reference, the answer is to use Model.create(). create() accepts an array as its first argument, so you can pass your documents to be inserted as an array.
See Model.create() documentation
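A minimal sketch of the array form, using a hypothetical Candy model like the one in the answers below:

var candies = [{ candy: 'jelly bean' }, { candy: 'snickers' }];

Candy.create(candies, function (err, docs) {
    if (err) return console.error(err);
    console.log('%d candies created.', docs.length);
});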
Model.create() vs Model.collection.insert(): a faster approach
Model.create() is a bad way to do inserts if you are dealing with a very large bulk. It will be very slow. In that case you should use Model.collection.insert, which performs much better. Depending on the size of the bulk, Model.create() will even crash! I tried it with a million documents, with no luck. Using Model.collection.insert it took just a few seconds.
Model.collection.insert(docs, options, callback)
docs is the array of documents to be inserted;
options is an optional configuration object - see the docs
callback(err, docs) will be called after all documents get saved or an error occurs. On success, docs is the array of persisted documents.
As Mongoose's author points out here, this method will bypass any validation procedures and access the Mongo driver directly. It's a trade-off you have to make since you're handling a large amount of data, otherwise you wouldn't be able to insert it to your database at all (remember we're talking hundreds of thousands of documents here).
A simple example
var Potato = mongoose.model('Potato', PotatoSchema);
var potatoBag = [/* a humongous amount of potato objects */];

Potato.collection.insert(potatoBag, onInsert);

function onInsert(err, docs) {
    if (err) {
        // TODO: handle error
    } else {
        console.info('%d potatoes were successfully stored.', docs.length);
    }
}
Update 2019-06-22: although insert() can still be used just fine, it's been deprecated in favor of insertMany(). The parameters are exactly the same, so you can just use it as a drop-in replacement and everything should work just fine (well, the return value is a bit different, but you're probably not using it anyway).
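Applied to the example above, the drop-in replacement is:
Potato.collection.insertMany(potatoBag, onInsert); // same arguments; only the result shape differs slightly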
Reference
Mongo documentation
Aaron Heckman on Google Groups discussing bulk inserts
Mongoose 4.4.0 now supports bulk insert
Mongoose 4.4.0 introduces true bulk insert with the model method .insertMany(). It is way faster than looping on .create() or providing it with an array.
Usage:
var rawDocuments = [/* ... */];
Book.insertMany(rawDocuments)
    .then(function (mongooseDocuments) {
        /* ... */
    })
    .catch(function (err) {
        /* Error handling */
    });
Or
Book.insertMany(rawDocuments, function (err, mongooseDocuments) { /* Your callback function... */ });
You can track it on:
https://github.com/Automattic/mongoose/issues/723
https://github.com/Automattic/mongoose/blob/1887e72694829b62f4e3547283783cebbe66b46b/lib/model.js#L1774
Indeed, you can use Mongoose's create method with an array of documents. See this example:
Candy.create({ candy: 'jelly bean' }, { candy: 'snickers' }, function (err, jellybean, snickers) {
});
The callback function contains the inserted documents.
You do not always know how many items have to be inserted (with a fixed argument length, as above), so you can loop through the arguments object:

var insertedDocs = [];
for (var i = 1; i < arguments.length; ++i) {
    insertedDocs.push(arguments[i]);
}
Update: A better solution
A better solution would be to use Candy.collection.insert() instead of Candy.create(), used in the example above, because it's faster (create() calls Model.save() on each item, so it's slower).
See the Mongo documentation for more information:
http://docs.mongodb.org/manual/reference/method/db.collection.insert/
(thanks to arcseldon for pointing this out)
Here are both ways of saving data, with insertMany and save.
1) Mongoose: save an array of documents with insertMany in bulk
/* write mongoose schema model and export this */
var Potato = mongoose.model('Potato', PotatoSchema);

/* write this api in routes directory */
router.post('/addDocuments', function (req, res) {
    const data = [/* array of objects to save in db */];
    Potato.insertMany(data)
        .then((result) => {
            console.log("result ", result);
            res.status(200).json({ 'success': 'new documents added!', 'data': result });
        })
        .catch(err => {
            console.error("error ", err);
            res.status(400).json({ err });
        });
})
2) Mongoose: save an array of documents with .save()
These documents will be saved in parallel.
/* write mongoose schema model and export this */
var Potato = mongoose.model('Potato', PotatoSchema);

/* write this api in routes directory */
router.post('/addDocuments', function (req, res) {
    const saveData = [];
    const data = [/* array of objects to save in db */];
    data.map((item) => {
        // map passes the element itself, not an index
        var potato = new Potato(item);
        potato.save()
            .then((result) => {
                saveData.push(result);
                if (saveData.length === data.length) {
                    res.status(200).json({ 'success': 'new documents added!', 'data': saveData });
                }
            })
            .catch((err) => {
                console.error(err);
                res.status(500).json({ err });
            });
    });
})
You can perform bulk insert using mongoose, as the highest-scored answer shows.
But the example as given cannot work; it should be:
/* a humongous amount of potatoes */
var potatoBag = [{ name: 'potato1' }, { name: 'potato2' }];

var Potato = mongoose.model('Potato', PotatoSchema);
Potato.collection.insert(potatoBag, onInsert);

function onInsert(err, docs) {
    if (err) {
        // TODO: handle error
    } else {
        console.info('%d potatoes were successfully stored.', docs.length);
    }
}
Don't use schema instances for the bulk insert; use plain JavaScript objects.
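A quick sketch of the difference, reusing the Potato model from above:

var bad = [new Potato({ name: 'potato1' })]; // schema instances carry mongoose internals
var good = [{ name: 'potato1' }];            // plain objects insert cleanly

Potato.collection.insert(good, onInsert);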
It seems that with mongoose there is a limit when inserting more than 1000 documents using
Potato.collection.insert(potatoBag, onInsert);
You can use:
var bulk = Model.collection.initializeOrderedBulkOp();
async.each(users, function (user, callback) {
    bulk.insert(user); // queue each document on the bulk operation
    callback();
}, function (err) {
    var bulkStart = Date.now();
    bulk.execute(function (err, res) {
        if (err) console.log(" gameResult.js > err ", err);
        console.log(" gameResult.js > BULK TIME ", Date.now() - bulkStart);
        console.log(" gameResult.js > BULK INSERT ", res.nInserted);
    });
});
But this is almost twice as fast when testing with 10000 documents:
function fastInsert(arrOfResults) {
    var startTime = Date.now();
    var count = 0;
    var c = Math.round(arrOfResults.length / 990);
    var fakeArr = [];
    fakeArr.length = c;
    var docsSaved = 0;

    async.each(fakeArr, function (item, callback) {
        var sliced = arrOfResults.slice(count, count + 999);
        count = count + 999;
        if (sliced.length != 0) {
            GameResultModel.collection.insert(sliced, function (err, docs) {
                docsSaved += docs.ops.length;
                callback();
            });
        } else {
            callback();
        }
    }, function (err) {
        console.log(" gameResult.js > BULK INSERT AMOUNT: ", arrOfResults.length, "docsSaved ", docsSaved, " DIFF TIME:", Date.now() - startTime);
    });
}
You can perform a bulk insert from the MongoDB shell by inserting the values as an array:
db.collection.insert([{values},{values},{values},{values}]);
Sharing working and relevant code from our project:
// documentsArray is the list of sampleCollection objects
sampleCollection.insertMany(documentsArray)
    .then((res) => {
        console.log("insert sampleCollection result ", res);
    })
    .catch(err => {
        console.log("bulk insert sampleCollection error ", err);
    });