I am using Node.js with PostgreSQL as the back-end database and Sequelize as the ORM.
I want to insert a list of records into a table. The records are an array of objects.
I am iterating over this array and pushing each record into the database, but sometimes the order is not maintained.
Can you suggest another way to fix this? I want to add the records one by one, serially.
var users = <array of users>;
var createdUsers = [];
for (var Index = 0; Index < users.length; Index++) {
logger.debug("Insert User" + users[Index].user_name);
models.User.create(users[Index]).then(function (user) {
logger.debug("Inserted User" + user.user_name);
createdUsers.push(user);
if (createdUsers.length === users.length) {
response.status(200).json(createdUsers);
}
}).catch(function (error) {
response.status(500).json(error);
});
}
users contains [{user_name:"AAA"},{user_name:"BBB"},{user_name:"CCC"},{user_name:"DDD"},{user_name:"EEE"},{user_name:"FFF"}].
After the inserts, the stored order is sometimes BBB, AAA, FFF, EEE, DDD.
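One way to make the inserts strictly serial, sketched below (not the original author's code; it assumes Node.js with async/await support and the same models, logger and response objects as above): Model.create() returns a promise, and the loop above starts all of them in parallel, so completion order is up to the database. Awaiting each insert before starting the next preserves the order.
async function insertUsersSerially(users) {
    // await each create() so the next insert only starts after the previous one finishes
    var createdUsers = [];
    for (var i = 0; i < users.length; i++) {
        logger.debug("Insert User" + users[i].user_name);
        var user = await models.User.create(users[i]);
        logger.debug("Inserted User" + user.user_name);
        createdUsers.push(user);
    }
    return createdUsers;
}
insertUsersSerially(users)
    .then(function (created) { response.status(200).json(created); })
    .catch(function (error) { response.status(500).json(error); });
If a single call is acceptable, Sequelize's bulkCreate(users) may also be worth a look: it inserts the whole array in one statement, in array order.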
I have one question: I am using Amazon DynamoDB, and I want the data from a table where user_id === "1". For that I currently have to scan the whole table and then apply a filter function to it. Is there a way to get only the records where user_id === "1"? That would save time, since I would no longer need to loop over the whole data set.
Here is what I am using:
let data = await db.scan(params).promise();
for (var i = 0; i < data.Items.length; i++) {
    // e.id comes from the surrounding scope: the user id being matched
    if (e.id == data.Items[i].parts.createdBy) {
        arr.push(data.Items[i].parts);
    }
}
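If the creator id you filter on is stored as a top-level attribute and is the partition key of the table, or of a global secondary index (GSI keys cannot be nested attributes like parts.createdBy), a Query reads only the matching items instead of scanning the whole table. A sketch with hypothetical table and index names, assuming the AWS SDK v2 DocumentClient used above:
const params = {
    TableName: 'my-table',           // hypothetical table name
    IndexName: 'createdBy-index',    // omit if createdBy is the table's own partition key
    KeyConditionExpression: 'createdBy = :uid',
    ExpressionAttributeValues: { ':uid': e.id }
};
const data = await db.query(params).promise();
// data.Items now holds only the records created by e.id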
I am trying to get form data from a MongoDB server and show it in a DataTable using Node.js. I have successfully implemented server-side pagination using the mongoose-paginate-v2 plugin. But now searching is not working. Below is the code of my Node.js and JavaScript files. Please help me with the searching.
Node.js code
app.get('/gettable',(req,res)=>{
console.log(req.query);
user.paginate({},{
page:Math.ceil(req.query.start / req.query.length) + 1,
limit:parseInt(req.query.length)
},function(err,result){
var mytable = {
draw:req.query.draw,
recordsTotal:0,
recordsFiltered:0,
data:[],
}
if(err) {
console.log(err);
res.json(mytable);
} else {
if(result.totalDocs > 0) {
mytable.recordsTotal = result.totalDocs;
mytable.recordsFiltered = result.totalDocs;
for(var key in result.docs) {
mytable.data.push([
result.docs[key]['name'],
result.docs[key]['lastname'],
result.docs[key]['email'],
result.docs[key]['pass'],
result.docs[key]['birthdate'],
result.docs[key]['zipcode'],
result.docs[key]['phonenumber'],
]);
}
}
res.json(mytable);
}
    });
});
DisplayTable.js code
$(document).ready(function(){
$('#example').DataTable({
"processing": true,
"serverSide": true,
"ajax": "http://localhost:8080/gettable"
});
})
As I said, I am successfully getting data from the server and showing it in the DataTable with server-side pagination, but searching is not working. Whatever I type into the search box does arrive in the search parameter, like this:
search: { value: 'svs', regex: 'false' },
_: '1548653540009' }
But it is not being applied in the DataTable to filter the columns.
As I said in the comment, search will not work out of the box when server-side mode is enabled in DataTables. That is because the whole functionality, whether sorting, paging, limiting, or searching, now has to be implemented on the server; DataTables only sends the parameters needed for it. The following code is just for your reference: it is not tested and you may get errors, but you can take inputs from it. Feel free to edit it if you run into errors, so that it can help future readers.
app.get('/gettable',(req,res)=>{
console.log(req.query);
var query = {},
    // array of columns that you want to show in the table
    columns = ['name', 'lastname', 'email', 'pass', 'birthdate', 'zipcode', 'phonenumber'];
// check if a global search value is defined and non-empty
if (typeof req.query.search !== 'undefined' && req.query.search.value != '') {
    // get the global search value
    var text = req.query.search.value;
    var searchConditions = [];
    // iterate over each column definition sent by DataTables to check whether
    // search is enabled for that particular column. You can enable/disable
    // searching per column in the DataTable initialization.
    for (var i = 0; i < req.query.columns.length; i++) {
        var requestColumn = req.query.columns[i];
        var column = columns[requestColumn.data];
        // if search is enabled for this column, add a per-column condition
        if (requestColumn.searchable == 'true') {
            var condition = {};
            condition[column] = { $regex: text };
            searchConditions.push(condition);
        }
    }
    // a global search should match when ANY searchable column matches, so the
    // per-column conditions are combined with $or (assigning them all directly
    // to query would AND them together and usually match nothing)
    if (searchConditions.length > 0) {
        query.$or = searchConditions;
    }
}
user.paginate(query,{
page:Math.ceil(req.query.start / req.query.length) + 1,
limit:parseInt(req.query.length)
},function(err,result){
var mytable = {
draw:req.query.draw,
recordsTotal:0,
recordsFiltered:0,
data:[],
}
if(err) {
console.log(err);
res.json(mytable);
} else {
if(result.totalDocs > 0) {
mytable.recordsTotal = result.totalDocs;
mytable.recordsFiltered = result.totalDocs;
for(var key in result.docs) {
    var data = [];
    // use the column names to pick the fields in the right order
    for(var i = 0; i < columns.length; i++) {
        data.push(result.docs[key][columns[i]]);
    }
    mytable.data.push(data);
}
}
res.json(mytable);
}
    });
});
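For reference, the searchable flag the server code checks comes from the DataTable initialization. An untested sketch of how the client side could declare the columns (their order must match the columns array on the server), with searching disabled for the password column as an example:
$('#example').DataTable({
    "processing": true,
    "serverSide": true,
    "ajax": "http://localhost:8080/gettable",
    "columns": [
        { "data": 0 },                       // name
        { "data": 1 },                       // lastname
        { "data": 2 },                       // email
        { "data": 3, "searchable": false },  // pass
        { "data": 4 },                       // birthdate
        { "data": 5 },                       // zipcode
        { "data": 6 }                        // phonenumber
    ]
});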
I'm stuck on a mongoose query. I have an array of ids as input, and I want to search the database for elements matching those ids. It returns a perfect result when using $in in a find query.
Now I want the ids from the array that were not found in the database. What is the best way to do that?
Any suggestions in the comments are welcome.
Template.find({
    _id: { $in: [
        "as6d87as67da7s8d87a87", // available in database
        "as6dasd8sa9d8a9a9s8d7", // not available in database
        "6756asd5as6dsadghasd3"  // available in database
    ]}
}, function(err, result){
    // Need the result as "as6dasd8sa9d8a9a9s8d7" (the id that is not matched), or as an object.
    // Can we do this with an `aggregate` query?
});
I think this does what you want:
var _ids = [
"as6d87as67da7s8d87a87",
"as6dasd8sa9d8a9a9s8d7",
"6756asd5as6dsadghasd3"
];
Template.find({
    _id: { $in: _ids }
}, function(err, result){
    var filteredResult = _ids.filter(currentId => !result.some(item => item._id.toString() == currentId));
});
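With the sample ids above, filteredResult ends up as ["as6dasd8sa9d8a9a9s8d7"], the one id that was not matched in the database.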
The answer by David will work. The idea here is to compare the _ids array with the _ids in the result and return the missing ones. Adding more code just for understanding purposes:
Push the _id of each result document into a new array
Compare _ids with the new array and return the missing elements
var resIDs = [];
for (var i = 0; i < result.length; i++) {
    resIDs.push(result[i]._id.toString());
}
var resultFiltered = _ids.filter(function (v) {
    return !resIDs.includes(v.toString());
});
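If the id array is large, a variant (not from either answer, untested) that fetches only the _id field and uses a Set for constant-time lookups:
Template.find({ _id: { $in: _ids } }, { _id: 1 }, function (err, result) {
    if (err) return console.error(err);
    // collect the found ids once, then each membership check is O(1)
    var found = new Set(result.map(function (doc) { return doc._id.toString(); }));
    var missing = _ids.filter(function (id) { return !found.has(id); });
    // missing holds the ids that were not matched in the database
});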
I am using NodeJS, PostgreSQL and the amazing pg-promise library. In my case, I want to execute three main queries:
Insert one tweet in the table 'tweets'.
In case there are hashtags in the tweet, insert them into another table 'hashtags'
Then link both tweet and hashtag in a third table 'hashtagmap' (a many-to-many relation table)
Here is a sample of the request's body (JSON):
{
"id":"12344444",
"created_at":"1999-01-08 04:05:06 -8:00",
"userid":"#postman",
"tweet":"This is the first test from postman!",
"coordinates":"",
"favorite_count":"0",
"retweet_count":"2",
"hashtags":{
"0":{
"name":"test",
"relevancetraffic":"f",
"relevancedisaster":"f"
},
"1":{
"name":"postman",
"relevancetraffic":"f",
"relevancedisaster":"f"
},
"2":{
"name":"bestApp",
"relevancetraffic":"f",
"relevancedisaster":"f"
}
    }
}
All the fields above should go into the table "tweets", except hashtags, which in turn should go into the table "hashtags".
Here is the code I am using, based on the Nested transactions section of the pg-promise docs, inside a Node.js module. I guess I need nested transactions because I need to know both tweet_id and hashtag_id in order to link them in the hashtagmap table.
// Columns
var tweetCols = ['id','created_at','userid','tweet','coordinates','favorite_count','retweet_count'];
var hashtagCols = ['name','relevancetraffic','relevancedisaster'];
//pgp Column Sets
var cs_tweets = new pgp.helpers.ColumnSet(tweetCols, {table: 'tweets'});
var cs_hashtags = new pgp.helpers.ColumnSet(hashtagCols, {table:'hashtags'});
return{
// Transactions
add: body =>
rep.tx(t => {
return t.one(pgp.helpers.insert(body,cs_tweets)+" ON CONFLICT(id) DO UPDATE SET coordinates = "+body.coordinates+" RETURNING id")
.then(tweet => {
var queries = [];
for(var i = 0; i < body.hashtags.length; i++){
queries.push(
t.tx(t1 => {
return t1.one(pgp.helpers.insert(body.hashtags[i],cs_hashtags) + "ON CONFLICT(name) DO UPDATE SET fool ='f' RETURNING id")
.then(hash =>{
t1.tx(t2 =>{
return t2.none("INSERT INTO hashtagmap(tweetid,hashtagid) VALUES("+tweet.id+","+hash.id+") ON CONFLICT DO NOTHING");
});
});
}));
}
return t.batch(queries);
});
})
}
The problem is that with this code I am able to successfully insert the tweet, but nothing happens after that: I cannot insert the hashtags, nor link the hashtags to the tweet.
Sorry, I am new to coding, so I guess I did not understand how to properly return from the transaction and how to perform this simple task. I hope you can help me.
Thank you in advance.
Jean
Improving on Jean Phelippe's own answer:
// Columns
var tweetCols = ['id', 'created_at', 'userid', 'tweet', 'coordinates', 'favorite_count', 'retweet_count'];
var hashtagCols = ['name', 'relevancetraffic', 'relevancedisaster'];
//pgp Column Sets
var cs_tweets = new pgp.helpers.ColumnSet(tweetCols, {table: 'tweets'});
var cs_hashtags = new pgp.helpers.ColumnSet(hashtagCols, {table: 'hashtags'});
return {
/* Tweets */
// Add a new tweet and update the corresponding hash tags
add: body =>
db.tx(t => {
return t.one(pgp.helpers.insert(body, cs_tweets) + ' ON CONFLICT(id) DO UPDATE SET coordinates = ' + body.coordinates + ' RETURNING id')
.then(tweet => {
var queries = Object.keys(body.hashtags).map(key => {
    return t.one(pgp.helpers.insert(body.hashtags[key], cs_hashtags) + ' ON CONFLICT(name) DO UPDATE SET fool = $1 RETURNING id', 'f')
.then(hash => {
return t.none('INSERT INTO hashtagmap(tweetid, hashtagid) VALUES($1, $2) ON CONFLICT DO NOTHING', [+tweet.id, +hash.id]);
});
});
return t.batch(queries);
});
})
.then(data => {
// transaction was committed;
// data = [null, null,...] as per t.none('INSERT INTO hashtagmap...
})
.catch(error => {
// transaction rolled back
})
},
NOTES:
As per my notes earlier, you must chain all queries, or else you will end up with loose promises
Stay away from nested transactions, unless you understand exactly how they work in PostgreSQL (read this, and specifically the Limitations section).
Avoid manual query formatting; it is not safe. Always rely on the library's query formatting.
Unless you are passing the result of transaction somewhere else, you should at least provide the .catch handler.
P.S. For the syntax like +tweet.id, it is the same as parseInt(tweet.id), just shorter, in case those are strings ;)
For those who face a similar problem, I will post the answer.
Firstly, my mistakes:
In the for loop: body.hashtags.length doesn't exist, because I am dealing with an object (a very basic mistake). Changed to Object.keys(body.hashtags).length.
Why use so many transactions? Following the answer by vitaly-t in Interdependent Transactions with pg-promise, I removed the extra transactions. It was not yet clear to me how you can open one transaction and use the result of one query in another query within the same transaction; a small sketch of that pattern follows.
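For illustration, a minimal sketch of that pattern (not from the original post): inside db.tx() every query runs on the same transaction context t, so the result of one query is simply used by the next one in the promise chain.
db.tx(t => {
    return t.one('INSERT INTO tweets(tweet) VALUES($1) RETURNING id', 'hello')
        .then(tweet => t.none('INSERT INTO hashtagmap(tweetid, hashtagid) VALUES($1, $2)', [tweet.id, 1]));
});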
Here is the final code:
// Columns
var tweetCols = ['id','created_at','userid','tweet','coordinates','favorite_count','retweet_count'];
var hashtagCols = ['name','relevancetraffic','relevancedisaster'];
//pgp Column Sets
var cs_tweets = new pgp.helpers.ColumnSet(tweetCols, {table: 'tweets'});
var cs_hashtags = new pgp.helpers.ColumnSet(hashtagCols, {table:'hashtags'});
return {
/* Tweets */
// Add a new tweet and update the corresponding hashtags
add: body =>
rep.tx(t => {
return t.one(pgp.helpers.insert(body,cs_tweets)+" ON CONFLICT(id) DO UPDATE SET coordinates = "+body.coordinates+" RETURNING id")
.then(tweet => {
var queries = [];
for(var i = 0; i < Object.keys(body.hashtags).length; i++){
queries.push(
t.one(pgp.helpers.insert(body.hashtags[i], cs_hashtags) + " ON CONFLICT(name) DO UPDATE SET fool ='f' RETURNING id")
    .then(hash => {
        // return the inner query so it is chained into the transaction
        return t.none("INSERT INTO hashtagmap(tweetid,hashtagid) VALUES(" + tweet.id + "," + hash.id + ") ON CONFLICT DO NOTHING");
    })
);
}
return t.batch(queries);
});
    }),
};
Is there a way to set a fixed limit on the number of documents that can be inserted via a bulk insert in mongodb using the node.js client?
I am inserting a number of documents into a collection that has a unique index on fieldA via a bulk insert. Some of the inserts will fail due to fieldA being non-unique, so I can't know how many will be inserted beforehand, but I want to limit the nInserted so that the total of these documents never goes over 5000.
All I can think to do is run the full insert and, if nInserted brings the total above 5000, remove the n last inserted documents so that the total is 5000, but this seems a bit silly.
The ordered bulk insert is almost right, but I don't want it to stop at the first index conflict; it should keep going while there is still room (i.e. < 5000 total).
Here's an example of what I'm trying to achieve:
db.myCol.count({foo: val}, function(err, count) {
var remaining = 5000 - count;
if (remaining > 0) {
var bulk = db.myCol.initializeUnorderedBulkOp();
toInsert.forEach(function(item) {
bulk.insert(item);
});
// make sure no more than remaining is inserted
bulk.execute(function(err, result) {
// currently, I would just insert all and
// then remove the overflow with another db action
// if nInserted + count > 5000
});
}
});
Currently there is no way to tell the Bulk API to stop inserting records once a limit of successful inserts has been reached.
One way of doing it on the client side:
Feed the Bulk API at most n (5000 in this case) documents at a time.
If any error occurred during the insert, bulk insert the remaining documents.
Do it recursively.
You can further add logic to process only the remaining number of records when remaining < max (see the sketch after the code below).
Modified code:
var toInsert = [..]; // documents to be inserted.
var max = 5000;      // max total records for the Bulk insert.
function execBulk(start, end){
    db.myCol.count({foo: 'bar'}, function(err, count) {
        var remaining = max - count;
        if (remaining > 0 && toInsert.length > start) {
            var bulk = db.myCol.initializeUnorderedBulkOp();
            toInsert.slice(start, end).forEach(function(item) {
                bulk.insert(item);
            });
            // insert the records
            bulk.execute(function(err, result) {
                if (err) {
                    console.log(err);
                    // insert the next set of at most 5000 records.
                    execBulk(end, end + max);
                } else {
                    console.log(result);
                }
            });
        }
    });
}
Invoking the function:
execBulk(0,max);
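The last point above, processing only the remaining number of records, could look roughly like this: a hypothetical, untested tweak that replaces the plain slice(start, end) inside execBulk so the 5000 cap cannot be overshot.
// cap the chunk at the remaining headroom before queueing the inserts
var chunk = toInsert.slice(start, Math.min(end, start + remaining));
chunk.forEach(function (item) {
    bulk.insert(item);
});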