I am currently writing an API in Node.js using Express that retrieves data from a SQLite database. The table containing the data to be retrieved, called DATA, has three columns:
ID - Integer
VALUE - Real
DATERECORDED - Integer
The data from the last five months is to be returned, and to do this, I am using the following query:
SELECT ID, VALUE FROM DATA WHERE DATERECORDED BETWEEN date('now', '-5 months') AND date('now')
I recently removed several records from the database and entered new records. However, when I make a GET request to my API, I still get back the data that would have been returned when the old records were in the database. I even deleted all of the records in the database, but the old results are still being returned. Is there any way I can prevent this caching?
I have also tried setting the cache_size to 0. The query is being used in the following code:
app.get('/get-data', function(req, res) {
    console.log('Sending performance');
    var stmnt = // query comes here
    if (!exists) { // `exists` presumably checks that the database file is present
        res.send({});
    } else {
        var db = new sqlite3.Database(db_path);
        db.all(stmnt, function(err, rows) {
            if (err) {
                res.send({});
                return; // avoid sending a second response below
            }
            if (rows) {
                var docs = {};
                var by_id = _.groupBy(rows, function(row) {
                    return row.ID;
                });
                for (var key in by_id) {
                    if (by_id.hasOwnProperty(key)) {
                        docs[key] = _.map(by_id[key], 'VALUE');
                    }
                }
                res.send(docs);
            } else {
                res.send({});
            }
        });
        db.close(); // sqlite3 queues close() until pending queries finish
    }
});
I'm a bit puzzled by the situation I have now.
I have a simple SQL statement that I execute from Node.js against a SQLite database. The statement returns values with a lot of decimal places, although my data only contains two decimals.
When I run the exact same query in DB Browser for SQLite, I get the correct result.
My Node.js code:
app.get('/payerComparison/', (req, res) => {
    // Returns labels and values within the response
    var response = {};
    let db = new sqlite3.Database('./Spending.db', sqlite3.OPEN_READONLY, (err) => {
        if (err) { console.log(err.message); return; }
    });
    response['labels'] = [];
    response['data'] = [];
    db.each("SELECT payer, sum(amount) AS sum FROM tickets GROUP BY payer", (err, row) => {
        if (err) { console.log(err.message); return; }
        response['labels'].push(row.payer);
        response['data'].push(row.sum);
    });
    // close() is queued behind each(), so the response is sent after all rows arrive
    db.close((err) => {
        if (err) { console.log(err.message); return; }
        // Send data
        console.log(response);
        res.send(JSON.stringify(response));
    });
});
What I have in the command line
{
labels: [ 'Aurélien', 'Commun', 'GFIS', 'Pauline' ],
data: [ 124128.26, 136426.43000000008, 5512.180000000001, 39666.93 ]
}
The result in DB Browser (screenshot not included) shows the sums with two decimals.
I hope you can help me clarify this mystery!
Thank you
Round the values to 2 decimals :). The long fractions appear because SQLite stores REAL values as IEEE 754 doubles, so summing the amounts accumulates tiny floating-point errors; DB Browser merely formats them away in its display.
SELECT payer, round(sum(amount),2) AS sum FROM tickets GROUP BY payer
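Alternatively, you can round on the JavaScript side when building the response; a minimal sketch against the handler above:
db.each("SELECT payer, sum(amount) AS sum FROM tickets GROUP BY payer", (err, row) => {
    if (err) { console.log(err.message); return; }
    response['labels'].push(row.payer);
    // toFixed() returns a string, so convert back to a number
    response['data'].push(Number(row.sum.toFixed(2)));
});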
Currently I have data that needs to be inserted into MongoDB. The data has been successfully inserted; however, there are some values I would like to add to the data before appending it to MongoDB.
How can I do so? This is my code for inserting data into MongoDB:
var MongoClient = require('mongodb').MongoClient,
    format = require('util').format;

MongoClient.connect('mongodb://127.0.0.1:27017/test', function(err, db) {
    if (err) throw err;
    var collection = db.collection('test_insert');
    collection.insert({ values: parsestring() }, function(err, docs) {
        collection.count(function(err, count) {
            console.log(format("count = %s", count));
        });
    });
    // Locate all the entries using find
    collection.find().toArray(function(err, results) {
        console.dir(results);
        // Let's close the db
        db.close();
    });
});

// "1,61,54,87,20,12/3/2016,8:39AM" is the default value format
function parsestring(str = "1,30,40,50,20,10/10/2016,10:39AM") {
    return str.split(",");
}
I would like to add labels to the values in the text string.
For example:
Machine Unit: 1,
Air Temperature °C: 30,
Water Temperature °C: 40,
Heat Temperature °C: 50,
Room Temperature °C: 20,
Date: 10/10/2016,
Time: 10:39AM
As already mentioned in the comments, you should store your data as objects (or arrays). The line str.split(",") already returns an array, which you then store. In your code you also fetch your data as an array. When you console.dir() your results, you could map each entry to a specific output string if you prefer.
collection.find().toArray(function(err, results) {
    console.dir(results); // <= results.entries
    // Let's close the db
    db.close();
});
See also:
https://developer.mozilla.org/de/docs/Web/JavaScript/Reference/Global_Objects/Array/map &
https://developer.mozilla.org/de/docs/Web/JavaScript/Reference/Global_Objects/Array/entries
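For instance, a rough sketch (the field order is an assumption based on the original CSV string) that maps each fetched document back to a labelled string:
collection.find().toArray(function(err, results) {
    if (err) throw err;
    results.forEach(function(doc) {
        var v = doc.values; // the array stored by parsestring()
        console.log('Machine Unit: ' + v[0] + ', Air Temperature: ' + v[1] +
            ', Water Temperature: ' + v[2] + ', Date: ' + v[5] + ', Time: ' + v[6]);
    });
    db.close();
});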
However, if you want to store objects (e.g. for accessing data via an identifier), you could simply do it like this:
function parsestring(str = "1,30,40,50,20,10/10/2016,10:39AM") {
    var dataArr = str.split(",");
    var dbEntry = {};
    dbEntry.machine = dataArr[0];
    dbEntry.airTemp = dataArr[1];
    dbEntry.waterTemp = dataArr[2];
    dbEntry.heatTemp = dataArr[3];
    dbEntry.roomTemp = dataArr[4];
    dbEntry.date = dataArr[5];
    dbEntry.time = dataArr[6];
    return dbEntry;
}
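With the object version of the parser, each field is addressable by name. For example, a sketch reusing the collection from the question:
collection.insert(parsestring("1,30,40,50,20,10/10/2016,10:39AM"), function(err, docs) {
    if (err) throw err;
    // Later you can query by a named field instead of an array index:
    collection.find({ machine: "1" }).toArray(function(err, results) {
        console.dir(results);
    });
});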
When you fetch a single document from MongoDB (e.g. with findOne()), you won't need .toArray().
This may be a basic question, but I've looked through the GitHub Cloudant library and the Cloudant documentation, and deleting a specific document from the database is mentioned but never thoroughly explained. It's very frustrating. The closest I've gotten to deleting a document is using an HTTP request rather than the functions the Cloudant library offers, and I continuously get a "Document Update Conflict" even though I'm passing the _rev of the document. Can anybody explain deleting a document from a Cloudant database using Node.js, with an example, to help sort this out? Thanks.
You can use the destroy method of nano, as @JakePeyser has said, instead of using the HTTP API, since you are using Node.js.
But since you are sending _rev and getting a "Document Update Conflict" error, I suspect you don't have the latest _rev. A "Document Update Conflict" happens mostly when the local _rev doesn't match the remote one. I would therefore suggest wrapping your destroy call in a get. So an update to @JakePeyser's example would be:
var nano = require("nano")("cloudantURL"),
    db = nano.db.use("yourDB");

db.get(docUniqueId, function(err, body) {
    if (!err) {
        var latestRev = body._rev; // fetch the current revision first
        db.destroy(docUniqueId, latestRev, function(err, body, header) {
            if (!err) {
                console.log("Successfully deleted doc", docUniqueId);
            }
        });
    }
});
It depends on which Node module you are using to communicate with Cloudant. With the nano driver, you can use the destroy method to delete a document. See the following code example:
var nano = require("nano")("cloudantURL"),
    db = nano.db.use("yourDB");
db.destroy(docUniqueId, docRevNum, function(err, body, header) {
    if (!err) {
        console.log("Successfully deleted doc", docUniqueId);
    }
});
Key
cloudantURL - URL of your Cloudant instance, with username and password embedded
yourDB - your database name
docUniqueId - Unique ID of the doc you want to delete
docRevNum - Revision number of the doc you want to delete
Sample script to delete/destroy a doc from a database called "mytable", based on the value of the field "fkId":
var Cloudant = require('cloudant');
var Config = require('config-js');
var config = new Config('./settings.js');
var username = config.get('CLOUDANT_USER');
var password = config.get('CLOUDANT_PASSWORD');
var cloudant = Cloudant({account: username, password: password});
var db = cloudant.db.use('mytable');
var key = 'fkId';
var value = '11234567890';
...
...
// Runs inside a request handler, hence the `res` below
db.list({
    'include_docs': true
}, function (err, body) {
    /* istanbul ignore next */
    if (err) {
        res.json({success: false, msg: 'Unable to fetch documents'});
    } else {
        var rows = body.rows;
        var items = [];
        var rec_found = false;
        rows.forEach(function (row) {
            if (row.doc[key] === value) {
                rec_found = true;
                items.push(row.doc);
            }
        });
        if (items.length === 0) {
            console.log("No record found with fkId: " + value);
            res.json({success: true, msg: 'No record found with fkId: ' + value});
        } else {
            var docId = items[0]._id;
            var docRev = items[0]._rev;
            db.destroy(docId, docRev, function(err) {
                if (!err) {
                    console.log("Successfully deleted doc with fkId: " + value);
                    res.json({success: true, msg: 'Successfully deleted the item from the database.'});
                } else {
                    res.json({success: false, msg: 'Failed to delete the item with that fkId from the database, please try again.'});
                }
            });
        }
    }
});
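If an index on fkId exists, Cloudant Query can fetch the matching document directly instead of listing the whole database; a rough sketch using the same db and value as above:
// Assumes a Cloudant Query index on fkId has been created
db.find({ selector: { fkId: value } }, function (err, result) {
    if (err || result.docs.length === 0) return;
    var doc = result.docs[0];
    db.destroy(doc._id, doc._rev, function (err) {
        if (!err) console.log("Successfully deleted doc with fkId: " + value);
    });
});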
I am using node-mongodb-native in my application. I send multiple POST requests to the Node.js server to save/update documents, but only one document gets updated and all the other documents do not change. The data received by the server is correct.
save: function(req, res) {
    data = req.body;
    if (!data._id) {
        data._id = new ObjectID();
    } else {
        data._id = ObjectID(data._id);
    }
    mColl(req.params.collname, function (collection, db) {
        collection.save(data, {safe: true}, function(err, result) {
            if (err) {
                res.send(err);
            } else {
                res.send(result);
            }
        });
    });
}
I am also not getting a response for those requests.
For starters, don't do this:
data = req.body;
When a new request comes in, you're overwriting the (global!) data variable, and all kinds of undefined stuff can happen. So always declare a new variable:
var data = req.body;
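Applied to the handler from the question, the fix is a single keyword; a sketch (the mColl helper and the rest of the handler stay the same):
save: function(req, res) {
    var data = req.body; // scoped to this request, not shared between requests
    if (!data._id) {
        data._id = new ObjectID();
    } else {
        data._id = ObjectID(data._id);
    }
    // ...rest of the handler unchanged
}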
Does Mongoose v3.6+ support batch inserts now? I've searched for a few minutes but anything matching this query is a couple of years old and the answer was an unequivocal no.
Edit:
For future reference, the answer is to use Model.create(). create() accepts an array as its first argument, so you can pass your documents to be inserted as an array.
See Model.create() documentation
Model.create() vs Model.collection.insert(): a faster approach
Model.create() is a bad way to do inserts if you are dealing with a very large bulk. It will be very slow. In that case you should use Model.collection.insert, which performs much better. Depending on the size of the bulk, Model.create() can even crash! I tried it with a million documents with no luck; using Model.collection.insert it took just a few seconds.
Model.collection.insert(docs, options, callback)
docs is the array of documents to be inserted;
options is an optional configuration object - see the docs
callback(err, docs) will be called after all documents get saved or an error occurs. On success, docs is the array of persisted documents.
As Mongoose's author points out here, this method will bypass any validation procedures and access the Mongo driver directly. It's a trade-off you have to make since you're handling a large amount of data; otherwise you wouldn't be able to insert it into your database at all (remember we're talking hundreds of thousands of documents here).
A simple example
var Potato = mongoose.model('Potato', PotatoSchema);
var potatoBag = [/* a humongous amount of potato objects */];
Potato.collection.insert(potatoBag, onInsert);
function onInsert(err, docs) {
    if (err) {
        // TODO: handle error
    } else {
        console.info('%d potatoes were successfully stored.', docs.length);
    }
}
Update 2019-06-22: although insert() can still be used just fine, it's been deprecated in favor of insertMany(). The parameters are exactly the same, so you can just use it as a drop-in replacement and everything should work just fine (well, the return value is a bit different, but you're probably not using it anyway).
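A sketch of the same example with insertMany(), assuming the driver's result object with its insertedCount field:
// Drop-in replacement for the deprecated insert()
Potato.collection.insertMany(potatoBag, function (err, result) {
    if (err) {
        // TODO: handle error
    } else {
        console.info('%d potatoes were successfully stored.', result.insertedCount);
    }
});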
Reference
Mongo documentation
Aaron Heckman on Google Groups discussing bulk inserts
Mongoose 4.4.0 now supports bulk insert
Mongoose 4.4.0 introduces true bulk insert with the model method .insertMany(). It is way faster than looping over .create() or providing it with an array.
Usage:
var rawDocuments = [/* ... */];
Book.insertMany(rawDocuments)
    .then(function(mongooseDocuments) {
        /* ... */
    })
    .catch(function(err) {
        /* Error handling */
    });
Or
Book.insertMany(rawDocuments, function (err, mongooseDocuments) { /* Your callback function... */ });
You can track it on:
https://github.com/Automattic/mongoose/issues/723
https://github.com/Automattic/mongoose/blob/1887e72694829b62f4e3547283783cebbe66b46b/lib/model.js#L1774
Indeed, you can use the "create" method of Mongoose; it can accept an array of documents (or, as in this example, several documents as separate arguments):
Candy.create({ candy: 'jelly bean' }, { candy: 'snickers' }, function (err, jellybean, snickers) {
});
The callback function contains the inserted documents.
You do not always know how many items have to be inserted (a fixed argument length like above), so you can loop over the callback's arguments object:
// Inside the create() callback: arguments[0] is err, the rest are the docs
var insertedDocs = [];
for (var i = 1; i < arguments.length; ++i) {
    insertedDocs.push(arguments[i]);
}
Update: A better solution
A better solution would be to use Candy.collection.insert() instead of Candy.create() - used in the example above - because it's faster (create() calls Model.save() on each item, so it's slower).
See the Mongo documentation for more information:
http://docs.mongodb.org/manual/reference/method/db.collection.insert/
(thanks to arcseldon for pointing this out)
Here are both ways of saving data, with insertMany and save:
1) Mongoose: save an array of documents with insertMany in bulk
/* write mongoose schema model and export this */
var Potato = mongoose.model('Potato', PotatoSchema);
/* write this api in routes directory */
router.post('/addDocuments', function (req, res) {
    const data = [/* array of objects whose data needs to be saved in the db */];
    Potato.insertMany(data)
        .then((result) => {
            console.log("result ", result);
            res.status(200).json({'success': 'new documents added!', 'data': result});
        })
        .catch(err => {
            console.error("error ", err);
            res.status(400).json({err});
        });
});
2) Mongoose: save an array of documents with .save()
These documents will be saved in parallel.
/* write mongoose schema model and export this */
var Potato = mongoose.model('Potato', PotatoSchema);
/* write this api in routes directory */
router.post('/addDocuments', function (req, res) {
    const saveData = [];
    const data = [/* array of objects whose data needs to be saved in the db */];
    data.map((item) => {
        var potato = new Potato(item); // map passes the element itself, not an index
        potato.save()
            .then((result) => {
                saveData.push(result);
                // respond once all parallel saves have finished
                if (saveData.length === data.length) {
                    res.status(200).json({'success': 'new documents added!', 'data': saveData});
                }
            })
            .catch((err) => {
                console.error(err);
                res.status(500).json({err});
            });
    });
});
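As a side note, Promise.all() can collect these parallel saves without the manual length check; a sketch under the same assumptions:
router.post('/addDocuments', function (req, res) {
    const data = [/* array of objects to save */];
    Promise.all(data.map((item) => new Potato(item).save()))
        .then((saved) => {
            res.status(200).json({'success': 'new documents added!', 'data': saved});
        })
        .catch((err) => {
            console.error(err);
            res.status(500).json({err});
        });
});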
You can perform bulk inserts using Mongoose, as the highest-scored answer suggests. But the example as given doesn't work; it should be:
/* a humongous amount of potatos */
var potatoBag = [{name:'potato1'}, {name:'potato2'}];
var Potato = mongoose.model('Potato', PotatoSchema);
Potato.collection.insert(potatoBag, onInsert);
function onInsert(err, docs) {
    if (err) {
        // TODO: handle error
    } else {
        console.info('%d potatoes were successfully stored.', docs.length);
    }
}
Don't use schema instances for the bulk insert; use plain JavaScript objects instead.
It seems that with Mongoose there is a limit when inserting more than 1000 documents using
Potato.collection.insert(potatoBag, onInsert);
You can use:
var bulk = Model.collection.initializeOrderedBulkOp();
async.each(users, function (user, callback) {
    bulk.insert(user); // queue each document on the bulk operation
    callback();
}, function (err) {
    var bulkStart = Date.now();
    bulk.execute(function(err, res) {
        if (err) console.log(" gameResult.js > err ", err);
        console.log(" gameResult.js > BULK TIME ", Date.now() - bulkStart);
        console.log(" gameResult.js > BULK INSERT ", res.nInserted);
    });
});
But this is almost twice as fast when testing with 10000 documents:
function fastInsert(arrOfResults) {
    var startTime = Date.now();
    var count = 0;
    var c = Math.round(arrOfResults.length / 990); // number of ~1000-doc chunks
    var fakeArr = [];
    fakeArr.length = c;
    var docsSaved = 0;
    async.each(fakeArr, function (item, callback) {
        var sliced = arrOfResults.slice(count, count + 999);
        count = count + 999;
        if (sliced.length != 0) {
            GameResultModel.collection.insert(sliced, function (err, docs) {
                docsSaved += docs.ops.length;
                callback();
            });
        } else {
            callback();
        }
    }, function (err) {
        console.log(" gameResult.js > BULK INSERT AMOUNT: ", arrOfResults.length, "docsSaved ", docsSaved, " DIFF TIME:", Date.now() - startTime);
    });
}
You can perform a bulk insert from the MongoDB shell by passing the values in an array:
db.collection.insert([{values},{values},{values},{values}]);
Sharing working and relevant code from our project:
//documentsArray is the list of sampleCollection objects
sampleCollection.insertMany(documentsArray)
    .then((res) => {
        console.log("insert sampleCollection result ", res);
    })
    .catch(err => {
        console.log("bulk insert sampleCollection error ", err);
    });