MongoDB seems to be arbitrarily returning early when I try to upload a large file (1.8GB) to GridFS. Smaller files work just fine.
I'm using the node.js native driver. The code (with a few things omitted for brevity) is as follows:
var objectId = new ObjectID(),
    gridStore = new GridStore(db, objectId, filename /* declared elsewhere */, "w", { "content_type": contentType /* declared elsewhere */ }),
    obj = {};

gridStore.open(function (err, gs) {
    console.log("gridStore open");
    gs.writeFile(tempFile, function (err, doc) {
        if (err) {
            throw err;
        }
        console.log("file written");
        obj.fileId = doc._id;
        // double-check the md5 of the uploaded file against what was uploaded
        // (md5 variable declared elsewhere)
        if (doc.md5 !== md5) {
            console.log(doc);
            console.log(doc.md5);
            console.log(md5);
            // delete the bad file
            GridStore.unlink(db, doc.filename, function (err, gridStore) {
                if (err) {
                    throw err;
                }
            });
        } else {
            // do the desired stuff
        }
    });
});
The "doc" object always seems to return with a different length (and obviously a different md5).
The issue was apparently with earlier versions of Node's stream implementation. 10gen's Node driver team wrote newer versions of the driver against the updated Node streams implementation, so upgrading both Node and the native client driver fixed this issue.
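For reference, a minimal sketch of the same upload using the newer driver's stream-based GridFSBucket API (driver 2.x+) might look like the following; the connection URL is a placeholder, and tempFile, filename, and contentType stand for the values declared elsewhere in the original code:

var mongodb = require('mongodb');
var fs = require('fs');

mongodb.MongoClient.connect('mongodb://localhost:27017/mydb', function (err, db) {
    if (err) throw err;
    var bucket = new mongodb.GridFSBucket(db);
    // stream the temp file into GridFS instead of buffering it in memory
    var uploadStream = bucket.openUploadStream(filename, { contentType: contentType });
    fs.createReadStream(tempFile)
        .pipe(uploadStream)
        .on('error', function (err) { throw err; })
        .on('finish', function () {
            // the stored file's id is available on the upload stream
            console.log('file written, id:', uploadStream.id);
            db.close();
        });
});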
The front-end is written in ReactJS, more specifically Grommet. There are multiple PDF files to be served to the user on clicking the Download button. The files are stored in GridFS. I wish to give the user a zipped folder containing all these files. How can I achieve this?
Thanks in advance.
Got it!! Super simple solution with archiver. Worked on the first try.
Note: I am using Sails.js. DBFile is my model.
const GridFsAdapter = require('../adapters/gridfs-adapter');
const archiver = require('archiver');

async function downloadMultiple (query, res, filename) {
    // create a stream for the download
    const archive = archiver('zip', {
        zlib: {level: 9} // sets the compression level
    });

    // catch warnings (ie stat failures and other non-blocking errors)
    archive.on('warning', (err) => {
        if (err.code === 'ENOENT') {
            // log warning
            sails.log.warn(err);
        } else {
            // throw error
            throw err;
        }
    });

    archive.on('error', (err) => {
        throw err;
    });

    // set the file name
    res.attachment(filename);

    // pipe the archive to the response before appending files/streams
    archive.pipe(res);

    // add your streams
    await DBFile
        .stream(query)
        // like MongoDB's cursor.forEach(): avoids holding all records in memory at once
        .eachRecord(async (dbFile) => {
            // get the stream from the db
            const {stream, data} = await GridFsAdapter().read(dbFile.fileId);
            // append the stream, with its filename, to the download stream
            archive.append(stream, {name: data.filename});
        });

    // tell the archive you have added all your files
    await archive.finalize();
}
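A hypothetical Sails controller action calling this helper could look like the sketch below; the query field and the zip name are invented for illustration:

// hypothetical controller action; adjust the query to your model's fields
download: async function (req, res) {
    try {
        await downloadMultiple({owner: req.session.userId}, res, 'documents.zip');
    } catch (err) {
        return res.serverError(err);
    }
}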
I'm using the nano library for Node.js (I also tried this with the Cloudant library).
I have this sample code:
exports.insert = function (db_name, data, callback) {
    if (!db) {
        db = nano.use(db_name);
    }
    console.log(`try to make bulk copy to CouchDB`);
    db.bulk(data, function (err, body) {
        if (!err) {
            console.log(`data in db ${db_name} inserted successfully`);
            return callback(null, body);
        }
        console.log(`err occurred ${err}`);
        return callback(err);
    });
};
The "data" variable is an object that contain the 'docs' property and
'docs' contain array.
I'ts always do the bulk work and put the docs array into my cloudnat
db , but
Often doesn't return any callback (err / success) to my node js
process
and it stuck...
Any suggestion?
I am getting the error below when I try to read data from Redshift with the following code. I feel that it is due to the jdbc module not being installed correctly.
/*
 * GET users listing.
 */
var jdbc = require('jdbc');
var express = require('express');

var config = {
    libpath: '../RedshiftJDBC41-1.1.6.1006.jar',
    drivername: 'com.amazon.redshift.jdbc41.Driver',
    url: 'jdbc:redshift://exampleCluster.abcdkcmf0ug1.us-west-2.redshift.amazonaws.com:5439/xyz',
    properties: [
        ['user', 'XXXXX'],
        ['password', 'XXXXXX']
    ]
};
console.log(config);

var hsqldb = new jdbc(config);
hsqldb.status();
hsqldb.initialize(config, function(err, res) {
    console.log("Connection opened successfully!");
    if (err) {
        console.log("Error during Initializing");
        console.log(err);
    }
});

exports.list = function(req, res) {
    hsqldb.open(function(err, conn) {
        if (conn) {
            // SELECT statements are called with executeQuery
            hsqldb.executeQuery("select * from schema.table", function(err, results) {
                if (err) {
                    console.log(err);
                } else if (results) {
                    console.log(results);
                    res.type('text/plain'); // set content-type
                    res.send(results);
                }
                hsqldb.close(function(err) {
                    if (err) {
                        console.log(err);
                    } else {
                        console.log("Connection closed successfully!");
                    }
                });
            });
        }
    });
    // res.send("respond with a resource");
};
Error:
C:\Users\ABCD\node_modules\jdbc\lib\pool.js:97
return callback(null);
^
TypeError: object is not a function
at C:\Users\ABCD\node_modules\jdbc\lib\pool.js:97:12
at C:\Users\ABCD\node_modules\jdbc\node_modules\async\lib\async.js:52:16
at C:\Users\ABCD\node_modules\jdbc\node_modules\async\lib\async.js:363:13
at C:\Users\ABCD\node_modules\jdbc\node_modules\async\lib\async.js:52:16
at done (C:\Users\ABCD\node_modules\jdbc\node_modules\async\lib\async.js:243:17)
at C:\Users\ABCD\node_modules\jdbc\node_modules\async\lib\async.js:44:16
at C:\Users\ABCD\node_modules\jdbc\node_modules\async\lib\async.js:360:17
at C:\Users\ABCD\node_modules\jdbc\lib\pool.js:90:7
at C:\Users\ABCD\node_modules\jdbc\lib\pool.js:28:14
at C:\Users\ABCD\node_modules\jdbc\lib\drivermanager.js:26:18
Similar Post: Redshift data access from node jdbc
Could someone please let me know what went wrong here? Is it something wrong with the code, or with the modules not being installed correctly? I feel it is the modules, since I saw a few errors while installing the jdbc module.
Please let me know the steps to install the jdbc module in Node.js.
The node-jdbc API has been completely reworked since version 0.1.1 was released, so any old documentation relating to older versions of node-jdbc, including my answer to the question linked above, is no longer helpful.
Using node-jdbc 0.1.1, the code to connect to your database should look something like the following:
var JDBC = require('jdbc');
var jinst = require('jdbc/lib/jinst');

if (!jinst.isJvmCreated()) {
    jinst.addOption("-Xrs");
    jinst.setupClasspath(['../RedshiftJDBC41-1.1.6.1006.jar']);
}

var config = {
    drivername: 'com.amazon.redshift.jdbc41.Driver',
    url: 'jdbc:redshift://exampleCluster.abcdkcmf0ug1.us-west-2.redshift.amazonaws.com:5439/xyz',
    properties: {
        "user": "XXXXX",
        "password": "XXXXX"
    }
};

var hsqldb = new JDBC(config);
hsqldb.status();
hsqldb.initialize(function(err, res) {
    /* ... */
});
In particular:
The classpath is now configured in a jinst object and has moved out of the configuration.
properties is now an object, not an array of 2-element arrays as it was in version 0.0.15.
The initialize method now takes only one parameter, the callback. It no longer takes the configuration as a parameter as that was provided in the constructor.
node-jdbc supposedly supports specifying user and password directly in the config object, instead of inside properties. However, I would recommend against this, as it does not work: if you try it, you may well get errors about the username and password not being valid, even if you've typed them in correctly.
The reason you are getting the somewhat unintelligible error is that you were passing the configuration object to the initialize method, which expects only a callback. The error doesn't get noticed until Node tries to 'call' the configuration object as if it were a function, which of course fails.
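To then actually run a query with node-jdbc 0.1.1, the flow is roughly the following sketch, based on the 0.1.x pooled-connection API (reserve/release); the table name is a placeholder:

hsqldb.reserve(function (err, connObj) {
    if (err || !connObj) throw err;
    var conn = connObj.conn;
    conn.createStatement(function (err, statement) {
        if (err) throw err;
        statement.executeQuery("SELECT * FROM schema.table", function (err, resultset) {
            if (err) throw err;
            // convert the Java resultset into an array of JS objects
            resultset.toObjArray(function (err, results) {
                if (err) throw err;
                console.log(results);
                // always return the connection to the pool
                hsqldb.release(connObj, function (err) {
                    if (err) console.log(err);
                });
            });
        });
    });
});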
I'm saving and loading binary data to and from a MongoDB database using Grid. I'm using Node.js. Following all the examples I've been able to find (which are pretty similar), my code is:
router.get('/getlog', function(req, res) {
    if (req.isAuthenticated()) {
        var mongo = require('mongodb');
        var Grid = require('gridfs-stream');
        var db = new mongo.Db('logApp', new mongo.Server("127.0.0.1", 27017));
        db.open(function (err) {
            if (err) {
                return handleError(err);
            }
            var gfs = Grid(db, mongo);
            var readStream = gfs.createReadStream({
                _id: req.query.id
            }).pipe(res);
        });
    } else {
        res.redirect('/home');
    }
});
req.query.id is the id of the file I need. The response I get is:
MongoError: file with id 557aa98e6f1373cb11e8f294 not opened for writing
which makes no sense because I'm not writing, I'm reading the file.
The file did not exist. I checked using:
gfs.exist({_id: req.query.id + ''}, function (err, found) {
    if (err) return handleError(err);
    found ? console.log('File exists') : console.log('File does not exist');
    res.send(found);
});
I was using the wrong id.
If someone is using Mongoose, make sure you wrap the string id with:
mongoose.Types.ObjectId(req.articleId)
Otherwise MongoDB will not recognize the id and will throw an error.
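For example, with the gfs instance from the question above, the read would look something like this sketch (req.query.id as in the original route):

var mongoose = require('mongoose');
var readStream = gfs.createReadStream({
    _id: mongoose.Types.ObjectId(req.query.id) // wrap the string id
});
readStream.pipe(res);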
I'm currently building a web app using Sails.js and got stuck retrieving an image file from GridFS. I have successfully uploaded the file to my Mongo GridFS using skipper-gridfs. I have no idea how to display the file in the correct way (I'm new to Sails.js and Node).
Here is what my code for retrieving an image file from GridFS looks like in FileController.js (I'm using gridfs-stream):
show: function (req, res, next) {
    var mongo = require('mongodb');
    var Grid = require('gridfs-stream');
    var buffer = "";

    // create or use an existing mongodb-native db instance
    var db = new mongo.Db('testDb', new mongo.Server("192.168.0.2", 27017), {safe: true});
    var gfs = Grid(db, mongo);

    // streaming from gridfs
    var readstream = gfs.createReadStream({
        filename: 'e1ecfb02-e095-4e2f.png'
    });

    // check if the file exists
    gfs.exist({
        filename: 'e1ecfb02-e095-4e2f.png'
    }, function (err, found) {
        if (err) return handleError(err);
        found ? console.log('File exists') : console.log('File does not exist');
    });

    // buffer data
    readstream.on("data", function (chunk) {
        buffer += chunk;
        console.log("adsf", chunk);
    });

    // dump contents to console when complete
    readstream.on("end", function () {
        console.log("contents of file:\n\n", buffer);
    });
}
When I ran it, the console showed nothing, and there was no error either.
How should I fix this?
Additional Questions:
Is it better and easier to store/read files to/from the local disk instead of using GridFS?
Am I correct in choosing gridfs-stream to retrieve the file from GridFS?
In the skipper-gridfs code there's a 'read' method that accepts an fd value and returns the file corresponding to that value. So you just have to pull that file from Mongo with that method and send it as the response. It should work fine.
download: function (req, res) {
    var blobAdapter = require('skipper-gridfs')({
        uri: 'mongodb://localhost:27017/mydbname.images'
    });

    var fd = req.param('fd'); // the value of fd comes in from the get request
    blobAdapter.read(fd, function (error, file) {
        if (error) {
            res.json(error);
        } else {
            res.contentType('image/png');
            res.send(new Buffer(file));
        }
    });
}
I hope it helps :)
Additional Questions:
Yes, using GridFS is better in both performance and efficiency. MongoDB normally has a 16MB per-document size limit, but with GridFS you can store files of any size: GridFS breaks a file into chunks and stores those.
Retrieving has been shown above.
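If you want to see the chunking for yourself, here is a minimal sketch (the database name and filename are placeholders): after a file is stored, GridFS keeps its metadata in the fs.files collection and the fixed-size pieces in fs.chunks:

var mongo = require('mongodb');
mongo.MongoClient.connect('mongodb://localhost:27017/mydbname', function (err, db) {
    if (err) throw err;
    db.collection('fs.files').findOne({filename: 'e1ecfb02-e095-4e2f.png'}, function (err, file) {
        if (err) throw err;
        // count the chunks that make up this file
        db.collection('fs.chunks').count({files_id: file._id}, function (err, n) {
            if (err) throw err;
            console.log('stored in', n, 'chunks of', file.chunkSize, 'bytes each');
            db.close();
        });
    });
});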
You can now use skipper-gridfs in Sails to manage uploads/downloads.
var blobAdapter = require('skipper-gridfs')({uri: 'mongodb://jimmy#j1mtr0n1xx#mongo.jimmy.com:27017/coolapp.avatar_uploads' });
Upload:
req.file('avatar')
    .upload(blobAdapter.receive(), function whenDone(err, uploadedFiles) {
        if (err) return res.negotiate(err);
        else return res.ok({
            files: uploadedFiles,
            textParams: req.params.all()
        });
    });
Download:
blobAdapter.read(filename, callback);
Bear in mind that the file name will change once you upload it to Mongo; you have to use the file name returned in the first response.
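So a full round trip would look something like this sketch (the field names follow skipper's uploadedFiles shape; the single-file assumption is mine):

req.file('avatar').upload(blobAdapter.receive(), function (err, uploadedFiles) {
    if (err) return res.negotiate(err);
    // use the name skipper-gridfs assigned, not the original upload name
    var storedFd = uploadedFiles[0].fd;
    blobAdapter.read(storedFd, function (err, file) {
        if (err) return res.negotiate(err);
        res.contentType('image/png');
        res.send(new Buffer(file));
    });
});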