I'd like to store a stream in GridFS. However, it doesn't work as expected (i.e. the console log never appears).
var mongo = require("mongodb");
var db = ...
db.open(function (err, db) {
  var stream = ...
  var name = 'foobar';
  var gs = new mongo.GridStore(db, new mongo.ObjectID, name, "w");
  gs.open(function (err, gs) {
    stream.on("data", function (data) {
      gs.write(data, function (err, gs) {
        if (err) {
          // ...
        }
      });
    });
    stream.on("end", function () {
      gs.close(function (err, gs) {
        if (!err) {
          console.log(name + " has been stored to database.");
        }
      });
    });
  });
});
Does anyone know why it doesn't work?
I think you forgot to pipe the stream into the GridFS file. You can do that directly, since a GridFS file behaves like a writable stream (see the last line within the db.open callback). You also forgot the parentheses on the ObjectID constructor invocation, and you don't check for errors when opening either the db or the file. My code is structurally the same and it does work. Hope that helps.
db.open(function (err, db) {
  var stream = ...
  var name = 'foobar';
  var gs = new mongo.GridStore(db, new mongo.ObjectID(), name, "w");
  gs.open(function (err, gs) {
    if (err) console.log(...)
    else {
      stream.on("data", function (data) {
        gs.write(data, function (err, gs) {
          if (err) {
            // ...
          }
        });
      });
      stream.on("end", function () {
        gs.close(function (err, gs) {
          if (!err) {
            console.log(name + " has been stored to database.");
          }
        });
      });
      stream.pipe(gs);
    }
  });
});
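For reference, newer releases of the Node.js MongoDB driver deprecate GridStore in favour of GridFSBucket, which is a proper writable stream target. A minimal sketch, assuming a 2.x-era driver (where connect yields a db handle), a database named mydb, and that stream is your readable stream:
var mongo = require("mongodb");

mongo.MongoClient.connect("mongodb://localhost:27017/mydb", function (err, db) {
  if (err) return console.error(err);
  var bucket = new mongo.GridFSBucket(db);
  var uploadStream = bucket.openUploadStream("foobar");
  uploadStream.on("error", function (err) { console.error(err); });
  uploadStream.on("finish", function () {
    console.log("foobar has been stored to database.");
  });
  stream.pipe(uploadStream); // pipe takes care of write and end for us
});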
I am fairly new to Node.js, and this seems to be Node-specific. I define a variable, and later I get errors that fileName is undefined. I have no idea why this happens: from my perspective I'm just assigning a global variable inside a function, which would work in every other language I've used. Does the callback passed to fs.readFile() somehow differ from a normal function? I honestly have no idea. Anyway, this is my full code:
var fs = require('fs');
var dateObject = new Date();
var fileName;

function Start() {
  fs.readFile('./counter.txt', function (err, data) {
    if (err) throw err;
    var current = parseInt(data);
    current++;
    fs.writeFile('./counter.txt', current.toString(), function (err) {
      if (err) throw err;
      console.log("Written!");
    });
    var fullDate = `${dateObject.getDate()}-${dateObject.getMonth() + 1}-${dateObject.getFullYear()}`;
    fileName = `./logs/${fullDate} ${current}.txt`;
    console.log(fileName);
    fs.appendFile(fileName, "Logger Initiated!", function (err) {
      if (err) throw err;
    })
  });
}

function PAL(text) {
  if (fileName === undefined) {
    console.log("...");
    return "500";
  }
  console.log(fileName);
  fs.appendFile(fileName, text, function (err) {
    if (err) throw err;
  })
}

module.exports = {Start, PAL};
Another file:
const logger = require('./logger')
// ....
app.listen(port, () => {
  logger.Start();
  logger.PAL("test");
})
You've hit an asynchronous trap in your code: fs.readFile is an async function, so when you call .PAL you seem to expect that .Start has already finished, but that isn't true. A Start function that reports completion:
async function Start() {
  return new Promise((resolve, reject) => {
    fs.readFile('./counter.txt', function (err, data) {
      if (err) return reject(err);
      var current = parseInt(data);
      current++;
      fs.writeFile('./counter.txt', current.toString(), function (err) {
        if (err) return reject(err);
        console.log("Written!");
        var fullDate = `${dateObject.getDate()}-${dateObject.getMonth() + 1}-${dateObject.getFullYear()}`;
        fileName = `./logs/${fullDate} ${current}.txt`;
        console.log("FILENAME: ", fileName);
        fs.appendFile(fileName, "Logger Initiated!", function (err) {
          if (err) return reject(err);
          resolve();
        })
      });
    });
  })
}
Call: logger.Start().then(() => logger.PAL("test"));
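As an aside, the same flow reads more naturally with the fs.promises API (Node 10+) and async/await; a minimal sketch, keeping the module-global fileName from the question:
const fsp = require('fs').promises;

async function Start() {
  // read, bump and persist the counter, then derive the log file name
  const data = await fsp.readFile('./counter.txt');
  const current = parseInt(data) + 1;
  await fsp.writeFile('./counter.txt', current.toString());
  const d = new Date();
  const fullDate = `${d.getDate()}-${d.getMonth() + 1}-${d.getFullYear()}`;
  fileName = `./logs/${fullDate} ${current}.txt`;
  await fsp.appendFile(fileName, "Logger Initiated!");
}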
I am new to the async library. I use async.eachSeries with an async.waterfall for each iteration, but the waterfall runs only once.
Here is my code:
var fs = require('fs'),
    async = require('async'),
    Client = require('node-rest-client').Client;

// REST API Call and output in jsonOutput.results
console.log(jsonOutput.results.length); // jsonOutput.results has 124 records.

async.eachSeries(jsonOutput.results, function (account, callback) {
  var dataObject = {};
  dataObject.updatetime = new Date();
  var setAccountInfoURL = ""; // Data Update REST API Request
  async.waterfall([
    function setAccountInfo(updateCallback) {
      // client.get(setAccountInfoURL, function (data, response) {
      //   var jsonOutput = JSON.parse(data.toString('utf8'));
      updateCallback(null, "output", account)
      // });
    },
    function saveAccountInfo(jsonOutput, account, updateCallback) {
      var debuglog = JSON.stringify(account) + "\n" + jsonOutput;
      fs.appendFile("debuginfo.json", debuglog + "\n", function (err) {
        if (err) {
          console.log(err);
        }
        console.log("JSON saved to " + "debuginfo.json");
        updateCallback(null);
      });
    }
  ], function asyncComplete(err) {
    if (err) {
      console.warn('Error setting account info.', err);
    }
    console.log('async completed');
  });
}, function (err) {
  if (err) {
    console.log('error in loop');
  }
  console.log('loop completed');
});
Output:
124
JSON saved to debuginfo.json
async completed
Any help is really appreciated.
I found my mistake: I never invoked the eachSeries callback once each iteration's waterfall had completed. Calling it at the end of the waterfall's completion handler fixes the loop:
var fs = require('fs'),
    async = require('async'),
    Client = require('node-rest-client').Client;

// REST API Call and output in jsonOutput.results
console.log(jsonOutput.results.length); // jsonOutput.results has 124 records.

async.eachSeries(jsonOutput.results, function (account, callback) {
  var dataObject = {};
  dataObject.updatetime = new Date();
  var setAccountInfoURL = ""; // Data Update REST API Request
  async.waterfall([
    function setAccountInfo(updateCallback) {
      // client.get(setAccountInfoURL, function (data, response) {
      //   var jsonOutput = JSON.parse(data.toString('utf8'));
      updateCallback(null, "output", account)
      // });
    },
    function saveAccountInfo(jsonOutput, account, updateCallback) {
      var debuglog = JSON.stringify(account) + "\n" + jsonOutput;
      fs.appendFile("debuginfo.json", debuglog + "\n", function (err) {
        if (err) {
          console.log(err);
        }
        console.log("JSON saved to " + "debuginfo.json");
        updateCallback(null);
      });
    }
  ], function asyncComplete(err) {
    if (err) {
      console.warn('Error setting account info.', err);
    }
    console.log('async completed');
    callback(null); // this is the change.
  });
}, function (err) {
  if (err) {
    console.log('error in loop');
  }
  console.log('loop completed');
});
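If you are not tied to the async library, the same sequential loop can be written with plain async/await, which avoids the callback bookkeeping that caused the bug; a minimal sketch, assuming jsonOutput.results is already populated:
const fsp = require('fs').promises;

async function processAccounts(results) {
  for (const account of results) {
    const output = "output"; // stand-in for the REST call in the waterfall's first step
    const debuglog = JSON.stringify(account) + "\n" + output;
    await fsp.appendFile("debuginfo.json", debuglog + "\n");
    console.log("JSON saved to debuginfo.json");
  }
  console.log('loop completed');
}

processAccounts(jsonOutput.results).catch(console.error);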
I'm trying MongoDB document insertion from a CSV stream using async.queue.
But I get the following error, even after trying all the remedies given in similar SO posts. The exact error message is:
C:\Users\admin\node_modules\mongodb\lib\mongo_client.js:406
        throw err
TypeError: object is not a function
    at C:\Users\admin\Desktop\mynodefile.js:13:2
    at C:\Users\admin\node_modules\mongodb\lib\mongo_client.js:403:11
    at process._tickCallback (node.js:355:11)
The Node.js code I used:
var csv = require('csv');
var async = require('async');
var fs = require('fs');
var MongoClient = require('mongodb').MongoClient;

MongoClient.connect('mongodb://localhost:27017', function (err, db) {
  if (err) throw err;
  var collection = db.collection('myCSVs');
  var queue = async.queue(collection.insert.bind(collection), 5);
  csv()
    .from.path('./input.csv', { columns: true })
    .transform(function (data, index, cb) {
      queue.push(data, function (err, res) {
        if (err) return cb(err);
        cb(null, res[0]);
      });
    })
    .on('error', function (err) {
      console.log('ERROR: ' + err.message);
    })
    .on('end', function () {
      queue.drain = function () {
        collection.count(function (err, count) {
          console.log('Number of documents:', count);
          db.close();
        });
      };
    });
});
You haven't mentioned the database name in your MongoClient.connect function call. You can do so like this:
MongoClient.connect('mongodb://localhost:27017/database_name', function (err, db) {
Then you can do:
var collection = db.collection('myCSVs');
provided myCSVs is a collection inside database_name.
Or you can also do:
MongoClient.connect('mongodb://localhost:27017', function (err, mongoclient) {
  var db = mongoclient.db('database_name');
  var collection = db.collection('myCSVs');
});
You also have to change
var queue = async.queue(collection.insert.bind(collection), 5);
into a queue whose worker matches async's (task, callback) signature, for example:
var q = async.queue(function (task, callback) {
  console.log('hello ' + task.name);
  callback();
}, 2);
In this line:
queue.push(data, function (err, res) {
  if (err) return cb(err);
  cb(null, res[0]);
});
you are calling push with data and a callback, but that is not implemented by your
var queue = async.queue(collection.insert.bind(collection), 5);
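Putting the two points together, a worker that actually performs the insert might look like this; a minimal sketch, assuming the database is named database_name as suggested above:
MongoClient.connect('mongodb://localhost:27017/database_name', function (err, db) {
  if (err) throw err;
  var collection = db.collection('myCSVs');
  // The worker receives one CSV row and inserts it; async.queue
  // forwards the worker's callback result to the queue.push callback.
  var queue = async.queue(function (doc, callback) {
    collection.insert(doc, callback);
  }, 5);
  // ... push rows from the CSV transform as before: queue.push(data, cb)
});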
I'm trying to get a file from S3 and store it in Couchbase (NoSQL).
I'm trying to store the output stream with the following code:
var outputStream1 = fs.createWriteStream("./tmp/" + url);

// if statusCode == 200, then we have the file; save it in our cache and then send it to the client
res.on('data', function (chunk) {
  outputStream1.write(chunk);
});

res.on('end', function () {
  var value = outputStream1;
  cb.set(key, value, function (err, result) {
    if (err) { console.log(err); }
    console.log("Set item for key with CAS: " + result.cas);
    cb.get(key, function (err, result) {
      if (err) { console.log(err); }
      console.log("Value for key is: " + result.value);
      var readStream = result.value;
      readStream.on('open', function () {
        console.log(response);
      });
      readStream.on('end', function () {
        readStream.close();
      });
    });
  });
});
The problem is that I'm storing the outputStream object itself and then trying to read from it.
What I'm looking for is a way to store the data we get from S3 in Couchbase, and then be able to send it to the client. Is that possible?
I think you can just set a Buffer as the value and get it back later. First you need to collect the data into a buffer:
var buffers = [];
res.on('data', function (chunk) {
  buffers.push(chunk);
});
res.on('end', function () {
  var value = Buffer.concat(buffers);
  cb.set(key, value, function (err, result) {
    if (err) { console.log(err); }
    console.log("Set item for key with CAS: " + result.cas);
    cb.get(key, function (err, result) {
      if (err) { console.log(err); }
      console.log("Value for key is: " + result.value); // You should get your value in here
    });
  });
});
You can also use the concat-stream module to help you; I've answered a similar question about buffering streams.
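For completeness, a minimal concat-stream sketch of the same idea (the module hands your callback the fully buffered body as a single Buffer):
var concat = require('concat-stream');

// Pipe the S3 response into concat-stream; the callback fires once
// the stream ends, with the whole body concatenated.
res.pipe(concat(function (value) {
  cb.set(key, value, function (err, result) {
    if (err) { console.log(err); }
    console.log("Set item for key with CAS: " + result.cas);
  });
}));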
I'm trying to move uploaded file from /tmp to home directory using NodeJS/ExpressJS:
fs.rename('/tmp/xxxxx', '/home/user/xxxxx', function (err) {
  if (err) res.json(err);
  console.log('done renaming');
});
But it doesn't work, and no error is reported. It does work when the new path is also inside /tmp.
I'm using Ubuntu, and home is on a different partition. Any fix?
Thanks
Yes, fs.rename does not move a file between two different disks/partitions. This is the correct behaviour: fs.rename provides functionality identical to rename(2) on Linux.
Read the related issue posted here.
To get what you want, you would have to do something like this:
var source = fs.createReadStream('/path/to/source');
var dest = fs.createWriteStream('/path/to/dest');
source.pipe(dest);
source.on('end', function() { /* copied */ });
source.on('error', function(err) { /* error */ });
Another way is to use fs.readFile and fs.writeFile; calling fs.unlink in the callback then removes the temp file from the tmp directory.
var oldPath = req.files.file.path;
var newPath = ...;

fs.readFile(oldPath, function (err, data) {
  if (err) throw err;
  fs.writeFile(newPath, data, function (err) {
    if (err) throw err;
    fs.unlink(oldPath, function (err) {
      if (err) throw err;
      res.send("File uploaded to: " + newPath);
    });
  });
});
Updated ES6 solution ready to use with promises and async/await:
function moveFile(from, to) {
  // Note: this copies `from` to `to`; unlink `from` afterwards for a true move.
  const source = fs.createReadStream(from);
  const dest = fs.createWriteStream(to);
  return new Promise((resolve, reject) => {
    source.on('end', resolve);
    source.on('error', reject);
    source.pipe(dest);
  });
}
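A hypothetical usage with the paths from the question:
(async () => {
  await moveFile('/tmp/xxxxx', '/home/user/xxxxx');
  await fs.promises.unlink('/tmp/xxxxx'); // remove the source to complete the move
  console.log('done moving');
})();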
This example is taken from Node.js in Action: a move() function that renames if possible, or falls back to copying.
var fs = require('fs');

module.exports = function move(oldPath, newPath, callback) {
  fs.rename(oldPath, newPath, function (err) {
    if (err) {
      if (err.code === 'EXDEV') {
        copy();
      } else {
        callback(err);
      }
      return;
    }
    callback();
  });

  function copy() {
    var readStream = fs.createReadStream(oldPath);
    var writeStream = fs.createWriteStream(newPath);
    readStream.on('error', callback);
    writeStream.on('error', callback);
    readStream.on('close', function () {
      fs.unlink(oldPath, callback);
    });
    readStream.pipe(writeStream);
  }
}
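On Node 8.5+, the copy fallback can lean on the built-in fs.copyFile instead of a hand-rolled stream pipeline; a minimal sketch of an alternative fallback, keeping the same oldPath/newPath/callback names:
function copy() {
  // fs.copyFile handles the cross-device copy; unlink then removes the source.
  fs.copyFile(oldPath, newPath, function (err) {
    if (err) return callback(err);
    fs.unlink(oldPath, callback);
  });
}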