I have this code using socket.io on a Node server:
io.sockets.on('connection', function (socket) {
    reader = require('fs');
    fileContents = reader.readFile(__dirname + '/textCharacters.txt', 'utf8',
        function (data, err) {
            if (err) throw err;
            console.log(data);
        }
    );
    socket.emit('retrievedFileContent', {content: fileContents});
});
When I check the Node server debug output, the error argument shows the contents of the file, so I know the file is being read. But why isn't it being returned to the fileContents variable?
Because the readFile(filename, encoding, callback) function doesn't return the file contents; it passes them to the given callback as its second argument (the first argument is the error). Note also that your callback declares its parameters in the wrong order as (data, err). Try modifying your code like this:
var fs = require('fs');

io.sockets.on('connection', function (socket) {
    var filename = __dirname + '/textCharacters.txt';
    fs.readFile(filename, 'utf8', function (err, data) {
        if (err) throw err;
        socket.emit('retrievedFileContent', {content: data});
    });
});
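For completeness, a sketch of the matching client-side listener; the event name retrievedFileContent comes from the server code above, while the io() bootstrap is the assumed default socket.io client setup:

// Client side (sketch): listen for the event emitted by the server above.
var socket = io();  // assumes the default socket.io client script is loaded

socket.on('retrievedFileContent', function (msg) {
    console.log(msg.content);  // the file contents sent by the server
});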
I am a beginner with Node.js. When I run the code below, I get this error:
TypeError [ERR_INVALID_CALLBACK]: Callback must be a function
var fs = require('fs');
fs.readFile('readMe.txt', 'utf8', function (err, data) {
fs.writeFile('writeMe.txt', data);
});
According to the documentation, fs.writeFile() takes (file, data[, options], callback) as parameters, so your code should look like this:
var fs = require('fs');
fs.readFile('readMe.txt', 'utf8', function (err, data) {
    fs.writeFile('writeMe.txt', data, function (err, result) {
        if (err) console.log('error', err);
    });
});
fs.writeFile(...) requires a third (or fourth) parameter, which is a callback function invoked when the operation completes. You should either provide a callback function or use fs.writeFileSync(...). See the Node fs docs for more info.
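For reference, a quick sketch of both options (the filename and data here are placeholders, not from the original question):

var fs = require('fs');
var data = 'some content';

// Option 1: provide a callback so write errors are reported asynchronously
fs.writeFile('writeMe.txt', data, function (err) {
    if (err) throw err;
});

// Option 2: use the synchronous variant, which throws on error instead of using a callback
fs.writeFileSync('writeMe.txt', data);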
Since Node 10, it is mandatory to pass a callback to fs.writeFile().
Node.js documented the purpose for the change: https://github.com/nodejs/node/blob/master/doc/api/deprecations.md#dep0013-fs-asynchronous-function-without-callback
You could add an empty callback, like this: fs.writeFile('writeMe.txt', data, () => {})
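In the context of the question's code, that looks roughly like this (a sketch; the empty arrow function only satisfies the callback requirement and silently ignores write errors):

var fs = require('fs');
fs.readFile('readMe.txt', 'utf8', function (err, data) {
    if (err) throw err;
    // The empty callback satisfies the Node 10+ requirement; write errors are ignored here.
    fs.writeFile('writeMe.txt', data, () => {});
});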
You can also use the synchronous API, like this:
var file2 = fs.readFileSync("./Public/n2.jpeg")
You could simply use the sync functions; readFileSync returns the file contents directly instead of taking a callback:
var fs = require('fs');
var data = fs.readFileSync('readMe.txt', 'utf8');
fs.writeFileSync('writeMe.txt', data);
Or use the callback-based functions, as shown in the other answers.
You can also import the fs module from fs/promises; these are the promisified versions of the fs functions, so you don't need to use callbacks at all:
import fs from 'fs/promises';

// Top-level await requires this file to be an ES module (Node 14.8+).
const data = await fs.readFile('readMe.txt', 'utf8');
await fs.writeFile('writeMe.txt', data);
var fs = require('fs');
fs.readFile('readme.txt', 'utf8', function (err, data) {
    fs.writeFile('writemeee.txt', data, function (err, result) {
        if (err) console.log('error', err);
    });
});
Try this. I have written the code using Promises.
const { readFile, writeFileSync } = require('fs');

const readText = (path) => {
    return new Promise((resolve, reject) => {
        readFile(path, 'utf8', (err, result) => {
            if (err)
                reject(err);
            else
                resolve(result);
        });
    });
};

readText('./contents/sample.txt')
    .then(val => writeFileSync('./results.txt', val))
    .catch(err => console.log(err));
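The same readText helper can also be consumed with async/await; a small sketch using the functions defined above:

(async () => {
    try {
        const val = await readText('./contents/sample.txt');
        writeFileSync('./results.txt', val);
    } catch (err) {
        console.log(err);
    }
})();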
This error hit me in the face when I was doing the following:
var hello = myfunction( callme() );
rather than
var hello = myfunction( callme );
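In fs terms, that is the difference between passing a callback and accidentally calling it. A hedged illustration (callme is a hypothetical callback used only for this example):

var fs = require('fs');

function callme(err) {
    if (err) console.error(err);
}

// Wrong: callme() is invoked immediately and returns undefined,
// so fs.writeFile receives undefined instead of a function and throws ERR_INVALID_CALLBACK.
fs.writeFile('writeMe.txt', 'hello', callme());

// Right: pass the function itself so fs can call it when the write finishes.
fs.writeFile('writeMe.txt', 'hello', callme);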
I'm trying to insert MongoDB documents from a CSV stream using async.queue, but I'm facing the following error. I've tried all the remedies given in similar SO posts.
The exact error message is:
C:\Users\admin\node_modules\mongodb\lib\mongo_client.js:406
        throw err
TypeError: object is not a function
    at C:\Users\admin\Desktop\mynodefile.js:13:2
    at C:\Users\admin\node_modules\mongodb\lib\mongo_client.js:403:11
    at process._tickCallback (node.js:355:11)
node.js code I used:
var csv = require('csv');
var async = require('async');
var fs = require('fs');
var MongoClient = require('mongodb').MongoClient;

MongoClient.connect('mongodb://localhost:27017', function (err, db) {
    if (err) throw err;

    var collection = db.collection('myCSVs');
    var queue = async.queue(collection.insert.bind(collection), 5);

    csv()
        .from.path('./input.csv', { columns: true })
        .transform(function (data, index, cb) {
            queue.push(data, function (err, res) {
                if (err) return cb(err);
                cb(null, res[0]);
            });
        })
        .on('error', function (err) {
            console.log('ERROR: ' + err.message);
        })
        .on('end', function () {
            queue.drain = function () {
                collection.count(function (err, count) {
                    console.log('Number of documents:', count);
                    db.close();
                });
            };
        });
});
You haven't mentioned the database name in your MongoClient.connect function call. You can do so like this:
MongoClient.connect('mongodb://localhost:27017/database_name',function(err, db) {
Then you can do:
var collection = db.collection('myCSVs');
(assuming myCSVs is a collection inside database_name).
Or you can also do:
MongoClient.connect('mongodb://localhost:27017', function (err, mongoclient) {
    var db = mongoclient.db('database_name');
    var collection = db.collection('myCSVs');
});
You have to change
var queue = async.queue(collection.insert.bind(collection), 5);
Into:
var q = async.queue(function (task, callback) {
console.log('hello ' + task.name);
callback();
}, 2);
In this line:
queue.push(data, function (err, res) {
    if (err) return cb(err);
    cb(null, res[0]);
});
you are calling push with data and a callback, but that isn't what is implemented in your
var queue = async.queue(collection.insert.bind(collection), 5);
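A minimal sketch of a worker that satisfies that contract, assuming you still want each queued document inserted via collection.insert (names reused from the question's code, same driver version as the question):

// Worker: receives one queued document and async.queue's completion callback.
var queue = async.queue(function (doc, done) {
    collection.insert(doc, function (err, res) {
        done(err, res);
    });
}, 5);

// The push call can stay as it is: its callback runs after the worker calls done().
queue.push(data, function (err, res) {
    if (err) return cb(err);
    cb(null, res && res[0]);
});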
I'm trying to use NodeJS to read a txt file by using fs. This is the code of app.js:
var fs = require('fs');
function read(file) {
    return fs.readFile(file, 'utf8', function (err, data) {
        if (err) {
            console.log(err);
        }
        return data;
    });
}
var output = read('file.txt');
console.log(output);
When I run:
node app.js
It says
undefined
I have fs installed and there is a file.txt in the same directory, why is it not working?
Your read function returns the result of fs.readFile, which is undefined, because fs.readFile doesn't return anything (it uses callbacks). Your second return statement is inside the anonymous callback, so it only returns from that callback, not from read. In any case, read has already returned by the time the file data arrives.
The standard way to use fs.readFile is with callbacks:
var fs = require('fs');

function read(file, callback) {
    fs.readFile(file, 'utf8', function (err, data) {
        if (err) {
            console.log(err);
        }
        callback(data);
    });
}

read('file.txt', function (data) {
    console.log(data);
});
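If you would rather get a value back from read, a hedged alternative is the promise-based API (require('fs').promises, available in recent Node versions), so read returns a promise of the file contents:

const fsp = require('fs').promises;

function read(file) {
    return fsp.readFile(file, 'utf8');  // returns a Promise that resolves with the contents
}

read('file.txt')
    .then(data => console.log(data))
    .catch(err => console.log(err));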
I'm trying to move uploaded file from /tmp to home directory using NodeJS/ExpressJS:
fs.rename('/tmp/xxxxx', '/home/user/xxxxx', function(err){
if (err) res.json(err);
console.log('done renaming');
});
But it doesn't work and no error is reported. When the new path is also in /tmp, it works.
I'm using Ubuntu, and home is on a different partition. Any fix?
Thanks
Yes, fs.rename does not move a file between two different disks/partitions. This is the correct behaviour: fs.rename provides identical functionality to rename(2) in Linux.
Read the related issue posted here.
To get what you want, you would have to do something like this:
var source = fs.createReadStream('/path/to/source');
var dest = fs.createWriteStream('/path/to/dest');
source.pipe(dest);
source.on('end', function() { /* copied */ });
source.on('error', function(err) { /* error */ });
Another way is to use fs.readFile and fs.writeFile; calling fs.unlink in the callback will then remove the temp file from the tmp directory.
var oldPath = req.files.file.path;
var newPath = ...;

fs.readFile(oldPath, function (err, data) {
    if (err) throw err;
    fs.writeFile(newPath, data, function (err) {
        if (err) throw err;
        fs.unlink(oldPath, function (err) {
            if (err) throw err;
            res.send("File uploaded to: " + newPath);
        });
    });
});
Updated ES6 solution ready to use with promises and async/await:
const fs = require('fs');

function moveFile(from, to) {
    const source = fs.createReadStream(from);
    const dest = fs.createWriteStream(to);
    return new Promise((resolve, reject) => {
        source.on('error', reject);
        dest.on('error', reject);
        // Once everything is flushed to the destination, remove the source so this is a move, not a copy.
        dest.on('finish', () => fs.unlink(from, err => (err ? reject(err) : resolve())));
        source.pipe(dest);
    });
}
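Example usage with async/await (a sketch; the paths are the ones from the question):

(async () => {
    try {
        await moveFile('/tmp/xxxxx', '/home/user/xxxxx');
        console.log('done moving');
    } catch (err) {
        console.error(err);
    }
})();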
This example is taken from Node.js in Action: a move() function that renames if possible, or falls back to copying.
var fs = require('fs');

module.exports = function move(oldPath, newPath, callback) {
    fs.rename(oldPath, newPath, function (err) {
        if (err) {
            if (err.code === 'EXDEV') {
                copy();
            } else {
                callback(err);
            }
            return;
        }
        callback();
    });

    function copy() {
        var readStream = fs.createReadStream(oldPath);
        var writeStream = fs.createWriteStream(newPath);

        readStream.on('error', callback);
        writeStream.on('error', callback);

        readStream.on('close', function () {
            fs.unlink(oldPath, callback);
        });

        readStream.pipe(writeStream);
    }
};
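Assuming the module above is saved as move.js next to your server code (a hypothetical filename), usage would look something like:

var move = require('./move');

// Move the uploaded file from /tmp to the home directory, as in the question.
move('/tmp/xxxxx', '/home/user/xxxxx', function (err) {
    if (err) return console.error(err);
    console.log('done moving');
});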
I'd like to store a stream in GridFS. However, it doesn't work as expected (i.e. no console log).
var mongo = require("mongodb");
var db = ...
db.open(function (err, db) {
    var stream = ...
    var name = 'foobar';
    var gs = new mongo.GridStore(db, new mongo.ObjectID, name, "w");

    gs.open(function (err, gs) {
        stream.on("data", function (data) {
            gs.write(data, function (err, gs) {
                if (err) {
                    // ...
                }
            });
        });

        stream.on("end", function () {
            gs.close(function (err, gs) {
                if (!err) {
                    console.log(name + " has been stored to database.");
                }
            });
        });
    });
});
Does anyone know why it doesn't work?
I think you forgot to pipe the stream into the GridFS file. You can do that directly, since a GridFS file behaves like a stream (see the stream.pipe(gs) call near the end of the code below). You also forgot the parentheses on the ObjectID constructor invocation, and you don't check for errors when opening either the db or the file. My code is structurally the same and it works. Hope that helps.
db.open(function (err, db) {
    var stream = ...
    var name = 'foobar';
    var gs = new mongo.GridStore(db, new mongo.ObjectID(), name, "w");

    gs.open(function (err, gs) {
        if (err) {
            console.log(...)
        } else {
            stream.on("data", function (data) {
                gs.write(data, function (err, gs) {
                    if (err) {
                        // ...
                    }
                });
            });

            stream.on("end", function () {
                gs.close(function (err, gs) {
                    if (!err) {
                        console.log(name + " has been stored to database.");
                    }
                });
            });

            stream.pipe(gs);
        }
    });
});