Move File in ExpressJS/NodeJS

I'm trying to move an uploaded file from /tmp to the home directory using NodeJS/ExpressJS:

fs.rename('/tmp/xxxxx', '/home/user/xxxxx', function (err) {
  if (err) res.json(err);
  console.log('done renaming');
});

But it doesn't work, and no error is reported. When the new path is also under /tmp, it does work.
I'm using Ubuntu, and home is on a different partition. Any fix?
Thanks

Yes, fs.rename does not move files between two different disks/partitions. This is the correct behaviour: fs.rename provides the same functionality as rename(2) on Linux, which fails with EXDEV when the source and destination are on different devices.
Read the related issue posted here.
To get what you want, you would have to do something like this:

var fs = require('fs');

var source = fs.createReadStream('/path/to/source');
var dest = fs.createWriteStream('/path/to/dest');

source.pipe(dest);
source.on('end', function () { /* copied; unlink the source to finish the move */ });
source.on('error', function (err) { /* error */ });
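For what it's worth, Node 10+ ships a stream.pipeline helper that wires up both error handlers for you; a minimal sketch of the same copy-then-delete move (the moveAcrossDevices name is mine):

var fs = require('fs');
var pipeline = require('stream').pipeline;

function moveAcrossDevices(from, to, callback) {
  pipeline(fs.createReadStream(from), fs.createWriteStream(to), function (err) {
    if (err) return callback(err);
    // Copy succeeded; remove the source to complete the "move".
    fs.unlink(from, callback);
  });
}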

Another way is to use fs.readFile and fs.writeFile; an fs.unlink in the callback then removes the temp file from the tmp directory. Note that this buffers the whole file in memory, so it is only suitable for small uploads:

var oldPath = req.files.file.path;
var newPath = ...;

fs.readFile(oldPath, function (err, data) {
  if (err) throw err;
  fs.writeFile(newPath, data, function (err) {
    if (err) throw err;
    fs.unlink(oldPath, function (err) {
      if (err) throw err;
      res.send("File uploaded to: " + newPath);
    });
  });
});
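On Node 8.5+, fs.copyFile avoids buffering the whole file in memory; a minimal sketch of the same copy-then-unlink idea (the moveByCopy name is mine):

var fs = require('fs');

function moveByCopy(oldPath, newPath, callback) {
  fs.copyFile(oldPath, newPath, function (err) {
    if (err) return callback(err);
    fs.unlink(oldPath, callback); // remove the original from /tmp
  });
}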

Updated ES6 solution, ready to use with promises and async/await. Note that resolving on the source's 'end' event can fire before the destination has flushed, and a move should also remove the source, so this version waits for the destination's 'finish' event and then unlinks:

const fs = require('fs');

function moveFile(from, to) {
  return new Promise((resolve, reject) => {
    const source = fs.createReadStream(from);
    const dest = fs.createWriteStream(to);
    source.on('error', reject);
    dest.on('error', reject);
    // Resolve only after the destination has flushed, then delete the source.
    dest.on('finish', () => fs.unlink(from, (err) => (err ? reject(err) : resolve())));
    source.pipe(dest);
  });
}
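Hypothetical usage with async/await inside an Express handler (the route, app, and req.files field names are illustrative, following the question's upload object):

app.post('/upload', async (req, res) => {
  try {
    await moveFile(req.files.file.path, '/home/user/' + req.files.file.name);
    res.send('done renaming');
  } catch (err) {
    res.status(500).json(err);
  }
});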

This example is taken from Node.js in Action: a move() function that renames when possible and falls back to copying across devices.
var fs = require('fs');

module.exports = function move(oldPath, newPath, callback) {
  fs.rename(oldPath, newPath, function (err) {
    if (err) {
      if (err.code === 'EXDEV') {
        // Cross-device rename: fall back to copy + delete.
        copy();
      } else {
        callback(err);
      }
      return;
    }
    callback();
  });

  function copy() {
    var readStream = fs.createReadStream(oldPath);
    var writeStream = fs.createWriteStream(newPath);
    readStream.on('error', callback);
    writeStream.on('error', callback);
    readStream.on('close', function () {
      fs.unlink(oldPath, callback);
    });
    readStream.pipe(writeStream);
  }
};
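Hypothetical usage, assuming the module above is saved as move.js:

var move = require('./move');

move('/tmp/xxxxx', '/home/user/xxxxx', function (err) {
  if (err) return console.error(err);
  console.log('done moving');
});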

Related

Only a few files being extracted using Node.js unzip modules

I've been using the Unzip library in NodeJS; however, only a few files are extracted, and on each execution a different set of files comes out:
fs.createReadStream('test.zip')
  .pipe(unzipper.Extract({ path: 'test' }))
  .on('finish', function () {
    console.log("finish");
  });
To fix the above issue, I had to switch to unzipper.Parse() and write each entry myself, instead of the code posted in the question. Note that the 'close' and 'finish' handlers go on the parse stream; mkdirp does not return an event emitter:

fs.createReadStream("test.zip")
  .on("error", log)
  .pipe(unzipper.Parse())
  .on("entry", function (entry) {
    var isDir = entry.type === "Directory";
    var fullpath = path.join(dest, entry.path);
    // console.log("fullpath ", fullpath)
    var directory = isDir ? fullpath : path.dirname(fullpath);
    mkdirp(directory, function (err) {
      if (err) throw err;
      if (!isDir) {
        entry.pipe(fs.createWriteStream(fullpath));
      }
    });
  })
  .on("close", function () {
    console.log("close");
    resolve("/DEST");
  })
  .on("finish", function () {
    console.log("finish");
    resolve("/DEST");
  });
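The snippet uses dest and resolve (and a log error handler) as free variables, so it presumably lives inside a Promise-returning helper. A self-contained sketch of that assumed context (the extract name is mine; mkdirp here is the old callback-style API):

var fs = require('fs');
var path = require('path');
var mkdirp = require('mkdirp');
var unzipper = require('unzipper');

function extract(zipPath, dest) {
  return new Promise(function (resolve, reject) {
    fs.createReadStream(zipPath)
      .on('error', reject)
      .pipe(unzipper.Parse())
      .on('entry', function (entry) {
        var isDir = entry.type === 'Directory';
        var fullpath = path.join(dest, entry.path);
        var directory = isDir ? fullpath : path.dirname(fullpath);
        mkdirp(directory, function (err) {
          if (err) return reject(err);
          if (!isDir) entry.pipe(fs.createWriteStream(fullpath));
        });
      })
      .on('finish', function () {
        resolve(dest);
      });
  });
}

// Usage:
extract('test.zip', 'test').then(function (dir) {
  console.log('extracted to', dir);
});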

Node.js script to copy files overwriting previous ones?

I want to create a Node.js script that copies files, overwriting previous versions of the same files.
My code doesn't work:
const fs = require('fs');

var src = "srcPath/file.json";
var newPath = "newPath/file.json";

var callback = (result) => {
  console.log(result);
}

copy(src, newPath, callback);

function copy(src, newPath, callback) {
  fs.copyFile(src, newPath, (err) => {
    if (err) {
      callback("Error Found:", err);
    } else {
      callback("Success: " + newPath);
    }
  });
}

execute()
It will work if you take this line off, since execute is never defined; the ReferenceError it throws crashes the script before the asynchronous copy callback can run:

execute()

Node.js - undefined variable after assignment in fs functions

Hello Stack Overflow!
I am fairly new to Node.js, and this seems to be Node-specific. I keep assigning a variable, yet I get errors that fileName is undefined. I have no idea why this is happening because, from my perspective, I'm just assigning a global variable inside a function, and that would work in every other language I've used. Does the callback passed to fs.readFile() somehow differ from a normal function? I honestly have no idea. Anyway, this is my full code:
var fs = require('fs');
var dateObject = new Date();
var fileName;

function Start() {
  fs.readFile('./counter.txt', function (err, data) {
    if (err) throw err;
    var current = parseInt(data);
    current++;
    fs.writeFile('./counter.txt', current.toString(), function (err) {
      if (err) throw err;
      console.log("Written!");
    });
    var fullDate = `${dateObject.getDate()}-${dateObject.getMonth() + 1}-${dateObject.getFullYear()}`;
    fileName = `./logs/${fullDate} ${current}.txt`;
    console.log(fileName);
    fs.appendFile(fileName, "Logger Initiated!", function (err) {
      if (err) throw err;
    });
  });
}

function PAL(text) {
  if (fileName === undefined) {
    console.log("...");
    return "500";
  }
  console.log(fileName);
  fs.appendFile(fileName, text, function (err) {
    if (err) throw err;
  });
}

module.exports = {Start, PAL};
Another file:
const logger = require('./logger')
// ....
app.listen(port, () => {
  logger.Start();
  logger.PAL("test");
})
You've hit an asynchronous trap in the code: fs.readFile is an async function, so when you run .PAL you expect the .Start function to be done, but that's not guaranteed. Make Start return a promise that resolves only once fileName has been set:
async function Start() {
  return new Promise((resolve, reject) => {
    fs.readFile('./counter.txt', function (err, data) {
      if (err) return reject(err);
      var current = parseInt(data);
      current++;
      fs.writeFile('./counter.txt', current.toString(), function (err) {
        if (err) return reject(err);
        console.log("Written!");
        var fullDate = `${dateObject.getDate()}-${dateObject.getMonth() + 1}-${dateObject.getFullYear()}`;
        fileName = `./logs/${fullDate} ${current}.txt`;
        console.log("FILENAME: ", fileName);
        fs.appendFile(fileName, "Logger Initiated!", function (err) {
          if (err) return reject(err);
          resolve();
        });
      });
    });
  });
}
Call: logger.Start().then(() => logger.PAL("test"));
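Or, with async/await in the other file (app and port come from the question's Express setup):

app.listen(port, async () => {
  await logger.Start();
  logger.PAL("test");
});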

Instructions not executed into the right order with nodejs

I am new to Node.js and am trying to concatenate multiple CSS files on the fly while coding. The chokidar package lets me call a function when a file is modified, but I have a problem with the execution order:
var goconcat =
  fs.readdir(paths, function (err, files) {
    if (err) {console.log(err);}
    fs.unlink(paths + 'concat.css', function (err) {
      if (err) throw err;
      var list = files.map(function (files) {
        return path.join(paths, files);
      });
      concat(list, paths + 'concat.css', function (err) {
        if (err) throw err;
      });
    });
  });
I want to first delete the previous file, then read the directory, and then write a new concat.css. However, I get an error:

Error: ENOENT: no such file or directory, open 'public/css/concat.css'
    at error (native)

It appears that concat() is executed before the directory update, not after, and is therefore trying to concatenate a file that has just been deleted. Why?
I know that Node.js executes these functions asynchronously, but I can't find a way to solve this problem. I tried async, but I can't declare a variable between two functions and I couldn't manage to make it work.
If the call cannot live inside a callback, the setTimeout(fn, 0) trick may help make sure it executes after the variable assignment:
var goconcat =
  fs.readdir(paths, function (err, files) {
    if (err) {console.log(err);}
    fs.unlink(paths + 'concat.css', function (err) {
      if (err) throw err;
      var list = files.map(function (files) {
        return path.join(paths, files);
      });
      setTimeout(function () {
        concat(list, paths + 'concat.css', function (err) {
          if (err) throw err;
        });
      }, 0);
    });
  });
The problem you're having is that your concat function can be invoked before unlink has finished deleting the file. You can prevent this with nested callbacks; however, you will probably get better control flow with a module like async, which saves you from Callback Hell.
Below is an example of how you can use the async module:
var fs = require('fs');
var async = require('async');

var myDir = __dirname + '/data';

async.waterfall([function (callback) {
  fs.readdir(myDir, 'utf-8', function (error, files) {
    if (error) {
      return callback(error);
    }
    return callback(null, files);
  });
}, function (files, callback) {
  fs.open(myDir + '/myFile', 'wx', function (error, f) {
    if (error && error.code === 'EEXIST') {
      return callback(null, 'EEXIST');
    }
    return callback(null, 'CREATE');
  });
}, function (fileStatus, callback) {
  if (fileStatus === 'EEXIST') {
    console.log('File exists. Deleting file...');
    fs.unlink(myDir + '/myFile', function (error) {
      if (error) {
        return callback(error);
      } else {
        return callback(null);
      }
    });
  } else {
    console.log('File does not exist...');
    return callback(null);
  }
}, function (callback) {
  fs.writeFile(myDir + '/myFile', "Hello World", function (err) {
    if (err) {
      return callback(err);
    }
    return callback(null, 'File Created');
  });
}], function (error, results) {
  console.error(error);
  console.log(results);
});
The waterfall function runs the tasks array of functions in series, each passing their results to the next in the array. However, if any of the tasks pass an error to their own callback, the next function is not executed, and the main callback is immediately called with the error.
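Applied to the question's own task, a sketch reusing paths and concat from the question (and treating a missing concat.css as fine on the first run) might look like:

async.waterfall([
  function (callback) {
    // Delete the previous build output; ignore ENOENT on the first run.
    fs.unlink(paths + 'concat.css', function (err) {
      callback(err && err.code !== 'ENOENT' ? err : null);
    });
  },
  function (callback) {
    fs.readdir(paths, callback);
  },
  function (files, callback) {
    var list = files.map(function (file) {
      return path.join(paths, file);
    });
    concat(list, paths + 'concat.css', callback);
  }
], function (err) {
  if (err) console.error(err);
});

Deleting before reading the directory also keeps the old concat.css out of the file list.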

Store Stream in GridFS using Node.js

I'd like to store a stream in GridFS. However, it doesn't work as expected (i.e. there is no console output):
var mongo = require("mongodb");
var db = ...

db.open(function (err, db) {
  var stream = ...
  var name = 'foobar';
  var gs = new mongo.GridStore(db, new mongo.ObjectID, name, "w");
  gs.open(function (err, gs) {
    stream.on("data", function (data) {
      gs.write(data, function (err, gs) {
        if (err) {
          // ...
        }
      });
    });
    stream.on("end", function () {
      gs.close(function (err, gs) {
        if (!err) {
          console.log(name + " has been stored to database.");
        }
      });
    });
  });
});
Does anyone know why it doesn't work?
I think you forgot to pipe the stream into the GridFS file. You can do that directly, since a GridFS file behaves like a stream (see the last line within the db.open callback below). You also forgot the parentheses when invoking the ObjectID constructor, and you don't check for errors when opening either the db or the file. My code is structurally the same and it works. Hope that helps...
db.open(function (err, db) {
  var stream = ...
  var name = 'foobar';
  var gs = new mongo.GridStore(db, new mongo.ObjectID(), name, "w");
  gs.open(function (err, gs) {
    if (err) console.log(...)
    else {
      stream.on("data", function (data) {
        gs.write(data, function (err, gs) {
          if (err) {
            // ...
          }
        });
      });
      stream.on("end", function () {
        gs.close(function (err, gs) {
          if (!err) {
            console.log(name + " has been stored to database.");
          }
        });
      });
      stream.pipe(gs);
    }
  });
});
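As an aside, later driver versions deprecate GridStore in favor of GridFSBucket; a minimal sketch of the same upload with that API, assuming the db and stream from above:

var bucket = new mongo.GridFSBucket(db);

stream.pipe(bucket.openUploadStream(name))
  .on('error', function (err) {
    console.error(err);
  })
  .on('finish', function () {
    console.log(name + " has been stored to database.");
  });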
