How can I upload multiple files in Node.js?

I need help uploading multiple files in Node.js.
I am using fs = require('fs').
My problem is that when I choose two files, only one file gets written to the upload directory.
This is my backend:
var files = req.files.files[0];
for (var i = 0; i < files.length; i++) {
  file = files[i];
  fs.readFile(files[i].path, function(error, data) {
    // console.log(files[i].path) displays the same path both times here
    fs.writeFile(uploadDirectory() + newFileName, data, function(error) {
    });
  });
}
Please help me: what is the problem in my code?
Thanks.

You should avoid using files[i] inside an asynchronous function's callback that is written directly inside a for-loop.
The reason console.log(files[i].path) displays the same path twice is that by the time the callbacks run, the for-loop has already finished, so you always get the last element of the array.
The easiest way to fix this is to create a new scope (a function):
var readAndWriteFile = function(file) {
  fs.readFile(file.path, function(error, data) {
    // console.log(file.path) displays what you expect.
    fs.writeFile(/* define new file name */, data, function(error) {
    });
  });
};

for (var i = 0; i < files.length; i++) {
  readAndWriteFile(files[i]);
}
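As an aside, a minimal sketch of the same fix using block-scoped let (available in modern Node): each loop iteration gets its own binding, so no wrapper function is needed. The upload directory and the file-naming scheme below are assumptions for illustration, not part of the original code:

const fs = require('fs');
const path = require('path');

// Assumed destination directory; substitute your own uploadDirectory().
const UPLOAD_DIR = path.join(__dirname, 'uploads');

for (let i = 0; i < files.length; i++) {
  const file = files[i]; // block-scoped: each callback sees its own file
  fs.readFile(file.path, function(error, data) {
    if (error) return console.error(error);
    // Assumed naming scheme: reuse the uploaded file's original name.
    const dest = path.join(UPLOAD_DIR, path.basename(file.name));
    fs.writeFile(dest, data, function(error) {
      if (error) console.error(error);
    });
  });
}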

Related

Nodejs module export returns undefined

I am using the recursive-readdir module to list all file types in a directory. The code below works; however, I get "undefined" displayed before my array.
The code below is in a js file "test.js" and is run from the command line using "node test.js".
Any suggestions on how best to return an array of all the required files without the undefined string being returned?
const recursive = require('recursive-readdir');

exports.file = function() {
  recursive(__dirname, ['!*.md'], function(error, files) {
    var arr = [];
    for (var i = 0; i < files.length; i++) {
      arr.push(files[i]);
    }
    console.log(arr);
  });
};

console.log(exports.file());
As mentioned in the comments, exports.file does not have a return value, which is why you get undefined when you try to log it.
If you want the caller of exports.file to have access to the files, you need to update exports.file to take a callback that can be invoked with the files as a parameter.
const recursive = require('recursive-readdir');

exports.file = function(callback) {
  recursive(__dirname, ['!*.md'], function(error, files) {
    var arr = [];
    for (var i = 0; i < files.length; i++) {
      arr.push(files[i]);
    }
    console.log(arr);
    callback(error, arr);
  });
};

exports.file(function(error, files) {
  console.log(files);
});
That will log the files with minimal changes to your original code.
Depending on how you need this code to evolve, you can probably simplify this quite a bit. First, do you really need arr in addition to files? If not, you can simply pass files to the callback:
const recursive = require('recursive-readdir');

exports.file = function(callback) {
  recursive(__dirname, ['!*.md'], function(error, files) {
    callback(error, files);
  });
};

exports.file(function(error, files) {
  console.log(files);
});
And since your callback to recursive just invokes the callback to exports.file, you could simply pass the exports.file callback straight to recursive:
const recursive = require('recursive-readdir');

exports.file = function(callback) {
  recursive(__dirname, ['!*.md'], callback);
};

exports.file(function(error, files) {
  console.log(files);
});
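If you prefer promises, here is a small sketch built with util.promisify (available since Node 8). The wrapper name recursiveAsync is just an illustration; recursive-readdir's standard (err, files) callback makes it promisify-friendly:

const util = require('util');
const recursive = require('recursive-readdir');

// Promise-returning wrapper around recursive-readdir.
const recursiveAsync = util.promisify(recursive);

exports.file = function() {
  return recursiveAsync(__dirname, ['!*.md']);
};

exports.file().then(function(files) {
  console.log(files);
});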

JavaScript: Appending to an existing file

I am trying to write some Node.js code that appends to an existing file. But each time I start the script, it overwrites the content of my test file.
var fs = require('fs');
var writer = fs.createWriteStream('test.txt', {flags: 'w'});

writer.on('finish', function() {
  console.log('data has been saved successfully');
});

function writeInList(id) {
  console.log(id);
  writer.write(id + ' \n');
}

for (var id = 0; id < 10; id++) {
  writeInList(id);
}
writer.end();
Of course I have searched for a solution:
Writing large files with Node.js
But I'm not able to make it run. Could anybody help me please?
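A likely fix, as a minimal sketch: the 'w' flag truncates test.txt every time the stream is opened, while the 'a' flag opens it for appending, so existing content is preserved:

var fs = require('fs');

// 'a' appends to test.txt; 'w' would truncate it on every run.
var writer = fs.createWriteStream('test.txt', {flags: 'a'});

writer.on('finish', function() {
  console.log('data has been saved successfully');
});

for (var id = 0; id < 10; id++) {
  writer.write(id + ' \n');
}
writer.end();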

Node.js - File size mismatch between archiver and fs.stat

The logic of my program is as follows:
1. get objects from AWS S3
2. zip all the objects obtained from S3 (I create a zip file called a.zip)
3. put the a.zip file back to S3
Now I face a problem: my a.zip file always lacks a few bytes. In the following code snippet, the results of console.log(archive.pointer()) and console.log(stat.size) are different. Does anyone have an idea? Thank you.
async.map(filename, util.getS3Obj, function(err, result) {
  var archiver = require('archiver');
  var archive = archiver.create('zip', {});
  archive.pipe(fs.createWriteStream('./tmp/Archive.zip'));
  for (var i = 0; i < result.length; i++) {
    archive.append(result[i], {name: fileNameArr[i]});
  }
  archive.finalize();
  archive.on('end', function() {
    console.log(archive.pointer());
    fs.stat('./tmp/Archive.zip', function(err, stat) {
      if (err) {}
      console.log(stat.size);
    });
  });
});
I solved the problem by checking the file size in the close event of the createWriteStream instead of the end event of the archive. In the archive's end event, the write stream has not yet finished flushing to disk.
var out = fs.createWriteStream('Archive.zip');
out.on('close', function() {
  // check file size here ...
});
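For completeness, a minimal sketch of the corrected flow, assuming the same archiver setup as in the question:

var fs = require('fs');
var archiver = require('archiver');

var out = fs.createWriteStream('./tmp/Archive.zip');
var archive = archiver.create('zip', {});

out.on('close', function() {
  // By the time 'close' fires, everything has been flushed to disk,
  // so the two sizes should now agree.
  console.log(archive.pointer());
  fs.stat('./tmp/Archive.zip', function(err, stat) {
    if (err) { return console.error(err); }
    console.log(stat.size);
  });
});

archive.pipe(out);
// append entries here as in the question ...
archive.finalize();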

How does Node.js implement async callbacks with a single process?

I don't know how Node implements its amazing idea, and I have a question about using it.
I have to read four files, file1.js, file2.js, file3.js and file4.js, and concatenate them into one big JavaScript file, result.js. It's important to keep their order.
So the natural choice for me is readFileSync instead of readFile.
I know it's a bad solution. Does anyone have a better idea?
Q: Is it possible for Node.js to read four files at the same time?
I hope someone can explain the principle of Node.js and when process.nextTick will be fired.
A: Yes, it is possible for Node to read four files at the same time.
My answer would be: whether to read the files synchronously or asynchronously depends on your situation. If it's configuration data, or the files can be cached, I would suggest just doing it synchronously; it's easy, it's only done once, and you won't be waiting around very much. Long operations on initialization are typical and can make things more efficient in the long run. That being said, reading four files in order asynchronously, so that your program can do other things in the background, isn't that hard. I will work on sync and async examples of each and add an edit.
/* jshint node:true */
var fs = require('fs');

// Synchronous version: reads the files one after another, blocking,
// and returns the concatenated contents in order.
function readFilesSync(fileNames) {
  'use strict';
  var results = '';
  for (var i = 0; i < fileNames.length; i++) {
    results += fs.readFileSync(fileNames[i]);
  }
  return results;
}

// Asynchronous version: reads the files one at a time, in order,
// chaining each read from the previous read's callback.
function readFiles(fileNames, callback) {
  'use strict';
  var results = '';
  function readFile(index) {
    if (index < fileNames.length) {
      fs.readFile(fileNames[index], function (err, data) {
        results += data;
        readFile(index + 1);
      });
    } else {
      callback(results);
    }
  }
  readFile(0);
}

// Asynchronous version: starts all reads at once, then reassembles
// the results in the original order once the last read finishes.
function readAllFilesAtOnce(fileNames, callback) {
  'use strict';
  var results = {};
  var numFiles = fileNames.length;
  function callBackWrapper() {
    var resultsOrdered = '';
    for (var i = 0; i < fileNames.length; i++) {
      resultsOrdered += results[fileNames[i]];
    }
    callback(resultsOrdered);
  }
  function readFileAsync(fileName) {
    fs.readFile(fileName, function (err, data) {
      results[fileName] = data;
      numFiles--;
      if (numFiles === 0) {
        callBackWrapper();
      }
    });
  }
  for (var i = 0; i < fileNames.length; i++) {
    readFileAsync(fileNames[i]);
  }
}

function doSomethingWithTheData(data) {
  'use strict';
  console.log('Results async: ' + data);
}

function doSomethingWithTheData2(data) {
  'use strict';
  console.log('Results async all at once: ' + data);
}

var fileNamesArray = ['blah.js', 'file.js', 'hello.txt'];

console.log('The results sync: ' + readFilesSync(fileNamesArray));
readFiles(fileNamesArray, doSomethingWithTheData);
readAllFilesAtOnce(fileNamesArray, doSomethingWithTheData2);
EDIT: I have added a method above to read all of the files at once.
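As a further aside, on newer Node versions (10+) the same all-at-once pattern can be sketched with promises. Promise.all resolves with results in the same order as the input array, so the concatenation comes out right even though the reads overlap. This is a modern alternative, not part of the original answer:

const fsp = require('fs').promises;

async function readAllFiles(fileNames) {
  // All reads start immediately; Promise.all preserves input order.
  const contents = await Promise.all(
    fileNames.map((name) => fsp.readFile(name, 'utf8'))
  );
  return contents.join('');
}

readAllFiles(['blah.js', 'file.js', 'hello.txt'])
  .then((data) => console.log('Results promise-based: ' + data))
  .catch(console.error);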
process.nextTick does no more than schedule the function to run the next time around the event loop. For example:
process.nextTick(function() {
  console.log('never printed out');
});
while (true);
Example 2:
process.nextTick(function() {
  console.log('printed last');
});
console.log('printed first');

Asynchronously reading and caching multiple files in nodejs

I have an array which holds the URLs of several files. For example:
var files = ['1.html', '2.html', '3.html'];
I need to read them asynchronously and save them in an object named cache (cache = {}).
To do this, I used the following code:
for (var i = 0; i < files.length; i++) {
  require('fs').readFile(files[i], 'utf8', function (error, data) {
    cache[files[i]] = data;
  });
}
In the end I have this result:
cache = { undefined : 'File 3 content' }
I do understand that readFile runs after the loop has ended and loses the loop's scope. Is there a way to fix this, or another method to read files from an array and cache them?
When your callback to readFile executes, the for loop will already have finished. So i will be files.length and files[i] will be undefined. To mitigate this, you need to wrap the variables in a closure. The simplest way to do this is to create a function which does your readFile call, and call that in the loop:
function read(file) {
  require('fs').readFile(file, 'utf8', function (error, data) {
    cache[file] = data;
  });
}

for (var i = 0; i < files.length; i++) {
  read(files[i]);
}
For even better execution control, you might want to look into async:
var fs = require('fs');
var async = require('async'); // the 'async' utility module from npm

function readAsync(file, callback) {
  fs.readFile(file, 'utf8', callback);
}

async.map(files, readAsync, function(err, results) {
  // results = ['file 1 content', 'file 2 content', ...]
});
Edit: Made use of a helper function for the async example.
The existing answer didn't work for me. I did find an NPM package that did the job: https://www.npmjs.com/package/read-multiple-files. After running npm install read-multiple-files at the command line, here's the code I used:
var readMultipleFiles = require('read-multiple-files');

var files = ['1.html', '2.html', '3.html'];
console.log("\n");

readMultipleFiles(files, 'utf8', function(err, inputFiles) {
  if (err) {
    console.log("Read Error: " + err);
  }
  fileOne = inputFiles[0];
  fileTwo = inputFiles[1];
  // ...
  console.log(fileOne);
  console.log(fileTwo);
});
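If you also need the cache object from the original question, a small sketch: the contents appear to come back in the same order as the input array (an assumption based on the package's documented example), so you can key them by filename:

var readMultipleFiles = require('read-multiple-files');

var files = ['1.html', '2.html', '3.html'];
var cache = {};

readMultipleFiles(files, 'utf8', function(err, contents) {
  if (err) return console.error('Read Error: ' + err);
  // Assumes contents[i] corresponds to files[i].
  files.forEach(function(name, i) {
    cache[name] = contents[i];
  });
  console.log(cache);
});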
