Node.js multiple file upload

So, I have this:
async function uploadImageToFtp(fileName, path) {
    const client = new ftp.Client()
    client.ftp.verbose = true
    try {
        await client.access({
            host: process.env.FTP_HOST,
            user: process.env.FTP_USER,
            password: '123',
            secure: false
        })
        await client.uploadFrom(path, "tables/" + fileName)
    } catch (err) {
        console.log(err)
    }
    client.close()
}
fs.readdir('plates', function (err, files) {
    // handling error
    if (err) {
        return console.log('Unable to scan directory: ' + err);
    }
    // listing all files using forEach
    files.forEach(function (file) {
        uploadImageToFtp(file, 'plates/' + file);
        console.log(file);
    });
});
But I get "too many FTP connections...".
So, how do I wait for one file to finish uploading before continuing with the second, and so on?
Thank you!

Use a for...of loop instead of forEach, and use async/await throughout:
fs.readdir('plates', async function (err, files) { // note: the callback is now async; `readdir` itself still takes a callback
    // handling error
    if (err) {
        return console.log('Unable to scan directory: ' + err);
    }
    // upload the files one at a time
    for (const file of files) {
        await uploadImageToFtp(file, 'plates/' + file); // wait until this upload is done
        console.log(file);
    }
});
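A further option (not part of the original answer) is to open a single FTP connection, upload every file over it, and close it once at the end, which also avoids the "too many FTP connections" error. A minimal sketch, assuming the basic-ftp package implied by `new ftp.Client()`, and assuming the password is read from an environment variable:
const fs = require("fs");
const ftp = require("basic-ftp"); // assumption: the question uses the basic-ftp client

async function uploadAllPlates() {
    const client = new ftp.Client();
    client.ftp.verbose = true;
    try {
        // open a single connection and reuse it for every file
        await client.access({
            host: process.env.FTP_HOST,
            user: process.env.FTP_USER,
            password: process.env.FTP_PASSWORD, // assumption: password moved to an env var
            secure: false
        });
        const files = await fs.promises.readdir("plates");
        for (const file of files) {
            await client.uploadFrom("plates/" + file, "tables/" + file);
            console.log(file);
        }
    } catch (err) {
        console.log(err);
    }
    client.close();
}

uploadAllPlates();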

Related

Read from Dropbox stream into Buffer

I need to download a file from Dropbox into a buffer on my server. Due to security issues I can't download the file directly to the client. Instead, I send a request to my server, fetch the file from Dropbox, and then forward it to the client. I managed to implement this by writing the Dropbox stream to a file on my server and then sending that file to the client.
I need to implement this mechanism without writing the Dropbox stream to a file on my server: create a buffer, write into it, and then forward the buffer to the client.
export const downloadFileFromDropbox = async function downloadFileFromDropbox(fileName,
    folderNameOnDropbox) {
    let isSucceeded;
    const message = [];
    let downloadResult;
    let fileBuffered = "";
    // authentication
    const dropbox = dropboxV2Api.authenticate({
        token: process.env.DEV_DROPBOX_SECRET_KEY
    });
    // configuring parameters
    const params = Object.freeze({
        resource: "files/download",
        parameters: {
            path: `/${folderNameOnDropbox}/${fileName}`
        }
    });
    let dropboxPromise = new Promise(function (resolve, reject) {
        dropbox(params, function (err, result) {
            if (err) {
                reject(err);
            } else {
                resolve(result);
            }
        }).pipe(fs.createWriteStream(/* need to implement buffer here */));
    });
    await dropboxPromise.then(async function (resultObj) {
        isSucceeded = true;
        message.push("fileDownload_OK");
    }).catch(async function (err) {
        isSucceeded = false;
        message.push(err.message);
    });
    downloadResult = {
        isSucceeded,
        message,
        /* Buffer */
    };
    return downloadResult;
};
There is no way to convert a file into a buffer without writing it to some location.
What worked for me is:
write the file to a temporary folder
read it and convert it into a buffer
send the buffer back to the client
delete the file from the temporary location
here's my code:
dropbox = dropboxV2Api.authenticate({ token: credentials.access_token });
dropbox(
    {
        resource: 'files/download',
        parameters: {
            path: `${req.query.folder}` + `/${req.query.filename}`,
        },
    },
    (err, result, response) => {
        // download completed
        if (err) {
            return res.status(500).send(err);
        } else {
            console.log('download completed');
        }
    }
).pipe(
    fs
        .createWriteStream(`./app/tmp/` + `${req.query.filename}`)
        .on('finish', () => {
            fs.readFile(
                `./app/tmp/` + `${req.query.filename}`,
                function (err, buffer) {
                    if (err) {
                        res.status(500).send(err);
                    } else {
                        fs.unlink(`./app/tmp/` + `${req.query.filename}`, (err) => {
                            if (err) {
                                console.error(err);
                                return;
                            }
                            // file removed
                            res.status(200).send(buffer);
                        });
                    }
                }
            );
        })
);
I hope this helps, even though it's a little late.
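As a side note (not from the original answer): if writing to disk must be avoided entirely, the stream returned by dropbox-v2-api can, in principle, be collected into an in-memory Buffer instead of being piped to a file. A rough sketch under that assumption, with getDropboxFileBuffer as a hypothetical helper name:
const dropboxV2Api = require("dropbox-v2-api");

// hypothetical helper: resolves with a Buffer holding the downloaded file
function getDropboxFileBuffer(fileName, folderNameOnDropbox) {
    const dropbox = dropboxV2Api.authenticate({
        token: process.env.DEV_DROPBOX_SECRET_KEY
    });
    return new Promise((resolve, reject) => {
        const chunks = [];
        // the download call returns a readable stream (the same one piped to a file above)
        const stream = dropbox({
            resource: "files/download",
            parameters: { path: `/${folderNameOnDropbox}/${fileName}` }
        }, (err) => {
            if (err) reject(err);
        });
        stream.on("data", (chunk) => chunks.push(chunk));
        stream.on("end", () => resolve(Buffer.concat(chunks)));
        stream.on("error", reject);
    });
}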

Instructions not executed in the right order with Node.js

I am new to Node.js and trying to concatenate multiple CSS files on the fly while coding. The chokidar package lets me call a function when a file is modified, but I have a problem with the order of execution.
var goconcat =
    fs.readdir(paths, function (err, files) {
        if (err) { console.log(err); }
        fs.unlink(paths + 'concat.css', function (err) {
            if (err) throw err;
            var list = files.map(function (files) {
                return path.join(paths, files);
            });
            concat(list, paths + 'concat.css', function (err) {
                if (err) throw err
            });
        });
    });
I want to first delete the previous file, then read the directory, and then write a new concat.css. However, I get this error:
Error: ENOENT: no such file or directory, open 'public/css/concat.css'
    at error (native)
It appears that concat() is executed before the directory update rather than after, so it is trying to concatenate a file that has just been deleted. Why?
I know that Node.js executes these functions asynchronously, but I can't find a way to solve this problem. I tried async, but I can't declare a variable between two functions and I couldn't manage to make it work.
If it can't live inside a callback, the setTimeout(fn, 0) trick may help make sure it's executed after the variable assignment:
var goconcat =
    fs.readdir(paths, function (err, files) {
        if (err) { console.log(err); }
        fs.unlink(paths + 'concat.css', function (err) {
            if (err) throw err;
            var list = files.map(function (files) {
                return path.join(paths, files);
            });
            setTimeout(function () {
                concat(list, paths + 'concat.css', function (err) {
                    if (err) throw err
                });
            }, 0);
        });
    });
The problem you're having is that your concat function is being invoked before the file is deleted by unlink. You can prevent this with nested callbacks; however, you can get better control flow if you use a module like async and save yourself from callback hell.
Below is an example of how you can use the async module.
var fs = require('fs');
var async = require('async');
var myDir = __dirname + '/data';

async.waterfall([function (callback) {
    fs.readdir(myDir, 'utf-8', function (error, files) {
        if (error) {
            return callback(error);
        }
        return callback(null, files);
    });
}, function (files, callback) {
    fs.open(myDir + '/myFile', 'wx', function (error, f) {
        if (error && error.code === 'EEXIST') {
            return callback(null, 'EEXIST');
        }
        return callback(null, 'CREATE');
    });
}, function (fileStatus, callback) {
    if (fileStatus === 'EEXIST') {
        console.log('File exists. Deleting file...');
        fs.unlink(myDir + '/myFile', function (error) {
            if (error) {
                return callback(error);
            } else {
                return callback(null);
            }
        });
    } else {
        console.log('File does not exist...');
        return callback(null);
    }
}, function (callback) {
    fs.writeFile(myDir + '/myFile', "Hello World", function (err) {
        if (err) {
            return callback(err);
        }
        return callback(null, 'File Created');
    });
}], function (error, results) {
    console.error(error);
    console.log(results);
});
The waterfall function runs the tasks array of functions in series,
each passing their results to the next in the array. However, if any
of the tasks pass an error to their own callback, the next function is
not executed, and the main callback is immediately called with the
error.
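For comparison (not part of the original answers), the same "delete, then read, then write" sequence from the question can also be expressed with async/await and the fs.promises API (Node 10+), avoiding both nested callbacks and the async module. A minimal sketch, assuming `paths` and the callback-style `concat` function from the question are in scope:
const fs = require('fs');
const path = require('path');

async function goconcat(paths) {
    // 1. delete the previous concat.css first (ignore the error if it does not exist yet)
    await fs.promises.unlink(path.join(paths, 'concat.css')).catch(() => {});
    // 2. read the directory only after the delete has finished
    const files = await fs.promises.readdir(paths);
    const list = files.map(file => path.join(paths, file));
    // 3. write the new concat.css last, wrapping the callback-style concat from the question
    await new Promise((resolve, reject) => {
        concat(list, path.join(paths, 'concat.css'), err => (err ? reject(err) : resolve()));
    });
}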

TypeError: res.json is not a function when using require('fs');

Trying to pass the contents of files I am reading via res.json. I think I am overwriting my res function, but I don't see a fix.
app.get('/uploads/', (res, req) => {
    dirname = './client/uploads'
    fs.readdir(dirname, function (err, filenames) {
        console.log(filenames)
        if (err) {
            console.log(err);
            return;
        }
        filenames.forEach(function (filename) {
            if (filename != '.DS_Store') {
                fs.readFile(dirname + "/" + filename, 'utf-8', function (err, content) {
                    res.json({ content: content })
                    if (err) {
                        // onError(err);
                        console.log(err)
                        return;
                    }
                });
            }
        });
    });
})
You mismatched the arguments of the /uploads route handler; req is the first argument:
app.get('/uploads/', (req, res) => {
//...
})
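A follow-up note (not in the original answer): even with the arguments fixed, calling res.json inside the forEach tries to send a response once per file and throws "Cannot set headers after they are sent" from the second file onward. One way around that is to read all the files first and respond once; a sketch assuming the same ./client/uploads directory:
const express = require('express');
const fs = require('fs');
const path = require('path');

const app = express();

app.get('/uploads/', async (req, res) => {
    const dirname = './client/uploads';
    try {
        const filenames = (await fs.promises.readdir(dirname))
            .filter(name => name !== '.DS_Store');
        // read every file, then send a single JSON response
        const contents = await Promise.all(
            filenames.map(name => fs.promises.readFile(path.join(dirname, name), 'utf-8'))
        );
        res.json(filenames.map((name, i) => ({ filename: name, content: contents[i] })));
    } catch (err) {
        console.log(err);
        res.status(500).json({ error: 'Unable to read uploads' });
    }
});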

Lambda Function Error: EROFS: read-only file system, open './tmp/test.zip' Process exited before completing request

I download a zip file from an S3 bucket, extract it, and finally upload one file back to the S3 bucket in a Lambda function using Node.js. But I am getting this error:
Error: EROFS: read-only file system, open './tmp/test.zip'
"Process exited before completing request"
exports.handler = function (callback) {
    downloadZipFile(params, downloadPath, function (err) {
        if (err) {
            callback(err);
        } else {
            processZipFile(downloadPath, function (err) {
                if (err) {
                    callback(err);
                } else {
                    callback(null);
                }
            });
        }
    });
};

function downloadZipFile(params, downloadPath, callback) {
    const file = fs.createWriteStream(downloadPath);
    s3.getObject(params)
        .on('httpData', function (chunk) {
            file.write(chunk);
        })
        .on('success', function () {
            callback(null);
        })
        .on('error', function (err) {
            callback(err);
        })
        .on('complete', function () {
            file.end();
        })
        .send();
}

function processZipFile(filePath) {
    const stats = fs.statSync(filePath)
    const fileSizeInBytes = stats.size
    if (fileSizeInBytes > 0) {
        var srcPath = filePath;
        var destPath = "./tmp";
        targz.decompress({
            src: srcPath,
            dest: destPath
        }, function (err) {
            if (err) {
                console.log(err);
            } else {
                console.log("Done!");
                UploadFile();
            }
        });
    }
}

function UploadFile() {
    var body = fs.createReadStream('./tmp/SampleFile.txt')
    var srcfileKey = "SampleFile.txt";
    // Upload the stream
    var s3obj = new AWS.S3({ params: { Bucket: bucketName, Key: srcfileKey } });
    s3obj.upload({ Body: body }, function (err, data) {
        if (err) {
            console.log("An error occurred", err);
        }
        console.log("Uploaded the file at", data.Location);
    })
}
You need to change the file path to just /tmp instead of ./tmp. Lambda only allows you to write to the /tmp directory.
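To make that concrete (an illustration, not from the original answer), every path in the code above that currently starts with ./tmp should point at the absolute /tmp directory instead, for example:
const path = require('path');

// /tmp is the only writable location in the Lambda execution environment
const downloadPath = path.join('/tmp', 'test.zip');        // instead of './tmp/test.zip'
const destPath = '/tmp';                                   // instead of "./tmp" in processZipFile
const extractedFile = path.join('/tmp', 'SampleFile.txt'); // instead of './tmp/SampleFile.txt' in UploadFile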

How to upload files to s3 synchronously using node.js api

I have the following piece of code:
array.forEach(function (item) {
    // *** some processing on each item ***
    var params = { Key: item.id, Body: item.body };
    s3bucket.upload(params, function (err, data) {
        if (err) {
            console.log("Error uploading data. ", err);
        } else {
            console.log("Success uploading data");
        }
    });
});
Because s3bucket.upload executes asynchronously, the loop finishes before all the items have been uploaded.
How can I force s3bucket.upload to behave synchronously? That is, don't jump to the next iteration until the current item has been uploaded to S3 (or has failed).
Thanks
You can use the async module's each or eachSeries: https://github.com/caolan/async#each
function upload(array, next) {
    async.eachSeries(array, function (item, cb) {
        var params = { Key: item.id, Body: item.body };
        s3bucket.upload(params, function (err, data) {
            if (err) {
                console.log("Error uploading data. ", err);
                cb(err)
            } else {
                console.log("Success uploading data");
                cb()
            }
        })
    }, function (err) {
        if (err) console.log('one of the uploads failed')
        else console.log('all files uploaded')
        next(err)
    })
}
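For reference, calling this helper could look like the following sketch; it assumes the async module is installed and that `s3bucket` and `array` are already set up as in the question:
var async = require('async');

// assumption: s3bucket and array are defined as in the question
upload(array, function (err) {
    if (err) {
        console.log('Upload finished with an error:', err);
    } else {
        console.log('All uploads finished in order');
    }
});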
Better to use promises as suggested in one of the comments:
const uploadToS3 = async (items) => {
    for (const item of items) { // iterate over the function's own parameter
        const params = { Key: item.id, Body: item.body };
        try {
            const data = await s3bucket.upload(params).promise();
            console.log("Success uploading data");
        } catch (err) {
            console.log("Error uploading data. ", err);
        }
    }
}
You could pass a callback function; that way the rest of the code is executed only when the upload has completed. This does not answer your question directly, but could be an alternative option:
array.forEach(function (item) {
    // *** some processing on each item ***
    var params = { Key: item.id, Body: item.body };
    var f1 = function () {
        // stuff to do when upload is ok!
    }
    var f2 = function () {
        // stuff to do when upload fails
    }
    s3bucket.upload(params, function (err, data) {
        if (err) {
            // run my function
            f2();
            console.log("Error uploading data. ", err);
        } else {
            // run my function
            f1();
            console.log("Success uploading data");
        }
    });
});
