I am uploading an image from Node.js. Control never reaches req.on('end'); nothing inside that handler is printed, and I cannot identify where the problem is. I am writing an API, called from client-side JavaScript, that uploads an image to a specific location on the server.
app.post('/tde/api/photo/:widgetId/:choosenFileName', function(req, res) {
    console.log("In file Upload..");
    console.log(req.params.widgetId);
    console.log(req.params.choosenFileName);
    res.writeHead(200, { 'Content-Type': 'application/binary' });
    var filedata = '';
    var chunks = [];
    //req.setEncoding('binary');
    req.on('data', function(chunk) {
        //filedata += chunk;
        chunks.push(chunk);
    });
    req.on('end', function(chunk) {
        var dir = 'uploads/' + req.params.widgetId;
        if (!fs.existsSync(dir)) {
            fs.mkdirSync(dir);
            console.log("directory created..");
        }
        fs.readdir(dir, function(err, filenames) {
            if (err) {
                onError(err);
                return;
            }
            filenames.forEach(function(filename) {
                console.log(filename);
                fs.unlink(dir + '/' + filename, function(err) {
                    if (err) {
                        return console.error(err);
                    }
                    console.log("File deleted successfully!");
                });
            });
            //fs.writeFile('uploads/'+req.params.widgetId+'/sanmoy.jpg', chunk, function(err) {
            var fileName = req.params.choosenFileName;
            var widgetId = req.params.widgetId;
            //fs.writeFile('uploads/'+widgetId+'/'+fileName, filedata, 'binary', function(err) {
            var buffer = Buffer.concat(chunks);
            fs.writeFile('uploads/' + widgetId + '/' + fileName, buffer, function(err) {
                if (err) {
                    return console.error(err);
                }
                console.log("writing file success!");
            });
        });
    });
    res.end("File is uploaded");
});
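For reference, one common reason req.on('end') never fires in an Express route is that a body-parsing middleware (for example express.json() or body-parser) has already consumed the request stream before the handler runs, so the listeners are attached after the data has been read. Below is a minimal sketch of the same route with the raw-body handling isolated; the route, parameter names, and uploads directory come from the question, while everything else (no body-parsing middleware on this path, the error responses) is an assumption:

var fs = require('fs');
var path = require('path');

// Minimal test route: no body-parsing middleware should be registered for this path.
app.post('/tde/api/photo/:widgetId/:choosenFileName', function(req, res) {
    var chunks = [];
    req.on('data', function(chunk) {
        chunks.push(chunk); // collect raw Buffer chunks
    });
    req.on('end', function() {
        var dir = path.join('uploads', req.params.widgetId);
        if (!fs.existsSync(dir)) {
            fs.mkdirSync(dir);
        }
        var target = path.join(dir, req.params.choosenFileName);
        fs.writeFile(target, Buffer.concat(chunks), function(err) {
            if (err) {
                res.statusCode = 500;
                return res.end('write failed');
            }
            // Respond only after the write has finished.
            res.end('File is uploaded');
        });
    });
    req.on('error', function(err) {
        console.error(err);
        res.statusCode = 500;
        res.end('upload failed');
    });
});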
I am trying to download files from Google Cloud Storage and zip them.
async function makeZippedFiles(destination, all_file_links) {
    console.log("In the zip file function");
    for (let i in all_file_links) {
        let name = all_file_links[i]['name'];
        let archive = archiver('zip', {
            zlib: { level: 9 } // Sets the compression level.
        });
        archive.on('error', function (err) {
            throw err;
        });
        let output = fs.createWriteStream(__dirname + `/${name}.zip`);
        console.log("loop number", i);
        let sourceFile = all_file_links[i]['source'];
        console.log(sourceFile, name);
        let remoteFile = bucket.file(sourceFile);
        let read_file = remoteFile.createReadStream();
        await archive.append(read_file, { name: name });
        read_file
            .on('error', function (err) {
                console.log(err);
            })
            .on('response', function (response) {
                console.log("writing file", name);
                // console.log(response);
                // Server connected and responded with the specified status and headers.
            })
            .on('end', function () {
                console.log("file downloaded", name);
                // The file is fully downloaded.
            });
        archive.pipe(output);
        archive.finalize();
    }
}
In the example above, I am looping through all the files and creating individual archives, i.e. if I download two files, I create two separate archives. This works.
However, if I want to zip all the files into one archive, I get the following error:
start of central directory not found; zipfile corrupt. (please
check that you have transferred or created the zipfile in the
appropriate BINARY mode and that you have compiled UnZip properly)
The code I used is:
async function makeZippedFiles(destination, all_file_links) {
    console.log("In the zip file function");
    let archive = archiver('zip', {
        zlib: { level: 9 } // Sets the compression level.
    });
    archive.on('error', function (err) {
        throw err;
    });
    let output = fs.createWriteStream(__dirname + `/${destination}.zip`);
    for (let i in all_file_links) {
        let name = all_file_links[i]['name'];
        console.log("loop number", i);
        let sourceFile = all_file_links[i]['source'];
        console.log(sourceFile, name);
        let remoteFile = bucket.file(sourceFile);
        let read_file = remoteFile.createReadStream();
        await archive.append(read_file, { name: name });
        read_file
            .on('error', function (err) {
                console.log(err);
            })
            .on('response', function (response) {
                console.log("writing file", name);
                // console.log(response);
                // Server connected and responded with the specified status and headers.
            })
            .on('end', function () {
                console.log("file downloaded", name);
                // The file is fully downloaded.
            });
        archive.pipe(output);
    }
    archive.finalize();
}
Found the solution. It was carelessness actually.
async function makeZippedFiles(destination, all_file_links) {
    console.log("In the zip file function");
    let archive = archiver('zip', {
        zlib: { level: 9 } // Sets the compression level.
    });
    archive.on('error', function (err) {
        throw err;
    });
    let output = fs.createWriteStream(__dirname + `/${destination}.zip`);
    archive.pipe(output);
    for (let i in all_file_links) {
        let name = all_file_links[i]['name'];
        console.log("loop number", i);
        let sourceFile = all_file_links[i]['source'];
        console.log(sourceFile, name);
        let remoteFile = bucket.file(sourceFile);
        let read_file = remoteFile.createReadStream();
        await archive.append(read_file, { name: name });
        read_file
            .on('error', function (err) {
                console.log(err);
            })
            .on('response', function (response) {
                console.log("writing file", name);
                // console.log(response);
                // Server connected and responded with the specified status and headers.
            })
            .on('end', function () {
                console.log("file downloaded", name);
                // The file is fully downloaded.
            });
    }
    archive.finalize();
}
I moved archive.pipe(output) to before the for loop and it works.
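As an aside, archiver's documentation suggests treating the archive as finished only when the output file stream emits 'close', since finalize() returns before the last bytes are flushed to disk. A small sketch of that pattern, reusing the output and archive variables from the code above:

output.on('close', function () {
    // archive.pointer() reports the total number of bytes written to the zip.
    console.log('zip complete,', archive.pointer(), 'total bytes');
});
archive.finalize();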
I have a function that downloads the user input (currently named app.json) from the browser (client) to the server:
function downloadUpload(callback) {
    //Using formidable node package for downloading user input to server
    var form = new formidable.IncomingForm();
    form.on('fileBegin', function(name, file) {
        file.path = file.name;
    });
    form.parse(req, function(err, fields, files) {
        res.writeHead(200, { 'content-type': 'text/plain' });
        res.write('received upload:\n\n');
        res.end(util.inspect({ fields: fields, files: files }));
    });
    callback(null);
}
I have another function that takes the file downloaded above and converts it into the required format (final.json), something like this:
function UpdateCode(callback) {
    var obj = fs.readFileSync('app.json', 'utf8');
    var object = JSON.parse(obj);
    var data2 = [];
    for (var j = 0; j < object.length; j++) {
        if (object[j].value == "TEST") {
            data2.push(object[j]);
        }
    }
    console.log(data2);
    fs.appendFile('final.json', JSON.stringify(data2), function(err) {
        if (err) throw err;
        console.log('Saved!');
    });
    callback(null);
}
I want them to run in order, so I used async's series method like this:
async.series([
    downloadUpload,
    UpdateCode
], function(err, result) {
    if (err) throw err;
    else {
        console.log(result);
    }
});
The problem is that the file (app.json) does get downloaded, but an error is displayed saying that app.json doesn't exist in the current folder or directory. Where am I going wrong?
This is likely what you need. In your version, callback(null) runs synchronously while form.parse() is still processing the upload, so UpdateCode executes before app.json exists on disk; the callback has to wait for formidable's end event. Pinning file.path to "app.json" also guarantees the saved name matches what UpdateCode reads, rather than whatever filename the browser sends.
function downloadUpload(callback) {
    //Using formidable node package for downloading user input to server
    var form = new formidable.IncomingForm();
    form.on('fileBegin', function(name, file) {
        file.path = "app.json";
    });
    form.parse(req, function(err, fields, files) {
        res.writeHead(200, {
            'content-type': 'text/plain'
        });
        res.write('received upload:\n\n');
        res.end(util.inspect({
            fields: fields,
            files: files
        }));
    });
    form.on('end', function() {
        callback(null);
    });
}
function UpdateCode(callback) {
    var obj = fs.readFileSync('app.json', 'utf8');
    var object = JSON.parse(obj);
    var data2 = [];
    for (var j = 0; j < object.length; j++) {
        if (object[j].value == "TEST") {
            data2.push(object[j]);
        }
    }
    console.log(data2);
    fs.appendFile('final.json', JSON.stringify(data2), function(err) {
        if (err) throw err;
        console.log('Saved!');
        callback(null);
    });
}
async.series([
    downloadUpload,
    UpdateCode
], function(err, result) {
    if (err) throw err;
    else {
        console.log(result);
    }
});
Also run nodemon -e js app.js; otherwise nodemon will restart the program as soon as the JSON file is uploaded.
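If you prefer, nodemon can also be told to skip the generated files directly, e.g. nodemon --ignore app.json --ignore final.json app.js (assuming an otherwise default nodemon setup); either approach prevents the restart loop when the JSON files are written.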
I download a zip file from an S3 bucket, then extract the zip file, and finally upload one file back to the S3 bucket in a Lambda function using Node.js. But I am getting this error:
Error: EROFS: read-only file system, open './tmp/test.zip'
"Process exited before completing request"
exports.handler = function (callback) {
    downloadZipFile(params, downloadPath, function (err) {
        if (err) {
            callback(err);
        } else {
            processZipFile(downloadPath, function (err) {
                if (err) {
                    callback(err);
                } else {
                    callback(null);
                }
            });
        }
    });
};
function downloadZipFile(params, downloadPath, callback) {
    const file = fs.createWriteStream(downloadPath);
    s3.getObject(params)
        .on('httpData', function (chunk) {
            file.write(chunk);
        })
        .on('success', function () {
            callback(null);
        })
        .on('error', function (err) {
            callback(err);
        })
        .on('complete', function () {
            file.end();
        })
        .send();
}
function processZipFile(filePath) {
    const stats = fs.statSync(filePath);
    const fileSizeInBytes = stats.size;
    if (fileSizeInBytes > 0) {
        var srcPath = filePath;
        var destPath = "./tmp";
        targz.decompress({
            src: srcPath,
            dest: destPath
        }, function (err) {
            if (err) {
                console.log(err);
            } else {
                console.log("Done!");
                UploadFile();
            }
        });
    }
}
function UploadFile() {
    var body = fs.createReadStream('./tmp/SampleFile.txt');
    var srcfileKey = "SampleFile.txt";
    // Upload the stream
    var s3obj = new AWS.S3({ params: { Bucket: bucketName, Key: srcfileKey } });
    s3obj.upload({ Body: body }, function (err, data) {
        if (err) {
            console.log("An error occurred", err);
        }
        console.log("Uploaded the file at", data.Location);
    });
}
You need to change the file path to just /tmp instead of ./tmp. Lambda only allows you to write to the /tmp directory.
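A minimal sketch of the affected paths with that change applied; only the directory prefix differs from the question's code (path is the Node core module):

const path = require('path');

// Download into Lambda's writable scratch space instead of './tmp'.
const downloadPath = path.join('/tmp', 'test.zip');

// In processZipFile: extract into /tmp as well.
var destPath = '/tmp';

// In UploadFile: read the extracted file back from /tmp.
var body = fs.createReadStream(path.join('/tmp', 'SampleFile.txt'));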
I'm resizing an image using the gm library, but it creates an empty image. I have tried different ways of writing the result, but every time I get an empty image.
gm(readStream)
    .size({ bufferStream: true }, function(err, size) {
        this.resize(100, 100, '%')
            .stream()
            .on('end', function() {
                console.log('done');
            })
            .on('error', function(err) {
                console.log(err);
            })
            .pipe(writeStream);
    });
I also tried this way, but the result was the same:
gm(srcPath)
    .resize(100, 100)
    .stream(function(err, stdout, stderr) {
        if (err) {
            console.log('ERRoor:', err);
        }
        // ws = fs.createWriteStream( output );
        i = [];
        stdout.on('data', function(data) {
            // console.log('data');
            i.push(data);
        });
        stdout.on('close', function() {
            console.log('close');
            var image = Buffer.concat(i);
            writeStream.write(image.toString('base64'), 'base64');
            writeStream.end();
        });
    });
And this way too:
gm(srcPath)
    .resize(100, 100)
    .write(writeStream, function (err) {
        var thumbUrl, error;
        if (err) {
            error = err;
        } else {
            thumbUrl = thumbPath.replace(/\\/g, '/');
            thumbUrl = thumbUrl.replace(/^\.\/public/, '');
        }
        callback(error, thumbUrl);
    });
This last way returns an empty object ({}) as the error every time!
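For what it's worth, gm's documented write() expects a destination file path rather than a write stream, which may explain the error object in the last attempt. A hedged sketch under that assumption, reusing srcPath and thumbPath from the question and assuming GraphicsMagick/ImageMagick is installed:

var gm = require('gm');

gm(srcPath)
    .resize(100, 100)
    .write(thumbPath, function (err) { // write() takes a path, not a stream
        if (err) {
            return console.error(err);
        }
        console.log('thumbnail written to', thumbPath);
    });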
Users upload files to my Express app. I need to calculate the hash of the uploaded file and then write the file to disk, using the calculated hash as the filename. I am trying to do it with the following code:
function storeFileStream(file, next) {
    createFileHash(file, function(err, hash) {
        if (err) {
            return next(err);
        }
        var fileName = path.join(config.storagePath, hash),
            stream = fs.createWriteStream(fileName);
        stream.on('error', function(err) {
            return next(err);
        });
        stream.on('finish', function() {
            return next();
        });
        file.pipe(stream);
    });
}

function createFileHash(file, next) {
    var hash = crypto.createHash('sha1');
    hash.setEncoding('hex');
    file.on('error', function(err) {
        return next(err);
    });
    file.on('end', function(data) {
        hash.end();
        return next(null, hash.read());
    });
    file.pipe(hash);
}
The problem is that after I calculate the file hash, the written file's size is 0. What is the best way to solve this task?
Update
According #poke suggestion I try to duplicate my stream. Now my code is:
function storeFileStream(file, next) {
    var s1 = new pass;
    var s2 = new pass;
    file.pipe(s1);
    file.pipe(s2);
    createFileHash(s1, function(err, hash) {
        if (err) {
            return next(err);
        }
        var fileName = path.join(config.storagePath, hash),
            stream = fs.createWriteStream(fileName);
        stream.on('error', function(err) {
            return next(err);
        });
        stream.on('finish', function() {
            return next();
        });
        s2.pipe(stream);
    });
}

function createFileHash(file, next) {
    var hash = crypto.createHash('sha1');
    hash.setEncoding('hex');
    file.on('error', function(err) {
        return next(err);
    });
    file.on('end', function(data) {
        hash.end();
        return next(null, hash.read());
    });
    file.pipe(hash);
}
The problem with this code is that the end and finish events are not emitted. If I comment out file.pipe(s2);, the events are emitted, but then I am back to my original problem.
This code fixes the problem:
var s1 = new passThrough,
    s2 = new passThrough;
file.on('data', function(data) {
    s1.write(data);
    s2.write(data);
});
file.on('end', function() {
    s1.end();
    s2.end();
});
The correct and simple way should be as follows: we should resume the PassThrough streams.
function storeFileStream(file, directory, version, reject, resolve) {
    const fileHashSource = new PassThrough();
    const writeSource = new PassThrough();
    file.pipe(fileHashSource);
    file.pipe(writeSource);

    // this is the key point, see https://nodejs.org/api/stream.html#stream_three_states
    fileHashSource.resume();
    writeSource.resume();

    createFileHash(fileHashSource, function(err, hash) {
        if (err) {
            return reject(err);
        }
        const fileName = path.join(directory, version + '_' + hash.slice(0, 8) + '.zip');
        const writeStream = fs.createWriteStream(fileName);
        writeStream.on('error', function(err) {
            return reject(err);
        });
        writeStream.on('finish', function() {
            return resolve();
        });
        writeSource.pipe(writeStream);
    });
}

function createFileHash(readStream, next) {
    const hash = crypto.createHash('sha1');
    hash.setEncoding('hex');
    hash.on('error', function(err) {
        return next(err);
    });
    hash.on('finish', function(data) {
        return next(null, hash.read());
    });
    readStream.pipe(hash);
}
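For context, a small usage sketch that wraps this callback-style helper in a Promise; storeFile is a hypothetical name, and the arguments in the commented call are placeholders:

// Hypothetical wrapper so the helper can be awaited.
function storeFile(file, directory, version) {
    return new Promise(function (resolve, reject) {
        storeFileStream(file, directory, version, reject, resolve);
    });
}

// e.g. inside an async request handler (placeholder arguments):
// await storeFile(uploadStream, config.storagePath, 'v1');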
You could use the async module (not tested but should work):
async.waterfall([
    function(done) {
        var hash = crypto.createHash('sha1');
        hash.setEncoding('hex');
        file.on('error', function(err) {
            done(err);
        });
        file.on('end', function(data) {
            hash.end();
            done(null, hash.read());
        });
        file.pipe(hash);
    },
    function(hash, done) {
        var fileName = path.join(config.storagePath, hash),
            stream = fs.createWriteStream(fileName);
        stream.on('error', function(err) {
            done(err);
        });
        stream.on('finish', function() {
            done(null);
        });
        // Note: as discussed above, `file` has already been drained by the hashing
        // step at this point; in practice it would need to be duplicated (e.g. with
        // PassThrough streams) before being piped again.
        file.pipe(stream);
    }
], function (err) {
    console.log("Everything is done!");
});