Unable to send a response while multipart form is being streamed - node.js

I'm using nodejs combined with sailsjs and skipper to upload files to a server.
I have a use case where I need to check the file length and send a 413 error if the upload exceeds a certain file size. However, when I try to send any kind of response, nothing happens. What exactly is happening here? Does the form upload need to be completed before a response can be sent?
// Build a skipper receiver (an object-mode Writable) that streams each
// uploaded file to disk under `directory`, aborting any file whose byte
// count exceeds `maxFileSize` and unlinking the partial output.
var getReceiver = function () {
    var receiver = new Writable({objectMode: true});
    receiver._write = function (file, enc, done) {
        file.fd = directory + file.fd;
        var fileStream = fs.createWriteStream(file.fd);
        // `done` must fire exactly once per file. The original could call it
        // several times: once per oversized 'data' chunk, again from the
        // unlink callback, and again on 'finish'/'error' — which corrupts
        // skipper's upload state. Guard it.
        var settled = false;
        var finish = function (err) {
            if (settled) { return; }
            settled = true;
            done(err);
        };
        fileStream.on('error', function (err) {
            finish(err);
        }).once('finish', function () {
            finish();
        });
        var fileLength = 0;
        file.on('data', function (chunk) {
            fileLength = fileLength + chunk.length;
            if (fileLength > maxFileSize) {
                var err = new Error('upload exceeds maxFileSize.');
                file.unpipe();
                // Best-effort cleanup of the partial file; ENOENT is fine
                // (nothing may have been flushed to disk yet).
                fs.unlink(file.fd, function (fsErr) {
                    if (fsErr && (typeof fsErr !== 'object' || fsErr.code !== 'ENOENT')) {
                        return finish([err].concat([fsErr]));
                    }
                });
                return finish(err);
            }
        });
        file.pipe(fileStream);
    };
    return receiver;
};
// Hand the multipart stream to the custom receiver; this callback only
// fires after streaming finishes, which is why a response (e.g. the 413)
// cannot be sent while the form is still being streamed.
req.file('file').upload(getReceiver(), function (err, uploadedFiles) {
if (err) {
// Any error passed to done() in the receiver surfaces here.
return res.badRequest(err);
}
// Do stuff
}
// NOTE(review): the closers below balance scopes outside this excerpt.
});
});
});

Related

How to use a nested loop in request.head - Node JS

I am trying to download images from Parse and save them to my local machine. I have this piece of code that does the job for me. It works well when there is only one request, but when I put it in a loop, it doesn't hold good.
`for(var i = 0; i < 5; i++) {
console.log(i);//to debug
var filename = results_jsonObj[i].imageFile.name;
var uri = results_jsonObj[i].imageFile.url;
request.head(uri, function(err, res, body){
if (err){
console.log(err);
console.log(item);
return;
}else {
console.log(i); //to debug
var stream = request(uri);
stream.pipe(
fs.createWriteStream("images/"+filename)
.on('error', function(err){
callback(error, filename);
stream.read();
})
)
}
});
}`
Irrespective of the loop condition I have, only one image downloads to the mentioned directory.
Below is the output.
The input is from a Json file and I have the request, fs, parse module included in the node js program.
Any help on how to go about this?
I have got this fixed now. As advised in the comments it was async which helped me do the trick.
// Download every 'imageFile' entry of each record using async.forEachOf so
// the per-item callback serializes completion reporting.
for(var i = 0; i < 900; i++) {
async.forEachOf(results_jsonObj[i], function(value, key, callback){
var image = {};
image.key = key;
image.value = value;
if(image.key == 'imageFile')
{
var filename = image.value.name;
var uri = image.value.url;
// console.log(filename, uri);
}
// NOTE(review): `uri`/`filename` are only assigned for the 'imageFile'
// key; for other keys request.head receives undefined.
request.head(uri, function(err, res, body){
if (err){
console.log(err);
// console.log(item);
return;
}else {
// console.log(i,res.headers['content-type']); //to debug
var stream = request(uri);
stream.pipe(
fs.createWriteStream("images/"+filename)
.on('error', function(err){
// NOTE(review): `error` is undefined here (the parameter is `err`).
callback(error, filename);
stream.read();
})
)
}
});
// NOTE(review): callback() fires before the HTTP request completes, so
// forEachOf finishes without waiting for the downloads.
callback();
}, function(err){
if (err) {
console.log('one of the api failed, the whole thing will fail now');
}
});
}

Get buffer from Rackspace download using pkgcloud

This may not be possible, but I am trying to return a buffer object of an image on Rackspace using the pkgcloud module, without having to write to the filesystem. I've seen this done before; however, both examples show piping the download to the file system.
function get() {
return new Promise(function (resolve, reject) {
_this._RackClient.download(options, function(err, results) {
if (err !== null) {
return reject(err);
console.log("Errow Downloading:", err);
}
resolve(buffer);
});
});
}
return get();
This is ideally how I would like it to work but there currently is not a body present in the request. Can I use a stream.passThrough() and return that similar to uploading a buffer?
.download() returns a Readable stream, so it should just be a matter of buffering that output. For example:
// Buffer the Readable returned by .download() into a single Buffer.
// NOTE(review): `resolve`/`reject` are assumed to be the executor arguments
// of an enclosing Promise (as in the question's get()) — confirm placement.
var stream = _this._RackClient.download(options);
var buf = [];
var nb = 0;            // running byte total so Buffer.concat can size output
var hadErr = false;
stream.on('data', function(chunk) {
    buf.push(chunk);
    nb += chunk.length;
}).on('end', function() {
    if (hadErr)
        return;
    switch (buf.length) {
        case 0:
            // Buffer.alloc replaces the deprecated/unsafe new Buffer(size).
            return resolve(Buffer.alloc(0));
        case 1:
            return resolve(buf[0]);
        default:
            return resolve(Buffer.concat(buf, nb));
    }
}).on('error', function(err) {
    hadErr = true;
    reject(err);
});

Limiting outside API requests

I'm trying to limit my use of an external API in my node.js code.
I've set up node rate limiter, but it doesn't seem to be working. I still hit 429's. What else should I be doing that I'm not?
var RateLimiter = require('limiter').RateLimiter; // Rate limits
var limiter = new RateLimiter(1, 2000); // one call every two seconds
// Rate-limited wrapper around the Riot HTTPS API.
// `options` is passed straight to https.request; results arrive via
// `cb(err, parsedBody)`.
self.riotAPI = function(options, cb){
    limiter.removeTokens(1, function() {
        https.request(options, function(response) {
            // Error handling
            response.on('error', function (e) {
                console.log(e);
            });
            var str = '';
            // Another chunk of data has been received, so append it to `str`
            response.on('data', function (chunk) {
                str += chunk;
            });
            // Parse and return the object
            response.on('end', function () {
                if(response.statusCode >= 400) {
                    var err = "HTTP response "+response.statusCode;
                    console.log(err);
                    // Bug fix: pass the actual message — the original passed
                    // the literal string "err", losing the status code.
                    cb(new Error(err), null);
                }
                else {
                    // NOTE(review): JSON.parse throws on a non-JSON body.
                    cb(null, JSON.parse(str));
                }
            });
        }).end();
    });
}
I switched to Bottleneck and got everything functioning as desired.
// Bottleneck-based variant. `lcb` releases the limiter slot, so it must be
// invoked on EVERY terminal path or the limiter eventually stalls.
self.riotAPI = function(options, cb){
    limiter.submit( function(lcb) {
        https.request(options, function(response) {
            // Error handling
            response.on('error', function (e) {
                console.log(e);
            });
            var str = '';
            // Another chunk of data has been received, so append it to `str`
            response.on('data', function (chunk) {
                str += chunk;
            });
            // Parse and return the object
            response.on('end', function () {
                if(response.statusCode >= 400) {
                    var err = "HTTP response "+response.statusCode;
                    console.log(err);
                    // If it's a 429, retry
                    if(response.statusCode == 429) {
                        console.log("retrying...");
                        self.riotAPI(options, cb);
                        // Bug fix: release this slot — the retry submitted
                        // its own job; the original leaked the slot here.
                        lcb();
                    }
                    // If not, fail
                    else {
                        // Bug fix: report the real message (the original
                        // passed the literal string "err").
                        cb(new Error(err), null);
                        lcb();
                    }
                }
                else {
                    cb(null, JSON.parse(str));
                    lcb();
                }
            });
        }).end();
    }, null);
}

Perform print operation on cups using Node.js

I would like to print documents through HTTP requests in Node.js. Is there any way to send print jobs to, and query, a CUPS server using Node.js? I found this project while exploring around — is it the only/correct way to do that?
You could use the shell to do so. I built a project some time ago where I needed to read certain hashtag from instagram and print the photos uploaded to IG with that hashtag using a raspberry pi and a photo printer.
// Drive the printer by shelling out to the CUPS command-line tools.
var fs = require('fs');
var exec = require('child_process').exec;

// Send the picture to the default printer.
exec("lp /path/to/somepic.jpg");

// Ask CUPS for the current print queue and dump what it says.
exec("lpq", function (err, out, errOut) {
    console.log('stdout: ' + out);
    console.log('stderr: ' + errOut);
    if (err !== null) {
        console.log('exec error: ' + err);
    }
});
The command lp /path/to/somepic.jpg sends /path/to/somepic.jpg to the default printer. The command lpq displays the printer queue. For better use, read the CUPS documentation.
Following snippet seems useful. Not tried it as I am no longer working on this problem!
It may be helpful for others. Original source: https://gist.github.com/vodolaz095/5325917
var ipp = require('ipp'); //get it from there - https://npmjs.org/package/ipp - $npm install ipp
var request = require('request'); //get it from there - https://npmjs.org/package/request - $npm install request
var fs = require('fs');
// Scrape the CUPS web interface for installed printers and invoke
// callback(error, printersUrls) with their page URLs. On a failed request,
// printersUrls is left undefined (callers should check `error` first).
function getPrinterUrls(callback) {
    // TODO: change this if CUPS runs on a different host.
    var CUPSurl = 'http://localhost:631/printers';
    request(CUPSurl, function (error, response, body) {
        var printersUrls;
        if (!error && response.statusCode == 200) {
            printersUrls = [];
            // Crude HTML scrape of the printers table — no proper parser.
            var printersMatches = body.match(/<TR><TD><A HREF="\/printers\/([a-zA-Z0-9-^"]+)">/gm);
            if (printersMatches) {
                printersMatches.forEach(function (row) {
                    var a = (/"\/printers\/([a-zA-Z0-9-^"]+)"/).exec(row);
                    if (a) {
                        printersUrls.push(CUPSurl + '/' + a[1]);
                    }
                });
            }
        }
        callback(error, printersUrls);
    });
}
// Submit `bufferToBePrinted` to `printer` over IPP, then poll the job every
// 2s (up to 50 tries) until it completes; on timeout, cancel the job.
// `callback(err, job)` fires once with the outcome.
// NOTE(review): `err` from Get-Printer-Attributes is never checked before
// reading printerStatus — a transport error would throw here; confirm.
function doPrintOnSelectedPrinter(printer, bufferToBePrinted, callback) {
printer.execute("Get-Printer-Attributes", null, function(err, printerStatus){
if(printerStatus['printer-attributes-tag']['printer-state']=='idle'){
//printer ready to work
//*/
printer.execute("Print-Job",
{
"operation-attributes-tag":{
"requesting-user-name":"nap",
"job-name":"testing"
},
"job-attributes-tag":{},
data:bufferToBePrinted
},
function (err, res) {
if (res.statusCode == 'successful-ok') {
var jobUri = res['job-attributes-tag']['job-uri'];
var tries = 0;
// Poll job state every 2 seconds until completed or tries exhausted.
var t = setInterval(function () {
printer.execute("Get-Job-Attributes",
{"operation-attributes-tag":{'job-uri':jobUri}},
function (err2, job) {
// console.log(job);
if (err2) throw err2;
tries++;
if (job && job["job-attributes-tag"]["job-state"] == 'completed') {
clearInterval(t);
// console.log('Testing if job is ready. Try N '+tries);
callback(null, job);//job is successfully printed!
}
if (tries > 50) {//todo - change it to what you need!
clearInterval(t);
printer.execute("Cancel-Job", {
"operation-attributes-tag":{
//"job-uri":jobUri, //uncomment this
//*/
"printer-uri":printer.uri, //or uncomment these two lines - one of the variants should work!!!
"job-id":job["job-attributes-tag"]["job-id"]
//*/
}
}, function (err, res) {
if (err) throw err;
console.log('Job with id '+job["job-attributes-tag"]["job-id"]+'is being canceled');
});
callback(new Error('Job is canceled - too many tries and job is not printed!'), null);
}
});
}, 2000);
} else {
callback(new Error('Error sending job to printer!'), null);
}
});
//*/
} else {
callback(new Error('Printer '+printerStatus['printer-attributes-tag']['printer-name']+' is not ready!'),null);
}
});
}
// Print `data` on every printer CUPS reports, invoking `callback` once per
// printer with that printer's result (see doPrintOnSelectedPrinter).
function doPrintOnAllPrinters(data, callback) {
    // Buffer.from replaces the deprecated (and unsafe) new Buffer(...).
    var b = Buffer.from(data, 'binary');
    getPrinterUrls(function (err, printers) {
        if (err) throw err;
        if (printers) {
            for (var i = 0; i < printers.length; i++) {
                var printer = ipp.Printer(printers[i]);
                doPrintOnSelectedPrinter(printer, b, callback);
            }
        } else {
            throw new Error('Unable to find printer. Do you have printer installed and accessible via CUPS?');
        }
    });
}
/*
Example of usage: print this project's package.json on every printer.
*/
fs.readFile('package.json', function (err, data) {
    // NOTE(review): the read error `err` is not checked here — `data` would
    // be undefined on failure; confirm whether that is acceptable.
    doPrintOnAllPrinters(data, function (printErr, job) {
        if (printErr) {
            console.error('Error printing');
            console.error(printErr);
            return;
        }
        console.log('Printed. Job parameters are: ');
        console.log(job);
    });
});

Stream a file from NodeJS

I am using weed-fs to store files and trying to stream file to client using below code
// Fetch the file from weed-fs into a temp file, then stream it to the HTTP
// client and delete the temp file afterwards.
var fileName = [__dirname, '/uploads/', req.params.id, ".png"].join('');
try {
var writeStream = fs.createWriteStream(fileName);
weedfs.read(req.params.id, writeStream);
writeStream.on("close", function () {
var readStream = fs.createReadStream(fileName);
// NOTE(review): this pipes the HTTP response INTO the file read stream,
// which is backwards — almost certainly readStream.pipe(response) was
// intended (and 'error'/'end' should be listened for on readStream).
response.pipe(readStream);
response.on('error', function (err) {
readStream.end();
});
response.on('end', function (err) {
removeFile(fileName);
});
});
} catch (e) {
// NOTE(review): createWriteStream errors are emitted asynchronously, so
// this try/catch will not observe them.
res.send(404);
}
But I am getting error Error: connect ECONNREFUSED
If I look at the weedfs read method, I found below code
read: function(fid, stream, cb) {
var ins = this;
if ((typeof stream == "function") && !cb) {
cb = stream;
stream = false;
}
this.find(fid, function(pub) {
if (pub[0]) {
if (stream) {
ins.http(pub[0]).pipe(stream);
......
Am I doing something wrong. Please help me to fix this issue.

Resources