Piping STDOUT of spawn process into superagent upload - node.js

I'm struggling to work out how to take the output of a spawned child process and feed it into a multipart MIME upload.
Here is what I have, which as far as I can tell should work:
var request = require('superagent');
var spawn = require('child_process').spawn;

var spawned = spawn('echo', ['hello', 'world']);

request.post('http://localhost/api/upload')
  .attach('file', spawned.stdout)
  .end(function(res) {
    console.log("DONE", res);
  });
Unfortunately this fails with the rather unhelpful Error: socket hang up from Node.

You're quite close!
Here's the final version which does what you want:
var exec = require('child_process').exec;
var request = require('superagent');

exec('echo hello world', function(err, stdout, stderr) {
  request.post('http://localhost/api/upload')
    .attach('file', stdout)
    .end(function(res) {
      console.log('DONE', res);
    });
});
I'm using exec here because its callback hands you the buffered stdout and stderr output, which seems to be what you want.
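If you'd rather keep spawn, a sketch of an alternative (assuming a superagent version whose .attach() accepts a Buffer plus a filename, and whose .end() callback has the (err, res) form) is to buffer the child's stdout first and attach the result:
var request = require('superagent');
var spawn = require('child_process').spawn;

var spawned = spawn('echo', ['hello', 'world']);
var chunks = [];

spawned.stdout.on('data', function (chunk) {
  chunks.push(chunk);
});

spawned.stdout.on('end', function () {
  // A multipart part needs a complete body, so collect the output into
  // a single Buffer first ('output.txt' is just an illustrative filename).
  request.post('http://localhost/api/upload')
    .attach('file', Buffer.concat(chunks), 'output.txt')
    .end(function (err, res) {
      console.log('DONE', err || res.status);
    });
});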

Related

How to enable access https from curl command in nodejs

My bash emulator can run the curl command correctly, but when I call it from Node.js with the child_process module, I get an error referring to Protocol "'https" not supported or disabled in libcurl.
When I run "curl 'https://ehire.51job.com/Candidate/SearchResumeNew.aspx'" directly, I get the page content.
Here's the Node.js code:
var child_process = require("child_process");
var curl = "curl 'https://ehire.51job.com/Candidate/SearchResumeNew.aspx'";
var child = child_process.exec(curl, (err, stdout, stderr) => {
  console.log(stdout);
  console.log(err);
  console.log(stderr);
});
Referencing this, after swapping the double and single quotes, the following code works. The Protocol "'https" in the error shows that the shell passed the single quote through to curl literally (cmd.exe does not treat single quotes as quoting characters), so the URL has to be wrapped in double quotes instead:
var child_process = require("child_process");
var curl = 'curl "https://ehire.51job.com/Candidate/SearchResumeNew.aspx"';
var child = child_process.exec(curl, (err, stdout, stderr) => {
  console.log(stdout);
  console.log(err);
  console.log(stderr);
});
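Another way to sidestep the quoting problem entirely (a sketch, assuming curl is on your PATH) is execFile, which passes the URL as a real argument so no shell ever parses it:
var child_process = require("child_process");

child_process.execFile("curl",
  ["https://ehire.51job.com/Candidate/SearchResumeNew.aspx"],
  (err, stdout, stderr) => {
    // No shell is involved, so the URL arrives at curl unmodified.
    console.log(stdout);
    console.log(err);
    console.log(stderr);
  });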

Execute a cgi script from a Node Express server

So I have a website with a member area. That member area is managed through a payment processor called CCBill. In order for CCBill to manage a password file on my server, they need to execute a cgi script.
Right now, I've looked at the cgi and serve-cgi npm modules, but I'm not sure whether they can do what I need. Can anyone help me with this?
My Express Router get function:
router.get('*', function(req, res, next) {
  console.log('in');
  var mPath = path.join(appRoot, '/cgi-bin' + req.params[0]);
  console.log(mPath);
  const execFile = require('child_process').execFile;
  const child = execFile(mPath, function(error, stdout, stderr) {
    if (error) {
      console.log(error);
      throw error;
    }
    console.log(stdout);
  });
});
Scripts (and other executables) can be invoked with the exec() function from the exec npm package:
var exec = require('exec');

exec('/path/to/your/script',
  function (stderr, stdout, errorCode) {
    // You get here when the executable completes
  });
EDIT
With newer Node.js versions that exec package is deprecated, so it's better to use child_process.execFile():
const execFile = require('child_process').execFile;
const child = execFile('/path/to/your/script', [parameters], (error, stdout, stderr) => {
  // You get here when the executable completes
});

Node pipe stops working

My client sends an image file to the server. It works 5 times and then suddenly stops. I am pretty new to streams and pipe, so I am not sure what I am doing wrong.
Server code
var http = require('http');
var fs = require('fs');
var i = 0;

http.createServer(function(req, res) {
  console.log("File received");
  // This opens up the writable stream to the output file
  var name = "./test" + i + ".jpg";
  var writeStream = fs.createWriteStream(name);
  // This pipes the POST data to the file
  req.pipe(writeStream);
  req.on('end', function () {
    console.log("File saved");
    i++;
  });
  // This is here in case any errors occur
  writeStream.on('error', function (err) {
    console.log(err);
  });
}).listen(3000);
Client code
var request = require('request');
var fs = require('fs');

setInterval(function () {
  var readStream = fs.createReadStream('./test.jpg');
  readStream.on('open', function () {
    // This pipes the file into a POST request to the server
    readStream.pipe(request.post('http://192.168.1.100:3000/test'));
    console.log("Send file to server");
  });
}, 1000);
Behaves like a resource exhaustion issue. Not sure which calls throw errors and which just return. Does the server connect on the 6th call? Does the write stream open? Does the pipe open?
Try ending the connection and closing the pipe after the image is saved. Maybe close the write stream too; I don't remember whether Node garbage-collects file descriptors.
I had to do the following on the server side to make this work:
res.statusCode = 200;
res.end();
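That fits the symptom: every upload the server never answers leaves a client socket open, and older Node versions defaulted http.globalAgent.maxSockets to 5 per host, which would explain exactly five successes. In the server code above, the fix belongs in the 'end' handler:
req.on('end', function () {
  console.log("File saved");
  i++;
  // Completing the response releases the client's socket
  // so the next upload can go through.
  res.statusCode = 200;
  res.end();
});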

Proper way to pipe buffers in Node JS

Take a look:
var Client = require('ftp');
var fs = require('fs');

var c = new Client();
c.on('ready', function() {
  c.get('foo.txt', function(err, stream) {
    if (err) throw err;
    stream.once('close', function() { c.end(); });
    stream.pipe(fs.createWriteStream('foo.local-copy.txt'));
  });
});

// connect to localhost:21 as anonymous
c.connect();
This piece of code is from https://www.npmjs.org/package/ftp. Basically it opens a read stream and pipes it into a write stream; at the end it closes the connection from the source.
Does the pipe method close the target stream after the piped (source) stream is closed? I couldn't find this in the API documentation.
I ran some tests from which I can conclude that it does, but I am not sure.
The destination stream is closed when the source emits an end event. This is documented in Stream.pipe:
By default end() is called on the destination when the source stream emits end, so that destination is no longer writable.
This allows calls of the form:
var http = require('http'),
    fs = require('fs');

http.createServer(function (req, res) {
  fs.createReadStream('path/to/file').pipe(res);
}).listen(3000);
If end wasn't called on the response object, the request would time out.
This would make the request time out:
fs.createReadStream('path/to/file').pipe(res, {end: false});
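For completeness, a sketch of where {end: false} is actually useful: appending several sources to one destination, where only the last pipe is allowed to close it (the file names here are hypothetical):
var fs = require('fs');

var out = fs.createWriteStream('combined.txt');
var first = fs.createReadStream('part1.txt');

// Keep the destination open when the first source finishes.
first.pipe(out, { end: false });
first.on('end', function () {
  // The second pipe uses the default behavior, so it ends `out`.
  fs.createReadStream('part2.txt').pipe(out);
});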

child_process module not returning directory

I'm doing this tutorial on NodeJS: http://www.nodebeginner.org.
Here is the code I find confusing:
var exec = require("child_process").exec;
function start(response) {
console.log("Request handler 'start' was called.");
var content = "empty";
exec("ls -lah", function(error, stdout, stderr) {
response.writeHead(200, {"Content-type":"text/plain"});
response.write(stdout);
console.log(stdout);
response.end();
});
}
I have a router that passes the HTTP response to a request handler, which calls the start function. That part works fine. However, stdout comes back empty, both in the browser and in the console. I understand that ls -lah is supposed to list the files in the current directory, and I have 5 other files in my directory, but nothing is returned. Any ideas on what is happening here?
