I'm trying to create a Node.js HTTP file streaming server. I want to do the following:
sender --HTTP POST--> Node.js HTTP Server <--HTTP GET-- receiver
I have this already working with the following code:
var app = express();
// ...
app.post('/upload', function (req, res) {
  // Get the `req` and `res` arguments of the pending HTTP GET request
  var downloadRequest = getDownloadClientRequest();

  // parse a file upload
  var form = new formidable.IncomingForm();

  form.onPart = function (part) {
    if (!part.filename) {
      // let formidable handle all non-file parts
      form.handlePart(part);
      return;
    }

    downloadRequest.res.writeHead(200, {
      'Content-Disposition': 'attachment; filename=' + part.filename
    });

    part.on('data', function (data) {
      downloadRequest.res.write(data);
    });

    part.on('end', function () {
      downloadRequest.res.end();
    });
  };

  form.parse(req, function (err, fields, files) {
    res.writeHead(200);
    res.write('Successfully streamed');
    res.end();
  });
});

app.get('/download', function (req, res) {
  storeDownloadClientRequest(req, res);
});
What obviously happens is that if the uploading client is much faster than the downloading client, the upload completes very fast and my Node.js process keeps the whole file in memory until the downloading client finishes it.
How can I throttle the uploader's speed? I guess I can pause its stream every now and then, but how do I know whether the downloader's stream/buffer is filling up?
PS: I throttled the downloading client with wget --limit-rate=100k, if it helps.
part.pipe(downloadRequest.res) should automatically handle pausing and resuming when the receiver's buffer is full (backpressure).
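As a minimal sketch, the onPart handler from the question could be reduced to a pipe (reusing the getDownloadClientRequest helper defined in your own code):

form.onPart = function (part) {
  if (!part.filename) {
    // let formidable handle all non-file parts
    form.handlePart(part);
    return;
  }

  var downloadRequest = getDownloadClientRequest(); // helper from the question above

  downloadRequest.res.writeHead(200, {
    'Content-Disposition': 'attachment; filename=' + part.filename
  });

  // pipe() propagates backpressure: when res.write() would return false,
  // the part stream is paused and resumed again on the response's 'drain' event
  part.pipe(downloadRequest.res);
};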
Related
I can display the file contents in the browser, but the page only shows changes after a reload. I need the browser to show changes to the text file without reloading it every time.
var http = require('http');
var fs = require('fs');
var filepath = "C:/Projects/Test/doc.txt";

var server = http.createServer(function (req, resp) {
  if (req.url === "/read") {
    fs.readFile("C:/Projects/Test/doc.txt", function (error, pgResp) {
      if (error) {
        resp.writeHead(404);
        resp.write('Contents you are looking are Not Found');
      } else {
        var file = fs.readFileSync(filepath);
        fs.watchFile(filepath, function () {
          console.log("file changed");
          file = fs.readFileSync(filepath);
          resp.writeHead(200, { 'Content-Type': 'text/html' });
        });
      }
      resp.end(file);
    });
  }
});

server.listen(5050);
console.log('Server Started listening on 5050');
You can use a socket connection (e.g. socket.io) to maintain continuous data transfer between server and client. It's very easy to get started with.
However, if your goal is only this one feature and you have no other use for sockets in your app, you can instead poll an API at some time interval that keeps checking the file content.
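As a rough sketch of the socket approach (the port, file path, and the 'fileChanged' event name are assumptions taken from the question, and socket.io is assumed as the transport):

var fs = require('fs');
var http = require('http');

var server = http.createServer(); // serve your HTML page here as well
var io = require('socket.io')(server);
var filepath = 'C:/Projects/Test/doc.txt';

// When the file changes on disk, push the new contents to every connected page
// so the browser can update the DOM without a reload.
fs.watchFile(filepath, function () {
  fs.readFile(filepath, 'utf8', function (err, contents) {
    if (err) return console.error(err);
    io.emit('fileChanged', contents);
  });
});

server.listen(5050);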
I'm trying to set up some socket.io communication. The communication between my server (app.js, running on a Raspberry Pi) and a website (public/index.html) works fine. Now I want to expand it so that when app.js receives a call from index.html, it forwards it to another Node.js script (bed.js) that will run on another Raspberry Pi. I tried the npm module socket.io-client, but apparently it can only receive.
Edit: the problem has narrowed down to the setRGB part; there it won't emit.
Edit 2: when I receive setRGB, I emit setRGBClient, but that can only be received in bed.js, not in index.html. That's where my problem lies: I need to share the connections or force it onto another connection, but I have no clue how to fix that.
app.js:
let http = require('http').createServer(handler); // require http server, and create server with function handler()
let fs = require('fs');                           // require filesystem module
let io = require('socket.io')(http);              // require socket.io module and pass the http object (server)
let delay = require('delay');

console.log('Define each color from RGB Strip light.');

http.listen(8080); // listen to port 8080

function handler(req, res) { // create server
  fs.readFile(__dirname + '/public/index.html', function (err, data) { // read file index.html in public folder
    if (err) {
      res.writeHead(404, {'Content-Type': 'text/html'}); // display 404 on error
      return res.end("404 Not Found");
    }
    res.writeHead(200, {'Content-Type': 'text/html'}); // write HTML
    res.write(data); // write data from index.html
    return res.end();
  });
}

io.sockets.on('connection', function (socket) { // WebSocket connection
  socket.on("test", function () {
    console.log("sampletext");
  });
  socket.on("setRGB", function (data) {
    socket.emit("setRGBClient", data);
    console.log(data);
    console.log("test");
  });
});
bed.js:
let socket = require('socket.io-client')('http://localhost:8080');

let lightstate = false;
let stayOff = false;
let fadeState = false;

console.log("check");
socket.emit("test");

socket.on("setRGBClient", function (data) {
  console.log(data);
});
You can just broadcast setRGBClient:
socket.broadcast.emit("setRGBClient", data);
I guess this is a learning exercise. Otherwise I’d caution against socket.io for such applications.
However, I can only see the subscription for 'setRGB', not the part that emits it.
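As a sketch, the adjusted setRGB handler in app.js would look something like this (socket.broadcast.emit reaches every connected socket except the sender; io.emit would include the sender as well):

io.sockets.on('connection', function (socket) {
  socket.on("setRGB", function (data) {
    // broadcast: every connected client except the sender, so bed.js receives it
    socket.broadcast.emit("setRGBClient", data);
    // or: io.emit("setRGBClient", data); // every client, including the sender
    console.log(data);
  });
});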
Currently, I have a lot of Linux-based clients downloading firmware updates from my web server.
After the client has successfully downloaded the firmware file, my server needs to execute a few scripts, which logs in to the client and performs some tasks.
Is there a way for a node server to keep track of the clients download progress, so I can execute the needed scripts once the file has been downloaded?
OK, so I will try.
If you serve your firmware as static files through Apache/Nginx via a direct URL call, you don't get the progress inside your Node.js script.
If you serve your files as a stream inside an Express controller, you can listen to the progress. Look at this answer: https://stackoverflow.com/a/42273080/3168392
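A rough sketch of that streaming approach, assuming an Express app and a firmware file on disk (the route path and directory layout are made up for illustration):

var fs = require('fs');
var path = require('path');

app.get('/firmware/:name', function (req, res) {
  var filePath = path.join(__dirname, 'firmware', req.params.name); // hypothetical layout
  var stat = fs.statSync(filePath);
  var sent = 0;

  res.setHeader('Content-Length', stat.size);

  var stream = fs.createReadStream(filePath);

  stream.on('data', function (chunk) {
    sent += chunk.length; // bytes handed to the response so far
    console.log('Progress:', Math.round((sent / stat.size) * 100) + '%');
  });

  stream.on('end', function () {
    // the whole file has been written to the response;
    // kick off the post-download scripts here
  });

  stream.pipe(res);
});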
You will have to use a socket connection to make sure the Node server gets updates from the client about the progress of the file being downloaded.
Something like this:
CLIENT_CODE
var socket = io('http://localhost');

socket.on('connect', function () {});

// the server is assumed to emit 'data_receiving' with the current download percentage
socket.on('data_receiving', function (percentage) {
  if (percentage === 100) {
    socket.emit('downloadCompleted', { fileName: 'test' });
  } else {
    // do nothing
  }
});
SERVER_CODE:
io.on('connection', function (socket) {
  // listen to the event from the client
  socket.on('downloadCompleted', function (data) {
    connect_to_client();   // placeholder: log in to the client
    do_some_operation();   // placeholder: run your scripts
    socket.emit('ALLDONE', { some_data: 'done' }); // payload shape is up to you
  });
});
I guess this helps; you can use this post for reference.
If you just want to run some code when a download has finished, you can use on-finished:
const onFinished = require('on-finished');

app.use((req, res, next) => {
  onFinished(res, (err, res) => {
    // ...log some data, perform some housekeeping, etc...
  });
  next();
});
As is, it will attach a "finished" listener to all responses, which is probably not what you want. Since this is plain Express middleware, you can attach it to specific routes instead (but how depends on how exactly the files are being served).
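For instance, attached to a single hypothetical firmware route instead of globally (the route path and directory are assumptions for illustration):

app.get('/firmware/:name', (req, res) => {
  onFinished(res, (err) => {
    console.log('Finished sending %s to %s', req.params.name, req.ip);
    // run the post-download scripts for this client here
  });
  res.sendFile(req.params.name, { root: './firmware' });
});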
I found some code that seems to fit my needs.
With the code below, I can detect the progress of a user's download from the server side and fire an event once the file transfer completes.
var http = require("http");
var fs = require("fs");
var filename = "./large-file";

var serv = http.createServer(function (req, res) {
  var sent = 0;
  var stat = fs.statSync(filename);

  res.setHeader('Content-disposition', 'attachment; filename=large-file.iso');
  res.setHeader('Accept-Ranges', 'bytes');
  res.setHeader('Keep-Alive', 'timeout=5, max=100');
  res.writeHead(200, { "Content-Length": stat.size });

  var fReadStream = fs.createReadStream(filename, { highWaterMark: 128 * 1024 });

  fReadStream.on('data', function (chunk) {
    sent += chunk.length;
    console.log('Sent', sent, 'of', stat.size);
    if (!res.write(chunk)) {
      // the response buffer is full: pause until it drains
      fReadStream.pause();
    }
  });

  fReadStream.on('end', function () {
    console.log('Transfer complete.');
    res.end();
  });

  res.on("drain", function () {
    fReadStream.resume();
  });
});

serv.listen(3001);
I am trying to prompt a download of a PDF generated on the server using Node.js. Currently I can save it to the server but not force a download.
I am using npm html-pdf and currently it is saving to the server with this code.
pdf.create(htmlbody).toStream(function (err, stream) {
  console.log(stream);
  if (err) return next(err);
  stream.pipe(fs.createWriteStream('./foo.pdf'));
  res.status(200);
  res.send('Success');
});
How can I make it force a download?
I tried setting headers like
res.setHeader('content-type', 'application/pdf');
res.setHeader("Content-Disposition", "attachment; filename="+pdfName+";");
I am testing on localhost, but I don't think that is the issue.
You don't have to save it to the server (unless that is a business logic requirement). What you can do is simply this:
pdf.create(htmlbody).toStream(function (err, stream) {
  if (err) return next(err);
  res.setHeader('Content-Type', 'application/pdf');
  res.setHeader('Content-Disposition', 'attachment; filename=' + pdfName + ';');
  stream.pipe(res);
});
Basically, all you need to do is set the appropriate headers and then pipe the stream you get from html-pdf to your res stream.
If, however, you need to save the PDF on the server first, then you'll have to write it to the file system and, once that's done, read it back and send it over the response stream.
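A sketch of that save-then-send variant might look like this, using html-pdf's toFile API (the file path reuses './foo.pdf' from the question, and pdfName/next are assumed to exist as in your route):

var fs = require('fs');

pdf.create(htmlbody).toFile('./foo.pdf', function (err, result) {
  if (err) return next(err);
  // result.filename is the path html-pdf wrote the PDF to
  res.setHeader('Content-Type', 'application/pdf');
  res.setHeader('Content-Disposition', 'attachment; filename=' + pdfName);
  fs.createReadStream(result.filename).pipe(res);
});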
Edit: Here's the code snippet with a full working example (running on Node v4.0.0).
var http = require('http');
var pdf = require('html-pdf');

const PORT = 8765;

var server = http.createServer((req, res) => {
  var html = '<html><head><title>PDF Test</title></head><body><h1>Welcome</h1><p>This is a test to see if we can get <b>PDF generation</b> to work on the server side.</p><p>PDF files can be very tricky to do on the server so this will take a bit of <u>hit and trial</u> before we can be confident it works fine.</p></body></html>';

  pdf.create(html).toStream((err, stream) => {
    if (err) {
      console.error(err);
      res.statusCode = 500; // plain http responses have no res.status() helper
      res.end(JSON.stringify(err));
      return;
    }
    res.setHeader('Content-Type', 'application/pdf');
    res.setHeader('Content-Disposition', 'attachment; filename=test-file.pdf;');
    stream.pipe(res);
  });
});

server.listen(PORT, () => {
  console.log('Server listening on Port: %s', PORT);
});
Save the above as something like pdf.js, run it with node (node pdf.js), and open http://localhost:8765/. This should trigger a download in your browser.
I'm new to Node.js and I'm trying to get a console message to print when the request ends. I go to localhost:8080 and also localhost:8080/, but nothing prints in the terminal. Any idea why? I'm doing this because when I try to run the demo at http://tutorialzine.com/2012/08/nodejs-drawing-game/ the terminal says the socket started, but it does not render the index.html page, and I can't figure out why this code to serve static files is not working for me.
var static = require('node-static');

//
// Create a node-static server instance to serve the './public' folder
//
// var file = new(static.Server)('./');

require('http').createServer(function (request, response) {
  request.addListener('end', function () {
    console.log("ended");
  });
}).listen(8080);
It seems that you are using Node.js 0.10.x, and in the new version you have to resume readable streams to make them emit the 'end' event:
require('http').createServer(function (request, response) {
  var body = '';
  request.setEncoding('utf8');
  request.on('readable', function () {
    body += this.read() || ''; // read() can return null when no data is buffered
  });
  request.on('end', function () {
    console.log('ended');
    console.log('Body: ' + body);
  });
  request.resume();
}).listen(8080);
You should call node-static's serve() inside the request handler so that index.html actually gets served:
var static = require('node-static');
var fileServer = new static.Server('./');

require('http').createServer(function (request, response) {
  fileServer.serve(request, response); // add this line
  request.addListener('end', function () {
    console.log("ended");
  });
}).listen(8080);