Create audio buffer from server - node.js

Got to build a server from scratch that takes an audio file and sends it over a stream to multiple end-users. I have looked into streams and buffers and have some basic idea about sending text data via buffers to the client side, but in my scenario, I also need to start playing the audio when the buffer is received at the user end.

Figured out the way to send audio as streams:
var http = require('http'),
    fileSystem = require('fs'),
    path = require('path'),
    util = require('util');

// Serves read.mp3 as an audio/mpeg stream on port 2000.
http.createServer(function (request, response) {
    var filePath = 'read.mp3';
    var stat = fileSystem.statSync(filePath);

    response.writeHead(200, {
        'Content-Type': 'audio/mpeg',
        'Content-Length': stat.size
    });

    // pipe() manages backpressure and calls response.end() when the
    // file is exhausted. The original 'data' handler ignored
    // res.write()'s return value and never ended the response, so the
    // connection stayed open forever; the 16-byte highWaterMark also
    // throttled reads for no benefit.
    var readStream = fileSystem.createReadStream(filePath);
    readStream.pipe(response);

    console.log("Serving");
})
.listen(2000);

Related

get file response progress from server

Got a server that sends huge files ~50MB as response.
Using http and piping the file as follows:
var http = require('http'),
    fileSystem = require('fs'),
    path = require('path');

// Serves myfile.mp3 (resolved relative to this script) on port 2000.
http.createServer(function (request, response) {
    var filePath = path.join(__dirname, 'myfile.mp3');

    // Read the size up front so the client receives an accurate
    // Content-Length header before the stream starts.
    var fileInfo = fileSystem.statSync(filePath);

    response.writeHead(200, {
        'Content-Type': 'audio/mpeg',
        'Content-Length': fileInfo.size
    });

    // We replaced all the event handlers with a simple call to pipe(),
    // which forwards the file and ends the response automatically.
    fileSystem.createReadStream(filePath).pipe(response);
})
.listen(2000);
As expected, it takes a huge amount of time to send the response. But I want to have a responsive page when the request is being made.
Is there any way I can send the progress of the response as it is being sent and have it shown on the frontend?
Can I use socket.io?
What I have tried so far:
Tried calling the pipe event on the readStream but it was called only once.
// NOTE(review): the 'pipe' event fires once per pipe() call on the
// destination stream (when a source is attached), not once per chunk —
// so it cannot be used to report transfer progress. Listen for 'data'
// on the readable side, or track bytes written, instead.
.on("pipe", (src) => {
console.log("Something is piping into the writer.");
// assert.equal(src, reader);
})

Node - Uploaded jpg cannot be opened on server

UPDATE
Thanks to #robertklep and #vallo for pointing out that I was not parsing the multipart request properly.
Here's the updated server code with some re-worked sample code from Busboy:
'use strict';
// Require
var http = require('http');
var path = require('path');
var Busboy = require('busboy');
var fs = require('fs');

// Server: accepts multipart/form-data POSTs and stores each uploaded
// file under ../db/images/; any other request gets a 404.
var server = http.createServer(function(request, response) {
if (request.method === 'POST') {
var busboy = new Busboy({ headers: request.headers });
busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
// Name the stored file after the client-supplied filename, reduced
// to its base name so a malicious "../../x" cannot escape the
// images directory. (The original template literal was corrupted
// and never interpolated the filename at all.)
var safeName = path.basename(filename);
file.pipe(fs.createWriteStream(path.join('../db/images', safeName)));
});
busboy.on('finish', function() {
response.writeHead(200, { 'Connection': 'close' });
response.end("That's all folks!");
});
return request.pipe(busboy);
}
response.writeHead(404);
response.end();
});
server.listen(8000, '192.168.7.25', () => {});
I am trying to post a jpg to an endpoint but the resulting image cannot be opened :
The file “image_copy.jpg” could not be opened. It may be damaged or
use a file format that Preview doesn’t recognize.
Some background:
Everything (servers, storage) are being hosted locally
Have made a decision to only use native Node modules like http and fs due to storage constraints on a microcontroller board
Am using form-data as it eases the pain of multi-part forms and uploads as well as sets the correct request headers
Here is some sample code broken into two scripts:
Server
'use strict';
// Require
var http = require('http');

// Server: buffers the entire request body in memory and hands it to
// saveImage once the request ends.
// NOTE(review): the client sends multipart/form-data, so the body
// written here still contains multipart boundaries and part headers —
// which is why the saved jpg cannot be opened. The body must be parsed
// (e.g. with busboy or multiparty) to extract the raw file bytes.
var server = http.createServer((request, response) => {
var body = [];
request.on('data', function(chunk) {
body.push(chunk);
});
request.on('end', function() {
saveImage(Buffer.concat(body),null);
response.statusCode = 200;
response.end('thanks')
});
});
server.listen(8000, '192.168.7.25', () => {});

// Process: write the bytes to disk and report the outcome through the
// optional callback instead of silently discarding write errors.
function saveImage(data,callback) {
var fs = require('fs');
fs.writeFile('../db/images/image_copy.jpg', data, function(err) {
if (err) {
console.error('Failed to save image:', err);
}
if (callback) {
callback(err);
}
});
}
Client
'use strict';
// Require
var FormData = require('form-data');
var fs = require('fs');
var http = require('http');
// Vars
var form = new FormData();
// Process
form.append('my_file', fs.createReadStream('/temp/1.jpg'));
var request = http.request({
hostname: '192.168.7.25',
port: 8000,
path: '/api/premises/v1/image',
method: 'POST',
headers: form.getHeaders()
});
form.pipe(request);
request.on('response', function(res) {
console.log(res.statusCode);
});
After executing, the jpg is uploaded and saved to the correct file location (and also has the same file size as the source jpg) but the new image can't be opened.
Even if I encode the incoming chunk as binary and set the encoding on fs.writeFile to binary, I get a similar result.
What am I doing wrong? Thank you!
The client is uploading in multipart/form-data format, which is a format that can contain, amongst others, file data.
However, this means that the server should parse this format to extract the file data. Right now, the server is just taking the request body verbatim and writing it to a file.
The multiparty module can help you, and one of its usage examples shows you how to hook it up with http.Server: https://github.com/pillarjs/multiparty#usage
var multiparty = require('multiparty');
var http = require('http');
var util = require('util');

// Parse each incoming upload with multiparty and echo back what was
// received (plain fields and file metadata) as text/plain.
var server = http.createServer(function(req, res) {
var uploadForm = new multiparty.Form();
uploadForm.parse(req, function(err, fields, files) {
res.writeHead(200, {'content-type': 'text/plain'});
res.write('received upload:\n\n');
res.end(util.inspect({fields: fields, files: files}));
});
});
server.listen(8000);
Using that, you can extract the file data from (I think) files.my_file and write it to the file.

download large file from node http server,the server out of memory

I wrote a simple HTTP server, and when downloading a large file (1.5 GB), the server crashes with an out-of-memory error.
my code as follows:
var http = require("http");
var fs = require("fs");
var filename = "file.iso";
// Forwards every 'data' chunk straight into the response.
// res.write() returns false when its internal buffer is full, but that
// return value is ignored here — so a fast disk read outpaces the
// network, every pending chunk sits buffered in memory, and a large
// file exhausts the heap. pipe() (below) fixes this by pausing the
// read stream until the response drains.
var serv = http.createServer(function(req,res){
var stat = fs.statSync(filename);
res.writeHeader(200,{"Content-Length":stat.size});
var fReadStream = fs.createReadStream(filename);
fReadStream.on('data', function (chunk) {
res.write(chunk);
});
fReadStream.on('end', function () {
res.end();
});
});
serv.listen(8888);
but when I change it to use the stream's pipe method, it works fine, like this:
var http = require("http");
var fs = require("fs");
var filename = "file.iso";

// pipe() pauses the file stream whenever the response cannot accept
// more data, so memory use stays flat regardless of file size.
var serv = http.createServer(function (req, res) {
    var fileInfo = fs.statSync(filename);
    res.writeHeader(200, { "Content-Length": fileInfo.size });
    fs.createReadStream(filename).pipe(res);
});
serv.listen(8888);
my question is why the first code do not work?
I have been working with large files in Node and have noticed that the async calls to load the data can complete far faster than the write calls in the http stream. This results in a bottleneck that will crash your server because it runs out of memory with all the outstanding write requests (sitting in the buffer). Piping is designed to manage the stream so that this does not happen.
From Node documentation:
readable.pipe(destination, [options])#
destination Writable Stream The destination for writing data
options Object Pipe options
end Boolean End the writer when the reader ends. Default = true
This method pulls all the data out of a readable stream, and writes it to the supplied destination, automatically managing the flow so that the destination is not overwhelmed by a fast readable stream.
we can balance read and write without pipelike this:
var http = require("http");
var fs = require("fs");
var filename = "file.iso";

// Demonstrates what pipe() does internally: pause the reader when the
// writer's buffer is full, resume when it drains.
var serv = http.createServer(function (req, res) {
    var stat = fs.statSync(filename);
    res.writeHeader(200, { "Content-Length": stat.size });

    var fileStream = fs.createReadStream(filename);

    // Stop reading as soon as res.write() reports a full buffer...
    fileStream.on("data", function (chunk) {
        var accepted = res.write(chunk);
        if (!accepted) {
            fileStream.pause();
        }
    });

    fileStream.on("end", function () {
        res.end();
    });

    // ...and continue once the response has flushed its buffer.
    res.on("drain", function () {
        fileStream.resume();
    });
});
serv.listen(8888);

Nodejs binary http stream

I need to stream files from a client (nodejs command line) and a server (express nodejs).
This is the client side:
// Client-side excerpt: stream a local file to the server via HTTP POST
// using the `request` package. (`file`, `that` and `target` are
// defined in surrounding code not shown here.)
var request = require('request');
var fs = require('fs');
// ...
var readStream = fs.createReadStream(file.path);
// NOTE(review): 'end' fires when the local file has been fully READ,
// which can be before the server has finished receiving or storing it
// — verify whether 'finished' is meant to signal "upload complete".
readStream.on('end', function() {
that.emit('finished');
});
readStream.pipe(request.post(target));
// ...
This is the server side:
var fs = require('fs');
var path = require('path');
// ...
// Receive a raw binary upload and write it to destinationDir under the
// name taken from the URL (basename() strips any path components).
app.post('/:filename', function(req, res) {
// Do NOT call req.setEncoding('binary') here: it converts each raw
// chunk into a string, which mangles multi-byte sequences (the stray
// 0xC2 bytes) and inflates the file by ~50%. Piping the request
// directly into the write stream keeps the bytes intact.
var filename = path.basename(req.params.filename);
filename = path.resolve(destinationDir, filename);
var dst = fs.createWriteStream(filename);
req.pipe(dst);
req.on('end', function() {
res.send(200);
});
});
// ...
All is working, files are saved correctly on the server side... but they are about 50% bigger than the source files. I tried to see difference between the two files with hexdump and the server side file has similar content but with 0xC2 sometimes. I guess this is related to encoding.
Don't call req.setEncoding('binary').
This will convert every single chunk into strings and is mainly intended if you want to read strings from the stream. As you directly pipe the request to a file, you don't need to do it.

Send zip created by node-zip

Let's say you create a zip file in-memory following the example from node-zip's documentation:
// Build an in-memory zip containing a single text file and serialize
// it to a binary string. Splitting require() from `new` avoids the
// confusing `new require(...)()` precedence of the original.
var NodeZip = require('node-zip')
var zip = new NodeZip()
zip.file('test.file', 'hello there')
var data = zip.generate({type:'string'})
How do you then send data to a browser such that it will accept it as a download?
I tried this, but the download hangs at 150/150 bytes AND makes Chrome start eating 100% CPU:
// setHeader takes the header name and value as TWO arguments; the
// original passed "Content-type: application/zip" as a single (invalid)
// header name.
res.setHeader('Content-type', 'application/zip')
res.setHeader('Content-disposition', 'attachment; filename=Zippy.zip');
// zip.generate({type:'string'}) yields binary data in a JS string; wrap
// it in a Buffer with 'binary' encoding so res.send() does not corrupt
// the bytes with the default utf8 conversion (the cause of the hang).
res.send(Buffer.from(data, 'binary'))
So what's the proper way to send zip data to a browser?
Using the archiver and string-stream packages:
var archiver = require('archiver')
var fs = require('fs')
var http = require('http')            // was used below without being required
var StringStream = require('string-stream')

// Stream a zip archive (one static file + one dynamic string) straight
// into the HTTP response.
http.createServer(function(request, response) {
var dl = archiver('zip')
dl.pipe(response)
// createReadStream is a factory function, not a constructor — no `new`.
dl.append(fs.createReadStream('/path/to/some/file.txt'), {name:'YoDog/SubFolder/static.txt'})
dl.append(new StringStream("Ooh dynamic stuff!"), {name:'YoDog/dynamic.txt'})
dl.finalize(function (err) {
// The handler's parameter is `response`; the original referenced an
// undefined `res`, and http.ServerResponse has no .send() method.
if (err) {
response.statusCode = 500
response.end()
}
})
}).listen(3000)
I recommend you to use streams for this approach.
var fs = require('fs');
var zlib = require('zlib');
var http = require('http');

// Decompress test.file on the fly and stream the result to the client.
http.createServer(function(request, response) {
response.writeHead(200, { 'Content-Type': 'application/octet-stream' });
var readStream = fs.createReadStream('test.file');
var unzipStream = zlib.createUnzip();
// Chain left to right: file -> unzip -> response. The original
// `readStream.pipe(unzipStream.pipe(response))` sent the RAW file to
// the response, because pipe() returns its destination (`response`
// here), so decompression was skipped entirely.
readStream.pipe(unzipStream).pipe(response);
}).listen(3000);
This will probably not work as-is in the real world (as I am not very familiar with zlib), but it may point you in the right direction.

Resources