Loading Audio In Web Audio API From Node API

I'm trying to load audio from the server using Web Audio API. So far, all of my attempts to return data using a Node/Express API have failed to return anything that would play in my browser.
My current attempt is to store the contents of the file into a buffer within Mongo. When requested, the audio gets converted to an ArrayBuffer, and then encoded into Base64. The client then decodes the string and passes it into the Web Audio API Buffer Source.
I'm sure I'm doing this the hard way, but this is the only thing I've come up with. I've posted my code below. If you know of a better way, I would appreciate the guidance.
Thanks
Load Data Into MongoDB
var fs = require('fs');
var Sound = require('./sound.model');

var buffer = fs.readFileSync('C4.mp3');
Sound.create({
    instrument: 'Piano',
    audio: buffer,
    note: 'C4'
});
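For reference, here is a minimal sketch of what the ./sound.model module would need to look like. This is an assumption based on the fields used above, not code from the question; the key point is that audio must be declared as a Buffer so the raw bytes round-trip through MongoDB:

// sound.model.js — hypothetical Mongoose schema matching the fields used above
var mongoose = require('mongoose');

var SoundSchema = new mongoose.Schema({
    instrument: String,
    audio: Buffer, // stored as BSON binary data in MongoDB
    note: String
});

module.exports = mongoose.model('Sound', SoundSchema);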
Controller Converts Audio
var Sound = require('./sound.model');
var base64 = require('base64-arraybuffer');

// Get list of sounds
exports.index = function(req, res) {
    Sound.find(function (err, sounds) {
        if (err) { return handleError(res, err); }
        var soundsToReturn = [];
        for (var i = 0; i < sounds.length; i++) {
            soundsToReturn.push({
                instrument: sounds[i].instrument,
                audio: base64.encode(toArrayBuffer(sounds[i].audio)),
                note: sounds[i].note
            });
        }
        return res.status(200).json(soundsToReturn);
    });
};
function toArrayBuffer(buffer) {
    var ab = new ArrayBuffer(buffer.length);
    var view = new Uint8Array(ab);
    for (var i = 0; i < buffer.length; ++i) {
        view[i] = buffer[i];
    }
    return ab;
}
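As an aside, in Node 4+ a Buffer is a Uint8Array view onto an ArrayBuffer, so the per-byte copy loop can be replaced with a single slice of the backing store. A drop-in replacement sketch:

// Equivalent conversion: slice the Buffer's backing ArrayBuffer in one call
function toArrayBuffer(buffer) {
    return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength);
}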
Client Decodes Audio
$.ajax({
    url: url,
    dataType: 'json',
    success: function(data) {
        audioCtx.decodeAudioData(_base64ToArrayBuffer(data[0].audio), function(b) {
            buffer = b;
        }, function(err) {
            console.error('decodeAudioData failed', err);
        });
    }
});
function _base64ToArrayBuffer(base64) {
    var binary_string = window.atob(base64);
    var len = binary_string.length;
    var bytes = new Uint8Array(len);
    for (var i = 0; i < len; i++) {
        bytes[i] = binary_string.charCodeAt(i);
    }
    return bytes.buffer;
}
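For completeness, once decodeAudioData has filled buffer, playing it is just a matter of wiring up an AudioBufferSourceNode. A minimal sketch, assuming audioCtx is the page's AudioContext:

// Play a decoded AudioBuffer through the default output
function playSound(decodedBuffer) {
    var source = audioCtx.createBufferSource();
    source.buffer = decodedBuffer;
    source.connect(audioCtx.destination);
    source.start(0);
}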

Since your sound data is already in Buffer format, you should be able to reduce your controller code to just:
var Sound = require('./sound.model');

// Get list of sounds
exports.index = function(req, res) {
    Sound.find(function (err, sounds) {
        if (err) { return handleError(res, err); }
        var soundsToReturn = [];
        for (var i = 0; i < sounds.length; i++) {
            soundsToReturn.push({
                instrument: sounds[i].instrument,
                audio: sounds[i].audio.toString('base64'),
                note: sounds[i].note
            });
        }
        return res.status(200).json(soundsToReturn);
    });
};
As far as the client side goes, about the only thing you might look into there is testing base64 decoding performance. Other than that I think everything else looks ok.
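If base64 decoding does turn out to be a cost, one alternative worth sketching is to skip base64 entirely: serve the raw Buffer from a dedicated route with an audio content type, and request it as an ArrayBuffer. The /api/sounds/:id/audio route below is hypothetical, not part of the code above:

// Server: hypothetical route that sends one sound's bytes directly
exports.audio = function (req, res) {
    Sound.findById(req.params.id, function (err, sound) {
        if (err) { return handleError(res, err); }
        res.set('Content-Type', 'audio/mpeg');
        return res.status(200).send(sound.audio); // Express sends a Buffer as binary
    });
};

// Client: fetch the binary data directly; no base64 round trip needed
var xhr = new XMLHttpRequest();
xhr.open('GET', '/api/sounds/' + id + '/audio'); // id: the sound's Mongo _id
xhr.responseType = 'arraybuffer';
xhr.onload = function () {
    audioCtx.decodeAudioData(xhr.response, function (b) { buffer = b; });
};
xhr.send();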

Related

Saving buffer/stream that comes from nodejs server

I am not sure what I am doing wrong.
I have HTML content and want to save it as a PDF. I use html-pdf (from npm) and a download library, http://danml.com/download.html.
When I save directly to a file or return the result to be shown, I get the PDF without a problem. But when I call my web service method from a JS method, I get a stream/buffer as the return value and save it with the 'download' library, and that is where it goes wrong.
Here is my code
// Writing straight to a file works fine:
pdf.create(html, options).toFile('./mypdf.pdf', function (err, res) {
    if (err) return console.log(err);
    console.log(res);
});

// Returning a buffer (inside a Promise executor):
pdf.create(html, options).toBuffer(function (err, buffer) {
    if (err) return reject(err);
    return resolve(buffer);
});

// Or returning a stream:
//res.setHeader('Content-type', 'application/pdf');
pdf.create(html, options).toStream(function (err, stream) {
    if (err) return res.send(err);
    //res.type('pdf');
    return resolve(stream); // .pipe(res);
});
Saving the content as a PDF works fine, but when I try to send the stream or buffer, somehow the page is empty. I opened both PDF files with Notepad and there are some differences: the good one is 44 KB, the other one 78 KB, and the empty one also contains the following lines:
%PDF-1.4 1 0 obj << /Title (��) /Creator (��) /Producer (�� Q t 5 .
5 . 1) /CreationDate (D:20190524152156)
endobj
I think the toBuffer or toStream method has a problem in my case, because the stream itself does not seem bad: I can at least see that it is a PDF file (no error, the page is just empty).
Anyway, here is my API router
let result = await routerVoucher.CreatePdfStream(req.query.VoucherCode, req.query.AppName);
res.setHeader('Content-type', 'application/pdf');
res.type('pdf');
//result.pipe(res);
res.end(result, 'binary');
and here is my js consumer
$.ajax({
    type: "GET",
    url: '/api/vouchers/GetLicensePdf',
    data: data,
    success: function (pdfFile) {
        if (!pdfFile)
            throw new Error('There is nothing to download');
        download(pdfFile, voucherCode + '.pdf', 'application/pdf');
    }
});
I've solved the problem.
First, I converted the buffer to base64:
const base64 = buffer.toString('base64')
and then converted the base64 string to a Blob using the following code:
function base64toBlob(base64Data, contentType) {
    contentType = contentType || '';
    var sliceSize = 1024;
    var byteCharacters = atob(base64Data);
    //var byteCharacters = decodeURIComponent(escape(window.atob(base64Data)))
    var bytesLength = byteCharacters.length;
    var slicesCount = Math.ceil(bytesLength / sliceSize);
    var byteArrays = new Array(slicesCount);
    for (var sliceIndex = 0; sliceIndex < slicesCount; ++sliceIndex) {
        var begin = sliceIndex * sliceSize;
        var end = Math.min(begin + sliceSize, bytesLength);
        var bytes = new Array(end - begin);
        for (var offset = begin, i = 0; offset < end; ++i, ++offset) {
            bytes[i] = byteCharacters[offset].charCodeAt(0);
        }
        byteArrays[sliceIndex] = new Uint8Array(bytes);
    }
    return new Blob(byteArrays, { type: contentType });
}
Then I used my download method (from the download.js library) again, as follows:
download(new Blob([base64toBlob(base64PDF, "application/pdf")]),
    voucherCode + '.pdf', "application/pdf");
then everything is fine :)
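For what it's worth, the root cause is that $.ajax reads the response as text, which mangles the binary PDF bytes. An alternative sketch that avoids the base64 round trip entirely, assuming the same /api/vouchers/GetLicensePdf route and the data, voucherCode, and download names from the consumer above (plain XMLHttpRequest, since jQuery's $.ajax does not expose responseType):

// Request the PDF as a Blob so no text decoding ever touches the bytes
var xhr = new XMLHttpRequest();
xhr.open('GET', '/api/vouchers/GetLicensePdf?' + $.param(data));
xhr.responseType = 'blob';
xhr.onload = function () {
    download(xhr.response, voucherCode + '.pdf', 'application/pdf');
};
xhr.send();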

Problems when I upload images and show them in Angular and Node using GraphicsMagick

I use this code to upload images in Node:
req.file('image[]')
    .upload({
        maxBytes: 5000000, // File size limit (in bytes)
        dirname: path.resolve(sails.config.appPath, 'assets/images/user') // Path to copy the files to
    }, function whenDone(err, uploadedFiles) {
        if (err) {
            // here I must delete the created user before the error
            console.log(err);
        }
        var image_real_name = '';
        var json = [];
        for (var i = 0; i < uploadedFiles.length; i++) {
            image_real_name = 'images/user/' + path.basename(uploadedFiles[i].fd);
            json.push(image_real_name);
        }
        res(json);
    });
but I needed to compress the images to save space on the server, so I used the gm (GraphicsMagick) module:
var receiver = new Writable({ objectMode: true });
receiver._write = function (file, enc, cb) {
    // The output stream to pipe to
    var output = require('fs').createWriteStream('assets/images/user/' + file.fd);
    gm(file).resize('200', '200').stream().pipe(output);
    cb();
};

req.file('image[]').upload(receiver, function (err, files) {
    if (err) {
        // here I must delete the created user before the error
        console.log(err);
    }
    var image_real_name = '';
    var json = [];
    for (var i = 0; i < files.length; i++) {
        image_real_name = 'images/user/' + path.basename(files[i].fd);
        json.push(image_real_name);
    }
    res(json);
});
I am using Angular, and when I change the image's path via binding, the new image should be shown, but it does not load. With the first code everything works perfectly, but with gm it no longer does: I have to reload the page for the image to be displayed.
Update: apparently the image has not been fully processed yet; if a setTimeout is added, the image is displayed correctly:
setTimeout((dataService: DataService, image: any) => {
    dataService.getUser().image = image;
}, 1000, this.data, dataI.json().url);
Any idea?
There is a 'finish' event fired when the file has been fully written. I solved the problem by adding the following:
output.on("finish", function() { return cb(); });
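Applied to the receiver from the question, the write handler would look something like this (same gm/fs usage as above; only the callback moves into the finish handler):

receiver._write = function (file, enc, cb) {
    var output = require('fs').createWriteStream('assets/images/user/' + file.fd);
    // Only signal completion once GraphicsMagick has finished writing the
    // resized image to disk; before that the file on disk is still partial.
    output.on('finish', function () { return cb(); });
    gm(file).resize('200', '200').stream().pipe(output);
};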

XLSX File corrupted when sent from Node.js via Restify to Client

I am working on a project where I create an Excel file using the XLSX Node.js library, send it to a client via Restify, and then use the FileSaver.js library to save it on the local computer. When I write the xlsx workbook to a file on the backend, it opens fine; however, when I open it on the client, it is corrupted. I get the error: "Excel cannot open this file. The file format or file extension is not valid. Verify that the file has not been corrupted and that the file extension matches the format of the file".
Here is my code for writing and sending the file on the backend:
var wopts = { bookType:'xlsx', bookSST:false, type:'binary' };
var workbook = xlsx.write(wb, wopts);
res.send(200, workbook);
On the front end, I am using code from the XLSX documentation:
function s2ab(s) {
    var buf = new ArrayBuffer(s.length);
    var view = new Uint8Array(buf);
    for (var i = 0; i != s.length; ++i)
        view[i] = s.charCodeAt(i) & 0xFF;
    return buf;
}

saveAs(new Blob([s2ab(response.data)], { type: "" }), "test.xlsx");
Any thoughts on why this would not work? Any help would be much appreciated. Thanks.
As Luke mentioned in the comments, you have to base64-encode the buffer before sending it. Here's a snippet that uses the npm module node-xlsx.
var xlsx = require('node-xlsx');

router.get('/history', function (req, res) {
    var user = new User();
    user.getHistory(req.user.userId, req.query.offset, req.query.limit)
        .then(function (history) {
            if (req.headers.contenttype && req.headers.contenttype.indexOf('excel') > -1) {
                var data = [['Data', 'amount'], ['19/12/2016', '10']];
                var xlsxBuffer = xlsx.build([{ name: 'History', data: data }]);
                res.end(xlsxBuffer.toString('base64'));
            } else {
                res.send(history);
            }
        })
        .catch(function (err) {
            res.status(500).send(err);
        });
});
And this is the frontend code using Angular:
$scope.getXlsFile = function() {
    var config = {
        params: {
            offset: $scope.offset,
            limit: $scope.limit
        },
        headers: {
            'contentType': 'application/vnd.ms-excel',
            'responseType': 'arraybuffer'
        }
    };
    $http.get('/api/history', config)
        .then(function(res) {
            var blob = new Blob([convert.base64ToArrayBuffer(res.data)]);
            FileSaver.saveAs(blob, 'historial.xlsx');
        });
};
where convert is the following factory:
.factory('convert', function () {
    return {
        base64ToArrayBuffer: function (base64) {
            var binary_string = window.atob(base64);
            var len = binary_string.length;
            var bytes = new Uint8Array(len);
            for (var i = 0; i < len; i++) {
                bytes[i] = binary_string.charCodeAt(i);
            }
            return bytes.buffer;
        }
    };
})

Streaming a file from server to client with socket.io-stream

I've managed to upload files in chunks from a client to a server, but now I want to go the opposite way. Unfortunately the documentation on the official module page is lacking for this part.
I want to do the following:
emit a stream and 'download'-event with the filename to the server
the server should create a readstream and pipe it to the stream emitted from the client
when the stream reaches the client, a download popup should appear and ask where to save the file
The reason why I don't want to use simple file hyperlinks is obfuscation: the files on the server are encrypted and renamed, so I have to decrypt and rename them for each download request.
Any code snippets around to get me started with this?
This is a working example I'm using. But somehow (maybe only in my case) this can be very slow.
//== Server Side
ss(socket).on('filedownload', function (stream, name, callback) {
    //== Do stuff to find your file
    callback({
        name: "filename",
        size: 500
    });
    var MyFileStream = fs.createReadStream(name);
    MyFileStream.pipe(stream);
});

//== Client Side
/** Download a file from the object store
 * @param {string} name Name of the file to download
 * @param {string} originalFilename Overrules the file's originalFilename
 * @returns {$.Deferred}
 */
function downloadFile(name, originalFilename) {
    var deferred = $.Deferred();
    //== Create stream for file to be streamed to and buffer to save chunks
    var stream = ss.createStream(),
        fileBuffer = [],
        fileLength = 0;
    //== Emit/Request
    ss(mysocket).emit('filedownload', stream, name, function (fileError, fileInfo) {
        if (fileError) {
            deferred.reject(fileError);
        } else {
            console.log(['File Found!', fileInfo]);
            //== Receive data
            stream.on('data', function (chunk) {
                fileLength += chunk.length;
                var progress = Math.floor((fileLength / fileInfo.size) * 100);
                progress = Math.max(progress - 2, 1);
                deferred.notify(progress);
                fileBuffer.push(chunk);
            });
            stream.on('end', function () {
                var filedata = new Uint8Array(fileLength),
                    i = 0;
                //== Loop to fill the final array
                fileBuffer.forEach(function (buff) {
                    for (var j = 0; j < buff.length; j++) {
                        filedata[i] = buff[j];
                        i++;
                    }
                });
                deferred.notify(100);
                //== Download file in browser
                downloadFileFromBlob([filedata], originalFilename);
                deferred.resolve();
            });
        }
    });
    //== Return
    return deferred;
}

var downloadFileFromBlob = (function () {
    var a = document.createElement("a");
    document.body.appendChild(a);
    a.style = "display: none";
    return function (data, fileName) {
        var blob = new Blob(data, {
                type: "octet/stream"
            }),
            url = window.URL.createObjectURL(blob);
        a.href = url;
        a.download = fileName;
        a.click();
        window.URL.revokeObjectURL(url);
    };
}());
My dear friend Jeffrey van Norden, that's right, it worked for me. But there was a small bug, so I changed the server-side code this way (the callback now takes an error flag as its first argument, matching the client's function (fileError, fileInfo) handler):
//== Server Side
ss(socket).on('filedownload', function (stream, name, callback) {
    //== Do stuff to find your file
    try {
        let stats = fs.statSync(name);
        let size = stats.size;
        callback(false, {
            name: name,
            size: size
        });
        let MyFileStream = fs.createReadStream(name);
        MyFileStream.pipe(stream);
    } catch (e) {
        callback(true, {});
    }
});
The client-side code is unchanged from Jeffrey's answer above.

Node.js - MJPEG TCP stream to base64 images

Based on the paparazzo.js lib, I'm trying to get base64 images from an MJPEG stream (streamed over TCP with GStreamer) in a Node.js server, and to send them to the clients via WebSockets.
I think I'm pretty close, but my images are corrupted. Here is the code I'm using:
var boundary = "----videoboundary";
var data = "";

var tcpServer = net.createServer(function (socket) {
    socket.on('data', function (chunk) {
        var boundaryIndex = chunk.toString().indexOf(boundary);
        if (boundaryIndex !== -1) {
            // Get the image's last piece of data:
            data += chunk.toString().substring(0, boundaryIndex);
            // Convert the data to a base64 image and broadcast it:
            var image = new Buffer(data).toString('base64');
            io.sockets.emit('image', image);
            // Reset the data:
            data = '';
            // Get the remaining data (with the new image's headers):
            var remaining = chunk.toString().substring(boundaryIndex);
            // Remove the new image's headers and add the remaining data:
            var contentTypeMatches = remaining.match(/Content-Type:\s+image\/jpeg\s+/);
            var contentLengthMatches = remaining.match(/Content-Length:\s+(\d+)\s+/);
            if (contentLengthMatches != null && contentLengthMatches.length > 1) {
                var newImageBeginning = remaining.indexOf(contentLengthMatches[0]) + contentLengthMatches[0].length;
                data += remaining.substring(newImageBeginning);
            }
            else if (contentTypeMatches != null) {
                var newImageBeginning = remaining.indexOf(contentTypeMatches[0]) + contentTypeMatches[0].length;
                data += remaining.substring(newImageBeginning);
            }
            else {
                var newImageBeginning = boundaryIndex + boundary.length;
                data += remaining.substring(newImageBeginning);
                io.sockets.emit('error', { message: 'Could not find beginning of next image' });
            }
        }
        else {
            data += chunk;
        }
    });
});
Any idea?
Thanks
chunk.toString() converts the binary Buffer to a utf8-encoded string (by default), so for binary image data that will probably cause you some problems.
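A quick way to see the corruption (a standalone sketch): round-trip the first bytes of a JPEG header through a utf8 string. Every byte that is not valid utf8 is replaced by U+FFFD, so the original data is destroyed and even grows:

var buf = Buffer.from([0xff, 0xd8, 0xff, 0xe0]); // JPEG SOI + APP0 marker bytes
var roundTripped = Buffer.from(buf.toString('utf8'), 'utf8');
console.log(roundTripped.length); // 12, not 4 — each invalid byte became U+FFFD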
Another option that might help simplify things for you is to use the dicer module. With that, your code may look like:
var Dicer = require('dicer');
var boundary = '----videoboundary';

var tcpServer = net.createServer(function (socket) {
    var dice = new Dicer({ boundary: boundary });

    dice.on('part', function (part) {
        var frameEncoded = '';
        part.setEncoding('base64');
        part.on('header', function (header) {
            // here you can verify content-type, content-length, or any other
            // header values if you need to
        }).on('data', function (data) {
            frameEncoded += data;
        }).on('end', function () {
            io.sockets.emit('image', frameEncoded);
        });
    }).on('finish', function () {
        console.log('End of parts');
    });

    socket.pipe(dice);
});
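Note that socket.pipe(dice) hands the raw bytes straight to the parser, so no toString() is ever applied to the binary stream, which is what was corrupting the frames in the original code.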
