Saving a buffer/stream that comes from a Node.js server

I am not sure what I am doing wrong.
I have HTML content and want to save it as a PDF. I use html-pdf (from npm) and the download library from http://danml.com/download.html
When I save directly to a file, or return the PDF as the response, I get the PDF without a problem. But when I call my web service method from a JS function, I get a stream/buffer as the return value and save it with the 'download' library, and that is where things go wrong.
Here is my code
// Saving to a file works fine:
pdf.create(html, options).toFile('./mypdf.pdf', function (err, res) {
    if (err) return console.log(err);
    console.log(res);
});

// Returning a buffer (resolve/reject come from a surrounding Promise):
pdf.create(html, options).toBuffer(function (err, buffer) {
    if (err) return reject(err);
    return resolve(buffer);
});

// Returning a stream (also inside a Promise wrapper):
//res.setHeader('Content-type', 'application/pdf');
pdf.create(html, options).toStream(function (err, stream) {
    if (err) return res.send(err);
    //res.type('pdf');
    return resolve(stream); // .pipe(res);
});
I can save the content as a PDF and it works fine, but when I try to send the stream or buffer, the page in the resulting PDF is somehow empty. I opened both PDF files with Notepad and there are some differences: for example, the problem-free one is 44 KB and the other one is 78 KB, and the empty one also contains the following lines:
%PDF-1.4 1 0 obj << /Title (��) /Creator (��) /Producer (�� Q t 5 .
5 . 1) /CreationDate (D:20190524152156)
endobj
I think the toBuffer or toStream method has a problem in my case, because the stream itself does not seem bad: at least I can see that it is a PDF file (no error, the page is just empty).
Anyway, here is my API router
let result = await routerVoucher.CreatePdfStream(req.query.VoucherCode, req.query.AppName);
res.setHeader('Content-type', 'application/pdf');
res.type('pdf');
//result.pipe(res);
res.end(result, 'binary');
and here is my js consumer
$.ajax({
    type: "GET",
    url: '/api/vouchers/GetLicensePdf',
    data: data,
    success: function (pdfFile) {
        if (!pdfFile)
            throw new Error('There is nothing to download');
        download(pdfFile, voucherCode + '.pdf', 'application/pdf');
    }
});

I've solved the problem.
First, I converted the buffer to base64:
const base64 = buffer.toString('base64')
and then converted the base64 string to a blob using the following code:
function base64toBlob(base64Data, contentType) {
    contentType = contentType || '';
    var sliceSize = 1024;
    var byteCharacters = atob(base64Data);
    //var byteCharacters = decodeURIComponent(escape(window.atob(base64Data)))
    var bytesLength = byteCharacters.length;
    var slicesCount = Math.ceil(bytesLength / sliceSize);
    var byteArrays = new Array(slicesCount);

    for (var sliceIndex = 0; sliceIndex < slicesCount; ++sliceIndex) {
        var begin = sliceIndex * sliceSize;
        var end = Math.min(begin + sliceSize, bytesLength);
        var bytes = new Array(end - begin);
        for (var offset = begin, i = 0; offset < end; ++i, ++offset) {
            bytes[i] = byteCharacters[offset].charCodeAt(0);
        }
        byteArrays[sliceIndex] = new Uint8Array(bytes);
    }
    return new Blob(byteArrays, { type: contentType });
}
and then I used my download method (from the download.js library) again, as follows:
download(new Blob([base64toBlob(base64PDF, "application/pdf")]),
    voucherCode + '.pdf', "application/pdf");
then everything is fine :)
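For completeness, this only works because the server sends the PDF as a base64 string rather than raw binary; jQuery's $.ajax decodes binary responses as text, which is what was corrupting the PDF. A minimal sketch of the matching router change, assuming CreatePdfStream resolves with the Buffer from toBuffer above:

// Send the PDF as base64 text; the client rebuilds the blob with base64toBlob.
let result = await routerVoucher.CreatePdfStream(req.query.VoucherCode, req.query.AppName);
res.setHeader('Content-Type', 'text/plain');
res.end(result.toString('base64'));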

Related

How to handle client downloading gzip served from Node.js server app?

I have a question about handling a gzip response in my client-side application. I would like the client's browser to pop up a "how do you want to handle this file?" download prompt.
My Node.js server compresses my files into gzip format and then sends them with an HTTP write response. My client receives an HTTP 200 status, although the size of the response is very small compared to my file, and nothing populates my web app. I expected the browser to handle this sort of response from a server sending gzip, similar to how Gmail handles downloading files. Can you help me see if I have missed anything?
server.js
var server = http.createServer(function(request, response) {
    if (request.url === '/download') {
        let data_zip = retrievedata()
        const scopedata_zip = ('./scopedata.txt.gz')
        response.writeHead(200, { 'Content-Encoding': 'gzip' });
        response.writeHead(200, { 'Content-Type': 'application/javascript' });
        response.write(scopedata_zip);
    }
})

var retrievedata = () => {
    const gzip = zlib.createGzip();
    const inp = fs.createReadStream('scopedata.txt');
    const out = fs.createWriteStream('scopedata.txt.gz');
    inp.pipe(gzip).pipe(out);
    return out
}
Client.js
var downloadData = () => {
    var xhr = new XMLHttpRequest();
    xhr.open('POST', 'download', true);
    //xhr.setRequestHeader("Accept-Encoding", "gzip")
    xhr.setRequestHeader("Encoding", "null")
    xhr.onload = function () {
        if (this.status == 200) {
            let form = document.createElement("form");
            let element1 = document.createElement("input");
            document.body.appendChild(form);
            let response = this.responseText
            console.log(response)
            document.getElementById("state").innerHTML = 'download'
            document.getElementById("index").innerHTML = response;
            // document.getElementById("state").appendChild(form)
        }
    }
    xhr.onerror = function(err) {
        console.log("request error...", err)
    }
    xhr.send()
}
The client just populates my index div with the response, but nothing is received.
My gzip file is 327 MB.
The Chrome inspector's network tab says this request is only 170 B, so I am not receiving my file.
Note: xhr.setRequestHeader("Accept-Encoding", "gzip") is commented out because I get this error: Refused to set unsafe header "Accept-Encoding". I have set it to null to let the browser handle this.
Any input on what I am doing wrong?
There were three things I was doing wrong. First, I managed to get the browser download prompt by creating a new anchor element, checking that the element supports the download attribute, and setting its href to an object URL for the XHR response. The second part of my issue was not receiving the zip file with the appropriate request headers: because my zip file was large, the browser needs to handle the binary buffer stream as a blob (read more about XHR response types at XHR.response). The third issue was on my server side, which was using fs.readFile to read the zip as a buffer: because my zip was made up of multiple files, fs.readFile would stop reading when it hit the end of the first file.
so my client code looks like
var xhr = new XMLHttpRequest();
document.getElementById("state").innerHTML = ' '
document.getElementById("index").innerHTML = ' ';
xhr.open('POST', 'download', true);
xhr.setRequestHeader('Content-disposition', 'attachment')
xhr.setRequestHeader("Content-type", "application/zip"); // content-type must be set
xhr.setRequestHeader("Encoding", "null") // unsure of why I need this, but it doesn't work without it for me
xhr.responseType = "blob"; // this must be set, otherwise the browser was interpreting the buffer stream as a string instead of binary
xhr.onload = function () {
    if (this.status == 200) {
        let form = document.createElement("form");
        let element1 = document.createElement("input");
        document.body.appendChild(form);
        let response = this.response // defined as blob above
        document.getElementById("state").innerHTML = 'download'
        document.getElementById("index").innerHTML = response;
        var blob = new Blob([response], { type: "application/zip" });
        var file = URL.createObjectURL(blob);
        var filename = 'Data.zip'
        var a = document.createElement("a");
        if ("download" in a) { // check if the element can download
            a.href = file;
            a.download = filename;
            document.body.appendChild(a);
            a.click(); // automatically trigger the browser download
            document.body.removeChild(a);
        }
    }
}
xhr.send()
Server side
else if (request.url === '/download') {
    archiveZip((data) => { // call archiveZip with a callback that sends the zipped data as this route's response
        response.setHeader('Content-Type', 'application/zip')
        response.setHeader('Content-Length', data.length) // this header is important: without it the browser may truncate the response, especially if there are end-of-file characters zipped up in the buffer stream
        response.setHeader('Content-disposition', 'attachment; filename="Data.zip"');
        response.end(data);
    })
}
var archiveZip = (callback) => {
    var output = fs.createWriteStream(__dirname + '/Data.zip'); // output
    var archive = archiver('zip', { zlib: { level: 9 } });

    output.on('close', function() {
        console.log(archive.pointer() + ' total bytes');
        console.log('archiver has been finalized and the output file descriptor has closed.');
        fs.readFile('./Data.zip', function (err, content) {
            if (err) {
                response.writeHead(400, { 'Content-type': 'text/html' })
                console.log(err);
                response.end("No such file");
            } else {
                callback(content);
            }
        });
    });
    output.on('end', function() {
        console.log('Data has been drained');
    });
    archive.on('error', function(err) {
        throw err;
    });
    archive.pipe(output);

    // append the individual files
    archive.file(data_files + '/parsed_scope.json', { name: 'parsed_scope.json' });
    archive.file(data_files + '/scopedata_index.json', { name: 'scopedata_index.json' });
    archive.file(data_files + '/scopedata.txt', { name: 'scopedata.txt' });
    archive.finalize();
}
There are many zip libraries; I was looking at ones that can handle zipping a directory with multiple files, and went with archiver. I would have liked to use the built-in zlib that comes with Node, but it only compresses single streams; it cannot produce a multi-file archive.
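As a design note: archiver itself is a readable stream, so a variant of the route can skip the temporary Data.zip on disk and pipe the archive straight into the response. A rough sketch under that assumption (the tradeoff is that Content-Length can no longer be set up front, so the response is sent chunked):

else if (request.url === '/download') {
    // Sketch: stream the archive directly to the HTTP response,
    // avoiding the intermediate Data.zip file and the fs.readFile buffering.
    var archive = archiver('zip', { zlib: { level: 9 } });
    response.setHeader('Content-Type', 'application/zip');
    response.setHeader('Content-disposition', 'attachment; filename="Data.zip"');
    archive.on('error', function(err) {
        response.writeHead(500, { 'Content-type': 'text/html' });
        response.end('Archive error');
    });
    archive.pipe(response); // the response ends when the archive finalizes
    archive.file(data_files + '/parsed_scope.json', { name: 'parsed_scope.json' });
    archive.file(data_files + '/scopedata_index.json', { name: 'scopedata_index.json' });
    archive.file(data_files + '/scopedata.txt', { name: 'scopedata.txt' });
    archive.finalize();
}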

Problems when I upload images and show them in Angular and Node using GraphicsMagick

I use this code to upload images in Node:
req.file('image[]')
    .upload({
        maxBytes: 5000000, // file size limit (in bytes)
        dirname: path.resolve(sails.config.appPath, 'assets/images/user') // path to copy the files to
    }, function whenDone(err, uploadedFiles) {
        if (err) {
            // here I must delete the created user before the error
            console.log(err);
        }
        var image_real_name = '';
        var json = [];
        for (var i = 0; i < uploadedFiles.length; i++) {
            image_real_name = 'images/user/' + path.basename(uploadedFiles[i].fd);
            json.push(image_real_name);
        }
        res(json);
    });
but I needed to compress the images to save space on the server, so I used gm (GraphicsMagick):
var receiver = new Writable({ objectMode: true });
receiver._write = function(file, enc, cb) {
    // The output stream to pipe to
    var output = require('fs').createWriteStream('assets/images/user/' + file.fd);
    gm(file).resize('200', '200').stream().pipe(output);
    cb();
};

req.file('image[]').upload(receiver, function(err, files) {
    if (err) {
        // here I must delete the created user before the error
        console.log(err);
    }
    var image_real_name = '';
    var json = [];
    for (var i = 0; i < files.length; i++) {
        image_real_name = 'images/user/' + path.basename(files[i].fd);
        json.push(image_real_name);
    }
    res(json);
});
I am using Angular, and when I change the image URL via binding, the new image should be shown, but it does not load. With the first code everything works perfectly, but using gm it no longer works: I have to reload the page for the image to be displayed.
Update:
Apparently the image has not been fully processed yet; if a setTimeout is added, the image is displayed correctly:
setTimeout((dataService: DataService, image: any) => {
    dataService.getUser().image = image;
}, 1000, this.data, dataI.json().url);
Any ideas?
There is a finish event that fires when the file has been fully written. I solved the problem by adding the following:
output.on("finish", function() { return cb(); });

How to download excel file from nodejs terminal

I am new to Node.js and need your help. From the Node.js terminal, I want to download an Excel file and convert it to CSV (say, with mocha online.js). Note: I don't want to do this via a browser.
Below is the script I am working on to download the file and convert it to CSV. There is no error, but also no expected result:
online.js
if (typeof require !== 'undefined') XLSX = require('xlsx');
var XMLHttpRequest = require("xmlhttprequest").XMLHttpRequest;

/* set up XMLHttpRequest */
var url = "http://oss.sheetjs.com/js-xlsx/test_files/formula_stress_test_ajax.xlsx";
var xhr = new XMLHttpRequest();
xhr.open("GET", url, true);
xhr.responseType = "arraybuffer";

describe('suite', function () {
    it('case', function () {
        var arraybuffer = xhr.response;

        /* convert data to binary string */
        var data = new Uint8Array(arraybuffer);
        var arr = new Array();
        for (var i = 0; i != data.length; ++i) arr[i] = String.fromCharCode(data[i]);
        var bstr = arr.join("");

        /* Call XLSX */
        var sheetName = 'Database';
        var workbook = XLSX.read(bstr, { type: "binary" });
        var worksheet = workbook.Sheets[sheetName];
        var csv = XLSX.utils.sheet_to_csv(worksheet);
        console.log(csv);
        xhr.send();
        //.... perform validations here using the csv data
    });
});
I tried this myself with the code below, and it seems to be working. The only thing is that I spent 15 minutes trying to understand why my OpenOffice would not open the file, until I eventually understood that the server was sending a zip file... Here is the full code; the documentation of the HTTP get function is at http.get.
You could also have used the request module; it isn't native, but request is easier.
Enjoy!
const url = 'http://oss.sheetjs.com/js-xlsx/test_files/formula_stress_test_ajax.xlsx'
const http = require('http')
const fs = require('fs')

http.get(url, (res) => {
    const { statusCode } = res;
    const contentType = res.headers['content-type'];
    console.log(`The type of the file is : ${contentType}`)

    let error;
    if (statusCode !== 200) {
        error = new Error(`Request Failed.\n` +
            `Status Code: ${statusCode}`);
    }
    if (error) {
        console.error(error.message);
        // consume response data to free up memory
        res.resume();
        return;
    }

    res.setEncoding('binary');
    let rawData = '';
    res.on('data', (chunk) => {
        rawData += chunk;
    });
    res.on('end', () => {
        try {
            const parsedData = xlsxToCSVFunction(rawData);
            // And / Or just put it in a file
            fs.writeFileSync('fileName.zip', rawData, 'binary')
            // console.log(parsedData);
        } catch (e) {
            console.error(e.message);
        }
    });
}).on('error', (e) => {
    console.error(`Got error: ${e.message}`);
});

function xlsxToCSVFunction(rawData) {
    return rawData // you should return the CSV here, whatever your tools are
}
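The xlsxToCSVFunction above is just a stub; one way to fill it in, assuming the xlsx package and the 'Database' sheet name from the question, would be:

const XLSX = require('xlsx');

function xlsxToCSVFunction(rawData) {
    // rawData is a binary string (res.setEncoding('binary') above)
    const workbook = XLSX.read(rawData, { type: 'binary' });
    const worksheet = workbook.Sheets['Database']; // sheet name from the question
    return XLSX.utils.sheet_to_csv(worksheet);
}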
I actually encountered the same problem 3 months ago; here is what I did.
I did not find any Node.js module that did exactly what I wanted, so I used in2csv (a Python command-line program) to transform the data; the -t option makes it use tabs as the delimiter.
Step 1: transforming the xlsx files into CSV using in2csv
This code takes all the xlsx files in the current directory, transforms them into CSV files and puts them in another directory:
var shelljs = require('shelljs/global')

var dir = pwd().stdout.split('/')
dir = dir[dir.length - 1].replace(/\s/g, '\\ ')
mkdir('../' + dir + 'CSV')

ls('*.xlsx').forEach(function(file) {
    // below are the two lines you need
    let string = 'in2csv -t ' + file.replace(/\s/g, '\\ ') + ' > ../' + dir + 'CSV/' + file.replace('xlsx', 'csv').replace(/\s/g, '\\ ')
    exec(string, { silent: true }, function(code, stdout, stderr) {
        console.log('new file : ' + file.replace('xlsx', 'csv'))
        if (stderr) {
            console.log(string)
            console.log('Program stderr:', stderr)
        }
    })
});
Step 2: loading the data in a Node.js program
My script is very long, but the main two lines are:
const args = fileContent.split('\n')[0].split(',')
const content = fileContent.split('\n').slice(1).map(e => e.split(','))
And for the benefit of seekers like me, here is a solution using mocha, request and xlsx:
var request = require('request');
var XLSX = require('xlsx');

describe('suite', function () {
    it('case', function (done) {
        var url = "http://oss.sheetjs.com/js-xlsx/test_files/formula_stress_test_ajax.xlsx";
        var options = {
            url: url,
            headers: {
                'Content-Type': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
            },
            encoding: null
        };
        request.get(options, function (err, res, body) {
            var arraybuffer = body;

            /* convert data to binary string */
            var data = arraybuffer;
            //var data = new Uint8Array(arraybuffer);
            var arr = new Array();
            for (var i = 0; i != data.length; ++i) arr[i] = String.fromCharCode(data[i]);
            var bstr = arr.join("");

            /* Call XLSX */
            var sheetName = 'Database';
            var workbook = XLSX.read(bstr, { type: "binary" });
            var worksheet = workbook.Sheets[sheetName];
            var csv = XLSX.utils.sheet_to_csv(worksheet);
            console.log(csv);
            done();
        });
    });
});

Read/write binary data to MongoDB in Node.js

I've been able to successfully write binary data (an image) to MongoDB in Node.js. However, I can't find clear documentation on how to read it back.
Here's how I'm writing the image to MongoDB:
var imageFile = req.files.myFile;
var imageData = fs.readFileSync(imageFile.path);

var imageBson = {};
imageBson.image = new db.bson_serializer.Binary(imageData);
imageBson.imageType = imageFile.type;

db.collection('images').insert(imageBson, { safe: true }, function(err, data) {
    // ...
});
I'd appreciate any pointers on reading the image from Mongo using Node. I'm assuming there's a function like "db.bson_deserializer...". Thanks!
Found the answer:
var imageFile = req.files.myFile;
fs.exists(imageFile.path, function(exists) {
    if (exists) {
        console.log("File uploaded: " + util.inspect(imageFile));
        fs.readFile(imageFile.path, function(err, imageData) {
            if (err) {
                res.end("Error reading your file on the server!");
            } else {
                // when saving an object with an image's byte array
                var imageBson = {};
                //var imageData = fs.readFileSync(imageFile.path);
                imageBson.image = new req.mongo.Binary(imageData);
                imageBson.imageType = imageFile.mimetype;
                console.log("imageBson: " + util.inspect(imageBson));
                req.imagesCollection.insert(imageBson, { safe: true }, function(err, bsonData) {
                    if (err) {
                        res.end({ msg: 'Error saving your file to the database!' });
                    } else {
                        fs.unlink(imageFile.path); // deletes the file from the local disk
                        var imageBson = bsonData[0];
                        var imageId = imageBson._id;
                        res.redirect('images/' + imageId);
                    }
                });
            }
        });
    } else {
        res.end("Oddly your file was uploaded but doesn't seem to exist!\n" + util.inspect(imageFile));
    }
});
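That covers the write side; for actually reading the image back out (the original question), here is a minimal sketch, assuming the same req.imagesCollection and req.mongo as above and an Express-style route. The bytes of a stored BSON Binary are exposed through its buffer property:

app.get('/images/:id', function(req, res) {
    req.imagesCollection.findOne({ _id: new req.mongo.ObjectID(req.params.id) }, function(err, imageBson) {
        if (err || !imageBson) return res.end('Image not found');
        res.setHeader('Content-Type', imageBson.imageType);
        res.end(imageBson.image.buffer); // .buffer holds the raw bytes of the Binary
    });
});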
The MongoDB part isn't complicated. Once a Buffer is in the model, just let the db save it. MongoDB converts that into BinData. 80% of this code is just getting an image into and out of a PNG file.
People say don't store images in MongoDB, but icons/thumbs are tiny. Having said that, it might be a good idea to have an icons collection and only store them once using a hash of the image data as the _id.
model class example
class MyModel {
    _icon: Buffer

    get icon(): Buffer {
        return this._icon
    }
    set icon(value: Buffer) {
        this._icon = value
    }
}
image helper
static async loadImage(url: string) {
    var files = require('../lib/files')
    var buffer = await files.urlContents(url, true)
    return buffer
}

static async saveImage(image: Buffer, path: string) {
    var files = require('../lib/files')
    files.write(path, image.buffer)
    return path
}
files helper
function urlResponse(url, binary) {
    var request = require("request")
    if (binary)
        request = request.defaults({ encoding: null })
    return new Promise(function (resolve, reject) {
        request(url, function (error, res, body) {
            if (error || res.statusCode !== 200 || body.includes('Incomplete response received from application'))
                resolve({ statusCode: res?.statusCode !== 200 ? (res?.statusCode || 500) : 500 });
            else
                resolve(res);
        });
    });
}

async function urlContents(url, binary) {
    var res = await urlResponse(url, binary)
    if (binary)
        return Buffer.from(res.body)
    else
        return res.body
}

function write(fileName, contents) {
    fs.writeFileSync(fileName, contents)
}
mongodb helper
// ...saving (inside an async function; loadImage and findOne are async)
myModel.icon = await loadImage('http://some.site.com/image.png')
collection.insertOne(myModel)

// ...getting
myModel = await collection.findOne(query) // now myModel contains icon
saveImage(myModel.icon, '/home/toddmo/pictures/wow.png')
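To follow up on the store-each-icon-once idea, a rough sketch of a deduplicating insert, using a hash of the image bytes as the _id (the collection and helper names are made up for illustration):

const crypto = require('crypto')

async function saveIcon(iconsCollection, icon /* Buffer */) {
    const hash = crypto.createHash('sha256').update(icon).digest('hex')
    // upsert: identical image data always maps to the same _id
    await iconsCollection.updateOne(
        { _id: hash },
        { $setOnInsert: { image: icon } },
        { upsert: true }
    )
    return hash // store this id on documents that reference the icon
}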

NodeJS/Meteor how to download many files from external server through request.get and future?

I'm currently having a problem when trying to download many images through an external request.get; below is the code:
var future = new Future();
var imagens, nome, blob;

_.each(imoveis, function(dadosImovel, numeroImovel) {
    imagens = dadosImovel.imagens;
    _.each(imagens, function(value, key) {
        // turn the name of the image into a permalink format; this function is working
        nome = Meteor.createPermalinkFromString(value[3]);
        // the link pointing to the image
        blob = Meteor.getImage(value[0]);
        Meteor.saveImage(blob, nome, '.jpeg');
    });
});
// Get an image through a url
Meteor.getImage = function(url) {
    var options = {
        url: url,
        encoding: null
    };
    // get raw image
    request.get(options, function(error, result, body) {
        if (error) {
            return console.error(error);
        }
        // pause until binaries are fully loaded
        future['return'](body);
    });
    return future.wait()
};
// save an image in a server folder
Meteor.saveImage = function(name, blob, ext, encoding) {
    var projectPath = basepath.resolve('.').split('.meteor')[0],
        chroot = Meteor.chroot || projectPath + 'public', // (process.env['PWD'] + '/public')
        path = chroot + (path ? '/' + path + '/' : '/'),
        name = Meteor.cleanName(name || 'file'),
        encoding = encoding || 'binary';

    // TODO Add file existence checks, etc...
    fs.writeFile(path + name + ext, blob, encoding, function(err) {
        if (err) {
            console.log(err);
            throw (new Meteor.Error(500, 'Failed to save file.', err));
        } else {
            console.log('The file ' + name + ' (' + encoding + ') was saved to ' + path);
        }
    });
    return true;
}
Now here is what happens: in the first iteration of the loop I receive the image just fine; the problem appears in the following iterations.
If I have 10 images, all of them are saved under 10 different names (and they also have 10 links pointing to the correct image URLs), but when you view the images, it's the first image of the list that has been saved for all the other 9. It's as if, once the first image is loaded, Meteor doesn't wait for the next binaries/images to arrive before saving them.
Any thoughts on how to fix this?
Changing the
var future = new Future();
from the outer scope to inside the getImage function fixed the issue with the images, but created another one.
My current code is
_.each(imoveis, function(dadosImovel, numeroImovel) {
    var imagens = dadosImovel.imagens;
    _.each(imagens, function(value, key) {
        var nome = Meteor.createPermalinkFromString(value[3]);
        var blob = Meteor.getImage(value[0]);
        Meteor.saveImage(blob, nome, '.jpeg');
    });
});
And now I have an infinite outer loop, saving the set of images from the first item of the loop again and again.
For reference (in case this might help someone in the future), this is the code I used to make this work:
// imovel[0] = link, imovel[1] = width, imovel[2] = height, imovel[3] = title, imovel[4] = codigo
var futures = _.map(links, function(imovel) {
    var future = new Future();
    var onComplete = future.resolver();

    var options = {
        url: imovel[0],
        encoding: null
    };
    // get raw image
    request.get(options, function(error, result, body) {
        if (error) {
            return console.error(error);
        }
        var nome = Meteor.createPermalinkFromString(imovel[3]);
        var data = { 'codigo': imovel[4], 'blob': body, 'nome': nome };
        onComplete(error, data);
    });
    return future;
});

// wait for all futures to finish
Future.wait(futures);

// and grab the results out
links = _.invoke(futures, 'get');
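For anyone not using fibers/Future, the same fan-out-then-wait pattern can be written with plain Promises; a rough sketch assuming the same request module and value layout as above:

var promises = _.map(links, function(imovel) {
    return new Promise(function(resolve, reject) {
        request.get({ url: imovel[0], encoding: null }, function(error, result, body) {
            if (error) return reject(error);
            resolve({
                codigo: imovel[4],
                blob: body, // encoding: null keeps the body a raw Buffer
                nome: Meteor.createPermalinkFromString(imovel[3])
            });
        });
    });
});

// wait for every download, then save; results keep the order of links
Promise.all(promises).then(function(results) {
    results.forEach(function(data) {
        Meteor.saveImage(data.blob, data.nome, '.jpeg');
    });
});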
