How to decode chunked data in Node.js?

I am receiving a PDF file from a node server (it is running jsreport on this server) and I need to download this PDF on the client (I am using React on the client), but the problem is that when I download the file, it comes out all blank and the title shows some strange symbols. After a lot of tests and research, I found that the problem may be that the file is coming encoded as chunked (I can see that in the headers of the response) and I need to decode it to become a file again.
So, how do I decode this chunked string back to a file?
On the client I am just downloading the file that comes in the response:
handleGerarRelatorioButtonClick() {
    axios.post(`${REQUEST_URL}/relatorios`, this.state.selectedExam).then((response) => {
        fileDownload(response.data, this.state.selectedExam.cliente.nome.replace(' ', '_') + ".pdf");
    });
}
On my server, I am making a request to jsreport, which is another node server, and it returns the report as a PDF:
app.post('/relatorios', (request, response) => {
    var exame = new Exame(request.body);
    var body = {
        "template": {
            "shortid": "S1C9birB-",
            "data": exame
        }
    };
    var options = {
        hostname: 'localhost',
        port: 5488,
        path: '/api/report',
        method: 'POST',
        headers: {
            'Content-Type': 'application/json'
        }
    };
    var bodyparts = [];
    var bodylength = 0;
    var post = http.request(options, (res) => {
        res.on('data', (chunk) => {
            bodyparts.push(chunk);
            bodylength += chunk.length;
        });
        res.on('end', () => {
            var pdf = new Buffer(bodylength);
            var pdfPos = 0;
            for (var i = 0; i < bodyparts.length; i++) {
                bodyparts[i].copy(pdf, pdfPos, 0, bodyparts[i].length);
                pdfPos += bodyparts[i].length;
            }
            response.setHeader('Content-Type', 'application/pdf');
            response.setHeader('Content-disposition', exame._id + '.pdf');
            response.setHeader('Content-Length', bodylength);
            response.end(Buffer.from(pdf));
        });
    });
    post.write(JSON.stringify(body));
    post.end();
});
I am sure that my report is being rendered as expected because if I make a request from Postman, it returns the PDF just fine.

Your solution is simply relaying data chunks, but you are not telling your front end what to expect of these chunks or how to assemble them. At a minimum you should be setting the Content-Type response header to application/pdf and, to be complete, should also be sending Content-Disposition as well as Content-Length. You may need to collect the PDF from your 3rd-party source into a buffer and then send that buffer to your client if you are not able to set the headers and pipe to the response successfully.
[edit] - I'm not familiar with jsreport, but it is possible (and likely) that the response it sends is a buffer. If that is the case, you could use something like this in place of your response to the client:
myGetPDFFunction(params, (err, res) => {
    if (err) {
        // handle it
    } else {
        response.writeHead(200, {
            'Content-Type': 'application/pdf',
            'Content-Length': [your buffer's content length]
        });
        response.end(Buffer.from([the res PDF buffer]));
    }
});
What you haven't shown is the request made to obtain that PDF, so I can't be more specific at this time. You should look into the documentation of jsreport to see what it sends in its response, and you can also read up on Node.js buffers.
This is rough pseudo code, but the point is to respond with the PDF buffer after setting the headers to their proper values.
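To make that concrete, here is a minimal sketch of the buffer-and-relay approach applied to the route from the question; the 200 status, the attachment filename, and the use of Buffer.concat are my own assumptions rather than anything jsreport dictates:
// Collect the upstream PDF into one Buffer, then relay it with explicit
// headers so the browser knows what it is receiving.
app.post('/relatorios', (request, response) => {
    var exame = new Exame(request.body);
    var options = {
        hostname: 'localhost',
        port: 5488,
        path: '/api/report',
        method: 'POST',
        headers: { 'Content-Type': 'application/json' }
    };
    var post = http.request(options, (res) => {
        var chunks = [];
        res.on('data', (chunk) => chunks.push(chunk));
        res.on('end', () => {
            var pdf = Buffer.concat(chunks); // replaces the manual copy loop
            response.writeHead(200, {
                'Content-Type': 'application/pdf',
                'Content-Disposition': 'attachment; filename="' + exame._id + '.pdf"',
                'Content-Length': pdf.length
            });
            response.end(pdf);
        });
    });
    post.write(JSON.stringify({ template: { shortid: 'S1C9birB-', data: exame } }));
    post.end();
});
Buffer.concat also sidesteps the deprecated new Buffer(size) constructor used in the question.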

Related

How to read and save pdf file from response in Node JS client

I have created a Node JS server that pushes the PDF to the client as below:
app.post("/home", function(req, res, next) {
// to download the file automatically
/*
var file = fs.createReadStream('E:\\test\\conversion\\310-output.pdf');
var stat = fs.statSync('E:\\test\\conversion\\310-output.pdf');
res.setHeader('Content-Length', stat.size);
res.setHeader('Content-Type', 'application/pdf');
res.setHeader('Content-Disposition', 'attachment; filename=output.pdf');
file.pipe(res);
*/
// To Stream file to browser
var data =fs.readFileSync('E:\\test\\conversion\\310-output.pdf');
res.contentType("application/pdf");
res.send(data);
});
In the client script, I am trying to call the above POST endpoint, fetch the PDF, and save it locally from the response, but I am not sure how to do it. Below is the client script. In the browser I was able to see the PDF, but I need to access it through the client script:
var request = require('request');
request.post(
    'http://localhost:3000/home',
    {},
    function (error, response, body) {
        if (!error && response.statusCode == 200) {
            console.log(" Response :: " + response);
            // console.log(" Body :: " + body);
        }
    }
);
To receive your PDF response in the proper format, mention "responseType": 'arraybuffer' and "responseEncoding": 'binary' while calling the API:
const axios = require("axios");
const fs = require("fs");

const config = {
    baseURL: "<your base url>",
    url: "<api endpoint>",
    headers: {},
    method: "<method>",
    responseType: "arraybuffer",
    responseEncoding: "binary"
};

// inside an async function
let response = await axios(config);
// response.data will be a binary encoded buffer of the PDF data

// to convert to base64
var base64 = response.data.toString("base64");
// to convert to binary
var binary = response.data.toString("binary");
// to write to a file
fs.writeFileSync(`${name}.pdf`, response.data, "binary");
To download a file with the request lib, there are many ways to do it (I think so!).
Below is a simple way: create a pipe stream from the request response, then save the data from the stream to a file with a write stream:
const request = require('request');
const fs = require('fs');
const req = request.post(
    'http://localhost:3000/home',
    {},
);
req.on('response', function (res) {
    res.pipe(fs.createWriteStream('./filename_to_save.pdf'));
});

Stream binary file with http post

I'm using the request library to send a binary (PDF) file in the body of the request using HTTP POST (NOTE: this API does not accept multipart forms). However, I have only been able to get it to work using fs.readFileSync(). For some reason, when I try to use fs.createReadStream() the PDF file is still sent, but it is empty, and the request never finishes (I never get a response back from the server).
Here is my working version using fs.readFileSync():
const request = require('request');
const fs = require('fs');
const filename = 'test.pdf';
request({
    url: 'http://localhost:8083/api/v1/endpoint',
    method: 'POST',
    headers: {
        'Content-Type': 'application/octet-stream',
        'Accept': 'application/vnd.api+json',
        'Content-Disposition': `file; filename="${filename}"`
    },
    encoding: null,
    body: fs.readFileSync(filename)
}, (error, response, body) => {
    if (error) {
        console.log('error:', error);
    } else {
        console.log(JSON.parse(response.body.toString()));
    }
});
If I try to replace the body with the below, it doesn't work:
body: fs.createReadStream(filename)
I have also tried piping the read stream into the http request, like it says in the request library docs, but I get the same result:
fs.createReadStream(filename).pipe(request({...}))
I've tried to monitor the stream by doing the following:
var upload = fs.createReadStream('test.pdf');
upload.pipe(req);
var upload_progress = 0;
upload.on("data", function (chunk) {
upload_progress += chunk.length
console.log(new Date(), upload_progress);
})
upload.on("end", function (res) {
console.log('Finished');
req.end();
})
I see progress for the stream and 'Finished' logged, but still no response is returned from the API.
I'd prefer to use a read stream because of how much better it works with larger files, but I am clueless as to what is going wrong. I am making sure I'm not altering the file with any special encoding as well.
Is there some way to get some kind of output to see what process is taking forever?
UPDATE:
I decided to test with a simple 1 KB .txt file. I found that the body is still empty using fs.createReadStream(); however, this time I got a response back from the server. The test PDF I'm working with is 363 KB, which isn't outrageous in size, but still... weren't streams made for large files anyway? Using fs.readFileSync() also worked fine for the text file.
I'm beginning to wonder if this is a synchronous vs. asynchronous issue. I know that fs.readFileSync() is synchronous. Do I need to wait until fs.createReadStream() finishes before trying to append it to the body?
I was able to get this working by doing the following:
const request = require('request');
const fs = require('fs');
const filename = 'test.pdf';
const readStream = fs.createReadStream(filename);
let chunks = [];
readStream.on('data', (chunk) => chunks.push(chunk));
readStream.on('end', () => {
    const data = Buffer.concat(chunks);
    request({
        url: 'http://localhost:8083/api/v1/endpoint',
        method: 'POST',
        headers: {
            'Content-Type': 'application/octet-stream',
            'Accept': 'application/vnd.api+json',
            'Content-Disposition': `file; filename="${filename}"`
        },
        encoding: null,
        body: data
    }, (error, response, body) => {
        if (error) {
            console.log('error:', error);
        } else {
            console.log(JSON.parse(response.body.toString()));
        }
    });
});
I collected the chunks and concatenated them into a single buffer before making the request.
I noticed the documentation says this:
The Buffer class was introduced as part of the Node.js API to enable interaction with octet streams in TCP streams, file system operations, and other contexts.
The API I'm calling requires the application/octet-stream header, so I need to use the buffer rather than streaming it directly.
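That said, if you do want to keep streaming larger files, a common workaround (an assumption here, untested against this particular API) is to supply the Content-Length yourself via fs.stat; with an explicit length, request does not fall back to 'Transfer-Encoding': 'chunked', which some servers reject:
const fs = require('fs');
const request = require('request');

const filename = 'test.pdf';
fs.stat(filename, (err, stats) => {
    if (err) throw err;
    // Pipe the file into the request; the explicit Content-Length
    // header keeps the upload from using chunked transfer encoding.
    fs.createReadStream(filename).pipe(request({
        url: 'http://localhost:8083/api/v1/endpoint',
        method: 'POST',
        headers: {
            'Content-Type': 'application/octet-stream',
            'Accept': 'application/vnd.api+json',
            'Content-Length': stats.size,
            'Content-Disposition': `file; filename="${filename}"`
        },
        encoding: null
    }, (error, response, body) => {
        if (error) {
            console.log('error:', error);
        } else {
            console.log(JSON.parse(body.toString()));
        }
    }));
});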

How do I upload a file using multipart/form-data header with native Node.js https.request?

I am trying to upload a file from my computer using the native https.request from Node.js. My code looks like this:
let query = require('https').request({
    hostname: 'somehost.com',
    path: '/path/to/upload',
    headers: {'Content-Type': 'multipart/form-data'},
    method: 'POST'
}, (res) => {
    let data = '';
    res.on("data", (chunk) => {
        data += chunk.toString('utf8');
    });
    res.on('end', () => {
        console.log(data);
    });
});
query.on("error", (e) => {
    console.error(e);
});
query.write(Buffer[
    {key1: 1, file: require("fs").createReadStream("/path/to/file.txt")}
]); // I don't know what to put here
query.end();
I don't get any response from the host, and the file fails to upload. How can I do this?
When uploading multipart/form-data, the Content-Type header must include a boundary in order to indicate where each "part" lives within the posted data. To set the boundary for the multipart Content-Type, you can use the form-data package from npm. You could set the header/boundary manually, but the form-data package will handle this for you and free you from having to worry about the details.
In order to use form-data in your example, you will need to do the following:
Create a new FormData object and append the relevant parts:
const FormData = require('form-data');
let formData = new FormData();
formData.append('key1', 1);
formData.append('file', require("fs").createReadStream("/path/to/file.txt"));
Use the getHeaders function to build the correct HTTP headers:
require('https').request({
    headers: formData.getHeaders(),
    // ...
});
Use pipe in order to allow form-data to process your data and send it to the server:
formData.pipe(query);
With this change, you no longer need your calls to query.write or query.end - the call to pipe takes care of that.
For completeness, here's the final code with the changes I described:
const FormData = require('form-data');
let formData = new FormData();
formData.append('key1', 1);
formData.append('file', require("fs").createReadStream("/path/to/file.txt"));
let query = require('https').request({
    hostname: 'somehost.com',
    path: '/path/to/upload',
    method: 'POST',
    headers: formData.getHeaders()
}, (res) => {
    let data = '';
    res.on("data", (chunk) => {
        data += chunk.toString('utf8');
    });
    res.on('end', () => {
        console.log(data);
    });
});
query.on("error", (e) => {
    console.error(e);
});
formData.pipe(query);

Send new request response with Node HTTP server

Stack Overflow community, greetings. I'm trying to relay the response of a new request back to the client using the Node HTTP module, for a basic autocomplete search app for my website (i.e., using Node as a proxy that transforms and redirects requests within the server).
The flow basically is:
Client Browser - Node - ElasticSearch - Node - Client Browser
I've started with:
1. Listen to requests with http.createServer(function (req, res)).
2. Get the body from the req object and use it in a new request with http.request(options, function (newReqResponse)).
3. Get the body from that newReqResponse object and send it back to the client on the res object.
The problem is that the content of newReqResponse is always outdated (it trails behind the last typed character), e.g.:
If I type "te", the content of newReqResponse corresponds to that if I had typed only "t".
If I type "test", it corresponds to that if I had typed "tes".
And so on.
I've tried to solve it using Node.js streams and using the file system module to write and read files synchronously and asynchronously, but the result is the same. Here's a sample of the whole picture:
var http = require('http');
var fs = require('fs');
http.createServer(function (req, res) {
    var reqBody = '';
    var newReqResponseBody = "";
    req.on('data', function (chunk) {
        reqBody += chunk;
        fs.writeFile('reqbody.json', reqBody, function(err) {
            if (err) {
                throw err;
            }
        });
        var options = {
            hostname: '127.0.0.1',
            port: 9500,
            method: 'GET',
            path: '/_search',
            headers: {
                host: 'es',
                'content-length': Buffer.byteLength(reqBody),
                'content-type': 'application/json',
                accept: 'application/json'
            },
        };
        var newReq = http.request(options, function (newReqResponse) {
            newReqResponse.setEncoding("UTF-8");
            newReqResponse.on('data', function (ch) {
                newReqResponseBody += ch;
            });
            newReqResponse.on("end", function() {
                fs.writeFile("newReqResponseBody.json", newReqResponseBody, function(err) {
                    if (err) {
                        throw err;
                    }
                });
            });
        });
        newReq.on("error", function(err) {
            console.log(`problem with request: ${err.message}`);
        });
        newReq.write(reqBody);
        newReq.end();
    });
    req.on('end', function() {
        var responseBody = fs.readFileSync('newReqResponseBody.json', 'utf8');
        console.log(responseBody);
        res.end(responseBody);
    });
}).listen(3000, '127.0.0.1');
Is there a workaround to work with requests and responses within the http server? If there isn't, I'll be very grateful if you give me any directions on how to solve this.
Since the planned use for Node is rather basic, I prefer to stick with core modules rather than having to get new ones from npm, unless it's necessary.
Thanks in advance.
EDIT:
All I had to do was call res.end(responseBody) within the newReqResponse.on("end") callback, which is totally counterintuitive to me, but... it works.
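In other words (a minimal sketch of that fix, reusing res and the options object from the code above):
var newReq = http.request(options, function (newReqResponse) {
    var newReqResponseBody = '';
    newReqResponse.on('data', function (ch) {
        newReqResponseBody += ch;
    });
    newReqResponse.on('end', function () {
        // answer the original client only after the proxied
        // response has fully arrived
        res.end(newReqResponseBody);
    });
});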
Glad you solved your own problem. However, I see room for improvement (not sure if you're new to this), especially if you're transferring data, which you can do with streams.
Note that I didn't calculate the content length: you're asked not to send it in this specific case, because streams pass the data in chunks with the 'Transfer-Encoding': 'chunked' header, and according to the HTTP specification a Content-Length would be ignored.
const fs = require('fs');
const http = require('http');
const options = {
    hostname: '127.0.0.1',
    port: 9500,
    method: 'GET',
    path: '/_search',
    headers: {
        'Content-Type': 'application/json'
    }
};
http.createServer((req, res) => {
    req.pipe(fs.createWriteStream('reqBody.json'));
    let request = http.request(options, (newRes) => {
        newRes.pipe(res);
    });
    fs.createReadStream('reqBody.json').pipe(request);
    res.setHeader('Content-Type', 'application/json');
}).listen(3000, '127.0.0.1');
You can shorten this snippet further if you don't need the request body saved for later and only want to pipe the req stream into request; a condensed version follows below.
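Here is that condensed variant as a minimal sketch (my own condensation, reusing the options object from above):
// Skip the temporary file entirely: pipe the incoming request body
// straight into the proxied request, and the proxied response back out.
http.createServer((req, res) => {
    res.setHeader('Content-Type', 'application/json');
    req.pipe(http.request(options, (newRes) => newRes.pipe(res)));
}).listen(3000, '127.0.0.1');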

NodeJS: sending/uploading a local file to a remote server

I have used the Winston module to create a daily log file for my offline app. I now need to be able to send or upload that file to a remote server via POST (that part already exists).
I know I need to write the file in chunks so it doesn't hog the memory, so I'm using fs.createReadStream; however, I only seem to get a 503 response, even when sending just sample text.
EDIT
I worked out that the receiver was expecting the data to be named 'data'. I have removed the createReadStream as I could only get it to work with 'application/x-www-form-urlencoded' and a synchronous fs.readFileSync. If I change this to 'multipart/form-data' on the PHP server, would I be able to use createReadStream again, or is that only if I change to physically uploading the JSON file?
I've only been learning node for the past couple of weeks so any pointers would be gratefully received.
var http = require('http'),
    fs = require('fs');
var post_options = {
    host: 'logger.mysite.co.uk',
    path: '/',
    port: 80,
    timeout: 120000,
    method: 'POST',
    headers: {
        'Content-Type': 'application/x-www-form-urlencoded'
    }
}
var sender = http.request(post_options, function(res) {
    if (res.statusCode < 399) {
        var text = ""
        res.on('data', function(chunk) {
            text += chunk
        })
        res.on('end', function(data) {
            console.log(text)
        })
    } else {
        console.log("ERROR", res.statusCode)
    }
})
var POST_DATA = 'data={['
POST_DATA += fs.readFileSync('./path/file.log').toString().replace(/\,+$/,'')
POST_DATA += ']}'
console.log(POST_DATA)
sender.write(POST_DATA)
sender.end()
After a gazillion trial-and-error attempts, this worked for me, using FormData with node-fetch. Oh, and request was deprecated two days ago, btw.
const FormData = require('form-data');
const fetch = require('node-fetch');

function uploadImage(imageBuffer) {
    const form = new FormData();
    form.append('file', imageBuffer, {
        contentType: 'image/jpeg',
        filename: 'dummy.jpg',
    });
    return fetch('https://myserver.cz/upload', { method: 'POST', body: form });
}
In place of imageBuffer there can be numerous things. I had a buffer containing the image, but you can also pass the result of fs.createReadStream('/foo/bar.jpg') to upload a file from disk, as in the sketch below.
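For instance, here is a minimal sketch of that stream variant; the file path and the field name 'data' (taken from the question's EDIT) are assumptions on my part:
const fs = require('fs');
const FormData = require('form-data');
const fetch = require('node-fetch');

// Stream the log file from disk instead of buffering it in memory;
// the field name 'data' matches what the receiver in the question expects.
const form = new FormData();
form.append('data', fs.createReadStream('./path/file.log'), {
    contentType: 'text/plain',
    filename: 'file.log',
});
fetch('http://logger.mysite.co.uk/', { method: 'POST', body: form })
    .then((res) => console.log('status:', res.status))
    .catch((err) => console.error('upload failed:', err));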
copied from https://github.com/mikeal/request#forms
var fs = require('fs');
var path = require('path');
var request = require('request');
var r = request.post('http://service.com/upload', function optionalCallback (err, httpResponse, body) {
    if (err) {
        return console.error('upload failed:', err);
    }
    console.log('Upload successful! Server responded with:', body);
})
var form = r.form()
form.append('my_field1', 'my_value23_321')
form.append('my_field2', '123123sdas')
form.append('my_file', fs.createReadStream(path.join(__dirname, 'doodle.png')))
Have a look at the request module. It gives you the ability to stream a file in POST requests, as in the brief sketch below.
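A minimal sketch of that streaming upload (the endpoint is the one from the question, used here as an assumption):
const fs = require('fs');
const request = require('request');

// Stream the log file as the raw body of a POST request.
fs.createReadStream('./path/file.log')
    .pipe(request.post('http://logger.mysite.co.uk/'));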
