How to set POST encoding with request module on node.js? - node.js

I'm using the request module on Node.js, but there is a problem with the encoding option. The code beneath is a simple POST request, but I don't know how to set the encoding of the form field data. I already set the headers to 'Content-Type': 'application/x-www-form-urlencoded; charset=euc-kr', but it doesn't work. The field data is Korean, like "안녕하세요", and I should post it with EUC-KR encoding. (The site takes EUC-KR, not UTF-8.)
The same program on Java application, I coded like this :
PrintWriter wr = new PrintWriter(new OutputStreamWriter(conn.getOutputStream(), "euc-kr"));
But I don't know how to in nodejs. Can anyone give some solution...?
Code Sample
// Load the request module.
var request = require('request');

// Build the POST options up front so the call site stays readable.
// The charset is declared in the Content-Type header, but note that
// the form body itself is still serialized by request.
var options = {
  url: 'http://google.com', // URL to hit
  headers: { 'Content-Type': 'application/x-www-form-urlencoded; charset=euc-kr' },
  method: 'POST',
  form: {
    field1: 'data',
    field2: 'data'
  }
};

request(options, function (error, response, body) {
  if (error) {
    console.log(error);
  } else {
    console.log(response.statusCode, body);
  }
});

Finally I found a solution and solved this problem.
If you send data as a form using the request module, the module forces your form encoding to UTF-8. So even if you set your form encoding to another charset, the module changes it back to UTF-8. You can see that in request.js on lines 1120-1130.
So, You'd better send a data by 'body' option, not 'form' option.

Node doesn't support EUC-KR so you can use iconv-lite to extend the native encodings available and set the encoding option in request.
List of Natively Supported Encodings
iconv.extendNodeEncodings(); only works for Node versions before v4. See here to get this working on a newer version of Node.
var iconv = require('iconv-lite');
var request = require('request');

// Register iconv-lite's extra charsets (e.g. EUC-KR) with Node's
// native encoding machinery so request can use them by name.
iconv.extendNodeEncodings();

var options = {
  url: 'http://google.com', // URL to hit
  method: 'POST',
  form: {
    field1: 'data',
    field2: 'data'
  },
  encoding: 'EUC-KR'
};

request(options, function (error, response, body) {
  if (error) {
    console.log(error);
  } else {
    console.log(response.statusCode, body);
  }
});

// Restore Node's original set of encodings once the request is queued.
iconv.undoExtendNodeEncodings();

Related

Send unparsed data in a Node JS request

I want to send a POST request (for example, with the 'request' module), but I don't find a way of sending unparsed data*.
*unparsed data => copied directly from the Chrome dev tool. Something like: tipo_accion=3&filtro=descarga&fecha=actual
It would also do the trick some way of translating that string to JSON.
I've tried so far with...
var request = require('request');

// Pass the raw, pre-encoded query string straight through as the form payload.
var payload = 'tipo_accion=3&filtro=descarga&fecha=actual';

request.post(
  { url: 'https://target.com/form/', form: payload },
  function (error, response, body) {
    console.log(body);
  }
);
... but it didn't work.
Firstly you should understand the difference between the request methods post and get.
The structure that you want to send:
tipo_accion=3&filtro=descarga&fecha=actual is telling me that you want to use a get request. So the proper code for that will be something like:
// GET request: the first query-string parameter must follow a '?', not
// a '&' — '&' only separates subsequent key=value pairs. The original
// URL ('.../form/&tipo_accion=...') would send the parameters as part
// of the path instead of the query string.
request(
  'https://target.com/form/?tipo_accion=3&filtro=descarga&fecha=actual',
  function (error, response, body) {
    console.log(body);
  }
);
But if it is a post request then you should use the json format
// POST request with a form payload.
// FIX: in the original snippet the options object was never closed
// before the callback — the callback ended up nested inside the options
// literal as a keyless function expression, which is a syntax error.
request.post({
  url: 'https://target.com/form/',
  form: {
    tipo_accion: 3,
    filtro: 'descarga',
    fecha: 'actual'
  }
}, function (error, response, body) {
  console.log(body);
});
You can convert the form data to string format — it works for me, you can try it:
const request = require('request-promise');

const jar = request.jar();

// Pre-encoded form body as a single query string of key=value pairs.
// FIX: the original declared `data_fom` but referenced `data_form`,
// which threw a ReferenceError at runtime.
const dataForm = `a=value_a&b=value_b&c=value_c`;

request({
  url: 'https://target.com/form/',
  jar: jar,
  method: 'POST',
  resolveWithFullResponse: true,
  form: dataForm
}).catch(function (err) {
  // request-promise returns a promise; don't leave rejections unhandled.
  console.log(err);
});

Setting charset in post request on nodejs

I want to send post form data to some website using euc-kr charset by request module. And I use iconv-lite module too because nodejs supported charset isn't plenty.
Anyway, the website uses the EUC-KR charset, so I have to handle the form data's encoding (Node's default charset is UTF-8). But it doesn't work well; I have tried changing the options many times but I am stuck now, so could you give me some hints?
// added module request, iconv-lite(extendNodeEncoding) already.
// Posts a Korean-language document form to the Naver finance board.
// Assumes request and iconv-lite (with extendNodeEncodings applied)
// were loaded earlier in the file.
function postDocumentForm() {
  // Assemble the options first, then fire the POST.
  var options = {
    url: 'http://finance.naver.com/item/board_act.nhn', // URL to hit
    headers: {
      'Content-Type': 'content=text/html; charset=euc-kr'
    },
    method: 'POST',
    encoding: 'euc-kr',
    form: {
      code: '000215',
      mode: 'write',
      temp: '',
      keyCount: '0',
      title: "폼 데이터 중 일부가 한글일 때",
      opinion: '0',
      body: '인코딩이 제대로 되지 않고 있음!'
    }
  };

  request(options, function (error, response, body) {
    if (error) {
      console.log(error);
      return;
    }
    iconv.undoExtendNodeEncodings();
    console.log(response.statusCode, response.body);
  });
}
And here is result, odd characters.
I tried :
euc-kr to binary
euc-kr to null
euc-kr to utf-8
delete encoding option
delete request header
Finally I found a solution and solved this problem.
If you send data as a form using the request module, the module forces your form encoding to UTF-8. So even if you set your form encoding to another charset, the module changes it back to UTF-8. You can see that in request.js on lines 1120-1130.
So, You'd better send a data by 'body' option, not 'form' option.
(as a querystring)
body: "someKey=someValue&anotherKey=anotherValue...."
Notice how it handles receiving back the body encoding
// FIX: the original assigned `iconv = require(...)` without a declaration,
// creating an implicit global; declare it properly.
var iconv = require('iconv-lite');

// Posts the form and decodes the raw response body manually.
function postDocumentForm() {
  // encoding: null makes request hand back the raw response as a Buffer
  // instead of force-decoding it as UTF-8, so we can decode it ourselves.
  request({
    url: 'http://finance.naver.com/item/board_act.nhn', // URL to hit
    headers: {
      'Content-Type': 'content=text/html; charset=euc-kr'
    },
    method: 'POST',
    encoding: null,
    form: {
      code: '000215',
      mode: 'write',
      temp: '',
      keyCount: '0',
      title: "폼 데이터 중 일부가 한글일 때",
      opinion: '0',
      body: '인코딩이 제대로 되지 않고 있음!'
    }
  }, function (error, response, body) {
    if (error) {
      console.log(error);
    } else {
      console.log(response.statusCode);
      // FIX: `new Buffer(...)` is deprecated and unsafe; use Buffer.from.
      var utf8String = iconv.decode(Buffer.from(body), "ISO-8859-1");
      console.log(utf8String);
    }
  });
}
After reading the source code for several hours, I finally found a simple solution.
First, write your encoding function, which input a string and output a encoded string:
const urlencode = require('urlencode');

// Percent-encodes a string using the GB2312 (Chinese) charset.
const encoder = (s) => urlencode(s, 'gb2312');
Here is a Chinese encoder based on urlencode
Then just add a qsStringifyOptions option while posting:
// Same POST as before, but every form field is percent-encoded through
// our custom charset encoder instead of request's default UTF-8 one.
var postOptions = {
  url: 'http://finance.naver.com/item/board_act.nhn', // URL to hit
  headers: {
    'Content-Type': 'content=text/html; charset=euc-kr'
  },
  method: 'POST',
  encoding: null,
  form: {
    code: '000215',
    mode: 'write',
    temp: '',
    keyCount: '0',
    title: "폼 데이터 중 일부가 한글일 때",
    opinion: '0',
    body: '인코딩이 제대로 되지 않고 있음!'
  },
  // Hands the query-string serializer our encoder function.
  qsStringifyOptions: {
    encoder: encoder
  }
};

request(postOptions);

how to send a pdf base64 variable through a nodejs request?

Trying to send a base64 pdf string with request, but I can't seem to figure out the proper structure of the request. Thanks in advance for the help!
var dpdf = pdfvar.toString('base64');

var options = {
  method: 'POST',
  body: dpdf,
  url: FILEPICKER_URL,
  // FIX: the request module expects `headers` to be a plain object
  // mapping header names to values. The original array of
  // { name, value } pairs is not understood, so no content-type header
  // was actually sent.
  headers: {
    'content-type': 'application/pdf'
  }
};

request(options, function (err, httpResponse, body) {
  console.log('body: ', body);
  console.log('code ', httpResponse.statusCode);
});
The other side is expecting a PDF
application/pdf
and not a BASE64 representation of it.
Anyway, looking at what you are trying to do, without necessarily understanding how you are trying to do it... I would try and append a data url compatible header to your string like so :
var dpdf = 'data:application/pdf;base64,' + pdfvar.toString('base64')

Submit POST form with latin1 charset in Node.js

Short version of the problem : in my Node.js app using Sails.js framework, I'd like to send data (simple UTF-8 text charset) to a distant server POST form, which has ISO-8859-15 charset.
Details : using the request package, the form is submitted this way :
var postFields = {
  'message': message, // <== this var contains chars such as é,à,ç, etc.
  'submit': 'Send'
};

var opts = {
  uri: url,
  headers: {
    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:33.0) Gecko/20100101 Firefox/33.0',
    'Cookie': cookie,
    'Content-type': "application/x-www-form-urlencoded; charset=ISO-8859-15"
  },
  form: postFields
};

request.post(opts, function (error, response, body) {
  // Report success to the caller only for an error-free HTTP 200.
  var succeeded = !error && response.statusCode == 200;
  return callback(null, succeeded);
});
This works fine, my form is sent successfully, except the "é" chars turned into ugly "Ã©" on the distant server. In the 'Content-type' property of the opts object, I tried setting the charset to ISO-8859-15 / UTF-8 / none, and text/html or application/x-www-form-urlencoded — it still doesn't work...
Don't know if this helps, but here is what I have to do :
1) get data from the distant server (latin1 encoded), then convert it to UTF-8 text :
var iconv = require('iconv-lite');

// FIX: the original snippet had one closing brace too many, which made
// it a syntax error.
request(options, function (error, response, body) {
  // The remote server sends latin1 (ISO-8859-15) bytes; decode them
  // into a proper JavaScript string before handing them to the caller.
  var decodedBody = iconv.decode(body, 'iso-8859-15');
  return callback(null, decodedBody);
});
Great, now I have beautiful accents.
2) update this data, then send it back to the distant server using the first code block above.
Actually I used to do exactly the same operations with PHP, which worked fine : instead of sending the "raw" UTF-8 text message (postFields object), it was passed into the utf8_decode() func and the distant server displayed correctly the accents.
I gave a try to js implementation of this function => the accents are correctly sent, but this time all other normal chars are turned weird...
Hoping this is clear enough, if you need more details, code, or practical examples : no problem! Thanks for reading :)
Well, after 3 years and 10 months I faced a similar problem but I managed to find a solution!
To resolve this, you need to use urlencode and body instead of form, like this:
const urlencode = require('urlencode');
const request = require('request');

const fields = {
  message: 'é à ç'
};

// Serialize the fields ourselves with a latin1 charset and send the
// result via `body`; using `form` would force UTF-8 re-encoding.
const encodedBody = urlencode.stringify(fields, {
  charset: 'latin1'
});

const opts = {
  uri: 'https://localhost',
  headers: {
    'Content-type': 'application/x-www-form-urlencoded; charset=ISO-8859-1'
  },
  body: encodedBody
};

request.post(opts, (err, res, body) => {
  if (err) {
    throw err;
  }
  console.log(res.statusCode);
});
This is because request encodes form fields using UTF-8 so you never get Latin-1 characters even if you encode them before (you'll end up with re-encoded characters).
I've tested this and it works as expected, so I hope this helps someone else!

Images are not proxied correctly when using NodeJS + Request module

I have a proxy that works well (as far as I can tell) UNTIL I attempt to proxy images (or perhaps any binary data?). By my estimation, the code below should work, but it does not. I'm sure I'm doing something obviously dumb, but I've been digging through forums and APIs and I have yet to hit upon the correct approach.
The 'core' of my proxy looks something like the following.
// Core of the proxy handler: forwards the incoming request upstream
// and pipes the upstream body back to the client.
// NOTE(review): incomplete fragment ('...' placeholder) — kept verbatim.
function(req, res) {
...
options = {
url: 'a url',
headers: {
'Authorization': auth
}
};
request(options,
function(e, r, b){
// Treat anything whose content-type mentions 'image' as binary.
var encoding = (r.headers['content-type'].indexOf('image') === -1) ? 'utf8' : 'binary';
// Mirror the upstream length and type headers to the client.
res.writeHead(200, {
'Content-Length': r.headers['content-length'],
'Content-Type': r.headers['content-type']
});
if (encoding === 'binary') {
// NOTE(review): by default request has already decoded `b` as a
// UTF-8 string, so wrapping it in a Buffer here cannot restore the
// original binary bytes — this is the corruption the question is
// about (fixed below with encoding: null).
b = new Buffer(b);
}
res.end(b, encoding);
});
}
What am I missing here?
Thanks in advance for any and all help!
My problem was not with the response (as I thought originally), but rather the fact that the request module was encoding its response body to Unicode by default; when that is disabled (encoding: null), the response body is returned as a Buffer which is easily consumed by the response.
// Fixed proxy core: encoding: null stops request from decoding the
// upstream body as UTF-8, so `b` arrives as a raw Buffer and binary
// payloads (images) survive the round trip intact.
// NOTE(review): fragment — `res`, `auth` and the enclosing handler are
// defined outside this snippet; `options` is assigned without a
// declaration, presumably declared earlier in the handler.
options = {
url: url,
// Hand the body back as a Buffer instead of a UTF-8 string.
encoding: null,
headers: {
'Authorization': auth
}
};
request(options,
function(e, r, b){
// Binary content-types are written back with 'binary' encoding.
var encoding = (r.headers['content-type'].indexOf('image') === -1) ? 'utf8' : 'binary';
res.end(b, encoding);
});

Resources