Tried 2 Node.js packages:
https://www.npmjs.com/package/payumoney_nodejs
https://www.npmjs.com/package/payumoney-node
Debugging on localhost.
Debugged in the index.js file under the node_modules in each package.
params {
key: 'x1FanfbP',
salt: 'Vs2GrDyaMQ',
service_provider: 'payu_paisa',
hash: '65f75ced566e2d76dbc6153a277c25f591fc3c0a00a8f51a0699f609d5cbbc94dc7acd5d3be5fe0c0a855c4c6dc7faef49d8b6a1d77dd09398058f800bab068d',
firstname: '',
lastname: '',
email: 'xxxxxxxx#xxxxx.xxx',
phone: XXXXXXXXXX,
amount: '100',
productinfo: '',
txnid: '5b51d253-5d6e-4512-951a-cd6d05bf9e6b',
surl: 'http://localhost:3000/member/contribution/success',
furl: 'http://localhost:3000/member/contribution/failure'
}
request.post(this.payUmoneyURL, form: params, headers: this.headers },
function(error, response, body) {
if (!error) {
var result = response.headers.location;
callback(error, result);
}
});
request.post(payment_url[this.mode] + API.makePayment, { form: params, headers: this.headers }, function(error, response, body) {
if (!error) {
var result = response.headers.location;
callback(error, result);
}
});
Response of response.headers:
response.headers {
date: 'Fri, 28 Jun 2019 12:06:35 GMT',
server: 'Apache',
'x-powered-by': 'PHP/7.2.14',
p3p: 'CP="IDC DSP COR ADM DEVi TAIi PSA PSD IVAi IVDi CONi HIS OUR IND CNT"',
'set-cookie': [ 'PHPSESSID=naopga57qf58vl0hdfj5krq4n5; path=/; domain=.payu.in' ],
expires: 'Thu, 19 Nov 1981 08:52:00 GMT',
'cache-control': 'no-store, no-cache, must-revalidate',
pragma: 'no-cache',
vary: 'Accept-Encoding',
'content-length': '3129',
connection: 'close',
'content-type': 'text/html; charset=UTF-8'
}
The above does not have location key like response.headers.location
Can someone help to know why location is not returned?
Is it because of development on local machine? If yes, then how to test it on localhost?
Any help is appreciated.
It seems like a very silly mistake made but didn't know the area as where to look for the error in the response key before.
After going through the entire response object found that there is a body key sent at the very end of the response object which has HTML string. This HTML specifies if any error has occurred and what exactly it is.
In my case it were the missing mandatory fields:
Error Reason
One or more mandatory parameters are missing in the transaction request.
Corrective Action
Please ensure that you send all mandatory parameters in the transaction request to PayU.
Mandatory parameters which must be sent in the transaction are:
key, txnid, amount, productinfo, firstname, email, phone, surl, furl, hash.
The parameters which you have actually sent in the transaction are:
key, txnid, amount, surl, hash, email, phone.
Mandatory parameter missing from your transaction request are:
productinfo, firstname.
Please re-initiate the transaction with all the mandatory parameters.
After passing data for the missing parameters, received the url in the response.headers.location key as expected.
Integration is working as expected now.
Related
I am working on a remix project and have gotten into an issue where a loader requesting data from a foreign endpoint encoded with gzip do not seem to be decoded.
The remix loader is fairly simple, with some simplification it looks like this:
export const loader = async () => {
try {
const [
encodedData,
[... <other responses>]
] = await Promise.all([
gzippedEndpoint(),
[... <other requests>]
]).catch((e) => {
console.error(e);
});
return json([<loader data>]);
} catch (error) {
console.log("ERROR:", error);
return json({});
}
};
It's the gzippedEndpoint() that fails, where the error stack claims that the returned data is not valid json. I figured compression should not be a problem, but it seems like the fetch requests on the remix server side cannot correctly decode the gzipped data. I also see no option to enable decoding explicitly on remix. When I disable gzip on the foreign endpoint everything works fine for the remix server making the request and parsing the response.
Here is an example of the headers from a returned response (with some obfuscation):
200 GET https://dev.server.com/public/v1/endpoint {
'cache-control': 'no-store, must-revalidate, no-cache',
connection: 'close',
'content-encoding': 'gzip',
'content-type': 'application/json',
date: 'Mon, 12 Sep 2022 06:51:41 GMT',
expires: 'Mon, 12 Sep 2022 06:51:41 GMT',
pragma: 'no-cache',
'referrer-policy': 'no-referrer',
'strict-transport-security': 'max-age=31536000 ; includeSubDomains',
'transfer-encoding': 'chunked',
}
Is there some remix option or request header that I am missing here?
I'm trying to create an app-managed bucket but am encountering the error: Invalid or nonexistent Content-Type, accepted values are {text/json, application/json}
I'm using node.js and request-promise package. The error is confusing to me because I am setting my content-type within the headers of the request to application/json.
Here's my function which makes the request:
let globalOptions = {
resolveWithFullResponse: true
};
function createAppManagedBucket(){
let forgeToken = "eyJhb..."
const options = Object.assign({}, globalOptions, {
method: 'POST',
uri: `https://developer.api.autodesk.com/oss/v2/buckets`,
headers: {
"Content-Type": "application/json",
'User-Agent': 'Request-Promise'
},
form: {
"bucketKey": `someTestBucket`,
"policyKey": `transient`
},
auth: {
'bearer': forgeToken
},
json: true
})
return rp(options)
.then((response) => {
return response.body
}).catch((err) => {
return err
})
}
It seems like even though I've set Content-Type: application/json within the header my request is being forced to have Content-Type: application/x-www-form-urlencoded. If I log the response of this, then I get the error and it looks like my request is actually correct since these are my headers:
rawHeaders:
[ 'Access-Control-Allow-Headers',
'Authorization, Accept-Encoding, Range, Content-Type',
'Access-Control-Allow-Methods',
'GET',
'Access-Control-Allow-Origin',
'*',
'Content-Type',
'application/json; charset=utf-8',
'Date',
'Tue, 09 Apr 2019 15:58:07 GMT',
'Strict-Transport-Security',
'max-age=31536000; includeSubDomains',
'Content-Length',
'99',
'Connection',
'Close' ],
But further down in the request I see
_header: 'POST /oss/v2/buckets HTTP/1.1\r\nContent-Type: application/x-www-form-urlencoded\r\nUser
e\r\nhost: developer.api.autodesk.com\r\nauthorization: Bearer eyJhb...\r\naccept: application/json\r\ncontent-length: 43\r\nConnection: close\r\n\r\n'
Where I can see that the content-type is actually changing to application/form_urlencodedSo it looks like my content-type is being forced to something other than what I set in the header. Has anyone encountered something like this before?
From the request options documentation,
form - when passed an object or a querystring, this sets body to a querystring representation of value, and adds Content-type: application/x-www-form-urlencoded header. When passed no options, a FormData instance is returned (and is piped to request). See "Forms" section above.
You can't mix the json and form request options like you are doing without the Content Type being ambiguous
I'm trying to write a very simple solution to download and parse a calendar file from my Airbnb. Airbnb provides the calendar in ical format, with a unique url for each user such as:
https://www.airbnb.com/calendar/ical/1234.ics?s=abcd
Where those numbers (1234/5678) are unique hex keys to provide some security.
Whenever I hit my (private) url it replies instantly with an ical if I'm using a browser. I can be using any browser, even one from a different country that has never visited airbnb.com before. (I've got remote access to a server I tried it from when debugging.)
In nodejs it works only about 10% of the time. Most of the time I get a 403 error with the text of You don't have permission to access (redacted url) on this server.
Example code:
const request = require('request');
request.get(url, (error, response, body) => {
if (!error && response.statusCode === 200) {
return callback(null, body);
}
return callback('error');
});
This is using the request package here: https://github.com/request/request
I've set it up in an async.whilst loop and it takes about 50 tries to pull down a success, if I set a multi-second delay between each one. (Btw, https://github.com/caolan/async is awesome, so check that if you haven't.)
If it failed EVERY time, that'd be different, but the fact that it fails only occasionally really has me stumped. Furthermore, browsers seem to succeed EVERY time as well.
curl [url] also works, every time. So is there something I'm not specifying in the request that I need to?
Edit 1:
As requested, more of the headers from the reply. I also thought it was rate-limiting me at first. The problem is this is all from the same dev-box. I can curl, or request from a browser without issue multiple times. I can come back in 24 hours and use the nodejs code and it'll fail the first time, or first 50 times.
headers:
{ server: 'AkamaiGHost',
'mime-version': '1.0',
'content-type': 'text/html',
'content-length': '307',
expires: 'Wed, 24 May 2017 17:23:28 GMT',
date: 'Wed, 24 May 2017 17:23:28 GMT',
connection: 'close',
'set-cookie': [ 'mdr_browser=desktop; expires=Wed, 24-May-2017 19:23:28 GMT; path=/; domain=.airbnb.com' ] },
rawHeaders:
[ 'Server',
'AkamaiGHost',
'Mime-Version',
'1.0',
'Content-Type',
'text/html',
'Content-Length',
'307',
'Expires',
'Wed, 24 May 2017 17:23:28 GMT',
'Date',
'Wed, 24 May 2017 17:23:28 GMT',
'Connection',
'close',
'Set-Cookie',
'mdr_browser=desktop; expires=Wed, 24-May-2017 19:23:28 GMT; path=/; domain=.airbnb.com' ],
trailers: {},
rawTrailers: [],
upgrade: false,
url: '',
method: null,
statusCode: 403,
statusMessage: 'Forbidden',
I'm trying to implement a web scraper using request module and node.js. Some time at the scrapping I must post a form and then It always redirect to somewhere else where I must reach to continue scraping.
var jarEstados = requestEstados.jar();
options = {
url: urlPrincipal,
method: 'POST',
followRedirect: true,
maxRedirects: 10,
followAllRedirect: true,
jar: jarEstados,
form: requestObject
};
requestEstados(options,function (error, response, html) {
if (!error) {
console.log(html);
}
else {
console.error(error);
}
});
Response:
<head><title>Object moved</title></head>
<body><h1>Object Moved</h1>This object may be found here.</body>
headers:
{ 'cache-control': 'private',
'content-length': '152',
'content-type': 'text/html',
location: 'Resumo_Por_Estado_Municipio.asp',
server: 'Microsoft-IIS/8.5',
'x-powered-by': 'ASP.NET, ARR/2.5, ASP.NET',
'x-customname': 'ServidorANP',
'x-ua-compatible': 'IE=7',
date: 'Wed, 15 Jun 2016 16:08:42 GMT',
connection: 'close' },
statusCode: 302,
the resquest doest fallow the redirect, even if configured as the module site said> Resquest Module
What am I doing wrong? Can't figure it out!
I did the job by myself. I figured out that if I pass some valid User-Agent using the 302 response I could manualy fallow the redirection and keep the train on the rails for the rest of the scrapping process.
I am trying to get JSON data for a Trello board using the following URL, using Node.js's https module:
https://trello.com/b/nC8QJJoZ.json
Here's my code:
var https = require('https');
https.get('https://trello.com/b/nC8QJJoZ.json', function (res) {
console.log('statusCode:', res.statusCode);
console.log('headers:');
console.log(res.headers);
res.setEncoding('utf8');
res.on('data', function (chunk) {
console.log(chunk);
});
}).on('error', function (e) {
console.log('ERROR: ' + e);
});
Although the URL works perfectly in browser, It returns a body containing the string "invalid key", with a 401 status. Following is the output:
statusCode: 401
headers:
{ 'cache-control': 'max-age=0, must-revalidate, no-cache, no-store',
'x-content-type-options': 'nosniff',
'strict-transport-security': 'max-age=15768000',
'x-xss-protection': '1; mode=block',
'x-frame-options': 'DENY',
'x-trello-version': '1.430.0',
'x-trello-environment': 'Production',
'set-cookie':
[ 'dsc=ae78a354044f982079cd2b5d8adc4f334cda679656b3539ee0adaaf019aee48e; Path=
'visid_incap_168551=/NYMaLRtR+qQu/H8GYry1BCKl1UAAAAAQUIPAAAAAAC1zWDD1JLPowdC
'incap_ses_218_168551=+/2JSB4Vz0XJO/pWbX4GAxCKl1UAAAAA0pAbbN5Mbs4tFgbYuskVPw
expires: 'Thu, 01 Jan 1970 00:00:00',
'content-type': 'text/plain; charset=utf-8',
'content-length': '12',
etag: 'W/"c-b1ec112"',
vary: 'Accept-Encoding',
date: 'Sat, 04 Jul 2015 07:24:00 GMT',
'x-iinfo': '1-11281210-11279245 PNNN RT(1435994639565 404) q(0 0 0 -1) r(3 3) U
'x-cdn': 'Incapsula' }
invalid key
What am I doing wrong?
Well, it turns out that we need to provide a Trello API application key (generated from here) with our request.
var https = require('https');
var KEY = '<replace this with your app key>';
https.get('https://trello.com/b/nC8QJJoZ.json?key=' + KEY, function (res) {
...
});
This seems to me a weird requirement because we are not using Trello's API endpoint. (Even if I solved the problem, I would still like to know why a browser can access the resource, but a server side script cannot.)