I want to get a result via an HTTP request, and I would like to know how I can pass the POST parameters in that same request and read the result.
var http = require("http");
var email = "email";
var password = '12345';
var response_string='010000000157065146a6b32b224b87d47c1b0283b1ae2fa5bd4f9af9ebc45928110f87fbae00000000700';
var post_params = "jsonrpc=1.0&id=curltest&method=decoderawtransaction&params=" + response_string;
http.request({
    host: "127.0.0.1",
    port: "8332",
    path: "/path",
    method: "POST",
    headers: {
        "Authorization": "Basic " + new Buffer(email + ":" + password).toString('base64')
    }
}, function (res) {
    var response = "";
    res.on('data', function (chunk) {
        response += chunk;
        console.log("res" + response);
    });
    res.on('end', function () {
        response = JSON.parse(response);
        console.log("res1" + response);
    });
}).end();
How can I send the post_params variable in the request?
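One approach (a minimal sketch, untested against this endpoint): the parameters go in the request body, written to the request object before end(), with Content-Type and Content-Length set to match. Note that a bitcoind-style JSON-RPC server on port 8332 usually expects a JSON body rather than form-encoded parameters, so adjust the body format to whatever your endpoint actually accepts. This reuses the email, password and response_string variables from above:
var post_params = "jsonrpc=1.0&id=curltest&method=decoderawtransaction&params=" + response_string;

var req = http.request({
    host: "127.0.0.1",
    port: "8332",
    path: "/path",
    method: "POST",
    headers: {
        "Authorization": "Basic " + Buffer.from(email + ":" + password).toString('base64'),
        "Content-Type": "application/x-www-form-urlencoded",
        "Content-Length": Buffer.byteLength(post_params)
    }
}, function (res) {
    var response = "";
    res.on('data', function (chunk) { response += chunk; });
    res.on('end', function () { console.log(JSON.parse(response)); });
});

req.write(post_params);   // the POST body goes here
req.end();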
Related
I'm currently working on sending a GET request to my own private domain, along with various headers populated with values such as 'Token' that are base64 encoded. This is running perfectly fine.
My main goal is to also send, as a header, the response of another request that I'm sending to a different endpoint.
This is the modified code (I've removed various fields, so please ignore any best-practice issues for now).
const fs = require('fs');
const http = require('http');
const net = require('net');
const os = require("os");
const dns = require("dns");
const https = require("https");

var token = process.env.HOME + '/token.txt';
let base64data1 = '';
try {
    if (fs.existsSync(token)) {
        var data1 = fs.readFileSync(token, 'utf8');
        let buff1 = Buffer.from(data1);
        base64data1 = buff1.toString('base64');
    }
} catch (error) {
    console.log('')
}
var options = {
    hostname: "myprivatedomain.com",
    port: 443,
    path: "/",
    method: "POST",
    headers: {
        "Content-Type": "application/x-www-form-urlencoded",
        "Token": base64data1
    },
};
var req = https.request(options, (res) => {
    res.on("data", (d) => {
        process.stdout.write(d);
    });
});
req.on("error", (e) => {
    // console.error(e);
});
req.write(postData); // postData is one of the fields removed from this snippet
req.end();
My goal, as mentioned, is to add an additional header (in addition to the "Token" header) to the request to my private domain, populated with the response from the following domain - www.seconddomain.com
I was thinking about creating a simple function that would retrieve the response, save it as a variable, and use it as my second header. Something similar to this -
function secondResponse(url) {
    let data = '';
    http.get(url, (resp) => {
        resp.on('data', (chunk) => {
            data += chunk;
        });
    });
    let responsevalue = Buffer.from(data);
    base64data = responsevalue.toString('base64');
    return http.get(url).then((resp) => resp.json());
}
var seconddomainvalue = secondResponse("http://www.seconddomain.com");
Hopefully this is clear enough (:
Update
I figured it out -
The workaround is to put both requests in one function, like so -
function req2() {
    http.get({
        hostname: 'seconddomain.com',
        port: 80,
        path: '/blahblah',
        agent: false
    }, (res) => {
        res.setEncoding('utf8');
        let data = '';
        res.on("data", (d) => {
            var x;
            x = d;
            let buff5 = Buffer.from(x);
            seconddomainvalue = buff5.toString('base64');
            var options = {
                hostname: "myprivatedomain.com",
                port: 443,
                path: "/",
                method: "POST",
                headers: {
                    "Content-Type": "application/x-www-form-urlencoded",
                    "Content-Length": postData.length,
                    "token": tokenvalue,
                    "seconddomain": seconddomainvalue
                },
            };
            var req = https.request(options, (res) => {
                res.on("data", (d) => {
                    process.stdout.write(d);
                });
            });
            req.on("error", (e) => {
                // console.error(e);
            });
            req.write(postData);
            req.end();
        });
    });
}
req2();
Thanks
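One thing to keep in mind with this workaround: res.on('data') can fire more than once for larger responses, so the inner request would be issued once per chunk. A variation (just a sketch, reusing the tokenvalue and postData variables that were removed from the snippet) accumulates the body and sends the second request once, on 'end':
function req2() {
    http.get({
        hostname: 'seconddomain.com',
        port: 80,
        path: '/blahblah',
        agent: false
    }, (res) => {
        res.setEncoding('utf8');
        let data = '';
        res.on('data', (d) => { data += d; });                     // collect every chunk
        res.on('end', () => {
            const seconddomainvalue = Buffer.from(data).toString('base64');
            const options = {
                hostname: 'myprivatedomain.com',
                port: 443,
                path: '/',
                method: 'POST',
                headers: {
                    'Content-Type': 'application/x-www-form-urlencoded',
                    'Content-Length': Buffer.byteLength(postData), // postData: removed field, assumed defined
                    'token': tokenvalue,                           // tokenvalue: removed field, assumed defined
                    'seconddomain': seconddomainvalue
                }
            };
            const req = https.request(options, (res2) => {
                res2.on('data', (d) => process.stdout.write(d));
            });
            req.on('error', (e) => { /* ignored, as in the original */ });
            req.write(postData);
            req.end();
        });
    });
}
req2();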
The same can be achieved using a plain http request as well, but Axios allows us to make HTTP requests from both the browser and Node.js applications. It supports both GET and POST requests, which are the most commonly used HTTP methods.
const axios = require('axios'); // Axios import

const controllerFunction = async () => {
    const firstResponse = await axios.get('https://seconddomain.com'); // await pauses here until the response arrives
    const bufferValue = Buffer.from(firstResponse.data);
    const base64data = bufferValue.toString('base64');
    // The second request can use the first response's data, since execution only resumes once it is available
    const secondResponse = await axios.post('https://myprivatedomain.com', { "body": firstResponse.data }, {
        headers: {
            "Content-Type": "application/json",
            "Token": base64data
        }
    });
};
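Calling the function then just needs a catch so that any request failure surfaces (a sketch):
controllerFunction().catch((error) => console.error(error));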
Also adding the AXIOS documentation link: https://www.npmjs.com/package/axios
I use a Node.js lambda that makes POST requests to a remote API, which is installed in several remote locations.
I have no access to the remote API code or logs.
The lambda gets called through an HTTP gateway by an external application, which I do not control either.
It works perfectly for all the locations but one. For that one location I get this error:
411 Length Required.
I have tried to bypass the HTTP gateway and run the lambda POSTs with test events.
I get the same result.
I have sent the same exact request to other locations and got a response.
I can't find the problem, as I do send a Content-Length header.
This is the lambda code:
const https = require("https");
const iconv = require("iconv-lite");
const charset = require("charset");
const qs = require("qs");
const url = require("url");
exports.handler = (event, context, callback) => {
    event = JSON.parse(JSON.stringify(event));
    let enc = "";
    let multiValueHeaders = event["multiValueHeaders"]["Content-Type"];
    let PostParams = null;
    let domain = event["queryStringParameters"]["domain"];
    let buf = Buffer.from(JSON.stringify(event["body"]), "base64");
    let tstring = buf.toString("utf8");
    PostParams = qs.parse(tstring);
    var postData = PostParams ? qs.stringify(PostParams) : {};
    let ContentLength = new Buffer.from(postData).length;
    let headers = "";
    headers += (multiValueHeaders) ? (' { "Content-Type": "' + multiValueHeaders + '",') : '{';
    headers += ('"Content-Length":' + ContentLength + '}');
    headers = JSON.parse(headers);
    var q = url.parse(domain, true);
    let options = {
        'method': 'POST',
        'hostname': q.hostname,
        'path': q.pathname,
        'headers': {headers}
    };
    var req = http.request(options, function (res) {
        let chunks = [];
        res.on("data", function (chunk) {
            chunks.push(chunk);
            enc = charset(res.headers, chunk);
        });
        res.on("end", function (chunk) {
            var decodedBody = iconv.decode(Buffer.concat(chunks), enc);
            const response = {
                statusCode: 200,
                body: decodedBody
            };
            callback(null, response);
        });
        res.on("error", function (error) {
            console.error(error);
        });
    });
    if (PostParams != null) req.write(postData);
    req.end();
}
When a request is sent to the endpoint straight from Postman, there is no error. Only from the lambda.
Leaving aside the question of why you do event = JSON.parse(JSON.stringify(event)); at all, this is a very ugly way to build the headers object:
let headers = "";
headers += (multiValueHeaders) ? (' { "Content-Type": "'+ multiValueHeaders + '",') : '{';
headers += ('"Content-Length":'+ ContentLength + '}');
headers = JSON.parse(headers);
and which I would have written as:
const headers = { "Content-Length": ContentLength };
if(multiValueHeaders) headers["Content-Type"] = multiValueHeaders;
The root cause of your problem is this line:
'headers': {headers}
It needs to be changed to:
'headers': headers
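For context, {headers} is the ES2015 shorthand for { headers: headers }, so http.request receives a nested object and never sees a top-level Content-Length:
const headers = { "Content-Length": 42 };
console.log({ headers });   // { headers: { 'Content-Length': 42 } }  <- what the code currently sends
console.log(headers);       // { 'Content-Length': 42 }               <- what http.request expects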
Hope this helps
411 is returned when the server demands a valid Content-Length.
The event argument passed through the HTTP gateway is already the entire client request object; you don't have to JSON.parse it.
event.body is an escaped string, and double-escaping it gives the wrong Content-Length. For example,
JSON.stringify({ double: 2 }) === JSON.stringify(JSON.stringify({ double: 2 }))
// false
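Concretely, the extra layer of quoting and escaping changes the length that ends up being measured:
JSON.stringify({ double: 2 })                   // '{"double":2}'      -> 12 bytes
JSON.stringify(JSON.stringify({ double: 2 }))   // '"{\"double\":2}"'  -> 16 bytes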
With this in mind, you can perform your request like this:
exports.handler = (event, context, callback) => {
    let enc = "";
    let multiValueHeaders = event["multiValueHeaders"];
    let domain = event["queryStringParameters"]["domain"];
    const postHeaders = {...multiValueHeaders};
    let postData = null;
    if (event.body !== null) {
        postData = qs.stringify(
            qs.parse(
                Buffer.from(event.body, "base64").toString("utf-8")
            )
        );
        postHeaders['Content-Length'] = [ Buffer.byteLength(postData) ];
    }
    var q = url.parse(domain, true);
    let options = {
        'method': 'POST',
        'hostname': q.hostname,
        'path': q.pathname,
        'headers': postHeaders
    };
    var req = http.request(options, function (res) {
        let chunks = [];
        res.on("data", function (chunk) {
            chunks.push(chunk);
            enc = charset(res.headers, chunk);
        });
        res.on("end", function (chunk) {
            var decodedBody = iconv.decode(Buffer.concat(chunks), enc);
            const response = {
                statusCode: 200,
                body: decodedBody
            };
            callback(null, response);
        });
        res.on("error", function (error) {
            console.error(error);
        });
    });
    if (postData !== null) req.write(postData);
    req.end();
}
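To exercise the handler locally without the gateway, it can be invoked with a minimal hand-built event (a sketch: the field names mirror what the handler reads, the values are made up):
exports.handler(
    {
        multiValueHeaders: { "Content-Type": ["application/x-www-form-urlencoded"] },
        queryStringParameters: { domain: "http://example.com/endpoint" },
        body: Buffer.from("a=1&b=2").toString("base64")
    },
    {},                                              // context is not used by the handler
    (err, response) => console.log(err || response)
);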
I am trying to implement a simple HTTP proxy that will only try to perform basic auth on the target host.
So far I have the following:
var http = require('http');

const my_proxy = http.createServer(function (request, response) {
    console.log(request.connection.remoteAddress + ": " + request.method + " " + request.url);
    const options = {
        port: 80,
        host: request.headers['host'],
        method: request.method,
        path: request.url,
        headers: request.headers,
        auth: 'real_user:real_password'
    };
    var proxy_request = http.request(options);
    proxy_request.on('response', function (proxy_response) {
        proxy_response.on('data', function (chunk) {
            response.write(chunk, 'binary');
        });
        proxy_response.on('end', function () {
            response.end();
        });
        response.writeHead(proxy_response.statusCode, proxy_response.headers);
    });
    request.on('data', function (chunk) {
        proxy_request.write(chunk, 'binary');
    });
    request.on('end', function () {
        proxy_request.end();
    });
});
my_proxy.listen(8080);
However, "auth : 'real_user:real_password'" doesn't seem to do anything. Also have tried:
...
auth: {
user: real_user,
pass: real_pass
}
...
You have to generate the auth header
var username = 'Test';
var password = '123';
var auth = 'Basic ' + new Buffer(username + ':' + password).toString('base64');
// auth is: 'Basic VGVzdDoxMjM='
var header = {'Host': 'www.example.com', 'Authorization': auth};
var request = client.request('GET', '/', header);
DeprecationWarning: Buffer() is deprecated due to security and usability issues. Please use the Buffer.alloc(), Buffer.allocUnsafe(), or Buffer.from() methods instead.
var username = 'Test';
var password = '123';
// Deprecated
// var auth = 'Basic ' + new Buffer(username + ':' + password).toString('base64');
var auth = 'Basic ' + Buffer.from(username + ':' + password).toString('base64');
// auth is: 'Basic VGVzdDoxMjM='
var header = {'Host': 'www.example.com', 'Authorization': auth};
var request = client.request('GET', '/', header);
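Applied to the proxy above, the generated value can be merged into the forwarded headers inside the createServer callback (a sketch using the question's placeholder credentials):
const auth = 'Basic ' + Buffer.from('real_user:real_password').toString('base64');
const options = {
    port: 80,
    host: request.headers['host'],
    method: request.method,
    path: request.url,
    headers: Object.assign({}, request.headers, { 'Authorization': auth })
};
var proxy_request = http.request(options);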
I can write this Instagram API call using jQuery, but I'm not sure how to get Node and/or socket to do the same. Any help?
function getData(url) {
    $.ajax({
        url: url,
        dataType: "jsonp",
        success: function (data) {
            console.log('success');
            console.log(data);
        }
    });
}
var tag = "myTag";
var accessToken = "myAccessToken"
var url = 'https://api.instagram.com/v1/tags/' + tag + '/media/recent?access_token=' + accessToken + '&callback=callBackFunction';
getData(url);
Another way to do this would be to use the request module:
var request = require('request');
request('your url', function (error, response, body) {
    // body will contain the response from the API endpoint.
});
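Since the endpoint returns JSON, the body can be parsed inside the same callback (a sketch):
var request = require('request');
request('your url', function (error, response, body) {
    if (!error && response.statusCode === 200) {
        var data = JSON.parse(body);   // the parsed API response
        console.log(data);
    }
});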
This should work as you would like it to (adding some extra STATUS, HEADERS and BODY markers):
var https = require('https');                    // the Instagram API is served over HTTPS

var myhost = 'api.instagram.com';                // host name only - no protocol, no trailing slash
var tag = 'myTag';
var accessToken = 'myAccessToken';
var myurl = '/v1/tags/' + tag + '/media/recent?access_token=' + accessToken; // the JSONP callback parameter is not needed server-side

var options = {
    hostname: myhost,
    port: 443,
    path: myurl,
    method: 'GET'
};

var req = https.request(options, function (res) {
    console.log('STATUS: ' + res.statusCode);
    console.log('HEADERS: ' + JSON.stringify(res.headers));
    res.setEncoding('utf8');
    res.on('data', function (chunk) {
        console.log('BODY: ' + chunk);
    });
});
req.end();
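Since the endpoint returns JSON, a variation (a sketch, with the same placeholder tag and token) collects the chunks and parses them once the response ends, which is closer to what the jQuery success callback receives:
var https = require('https');

https.get('https://api.instagram.com/v1/tags/myTag/media/recent?access_token=myAccessToken', function (res) {
    var body = '';
    res.setEncoding('utf8');
    res.on('data', function (chunk) { body += chunk; });   // collect every chunk
    res.on('end', function () {
        console.log(JSON.parse(body));                     // the parsed API response
    });
});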
I'm trying to upload video to YouTube programmatically. I chose to use Node.js for the task.
I get an XML response as well as an HTTP status code of 201, and I see the video appear in the video manager; however, the video always shows the message "Failed (unable to convert video file)".
I can upload the file through YouTube's own uploader on their page and there are no problems. I only have to upload to a single account, so I set up OAuth2 for the account and stored the refresh token. The refresh token is hard-coded, though I replace it with a variable below.
Does the refresh token itself need to be refreshed?
My code:
var qs = require('querystring'),
    https = require('https'),
    fs = require('fs');

var p_data = qs.stringify({
    client_id: myClientID,
    client_secret: myClientSecret,
    refresh_token: refreshTokenForAccount,
    grant_type: 'refresh_token'
});

var p_options = {
    host: 'accounts.google.com',
    port: '443',
    method: 'POST',
    path: '/o/oauth2/token',
    headers: {
        'Content-Type': 'application/x-www-form-urlencoded',
        'Content-Length': p_data.length,
        'X-GData-Key': myDeveloperKey
    }
};

var file_path = process.argv[1] || "video.mp4";
var json = "";

var p_req = https.request(p_options, function (resp) {
    resp.setEncoding('utf8');
    resp.on('data', function (chunk) {
        json += chunk;
    });
    resp.on("end", function () {
        debugger;
        var access_token = JSON.parse(json).access_token;
        var title = "test upload1",
            description = "Second attempt at an API video upload",
            keywords = "",
            category = "Comedy";
        var file_reader = fs.createReadStream(file_path, {encoding: 'binary'});
        var file_contents = '';
        file_reader.on('data', function (data) {
            file_contents += data;
        });
        file_reader.on('end', function () {
            var xml =
                '<?xml version="1.0"?>' +
                '<entry xmlns="http://www.w3.org/2005/Atom" xmlns:media="http://search.yahoo.com/mrss/" xmlns:yt="http://gdata.youtube.com/schemas/2007">' +
                ' <media:group>' +
                ' <media:title type="plain">' + title + '</media:title>' +
                ' <media:description type="plain">' + description + '</media:description>' +
                ' <media:category scheme="http://gdata.youtube.com/schemas/2007/categories.cat">' + category + '</media:category>' +
                ' <media:keywords>' + keywords + '</media:keywords>' +
                ' </media:group>' +
                '</entry>';
            var boundary = Math.random();
            var post_data = [];
            var part = '';
            part = "--" + boundary + "\r\nContent-Type: application/atom+xml; charset=UTF-8\r\n\r\n" + xml + "\r\n";
            post_data.push(new Buffer(part, "utf8"));
            part = "--" + boundary + "\r\nContent-Type: video/mp4\r\nContent-Transfer-Encoding: binary\r\n\r\n";
            post_data.push(new Buffer(part, 'utf8'));
            post_data.push(new Buffer(file_contents, 'binary'));
            post_data.push(new Buffer("\r\n--" + boundary + "--\r\n\r\n", 'utf8'));
            var post_length = 0;
            for (var i = 0; i < post_data.length; i++) {
                post_length += post_data[i].length;
            }
            var options = {
                host: 'uploads.gdata.youtube.com',
                port: 443,
                path: '/feeds/api/users/default/uploads',
                method: 'POST',
                headers: {
                    'Authorization': 'Bearer ' + access_token,
                    'X-GData-Key': myDeveloperKey,
                    'Slug': 'video.mp4',
                    'Content-Type': 'multipart/related; boundary="' + boundary + '"',
                    'Content-Length': post_length,
                    'Connection': 'close'
                }
            };
            var req = https.request(options, function (res) {
                res.setEncoding('utf8');
                console.dir(res.statusCode);
                console.dir(res.headers);
                var response = '';
                res.on('data', function (chunk) {
                    response += chunk;
                });
                res.on('end', function () {
                    console.log("We got response: ");
                    console.log(response);
                });
            });
            for (var i = 0; i < post_data.length; i++) {
                req.write(post_data[i]);
            }
            req.on('error', function (e) {
                console.error(e);
            });
            req.end();
        });
    });
});

p_req.write(p_data);
p_req.end();
The problem was in the file being uploaded.
This line: var file_path = process.argv[1] || "video.mp4"; should have been var file_path = process.argv[2] || "video.mp4";
Note that argv[0] is the node executable, argv[1] is the absolute path to the script being run, and argv[2] is the first command-line argument passed to the script.
Of course YouTube would fail to convert the "video": it wasn't a video at all, it was the script itself being uploaded.
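A quick illustration (the script name upload.js is just a placeholder):
// Invocation: node upload.js video.mp4
// process.argv[0] -> /usr/local/bin/node   (the node executable)
// process.argv[1] -> /path/to/upload.js    (the script itself)
// process.argv[2] -> 'video.mp4'           (the first real argument)
var file_path = process.argv[2] || "video.mp4";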