How to loop the URL in options in Node.js

var request = require('request');
var options = {
'method': 'GET',
'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=1',//To get all the users data from the repos
'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=2',
'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=3',
'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=4',
'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=5',
'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=6',
'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=7',
'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=8',
'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=9',
'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=10',
'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=11',
'headers': {
'Accept': 'application/vnd.github.mercy-preview+json',//to get topics of the repos
'Authorization': 'Bxxx xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
'User-Agent' : 'sxxxxxxxxxxxxx'
}
};
request(options, function (error, response) {
if (error) throw new Error(error);
console.log(response.body);
});
In the above code I want to loop over the URLs continuously until the last page.
If anyone has an idea of how to use pagination for this, please help me out.

You cannot have multiple attributes for one object key. You have to call every URL individually. I tried to solve this using asynchronous code, because looping with callback functions is confusing and dangerous with regard to the call stack.
const request = require('request');
// configuration for the url generation
const perPages = 100;
const startPage = 1;
const endPage = 11;
const url = 'https://api.github.com/orgs/organizationName/repos?per_page=%perPages%&page=%page%';
// define an asynchronous call for one url
async function callOneUrl(url) {
// local options for each url
const options = {
method: 'GET',
url: url,
headers: {
Accept: 'application/vnd.github.mercy-preview+json',
Authorization: 'Bxxx xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
'User-Agent': 'sxxxxxxxxxxxxx'
}
}
return new Promise((resolve, reject) => {
request(options, function (error, response) {
if (error) return reject(error);
resolve(response);
});
});
}
// call each url with a for loop
(async () => {
for (let i = startPage; i <= endPage; i++) {
// using the await statement to get the resolved value of the Promise instance or catch the error
try {
var response = await callOneUrl(url.replace('%perPages%', perPages).replace('%page%', i));
// handle response here
console.log(response.body);
} catch (error) {
// handle errors here
throw new Error(error);
}
}
})()

const request = require('request-promise');
const urls = ["http://www.google.com", "http://www.example.com"];
const promises = urls.map(url => request(url));
Promise.all(promises).then((data) => {
// data = [response1, response2] (the resolved values, in order)
});
Apart from the above, you can also use async.eachSeries or async.parallel, etc.
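For reference, a minimal sketch of the async.eachSeries approach (assuming the async package is installed; the GitHub auth headers from the question are omitted here for brevity):
const async = require('async');
const request = require('request');

// build the paginated URLs up front
const urls = [];
for (let page = 1; page <= 11; page++) {
  urls.push(`https://api.github.com/orgs/organizationName/repos?per_page=100&page=${page}`);
}

// eachSeries waits for the callback of one request before starting the next
async.eachSeries(urls, (url, done) => {
  request({ method: 'GET', url, headers: { 'User-Agent': 'request' } }, (error, response) => {
    if (error) return done(error);
    console.log(response.body);
    done();
  });
}, (err) => {
  if (err) return console.error(err);
  console.log('All pages fetched');
});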

You can download a list of repos with a do...while loop. We'll set a maximum number of pages to download and exit when we reach either this or the last page.
I would suggest using the request-promise-native package to allow us to use the very nice async-await syntax.
Now, I've given the example of downloading repos for the mongodb org. You can easily replace with whatever one you wish.
I would also note that the request library is now deprecated. We can still use it, of course, but we should consider replacing it in the future (a sketch with a non-deprecated client follows after the code below).
We now also log the repo information and save it to the output file.
const rp = require("request-promise-native");
const fs = require("fs");
async function downloadRepoInformation(org, outputFile) {
let repoList = [];
let page = 0;
const resultsPerPage = 20;
const maxPages = 10;
const uri = `https://api.github.com/orgs/${org}/repos`;
do {
try {
let response = await rp.get({ uri, json: true, qs: { per_page: resultsPerPage, page: ++page }, headers: {"User-Agent" : "request"} });
console.log(`downloadRepoInformation: Downloaded page: ${page}, repos: ${response.length}...`);
repoList = repoList.concat(response);
console.log("downloadRepoInformation: response", JSON.stringify(response, null, 2));
console.log("downloadRepoInformation: repoList.length:", repoList.length);
if (response.length < resultsPerPage) {
console.log(`downloadRepoInformation: Last page reached: exiting loop...`);
break;
}
} catch (error) {
console.error(`downloadRepoInformation: An error occurred:`, error);
break;
}
} while (page <= maxPages)
console.log("downloadRepoInformation: download complete: repoList.length:", repoList.length)
console.log("downloadRepoInformation: Saving to file:", outputFile);
fs.writeFileSync(outputFile, JSON.stringify(repoList, null, 4));
}
downloadRepoInformation("mongodb", "./repolist.json");
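Since request is deprecated, the same paginated loop could also be written with a maintained client; the sketch below uses axios (axios is an assumption here, not part of the original answer), with the same org and output file as above:
const axios = require("axios");
const fs = require("fs");

async function downloadRepoInformation(org, outputFile) {
    let repoList = [];
    let page = 0;
    const resultsPerPage = 20;
    const maxPages = 10;
    const uri = `https://api.github.com/orgs/${org}/repos`;
    do {
        // axios parses the JSON body into response.data
        const { data } = await axios.get(uri, {
            params: { per_page: resultsPerPage, page: ++page },
            headers: { "User-Agent": "request" }
        });
        repoList = repoList.concat(data);
        if (data.length < resultsPerPage) break; // last page reached
    } while (page <= maxPages);
    fs.writeFileSync(outputFile, JSON.stringify(repoList, null, 4));
}

downloadRepoInformation("mongodb", "./repolist.json");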

Related

Bad gateway from Kibana behind a Node.js reverse proxy

I have a Kibana instance behind a Node.js 16.x AWS Lambda reverse proxy. It works fine for almost everything, except for the "Discover" section: when I add 2 or more filters for a 2+ month time range, it gives back a bad gateway error.
Detail of the error:
Wrapper#https://mydomain/_dashboards/909221/bundles/core/core.entry.js:6:4249
_createSuperInternal#https://mydomain/_dashboards/909221/bundles/core/core.entry.js:6:3388
...
...
Implementation:
function proxy(event, context, lambdaCallback) {
delete(event.headers["accept-encoding"])
var path = event.path;
if (event.multiValueQueryStringParameters) {
path += '?' + deserializeQueryString(event.multiValueQueryStringParameters)
}
// Calculate the options for the HTTPS request
var opts = {
host: my_es_endpoint,
path: path,
method: event.httpMethod,
service: 'es',
region: my_region,
headers: event.headers
}
if ((event.httpMethod == 'POST') || (event.httpMethod == 'PUT')) {
if (event.body) {
var buff = new Buffer(event.body, "base64")
var payload = buff.toString('utf-8')
opts.body = payload
}
}
// Use aws4 to sign the request so we can talk with ElasticSearch directly
aws4.sign(opts);
const req = https.request(opts, function (res) {
var bodyParts = [];
// We need to read all the incoming data
res.on('data', (chunk) => {
bodyParts.push(chunk)
});
res.on("end", () => {
// We re-create the read content
var body = Buffer.concat(bodyParts).toString()
// We send back uncompressed data
delete(res.headers['content-encoding'])
res.headers['X-Frame-Options'] = 'DENY'
// res.headers['content-security-policy'] = "default-src 'self'; frame-ancestors 'none'"
res.headers['X-XSS-Protection'] = '1; mode=block'
res.headers['X-Content-Type-Options'] = 'nosniff'
var response = {
statusCode: res.statusCode,
body: body,
headers: res.headers
}
lambdaCallback(null, response);
})
});
req.on('error', (e) => {
console.log(`Error caught when calling ElasticSearch: ${e}`)
})
// For POST/PUT request, we send the content of the paylod as well
if ((event.httpMethod == 'POST') || (event.httpMethod == 'PUT')) {
if (event.body) {
var buff = new Buffer(event.body, "base64")
var payload = buff.toString('utf-8')
req.write(payload)
}
}
req.end();
return req;
}
I tried:
Increasing the timeout on the request
Increasing the Lambda memory from 128 to 512
Increasing the Lambda timeout
I think you can try increasing the memory allocation for the Lambda function; it might resolve the issue.
Or, to remove the headache entirely, you could use an Amazon API Gateway instead of a reverse proxy in a Lambda function to proxy requests to Kibana. You can also increase the timeout for the Kibana requests. This will be more efficient and scalable for your project, with fewer issues.
I tried to optimize your code; can you check whether this improves or corrects your issue?
const https = require('https');
const aws4 = require('aws4');
function deserializeQueryString(query) {
const params = new URLSearchParams();
for (const key of Object.keys(query)) {
for (const value of query[key]) {
params.append(key, value);
}
}
return params.toString();
}
exports.handler = async (event, context, lambdaCallback) => {
const { headers, httpMethod, multiValueQueryStringParameters, body } = event;
// path is reassigned below, so it needs a mutable binding
let path = event.path;
// Remove accept-encoding header
delete headers['accept-encoding'];
// Append query string parameters to the path
if (multiValueQueryStringParameters) {
path += '?' + deserializeQueryString(multiValueQueryStringParameters);
}
const opts = {
host: my_es_endpoint,
path,
method: httpMethod,
service: 'es',
region: my_region,
headers
};
// Add request body for POST and PUT methods
if (['POST', 'PUT'].includes(httpMethod) && body) {
opts.body = Buffer.from(body, 'base64').toString('utf-8');
}
// Sign the request using aws4
aws4.sign(opts);
return new Promise((resolve, reject) => {
const req = https.request(opts, res => {
let body = '';
res.on('data', chunk => {
body += chunk;
});
res.on('end', () => {
// Remove content-encoding header
delete res.headers['content-encoding'];
// Set security headers
res.headers['X-Frame-Options'] = 'DENY';
res.headers['X-XSS-Protection'] = '1; mode=block';
res.headers['X-Content-Type-Options'] = 'nosniff';
const response = {
statusCode: res.statusCode,
body,
headers: res.headers
};
resolve(response);
});
});
req.on('error', error => {
console.error(`Error caught when calling ElasticSearch: ${error}`);
reject(error);
});
// Write request body for POST and PUT methods
if (['POST', 'PUT'].includes(httpMethod) && body) {
req.write(opts.body);
}
req.end();
}).then(lambdaCallback)
.catch(error => {
lambdaCallback(error);
});
};
I am not sure if the error is related to the proxy implementation; however, I found something that helped, and now I am not experiencing the problem anymore.
I reduced the value of discover:sampleSize in Kibana Advanced Settings as suggested here
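If you prefer to script that change instead of using the Kibana UI, there is an advanced-settings endpoint; the sketch below is an assumption only (the /api/kibana/settings path and the kbn-xsrf header should be verified against your Kibana / OpenSearch Dashboards version, and the base URL is a placeholder):
// Assumed endpoint: POST <kibana base>/api/kibana/settings
// Requires Node 18+ for the global fetch
const KIBANA_BASE = "https://mydomain/_dashboards"; // placeholder

async function setDiscoverSampleSize(size) {
  const res = await fetch(`${KIBANA_BASE}/api/kibana/settings`, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      "kbn-xsrf": "true" // Kibana rejects API writes without this header
    },
    body: JSON.stringify({ changes: { "discover:sampleSize": size } })
  });
  console.log("settings update status:", res.status);
}

setDiscoverSampleSize(100);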

How to make API requests with a timer in Node.js?

I have an array which contains some data, and for each data item an API request has to be made. The API will remain the same, but the array index will increment every time the API request is made.
Also, the API request has to be called with a gap of 5 minutes; hence I can't call the API for the entire array all at once. One API call is made with Array[0] in the request body, and after 5 minutes the next call is made with Array[1] in the request body.
I tried to implement a cron job with these requirements, but there are no proper examples of a cron job within a for loop with API calls.
Any help would be appreciated.
const array = ['http://linkedin.com/charles123', 'http://linkedin.com/darwin123' ... ]
//API needs to be called every 5 minutes
const sendConnectionRequest = () => {
var i = 0;
for (i = 0; i < array.length; i++) {
fetch("serverurl:123", {
headers: {
'Content-Type': 'application/json'
},
method: "POST",
body: JSON.stringify(array[i])
})
.then((res) => { if (res) { console.log('Connection Request Sent') } })
}
}
May I suggest using an async generator? This will allow you to manage sequential promises.
const fetch = require("node-fetch");
const sleep = require("util").promisify(setTimeout);
async function* responseGenerator(urls) {
let iterations = 0;
while (urls.length) {
const [url, ...rest] = urls;
urls = rest;
if (iterations > 0) {
await sleep(50000); // 50 s here; use 5 * 60 * 1000 for the 5-minute gap described in the question
}
yield fetch("serverurl:123", {
headers: {
"Content-Type": "application/json"
},
method: "POST",
body: JSON.stringify(url)
});
iterations += 1
}
}
const array = ['http://linkedin.com/charles123', 'http://linkedin.com/darwin123' ]
(async () => {
for await (const response of responseGenerator(array)) {
// response.status
// response.statusText
// response.contentType
}
})();
There are multiple ways of doing timers in Node.js. Check this link.
setInterval is an infinite loop, and between each iteration it waits a certain amount of time.
const array = ['http://linkedin.com/charles123', 'http://linkedin.com/darwin123' ... ]
//API needs to be called every 5 minutes
const sendConnectionRequest = (data) => {
fetch("serverurl:123", {
headers: {
'Content-Type': 'application/json'
},
method: "POST",
body: JSON.stringify(data)
})
.then((res) => { if (res) { console.log('Connection Request Sent') } })
}
const callApi = setInterval(()=> {
sendConnectionRequest(array[0])
array.shift()
}, 30000); // 30 s here; use 5 * 60 * 1000 for the 5-minute gap described in the question
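One caveat with the setInterval approach: the timer keeps firing after the array is exhausted. A small variation that clears the interval once every item has been sent, using the 5-minute gap from the question:
const timer = setInterval(() => {
  if (array.length === 0) {
    clearInterval(timer); // stop once everything has been sent
    return;
  }
  sendConnectionRequest(array[0]);
  array.shift();
}, 5 * 60 * 1000); // 5 minutes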

Call external API from AWS Lambda and get response as callback in Lambda function

I'm trying to call an external API inside an AWS Lambda function using the Node request module. So far I've succeeded in calling the API and getting the data within the Lambda execution. The only problem I'm having is returning my userInfo data with the response: even though my userInfo has data in it, I'm getting an empty object on the client side.
var AWS = require('aws-sdk');
AWS.config.region = 'us-east-1';
var request = require('request');
const encode = require('nodejs-base64-encode');
var lambda = new AWS.Lambda();
import { Handler, Context, Callback } from "aws-lambda";
import { PayPalLinkDetails } from "../../View/PayPalLinkDetails";
import { PayPalLinkResponse, PayPalLinkResponseBody } from "../../View/PayPalLinkResponseBody";
const PAYPAL_CLIENT = process.env.PayPalClientID;
const PAYPAL_SECRET = process.env.PayPalSecretKEY;
const PAYPAL_OAUTH_API = process.env.PayPalAuthAPI;
const PAYPAL_IDENTITY_API = process.env.PayPalIdentityAPI;
const LinkPayPal: Handler = async (paypalRequest : PayPalLinkDetails, context: Context, callback: Callback) => {
var userInfo = new PayPalLinkResponse();
var paypalresponse = new PayPalLinkResponseBody();
const basicAuth = encode.encode(PAYPAL_CLIENT+":"+PAYPAL_SECRET, 'base64');
var options = {
'method': 'POST',
'url': PAYPAL_OAUTH_API,
'headers': {
'Authorization': 'Basic '+basicAuth,
'Content-Type': 'application/x-www-form-urlencoded'
},
form: {
'grant_type': 'authorization_code',
'code': paypalRequest.code
}
};
await request(options, async function (error : any, response :any) {
if (error)
{
console.log(error);
}
else
{
paypalresponse = response.body;
// save data to DB here
}
});
var getIdentity = {'method': 'get','url': PAYPAL_IDENTITY_API,'headers': {'Authorization': 'Basic '+basicAuth,'Content-Type': 'application/x-www-form-urlencoded'},form: {'grant_type': 'authorization_code','code': paypalresponse.access_token}};
await request(getIdentity, function (err : any, res :any)
{
if (err)
{
console.log(err);
}
else
{
userInfo = res.body; // this Print the values as expected
console.log(userInfo);
}
});
callback(null,userInfo); // This Giving me Empty value
}
export {LinkPayPal}
I think I'm calling the callback in the wrong way. Are there any suggestions to solve this issue?
The problem is that you have mixed up the callback and async/await styles, which won't work the way you expect. You have a couple of choices here:
[Not Recommended]: Do a nested callback and on response of first callback, call second request and so on.
[Not Recommended]: Use a promise version of the request package, which is called request-promise, as this is now being deprecated.
[Not Recommended]: Convert request's callback style to promise-based by wrapping it up in a Promise. Again, the request module is being deprecated. See here for more details.
[Recommended]: Use some modern packages which support promises out of the box and are maintained properly, like got, axios, etc. You can see the list here.
This is how the code will look if you use, let's say, the got package to make HTTP calls.
var AWS = require("aws-sdk");
AWS.config.region = "us-east-1";
var got = require("got");
const encode = require("nodejs-base64-encode");
var lambda = new AWS.Lambda();
import { Handler, Context, Callback } from "aws-lambda";
import { PayPalLinkDetails } from "../../View/PayPalLinkDetails";
import {
PayPalLinkResponse,
PayPalLinkResponseBody
} from "../../View/PayPalLinkResponseBody";
const PAYPAL_CLIENT = process.env.PayPalClientID;
const PAYPAL_SECRET = process.env.PayPalSecretKEY;
const PAYPAL_OAUTH_API = process.env.PayPalAuthAPI;
const PAYPAL_IDENTITY_API = process.env.PayPalIdentityAPI;
const LinkPayPal: Handler = async (
paypalRequest: PayPalLinkDetails,
context: Context,
callback: Callback
) => {
// userInfo and paypalresponse come straight from the awaited got calls below,
// so the placeholder instances are no longer needed (redeclaring them would throw)
const basicAuth = encode.encode(
PAYPAL_CLIENT + ":" + PAYPAL_SECRET,
"base64"
);
var options = {
method: "POST",
url: PAYPAL_OAUTH_API,
headers: {
Authorization: "Basic " + basicAuth,
"Content-Type": "application/x-www-form-urlencoded"
},
form: {
grant_type: "authorization_code",
code: paypalRequest.code
}
};
const paypalresponse = await got(options);
var getIdentity = {
method: "get",
url: PAYPAL_IDENTITY_API,
headers: {
Authorization: "Basic " + basicAuth,
"Content-Type": "application/x-www-form-urlencoded"
},
form: {
grant_type: "authorization_code",
code: paypalresponse.access_token
}
};
const userInfo = await got(getIdentity);
return userInfo;
};
export { LinkPayPal };
You might need to tweak the options as per the got style, but you will get the idea (see the sketch below).
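For instance, with got v11 the two calls might be tweaked roughly like this. This is a sketch only: .json() parses the response body so access_token is actually readable, got sets the form content type by itself, and the identity call is made with POST here because a form body is generally not accepted on a GET:
const paypalresponse = await got.post(PAYPAL_OAUTH_API, {
  headers: { Authorization: "Basic " + basicAuth },
  form: {
    grant_type: "authorization_code",
    code: paypalRequest.code
  }
}).json();

const userInfo = await got.post(PAYPAL_IDENTITY_API, {
  headers: { Authorization: "Basic " + basicAuth },
  form: {
    grant_type: "authorization_code",
    code: paypalresponse.access_token
  }
}).json();

return userInfo;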

How to do a PUT request using Node.js and add the JSON file

Hi, I am new to this domain. I am trying to do a PUT request and add the JSON file. I have the JSON file created, and I have to perform PUT and POST requests using the URIs. Please can anyone post code using Node.js? It will be helpful. I created a PUT request file like this:
var i = 0;
var fs = require("fs");
var request = require('request');
var jsonPath = fs.readFileSync('filepath');
// String --> Object
var jsonObj = JSON.parse(jsonPath);
console.log(changedevicename.call());
for( i = 0; i < jsonObj.ipConfig.length; i++)
{
var ipv4URI = jsonObj.ipConfig[i].ipv4; // taking ipv4 json file
var ipv6URI = jsonObj.ipConfig[i].ipv6; // taking ipv6 json file
console.log(ipv4URI);
console.log(ipv6URI);
rest_service();
//console.log(config[i]);
}
function rest_service() // should I change this or what
{
var i = 0;
var request = require('request');
var options = {
url: 'http://'+'USERNAME'+':'+'PASSWORD'+ '#'+'IPV6'+'URI',
method: 'PUT',
}
//IP = userGivenIP;
//IP = '192.168.0.1';
request(
{
method:'PUT',
url: 'http://'+'USERNAME'+':'+'PASSWORD'+ '#'+'IPV6'+'URI', //
headers: {
'Content-Type': 'application/json', // check this, I should change this
},
//var ip4Json = JSON.parse(body); // check this, I should change this
//console.log('\n\n'+ body + '\n\n');
},
function (error, response, body) // check this, I should change this
{
if (error != undefined)
{
console.log("printerror", error);
}
else
{
console.log(body);
console.log("IP disabled");
}
});
}
This code has to be done dynamically, but I am not getting how to do this for PUT and POST requests. Please help me out and mail the code.
Thanks and regards,
Prathamesh
You can add a body parameter to request.
const jsonBody = {
key1: value1,
key2: value2
};
const headers = {
authorization: "<token>"
};
const options = {
method: 'PUT',
uri: "some-url",
headers: headers, // headers if your api requires
body: jsonBody,
json: true
};
request(options, function(err, response) {
// handle err first
// do stuff with response
});
You should go through the docs: https://www.npmjs.com/package/request
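A POST has essentially the same shape; only the method changes. A minimal sketch with the same placeholder URL and body:
const request = require('request');

const options = {
  method: 'POST',
  uri: 'some-url', // placeholder, as above
  headers: { authorization: '<token>' },
  body: { key1: 'value1', key2: 'value2' },
  json: true // serializes the body and parses the JSON response
};

request(options, function (err, response, body) {
  if (err) return console.error(err);
  console.log(response.statusCode, body);
});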

Jenkins Git Plugin does not receive posted Parameters

I am trying to use Node.js to programmatically build Jenkins jobs that take Git parameters.
I am sending the parameters as post data, as shown below. However, no matter what value I assign to ref, Jenkins runs the build with the default parameter value (specified in the job's configuration). I have tried passing in the parameters as query strings in the URL, but that also did not work.
I am using Jenkins v1.651.1 and Node v6.2.0.
var jobOptions = {
url: requestedJobObject.url + 'build',
method: 'POST',
port: 8080
};
// parameters = { "name": "ref", "value": "origin/master" }
if (!_.isEmpty(parameters)) {
var jsonParametersString = JSON.stringify({"parameter": parameters});
var parameterParam = encodeURIComponent(jsonParametersString);
parameters.json = parameterParam;
jobOptions.headers = {
'Content-Type': 'application/x-www-form-urlencoded',
'Content-Length': querystring.stringify(parameters).length
};
jobOptions.url += 'WithParameters';
postData = querystring.stringify(parameters);
}
// jobOptions contains auth field & separates url into hostname and path
// makes an http request to jobOptions and calls req.write(postData)
makeRequest(jobOptions, callback, responseCB, postData)
makeRequest makes an http request:
function makeRequest (object, callback, responseCB, postData) {
var accumulator = '';
var parsedUrl = u.parse('//' + object.url, true, true);
var options = {
hostname: parsedUrl.hostname,
port: object.port || 8080,
path: parsedUrl.path,
method: object.method || 'GET',
auth: getAuthByHost(parsedUrl.hostname)
};
if (object.headers) {
options.headers = object.headers;
}
var response = null;
var req = http.request(options, function(res) {
response = res;
res.on('data', function (data) {
accumulator = accumulator + data.toString();
res.resume();
});
res.on('close', function () {
// first assume accumulator is JSON object
var responseContent;
try {
responseContent = JSON.parse(accumulator);
}
// if not object, use accumulator as string
catch (err) {
responseContent = accumulator;
}
callback(responseContent, response.statusCode);
if (responseCB) {
responseCB(res);
}
});
});
req.on('close', function () {
// first assume accumulator is JSON object
var responseContent;
try {
responseContent = JSON.parse(accumulator);
}
catch (err) {
responseContent = accumulator;
}
callback(responseContent, response.statusCode);
if (responseCB) {
responseCB(response);
}
});
if (postData) {
req.write(postData);
}
req.end();
}
Try this; it works for me:
var auth = 'Basic yourUserToken';
var jobOptions = {
url:'jenkinsHostName:8080/jenkins/job/jobName/' +'build',
method: 'POST',
port: 8080
};
var parameter = {"parameter": [{"name":"ref", "value":"origin/master"}]};
var postData;
if (!_.isEmpty(parameter)) {
var jsonParametersString = JSON.stringify(parameter);
jobOptions.headers = {
'Authorization':auth,
'Content-Type': 'application/x-www-form-urlencoded',
};
jobOptions.url += '?token=jobRemoteTriggerToken';
postData = "json="+jsonParametersString;
console.log("postData = " + postData);
}
var callback;
var responseCB;
makeRequest(jobOptions, callback, responseCB, postData) ;
It is based on your code. I removed the querystring - it seems that it returned an empty string when performed on the parameters object. I changed /buildWithParameters to /build - it didn't work the other way.
In addition, verify that when you pass the 'Content-Length' in the header, it doesn't truncate your JSON parameters object (I removed it).
Also note that I used the user API token, which you can get at http://yourJenkinsUrl/me/configure by clicking the "Show API Token" button.
Not sure about this, as I don't know Node.js -- but maybe this fits: the Jenkins remote access API indicates that the parameter entity in the json request must point to an array, even if there's just one parameter to be defined.
Does the change below fix the problem (note the square brackets around parameters)?
[...]
var jsonParametersString = JSON.stringify({"parameter": [parameters]});
[...]
