Jenkins Git Plugin does not receive posted Parameters - node.js

I am trying to use Node.js to programmatically build Jenkins jobs that take Git parameters.
I am sending the parameters as post data, as shown below. However, no matter what value I assign to ref, Jenkins runs the build with the default parameter value (specified in the job's configuration). I have tried passing in the parameters as query strings in the URL, but that also did not work.
I am using Jenkins v1.651.1 and Node v6.2.0.
var jobOptions = {
    url: requestedJobObject.url + 'build',
    method: 'POST',
    port: 8080
};

// parameters = { "name": "ref", "value": "origin/master" }
if (!_.isEmpty(parameters)) {
    var jsonParametersString = JSON.stringify({"parameter": parameters});
    var parameterParam = encodeURIComponent(jsonParametersString);
    parameters.json = parameterParam;
    jobOptions.headers = {
        'Content-Type': 'application/x-www-form-urlencoded',
        'Content-Length': querystring.stringify(parameters).length
    };
    jobOptions.url += 'WithParameters';
    postData = querystring.stringify(parameters);
}

// jobOptions contains auth field & separates url into hostname and path
// makes an http request to jobOptions and calls req.write(postData)
makeRequest(jobOptions, callback, responseCB, postData)
makeRequest makes an http request:
function makeRequest (object, callback, responseCB, postData) {
    var accumulator = '';
    var parsedUrl = u.parse('//' + object.url, true, true);
    var options = {
        hostname: parsedUrl.hostname,
        port: object.port || 8080,
        path: parsedUrl.path,
        method: object.method || 'GET',
        auth: getAuthByHost(parsedUrl.hostname)
    };
    if (object.headers) {
        options.headers = object.headers;
    }
    var response = null;
    var req = http.request(options, function (res) {
        response = res;
        res.on('data', function (data) {
            accumulator = accumulator + data.toString();
            res.resume();
        });
        res.on('close', function () {
            // first assume accumulator is JSON object
            var responseContent;
            try {
                responseContent = JSON.parse(accumulator);
            }
            // if not object, use accumulator as string
            catch (err) {
                responseContent = accumulator;
            }
            callback(responseContent, response.statusCode);
            if (responseCB) {
                responseCB(res);
            }
        });
    });
    req.on('close', function () {
        // first assume accumulator is JSON object
        var responseContent;
        try {
            responseContent = JSON.parse(accumulator);
        }
        catch (err) {
            responseContent = accumulator;
        }
        callback(responseContent, response.statusCode);
        if (responseCB) {
            responseCB(response);
        }
    });
    if (postData) {
        req.write(postData);
    }
    req.end();
}

Try this, it works for me:
var auth = 'Basic yourUserToken';
var jobOptions = {
    url: 'jenkinsHostName:8080/jenkins/job/jobName/' + 'build',
    method: 'POST',
    port: 8080
};
var parameter = {"parameter": [{"name": "ref", "value": "origin/master"}]};
var postData;
if (!_.isEmpty(parameter)) {
    var jsonParametersString = JSON.stringify(parameter);
    jobOptions.headers = {
        'Authorization': auth,
        'Content-Type': 'application/x-www-form-urlencoded',
    };
    jobOptions.url += '?token=jobRemoteTriggerToken';
    postData = "json=" + jsonParametersString;
    console.log("postData = " + postData);
}
var callback;
var responseCB;
makeRequest(jobOptions, callback, responseCB, postData);
It is based on your code. I removed the querystring call - it seems that it returned an empty string when applied to the parameters object. I changed /buildWithParameters to /build - it didn't work the other way.
In addition, verify that when you pass 'Content-Length' in the header, it doesn't truncate your JSON parameters object (I removed it).
Also note that I used the user API token, which you can get at http://yourJenkinsUrl/me/configure by clicking the "Show API Token" button.
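If you do want to keep a Content-Length header, a small sketch of how to avoid the truncation (my addition, assuming the postData string built above) is to compute the length from the exact body you write:
// length in bytes of the exact string later passed to req.write(postData)
jobOptions.headers['Content-Length'] = Buffer.byteLength(postData, 'utf8');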

Not sure about this, as I don't know Node.js -- but maybe this fits: the Jenkins remote access API indicates that the parameter entity in the json request must point to an array, even if there's just one parameter to be defined.
Does the change below fix the problem (note the square brackets around parameters)?
[...]
var jsonParametersString = JSON.stringify({"parameter": [parameters]});
[...]
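For reference, a small sketch of how the form body would then be built (an illustration added here, reusing the parameters object from the question):
// note the array: "parameter" must point to a list of name/value pairs
var payload = { "parameter": [parameters] };
var postData = 'json=' + encodeURIComponent(JSON.stringify(payload));
// POST postData to <jenkinsUrl>/job/<jobName>/build
// with Content-Type: application/x-www-form-urlencoded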

Related

Bad gateway from Kibana behind a nodejs reverse proxy

I have a Kibana instance behind a Node.js 16.x AWS Lambda reverse proxy. It works fine for almost everything, except for the "Discover" section: when I add 2 or more filters over a 2+ month time range, it gives back a bad gateway error.
Detail of the error:
Wrapper#https://mydomain/_dashboards/909221/bundles/core/core.entry.js:6:4249
_createSuperInternal#https://mydomain/_dashboards/909221/bundles/core/core.entry.js:6:3388
...
...
Implementation:
function proxy(event, context, lambdaCallback) {
    delete(event.headers["accept-encoding"])
    var path = event.path;
    if (event.multiValueQueryStringParameters) {
        path += '?' + deserializeQueryString(event.multiValueQueryStringParameters)
    }
    // Calculate the options for the HTTPS request
    var opts = {
        host: my_es_endpoint,
        path: path,
        method: event.httpMethod,
        service: 'es',
        region: my_region,
        headers: event.headers
    }
    if ((event.httpMethod == 'POST') || (event.httpMethod == 'PUT')) {
        if (event.body) {
            var buff = new Buffer(event.body, "base64")
            var payload = buff.toString('utf-8')
            opts.body = payload
        }
    }
    // Use aws4 to sign the request so we can talk with ElasticSearch directly
    aws4.sign(opts);
    const req = https.request(opts, function (res) {
        var bodyParts = [];
        // We need to read all the incoming data
        res.on('data', (chunk) => {
            bodyParts.push(chunk)
        });
        res.on("end", () => {
            // We re-create the read content
            var body = Buffer.concat(bodyParts).toString()
            // We send back uncompressed data
            delete(res.headers['content-encoding'])
            res.headers['X-Frame-Options'] = 'DENY'
            // res.headers['content-security-policy'] = "default-src 'self'; frame-ancestors 'none'"
            res.headers['X-XSS-Protection'] = '1; mode=block'
            res.headers['X-Content-Type-Options'] = 'nosniff'
            var response = {
                statusCode: res.statusCode,
                body: body,
                headers: res.headers
            }
            lambdaCallback(null, response);
        })
    });
    req.on('error', (e) => {
        console.log(`Error caught when calling ElasticSearch: ${e}`)
    })
    // For POST/PUT requests, we send the content of the payload as well
    if ((event.httpMethod == 'POST') || (event.httpMethod == 'PUT')) {
        if (event.body) {
            var buff = new Buffer(event.body, "base64")
            var payload = buff.toString('utf-8')
            req.write(payload)
        }
    }
    req.end();
    return req;
}
I tried:
Increasing the timeout on the request
Increasing the Lambda memory from 128 to 512
Increasing the Lambda timeout
One thing you can try is to increase the memory allocation for the Lambda function; it might resolve the issue.
Or, to remove the headache altogether, use an Amazon API Gateway instead of a reverse proxy in a Lambda function to proxy requests to Kibana. You can also increase the timeout for Kibana requests. This will be more efficient and scalable for your project, with fewer issues.
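If you go the timeout route, a small sketch of what I mean (an addition of mine, not part of the original proxy; the 25-second limit is just an example) is to set an explicit timeout on the upstream request inside proxy():
// give the upstream ElasticSearch call a hard cap (hypothetical 25 s)
req.setTimeout(25000, function () {
    // destroying with an error routes it through the existing 'error' handler
    req.destroy(new Error('Upstream request to ElasticSearch timed out'));
});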
I tried to optimize your code - can you check whether this improves or corrects your issue?
const https = require('https');
const aws4 = require('aws4');

function deserializeQueryString(query) {
    const params = new URLSearchParams();
    for (const key of Object.keys(query)) {
        for (const value of query[key]) {
            params.append(key, value);
        }
    }
    return params.toString();
}

exports.handler = async (event) => {
    const { headers, httpMethod, multiValueQueryStringParameters, body } = event;
    let path = event.path;
    // Remove accept-encoding header
    delete headers['accept-encoding'];
    // Append query string parameters to the path
    if (multiValueQueryStringParameters) {
        path += '?' + deserializeQueryString(multiValueQueryStringParameters);
    }
    const opts = {
        host: my_es_endpoint,
        path,
        method: httpMethod,
        service: 'es',
        region: my_region,
        headers
    };
    // Add request body for POST and PUT methods
    if (['POST', 'PUT'].includes(httpMethod) && body) {
        opts.body = Buffer.from(body, 'base64').toString('utf-8');
    }
    // Sign the request using aws4
    aws4.sign(opts);
    return new Promise((resolve, reject) => {
        const req = https.request(opts, res => {
            let responseBody = '';
            res.on('data', chunk => {
                responseBody += chunk;
            });
            res.on('end', () => {
                // Remove content-encoding header
                delete res.headers['content-encoding'];
                // Set security headers
                res.headers['X-Frame-Options'] = 'DENY';
                res.headers['X-XSS-Protection'] = '1; mode=block';
                res.headers['X-Content-Type-Options'] = 'nosniff';
                resolve({
                    statusCode: res.statusCode,
                    body: responseBody,
                    headers: res.headers
                });
            });
        });
        req.on('error', error => {
            console.error(`Error caught when calling ElasticSearch: ${error}`);
            reject(error);
        });
        // Write request body for POST and PUT methods
        if (['POST', 'PUT'].includes(httpMethod) && body) {
            req.write(opts.body);
        }
        req.end();
    });
};
I am not sure whether the error is related to the proxy implementation; however, I found something that helped, and I am no longer experiencing the problem.
I reduced the value of discover:sampleSize in Kibana Advanced Settings, as suggested here.

NodeJS Read file, make node-rest-client call to get related data and add retrieved response as element in JSON

I am reading a JSON file using fs.readFileSync and, for each document obtained, I am making a REST API call using client.post. Once I get a response, I want to place the received content into another JSON file, which is a replica of the input JSON except for an additional element holding the data received from the client.post call. However, probably because of the async nature of client.post, I am unable to add the element to the output JSON. I am new to Node.js. Can you please help me see where I am going wrong? Below are the code and data.
data:
[
    {
        "ticker": "CLYD"
    },
    {
        "ticker": "EGH"
    }
]
Code:
var fs = require('fs');
var Client = require('node-rest-client').Client;

var data = fs.readFileSync(__dirname + "/data/stocks.json", "utf8");
processData(data);

function processData (data) {
    var obj = JSON.parse(data);
    for (j = 0; j < obj.length; j++) {
        obj[j].stockInformation = getValuesForTicker(obj[j].ticker.trim());
    }
    var jsonOutput = JSON.stringify(obj, null, '\t');
    fs.writeFileSync(__dirname + "/data/response.json", jsonOutput);
};

function getValuesForTicker (ticker) {
    /**
     * More details and samples at https://www.npmjs.com/package/node-rest-client
     */
    var client = new Client();
    var values;
    // set content-type header and data as json in args parameter
    var args = {
        data: { "ticker": ticker },
        headers: { "Content-Type": "application/json", "Accept": "application/json" }
    };
    var responseToRequest = client.post("https://url.providing.response.as.json.content/", args, function (data, response) {
        // parsed response body as js object
        values = JSON.parse(JSON.stringify(data)).price;
    });
    return values;
};
Since getValuesForTicker makes an async call to fetch data, it should invoke a callback once the data is received (or, better, return a promise) rather than return the result directly (currently undefined is returned, because the function returns before the value is assigned).
function getValuesForTicker (ticker) {
    /**
     * More details and samples at https://www.npmjs.com/package/node-rest-client
     */
    return new Promise(function (resolve, reject) {
        var client = new Client();
        // set content-type header and data as json in args parameter
        var args = {
            data: { "ticker": ticker },
            headers: { "Content-Type": "application/json", "Accept": "application/json" }
        };
        client.post("https://url.providing.response.as.json.content/", args, function (data, response) {
            // parsed response body as js object
            var values = JSON.parse(JSON.stringify(data)).price;
            resolve(values);
        });
    });
}
and to get the data once the async call is done you will need to call the then function, as below:
getValuesForTicker(obj[j].ticker.trim())
    .then(function (val) {
        obj[j].stockInformation = val;
    });
Considering you are new to Node.js, this may be hard to grasp at first. Take some time to understand callbacks and promises first.
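Since processData loops over several tickers, the output file should be written only after every request has resolved. A minimal sketch, reusing the names from the question and assuming getValuesForTicker returns a promise as above:
function processData (data) {
    var obj = JSON.parse(data);
    var promises = obj.map(function (stock) {
        return getValuesForTicker(stock.ticker.trim())
            .then(function (val) {
                stock.stockInformation = val;
                return stock;
            });
    });
    // write the file once all tickers have been processed
    Promise.all(promises).then(function (result) {
        fs.writeFileSync(__dirname + "/data/response.json", JSON.stringify(result, null, '\t'));
    });
}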

how to do put request using node.js and add the json file

Hi, I am new to this domain. I am trying to do a PUT request and add the JSON file. I have the JSON file created, and I have to perform PUT and POST requests using the URIs. Please can anyone post code using Node.js - it would be helpful. I created a PUT request file like this:
var i = 0;
var fs = require("fs");
var request = require('request');
var jsonPath = fs.readFileSync('filepath');
// String --> Object
var jsonObj = JSON.parse(jsonPath);
console.log(changedevicename.call());

for (i = 0; i < jsonObj.ipConfig.length; i++) {
    var ipv4URI = jsonObj.ipConfig[i].ipv4; // taking ipv4 json file
    var ipv6URI = jsonObj.ipConfig[i].ipv6; // taking ipv6 json file
    console.log(ipv4URI);
    console.log(ipv6URI);
    rest_service();
    //console.log(config[i]);
}

function rest_service() { // should I change this or what
    var i = 0;
    var request = require('request');
    var options = {
        url: 'http://' + 'USERNAME' + ':' + 'PASSWORD' + '@' + 'IPV6' + 'URI',
        method: 'PUT',
    };
    //IP = userGivenIP;
    //IP = '192.168.0.1';
    request(
        {
            method: 'PUT',
            url: 'http://' + 'USERNAME' + ':' + 'PASSWORD' + '@' + 'IPV6' + 'URI', //
            headers: {
                'Content-Type': 'application/json', // check this, I should change this
            },
            //var ip4Json = JSON.parse(body); // check this, I should change this
            //console.log('\n\n'+ body + '\n\n');
        },
        function (error, response, body) { // check this, I should change this
            if (error != undefined) {
                console.log(body);
            } else {
                console.log("printerror", error);
                console.log("IP disabled");
            }
        });
}
This code has to be done dynamically, but I am not getting how to do this for PUT and POST requests. Please help me out and share the code. Thanks.
Thanks and regards,
Prathamesh
You can add a body parameter to request.
const request = require('request');

const jsonBody = {
    key1: value1,
    key2: value2
};
const headers = {
    authorization: "<token>"
};
const options = {
    method: 'PUT',
    uri: "some-url",
    headers: headers, // headers if your api requires
    body: jsonBody,
    json: true
};
request(options, function (err, response) {
    // handle err first
    // do stuff with response
});
You should go through the docs: https://www.npmjs.com/package/request
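To tie this back to the loop in the question, a rough sketch (assuming the jsonObj structure from the question, that each config object is the payload you want to PUT, and that the ipv6 field holds the target URI - adjust to your real API):
var request = require('request');

jsonObj.ipConfig.forEach(function (config) {
    request({
        method: 'PUT',
        uri: 'http://' + 'USERNAME' + ':' + 'PASSWORD' + '@' + config.ipv6, // placeholder credentials/URI
        headers: { 'Content-Type': 'application/json' },
        body: config, // assumption: the config object is the JSON payload to send
        json: true
    }, function (err, response, body) {
        if (err) {
            console.log('PUT failed:', err);
        } else {
            console.log('PUT status:', response.statusCode, body);
        }
    });
});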

How to consume MTOM SOAP web service in node.js?

I need to download or process a file from a SOAP-based web service in Node.js.
Can someone suggest how to handle this in Node.js?
I tried the 'node-soap' / 'soap' NPM module. It worked for a normal SOAP web service, but not for binary-stream or MTOM-based SOAP web services.
I want to try to answer this... It's quite interesting that, 2 years and 2 months later, I cannot figure out how to easily solve the same problem.
I'm trying to get the attachment from a response like:
...
headers: { 'cache-control': 'no-cache="set-cookie"',
'content-type': 'multipart/related;boundary="----=_Part_61_425861994.1525782562904";type="application/xop+xml";start="";start-info="text/xml"',
...
body: '------=_Part_61_425861994.1525782562904\r\nContent-Type:
application/xop+xml; charset=utf-8;
type="text/xml"\r\nContent-Transfer-Encoding: 8bit\r\nContent-ID:
\r\n\r\n....\r\n------=_Part_61_425861994.1525782562904\r\nContent-Type:
application/octet-stream\r\nContent-Transfer-Encoding:
binary\r\nContent-ID:
\r\n\r\n�PNG\r\n\u001a\n\u0000\u0000\u0000\rIHDR\u0000\u0000\u0002,\u0000\u0000\u0005�\b\u0006\u0........binary....
I tried ws.js but no solution.
My solution:
var request = require("request");
var bsplit = require('buffer-split')
// it will extract "----=_Part_61_425861994.1525782562904" from the response
function getBoundaryFromResponse(response) {
var contentType = response.headers['content-type']
if (contentType && contentType.indexOf('boundary=') != -1 ) {
return contentType.split(';')[1].replace('boundary=','').slice(1, -1)
}
return null
}
function splitBufferWithPattern(binaryData, boundary) {
var b = new Buffer(binaryData),
delim = new Buffer(boundary),
result = bsplit(b, delim);
return result
}
var options = {
method: 'POST',
url: 'http://bla.blabal.../file',
gzip: true,
headers: {
SOAPAction: 'downloadFile',
'Content-Type': 'text/xml;charset=UTF-8'
},
body: '<soapenv: ... xml request of the file ... elope>'
};
var data = [];
var buffer = null;
var filename = "test.png"
request(options, function (error, response, body) {
if (error) throw new Error(error);
if (filename && buffer) {
console.log("filename: " + filename)
console.log(buffer.toString('base64'))
// after this, we can save the file from base64 ...
}
})
.on('data', function (chunk) {
data.push(chunk)
})
.on('end', function () {
var onlyPayload = splitBufferWithPattern(Buffer.concat(data), '\r\n\r\n') // this will get from PNG
buffer = onlyPayload[2]
buffer = splitBufferWithPattern(buffer, '\r\n-')[0]
console.log('Downloaded.');
})
I am not sure it will work in most cases. It looks like fragile code to me, so I'm looking for something better.
Use ws.js
Here is how to fetch the file attachments:
const fs = require('fs')
const ws = require('ws.js')
const { Http, Mtom } = ws

var handlers = [ new Mtom(), new Http() ];

var request = '<s:Envelope xmlns:s="http://www.w3.org/2003/05/soap-envelope">' +
                '<s:Body>' +
                  '<EchoFiles xmlns="http://tempuri.org/">' +
                    '<File1 />' +
                  '</EchoFiles>' +
                '</s:Body>' +
              '</s:Envelope>'

var ctx = { request: request
          , contentType: "application/soap+xml"
          , url: "http://localhost:7171/Service/mtom"
          , action: "http://tempuri.org/IService/EchoFiles"
          }

ws.send(handlers, ctx, function (ctx) {
    // read an attachment from the soap response
    var file = ws.getAttachment(ctx, "response", "//*[local-name(.)='File1']")
    // work with the file
    fs.writeFileSync("result.jpg", file)
})
Two limitations:
No basic auth provided out of the box; a patch is required: https://github.com/yaronn/ws.js/pull/40
If the file name is a URL, you need to apply another patch in mtom.js. Replace:
xpath = "//*[@href='cid:" + encodeURIComponent(id) + "']//parent::*"
with:
xpath = "//*[@href='cid:" + id + "']//parent::*"

How to access the variable data outside of a Node.js function?

This is a function in Node.js, which reads data from Analytics:
function getDataFromGA(Dimension, Metric, StartDate, EndDate, MaxResults) {
    var fs = require('fs'),
        crypto = require('crypto'),
        request = require('request'); // This is an external module

    var authHeader = {
            'alg': 'RS256',
            'typ': 'JWT'
        },
        authClaimSet = {
            'iss': '***t@developer.gserviceaccount.com', // Service account email
            'scope': 'https://www.googleapis.com/auth/analytics.readonly',
            // We MUST tell them we just want to read data
            'aud': 'https://accounts.google.com/o/oauth2/token'
        },
        SIGNATURE_ALGORITHM = '**',
        SIGNATURE_ENCODE_METHOD = '**',
        GA_KEY_PATH = '**',
        // finds current directory then appends private key to the directory
        gaKey;

    function urlEscape(source) {
        return source.replace(/\+/g, '-').replace(/\//g, '_').replace(/\=+$/, '');
    }

    function base64Encode(obj) {
        var encoded = new Buffer(JSON.stringify(obj), 'utf8').toString('base64');
        return urlEscape(encoded);
    }

    function readPrivateKey() {
        if (!gaKey) {
            gaKey = fs.readFileSync(GA_KEY_PATH, 'utf8');
        }
        return gaKey;
    }

    var authorize = function (callback) {
        var self = this,
            now = parseInt(Date.now() / 1000, 10), // Google wants us to use seconds
            cipher,
            signatureInput,
            signatureKey = readPrivateKey(),
            signature,
            jwt;

        // Setup time values
        authClaimSet.iat = now;
        authClaimSet.exp = now + 60; // Token valid for one minute

        // Setup JWT source
        signatureInput = base64Encode(authHeader) + '.' + base64Encode(authClaimSet);
        // Generate JWT
        cipher = crypto.createSign('RSA-SHA256');
        cipher.update(signatureInput);
        signature = cipher.sign(signatureKey, 'base64');
        jwt = signatureInput + '.' + urlEscape(signature);

        // Send request to authorize this application
        request({
            method: 'POST',
            headers: {
                'Content-Type': 'application/x-www-form-urlencoded'
            },
            uri: 'https://accounts.google.com/o/oauth2/token',
            body: 'grant_type=' + escape('urn:ietf:params:oauth:grant-type:jwt-bearer') +
                '&assertion=' + jwt
        }, function (error, response, body) {
            if (error) {
                console.log(error);
                callback(new Error(error));
            } else {
                var gaResult = JSON.parse(body);
                if (gaResult.error) {
                    callback(new Error(gaResult.error));
                } else {
                    callback(null, gaResult.access_token);
                    // console.log(gaResult);
                    console.log("Authorized");
                }
            }
        });
    };

    var request = require('request'),
        qs = require('querystring');

    authorize(function (err, token) {
        if (!err) {
            // Query the number of total visits for a month
            var requestConfig = {
                'ids': 'ga:72333024',
                'dimensions': Dimension,
                'metrics': Metric,
                // 'sort': '-ga:users',
                'start-date': StartDate,
                'end-date': EndDate,
                'max-results': MaxResults
            };
            request({
                method: 'GET',
                headers: {
                    'Authorization': 'Bearer ' + token // Here is where we use the auth token
                },
                uri: 'https://www.googleapis.com/analytics/v3/data/ga?' + qs.stringify(requestConfig)
            }, function (error, resp, body) {
                console.log(body);
                var data = JSON.parse(body);
                console.log(data.totalsForAllResults);
                console.log(data.rows);
            });
        }
    });
}
Here I try to access it from outside:
var gaJSON = utils.getDataFromGA("ga:country", "ga:pageviews", "2011-08-04", "2014-09-12", "50");
res.send(gaJSON);
My question is: how can I access the variable data at the end of the first function? How can I use it from outside of the function?
You can assign data to a variable declared in the first function, but since the authorize method is asynchronous, the variable will still be undefined at the end of the first function. The best way to handle this is with callbacks.
I think you want to return something related to this variable, right? Try adding a callback parameter to the first function and then call that callback, passing the result:
callback(variable)
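For example, a minimal sketch (reusing the names from the question; the extra callback parameter is the addition):
function getDataFromGA(Dimension, Metric, StartDate, EndDate, MaxResults, callback) {
    // ... same code as in the question, except that the final request handler does:
    //     var data = JSON.parse(body);
    //     callback(null, data);
}

getDataFromGA("ga:country", "ga:pageviews", "2011-08-04", "2014-09-12", "50", function (err, data) {
    if (err) return res.send(err);
    res.send(data); // `data` is only available here, once the API call has finished
});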
Why do you want to access it from outside?
If you really need to, then create a function, pass the "data" as an argument, and invoke that function:
console.log(body);
var data = JSON.parse(body);
myNewFunction(data);
Write all your logic that uses data inside "myNewFunction".
