How do I send UpperCase Headers in HTTP - node.js

The HTTP standard says header names are case insensitive.
Ruby and Node both force header names to lower case.
We are talking to an outside server program, built on the .NET framework, that treats the 'AuthToken' header name as case sensitive, so apparently neither side follows the standard. In this instance we need the header to keep its upper-case letters.

At the time of writing, the following setHeader was copied from the _http_outgoing module of Node's core lib:
var http = require('http');
http.OutgoingMessage.prototype.setHeader = function(name, value) {
  if (arguments.length < 2) {
    throw new Error('`name` and `value` are required for setHeader().');
  }
  if (this._header) {
    throw new Error('Can\'t set headers after they are sent.');
  }
  // NO LOWER CASE
  var key = name; //.toLowerCase();
  this._headers = this._headers || {};
  this._headerNames = this._headerNames || {};
  this._headers[key] = value;
  this._headerNames[key] = name;
  // Since we're re-defining the method, we can't use this part anymore
  //if (automaticHeaders[key]) {
  //  this._removedHeader[key] = false;
  //}
};
The lowercase conversion is the part commented out.
So, if you hit this problem, require http and override this method with the version from the Node release you're currently running. It should then work properly. You could do a similar thing by overriding a method in Ruby, but it won't be as quick and easy.
Then this will work:
require('request')
request({url: 'http://myurl.com', headers: {UpperCaseWorks: 'Yay'}})
EDIT: here is the equivalent for a newer version of Node:
OutgoingMessage.prototype.setHeader = function setHeader(name, value) {
  if (this._header) {
    throw new errors.Error('ERR_HTTP_HEADERS_SENT', 'set');
  }
  validateHeader(name, value);
  if (!this[outHeadersKey])
    this[outHeadersKey] = {};
  // no more lower case
  const key = name; //.toLowerCase();
  this[outHeadersKey][key] = [name, value];
  switch (key.length) {
    case 10:
      if (key === 'connection')
        this._removedConnection = false;
      break;
    case 14:
      if (key === 'content-length')
        this._removedContLen = false;
      break;
    case 17:
      if (key === 'transfer-encoding')
        this._removedTE = false;
      break;
  }
};
It calls this local helper, which will need to be defined as well:
function validateHeader(name, value) {
  let err;
  if (typeof name !== 'string' || !name || !checkIsHttpToken(name)) {
    err = new errors.TypeError('ERR_INVALID_HTTP_TOKEN', 'Header name', name);
  } else if (value === undefined) {
    err = new errors.TypeError('ERR_HTTP_INVALID_HEADER_VALUE', value, name);
  } else if (checkInvalidHeaderChar(value)) {
    debug('Header "%s" contains invalid characters', name);
    err = new errors.TypeError('ERR_INVALID_CHAR', 'header content', name);
  }
  if (err !== undefined) {
    Error.captureStackTrace(err, validateHeader);
    throw err;
  }
}
And this:
const { outHeadersKey } = require('internal/http');
Anyway, check your version of Node to see exactly what you are overriding.
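If you want to inspect the implementation your installed Node actually ships with before patching it, a quick one-off check (not part of the fix itself) is to print the method's source:

// Inspect the built-in implementation before overriding it
const http = require('http');
console.log(process.version);
console.log(http.OutgoingMessage.prototype.setHeader.toString());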

Piggybacking on Funkodebat's answer, here's my solution for Node 16:
const http = require('http');

// https://github.com/nodejs/node/blob/v16.x/lib/_http_outgoing.js#L574-L587
const { validateHeaderName, validateHeaderValue } = http;

http.OutgoingMessage.prototype.setHeader = function setHeader(name, value) {
  if (this._header) {
    throw new Error('Cannot set headers after they are sent to the client');
  }
  validateHeaderName(name);
  validateHeaderValue(name, value);
  // Extra logic to find kOutHeaders symbol in `this`
  const kOutHeaders = Object.getOwnPropertySymbols(this).find(
    (sym) => sym.toString() === 'Symbol(kOutHeaders)'
  );
  let headers = this[kOutHeaders];
  if (headers === null) this[kOutHeaders] = headers = Object.create(null);
  headers[name] = [name, value]; // toLowerCase removed from here
  return this;
};
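For a quick sanity check, here is a minimal, self-contained sketch (the header name and value are arbitrary examples) that starts a throwaway local server, makes a request with the patched setHeader in place, and prints the raw header names the server received, so you can observe exactly what casing went over the wire:

// Observe header casing on the wire (assumes the override above has already run)
const http = require('http');

const server = http.createServer((req, res) => {
  // rawHeaders preserves the exact header-name casing as received
  console.log(req.rawHeaders);
  res.end('ok');
});

server.listen(0, () => {
  const req = http.request({ port: server.address().port, path: '/' });
  req.setHeader('AuthToken', 'abc123'); // example header to inspect
  req.on('response', (res) => {
    res.resume();
    res.on('end', () => server.close());
  });
  req.end();
});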

Looking at the Node.js source on GitHub, you do not need to override OutgoingMessage.prototype.setHeader at all.
Instead of passing the headers as an Object, pass them as an Array. Here is a working example:
const http = require('http');

const postData = JSON.stringify({
  'msg': 'Hello World!'
});

const options = {
  hostname: 'www.google.com',
  port: 80,
  path: '/upload',
  method: 'POST',
  // use an Array instead of an Object to avoid the lowercase transformation
  headers: [
    ['Host', 'localhost'],
    ['X-CustomHeaderFancy', 'valueForFancyHeader'],
    ['Content-Type', 'application/json'],
    ['Content-Length', Buffer.byteLength(postData)]
  ]
};
const req = http.request(options, (res) => {
  console.log(`STATUS: ${res.statusCode}`);
  console.log(`HEADERS: ${JSON.stringify(res.headers)}`);
  res.setEncoding('utf8');
  res.on('data', (chunk) => {
    console.log(`BODY: ${chunk}`);
  });
  res.on('end', () => {
    console.log('No more data in response.');
  });
});

req.on('error', (e) => {
  console.error(`problem with request: ${e.message}`);
});

// Write data to request body
req.write(postData);
req.end();
Inside the source code at https://github.com/nodejs/node/blob/v16.x/lib/_http_client.js#L249 there is a test that checks whether the headers are an array; if they are, the lowercase transformation is bypassed.
I do not know why this is not documented. It's a very useful feature.

Related

Bad gateway from Kibana behind a Node.js reverse proxy

I have a Kibana instance behind a Node.js 16.x AWS Lambda reverse proxy. It works fine for almost everything, except for the "Discover" section: when I add 2 or more filters over a 2+ month time range, it gives back a Bad Gateway error.
Detail of the error:
Wrapper#https://mydomain/_dashboards/909221/bundles/core/core.entry.js:6:4249
_createSuperInternal#https://mydomain/_dashboards/909221/bundles/core/core.entry.js:6:3388
...
...
Implementation:
function proxy(event, context, lambdaCallback) {
  delete(event.headers["accept-encoding"])
  var path = event.path;
  if (event.multiValueQueryStringParameters) {
    path += '?' + deserializeQueryString(event.multiValueQueryStringParameters)
  }
  // Calculate the options for the HTTPS request
  var opts = {
    host: my_es_endpoint,
    path: path,
    method: event.httpMethod,
    service: 'es',
    region: my_region,
    headers: event.headers
  }
  if ((event.httpMethod == 'POST') || (event.httpMethod == 'PUT')) {
    if (event.body) {
      var buff = new Buffer(event.body, "base64")
      var payload = buff.toString('utf-8')
      opts.body = payload
    }
  }
  // Use aws4 to sign the request so we can talk with ElasticSearch directly
  aws4.sign(opts);
  const req = https.request(opts, function (res) {
    var bodyParts = [];
    // We need to read all the incoming data
    res.on('data', (chunk) => {
      bodyParts.push(chunk)
    });
    res.on("end", () => {
      // We re-create the read content
      var body = Buffer.concat(bodyParts).toString()
      // We send back uncompressed data
      delete(res.headers['content-encoding'])
      res.headers['X-Frame-Options'] = 'DENY'
      // res.headers['content-security-policy'] = "default-src 'self'; frame-ancestors 'none'"
      res.headers['X-XSS-Protection'] = '1; mode=block'
      res.headers['X-Content-Type-Options'] = 'nosniff'
      var response = {
        statusCode: res.statusCode,
        body: body,
        headers: res.headers
      }
      lambdaCallback(null, response);
    })
  });
  req.on('error', (e) => {
    console.log(`Error caught when calling ElasticSearch: ${e}`)
  })
  // For POST/PUT requests, we send the content of the payload as well
  if ((event.httpMethod == 'POST') || (event.httpMethod == 'PUT')) {
    if (event.body) {
      var buff = new Buffer(event.body, "base64")
      var payload = buff.toString('utf-8')
      req.write(payload)
    }
  }
  req.end();
  return req;
}
I tried:
Increasing the timeout on the request
Increasing the Lambda memory from 128 MB to 512 MB
Increasing the Lambda timeout
One thing you can try is increasing the memory allocation for the Lambda function; it might resolve the issue.
Or, to remove the headache altogether, use an Amazon API Gateway instead of a reverse proxy in a Lambda function to proxy requests to Kibana. You can also increase the timeout for Kibana requests. This will be more efficient and scalable for your project, with fewer issues.
I tried to optimize your code; can you check whether this improves or corrects your issue?
const https = require('https');
const aws4 = require('aws4');

function deserializeQueryString(query) {
  const params = new URLSearchParams();
  for (const key of Object.keys(query)) {
    for (const value of query[key]) {
      params.append(key, value);
    }
  }
  return params.toString();
}

exports.handler = async (event, context, lambdaCallback) => {
  const { headers, httpMethod, multiValueQueryStringParameters, body } = event;
  // `path` is reassigned below, so it cannot be destructured as a const
  let path = event.path;
  // Remove accept-encoding header
  delete headers['accept-encoding'];
  // Append query string parameters to the path
  if (multiValueQueryStringParameters) {
    path += '?' + deserializeQueryString(multiValueQueryStringParameters);
  }
  const opts = {
    host: my_es_endpoint,
    path,
    method: httpMethod,
    service: 'es',
    region: my_region,
    headers
  };
  // Add request body for POST and PUT methods
  if (['POST', 'PUT'].includes(httpMethod) && body) {
    opts.body = Buffer.from(body, 'base64').toString('utf-8');
  }
  // Sign the request using aws4
  aws4.sign(opts);
  return new Promise((resolve, reject) => {
    const req = https.request(opts, res => {
      let body = '';
      res.on('data', chunk => {
        body += chunk;
      });
      res.on('end', () => {
        // Remove content-encoding header
        delete res.headers['content-encoding'];
        // Set security headers
        res.headers['X-Frame-Options'] = 'DENY';
        res.headers['X-XSS-Protection'] = '1; mode=block';
        res.headers['X-Content-Type-Options'] = 'nosniff';
        const response = {
          statusCode: res.statusCode,
          body,
          headers: res.headers
        };
        resolve(response);
      });
    });
    req.on('error', error => {
      console.error(`Error caught when calling ElasticSearch: ${error}`);
      reject(error);
    });
    // Write request body for POST and PUT methods
    if (['POST', 'PUT'].includes(httpMethod) && body) {
      req.write(opts.body);
    }
    req.end();
  }).then(lambdaCallback)
    .catch(error => {
      lambdaCallback(error);
    });
};
I am not sure the error is related to the proxy implementation; however, I found something that helped, and I am no longer experiencing the problem.
I reduced the value of discover:sampleSize in Kibana's Advanced Settings, as suggested here.

Download the <title> of a URL with minimal data usage

For the purpose of generating links to other websites, I need to download the content of the <title> tag.
But I would like to use as little bandwidth as possible.
In some hardcore variant, process the input stream and close the connection once the title has been reached.
Or, for example, fetch the first 1024 chars on a first attempt and, if that does not contain the whole title, fall back to fetching the whole thing.
What could I use in Node.js to achieve this?
In case someone else is interested, here is what I ended up with (a very early version; use it just to get a notion of what to use).
A few notes, though:
the core of this solution is to read as few chunks of the HTTP response as possible, until the <title> is reached.
I am not sure how friendly it is, by convention, to interrupt the connection with response.destroy() (which probably closes the underlying socket).
import {get as httpGet, IncomingMessage} from 'http';
import {get as httpsGet} from 'https';
import {titleParser} from '../ParseTitleFromHtml';

async function parseFromStream(response: IncomingMessage): Promise<string|null> {
  let body = '';
  for await (const chunk of response) {
    const text = chunk.toString();
    body += text;
    const title = titleParser.parse(body);
    if (title !== null) {
      response.destroy();
      return title;
    }
  }
  response.destroy();
  return null;
}

export enum TitleDownloaderRejectionCodesEnum
{
  INVALID_STATUS_CODE = 'INVALID_STATUS_CODE',
  TIMEOUT = 'TIMEOUT',
  NOT_FOUND = 'NOT_FOUND',
  FAILED = 'FAILED', // all other errors
}

export class TitleDownloader
{
  public async downloadTitle (url: string): Promise<string|null>
  {
    const isHttps = url.search(/https/i) === 0;
    const method = isHttps ? httpsGet : httpGet;
    return new Promise((resolve, reject) => {
      const clientRequest = method(
        url,
        async (response) => {
          if (!(response.statusCode >= 200 && response.statusCode < 300)) {
            clientRequest.abort();
            reject(response.statusCode === 404
              ? TitleDownloaderRejectionCodesEnum.NOT_FOUND
              : TitleDownloaderRejectionCodesEnum.INVALID_STATUS_CODE
            );
            return;
          }
          const title = await parseFromStream(response);
          resolve(title);
        }
      );
      clientRequest.setTimeout(2000, () => {
        clientRequest.abort();
        reject(TitleDownloaderRejectionCodesEnum.TIMEOUT);
      })
      .on('error', (err: any) => {
        // clear timeout
        if (err && err.message && err.message.indexOf('ENOTFOUND') !== -1) {
          reject(TitleDownloaderRejectionCodesEnum.NOT_FOUND);
          return;
        }
        reject(TitleDownloaderRejectionCodesEnum.FAILED);
      });
    });
  }
}

export const titleDownloader = new TitleDownloader();
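The titleParser imported from '../ParseTitleFromHtml' is not shown in the question. A minimal sketch of what such a module might contain (a hypothetical regex-based extractor, not the author's actual implementation, shown here as plain JavaScript):

// Hypothetical minimal extractor: returns the <title> text once the closing
// tag has appeared in the buffered HTML, otherwise null so the caller keeps reading.
const TITLE_RE = /<title[^>]*>([\s\S]*?)<\/title>/i;

export const titleParser = {
  parse(html) {
    const match = TITLE_RE.exec(html);
    return match ? match[1].trim() : null;
  }
};

Returning null signals the stream reader above to keep buffering further chunks.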

Titanium http request leak

I have to make a load of subsequent HTTP requests to load product images into the app, as it has to function in an offline mode.
Around 2000 calls.
The HTTP client seems to have a memory leak, which causes the persistent MBytes in Instruments to rise to around 200 without being garbage collected.
After use, the HTTP client is set to null.
I have tried setting the file property of the HTTP client, without any success.
I have set the onload function to only call the callback function, which in turn calls the HTTP send function again (thus looping through the 2000 products to get the respective pictures).
I changed from SDK 7.5.0.v20180824022007 to SDK 8.1.0.v20190423134840 and even SDK 9.0.0.v20181031080737, but the problem remains.
The code of my HTTP common module:
function HttpClient(options = {}) {
  this.root = options.root || "ROOT_OF_API";
  this.endpoint = options.endpoint || false;
  this.needsChecksum = options.needsChecksum || false;
  this.data = {};
  this.method = options.method || "Post";
  this.timeout = options.timeout || 5000;
  this.calculateChecksum = function () {
    var moment = require('alloy/moment');
    if (!Alloy.Models.user.authenticated()) {
      return false;
    }
    var sp = (moment().unix() - Alloy.Models.meta.get("timeDiff"))
    var hash = Ti.Utils.md5HexDigest("nX" + sp + "FossilSFAapp" + Alloy.Models.user.get('token').substring(10, 14) + "CS")
    var checksum = sp + "-" + hash.substring(4, 8)
    this.data.checksum = checksum
  }
};

HttpClient.prototype.setData = function (data) {
  this.data = data
};

HttpClient.prototype.send = function (callback) {
  // set a new checksum for the request if needed
  if (this.needsChecksum) {
    this.calculateChecksum()
  }
  // add app version
  if (this.method === "POST") {
    this.data.appversion = Ti.App.version;
  }
  // send
  var client = Ti.Network.createHTTPClient({
    onload: function () {
      callback({
        success: true
      })
    },
    onerror: function(e) {
      callback({
        message: e.message,
        success: false
      })
    },
    timeout: this.timeout
  });
  client.open(this.method, this.root + this.endpoint);
  if (this.setFile) {
    client.file = Ti.Filesystem.getFile(Ti.Filesystem.applicationDataDirectory, this.fileName);
  }
  client.setRequestHeader('Content-Type', 'application/json');
  client.send(JSON.stringify(this.data));
  client = null;
};

module.exports = HttpClient;
The module is then used in the product model like so:
var HttpClient = require('./HttpClient');
var httpClient = new HttpClient();

function getImage (i) {
  if (collection.at(i) && collection.at(i).get('iimage0') && collection.at(i).needsImageUpdate()) {
    httpClient.endpoint = collection.at(i).get('acarticlenumber') + ".jpg";
    httpClient.fileName = 'productImages/' + collection.at(i).get('acarticlenumber') + '.jpg'
    httpClient.send(function(e){
      if (i < collection.length) {
        i++
        getImage(i)
      } else {
        finished()
      }
    });
  } else {
    if (i < collection.length) {
      i++
      getImage(i)
    } else {
      finished()
    }
  }
}

// start getting images at index 0
getImage(0)
Does anyone have an idea why these memory leaks appear? They only ever occur when actually sending the HTTP request.

Jenkins Git Plugin does not receive posted Parameters

I am trying to use Node.js to programmatically build Jenkins jobs that take Git parameters.
I am sending the parameters as post data, as shown below. However, no matter what value I assign to ref, Jenkins runs the build with the default parameter value (specified in the job's configuration). I have tried passing in the parameters as query strings in the URL, but that also did not work.
I am using Jenkins v1.651.1 and Node v6.2.0.
var jobOptions = {
  url: requestedJobObject.url + 'build',
  method: 'POST',
  port: 8080
};

// parameters = { "name": "ref", "value": "origin/master" }
if (!_.isEmpty(parameters)) {
  var jsonParametersString = JSON.stringify({"parameter": parameters});
  var parameterParam = encodeURIComponent(jsonParametersString);
  parameters.json = parameterParam;
  jobOptions.headers = {
    'Content-Type': 'application/x-www-form-urlencoded',
    'Content-Length': querystring.stringify(parameters).length
  };
  jobOptions.url += 'WithParameters';
  postData = querystring.stringify(parameters);
}

// jobOptions contains auth field & separates url into hostname and path
// makes an http request to jobOptions and calls req.write(postData)
makeRequest(jobOptions, callback, responseCB, postData)
makeRequest makes an http request:
function makeRequest (object, callback, responseCB, postData) {
  var accumulator = '';
  var parsedUrl = u.parse('//' + object.url, true, true);
  var options = {
    hostname: parsedUrl.hostname,
    port: object.port || 8080,
    path: parsedUrl.path,
    method: object.method || 'GET',
    auth: getAuthByHost(parsedUrl.hostname)
  };
  if (object.headers) {
    options.headers = object.headers;
  }
  var response = null;
  var req = http.request(options, function(res) {
    response = res;
    res.on('data', function (data) {
      accumulator = accumulator + data.toString();
      res.resume();
    });
    res.on('close', function () {
      // first assume accumulator is a JSON object
      var responseContent;
      try {
        responseContent = JSON.parse(accumulator);
      }
      // if not an object, use accumulator as a string
      catch (err) {
        responseContent = accumulator;
      }
      callback(responseContent, response.statusCode);
      if (responseCB) {
        responseCB(res);
      }
    });
  });
  req.on('close', function () {
    // first assume accumulator is a JSON object
    var responseContent;
    try {
      responseContent = JSON.parse(accumulator);
    }
    catch (err) {
      responseContent = accumulator;
    }
    callback(responseContent, response.statusCode);
    if (responseCB) {
      responseCB(response);
    }
  });
  if (postData) {
    req.write(postData);
  }
  req.end();
}
Try this; it works for me:
var auth = 'Basic yourUserToken';
var jobOptions = {
  url: 'jenkinsHostName:8080/jenkins/job/jobName/' + 'build',
  method: 'POST',
  port: 8080
};
var parameter = {"parameter": [{"name":"ref", "value":"origin/master"}]};
var postData;
if (!_.isEmpty(parameter)) {
  var jsonParametersString = JSON.stringify(parameter);
  jobOptions.headers = {
    'Authorization': auth,
    'Content-Type': 'application/x-www-form-urlencoded',
  };
  jobOptions.url += '?token=jobRemoteTriggerToken';
  postData = "json=" + jsonParametersString;
  console.log("postData = " + postData);
}
var callback;
var responseCB;
makeRequest(jobOptions, callback, responseCB, postData);
It is based on your code. I removed the querystring call - it seemed to return an empty string when run on the parameters object. I changed /buildWithParameters to /build - it didn't work the other way.
In addition, verify that when you pass 'Content-Length' in the header, it doesn't truncate your JSON parameters object (I removed it).
Also note that I used the user API token, which you can get at http://yourJenkinsUrl/me/configure by clicking the "Show API Token" button.
Not sure about this, as I don't know Node.js - but maybe this fits: the Jenkins remote access API indicates that the parameter entity in the JSON request must point to an array, even if there is just one parameter to be defined.
Does the change below fix the problem (note the square brackets around parameters)?
[...]
var jsonParametersString = JSON.stringify({"parameter": [parameters]});
[...]

Why is PUT request body undefined?

I'm making the following request to my koajs server:
$.ajax({
  type : 'PUT', // this.request.body undefined server side
  // type : 'POST', // this.request.body all good server side
  url : url,
  data : body,
  dataType : 'json'
})
But on the server side this.request.body is always undefined.
If I change the request type to POST, it works fine.
Any ideas?
EDIT
I'm using koa-route.
EDIT 2
Just realised I'm using koa-body-parser, which is probably more relevant.
Try using the koa-bodyparser middleware:
const bodyParser = require('koa-bodyparser')
app.use(bodyParser())
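For context, here is a minimal, self-contained sketch of a PUT body arriving once the parser is registered. It assumes a koa 1.x setup with generator middleware (to match the question's this.request.body usage); the port is an arbitrary example:

// Minimal koa 1.x sketch: register koa-bodyparser before any route middleware
const koa = require('koa');
const bodyParser = require('koa-bodyparser');

const app = koa();
app.use(bodyParser());

app.use(function *() {
  if (this.method === 'PUT') {
    // With the parser in place, the JSON body of a PUT is available here
    this.body = { received: this.request.body };
  } else {
    this.status = 404;
  }
});

app.listen(3000);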
I think koa-router will parse typical request stuff: URL params, forms, etc. If you want to parse the body of a request that contains a JSON object, you need to apply a middleware (as alex alluded to).
Also, please check that you are sending valid JSON.
Take a look at this, from koa-bodyparser:
/**
 * @param [Object] opts
 * - {String} jsonLimit default '1mb'
 * - {String} formLimit default '56kb'
 * - {String} encoding default 'utf-8'
 */
return function *bodyParser(next) {
  if (this.request.body !== undefined) {
    return yield* next;
  }
  if (this.is('json')) {
    this.request.body = yield parse.json(this, jsonOpts);
  } else if (this.is('urlencoded')) {
    this.request.body = yield parse.form(this, formOpts);
  } else {
    this.request.body = null;
  }
  yield* next;
};
There looks to be a 1 MB limit on the amount of JSON. Following the call into co-body/lib/json.js:
module.exports = function(req, opts){
  req = req.req || req;
  opts = opts || {};
  // defaults
  var len = req.headers['content-length'];
  if (len) opts.length = ~~len;
  opts.encoding = opts.encoding || 'utf8';
  opts.limit = opts.limit || '1mb';
  return function(done){
    raw(req, opts, function(err, str){
      if (err) return done(err);
      try {
        done(null, JSON.parse(str));
      } catch (err) {
        err.status = 400;
        err.body = str;
        done(err);
      }
    });
  }
};
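If the 1 MB default ever becomes the problem, koa-bodyparser exposes limit options (such as jsonLimit) when you register it. A short sketch; the 5mb value is just an example:

// Raise the JSON body size limit when registering the parser
const bodyParser = require('koa-bodyparser');
app.use(bodyParser({
  jsonLimit: '5mb' // example value; the default is '1mb'
}));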
