Bad gateway from Kibana behind a Node.js reverse proxy - node.js

I have a Kibana instance behind a Node.js 16.x AWS Lambda reverse proxy. It works fine for almost everything, except the "Discover" section: when I add two or more filters over a time range of two months or more, it returns a bad gateway error.
Detail of the error:
Wrapper#https://mydomain/_dashboards/909221/bundles/core/core.entry.js:6:4249
_createSuperInternal#https://mydomain/_dashboards/909221/bundles/core/core.entry.js:6:3388
...
...
Implementation:
function proxy(event, context, lambdaCallback) {
  delete event.headers["accept-encoding"];
  var path = event.path;
  if (event.multiValueQueryStringParameters) {
    path += '?' + deserializeQueryString(event.multiValueQueryStringParameters);
  }
  // Calculate the options for the HTTPS request
  var opts = {
    host: my_es_endpoint,
    path: path,
    method: event.httpMethod,
    service: 'es',
    region: my_region,
    headers: event.headers
  };
  if ((event.httpMethod == 'POST') || (event.httpMethod == 'PUT')) {
    if (event.body) {
      var buff = new Buffer(event.body, "base64");
      var payload = buff.toString('utf-8');
      opts.body = payload;
    }
  }
  // Use aws4 to sign the request so we can talk to Elasticsearch directly
  aws4.sign(opts);
  const req = https.request(opts, function (res) {
    var bodyParts = [];
    // We need to read all the incoming data
    res.on('data', (chunk) => {
      bodyParts.push(chunk);
    });
    res.on("end", () => {
      // We re-create the read content
      var body = Buffer.concat(bodyParts).toString();
      // We send back uncompressed data
      delete res.headers['content-encoding'];
      res.headers['X-Frame-Options'] = 'DENY';
      // res.headers['content-security-policy'] = "default-src 'self'; frame-ancestors 'none'"
      res.headers['X-XSS-Protection'] = '1; mode=block';
      res.headers['X-Content-Type-Options'] = 'nosniff';
      var response = {
        statusCode: res.statusCode,
        body: body,
        headers: res.headers
      };
      lambdaCallback(null, response);
    });
  });
  req.on('error', (e) => {
    console.log(`Error caught when calling Elasticsearch: ${e}`);
  });
  // For POST/PUT requests, we send the content of the payload as well
  if ((event.httpMethod == 'POST') || (event.httpMethod == 'PUT')) {
    if (event.body) {
      var buff = new Buffer(event.body, "base64");
      var payload = buff.toString('utf-8');
      req.write(payload);
    }
  }
  req.end();
  return req;
}
I tried:
Increasing the timeout on the request (see the sketch below)
Increasing the Lambda memory from 128 MB to 512 MB
Increasing the Lambda timeout
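For reference, here is a minimal sketch of what "increasing the timeout on the request" looked like; opts is the signed options object from the proxy above, handleResponse is a placeholder for the response handler, and the 25-second value is arbitrary:

// Hedged sketch: an explicit socket timeout on the proxied request.
const req = https.request({ ...opts, timeout: 25000 }, handleResponse);
req.on('timeout', () => {
  // Destroy the socket so the 'error' handler fires instead of hanging
  req.destroy(new Error('Upstream request timed out'));
});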

One thing you can try is increasing the memory allocation for the Lambda function; it might resolve the issue.
Better yet, to remove the headache entirely, you should use an Amazon API Gateway instead of a reverse proxy in a Lambda function to proxy requests to Kibana. You can also increase the timeout for Kibana requests. This will be more efficient and scalable for your project, with fewer issues.
I tried to optimize your code; can you check whether this improves or corrects your issue?
const https = require('https');
const aws4 = require('aws4');

function deserializeQueryString(query) {
  const params = new URLSearchParams();
  for (const key of Object.keys(query)) {
    for (const value of query[key]) {
      params.append(key, value);
    }
  }
  return params.toString();
}

exports.handler = async (event, context, lambdaCallback) => {
  const { headers, httpMethod, multiValueQueryStringParameters, body } = event;
  // `path` is reassigned below, so it cannot be a const destructured binding
  let path = event.path;
  // Remove accept-encoding header
  delete headers['accept-encoding'];
  // Append query string parameters to the path
  if (multiValueQueryStringParameters) {
    path += '?' + deserializeQueryString(multiValueQueryStringParameters);
  }
  const opts = {
    host: my_es_endpoint,
    path,
    method: httpMethod,
    service: 'es',
    region: my_region,
    headers
  };
  // Add request body for POST and PUT methods
  if (['POST', 'PUT'].includes(httpMethod) && body) {
    opts.body = Buffer.from(body, 'base64').toString('utf-8');
  }
  // Sign the request using aws4
  aws4.sign(opts);
  return new Promise((resolve, reject) => {
    const req = https.request(opts, res => {
      let body = '';
      res.on('data', chunk => {
        body += chunk;
      });
      res.on('end', () => {
        // Remove content-encoding header
        delete res.headers['content-encoding'];
        // Set security headers
        res.headers['X-Frame-Options'] = 'DENY';
        res.headers['X-XSS-Protection'] = '1; mode=block';
        res.headers['X-Content-Type-Options'] = 'nosniff';
        const response = {
          statusCode: res.statusCode,
          body,
          headers: res.headers
        };
        resolve(response);
      });
    });
    req.on('error', error => {
      console.error(`Error caught when calling Elasticsearch: ${error}`);
      reject(error);
    });
    // Write request body for POST and PUT methods
    if (['POST', 'PUT'].includes(httpMethod) && body) {
      req.write(opts.body);
    }
    req.end();
  }).then(lambdaCallback)
    .catch(error => {
      lambdaCallback(error);
    });
};

I am not sure whether the error is related to the proxy implementation; however, I found something that helped, and I am no longer experiencing the problem.
I reduced the value of discover:sampleSize in Kibana's Advanced Settings, as suggested here.
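If you want to script the same change, here is a minimal sketch that lowers discover:sampleSize through Kibana's advanced-settings endpoint; the host, the /_dashboards path prefix, and the value 100 are assumptions to adapt to your deployment:

// Hypothetical sketch: lowering discover:sampleSize programmatically.
const https = require('https');

const body = JSON.stringify({ changes: { 'discover:sampleSize': 100 } });
const req = https.request({
  host: 'mydomain',
  path: '/_dashboards/api/kibana/settings',
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    'kbn-xsrf': 'true', // Kibana rejects state-changing requests without this
    'Content-Length': Buffer.byteLength(body)
  }
}, res => res.pipe(process.stdout));
req.on('error', err => console.error(err));
req.write(body);
req.end();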

Related

How to loop over the URLs in the options in Node.js

var request = require('request');
var options = {
  'method': 'GET',
  'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=1', // To get all the users' data from the repos
  'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=2',
  'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=3',
  'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=4',
  'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=5',
  'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=6',
  'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=7',
  'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=8',
  'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=9',
  'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=10',
  'url': 'https://api.github.com/orgs/organizationName/repos?per_page=100&page=11',
  'headers': {
    'Accept': 'application/vnd.github.mercy-preview+json', // to get topics of the repos
    'Authorization': 'Bxxx xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
    'User-Agent': 'sxxxxxxxxxxxxx'
  }
};
request(options, function (error, response) {
  if (error) throw new Error(error);
  console.log(response.body);
});
In the above code I want to loop over the URLs continuously until the last page.
If that's not possible, and anyone has an idea for using pagination here, please help me out.
You cannot have multiple attributes for one object key; you have to call every URL individually. I tried to solve this using asynchronous code, because looping with callback functions is confusing and dangerous with regard to the call stack.
const request = require('request');

// configuration for the URL generation
const perPages = 100;
const startPage = 1;
const endPage = 11;
const url = 'https://api.github.com/orgs/organizationName/repos?per_page=%perPages%&page=%page%';

// define an asynchronous call for one URL
async function callOneUrl(url) {
  // local options for each URL
  const options = {
    method: 'GET',
    url: url,
    headers: {
      Accept: 'application/vnd.github.mercy-preview+json',
      Authorization: 'Bxxx xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
      'User-Agent': 'sxxxxxxxxxxxxx'
    }
  };
  return new Promise((resolve, reject) => {
    request(options, function (error, response) {
      if (error) return reject(error);
      resolve(response);
    });
  });
}

// call each URL with a for loop
(async () => {
  for (let i = startPage; i <= endPage; i++) {
    // use await to get the resolved value of the Promise or catch the error
    try {
      var response = await callOneUrl(url.replace('%perPages%', perPages).replace('%page%', i));
      // handle response here
      console.log(response.body);
    } catch (error) {
      // handle errors here
      throw new Error(error);
    }
  }
})();
Alternatively, you can fire all the requests in parallel with request-promise and Promise.all:

const request = require('request-promise');

const urls = ["http://www.google.com", "http://www.example.com"];
const promises = urls.map(url => request(url));
Promise.all(promises).then((data) => {
  // data is an array of the resolved response bodies
});

Apart from the above, you can also use async.eachSeries or async.parallel, etc.; a minimal async.eachSeries sketch follows.
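For illustration, here is a hedged sketch of the async.eachSeries variant; it assumes npm install async request and reuses the page range and headers from the question:

// Hedged sketch: sequential pagination with async.eachSeries.
const async = require('async');
const request = require('request');

const pages = Array.from({ length: 11 }, (_, i) => i + 1);

async.eachSeries(pages, (page, done) => {
  const url = `https://api.github.com/orgs/organizationName/repos?per_page=100&page=${page}`;
  request({ url, headers: { 'User-Agent': 'sxxxxxxxxxxxxx' } }, (error, response) => {
    if (error) return done(error); // abort the series on the first error
    console.log(response.body);
    done();
  });
}, (err) => {
  if (err) console.error(err);
});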
You can download a list of repos with a do...while loop. We'll set a maximum number of pages to download and exit when we reach either that limit or the last page.
I would suggest using the request-promise-native package, which allows us to use the very nice async/await syntax.
I've given the example of downloading repos for the mongodb org; you can easily replace it with whichever one you wish.
I would also note that the request library is now deprecated. We can use it, of course, but we should consider replacing it in the future (a fetch-based sketch follows the code below).
We now also log the repo information and save it to the output file.
const rp = require("request-promise-native");
const fs = require("fs");

async function downloadRepoInformation(org, outputFile) {
  let repoList = [];
  let page = 0;
  const resultsPerPage = 20;
  const maxPages = 10;
  const uri = `https://api.github.com/orgs/${org}/repos`;
  do {
    try {
      let response = await rp.get({ uri, json: true, qs: { per_page: resultsPerPage, page: ++page }, headers: { "User-Agent": "request" } });
      console.log(`downloadRepoInformation: Downloaded page: ${page}, repos: ${response.length}...`);
      repoList = repoList.concat(response);
      console.log("downloadRepoInformation: response", JSON.stringify(response, null, 2));
      console.log("downloadRepoInformation: repoList.length:", repoList.length);
      if (response.length < resultsPerPage) {
        console.log(`downloadRepoInformation: Last page reached: exiting loop...`);
        break;
      }
    } catch (error) {
      console.error(`downloadRepoInformation: An error occurred:`, error);
      break;
    }
  } while (page <= maxPages);

  console.log("downloadRepoInformation: download complete: repoList.length:", repoList.length);
  console.log("downloadRepoInformation: Saving to file:", outputFile);
  fs.writeFileSync(outputFile, JSON.stringify(repoList, null, 4));
}

downloadRepoInformation("mongodb", "./repolist.json");
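Since request is deprecated, here is a hedged sketch of the same loop on Node's built-in fetch (available globally from Node 18), under the same page-limit assumptions:

// Hedged sketch: the same pagination loop without the request library.
async function downloadRepos(org) {
  let repoList = [];
  let page = 0;
  const resultsPerPage = 20;
  const maxPages = 10;
  do {
    const res = await fetch(
      `https://api.github.com/orgs/${org}/repos?per_page=${resultsPerPage}&page=${++page}`,
      { headers: { "User-Agent": "fetch" } }
    );
    if (!res.ok) throw new Error(`GitHub returned ${res.status}`);
    const batch = await res.json();
    repoList = repoList.concat(batch);
    if (batch.length < resultsPerPage) break; // last page reached
  } while (page <= maxPages);
  return repoList;
}

downloadRepos("mongodb").then(repos => console.log(repos.length));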

How do I get data out of a Node http(s) request?

How do I get the data from a https request outside of its scope?
Update
I've seen Where is body in a nodejs http.get response?, but it doesn't answer this question. In fact, that question isn't answered accurately, either. In the accepted answer (posted by the asker), a third party library is used. Since the library returns an object different from that returned by http.get() it doesn't answer the question.
I tried to set a variable to the return value of http.get() using await, but that returns an http.ClientRequest and doesn't give me access to the response data that I need.
I'm using Node v8.9.4 with Express and the https module to request data from Google's Custom Search.
I have two routes. One for a GET request and one for a POST request used when submitting a form on the front page. They both basically serve the same purpose... request the data from CSE and present the data as a simple JSON string. Rather than repeat myself, I want to put my code for the CSE request into a function and just call the function within the callback for either route.
I thought about returning all the way up from the innermost callback, but that won't work because it wouldn't get to the request's error event handler or the necessary .end() call.
Here's a subset of the actual code:
app.get('/api/imagesearch/:query', newQuery)
app.post('/', newQuery)

function newQuery (req, res) {
  let query = req.body.query || req.params.query
  console.log(`Search Query: ${query}`)
  res.status(200)
  res.set('Content-Type', 'application/json')
  // This doesn't work
  let searchResults = JSON.stringify(cseSearch(req))
  res.end(searchResults)
}

function cseSearch (request) {
  let cseParams = '' +
    `?q=${request.params.query}` +
    `&cx=${process.env.CSE_ID}` +
    `&key=${process.env.API_KEY}` +
    '&num=10' +
    '&safe=high' +
    '&searchType=image' +
    `&start=${request.query.offset || 1}`
  let options = {
    hostname: 'www.googleapis.com',
    path: '/customsearch/v1' + encodeURI(cseParams)
  }
  let cseRequest = https.request(options, cseResponse => {
    let jsonString = ''
    let searchResults = []
    cseResponse.on('data', data => {
      jsonString += data
    })
    cseResponse.on('end', () => {
      let cseResult = JSON.parse(jsonString)
      let items = cseResult.items
      items.map(item => {
        let resultItem = {
          url: item.link,
          snippet: item.title,
          thumbnail: item.image.thumbnailLink,
          context: item.image.contextLink
        }
        searchResults.push(resultItem)
      })
      // This doesn't work... wrong scope, two callbacks deep
      return searchResults
    })
  })
  cseRequest.on('error', e => {
    console.log(e)
  })
  cseRequest.end()
}
If you're curious, it's for a freeCodeCamp project: Image Search Abstraction Layer
Using a Promise solves this issue:

cseSearch(req).then(searchResults => {
  res.json(searchResults)
}).catch(err => {
  res.status(500).end()
})

function cseSearch (request) {
  return new Promise((resolve, reject) => {
    ...your http request code
    cseResponse.on('end', () => {
      let cseResult = JSON.parse(jsonString)
      let items = cseResult.items
      items.map(item => {
        let resultItem = {
          url: item.link,
          snippet: item.title,
          thumbnail: item.image.thumbnailLink,
          context: item.image.contextLink
        }
        searchResults.push(resultItem)
      })
      resolve(searchResults)
    })
  })
}
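For completeness, a small sketch of the same call site using async/await instead of .then(), assuming the Promise-returning cseSearch above:

// Hedged sketch: consuming the Promise-based cseSearch with async/await.
app.get('/api/imagesearch/:query', async (req, res) => {
  try {
    const searchResults = await cseSearch(req)
    res.json(searchResults)
  } catch (err) {
    console.log(err)
    res.sendStatus(500)
  }
})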
Based on what I explained in the comments, to give you an idea how compact your code could be using the request-promise library, here's what you could use:
const rp = require('request-promise-native');

app.get('/api/imagesearch/:query', newQuery)
app.post('/', newQuery)

function newQuery (req, res) {
  let query = req.body.query || req.params.query
  console.log(`Search Query: ${query}`)
  cseSearch(req).then(results => {
    res.json(results);
  }).catch(err => {
    console.log("newQueryError ", err);
    res.sendStatus(500);
  });
}

function cseSearch (request) {
  let cseParams = '' +
    `?q=${request.params.query}` +
    `&cx=${process.env.CSE_ID}` +
    `&key=${process.env.API_KEY}` +
    '&num=10' +
    '&safe=high' +
    '&searchType=image' +
    `&start=${request.query.offset || 1}`
  let options = {
    // request-promise expects a full uri rather than hostname + path
    uri: 'https://www.googleapis.com/customsearch/v1' + encodeURI(cseParams),
    json: true
  };
  return rp(options).then(data => {
    return data.items.map(item => {
      return {
        url: item.link,
        snippet: item.title,
        thumbnail: item.image.thumbnailLink,
        context: item.image.contextLink
      };
    });
  });
}

Jenkins Git Plugin does not receive posted Parameters

I am trying to use Node.js to programmatically build Jenkins jobs that take Git parameters.
I am sending the parameters as post data, as shown below. However, no matter what value I assign to ref, Jenkins runs the build with the default parameter value (specified in the job's configuration). I have tried passing in the parameters as query strings in the URL, but that also did not work.
I am using Jenkins v1.651.1 and Node v6.2.0.
var jobOptions = {
  url: requestedJobObject.url + 'build',
  method: 'POST',
  port: 8080
};

// parameters = { "name": "ref", "value": "origin/master" }
if (!_.isEmpty(parameters)) {
  var jsonParametersString = JSON.stringify({"parameter": parameters});
  var parameterParam = encodeURIComponent(jsonParametersString);
  parameters.json = parameterParam;
  jobOptions.headers = {
    'Content-Type': 'application/x-www-form-urlencoded',
    'Content-Length': querystring.stringify(parameters).length
  };
  jobOptions.url += 'WithParameters';
  postData = querystring.stringify(parameters);
}

// jobOptions contains auth field & separates url into hostname and path
// makes an http request to jobOptions and calls req.write(postData)
makeRequest(jobOptions, callback, responseCB, postData)
makeRequest makes an http request:
function makeRequest (object, callback, responseCB, postData) {
  var accumulator = '';
  var parsedUrl = u.parse('//' + object.url, true, true);
  var options = {
    hostname: parsedUrl.hostname,
    port: object.port || 8080,
    path: parsedUrl.path,
    method: object.method || 'GET',
    auth: getAuthByHost(parsedUrl.hostname)
  };
  if (object.headers) {
    options.headers = object.headers;
  }
  var response = null;
  var req = http.request(options, function (res) {
    response = res;
    res.on('data', function (data) {
      accumulator = accumulator + data.toString();
      res.resume();
    });
    res.on('close', function () {
      // first assume accumulator is a JSON object
      var responseContent;
      try {
        responseContent = JSON.parse(accumulator);
      }
      // if not an object, use accumulator as a string
      catch (err) {
        responseContent = accumulator;
      }
      callback(responseContent, response.statusCode);
      if (responseCB) {
        responseCB(res);
      }
    });
  });
  req.on('close', function () {
    // first assume accumulator is a JSON object
    var responseContent;
    try {
      responseContent = JSON.parse(accumulator);
    }
    catch (err) {
      responseContent = accumulator;
    }
    callback(responseContent, response.statusCode);
    if (responseCB) {
      responseCB(response);
    }
  });
  if (postData) {
    req.write(postData);
  }
  req.end();
}
Try this; it works for me:
var auth = 'Basic yourUserToken';
var jobOptions = {
  url: 'jenkinsHostName:8080/jenkins/job/jobName/' + 'build',
  method: 'POST',
  port: 8080
};
var parameter = {"parameter": [{"name": "ref", "value": "origin/master"}]};
var postData;
if (!_.isEmpty(parameter)) {
  var jsonParametersString = JSON.stringify(parameter);
  jobOptions.headers = {
    'Authorization': auth,
    'Content-Type': 'application/x-www-form-urlencoded',
  };
  jobOptions.url += '?token=jobRemoteTriggerToken';
  postData = "json=" + jsonParametersString;
  console.log("postData = " + postData);
}
var callback;
var responseCB;
makeRequest(jobOptions, callback, responseCB, postData);
It is based on your code. I removed the querystring call - it seems it returned an empty string when run on the parameters object. I changed /buildWithParameters to /build - it didn't work the other way.
In addition, verify that when you pass 'Content-Length' in the header, it doesn't truncate your JSON parameters object (I removed it).
Also note that I used the user API token, which you can get at http://yourJenkinsUrl/me/configure by clicking the "Show API Token" button.
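As a side note, the Authorization value above is standard HTTP Basic auth, i.e. the Base64 of user:apiToken; a minimal sketch with placeholder credentials:

// Hypothetical sketch: building the Basic auth header from a Jenkins
// username and API token (both values are placeholders).
var auth = 'Basic ' + Buffer.from('myUser:myApiToken').toString('base64');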
Not sure about this, as I don't know Node.js -- but maybe this fits: the Jenkins remote access API indicates that the parameter entity in the JSON request must point to an array, even if there's just one parameter to be defined.
Does the change below fix the problem (note the square brackets around parameters)?
[...]
var jsonParametersString = JSON.stringify({"parameter": [parameters]});
[...]
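To make the expected shape concrete, here is a sketch of the form-encoded body the /build endpoint should receive once parameter is an array, mirroring the working answer above:

// Sketch: "parameter" must be an array, even for a single parameter.
var postData = "json=" + JSON.stringify({
  parameter: [
    { name: "ref", value: "origin/master" }
  ]
});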

How do I send UpperCase Headers in HTTP

The standard says headers are case insensitive.
Ruby and Node both force lower-case headers.
We are using an outside server program, built on the .NET Framework, that treats the 'AuthToken' header as case sensitive; apparently both don't follow the standard. We need the header to keep its upper case in this instance.
At the time of writing, the following setHeader was copied from the _http_outgoing module of Node's core lib:
var http = require('http');

http.OutgoingMessage.prototype.setHeader = function (name, value) {
  if (arguments.length < 2) {
    throw new Error('`name` and `value` are required for setHeader().');
  }
  if (this._header) {
    throw new Error('Can\'t set headers after they are sent.');
  }
  // NO LOWER CASE
  var key = name; // .toLowerCase() removed
  this._headers = this._headers || {};
  this._headerNames = this._headerNames || {};
  this._headers[key] = value;
  this._headerNames[key] = name;
  // Since we're re-defining the method, we can't use this part anymore
  //if (automaticHeaders[key]) {
  //  this._removedHeader[key] = false;
  //}
};
The lower-casing part is commented out.
So, if you get this problem, require http and override this method with the version you're currently using. It should then work properly. You could do a similar thing by overriding a method in Ruby, but it won't be quick and easy.
Then this will work:

const request = require('request');
request({url: 'http://myurl.com', headers: {UpperCaseWorks: 'Yay'}});
EDIT: here's the equivalent for newer versions of Node:
OutgoingMessage.prototype.setHeader = function setHeader(name, value) {
  if (this._header) {
    throw new errors.Error('ERR_HTTP_HEADERS_SENT', 'set');
  }
  validateHeader(name, value);

  if (!this[outHeadersKey])
    this[outHeadersKey] = {};

  // no more lower case
  const key = name; // .toLowerCase() removed
  this[outHeadersKey][key] = [name, value];

  switch (key.length) {
    case 10:
      if (key === 'connection')
        this._removedConnection = false;
      break;
    case 14:
      if (key === 'content-length')
        this._removedContLen = false;
      break;
    case 17:
      if (key === 'transfer-encoding')
        this._removedTE = false;
      break;
  }
};
It looks like it calls this local method, which will need to be defined as well:
function validateHeader(name, value) {
  let err;
  if (typeof name !== 'string' || !name || !checkIsHttpToken(name)) {
    err = new errors.TypeError('ERR_INVALID_HTTP_TOKEN', 'Header name', name);
  } else if (value === undefined) {
    err = new errors.TypeError('ERR_HTTP_INVALID_HEADER_VALUE', value, name);
  } else if (checkInvalidHeaderChar(value)) {
    debug('Header "%s" contains invalid characters', name);
    err = new errors.TypeError('ERR_INVALID_CHAR', 'header content', name);
  }
  if (err !== undefined) {
    Error.captureStackTrace(err, validateHeader);
    throw err;
  }
}
And this:

const { outHeadersKey } = require('internal/http');

Anyway, check your version of Node for what you are overriding.
Piggybacking on Funkodebat's answer, here's my solution for Node 16:
const http = require('http');

// https://github.com/nodejs/node/blob/v16.x/lib/_http_outgoing.js#L574-L587
const { validateHeaderName, validateHeaderValue } = http;

http.OutgoingMessage.prototype.setHeader = function setHeader(name, value) {
  if (this._header) {
    throw new Error('Cannot set headers after they are sent to the client');
  }
  validateHeaderName(name);
  validateHeaderValue(name, value);

  // Extra logic to find the kOutHeaders symbol in `this`
  const kOutHeaders = Object.getOwnPropertySymbols(this).find(
    (sym) => sym.toString() === 'Symbol(kOutHeaders)'
  );

  let headers = this[kOutHeaders];
  if (headers === null) this[kOutHeaders] = headers = Object.create(null);

  headers[name] = [name, value]; // toLowerCase removed from here
  return this;
};
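A quick usage sketch, assuming the override above has been applied (the host and token are placeholders):

// With the patched setHeader, the header name keeps its case.
const req = http.request({ host: 'example.com', path: '/', method: 'GET' });
req.setHeader('AuthToken', 'my-token'); // sent as "AuthToken", not "authtoken"
req.end();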
Looking at the source of the Node.js library on GitHub, you do not need to override OutgoingMessage.prototype.setHeader.
Instead of passing the headers as an Object, you can send them as an Array. Here is a working example:
const http = require('http');

const postData = JSON.stringify({
  'msg': 'Hello World!'
});

const options = {
  hostname: 'www.google.com',
  port: 80,
  path: '/upload',
  method: 'POST',
  // use an Array instead of an Object to avoid the lowercase transformation
  headers: [
    ['Host', 'localhost'],
    ['X-CustomHeaderFancy', 'valueForFancyHeader'],
    ['Content-Type', 'application/json'],
    ['Content-Length', Buffer.byteLength(postData)]
  ]
};

const req = http.request(options, (res) => {
  console.log(`STATUS: ${res.statusCode}`);
  console.log(`HEADERS: ${JSON.stringify(res.headers)}`);
  res.setEncoding('utf8');
  res.on('data', (chunk) => {
    console.log(`BODY: ${chunk}`);
  });
  res.on('end', () => {
    console.log('No more data in response.');
  });
});

req.on('error', (e) => {
  console.error(`problem with request: ${e.message}`);
});

// Write data to request body
req.write(postData);
req.end();
Inside the source code at https://github.com/nodejs/node/blob/v16.x/lib/_http_client.js#L249 there is a test to check whether the headers are an array; if so, the lowercase transformation is bypassed.
I do not know why it is not documented; it's a very useful feature.

Node.js get image from web and encode with base64

I'm trying to fetch an image from the web and encode it with base64.
What I have so far is this:
var request = require('request');
var BufferList = require('bufferlist').BufferList;

var bl = new BufferList();
request({uri: 'http://tinypng.org/images/example-shrunk-8cadd4c7.png', responseBodyStream: bl}, function (error, response, body) {
  if (!error && response.statusCode == 200) {
    var type = response.headers["content-type"];
    var prefix = "data:" + type + ";base64,";
    var base64 = new Buffer(bl.toString(), 'binary').toString('base64');
    var data = prefix + base64;
    console.log(data);
  }
});
This seems to be pretty close to the solution, but I can't quite get it to work. It recognizes the data type and gives out this output:
data:image/png;base64
However, the bufferlist 'bl' seems to be empty.
BufferList is obsolete, as its functionality is now in Node core. The only tricky part here is setting request not to use any encoding:
var request = require('request').defaults({ encoding: null });

request.get('http://tinypng.org/images/example-shrunk-8cadd4c7.png', function (error, response, body) {
  if (!error && response.statusCode == 200) {
    data = "data:" + response.headers["content-type"] + ";base64," + Buffer.from(body).toString('base64');
    console.log(data);
  }
});
If anyone encounters the same issue while using axios as the HTTP client, the solution is to add the responseType property to the request options with the value 'arraybuffer':

let image = await axios.get('http://aaa.bbb/image.png', {responseType: 'arraybuffer'});
let returnedB64 = Buffer.from(image.data).toString('base64');

Hope this helps
Latest, as of the end of 2017:
Well, after reading the above answers and doing a bit of research, I got to know a new way which doesn't require any package installation; the http module (which is built in) is enough!
NOTE: I have used it with Node version 6.x, so I guess it's also applicable to later versions.
var http = require('http');

http.get('http://tinypng.org/images/example-shrunk-8cadd4c7.png', (resp) => {
  resp.setEncoding('base64');
  let body = "data:" + resp.headers["content-type"] + ";base64,";
  resp.on('data', (data) => { body += data; });
  resp.on('end', () => {
    console.log(body);
    //return res.json({result: body, status: 'success'});
  });
}).on('error', (e) => {
  console.log(`Got error: ${e.message}`);
});
I hope it helps!
Also, check out more about http.get(...) here!
Another way, using node-fetch, which breaks down the steps per variable (this assumes an async context for the awaits):

const fetch = require('node-fetch');

const imageUrl = "Your URL here";
const imageUrlData = await fetch(imageUrl);
const buffer = await imageUrlData.arrayBuffer();
const stringifiedBuffer = Buffer.from(buffer).toString('base64');
const contentType = imageUrlData.headers.get('content-type');
// content-type already includes "image/...", so use it as-is
const imageBase64 = `data:${contentType};base64,${stringifiedBuffer}`;
If you know the image type, it's a one-liner with the node-fetch package. Might not suit everyone, but I already had node-fetch as a dependency, so in case others are in a similar boat:
await fetch(url).then(r => r.buffer()).then(buf => `data:image/${type};base64,`+buf.toString('base64'));
If you are using axios, you can follow the steps below:

var axios = require('axios');

const url = "put your url here";
const image = await axios.get(url, {responseType: 'arraybuffer'});
const raw = Buffer.from(image.data).toString('base64');
const base64Image = "data:" + image.headers["content-type"] + ";base64," + raw;

You can check the result by decoding the Base64 string; see the sketch below.
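A minimal verification sketch, assuming the base64Image produced above; the output filename is a placeholder:

// Hedged check: strip the data-URI prefix, decode, and write the bytes
// back to disk so you can compare the file with the original image.
const fs = require('fs');

const payload = base64Image.split(';base64,').pop();
fs.writeFileSync('decoded.png', Buffer.from(payload, 'base64'));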
You can use the base64-stream Node.js module, which is a streaming Base64 encoder / decoder. The benefit of this method is that you can convert the image without having to buffer the whole thing into memory, and without using the request module.
var http = require('http');
var base64encode = require('base64-stream').Encode;

http.get('http://tinypng.org/images/example-shrunk-8cadd4c7.png', function (res) {
  if (res.statusCode === 200)
    res.pipe(base64encode()).pipe(process.stdout);
});
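If you want the encoded string in memory rather than on stdout, here is a small variation under the same assumptions:

// Hedged variation: accumulate the Base64 chunks into a string.
http.get('http://tinypng.org/images/example-shrunk-8cadd4c7.png', function (res) {
  var chunks = [];
  res.pipe(base64encode())
    .on('data', function (chunk) { chunks.push(chunk); })
    .on('end', function () { console.log(chunks.join('')); });
});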
I use the node-base64-image npm module to load and encode an image into a Base64 string.
Download and encode an image:
var base64 = require('node-base64-image');

var options = {string: true};
base64.base64encoder('www.someurl.com/image.jpg', options, function (err, image) {
  if (err) {
    console.log(err);
  }
  console.log(image);
});

Encode a local image:

var base64 = require('node-base64-image');

var path = __dirname + '/../test.jpg',
    options = {localFile: true, string: true};
base64.base64encoder(path, options, function (err, image) {
  if (err) { console.log(err); }
  console.log(image);
});
One-liner:

Buffer.from(
  (
    await axios.get(image, {
      responseType: "arraybuffer",
    })
  ).data,
  "utf-8"
).toString("base64")
Old post, but it could help someone.
This builds on Dmytro's response, which helped me:

const base64FromUrl = async (url: string) => {
  try {
    return Buffer.from((await axios.get(url, { responseType: "arraybuffer" })).data, "utf-8").toString("base64")
  } catch (error) {
    return ""
  }
}

I've just added error handling.
You can use the image-to-base64 Node.js module.
The benefit of using this module is that you convert your image hassle-free.

const imageToBase64 = require('image-to-base64');

const imageLink = 'Your image link';
imageToBase64(imageLink)
  .then((response) => {
    const base64Image = `data:image/png;base64,${response}`;
    console.log(base64Image);
  })
  .catch((error) => {
    console.log(error);
  });
