Node.js get image from web and encode with base64

I'm trying to fetch an image from the web and encode it with base64.
What I have so far is this:
var request = require('request');
var BufferList = require('bufferlist').BufferList;
bl = new BufferList(),
request({uri: 'http://tinypng.org/images/example-shrunk-8cadd4c7.png', responseBodyStream: bl}, function (error, response, body) {
  if (!error && response.statusCode == 200) {
    var type = response.headers["content-type"];
    var prefix = "data:" + type + ";base64,";
    var base64 = new Buffer(bl.toString(), 'binary').toString('base64');
    var data = prefix + base64;
    console.log(data);
  }
});
This seems to be pretty close to the solution, but I can't quite get it to work. It recognizes the data type and produces this output:
data:image/png;base64
However, the bufferlist 'bl' seems to be empty.

BufferList is obsolete, as its functionality is now in Node core. The only tricky part here is setting request not to use any encoding:
var request = require('request').defaults({ encoding: null });

request.get('http://tinypng.org/images/example-shrunk-8cadd4c7.png', function (error, response, body) {
  if (!error && response.statusCode == 200) {
    var data = "data:" + response.headers["content-type"] + ";base64," + Buffer.from(body).toString('base64');
    console.log(data);
  }
});

If anyone encounters the same issue while using axios as the HTTP client, the solution is to add the responseType property to the request options with the value 'arraybuffer':
let image = await axios.get('http://aaa.bbb/image.png', {responseType: 'arraybuffer'});
let returnedB64 = Buffer.from(image.data).toString('base64');
Hope this helps

Latest, as of the end of 2017
Well, after reading the answers above and doing a bit of research, I learned a new way that doesn't require installing any package; the built-in http module is enough!
Note: I have used this with Node version 6.x, so I guess it also applies to later versions.
var http = require('http');

http.get('http://tinypng.org/images/example-shrunk-8cadd4c7.png', (resp) => {
  resp.setEncoding('base64');
  let body = "data:" + resp.headers["content-type"] + ";base64,";
  resp.on('data', (data) => { body += data; });
  resp.on('end', () => {
    console.log(body);
    //return res.json({result: body, status: 'success'});
  });
}).on('error', (e) => {
  console.log(`Got error: ${e.message}`);
});
I hope it helps!
Also, check the http.get(...) documentation for more details.

Another way is to use node-fetch, breaking the steps down into one variable each:
const fetch = require('node-fetch');

const imageUrl = "Your URL here";
const imageUrlData = await fetch(imageUrl);
const buffer = await imageUrlData.arrayBuffer();
const stringifiedBuffer = Buffer.from(buffer).toString('base64');
const contentType = imageUrlData.headers.get('content-type');
const imageBase64 = `data:${contentType};base64,${stringifiedBuffer}`;
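Since await has to run inside an async function in a CommonJS file, a minimal wrapper around the snippet above might look like this (a sketch, assuming node-fetch v2 so that require works; the URL is a placeholder):
const fetch = require('node-fetch');

const toDataUri = async (imageUrl) => {
  const response = await fetch(imageUrl);
  const buffer = await response.arrayBuffer();
  const contentType = response.headers.get('content-type');
  // content-type already contains "image/...", so it is used as-is in the prefix.
  return `data:${contentType};base64,${Buffer.from(buffer).toString('base64')}`;
};

toDataUri('http://tinypng.org/images/example-shrunk-8cadd4c7.png')
  .then((dataUri) => console.log(dataUri))
  .catch(console.error);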

If you know the image type, it's a one-liner with the node-fetch package. Might not suit everyone, but I already had node-fetch as a dependency, so in case others are in a similar boat:
await fetch(url).then(r => r.buffer()).then(buf => `data:image/${type};base64,`+buf.toString('base64'));
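Note that r.buffer() exists in node-fetch 2.x but is deprecated in 3.x in favour of arrayBuffer(); an equivalent one-liner for newer versions might be (url and type are placeholders, as in the original):
await fetch(url).then(r => r.arrayBuffer()).then(buf => `data:image/${type};base64,` + Buffer.from(buf).toString('base64'));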

If you are using axios, then you can follow the steps below:
var axios = require('axios');
const url ="put your url here";
const image = await axios.get(url, {responseType: 'arraybuffer'});
const raw = Buffer.from(image.data).toString('base64');
const base64Image = "data:" + image.headers["content-type"] + ";base64,"+raw;
You can verify the result by decoding the base64 string back.
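For example, a quick sanity check is to decode the raw base64 back into a Buffer and write it to disk; the file should open as the original image (a sketch, with check.png as a placeholder filename):
const fs = require('fs');
// Decode the base64 string produced above back into binary and save it.
fs.writeFileSync('check.png', Buffer.from(raw, 'base64'));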

You can use the base64-stream Node.js module, which is a streaming Base64 encoder / decoder. The benefit of this method is that you can convert the image without having to buffer the whole thing into memory, and without using the request module.
var http = require('http');
var base64encode = require('base64-stream').Encode;

http.get('http://tinypng.org/images/example-shrunk-8cadd4c7.png', function(res) {
  if (res.statusCode === 200)
    res.pipe(base64encode()).pipe(process.stdout);
});
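To produce a full data URI with this streaming approach, the prefix could be written out before piping (a minimal sketch built on the snippet above):
var http = require('http');
var base64encode = require('base64-stream').Encode;

http.get('http://tinypng.org/images/example-shrunk-8cadd4c7.png', function(res) {
  if (res.statusCode === 200) {
    // Emit the data URI prefix first, then stream the base64-encoded body after it.
    process.stdout.write('data:' + res.headers['content-type'] + ';base64,');
    res.pipe(base64encode()).pipe(process.stdout);
  }
});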

I use the node-base64-image npm module to download an image and encode it as a base64 string.
Download and encode an image:
var base64 = require('node-base64-image');

var options = { string: true };
base64.base64encoder('www.someurl.com/image.jpg', options, function (err, image) {
  if (err) {
    console.log(err);
  }
  console.log(image);
});
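The callback only receives the raw base64 string, so if you need a data URI you still have to add the prefix yourself inside the callback (a sketch, assuming the image is a JPEG):
// Inside the callback, after the err check:
var dataUri = 'data:image/jpeg;base64,' + image;
console.log(dataUri);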
Encode a local image:
var base64 = require('node-base64-image');

var path = __dirname + '/../test.jpg',
    options = { localFile: true, string: true };
base64.base64encoder(path, options, function (err, image) {
  if (err) { console.log(err); }
  console.log(image);
});

One-liner:
Buffer.from(
  (
    await axios.get(image, {
      responseType: "arraybuffer",
    })
  ).data,
  "utf-8"
).toString("base64")

Old post, but it could help someone.
This is based on Dmytro's response, which helped me.
const base64FromUrl = async (url: string) => {
  try {
    return Buffer.from((await axios.get(url, { responseType: "arraybuffer" })).data, "utf-8").toString("base64");
  } catch (error) {
    return "";
  }
}
I've just added error handling.
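Usage is then straightforward (the URL is just a placeholder):
const imageB64 = await base64FromUrl("https://example.com/image.png");
console.log(imageB64 ? "encoded" : "failed");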

You can use the image-to-base64 Node.js module.
The benefit of using this module is that you can convert your image hassle-free:
const imageToBase64 = require('image-to-base64');

const imageLink = 'Your image link';
imageToBase64(imageLink)
  .then((response) => {
    const base64Image = `data:image/png;base64,${response}`;
    console.log(base64Image);
  })
  .catch((error) => {
    console.log(error);
  });

Related

Bad gateway from Kibana behind a Node.js reverse proxy

I have a Kibana instance behind a Node.js 16.x AWS Lambda reverse proxy. It works fine for almost everything, except in the "Discover" section: when I add 2 or more filters over a 2+ month time range, it gives back a bad gateway error.
Detail of the error:
Wrapper#https://mydomain/_dashboards/909221/bundles/core/core.entry.js:6:4249
_createSuperInternal#https://mydomain/_dashboards/909221/bundles/core/core.entry.js:6:3388
...
...
Implementation:
function proxy(event, context, lambdaCallback) {
delete(event.headers["accept-encoding"])
var path = event.path;
if (event.multiValueQueryStringParameters) {
path += '?' + deserializeQueryString(event.multiValueQueryStringParameters)
}
// Calculate the options for the HTTPS request
var opts = {
host: my_es_endpoint,
path: path,
method: event.httpMethod,
service: 'es',
region: my_region,
headers: event.headers
}
if ((event.httpMethod == 'POST') || (event.httpMethod == 'PUT')) {
if (event.body) {
var buff = new Buffer(event.body, "base64")
var payload = buff.toString('utf-8')
opts.body = payload
}
}
// Use aws4 to sign the request so we can talk with ElasticSearch directly
aws4.sign(opts);
const req = https.request(opts, function (res) {
var bodyParts = [];
// We need to read all the incoming data
res.on('data', (chunk) => {
bodyParts.push(chunk)
});
res.on("end", () => {
// We re-create the read content
var body = Buffer.concat(bodyParts).toString()
// We send back uncompressed data
delete(res.headers['content-encoding'])
res.headers['X-Frame-Options'] = 'DENY'
// res.headers['content-security-policy'] = "default-src 'self'; frame-ancestors 'none'"
res.headers['X-XSS-Protection'] = '1; mode=block'
res.headers['X-Content-Type-Options'] = 'nosniff'
var response = {
statusCode: res.statusCode,
body: body,
headers: res.headers
}
lambdaCallback(null, response);
})
});
req.on('error', (e) => {
console.log(`Error caught when calling ElasticSearch: ${e}`)
})
// For POST/PUT request, we send the content of the paylod as well
if ((event.httpMethod == 'POST') || (event.httpMethod == 'PUT')) {
if (event.body) {
var buff = new Buffer(event.body, "base64")
var payload = buff.toString('utf-8')
req.write(payload)
}
}
req.end();
return req;
}
I tried:
Increasing the timeout on the request
Increasing Lambda memory from 128 to 512
Increasing the Lambda timeout
One thing you can try is to increase the memory allocation for the Lambda function; it might resolve the issue.
Or, to remove the headaches altogether, you could use Amazon API Gateway instead of a reverse proxy in a Lambda function to proxy requests to Kibana. You can also increase the timeout for Kibana requests. This will be more efficient and scalable for your project, with fewer issues.
I tried to optimize your code; can you check whether it improves or corrects your issue?
const https = require('https');
const aws4 = require('aws4');
function deserializeQueryString(query) {
const params = new URLSearchParams();
for (const key of Object.keys(query)) {
for (const value of query[key]) {
params.append(key, value);
}
}
return params.toString();
}
exports.handler = async (event, context, lambdaCallback) => {
const { headers, httpMethod, multiValueQueryStringParameters, body } = event;
let path = event.path; // path is reassigned below, so it is declared with let instead of being destructured as a const
// Remove accept-encoding header
delete headers['accept-encoding'];
// Append query string parameters to the path
if (multiValueQueryStringParameters) {
path += '?' + deserializeQueryString(multiValueQueryStringParameters);
}
const opts = {
host: my_es_endpoint,
path,
method: httpMethod,
service: 'es',
region: my_region,
headers
};
// Add request body for POST and PUT methods
if (['POST', 'PUT'].includes(httpMethod) && body) {
opts.body = Buffer.from(body, 'base64').toString('utf-8');
}
// Sign the request using aws4
aws4.sign(opts);
return new Promise((resolve, reject) => {
const req = https.request(opts, res => {
let body = '';
res.on('data', chunk => {
body += chunk;
});
res.on('end', () => {
// Remove content-encoding header
delete res.headers['content-encoding'];
// Set security headers
res.headers['X-Frame-Options'] = 'DENY';
res.headers['X-XSS-Protection'] = '1; mode=block';
res.headers['X-Content-Type-Options'] = 'nosniff';
const response = {
statusCode: res.statusCode,
body,
headers: res.headers
};
resolve(response);
});
});
req.on('error', error => {
console.error(`Error caught when calling ElasticSearch: ${error}`);
reject(error);
});
// Write request body for POST and PUT methods
if (['POST', 'PUT'].includes(httpMethod) && body) {
req.write(opts.body);
}
req.end();
}).then(lambdaCallback)
.catch(error => {
lambdaCallback(error);
});
};
I am not sure if the error is related to the proxy implementation; however, I found something that helped, and now I am not experiencing the problem anymore.
I reduced the value of discover:sampleSize in Kibana Advanced Settings, as suggested here.

How do I upload a large audio file, longer than 30 sec, directly from the browser to AWS S3?

I would like to save an audio recording to S3. I am using the functions below to upload directly to AWS S3 from the browser. It works for short audio recordings of up to around 25 seconds but fails for larger files.
Currently the flow is as follows: I speak into the microphone using recorder.js. Once the recording is complete, I press stop, which then saves the file to AWS.
From the browser:
getSignedRequest(file,fileLoc);
function getFetchSignedRequest(file,fileLoc){
const fetchUrl = `/xxxxxxxxx?file-name=${file.name}&file-type=${file.type}&fileLoc=${fileLoc}`;
fetch(fetchUrl )
.then((response) => {
console.log('response',response)
if(!response.ok){
console.log('Network response was not OK',response.ok)
} else {
putAudioFetchFile(file, response.signedRequest, response.url)
}
})
.catch((error) => {
console.error('Could not get signed URL:', error);
})
}
This sends a GET request to the Node.js server, which calls this:
const aws = require('aws-sdk');
const fs = require('fs');
aws.config.region = 'xxxxxx';
const S3_BUCKET = process.env.AWS_S3_BUCKET
this.uploadToAWSDrive =
async function uploadToAWSDrive(req,res){
const s3 = new aws.S3();
const URL_EXPIRATION_SECONDS = 3000;
const subFolderName = req.query['fileLoc'];
const fileName = req.query['file-name'];
const fileType = req.query['file-type'];
const fileLocName = subFolderName + fileName;
const s3Params = {
Bucket: S3_BUCKET,
Key: fileLocName,
Expires: URL_EXPIRATION_SECONDS,
ContentType: fileType,
ACL: 'public-read'
};
await s3.getSignedUrl('putObject', s3Params, (err, data) => {
if(err){
console.log(err);
return res.end();
}
const returnData = {
signedRequest: data,
url: `https://${S3_BUCKET}.s3.amazonaws.com/${fileLocName}`
};
console.log('audio uploaded',returnData)
res.write(JSON.stringify(returnData));
res.end();
});
}
Which then calls this:
function uploadFile(file, signedRequest, url){
const xhr = new XMLHttpRequest();
xhr.open('PUT', signedRequest);
xhr.onreadystatechange = () => {
if(xhr.readyState === 4){
if(xhr.status === 200){
console.log('destination url= ', url,xhr.readyState,xhr.status)
}
else{
alert('Could not upload file.');
}
}
};
xhr.send(file);
}
This then sends the file to the AWS S3 server. It is OK for audio of less than 30 seconds but fails for longer audio files.
What do I need to do to enable this to work with audio files of greater than 20 seconds and up to 3 minutes?
Any help most appreciated
Not very elegant, but the issue was resolved by adding a timer to the original function call. A function that followed also needed to be delayed, I think to allow processing time. I am sure there will be better ways to do this.
setTimeout(() => getSignedRequest(myAudioFile, fileLoc), processTime); // pass a callback so the call is actually delayed

Using Express to get geocoding from the Google API

I am having difficulty getting my LatLon lookup to work. I have read
Get Google Maps Geocoding JSON from Express, but that just says use HTTP... and I have read the docs on http/https, but I'm still getting an error.
Here is my code - so calling myUrl/LatLon should give me the Google API response - or at least that is what I want...
const https = require('https');
router.get( '/LatLon', ( res ) => {console.log('Here getting https');
const googleKey = '---';
const address = '1600 Amphitheatre Parkway, Mountain View, CA';
const options = new URL('https://maps.googleapis.com/maps/api/geocode/json?address=' + address + '&key=' + googleKey);
const req = https.request(options, (res) => {
res.on('data', (chunk) => {
console.log(`BODY: ${chunk}`);
});
res.on('end', () => {
console.log('No more data in response.');
});
});
req.on('error', (e) => {
console.error(`problem with request: ${e.message}`);
});
req.write();
req.end();
});
I get this error -
TypeError [ERR_INVALID_ARG_TYPE]: The first argument must be one of type string or Buffer. Received type undefined
at write_ (_http_outgoing.js:595:11)
at ClientRequest.write (_http_outgoing.js:567:10)
Any help would be greatly appreciated - I have tried about 4 variations on using "get" or "https"...
I found node-geocoder - and it worked great...
Basically I did this; it is 'generalized code', non-functional, but you'll get the idea.
A bunch of checks and comparisons went into it so I am not hitting APIs when I do not need to.
var NodeGeocoder = require('node-geocoder');
var options = {
provider: process.env.GEOCODING_PROVIDER,
httpAdapter: 'https',
apiKey: process.env.GEOCODING_KEY,
formatter: null
};
var geocoder = NodeGeocoder(options);
collection.getExistingId( req.params.id, ( err, record ) => {
const existingAddress = addresstoString(record.address);
const newAddress = addresstoString(newRecord.address);
if ( !compareAddresses(existingAddress,newAddress) ) {
geocoder.geocode(newAddress, function(err, geocode) {
let coords = []; // mongoDB wants [Long,Lat]
coords[0] = geocode[0].longitude;
coords[1] = geocode[0].latitude;
// set existingAddress.COORDINATES = coords
// save the record
});
}
});

Coinbase GDAX NodeJS - Invalid API Key

I'm trying to write a script that will cancel all my orders on GDAX. According to the documentation for Cancel an Order I need to send a DELETE request to /delete. But I assume before I can do that I need to sign the message first.
When I submit the request using fetch in Node, I get this response: { message: 'Invalid API Key' }
Here is a code sample I am working on, with the confidential stuff replaced, of course:
var crypto = require('crypto');
var fetch = require('fetch');
const coinbaseSecret = 'abc...';
const coinbaseAPIKey = 'abc...';
const coinbasePassword = 'abc...';
const coinbaseRestAPIURL = "https://api-public.sandbox.gdax.com";
function start(){
getTime(function(time){
cancelAll(time, function(){
console.log('done');
});
});
}
function getTime(callback){
fetch.fetchUrl(coinbaseRestAPIURL + '/time', null, function(error, meta, body){
var response = JSON.parse(body.toString());
console.log('response', response);
var timeStamp = response.epoch;
callback(timeStamp);
});
}
function cancelAll(timeStamp, callback) {
// Refer to https://docs.gdax.com/#cancel-an-order
var signature = getSignature('DELETE', '/delete', "");
console.log('signature', signature);
var headers = {
'Content-Type': 'application/json',
'CB-ACCESS-KEY': coinbaseAPIKey,
'CB-ACCESS-SIGN': signature,
'CB-ACCESS-TIMESTAMP': timeStamp, //Date.now() / 1000,
'CB-ACCESS-PASSPHRASE': coinbasePassword
};
console.log('headers', headers);
fetch.fetchUrl(coinbaseRestAPIURL + '/delete', {
method: 'DELETE',
headers: headers
}, function(error, meta, body){
var response = JSON.parse(body.toString());
console.log('response', response);
callback();
})
}
function getSignature(method, requestPath, body) {
// Refer to https://docs.gdax.com/#signing-a-message
const secret = coinbaseSecret;
const timestamp = Date.now() / 1000;
const what = timestamp + method + requestPath + body;
const key = Buffer(secret, 'base64');
const hmac = crypto.createHmac('sha256', key);
const signature = hmac.update(what).digest('base64');
return signature;
}
start();
Go to the gdax-node GitHub repo and take a look at their code and examples.
1) Create an authenticated client by configuring it with your API details (a sketch of this step follows below), and
2) then simply use the authedClient object and the cancelAllOrders method:
authedClient.cancelAllOrders({product_id: 'BTC-USD'}, callback);
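For completeness, here is a minimal sketch of step 1, based on the AuthenticatedClient constructor shown in the gdax-node README (the key, secret, passphrase, and sandbox URL are placeholders):
const Gdax = require('gdax');

// Use the sandbox URL while testing; switch to the production endpoint for live trading.
const apiURI = 'https://api-public.sandbox.gdax.com';
const authedClient = new Gdax.AuthenticatedClient('your-key', 'your-b64-secret', 'your-passphrase', apiURI);

// Step 2: cancel everything for a product, as shown above.
authedClient.cancelAllOrders({ product_id: 'BTC-USD' }, (err, response, data) => {
  if (err) { return console.error(err); }
  console.log('cancelled orders:', data);
});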
You could wrap this with a function to call it 'x' amount of times (as stated in the documentation), or you could think of something fancier if you'd like.
Note: make sure you pull the GitHub repo and do not install from npm directly, as there are a few bugs and issues that have been fixed in the git repo but NOT pushed to npm...
...so use npm install coinbase/gdax-node when downloading your gdax package.
Hope that helps a little...

Node.js download multiple files

I need to download multiple files from URLs. I have a list of them in a file. How should I do that? I already made an attempt, but it's not working. I need to wait until the last download is done before starting the next one. How can I do that?
You want to call the download function from the callback of the previous file. I threw something together; please do not consider it pretty or production-ready ;-)
var http = require('http-get');
var files = { 'url' : 'local-location', 'repeat-this' : 'as often as you want' };
var MultiLoader = function (files, finalcb) {
var load_next_file = function (files) {
if (Object.keys(files).length === 0) {
finalcb(null);
return;
}
var nexturl = Object.keys(files)[0];
var nextfnname = files[nexturl];
console.log('will load ' + nexturl);
http.get(nexturl, nextfnname, function (err, result) {
console.log('loaded ' + nexturl);
delete files[nexturl];
load_next_file(files);
});
};
load_next_file(JSON.parse(JSON.stringify(files)));
};
MultiLoader(files, function () { console.log('finalcb'); });
http-get is not a standard node module, you can install it via npm install http-get.
I think this is what you're looking for.
const fs = require('fs')
const https = require('https')
const downloadFolderPath = 'downloads'
const urls = [
'url 1',
'url 2'
]
const downloadFile = url => {
return new Promise((resolve, reject) => {
const splitUrl = url.split('/')
const filename = splitUrl[splitUrl.length - 1]
const outputPath = `${downloadFolderPath}/${filename}`
const file = fs.createWriteStream(outputPath)
https.get(url, res => {
if (res.statusCode === 200) {
res.pipe(file).on('close', resolve)
} else {
reject(res.statusCode)
}
})
})
}
if (!fs.existsSync(downloadFolderPath)) {
fs.mkdirSync(downloadFolderPath)
}
let downloadedFiles = 0
urls.forEach(async url => {
await downloadFile(url)
downloadedFiles++
console.log(`${downloadedFiles}/${urls.length} downloaded`)
})
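One caveat with the snippet above: forEach with an async callback starts all downloads in parallel rather than waiting for each one, which is what the question asks for. If strictly sequential downloads are needed, a plain for...of loop with await could be used instead (a sketch reusing the downloadFile helper and urls array above):
const downloadSequentially = async () => {
  let downloaded = 0;
  for (const url of urls) {
    // Each iteration waits for the previous download to finish before starting the next.
    await downloadFile(url);
    downloaded++;
    console.log(`${downloaded}/${urls.length} downloaded`);
  }
};

downloadSequentially().catch(err => console.error('Download failed:', err));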
You can read files using fs (var fs = require('fs');) in Node.js:
fs.readFile('<filepath>', "utf8", function (err, data) {
  if (err) throw err;
  console.log(data);
});
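To combine this with the download helpers above, the file contents can be split into the urls array (a sketch, assuming one URL per line in a file named urls.txt, which is a placeholder):
const fs = require('fs');

// Read a plain-text file of URLs (one per line) and build the urls array
// used by the download examples above.
const urls = fs.readFileSync('urls.txt', 'utf8')
  .split('\n')
  .map((line) => line.trim())
  .filter((line) => line.length > 0);

console.log(`Loaded ${urls.length} URLs`);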
