fs.writeFile() loses image data - Node.js

I use a POST request to upload a picture and store the image data on my server, but some of the image data is lost:
let storePic = function(imgData) {
  const base64Data = imgData.replace(/^data:image\/\w+;base64,/, "");
  const dataBuffer = new Buffer.alloc(5000, base64Data, 'base64')
  fs.writeFile(imgPath, dataBuffer, (err) => {
    if (err) {
      console.log('fail to store image')
    } else {
      console.log('success to store image')
    }
  })
}
When I get the image from the server, it is broken:

Use Buffer.from(base64Data, 'base64') instead. Buffer.alloc(5000, base64Data, 'base64') allocates a fixed 5000-byte buffer and fills it with the decoded data, so the image is truncated to (or repeated to fill) exactly 5000 bytes, which corrupts it.
IMO it's slightly better to match the data-URI prefix out of the string rather than just presume it's there:
let matches = imgData.match(/^data:([A-Za-z-+\/]+);base64,(.+)$/)
if (!matches || matches.length !== 3) throw new Error('Invalid base64 image URI')
// matches[1] contains the mime type, which is handy for a lot of things
fs.writeFile(imgPath, Buffer.from(matches[2], 'base64'), (err) => {
  // ... same success/error handling as before
})
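Putting both fixes together, a minimal corrected version of the storePic function from the question (still assuming imgPath is defined elsewhere, as in the original) could look like this:
let storePic = function(imgData) {
  const matches = imgData.match(/^data:([A-Za-z-+\/]+);base64,(.+)$/)
  if (!matches || matches.length !== 3) throw new Error('Invalid base64 image URI')
  // matches[1] is the mime type, matches[2] is the raw base64 payload
  const dataBuffer = Buffer.from(matches[2], 'base64')
  fs.writeFile(imgPath, dataBuffer, (err) => {
    if (err) {
      console.log('fail to store image')
    } else {
      console.log('success to store image')
    }
  })
}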

Related

retrieve image from mongodb and display on client side

I'm using Express, Node.js, and MongoDB. I saw that images can be stored in MongoDB with multer and GridFS storage, and that part works.
Now I need to get the images back to the client side. I guess the chunk binary can be converted back into an image, but I'm really not sure how to do so. My ultimate goal is to display a menu with the name, price, and picture I uploaded to MongoDB.
Does anyone know how to retrieve it and send the image file from the controller to the boundary class?
Additional resources:
// this is the entity class, which obtains information about the image files
static async getImages(menu) {
  try {
    let filter = Object.values(menu.image)
    const files = await this.files.find({ filename: { $in: filter } }).toArray()
    let fileInfos = []
    for (const file of files) {
      let chunk = await this.chunks.find({ files_id: file._id }).toArray()
      console.log(chunk.data)
      fileInfos.push(chunk.data)
    }
    return fileInfos
  } catch (err) {
    console.log(`Unable to get files: ${err.message}`)
  }
}
So a chunk document contains this:
{
_id: new ObjectId("627a28cda6d7935899174cd4"),
files_id: new ObjectId("627a28cda6d7935899174cd3"),
n: 0,
data: new Binary(Buffer.from("89504e470d0a1a0a0000000d49484452000000180000001808020000006f15aaaf0000000674524e530000000000006ea607910000009449444154789cad944b12c0200843a5e3fdaf9c2e3a636d093f95a586f004b5b5e30100c0b2f8daac6d1a25a144e4b74288325e5a23d6b6aea965b3e643e4243b2cc428f472908f35bb572dace8d4652e485bab83f4c84a0030b6347e3cb5cc28dbb84721ff23704c17a7661ad1ee96dc5f22ff5061f458e29621447e4ec8557ba585a99152b97bb4f5d5d68c92532b10f967bc015ce051246ff76d8b0000000049454e44ae426082", "hex"), 0)
}
// this is the controller class
static async apiViewMenu(_req, res) {
  try {
    let menus = await MenusDAO.getAllMenus()
    for (const menu of menus) {
      menu.images = await ImagesDAO.getImages(menu)
    }
    // return the menus list
    res.json(menus)
  } catch (err) {
    res.status(400).json({ error: err.message })
  }
}
I did not handle converting this buffer data into an image because I do not know how...
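There is no accepted answer in the thread, but a minimal sketch of one way to finish getImages is to concatenate every chunk of a file in order of n (a GridFS file can be split across several chunks) and return a base64 data URI the client can use directly as an img src. The contentType field on the files document is an assumption here; adjust it to whatever your uploads actually store:
static async getImages(menu) {
  const filter = Object.values(menu.image)
  const files = await this.files.find({ filename: { $in: filter } }).toArray()
  const fileInfos = []
  for (const file of files) {
    // fetch all chunks for this file, in order
    const chunks = await this.chunks
      .find({ files_id: file._id })
      .sort({ n: 1 })
      .toArray()
    // each chunk.data is a BSON Binary; .buffer exposes the underlying bytes
    const buffer = Buffer.concat(chunks.map(c => c.data.buffer))
    const mimeType = file.contentType || 'image/png'
    fileInfos.push(`data:${mimeType};base64,${buffer.toString('base64')}`)
  }
  return fileInfos
}
On the client side, each entry of menu.images can then be placed straight into an image tag's src attribute.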

Import large PDF files to be indexed in Elasticsearch

I am trying to upload large PDF files to Elasticsearch to index them.
uploadPDFDocument: async (req, res, next) => {
  try {
    let data = req.body;
    let client = await cloudSearchController.getElasticSearchClient();
    // note: fs here must be the promise-based API (require('fs/promises')) for await to work
    const documentData = await fs.readFile("./large.pdf");
    const encodedData = Buffer.from(documentData).toString('base64');
    let document = {
      id: 'my_id_7',
      index: 'my-index-000001',
      pipeline: 'attachment',
      timeout: '5m',
      body: {
        data: encodedData
      }
    }
    let response = await client.create(document);
    console.log(response);
    return res.status(200).send(response);
  } catch (error) {
    console.log(error.stack);
    return next(error);
  }
},
The above code works for small PDF files, and I am able to extract data from them and index them.
But for large PDF files I get a timeout exception.
Is there any other way to do this without the timeout issue?
I have read about FSCrawler, Filebeat, and Logstash, but they all seem to deal with logs, not PDF files.
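No answer was posted in the thread, but one thing worth trying, assuming the official @elastic/elasticsearch client is in use: the timeout: '5m' request parameter only tells the cluster how long to wait internally, while the client's own HTTP requestTimeout (30 seconds by default) is usually what fires first while the attachment pipeline processes a large PDF. It can be raised per request:
let response = await client.create(document, {
  // per-request transport option; the 30000 ms default is often the real culprit
  requestTimeout: 300000 // 5 minutes, adjust as needed
});
If indexing still takes too long, extracting the text outside Elasticsearch (for example with a standalone Tika process, or FSCrawler, which is built for files rather than logs) and indexing the plain text is the usual fallback.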

Smooch - create attachments from buffer

I'm trying to create an image attachment via the smooch-core API.
I have an image as a Buffer of base64 data, and I try something like this:
smoochClient.attachments
  .create({
    appId: appId,
    props: {
      for: 'message',
      access: 'public',
      appUserId: appUserId
    },
    source: myBuffer
  })
  .then(() => {
    console.log('OK');
  }).catch(err => {
    console.log(JSON.stringify(err));
  });
I get this error: "status":413,"statusText":"Payload Too Large"
[When I upload this image normally through Postman it works fine, so it's not too big; I guess the problem is how the Buffer is being sent.]
Does anyone know how I can send a buffer to this API?
Are you able to submit the base64 data directly in the postman call?
Reading through the spec here it looks like source should be a filepath/name, and not raw binary data.
The easy way may be to save the base64 data to a[n appropriately encoded] file, then provide that file's path as source
Otherwise I'm not sure I'd go so far as to take apart api_instance.upload_attachment() to feed in the base64 data instead of opening/reading from the specified filename.
I found the following solution:
Create a temporary file to get a read stream from it and send that as source instead of the myBuffer parameter. Here is the code for creating the temporary file:
async getTempFileSource(bufferData) {
  const fs = require("fs");
  // remove the mime-type prefix
  if (bufferData.startsWith('data:'))
    bufferData = bufferData.split('base64,')[1];
  // get the file extension from the decoded bytes
  const type = await require('file-type').fromBuffer(Buffer.from(bufferData, 'base64'));
  if (!type) {
    console.log("getTempFileSource - The buffer data is corrupted", 'red');
    return null;
  }
  // create a temporary file
  const tempFile = require('tmp').fileSync({ postfix: '.' + type.ext });
  // append the buffer data to the temp file
  fs.appendFileSync(tempFile.name, Buffer.from(bufferData, 'base64'));
  // create a read stream from the temp file
  const source = fs.createReadStream(tempFile.name);
  // remove the temp file
  tempFile.removeCallback();
  return source;
}
Here is the code for creating the attachment:
return new Promise(async (resolve, reject) => {
  const source = await getTempFileSource(bufferData);
  if (!source)
    resolve(null);
  else {
    session.smoochClient.attachments
      .create({
        appId: appId,
        props: {
          for: 'message',
          access: 'public',
          appUserId: appUserId
        },
        source: source
      })
      .then(res => {
        resolve(res);
      }).catch(err => {
        reject(err);
      });
  }
});
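Since the workaround above shows the SDK is happy with a readable stream as source, a possible variation is to skip the temporary file and wrap the decoded bytes in an in-memory stream. This is untested against smooch-core; whether its form handling accepts a stream that is not backed by a file is an assumption, so fall back to the temp-file approach if it is rejected:
const { Readable } = require('stream');

// wrap the decoded base64 payload in a readable stream instead of writing a temp file
function bufferToSource(bufferData) {
  const base64 = bufferData.startsWith('data:')
    ? bufferData.split('base64,')[1]
    : bufferData;
  return Readable.from(Buffer.from(base64, 'base64'));
}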

Read/write binary data to MongoDB in Node.js

I've been able to successfully write binary data (an image) to MongoDB in Node.js. However I can't find clear documentation on how to read it back.
Here's how I'm writing the image to MongoDB:
var imageFile = req.files.myFile;
var imageData = fs.readFileSync(imageFile.path);
var imageBson = {};
imageBson.image = new db.bson_serializer.Binary(imageData);
imageBson.imageType = imageFile.type;
db.collection('images').insert(imageBson, {safe: true},function(err, data) {
I'd appreciate any pointers on reading the image from Mongo using Node. I'm assuming there's a function like "db.bson_deserializer...". Thanks!
Found the answer:
var imageFile = req.files.myFile;
fs.exists(imageFile.path, function(exists) {
  if (exists) {
    console.log("File uploaded: " + util.inspect(imageFile));
    fs.readFile(imageFile.path, function(err, imageData) {
      if (err) {
        res.end("Error reading your file on the server!");
      } else {
        // when saving an object with an image's byte array
        var imageBson = {};
        //var imageData = fs.readFileSync(imageFile.path);
        imageBson.image = new req.mongo.Binary(imageData);
        imageBson.imageType = imageFile.mimetype;
        console.log("imageBson: " + util.inspect(imageBson));
        req.imagesCollection.insert(imageBson, {safe: true}, function(err, bsonData) {
          if (err) {
            res.end({ msg: 'Error saving your file to the database!' });
          } else {
            fs.unlink(imageFile.path); // deletes the file from the local disk
            var imageBson = bsonData[0];
            var imageId = imageBson._id;
            res.redirect('images/' + imageId);
          }
        });
      }
    });
  } else {
    res.end("Oddly your file was uploaded but doesn't seem to exist!\n" + util.inspect(imageFile));
  }
});
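The snippet above only covers the insert; reading the image back (which is what the question asked) is mostly a findOne plus setting the content type. A rough sketch of the images/:id route that the code redirects to, assuming the same req.mongo and req.imagesCollection helpers (the ObjectID constructor is spelled ObjectId in newer drivers):
app.get('/images/:id', function(req, res) {
  var id = new req.mongo.ObjectID(req.params.id);
  req.imagesCollection.findOne({ _id: id }, function(err, imageBson) {
    if (err || !imageBson) {
      return res.status(404).end("Image not found");
    }
    // imageBson.image is a BSON Binary; .buffer exposes the raw bytes
    res.set('Content-Type', imageBson.imageType);
    res.end(imageBson.image.buffer);
  });
});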
The MongoDB part isn't complicated. Once a Buffer is in the model, just let the db save it. MongoDB converts that into BinData. 80% of this code is just getting an image into and out of a PNG file.
People say don't store images in MongoDB, but icons/thumbs are tiny. Having said that, it might be a good idea to have an icons collection and only store them once using a hash of the image data as the _id.
model class example
class MyModel {
_icon: Buffer
get icon(): Buffer {
return this._icon
}
set icon(value: Buffer) {
this._icon = value
}
}
image helper
static async loadImage(url: string) {
  var files = require('../lib/files')
  var buffer = await files.urlContents(url, true)
  return buffer
}
static async saveImage(image: Buffer, path: string) {
  var files = require('../lib/files')
  // after a round trip through MongoDB the icon comes back as a BSON Binary,
  // so .buffer exposes the raw bytes to write out
  files.write(path, image.buffer)
  return path
}
files helper
function urlResponse(url, binary) {
  var request = require("request")
  if (binary)
    request = request.defaults({ encoding: null })
  return new Promise(function (resolve, reject) {
    request(url, function (error, res, body) {
      if (error || res.statusCode !== 200 || body.includes('Incomplete response received from application'))
        resolve({ statusCode: res?.statusCode !== 200 ? (res?.statusCode || 500) : 500 });
      else
        resolve(res);
    });
  });
}
async function urlContents(url, binary) {
  var res = await urlResponse(url, binary)
  if (binary)
    return Buffer.from(res.body)
  else
    return res.body
}
function write(fileName, contents) {
  fs.writeFileSync(fileName, contents)
}
mongodb helper
// ...saving
myModel.icon = await loadImage('http://some.site.com/image.png')
await collection.insertOne(myModel)
// ...getting
myModel = await collection.findOne(query) // now myModel contains the icon
saveImage(myModel.icon, '/home/toddmo/pictures/wow.png')

How to render image as pdf (canvas and pdfkit)

I have a function in Node.js that generates an image from many images and then generates a PDF file from it. I'm trying with just one image for now, but I need to add more, and this doesn't seem to work.
function HelperHandler() {
  this.pdf = function(req, res, next) {
    var doc = new PDFDocument;
    mergeImages(function(err, image) {
      if (err)
        return res.json(err);
      doc.image(image, 100, 100);
      doc.output(function(string) {
        res.contentType = "application/pdf";
        res.send(string);
      });
    })
  }
}
var mergeImages = function(callback) {
  var Canvas = require("canvas")
    , fs = require("fs");
  fs.readFile(global.root_path + "/images/bg.jpg", function(err, data) {
    if (err)
      callback("error loading image");
    else {
      var canvas = new Canvas(408, 939)
        , img = new Canvas.Image(data);
      ctx = canvas.getContext("2d");
      img.onload = function() {
        ctx.drawImage(img, 0, 0, 408, 939);
      }
      canvas.toDataURL('image/png', function(err, str) {
        callback(null, str);
      });
    }
  });
}
Error
Error: ENAMETOOLONG, name too long 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAZgAAAOrCAYAAABqSpB/AAAABmJLR0QA/wD/AP+gvaeTAAAF5UlEQVR4nO3BMQEAAADCoPVPbQo/oAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAICjAWckAAHF4EUcAAAAAElFTkSuQmCC'
at Object.openSync (fs.js:427:18)
at Object.readFileSync (fs.js:284:15)
at Function.open (/Users/jtomasrl/code/app/server/node_modules/pdfkit/js/image.js:27:28)
at PDFDocument.image (/Users/jtomasrl/code/app/server/node_modules/pdfkit/js/mixins/images.js:27:26)
at /Users/jtomasrl/code/app/server/lib/handler/current/helper.js:15:11
at /Users/jtomasrl/code/app/server/lib/handler/current/helper.js:41:9
at /Users/jtomasrl/code/app/server/node_modules/canvas/lib/canvas.js:217:7
You can use a buffer or a path with PDFKit image.
But you can't use a base64 data URL; you need to decode the string into a buffer.
To use base64 data:
doc.image(Buffer.from(image.replace('data:image/png;base64,', ''), 'base64'), 100, 100); // decodes the base64 into a Buffer
More information on base64 de/encode with Node Buffer here.
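For reference, a recent node-canvas can also skip the data-URL round trip entirely by handing PDFKit a buffer. A rough sketch (createCanvas, loadImage, and canvas.toBuffer are current node-canvas APIs; the 408x939 size comes from the question):
const { createCanvas, loadImage } = require('canvas');

async function mergeImages() {
  const canvas = createCanvas(408, 939);
  const ctx = canvas.getContext('2d');
  const img = await loadImage(global.root_path + '/images/bg.jpg');
  ctx.drawImage(img, 0, 0, 408, 939);
  // toBuffer() returns a PNG buffer, which doc.image() accepts directly
  return canvas.toBuffer('image/png');
}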
