How to decode image in node js with using bodyparser - node.js

I have tried to decode an image file in Node.js with body-parser and upload it to a server URL, but I didn't succeed in uploading and parsing the image from its base64 data.
// Decode a base64 profile image from the request body and write it to disk
// as a PNG with a random short name.
// NOTE(review): this excerpt is truncated — the `if` block is never closed here.
if (req.method === 'POST') {
//base64_decode(req.body.profileImg, 'copy.jpeg');
//console.log(req.body.profileImg);
var NewImageName = Math.random().toString(36).substring(7);
var imageBuffer = decodeBase64Image(req.body.profileImg);
// NOTE(review): the callback ignores `err`, so write failures are silent, and
// `imageBuffer.data` is dereferenced without checking for a decode error.
fs.writeFile('../assets/images/seller/'+NewImageName+'.png', imageBuffer.data, function(err) {
});

You can try this code for decoding the base64 image.
// Decode a base64 data URI (e.g. "data:image/png;base64,....") into an object
// { type, data } where `type` is the MIME type and `data` is a Buffer.
// Returns an Error object (does not throw) when the input is not a valid
// data URI, so callers must check the result before using `.data`.
function decodeBase64Image(dataString) {
  var matches = dataString.match(/^data:([A-Za-z-+\/]+);base64,(.+)$/);
  // match() returns null when the string is not a data URI; the original code
  // crashed here with "Cannot read property 'length' of null".
  if (!matches || matches.length !== 3) {
    return new Error('Invalid input string');
  }
  return {
    type: matches[1],
    // Buffer.from replaces the deprecated `new Buffer(...)` constructor.
    data: Buffer.from(matches[2], 'base64')
  };
}
// POST /UploadImages — decode a base64 data URI from req.body.profileImg and
// persist it as a PNG under ../assets/images/seller/, responding with the
// generated image name.
app.post('/UploadImages', function (req, res) {
  // Express only routes POSTs here, but keep the original guard.
  if (req.method === 'POST') {
    // NOTE(review): Math.random() names can collide — prefer crypto.randomBytes
    // or a UUID in production.
    var NewImageName = Math.random().toString(36).substring(7);
    var imageBuffer = decodeBase64Image(req.body.profileImg);
    // decodeBase64Image returns an Error object on bad input — check it
    // instead of dereferencing .data on an Error.
    if (imageBuffer instanceof Error) {
      return res.status(400).json({ error: 'Invalid image data' });
    }
    fs.writeFile('../assets/images/seller/' + NewImageName + '.png', imageBuffer.data, function (err) {
      // Report write failures instead of silently ignoring them, and only
      // respond once the file is actually on disk.
      if (err) {
        return res.status(500).json({ error: 'Failed to save image' });
      }
      // res.json(status, body) is deprecated in Express 4 — use res.status().json().
      res.status(200).json({ profileImgName: NewImageName });
    });
  }
});
Just get your base64-encoded string in your Node.js function and pass it to the decodeBase64Image function I created above, which will decode the image so you can then upload it.
Thanks

Related

How can I send a file from the Backend to download in the browser?

When I make the request with Postman or Thunder Client I can save the file, but when I try to do it from the browser I can't save it and the size is even larger than the original.
// GET /files/download/:id — look up the file's metadata in the database and
// stream the file from disk to the client as an attachment download.
router.get('/files/download/:id', async (req, res) => {
  try {
    const { id } = req.params;
    const file = await FileArea.findById(id);
    if (!file) {
      return res.status(404).send('File not found');
    }
    if (!fs.existsSync(file.path)) {
      return res.status(404).send('File not found');
    }
    // Quote the filename so names containing spaces or semicolons don't break
    // the header, and send the stored MIME type so the browser doesn't guess.
    res.set('Content-Disposition', 'attachment; filename="' + file.name + '"');
    res.set('Content-Type', file.type);
    // Stream straight from disk instead of readFileSync + PassThrough: that
    // buffered the whole file in memory for no benefit.
    const readStream = fs.createReadStream(file.path);
    readStream.on('error', () => res.status(500).send('Internal server error'));
    return readStream.pipe(res);
  } catch (error) {
    return res.status(500).send('Internal server error');
  }
});
With this code from the frontend I try to put what is sent to a blob, which size is larger than the original file.
Example: The original file is 421KB and the Blob is 800KB
// Fetch file `id` from the backend and trigger a browser download through a
// temporary <a download> link and an object URL.
// NOTE(review): this excerpt is truncated — the try/catch and method braces
// are not closed here.
async downloadFiles(id: string) {
try {
const { data } = await downloadFile(id);
// NOTE(review): if downloadFile does not request responseType 'blob' (or
// 'arraybuffer'), `data` arrives as a decoded string and the re-wrapped Blob
// can be larger/corrupted vs. the original — likely the size mismatch
// described above. Confirm the HTTP client's response type.
const blob = new Blob([data], { type: data.type });
const url = window.URL.createObjectURL(blob);
const link = document.createElement('a');
link.href = url;
// NOTE(review): a plain Blob has no `name`/`type` properties of its own —
// verify the backend actually returns these fields on the body.
link.setAttribute('download', data.name);
document.body.appendChild(link);
link.click();
link.remove();

Piping a multipart/form-data request to another request while changing some fields names in Node.js

I want to stream a file upload request in multipart/form-data to another server and change some fields name at the same time.
I don't want to store temporarily a file on disk and don't want to store the file completely in memory either.
I tried to use multer, busboy and multiparty. I think I got closer by using custom Transform streams but it is not working yet.
const express = require('express');
const request = require('request');
const { Transform } = require('stream');
const router = express.Router();
// Pass-through Transform stream: forwards every chunk unchanged. The chunk
// could be inspected or rewritten here before being pushed downstream.
class TransformStream extends Transform {
  _transform(chunk, _encoding, done) {
    this.push(chunk);
    done();
  }

  _flush(done) {
    done();
  }
}
// Proxy the incoming multipart body through the transform to another server.
router.post('/', function pipeFile(req, res) {
const transformStream = new TransformStream();
// NOTE(review): naively mutating multipart chunks breaks the MIME framing
// (boundary / Content-Length mismatch), which is a plausible cause of the
// EPIPE mentioned below.
req.pipe(transformStream).pipe(request.post('http://somewhere.com'));
// NOTE(review): 204 is sent immediately, before the upstream request
// finishes — respond from a completion callback if the outcome matters.
res.sendStatus(204);
});
I tried to manipulate chunks in _transform without success (EPIPE). It sounds quite hacky; are there any better solutions?
Here is a solution using replacestream along with content-disposition.
const replaceStream = require('replacestream');
const contentDisposition = require('content-disposition');
// Stream the incoming multipart body to the upstream server, rewriting every
// Content-Disposition header's parameters (field name forced to "foo") on
// the fly with replacestream.
router.post('/', function pipeFile(req, res) {
  const changeFields = replaceStream(/Content-Disposition:\s+(.+)/g, (match, p1) => {
    // Parse the matched header value.
    const parsed = contentDisposition.parse(p1);
    const parameters = parsed.parameters;
    // Change the desired field.
    parameters.name = "foo";
    // Rebuild the header line from the (modified) parameters.
    const parts = [`Content-Disposition: ${parsed.type}`];
    for (const key in parameters) {
      parts.push(`${key}="${parameters[key]}"`);
    }
    return parts.join('; ');
  });
  req.pipe(changeFields)
    .pipe(request.post('http://somewhere.com'))
    .on('end', () => {
      res.sendStatus(204);
    });
});
This worked for a single file multipart upload using express, multiparty, form-data, pump and got.
const stream = require('stream');
const express = require('express');
const multiparty = require("multiparty");
const got = require("got");
const FormData = require('form-data');
const pump = require('pump');
const app = express();
// POST /upload — stream a single multipart file upload straight through to a
// remote image-upload endpoint without buffering it on disk or fully in memory.
app.post('/upload', (req, res) => {
  const url = "<<multipart image upload endpoint>>";
  var form = new multiparty.Form();
  form.on("part", function (formPart) {
    var contentType = formPart.headers['content-type'];
    var formData = new FormData();
    // Re-wrap the incoming part as a form-data field; knownLength lets
    // form-data emit a correct Content-Length without buffering the part.
    formData.append("file", formPart, {
      filename: formPart.filename,
      contentType: contentType,
      knownLength: formPart.byteCount
    });
    const resultStream = new stream.PassThrough();
    try {
      // Pipe the formdata to the image upload endpoint stream and the result to the result stream
      // NOTE(review): rejectUnauthorized:false disables TLS verification — dev only.
      pump(formData, got.stream.post(url, {headers: formData.getHeaders(), https:{rejectUnauthorized: false}}), resultStream, (err) => {
        if (err) {
          // BUG FIX: the original sent an undefined variable `error` here;
          // the pump callback's parameter is `err`.
          res.send(err);
        } else {
          // Pipe the result of the image upload endpoint to the response when there are no errors.
          resultStream.pipe(res);
        }
        resultStream.destroy();
      });
    }
    catch (err) {
      resultStream.destroy();
      console.log(err);
    }
  });
  form.on("error", function (error) {
    console.log(error);
  });
  form.parse(req);
});

Streaming image data from node server results in corrupted file (gridfs-stream)

I decided to post this after extensive searching here (1, 2, 3 ) and here (1, 2) and many, many other related posts. I am losing hope, but will not give up that easily :)
I'm using multer to upload a PNG image to mongo database:
// Multer storage engine backed by MongoDB GridFS: every uploaded file gets a
// random 32-hex-char filename (avoids duplicate names) and is stored in the
// "media" bucket with the owning client's id recorded in its metadata.
const storage = new GridFsStorage({
// NOTE(review): credentials are hard-coded in the connection string — move to
// config/env. The '#' also looks like a mangled '@' host separator — confirm.
url: 'mongodb://my_database:thisIsfake#hostName/my_database',
file: (req, file) => {
return new Promise((resolve, reject) => {
crypto.randomBytes(16, (err, buf) => { // generating unique names to avoid duplicates
if (err) {
return reject(err);
}
// Keep the original extension so the stored name stays recognizable.
const filename = buf.toString('hex') + path.extname(file.originalname);
const fileInfo = {
filename: filename,
bucketName: 'media',
metadata : {
clientId : req.body.client_id // added metadata to have a reference to the client to whom the image belongs
}
};
resolve(fileInfo);
});
});
}
});
// Accept exactly one file from the multipart field named 'image'.
const upload = multer({storage}).single('image');
Then I create a stream and pipe it to response:
loader: function (req, res) {
var conn = mongoose.createConnection('mongodb://my_database:thisIsfake#hostName/my_database');
conn.once('open', function () {
var gfs = Grid(conn.db, mongoose.mongo);
gfs.collection('media');
gfs.files.find({ metadata : {clientId : req.body.id}}).toArray(
(err, files) => {
if (err) throw err;
if (files) {
const readStream = gfs.createReadStream(files[0].filename); //testing only with the first file in the array
console.log(readStream);
res.set('Content-Type', files[0].contentType)
readStream.pipe(res);
}
});
});
}
Postman POST request to end point results in response body being displayed as an image file:
In the front end I pass the response in a File object, read it and save the result in a src attribute of img:
// Request the current patient's image from the backend and append it to
// #imgContainer as a data-URL <img>.
findAfile(){
let Data = {
id: this.$store.state.StorePatient._id,
};
console.log(this.$store.state.StorePatient._id);
// NOTE(review): without { responseType: 'blob' } (or 'arraybuffer') the HTTP
// client decodes the binary body as text, so wrapping res.data in a File
// corrupts the image — likely why Postman works but this does not. Confirm.
visitAxios.post('http://localhost:3000/client/visits/findfile', Data )
.then(res => {
const reader = new FileReader();
let file = new File([res.data],"image.png", {type: "image/png"});
console.log('this is file: ',file);
reader.readAsDataURL(file); // encode a string
reader.onload = function() {
const img = new Image();
img.src = reader.result;
document.getElementById('imgContainer').appendChild(img);
};
})
.catch( err => console.error(err));
}
My File object is similar to the one I get when using input field only bigger:
This is original file:
When inspecting element I see this:
Looks like data URI is where it should be, but it's different from the original image on file input:
Again, when I want to display it through input element:
onFileSelected(event){
this.file = event.target.files[0];
this.fileName = event.target.files[0].name;
const reader = new FileReader();
console.log(this.file);
reader.onload = function() {
const img = new Image();
img.src = reader.result;
document.getElementById('imageContainer').appendChild(img);
};
reader.readAsDataURL(this.file);
}
I get this:
But when reading it from the response, it is corrupted:
Postman gets it right, so there must be something wrong with my front-end code, right? How do I pass this gfs stream to my html?
I managed to make a POST request to fetch an image from MongoDB and save it in the server dir:
// Copy the first matching GridFS file to ./uploads/fileToGet.jpg on disk.
const readStream = gfs.createReadStream(files[0].filename);
const wstream = fs.createWriteStream(path.join(__dirname,"uploads", "fileToGet.jpg"));
// NOTE(review): nothing awaits the 'finish' event here — readers of the file
// must not assume the copy has completed when these lines return.
readStream.pipe(wstream);
Then, I just made a simple GET request by adding an absolute path to the file, and finally deleted the file after a successful response:
// GET /image — serve the previously saved fileToGet.jpg, then delete it once
// it has been fully read.
// NOTE(review): this excerpt is truncated — the closing `});` of the route is
// missing here.
app.get('/image', function (req, res) {
var file = path.join(dir, 'fileToGet.jpg');
// Path-traversal guard: only serve files that resolve inside `dir`.
if (file.indexOf(dir + path.sep) !== 0) {
return res.status(403).end('Forbidden');
}
// Map the extension to a MIME type; fall back to text/plain if unknown.
var type = mime[path.extname(file).slice(1)] || 'text/plain';
var s = fs.createReadStream(file);
s.on('open', function () {
res.set('Content-Type', type);
s.pipe(res);
});
// 'end' fires when the read stream is exhausted (not when the response has
// flushed) — deleting here is fine because the data is already buffered out.
s.on('end', function () {
fs.unlink(file, ()=>{
console.log("file deleted");
})
});
s.on('error', function () {
res.set('Content-Type', 'text/plain');
res.status(404).end('Not found');
});

Downloading image from the web with imagemagick and saving to parse

I'm able to download the image using http, and I am able to resize it using ImageMagick, but I can't figure out how to upload it to Parse. Parse only lets me upload an array of byte-value numbers OR a base64 string. What I'm confused about is how I can convert the stdout to a base64 string so that I can upload it to Parse. I tried using fs, but to no avail. I just keep getting errors when it tries to read the file and convert it. What is wrong with my code?
// Download a JPEG over HTTP(S), resize it with imagemagick, and save the
// result to Parse as a Parse.File attached to a VideoLinks object.
// NOTE(review): this is the broken code the question asks about — the bugs
// are flagged inline below.
var _ = require('underscore');
var url = require("url");
var srcUrl = 'https://i.ytimg.com/vi/JxwwGtquGqw/maxresdefault.jpg';
// Pick https vs http by sniffing the 5th character of the URL ('s' of https).
var http = srcUrl.charAt(4) == 's' ? require("https") : require("http");
var im = require('imagemagick');
var Image = require("parse-image");
var image_card_2x = [540, 350];
var thumb = '';
var fs = require('fs');
var request = http.get(url.parse(srcUrl), function(response) {
console.log("got data");
var data = '';
response.setEncoding('binary');
response.on('data', function(chunk) {
data += chunk;
});
// NOTE(review): this runs before any 'data' events fire, so it always logs
// an empty string.
console.log("data is " + data);
response.on('end', function () {
// var image = new Image();
// image.setData(response.buffer);
// image.scale({
// width: image_card_2x,
// height: Math.floor((image.height() * arrayElement[0]) / image.width())
// });
// image.setFormat("JPEG");
// console.log("about to make image");
im.resize({
srcData: data,
width: 404,
height: 269,
format: 'jpg'
}, function(err, buffer) {
if (err) throw err;
// NOTE(review): BUG — `file` is declared below with var (hoisted, so it is
// undefined here), and the resized image is already in `buffer`; there is no
// file on disk to read. The base64 should come from `buffer`, not readFileSync.
var bitmap = fs.readFileSync(file);
// NOTE(review): `new Buffer(...)` is deprecated — Buffer.from is the modern form.
var base64 = new Buffer(bitmap).toString('base64');
var file = new Parse.File("maa.jpg", {
base64: base64
});
console.log("about to save");
var VideoLinks = Parse.Object.extend("VideoLinks");
var videoLink = new VideoLinks();
videoLink.set("image", file);
return videoLink.save();
// NOTE(review): everything after the return above is unreachable.
console.log("saved");
//fs.writeFileSync('kittens-resized.jpg', stdout, 'binary');
console.log("worked!");
});
})
});

how to upload base64 data as image to s3 using node js?

I am sending base64data of canvas to node.js script. I need the base64data to be stored as an image to the s3bucket. Is there any way to achieve it?
Store your Data URI in a variable.
Create a function which decodes your data URI (a base64-encoded string) to a Blob (here I have created the dataURItoBlob() function) and returns it after decoding.
Pass that string to in body of S3 upload function.
var myDataUri = "data:image/jpg;base64,JVBERi0xLjMKMyAwIG9iago8PC9UeXBlIC9QYW..."
var myFile=dataURItoBlob(myDataUri);
// Decode a base64 data URI into a Blob (content type fixed to image/jpg, as
// in the surrounding example).
function dataURItoBlob(dataURI) {
  // Strip the "data:...;base64," prefix and decode the base64 payload.
  const binary = atob(dataURI.split(',')[1]);
  // Copy each decoded character code into a typed byte array.
  const bytes = new Uint8Array(binary.length);
  for (let i = 0; i < binary.length; i++) {
    bytes[i] = binary.charCodeAt(i);
  }
  return new Blob([bytes], {
    type: 'image/jpg'
  });
}
// Upload the decoded Blob to S3 and report the outcome in the results element.
// BUG FIX: the original read `if (myFile))` — a stray extra ')' that is a
// syntax error.
if (myFile) {
  results.innerHTML = '';
  var params = {
    Key: fileName+'.jpg',
    ContentType: 'image/jpg',
    Body: myFile
  };
  bucket.upload(params, function(err, data) {
    // NOTE(review): `file` here presumably refers to an outer variable —
    // confirm (perhaps `fileName` was intended).
    results.innerHTML = err ? 'ERROR!' : 'UPLOADED.: ' + file;
  });
} else {
  results.innerHTML = 'Nothing to upload.';
}
you can send base64 data with AWS putObject method as follows
// Upload a base64-encoded image to S3 with putObject.
// BUG FIX: the original passed the raw base64 string as the first argument,
// but putObject expects a params object; the Body must also be a decoded
// Buffer, otherwise S3 stores the literal base64 text.
var AWS = require('aws-sdk');
AWS.config.loadFromPath('./s3_config.json');
var s3Bucket = new AWS.S3( { params: {Bucket: 'myBucket'} } );
var imageBase64Data='Your base64 code '
var params = {
  Key: 'myImage.jpg',                            // object key inside the bucket
  Body: Buffer.from(imageBase64Data, 'base64'),  // decode before uploading
  ContentType: 'image/jpeg'
};
s3Bucket.putObject(params, function(err, data){
  if (err) {
    console.log(err);
    console.log('Error uploading data: ', data);
  } else {
    console.log('succesfully uploaded the image!');
  }
});

Resources