Image with 0 bytes in Firebase Storage upload from Node.js

I'm trying to upload an image to Firebase Storage from Node.js.
I followed the steps Firebase gives in their docs and everything runs fine, but once the image is in Storage its size is 0 bytes and you cannot see the preview.
This is my code:
const uploadImageToStorage = (file, filename) => {
  let prom = new Promise((resolve, reject) => {
    if (!file) {
      reject('No image file');
    }
    let newFileName = `${file.originalname}_${Date.now()}`;
    let fileUpload = bucket.file(newFileName);
    const blobStream = fileUpload.createWriteStream({
      metadata: {
        contentType: file.mimetype
      }
    });
    blobStream.on('error', (error) => {
      reject('Something is wrong! Unable to upload at the moment.');
    });
    blobStream.on('finish', () => {
      // The public URL can be used to directly access the file via HTTP.
      const url = `https://storage.googleapis.com/${bucket.name}/${fileUpload.name}`;
      resolve(url);
    });
    blobStream.end(file.buffer);
  });
  return prom;
}
This is my app.post method:
app.post('/Upload_img', multer.single("file"), function (req, res) {
  console.log("Upload Imagennes");
  let url = "";
  let file = req.file;
  if (file) {
    uploadImageToStorage(file, file.name).then((success) => {
      url = success;
      res.status(200).send(url);
    }).catch((error) => {
      console.error(error);
    });
  }
});
The Storage: (screenshot of the Firebase console showing the uploaded file with a size of 0 bytes)

I managed to upload a png file using your uploadImageToStorage function. I was calling it directly, without using multer:
const fs = require('fs');

(async () => {
  const buffer = fs.readFileSync('./android.png');
  const file = {
    originalname: 'android.png',
    mimetype: 'image/png',
    buffer,
  };
  try {
    return await uploadImageToStorage(file, file.originalname);
  } catch (err) {
    console.log(err);
  }
})();
This works as expected, so the problem is probably in your Express.js code or in multer. Try logging req.file and req.file.buffer in the server code and check that they contain the expected data.
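If req.file.buffer logs as undefined, a likely cause is multer's storage engine: only memoryStorage keeps the uploaded bytes in a buffer, while disk storage leaves file.buffer undefined, so blobStream.end(undefined) writes 0 bytes. A minimal sketch of that configuration (the field name "file" matches your route; the size limit is just an example):
const Multer = require('multer');

// Keep uploads in memory so req.file.buffer is populated;
// with disk storage, file.buffer is undefined.
const multer = Multer({
  storage: Multer.memoryStorage(),
  limits: { fileSize: 5 * 1024 * 1024 } // e.g. cap uploads at 5 MB
});

app.post('/Upload_img', multer.single('file'), (req, res) => {
  console.log(req.file);        // should show originalname, mimetype, size
  console.log(req.file.buffer); // should be a non-empty Buffer
  // ... then call uploadImageToStorage(req.file, req.file.originalname) as above
});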

Related

fs writes file with web image link instead of the image itself

I am using Node's fs module.
When I run the following code:
return new Promise((resolve, reject) => {
  if (!fs.existsSync(`./${imageDescription}`)) {
    axios.get(imageUrl).then((images) => {
      fs.writeFile(`./${imageDescription}`, images.data['hits'][0]['largeImageURL'], function (err, data) {
        if (err) {
          return console.log(err);
        }
        resolve(data);
      });
    });
  } else {
    fs.readFile(`./${imageDescription}`, (err, data) => {
      if (err) {
        console.error(err);
        return;
      }
      resolve(data);
    });
  }
});
Only the image URL, which starts with https:// and ends in .png, is saved in the file, but I want the image itself to be saved there.
Below you can find a piece of code that fetches an image from a URL and saves it with a random name and its extension.
const fs = require('fs');
const { randomUUID } = require('crypto');
const axios = require('axios');

const imgUrl = 'https://i.stack.imgur.com/ILTQq.png';
const randomId = randomUUID();
const fileExtension = imgUrl.split('.').pop();

axios
  .get(imgUrl, { responseType: 'stream' })
  .then((response) => {
    response.data.pipe(fs.createWriteStream(`${randomId}.${fileExtension}`));
  })
  .catch((error) => {
    console.log(error);
  });
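If you want to keep the promise-based shape from the question, you can resolve only once the write stream has flushed to disk; a small sketch under that assumption (imageUrl and imageDescription are the same placeholders as above):
const fs = require('fs');
const axios = require('axios');

function saveImage(imageUrl, imageDescription) {
  return new Promise((resolve, reject) => {
    axios
      .get(imageUrl, { responseType: 'stream' })
      .then((response) => {
        const writer = fs.createWriteStream(`./${imageDescription}`);
        response.data.pipe(writer);
        // Resolve only when the bytes have actually been written to disk.
        writer.on('finish', resolve);
        writer.on('error', reject);
      })
      .catch(reject);
  });
}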

How to make express download link with GridFsBucket?

As the title says, how do you make a direct download link for a file from MongoDB (GridFSBucket) using Express?
The file should be downloadable from memory, as I don't want to save it temporarily on the server.
I have this method:
async function downloadFileFromDB(fileId) {
  var gridfsbucket = new mongoose.mongo.GridFSBucket(mongoose.connection.db, {
    chunkSizeBytes: 1024,
    bucketName: 'filesBucket'
  });
  try {
    const stream = gridfsbucket.openDownloadStream(fileId);
    const fileBuffer = Buffer.from(stream);
    return fileBuffer;
  } catch (err) {
    stream.on('error', () => {
      console.log("Some error occurred in download:" + error);
    });
    console.log(err);
  }
}
And this route:
router.get('/download-file', async (req, res) => {
  const fileId = req.query.fileId;
  const ObjectFileId = new ObjectId(fileId);
  const fileBuffer = await fileFacade.downloadFileFromDB(ObjectFileId);
  res.download(fileBuffer);
});
But res.download wants a path, not a buffer. I'm also not sure I can make a buffer directly from the openDownloadStream method.
Can anyone help?
I believe you need to write the data to your res object. I accomplished this like this:
const readStream = gridfs.openDownloadStreamByName(filename);

readStream.on("data", (chunk) => {
  res.write(chunk);
});

readStream.on("end", () => {
  res.status(200).end();
  mongoClient.close();
});

readStream.on("error", (err) => {
  console.log(err);
  res.status(500).send(err);
});
So, you may just have to do:
res.write(fileBuffer);
res.end();
// instead of:
// res.download(fileBuffer);
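Alternatively, you can skip the buffer entirely and pipe the GridFS download stream straight into the response. A hedged sketch, reusing the bucket setup from the question and adding a Content-Disposition header so the browser treats it as a download (the filename is a placeholder):
router.get('/download-file', (req, res) => {
  const gridfsbucket = new mongoose.mongo.GridFSBucket(mongoose.connection.db, {
    bucketName: 'filesBucket'
  });
  const fileId = new ObjectId(req.query.fileId);

  // Suggest a file name so the browser downloads instead of rendering inline.
  res.set('Content-Disposition', 'attachment; filename="download"');

  gridfsbucket
    .openDownloadStream(fileId)
    .on('error', (err) => {
      console.log(err);
      res.status(404).end();
    })
    .pipe(res);
});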

Downloading large files and uploading them to MinIO with Node.js killed my service?

I have an API built with Node.js. In that API there is a process that downloads a large file using the request-promise module and then makes a new buffer that is uploaded to MinIO. The problem is that my API always crashes if the file is above 80-100 MB, and Node.js is killed on the server. How do I handle this?
This function downloads the file and converts it into a buffer:
const convertLink = async link => {
  const options = {
    uri: link,
    encoding: null,
    headers: {
      'Content-type': 'application/octet-stream'
    }
  };
  const res = rp.get(options)
    .then((body) => {
      console.log(body);
      const a = Buffer.from(body);
      return a;
    })
    .catch(err => {
      console.log(err);
      return err;
    });
  return res;
};
This is the function for uploading files to MinIO using the MinIO SDK:
const streamUpload = async (bucketName, objectName, newBuffer) => {
  try {
    const isUploaded = await minioClient.putObject(bucketName, objectName, newBuffer);
    if (isUploaded) {
      return isUploaded;
    }
  } catch (err) {
    return err;
  }
};
I think the issue here is that you are downloading the file, keeping it in memory, and then uploading it to your minioClient, which is not recommended for large files. You should download the file as a stream and then upload it as a stream too; keeping large files in memory is a likely reason your Node.js server gets killed.
You can try the following example, in which I use the request npm library to download the file, save it as a stream to a temporary location, and then read the file from that temporary location when uploading it elsewhere:
Downloading the file:
const fs = require('fs');
const request = require('request');

const downloadFile = async (url) => {
  try {
    let tempLocation = "./temp/";
    let fileName = "myfile";
    return new Promise((resolve, reject) => {
      request
        .get(url)
        .on('response', function (response) {
          console.log(response.statusCode); // 200
          console.log(response.headers['content-type']);
        })
        .on('error', function (error) {
          console.log('downloading error', error);
          reject();
        })
        .on('end', async function () {
          console.log("download finished");
          resolve();
        })
        .pipe(fs.createWriteStream(tempLocation + '/' + fileName));
    });
  } catch (error) {
    console.log("error in downloadFile", error);
    throw error;
  }
}
Now you can upload the file to your minioClient as a stream. You can use fs.createReadStream(file) to read the file as stream data from that temporary location.
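For the upload side, the MinIO SDK's putObject also accepts a readable stream, so the whole file never has to sit in memory; a minimal sketch (bucket, object, and path names are placeholders):
const fs = require('fs');

const uploadFromDisk = (bucketName, objectName, filePath) => {
  // Stream from the temporary location instead of buffering the whole file.
  const fileStream = fs.createReadStream(filePath);
  const { size } = fs.statSync(filePath);
  // Without a callback, putObject returns a promise.
  return minioClient.putObject(bucketName, objectName, fileStream, size);
};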

How to add an item with an image using react-redux-saga and Node.js/multer

I am trying to save an item that has images in the form. I was able to save the items using Postman, but I get an issue when trying from react-redux-saga. I am using Ant Design for the form.
Front-end code:
handleSubmit = e => {
  e.preventDefault();
  this.props.form.validateFields((err, values) => {
    if (!err) {
      const { fileList } = this.state;
      const formData = new FormData();
      fileList.forEach(file => {
        formData.append("files[]", file);
      });
      const postItemData = { ...values, images: formData };
      this.props.createItem(postItemData);
    }
  });
};
saga:
When I log it, I can see the item details with the image field as FormData:
{
  let itemData = yield select(makeSelectPostItemData());
  const token = yield select(makeSelectToken());
  console.log(itemData, "itemData");
  const response = yield call(request, `/item`, {
    method: "POST",
    headers: {
      Authorization: `${token}`
    },
    body: JSON.stringify(itemData)
  });
  const successMessage = "Item saved successfully!";
  yield put(postItemSuccess(response, successMessage));
}
Node.js:
const upload = multer({
  storage: storage,
  limits: { fileSize: 1000000 },
  fileFilter: function(req, file, cb) {
    checkFileType(file, cb);
  }
}).array("images");

upload(req, res, err => {
  if (err) {
    console.log(err, "error");
    res.send({ msg: err });
  } else {
    console.log(req, "req");
    if (req.files === undefined) {
      res.send({ msg: "Error: No file selected!" });
    } else {
      const { errors, isValid } = validatePostItem(req.body);
      // check validation
      if (!isValid) {
        return res.status(400).json(errors);
      }
      const files = req.files;
    }
  }
});
On the last line, const files = req.files: when I try from Postman, I get the file details, but when I send the request from React, req.files is empty. How can I get req.files from React the way I do from Postman?
I have found the solution for this. In the handleSubmit, I have following code:
fileList.forEach(file => {
  formData.append("files[]", file);
});
const postItemData = { ...values, images: formData };
this.props.createItem(postItemData);
Instead of spreading values and inserting the formData in images, what I did was put all of them into formData.
fileList.forEach(file => {
  formData.append("files[]", file);
});
formData.append("name", values.name);
formData.append("price", values.price);
formData.append("detail", values.detail);
this.props.createItem(formData);
With this, I could get req.files as well as req.body.name, req.body.price, and req.body.detail on the Node side. Quite different from Postman, but this worked for me.
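One detail that is easy to miss on the saga side: the FormData has to be sent as the body directly, without JSON.stringify, and without a hand-set Content-Type header so the browser can add the multipart boundary itself. A sketch of the adjusted call (same request helper as above):
const response = yield call(request, `/item`, {
  method: "POST",
  headers: {
    Authorization: `${token}`
    // no Content-Type here: the browser sets multipart/form-data
    // with the correct boundary when the body is FormData
  },
  body: formData // not JSON.stringify(formData)
});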

Uploading a buffer to Google Cloud Storage

I'm trying to save a Buffer (of a file uploaded from a form) to Google Cloud Storage, but it seems like the Google Node SDK only allows files with a given path to be uploaded (read/write streams).
This is what I have used for AWS (S3) - is there anything similar in the Google Node SDK?
var fileContents = new Buffer('buffer');
var params = {
  Bucket: // bucket name
  Key: // file name
  ContentType: // set mimetype
  Body: fileContents
};

s3.putObject(params, function(err, data) {
  // Do something
});
The only way that I have found to do it so far is to write the buffer to disk, upload the file using the SDK (specifying the path to the new file), and then delete the file once it's uploaded successfully. The downside is that the whole process is significantly slower, to the point where it seems unfeasible to use Google Storage. Is there any workaround / way to upload a buffer?
Use .save to save the day! Some code below where I save my "pdf" that I created.
https://googleapis.dev/nodejs/storage/latest/File.html#save
const path = require("path");
const { Storage } = require("@google-cloud/storage");

const gc = new Storage({
  keyFilename: path.join(__dirname, "./path to your service account .json"),
  projectId: "your project id",
});

const file = gc.bucket(bucketName).file("tester.pdf");
file.save(pdf, (err) => {
  if (!err) {
    console.log("cool");
  } else {
    console.log("error " + err);
  }
});
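Note that file.save also returns a promise when no callback is passed, so the same upload can be written with async/await; a small sketch assuming the same gc client and pdf buffer:
const file = gc.bucket(bucketName).file("tester.pdf");

(async () => {
  try {
    // With no callback, save() returns a promise.
    await file.save(pdf, { metadata: { contentType: "application/pdf" } });
    console.log("cool");
  } catch (err) {
    console.log("error " + err);
  }
})();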
This is actually easy:
const stream = require('stream');

let remotePath = 'some/key/to/store.json';
let localReadStream = new stream.PassThrough();
localReadStream.end(JSON.stringify(someObject, null, ' '));

let remoteWriteStream = bucket.file(remotePath).createWriteStream({
  metadata: {
    contentType: 'application/json'
  }
});

localReadStream.pipe(remoteWriteStream)
  .on('error', err => {
    return callback(err);
  })
  .on('finish', () => {
    return callback();
  });
We have an issue about supporting this more easily: https://github.com/GoogleCloudPlatform/gcloud-node/issues/1179
But for now, you can try:
file.createWriteStream()
  .on('error', function(err) {})
  .on('finish', function() {})
  .end(fileContents);
The following snippet is from a Google example. The example assumes you have used multer, or something similar, and can access the file at req.file. You can stream the file to Cloud Storage using middleware that resembles the following:
function sendUploadToGCS(req, res, next) {
  if (!req.file) {
    return next();
  }

  const gcsname = Date.now() + req.file.originalname;
  const file = bucket.file(gcsname);

  const stream = file.createWriteStream({
    metadata: {
      contentType: req.file.mimetype
    },
    resumable: false
  });

  stream.on('error', (err) => {
    req.file.cloudStorageError = err;
    next(err);
  });

  stream.on('finish', () => {
    req.file.cloudStorageObject = gcsname;
    file.makePublic().then(() => {
      req.file.cloudStoragePublicUrl = getPublicUrl(gcsname);
      next();
    });
  });

  stream.end(req.file.buffer);
}
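For context, that middleware is meant to sit after multer in the route chain; a hedged usage sketch (the route path and final handler are placeholders, and multer must use memory storage so req.file.buffer exists):
app.post(
  '/upload',
  multer.single('file'), // populates req.file (memory storage keeps the buffer)
  sendUploadToGCS,       // streams the buffer to the bucket
  (req, res) => {
    res.send(req.file.cloudStoragePublicUrl);
  }
);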
I have this approach working for me:
const destFileName = `someFolder/${file.name}`;
const fileCloud = this.storage.bucket(bucketName).file(destFileName);

fileCloud.save(file.buffer, {
  contentType: file.mimetype
}, (err) => {
  if (err) {
    console.log("error");
  }
});
