Cannot access before initialization - Node.js

I'm new to Node.js and OOP and need some help with my image upload system based on GridFS. My class has an "initializeStorage" method, and in the last line of that method "this.upload" is the middleware that I need to use in my routes file. In the routes file I tried using the "new" keyword on my class, but then I got an undefined error. I tried a few other ways to solve the problem, but I've run out of ideas. If anyone has a solution or a hint, let me know and write below. Thanks.
GridFS class file
// Imports implied by the snippet (gridfs-stream and multer-gridfs-storage):
const mongoose = require("mongoose");
const Grid = require("gridfs-stream");
const { GridFsStorage } = require("multer-gridfs-storage");
const multer = require("multer");
const path = require("path");

class MyGridFS {
  constructor() {
    let gfs = this.gfs;
    let gridfsBucket = this.gridfsBucket;
  }

  initializeGridFS(conn) {
    this.conn = conn;
    conn.once("open", () => {
      this.gridfsBucket = new mongoose.mongo.GridFSBucket(conn.db, {
        bucketName: "uploads",
      });
      this.gfs = Grid(conn.db, mongoose.mongo);
      this.gfs.collection("uploads");
    });
  }

  initializeStorage(mongo_uri) {
    this.mongo_uri = mongo_uri;
    const storage = new GridFsStorage({
      url: this.mongo_uri,
      file: (req, file) => {
        const filename = "file_" + Date.now() + path.extname(file.originalname);
        return {
          filename,
          bucketName: "uploads",
        };
      },
    });
    this.upload = multer({ storage });
  }
}
Routes file
upload.post("/upload", uploadMiddleware.single("file"), (req, res) =>
  uploadController.upload(req, res),
);
In "uploadMiddleware" place should be upload middleware from class before


Folder /tmp is not working in Vercel production, giving error EROFS

[Folder structure image]
Multer.js file:
const multer = require("multer");
const path = require("path");
const fs = require("fs");
const httpStatus = require("http-status");
const ApiError = require("../utils/ApiError");
const logger = require("../utils/logger");

const multerUpload = async (req, res, next) => {
  let fileName = "";
  let storage = multer.diskStorage({
    destination: function (req, file, callback) {
      fs.mkdir(path.join(path.resolve(), "/tmp"), (err) => {
        if (err) {
          logger.error("mkdir tmp %o", err);
        }
        callback(null, path.join(path.resolve(), "/tmp"));
      });
    },
    filename: function (req, file, callback) {
      fileName = file.fieldname + "-" + req.query.eventId + Date.now() + path.extname(file.originalname);
      logger.info("filename of uploadSheet===> %s", fileName);
      callback(null, fileName);
    },
  });
  // below code is to read the added data to DB from file
  var upload = multer({
    storage: storage,
    fileFilter: function (req, file, callback) {
      var ext = path.extname(file.originalname);
      if (ext !== '.xlsx') {
        return callback(new Error('Only Excel sheets are allowed'));
      }
      callback(null, true);
    },
  }).single("sheet");
  upload(req, res, async function (err) {
    if (err) {
      next(new ApiError(httpStatus.INTERNAL_SERVER_ERROR, err.message));
    } else {
      req.fileName = fileName;
      next();
    }
  });
};

module.exports = multerUpload;
It gives an EROFS (read-only file system) error in Vercel production, but the code works fine locally.
I'm trying to upload an Excel sheet through the API, then read the data from it and add it to MongoDB.
I once encountered this same problem with Heroku a long time ago. I haven't worked with Vercel, but after some quick research I'd say this is the cause: Vercel does not provide writable storage for you to upload files to in production. You need a separate service for that, like Amazon S3; Azure File Storage and Google Cloud Storage also exist.
Alternatively, if you don't want to add more services to your project, you can convert the file to a base64 string and save it as text (but make the field/column read-only so it doesn't get corrupted). NOT the best alternative, but it was something I once did.
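A rough sketch of that base64 alternative with multer, assuming an Express app: memoryStorage keeps the upload entirely in memory (no disk writes, so it also works on a read-only filesystem), and the model call is only a hypothetical placeholder:

const multer = require("multer");
const upload = multer({ storage: multer.memoryStorage() }); // file stays in req.file.buffer

app.post("/upload", upload.single("sheet"), async (req, res) => {
  // encode the raw bytes and store the string in MongoDB
  const base64 = req.file.buffer.toString("base64");
  // e.g. await FileModel.create({ name: req.file.originalname, data: base64 });
  res.json({ name: req.file.originalname, bytes: req.file.size });
});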
To use /tmp in serverless functions, you should just use /tmp/your-file. Remove path.resolve().
If you only need to store something temporarily, you may try the /tmp directory.
Limit: 512 MB, and no guarantee of persistence - https://github.com/vercel/vercel/discussions/5320
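Putting that together, a minimal sketch of the storage setup above with the /tmp advice applied (same filename logic, simplified destination; untested on Vercel, so treat it as a starting point):

let storage = multer.diskStorage({
  destination: function (req, file, callback) {
    // /tmp already exists in the function sandbox; no mkdir or path.resolve() needed
    callback(null, "/tmp");
  },
  filename: function (req, file, callback) {
    callback(null, file.fieldname + "-" + req.query.eventId + Date.now() + path.extname(file.originalname));
  },
});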

Delivering image from S3 to React client via Context API and Express server

I'm trying to download a photo from an AWS S3 bucket via an Express server to serve to a React app, but I'm not having much luck. Here are my (unsuccessful) attempts so far.
The workflow is as follows:
1. Client requests the photo after retrieving its key from the database via the Context API
2. Request is sent to an Express server route (important, so as to hide the true location from the client)
3. Express server route requests the blob file from the AWS S3 bucket
4. Express server parses the image to base64 and serves it to the client
5. Client updates state with the new image
React Client
const [profilePic, setProfilePic] = useState('');

useEffect(() => {
  actions.getMediaSource(tempPhoto.key) // stray "await" dropped: the .then chain already handles the promise
    .then(resp => {
      console.log('server resp: ', resp.data.data.newTest) // returns ����\u0000�\u0000\b\u0006\
      const url = window.URL || window.webkitURL;
      const blobUrl = url.createObjectURL(resp.data.data.newTest);
      console.log("blob ", blobUrl);
      setProfilePic({ ...profilePic, image: resp.data.data.newTest });
    })
    .catch(err => errors.push(err));
}, []);
Context API - just axios wrapped into its own library
getMediaContents = async ( key ) => {
  return await this.API.call(`http://localhost:5000/${MEDIA}/mediaitem/${key}`, "GET", null, true, this.state.accessToken, null);
}
Express server route
router.get("/mediaitem/:key", async (req, res, next) => {
try{
const { key } = req.params;
// Attempt 1 was to try with s3.getObject(downloadParams).createReadStream();
const readStream = getFileStream(key);
readStream.pipe(res);
// Attempt 2 - attempt to convert response to base 64 encoding
var data = await getFileStream(key);
var test = data.Body.toString("utf-8");
var container = '';
if ( data.Body ) {
container = data.Body.toString("utf-8");
} else {
container = undefined;
}
var buffer = (new Buffer.from(container));
var test = buffer.toString("base64");
require('fs').writeFileSync('../uploads', test); // it never wrote to this directory
console.log('conversion: ', test); // prints: 77+977+977+977+9AO+/vQAIBgYH - this doesn't look like base64 to me.
delete buffer;
res.status(201).json({ newTest: test });
} catch (err){
next(ApiError.internal(`Unexpected error > mediaData/:id GET -> Error: ${err.message}`));
return;
}
});
AWS S3 library - I wrote my own wrapper for the S3 bucket, as I'll need more of its functionality later.
const getFileStream = async (fileKey) => {
  const downloadParams = {
    Key: fileKey,
    Bucket: bucketName
  }
  // This was attempt 1's return, without async in the parameter
  return s3.getObject(downloadParams).createReadStream();
  // Attempt 2's intention was just to wait for the promise to be fulfilled.
  return await s3.getObject(downloadParams).promise();
}

exports.getFileStream = getFileStream;
If you've gotten this far, you may have realised that I've tried a couple of things from different sources and documentation, but I'm not getting any further.
If any further information is needed then just let me know.
Thanks in advance for your time!
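One detail worth noting about attempt 2: data.Body.toString("utf-8") mangles binary data before it is ever base64-encoded, and the repeated 77+9 in the log is exactly the base64 encoding of the UTF-8 replacement character. A hedged sketch of encoding the Body buffer directly, reusing the question's s3 client and bucketName:

router.get("/mediaitem/:key", async (req, res, next) => {
  try {
    const data = await s3.getObject({ Bucket: bucketName, Key: req.params.key }).promise();
    // encode the raw buffer directly; no intermediate utf-8 string
    const base64 = data.Body.toString("base64");
    res.status(200).json({ image: `data:${data.ContentType};base64,${base64}` });
  } catch (err) {
    next(err);
  }
});

The client can then use the returned data URI directly as an img src, without createObjectURL.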
Maybe it will be useful for you; this is how I get an image from S3 and process it on the server.
Create a temporary directory:
createTmpDir(): Promise<string> {
  return mkdtemp(path.join(os.tmpdir(), 'tmp-'));
}
Get the file:
readStream(path: string) {
  return this.s3
    .getObject({
      Bucket: this.awsConfig.bucketName,
      Key: path,
    })
    .createReadStream();
}
How I process the file:
async MainMethod(fileName) {
  const dir = await this.createTmpDir();
  const serverPath = path.join(
    dir,
    fileName
  );
  await pipeline(
    this.readStream(fileName), // was "attachent.key" in the original; the method's parameter is used here
    fs.createWriteStream(serverPath + '.jpg')
  );
  const createFile = await sharp(serverPath + '.jpg')
    .jpeg()
    .resize({
      width: 640,
      fit: sharp.fit.inside,
    })
    .toFile(serverPath + '.jpeg');
  const imageBuffer = fs.readFileSync(serverPath + '.jpeg');
  // my manipulations
  fs.rmSync(dir, { recursive: true, force: true }); // delete the temporary folder
}
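As an alternative to the temp-directory pipeline above, attempt 1 from the question (piping the S3 stream straight to the response) can also work if the response declares a content type; a minimal sketch, assuming the type is known or stored alongside the key:

router.get("/mediaitem/:key", (req, res) => {
  res.setHeader("Content-Type", "image/jpeg"); // assumption: every object is a JPEG
  s3.getObject({ Bucket: bucketName, Key: req.params.key })
    .createReadStream()
    .on("error", (err) => res.status(500).end(err.message))
    .pipe(res);
});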

POSTing from Angular to Node, getting an empty body on Node

I'm posting two fields from Angular to a Node.js endpoint.
I usually post on the body and everything arrives fine at Node, but this time I have to post a form to upload a file.
So this is my code for posting the form data (Angular side):
var APIURL = sessionStorage.getItem('endPoint') + "profile/updateeocoverage";
let formData = new FormData();
formData.append("data", JSON.stringify(this.payLoad));
if (this.eofile) {
  formData.append("myfile", this.eofile, this.eofile.name);
}
this.httpClient.post(APIURL, formData).subscribe(
  result => {
    ....
Until now I have always retrieved the body at Node as follows:
router.post('/updateeocoverage', async (req, res, next) => {
  console.log(req.body)
  return;
  ....
But with the method I'm using now in Angular, req.body comes back as {}.
Is the POST wrong, or is the router wrong on the Node side?
Thanks.
UPDATE ON PABLO'S ANSWER BELOW (SOLUTION PROVIDED), for whoever runs into this issue:
Using Multer solved the problem, but as he said, some work is needed to set the file name. Most importantly, the user must be authenticated, so:
const multer = require('multer')
const path = require('path')
To authenticate the user, I send the authentication parameters in the header; sending them with formData.append didn't work for me. The filter below returns true or false to accept or reject the upload, since otherwise anyone could upload anything to the route:
async function authenticateUser(req, file, cb) {
  let tempcred = JSON.parse(req.headers.data);
  let credentials = tempcred.credentials;
  let userData = await utils.isValidUser((credentials), false);
  if (userData.isValid == false) {
    cb(null, false);
    return;
  }
  else {
    cb(null, true);
  }
}
Then, since Multer saves the file under a random name and I need to save it under the user's ID plus the file extension, I do the following:
var storage = multer.diskStorage({
  destination: function (req, file, cb) {
    cb(null, 'uploads/eofiles/')
  },
  filename: async function (req, file, cb) {
    let tempcred = JSON.parse(req.headers.data);
    let credentials = tempcred.credentials;
    let userid = await utils.decrypt(credentials.userid, process.env.SECRET);
    cb(null, userid + path.extname(file.originalname))
  }
});
Finally, I declare the upload variable to use with Multer:
var upload = multer({ storage: storage, fileFilter: authenticateUser })
And set up the route:
router.post('/updateeofile', upload.single("myfile"), async (req, res, next) => {
  let filename = req.file.filename // gets the file name
  ...
  ...
  do my stuff, save on database, etc.
  ...
  ...
});
For the record, "myfile" is the id of the file input.
And this is how I upload the file from Angular:
var APIURL = sessionStorage.getItem('endPoint') + "eoset/updateeofile";
const httpOptions = {
  headers: new HttpHeaders({
    'data': `${JSON.stringify(this.payLoad)}`
  })
};
let formData = new FormData();
if (this.eofile) {
  formData.append("myfile", this.eofile, this.eofile.name);
}
this.httpClient.post(APIURL, formData, httpOptions).subscribe(
  result => {
    ...
    ...
    ...
  },
  error => {
  });
I spent 6 hours on this today. I hope this helps you and saves you some time.
Try using Multer
npm i multer
const multer = require('multer')
const upload = multer({ dest: 'uploads/' })
router.post('/updateeocoverage', upload.single('myfile'), function (req, res, next) {
  res.json(JSON.parse(req.body.data))
})
You will need to indicate the destination folder (the dest option above) and work out the file name, extension, etc.
The first thing to do is to make sure the body isn't empty before making the request, so add a console.log right before it:

console.log(formData);
this.httpClient.post(APIURL, formData).subscribe(
  result => {
    ....
After that, if the body is filled correctly, try passing the body to the request method like this:

this.httpClient.post(APIURL, {formData})

or

this.httpClient.post(APIURL, {
  "field1": formData.field1,
  ...
})

and if these two things don't fix your issue, probably something is wrong on the back-end side.
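One caveat with the console.log(formData) suggestion above: most browsers print a FormData object as if it were empty, because its entries are not plain properties. To see what will actually be sent, iterate the entries:

for (const pair of formData.entries()) {
  console.log(pair[0], pair[1]); // field name, then value (string or File)
}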

How to make formidable not save to /var/folders in a Node.js and Express app

I'm using formidable to parse incoming files and store them on AWS S3.
While debugging the code I found out that formidable first saves files to disk under /var/folders/, and over time unnecessary files stack up on disk, which could lead to a big problem later.
It was very silly of me to use code without fully understanding it, and now I have to figure out how to either remove the parsed file after saving it to S3, or save it to S3 without storing it on disk at all.
But the question is: how do I do that?
I would appreciate it if someone could point me in the right direction.
This is how I handle the files:
import formidable, { Files, Fields } from 'formidable';

const form = new formidable.IncomingForm();
form.parse(req, async (err: any, fields: Fields, files: Files) => {
  let uploadUrl = await util
    .uploadToS3({
      file: files.uploadFile,
      pathName: 'myPathName/inS3',
      fileKeyName: 'file',
    })
    .catch((err) => console.log('S3 error =>', err));
});
This is how I solved the problem:
When I parse the incoming multipart form data, I have access to all the details of the files, because the form has already been parsed and saved to the local disk of the server/my computer. So, using the path formidable gives me, I unlink/remove that file with Node's built-in fs.unlink function. Of course, I remove the file only after saving it to AWS S3.
This is the code:
import fs from 'fs';
import formidable, { Files, Fields } from 'formidable';

const form = new formidable.IncomingForm();
form.multiples = true;

form.parse(req, async (err: any, fields: Fields, files: Files) => {
  const pathArray = [];
  try {
    const s3Url = await util.uploadToS3(files);
    // do something with the s3Url
    pathArray.push(files.uploadFileName.path);
  } catch (error) {
    console.log(error)
  } finally {
    pathArray.forEach((element: string) => {
      fs.unlink(element, (err: any) => {
        if (err) console.error('error:', err);
      });
    });
  }
})
I also found a solution, which you can take a look at here, but due to the architecture I found it slightly hard to implement without changing my original code (or let's just say I didn't fully understand the given implementation).
I think I found it. According to the docs (see options.fileWriteStreamHandler): "you need to have a function that will return an instance of a Writable stream that will receive the uploaded file data. With this option, you can have any custom behavior regarding where the uploaded file data will be streamed for. If you are looking to write the file uploaded in other types of cloud storages (AWS S3, Azure blob storage, Google cloud storage) or private file storage, this is the option you're looking for. When this option is defined the default behavior of writing the file in the host machine file system is lost."
const form = formidable({
  fileWriteStreamHandler: someFunction,
});
EDIT: My whole code
import formidable from "formidable";
import { Writable } from "stream";
import { Buffer } from "buffer";
import { v4 as uuidv4 } from "uuid";

export const config = {
  api: {
    bodyParser: false,
  },
};

const formidableConfig = {
  keepExtensions: true,
  maxFileSize: 10_000_000,
  maxFieldsSize: 10_000_000,
  maxFields: 2,
  allowEmptyFiles: false,
  multiples: false,
};

// promisify formidable
function formidablePromise(req, opts) {
  return new Promise((accept, reject) => {
    const form = formidable(opts);
    form.parse(req, (err, fields, files) => {
      if (err) {
        return reject(err);
      }
      return accept({ fields, files });
    });
  });
}

const fileConsumer = (acc) => {
  const writable = new Writable({
    write: (chunk, _enc, next) => {
      acc.push(chunk);
      next();
    },
  });
  return writable;
};

// inside the handler
export default async function handler(req, res) {
  const token = uuidv4();
  try {
    const chunks = [];
    const { fields, files } = await formidablePromise(req, {
      ...formidableConfig,
      // consume this, otherwise formidable tries to save the file to disk
      fileWriteStreamHandler: () => fileConsumer(chunks),
    });
    // do something with the files
    const contents = Buffer.concat(chunks);
    const bucketRef = storage.bucket("your bucket");
    const file = bucketRef.file(files.mediaFile.originalFilename);
    await file
      .save(contents, {
        public: true,
        metadata: {
          contentType: files.mediaFile.mimetype,
          metadata: { firebaseStorageDownloadTokens: token },
        },
      })
      .then(() => {
        file.getMetadata().then((data) => {
          const fileName = data[0].name;
          const media_path = `https://firebasestorage.googleapis.com/v0/b/${bucketRef?.id}/o/${fileName}?alt=media&token=${token}`;
          console.log("File link", media_path);
        });
      });
  } catch (e) {
    // handle errors
    console.log("ERR PREJ ...", e);
  }
}

Multer and Node/Express file upload not working with Angular

I'm trying to do a single-file upload using multer. Although I'm able to upload a file to the relevant folder manually using tools like Postman to test the Express route, I cannot upload it from the Angular frontend. I created a folder called uploads in the Node backend where the files are supposed to be stored. I also need to upload the file within a form and pass it to an API that takes the file along with other parameters. Unfortunately, the request returns status 500 with Internal Server Error in the browser console, while my Node terminal reports Cannot read property 'path' of undefined.
My Node backend code, which works fine, is below:
const multer = require('multer')

const storage = multer.diskStorage({
  destination: function (req, file, cb) {
    cb(null, './uploads/')
  },
  filename: function (req, file, cb) {
    cb(null, Date.now() + file.originalname)
  }
})

const upload = multer({ storage: storage })

let baseUrl = appConfig.apiVersion + '/blogs';

app.post(baseUrl + '/create', upload.single('imagePath'), (req, res) => {
  var today = time.now()
  let blogId = shortid.generate()
  let newBlog = new BlogModel({
    blogId: blogId,
    title: req.body.title,
    description: req.body.description,
    bodyHtml: req.body.blogBody,
    isPublished: true,
    category: req.body.category,
    author: req.body.fullName,
    created: today,
    lastModified: today,
    imagePath: req.file.path // the Node console is pointing at this line
  }) // end new blog model
  let tags = (req.body.tags != undefined && req.body.tags != null && req.body.tags != '') ? req.body.tags.split(',') : []
  newBlog.tags = tags
  newBlog.save((err, result) => {
    if (err) {
      console.log(err)
      res.send(err)
    } else {
      res.send(result)
    }
  }) // end new blog save
});
Below is my Angular component code, which is not working:
selectImage(event) {
  if (event.target.files.length > 0) {
    const file = event.target.files[0];
    this.images = file;
  }
}

public createBlog(): any {
  const formData = new FormData();
  const form = formData.append('imagePath', this.images);
  let blogData = {
    title: this.blogTitle,
    description: this.blogDescription,
    blogBody: this.blogBodyHtml,
    category: this.blogCategory,
    imagePath: form
  } // end blogData
  console.log(blogData);
  this.blogHttpService.createBlog(blogData).subscribe(
    data => {
      console.log(data);
      this.toastr.successToastr('Blog Posted Successfully!', 'Success!');
      setTimeout(() => {
        this.router.navigate(['blog', data.blogId]);
      }, 1000)
    },
    error => {
      console.log(error);
      console.log(error.errorMessage);
      this.toastr.errorToastr('This is not good!', 'Oops!');
    })
}
Angular Service code
public createBlog(blogData): any {
  let myResponse = this._http.post(this.baseUrl + '/create', blogData);
  return myResponse;
}
Frontend HTML Code:
<div>
  <input type="file" name="imagePath" (change)="selectImage($event)" />
</div>
It seems like you created a FormData object, but you are not actually doing anything with it. As you can see here, you are building up a plain object and sending it along with your request, but it does not include your FormData:
let blogData = {
  title: this.blogTitle,
  description: this.blogDescription,
  blogBody: this.blogBodyHtml,
  category: this.blogCategory,
  imagePath: this.imagePath
} // end blogData
console.log(blogData);
this.blogHttpService.createBlog(blogData).subscribe(
changeHandler(e) {
  const fd = new FormData();
  fd.append('sample', e.target.files[0]);
  axios.post('/save-image', fd).then(i => {
    this.setState({ img: i.data.filename });
  });
}
As you can see, the FormData is what I am actually sending to the server.
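Applied to the question's component, a hedged sketch of the same idea: append each blog field to the FormData (note that formData.append() returns undefined, so the original const form = formData.append(...) could never carry the file) and post the FormData object itself, letting HttpClient set the multipart headers:

public createBlog(): any {
  const formData = new FormData();
  formData.append('imagePath', this.images);
  formData.append('title', this.blogTitle);
  formData.append('description', this.blogDescription);
  formData.append('blogBody', this.blogBodyHtml);
  formData.append('category', this.blogCategory);
  this.blogHttpService.createBlog(formData).subscribe(
    data => console.log(data),
    error => console.log(error)
  );
}

On the server, the non-file fields then arrive in req.body and the file in req.file, matching the multer setup in the backend code above.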
