Send file back after GET request in Node.js

I am trying to build a REST API using the MEAN stack and have run into a problem. I save a .txt file sent in a POST request to the server, storing it with multer in an /uploads folder, and then save the req.file information (path included) in a MongoDB collection.
What I want now is to handle a GET request for a specific file by its ObjectId: look up the document, read the file from the stored path, and send the file to the user making the request.
Right now I am only returning the document matching the ObjectId, not the file. How can I send the whole .txt file back to the user?
exports.findById = function(req, res) {
  try {
    var ObjectID = require('mongodb').ObjectID;
    var id = new ObjectID(req.params.id);
    console.log('Retrieving log: ' + id);
    db.collection('logs', function(err, collection) {
      if (err) {
        console.log(err);
      } else {
        collection.findOne({'_id': id}, function(err, item) {
          if (err) {
            console.log('Error finding log: ' + err);
            res.send({'error': 'An error has occurred'});
          } else {
            console.log('' + item + ' found log');
            console.log(item.path);
            var file = __dirname + item.path;
            res.download(file);
            //res.send(item);
          }
        });
      }
    });
  } catch (e) {
    console.log('Id passed not correct');
    res.send({'error': 'Id passed not correct'});
  }
};

In the end I got the server to respond to the GET request. I had to build the full path of the file from the filename that had been saved in the database:
// requires: var path = require('path'); at the top of the module
collection.findOne({'_id': id}, function(err, item) {
  if (err) {
    console.log('Error finding log: ' + err);
    res.send({'error': 'An error has occurred'});
  } else if (item) {
    // Build the full path of the wanted file
    var filepath = path.join(__dirname, "../uploads", path.normalize(item.filename));
    // Send the file using the joined file path
    res.sendFile(filepath);
  } else {
    console.log("Could not find entry");
    res.send({'error': 'No match found'});
  }
});
This enabled me to send the file back by getting the full path of the file.
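For what it's worth, res.download from the first attempt also works once the path is built this way; the difference is that it additionally sets a Content-Disposition: attachment header, so the browser offers a save dialog instead of rendering the text inline:
res.download(filepath, item.filename); // the optional second argument suggests a download filename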

Related

How to respond without reloading my current page?

I have an Express API that uploads a file to the files directory. Whenever I call the API, res.send redirects to a new page. How can I call this API without reloading my current page?
app.post('/upload', function(req, res) {
  let sampleFile;
  let uploadPath;
  if (Object.keys(req.files).length == 0) {
    res.status(400).send('No files were uploaded.');
    return;
  }
  console.log('req.files >>>', req.files); // eslint-disable-line
  sampleFile = req.files.sampleFile;
  console.log('lusu', sampleFile); // eslint-disable-line
  uploadPath = __dirname + '/uploads/' + sampleFile.name;
  sampleFile.mv(uploadPath, function(err) {
    if (err) {
      return res.status(500).send(err);
    }
    res.send('File uploaded to ' + uploadPath);
  });
});
First of all, I would recommend the multer package for file uploads in Node.js.
Instead of res.send(), try res.status(200).json({message: "successfully uploaded"}).
Also try debugging on the front end. The reload usually comes from the browser, not from the server response: a plain HTML form submission navigates to the form's action URL. Trigger the upload from JavaScript instead and handle the response in the callback, for example:
function fileUpload() {
  http.post('url', {headers: headers}).then(function(res) {
    // Handle the response here. Do not write anything that reloads the page.
  });
}
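To make the front-end side concrete, here is a minimal sketch using the browser's fetch API and FormData. The form id uploadForm is a hypothetical name; the sampleFile field name matches the server code above. Calling e.preventDefault() stops the browser's default form submission, so the page never reloads:
document.getElementById('uploadForm').addEventListener('submit', function(e) {
  e.preventDefault(); // stop the default submission, which would navigate away
  var formData = new FormData();
  formData.append('sampleFile', document.querySelector('input[name="sampleFile"]').files[0]);
  fetch('/upload', { method: 'POST', body: formData })
    .then(function(res) { return res.text(); })
    .then(function(msg) {
      // update the current page instead of navigating away
      console.log('Upload finished:', msg);
    })
    .catch(function(err) { console.error('Upload failed:', err); });
});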

How can I catch ANY error from formidable.js in node?

I want to catch ALL errors formidable may throw on file upload. So far I have failed on the most basic one: the destination folder does not exist. Note that this error is generated for testing purposes only, by pointing the save path at a non-existing directory; the code works fine without it.
Here is a sample of the overkill/spammy try/catch I have tried:
router.post('*', (req, res) => {
  try {
    // path is formed based on the type of file to be uploaded.
    // each file type is sent to a different path. Ex.: logo: upload/logo
    const folder = path.join(__dirname, '../../public-NOT-EXISTING/media' + req.url);
    // check if folder exists, if not, create it
    // if (!fs.existsSync(folder)) {
    //   fs.mkdirSync(folder);
    //   console.log(util.yellow, 'Folder was created', util.Reset);
    // }
    // console.log(util.green, 'Uploading to folder:', folder, util.Reset);
    const form = new formidable.IncomingForm();
    form.keepExtensions = true;
    form.uploadDir = folder;
    form.maxFieldsSize = 20 * 1024 * 1024;
    // Emitted whenever a new file is detected in the upload stream. Use this event if you
    // want to stream the file somewhere else while buffering the upload on the file system.
    /* this is where the renaming happens */
    form.on('fileBegin', function(name, file) {
      // rename the incoming file to the file's name
      file.path = form.uploadDir + file.name;
    });
    // Emitted whenever a field / file pair has been received. file is an instance of File.
    form.on('file', function(name, file) {
      console.log(util.magenta, 'Uploaded file name:', name, '(current name:', file.name, "')", util.Reset);
      res.status(200).send({message: 'File Uploaded'});
    });
    // Emitted when there is an error processing the incoming form. A request that experiences
    // an error is automatically paused; you will have to manually call request.resume()
    // if you want the request to continue firing 'data' events.
    form.on('error', function(err) {
      console.error('Something went wrong in uploading file:', err);
      res.status(500).send({message: err});
    });
    function errorHandle(err) {
      console.error('Got the error in function cb', err);
    }
    form.parse(req, (errorHandle, fields, files) => {
      if (errorHandle)
        console.error('Got the error as CB argument', errorHandle);
      try {
        console.log('\n parsing uploaded file -----------');
        console.log('Fields', fields);
        console.log('Received:', Object.keys(files));
        console.log();
      } catch (e) {
        console.error('Got the error in "parse" function', e);
      }
    });
  } catch (e) {
    console.error('Got the error in general try/catch', e);
  }
});
However, nothing is catching the error and the server crashes:
Error: ENOENT: no such file or directory, open 'D:.... my path...'
Emitted 'error' event at:
at fs.open (internal/fs/streams.js:279:12)
at FSReqCallback.args [as oncomplete] (fs.js:145:20)
[nodemon] app crashed - waiting for file changes before starting...
Try this: register process-level handlers; they fire for any error that nothing else in the app catches.
// Error handlers of last resort
process.on('unhandledRejection', function(err) {
  // every unhandled promise rejection ends up here
  console.error('Unhandled rejection', err);
});
process.on('uncaughtException', function(err) {
  // every uncaught exception ends up here, including 'error' events with no listener
  // (log err directly; JSON.stringify(err) prints {} for Error objects)
  console.error('Uncaught exception', err);
});
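That said, process-level handlers are a blunt instrument. In this particular case the crash can be avoided at the source: the ENOENT comes from formidable writing into a directory that does not exist, so making sure the upload directory exists before parsing (as the commented-out check in the question already sketches) prevents the 'error' event entirely. A minimal sketch, assuming the same folder variable:
const fs = require('fs');
// create the whole directory chain if any part of it is missing
// (recursive: true requires Node 10.12+ and is a no-op when the folder already exists)
if (!fs.existsSync(folder)) {
  fs.mkdirSync(folder, { recursive: true });
}
form.uploadDir = folder; // formidable can now safely write here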

MEAN client-server connection timeout for downloading PDF

I am using the MEAN stack for our project, generating PDFs with pdfmake.
The scenario: at first the images came from the local server, so everything worked fine. For scalability we had to move the images to AWS and the data to another server.
The process is now: first download the images from AWS, then turn them into a buffer, convert that to base64, and hand it to pdfmake. This is where the client-server connection becomes a problem: the client makes an HTTP request for the PDF, the server processes it, but downloading the images from AWS takes so long that the client connection breaks in the meantime. The server finishes processing and sends the response back, but no one is there to listen.
/** client **/
function logicTriplogs() {
  $rootScope.isLoading = true;
  AssignsService.logicTriplogs({
    driverId: vm.driver
  }, {
    _ids: vm.selectedTrips,
    scheduleTime: vm.scheduleTime,
    companyImage: vm.companyImage
  },
  function(response) {
    console.log(response);
    $rootScope.isLoading = false;
    var Name = response.pdfName;
    var data = response.content;
    SaveFile.downloadURI(Name, data);
    console.log('PDF Name:', Name);
  },
  function(err) {
    console.log(err);
    vm.error = err.data.message;
    $rootScope.isLoading = false;
  });
}
/** Server **/
getAssignedTripQuery(query, type)
  .exec(function(err, assigns) {
    if (err) {
      console.log('Manifest');
      return res.status(400).send({
        message: errorHandler.getErrorMessage(err)
      });
    }
    if (assigns.length) {
      logicMan(req.body, user, driver, assigns, function(docDefinition) {
        var pdfName = `${moment_tz.tz(startDay, req.user.timeZone).format('MM-DD-YYYY')}.pdf`;
        config.pdfBuffer(docDefinition, function(err, pdfDoc) {
          console.log('ERROR PDF: ', err);
          if (err) {
            console.log('pdfmake function call error');
            return res.status(400).send({
              message: 'Error while generating pdf'
            });
          }
          console.log('PDF Name:', pdfName);
          return res.json({
            message: 'Generated Successfully',
            pdfName: pdfName,
            content: pdfDoc
          });
        });
      });
    } else {
      return res.status(400).send({
        message: 'Something went wrong. Please try later.'
      });
    }
  });
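One way to keep the connection alive while the server fetches images from AWS is to raise the timeout for this particular request (and, if the client library enforces its own timeout, raise it there as well). A minimal sketch, assuming an Express route; the route path is hypothetical and the ten-minute figure is an arbitrary example:
router.post('/logicTriplogs', function(req, res, next) {
  // give this long-running request more time before Node closes the socket
  // (passing 0 disables the timeout entirely)
  req.setTimeout(10 * 60 * 1000);
  next();
});
A more robust pattern for slow PDF generation is to respond immediately with a job id and let the client poll for the finished document, but the timeout bump is the smallest change to the code above.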

Downloading file in node using GridFS in production

I have an Express app which works when I run it locally. The issue is downloading a file that was saved in MongoDB using GridFS. When running it locally (I just do ./bin/www and go to localhost:3000), I can download the file, but when I run it remotely I get an HTML file instead.
This is the route which handles the response:
router.get('/getfile', function(req, res) {
  if (req.isAuthenticated()) {
    var gfs = Grid(mongoose.connection, mongoose.mongo);
    var id = req.query.id;
    gfs.exist({_id: id}, function(err, found) {
      if (err) return handleError(err);
      if (!found)
        res.send('Error on the database looking for the file.');
    });
    var readStream = gfs.createReadStream({
      _id: id
    }).pipe(res);
  } else {
    res.redirect('/login');
  }
});
and that is called by this line in a jade file:
td #[a(href="getfile?id=#{log.videoId}" download="video") #[span(name='video').glyphicon.glyphicon-download]]
On the server, I'm doing:
/logApp$ export NODE_ENV=production
/logApp$ ./bin/www
The MongoDB daemon is running; in fact, I can query the database. And I'm not writing any file! I want to read it.
EDIT: I found the error message:
MongoError: file with id #### not opened for writing
You need to move the code that pipes the file to the response into the gfs.exist callback so that it runs after the exist check.
gfs.exist({ _id: id }, function(err, found) {
  if (err) {
    handleError(err);
    return;
  }
  if (!found) {
    res.send('Error on the database looking for the file.');
    return;
  }
  // We only get here if the file actually exists, so pipe it to the response
  gfs.createReadStream({ _id: id }).pipe(res);
});
Apparently you get that generic "not opened for writing" error if the file doesn't exist.
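If the browser still mislabels the response, it can also help to send explicit headers before piping. With the gridfs-stream API used above, the stored metadata can be looked up with findOne; treat the exact field names (contentType, filename) as an assumption to verify against your driver version:
gfs.findOne({ _id: id }, function(err, file) {
  if (err || !file) {
    return res.send('Error on the database looking for the file.');
  }
  // tell the browser what it is receiving and suggest a filename
  res.set('Content-Type', file.contentType);
  res.set('Content-Disposition', 'attachment; filename="' + file.filename + '"');
  gfs.createReadStream({ _id: file._id }).pipe(res);
});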

Multiple clients cannot upload files at the same time in node.js

I am using express.js and Uploadify to upload large files to a Node server. Everything works fine until more than one user logs in and tries to upload at the same time: it still works, but the server seems to handle only one upload at a time, so each user has to wait for the others to finish, which is unacceptable.
Here is the server-side code:
exports.upload = function(req, res, next) {
  // console.log(req.body);
  // console.log(req.files);
  var tmp_path = req.files.product_video.path;
  var target_path = 'F:/shopping/shop/' + req.body.shop_id + '/' + req.files.product_video.name;
  fs.rename(tmp_path, target_path, function(err) {
    if (err) {
      console.log(err);
    } else {
      fs.unlink(tmp_path, function() {
        if (err) {
          console.log(err);
        } else {
          exec("C:/ffmpeg/bin/ffmpeg -i shop/" + req.body.shop_id + '/' + req.files.product_video.name + " -ss 00:01:00.00 -r 1 -an -vframes 1 -s 250x150 -f mjpeg shop/" + req.body.shop_id + '/' + req.files.product_video.name + "_thumbnail.jpg", function(err) {
            var data = {
              'thum_src': 'shop/' + req.body.shop_id + '/' + req.files.product_video.name + "_thumbnail.jpg",
              'video_name': req.files.product_video.name,
            };
            res.send(data);
          });
        }
      });
    }
  });
};
Here is the front-end code:
$('#input_product_video').uploadify({
  'formData': {'shop_id': $('#shop_id').val()},
  'buttonText': 'add',
  'fileSizeLimit': '100MB',
  'fileObjName': 'product_video',
  'uploader': '/uploads',
  'swf': '/public/javascripts/lib/uploadify/uploadify.swf',
  'onUploadSuccess': function(file, data) {
    console.log(file);
    console.log(JSON.parse(data));
    console.log(response);
  }
});
You shouldn't need the fs.unlink call at all: fs.rename moves the file to the target path rather than copying it, so if fs.rename succeeds, the temporary file is already gone. Remove the whole fs.unlink block, which doesn't check for an error anyway (its callback reuses the err from the outer rename). Then make sure that every possible path through the code either calls next(err) with an error or calls res.send; as written, there are code paths that never respond and just let the request time out. Make those changes and see if that gets it working; a simplified version is sketched below.
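A minimal sketch of what that cleanup could look like, keeping the original paths and ffmpeg command; the error-handling details are one reasonable reading of the advice above, not the only one:
exports.upload = function(req, res, next) {
  var name = req.files.product_video.name;
  var tmp_path = req.files.product_video.path;
  var target_path = 'F:/shopping/shop/' + req.body.shop_id + '/' + name;
  // rename moves the file, so no fs.unlink is needed afterwards
  fs.rename(tmp_path, target_path, function(err) {
    if (err) return next(err); // always respond or delegate; never fall through silently
    var video = 'shop/' + req.body.shop_id + '/' + name;
    exec('C:/ffmpeg/bin/ffmpeg -i ' + video +
         ' -ss 00:01:00.00 -r 1 -an -vframes 1 -s 250x150 -f mjpeg ' +
         video + '_thumbnail.jpg', function(err) {
      if (err) return next(err); // thumbnail generation failed
      res.send({
        thum_src: video + '_thumbnail.jpg',
        video_name: name
      });
    });
  });
};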
