I have created a function to upload an image to a custom directory, following https://sailsjs.com/documentation/concepts/file-uploads.
code:
// Store the incoming 'avatar' file under the project's assets/images dir.
const uploadOptions = {
  dirname: require('path').resolve(sails.config.appPath, 'assets/images')
};

req.file('avatar').upload(uploadOptions, (err, uploadedFiles) => {
  if (err) return res.negotiate(err);
  return res.json({
    message: uploadedFiles.length + ' file(s) uploaded successfully!'
  });
});
How can I upload the same file into 2 different paths?
the code I have tried:
// Upload the file ONCE, then copy it to the second directory.
// A Sails/Skipper upload stream can only be consumed once: calling
// req.file('file').upload() a second time re-uses the already-drained
// stream, which is why both "uploads" reported the same path.
var path = require('path');
var fs = require('fs');

var imgPathArr = []; // was declared but never initialised, so .push() threw
var dePathWeb = 'd:/images/web';
var dePath = 'd:/images/mobile';
imgPathArr.push(dePathWeb);
imgPathArr.push(dePath);

req.file('file').upload({
  dirname: path.resolve(imgPathArr[0])
}, function (err, uploadedFiles) {
  if (err) return res.send(500, err);
  console.log("uploadedFiles web " + uploadedFiles[0].fd);

  // Copy the stored file into the second (mobile) directory.
  var src = uploadedFiles[0].fd;
  var dest = path.join(imgPathArr[1], path.basename(src));
  fs.copyFile(src, dest, function (err) {
    if (err) return res.send(500, err);
    console.log("files mobile " + dest);
    // Respond only after BOTH files exist (the old code answered "ok"
    // before the second write had even started).
    res.send("ok");
  });
});
But every time, the images are saved into the same path.
console:
uploadedFiles web c:\xampp\htdocs\meServer\images\secondNavi\000a073e-cd6c-4758-ab44-54d32ddfc20a.png
.
files mobile c:\xampp\htdocs\meServer\images\secondNavi\000a073e-cd6c-4758-ab44-54d32ddfc20a.png
Why does it always take the same path? The image is only ever uploaded into the web directory, even though I'm defining the paths separately using array indexes.
Just use `fs` from the Node docs to copy the file.
Your code is really messy. Try to modularize it with a Controller + Service helper, e.g. an ImageController dealing with req and res and an ImageService dealing with everything else.
Related
I've just begun using Sails.JS and I tried to make a simple file upload. In Sails.JS, I've done this:
uploadFile: (req, res) => {
  // Receive the multipart 'avatar' file posted by the client.
  // NOTE: a relative dirname ('../../assets/uploads') is resolved against
  // skipper's temporary upload directory at runtime, not the project root,
  // so files silently land somewhere unexpected. Anchor the destination to
  // the app root instead.
  req.file('avatar').upload({
    dirname: require('path').resolve(sails.config.appPath, 'assets/uploads')
  }, function (err, uploadedFiles) {
    if (err) return res.serverError(err);
    return res.json({
      message: uploadedFiles.length + ' file(s) uploaded successfully!',
      files: uploadedFiles
    });
  });
}
For Postman, I've done this:
The problem is, after I upload a file via Postman, I don't get a file in req.file(). However, console.log(req) does show my uploaded file's binary (or something like that). I just can't figure out what happened.
I am trying to build a REST API using a MEAN stack and I have encountered a problem. I am saving a .txt file sent in a POST request to the server and saving it using multer in an /uploads folder. I am then saving the req.file information in a collection on mongodb (path included).
The problem that I have now is that I want to be able to handle a GET request for that specific file with the ObjectId. However I want to be able to get the file from the file path and then send it to the user making the GET request.
Right now I am only returning the information corresponding to the ObjectId passed, not the file. How can I send back the whole .txt file back to the user?
exports.findById = function(req, res) {
try
{
var id = new require('mongodb').ObjectID(req.params.id);
console.log('Retrieving log: ' + id);
db.collection('logs', function(err, collection) {
if(err)
{
console.log(err);
}
else
{
collection.findOne({'_id':id}, function(err, item) {
if (err) {
console.log('Error finding log: ' + err);
res.send({'error':'An error has occurred'});
} else {
console.log('' + item + ' found log');
console.log(item.path);
var file = __dirname + item.path;
res.download(file);
//res.send(item);
}
});
}
});
}
catch (e)
{
console.log('Id passed not correct');
res.send({'error':'Id passed not correct'});
}
};
At the end I finally got the server to respond to the GET request.
I had to find the file path of the file that had been saved into the database.
collection.findOne({'_id': id}, function(err, item) {
  if (err) {
    console.log('Error finding log: ' + err);
    // 'return' is required: without it execution fell through to the
    // item check and attempted a SECOND response ("headers already sent").
    return res.send({'error': 'An error has occurred'});
  }
  if (item) {
    // Build the full path of the wanted file. 'filepath' was an implicit
    // global before; declare it locally.
    var filepath = path.join(__dirname, "../uploads", path.normalize(item.filename));
    // Send the file itself (not just its metadata) back to the client.
    res.sendFile(filepath);
  } else {
    console.log("Could not find entry");
    res.send({'error': 'No match found'});
  }
});
This enabled me to send the file back by getting the full path of the file.
I want to get the content of my uploaded file. The uploaded file is just a text file, and I want to read it line by line.
req.file("model").upload(function (err, uploadedFiles) {
  if (err) return res.serverError(err);
  if (uploadedFiles.length === 0) {
    // Guard: uploadedFiles[0].fd below would throw on an empty upload.
    return res.badRequest('No file was uploaded');
  }
  // uploadedFiles[0] is only a file-descriptor object (metadata), not the
  // file's content — read the stored file at its .fd path to get the text.
  require('fs').readFile(uploadedFiles[0].fd, 'utf8', function (err, content) {
    if (err) return res.serverError(err);
    return res.json({
      message: uploadedFiles.length + ' file(s) uploaded successfully!',
      files: uploadedFiles,
      content: content // actual text of the uploaded file
    });
  });
});
You can get the file descriptor from uploadedFiles[0].fd
Use it to read / stream the file.
// uploadedFiles[0].fd is the path where skipper stored the file on disk.
fs.readFile(uploadedFiles[0].fd, 'utf8', function (err, data) {
  // Without this guard a read failure still answered 200 "Ok" with
  // data === undefined.
  if (err) return res.serverError(err);
  return res.json(200, {message: 'Ok', data: data});
})
First install Local filesystem streaming binary adapter for Sails.js / Waterline
npm install sails-local-fs
in your code create fs instance
var fs = require('fs');
And now use readFile() method to get content of your file
// files[0].fd is the on-disk path of the uploaded file.
fs.readFile(files[0].fd, 'utf8', function (err, data) {
  // Handle the read error; previously a failure still returned 200 "Ok"
  // with data === undefined.
  if (err) return res.serverError(err);
  return res.json(200, {message: 'Ok', data: data});
})
I'm allowing users to upload files and would like to know if it is possible to only allow the user who uploaded their files access to it.
Currently I am just uploading these to a static folder (Public).
Simply put a route to render the file (this will decrease performance)
Save the file as : filename.extension.userid in a non public folder for example a directory named upload.
Put a route to catch the request on the directory upload :
// Serve /upload/:filename only to its owner: the stored name is
// "<filename>.<userid>", so another user's id never matches the file.
app.get("/upload/:filename", function(req, res) {
  // Guard: req.user is undefined for unauthenticated requests and
  // reading .id would throw.
  if (!req.user) return res.sendStatus(401);
  var fname = req.params.filename;
  var userid = req.user.id; // RETRIEVE AUTHENTICATED USER ID
  var fullname = fname + "." + userid;
  // Read from the NON-public 'upload' directory — the old code read from
  // /public, which defeats the access control this route exists for.
  fs.readFile(__dirname + "/../upload/" + fullname, 'base64', function(err, data) {
    // res.status(404) alone only sets the code; .end() actually sends
    // the response (the old code left the request hanging).
    if (err || !data) return res.status(404).end();
    res.set('Content-Disposition', 'attachment; filename="' + fname + '"');
    res.set('Content-Type', 'application/pdf'); // example for pdf
    return res.end(data, 'base64');
  });
});
I couldn't find a reason why you were suggesting using another route to handle the upload. I was able to post to the same route. All that I needed to do was include the path and set it accordingly.
// Build the destination path with path.join throughout. The old
// 'var folder = newPath = path.join(...)' chained an assignment to
// newPath before its declaration and then concatenated with '/' by hand.
var folder = path.join(__dirname, '..', 'uploads');
var newPath = path.join(folder, callback.fileName);
fs.writeFile(newPath, data, function(err) {
  if (err) {
    console.log(err);
  } else {
    //Do stuff
  }
});
After the file was uploaded I was then able to complete my task.
I am using express.js and uploadify to upload large files to a node server. Everything works fine, except when more than one user logs in and tries to upload a file at the same time. It still works, but it seems the server can only process one upload at a time, so each user has to wait until the others finish uploading — this is unacceptable.
here is server side code
exports.upload = function(req, res,next){
// console.log( req.body);
// console.log(req.files);
var tmp_path = req.files.product_video.path;
var target_path = 'F:/shopping/shop/' +req.body.shop_id+'/'+ req.files.product_video.name;
fs.rename(tmp_path, target_path, function(err) {
if (err) {
console.log(err)
}
else{
fs.unlink(tmp_path, function() {
if (err){
console.log(err)
}else{
exec("C:/ffmpeg/bin/ffmpeg -i shop/"+ req.body.shop_id+ '/' + req.files.product_video.name + " -ss 00:01:00.00 -r 1 -an -vframes 1 -s 250x150 -f mjpeg shop/"+ req.body.shop_id+ '/' + req.files.product_video.name + "_thumbnail.jpg", function(err){
var data = {
'thum_src':'shop/'+ req.body.shop_id+ '/' + req.files.product_video.name + "_thumbnail.jpg",
'video_name':req.files.product_video.name,
}
res.send(data);
});
}
});
}
});
};
here is front end code
$('#input_product_video').uploadify({
  'formData': {'shop_id': $('#shop_id').val()},
  'buttonText': 'add',
  'fileSizeLimit': '100MB',
  'fileObjName': 'product_video',
  'uploader': '/uploads',
  'swf': '/public/javascripts/lib/uploadify/uploadify.swf',
  'onUploadSuccess': function(file, data) {
    console.log(file);
    // 'response' was undefined in this scope and threw a ReferenceError,
    // killing the success handler; log the parsed server payload instead.
    console.log(JSON.parse(data));
  }
});
You shouldn't need the fs.unlink call because fs.rename is going to move the file to the correct path, not copy it, so if fs.rename succeeds, the temporary file will already be gone. Remove the whole fs.unlink block, which doesn't check for an error anyway. Then you need to make sure in every possible path through the code, you are either calling next(err) with an error or calling res.send. It looks like there are code paths in here where you will not respond and will just let the request time out. Make those changes and see if that gets it working.