I'm receiving the following file in the server:
{"file":{"size":6818,"path":"/tmp/a451340156a9986cd9d208678bdc40cf","name":"test.pdf","type":"application/pdf","mtime":"2014-09-03T15:26:25.733Z"}}
I have file upload handling as follows:
var form = new formidable.IncomingForm();
form.parse(req, function(err, fields, files) {
    console.log(JSON.stringify(files));
    // `file` is the name of the <input> field of type `file`
    var old_path = files.file.path,
        file_size = files.file.size,
        file_ext = files.file.name.split('.').pop(),
        index = old_path.lastIndexOf('/') + 1,
        file_name = old_path.substr(index),
        new_path = path.join(process.env.PWD, '/uploads/', file_name + '.' + file_ext);

    fs.readFile(old_path, function(err, data) {
        fs.writeFile(new_path, data, function(err) {
            fs.unlink(old_path, function(err) {
                if (err) {
                    res.status(500);
                    res.json({'success': false});
                } else {
                    res.status(200);
                    res.json({'success': true});
                }
            });
        });
    });
});
This gives 200 OK, but the file is not uploaded to the desired directory, i.e. uploads/.
new_path comes back as /home/abc/myapp/uploads/0bc49fa19d15fb5bdf779c02d3cbc1d5.pdf,
however it should just be /uploads/test.pdf.
Is it the path or the filename that's causing the issue?
I'd start by simplifying the code by using the rename function. It would look something like this.
var newFilePath = path.join(process.env.PWD, 'uploads', files.file.name);
fs.rename(files.file.path, newFilePath, function(err) {
    if (err) {
        // handle error
    }
    res.json({success: 'true'});
});
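Put together inside the parse callback, with the error handled before responding, a minimal sketch might look like this (assuming the same uploads/ directory and the `file` input name from the question):

var form = new formidable.IncomingForm();
form.parse(req, function(err, fields, files) {
    if (err) {
        return res.status(400).json({'success': false});
    }
    // Keep the original client file name (e.g. test.pdf) instead of the temporary one
    var newFilePath = path.join(process.env.PWD, 'uploads', files.file.name);
    fs.rename(files.file.path, newFilePath, function(err) {
        if (err) {
            return res.status(500).json({'success': false});
        }
        res.json({'success': true});
    });
});

One caveat: fs.rename can fail with an EXDEV error if /tmp and the uploads directory live on different filesystems; in that case, copying and then unlinking (as in the original code) is the fallback.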
The code below works with small files, but it could not upload files that are more than 50 KB. I think there is something I should set with maxFileSize. I'm using Uppy on the client. After adding a console.log on the server, I can see the request does actually arrive. I couldn't find any clue on Stack Overflow, and I really need help.
const upload = (req, res) => {
    // formidable : to parse html form data
    const form = new formidable.IncomingForm({ multiples: true, maxFileSize: 10000 * 1024 * 1024 })
    const d = new Date();
    // I have console.log here and everything seems fine
    form.parse(req, (err, fields, files) => {
        console.log('err', err)       // returns nothing
        console.log('files', files)   // returns nothing
        console.log('fields', fields) // returns nothing
        if (err) {
            console.log("Error parsing the files");
            console.log(err);
            return res.status(400).json({
                message: "There was an error parsing the files",
                status: "Fail",
                error: err
            })
        }
        for (let file in files) {
            try {
                if (files[file]) {
                    let oldPath = files[file]['path']
                    let rawData = fs.readFileSync(oldPath)
                    const month = parseInt(d.getMonth() + 1) < 10 ? '0' + parseInt(d.getMonth() + 1) : parseInt(d.getMonth() + 1)
                    let today = `${d.getFullYear()}_${month}_${d.getDate()}`
                    let folderPath = __basedir + `\\media\\uploads\\storage\\${today}\\`;
                    // folderPath = ..\dashboard-v2.0\server\media\uploads\storage\2021_06_18\
                    if (!fs.existsSync(folderPath)) {
                        fs.mkdirSync(folderPath, {
                            recursive: true
                        });
                    }
                    // newPath = ..\dashboard-v2.0\server\media\uploads\storage\2021_06_18\WIN.jpg
                    let newPath = folderPath + files[file]['name']
                    let databasePath = `storage/${today}/${files[file]['name']}`;
                    let filename = files[file]['name'] // example_files.zip
                    if (fs.existsSync(newPath)) {
                        // if the file already exists then add Date.now()
                        let time = Date.now()
                        let filenameSplit = filename.split('.')
                        filename = filenameSplit[0] + '_' + time + '.' + filenameSplit[1]
                        // filename = WIN_1626750408096.jpg
                        newPath = folderPath + filename
                        databasePath = `storage/${today}/${filename}`;
                    }
                    fs.writeFile(newPath, rawData, async (err) => {
                        if (err) {
                            console.log(err);
                            return res.status(400).send({ "err": err })
                        }
                        const userToken = jwt.verify(fields.user, config.TOKEN_SECRET)
                        const newFiles = {
                            filename: filename,
                            user_id: ObjectId(userToken.id),
                            filepath: databasePath,
                            added_time: Date.now(),
                        }
                        const result = await db.collection("ate_files").insertOne(newFiles)
                        console.log(`Created with the following id: ${result.insertedId}`)
                        console.log(`Successfull upload ${newPath}`);
                    })
                }
            } catch (err) {
                console.log(`Error: ${err}`);
                return res.status(409).send({ "error": `${err}` })
            }
        }
    })
    return res.status(200).send({ "message": "Successfully uploadded the files" })
}
Your return res.status(200).send({ "message": "Successfully uploadded the files" }) runs too soon; it should be inside the callback.
This is especially problematic with large files: only the beginning of the big file has been received when the client already gets a response, which can logically cut the HTTP connection.
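One way to apply that is to count the pending writes and only send the success response once they have all finished. A rough sketch, assuming the same formidable setup; the function name uploadAndRespondOnce, the simplified destination path, and the omitted database insert are placeholders for the question's own logic:

const uploadAndRespondOnce = (req, res) => {
    const form = new formidable.IncomingForm({ multiples: true, maxFileSize: 10000 * 1024 * 1024 })
    form.parse(req, (err, fields, files) => {
        if (err) {
            return res.status(400).json({ message: "There was an error parsing the files", error: err })
        }
        const keys = Object.keys(files)
        let remaining = keys.length
        let failed = false
        if (remaining === 0) {
            return res.status(400).json({ message: "No files were received" })
        }
        keys.forEach((key) => {
            const oldPath = files[key]['path']
            // Simplified destination: reuse the dated folderPath and rename-on-collision logic from the question here
            const newPath = __basedir + '/media/uploads/storage/' + files[key]['name']
            fs.writeFile(newPath, fs.readFileSync(oldPath), (err) => {
                if (err) {
                    if (!failed) {
                        failed = true
                        res.status(400).send({ "err": err })
                    }
                    return
                }
                // ...the per-file database insert from the question would go here...
                remaining -= 1
                // Respond only after every file has been written
                if (remaining === 0 && !failed) {
                    res.status(200).send({ "message": "Successfully uploaded the files" })
                }
            })
        })
    })
}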
I'm creating a Discord bot and I need to store a list of users (and one YT link each) in JSON files. Then, when a user types %users in Discord, the bot returns all the users and their YT links. These are stored as dictionaries. It does this by finding how many JSON files are in a folder ("./JSON"), cycling through each file, and fetching the user IDs and links. However, when I read a file with fs.readFile and try to get the relevant element of the dictionary (var YT = data["Video"];), I get the error: Cannot read property 'Video' of undefined.
Here is the code that is erroring:
fs.readFile(file, (data) => {
    var YT = data["Video"];
    var FinalUserID = data["ID"];
    msg.channel.send("ID:" + FinalUserID + ", Link: " + YT);
});
and here is the main block, for more context:
if (msg.content.startsWith(prefix + "users"))
{
    // finds how many files are in the directory JSON.
    const fs = require('fs');
    const dir = './JSON';
    fs.readdir(dir, (err, files) => {
        var filesJSON = files.length;
        if (filesJSON == 0)
        {
            msg.channel.send("No users have been added.");
            console.log("User did %users and no users were printed. ");
        }
        else if (filesJSON > 0)
        {
            const path = require('path');
            // joining path of directory
            const DirPath = path.join(__dirname, 'JSON');
            // passing DirPath and callback function.
            fs.readdir(DirPath, function(err, files) {
                // Error catching/handling:
                if (err)
                {
                    console.log('Unable to scan directory: ' + err);
                    return msg.channel.send('Unable to scan directory: ' + err);
                }
                // listing all files using forEach
                files.forEach(function(file) {
                    // fetching the actual contents of each file:
                    fs.readFile(file, (data) => {
                        var YT = data["Video"];
                        var FinalUserID = data["ID"];
                        msg.channel.send("ID:" + FinalUserID + ", Link: " + YT);
                    });
                });
            });
            console.log("User did %users and the users in 'users' array was printed. ");
        }
    });
}
This is because of the readFile callback signature (see the docs). The first argument is the error and the second is the data:
fs.readFile(file, (err, data) => {
    if (err) throw err;
    var YT = data["Video"];
    var FinalUserID = data["ID"];
    msg.channel.send("ID:" + FinalUserID + ", Link: " + YT);
});
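Two follow-up points the question's setup will also run into: data here is the raw file contents (a Buffer or string, not an object), so it needs JSON.parse before indexing into it, and file is just a file name, so it should be joined with the directory being listed. A sketch under those assumptions, reusing DirPath and path from the question's own code:

fs.readFile(path.join(DirPath, file), 'utf8', (err, data) => {
    if (err) return console.log('Unable to read file: ' + err);
    var parsed = JSON.parse(data);   // each file stores a JSON dictionary
    var YT = parsed["Video"];
    var FinalUserID = parsed["ID"];
    msg.channel.send("ID:" + FinalUserID + ", Link: " + YT);
});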
I have an item called style which has two attributes: one holds raw CSS text and the other holds an S3File.
Style.add({
    ...
    css: { type: Types.Code, language: 'css' },
    cssFile: {
        type: Types.S3File,
        s3path: 'uploads/assets',
    },
    ...
});
I want to update the S3File with the contents of the css text.
function uploadCSStoAmazon(style) {
    // Store the css code in a temporary file (with a random name)
    var rndm = crypto.randomBytes(20).toString('hex'),
        file_path = '/tmp/css_temp_' + rndm + '.css';
    fs.writeFile(file_path, style.css, function(err) {
        if (err) {
            return console.log(err);
        }
        console.log("The file was saved!");
        // style.cssFile = new Types.S3File();
        // TODO upload file to amazon
        style.cssFile._.uploadFile(file_path, true, function(err, fileData) {
            // TODO erase css file
        });
    });
}
...
var aStyle = new Style.model({
    ...
    css: 'Some css string',
    ...
});
...
uploadCSStoAmazon(aStyle);
The cssFile attribute is undefined, I understand, but how could I create a new file and assign it to this attribute, and also upload the file?
I found out how: you can use the updateHandler that comes with Keystone. They're still using req.files from Express 3.x though.
// An Express-style file object generator
function writeToFile(fileName, txt, ext, callback) {
    var rndm = crypto.randomBytes(20).toString('hex'),
        file_path = '/tmp/css_temp_' + rndm + '.' + ext,
        the_file = {};
    fs.writeFile(file_path, txt, function(err) {
        if (err) {
            return callback(null, err);
        }
        var stats = fs.statSync(file_path);
        var fileSizeInBytes = stats["size"];
        the_file.path = file_path;
        the_file.name = fileName + '.' + ext;
        the_file.type = 'text/' + ext;
        the_file.size = fileSizeInBytes;
        console.log("The file was cached!");
        callback(the_file, err);
    });
}
...
/**
 * Update Style by ID
 */
exports.update = function(req, res) {
    var data = (req.method == 'POST') ? req.body : req.query;
    Style.model.findById(data._id).exec(function(err, item) {
        if (err) return res.apiError('database error', err);
        if (!item) return res.apiError('not found');
        writeToFile(item.slug, data.css, 'css', function(req_file, err) {
            if (err) return res.apiError('update error during file cache', err);
            req.files['cssFile_upload'] = req_file;
            item.getUpdateHandler(req).process(data, function(err) {
                if (err) return res.apiError('update error', err);
                res.apiResponse({
                    success: true
                });
            }); // end process
        }); // end writeToFile
    });
};
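To also address the "TODO erase css file" comment from the question, the cached /tmp file could be removed once the update handler succeeds. A small sketch, assuming req_file.path still points at the temporary file written by writeToFile:

item.getUpdateHandler(req).process(data, function(err) {
    if (err) return res.apiError('update error', err);
    // Remove the cached /tmp file now that it has been uploaded
    fs.unlink(req_file.path, function(unlinkErr) {
        if (unlinkErr) console.log('Could not remove temp file: ' + unlinkErr);
        res.apiResponse({ success: true });
    });
});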
For about two days I've been trying to upload images using Node.js and Express.js (4.0).
I tried several middlewares so far, like Formidable, Blueimp, Busboy, Multer...
With a few of these I correctly saved a single image to a temporary folder, but the problem comes when I try to upload multiple images.
So, my simple controller looks like:
exports.postAccountImages = function(req, res, next) {
    User.findById(req.user.id, function(err, user) {
        console.log(req.files);
    });
};
What I receive is always single objects like:
{
    files: {
        // data...
    }
}
{
    files: {
        // data...
    }
}
But they are not inside an array, so I cannot loop over all the incoming files with a for.
I need to rename the images and save them in a dynamic folder named after the user.id... but it seems to be too tricky.
I can do it one by one, but I would like to do it for multiple images at once.
Do you know a middleware, or how to correctly use one of the ones I already tried, to manage multiple files?
EDIT:
I used Dropzone on the client side.
Nothing special here, I followed the initial tutorial:
Jade:
#uploader.fileInput
  h3 Drop your images here!
Js:
var myDropzone = new Dropzone(document.body, {
    url: "/account/images", // Set the url
    autoQueue: true,
    paramName: "file",
    uploadMultiple: true,
    autoProcessQueue: true,
    clickable: ".fileInput"
});
Hope this solves your question; this is my method for uploading multiple files.
Node.js:
router.post('/upload', function(req, res) {
    var multiparty = require('multiparty');
    var form = new multiparty.Form();
    var fs = require('fs');
    form.parse(req, function(err, fields, files) {
        var imgArray = files.imatges;
        for (var i = 0; i < imgArray.length; i++) {
            var newPath = './public/uploads/' + fields.imgName + '/';
            var singleImg = imgArray[i];
            newPath += singleImg.originalFilename;
            readAndWriteFile(singleImg, newPath);
        }
        res.send("File uploaded to: " + newPath);
    });

    function readAndWriteFile(singleImg, newPath) {
        fs.readFile(singleImg.path, function(err, data) {
            fs.writeFile(newPath, data, function(err) {
                if (err) console.log('ERRRRRR!! :' + err);
                console.log('Fitxer: ' + singleImg.originalFilename + ' - ' + newPath);
            })
        })
    }
})
Make sure your form has enctype="multipart/form-data".
I hope this gives you a hand ;)
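A possible simplification of the same answer: since multiparty has already written each upload to a temporary file, fs.rename can move it into place instead of reading and rewriting the contents. A sketch of a drop-in replacement for readAndWriteFile, assuming the temporary upload directory and ./public/uploads are on the same filesystem:

function moveFile(singleImg, newPath) {
    // Move the temporary file instead of copying its contents
    fs.rename(singleImg.path, newPath, function(err) {
        if (err) console.log('ERRRRRR!! :' + err);
        else console.log('Fitxer: ' + singleImg.originalFilename + ' - ' + newPath);
    });
}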
Hope this solves your question.
How to upload multiple images using Node.js and MongoDB:
import formidable from 'formidable';
import multiparty from 'multiparty';
import _ from 'lodash'
import fs from 'fs'

async create(req, res) {
    let form = new multiparty.Form();
    form.keepExtensions = true;
    form.parse(req, (err, field, files) => {
        if (err) {
            return res.status(400).json({
                error: 'Image Could Not Be Uploaded'
            })
        }
        // Store multiple images in the database
        let product = new Product(field)
        var imgArray = files.photo;
        var photoarray = new Array();
        for (var i = 0; i < imgArray.length; i++) {
            if (imgArray[i].size >= 1000000) {
                return res.status(401).json({
                    error: 'Each image must be less than 1 MB'
                })
            }
            var newPath = './uploads/product/';
            var singleImg = imgArray[i];
            newPath += Date.now() + '_' + singleImg.originalFilename;
            readAndWriteFile(singleImg, newPath);
            photoarray.push(newPath)
        }
        product.photo = photoarray;
        // Comma-separated values stored into MongoDB
        var sizestr = field.size.toString()
        var text_arr = sizestr.split(',')
        var sizearray = new Array();
        for (var i = 0; i < text_arr.length; i++) {
            sizearray.push(text_arr[i])
        }
        product.size = sizearray;
        product.name = field.name.toString()
        product.save((err, result) => {
            console.log(err)
            if (err) {
                return res.status(400).json({
                    error: errorHandler(err)
                })
            }
            return res.json(result)
        })
    });

    function readAndWriteFile(singleImg, newPath) {
        fs.readFile(singleImg.path, function(err, data) {
            fs.writeFile(newPath, data, function(err) {
                if (err) console.log('ERRRRRR!! :' + err);
                console.log('Fitxer: ' + singleImg.originalFilename + ' - ' + newPath);
            })
        })
    }
}
Here is my code:
connection.query("SELECT * FROM images", function(err, rows, fields) {
if (err) {
console.log("Error: ");
console.log(err);
}
else {
console.log("rows: ");
console.log(rows);
for (var i = 0; i < rows.length; i++) {
var thisRow = rows[i];
var thisImageName = thisRow["imagename"];
var newDir = __dirname.split(path.sep).join("/");
var thisImagePath = newDir + "/public/uploads/" + thisImageName + ".jpg";
console.log(thisImagePath);
fs.exists(thisImagePath, function(exists) {
if (exists) {
console.log("true");
res.end();
}
else {
console.log(thisImageName + " does not exist."); res.end();
//Always returns false
}
});
}
}
});
HOWEVER, in the console:
fs.exists(/* imagepath */, function(exists){console.log(exists)}) //returns true
As you can see, I'm using the node-mysql library to return the file names of my images. I build each path from the __dirname global, converting the separators in the directory string to forward slashes, and pass it to the .exists() call. When I use the fs module to check whether the image file exists, it returns false.
HOWEVER, if I use fs.exists() with the same path in the console, it returns true. Does anyone know what's going on here?
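For reference, the same check can be written without converting separators by letting path.join build the path, and with fs.access instead of the deprecated fs.exists. A minimal sketch, assuming the public/uploads folder and the .jpg extension from the question:

var imagePath = path.join(__dirname, 'public', 'uploads', thisImageName + '.jpg');
fs.access(imagePath, function(err) {
    // err is null if the file exists and is accessible
    console.log(imagePath + (err ? ' does not exist.' : ' exists.'));
});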