I'm trying to get a system working for uploading documents from tablets, using multer and Express 4.
With 2 or 3 devices uploading simultaneously everything is okay, but when more devices are added the filenames go wrong: multer produces the same filename for all of the uploads (I'm watching console.log(storedFile + " " + idPatient);). Maybe I'm doing something wrong, but I can't figure out what exactly.
app.post("/api/Upload", jsonParser, function(req, res) {
var saveFilename = "",
savePath = "",
savePathForSql = "",
fileToSave = "";
var idDocument = 0,
idPatient = 0,
idDoctor = 0;
var uploadRequest = "";
var f = "queryLog.txt";
async.series([
function(callback) {
upload = multer({ storage: Storage }).single("imgUploader");
upload(req, res, function(err) {
if (err) {
console.log(err);
return res.end("Something went wrong!");
}
fileToSave = storedFile;
uploadRequest = req;
idDocument = JSON.parse(req.body.json)['id_doc_type'];
idPatient = JSON.parse(req.body.json)['id_patient'];
idLogin = JSON.parse(req.body.json)['id_login'];
mv("Images" + separator + fileToSave, idPatient + ".jpg", function(err) {
if (err) {
console.log(err);
} else {
return res.end("File uploaded sucessfully! ");
}
});
console.log(storedFile + " " + idPatient);
callback();
});
},
function(callback) {
var request = new sql.Request()
var q1 = "exec storedproc";
request.query(q1, (err, result) => {
if (err) return callback(err);
console.log(result.recordset[0]);
savePath = result.recordset[0]['path'];
savePathForSql = savePath;
if (os != 'linux') {
savePath = savePath.replaceAll("/", "\\");
}
if (!fs.existsSync(f)) {
fs.writeFileSync(f, q1 + "\r\n", 'utf-8');
} else {
fs.appendFileSync(f, q1 + "\r\n", 'utf-8');
}
saveFilename = result.recordset[0]['filename'];
console.log(savePath + "/" + saveFilename);
callback();
})
},
function(callback) {
mkdirp(basePath + savePath, function(err) {
mv("Images" + separator + idPatient + ".jpg", basePath + savePath + separator + saveFilename, function(err) {
if (err) {
console.log(err);
console.log("Move failed: Images/" + idPatient + ".jpg" + " to " + basePath + savePath + separator + saveFilename);
} else {
console.log('Move complete. Images/' + idPatient + ".jpg" + " to " + basePath + savePath + separator + saveFilename);
return res.end("File uploaded sucessfully! ");
}
});
});
callback();
}
], function(err) {
var request2 = new sql.Request()
var q2 = "exec storedproc";
request2.query(q2, (err2, result2) => {
if (err2) return callback(err2);
if (err2) {
console.log(err2);
}
});
});
});
The multer config is:
Storage = multer.diskStorage({
destination: function(req, file, callback) {
callback(null, "./Images");
},
filename: function(req, file, callback) {
storedFile = file.fieldname + "_" + Date.now() + "_" + file.originalname + randomInteger(99999) + ".jpg"
callback(null, storedFile);
}
});
Multer adds the file information to the request object as req.file.
Retrieve the uploaded image's path from:
req.file.path
Your globally stored variable storedFile is what's tripping you up: it is shared by every request, so concurrent uploads overwrite each other's value before you read it. Update your code to use the per-request path string instead:
mv( req.file.path , idPatient + ".jpg", function(err) {
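For reference, here is a minimal sketch of the route with that change applied. The storage config, field name, and JSON body shape are assumptions copied from your snippet; the point is that req.file.path belongs to the current request, so concurrent uploads no longer clobber each other:
var express = require('express');
var multer = require('multer');
var mv = require('mv');

var app = express();

var Storage = multer.diskStorage({
    destination: function(req, file, callback) {
        callback(null, "./Images");
    },
    filename: function(req, file, callback) {
        callback(null, file.fieldname + "_" + Date.now() + "_" + file.originalname);
    }
});

var upload = multer({ storage: Storage }).single("imgUploader");

app.post("/api/Upload", function(req, res) {
    upload(req, res, function(err) {
        if (err) {
            console.log(err);
            return res.end("Something went wrong!");
        }
        var idPatient = JSON.parse(req.body.json)['id_patient'];
        // req.file.path is unique to this request, unlike the shared storedFile
        mv(req.file.path, "Images/" + idPatient + ".jpg", function(moveErr) {
            if (moveErr) {
                console.log(moveErr);
                return res.end("Move failed");
            }
            return res.end("File uploaded successfully!");
        });
    });
});

app.listen(3000);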
Related
I am using this Node.js script as an AWS Lambda function to create a thumbnail of a PDF file as soon as the PDF is uploaded to an S3 bucket.
var async = require('async');
var AWS = require('aws-sdk');
var s3 = new AWS.S3();
var ffmpeg = require('fluent-ffmpeg');
var fs = require('fs');
var util = require('util');
var mktemp = require("mktemp");
var gm = require('gm').subClass({ imageMagick: true });
const https = require('https');
var mediaconvert = new AWS.MediaConvert({
apiVersion: '2017-08-29',
});
p1 = process.env['LAMBDA_TASK_ROOT'];
process.env['FFMPEG_PATH'] = p1 + '/ffmpeg-git-20180111-64bit-static/ffmpeg';
process.env['FFPROBE_PATH'] = p1 + '/ffmpeg-git-20180111-64bit-static/ffprobe';
var imageResolutions = Object.freeze({
thumbnail: 0,
type1: 1,
type2: 2
});
function processPdf(srcBucket, srcKey, fileType, format) {
var lastSlash = srcKey.lastIndexOf('/');
var dotIndex = srcKey.lastIndexOf('.');
var parentPath = srcKey.slice(0, lastSlash + 1);
var fileName = srcKey.slice(lastSlash + 1, dotIndex);
var dstKey = parentPath + 'scaled/' + fileName + '_thumbnail.png';
console.log('lastSlash:' + lastSlash + ' dotIndex ' + dotIndex + ' parentPath ' + parentPath + ' fileName ' + fileName);
async.waterfall([
function download(next) {
// Download the pdf from S3 into a buffer.
s3.getObject({
Bucket: srcBucket,
Key: srcKey
},
next);
},
function thumbnail(response, next) {
console.log('generating thumbnail for pdf');
var temp_file, image;
temp_file = mktemp.createFileSync("/tmp/XXXXXXXXXX.pdf");
fs.writeFileSync(temp_file, response.Body);
image = gm(temp_file + "[0]").flatten().colorspace("CMYK");
console.log("image is " + image);
image.size(function(err, size) {
// Transform the image buffer in memory.
this.resize(200, 200)
.toBuffer("jpeg", function(err, buffer) {
if (err) {
console.log("error buffer " + err);
next(err);
} else {
next(null, response.ContentType, buffer);
}
});
});
},
function upload(contentType, data, next) {
// Stream the thumbnail
console.log('uploading thumbnail');
s3.putObject({
Bucket: srcBucket,
Key: dstKey,
ACL:"public-read",
Body: data,
ContentType: "image/jpeg"
},
next);
}
], function (err) {
if (err) {
console.log(
'Unable to create thumbnail for ' + srcBucket + '/' + dstKey +
' and upload to ' + srcBucket + '/' + dstKey +
' due to an error: ' + err
);
} else {
console.log(
'Successfully resized ' + srcBucket + '/' + dstKey +
' and uploaded to ' + srcBucket + '/' + dstKey
);
}
}
);
}
exports.handler = function(event, context, callback) {
console.time('Total time taken');
var srcBucket = event.Records[0].s3.bucket.name;
console.log('Source Bucket: ' + srcBucket);
var format = "png";
var srcKey = decodeURIComponent(event.Records[0].s3.object.key.replace(
/\+/g, " "));
console.log('Source Key: ' + srcKey);
var pathComponents = srcKey.split('/');
if (srcKey.indexOf('scaled/') > -1) {
console.log('Already scaled....Skipping');
return;
}
var dstBucket = srcBucket;
var typeMatch = srcKey.match(/\.([^.]*)$/);
var imageType = typeMatch[1].toLowerCase();
if (imageType == "mp4") {
console.log('Processing video ' + srcKey);
processVideo(srcBucket, srcKey, imageType);
}
else if(imageType == "pdf"){
console.log('Processing pdf ' + srcKey);
processPdf(srcBucket, srcKey, imageType, format);
}else if(imageType == "png"){
var params = {
Bucket: srcBucket,
Key: srcKey
};
s3.getObjectTagging(params, function(err, data) { //get Tags for this object
if (err) console.log(err, err.stack); // an error occurred
else {
var isAlreadyProcessed = false;
var lastSlash = srcKey.lastIndexOf('/');
var parentPath = srcKey.slice(0, lastSlash + 1);
var name = srcKey.slice(lastSlash + 1, srcKey.indexOf('.' + imageType));
var tagSet = data.TagSet;
console.log('isAlreadyProcessed ' + isAlreadyProcessed);
console.log('lastSlash ' + lastSlash);
console.log('parentPath ' + parentPath);
console.log('name ' + name);
console.log('tagSet ' + tagSet);
tagSet.forEach(function(value) {
if (value['Key'] == 'processed' && value['Value'] == 'true') { //if 'processed' tag is set to 'true'
isAlreadyProcessed = true;
}
});
if (isAlreadyProcessed) {
console.log("Alreay processed.....Skipping");
return;
}
tagSet.push({
Key: "processed",
Value: "true"
})
scale(params, srcBucket, parentPath, name, 200, imageType, imageResolutions.thumbnail, tagSet);
}
});
}
else{
console.log("no valid data type");
return;
}
};
I am getting this error whenever I upload a file to the S3 bucket:
Error: Stream yields empty buffer. I have increased the memory to 2 GB, but the issue still exists. Please help and let me know what's wrong here.
I created a simple function to process uploaded files. I'm using multer to process the multipart data into files. Then I use the code below to move the files around, and return data so my webpage knows how to display the images.
It seems that somehow NodeJS keeps the files open itself. I also created a function to remove the files, but this gives me an EBUSY error. If I try to remove the file through Windows, it says that NodeJS has the file locked. When I restart the NodeJS process and then re-request the delete URL, the file is removed correctly.
Is there some way I can force NodeJS to close the file resources? Or is there some other error in my script that I am missing?
I updated node to version 12.4.0 but this didn't help either.
Processing the uploads:
exports.handleFormNotes = async(req, res, next) => {
try {
const configVariables = req.app.get('configVariables');
const uploadSuffix = req.body.uploadFolderSuffix || '';
console.log('upload suffix', uploadSuffix);
if (!req.files.length) {
return;
}
const uploadedFiles = Array();
var destPath = configVariables['FormNotesUploadDirectory'];
if (uploadSuffix !== '')
destPath = destPath + '/' + uploadSuffix;
destPath = path.resolve(destPath);
// mkdirSync returns undefined, so run that first and see if the directory exists second.
if (!fs.mkdirSync(destPath, { recursive: true }) && !fs.existsSync(destPath)) {
console.log(destPath, 'does not exist!');
req.alertHandler.addAlert('Pad om afbeelding op te slaan is niet bereikbaar: ' + destPath, 'danger');
res.render('error');
return;
}
var baseUrlPath = configVariables['FormNotesUploadDocumentRoot'];
if (uploadSuffix != null) {
baseUrlPath = baseUrlPath + '/' + uploadSuffix;
}
for(const uploadedFile of req.files) {
let now = new Date();
let destFilename = getDateTime() + "_" + uploadedFile.originalname;
let destFilenameThumb = 'thumb_' + destFilename;
var fullDestination = path.resolve(destPath + '/' + destFilename);
var fullDestinationThumb = path.resolve(destPath + '/' + destFilenameThumb);
console.log('Copy src:', uploadedFile.path, fullDestination);
fs.copyFileSync(uploadedFile.path, fullDestination);
var unlinkResult = fs.unlinkSync(uploadedFile.path);
console.log('Unlink "' + uploadedFile.path + '", result after upload:', unlinkResult);
var newFileInfo = await sharp(destPath + '/' + destFilename)
.resize({ width: 120 })
.toFile(fullDestinationThumb);
console.log('new file info thumb:', newFileInfo);
uploadedFiles.push({
'fullImg': baseUrlPath + '/' + destFilename,
'thumbImg' : baseUrlPath + '/' + destFilenameThumb,
'original': uploadedFile.originalname
});
}
// Push to backend
const data = {
files: [...uploadedFiles],
uploadSuffix: uploadSuffix
};
// Normally retVal should be the return data from OI. If anything goes wrong, retVal = 'error'
this.saveAttachment(req, res, data);
return res.send(data);
}
catch (err) {
console.log('Error handling from notes:', err);
req.alertHandler.addAlert('Error handling form notes: ' + err);
return 'error';
}
}
Removing the uploads:
exports.rmFormNote = async(req, res, data) => {
let retVal;
try {
const configVariables = req.app.get('configVariables');
const httpPath = req.query.img;
console.log('http path:', httpPath);
// Strip off the document root, but check if they are the same first
const firstPart = httpPath.substring(0, configVariables['FormNotesUploadDocumentRoot'].length);
console.log('same?', firstPart, configVariables['FormNotesUploadDocumentRoot']);
var relPath = httpPath;
if (firstPart == configVariables['FormNotesUploadDocumentRoot']) {
relPath = httpPath.substring(configVariables['FormNotesUploadDocumentRoot'].length + 1);
}
var parts = relPath.split('/');
parts[parts.length-1] = 'thumb_' + parts[parts.length-1];
var thumbPath = parts.join('/');
thumbPath = path.resolve(configVariables['FormNotesUploadDirectory'] + '/' + thumbPath);
console.log('thumbpath: ', thumbPath);
var fullPath = configVariables['FormNotesUploadDirectory'] + '/' + relPath;
var dest = path.resolve(fullPath);
console.log('dest: ', dest);
if (!fs.existsSync(dest))
throw "File not found";
fs.unlink(dest, (err) => {
if (err) throw err;
console.log('File deleted');
});
retVal = { result: true };
}
catch(err) {
console.log('Ohnoo', err);
retVal = { result: false, msg: err };
}
return res.send(retVal);
}
It turns out the thumbnail creator sharp was the problem, as stated in this GitHub issue.
I just had to disable the cache, like so:
sharp.cache(false);
var newFileInfo = await sharp(destPath + '/' + destFilename)
.resize({ width: 120 })
.toFile(fullDestinationThumb);
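For completeness, a minimal sketch of the fix in isolation (the function name and paths here are placeholders, not your actual handler): disable the cache once when the module loads, and sharp releases its handle on the input file, so deleting that file afterwards no longer fails with EBUSY:
const fs = require('fs');
const sharp = require('sharp');

// Disable sharp's file cache once, at module load, so input files are not
// kept open after the resize (the cause of the EBUSY error on Windows).
sharp.cache(false);

async function makeThumbnail(srcPath, thumbPath) {
    const info = await sharp(srcPath)
        .resize({ width: 120 })
        .toFile(thumbPath);
    // With the cache disabled, the source file can be removed right away.
    fs.unlinkSync(srcPath);
    return info;
}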
I want to upload an image into a folder using Node.js, but I don't know how to do it.
Here is the insert in my ImageDao:
exports.insert = function(data, callback){
console.log("in imagesDao insert");
var query = " insert into " + tableName + " (url,ajoute_par)";
query = query + " values(?,?);";
var values = [data.url , data.ajoute_par];
// var values = [encodeURIComponent(data.url) , data.ajoute_par];
database.execute(query, values, function(){
callback();
});
}
And here is my image controller:
// insert
exports.write = function(request, response){
console.log("in images write");
// Get the data.
var postData = "";
request.on('data', function(data){ // request.on is a listener. Call when data can be read
postData = postData + data;
});
request.on('end', function(){ // Called when data has been read
var dataObj = JSON.parse(postData);
dao.insert(dataObj, function(){
send(response, '{"write result" : "Inserted successfuly"}');
});
});
}
To upload files you can use the multer module for Node.js: https://github.com/expressjs/multer
images_storage: function () {
return multer.diskStorage({
destination: function (req, file, cb) {
// make sure the upload directory exists before handing the path to multer
mkdirp(Config.upload_images_path, function (err) {
cb(err, Config.upload_images_path);
});
}
,
filename: function (req, file, cb) {
var getFileExt = function (fileName) {
var fileExt = fileName.split(".");
if (fileExt.length === 1 || (fileExt[0] === "" && fileExt.length === 2)) {
return "";
}
return fileExt.pop();
}
cb(null, Date.now() + '.' + getFileExt(file.originalname))
}
});
},
// Image uploading
const fs = require('fs');
const multer = require('multer');
const Uploads = multer({
storage: utility.images_storage(),
fileFilter: function (req, file, cb) {
if (Config.image_format_arr.indexOf(file.mimetype) !== -1)
cb(null, true);
else
cb(null, false);
}
});
//And in your route you can use the upload function
router.post('/upload-logo', Uploads.single('school_logo'), function (req, res, next) {
var school_id = req.body.school_id;
var result = {flag: false, message: "Error Occurred! in saving school logo."};
console.log("REQUEST FILES " + req.file);
// Save School Logo
if (typeof req.file != 'undefined' && req.file.size > 0) {
School.findByIdAndUpdate(school_id, {
$set: {
school_logo: req.file.filename
}
}, {'new': true}, function (err, school_details) {
console.log("school_details " + school_details);
if (!err && school_details) {
result.flag = true;
result.message = "School logo has been successfully updated";
result.path = '/uploads/images/' + school_details.school_logo;
//req.session.school_details = school_details;
utility.upgradeSchoolLogoSessionValue(school_details, false, function (updated_school_details) {
console.log("BEFOR SESSION IS UPDATED" + JSON.stringify(req.session.school_details));
req.session.school_details = updated_school_details;
console.log("SESSION IS UPDATED" + JSON.stringify(req.session.school_details));
});
console.log("FILE NAME IS THIS " + req.file.filename);
}
res.json(JSON.stringify(result));
});
}
else {
res.json(JSON.stringify(result));
}
});
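On the client side, the multipart field name has to match the one given to Uploads.single('school_logo'). A hypothetical browser-side sketch (the element ids and the mount path are assumptions, not part of the code above):
// Assumes an <input type="file" id="school-logo-input"> and a hidden
// <input id="school-id"> on the page, and that the router is mounted at
// the application root.
const fileInput = document.getElementById('school-logo-input');
const schoolId = document.getElementById('school-id').value;

const form = new FormData();
form.append('school_logo', fileInput.files[0]); // must match Uploads.single('school_logo')
form.append('school_id', schoolId);

fetch('/upload-logo', { method: 'POST', body: form })
    .then(res => res.json())
    .then(result => console.log(result));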
I'm using Node.js to convert text files to CSV. It works for one file, but when I try to process more files the fileDestination variable doesn't change. Why? The input files are named like this: r10_1C_BP1-11_e41-81_10_5X1x9_05_train2.res
I get the following console output:
./1_train.csv has been written successufully! r10_1C_BP1-11_e41-81_10_5X1x9_05_train2.res
./1_train.csv has been written successufully! r10_1C_BP1-11_e41-81_1_5X1x9_05_train2.res
/*
* reads the *.dat files and converts them to *.csv
*/
const fs = require('fs');
const inputDir = './';
const outputDir = './';
function readFiles(inputDir, onError) {
fs.readdir(inputDir, function(err, filenames) {
if (err) {
onError(err);
return;
}
filenames.forEach(function(inputFile) {
// first we are looking for the "right" file name
if (inputFile.search(/res/) != -1) {
console.log('Starting processing ' + inputFile);
convert2csv(inputFile, function(error) {
throw error;
});
}
});
});
}
function convert2csv(filename, onError) {
arrayFromFilename = filename.split('_');
epoca = arrayFromFilename[4];
trainORval = arrayFromFilename[7].replace('2.res', '');
console.log("from convert " + filename + " " + epoca);
fs.readFile(inputDir + filename, 'utf-8', function(err, content) {
if (err) {
onError(err);
return;
}
content = content.replace(/^[^0].*\n/mg, '');
arr = content.split('\n');
pares = arr.filter(function(d, i) {
return i % 2 == 1;
});
content = pares.join('\n');
content = content.replace(/(^[\d.]*) ([\d.]*)/gm, '$1,$2');
fileDestination = outputDir + epoca + '_' + trainORval + '.csv';
console.log("filedestination :" + fileDestination);
fs.writeFile(fileDestination, 'y,x\n', function(err) {
if (err) {
return console.error(err);
}
fs.appendFile(fileDestination, content, function(err) {
if (err) {
return console.error(err);
}
console.log(fileDestination + " has been written successufully!", filename);
});
});
});
}
I have made a form for file upload and set the multiple option, so I'm trying to upload a bunch of files and then move them according to the album name that the client has set.
Here is what I've done:
if (req.body && req.body.album){
var album_name = req.body.album;
}
else{
//need to change to time instead of random album
var album_name = 'unknown_album-' + (parseInt(Math.random() * 5) + 1);
}
//File name
var file_name = null;
switch(req.files.img_file.type){
case 'image/png':
file_name = new Date().getTime() + '.png';
break;
case 'image/jpeg':
file_name = new Date().getTime() + '.jpeg';
break;
default:
res.render('admin/panel', {
title: 'אדמין',
message: 'קובץ לא תקין'
});
break;
}
mkdirp('./public/img/albums/' + album_name, function (err) {
if (err)
console.error(err);
else
{
_.each(req.files.img_file,function(val,index){
console.log(val.path + " " + index);
//gives the file path so i can read it
fs.readFile(val.path, function (err, data) {
if (err){
console.log("fs " + err);
}
//so up to here everything works fine: the files are uploaded to the "/uploads" directory; now I'm trying to move them to the correct album; the destination is: public/img/albums/:album_name/:all_images here
mv(val.path, './public/img/albums/' + album_name + '/' + val.path, function(err) {
if (err){
console.log("mv " + err);
}
else{
res.render('admin/panel', {
title: 'אדמין',
message: 'קובץ עלה בהצלחה'
});
res.redirect('/admin');
}
});
});
});
}
});
The mv module throws an error: rename "c:/work/xxx/xx/uploads/val.path.png
It's a file-access error. You've X'd out part of the file path, but look at how that npm module handles files, and make sure you are building the file names and paths correctly. Then it should work out fine.
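Concretely, the destination in the mv() call is built from the full source path (val.path), so mv tries to recreate c:/work/.../uploads/... underneath the album folder. A sketch of the likely fix, assuming you just want the file to keep its name inside the album directory (album_name and val are the variables from the snippet above):
var path = require('path');
var mv = require('mv');

// Use only the base name of the uploaded temp file (or the file_name built
// from the mime type earlier) when composing the destination path.
var dest = './public/img/albums/' + album_name + '/' + path.basename(val.path);

mv(val.path, dest, function(err) {
    if (err) {
        console.log("mv " + err);
    }
});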
I used read and write streams instead and removed the "mv" module:
if (req.body && req.body.album){
var album_name = req.body.album;
}
else{
//need to change to time instead of random album
var album_name = 'unknown_album-' + (parseInt(Math.random() * 5) + 1);
}
//File name
if (req.files.img_file.length > 1)
{
var Counter = 0;
_.each(req.files.img_file,function(val,index){
var file_name = null;
switch(val.type){
case 'image/png':
file_name = new Date().getTime() + '.png';
break;
case 'image/jpeg':
file_name = new Date().getTime() + '.jpeg';
break;
}
mkdirp('./public/img/albums/' + album_name, function (err) {
if (err)
console.error(err);
var source = fs.createReadStream(val.path);
var dest = fs.createWriteStream('./public/img/albums/' + album_name + '/' + val.name);
source.pipe(dest);
source.on('end', function() {
console.log('end...');
Counter++;
console.log(Counter);
console.log(req.files.img_file.length);
if (Counter == req.files.img_file.length){
res.redirect('/admin');
res.render('admin/panel', {
title: 'אדמין',
message: 'קובץ עלה בהצלחה',
albums: albums
}); //eo res render
}
});
source.on('error', function(err) { console.log('error'); });
});// eo mkdir
}); // eo _each
}