Node.js Bluebird promise fails while processing image

// Created a promise for each image size.
var promises = sizes.map(function (size) {
    return new Promise(function (resolve, reject) {
        var destinationDir = fileUtil.getAbsolutePathOfImage(destinationPath);
        fileUtil.createDirectoryIfNotExists(destinationDir);
        destinationDir += size.src;
        fileUtil.createDirectoryIfNotExists(destinationDir);
        // Resize the image.
        //console.log('imagefile : ' + JSON.stringify(imageFile));
        //console.log('destinationDir: ' + JSON.stringify(destinationDir));
        // Called the imageUtil resize method to perform the resize.
        imageUtil.resize(imageFile, destinationDir, size).then(data => {
            var fileName = destinationPath + size.src + '/' + data;
            resolve(imageUtil.createImageData(fileName, size.height, size.width));
        }).catch(err => {
            console.error(err);
            return reject(err);
        });
    });
});
Promise.all(promises)
    .then(savedImages => {
        console.log('saved Images are: ' + JSON.stringify(savedImages));
        return res.status(200).json(savedImages);
    }).catch(err => {
        console.log('i am here' + JSON.stringify(err.message));
        return res.status(400).json(JSON.stringify(err.message));
    });
---------------------Resize method of imageutil---------------
var Promise = require('bluebird'),
    gm = require('gm'),
    path = require('path'),
    fs = require('fs');
Promise.promisifyAll(gm.prototype);
module.exports = {
    resize(imageFile, destinationPath, size) {
        if (!imageFile || !destinationPath || !size) {
            return;
        }
        return new Promise(function (resolve, reject) {
            // If we just passed callback directly, errors would be fatal
            var fileName = fileUtil.getFileName(imageFile);
            //console.log('sourceFile : ' + JSON.stringify(imageFile));
            //console.log('saveDirectory : ' + JSON.stringify(destinationPath));
            //console.log('fileName is :' + fileName);
            // Create a write stream.
            var writeStream = fs.createWriteStream(destinationPath + '/' + fileName);
            //console.log('Saving at location: ' + writeStream.path);
            gm(imageFile)
                .resize(size.width, size.height, '^')
                .gravity('Center')
                .crop(size.width, size.height)
                .writeAsync(writeStream.path, function (err) {
                    if (err) {
                        var error = 'Error while creating image of resolution : ' + size.width + 'x' + size.height + '.';
                        console.error(JSON.stringify(error));
                        return reject(new Error(error));
                    }
                });
            resolve(fileName);
        });
    }
};
It seems like everything runs and it creates four image files, but they are corrupted and give me errors later on, even though the request itself completes successfully. The console output of my image processing is as follows:
saved Images are: [{"src":"/uploads/300/fhjXFLgqq59F91uFK_2h8GiS.jpg","height":"200","width":"300"},{"src":"/uploads/120/fhjXFLgqq59F91uFK_2h8GiS.jpg","height":"120","width":"120"},{"src":"/uploads/48/fhjXFLgqq59F91uFK_2h8GiS.jpg","height":"48","width":"48"}]
POST /api/upload/image/ 200 51.790 ms - 241
"Error while creating image of resolution : 120x120."
"Error while creating image of resolution : 48x48."
"Error while creating image of resolution : 300x200."*

Since you are already using promisifyAll, you don't need to (and should not) use the Promise constructor. writeAsync already returns a promise - and you must not pass a callback to it, since Bluebird needs to supply the callback itself in the right position.
You should be using
module.exports = {
    resize(imageFile, destinationPath, size) {
        if (!imageFile || !destinationPath || !size) {
            return Promise.reject(new Error("missing arguments"));
        }
        var fileName = fileUtil.getFileName(imageFile);
        //console.log('sourceFile : ' + JSON.stringify(imageFile));
        //console.log('saveDirectory : ' + JSON.stringify(destinationPath));
        //console.log('fileName is :' + fileName);
        // Create a write stream.
        var writeStream = fs.createWriteStream(destinationPath + '/' + fileName);
        //console.log('Saving at location: ' + writeStream.path);
        var promise = gm(imageFile)
            .resize(size.width, size.height, '^')
            .gravity('Center')
            .crop(size.width, size.height)
            .writeAsync(writeStream.path);
        return promise.then(function () {
            return fileName;
        }, function (err) {
            var error = 'Error while creating image of resolution : ' + size.width + 'x' + size.height + '.';
            console.error(error, err);
            throw new Error(error);
        });
    }
};
Similarly, you shouldn't use the Promise constructor antipattern in the call to resize - it already returns a promise:
var promises = sizes.map(function (size) {
    var destinationDir = fileUtil.getAbsolutePathOfImage(destinationPath);
    fileUtil.createDirectoryIfNotExists(destinationDir);
    destinationDir += size.src;
    fileUtil.createDirectoryIfNotExists(destinationDir);
    // Resize the image.
    //console.log('imagefile : ' + JSON.stringify(imageFile));
    //console.log('destinationDir: ' + JSON.stringify(destinationDir));
    return imageUtil.resize(imageFile, destinationDir, size)
    //^^^^^^
        .then(data => {
            var fileName = destinationPath + size.src + '/' + data;
            return imageUtil.createImageData(fileName, size.height, size.width);
        }, err => {
            console.error(err);
            throw err;
        });
});
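To tie the two pieces together, the calling code then only needs Promise.all - or, since Bluebird is already a dependency, Promise.map, which also lets you cap how many resizes run at once. A minimal sketch under the question's assumptions (sizes, imageFile, destinationPath, fileUtil, imageUtil and res as above); the concurrency value is just an example:
var Promise = require('bluebird'); // Promise.map is Bluebird-specific

Promise.map(sizes, function (size) {
    var destinationDir = fileUtil.getAbsolutePathOfImage(destinationPath);
    fileUtil.createDirectoryIfNotExists(destinationDir);
    destinationDir += size.src;
    fileUtil.createDirectoryIfNotExists(destinationDir);
    return imageUtil.resize(imageFile, destinationDir, size).then(data => {
        var fileName = destinationPath + size.src + '/' + data;
        return imageUtil.createImageData(fileName, size.height, size.width);
    });
}, { concurrency: 2 }) // resize at most two images at a time
    .then(savedImages => res.status(200).json(savedImages))
    .catch(err => res.status(400).json(err.message));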

Related

Create and download text files - Node.js & React.js

As part of my project, I have to create text files that can be downloaded as a ".txt".
I am using Node.js and React, and I have already tried the Node.js "fs.writeFile", but the browser never recognizes a download, the file name always ends up being the folder name, and the file is always empty even though the variable holds a non-empty string.
I'm calling from the client to this function:
app.post("/downloadnetworks", async (req, res) => {
let selectedApps=req.body.selectedApps;
let arr=await sqlFunctions.createByIds(selectedApps);
res.send();
module.exports.createByIds = (productsArray) => {
return new Promise(function(resolve, reject) {
var bulkedString = '';
var product;
for (let obj of productsArray) {
let query = "select * from...........";
con.query(query, function(err, result, fields) {
if (err) throw err;
let stringifiedJson = JSON.stringify(result)
let parsedJson = JSON.parse(stringifiedJson)
The DB data is being added into the variable 'stringifiedJson', and it continues from here:
let parsedJson = JSON.parse(stringifiedJson) //has all the data from the DB
for (let network of parsedJson) {
if (network.certification_Id) {
bulkedString += network.domain_Name + ", " + network.publisher_Id + ", " + network.relationship + ", " + network.certification_Id;
} else {
bulkedString += network.domain_Name + ", " + network.publisher_Id + ", " +
network.relationship;
}
bulkedString += "\n";
product = network.product;
}
})
fs.writeFile('C:\Work\App ads.txt\App-Ads Files\'' + product + '.txt', bulkedString, 'utf8', (err) => {
if (err) throw err;
console.log('The file has been saved!');
});
}
resolve(bulkedString)
})
}
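For reference, a browser download is normally triggered by the HTTP response itself (a Content-Disposition header plus the text in the body) rather than by writing a file to the server's disk with fs.writeFile. A minimal sketch of that pattern, assuming the same Express app and that createByIds resolves with the assembled string as in the code above:
app.post("/downloadnetworks", async (req, res) => {
    let selectedApps = req.body.selectedApps;
    // Assumed: createByIds resolves with the assembled text, as in the question.
    let bulkedString = await sqlFunctions.createByIds(selectedApps);
    res.setHeader('Content-Type', 'text/plain');
    res.setHeader('Content-Disposition', 'attachment; filename="app-ads.txt"');
    res.send(bulkedString);
});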

NodeJS itself keeps file EBUSY on Windows?

I created a simple function to process uploaded files. I'm using multer to process the multipart data into files. Then I use the code below to move the files around, and return data so my webpage knows how to display the images.
It seems that somehow NodeJS keeps the files open itself. I also created a function to remove the files, but it gives me an EBUSY error. If I try to remove the file through Windows, it says that NodeJS has the file locked. When I restart the NodeJS process and then re-request the delete URL, the file is removed correctly.
Is there some way I can force NodeJS to close the file resources? Or is there some other error in my script that I am missing?
I updated node to version 12.4.0 but this didn't help either.
Processing the uploads:
exports.handleFormNotes = async(req, res, next) => {
try {
const configVariables = req.app.get('configVariables');
const uploadSuffix = req.body.uploadFolderSuffix || '';
console.log('upload suffix', uploadSuffix);
if (!req.files.length) {
return;
}
const uploadedFiles = Array();
var destPath = configVariables['FormNotesUploadDirectory'];
if (uploadSuffix !== '')
destPath = destPath + '/' + uploadSuffix;
destPath = path.resolve(destPath);
// mkdirSync returns undefined, so run that first and see if the directory exists second.
if (!fs.mkdirSync(destPath, { recursive: true }) && !fs.existsSync(destPath)) {
console.log(destPath, 'does not exist!');
req.alertHandler.addAlert('Pad om afbeelding op te slaan is niet bereikbaar: ' + destPath, 'danger');
res.render('error');
return;
}
var baseUrlPath = configVariables['FormNotesUploadDocumentRoot'];
if (uploadSuffix != null) {
baseUrlPath = baseUrlPath + '/' + uploadSuffix;
}
for(const uploadedFile of req.files) {
let now = new Date();
let destFilename = getDateTime() + "_" + uploadedFile.originalname;
let destFilenameThumb = 'thumb_' + destFilename;
var fullDestination = path.resolve(destPath + '/' + destFilename);
var fullDestinationThumb = path.resolve(destPath + '/' + destFilenameThumb);
console.log('Copy src:', uploadedFile.path, fullDestination);
fs.copyFileSync(uploadedFile.path, fullDestination);
var unlinkResult = fs.unlinkSync(uploadedFile.path);
console.log('Unlink "' + uploadedFile.path + '", result after upload:', unlinkResult);
var newFileInfo = await sharp(destPath + '/' + destFilename)
.resize({ width: 120 })
.toFile(fullDestinationThumb);
console.log('new file info thumb:', newFileInfo);
uploadedFiles.push({
'fullImg': baseUrlPath + '/' + destFilename,
'thumbImg' : baseUrlPath + '/' + destFilenameThumb,
'original': uploadedFile.originalname
});
}
// Push to backend
const data = {
files: [...uploadedFiles],
uploadSuffix: uploadSuffix
};
// Normally retVal should be the return data from OI. If anything goes wrong, retVal = 'error'
this.saveAttachment(req, res, data);
return res.send(data);
}
catch (err) {
console.log('Error handling from notes:', err);
req.alertHandler.addAlert('Error handling form notes: ' + err);
return 'error';
}
}
Removing the uploads:
exports.rmFormNote = async(req, res, data) => {
let retVal;
try {
const configVariables = req.app.get('configVariables');
const httpPath = req.query.img;
console.log('http path:', httpPath);
// Strip off the document root, but check if they are the same first
const firstPart = httpPath.substring(0, configVariables['FormNotesUploadDocumentRoot'].length);
console.log('same?', firstPart, configVariables['FormNotesUploadDocumentRoot']);
var relPath = httpPath;
if (firstPart == configVariables['FormNotesUploadDocumentRoot']) {
relPath = httpPath.substring(configVariables['FormNotesUploadDocumentRoot'].length + 1);
}
var parts = relPath.split('/');
parts[parts.length-1] = 'thumb_' + parts[parts.length-1];
var thumbPath = parts.join('/');
thumbPath = path.resolve(configVariables['FormNotesUploadDirectory'] + '/' + thumbPath);
console.log('thumbpath: ', thumbPath);
var fullPath = configVariables['FormNotesUploadDirectory'] + '/' + relPath;
var dest = path.resolve(fullPath);
console.log('dest: ', dest);
if (!fs.existsSync(dest))
throw "File not found";
fs.unlink(dest, (err) => {
if (err) throw err;
console.log('File deleted');
});
retVal = { result: true };
}
catch(err) {
console.log('Ohnoo', err);
retVal = { result: false, msg: err };
}
return res.send(retVal);
}
Turns out the thumbnail creator sharp was the problem, as stated in this github issue.
I just had to disable the cache, like so:
sharp.cache(false);
var newFileInfo = await sharp(destPath + '/' + destFilename)
.resize({ width: 120 })
.toFile(fullDestinationThumb);
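Since sharp.cache(false) is a global setting, it only needs to run once; a tiny sketch, under the same assumptions as the answer, of setting it where sharp is required instead of before every resize:
const sharp = require('sharp');

// Global setting: disabling the cache here once has the same effect as calling it
// before each resize, and releases the file handles that were blocking the delete.
sharp.cache(false);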

Trouble with asynchronous functions in node.js

I'm very new to Node, and I'm trying to pull a list of IDs from an API, iterate through that list saving the output for each ID, and ultimately rename each generated file. The code below is the closest I've come; while it works sometimes, it frequently fails because, I believe, one function isn't waiting for another to complete (e.g. it tries to read before a write has finished), but I'm sure I have other issues going on too.
const apiKey = inputData.apiKey
var https = require('https');
var sync = require('sync');
var fs = require('fs');
var JSONfileloc = "./pdfs/file.json"
var queryurl = 'https://intakeq.com/api/v1/intakes/summary?startDate=2018-01-01'
var authHeaders = { 'X-Auth-Key': apiKey }
var queryOpts = { method: 'GET', headers: authHeaders}
function handleFile (error, file)
{
if (error) return console.error('Ran into a problem here', error)
}
fetch(queryurl, queryOpts)
.then
(function findAPI(res, err)
{
if( err )
{ console.log('I cant find the API '+err) }
return res.json()
{console.log('found the API!')}
}
)
.then (function itID(res, err)
{
if( err )
{ console.log('I cant iterate the API '+err) }
for(var i = 0; i < res.length; i++)
{
var intakeID=res[i].Id;
var APIoptions={ host:"intakeq.com", path:"/api/v1/intakes/"+ intakeID, headers: authHeaders };
var PDFoptions={ host:"intakeq.com", path:"/api/v1/intakes/"+ intakeID+'/pdf', headers: authHeaders };
console.log('Working on ID:'+intakeID)
var JSONrequest = https.get(APIoptions, writeJSON)
}})
//READ JSON FUNCTION
function readJSON (err, data)
{
if (err) throw err;
if(data.indexOf('New Patient Forms') >= 0)
var contents = fs.readFileSync(JSONfileloc, handleFile);
var jsonContent = JSON.parse(contents)
//pull PT Name
pName = (jsonContent.ClientName);
console.log('The Patient Name Is ' + jsonContent.ClientName)
//pull PT DOB
pDob = (jsonContent.Questions[3].Answer)
console.log('Patient DOB Is ' + jsonContent.Questions[3].Answer)
//pull Form Type
pForm = (jsonContent.QuestionnaireName)
console.log('The Form Submitted is ' + jsonContent.QuestionnaireName)
//rename and move JSON
fs.rename("./pdfs/file.json", './JSONLogs/'+pName+' '+pForm+' '+Date.now()+'.json', function(err) {
if ( err ) console.log('Problem renaming! ' + err)
else console.log('Copying & Renaming JSON File!');
})
};
//WRITE JSON FUNCTION
function writeJSON(response, err)
{
var JSONfile = fs.createWriteStream(JSONfileloc, handleFile);
if (err) throw err;
response.pipe(JSONfile);
console.log('JSON Created')
fs.readFile(JSONfileloc, readJSON)
}
The research I've done leads me to believe that async.forEach is probably the right approach here, but I've been having a hard time getting that to work properly. Thanks in advance and any suggestions are much appreciated.
const apiKey = inputData.apiKey
var https = require('https');
var sync = require('sync');
var fs = require('fs');
var JSONfileloc = "./pdfs/file.json"
var queryurl = 'https://intakeq.com/api/v1/intakes/summary?startDate=2018-01-01'
var authHeaders = {
'X-Auth-Key': apiKey
}
var queryOpts = {
method: 'GET',
headers: authHeaders
}
function handleFile(error, file) {
if (error) return console.error('Ran into a problem here', error)
}
fetch(queryurl, queryOpts)
.then(function findAPI(res) {
return res.json();
})
.then(function itID(res) {
const JSONRequests = [];
for (var i = 0; i < res.length; i++) {
var intakeID = res[i].Id;
var APIoptions = {
host: "intakeq.com",
path: "/api/v1/intakes/" + intakeID,
headers: authHeaders
};
var PDFoptions = {
host: "intakeq.com",
path: "/api/v1/intakes/" + intakeID + '/pdf',
headers: authHeaders
};
// https.get has response as a stream and not a promise
// This `httpsGet` function converts it to a promise
JSONRequests.push(httpsGet(APIoptions, i));
}
return Promise.all(JSONRequests);
})
function httpsGet(options, filename) {
return new Promise((resolve, reject) => {
https.get(options, (response) => {
// The writeJSON function, inlined here for brevity
// Otherwise pass resolve to the separate writeJSON and call it in there
var JSONfile = fs.createWriteStream(filename + ".json");
response.pipe(JSONfile);
JSONfile.on('close', () => {
readJSON(filename + ".json").then(() => {
resolve();
})
})
})
})
}
//READ JSON FUNCTION
function readJSON(filename) {
// if (err) throw err;
var contents = fs.readFileSync(filename, 'utf-8'); // removed handleFile as readFileSync does not allow callbacks, added format
var jsonContent = JSON.parse(contents)
// Make your conditional checks here with the jsonContents
//pull PT Name
pName = (jsonContent.ClientName);
console.log('The Patient Name Is ' + jsonContent.ClientName)
//pull PT DOB
pDob = (jsonContent.Questions[3].Answer)
console.log('Patient DOB Is ' + jsonContent.Questions[3].Answer)
//pull Form Type
pForm = (jsonContent.QuestionnaireName)
console.log('The Form Submitted is ' + jsonContent.QuestionnaireName)
//rename and move JSON
return new Promise((resolve, reject) => {
fs.rename("./pdfs/file.json", './JSONLogs/' + pName + ' ' + pForm + ' ' + Date.now() + '.json', function (err) {
if (err) {
console.log('Problem renaming! ' + err);
reject(err);
} else {
console.log('Copying & Renaming JSON File!');
resolve();
}
})
})
};
Updated to wrap the https.get response stream in a Promise, which can be handled much better.
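On newer Node versions the same flow reads more naturally with async/await and fs.promises. A rough sketch under the question's assumptions (a fetch implementation, authHeaders, queryurl and queryOpts as defined above, and the ClientName/QuestionnaireName fields from the intake JSON); this is an alternative, not part of the original answer:
const fsp = require('fs').promises;

// Hypothetical helper: fetch one intake as JSON and save it under its final name,
// so there is no shared temp file and no separate rename step.
async function processIntake(intakeID) {
    const response = await fetch('https://intakeq.com/api/v1/intakes/' + intakeID, { headers: authHeaders });
    const jsonContent = await response.json();
    const pName = jsonContent.ClientName;
    const pForm = jsonContent.QuestionnaireName;
    const target = './JSONLogs/' + pName + ' ' + pForm + ' ' + Date.now() + '.json';
    await fsp.writeFile(target, JSON.stringify(jsonContent, null, 2));
    return target;
}

async function run() {
    const response = await fetch(queryurl, queryOpts);
    const summaries = await response.json();
    // Sequential on purpose: each file is fully written before the next request starts.
    for (const summary of summaries) {
        console.log('Working on ID: ' + summary.Id);
        await processIntake(summary.Id);
    }
}

run().catch(err => console.error('Ran into a problem here', err));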

Iteration of http requests in nodejs

I am trying to make multiple HTTP GET requests in Node.js. I need to keep fetching records until there are no more available. I am having problems iterating over a chunk of code. I have been reading about the async module but I don't understand how to apply it. Please help.
Here is the new code (11/25/2014):
The step I want to repeat is GetUrl(newBlock), until there are no more blocks. I determine that there are no more blocks when the result.length of the request response is less than 5,000.
Is there a way for me to get this flag back from the newBlock function and then stop the loop if I use the async module's whilst?
// JS Script: GPSAPIClient
// Code Description: API client code to retrieve GPS data for customer with account number: 47631
// This script requires Node.Js environment installed with Request() and Node-crontab modules (through NPM).
// It will run (as a backend script) every minute and retrieve the GPS available.
//
// Author : Vanessa Torres, Professional Services
var request = require("request");
var fs = require('fs');
var async = require('async');
var crontab = require('node-crontab');
var csvFile = "";
var debug = false;
process.argv.forEach(function(val, idx, array) {
if (val.toLowerCase() === 'debug') {
debug = true;
console.log('\n\n\n******************DEBUG MODE ACTIVE(Do not run in prod)******************\n\n\n')
}
});
console.log('waiting to start running');
var jobId = crontab.scheduleJob('* * * * *', function() {
//Gettting system date and adding leading zeros when are single digits. I need this to build the get request with date filters.
var d = new Date();
var nday = d.getDate();
var nmonth = d.getMonth();
var nhour = d.getHours();
var nmin = d.getMinutes();
var nfullyear = d.getFullYear();
if (nday < 10) {
nday = '0' + nday;
};
//1 minute substraction except for when is zero (the value will negative).
if (nmin != 0) {
var nmin = nmin - 1;
};
if (nmin < 10) {
nmin = '0' + nmin;
};
// added because TLC Plumbing is 2 hours behind us. (MST)
nhour = nhour - 2;
if (nhour < 10) {
nhour = '0' + nhour;
};
var nmonth = nmonth + 1;
if (nmonth < 10) {
nmonth = '0' + nmonth;
};
var options = {
//url: 'https://credentials#api.comettracker.com/v1/gpsdata' + '?fromdate=' + nfullyear + '-' + nmonth + '-' + nday + 'T' + nhour + '%3a' + nmin + '%3a' + '00',
url: 'https://credentials#api.comettracker.com/v1/gpsdata?fromdate=2015-11-23T17%3a00%3a00&todate=2015-11-24T10%3a00%3a00',
method: 'GET',
rejectUnauthorized: !debug
};
function GetUrl(callback) {
return request(options, function(error, response, body) {
console.log('request for links');
if (error) throw new Error(error);
var result = JSON.parse(body)['links'];
var urlnext = result[2].href;
console.log('UrlNext: ' + urlnext);
console.log(result[2].rel);
//moreBlocks = newBlock(urlnext);
moreBlocks = callback(urlnext);
});
// request to get the next block of records (maximun 5,000)
};
// HTTP get request - 1st request
request(options, function(error, response, body) {
if (error) throw new Error(error);
var result = JSON.parse(body)['gps-recs'];
console.log(result.length);
//console.log(result);
//create .csv file if result.length is > = 1 (If we received GPS records)
if (result.length >= 1 && result.length < 5000) {
console.log('entered first if, result.length: ' + result.length);
buildCSV(result);
};
//add the subsequent request when result.length = 5000
if (result.length == 5000) {
console.log('entered second if, result.length: ' + result.length);
buildCSV(result);
console.log('I came back from buildCSV. result.length is : ' + result.length);
GetUrl(newBlock); // <-- the step I want to repeat
//console.log('moreblocks back from newblock: ' + moreBlocks);
};
});
});
function newBlock(urlnext) {
console.log('URL passed to newBlock: ' + urlnext);
// option 2 - variables needed to built the url (like the first request)
var n = urlnext.length;
var rest = urlnext.substr(40, n);
console.log('Rest: ' + rest);
var options = {
url: 'https://credentials#api.comettracker.com/v1/gpsdata/' + rest,
method: 'GET',
rejectUnauthorized: !debug
};
request(options, function(error, response, body) {
console.log('nextblock request');
if (error) throw new Error(error);
var result = JSON.parse(body)['gps-recs'];
if (result.length == 0) {
//no records for filter
console.log('first if' + result.length);
moreBlocks = false;
};
if (result.length < 5000 && result.length > 0) {
//last block appends record and ends
appendCSV(result);
moreBlocks = false;
console.log('second if' + result.length);
};
if (result.length == 5000) {
//appends records but has to keep running
appendCSV(result);
console.log('third if- moreBlocks' + result.length);
};
console.log('moreBlocks: ' + moreBlocks);
});
};
function buildCSV(result) {
var csvFile = " ";
console.log('before csvFile: ' + csvFile);
//adding headers
csvFile = csvFile.concat('UserNumber' + ',' + 'UserTimeTag' + ',' + 'Latitude' + ',' + 'Longitude' + ',' + 'SpeedMph' + ',' + 'Heading' + ',' + 'Status' + '\r\n');
// loop runs result.length times
for (var i = 0; i < result.length; i++) {
csvFile = csvFile.concat(result[i].UserInfo.UserNumber + ',' + result[i].UserTimeTag + ',' + result[i].Latitude + ',' + result[i].Longitude + ',' + result[i].SpeedMph + ',' + result[i].Heading + ',' + result[i].Status + '\r\n');
};
//console.log(csvFile);
fs.writeFile('file.csv', csvFile, function(err) {
if (err) throw err;
console.log('file saved');
});
};
//appending additional blocks
function appendCSV(result) {
var csvString = " ";
// loop runs result.length times
for (var i = 0; i < result.length; i++) {
csvString = csvString.concat(result[i].UserInfo.UserNumber + ',' + result[i].UserTimeTag + ',' + result[i].Latitude + ',' + result[i].Longitude + ',' + result[i].SpeedMph + ',' + result[i].Heading + ',' + result[i].Status + '\r\n');
};
// console.log(csvString);
fs.appendFile('file.csv', csvString, function(err) {
if (err) throw err;
console.log('Data appended');
});
};
It is difficult to see if moreBlocks is in the same scope as the while. Also, a plain while statement would start making calls indefinitely until that flag changes, and that might be too late. You need to make sure you make each subsequent request one at a time.
You could use async's whilst function:
var moreBlocks = true;
async.whilst(function () {
    return moreBlocks === true;
}, function (callback) {
    return request(options, function (err, res, body) {
        console.log('request for links');
        if (err) throw new Error(err);
        var result = JSON.parse(body)['links'];
        var urlnext = result[2].href;
        console.log('UrlNext: ' + urlnext);
        console.log(result[2].rel);
        //You NEED to make sure you update moreBlocks
        moreBlocks = newBlock(urlnext);
        // Go to next step.
        return callback(null, res);
    });
}, function (err) {
    //You are done!
    //Call any function you'd like.
});
Check the docs here!
Edit, if newBlock is asynchronous:
var moreBlocks = true;
async.whilst(function () {
    return moreBlocks === true;
}, function (callback) {
    return request(options, function (err, res, body) {
        console.log('request for links');
        if (err) throw new Error(err);
        var result = JSON.parse(body)['links'];
        var urlnext = result[2].href;
        console.log('UrlNext: ' + urlnext);
        console.log(result[2].rel);
        //Now status has the info about moreBlocks.
        return newBlock(urlnext, function (status) {
            moreBlocks = status;
            // Go to next step.
            return callback(null, res);
        });
    });
}, function (err) {
    //You are done!
    //Call any function you'd like.
});
Somewhere on newBlock:
function newBlock(urlnext, callback) {
    // some code..
    return request(options, function () {
        // here status decides if we are done.
        return callback(status);
    });
}
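Putting the pieces together for this particular API, the stop condition can also live entirely inside the loop body: fetch a block, append it, and flip moreBlocks when fewer than 5,000 records come back. A rough sketch under the question's assumptions (the request module, the links / gps-recs response shape, the appendCSV helper, and a hypothetical firstUrl holding the initial gpsdata URL):
var async = require('async');
var request = require('request');

var nextUrl = firstUrl; // assumption: the initial gpsdata URL built in the question
var moreBlocks = true;

async.whilst(
    function () { return moreBlocks; }, // synchronous test, as in the answer above
    function (callback) {
        request({ url: nextUrl, method: 'GET' }, function (err, response, body) {
            if (err) return callback(err);
            var parsed = JSON.parse(body);
            var records = parsed['gps-recs'];
            if (records.length > 0) appendCSV(records);
            // Fewer than 5,000 records means this was the last block.
            moreBlocks = (records.length === 5000);
            if (moreBlocks) nextUrl = parsed['links'][2].href;
            return callback(null);
        });
    },
    function (err) {
        if (err) return console.error('Stopped with error:', err);
        console.log('All blocks retrieved.');
    }
);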

node.js move bunch of files

I have made a form for file upload and set the multiple option, so I'm trying to upload a bunch of files and then move them according to the album name that the client has set.
Here is what I've done:
if (req.body && req.body.album){
var album_name = req.body.album;
}
else{
//need to change to time instead of random album
var album_name = 'unknown_album-' + (parseInt(Math.random() * 5) + 1);
}
//File name
var file_name = null;
switch(req.files.img_file.type){
case 'image/png':
file_name = new Date().getTime() + '.png';
break;
case 'image/jpeg':
file_name = new Date().getTime() + '.jpeg';
break;
default:
res.render('admin/panel', {
title: 'אדמין', // "Admin"
message: 'קובץ לא תקין' // "Invalid file"
});
break;
}
mkdirp('./public/img/albums/' + album_name, function (err) {
if (err)
console.error(err);
else
{
_.each(req.files.img_file,function(val,index){
console.log(val.path + " " + index);
//gives the file path so i can read it
fs.readFile(val.path, function (err, data) {
if (err){
console.log("fs " + err);
}
//so until here everything works fine, the files are uploaded to the "/uploads" directory; now I'm trying to move them to the correct album, the destination is: public/img/albums/:album_name/:all_images here
mv(val.path, './public/img/albums/' + album_name + '/' + val.path, function(err) {
if (err){
console.log("mv " + err);
}
else{
res.render('admin/panel', {
title: 'אדמין', // "Admin"
message: 'קובץ עלה בהצלחה' // "File uploaded successfully"
});
res.redirect('/admin');
}
});
});
});
}
});
the mv module throws an error: rename "c:/work/xxx/xx/uploads/val.path.png"
It's a file-access error. You X'd out some of the file name, but look at how that npm module handles files, and make sure you are building the file names and paths properly. Then it should work out fine.
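One likely culprit here is that the destination passed to mv concatenates the full temporary path (val.path) into the album folder. A small sketch of building the destination from just the base name instead, using the same variables as the question (this is a guess at the intent, not the poster's final code):
var path = require('path');

// val.path is the temporary upload location; keep only its base name
// (or use val.name / the generated file_name) for the destination.
var destination = path.join('./public/img/albums', album_name, path.basename(val.path));

mv(val.path, destination, function (err) {
    if (err) return console.log("mv " + err);
    console.log('moved to ' + destination);
});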
I have used a read stream and a write stream and removed the "mv" module:
if (req.body && req.body.album){
var album_name = req.body.album;
}
else{
//need to change to time instead of random album
var album_name = 'unknown_album-' + (parseInt(Math.random() * 5) + 1);
}
//File name
if (req.files.img_file.length > 1)
{
var Counter = 0;
_.each(req.files.img_file,function(val,index){
var file_name = null;
switch(val.type){
case 'image/png':
file_name = new Date().getTime() + '.png';
break;
case 'image/jpeg':
file_name = new Date().getTime() + '.jpeg';
break;
}
mkdirp('./public/img/albums/' + album_name, function (err) {
if (err)
console.error(err);
var source = fs.createReadStream(val.path);
var dest = fs.createWriteStream('./public/img/albums/' + album_name + '/' + val.name);
source.pipe(dest);
source.on('end', function() {
console.log('end...');
Counter++;
console.log(Counter);
console.log(req.files.img_file.length);
if (Counter == req.files.img_file.length){
res.redirect('/admin');
res.render('admin/panel', {
title: 'אדמין', // "Admin"
message: 'קובץ עלה בהצלחה', // "File uploaded successfully"
albums: albums
}); //eo res render
}
});
source.on('error', function(err) { console.log('error'); });
});// eo mkdir
}); // eo _each
}
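On current Node versions the same move can also be written with fs.promises and Promise.all, which makes it easier to send exactly one response after everything has finished (note that res.redirect and res.render cannot both be called for the same request). A rough sketch assuming the same req.files.img_file array and album_name; the moveUploads name is just for illustration:
const fsp = require('fs').promises;
const path = require('path');

async function moveUploads(files, albumName) {
    const albumDir = path.join('./public/img/albums', albumName);
    await fsp.mkdir(albumDir, { recursive: true });
    // Copy every upload into the album folder, then remove the temp files.
    await Promise.all(files.map(async (file) => {
        const dest = path.join(albumDir, file.name);
        await fsp.copyFile(file.path, dest);
        await fsp.unlink(file.path);
    }));
}

// Usage inside the route handler: respond exactly once, after all moves finish.
moveUploads(req.files.img_file, album_name)
    .then(() => res.redirect('/admin'))
    .catch((err) => console.log('move ' + err));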
