Nodejs: Writestream on windows machine - node.js

For some reason when I try to write a file on my localhost (windows 7) the writestream won't open. On a linux machine, it works fine. Is there some type of permissions I need to add in windows?
I'm already running as administrator.
Here is the current method.
// Mainfunction to recieve and process the file upload data asynchronously
// Main function to receive and process the file upload data asynchronously.
//   req       - incoming HTTP request (xhr streaming upload or classic form post)
//   targetdir - destination directory for the uploaded file
//   callback  - invoked with {success: true} or {success: false, error: ...}
var uploadFile = function(req, targetdir, callback) {
    var path = require('path');
    var os = require('os');
    var total_uploaded = 0;
    var total_file;
    // Moves the uploaded file from the temp directory to its destination
    // and calls the callback with the JSON-data that could be returned.
    var moveToDestination = function(sourcefile, targetfile) {
        moveFile(sourcefile, targetfile, function(err) {
            if (!err)
                callback({success: true});
            else
                callback({success: false, error: err});
        });
    };
    // Direct async xhr stream data upload.
    if (req.xhr) {
        var fname = req.header('x-file-name');
        // Use the OS temp directory so this works on Windows as well as Linux.
        // A hard-coded '/tmp/' does not exist on Windows, which is why the
        // writestream's 'error' event fired there.
        var tmpfile = path.join(os.tmpdir(), uuid.v1());
        total_file = req.header('content-length');
        // Open a temporary writestream
        var ws = fs.createWriteStream(tmpfile);
        ws.on('error', function(err) {
            console.log("uploadFile() - req.xhr - could not open writestream.");
            callback({success: false, error: "Sorry, could not open writestream."});
        });
        ws.on('close', function() {
            moveToDestination(tmpfile, path.join(targetdir, fname));
        });
        // Write incoming chunks into the temp file and broadcast progress.
        req.on('data', function(data) {
            ws.write(data, 'binary', function() {
                // The write callback receives no byte count; use the chunk
                // length to track progress (the original read an undefined
                // callback argument here).
                total_uploaded += data.length;
                var feed = {user:'hitesh', file:fname, progress:(total_uploaded/total_file)*100};
                require('./../../redis').broadCast(JSON.stringify(feed));
            });
        });
        req.on('end', function() {
            ws.end();
        });
    }
    // Old form-based upload
    else {
        moveToDestination(req.files.qqfile.path, path.join(targetdir, req.files.qqfile.name));
    }
};

As your code is running fine on Linux it must be something specific to Windows.
var tmpfile = '/tmp/'+uuid.v1();
might be your problem. The folder/path structure on windows is different. Try using the path module and change your code to
var path = require('path');
var tmpfile = path.join('tmp', uuid.v1());
The same goes probably to your parameter targetdir.
see this related question.

The problem is with the directory. Unless you have a C:\tmp directory (assuming you're running node from the C drive), it doesn't have anywhere to write the tmp file.
You could either create a C:\tmp directory or modify the line
var tmpfile = '/tmp/'+uuid.v1();
to something like
var tmpfile = __dirname + '/tmp/'+ uuid.v1();
Note: requires a directory something like C:\mynodeproject\tmp

Related

Unable to use fs and webshot with Meteor on server side (Galaxy hosted)

I am currently running a meteor app on galaxy that creates PDF based on input from the user.
I've been using webshot-node and fs to temporarily save the PDFs, which are then downloaded directly on the user's side.
Because of some other bugs I solved, I deployed the new project again on Galaxy, but it is now impossible to generate the PDF anymore (nothing has changed in this part of the code).
The error I get is the following:
Error: ENOENT: no such file or directory, open /app/bundle/programs/web.browser/app/cv_resume.pdf
errno: -2,
code: 'ENOENT',
syscall: 'open',
path: '/app/bundle/programs/web.browser/app/cv_resume.pdf'
As stated, I didn't change anything in the generation of the PDFs, so I'm wondering if any of you have had a similar error using webshot and fs on the server side.
By the way, it works perfectly fine on my local machine.
Oh and here is the code of the generation in case someone would like to give it a try:
// Build a PDF résumé for `creator` by rendering HTML server-side (SSR) and
// feeding it to webshot, then return the result base64-encoded.
// NOTE(review): `creator` and `fullName` come from the enclosing (Meteor
// method) scope — they are not visible in this snippet; confirm at the caller.
var myDocument1 = Personalinfo.findOne({ email: creator });
// SETUP
// Grab required packages
var fs = Npm.require('fs');
var Future = Npm.require('fibers/future');
var webshot = Npm.require('webshot-node');
var fut = new Future();
// NOTE(review): this resolves to a path inside the deployed app bundle
// (/app/bundle/programs/web.browser/app/ on Galaxy), which is not writable /
// not guaranteed to exist in production — the likely cause of the ENOENT.
// Writing to an OS temp directory (e.g. /tmp) avoids this.
var meteorRoot = fs.realpathSync(process.cwd() + "/../" );
var publicPath = meteorRoot + "/web.browser/app/";
var fileName = publicPath + "cv_resume.pdf";
// GENERATE HTML STRING
var css = Assets.getText('style.css');
SSR.compileTemplate('layout', Assets.getText('layout.html'));
Template.layout.helpers({
getDocType: function () {
return "<!DOCTYPE html>";
}
});
SSR.compileTemplate('resume', Assets.getText('resume.html'));
// PREPARE DATA
var html_string = SSR.render('layout', {
css: css,
template: "resume",
data: myDocument1
});
// webshot/PhantomJS rendering options: A4 portrait with 0.8cm margins.
var options = {
"paperSize": {
"format": "A4",
"orientation": "portrait",
"margin": "0.8cm"
},
siteType: 'html'
};
// Commence Webshot
// NOTE(review): the outer `err` from webshot is never checked — when webshot
// fails, the readFile below reports a confusing ENOENT for the missing file.
webshot(html_string, fileName, options, function (err) {
fs.readFile(fileName, function (err, data) {
if (err) {
return console.error(err);
}
// Remove the temporary PDF once its contents are in memory.
fs.unlinkSync(fileName);
fut.return(data);
});
});
// Block this fiber until the PDF bytes are available.
let pdfData = fut.wait();
let base64String = Buffer.from(pdfData).toString('base64');
if (fullName != "") {
return [base64String, fullName];
} else {
return base64String;
}
Hope someone can help me, I've been stuck with this for a long time now.
At disposal if anyone needs more info.
Thanks for the clarification. Seems like you just need a folder with write access where webshot can put the pdf and from where you can then read it again. In that case, just use /tmp:
var publicPath = "/tmp/";
var fileName = publicPath + "cv_resume.pdf";
Updated to use /tmp, so you don't need to create the folder first (in code).

Gulp: Abnormal behavior of program

I'm new to Gulp and I'm having a problem with gulp,here are some points that I want to be done
I want to lookup for a file that has an .storyboard extension
(it is already DONE)
I want to perform a task whenever a certain file's content is
changed,
I want to Watch that file and when something is being changed in
that file
I want to rewrite its content by removing all other content that was
already in the file.
When I make changes in the file with the .storyboard extension, it just keeps on displaying the message done, file has been saved.
Here is my Code:
//fs to read and write files while path is for iterating directories
// NOTE(review): missing `var` — fs and path leak onto the global object
// (the trailing comma chains the two assignments into one expression).
//fs to read and write files while path is for iterating directories
fs = require('fs'),
path = require('path')
//DomParser to Parse Xml
var DOMParser = new (require('xmldom')).DOMParser({ normalizeTags: { default: false } });
//Gulp for detecting changes
var gulp = require('gulp')
// Forward-slashed path of the last *.storyboard file found by crawl().
var mainStoryBoardFile;
// Recursively walk `dir`, remembering the last .storyboard file encountered.
function crawl(dir) {
// console.log('[+]', dir);
var files = fs.readdirSync(dir);
for (var file in files) {
var next = path.join(dir, files[file]);
//iterate through files to check whether next is a file or directory
if (fs.lstatSync(next).isDirectory()) {
//if it's a directory, dive into it
crawl(next);
} else if (next.indexOf('.storyboard') >= 0) {
//if it's a file, check whether it is a .storyboard file or not
mainStoryBoardFile = next;
// Normalize Windows back-slashes so gulp.watch gets a forward-slash path.
mainStoryBoardFile = mainStoryBoardFile.replace(/\\/g, "/");
};
}
}
//calling function
crawl(__dirname);
var newFilePath = './data.xml' // unused in this snippet
var document;
var dataFound;
//What to do
// Read task: load the storyboard and parse it into `document`.
gulp.task('read', function (done) {
dataFound = fs.readFileSync(mainStoryBoardFile, "utf-8");
document = DOMParser.parseFromString(
dataFound.toString()
);
done();
});
// Write task.
// NOTE(review): both fs.writeFile calls below run CONCURRENTLY on the same
// file — one truncates it to '' while the other writes `document`. Whichever
// finishes last wins, which is why the behavior is unpredictable and only
// "done" / "The file has been saved!" is printed. Drop the empty write
// (writeFile truncates anyway) or use the synchronous API.
gulp.task('write', function (done) {
fs.writeFile(mainStoryBoardFile, '', function () { console.log('done') })
fs.writeFile(mainStoryBoardFile, document, (err) => {
if (err) throw err;
console.log('The file has been saved!');
});
// NOTE(review): done() is signalled before either write has completed.
done();
});
// Watch task: on any change, run 'read' then 'write' in series.
gulp.task('watch', function (done) {
gulp.watch(mainStoryBoardFile, gulp.series('read', 'write'));
});
Here is a solution to solve this problem, You can watch changes on a single file and you can also perform some sort of function whenever a file is changed. in xml case, you can watch a file, when it changes you can add new properties or attributes or you can create new elements in xml file.
//Dependencies
//fs to read and write files while path is for iterating directories
var fs = require('fs'),
    path = require('path'),
    DOMParser = new (require('xmldom')).DOMParser({ normalizeTags: { default: false } }),
    gulp = require('gulp'),
    arrayOfControls = require('./object.json'),
    RandExp = require('randexp');

console.log("GulpService has been Started\n");

// Forward-slashed path of the last *.storyboard file found by crawl().
var mainStoryBoardFile;
var document, dataFound;

// Recursively walk `dir`, remembering the last .storyboard file encountered.
function crawl(dir) {
    var files = fs.readdirSync(dir);
    for (var file in files) {
        var next = path.join(dir, files[file]);
        //iterate through files to check whether next is a file or directory
        if (fs.lstatSync(next).isDirectory()) {
            //if it's a directory, dive into it
            crawl(next);
        } else if (next.indexOf('.storyboard') >= 0) {
            //if it's a file, check whether it is a .storyboard file
            mainStoryBoardFile = next;
            mainStoryBoardFile = mainStoryBoardFile.replace(/\\/g, "/");
        }
    }
}

//calling function
crawl(__dirname);

// Re-scan for the storyboard, parse it, and write the DOM back to the file.
function readWrite() {
    crawl(__dirname);
    dataFound = fs.readFileSync(mainStoryBoardFile, "utf-8");
    document = DOMParser.parseFromString(
        dataFound.toString()
    );
    // BUG FIX: fs.writeFileSync is synchronous and takes NO callback — the
    // original passed one, which newer Node versions reject as invalid
    // options. The extra empty truncating write was also redundant, since
    // writeFileSync replaces the file contents anyway. Serialize the DOM
    // explicitly instead of relying on implicit toString().
    fs.writeFileSync(mainStoryBoardFile, String(document));
    console.log('The file has been saved!');
}

var watcher = gulp.watch(mainStoryBoardFile);
watcher.on('change', function (changedPath, stats) {
    readWrite();
    console.log('File ' + changedPath + ' was changed');
    // Re-arm the watcher so our own write does not stop future notifications.
    watcher.unwatch(mainStoryBoardFile);
    watcher.add(mainStoryBoardFile);
});

On which format send file to save it on gridfs?

Hi everyone,
Please, I'm working on a project using Node.js, and I would like to know in which format my Node client must send the file to the server (is it base64 format or something else?).
my client is :
//client.js
// Read the selected file and send it to the server as a plain base64 string.
$('#file').on('change', function () {
    encode64(this);
});

function encode64(input) {
    if (input.files && input.files.length) {
        // BUG FIX: the original emitted the raw File object first, which
        // socket.io cannot serialize usefully — that garbage payload is what
        // the server then tried to store. Only emit once the file is read.
        var reader = new FileReader();
        reader.onload = function (e) {
            // e.target.result is a data URL ("data:<mime>;base64,<data>");
            // strip the prefix so the server receives raw base64 only.
            var base64 = e.target.result.split(',')[1];
            chap.emit('test', { "test": base64 });
        };
        reader.readAsDataURL(input.files[0]);
    }
}
My server side is :
// Server side: receive the uploaded file over the socket and store it in GridFS.
socket.on('test', function(e){
// NOTE(review): as posted, the client emits 'test' twice — first with the raw
// File object and then with a FileReader data URL ("data:<mime>;base64,...").
// Neither is plain base64, so the Buffer below decodes garbage. Also, e.test
// (the file CONTENT) is used here as the GridFS *filename* — presumably a
// real filename should be sent alongside the data; verify against the client.
var gs = new gridStore(db, e.test,"w");
// NOTE(review): "TypeError: Bad argument at Object.fs.fstat" suggests
// GridStore#writeFile expects a file *path* (or fd), not a Buffer — confirm
// against the mongodb driver docs; gs.open() + gs.write(buffer) is the
// Buffer-based API. `new Buffer(...)` is also deprecated (use Buffer.from).
gs.writeFile(new Buffer(e.test,"base64"), function(err,calb){
if (!err)
console.log('bien passe');
else
console.log('erreur');
});
});
But this doesn't work , i get this error :
TypeError: Bad argument
at Object.fs.fstat (fs.js:667:11)
Any one could help me ?
Normally this is how you store into GridFS. I have used it to store files; hope it works.
// Store an uploaded file in GridFS: multiparty parses the multipart request
// into a temp file on disk, which is then piped into a gridfs-stream write
// stream. (The original's `fs = require('fs'),` followed by `var gfs = ...`
// was a syntax error; both requires are proper declarations now.)
var fs = require('fs');
var gfs = require('gridfs-stream');

var form = new multiparty.Form();
form.parse(req, function (err, fields, files) {
    // Don't silently ignore parse failures.
    if (err) {
        return res.status(400).send(err.message);
    }
    var file = files.file[0];
    var filename = file.originalFilename; // original client-side filename
    var contentType = file.headers['content-type'];
    var tmpPath = file.path; // temporary path created by multiparty
    // BUG FIX: the original referenced `fileName` (capital N), an undefined
    // variable — the declared variable is `filename`. Also record the
    // content type on the GridFS entry.
    var writestream = gfs.createWriteStream({ filename: filename, content_type: contentType });
    // open a stream to the temporary file created by Express...
    // ...and pipe it to gfs
    fs.createReadStream(tmpPath).pipe(writestream);
    writestream.on('close', function (storedFile) {
        // BUG FIX: `value` was undefined; respond with the stored file info.
        res.send(storedFile);
    });
});

Upload a file using FS and NodeJS

I get an error when I try to upload a file, it works in local but it doesn't work on my remote server...
My error :
[sbaf.fr.3005-53 (out) 2014-03-05T20:19:59] { [Error: ENOENT, rename '/tmp/1e426309d298d9ab1d099e1017584058']
[sbaf.fr.3005-53 (out) 2014-03-05T20:19:59] errno: 34,
[sbaf.fr.3005-53 (out) 2014-03-05T20:19:59] code: 'ENOENT',
[sbaf.fr.3005-53 (out) 2014-03-05T20:19:59] path: '/tmp/1e426309d298d9ab1d099e1017584058' }
My controller :
// Save a photo DB record and move the uploaded file into the public images
// folder. NOTE(review): this is the version that fails remotely with ENOENT —
// see the discussion below for the process.cwd()/__dirname explanation.
photoDAL.prototype.save = function(photo, file, callback) {
photo.file = file.name;
// `var photo` redeclares the parameter: from here on, `photo` is the ORM
// model instance built from the plain input object.
var photo = dbContext.photo.build(photo);
var file_tmp = file.path;
var file_name = file.name;
var file_type = file.type; // unused below
// `var file` also shadows the `file` parameter. This is a RELATIVE path,
// resolved against process.cwd() — the cause of the ENOENT on the server,
// where the process is not started from the app root.
var file = './public/images/photo/'+file_name;
fs.rename(file_tmp, file, function(err){
// NOTE(review): rename errors are only logged; the DB record below is
// saved regardless, and save() runs in parallel with the rename.
if( err ) console.log(err);
});
photo.save().success(function(photo) {
callback(photo);
}).error(function(error) {
callback({message: error});
});
};
EDIT #1 :
Screenshots of my ExpressJS app...
Screenshot 1 : http://glui.me/?i=eweyq4ovennej50/2014-03-05_at_20.34_2x.png/
Screenshot 2 : http://glui.me/?i=1n2cjv57jd2fmwq/2014-03-05_at_20.33_2x.png/
EDIT #2 :
My code :
console.log(process.cwd());
console.log(__dirname);
The console :
[sbaf.fr.3005-71 (out) 2014-03-05T21:55:48] /home/anthoc/apps
[sbaf.fr.3005-71 (out) 2014-03-05T21:55:48] /home/anthoc/apps/sbaf.fr/app/dal
So this: var file = './public/images/photo/'+file_name; is a relative path based on process.cwd(). Presumably, if your server process was started with your app repo root as the cwd, all would be well, but that is probably not the case. It's more robust not to rely on the cwd but to use __dirname and construct paths relative to the location of the current JavaScript file. So give that a try and see if it fixes the issue. You can confirm one way or another with console.log(process.cwd()) in your controller module to debug this.
You can try using __dirname. Here is how I would do it:
// Save a photo DB record and move the uploaded file into the public images
// folder. The destination is anchored on __dirname so it does not depend on
// process.cwd() (the cause of the original ENOENT).
photoDAL.prototype.save = function(photo, file, callback) {
    photo.file = file.name;
    // Build the ORM model under a fresh name instead of shadowing the
    // `photo` parameter as the original did.
    var photoModel = dbContext.photo.build(photo);
    var file_tmp = file.path;
    var file_name = file.name;
    var targetPath = __dirname + '/public/images/photo/' + file_name;
    fs.rename(file_tmp, targetPath, function(err) {
        if (err) {
            // BUG FIX: the original only logged rename errors and saved the
            // DB record anyway (and in parallel with the rename). Report the
            // failure to the caller and skip the save.
            console.log(err);
            return callback({message: err});
        }
        // Persist the record only after the file is safely in place.
        photoModel.save().success(function(saved) {
            callback(saved);
        }).error(function(error) {
            callback({message: error});
        });
    });
};
Now, if your code is in a folder and your target is in the parent directory, like mine:
-root
- public
-photos
-server
-upload.js
You can add a function to the string proto.
// Returns the parent directory portion of a path string (everything before
// the last separator, or '' when there is none).
// WARNING: extends a native prototype — acceptable in a small private script,
// but avoid this pattern in shared code.
String.prototype.getParent = function () {
    // Normalize Windows back-slashes so the same logic works cross-platform.
    var normalized = this.replace(/\\/g, '/');
    return normalized.substring(0, normalized.lastIndexOf('/'));
};
Now call __dirname.getParent() as many time as needed (__dirname.getParent().getParent()...).
You can use formidable module. easy to use
https://www.npmjs.org/package/formidable

How do I unzip a .zip/.rar file in Node.js into a folder

I am using zlib along with fstream now for zipping and sending to the client, Now I need to unzip an archive(which may contains sub folders) into a folder maintaining the folder structure. How do I do that?
There are plenty of node modules that can do this for you. One of them is node-unzip. You can extract a .zip file to a directory as simple as this.
fs.createReadStream('path/to/archive.zip').pipe(unzip.Extract({ path: 'output/path' }));
Further reading: https://github.com/EvanOxfeld/node-unzip
Rar is a closed-source software. The only way you can do it -- install command-line rar (rar.exe or linux version of rar, which is available on most platforms) and call it by means of this:
// Extraction is delegated to the command-line rar tool, since the RAR format
// is proprietary and has no native Node implementation.
var exec = require('child_process').exec;

// Invoked once the rar process exits; `error` is null on success.
var onRarFinished = function (error) {
    if (error) {
        // error code here
    } else {
        // success code here
    }
};

exec("rar.exe x file.rar", onRarFinished);
you can use this amazing module http://node-machine.org/machinepack-zip
for uncompress a zip file with directory structure inside zip
var Zip = require('machinepack-zip');
// Unzip the specified .zip file and write the decompressed files/directories as contents of the specified destination directory.
Zip.unzip({
source: '/Users/mikermcneil/stuff.zip',
destination: '/Users/mikermcneil/my-stuff',
}).exec(callbackSuccess, callbackFail );
for download remote file and unzip you can use this code:
// Download a patch zip over HTTP and extract it next to this script.
var fs = require('fs');
var unzip = require("unzip2");
var tar = require('tar');
var zlib = require('zlib');
var path = require('path');
var mkdirp = require('mkdirp'); // used to create directory tree
var request = require("request");
var http = require('http');
var zip = require("machinepack-zip");
// NOTE(review): `_diff` and `constants` are defined elsewhere in the file.
// Every iteration starts a request for the same file and writes to the same
// tmp path, so with more than one element the downloads race each other.
for (var i = 0; i < _diff.length; i++) {
// NOTE(review): this request's result is discarded, and the next line
// REASSIGNS the `request` module variable to an http.ClientRequest — after
// that, the require'd request module is gone. Use a different variable name.
request(constants.base_patch +"example.zip")
request = http.get({ host: 'localhost',
path: '/update/patchs/' + "example.zip",
port: 80,
headers: { 'accept-encoding': 'gzip,deflate' } });
request.on('response', (response) => {
var output = fs.createWriteStream(__dirname + "/tmp/" +"example.zip");
// Pick a decompression strategy based on the server's content encoding.
switch (response.headers['content-encoding']) {
// or, just use zlib.createUnzip() to handle both cases
case 'gzip':
response.pipe(zlib.createGunzip()).pipe(unzip.Extract({ path: __dirname }));
break;
case 'deflate':
response.pipe(zlib.createInflate()).pipe(unzip.Extract({ path: __dirname }));
break;
default:
response.pipe(output);
break;
}
})
// When the connection closes, unpack the downloaded archive in place.
request.on('close', function(){
zip.unzip({
source: __dirname + "/tmp/" + "example.zip",
destination: __dirname,
}).exec({
error: function (err){
// NOTE(review): alert() does not exist in Node — this callback would
// itself throw; use console.error instead.
alert("error")
},
success: function (){
//delete temp folder content after finish uncompress
},
});
})
}
Note: remove unnecessary modules.
Use node js decompress-zip, first install it with npm:
npm install decompress-zip --save
Then you have to require it:
const DecompressZip = require('decompress-zip');
Finally you can use it in the following way:
let unzipper = new DecompressZip( absolutePathFileZip );
The directory to be extracted must be specified:
unzipper.extract({
path: pathToExtract
});
Additional you can use the following for better control:
Handle Error:
unzipper.on('error', function (err) {
console.log('event error')
});
Notify when everything is extracted
unzipper.on('extract', function (log) {
console.log('log es', log);
});
Notify "progress" of the decompressed files:
unzipper.on('progress', function (fileIndex, fileCount) {
console.log('Extracted file ' + (fileIndex + 1) + ' of ' + fileCount);
});
If anyone looking for async-await way syntax:
const request = require('request');
const unzip = require('unzip');
const fs = require('fs'); // was missing from the original snippet

// Download the remote zip to disk and wait for the write stream to finish.
await new Promise((resolve) =>
  request('url')
    .pipe(fs.createWriteStream('path/zipfilename'))
    .on('finish', () => {
      resolve();
    }));

// Extract the downloaded archive and wait for the unzip stream to close.
// BUG FIX: the extract path was missing its closing quote ('path/extractDir),
// which made the snippet a syntax error.
await new Promise((resolve) =>
  fs.createReadStream('path/filename')
    .pipe(unzip.Extract({ path: 'path/extractDir' }))
    .on('close', () => {
      resolve();
    }));

Resources