This is a basic ImageMagick setup for Node.js that I came across recently. I understand the question I'm asking is very newbie, but I'm new to Node.js and ImageMagick and I wanted to try to make it dynamic. The current script below crops one specific file; is there a way to make it apply the conversion to all the files in the directory /before and then output them to the directory /after? Also, the operation I actually want to apply to all the images in the directory is a watermark on each (the crop operation in the code is just an example).
var http = require("http");
var im = require("imagemagick");
var args = [
    "image.png",
    "-crop",
    "120x80+30+15",
    "output.png"
];
var server = http.createServer(function(req, res) {
    im.convert(args, function(err) {
        if (err) { throw err; }
        res.end("Image crop complete");
    });
}).listen(8080);
Yes, you can do it by iterating over all the files in the directory yourself.
You will also need to install the async module:
npm install async
Example code:
var async = require('async'),
    fs = require('fs'),
    im = require('imagemagick'),
    maxworkers = require('os').cpus().length,
    path = require('path');

module.exports = resize;

function resize(params) {
    // Process at most one image per CPU core at a time.
    var queue = async.queue(resizeimg, maxworkers);
    fs.readdir(params.src, function(err, files) {
        if (err) throw err;
        files.forEach(function(file) {
            queue.push({
                src: path.join(params.src, file),
                dest: path.join(params.dest, file),
                width: params.width,
                height: params.height
            });
        });
    });
}

function resizeimg(params, cb) {
    var imoptions = {
        srcPath: params.src,
        dstPath: params.dest
    };
    if (params.width !== undefined) imoptions.width = params.width;
    if (params.height !== undefined) imoptions.height = params.height;
    im.resize(imoptions, cb);
}
Then in your code:
var http = require("http");
var resize = require("./resize");
var server = http.createServer(function(req, res) {
    resize({
        src: '/source/folder',
        dest: '/destination/folder',
        width: 300
    });
}).listen(8080);
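Since the actual goal is watermarking rather than resizing, the resizeimg worker above could be swapped for one that calls im.convert with ImageMagick's -composite operator. A minimal sketch, assuming a watermark.png overlay file exists next to the script (the file name and the -gravity placement are assumptions, not from the original):
var im = require('imagemagick');

// Hypothetical worker: overlay watermark.png onto each source image.
// The two input images are combined by -composite and written to dest.
function watermarkimg(params, cb) {
    var args = [
        params.src,              // base image
        'watermark.png',         // overlay image (assumed to exist)
        '-gravity', 'southeast', // place the overlay in the bottom-right corner
        '-composite',
        params.dest
    ];
    im.convert(args, cb);
}
Passing watermarkimg instead of resizeimg to async.queue leaves the rest of the directory-walking code unchanged.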
I have a NodeJS project that starts with server.js under /opt/battleship. If I cd to that directory and type node server, everything works correctly, but if I sit in /root and use an absolute path to start it up, such as node /opt/battleship/server, then everything I serve from its ./lib and ./public subdirs gets a 404 response. This is a problem in production because my /etc/init/battleship.conf script specifies starting the process on boot with
exec /usr/local/bin/node /opt/battleship/server.js >> /var/log/battleship.log 2>&1
It runs, but then I get the 404 errors. If I put the line
cd /opt/battleship
just above that in the battleship.conf file, I don't get 404 errors and all my files are served correctly, but using cd in a .conf file seems messy and error-prone. Correct me if I'm wrong; this is my first handwritten .conf file. Is there a better way to have my server.js serve up files correctly from its ./lib and ./public subdirectories? Below is my server.js file for reference:
var PORT = 3000;
var requirejs = require('requirejs');
var http = requirejs('http');
var fs = requirejs('fs');
var path = requirejs('path');
var mime = requirejs('mime');
var cache = {};
requirejs.config({
    baseUrl: __dirname,
    nodeRequire: require,
    packages: [
        {name: 'ship', location: 'public/js/lib/ship'},
        {name: 'GameState', location: 'public/js/lib', main: 'GameState'}
    ]
});
requirejs(['./lib/battleship_server'], function(battleship_server) {
    function send404(response) {
        response.writeHead(404, {'Content-Type': 'text/plain'});
        response.write('Error 404: response not found.');
        response.end();
    }
    function sendFile(response, filePath, fileContents) {
        response.writeHead(
            200,
            {'Content-Type': mime.lookup(path.basename(filePath))}
        );
        response.end(fileContents);
    }
    function serveStatic(response, cache, absPath) {
        if (cache[absPath]) {
            sendFile(response, absPath, cache[absPath]);
        } else {
            fs.exists(absPath, function(exists) {
                if (exists) {
                    fs.readFile(absPath, function(err, data) {
                        if (err) {
                            send404(response);
                        } else {
                            cache[absPath] = data;
                            sendFile(response, absPath, data);
                        }
                    });
                } else {
                    send404(response);
                }
            });
        }
    }
    var server = http.createServer(function(request, response) {
        var filePath = false;
        if (request.url === '/') {
            filePath = 'public/index.html';
        } else {
            filePath = 'public' + request.url;
        }
        var absPath = './' + filePath;
        serveStatic(response, cache, absPath);
    });
    server.listen(PORT, function() {
        console.log('Server listening on port ' + PORT + '.');
    });
    battleship_server(server);
});
Use __dirname to get the folder of the script.
For example, if you want to get index.html and it's in the same directory as the script, use this:
var path = require('path');
var pathOfIndex = path.join(__dirname, "index.html");
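Applied to the server.js above, the same idea removes the dependency on the working directory: build absPath from __dirname instead of './'. A small sketch against the code in the question:
// Resolve static files relative to the script's own directory, so
// `node /opt/battleship/server.js` works no matter where it is run from.
var absPath = path.join(__dirname, filePath);
serveStatic(response, cache, absPath);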
Finally, after weeks of self-study I've managed to get PhantomJS (running in NodeJS) to process a file with attributes and output it as a PNG or PDF, and this is working perfectly.
However, I've been stuck for the last few days and am hoping someone can give me a pointer.
When PhantomJS has created the PDF, AND my child process has completed, how can I show a dialog box asking the user if they wish to download the file?
I've used the following code:
childProcess.execFile(binPath, childArgs,
    function(err, stdout, stderr) {
        console.log('Child Processing complete!');
        download('myservername', 'test.png', function() {
            console.log('done');
        });
    }
);
With the "download" function as the following:
var download = function(uri, filename, callback) {
    request.head(uri, function(err, res, body) {
        console.log('content-type:', res.headers['content-type']);
        console.log('content-length:', res.headers['content-length']);
        var r = request(uri).pipe(fs.createWriteStream(filename));
        r.on('close', callback);
    });
};
But I just can't seem to get a prompt on the screen asking me to download test.png.
Is there something obvious I'm missing?
* EDIT *
Actual code here:
This is the page that makes the call:
var util = require("util"),
    path = require('path'),
    fs = require('fs'),
    request = require('request'),
    url = require('url'),
    http = require('http');
var phantomjs = require('phantomjs'),
    childProcess = require('child_process');
var express = require('express');
var app = express();
var download = function(uri, filename, callback) {
    request.head(uri, function(err, res, body) {
        console.log('content-type:', res.headers['content-type']);
        console.log('content-length:', res.headers['content-length']);
        var r = request(uri).pipe(fs.createWriteStream(filename));
        r.on('close', callback);
    });
};
var binPath = phantomjs.path;
http.createServer(function(request, response) {
    console.log("Webpage was called");
    var childArgs = [path.join(__dirname, 'tst2.js'), ''];
    childProcess.execFile(binPath, childArgs,
        function(err, stdout, stderr) {
            console.log('Child Processing complete!');
            download('[here is the url of my server with filename.png', 'test2.png', function() {
                console.log('done');
            });
        });
    response.writeHeader(200, {"Content-Type": "text/plain"});
    response.write("Processing Server");
    response.end();
}).listen(8080);
and the PhantomJS page is here:
"use strict";
var page = require('webpage').create();
page.viewportSize = { width: 1663, height : 768};
page.paperSize = {
width: '11in',
height: '8.5in'
}
page.orientation='landscape'
page.onResourceRequested = function(requestData, request) {
console.log('::loading', requestData['url']);
};
page.onLoadFinished = function() {
console.log('::rendering');
page.render('test.png');
phantom.exit();
};
var content='';
content += '<html><head>'
content +='</head><body>';
content+='<div class="col-md-12 col-lg-12 results">'
content+='<div class="col-md-8 detailsPanel">'
content+='</div>'
content +='</body></html>';
page.content=content;
Like I said, they both work perfectly: the webpage I call is running on port 8080, I can see the console.logs appearing in a command box on Windows Server, and the PNG file is rendered perfectly, but after that it just hangs and I get no download prompt.
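For context on what may be missing here: a browser download prompt is triggered by the HTTP response headers, not by anything the server does internally (the download function above only copies the file onto the server's own disk). A hypothetical sketch, not from the original post, that streams the rendered file back with a Content-Disposition: attachment header so the browser offers to save it:
var fs = require('fs');
var path = require('path');

// Hypothetical helper: send a finished file to the browser as a download.
// Assumes filePath points at the PNG/PDF that PhantomJS rendered.
function sendAsDownload(response, filePath) {
    response.writeHead(200, {
        'Content-Type': 'application/octet-stream',
        'Content-Disposition': 'attachment; filename="' + path.basename(filePath) + '"'
    });
    fs.createReadStream(filePath).pipe(response);
}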
I am using sailsjs; the website is sailsjs.org.
I'm trying to create a controller function so that when an image is requested it can be resized and cached the first time a visitor asks for it, then retrieved from cache the next time a visitor requests it.
I have set up a route to an asset controller:
'get /image/:width/:height/:image': {
    controller: 'AssetController',
    action: 'image'
},
I have set up a policy to intercept requests for an image from the above route and resize/save the file into a cache:
var fs = require('fs'),
    im = require('imagemagick');

module.exports = function imageCacheResize(req, res, next) {
    var width = req.param('width');
    var height = req.param('height');
    var file = req.param('image');
    if (width && height) {
        // read from cache
        var dir = 'assets/images/uploads/cache/' + width + 'X' + height + '/';
        var filename = dir + file;
        if (!fs.existsSync(dir)) {
            fs.mkdirSync(dir);
        }
        // note: fs.exists is async and returns undefined when called
        // without a callback; the synchronous check is what is meant here
        if (!fs.existsSync(filename)) {
            // write to file if it does not exist
            var originalFile = 'assets/images/uploads/' + file;
            im.resize({
                srcPath: originalFile,
                dstPath: filename,
                width: width
            }, function(err, stdout, stderr) {
                if (err) throw err;
            });
        }
    }
    next();
};
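For completeness, a policy like this is attached to the controller action in config/policies.js; a minimal sketch, assuming the policy file lives at api/policies/imageCacheResize.js per the standard Sails convention:
// config/policies.js (sketch): run imageCacheResize before the
// image action of AssetController.
module.exports.policies = {
    AssetController: {
        image: 'imageCacheResize'
    }
};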
I also have an action/function set up on the controller to handle returning the resized image:
image: function(req, res) {
    var file = req.param('image');
    var filename = 'assets/images/uploads/' + file;
    if ((typeof req.param('width') != 'undefined')
        && (typeof req.param('height') != 'undefined')) {
        var width = req.param('width');
        var height = req.param('height');
    }
    if (typeof req.param('size') != 'undefined') {
        var size = req.param('size');
    }
    if (width && height) {
        // read from cache
        var dir = 'assets/images/uploads/cache/' + width + 'X' + height + '/';
        file = dir + file;
    } else {
        file = 'assets/images/uploads/' + file;
    }
    console.log(file);
    res.sendfile(file);
}
All of the code works correctly and creates then saves the resized image; however, it does not return the image on the first request, only on the second.
So it turns out I had not placed the next() calls in the correct place. While waiting for the image resize, the code continued and hit the final next().
I have changed the image resize policy to this:
var fs = require('fs'),
    im = require('imagemagick');

module.exports = function imageCacheResize(req, res, next) {
    var width = req.param('width');
    var height = req.param('height');
    var file = req.param('image');
    if (width && height) {
        // read from cache
        var dir = 'assets/images/uploads/cache/' + width + 'X' + height + '/';
        var filename = dir + file;
        if (!fs.existsSync(dir)) {
            fs.mkdirSync(dir);
        }
        if (!fs.existsSync(filename)) {
            // write to file if it does not exist
            var originalFile = 'assets/images/uploads/' + file;
            im.resize({
                srcPath: originalFile,
                dstPath: filename,
                width: width
            }, function(err, stdout, stderr) {
                if (err) throw err;
                // only continue once the resize has finished
                next();
            });
        } else {
            next();
        }
    } else {
        next();
    }
};
I need to send a PDF file from an AngularJS client to a NodeJS service.
I wrote the AngularJS service, and when I receive the file it's a string like this:
%PDF-1.3
3 0 obj
<</Type /Page
/Parent 1 0 R
/Reso
How can I convert this string back to a PDF in NodeJS?
This is the client code:
var sendByEmail = function () {
    $scope.generatingPdf = true;
    $('#budget').show();
    var pdf = new JsPDF('p', 'pt', 'letter');
    var source = $('#budget')[0];
    pdf.addHTML(source, 0, 0, function () {
        var resultPdf = pdf.output();
        BillService.sendByEmail("rbrlnx#gmail.com", resultPdf).then(function () {
        });
        $('#budget').hide();
    });
};
var sendByEmail = function (email, file) {
    var deferred = $q.defer();
    var data = {
        email: email,
        file: file
    };
    BillService.sendByEmail(data, function (result) {
        deferred.resolve(result);
    }, function () {
        deferred.reject();
    });
    return deferred.promise;
};
The server-side controller is empty:
var sendByEmail = function (req, res, next) {
    var file = req.body.file;
};
I experimented with this a while ago and came up with the following. It's not production-ready by a long shot, but maybe you'll find it useful. It's free of front-end libraries (except Angular, of course), but assumes you're using Express 4.x and body-parser.
The result (shown in the original as two screenshots, one of the browser and one of the server): a tiny node server serving a static index.html and the angular files, with a POST route that receives a PDF in base64 as delivered by the HTML FileReader API and saves it to disk.
Instead of saving to disk, you can send it as an email attachment. See for instance here or here for some info on that.
The example below assumes a user uploading a PDF through a file input, but the idea is the same for any other way of sending a document to your back end. The most important thing is to send the PDF data as base64, because that is the format most file writers and email packages use (as opposed to straight-up binary, for instance). This also goes for images, documents, etc.
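Concretely, FileReader.readAsDataURL delivers a data URL, and the server only needs the part after the base64 prefix; a two-line sketch of the decoding step (dataUrl is a hypothetical variable holding the uploaded string):
// "data:application/pdf;base64,JVBERi0xLjM..." -> raw PDF bytes
var base64 = dataUrl.replace('data:application/pdf;base64,', '');
var pdfBytes = new Buffer(base64, 'base64'); // Buffer.from(base64, 'base64') in modern Node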
How did I do that:
In your HTML:
<div pdfs>Your browser doesn't support File API.</div>
A directive called pdfs:
myApp.directive('pdfs', ['upload', function(upload) {
    return {
        replace: true,
        scope: true, // create a child scope for this input's file state
        template: '<input id="files" type="file">',
        link: function(scope, element) {
            element.bind('change', function(evt) {
                scope.$apply(function() {
                    scope.files = evt.target.files;
                });
            });
        },
        controller: function($scope, $attrs) {
            $scope.$watch('files', function(files) {
                //upload.put(files)
                if (typeof files !== 'undefined' && files.length > 0) {
                    for (var i = 0; i < files.length; i++) {
                        readFile(files[i]);
                    }
                }
            }, true);
            function readFile(file) {
                var reader = new FileReader();
                reader.addEventListener("loadend", function(evt) {
                    upload.post({name: file.name, data: reader.result});
                });
                // only read files the input reports as PDFs
                if (file.type === 'application/pdf') {
                    reader.readAsDataURL(file);
                }
            }
        }
    };
}]);
A tiny service:
myApp.service('upload', function($http) {
    this.post = function(file) {
        $http.post('/pdf', file);
    };
});
And a node server:
var express = require('express');
var bodyParser = require('body-parser');
var fs = require("fs");
var app = express();
app.use(express.static('.'));
app.use(bodyParser.json({limit: '1mb'}));
app.post('/pdf', function(req, res) {
    var name = req.body.name;
    var pdf = req.body.data;
    pdf = pdf.replace('data:application/pdf;base64,', '');
    res.send('received');
    fs.writeFile(name, pdf, 'base64', function(err) {
        if (err) console.log(err);
    });
});
var server = app.listen(3000, function() {
    console.log('Listening on port %d', server.address().port);
});
I am using zlib along with fstream for zipping and sending to the client. Now I need to unzip an archive (which may contain subfolders) into a folder, maintaining the folder structure. How do I do that?
There are plenty of node modules that can do this for you. One of them is node-unzip. You can extract a .zip file to a directory as simply as this:
var fs = require('fs');
var unzip = require('unzip');

fs.createReadStream('path/to/archive.zip').pipe(unzip.Extract({ path: 'output/path' }));
Further reading: https://github.com/EvanOxfeld/node-unzip
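Since Extract is a writable stream, you can also listen for completion and errors; a small sketch using the same node-unzip API:
// Wait for the extraction to finish (or fail) before using the output.
var extract = unzip.Extract({ path: 'output/path' });
extract.on('close', function() { console.log('extraction finished'); });
extract.on('error', function(err) { console.error(err); });
fs.createReadStream('path/to/archive.zip').pipe(extract);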
Rar is closed-source software. The only way you can do it is to install command-line rar (rar.exe, or the Linux version of rar, which is available for most platforms) and call it by means of something like this:
var exec = require('child_process').exec;
exec("rar.exe x file.rar", function (error) {
    if (error) {
        // error code here
    } else {
        // success code here
    }
});
You can use this amazing module http://node-machine.org/machinepack-zip
to uncompress a zip file with a directory structure inside the zip:
var Zip = require('machinepack-zip');

// Unzip the specified .zip file and write the decompressed files/directories
// as contents of the specified destination directory.
Zip.unzip({
    source: '/Users/mikermcneil/stuff.zip',
    destination: '/Users/mikermcneil/my-stuff',
}).exec(callbackSuccess, callbackFail);
To download a remote file and unzip it, you can use this code:
var fs = require('fs');
var unzip = require("unzip2");
var tar = require('tar');
var zlib = require('zlib');
var path = require('path');
var mkdirp = require('mkdirp'); // used to create directory tree
var request = require("request");
var http = require('http');
var zip = require("machinepack-zip");
for (var i = 0; i < _diff.length; i++) {
    // _diff comes from the surrounding application (not shown); note the
    // request variable is renamed req so it no longer shadows the module
    var req = http.get({
        host: 'localhost',
        path: '/update/patchs/' + "example.zip",
        port: 80,
        headers: { 'accept-encoding': 'gzip,deflate' }
    });
    req.on('response', (response) => {
        var output = fs.createWriteStream(__dirname + "/tmp/" + "example.zip");
        switch (response.headers['content-encoding']) {
            // or, just use zlib.createUnzip() to handle both cases
            case 'gzip':
                response.pipe(zlib.createGunzip()).pipe(unzip.Extract({ path: __dirname }));
                break;
            case 'deflate':
                response.pipe(zlib.createInflate()).pipe(unzip.Extract({ path: __dirname }));
                break;
            default:
                response.pipe(output);
                break;
        }
    });
    req.on('close', function() {
        zip.unzip({
            source: __dirname + "/tmp/" + "example.zip",
            destination: __dirname,
        }).exec({
            error: function(err) {
                console.error(err); // alert() does not exist in Node
            },
            success: function() {
                // delete temp folder contents after the uncompress finishes
            },
        });
    });
}
Note: remove any unnecessary modules.
Use the Node.js module decompress-zip. First install it with npm:
npm install decompress-zip --save
Then you have to require it:
const DecompressZip = require('decompress-zip');
Finally you can use it in the following way:
let unzipper = new DecompressZip(absolutePathFileZip);
The directory to extract to must be specified:
unzipper.extract({
    path: pathToExtract
});
Additionally, you can use the following events for better control:
Handle errors:
unzipper.on('error', function (err) {
    console.log('event error', err);
});
Notify when everything is extracted:
unzipper.on('extract', function (log) {
    console.log('log is', log);
});
Notify "progress" of the decompressed files:
unzipper.on('progress', function (fileIndex, fileCount) {
    console.log('Extracted file ' + (fileIndex + 1) + ' of ' + fileCount);
});
If anyone is looking for async-await syntax:
const fs = require('fs');
const request = require('request');
const unzip = require('unzip');

await new Promise(resolve =>
    request('url')
        .pipe(fs.createWriteStream('path/zipfilename'))
        .on('finish', () => {
            resolve();
        }));
await new Promise(resolve =>
    fs.createReadStream('path/filename')
        .pipe(unzip.Extract({ path: 'path/extractDir' }))
        .on('close', () => {
            resolve();
        }));