I want to upload the files from my form to my server.
I have already tested this, but without success.
What is the best npm module for that?
Can I test it on localhost?
Thanks
For Express Use,
https://www.npmjs.com/package/multer
For Hapi.js
https://gist.github.com/joyrexus/0c6bd5135d7edeba7b87
Hope This Helps!
Using Hapi.js,
I have implemented image upload in one of my projects.
I used Nginx to define the root location for this file upload.
var mkdirp = require('mkdirp');
var path = require('path');
var mv = require('mv');
// Hapi route handler: takes the multipart payload's uploadFile and hands it
// to commonImageUpload for validation and storage.
// NOTE(review): `urid` is not defined anywhere in this snippet -- as written
// this throws a ReferenceError at runtime. Presumably it should be the
// uploading user's id (from the request/session); TODO confirm and define it.
// NOTE(review): the completion callback is empty, so the client is never
// replied to and upload errors are silently dropped.
exports.imageUpload = function (req, reply) {
var payload = req.payload;
commonImageUpload(payload.uploadFile,urid,function(err,res){
});
}
// Validates an uploaded file's extension, ensures the target directory
// exists, and moves the file to `<filepath>/<idUser><ext>`.
//
// callback(err, result):
//   err === true -> failure; result is a human-readable message (or
//                   undefined when no file was supplied) -- unchanged from
//                   the original contract.
//   err === null -> success; result is the final path of the stored file.
var commonImageUpload = function (file, idUser, callback) {
    // No file in the payload at all.
    if (file == null) {
        callback(true);
        return;
    }
    var extension = path.extname(file.filename).toLowerCase();
    // Whitelist of accepted image extensions.
    var allowed = ['.jpg', '.png', '.jpeg', '.gif'];
    if (allowed.indexOf(extension) < 0) {
        callback(true, "Invalid Media Type");
        return;
    }
    var filepath = '../cdn/idcard/';
    var fname = filepath + idUser + extension;
    console.log(fname);
    mkdirp(filepath, function (err) {
        if (err) {
            console.log(err);
            callback(true, "Internal Server Error");
            return;
        }
        mv(file.path, fname, function (err) {
            // BUG FIX: the original never invoked the callback after mv,
            // so callers could not tell whether the upload finished or failed.
            if (err) {
                console.log(err);
                callback(true, "Internal Server Error");
            } else {
                callback(null, fname);
            }
        });
    });
}
Let me know if this solves your problem.
Related
I made an RTSP CCTV streaming server with Node.js.
But it is not stable.
Some CCTVs work well, but others do not.
At first I thought the RTSP URL itself had a problem, but it may not,
because the URL worked fine in VLC player.
I don't know what I'm missing.
Below is my whole code related to CCTV streaming.
// Module wiring for the CCTV streaming router.
var express = require('express');
var router = express.Router();
var kill = require('tree-kill');
var fs = require('fs');
var path = require('path');
var ffmpeg = require('fluent-ffmpeg');
// BUG FIX: the installer package is scoped with '@', not '#';
// require('#ffmpeg-installer/ffmpeg') fails with MODULE_NOT_FOUND.
var ffmpegInstaller = require('@ffmpeg-installer/ffmpeg');
ffmpeg.setFfmpegPath(ffmpegInstaller.path);
// Registry of live ffmpeg streams keyed by CCTV name.
var streams = {};
//start cctv
// POST / : start an HLS transcode for the CCTV at `url`, publishing
// segments under ./public/video/<cname>/.
router.post('/', (req, res) => {
    var cname = req.body.cname;
    var url = req.body.url;
    // Reject a second stream registered under the same name.
    if (streams[cname] != null) {
        res.status(409).send("duplicate name");
        return;
    }
    // Create the per-camera output directory.
    mkdir(cname);
    // BUG FIX: `stream` was an implicit global, so concurrent POSTs
    // clobbered each other's handle; declare it locally.
    var stream = ffmpeg(url).addOptions([
        '-hls_time 5',
        '-hls_list_size 10',
        '-hls_flags delete_segments',
        '-f hls'
    ]).output('./public/video/' + cname + '/' + cname + '.m3u8'); //save path
    console.log("Start cctv streaming");
    stream.on('error', function(err, stdout, stderr) {
        console.log("cctv has been stoped");
        console.log(err);
        // BUG FIX: drop the dead stream from the registry so the same
        // name can be started again after a failure (otherwise it 409s
        // forever even though nothing is running).
        delete streams[cname];
    });
    stream.run();
    streams[cname] = stream;
    res.status(201).send("OK");
});
//bring cctv pid by cctv name
// GET /:cname : report the pid of the ffmpeg process behind a stream.
router.get('/:cname', (req, res) => {
    const cname = req.params.cname;
    const stream = streams[cname];
    if (stream == null) {
        res.status(404).send("not found such a cctv");
        return;
    }
    res.send({ "pid": stream.ffmpegProc.pid });
});
//stop cctv by pid
// DELETE /:cname : stop a stream, remove its segment directory, and
// free the name for reuse.
router.delete('/:cname', async (req, res) => {
    var cname = req.params.cname;
    // No such cctv.
    if (streams[cname] == null) {
        res.status(404).send("not found such a cctv");
        return;
    }
    // Remove the segment directory (best effort; failure is only logged).
    var filePath = './public/video/' + cname;
    fs.rmdir(filePath, { recursive: true }, (err) => {
        if (err) {
            console.log(err);
        } else {
            console.log('dir is deleted.');
        }
    });
    streams[cname].kill();
    // BUG FIX: the registry entry was never removed, so the name stayed
    // "in use" forever (POST with the same name kept returning 409) and a
    // repeated DELETE would call kill() on a dead handle.
    delete streams[cname];
    // BUG FIX: a 204 response must not carry a body.
    res.status(204).end();
});
// Ensure ./public/video/<name> exists, creating parent directories as needed.
const mkdir = (name) => {
    var root = './public/video/';
    // BUG FIX: the original existsSync/mkdirSync pair raced (TOCTOU) and
    // threw ENOENT when ./public/video itself was missing.
    // { recursive: true } creates parents and is a no-op if the dir exists.
    fs.mkdirSync(root + name, { recursive: true });
}
And this is the .ts file save folder.
cctv1 doesn't work well, but cctv2 works well.
(cctv1 started first but created fewer .ts files than cctv2.)
I have created a file called config.js which looks like below:
// Build/config settings for the demo app.
const config = {
    // Files served statically by the app shell.
    staticFiles: {
        name: ['./', './index.html', './script.js', './icon.jpg']
    },
    // Where the packaged output is written.
    outputFolderName: "D:\\DemoApp",
    // Folder scanned for source files to add to staticFiles.name.
    sourceApplicationParentPath: "D:\\DemoApp\\"
};
Now I am reading list of files from sourceApplicationParentPath folder using node and have to update staticFiles array of above file. I am not sure how should I do it. Can someone please help.
Thanks in advance.
config.js
// config.js -- static-file manifest plus source/output folder locations.
const config = {
    staticFiles: {
        // Files served statically by the app shell.
        name: ['./',
            './index.html',
            './script.js',
            './icon.jpg',
        ]
    },
    outputFolderName: 'D:\\DemoApp',
    sourceApplicationParentPath: 'D:\\DemoApp'
};
// Consumed via require('./config').
module.exports = config;
index.js
var fs = require('fs'),
    config = require('./config'),
    util = require('util');

// Scan the source folder and append any file not already listed in
// config.staticFiles.name (as './<file>'), then rewrite config.js.
fs.readdir(config.sourceApplicationParentPath, function(err, files) {
    // BUG FIX: the original only logged the error and then iterated over
    // an undefined `files`, crashing; bail out instead.
    if (err) {
        console.log(err.message);
        return;
    }
    for (var i = 0; i < files.length; i++) {
        // Skip entries already present to avoid duplicates.
        if (config.staticFiles.name.indexOf(`./${files[i]}`) == -1) {
            config.staticFiles.name.push('./' + files[i]);
        }
    }
    // BUG FIX: write once, after the loop -- the original's
    // `i == files.length - 1` check fired inside the loop and never fired
    // at all for an empty folder.
    var buffer = `const config = \n ${util.inspect(config, false, 2, false)}; \n module.exports = config;`;
    fs.writeFile('./config.js', buffer, function(err) {
        // BUG FIX: the original's `err || console.log(...)` silently
        // swallowed write errors.
        if (err) {
            console.log(err.message);
        } else {
            console.log('Data replaced \n');
        }
    });
});
The above code is tested and working fine.
You can add or change an object, array, or value in config.js without creating duplicate entries.
config.js
// config.js -- static-file manifest and folder locations for the demo app.
const config = {
    staticFiles: {
        name: ['./', './index.html', './script.js', './icon.jpg']
    },
    outputFolderName: "D:\\DemoApp",
    sourceApplicationParentPath: "D:\\DemoApp\\"
};
// Exposed as a named property: require('./config.js').config
exports.config = config;
Code for the file from which you want to change the data:
var fs = require('fs');
var bodyparser = require('body-parser'); // NOTE(review): unused in this snippet
var config = require('./config.js');

// Directory whose contents should be merged into config.staticFiles.name.
var directoryPath = "D:\\DemoApp\\";
var data = config.config;

// List the directory and append each file name to the manifest, then
// rewrite config.js with the updated object.
fs.readdir(directoryPath, function (err, files) {
    // Handle listing errors.
    if (err) {
        return console.log('Unable to scan directory: ' + err);
    }
    var dataToUpdate = data.staticFiles.name;
    files.forEach(function (file) {
        console.log(file);
        // BUG FIX: only add entries that are not already listed -- without
        // this check every run appended duplicates, contradicting the
        // "without duplicate entry" claim.
        if (dataToUpdate.indexOf(file) === -1) {
            dataToUpdate.push(file);
        }
    });
    data.staticFiles.name = dataToUpdate;
    var value = 'const config = ' + JSON.stringify(data) + ';' + '\n' + 'exports.config = config';
    fs.writeFile('./config.js', value, er => {
        if (er) {
            throw er;
        }
        else {
            console.log('success');
        }
    });
});
This is the code written by me to get all the js files in a directory to be minified:
var http = require('http');
var testFolder = './tests/';
var UglifyJS = require("uglify-js");
var fs = require('fs');
var glob = require("glob");
var fillnam="";
hello();
// Minify every gen/*.js into <name>.min.js alongside the source file.
function hello()
{
    glob("gen/*.js", function (er, files) {
        if (er) {
            console.log(er);
            return;
        }
        // BUG FIX: the original stored the current name in the shared
        // module-level `fillnam`; by the time each async readFile callback
        // ran, the loop had finished and every callback saw the LAST
        // file's name, so all output landed in a single .min.js file.
        // forEach gives each iteration its own `file` binding.
        files.forEach(function (file) {
            console.log("File Name " + file);
            fs.readFile(file, 'utf8', function (err, data) {
                if (err) {
                    console.log(err);
                    return;
                }
                console.log(file + " " + data);
                var result = UglifyJS.minify(data);
                var outName = file.replace(".js", "") + ".min.js";
                console.log(outName);
                fs.writeFile(outName, result.code, function(err) {
                    if (err) {
                        console.log(err);
                    } else {
                        console.log("File was successfully saved.");
                    }
                });
            });
        });
    });
}
// Minimal hello-world HTTP server on port 8080.
const server = http.createServer((req, res) => {
    res.writeHead(200, {'Content-Type': 'text/html'});
    res.end('Hello World!');
});
server.listen(8080);
As a result, minified .js files with the same name plus a .min.js suffix should be created in the same directory.
But what I am getting is a single file with all the files' data overwritten. For example, if there are two files in a directory, a.js and b.js, with content:
var a=10;var b=20;
var name="stack";
What I'm getting is single file a.min.js with file content:
var a=10tack;
Please help.
You need to collect all file contents first, concat them and then run UglifyJS.minify on them to be able to save it as a single file.
Something like this (not fully tested)
const testFolder = './tests/';
const UglifyJS = require("uglify-js");
const fs = require('fs');
const readFile = require('util').promisify(fs.readFile);
const glob = require("glob");
// Read all gen/*.js files, concatenate their contents, minify the result
// once, and write it out as main.min.js.
function hello() {
    // BUG FIX: `async(er, files) {` was a syntax error -- the arrow (=>)
    // was missing from the callback.
    glob("gen/*.js", async (er, files) => {
        if (er) {
            console.log(er);
            return;
        }
        const data = [];
        for (const file of files) {
            const fileData = await readFile(file, {
                encoding: 'utf-8'
            });
            data.push(fileData);
        }
        const uglified = UglifyJS.minify(data.join('\n'));
        // BUG FIX: minify() returns a result object, so write its .code,
        // and give fs.writeFile the callback it requires.
        fs.writeFile('main.min.js', uglified.code, (err) => {
            if (err) {
                console.log(err);
            }
        });
    });
}
hello();
I have created one function on the AWS lambda using Node js version 6.10.
I need to create PDF file from html string and send that file in the email.
Following is my code:
exports.handler = function index(event, context, callback) {
var toAddress = event.to;
var fromAddress = event.from;
var subject = event.subject;
var MailBody = event.mailBody;
var PDFHTML = event.PDFHTML;
var LabelHTML = event.LabelHtml;
var options = { format: 'Legal',"header": {
"height": "25mm",
}
}
pdf.convertHTMLString(LabelHTML, '/tmp/LabelDetails.pdf', function(err, res1) {
if (err)
{
console.log(err);
callback(err, false);
}
else
{
pdf.convertHTMLString(PDFHTML, '/tmp/DiagramDetails.pdf', function(err, res1) {
if (err)
{
console.log(err);
callback(null, false);
}
else
{
merge(['/tmp/LabelDetails.pdf','/tmp/DiagramDetails.pdf'],'/tmp/Final.pdf',function(err){
if(err)
{
console.log(err);
callback(null, false);
}
else
{
/*Send mail code */
callback(null, true);
}
});
}
});
}
});
};
var fs = require("fs");
var pdf = require('html-to-pdf');
var merge = require('easy-pdf-merge');
var nodemailer = require('nodemailer');
var path = require("path");
When I try to convert the html string to the PDF file it throws the error EROFS: read only file system.
My simple node js code works perfectly fine.
After doing more research on this issue I found out that AWS gives write credentials only to the /tmp folder. So I applied file path like /tmp/FileName.pdf but still issue seems to be there.
I'm wondering if this is the right way:
node.js
var fs = require('fs');
var path = require('path');
var childProcess = require('child_process');
var phantomjs = require('phantomjs');
var binPath = phantomjs.path;

// Arguments for the PhantomJS child: the runner script plus the URL to snapshot.
var childArgs = [
    path.join(__dirname, 'phantomjs-runner.js'),
    'http://localhost:3000'
];

// Run PhantomJS; it prints the rendered HTML on stdout, which we persist
// as the crawler snapshot.
childProcess.execFile(binPath, childArgs, function(err, stdout, stderr) {
    // BUG FIX: errors were silently swallowed by empty if-blocks and the
    // (possibly broken) output was written anyway; log and bail instead.
    if (err) {
        console.log(err);
        return;
    }
    if (stderr) {
        console.log(stderr);
    }
    fs.writeFile(__dirname + '/public/snapshots/index.html', stdout, function(err) {
        if (err) {
            console.log(err);
        }
        else {
            console.log("The file was saved!");
        }
    });
});
phantomjs-runner.js
// PhantomJS side: load the URL given as argv[1], poll until the page body
// reports data-status === 'ready', then print the full HTML so the Node
// parent can capture it from stdout.
var system = require('system');
var url = system.args[1] || '';
if (url.length > 0) {
    var page = require('webpage').create();
    page.open(url, function (status) {
        if (status === 'success') {
            var delay, checker = (function() {
                var html = page.evaluate(function () {
                    var body = document.getElementsByTagName('body')[0];
                    if (body.getAttribute('data-status') == 'ready') {
                        return document.getElementsByTagName('html')[0].outerHTML;
                    }
                });
                if (html) {
                    // BUG FIX: `delay` holds a setInterval id, so it must be
                    // cancelled with clearInterval -- clearTimeout on an
                    // interval id is not guaranteed to stop it.
                    clearInterval(delay);
                    console.log(html);
                    phantom.exit();
                }
            });
            delay = setInterval(checker, 100);
        } else {
            // BUG FIX: on a failed page load the process previously hung
            // forever; exit with a non-zero status instead.
            phantom.exit(1);
        }
    });
}
Maybe there could be a better way, like:
clearTimeout(delay);
fs.writeFile
phantom.exit();
How can I manage, i.e.,
different URLs and different files?
I mean:
http://localhost:3000 index.html
http://localhost:3000/blog blog.html
http://localhost:3000/blog/postid postid.html
ENDED UP
'use strict';
// Snapshot generator wiring: PhantomJS binary path plus app configuration.
var fs = require('fs'),
path = require('path'),
childProcess = require('child_process'),
phantomjs = require('phantomjs'),
binPath = phantomjs.path,
config = require('../config/config');
// Snapshots land under build/ in production, directly under the project
// tree otherwise. NOTE(review): config.proot is assumed to be the project
// root path -- confirm in ../config/config.
var env = (process.env.NODE_ENV === 'production') ? 'production' : null,
currentPath = (env) ? config.proot + '/build/default/snapshots' : config.proot + '/default/snapshots';
// Canonicalize a URL path: empty/missing input yields '', anything else
// yields exactly one leading slash and no trailing slash.
function normalizeUrl(url){
    // Missing or empty input normalizes to the empty string.
    if (!url) {
        return '';
    }
    // Drop a single leading slash if present, then re-add exactly one and
    // strip any trailing slash.
    var trimmed = url.charAt(0) === '/' ? url.slice(1) : url;
    return '/' + trimmed.replace(/\/$/, "");
}
// Derive the snapshot file name from a normalized route:
//   ''          -> 'index'
//   '/blog'     -> 'blog'
//   '/blog/42'  -> '42' (the segment after the section name)
function normalizePage(route){
    // No route means the homepage snapshot.
    if (!route) {
        return 'index';
    }
    var segments = route.substring(1).split('/');
    // Single segment: the page name itself; otherwise the second segment
    // (e.g. the post id).
    return segments.length === 1 ? segments[0] : segments[1];
}
module.exports = function (url) {
var route = normalizeUrl(url);
var page = normalizePage(route);
var childArgs = [
path.join(__dirname, './phantomjs-runner.js'),
config.url+route
];
childProcess.execFile(binPath, childArgs, function(err, stdout, stderr) {
if(err){
}
if(stderr){
}
fs.writeFile(currentPath + '/' + page + '.html', stdout, function(err) {
if(err) {
console.log(err);
}
else {
console.log("The file was saved!");
}
});
});
};
So I worked out the parameters; I'm still
waiting for a way to get the output. ^^