Node.js project not starting/hangs

I have a Node.js project running on CentOS 7.
I'm trying to run it with the command node start.js (and also tried sudo node start.js), but it just seems to hang. I included a screenshot too, which you can find here: http://prntscr.com/g4ogcf
Can anybody help me with this? I'm quite clueless because it doesn't return any errors either.
My start.js application code:
const app = require('express')();
const fs = require('fs');
const server = require('http').Server(app);
const cors = require('cors');
const express = require('express');
const Database = require('./Database.js');
const db = new Database();
const cfg = require('./config/config.json');
const io = require('socket.io')(cfg.client.socket_port);
const Router = require('./Router')(db, io);

app.listen(cfg.client.server_port);
app.use(cors());

app.get('/', (req, res) => {
  fs.readFile(`${__dirname}/../public/client/app/index.html`, (err, data) => {
    if (err)
      throw err;
    res.send(data.toString());
  });
});

app.get('/media', (req, res) => {
  if (req.query.cfg) {
    res.set('Content-Type', 'application/javascript');
    fs.readFile(`${__dirname}/config/config.json`, (err, data) => {
      if (err)
        throw err;
      res.send(`export default ${JSON.stringify(JSON.parse(data).client)}`);
    });
  }
  res.set('Content-Type', 'image/jpeg');
  if (req.query.avatar) {
    db.getAvatarImg(req.query.avatar).then(x => {
      var img = x[0] ? x[0].img : '';
      const data = img.replace(/^data:image\/\w+;base64,/, "");
      const buf = new Buffer(data, 'base64');
      res.send(buf);
    });
  }
  if (req.query.background) {
    db.getBackgroundImg(req.query.background).then(x => {
      var img = x[0] ? x[0].img : '';
      const data = img.replace(/^data:image\/\w+;base64,/, "");
      const buf = new Buffer(data, 'base64');
      res.send(buf);
    });
  }
  if (req.query.post) {
    db.getPostImg(req.query.post).then(x => {
      var img = x[0] ? x[0].img : '';
      const data = img.replace(/^data:image\/\w+;base64,/, "");
      const buf = new Buffer(data, 'base64');
      res.send(buf);
    });
  }
});

app.use(express.static(`${__dirname}/../public/`));

Put
console.time('app started');
before all of the require statements and
console.timeEnd('app started');
at the end of the start.js code. With the help of this, you can check whether your application hung during startup or initialized successfully.
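A minimal sketch of where those two lines go in start.js (the label string is arbitrary, and the middle is elided):

console.time('app started'); // start the timer before the first require

const app = require('express')();
// ... all of the other require statements, routes, and setup from start.js ...

console.timeEnd('app started'); // logs e.g. "app started: 350.123ms" once the top-level code has finished

If the timeEnd line never prints, one of the requires or the setup before it is blocking; if it does print, the process is simply staying alive because the HTTP and Socket.IO servers keep the event loop open, which is what a running server is supposed to do.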

Nothing is wrong with the program.
Just update app.listen(cfg.client.server_port); to
app.listen(cfg.client.server_port, () => {
  console.log("server started at port " + cfg.client.server_port);
});
This will log a message to the console telling you that the server has started.

Related

How to upload (pdf) file from FileReader to node js Express app

I have a (React) JS app that reads a PDF file using FileReader and then uses fetch to send it to a Node.js server running Express. When the request is received on the server side, however, the request body is undefined. What is missing from my code for this to work?
Client side:
function readFile() {
  let file = fileInputRef.current.files[0];
  const reader = new FileReader();
  return new Promise((resolve) => {
    reader.onload = function (e) {
      resolve(e.target.result);
    };
    reader.readAsDataURL(file); // readAsDataURL, readAsArrayBuffer, or readAsBinaryString?
  });
}

function handleSubmit(event) {
  event.preventDefault();
  readFile().then((value) => {
    fetch('/gen/file', {
      method: 'POST',
      body: value
    });
  });
}
Server side:
const express = require('express');
const path = require('path');
const bodyParser = require('body-parser');
const app = express();
const jsonParser = bodyParser.json();
const port = 3000;

app.post("/gen/file", function (req, res, next) {
  console.log(req.body); // undefined - Why????
});

app.listen(port, function (err) {
  if (err) console.log(err);
});
Client Side:
function getBase64(file, callback) {
  const reader = new FileReader();
  reader.addEventListener('load', () => callback(reader.result));
  reader.readAsDataURL(file);
}

function handleSubmit(event) {
  event.preventDefault();
  let file = fileInputRef.current.files[0]; // same file input as in the question
  let body = {};
  getBase64(file, fileUrl => {
    body.file = fileUrl;
    fetch('/gen/file', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' }, // needed so body-parser treats the body as JSON
      body: JSON.stringify(body)
    });
  });
}
Server Side:
const express = require('express');
const path = require('path');
const bodyParser = require('body-parser');
const app = express();
const jsonParser = bodyParser.json();
const port = 3000;
const fs = require('fs');

app.post("/gen/file", jsonParser, function (req, res, next) {
  console.log(req.body); // { file: 'data:application/pdf;base64,...' }
  let file = req.body.file;
  let base64 = file.match(/^data:([A-Za-z-+\/]+);base64,(.+)$/);
  var buffer = Buffer.from(base64[2], 'base64');
  fs.writeFile(__dirname + "/out.jpeg", buffer, 'base64', function (err) {
    if (err) console.log(err);
    res.end();
  });
});

app.listen(port);
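One caveat worth adding (an assumption about file size, not something stated in the question): a base64 data URL is roughly a third larger than the original file and quickly exceeds body-parser's default 100kb JSON limit, in which case the request fails with a 413 before it ever reaches the route. A minimal sketch of raising the limit, using the express.json() parser that ships with Express 4.16+ (the '10mb' value is arbitrary):

const express = require('express');
const app = express();

// parse JSON bodies up to 10 MB; adjust to the largest file you expect
app.use(express.json({ limit: '10mb' }));

app.post('/gen/file', (req, res) => {
  // req.body.file holds the data URL even for larger PDFs
  res.end();
});

app.listen(3000);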

Changing content of fs.createReadStream

I have a requirement where I am reading a file on an Express request, as follows:
const fs = require('fs');
const os = require('os');
const path = require('path');
var express = require("express");
app = express();

app.get('/getdata', function (req, res) {
  var stream = fs.createReadStream('myFileLocation'); // this location contains an encrypted file
  let tempVariable = [];
  stream.on('data', function (chunk) {
    tempVariable += chunk;
  });
  stream.on('end', function () {
    // *****here I read tempVariable and using it I decrypt the file content and output a buffer (say, finalBuffer)****
  });
  stream.on('error', function (error) {
    res.writeHead(404, 'Not Found');
    res.end();
  });
  stream.pipe(res);
});
So what should I do to make finalBuffer readable on request; in other words, how do I pipe the finalBuffer data to res (the response)?
Finally I found a way to create a read stream from a Buffer using the Node.js stream module.
I got the exact solution from here.
I have just added a little bit of code, like this:
const fs = require('fs');
const os = require('os');
const path = require('path');
var express = require("express");
app = express();

app.get('/getdata', function (req, res) { // getdata means getting decrypted data
  fs.readFile(file_location, function read(err, data) {
    // here I am performing my decryption logic, which gives output, say "decryptedBuffer"
    var stream = require("./index.js");
    stream.createReadStream(decryptedBuffer).pipe(res);
  });
});

// index.js
'use strict';
var util = require('util');
var stream = require('stream');

module.exports.createReadStream = function (object, options) {
  return new MultiStream(object, options);
};

var MultiStream = function (object, options) {
  if (object instanceof Buffer || typeof object === 'string') {
    options = options || {};
    stream.Readable.call(this, {
      highWaterMark: options.highWaterMark,
      encoding: options.encoding
    });
  } else {
    stream.Readable.call(this, { objectMode: true });
  }
  this._object = object;
};

util.inherits(MultiStream, stream.Readable);

MultiStream.prototype._read = function () {
  this.push(this._object);
  this._object = null;
};
If anybody has an issue with this, please comment and I will try my best to help them understand my code snippet.
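As an aside (not from the original answer): newer Node.js versions make the custom Readable subclass unnecessary. A minimal sketch of the same idea with the built-in PassThrough stream, assuming decryptedBuffer has already been produced by the decryption step:

const { PassThrough } = require('stream');

app.get('/getdata', function (req, res) {
  fs.readFile(file_location, function (err, data) {
    // ... decryption logic producing decryptedBuffer ...
    const bufferStream = new PassThrough();
    bufferStream.end(decryptedBuffer); // write the whole buffer and signal end-of-stream
    bufferStream.pipe(res);
  });
});

Since the handler is an Express route, res.end(decryptedBuffer) or res.send(decryptedBuffer) would also send the buffer directly without any stream wrapper.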

MongoDB GridFS, downloading image always returning only last image from a set of images

I am trying to retrieve a set of images stored in MongoDB using mongoose and GridFS. I'm storing the filenames in an array and looping to retrieve each one and store it in a local path. However, only the last image is downloaded and stored; the loop executes correctly, but only the last filename is used in each iteration.
Here, filename inside gfs.exist() is always sony4; that is what gets used on each iteration, and only that file gets downloaded.
const express = require('express');
const bodyParser = require('body-parser');
const cors = require('cors');
const app = express();
const mongoose = require('mongoose');
var gridfs = require('gridfs-stream');
var fs = require('fs');

mongoose.Promise = Promise;
gridfs.mongo = mongoose.mongo;
mongoose.connect('mongodb://localhost:27017/CompanyDetails', { useNewUrlParser: true })
  .then(() => console.log("mongoose is up"));

let connection = mongoose.connection;
connection.on('error', function (err) {
  console.log(err);
});

app.use(cors());
app.use(bodyParser.json());

connection.once('open', () => {
  var gfs = gridfs(connection.db);
  app.get('/api/download', async (req, res) => {
    var filenames = ['asus1.jpg', 'asus2.jpg', 'asus3.jpg', 'asus4.jpg',
      'dell1.jpg', 'dell2.jpeg', 'dell3.jpg', 'dell4.png',
      'hp1.jpg', 'hp2.jpg', 'hp3.jpg', 'hp4.jpg',
      'lenovo1.jpg', 'lenovo2.jpg', 'lenovo3.jpg', 'lenovo4.jpg',
      'sony1.jpg', 'sony2.jpg', 'sony3.jpg', 'sony4.jpg'];
    for (var i = 0; i < filenames.length; i++) {
      var filename = filenames[i];
      console.log(filenames[i]);
      // Check file exists on MongoDB
      gfs.exist({ filename: filename }, (err, file) => {
        if (err || !file) {
          res.status(404).send('File Not Found');
          return;
        }
        var readstream = gfs.createReadStream({ filename: filename });
        var fs_write_stream = fs.createWriteStream('D:/data/db/images/' + filename);
        readstream.pipe(fs_write_stream);
      });
    }
  });
});

app.listen(1234, () => console.log("Server listening at 1234"));
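No answer is recorded for this thread, but the behaviour described matches the usual var-in-a-loop closure problem: every asynchronous gfs.exist callback runs only after the loop has finished, so each one sees the final value of filename ('sony4.jpg'). A minimal sketch of the loop with block scoping, reusing the gridfs-stream calls from the question:

for (const filename of filenames) {
  // each iteration gets its own `filename` binding,
  // so the callback below captures the intended name
  gfs.exist({ filename: filename }, (err, file) => {
    if (err || !file) return;
    gfs.createReadStream({ filename: filename })
      .pipe(fs.createWriteStream('D:/data/db/images/' + filename));
  });
}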

How to render response after stream finished pipe?

This is what I want: the server is a file server, and when the client asks for a certain file, it'll stream that file back. Instead of koa-static, I try to do this, but dest.jpg only contains 'Not found'.
client code:
var Koa = require('koa');
var Router = require('koa-router');
const HttpStatus = require('http-status');
const fs = require('fs');
const koaBody = require('koa-body');
const request = require('request');

const tempSaveFile = fs.createWriteStream('dest.jpg');
const writeStream = request.post('http://localhost:3456/getfile/src.jpg').pipe(tempSaveFile);
writeStream.on('finish', () => {
  tempSaveFile.end();
  console.log('Upload successful! ');
});
server code:
var Koa = require('koa');
var Router = require('koa-router');
const HttpStatus = require('http-status');
const fs = require('fs');
const koaBody = require('koa-body');

var app = new Koa();
var router = new Router();
const serve = require('koa-static');

router
  .post([`/getfile/:fileName`],
    (ctx) => {
      const { params: { fileName } } = ctx;
      console.error(`------- server will return ${fileName} --------`);
      const readStream = fs.createReadStream(fileName).pipe(ctx.res);
      readStream.on('finish', () => {
        console.error('---- server finished stream ----');
        ctx.status = HttpStatus.OK;
      });
    });

app.use(router.routes());
app.use(router.allowedMethods());
app.listen(3456);
When I change the server to use koa-static, the client gets the file successfully; I did a diff and the files look the same.
I suspect the server returns too fast, before it finishes streaming, but another post said this is the way to wait for a pipe to finish:
callback to handle completion of pipe
Any suggestions? Thanks!
OK, I added async, and it is working now:
const multiparty = require('multiparty');
const multer = require('koa-multer');
const request = require('request');

var app = new Koa();
var router = new Router();
const serve = require('koa-static');

const streamEnd = fd => new Promise((resolve, reject) => {
  fd.on('end', () => { console.error('-- 51 --'); resolve(51); });
  fd.on('finish', () => { console.error('-- 53 --'); resolve(53); });
  fd.on('error', reject);
});

router
  .get([`/getfile/:fileName`],
    async (ctx) => {
      const { params: { fileName } } = ctx;
      console.error(`------- server will return ${fileName} --------`);
      if (fs.existsSync(fileName)) {
        const readStream = fs.createReadStream(fileName).pipe(ctx.res);
        await streamEnd(readStream);
        ctx.status = HttpStatus.OK;
      } else {
        console.error(`File ${fileName} does not exist`);
        ctx.status = HttpStatus.INTERNAL_SERVER_ERROR;
      }
    });

app.use(serve('./customer'));
app.use(router.routes());
app.use(router.allowedMethods());
app.listen(3456);
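A related simplification (not part of the original answer): Koa accepts a readable stream as the response body and takes care of piping and completion itself, so the manual streamEnd promise can be dropped. A minimal sketch, keeping the http-status constants used above:

router.get(`/getfile/:fileName`, async (ctx) => {
  const { params: { fileName } } = ctx;
  if (fs.existsSync(fileName)) {
    ctx.type = 'image/jpeg';                  // or derive the type from the file extension
    ctx.body = fs.createReadStream(fileName); // Koa pipes the stream into the response
  } else {
    ctx.status = HttpStatus.NOT_FOUND;
  }
});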

Not able to see ipfs node id

Below is my app.js file. When I run the following code it shows "swarm listening on" and "file-path", and then nothing happens.
I am also running the daemon in another command prompt, with the API listening on 5001.
I think the node is not getting initiated, and that's why it never becomes ready.
var express = require('express');
var app = express();
var ipfsAPI = require('ipfs-api');
var ipfs = ipfsAPI('localhost', '5001', { protocol: 'http' });
const IPFS = require('ipfs');

// Spawn your IPFS node \o/
const node = new IPFS();

var multer = require('multer');
var upload = multer({ dest: './z000000000' });
var fs = require('fs');

/** Permissible loading a single file,
    the value of the attribute "name" in the form of "recfile". **/
var type = upload.single('filer');

app.post('/upload', type, function (req, res) {
  /** When using the "single"
      data come in "req.file" regardless of the attribute "name". **/
  var tmp_path = req.file.path;
  console.log(tmp_path);

  node.on('ready', () => {
    node.id((err, id) => {
      if (err) {
        return console.log(err);
      }
      console.log(id);
    });

    var data = fs.readFileSync('./' + tmp_path);
    console.log("Synchronous read: " + data.toString());

    let files = [
      {
        path: './' + tmp_path,
        content: data
      }
    ];

    node.files.add(files, function (err, files) {
      if (err) {
        console.log(err);
      } else {
        console.log(files);
      }
    });
  });

  res.end();
});

var server = app.listen(8081, function () {
  var host = server.address().address;
  var port = server.address().port;
  console.log("Example app listening at http://%s:%s", host, port);
});
Use ipfsAPI:
var ipfs = ipfsAPI('localhost', '5001', { protocol: 'http' });
To get the ID of the node:
ipfs.id()
  .then(res => {
    showStatus(`daemon active\nid: ${res.ID}`, COLORS.success);
    doYourWork();
  })
  .catch(err => {
    showStatus('daemon inactive', COLORS.error);
    console.error(err);
  });
Find the documentation here: https://github.com/ipfs/js-ipfs-api
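A side note on the question code itself (my reading, not part of the answer): node.on('ready') is attached inside the POST handler, so if the in-process js-ipfs node became ready before the first request arrived, the listener is added after the one-time 'ready' event has already fired and never runs. A minimal sketch that waits for readiness once at startup instead, assuming the same js-ipfs callback API used in the question:

const IPFS = require('ipfs');
const node = new IPFS();

let nodeReady = false;
node.on('ready', () => {
  nodeReady = true;
  node.id((err, id) => {
    if (err) return console.log(err);
    console.log(id); // prints the node ID once, when the node is ready
  });
});

app.post('/upload', type, function (req, res) {
  if (!nodeReady) {
    return res.status(503).send('IPFS node not ready yet');
  }
  // ... readFileSync the upload and call node.files.add(...) as in the question ...
  res.end();
});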
