changing content of fs.createReadStream - node.js

I have a requirement where I am reading a file on an Express request, as follows:
const fs = require('fs');
const os = require('os');
const path = require('path');
var express = require("express");
var app = express();

app.get('/getdata', function (req, res) {
    var stream = fs.createReadStream('myFileLocation'); // this location contains the encrypted file
    let tempVariable = [];
    stream.on('data', function (chunk) {
        tempVariable += chunk;
    });
    stream.on('end', function () {
        // ***** here I read tempVariable and, using it, decrypt the file content and output a buffer (say, finalBuffer) *****
    });
    stream.on('error', function (error) {
        res.writeHead(404, 'Not Found');
        res.end();
    });
    stream.pipe(res);
});
So what should I do to make finalBuffer readable on the request? In other words, how do I pipe the finalBuffer data to res (the response)?
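A minimal sketch of the direct approach (assuming a hypothetical decrypt() helper standing in for the actual decryption logic): collect the chunks, decrypt once the stream ends, and write the resulting buffer straight to the response instead of piping the raw stream.

app.get('/getdata', function (req, res) {
    var stream = fs.createReadStream('myFileLocation');
    var chunks = [];
    stream.on('data', function (chunk) {
        chunks.push(chunk); // keep the raw Buffer chunks
    });
    stream.on('end', function () {
        // decrypt() is a placeholder for the decryption logic described above
        var finalBuffer = decrypt(Buffer.concat(chunks));
        res.end(finalBuffer); // sends the decrypted buffer as the response body
    });
    stream.on('error', function () {
        res.writeHead(404, 'Not Found');
        res.end();
    });
});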

Finally I found a way to create a read stream from a Buffer using Node's stream module.
I got the exact solution from here.
I have just added a little bit of code, like this:
const fs = require('fs');
const os = require('os');
const path = require('path');
var express = require("express");
var app = express();

app.get('/getdata', function (req, res) { // getdata means getting decrypted data
    fs.readFile(file_location, function read(err, data) {
        // here I perform my decryption logic, which gives an output, say
        // "decryptedBuffer"
        var stream = require("./index.js");
        stream.createReadStream(decryptedBuffer).pipe(res);
    });
});
// index.js
'use strict';
var util = require('util');
var stream = require('stream');

module.exports.createReadStream = function (object, options) {
    return new MultiStream(object, options);
};

var MultiStream = function (object, options) {
    if (object instanceof Buffer || typeof object === 'string') {
        options = options || {};
        stream.Readable.call(this, {
            highWaterMark: options.highWaterMark,
            encoding: options.encoding
        });
    } else {
        stream.Readable.call(this, { objectMode: true });
    }
    this._object = object;
};

util.inherits(MultiStream, stream.Readable);

MultiStream.prototype._read = function () {
    this.push(this._object);
    this._object = null;
};
If anybody has an issue with this, please comment and I will do my best to explain the code snippet.
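As a side note (not part of the original solution): on newer Node versions the custom MultiStream class isn't strictly necessary. You can either write the buffer to the response directly, or wrap it with stream.Readable.from. A minimal sketch:

const { Readable } = require('stream');
// wrap the buffer in an array so it is emitted as a single chunk
Readable.from([decryptedBuffer]).pipe(res);
// or, even simpler, skip the stream entirely:
// res.end(decryptedBuffer);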

Related

How to update only one key in a JSON file with node.js

I am making an API for my Minecraft server and have gotten as far as getting the JSON file to update with whatever I send it in a POST request. I would like to know if it is possible to update only one key of the JSON file.
This is my current code:
var fs = require('fs');
var fileName = './serverStatus.json';
var file = require(fileName);
const express = require('express');
const bodyParser = require('body-parser');
const app = express();
app.use(bodyParser.urlencoded({ extended: false }));
app.use(bodyParser.json());
const cors = require('cors');
const { fileURLToPath } = require('url');

app.get('/status', alldata);
function alldata(request, response) {
    response.send(file);
}

app.post('/status', (req, res) => {
    if (!req.is('application/json')) {
        res.status(500);
        res.send('500 - Server Error');
    } else {
        res.status(201);
        fs.writeFile(
            fileName,
            JSON.stringify(req.body, null, 4),
            function writeJSON(err) {
                if (err) return console.error(err);
                console.log(JSON.stringify(file));
                console.log('writing to ' + fileName);
            }
        );
        res.send(file);
    }
});

const PORT = process.env.PORT || 3000;
app.listen(PORT, () =>
    console.log(`Server running on: http://localhost:${PORT}`)
);
and my JSON file:
{
    "lobby": "offline",
    "survival": "offline",
    "creative": "offline"
}
Thanks in advance!
You could use fs.readFileSync to read the file content.
Then update your JSON content, for example jsonData["survival"] = "online".
Finally, write the content back to the file with fs.writeFile (see Note 1).
See the following example code:
const fs = require("fs");

// 1. get the JSON data
// This is string data
const fileData = fs.readFileSync("./serverStatus.json", "utf8");
// Use JSON.parse to convert the string to a JSON object
const jsonData = JSON.parse(fileData);

// 2. update the value of one key
jsonData["survival"] = "online";

// 3. write it back to your JSON file (fs.writeFile needs a callback)
fs.writeFile("./serverStatus.json", JSON.stringify(jsonData), (err) => {
    if (err) console.error(err);
});
Note 1: Because you store the data in a file, you need to write the whole data back whenever you update the file content.
Also, if you want the latest file content after writing new data, you should fs.readFileSync the file again, as in the following code, to avoid serving stale data that was read before the update.
app.get('/status', alldata);
function alldata(request, response) {
    const fileContent = fs.readFileSync(fileName, "utf8");
    const fileJsonContent = JSON.parse(fileContent);
    // do other stuff
    response.send(fileContent);
}
Another approach: read the existing file, merge the incoming keys into it, and write the merged object back:
var fs = require('fs');
const express = require('express');
const bodyParser = require('body-parser');
var fileName = './serverStatus.json';
var file = require(fileName); // initial file content, used by the GET handler
const app = express();
app.use(bodyParser.urlencoded({ extended: false }));
app.use(bodyParser.json());
// maybe use this instead of bodyParser:
//app.use(express.json());
const cors = require('cors');
const { fileURLToPath } = require('url');

app.get('/status', alldata);
function alldata(request, response) {
    response.send(file);
}

app.post('/status', (req, res) => {
    if (!req.is('application/json')) {
        res.status(500);
        res.send('500 - Server Error');
    } else {
        // read full config file:
        var src = fs.readFileSync(fileName);
        // convert src JSON text to a JS object
        var srcObj = JSON.parse(src);
        // req.body is already a JS object (bodyParser.json() parsed it)
        var reqObj = req.body;
        // update the src with the new stuff in the req
        for (var prop in reqObj) {
            srcObj[prop] = reqObj[prop];
        }
        // update any additional things you want to do manually like this
        srcObj.bob = "creep";
        // convert the updated src object back to JSON text
        var updatedJson = JSON.stringify(srcObj, null, 4);
        // write the updated src back down to the file system
        fs.writeFile(
            fileName,
            updatedJson,
            function (err) {
                if (err) {
                    return console.error(err);
                }
                console.log(updatedJson);
                console.log('updated ' + fileName);
            }
        );
        res.send(updatedJson);
    }
});

const PORT = process.env.PORT || 3000;
app.listen(PORT, () =>
    console.log(`Server running on: http://localhost:${PORT}`)
);
//res.status(201);
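As a side note, the for...in merge above can also be written as a single call (plain JavaScript, not something from the original answer):

// copies every property of reqObj onto srcObj, same effect as the loop
Object.assign(srcObj, reqObj);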

How to upload (pdf) file from FileReader to node js Express app

I have a (React) JS app that reads a PDF file using FileReader and then uses fetch to send it to a Node.js server running Express. When the request is received on the server side, however, the request body is undefined. What is missing from my code for this to work?
Client side:
function readFile() {
    let file = fileInputRef.current.files[0];
    const reader = new FileReader();
    return new Promise((resolve) => {
        reader.onload = function (e) {
            resolve(e.target.result);
        };
        reader.readAsDataURL(file); // readAsDataURL, readAsArrayBuffer, or readAsBinaryString?
    });
}

function handleSubmit(event) {
    event.preventDefault();
    readFile().then((value) => {
        fetch('/gen/file', {
            method: 'POST',
            body: value
        });
    });
}
Server side:
const express = require('express');
const path = require('path');
const bodyParser = require('body-parser');
const app = express();
const jsonParser = bodyParser.json();
const port = 3000;

app.post("/gen/file", function (req, res, next) {
    console.log(req.body); // undefined - Why????
});

app.listen(port, function (err) {
    if (err) console.log(err);
});
req.body is undefined because the JSON body parser is never applied to the route, and the raw data-URL string you send isn't JSON anyway, so nothing populates req.body. One way to fix it is to send the base64 data URL inside a JSON object and parse it on the server:
Client Side:
function getBase64(file, callback) {
    const reader = new FileReader();
    reader.addEventListener('load', () => callback(reader.result));
    reader.readAsDataURL(file);
}

function handleSubmit(event) {
    event.preventDefault();
    let file = fileInputRef.current.files[0];
    let body = {};
    getBase64(file, fileUrl => {
        body.file = fileUrl; // data URL string, e.g. "data:application/pdf;base64,..."
        fetch('/gen/file', {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify(body)
        });
    });
}
Server Side:
const express = require('express');
const path = require('path');
const bodyParser = require('body-parser');
const app = express();
const jsonParser = bodyParser.json();
const port = 3000;
const fs = require('fs');

// apply the JSON parser to this route so req.body gets populated
app.post("/gen/file", jsonParser, function (req, res, next) {
    console.log(req.body); // { file: 'data:application/pdf;base64,...' }
    let file = req.body.file;
    let base64 = file.match(/^data:([A-Za-z-+\/]+);base64,(.+)$/);
    var buffer = Buffer.from(base64[2], 'base64');
    fs.writeFile(__dirname + "/out.pdf", buffer, function (err) {
        if (err) console.log(err);
        res.sendStatus(err ? 500 : 200); // let the client know the upload finished
    });
});
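One thing to watch out for with this approach (an assumption based on body-parser's defaults, not something stated in the answer): a base64-encoded PDF grows by roughly a third and can easily exceed the JSON parser's default 100 kb limit, so you may need to raise it:

// allow larger JSON bodies so bigger base64-encoded files are accepted
const jsonParser = bodyParser.json({ limit: '10mb' });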

How to render response after stream finished pipe?

This is what I want: the server is a file server, and when a client asks for a certain file, it streams that file back. Instead of koa-static, I tried to do this myself, but dest.jpg only contains 'Not found'.
client code:
var Koa = require('koa');
var Router = require('koa-router');
const HttpStatus = require('http-status');
const fs = require('fs');
const koaBody = require('koa-body');
const request = require('request');

const tempSaveFile = fs.createWriteStream('dest.jpg');
const writeStream = request.post('http://localhost:3456/getfile/src.jpg').pipe(tempSaveFile);
writeStream.on('finish', () => {
    tempSaveFile.end();
    console.log('Upload successful! ');
});
server code:
var Koa = require('koa');
var Router = require('koa-router');
const HttpStatus = require('http-status');
const fs = require('fs');
const koaBody = require('koa-body');
var app = new Koa();
var router = new Router();
const serve = require('koa-static');

router
    .post([`/getfile/:fileName`],
        (ctx) => {
            const { params: { fileName } } = ctx;
            console.error(`------- server will return ${fileName} --------`);
            const readStream = fs.createReadStream(fileName).pipe(ctx.res);
            readStream.on('finish', () => {
                console.error('---- server finished stream ----');
                ctx.status = HttpStatus.OK;
            });
        });

app.use(router.routes());
app.use(router.allowedMethods());
app.listen(3456);
When I change the server to use koa-static, the client gets the file successfully; I did a diff and the files look the same.
I suspect the server returns too fast, before the stream finishes, but another post said this is the way to wait for a pipe to finish:
callback to handle completion of pipe
Any suggestions? Thanks!
OK, I added async/await and it is working now:
const Koa = require('koa');
const Router = require('koa-router');
const HttpStatus = require('http-status');
const fs = require('fs');
const multiparty = require('multiparty');
const multer = require('koa-multer');
const request = require('request');

var app = new Koa();
var router = new Router();
const serve = require('koa-static');

const streamEnd = fd => new Promise((resolve, reject) => {
    fd.on('end', () => { console.error('-- 51 --'); resolve(51); });
    fd.on('finish', () => { console.error('-- 53 --'); resolve(53); });
    fd.on('error', reject);
});

router
    .get([`/getfile/:fileName`],
        async (ctx) => {
            const { params: { fileName } } = ctx;
            console.error(`------- server will return ${fileName} --------`);
            if (fs.existsSync(fileName)) {
                const readStream = fs.createReadStream(fileName).pipe(ctx.res);
                await streamEnd(readStream);
                ctx.status = HttpStatus.OK;
            } else {
                console.error(`File ${fileName} does not exist`);
                ctx.status = HttpStatus.INTERNAL_SERVER_ERROR;
            }
        });

app.use(serve('./customer'));
app.use(router.routes());
app.use(router.allowedMethods());
app.listen(3456);
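For what it's worth, a more idiomatic Koa version (a sketch, not taken from the post above) is to assign the read stream to ctx.body and let Koa pipe it and finish the response itself, instead of writing to ctx.res directly:

router.get('/getfile/:fileName', async (ctx) => {
    const { fileName } = ctx.params;
    if (fs.existsSync(fileName)) {
        ctx.status = HttpStatus.OK;
        // Koa pipes a stream body to the underlying response on its own
        ctx.body = fs.createReadStream(fileName);
    } else {
        ctx.status = HttpStatus.NOT_FOUND;
    }
});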

Node JS Stream Only works on first iteration

I have a simple Node app that parses a CSV file into a string. In my server file, I call a module that makes a stream and pipes it into my parser.
The problem is that this code works perfectly the first time it is run, but fails after that. I get a "write after end" error, so I believe the stream or parser variable is not being reset properly after each use. Thanks for any help!
const express = require('express');
const app = express();
const path = require('path');
const port = process.env.PORT || 3000;
const formidable = require('formidable');
const parser = require('./csvparse.js');
const fs = require('fs');

//send the index page on a get request
app.listen(port, () => console.log('Example app listening on port: ' + port));
app.get('*', (req, res) => res.sendFile(path.join(__dirname + "/index.html")));

app.post('/upload', function (req, res) {
    //upload the file from the html form
    var form = new formidable.IncomingForm();
    form.parse(req, function (err, fields, files) {
        if (err) throw err;
        //get the path to the uploaded file for the parser to use
        var filePath = files.spinFile.path;
        parser(filePath, function (data) {
            if (data == null) {
                return res.sendFile(path.join(__dirname + "/index.html"));
            }
            res.send("<code>" + data + "</code>");
        });
    });
});
The module export function looks like this:
module.exports = function(filePath, cb) {
var stream = fs.createReadStream(filePath);
stream.pipe(parser);
//when the stream is done, songsLog is complete and ready to be returned
stream.on('close', function() {
cb(songsLog);
});
};
Try wrapping the contents of your module.exports in another function, so a fresh read stream is created on every call:
module.exports = {
    parse: function (filepath, cb) {
        const stream = fs.createReadStream(filepath);
        // etc...
    }
};
then from your route, call parser.parse('file.txt') and you should have a new read stream.
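A hedged guess at the underlying cause (not stated in the answer): a "write after end" error usually means the same parser stream instance is being reused across requests; once the first pipe finishes, that stream has ended and cannot be written to again. Creating a fresh parser inside the exported function avoids this. A rough sketch, where createParser() and parser.result are placeholders for however csvparse.js actually builds its parser and accumulates songsLog:

module.exports = function (filePath, cb) {
    // build a brand-new parser stream for every call instead of sharing one
    const parser = createParser(); // placeholder for the real parser construction
    const stream = fs.createReadStream(filePath);
    stream.pipe(parser);
    stream.on('close', function () {
        cb(parser.result); // placeholder for the accumulated output (songsLog)
    });
};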

Nodejs project not starting/hangs

I have a Node.js project running on CentOS 7.
I'm trying to run it with the command node start.js (I also tried sudo node start.js), but it just hangs. I included a screenshot, which you can find here: http://prntscr.com/g4ogcf
Can anybody help me with this? I'm quite clueless about it because it doesn't return any errors either.
My start.js application code:
const app = require('express')();
const fs = require('fs');
const server = require('http').Server(app);
const cors = require('cors');
const express = require('express');
const Database = require('./Database.js');
const db = new Database();
const cfg = require('./config/config.json');
const io = require('socket.io')(cfg.client.socket_port);
const Router = require('./Router')(db, io);

app.listen(cfg.client.server_port);
app.use(cors());

app.get('/', (req, res) => {
    fs.readFile(`${__dirname}/../public/client/app/index.html`, (err, data) => {
        if (err)
            throw err;
        res.send(data.toString());
    });
});

app.get('/media', (req, res) => {
    if (req.query.cfg) {
        res.set('Content-Type', 'application/javascript');
        fs.readFile(`${__dirname}/config/config.json`, (err, data) => {
            if (err)
                throw err;
            res.send(`export default ${JSON.stringify(JSON.parse(data).client)}`);
        });
    }
    res.set('Content-Type', 'image/jpeg');
    if (req.query.avatar) {
        db.getAvatarImg(req.query.avatar).then(x => {
            var img = x[0] ? x[0].img : '';
            const data = img.replace(/^data:image\/\w+;base64,/, "");
            const buf = new Buffer(data, 'base64');
            res.send(buf);
        });
    }
    if (req.query.background) {
        db.getBackgroundImg(req.query.background).then(x => {
            var img = x[0] ? x[0].img : '';
            const data = img.replace(/^data:image\/\w+;base64,/, "");
            const buf = new Buffer(data, 'base64');
            res.send(buf);
        });
    }
    if (req.query.post) {
        db.getPostImg(req.query.post).then(x => {
            var img = x[0] ? x[0].img : '';
            const data = img.replace(/^data:image\/\w+;base64,/, "");
            const buf = new Buffer(data, 'base64');
            res.send(buf);
        });
    }
});

app.use(express.static(`${__dirname}/../public/`));
Put
console.time('app started');
before all of the require statements and
console.timeEnd('app started');
at the end of the start.js code. With this you can check whether your application hung during startup or initialized successfully.
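A small sketch of where those calls would go (placement assumed from the suggestion above, not from the original post):

console.time('app started');
const app = require('express')();
// ... all the other require statements and setup from start.js ...
console.timeEnd('app started');

If 'app started' is never printed, the hang is happening inside one of the requires or the setup code (for example the database or socket.io initialization) rather than in a route handler.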
Nothing is wrong with the program.
Just update app.listen(cfg.client.server_port); to
app.listen(cfg.client.server_port, () => {
    console.log("server started at port " + cfg.client.server_port);
});
This will log a line on the console telling you that the server has started.
