Node JS stream only works on first iteration

I have a simple Node app that parses a CSV file into a string. In my server file, I call a module that creates a stream and pipes it into my parser.
The problem is that the code works perfectly the first time it is run, but fails after that. I've gotten a "Write after End" error, so I believe the stream or parser variable is not being reset properly after each use. Thanks for any help!
const express = require('express');
const app = express();
const path = require('path');
const port = process.env.PORT || 3000;
const formidable = require('formidable');
const parser = require('./csvparse.js');
const fs = require('fs');

//send the index page on a get request
app.listen(port, () => console.log('Example app listening on port: ' + port));
app.get('*', (req, res) => res.sendFile(path.join(__dirname + "/index.html")));

app.post('/upload', function(req, res) {
    //upload the file from the html form
    var form = new formidable.IncomingForm();
    form.parse(req, function(err, fields, files) {
        if (err) throw err;
        //get the path to the uploaded file for the parser to use
        var filePath = files.spinFile.path;
        parser(filePath, function(data) {
            if (data == null) {
                //return so we don't send two responses
                return res.sendFile(path.join(__dirname + "/index.html"));
            }
            res.send("<code>" + data + "</code>");
        });
    });
});
The module export function looks like this:
module.exports = function(filePath, cb) {
    var stream = fs.createReadStream(filePath);
    stream.pipe(parser);
    //when the stream is done, songsLog is complete and ready to be returned
    stream.on('close', function() {
        cb(songsLog);
    });
};

Try wrapping the contents of your module.exports in another function.
module.exports = function(filepath, cb) {
    function parse(filepath) {
        const stream = fs.createReadStream(filepath)
        // etc...
    }
    return {
        parse
    }
}
Then, from your route, call parser.parse('file.txt') and you should get a new read stream each time.
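The root cause of the "Write after End" error is that the parser stream is created once, when the module is first required, and every request pipes into that same instance; once it has ended, it cannot be written to again. A minimal sketch of csvparse.js that builds a fresh parser per call (makeParser is a hypothetical stand-in for however you construct your CSV parser stream):

const fs = require('fs');

module.exports = function(filePath, cb) {
    // Build a brand-new parser stream on every call so nothing is
    // shared between requests. makeParser() is a placeholder for your
    // actual CSV parser construction.
    const parserStream = makeParser();
    const rows = [];
    parserStream.on('data', (row) => rows.push(row));
    parserStream.on('end', () => cb(rows));   // parsing finished
    parserStream.on('error', () => cb(null)); // signal failure to the route
    fs.createReadStream(filePath).pipe(parserStream);
};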


How to use Node to read PUT file?

I'm trying to replicate the functionality of bashupload.com, but using Node. I want the simplicity of just doing curl host -T file, but I ran into some problems because I can't seem to understand how to read the PUT file. curl uses a PUT request when you use the -T option, so it has to be PUT.
I tried using packages like multiparty:
receiveAndUploadFile: function (req, res) {
    var multiparty = require('multiparty');
    var form = new multiparty.Form();
    // var fs = require('fs');
    form.parse(req, function(err, fields, files) {
        console.log('The files', files)
        console.log('The fields', fields)
    })
    res.send("Okay, bye.")
}
But this prints undefined values for files and fields.
I also tried using the express-fileupload middleware:
app.use(fileUpload({}));
but still, if I try to print req.files then I will get undefined.
Is there any specific way to read the file?
Thanks a lot!
This is my main file, index.js:
const express = require("express");
const path = require("path");
const app = express();
const port = 8080;
const tools = require("./tools");
const fileUpload = require("express-fileupload");
app.use(fileUpload());
app.use(express.static(__dirname + "/assets"));
app.get("/", (req, res) => {
res.sendFile(path.join(__dirname + "/index.html"));
});
app.get("/f", (req, res) => {
res.send("This route is only available as a POST route.");
});
app.put("/f", tools.receiveAndUploadFile);
app.listen(port, () => {
console.log(`Server started listening on port: ${port}`);
});
And the tools.js file:
const fs = require("fs");
const path = require("path");
module.exports = {
receiveAndUploadFile: function (req, res) {
console.log("Files: ", req.files);
res.send("Okay bye");
},
};
This is printing "Files: undefined" to the console.
A PUT and a POST are effectively the same thing here: both carry a request body. The multipart parsers come up empty because curl -T does not send a multipart form at all; it sends the raw file contents as the request body. To upload arbitrary data, just read the request stream and write it to a file. Node provides a .pipe method on streams to easily pipe data from one stream into another, for example into a file stream here:
const fs = require('fs')
const express = require('express')

const app = express()
const PORT = 8080

app.get('/*', (req, res) => res.status(401).send(req.url + ': This route is only available as a PUT route'))

app.put('/*', function (req, res) {
    console.log('Now uploading', req.url, ':', req.get('content-length'), 'bytes')
    req.pipe(fs.createWriteStream(__dirname + req.url))
    req.on('end', function () { // done reading the request body
        res.sendStatus(200)
        console.log('Uploaded!')
    })
})

app.listen(PORT, () => console.log('Started on :' + PORT))
If you do a PUT to /file.mp4, it will write all the uploaded data to the script dir (__dirname) plus the URL file path.
Via curl: curl http://localhost:8080/ -T hello.txt
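One caveat with the sketch above: the request's 'end' event fires when the incoming data has been read, not necessarily when it has been flushed to disk. A variant (same assumptions as above) that responds on the write stream's 'finish' event instead, with basic error handling:

app.put('/*', function (req, res) {
    const out = fs.createWriteStream(__dirname + req.url)
    req.pipe(out)
    out.on('finish', () => res.sendStatus(200)) // all bytes flushed to disk
    out.on('error', () => res.sendStatus(500))  // e.g. permission or disk errors
})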

How to update only one key in a JSON file with node.js

I am making an API for my Minecraft server and have gotten as far as making the JSON file update with whatever I send in a POST request. I would like to know if it is possible to update only one key of the JSON file.
This is my current code:
var fs = require('fs');
var fileName = './serverStatus.json';
var file = require(fileName);

const express = require('express');
const bodyParser = require('body-parser');
const app = express();
app.use(bodyParser.urlencoded({ extended: false }));
app.use(bodyParser.json());
const cors = require('cors');
const { fileURLToPath } = require('url');

app.get('/status', alldata);
function alldata(request, response) {
    response.send(file);
}

app.post('/status', (req, res) => {
    if (!req.is('application/json')) {
        res.status(500);
        res.send('500 - Server Error');
    } else {
        res.status(201);
        fs.writeFile(
            fileName,
            JSON.stringify(req.body, null, 4),
            function writeJSON(err) {
                if (err) return console.error(err);
                console.log(JSON.stringify(file));
                console.log('writing to ' + fileName);
            }
        );
        res.send(file);
    }
});

const PORT = process.env.PORT || 3000;
app.listen(PORT, () =>
    console.log(`Server running on: http://localhost:${PORT}`)
);
and my JSON file:
{
    "lobby": "offline",
    "survival": "offline",
    "creative": "offline"
}
Thanks in advance!
You could use fs.readFileSync (or fs.readFile) to read the file content.
Then update your JSON content, such as jsonData["survival"] = "online".
Finally, write the content back to the file with fs.writeFile. (See note-1)
See the following example code.
const fs = require("fs");
// 1. get the json data
// This is string data
const fileData = fs.readFileSync("./serverStatus.json", "utf8")
// Use JSON.parse to convert string to JSON Object
const jsonData = JSON.parse(fileData)
// 2. update the value of one key
jsonData["survival"] = "online"
// 3. write it back to your json file
fs.writeFile("./serverStatus.json", JSON.stringify(jsonData))
Note-1: Because the data is stored in a file, you need to write the whole content back whenever you update it.
Also, if you want to serve the latest file content after writing new data, you should read the file again with fs.readFileSync, like the following code, to avoid returning stale in-memory data.
app.get('/status', alldata);
function alldata(request, response) {
    const fileContent = fs.readFileSync(fileName, "utf8");
    const fileJsonContent = JSON.parse(fileContent)
    // do other stuff
    response.send(fileContent);
}
var fs = require('fs');
const express = require('express');
const bodyParser = require('body-parser');
var fileName = './serverStatus.json';

const app = express();
app.use(bodyParser.urlencoded({ extended: false }));
app.use(bodyParser.json());
// maybe use this instead of bodyParser:
//app.use(express.json());
const cors = require('cors');
const { fileURLToPath } = require('url');

app.get('/status', alldata);
function alldata(request, response) {
    // read the file fresh so the response reflects the latest writes
    response.send(JSON.parse(fs.readFileSync(fileName, 'utf8')));
}

app.post('/status', (req, res) => {
    if (!req.is('application/json')) {
        res.status(500);
        res.send('500 - Server Error');
    } else {
        // read full config file:
        var src = fs.readFileSync(fileName, 'utf8');
        // convert src json text to js object
        var srcObj = JSON.parse(src);
        // req.body has already been parsed into an object by bodyParser.json()
        var reqObj = req.body;
        // update the src with the new stuff in the req
        for (var prop in reqObj) {
            srcObj[prop] = reqObj[prop];
        }
        // update any additional things you want to do manually like this
        srcObj.bob = "creep";
        // convert the updated src object back to JSON text
        var updatedJson = JSON.stringify(srcObj, null, 4);
        // write the updated src back down to the file system
        fs.writeFile(fileName, updatedJson, function (err) {
            if (err) {
                return console.error(err);
            }
            console.log(updatedJson);
            console.log('updated ' + fileName);
        });
        res.status(201);
        res.send(updatedJson);
    }
});

const PORT = process.env.PORT || 3000;
app.listen(PORT, () =>
    console.log(`Server running on: http://localhost:${PORT}`)
);
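For completeness, a hypothetical curl call to exercise the POST route above (the key matches the sample JSON file):

curl -X POST http://localhost:3000/status \
    -H "Content-Type: application/json" \
    -d '{"survival": "online"}'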

How to read file content in Node.js and convert the data to JSON?

I am trying to send data from a .json file as a response with Node.js. I am pretty new to it, and I don't know how to handle the Buffer.
This is what I did:
const express = require('express');
const fs = require('fs');
const path = require('path');
const bodyParser = require('body-parser');

const app = express();
const port = 3000;

app.use(bodyParser.urlencoded({extended: false}));

app.use('/', (req, res, next) => {
    fs.readFile(path.join(__dirname, 'data', 'filename.json'), (err, content) => {
        res.send(JSON.stringify(content));
    })
});

app.listen(port, () => {
    console.log(`server is running on port: ${port}`)
});
I expect to get the data in JSON format, but what I get is a Buffer or just numbers. I guess I'm not getting some concepts right.
Save the buffer into a variable, call its toString() method, and then use JSON.parse on the result.
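A minimal sketch of that approach inside the route handler, using res.json (which sets the Content-Type header for you) instead of res.send:

fs.readFile(path.join(__dirname, 'data', 'filename.json'), (err, content) => {
    if (err) return res.status(500).send(err.message);
    // content is a Buffer; convert it to a string, then parse
    const data = JSON.parse(content.toString());
    res.json(data);
});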
What you want to do is specify the encoding like so:
fs.readFile(path.join(__dirname, 'data', 'filename.json'), 'utf8', (err, content) => {
    res.setHeader('Content-Type', 'application/json');
    res.send(content); // content will be a string
})
Otherwise, according to the documentation, you will get a Buffer.

Not able to see ipfs node id

Below is my app.js file. When I run the following code it shows "swarm listening on" and the file path, and then nothing happens.
I am also running the daemon in another command prompt, with the API listening on 5001.
I think the node is not getting initiated, and that is why it never becomes ready.
var express = require('express');
var app = express();
var ipfsAPI = require('ipfs-api')
var ipfs = ipfsAPI('localhost', '5001', {protocol: 'http'})
const IPFS = require('ipfs');

// Spawn your IPFS node \o/
const node = new IPFS();

var multer = require('multer');
var upload = multer({ dest: './z000000000'});
var fs = require('fs');

/** Permissible loading a single file,
the value of the attribute "name" in the form of "recfile". **/
var type = upload.single('filer');

app.post('/upload', type, function (req, res) {
    /** When using the "single"
    data come in "req.file" regardless of the attribute "name". **/
    var tmp_path = req.file.path;
    console.log(tmp_path);

    node.on('ready', () => {
        node.id((err, id) => {
            if (err) {
                return console.log(err)
            }
            console.log(id)
        })
        var data = fs.readFileSync('./' + tmp_path);
        console.log("Synchronous read: " + data.toString());
        let files = [
            {
                path: './' + tmp_path,
                content: data
            }
        ]
        node.files.add(files, function (err, files) {
            if (err) {
                console.log(err);
            } else {
                console.log(files)
            }
        })
    })

    res.end();
});

var server = app.listen(8081, function () {
    var host = server.address().address
    var port = server.address().port
    console.log("Example app listening at http://%s:%s", host, port)
})
Use ipfsAPI:
var ipfs = ipfsAPI('localhost', '5001', {protocol: 'http'})
To get the ID of the node:
ipfs.id()
    .then(res => {
        showStatus(`daemon active\nid: ${res.ID}`, COLORS.success)
        doYourWork()
    })
    .catch(err => {
        showStatus('daemon inactive', COLORS.error)
        console.error(err)
    })
Find Documentation here: https://github.com/ipfs/js-ipfs-api
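Since the daemon is already listening on port 5001, the upload itself can also go through that same HTTP client instead of the in-process node. A sketch mirroring the files.add call from the question, but against the ipfs client (tmp_path as in the question's handler):

var data = fs.readFileSync(tmp_path);
ipfs.files.add([{ path: tmp_path, content: data }], function (err, files) {
    if (err) {
        return console.log(err);
    }
    console.log(files); // entries describe what was added (path, hash, size)
});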

File download is not working with nodejs gridfs

I'm uploading images with gridfs-stream using Node and Express. Uploading is working fine, but I am unable to download.
app.post('/upload', function (req, res) {
    var tempfile = req.files.displayImage.path;
    var origname = req.files.displayImage.name;
    var _id = guid();
    var writestream = gfs.createWriteStream({
        filename: _id
    });
    // open a stream to the temporary file created by Express...
    fs.createReadStream(tempfile)
        .on('end', function () {
            res.send(_id);
        })
        .on('error', function () {
            res.send('ERR');
        })
        // and pipe it to gfs
        .pipe(writestream);
});
app.get('/download', function (req, res) {
    // TODO: set proper mime type + filename, handle errors, etc...
    gfs
        // create a read stream from gfs...
        .createReadStream({
            filename: req.param('filename')
        })
        // and pipe it to Express' response
        .pipe(res);
});
The above code is unable to download the image via this request: download?filename=acf58ae4-c853-f9f3-5c66-c395b663298a
You might need to check your values in params. But hopefully this near-minimal sample provides some help:
Update
And it has helped, because it highlights that you are looking up the _id as a filename. Instead you should be doing this:
.createReadStream({
    _id: req.param('filename')
})
or, if that does not work:
.createReadStream({
    _id: mongoose.Types.ObjectId(req.param('filename'))
})
The _id field is different from the filename.
app.js
var express = require('express');
var routes = require('./routes');
var http = require('http');
var path = require('path');

var app = express();
var mongoose = require('mongoose');
var Grid = require('gridfs-stream');
Grid.mongo = mongoose.mongo;

var conn = mongoose.createConnection('mongodb://localhost/mytest');
conn.once('open', function() {
    console.log('opened connection');
    gfs = Grid(conn.db);

    // all environments
    app.set('port', process.env.PORT || 3000);
    app.use(express.logger('dev'));
    app.use(app.router);

    // development only
    if ('development' == app.get('env')) {
        app.use(express.errorHandler());
    }

    app.get('/', routes.index);

    http.createServer(app).listen(app.get('port'), function(){
        console.log('Express server listening on port ' + app.get('port'));
    });
});
routes/index.js
exports.index = function(req, res){
    res.set('Content-Type', 'image/jpeg');
    gfs.createReadStream({
        filename: 'receptor.jpg'
    }).pipe(res);
};
