I have to build a custom logger that logs information about each request it receives. I have to use: Agent, Time, Method, Resource, Version, Status. I think I have already created my logger and the fields I want to log. Now I have to expose an endpoint http://localhost:3000/logs that will return a JSON object with all the logs, but I don't know how to do it. Help!
const express = require('express');
const fs = require('fs');
const app = express();

// Column order of every record in log.csv.
const LOG_FIELDS = ['Agent', 'Time', 'Method', 'Resource', 'Version', 'Status'];

// Logging middleware: append one CSV line per incoming request.
app.use((req, res, next) => {
  // req.headers is a plain object — index it, don't call it.
  const agent = req.headers['user-agent'];
  const time = new Date().toISOString();
  const method = req.method;
  const resource = req.originalUrl;
  const version = 'HTTP/' + req.httpVersion;
  const status = res.statusCode;
  // Comma-separate the fields and end with a newline so the file is valid CSV.
  const line = [agent, time, method, resource, version, status].join(',') + '\n';
  fs.appendFile('./log.csv', line, (err) => {
    // Delegate to Express error handling; throwing inside this async
    // callback would crash the process instead of returning a 500.
    if (err) return next(err);
    console.log(line.trim());
    next();
  });
});

app.get('/', (req, res) => {
  res.status(200).send('Ok');
});

// GET /logs — return every logged request as a JSON array of objects.
app.get('/logs', (req, res) => {
  fs.readFile('log.csv', 'utf8', (err, data) => {
    if (err) return res.status(500).json({ error: 'could not read log file' });
    const logs = data
      .split('\n')
      .filter((row) => row.length > 0)
      .map((row) => {
        const values = row.split(',');
        const entry = {};
        LOG_FIELDS.forEach((field, i) => { entry[field] = values[i]; });
        return entry;
      });
    res.json(logs);
  });
});

module.exports = app;
Check this library. You can use it for csvtojson conversion.
Anyway, consider that if the CSV file grows very large, reading the whole file and converting it to JSON will be overkill. Consider a database for a scalable solution.
Related
I have a small web service that basically receives a PUT and saves the payload to a file. It can happen that the file write fails due to permission issues. I would like that to be communicated back to the client.
To make it easier to read I have boiled the program down to a minimum. The req.pipe chain is way longer in the real program. (with many more possibilities for errors)
const fs = require('fs');
const express = require('express');
const app = express();
// PUT /write/:id — stream the request body straight to a file on disk.
app.put('/write/:id', (req, res, next) => {
const filename = 'data/' + req.params.id;
console.log("write: " + filename);
// .pipe() returns the destination, so this 'error' handler IS attached to
// the write stream. The problem is below: 'end' fires on the request as
// soon as the body has been fully read, regardless of whether the file
// write later fails — which is why "saved" is always sent.
req
.pipe(fs.createWriteStream(filename))
.on('error', next)
req.on('end', () => {
res.send('saved\n' );
console.log("sent response");
})
});
app.listen(8081, '0.0.0.0');
Trouble is that no matter what I do it will always respond "saved" to the client. I had kinda hoped the next call would have got me out of there.
What is the elegant way to make differentiated responses on errors occurring server side?
several hours later it seems I nailed it:
const fs = require('fs');
const express = require('express');
const app = express();

// PUT /write/:id — stream the request body to disk and reply only once the
// write side has finished (or failed).
app.put('/write/:id', (req, res, next) => {
  const filename = 'data/' + req.params.id;
  console.log("write: " + filename);
  // Name the destination stream so success/failure handlers are explicitly
  // attached to the file writer, not the request.
  const sink = fs.createWriteStream(filename);
  sink.on('error', (e) => {
    console.log("error ", e);
    res.status(400).send("failed");
  });
  // 'close' on the write stream means the file is fully flushed.
  sink.on('close', () => {
    res.send("saved\n");
  });
  req.pipe(sink);
});

app.listen(8081, '0.0.0.0');
Notice how I'm listening for close within the pipeline and not end on the request itself.
I'm trying to read a video file, given the file name as a request parameter to an Express router endpoint. However, only the console.logging of params happens and I never see the video file data logged to the console. What am I doing wrong here? Any helpful tip is highly appreciated.
var express = require("express");
var fs = require('fs')
var path = require('path')
var router = express.Router();
// GET /:file — intended to read a file from /uploads and send it back.
router.get("/:file", (req, res) => {
console.log(req.params.file)
// BUG: fs.readFileSync is synchronous and takes (path, options) — it does
// NOT accept a callback, so this function argument is never invoked (and
// recent Node versions throw because it is an invalid options value).
// Either use fs.readFile for the callback style, or use readFileSync's
// return value directly.
fs.readFileSync(path.resolve('/uploads', './'+req.params.file), function (err, data){
if (!err) {
console.log("d: ",data);
res.send(data)
} else {
console.log(err);
}
});
});
You don't need to provide a callback to fs.readFileSync, it will return the data once it has completed.
Be aware: if the file size is large, this will block the main thread and would not be considered good practice.
You'd use it like so:
var express = require("express");
var fs = require('fs')
var path = require('path')
var router = express.Router();

// GET /:file — synchronously read a file from /uploads and send it back.
router.get("/:file", (req, res) => {
  try {
    console.log(req.params.file)
    // path.basename strips any "../" segments, so clients cannot traverse
    // outside /uploads via a crafted file name.
    const safeName = path.basename(req.params.file);
    let data = fs.readFileSync(path.resolve('/uploads', safeName));
    console.log("d: ", data);
    res.send(data)
  } catch (err) {
    console.error(err);
    // A missing file is a client problem (404); anything else is ours (500).
    if (err.code === 'ENOENT') {
      res.status(404).send("Not found");
    } else {
      res.status(500).send("Something bad happened");
    }
  }
});
If you wish to do this asynchronously using fs.readFile, I would modify your code like so:
var express = require("express");
var fs = require('fs')
var path = require('path')
var router = express.Router();

// GET /:file — asynchronously read a file from /uploads and send it back.
router.get("/:file", (req, res) => {
  console.log(req.params.file)
  // path.basename prevents "../" path traversal out of /uploads.
  const safeName = path.basename(req.params.file);
  fs.readFile(path.resolve('/uploads', safeName), function (err, data){
    if (!err) {
      console.log("d: ",data);
      res.send(data)
    } else {
      console.log(err);
      // Always answer the client; without this the request hangs forever
      // on an error (the original bug: only logging, never responding).
      res.status(err.code === 'ENOENT' ? 404 : 500).end();
    }
  });
});
I can upload a file via Postman and download a file from the server in two different services. But what I need is: in a single call, I should be able to upload the file to the server, then perform some operation, and after performing that operation the file should be downloaded automatically.
Here is my code.
My firsts service(file upload operation)
var express = require('express');
var fs = require('fs');
var formidable = require('formidable');
var router = express.Router();

/* POST / — receive a multipart upload and store it under ./file. */
router.post('/', function(req, res, next) {
  var form = new formidable.IncomingForm();
  form.uploadDir="./file"
  form.keepExtensions=true;
  form.maxFileSize=10*1024*1024;   // 10 MiB cap
  form.multiples=false;
  form.parse(req, function (err, fields, files) {
    // Surface parse/size-limit errors instead of reporting success anyway.
    if (err) return next(err);
    res.write('File uploaded');
    res.end();
  });
});
module.exports = router;
Download service
var express = require('express');
var router = express.Router();

/* GET / — stream the processed file back under the name ENC.txt. */
/* (The duplicate require('express') in the original was removed.)   */
router.get('/', function(req, res, next) {
  var file = './file/myOutput.txt';
  var name = 'ENC.txt'
  // Forward transfer errors (e.g. missing file) to Express error handling
  // instead of leaving the client with a half-finished response.
  res.download(file, name, function (err) {
    if (err && !res.headersSent) next(err);
  });
});
module.exports = router;
Now i need to make this two service as one?
var express = require('express');
var formidable = require('formidable');
var app=express();
// Placeholder for the real post-upload processing step.
// Resolves to true on success so callers can gate follow-up work (e.g. the
// download) on the result, and to false when no parameters were supplied.
// The original returned undefined in all cases, so `if (data)` in the
// caller could never pass and the download never happened.
async function calculation(parameters)
{
if(parameters)
{
//Here you can do calculation depending upon parameter values
return true;
}
else
{
//Display error or as per your choice
return false;
}
}
// Serve the upload form.
app.get('/', (req, res) => {
  res.sendFile(__dirname + '/index.html');
});

// Run the post-upload calculation, then stream the stored file back to the
// client as a download. `form` is accepted for backward compatibility but
// is not used.
async function cal(res, file, form)
{
  try {
    // Await the processing step; it rejects on failure.
    await calculation(true);
    res.set({
      'Location': __dirname + '/index.html',
    });
    // Must match the storage path set in the fileBegin handler below —
    // note the explicit '/' separator (__dirname does not end with one).
    res.download(__dirname + '/' + file.name);
  }
  catch (error) {
    console.log(error);
    // Without this the client would hang forever on a failure.
    if (!res.headersSent) res.status(500).end();
  }
}

// POST / — parse the multipart upload, store the file, then trigger cal().
app.post('/', function (req, res) {
  var form = new formidable.IncomingForm();
  form.parse(req);
  form.on('fileBegin', function (name, file) {
    // Insert the missing path separator so the file lands inside the
    // application directory instead of being glued onto its name
    // (e.g. "/home/appfoo.txt" instead of "/home/app/foo.txt").
    file.path = __dirname + '/' + file.name;
    console.log("Uploading");
  });
  form.on('file', function (name, file) {
    console.log('Uploaded ', file.name);
    cal(res, file);
  });
});
Hope it helps
I have a simple node app that parses a csv file into a string. In my server file, I call a module that runs makes a stream and pipes it into my parser.
The problem is that this code works perfectly the first time it is run, but fails after that. I've gotten a "Write after End" error, so I believe there is something wrong with the stream or parser variable not being reset properly after each use. Thanks for any help!
const express = require('express');
const app = express();
const path = require('path');
const port = process.env.PORT || 3000;
const formidable = require('formidable');
const parser = require('./csvparse.js');
const fs = require('fs');

//send the index page on a get request
app.listen(port, () => console.log('Example app listening on port: ' + port));
app.get('*', (req, res) => res.sendFile(path.join(__dirname + "/index.html")));

// POST /upload — store the uploaded file, parse it, and echo the result.
app.post('/upload', function(req, res) {
  //upload the file from the html form
  var form = new formidable.IncomingForm();
  form.parse(req, function(err, fields, files) {
    // Respond with an error instead of throwing; a throw inside this async
    // callback cannot be caught by Express and would crash the process.
    if (err) return res.status(500).send("upload failed");
    //get the path to the uploaded file for the parser to use
    var filePath = files.spinFile.path;
    parser(filePath, function(data) {
      if (data == null) {
        // `return` prevents falling through and sending a second response
        // (the original sent both the file AND the <code> body).
        return res.sendFile(path.join(__dirname + "/index.html"));
      }
      res.send("<code>" + data + "</code>");
    });
  });
});
The module export function looks like this:
// Parse the CSV at filePath and invoke cb with the accumulated result.
// NOTE(review): `parser` and `songsLog` are module-level singletons defined
// elsewhere in this file. Because the SAME `parser` stream instance is piped
// into on every call, the second request writes to an already-ended stream —
// the likely source of the "write after end" error. Create a fresh parser
// (and reset songsLog) per call instead; see the suggestion below.
module.exports = function(filePath, cb) {
var stream = fs.createReadStream(filePath);
stream.pipe(parser);
//when the stream is done, songsLog is complete and ready to be returned
stream.on('close', function() {
cb(songsLog);
});
};
Try wrapping the contents of your module.exports in another function.
// Factory-style suggestion: build a fresh read stream (and parser) on every
// call so consecutive requests never reuse an ended stream.
// (Sketch only — `etc...` is a placeholder, not runnable code.)
module.exports = function(filepath, cb) {
function parse(filepath) {
const stream = fs.createReadStream(filepath)
etc...
}
return {
parse
}
}
then from your route, call parser.parse('file.txt') and you should have a new read stream.
I have a problem when I use formidable's parse function. In my project, I use httpsys (not the built-in http module) to create the server (for port sharing), and then I send a POST request with multipart form data (including a string and a zip file). I then want to use formidable to parse the request body, but the parse callback is never called and there is no error. I do not use an Express application, but I use an Express Router to route my requests. I already added an error handler to catch errors, but it is never called (form.on('error', function(err) { console.log(err); });). Has anyone had the same problem? Please help me out — thanks in advance.
// main.js
// NOTE(review): relies on `express`, `querystring`, and `testRequestHandler`
// being required elsewhere in this file (not shown here).
var router = express.Router();
// Unescape every incoming header value and strip '+' characters before routing.
// NOTE(review): rewriting ALL headers like this also mutates Content-Type;
// a multipart boundary string can legitimately contain '+', and corrupting
// the boundary would explain formidable's parse callback never firing —
// confirm the Content-Type header survives this loop intact.
router.use(function (req, res, next) {
for (var i in req.headers) {
req.headers[i] = querystring.unescape(req.headers[i]);
req.headers[i] = req.headers[i].replace(/\+/g, "");
}
next();
});
//router.use(bodyParser());
router.post('/TestServer/' + 'TestRequest', function(req, res) {
testRequestHandler.execute(req, res);
});
// httpsys replaces Node's built-in HTTP stack with Windows HTTP.SYS
// (enables port sharing); it takes a URL prefix rather than a port number.
var server = require('httpsys').http().createServer(router);
var port = '80'; // or other port
var listeningPort = 'http://localhost:' + port + '/TestServer/';
server.listen(listeningPort );
// In testRequestHandler
// Handle an incoming multipart POST: parse it with formidable, saving any
// uploaded files under uploadDir and logging progress.
// NOTE(review): `Formidable`, `uploadDir`, and `Constants` come from this
// module's imports (not shown here).
var execute = function(req, res) {
var form = new Formidable.IncomingForm();
form.uploadDir = uploadDir.getPath();
form.encoding = Constants.ENCODING_UTF8;
form.on('file', function(name, file) {console.log('file='+file);});
form.on('error', function(err) { console.log(err); }); // reported never called
form.on('aborted', function() { console.log('Aborted'); });
// NOTE(review): if neither this callback nor the error handler fires, the
// request stream is probably not emitting body data — possibly an httpsys
// incompatibility with Node streams, or headers mutated upstream in main.js;
// verify with a raw req.on('data', ...) listener.
form.parse(req, function(err, fields, files) {
//todo test code
console.log( "parse finished" );
});
}