I am using the boilerplate code of mean.io and starting my server with the command:
node server.js
How do I log stdout and stderr of my Express application?
Here's my file server.js:
'use strict';
/**
* Module dependencies.
*/
var mongoose = require('mongoose'),
passport = require('passport'),
logger = require('mean-logger');
/**
* Main application entry file.
* Please note that the order of loading is important.
*/
// Initializing system variables
var config = require('./server/config/config');
var db = mongoose.connect(config.db);
// Bootstrap Models, Dependencies, Routes and the app as an express app
var app = require('./server/config/system/bootstrap')(passport, db);
// Start the app by listening on <port>, optional hostname
app.listen(config.port, config.hostname);
console.log('Mean app started on port ' + config.port + ' (' + process.env.NODE_ENV + ')');
// Initializing logger
logger.init(app, passport, mongoose);
// Expose app
exports = module.exports = app;
What about console.log and console.error? They write to stdout and stderr respectively:
console.log("I will goto the STDOUT");
console.error("I will goto the STDERR");
Note: both of these functions automatically add new line to your input.
If you don't want those newlines appended to your input, do this
process.stdout.write("I will goto the STDOUT")
process.stderr.write("I will goto the STDERR")
Both process.stdout and process.stderr are streams, so you can even pipe a stream into them. See Node.js docs on streams for more info.
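For example, a minimal sketch (the file name access.log is just a placeholder) that pipes a readable file stream straight into stdout:
var fs = require('fs');
// Stream the contents of a file to the terminal, roughly equivalent to `cat access.log`
fs.createReadStream('./access.log').pipe(process.stdout);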
You can do this by writing to the stdout and stderr streams:
process.stdout.write('Hello')
or
process.stderr.write('Error')
Better still, use a third-party logging module like winston or bunyan.
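For instance, a minimal winston sketch (winston 3.x API; the file name combined.log is just an example):
const winston = require('winston');
// Log to the console and to a file at the same time
const logger = winston.createLogger({
  level: 'info',
  transports: [
    new winston.transports.Console(),
    new winston.transports.File({ filename: 'combined.log' })
  ]
});
logger.info('I will go to the console and to combined.log');
logger.error('So will I, tagged with level "error"');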
The only way I can think of to do this is to spawn the app as a child process (akin to the fork system call), whose stdout and stderr you can then redirect to files:
var fs = require('fs');
var out = fs.openSync('./output.log', 'a'),
    err = fs.openSync('./error.log', 'a');
require('child_process').spawn('./server', [], {
    detached: true,
    stdio: ['ignore', out, err]
});
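Applied to the server.js from the question, a minimal wrapper sketch could look like the following (the wrapper file name and log paths are assumptions):
// launch.js - hypothetical wrapper that starts server.js detached,
// appending its stdout and stderr to log files
var fs = require('fs');
var out = fs.openSync('./output.log', 'a');
var err = fs.openSync('./error.log', 'a');
var child = require('child_process').spawn('node', ['server.js'], {
    detached: true,
    stdio: ['ignore', out, err] // child stdout -> output.log, stderr -> error.log
});
child.unref(); // let the wrapper exit while the server keeps running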
My application is simple and I want to avoid using a logging library like Winston. I need to log the output both to the console and to a file. I found a few tutorials on how to do this using a child process, such as this, but I can't find anything that leverages the main process's stdio, like process.stdout and process.stderr.
The key to solving this was recognizing that process.stdout and process.stderr are writable streams, whereas a child process's stdout/stderr exposed by the child_process module are readable streams (thanks to this article). So I needed to create both a readable and a writable file stream, and pipe the readable streams out to process.stdout and process.stderr. You could probably simplify this even further with a duplex stream, but for noobs like myself, this is a straightforward and easy-to-read approach.
const { Console } = require("console")
, process = require("process")
, path = require("path")
, fs = require('fs');
// Define the file paths to log to
const outputFilePath = path.join(__dirname, './stdout.log');
const errorFilePath = path.join(__dirname, './stderr.log');
// Create the empty files synchronously to guarantee it exists prior to stream creation.
// Change flag to 'w' to overwrite rather than append.
fs.closeSync(fs.openSync(outputFilePath, 'a+'));
fs.closeSync(fs.openSync(errorFilePath, 'a+'));
// Create a writable file stream for both stdout and stderr
const fileWriterOut = fs.createWriteStream(outputFilePath);
const fileWriterErr = fs.createWriteStream(errorFilePath);
// Create a new Console object using the file writers
const Logger = new Console({ stdout: fileWriterOut, stderr: fileWriterErr });
// Create readable file streams for process.stdio to consume
const fileReaderOut = fs.createReadStream(outputFilePath);
const fileReaderErr = fs.createReadStream(errorFilePath);
// Pipe out the file reader into process stdio
fileReaderOut.pipe(process.stdout);
fileReaderErr.pipe(process.stderr);
// Test the new logger
Logger.log("Logger initialized");
// Export
module.exports = Logger;
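Any other module can then reuse it, for example (a hypothetical consumer file):
// app.js - hypothetical consumer of the exported Logger
const Logger = require('./logger');
// These calls write to stdout.log / stderr.log; the pipes set up in
// logger.js relay the file contents back out to the terminal.
Logger.log('something worth keeping');
Logger.error('something that went wrong');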
I have a Node application with an Express server. I also have Node scripts in a server folder. During certain events I need to get data from separate Node scripts, so I create a child process.
Without arguments everything works fine, but I need to pass some data from the parent process.
var express = require('express');
var router = express.Router();
var child_process = require('child_process');
router.get('/:site/start', function(req, res, next) {
    const basedir = req.app.get('basedir');
    const child_script_path = basedir + '/scripts/script.js';
    const child_argv = [
        '--slowmo=0',
        '--headless=1'
    ];
    child = child_process.fork(child_script_path, {
        execArgv: child_argv
    });
    ...
});
When I try to pass arguments and run the script through Express, these errors are shown:
/home/user/.nvm/versions/node/v8.9.4/bin/node: bad option: --slowmo=0
/home/user/.nvm/versions/node/v8.9.4/bin/node: bad option: --headless=1
But when I run the script from the command line like:
node /scripts/script.js --slowmo=0 --headless=1
I get no errors and the script picks the args up from the command line.
How can I pass args to child script in this situation?
Ubuntu 16.04
Node 8.9.4
Express 4.15.5
The execArgv option is used to pass arguments to the node executable itself, not to your script.
This can be useful for controlling the execution environment of your forked process.
If you want to pass arguments to your script, use the args parameter instead:
child_process.fork(modulePath[, args][, options])
Example:
const child_process = require('child_process');
const child_script_path = './script.js';
const child_argv = [
'--foo',
'--bar'
]
const child_execArgv = [
'--use-strict'
]
let child = child_process.fork(child_script_path, child_argv, {
execArgv: child_execArgv // script.js will be executed in strict mode
})
// script.js
console.log(process.argv[2], process.argv[3]) // --foo --bar
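Applied to the route handler from the question, the fix is just to move the flags into the args parameter (a sketch; it assumes script.js parses --slowmo and --headless itself):
var express = require('express');
var router = express.Router();
var child_process = require('child_process');
router.get('/:site/start', function(req, res, next) {
    const basedir = req.app.get('basedir');
    const child_script_path = basedir + '/scripts/script.js';
    // Script arguments go in the second (args) parameter, not in execArgv
    const child = child_process.fork(child_script_path, [
        '--slowmo=0',
        '--headless=1'
    ]);
    // ...
});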
I want to get logs when something goes wrong in my Electron app in production mode, i.e. after shipping the .exe file to a user on the Windows platform.
How do I go about it? How can I write my errors to a file, ideally one that is cyclic (rotated) in nature?
Take a look at electron-log:
// Setup file logging
const log = require('electron-log');
log.transports.file.level = 'info';
log.transports.file.file = __dirname + '/log.log';
// Log a message
log.info('log message');
EDIT:
As mentioned in the comments, log.transports.file.file is deprecated.
Instead, I would suggest using the following method:
log.transports.file.resolvePath = () => __dirname + "/log.log";
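Regarding the "cyclic" part of the question: electron-log's file transport also has a maxSize option (in bytes); once the file grows past it, the old file is archived and a fresh one is started. A one-line sketch (the 5 MB limit is just an example):
log.transports.file.maxSize = 5 * 1024 * 1024; // archive and restart the log at ~5 MB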
Create a file next to your electron.js called logger.js:
const path = require("path");
const log = require('electron-log');
log.transports.file.resolvePath = () => path.join(__dirname, '/logsmain.log');
log.transports.file.level = "info";
exports.log = (entry) => log.info(entry)
Then, in your app:
const logger = require('electron').remote.require('./logger');
logger.log("some text")
Please have a look here:
https://www.electronjs.org/docs/api/net-log
const { app, netLog } = require('electron')
app.whenReady().then(async () => {
await netLog.startLogging('/path/to/net-log')
// After some network events
const path = await netLog.stopLogging()
console.log('Net-logs written to', path)
})
I'm trying to spawn a PowerShell process on Windows from Node.js, to which I can send some commands and read the output. I've done that with the stdio configuration from the parent Node process, but I want to have it separated, and I can't get any of this to work correctly.
In this code I was hoping to see the output of the $PSTableVersion PowerShell variable in the psout.txt file, but it never gets written.
The simple code is:
var express = require('express'),
    fs = require('fs'),
    spawn = require('child_process').spawn;
var err = fs.openSync('pserr.txt', 'w');
var out = fs.openSync('psout.txt', 'w');
var writer = fs.createWriteStream(null, {
fd: fs.openSync('psin.txt', 'w')
});
var child = spawn("powershell.exe", ["-Command", "-"], {
detached: true,
stdio: [writer, out, err]
});
writer.write('$PSTableVersion\n');
writer.end();
child.unref();
// just wait
var app = express();
app.listen(3000);
Update
I've tried with the code on this node github issue tracker: https://github.com/joyent/node/issues/8795
It seems calling a .NET executable vs. calling a native executable are different things...
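For reference, a sketch of an alternative that keeps the streams as pipes inside Node and writes the child's output to the files manually (this was not part of the original attempt, and whether it satisfies the "separated" requirement is an open question):
var fs = require('fs');
var spawn = require('child_process').spawn;
var child = spawn('powershell.exe', ['-Command', '-']);
// Relay the child's output into log files instead of handing it raw file descriptors
child.stdout.pipe(fs.createWriteStream('psout.txt'));
child.stderr.pipe(fs.createWriteStream('pserr.txt'));
// Send a command on stdin, then close it so powershell exits
// ($PSVersionTable is PowerShell's built-in version table)
child.stdin.write('$PSVersionTable\n');
child.stdin.end();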
How can I interact with multiple console windows from one node.js script?
So far I have researched a bit and have not found anything that covers my case.
What I want to accomplish is to have one main console window which reads my input,
1. action#1
2. action#2
> do 1 // select action
and redirects the output to another console window, named Logger, which shows the stdout of the action the user selected, while keeping the main "select action" console window clean.
Well, I managed to find a way around it, since I wanted to stay with node.js all the way.
start.js
var cp = require("child_process");
// Open a new console window running logger.js, and another one running startAdminInterface.js
cp.exec('start "Logger" cmd /K node logger.js');
cp.exec('start cmd /K node startAdminInterface.js');
setTimeout(function(){ process.exit(0); }, 2000);
logger.js
var net = require('net');
net.createServer(function (socket) {
socket.on('data',function(d){
console.log(": "+d.toString("utf8"));
});
socket.on('error',function(err){
console.log("- An error occured : "+err.message);
});
}).listen(9999);
startAdminInterface.js
var net = require("net");
var logger = net.connect(9999);
var readline = require('readline'),
rl = readline.createInterface(process.stdin,process.stdout);
rl.setPrompt('> ');
rl.prompt();
rl.on('line', function(line) {
logger.write(line);
rl.prompt();
}).on('close', function() {
process.exit(0);
});
Bottom line: it's a workaround, not exactly what I was after, but I saw potential. logger.js could listen to multiple sources, which is an enormous plus in the application that I'm building.
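For example, a small sketch of that idea (not part of the original setup): each connecting process sends its name as the first chunk, and logger.js prefixes every subsequent line with that source.
// logger.js - sketch with named sources
var net = require('net');
net.createServer(function (socket) {
    var name = null;
    socket.on('data', function (d) {
        var text = d.toString('utf8');
        if (name === null) {
            // Treat the first chunk a client sends as its name
            name = text.trim();
            console.log('+ ' + name + ' connected');
            return;
        }
        console.log('[' + name + '] ' + text);
    });
    socket.on('error', function (err) {
        console.log('- An error occurred: ' + err.message);
    });
}).listen(9999);
startAdminInterface.js would then only need to send its name right after connecting, e.g. logger.write('admin\n'), before sending normal lines.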