Node.js function to generate RSA keys using OpenSSL - node.js

I'm using Node.js as a server-side language and I would like to generate an RSA key pair for every user who registers on my website. I'm using a module called keypair. It works fine for small key sizes, but generating 2048-bit keys takes very long, so I would like to call OpenSSL directly from Node.js via Node's child_process, as in the script below:
var cp = require('child_process')
  , assert = require('assert');

var privateKey, publicKey;
publicKey = '';

cp.exec('openssl genrsa 2048', function(err, stdout, stderr) {
  assert.ok(!err);
  privateKey = stdout;
  console.log(privateKey);

  var makepub = cp.spawn('openssl', ['rsa', '-pubout']);
  makepub.on('exit', function(code) {
    assert.equal(code, 0);
    console.log(publicKey);
  });
  makepub.stdout.on('data', function(data) {
    publicKey += data;
  });
  makepub.stdout.setEncoding('ascii');
  makepub.stdin.write(privateKey);
  makepub.stdin.end();
});
This works and is much faster at generating key pairs than the Node.js keypair module. The issue is that I don't understand this code (does it write files on the server side and read the keys back from them, or not?), and I would like to turn this script into a function that returns a JSON object or an array holding the public and private keys.
So any suggestion is welcome, thank you in advance.

Try this. I moved the code around a little. It uses a temp file, which is deleted afterwards; it could probably be done without the temp file, but this should work.
var cp = require('child_process')
  , assert = require('assert')
  , fs = require('fs')
  ;

// gen pub/priv key pair
function genKeys(cb) {
  // gen private
  cp.exec('openssl genrsa 2048', function(err, priv, stderr) {
    // tmp file
    var randomfn = './' + Math.random().toString(36).substring(7);
    fs.writeFileSync(randomfn, priv);
    // gen public
    cp.exec('openssl rsa -in ' + randomfn + ' -pubout', function(err, pub, stderr) {
      // delete tmp file
      fs.unlinkSync(randomfn);
      // callback
      cb(JSON.stringify({public: pub, private: priv}, null, 4));
    });
  });
}
genKeys(console.log);
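As noted above, the temp file can probably be avoided by piping the private key to openssl over stdin, the same way the question's original script does. A minimal sketch of that approach (the Node-style error callback is my own choice, not part of the original answer):

var cp = require('child_process');

// gen pub/priv key pair without touching the filesystem
function genKeysNoTmp(cb) {
  cp.exec('openssl genrsa 2048', function(err, priv) {
    if (err) return cb(err);
    // derive the public key by writing the private key to openssl's stdin
    var makepub = cp.execFile('openssl', ['rsa', '-pubout'], function(err, pub) {
      if (err) return cb(err);
      cb(null, {public: pub, private: priv});
    });
    makepub.stdin.write(priv);
    makepub.stdin.end();
  });
}

genKeysNoTmp(function(err, keys) {
  if (err) throw err;
  console.log(keys);
});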

You can simply use the small rsa-json module
It's really easy to use and it's asynchronous:
var createRsaKeys = require('rsa-json');
createRsaKeys({bits: 1024}, function(err, keyPair) {
  console.log(keyPair.private);
  console.log(keyPair.public);
});
rsa-json does not use OpenSSL's RSA_generate_key directly, but uses ssh-keygen (from OpenSSH), which is itself a wrapper around OpenSSL. There is no real security difference between the two (see this for more information).
PS: Have a look at the mere 48 lines of code that make up rsa-json.
If you really want to use OpenSSL directly, you can have a look at the ursa module, but:
It's not asynchronous.
It's not maintained; the last commit was from Dec 21, 2012.
The project is heavy; it does too many things, including sugary extras (base64 encoding, etc.).
It has an embedded C++ OpenSSL wrapper inside it, compiled during installation.
PS: keypair is implemented in pure JavaScript, which is why it's so slow. CPU-intensive work like key generation is a poor fit for Node.js, which is good at non-blocking I/O, not heavy computation.

Related

Redirect Readable object stdout process to file in node

I use an NPM library to parse markdown to HTML like this:
var Markdown = require('markdown-to-html').Markdown;
var md = new Markdown();
...
md.render('./test', opts, function(err) {
  md.pipe(process.stdout);
});
This outputs the result to my terminal as intended.
However, I need the result inside the execution of my Node program. I thought about writing the output stream to a file and reading it back in later, but I can't figure out how to write the output to a file in the first place.
I tried to play around with var file = fs.createWriteStream('./test.html');, but Node.js streams give me more headaches than results.
I've also looked into the library's repo and Markdown inherits from Readable via util like this:
var util = require('util');
var Readable = require('stream').Readable;
util.inherits(Markdown, Readable);
Any resources or advice would be highly appreciated. (I would also take another library for parsing the markdown, but this gave me the best results so far)
Actually, creating a writable file stream and piping the markdown to it should work just fine. Try it with:
const fs = require('fs');

const writeStream = fs.createWriteStream('./output.html');

// in case of errors you should handle them
writeStream.on('error', function(err) {
  console.log(err);
});

md.render('./test', opts, function(err) {
  md.pipe(writeStream);
});
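If you need the HTML in memory rather than on disk, you can also collect the readable stream into a string yourself. A rough sketch, assuming the stream emits string or buffer chunks:

md.render('./test', opts, function(err) {
  if (err) return console.error(err);
  var html = '';
  md.on('data', function(chunk) {
    html += chunk; // accumulate each chunk as it arrives
  });
  md.on('end', function() {
    // html now holds the complete rendered output
    console.log(html);
  });
});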

Is there a way to run a self-terminating js script that can pass variables to the next?

I'd really like some of my secrets/keys to be iterable, since I have a growing list of external API keys that would be easier to use if I could match them to the route being used, without having to statically map them at the start of my application.
The only way I can think of to organize them better, without writing massive one-line JSON strings in a batch/bash file, would be to define them all as JS object literals and have a script stringify them and load them into ENV variables to be passed to the application that's about to start.
NPM pre-start script:
const env = {
  secret: 'supersecret',
  key: `key
that requires
line breaks`,
  apiKeys: {
    'api-1': 'a;sodhgfasdgflksdaj;lg',
    'api-2': 'ajl;sdfj;adjsfkljasd;f'
  }
};

for (let x in env) {
  if (typeof env[x] == 'string') {
    process.env[x] = env[x];
  } else {
    process.env[x] = JSON.stringify(env[x]);
  }
  console.log(x);
}

process.exit(22);
NPM start script:
const key = process.env.key
const apiKeys = JSON.parse(process.env.apiKeys)
Unfortunately, environment variables don't persist between processes, so this is useless.
Would it also be secure to use STDIN and STDOUT to pass the data between the two scripts?
My solution ended up being to pipe the output by converting it to JSON, streaming it to STDOUT, and receiving it on STDIN in the second script. This makes it platform-agnostic, and I can add any sort of active secret management in the source script (e.g. accepting secrets from various secret-management systems/vaults, or generating new secrets at every launch).
Send to STDOUT:
const env = {
  someSecret: 'supersecret',
  superSecretObject: {
    moreProperties: 'data'
  }
};

/* If you have an array of properties or a very large set of secrets,
   you should create a readable stream from it and send that to stdout,
   but this is much simpler */
process.stdout.write(JSON.stringify(env));
Accept on STDIN:
const fs = require('fs');

const env = (function () {
  /* Using fs will error out on no input, but you can use process.stdin
     if you don't need to suspend the whole application waiting for the input */
  let envTmp = fs.readFileSync(0).toString();
  envTmp = JSON.parse(envTmp);
  return envTmp;
})();
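To wire the two together, the first script's stdout can simply be piped into the second, which replaces the prestart/start split. For example, in package.json (the script file names here are hypothetical):

"scripts": {
  "start": "node secrets.js | node app.js"
}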

ssh2-sftp-client get() request giving 'denied permission' error

I am using this code in my Electron app to connect to an SFTP server where I need to collect some data. I have no problem listing the files in the /out folder, but it fails to get the file with a 'denied permission' error. Ideally I would like to be able to get() the file and access its text data directly in the function, without storing it to a file.
let Client = require('ssh2-sftp-client');
let sftp = new Client();

var root = '/out';
var today = new Date();
var mon = ((today.getMonth() + 1) < 10) ? "0" + (today.getMonth() + 1) : (today.getMonth() + 1);
var date = (today.getDate() < 10) ? "0" + today.getDate() : today.getDate();
var fileDate = mon + date;

sftp.connect({
  host: '<server-address>',
  port: 2222,
  username: 'XXXXXXXX',
  password: 'xxxxxxxx',
  privateKey: fs.readFileSync(path.join(__dirname, '../rsa/<file-name-here>.pem'))
})
  .then(() => {
    return sftp.list(root, 'SN5M' + fileDate);
  })
  .then((fileInfo) => {
    if (fileInfo) {
      var filePath = root + '/' + fileInfo[fileInfo.length - 1].name;
      return sftp.get(filePath).then((file) => {
        console.log(file);
        event.returnValue = file;
        sftp.end();
      })
      .catch((err) => {
        console.log('File get error', err);
        event.returnValue = err;
        sftp.end();
      });
    }
  })
  .catch((err) => {
    console.log('File info error', err);
    event.returnValue = err;
    sftp.end();
  });
Try this and see if it works.
get() returns (String|Stream|Buffer):
let dst = fs.createWriteStream('/local/file/path/data.txt');
sftp.get(filePath, dst);
See https://www.npmjs.com/package/ssh2-sftp-client#orga0dfcd5
Looking at your code, you have two problems.
If you call get() with only 1 argument, it returns a buffer, not a file. To get the file, just do
client.get(sourceFilePath, localFilePath)
and the file will be saved locally as localFilePath. Both arguments are strings and need to be full paths, i.e. they must include the filename, not just the directory. The filename in the second argument can differ from the first. However, if all you want is to retrieve the file, you are better off using fastGet() rather than get(). The get() method is good for when you want to do something with the data in code, e.g. work with a buffer or pipe/process a stream. The fastGet() method is faster than get() as it does the transfer using concurrent processes, but it does not permit use of buffers or streams for further processing.
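For example, a minimal fastGet() call might look like the following (the file paths are hypothetical):

sftp.fastGet('/out/somefile.csv', './somefile.csv')
  .then(() => {
    console.log('download complete');
    return sftp.end();
  })
  .catch((err) => {
    console.log('fastGet error', err);
  });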
The error message you are seeing is either due to the way you are calling get(), or it is an indication that you don't have permission to read the file you're trying to access (as the user you're connected as). The easiest way to check this is to use the OpenSSH sftp program (available on Linux, macOS and Windows) with the key you're using (via the -i switch) to try to download the file. If it fails with a permission error, then you know it is a permission problem and not an issue with your code or the ssh2-sftp-client module.
EDIT: I just noticed you are also using both a password and a key file. You don't need both; either one will work. I tend to use a key file when possible, as it avoids having a password stored somewhere. Make sure not to add a passphrase to your key. Alternatively, you can use something like the dotenv module and store your credentials and other config in a .env file which you do not check into version control.
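To address the original goal of reading the text without saving a file: since get() with a single argument resolves to a buffer, as described above, a sketch along these lines should work:

sftp.get(filePath)
  .then((data) => {
    // data is a Buffer; decode it to text in memory
    let text = data.toString('utf8');
    console.log(text);
    return sftp.end();
  })
  .catch((err) => {
    console.log('File get error', err);
    sftp.end();
  });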

Buffer entire file in memory with Node.js

I have a relatively small file (some hundreds of kilobytes) that I want to be in memory for direct access for the entire execution of the code.
I don't know the internals of Node.js exactly, so I'm asking whether an fs open is enough, or whether I have to read the whole file and copy it into a Buffer.
Basically, you need to use the readFile or readFileSync function from the fs module. They return the complete content of the given file, but differ in their behavior (asynchronous versus synchronous).
If blocking Node.js (e.g. on startup of your application) is not an issue, you can go with the synchronous version, which is as easy as:
var fs = require('fs');
var data = fs.readFileSync('/etc/passwd');
If you need to go asynchronous, the code looks like this:
var fs = require('fs');
fs.readFile('/etc/passwd', function (err, data) {
  // ...
});
Please note that in either case you can give an options object as the second parameter, e.g. to specify the encoding to use. If you omit the encoding, the raw buffer is returned:
var fs = require('fs');
fs.readFile('/etc/passwd', { encoding: 'utf8' }, function (err, data) {
  // ...
});
Valid encodings are utf8, ascii, utf16le, ucs2, base64 and hex. There is also a binary encoding, but it is deprecated and should not be used any longer. You can find more details on how to deal with encodings and buffers in the appropriate documentation.
As easy as
var buffer = fs.readFileSync(filename);
It's possible to do this synchronously:
var fs = require('fs');
var path = require('path');

// Buffer mydata
var BUFFER = bufferFile('../public/mydata');

function bufferFile(relPath) {
  return fs.readFileSync(path.join(__dirname, relPath)); // zzzz....
}
fs is the file system. readFileSync() returns a Buffer, or string if you ask.
fs resolves relative paths against the current working directory, which may not be where your script lives; joining with __dirname via path makes the path relative to the script itself.
To load as a string, specify the encoding:
return fs.readFileSync(path.join(__dirname, relPath), { encoding: 'utf8' });

Why append rather than write when using knox / node.js to grab file from Amazon s3

I'm experimenting with the knox module for node.js as a way of managing some small files in an Amazon S3 bucket. Everything works fine stand-alone: I can upload a file, download a file, etc. However, I want to be able to download a file on a recurring schedule. When I modify the code to run on an interval, the downloaded file is appended to the previous instance instead of overwriting it.
I'm not sure if I've made a mistake in the file write code or in the knox handling code. I've tried several different write approaches (writeFile, writeStream, etc.) and I've looked at the knox source code. Nothing obvious to me stands out as a problem. Here's the code I'm using:
var knox = require('knox');
var fs = require('fs');

var downFile = DOWNFILE;
var downTxt = '';
var timer = INTERVAL;
var path = S3PATH + downFile;

setInterval(function() {
  var s3client = knox.createClient({
    key: '********************',
    secret: '**********************************',
    bucket: '********'
  });
  s3client.get(path).on('response', function(response) {
    response.setEncoding('ascii');
    response.on('data', function(chunk) {
      downTxt += chunk;
    });
    response.on('end', function() {
      fs.writeFileSync(downFile, downTxt, 'ascii');
    });
  }).end();
}, timer);
The problem is with the placement of var downTxt = '';. That is the only place you set downTxt to blank, so each time you retrieve more data you append it to the data from the previous request, which is never cleared. The simplest fix is to move that line to just before the setEncoding line.
However, the way you are processing the data is unnecessarily complicated. Try something like this instead. You don't need to recreate the client every time, and setting the encoding will just break things if you are downloading non-text files (it makes no difference for text files). Next, you shouldn't collect the data manually; you can start writing it to the file as soon as you receive it. Lastly, since the request is a standard stream, you don't need to monitor the 'data' event, because you can just use pipe.
var knox = require('knox'),
    fs = require('fs'),
    downFile = DOWNFILE,
    timer = INTERVAL,
    path = S3PATH + downFile,
    s3client = knox.createClient({
      key: '********************',
      secret: '**********************************',
      bucket: '********'
    });

(function downloadFile() {
  var str = fs.createWriteStream(downFile);
  s3client.get(path).pipe(str);
  str.on('close', function() {
    setTimeout(downloadFile, timer);
  });
})();
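One caveat worth noting: if the S3 request ever errors, the loop above stops. A rough sketch that adds basic error handling, using knox's documented request/response pattern (the retry-on-error behavior is my own assumption, not part of the answer):

(function downloadFile() {
  var req = s3client.get(path);
  req.on('response', function(res) {
    var str = fs.createWriteStream(downFile);
    res.pipe(str);
    str.on('close', function() {
      setTimeout(downloadFile, timer); // schedule the next download
    });
  });
  req.on('error', function(err) {
    console.error('S3 request failed:', err);
    setTimeout(downloadFile, timer); // retry after the usual interval
  });
  req.end(); // knox requests must be ended explicitly
})();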
