How to SSH into a server from a node.js application? - linux

var exec = require('child_process').exec;
exec('ssh my_ip', function(err, stdout, stderr) {
  console.log(err, stdout, stderr);
});
This just freezes, I guess because ssh my_ip asks for a password and is interactive. How do I do this correctly?

There's a Node.js module called ssh2, by mscdex, written to perform SSH tasks from Node. It can be found here. An example of what you want (from the readme) would be:
var Connection = require('ssh2');

var c = new Connection();
c.on('connect', function() {
  console.log('Connection :: connect');
});
c.on('ready', function() {
  console.log('Connection :: ready');
  c.exec('uptime', function(err, stream) {
    if (err) throw err;
    stream.on('data', function(data, extended) {
      console.log((extended === 'stderr' ? 'STDERR: ' : 'STDOUT: ')
                  + data);
    });
    stream.on('end', function() {
      console.log('Stream :: EOF');
    });
    stream.on('close', function() {
      console.log('Stream :: close');
    });
    stream.on('exit', function(code, signal) {
      console.log('Stream :: exit :: code: ' + code + ', signal: ' + signal);
      c.end();
    });
  });
});
c.on('error', function(err) {
  console.log('Connection :: error :: ' + err);
});
c.on('end', function() {
  console.log('Connection :: end');
});
c.on('close', function(had_error) {
  console.log('Connection :: close');
});
c.connect({
  host: '192.168.100.100',
  port: 22,
  username: 'frylock',
  privateKey: require('fs').readFileSync('/here/is/my/key')
});
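If you authenticate with a password rather than a key, the ssh2 connect config also accepts a password field; a minimal variation of the connect call above (the host and credentials are placeholders):
c.connect({
  host: '192.168.100.100',
  port: 22,
  username: 'frylock',
  // assumption: password-based auth instead of the privateKey shown above
  password: 'my-secret-password'
});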

The other library on this page has a lower-level API, so I've written a lightweight wrapper for it: node-ssh, which is also available on GitHub under the MIT license.
Here's an example of how to use it.
var driver, ssh;
driver = require('node-ssh');
ssh = new driver();
ssh.connect({
  host: 'localhost',
  username: 'steel',
  privateKey: '/home/steel/.ssh/id_rsa'
})
.then(function() {
  // Source, Target
  ssh.putFile('/home/steel/.ssh/id_rsa', '/home/steel/.ssh/id_rsa_bkp').then(function() {
    console.log("File Uploaded to the Remote Server");
  }, function(error) {
    console.log("Error here");
    console.log(error);
  });
  // Command
  ssh.exec('hh_client', ['--check'], { cwd: '/var/www/html' }).then(function(result) {
    console.log('STDOUT: ' + result.stdout);
    console.log('STDERR: ' + result.stderr);
  });
});
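For reference, the same flow can be restructured with async/await using exactly the node-ssh calls shown above; a sketch:
const driver = require('node-ssh');
const ssh = new driver();

async function main() {
  await ssh.connect({
    host: 'localhost',
    username: 'steel',
    privateKey: '/home/steel/.ssh/id_rsa'
  });
  // Source, Target
  await ssh.putFile('/home/steel/.ssh/id_rsa', '/home/steel/.ssh/id_rsa_bkp');
  console.log("File Uploaded to the Remote Server");
  // Command
  const result = await ssh.exec('hh_client', ['--check'], { cwd: '/var/www/html' });
  console.log('STDOUT: ' + result.stdout);
  console.log('STDERR: ' + result.stderr);
}

main().catch(console.error);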

The best way is to use promisify and async/await. Example:
const { promisify } = require('util');
const exec = promisify(require('child_process').exec);

export default async function (req, res) {
  const { username, host, password } = req.query;
  const { command } = req.body;
  let output = {
    stdout: '',
    stderr: '',
  };
  try {
    output = await exec(`sshpass -p ${password} ssh -o StrictHostKeyChecking=no ${username}@${host} ${command}`);
  } catch (error) {
    output.stderr = error.stderr;
  }
  return res.status(200).send({ data: output, message: 'Output from the command' });
}
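Because the command string above is built by template interpolation, a password or command containing shell metacharacters can break it. A variant using execFile with an argument array avoids the shell entirely; this is just a sketch, assuming sshpass and ssh are installed on the machine running Node (runRemote is a hypothetical helper name):
const { promisify } = require('util');
const execFile = promisify(require('child_process').execFile);

async function runRemote({ username, host, password, command }) {
  // Arguments are passed as an array, so no shell parsing or quoting is involved.
  return execFile('sshpass', [
    '-p', password,
    'ssh', '-o', 'StrictHostKeyChecking=no',
    `${username}@${host}`,
    command,
  ]);
}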


A pure JS/Node way to SSH into hosts. Special thanks to ttfreeman. This assumes SSH keys are already set up for the host. No need for a request object.
const { promisify } = require('util');
const exec = promisify(require('child_process').exec);
require('dotenv').config();

// SSH into host and run CMD; assumes key-based auth so no password prompt appears
const ssh = async (command, host) => {
  let output = {};
  try {
    // exec resolves with an object containing stdout and stderr
    const { stdout, stderr } = await exec(`ssh ${host} ${command}`);
    output.stdout = stdout;
    output.stderr = stderr;
  } catch (error) {
    output.stderr = error.stderr;
  }
  return output;
};

exports.ssh = ssh;
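A hypothetical usage of the helper above, assuming the file is saved as ssh.js and the target host accepts your SSH key without prompting (host and user are placeholders):
const { ssh } = require('./ssh'); // path is an assumption

ssh('uptime', 'deploy@example.com').then((output) => {
  if (output.stderr) console.error('STDERR: ' + output.stderr);
  if (output.stdout) console.log('STDOUT: ' + output.stdout);
});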

Related

Why can't I use imap to read email messages multiple times?

I have the following Node/TypeScript process running on a Mac.
ip-192-168-1-3:~ reza.razavipour$ uname -a
Darwin ip-192-168-1-3.us-west-2.compute.internal 18.7.0 Darwin Kernel Version 18.7.0: Thu Jan 23 06:52:12 PST 2020; root:xnu-4903.278.25~1/RELEASE_X86_64 x86_64
ip-192-168-1-3:~ reza.razavipour$ node -v
v12.14.0
Relevant dependencies from package.json:
"imap": "^0.8.19",
"typescript": "^3.9.7",
I have a Node server that is supposed to read emails; it is called from a specific route. Nothing fancy.
The following code is in a .ts file and it exports one function, namely:
export const connect = () => {
  imap.connect()
}
The first time I call this function, all is well: I connect, I read the specific email, and there are no issues.
The next time I call the exported function, nothing happens.
Is there some cleanup I am not doing, or what am I missing?
const Imap = require('imap');
const MailParser = require('mailparser').MailParser;
import { logger } from '../util/logger';

var imap = new Imap({
  user: 'user',
  password: 'pw',
  host: 'host',
  port: 32,
  tls: true,
  tlsOptions: {rejectUnauthorized: false}
});

function openInbox(cb) {
  imap.openBox('INBOX', false, cb);
}

function processMessage(msg, seq) {
  logger.debug('Processing msg #' + seq);
  const parser = new MailParser();
  parser.on('data', data => {
    if (data.type === 'attachment') {
      console.log(data.filename);
      // data.content.pipe(process.stdout);
      data.content.on('end', () => data.release());
    }
  });
  msg.on('body', stream => {
    stream.on('data', function(chunk) {
      parser.write(chunk.toString('utf8'));
    });
  });
  msg.once('end', () => {
    logger.debug('Finished msg #' + seq);
    parser.end();
  });
}

imap.once('ready', function() {
  openInbox((err, box) => {
    try {
      if (err) {
        logger.error(err);
        imap.end();
        return;
      }
      imap.search(['ALL', ['SUBJECT', '32']], (err, results) => {
        if (err) {
          logger.error(err);
          imap.end();
          return;
        }
        if (!results || !results.length) {
          logger.error('No unread mails');
          imap.end();
          return;
        }
        var f = imap.fetch(results, {bodies: ''});
        results.forEach(result => {
          const f = imap.fetch(result, {bodies: ''});
          f.on('message', processMessage);
          f.once('error', (err) => {
            logger.error('Fetch error: ' + err);
          });
          f.once('end', () => {
            logger.debug('Done fetching all unseen messages.');
            imap.end();
          });
        });
      });
    } catch (e) {
      logger.error(e);
    }
  });
});

imap.once('error', (err) => {
  logger.error(err);
});
imap.once('end', () => {
  logger.debug('Connection ended');
});

export const connect = () => {
  imap.connect()
}
It turns out, at least in my experimentation, that once an Imap instance has connected and ended, it cannot be reused for follow-on connections.
The only change I made was to create a new Imap object and use it for the next connection.
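A minimal sketch of that change, assuming the same config as above (the buildImap factory name is just for illustration): construct a fresh Imap instance inside connect instead of reusing a single module-level one.
// Hypothetical factory: create a fresh Imap instance per connection,
// reusing the same config as above, instead of one module-level `imap`.
const buildImap = () => new Imap({
  user: 'user',
  password: 'pw',
  host: 'host',
  port: 32,
  tls: true,
  tlsOptions: { rejectUnauthorized: false }
});

export const connect = () => {
  const imap = buildImap();
  // ...attach the 'ready', 'error' and 'end' handlers shown above...
  imap.connect();
};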

node read stream a file from remote computer using ssh

I am trying to read data as a stream from a remote server log file, which is continuously growing, and display the new lines on my local console. I am using SSH to connect from my local machine to the remote server.
I found the solution below on GitHub, which writes local file content to a remote file, but I want it the other way around. I can't work out how to reverse the direction.
var Connection = require('ssh2');
var fs = require('fs');
var BufferedStream = require('buffered-stream');

console.log('sshstream started');
connectSSH();

function readFile(filepath, startOffset, outputStream) {
  var fileSize = fs.statSync(filepath).size;
  var length = fileSize - startOffset;
  var myFD = fs.openSync(filepath, 'r');
  var readable = fs.createReadStream(filepath, {
    fd: myFD, start: startOffset, end: fileSize, autoClose: true
  });
  return readable;
}

function connectSSH() {
  var c = new Connection();
  c.on('connect', function() {
    console.log('Connection :: connect');
  });
  c.on('ready', function() {
    console.log('Connection :: ready');
    c.shell('', function(err, stream) {
      if (err) throw err;
      stream.on('data', function(data, extended) {
        console.log((extended === 'stderr' ? 'STDERR: ' : 'STDOUT: ')
                    + data);
      });
      stream.on('end', function(code, signal) {
        console.log('Stream :: EOF', code, signal);
      });
      stream.on('close', function(code, signal) {
        console.log('Stream :: close', code, signal);
      });
      stream.on('exit', function(code, signal) {
        console.log('Stream :: exit :: code: ' + code + ', signal: ' + signal);
        c.end();
      });
      stream.on('drain', function() {
        console.log('Stream :: drain');
      });
      var bufferStream = new BufferedStream(4*1024*1024);
      bufferStream.pipe(stream);
      stream.write('cat - >> /home/username/mylog.log');
      readable = readFile('test.log', 0, bufferStream);
      readable.once('end', function() {
        console.log("ENDED");
      });
      readable.pipe(bufferStream).pipe(stream);
    });
  });
  c.on('error', function(err) {
    console.log('Connection :: error :: ' + err);
  });
  c.on('end', function() {
    console.log('Connection :: end');
  });
  c.on('close', function(had_error) {
    console.log('Connection :: close');
  });
  c.connect({
    host: 'xx.xxx.xx.xxx',
    port: 22,
    username: 'username',
    password: 'password'
  });
}
Please suggest a way to do this.
Thanks
This works and it could be customized as you wish.
'use strict';
const SSH = require('simple-ssh');

function run(sshConfig, script) {
  return new Promise((resolve, reject) => {
    let scriptOutput = '';
    const sshFtw = new SSH(sshConfig);
    sshFtw.exec(script, {
      // accumulate stdout so the promise resolves with the full output,
      // while still echoing each chunk to the console
      out: (stdout) => {
        scriptOutput += stdout;
        console.log(stdout);
      }
    })
    .on('error', (err) => reject(err))
    .on('close', () => resolve(scriptOutput))
    .start();
  });
}

run({
  "host": "1.2.3.4",
  "user": "my-user",
  "pass": "my-psw"
}, 'tail -f /home/my-app/log/api-out.log');

Create a Node SSH2 server with the ability to handle remote forwarding

After some research on Stack Overflow, Google, and the official Node SSH2 repository, I still cannot create a Node SSH2 server that works with remote port forwarding...
I can already do what I want with the standard SSH daemon on a distant server and this command on the client side:
ssh -R 8100:localsite.tld:80 sub.distantserver.com
All traffic from sub.distantserver.com:8100 is redirected to localsite.tld:80 (port 22, traditional SSH daemon).
My only goal is to achieve the same thing with a Node SSH2 server on port 21:
ssh -R 8100:localsite.tld:80 foo@sub.distantserver.com -p 21
password: bar
Once it works, I can do some checks on the user and start some other processes ;)
Based on the official GitHub issues example, I tried something like the following, just as a POC to catch the stream, but it fails because forwardIn does not exist.
var fs = require('fs');
var crypto = require('crypto');
var inspect = require('util').inspect;
var buffersEqual = require('buffer-equal-constant-time');
var ssh2 = require('ssh2');
var utils = ssh2.utils;

new ssh2.Server({
  hostKeys: [fs.readFileSync('/etc/ssh/ssh_host_rsa_key')]
}, function(client) {
  console.log('Client connected!');
  client.on('authentication', function(ctx) {
    if (ctx.method === 'password'
        && ctx.username === 'foo'
        && ctx.password === 'bar')
      ctx.accept();
    else
      ctx.reject();
  }).on('ready', function() {
    console.log('Client authenticated!');
    client.on('session', function(accept, reject) {
      var session = accept();
      session.once('exec', function(accept, reject, info) {
        console.log('Client wants to execute: ' + inspect(info.command));
        var stream = accept();
        stream.stderr.write('Oh no, the dreaded errors!\n');
        stream.write('Just kidding about the errors!\n');
        stream.exit(0);
        stream.end();
      });
    });
    client.on('request', function(accept, reject, name, info) {
      console.log(info);
      if (name === 'tcpip-forward') {
        accept();
        setTimeout(function() {
          console.log('Sending incoming tcpip forward');
          client.forwardIn(info.bindAddr,
                           info.bindPort,
                           function(err, stream) {
            if (err)
              return;
            stream.end('hello world\n');
          });
        }, 1000);
      } else {
        reject();
      }
    });
  });
}).listen(21, '0.0.0.0', function() {
  console.log('Listening on port ' + this.address().port);
});
Does anybody know how to achieve a simple, conventional SSH forward on the server side?
Thanks!
Found a solution with the author's help:
Official GitHub solution on the issue
let fs = require('fs'),
    inspect = require('util').inspect,
    ssh2 = require('ssh2'),
    net = require('net');

new ssh2.Server({
  hostKeys: [fs.readFileSync('/etc/ssh/ssh_host_rsa_key')]
}, client => {
  console.log('Client connected!');
  client
    .on('authentication', ctx => {
      if (
        ctx.method === 'password'
        && ctx.username === 'foo'
        && ctx.password === 'bar'
      ) {
        ctx.accept();
      } else {
        ctx.reject();
      }
    })
    .on('ready', () => {
      console.log('Client authenticated!');
      client
        .on('session', (accept, reject) => {
          let session = accept();
          session.on('shell', function(accept, reject) {
            let stream = accept();
          });
        })
        .on('request', (accept, reject, name, info) => {
          if (name === 'tcpip-forward') {
            accept();
            // For each incoming TCP connection on the bound port, open a
            // forwarded channel back to the SSH client and pipe them together.
            net.createServer(function(socket) {
              socket.setEncoding('utf8');
              client.forwardOut(
                info.bindAddr, info.bindPort,
                socket.remoteAddress, socket.remotePort,
                (err, upstream) => {
                  if (err) {
                    socket.end();
                    return console.error('not working: ' + err);
                  }
                  upstream.pipe(socket).pipe(upstream);
                });
            }).listen(info.bindPort);
          } else {
            reject();
          }
        });
    });
}).listen(21, '0.0.0.0', function() {
  console.log('Listening on port ' + this.address().port);
});

Entering password programmatically in ssh2 nodejs

I'm using the ssh2 Node.js client (https://github.com/mscdex/ssh2).
I'm trying to do the following:
1. SSH into the box.
2. Log in to docker.
3. It will re-prompt for a password. Enter that password.
I'm failing on the 3rd step.
This is my code:
var Client = require('ssh2').Client;

var conn = new Client();
conn.on('ready', function() {
  console.log('Client :: ready');
  conn.exec('sudo docker ps', {pty: true}, function(err, stream) {
    if (err) throw err;
    stream.on('close', function(code, signal) {
      conn.end();
      // data comes here
    }).on('data', function(data) {
      console.log('STDOUT: ' + data);
    }).stderr.on('data', function(data) {
      console.log('STDERR: ' + data);
    });
    // stream.end(user.password+'\n');
    // ^^ If I do this, it will work, but I won't be able to do anything else afterwards.
  });
}).connect({
  host: 'demo.landingpage.com',
  username: 'demo',
  password: 'testuser!'
});
How do I enter the password programmatically? (I'm already using {pty: true} when calling conn.exec.)
Please enlighten!
Assuming your stream is a duplex stream, you can write into the stream without ending it:
.on('data', (data) => {
  stream.write(user.password + '\n');
})
or you can use a callback function:
function write(data, cb) {
  if (!stream.write(data)) {
    stream.once('drain', cb);
  } else {
    process.nextTick(cb);
  }
}
and
.on('data', (data) => {
  write(user.password + '\n', () => {
    console.log('done');
  });
});
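Note that both snippets above write the password on every 'data' event; in practice you usually want to watch for the sudo password prompt and send the password only once. A minimal sketch along those lines, assuming the same conn.exec('sudo docker ps', {pty: true}, ...) call and user object as in the question:
conn.exec('sudo docker ps', { pty: true }, function(err, stream) {
  if (err) throw err;
  let passwordSent = false;
  stream.on('data', function(data) {
    const text = data.toString();
    // Send the password once when sudo prompts for it; log everything else.
    if (!passwordSent && /password/i.test(text)) {
      stream.write(user.password + '\n');
      passwordSent = true;
    } else {
      console.log('STDOUT: ' + text);
    }
  }).on('close', function() {
    conn.end();
  });
});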

node.js mariasql return undefined

I believe I have a problem with the syntax.
In the function xx, the return value is undefined :(.
Here is the problem in one file.
var Client = require('mariasql');
var inspect = require('util').inspect;

var c = new Client();
c.connect({
  host: '127.0.0.1',
  user: 'root',
  password: '38nudel5nu',
  db: 'artikel2'
});

var login = function() {
  console.log("LOGIN\n");
  c.on('connect', function() {
    console.log('Client connected');
  })
  .on('error', function(err) {
    console.log('Client error: ' + err);
  })
  .on('close', function(hadError) {
    console.log('Client closed');
  });
}

var end = function() {
  console.log("EXIT");
  c.end();
}

login();

var xx = function() {
  c.query("SELECT COUNT(ArtikelID) AS Count FROM artikel")
    .on('result', function(res) {
      res.on('row', function(row) {
        return "YOLO";
      })
      .on('error', function(err) {
      })
      .on('end', function(info) {
      });
    })
    .on('end', function() {
    });
}

var autohaus = xx();
console.log("\n\n --> " + autohaus);
And here is the Output:
[cseipel@myhost testumgebung]$ node skript.js
LOGIN

 --> undefined
Client connected
You're using an asynchronous function as if it were synchronous. That's not going to work. You need to pass in a callback to your ArtikelCount function and call the callback once you have the results you want (the typical convention for callbacks is to have the first argument be an error if an error occurred, otherwise it should be null).
Example:
var ArtikelCount = function(cb) {
  var count,
      error;
  c.query('SELECT COUNT(ArtikelID) AS Count FROM artikel')
    .on('result', function(res) {
      res.on('row', function(row) {
        count = row.Count;
      })
      .on('error', function(err) {
        console.log('Result error: ' + inspect(err));
        error = err;
      })
      .on('end', function(info) {
        console.log('Result finished successfully');
      });
    })
    .on('end', function() {
      console.log('Done with all results');
      cb(error, count);
    });
}
Then use it like:
wc.ArtikelCount(function(err, count) {
  if (err)
    throw err;
  else
    console.log('Row count', count);
});
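If you prefer promises, the same callback-based ArtikelCount can be wrapped; a minimal sketch (the wrapper name is just for illustration):
// Hypothetical Promise wrapper around the callback-based ArtikelCount above.
function artikelCountAsync() {
  return new Promise(function(resolve, reject) {
    ArtikelCount(function(err, count) {
      if (err) reject(err);
      else resolve(count);
    });
  });
}

artikelCountAsync()
  .then(function(count) { console.log('Row count', count); })
  .catch(function(err) { console.error(err); });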
