I am running this code with flightplan:
// Flightplan deploy script: push the current git-tracked files to the
// remote host(s), install production dependencies, and restart the
// app with `forever`.
var plan = require('flightplan');
var appName = 'personal-website';
var username = 'deploy';
var startFile = 'bin/www';
// Unique per-deploy folder name, e.g. personal-website-1456789012345.
var tmpDir = appName+'-' + new Date().getTime();
// configuration
// NOTE(review): no `agent: process.env.SSH_AUTH_SOCK`, key, or password
// is configured for these targets, so the SSH handshake can fail with
// "Authentication failure" unless the deploy user's public key is
// already installed on the host — confirm against the flightplan docs.
plan.target('staging', [
{
host: '104.131.153.117',
username: username,
}
]);
plan.target('production', [
{
host: '104.131.153.117',
username: username,
},
//add in another server if you have more than one
// {
// host: '104.131.93.216',
// username: username,
// agent: process.env.SSH_AUTH_SOCK
// }
]);
// run commands on localhost
plan.local(function(local) {
local.log('Copy files to remote hosts');
// `git ls-files` output is used as the transfer manifest, so only
// tracked files are copied.
var filesToCopy = local.exec('git ls-files', {silent: true});
// rsync files to all the destination's hosts
local.transfer(filesToCopy, '/tmp/' + tmpDir);
});
// run commands on remote hosts (destinations)
plan.remote(function(remote) {
remote.log('Move folder to root');
// Copy the release out of /tmp into the deploy user's home, then clean up.
remote.sudo('cp -R /tmp/' + tmpDir + ' ~', {user: username});
remote.rm('-rf /tmp/' + tmpDir);
remote.log('Install dependencies');
remote.sudo('npm --production --prefix ~/' + tmpDir + ' install ~/' + tmpDir, {user: username});
remote.log('Reload application');
// Repoint the app symlink at the new release directory (-snf: no-deref, force).
remote.sudo('ln -snf ~/' + tmpDir + ' ~/'+appName, {user: username});
// failsafe: don't abort the plan if the app was not already running.
remote.exec('forever stop ~/'+appName+'/'+startFile, {failsafe: true});
remote.exec('forever start ~/'+appName+'/'+startFile);
});
This is the error I get when I try to deploy:
Error connecting to 104.131.153.117: Error: Authentication failure. Available authentication methods: publickey,password
I have no idea what is going on. I am trying to deploy this to DigitalOcean. I am not sure what is causing this problem.
It looks like you haven't authenticated to your remote host properly. You need to add your SSH key to the remote host for password-free access.
The command to do this is
$ ssh-copy-id <user>@<host>
If you need to specify an exact key to use, use the following command
$ ssh-copy-id -i <path-to-.pub-file> <user>@<host>
Related
I finally joined the community
So for my first question in this community:
Generally I want to execute some remote operations to a remote windows machine in node.js (of course I have permissions, credential and so on to the remote machine).
Specifically, right now I'm trying to retrieve list of services from windows machine.
I've tried using the wmi-client package in order to do so:
const WmiClient = require('wmi-client');
// Query the remote Windows machine's service list over WMI.
// NOTE(review): wmi-client shells out to wmic.exe; an "Invalid Global
// Switch" error usually means one of these options (often the host or
// username form) is passed to wmic in a shape it rejects — confirm
// against the wmi-client documentation and the generated wmic command.
var wmi = new WmiClient({
username: '*****', //credentials - username
password: '*****', //credentials - password
host: '*********', // remote windows machine
});
wmi.query(`Select * from Win32_Service`, function (err, result) {
console.log(result);
});
but I keep receiving error: Exit code: 44125. Invalid Global Switch.
I'll mention that using WMI in PowerShell causes no issues for me,
but when I try to use the same technology in Node.js it fails.
What am I doing wrong? Any other suggestions?
Just to mention, when I need to retrieve the same info from a Linux machine I easily do it using the 'simple-ssh' package, without any issues:
const SSH = require('simple-ssh');

// Connect to the remote Linux machine over SSH with password auth.
var ssh = new SSH({
  host: '*******', // remote linux machine
  user: '*******', // credentials - username
  pass: '*******' // credentials - password
});

// Queue the command, then open the connection; `out` receives stdout.
ssh.exec(`systemctl list-units --full -all`, {
  out: function(stdout) {
    // stdout as expected
  }
}).start(); // fixed: the original ended with `.start()});` — the extra `});` is a syntax error
but things getting complicated when trying to do the same for windows remote machine.
any ideas?
Thank you very much!
seems like the following is working for me:
var exec = require('node-ssh-exec');

// SSH connection settings for the remote Windows machine.
var config = {
    host: '*******',
    username: '***',
    password: '***'
  },
  // `sc query` lists Windows services, analogous to systemctl on Linux.
  command = 'sc query';

// Run the command remotely; `response` holds the command output.
exec(config, command, function (error, response) {
  if (error) {
    throw error;
  }
  // Fixed: the original snippet never closed the callback or the
  // exec(...) call; print the response the prose says is "as expected".
  console.log(response);
});
the response is as expected, but for some reason the error associated with it is not empty:
{
errno: -4077,
code: "ECONNRESET",
syscall: "read",
level: "connection-socket",
}
I'm running a file watcher app as a windows service (W10) with the following code used to install the service:
// Install watcher.js as a Windows service ("SmartHR") via node-windows,
// logging on with the account from config.sqlServerLogin.
var Service = require('node-windows').Service;
const config = require('./SHR_modules/config');
// Create a new service object
var svc = new Service({
name:'SmartHR',
description: 'Smart HR file watcher',
script: require('path').join(__dirname,'watcher.js'),
workingDirectory: __dirname
});
// Listen for the "install" event, which indicates the
// process is available as a service.
svc.on('install',function(){
svc.start();
console.log('installed as user: ' + svc.logOnAs.account)
});
svc.on('uninstall',function(){
console.log('Uninstall complete.');
console.log('The service exists: ',svc.exists);
});
// Credentials are set before install() so they end up in the winsw XML.
// NOTE(review): even with a correct password here the service can fail
// to start if the generated XML lacks <allowservicelogon> (see the
// winsw.js fix discussed below) — the account is then missing the
// "Log on as a service" right.
svc.logOnAs.domain = config.sqlServerLogin.domain;
svc.logOnAs.account = config.sqlServerLogin.user;
svc.logOnAs.password = config.sqlServerLogin.password;
svc.install();
//svc.uninstall();
Running the code as an administrator and the service does install properly, but it's stopped, so when I try to start it, the message is that the service can't start for the wrong password. Username and domain are correct. If I copy/paste the password into the Services Manager from my config.js, the service starts up and runs from now on.
Why the password (whatever it is) is not passed by that line:
svc.logOnAs.password = config.sqlServerLogin.password;
correctly?
The XML generator function in the winsw.js is missing one line for the service account.
Original code:
// As shipped in winsw.js: emits <serviceaccount> with only
// domain/user/password. NOTE(review): without an <allowservicelogon>
// element the account is not granted the "Log on as a service" right,
// so the stored password fails at service start — compare the working
// version below.
if (config.logOnAs) {
xml.push({
serviceaccount: [
{domain: config.logOnAs.domain || 'NT AUTHORITY'},
{user: config.logOnAs.account || 'LocalSystem'},
{password: config.logOnAs.password || ''},
]
});
}
Working code:
// Fixed version: the added <allowservicelogon>true</allowservicelogon>
// element makes winsw grant the account the "Log on as a service"
// right, so the configured password works when the service starts.
if (config.logOnAs) {
xml.push({
serviceaccount: [
{domain: config.logOnAs.domain || 'NT AUTHORITY'},
{user: config.logOnAs.account || 'LocalSystem'},
{password: config.logOnAs.password || ''},
{allowservicelogon: 'true'}
]
});
}
I'll commit that change to the project's git repository.
I tried simple file upload on the FTP path via node js. For that I have installed ftp-client module.
index.js
const ftpClient = require("ftp-client");
// FTP Configuration
// NOTE(review): port 22 is the SSH/SFTP port; plain FTP normally uses
// port 21. If the server only speaks SFTP, an FTP client will get no
// useful response — confirm which protocol the server actually expects.
const config = {
host: '',
port: 22,
user: '',
password: ''
};
const options = {
logging: 'basic'
};
// NOTE(review): assigned without const/var — creates an implicit global.
client = new ftpClient(config, options);
client.connect(function (result) {
// Upload test.txt into /opt/test_api, replacing only older copies.
client.upload(['./upload/test.txt'], '/opt/test_api', {
baseDir: 'ogrds',
overwrite: 'older'
}, function (result) {
console.log(result);
})
}
);
When I tried to execute it with the node index.js command, it neither gave any results nor moved the file to the FTP server.
I am trying to run http-server for nodejs.
After using npm start the server starts on the given port(8000) perfectly with a few ignorable errors. But when I try to run the application url, it says 'Webpage is not available'.
When i tried to ping the IP it responds by sending packets perfectly.
When I run http-server command it shows that it is running on 127.0.0.1:8080 and 172.31.46.121:8080. I tried changing the localhost port to 8080,but with no difference in results.
I am using putty client to run linux on windows.
nodejs version-5.4.1
npm version-3.7.0
Please help..
There is one more thing..
I ran this server on putty at port 80. Then with the server still running i tried to run the same nodejs server using Bitnami client(i.e. using npm start) at the same port(80) and there was no conflict of ports. Even the app is running fine at that port when I run it through Bitnami client.
http-server file code-
` #!/usr/bin/env node
// http-server CLI: parse argv, resolve a port (explicit -p, $PORT, or
// the first free port from 8080 upward via portfinder), then serve.
var colors = require('colors'),
    httpServer = require('../lib/http-server'),
    argv = require('optimist').argv,
    portfinder = require('portfinder'),
    opener = require('opener');

// -h / --help: print usage and exit without starting a server.
if (argv.h || argv.help) {
  console.log([
    "usage: http-server [path] [options]",
    "",
    "options:",
    " -p Port to use [8080]",
    " -a Address to use [0.0.0.0]",
    " -d Show directory listings [true]",
    " -i Display autoIndex [true]",
    " -e --ext Default file extension if none supplied [none]",
    " -s --silent Suppress log messages from output",
    " --cors Enable CORS via the 'Access-Control-Allow-Origin' header",
    // Fixed typo in the help text: "staring" -> "starting".
    " -o Open browser window after starting the server",
    " -c Set cache time (in seconds). e.g. -c10 for 10 seconds.",
    " To disable caching, use -c-1.",
    " -h --help Print this list and exit."
  ].join('\n'));
  process.exit();
}

var port = argv.p || parseInt(process.env.PORT, 10),
    host = argv.a || '0.0.0.0',
    // -s/--silent turns logging into a no-op.
    log = (argv.s || argv.silent) ? (function () {}) : console.log,
    requestLogger;

if (!argv.s && !argv.silent) {
  // Per-request access log: timestamp, method, URL, user agent.
  requestLogger = function(req) {
    log('[%s] "%s %s" "%s"', (new Date).toUTCString(), req.method.cyan, req.url.cyan, req.headers['user-agent']);
  }
}

if (!port) {
  // No explicit port: probe upward from 8080 for a free one.
  portfinder.basePort = 8080;
  portfinder.getPort(function (err, port) {
    if (err) throw err;
    listen(port);
  });
} else {
  listen(port);
}

// Build server options from argv and start listening on host:port.
function listen(port) {
  var options = {
    root: argv._[0],
    cache: argv.c,
    showDir: argv.d,
    autoIndex: argv.i,
    ext: argv.e || argv.ext,
    logFn: requestLogger
  };
  if (argv.cors) {
    options.headers = { 'Access-Control-Allow-Origin': '*' };
  }
  var server = httpServer.createServer(options);
  server.listen(port, host, function() {
    log('Starting up http-server, serving '.yellow
      + server.root.cyan
      + ' on port: '.yellow
      + port.toString().cyan);
    log('Hit CTRL-C to stop the server');
    if (argv.o) {
      // -o: open the served site in the default browser.
      opener('http://127.0.0.1:' + port.toString());
    }
  });
}

if (process.platform !== 'win32') {
  //
  // Signal handlers don't work on Windows.
  //
  process.on('SIGINT', function () {
    log('http-server stopped.'.red);
    process.exit();
  });
}
`
Please provide some code,
out of pure speculation there may be an issue with bind IP ie. you may have your IP address of server bind to 127.0.0.1 which can only be accessed from locahost, change it to 0.0.0.0 to allow access from outside.
// Minimal sanity-check server: replies "It works" to every request.
// Bound to 0.0.0.0 so it is reachable from outside the machine, not
// just via localhost.
var http = require('http');

var handler = function (req, res) {
  res.writeHead(200, { 'content-type': 'text/plain' });
  res.end('It works');
};

var server = http.createServer(handler);
server.listen(3000, '0.0.0.0');
I'm writing a node program that uses dockerode as the Docker client. The program creates a container with a volume that is bound to a directory on the host when the container is started. Once started, I attempt to print the contents of the shared volume to prove that it's working properly. However, I keep getting `ls: /tmp/app: No such file or directory`.
Here is my code...
// Create an ubuntu container with /tmp/app as a volume, bind-mount a
// fresh host directory onto it at start, then exec `ls /tmp/app` and
// stream the output to stdout.
var Docker = require('dockerode'),
docker = new Docker(),
mkdirp = require('mkdirp'),
// Host directory to share: $HOME/<timestamp> (HOMEPATH/USERPROFILE fallbacks for Windows).
volume = (process.env.HOME || process.env.HOMEPATH || process.env.USERPROFILE) + '/' + Date.now();
// Log-and-continue error reporter used by every async step below.
function handleError(action, err) {
if (err) {
console.error('error while ' + action + '...');
console.error(err);
}
}
// Make sure the host directory exists before binding it.
mkdirp.sync(volume);
docker.createContainer({
Image: 'ubuntu',
Volumes: {
'/tmp/app': {}
}
}, function(err, container) {
handleError('building', err);
// NOTE(review): the container has no long-running Cmd, so it may exit
// right after start, letting the exec race a dead container — confirm
// against the Docker remote API exec documentation.
container.start({
Binds: [volume + ':/tmp/app']
}, function(err, data) {
handleError('starting', err);
container.exec({
AttachStdout: true,
AttachStderr: true,
Tty: false,
Cmd: ['/bin/ls', '/tmp/app']
}, function(err, exec) {
handleError('executing `ls /tmp/app`', err);
exec.start(function(err, stream) {
handleError('handling response from `ls /tmp/app`', err);
stream.setEncoding('utf8');
stream.pipe(process.stdout);
});
});
});
});
I've succeeded by doing this without exec, where I create the container, attach to it, start it with the ls command, wait for it to finish, and then kill it and remove it. But I'm looking to use exec so I can issue multiple commands once the container is running. I've been trying to piece this together from the examples in the dockerode library and the Docker remote API documentation. I just don't know where I'm going wrong.
For reference, here is the code without exec...
// Working variant without exec: run `ls /tmp/app` as the container's
// Cmd, attach to its output, wait for it to finish, then kill/remove.
docker.createContainer({
Image: 'ubuntu',
Cmd: ['/bin/ls', '/tmp/app'],
Volumes: {
'/tmp/app': {}
}
}, function(err, container) {
console.log('attaching to... ' + container.id);
// Attach before starting so no output is missed.
container.attach({stream: true, stdout: true, stderr: true, tty: true}, function(err, stream) {
handleError('attaching', err);
stream.pipe(process.stdout);
console.log('starting... ' + container.id);
container.start({
Binds: [volume + ':/tmp/app']
}, function(err, data) {
handleError('starting', err);
});
// Wait for the Cmd to exit, then clean the container up.
container.wait(function(err, data) {
handleError('waiting', err);
console.log('killing... ' + container.id);
container.kill(function(err, data) {
handleError('killing', err);
console.log('removing... ' + container.id);
container.remove(function(err, data) {
handleError('removing', err);
});
});
});
});
});
I had struggled with the same issue for some time, but I found a solution. It seems that the Remote API does not accept a command with arguments as one string; you have to split each argument into a separate token in the Cmd property array. For example, with gcc:
"Cmd":["/bin/bash","-c","gcc -Wall -std=c99 hello.c -o hello.bin"]
After this modification it works correctly.
Official documentation could be better for Remote API configuration.