Pushing process to background causes high kswapd0 - node.js

I have a CPU-intensive process running on a Raspberry Pi that's started by running a Node.js file. Running the first command (below) and then running the file in another tab works just fine. However, when I run the process via a bash shell script, the process stalls.
Looking at the processes with top, I see that kswapd0 and kworker/2:1+ take over most of the CPU. What could be causing this?
FYI, the first command begins the Ethereum discovery protocol via HTTP and IPC
geth --datadir $NODE --syncmode 'full' --port 8080 --rpc --rpcaddr 'localhost' --rpcport 30310 --rpcapi 'personal,eth,net,web3,miner,txpool,admin,debug' --networkid 777 --allow-insecure-unlock --unlock "$HOME_ADDRESS" --password ./password.txt --mine --maxpeers 100 2> results/log.txt &
sleep 10
# create storage contract and output result
node performanceContract.js
UPDATE:
performanceContract.js
const ethers = require('ethers');
const fs = require('fs');
const provider = new ethers.providers.IpcProvider('./node2/geth.ipc');
const walletJson = fs.readFileSync('./node2/keystore/keys', 'utf8');
const pwd = fs.readFileSync('./password.txt', 'utf8').trim();
const PerformanceContract = require('./contracts/PerformanceContract.json');

(async function () {
    try {
        const wallet = await ethers.Wallet.fromEncryptedJson(walletJson, pwd);
        const connectedWallet = wallet.connect(provider);
        const factory = new ethers.ContractFactory(PerformanceContract.abi, PerformanceContract.bytecode, connectedWallet);
        const contract = await factory.deploy();
        const deployedInstance = new ethers.Contract(contract.address, PerformanceContract.abi, connectedWallet);
        let tx = await deployedInstance.loop(6000);
        fs.writeFile(`./results/contract_result_xsmall_${new Date()}.txt`, JSON.stringify(tx, null, 4), () => {
            console.log('file written');
        });
        ...
Where loop is a method that runs the keccak256 hash in a loop. Its purpose is to test different gas costs by varying the loop count.
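For reference, if the goal is to compare gas costs per loop count, a small extension of the snippet above (assuming ethers v5, as the question's code suggests) would wait for the transaction receipt and record gasUsed rather than the raw transaction object:

const tx = await deployedInstance.loop(6000);
// wait for the transaction to be mined; the receipt carries the gas actually consumed
const receipt = await tx.wait();
console.log(`loop(6000) used ${receipt.gasUsed.toString()} gas`);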

Solved by increasing the sleep time to 1 min. I assume it was just a memory issue that needed more time before executing the contract.
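For reference, the fixed sleep could also be replaced with a readiness check before deploying. A minimal sketch (not the poster's actual fix), assuming ethers v5 and the same ./node2/geth.ipc path as above: poll a cheap RPC call until the IPC endpoint answers, then proceed.

// wait-for-geth: retry a cheap RPC call until geth's IPC endpoint is reachable
const ethers = require('ethers');
const provider = new ethers.providers.IpcProvider('./node2/geth.ipc');

async function waitForGeth(retries = 60, delayMs = 1000) {
    for (let i = 0; i < retries; i++) {
        try {
            await provider.getBlockNumber(); // fails until geth's IPC endpoint is up
            return;
        } catch (err) {
            await new Promise(resolve => setTimeout(resolve, delayMs));
        }
    }
    throw new Error('geth IPC endpoint never became ready');
}

waitForGeth().then(() => { /* deploy the contract here */ });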

Related

Slash command registers command from wrong folder discord.js14

I'm tired of trying to solve this. First off, here is my deployment code
const { REST, Routes } = require('discord.js');
const fs = require('node:fs');
const { client_id } = require('./config.json');

const commands = [];
// Grab all the command files from the commands directory you created earlier
const commandFiles = fs.readdirSync('./slashCommands').filter(file => file.endsWith('.js'));

// Grab the SlashCommandBuilder#toJSON() output of each command's data for deployment
for (const file of commandFiles) {
    const command = require(`./slashCommands/${file}`);
    commands.push(command.data.toJSON());
}

// Construct and prepare an instance of the REST module
const rest = new REST({ version: '10' }).setToken(process.env.TOKEN);

// and deploy your commands!
(async () => {
    try {
        console.log(`Started refreshing ${commands.length} application (/) commands.`);

        // The put method is used to fully refresh all commands in the guild with the current set
        const data = await rest.put(
            Routes.applicationCommands(client_id),
            { body: commands },
        );

        console.log(`Successfully reloaded ${data.length} application (/) commands.`);
    } catch (error) {
        // And of course, make sure you catch and log any errors!
        console.error(error);
    }
})();
It is supposed to get the commands from the "slashCommands" folder. So I run 'node deploy-commands.js' and it works.
The problem is when I do the slash command '/ping', I get this error:
/home/runner/Nocinel/commands/ping.js:8
message.reply('🏓 **Ball is going over the net...**').then(m => { m.edit(`**🏓 Pong!\n:stopwatch: Uptime: ${Math.round(message.client.uptime / 60000)} minutes\n:sparkling_heart: Websocket Heartbeat: ${message.client.ws.ping}ms\n:round_pushpin: Rountrip Latency: ${m.createdTimestamp - message.createdTimestamp}ms**`) });
^
TypeError: m.edit is not a function
at /home/runner/Nocinel/commands/ping.js:8:73
repl process died unexpectedly: exit status 1
Now this error indicates that I am running a command from my "commands" folder rather than my "slashCommands" folder, which doesn't make sense because I explicitly coded it to only get commands from the "slashCommands" folder.
I have restarted, deleted, waited for an hour, and tested it multiple times; it always gives the same disappointing result. I see absolutely nothing wrong with my code.
There is no problem with registering the commands (deploy-commands.js only registers commands; it doesn't make them work). The problem has to be in your index.js: you have to route interaction commands to your slashCommands folder. Registering the commands was successful.
Documentation:
https://discordjs.guide/creating-your-bot/command-handling.html#loading-command-files
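A minimal index.js sketch of that handling, assuming each file in ./slashCommands exports data and execute as in the linked guide (the intents and token variable here are illustrative, not taken from the question):

const fs = require('node:fs');
const { Client, Collection, GatewayIntentBits } = require('discord.js');

const client = new Client({ intents: [GatewayIntentBits.Guilds] });
client.commands = new Collection();

// load the same ./slashCommands files that deploy-commands.js registered
for (const file of fs.readdirSync('./slashCommands').filter(f => f.endsWith('.js'))) {
    const command = require(`./slashCommands/${file}`);
    client.commands.set(command.data.name, command);
}

// route incoming slash commands to the matching module
client.on('interactionCreate', async interaction => {
    if (!interaction.isChatInputCommand()) return;
    const command = client.commands.get(interaction.commandName);
    if (!command) return;
    try {
        await command.execute(interaction);
    } catch (error) {
        console.error(error);
    }
});

client.login(process.env.TOKEN);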

How to deploy a solana program from a particular address or public key

I have two problems here. The major one is that I can't understand the difference between a program's account address and its public key.
The second thing is that there is a problem while deploying my Solana program. I've made a deployment script...
const { Keypair } = require("@solana/web3.js");
const { Connection, LAMPORTS_PER_SOL } = require("@solana/web3.js");
const path = require('path');
const spawn = require("cross-spawn");
const fs = require('fs');
//import { Connection, LAMPORTS_PER_SOL } from "@solana/web3.js"

async function main() {
    const SLASH = path.sep;

    let programAuthorityKeypair = new Keypair();

    this.connection = new Connection("https://api.devnet.solana.com", "confirmed");
    const signature = await this.connection.requestAirdrop(programAuthorityKeypair.publicKey, LAMPORTS_PER_SOL * 5);
    await this.connection.confirmTransaction(signature);

    const programAuthorityKeyfileName = `target/deploy/programauthority-keypair.json`;
    const programAuthorityKeypairFile = path.resolve(
        `${__dirname}${SLASH}${programAuthorityKeyfileName}`
    );
    fs.writeFileSync(
        programAuthorityKeypairFile,
        `[${Buffer.from(programAuthorityKeypair.secretKey.toString())}]`
    );

    const programKeyfileName = `target/deploy/solblog-keypair.json`;
    const programKeypairFile = path.resolve(
        `${__dirname}${SLASH}${programKeyfileName}`
    );
    let programKeypair = readKeyfile(programKeypairFile);
    let programId = programKeypair.publicKey.toString();

    //////////////////////////////////configurations//////////////////////////////////
    let method = ["deploy"]; // we are deploying for the first time, using 'deploy'

    spawn.sync(
        "anchor",
        [
            ...method,                        // we use a variable so when we want to upgrade, we can use 'upgrade' instead
            "--provider.cluster",             // we want to specify the node cluster
            "Devnet",                         // the node cluster as the Devnet
            "--provider.wallet",              // we need to pass in a keyfile to pay for the deployment
            `${programAuthorityKeypairFile}`, // this is the keypair file we created just a moment ago
        ],
        { stdio: "inherit" }
    );
}

main()
    .then(() => process.exit(0))
    .catch((error) => {
        console.error(error);
        process.exit(1);
    });

function readKeyfile(keypairfile) {
    let kf = fs.readFileSync(keypairfile);
    let parsed = JSON.parse(kf.toString()); // [1,1,2,2,3,4]
    kf = new Uint8Array(parsed);
    const keypair = Keypair.fromSecretKey(kf);
    return keypair;
}
Now I'm running the script with node deploy.js, but it is giving me an insufficient-balance error...
Deploying workspace: https://api.devnet.solana.com
Upgrade authority: /home/adil/Desktop/solblog/target/deploy/programauthority-keypair.json
Deploying program "solblog"...
Program path: /home/adil/Desktop/solblog/target/deploy/solblog.so...
===========================================================================
Recover the intermediate account's ephemeral keypair file with
`solana-keygen recover` and the following 12-word seed phrase:
===========================================================================
all orbit slim nothing learn country polar casual print help coffee gesture
===========================================================================
To resume a deploy, pass the recovered keypair as the
[BUFFER_SIGNER] to `solana program deploy` or `solana write-buffer`.
Or to recover the account's lamports, pass it as the
[BUFFER_ACCOUNT_ADDRESS] argument to `solana program close`.
===========================================================================
Error: Account JjetRg5FHM6xpPjTT7KfrEaTBeEWxMh5Pyjo4UXw3nm has insufficient funds for spend (1.8711612 SOL) + fee (0.00135 SOL)
There was a problem deploying: Output { status: ExitStatus(unix_wait_status(256)), stdout: "", stderr: "" }.
So I'm taking the account address above, JjetRg5FHM6xpPjTT7KfrEaTBeEWxMh5Pyjo4UXw3nm, to the Solana faucet site and airdropping 5 SOL into that address. Then I run the script again, AND I GET THE SAME ERROR WITH A DIFFERENT ACCOUNT, like...
Error: Account .
I've already set the environment to devnet.
What I'm wondering is: how can I deploy the program with the same account into which I'm airdropping SOL?
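For reference, one way to see whether the airdrop actually landed on the wallet that anchor is told to pay with is to query that keypair's balance before spawning the deploy. A sketch using the same @solana/web3.js calls as the script above (the helper name is hypothetical):

const { Connection, LAMPORTS_PER_SOL } = require('@solana/web3.js');

// hypothetical helper: check the balance of the --provider.wallet keypair before deploying
async function checkAuthorityBalance(keypair) {
    const connection = new Connection('https://api.devnet.solana.com', 'confirmed');
    const lamports = await connection.getBalance(keypair.publicKey);
    console.log(`${keypair.publicKey.toBase58()} holds ${lamports / LAMPORTS_PER_SOL} SOL`);
    return lamports;
}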

How to allow Node.js child_process.execSync to run `scp -P 4422 root@myserver.com:/data/backups/...` without getting Permission Denied

I am running a simple Node.js process to back up my data every day by using child_process.execSync to run:
scp -P 4422 root@myserver.com:/data/backups/dbs.zip /data/backups/dbs.zip
Note that if I run the above command directly, it works. But when I run it from Node, the log I get is:
[2020-03-04 05:00:00] error downloading backup...Command failed:
Permission denied, please try again.
root@myserver.com: Permission denied (publickey,password).
Do I have to create a key file for Node.js' child_process to use when it runs scp? If so, how come when I run scp -i id_rsa.pem -P 4422 root@myserver.com:/data/backups/dbs.zip /data/backups/dbs.zip in Node.js it just gets stuck? It even stops running other async actions such as appendFile, and it creates a lot of processes called (node) that cannot be killed.
const path = require('path');
const {
    backupPath,
    downloadPath
} = require('../../conf');

const keyPath = path.join(
    __dirname,
    '../../key/id_rsa.pem'
);

const downloadProcess = log => {
    const { execSync } = require('child_process');
    log('downloading backup...');
    try {
        const date = new Date();
        // note: Date has no built-in format(); this assumes a custom Date.prototype.format helper exists elsewhere
        const backupName = `db_${date.format('yyyy-MM-dd')}.tar.gz`;
        const command = `scp -i ${keyPath} -P 4422 root@myserver.com:${backupPath}/${backupName} ${downloadPath}/${backupName}`;
        log(`running command: ${command}`);
        const stdout = execSync(command);
        log(`downloaded backup ${backupName} at ${downloadPath}${'\n'}stdout:${'\n'}${stdout}`);
    } catch (e) {
        log(`error downloading backup...${e.message}`);
    }
};

module.exports = downloadProcess;
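For what it's worth, a hedged variation of the same download step that keeps scp from waiting on an interactive prompt (a common reason an execSync'd scp appears to hang): the BatchMode and StrictHostKeyChecking options are standard OpenSSH flags, not anything specific to this project.

const { execSync } = require('child_process');
const path = require('path');

const keyPath = path.join(__dirname, '../../key/id_rsa.pem');

// non-interactive scp: fail fast instead of hanging on a password or host-key prompt
function download(remote, local) {
    const command = [
        'scp',
        '-i', keyPath,
        '-P', '4422',
        '-o', 'BatchMode=yes',                    // never prompt; fail if key auth does not work
        '-o', 'StrictHostKeyChecking=accept-new', // skip the interactive host-key question
        remote,
        local
    ].join(' ');
    return execSync(command, { stdio: 'pipe' }).toString();
}

If key authentication is rejected, the error then surfaces immediately in the catch block instead of leaving orphaned (node) processes waiting for input. The key file also needs to be readable only by the user running Node (chmod 600); the ssh client will typically refuse a world-readable private key.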

Query a remote server's operating system

I'm writing a microservice in Node.js that runs a particular command-line operation to get a specific piece of information. The service runs on multiple servers, some of them on Linux, some on Windows. I'm using ssh2-exec to connect to the servers and execute a command; however, I need a way of determining the server's OS to run the correct command.
let ssh2Connect = require('ssh2-connect');
let ssh2Exec = require('ssh2-exec');

ssh2Connect(config, function(error, connection) {
    let process = ssh2Exec({
        cmd: '<CHANGE THE COMMAND BASED ON OS>',
        ssh: connection
    });
    // using the results of process...
});
I have an idea for the solution: following this question, run some other command beforehand and determine the OS from the output of that command; however, I want to learn if there's a more "formal" way of achieving this, specifically using the SSH2 library.
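One way to act on that idea with the same ssh2-exec usage shown above is sketched below; it assumes the object returned by ssh2Exec exposes stdout and a close event like a child process (adjust to the library's callback form if not). The idea: probe the remote host with uname -s first, then pick the real command from its output.

let ssh2Connect = require('ssh2-connect');
let ssh2Exec = require('ssh2-exec');

ssh2Connect(config, function(error, connection) {
    // first pass: ask the remote host what it is
    let probe = ssh2Exec({ cmd: 'uname -s', ssh: connection });
    let output = '';
    probe.stdout.on('data', chunk => { output += chunk; });
    probe.on('close', code => {
        // 'Linux', 'Darwin', etc. on Unix-likes; a failure suggests Windows
        const isUnix = code === 0 && /linux|darwin|bsd/i.test(output);
        const cmd = isUnix ? '<UNIX COMMAND>' : '<WINDOWS COMMAND>';
        let process = ssh2Exec({ cmd: cmd, ssh: connection });
        // using the results of process...
    });
});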
Below is how I would think it would be done...
// imports from the question, so the helpers below have something to call
let ssh2Connect = require('ssh2-connect');
let ssh2Exec = require('ssh2-exec');
// import the os module; this lets you read the OS type the app is running on
const os = require('os');
// define the Windows platform; there is only one, but for consistency's sake we leave it in an array
// (if it changes in the future it's easier to add to an array, and the rest of the code doesn't need to change)
const winRMOS = ['win32'];
// define the OS' that need to use the ssh protocol (see note above)
const sshOS = ['darwin', 'linux', 'freebsd'];
// ssh function
const sshConnect = config => {
    ssh2Connect(config, (error, connection) => {
        let cmd;
        if (os.platform() === 'darwin') {
            cmd = 'Some macOS command';
        }
        if (os.platform() === 'linux') {
            cmd = 'Some linux command';
        }
        let process = ssh2Exec({
            cmd: cmd,
            ssh: connection
        });
        // using the results of process...
    });
};
// winrm function; there may be some other way to do this, but winrm is the way I know
// (left as pseudocode: a WinRM client would be used here instead of ssh2Exec)
const winRM2Connect = config => {
    let process = winRMExec({ // placeholder for a WinRM exec call
        cmd: 'Some Windows command',
        winRM: config
    });
    // using the results of process...
};
// if statements to determine which one to use, based on the os.platform() that is returned
if (sshOS.includes(os.platform())) {
    sshConnect(config);
} else if (winRMOS.includes(os.platform())) {
    winRM2Connect(config);
}

Open up terminal/shell on remote server via tcp request

I have this:
const http = require('http');
const cp = require('child_process');

const server = http.createServer((req, res) => {
    const bash = cp.spawn('bash');
    req.pipe(bash.stdin, { end: false });
    bash.stdout.pipe(res);
    bash.stderr.pipe(res);
});

server.listen('4004');
when I hit the server with:
curl localhost:4004
and I type bash commands, nothing gets output to my console. Does anybody know why?
Note: To address security, I plan to run this in a Docker container, use HTTPS/SSL, and implement authentication (let me know if you have any recommendations on auth schemes).
More importantly, I am looking for shell prompts to appear ... apparently bash by itself doesn't open up a shell/prompt?
It is possible to do this "over the web", so to speak. However, your approach will not work, because you are mixing paradigms (batch vs. interactive) and you are missing large chunks of the setup needed to run terminal applications.
Normally I would show you how to program this; however, that's really involved. Have a look at:
https://github.com/chjj/tty.js
and,
https://github.com/xtermjs/xterm.js
as starting points to create your solution.
Both are usable directly from node.js to serve up terminal applications over HTTP.
This is a partial answer, but I started a bounty because I am looking for something better. I was able to create something rudimentary with TCP like so:
const net = require('net'); // !use net package not http
const cp = require('child_process');

const server = net.createServer(s => {
    const bash = cp.spawn('bash');
    s.pipe(bash.stdin, { end: false });
    bash.stdout.pipe(s);
    bash.stderr.pipe(s);
});

server.listen('4004');
Not sure why it won't work with HTTP, though. I connect to it using netcat:
nc localhost 4004
But this isn't opening a terminal, just a bash process. The experience is not ideal, as described here:
https://unix.stackexchange.com/questions/519364/bash-shell-modes-how-to-pipe-request-to-shell-on-remote-server
however I am looking to replicate the shell experience you have when you do something like:
docker exec -ti <container> /bin/bash
when I run my script it "works", but I don't get any shell prompts or anything like that. (One way to solve this might be with ssh, but I am trying to figure out a different way).
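For what it's worth, the docker exec -ti feel usually comes from attaching the shell to a pseudo-terminal rather than plain pipes. A rough sketch over the same TCP server, assuming the third-party node-pty package (an assumption; it is not part of the code above):

const net = require('net');
const pty = require('node-pty'); // assumed dependency: npm install node-pty

const server = net.createServer(socket => {
    // spawn bash attached to a pseudo-terminal so it behaves interactively (prompt, line editing, etc.)
    const shell = pty.spawn('bash', [], {
        name: 'xterm-color',
        cols: 80,
        rows: 30,
        cwd: process.env.HOME,
        env: process.env
    });
    shell.onData(data => socket.write(data));                // terminal output -> client
    socket.on('data', data => shell.write(data.toString())); // client keystrokes -> terminal
    socket.on('close', () => shell.kill());
});

server.listen(4004);

The client still needs to be terminal-aware (nc in raw mode, or an xterm.js front end) for the prompt and line editing to render properly.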
You can connect to an HTTP server with telnet. It depends on how you're starting the HTTP server. Here's an example.
Start an http server with the npm package http-server
npm install -g http-server
cd ~/ <Any directory>
http-server
Now separately start a telnet session
telnet localhost 8080
OR
nc localhost 8080
And then type something like GET /
Use the telnet client instead of nc
Check this: https://www.the-art-of-web.com/system/telnet-http11/
Update: you can run an SSH server from Node.js.
I found this at https://github.com/mscdex/ssh2
var fs = require('fs');
var crypto = require('crypto');
var inspect = require('util').inspect;

var ssh2 = require('ssh2');
var utils = ssh2.utils;

var allowedUser = Buffer.from('foo');
var allowedPassword = Buffer.from('bar');
var allowedPubKey = utils.parseKey(fs.readFileSync('foo.pub'));

new ssh2.Server({
  hostKeys: [fs.readFileSync('host.key')]
}, function(client) {
  console.log('Client connected!');

  client.on('authentication', function(ctx) {
    var user = Buffer.from(ctx.username);
    if (user.length !== allowedUser.length
        || !crypto.timingSafeEqual(user, allowedUser)) {
      return ctx.reject();
    }

    switch (ctx.method) {
      case 'password':
        var password = Buffer.from(ctx.password);
        if (password.length !== allowedPassword.length
            || !crypto.timingSafeEqual(password, allowedPassword)) {
          return ctx.reject();
        }
        break;
      case 'publickey':
        var allowedPubSSHKey = allowedPubKey.getPublicSSH();
        if (ctx.key.algo !== allowedPubKey.type
            || ctx.key.data.length !== allowedPubSSHKey.length
            || !crypto.timingSafeEqual(ctx.key.data, allowedPubSSHKey)
            || (ctx.signature && !allowedPubKey.verify(ctx.blob, ctx.signature))) {
          return ctx.reject();
        }
        break;
      default:
        return ctx.reject();
    }

    ctx.accept();
  }).on('ready', function() {
    console.log('Client authenticated!');

    client.on('session', function(accept, reject) {
      var session = accept();
      session.once('exec', function(accept, reject, info) {
        console.log('Client wants to execute: ' + inspect(info.command));
        var stream = accept();
        stream.stderr.write('Oh no, the dreaded errors!\n');
        stream.write('Just kidding about the errors!\n');
        stream.exit(0);
        stream.end();
      });
    });
  }).on('end', function() {
    console.log('Client disconnected');
  });
}).listen(0, '127.0.0.1', function() {
  console.log('Listening on port ' + this.address().port);
});
Your approaches are quite mixed. Nonetheless, whenever you finally connect to the remote server, do not use 'bash' as the program that starts the session; bash is just the Bourne-again shell with other commands and features built in.
Rather, use one of the following terminal programs, by their command-line names, e.g.:
~ $ 'gnome-terminal'
~ $ 'xterm'
There you will be referencing a true terminal program on the system; even kernel-level C code has its own recognition of these, if not changed.
