How to use npm cli-progress with ssh2-sftp-client - Node.js

I have a project that uses the npm package ssh2-sftp-client to download files from a remote server, and I want to display download progress in the console. Downloading the files works fine, but I don't know how to use cli-progress to show progress while the files are downloading.
function getConnect(ip, name, pwd, remotepath, localpath) {
    const sftp = new SftpClient();
    sftp.connect({
        host: ip,
        port: 22,
        username: name,
        password: pwd
    }).then(async () => {
        const files = await sftp.list(remotepath, '.');
        for (var j = 0; j < files.length; j++) {
            var e = files[j];
            await sftp.fastGet(remotepath + "/" + e.name, localpath + "\\" + e.name);
        }
    });
}

I have revised it; hopefully this is better:
const Throttle = require('throttle');
const progress = require('progress-stream');
const { createWriteStream } = require('fs');

function getConnect(ip, name, pwd, remotepath, localpath) {
    const sftp = new SftpClient();
    sftp.connect({
        host: ip,
        port: 22,
        username: name,
        password: pwd
    }).then(async () => {
        const files = await sftp.list(remotepath, '.');
        for (var j = 0; j < files.length; j++) {
            var e = files[j];
            const throttleStream = new Throttle(1); // throttles reads to 1 byte/s; demo only, far too slow in practice
            const progressStream = progress({
                length: e.size,
                time: 100, // emit a 'progress' event every 100 ms
            });
            progressStream.on('progress', (p) => {
                process.stdout.write("\r [" + e.name + "] downloaded [" + p.percentage.toFixed(2) + "%]");
            });
            // write to the destination file, not to the directory path
            const outStream = createWriteStream(localpath + "\\" + e.name);
            throttleStream.pipe(progressStream).pipe(outStream);
            try {
                await sftp.get(remotepath + "/" + e.name, throttleStream, { autoClose: false });
            } catch (err) { // a separate name, so the file entry 'e' is not shadowed
                console.log('sftp error', err);
            }
        }
        // close the connection once, after all files are done
        await sftp.end();
    });
}

I followed the suggestion from @Abbas Agus Basari, like:
await sftp.fastGet(secondPath + "/" + e.name, localPath + "\\" + e.name, {
    step: step => {
        const percent = Math.floor((step / e.size) * 100);
        process.stdout.write("\r" + "【" + e.name + "】downloaded【" + percent + '%】');
    }
});
which ran like this: https://i.stack.imgur.com/97sRi.png
I downloaded two files from the remote server, but in the console only one file shows 100%; the other stopped at 59%.
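A likely culprit is that every step callback writes to the same console line with "\r", so consecutive transfers overwrite each other's output. cli-progress has a MultiBar that gives each file its own line. Below is a minimal, untested sketch combining it with fastGet's step option (same connection parameters as above; fastGet's step callback receives the total bytes transferred as its first argument):

const SftpClient = require('ssh2-sftp-client');
const cliProgress = require('cli-progress');

async function downloadAll(ip, name, pwd, remotepath, localpath) {
    const sftp = new SftpClient();
    // One MultiBar container; each file gets its own bar (and its own console line).
    const multibar = new cliProgress.MultiBar({
        clearOnComplete: false,
        hideCursor: true,
        format: '{filename} [{bar}] {percentage}%'
    }, cliProgress.Presets.shades_classic);
    try {
        await sftp.connect({ host: ip, port: 22, username: name, password: pwd });
        const files = await sftp.list(remotepath);
        for (const e of files) {
            const bar = multibar.create(e.size, 0, { filename: e.name });
            await sftp.fastGet(remotepath + '/' + e.name, localpath + '\\' + e.name, {
                // step(totalTransferred, chunk, total): update the bar with bytes so far
                step: totalTransferred => bar.update(totalTransferred)
            });
        }
    } finally {
        multibar.stop();
        await sftp.end();
    }
}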

Related

'mongodump' is not recognized as an internal or external command, operable program or batch file

Here I am trying to back up my MongoDB Atlas database in my MERN project. I use the code below to back up, but the message above shows when I run the Express server. I first create mongodb_backup.js and cron.js and require them from server.js; all of that works fine.
One more thing: what is the correct URL to give to dbOptions.host?
mongodb_backup.js
var fs = require('fs');
var _ = require('lodash');
var exec = require('child_process').exec;
const path = require('path');
const backupDirPath = path.join(__dirname, 'database-backup/');

var dbOptions = {
    user: '****',
    pass: '****',
    host: 'mongodb+srv://cluster0-gtick.mongodb.net/test?retryWrites=true&w=majority',
    port: 27017,
    database: 'Project-Management',
    autoBackup: true,
    removeOldBackup: true,
    keepLastDaysBackup: 2,
    autoBackupPath: backupDirPath
};

exports.stringToDate = dateString => {
    // -------------------- all works fine
};

exports.empty = mixedVar => {
    // ------------------------ all works fine
};

// Auto backup function
exports.dbAutoBackUp = () => {
    if (dbOptions.autoBackup == true) {
        let date = new Date();
        let beforeDate, oldBackupDir, oldBackupPath;
        currentDate = this.stringToDate(date);
        let newBackupDir = ------;
        let newBackupPath = dbOptions.autoBackupPath + '-mongodump-' + newBackupDir;
        if (dbOptions.removeOldBackup == true) {
            // ------------- all works fine
        }
        // Command for mongodb dump process
        let cmd =
            'mongodump --host ' +
            dbOptions.host +
            ' --port ' +
            dbOptions.port +
            ' --db ' +
            dbOptions.database +
            ' --username ' +
            dbOptions.user +
            ' --password ' +
            dbOptions.pass +
            ' --out ' +
            newBackupPath;
        exec(cmd, (error, stdout, stderr) => {
            if (this.empty(error)) {
                // check for remove old backup after keeping # of days given in configuration.
                if (dbOptions.removeOldBackup == true) {
                    if (fs.existsSync(oldBackupPath)) {
                        exec('rm -rf ' + oldBackupPath, err => { });
                    }
                }
            } else {
                console.log(stderr); // here consoling the error
            }
        });
    }
};
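As for the error itself: Windows cannot find the mongodump binary on PATH, which is independent of the Node code. And since an Atlas mongodb+srv string already carries the host, replica-set and TLS options, it is normally passed whole via --uri rather than split into --host/--port. A rough sketch of both fixes (the install path below is hypothetical, adjust it to your machine; dbOptions and newBackupPath are reused from the snippet above):

const { exec } = require('child_process');

// Point at the binary directly if it is not on PATH,
// or add the MongoDB Database Tools bin directory to PATH instead.
const mongodump = '"C:\\Program Files\\MongoDB\\Tools\\100\\bin\\mongodump.exe"';

// Pass the Atlas connection string whole via --uri.
let cmd = mongodump +
    ' --uri="mongodb+srv://' + dbOptions.user + ':' + dbOptions.pass +
    '@cluster0-gtick.mongodb.net/' + dbOptions.database + '"' +
    ' --out "' + newBackupPath + '"';

exec(cmd, (error, stdout, stderr) => {
    if (error) console.log(stderr);
});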

I am unable to read multiple rows from an xlsx file

I am trying to read multiple rows from an xlsx file. The sheet contains various details, among them an FTP directory and password; each row describes a different FTP directory.
I am able to read, fetch and retrieve the data from FTP when only one row is present in the sheet, but as soon as I add an extra row it starts throwing a promise error.
Below is my code. dataJson1 is the Excel sheet parsed to JSON:
for (let i = 0; i < dataJson1.length; i++) {
    dir = dataJson1[i]['FTP DIR'];
    subfolder = dataJson1[i]['Sub Folder'];
    ftpPath = dir + subfolder;
    host = dataJson1[i]['FTP IP'];
    user = dataJson1[i]['FTP Username'];
    password = dataJson1[i]['FTP Password'];
    ticketStatus = dataJson1[i]['Status']
    console.log("FTP LOCATION:" + ftpPath + "\n" + "HOSTNAME:" + host + "\n" + "USERNAME:" + user + "\n" + "PASSWORD:" + password + "\n")
    //Reading the list of directories present in the FTP location
    console.log("value of i" + i);
    if (ticketStatus == true) {
        if (!ftp) {
            ftp = new PromiseFtp();
        }
        const ftpPromise = ftp.connect({
            host: host, user: user,
            password: password
        }).then(function (serverMessage) {
            console.log('Server message: ' + serverMessage)
            //console.log("value of i" + i)
            return ftp.list(ftpPath);
        }).then(function (list) {
            console.log('Directory listing:');
            var dirList = (list)
            console.log(dirList)
            console.log("Number of directories:" + dirList.length)
            var jsondirString = JSON.stringify(dirList)
            var datadirJson1 = JSON.parse(jsondirString)
            for (var j = 0; j < dirList.length; j++) {
                //console.log(datadirJson1[j]['name'])
                ticketName.push(datadirJson1[j]['name'])
                //console.log(ftpTime)
                ftpTime.push(datadirJson1[j]['date'])
            }
            return ftp.end();
        });
        promises.push(ftpPromise)
    } //status check ends
} //Loop ends here

Promise.all(promises).then(arrayOfResults => {
    console.log(ticketName);
    var ticketNameArr = [];
    for (let i = 0; i < ticketName.length; i++) {
        let ticketNameIs = ticketName[i];
        let ftpTimeIs = ftpTime[i]
        let ticketDetail = ticketApp(ticketNameIs, ftpTimeIs);
        Promise.all(ticketDetail).then(function (values) {
            //console.log(values);
            ticketNameArr.push({
                // FtpTime: values[0].ftpTime,
                Date: values[0].ftpTime,
                TicketName: ticketNameIs,
                //Add Dynamic folder column----
                In_Input_Location: values[0].ticketStatusInput,
                Input_Time: values[0].mtime,
                In_Result_Location: values[1].ticketStatusResult,
                Result_Time: values[1].mtime,
                //Will help in preparing comments
                CallAPi: values[3].apiStatus,
                ReportStatus: values[3].reportStatus,
                Comment: values[4].Comment,
                Status: values[5].ticketStatus
                // LogStatus: values[2].logStatus,
                // LogCreateTime: values[2].birthtime,
                // LogModifiedTime: values[2].mtime,
            });
            if (ticketNameArr.length == ticketName.length) {
                //uncomment the line below if the sheet is blank, then comment it back
                // ws = XLSX.utils.sheet_add_json(ws, ticketNameArr, { origin: 0, skipHeader: false });
                //comment the line below if the sheet is blank, then uncomment it
                ws = XLSX.utils.sheet_add_json(ws, ticketNameArr, { origin: -1, skipHeader: true });
                let wsRemDup = removeDuplicate(ws)
                console.log("Unique Data", wsRemDup)
                //Overwriting unique data
                wb.Sheets[first_sheet_name] = XLSX.utils.json_to_sheet(wsRemDup);
                XLSX.writeFile(wb, 'DailyTicketSatus.xlsx')
                // respond.render('result', { "ticketNameArr": ticketNameArr });
                respond.render('index', { "ticketNameArr": ticketNameArr });
            }
        });
    }
})
})
I would suggest using the async/await syntax for this task: it's easier to read, and you can ensure that the ftp.end() call has completed before proceeding to the next FTP host. That is probably why the original code fails to process more than one row.
async function runFTPJob(dataJson1) {
    let promises = [];
    for (let i = 0; i < dataJson1.length; i++) {
        dir = dataJson1[i]['FTP DIR'];
        subfolder = dataJson1[i]['Sub Folder'];
        ftpPath = dir + subfolder;
        host = dataJson1[i]['FTP IP'];
        user = dataJson1[i]['FTP Username'];
        password = dataJson1[i]['FTP Password'];
        ticketStatus = dataJson1[i]['Status']
        console.log("Getting row:", i);
        console.log("FTP LOCATION:" + ftpPath + "\n" + "HOSTNAME:" + host + "\n" + "USERNAME:" + user + "\n" + "PASSWORD:" + password + "\n")
        //Reading the list of directories present in the FTP location
        if (ticketStatus == true) {
            if (!ftp) {
                ftp = new PromiseFtp();
            }
            try {
                let ftpPromise = ftp.connect({ host, user, password });
                promises.push(ftpPromise);
                let serverMessage = await ftpPromise;
                console.log('Server message: ' + serverMessage)
                let dirList = await ftp.list(ftpPath);
                console.log('Directory listing:', dirList);
                console.log("Number of directories:" + dirList.length)
                for (let dirEntry of dirList) {
                    ticketName.push(dirEntry.name);
                    ftpTime.push(dirEntry.date);
                }
                await ftp.end();
            } catch (e) {
                console.error("An error occurred accessing ftp site:", e.message);
            }
        } //status check ends
    } //Loop ends here
    return promises;
}
function processTickets() {
    console.log(ticketName);
    var ticketNameArr = [];
    for (let i = 0; i < ticketName.length; i++) {
        let ticketNameIs = ticketName[i];
        let ftpTimeIs = ftpTime[i]
        let ticketDetail = ticketApp(ticketNameIs, ftpTimeIs);
        Promise.all(ticketDetail).then(function (values) {
            //console.log(values);
            ticketNameArr.push({
                // FtpTime: values[0].ftpTime,
                Date: values[0].ftpTime,
                TicketName: ticketNameIs,
                //Add Dynamic folder column----
                In_Input_Location: values[0].ticketStatusInput,
                Input_Time: values[0].mtime,
                In_Result_Location: values[1].ticketStatusResult,
                Result_Time: values[1].mtime,
                //Will help in preparing comments
                CallAPi: values[3].apiStatus,
                ReportStatus: values[3].reportStatus,
                Comment: values[4].Comment,
                Status: values[5].ticketStatus
                // LogStatus: values[2].logStatus,
                // LogCreateTime: values[2].birthtime,
                // LogModifiedTime: values[2].mtime,
            });
            if (ticketNameArr.length == ticketName.length) {
                //uncomment the line below if the sheet is blank, then comment it back
                // ws = XLSX.utils.sheet_add_json(ws, ticketNameArr, { origin: 0, skipHeader: false });
                //comment the line below if the sheet is blank, then uncomment it
                ws = XLSX.utils.sheet_add_json(ws, ticketNameArr, { origin: -1, skipHeader: true });
                let wsRemDup = removeDuplicate(ws)
                console.log("Unique Data", wsRemDup)
                //Overwriting unique data
                wb.Sheets[first_sheet_name] = XLSX.utils.json_to_sheet(wsRemDup);
                XLSX.writeFile(wb, 'DailyTicketSatus.xlsx')
                // respond.render('result', { "ticketNameArr": ticketNameArr });
                respond.render('index', { "ticketNameArr": ticketNameArr });
            }
        });
    }
}

(async () => {
    await runFTPJob(dataJson1);
    processTickets();
})();

How to upload an entire directory from a local machine to a server using SSH in Node.js?

I don't really have much knowledge of this; I'm just doing what I was told. I'm trying to upload an entire folder to a directory on a server over SSH with Node.js. I don't have a public/private key, only a password. I copied my code following this link https://www.npmjs.com/package/node-ssh and Transfer entire directory using ssh2 in Nodejs, but neither works for me. This is my code so far:
console.log("Begin deploy ...")
var path, node_ssh, ssh, fs
fs = require('fs')
path = require('path')
node_ssh = require('node-ssh')
ssh = new node_ssh()
const LOCAL_DIRECTORY = ''
const REMOTE_DIRECTORY = ''
var tar = require('tar-fs');
var zlib = require('zlib');
function transferDir(conn, remotePath, localPath, compression, cb) {
var cmd = 'tar cf - "' + remotePath + '" 2>/dev/null';
if (typeof compression === 'function')
cb = compression;
else if (compression === true)
compression = 6;
if (typeof compression === 'number'
&& compression >= 1
&& compression <= 9)
cmd += ' | gzip -' + compression + 'c 2>/dev/null';
else
compression = undefined;
conn.exec(cmd, function(err, stream) {
if (err)
return cb(err);
var exitErr;
var tarStream = tar.extract(remotePath);
tarStream.on('finish', function() {
cb(exitErr);
});
stream.on('exit', function(code, signal) {
if (typeof code === 'number' && code !== 0) {
exitErr = new Error('Remote process exited with code '
+ code);
} else if (signal) {
exitErr = new Error('Remote process killed with signal '
+ signal);
}
}).stderr.resume();
if (compression)
stream = stream.pipe(zlib.createGunzip());
stream.pipe(tarStream);
});
}
var ssh = require('ssh2');
var conn = new ssh();
conn.on('ready', function() {
transferDir(conn,
LOCAL_DIRECTORY,
REMOTE_DIRECTORY,
true,
function(err) {
if (err) throw err;
console.log('Done transferring');
conn.end();
});
}).connect({
host: '',
port: 22,
username: '',
password: ''
});
console.log("End deploy ...")
I also tried this, following the first link:
console.log("Begin deploy ...")
var path, node_ssh, ssh, fs
fs = require('fs')
path = require('path')
node_ssh = require('node-ssh')
ssh = new node_ssh()
const LOCAL_DIRECTORY = ''
const REMOTE_DIRECTORY = ''
ssh.connect({
host: '',
username: '',
port: 22,
password: '',
tryKeyboard: true,
onKeyboardInteractive: (name, instructions, instructionsLang, prompts, finish) => {
if (prompts.length > 0 && prompts[0].prompt.toLowerCase().includes('password')) {
finish([password])
}
}
})
.then(function() {
const failed = []
const successful = []
ssh.putDirectory(LOCAL_DIRECTORY, REMOTE_DIRECTORY, {
recursive: true,
concurrency: 10,
validate: function(itemPath) {
const baseName = path.basename(itemPath)
return baseName.substr(0, 1) !== '.' &&
baseName !== IGNORE_DIRS_FILES[0] &&
baseName !== IGNORE_DIRS_FILES[1] &&
baseName !== IGNORE_DIRS_FILES[2] &&
baseName !== IGNORE_DIRS_FILES[3]
},
tick: function(localPath, remotePath, error) {
if (error) {
failed.push(LOCAL_DIRECTORY)
} else {
successful.push(LOCAL_DIRECTORY)
}
}
})
.then(function(status) {
console.log('the directory transfer was', status ? 'successful' : 'unsuccessful')
console.log('failed transfers', failed.join(', '))
console.log('successful transfers', successful.join(', '))
})
})
console.log("End deploy ...")
But the outputs of both are like this:
The first code
Begin deploy ...
End deploy ...
Done transferring
Done in 1.01s
That finished far too fast to be right, because the folder is around 300 MB.
The second code
Begin deploy ...
End deploy ...
The directory transfer was successful
failed transfers
successful transfers
Please help me. Thank you for your help.
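One observation, offered tentatively: the transferDir helper in the first attempt comes from an answer about downloading. It runs tar on the remote path and extracts the result locally, which would explain why it "finishes" almost instantly when pointed at a directory that does not exist on the server. A sketch of the upload direction with ssh2 and tar-fs (untested; it assumes tar is available on the remote host and that conn is an already-connected ssh2 client):

var tar = require('tar-fs');

// Pack the local directory and pipe it into `tar xf -` running on the server.
function uploadDir(conn, localPath, remotePath, cb) {
    conn.exec('tar xf - -C "' + remotePath + '"', function (err, stream) {
        if (err) return cb(err);
        stream.on('exit', function (code, signal) {
            if (typeof code === 'number' && code !== 0)
                return cb(new Error('Remote tar exited with code ' + code));
            if (signal)
                return cb(new Error('Remote tar killed with signal ' + signal));
            cb(); // remote tar has finished extracting
        }).stderr.resume();
        // When the local pack stream ends, the channel's stdin closes and remote tar exits.
        tar.pack(localPath).pipe(stream);
    });
}

Also note that "End deploy ..." printing before the transfer completes is expected in both versions, since connect() and putDirectory() are asynchronous.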

Proper calling sequence to have Node.js disconnect from database after processing input file

I'm creating a very simple Node.js utility to process each record in a text file separately (line by line), but it is surprisingly difficult to handle the following scenario due to the inherently async world of Node:
Open a connection to the database
Read each line of a text file
Based on conditions within the processed text of the line, look up a record in the database
Upon completion of reading the text file, close the database connection
The challenge I face is that the text file is read line by line (using the 'readline' module), with a listener attached to the 'line' event emitted by the module. The lines of the file are all processed rapidly and the queries to the database queue up. I have tried many approaches to essentially create a synchronous process, to no avail. Here is my latest attempt, which is definitely full of async/await functions. Being a longtime developer but new to Node.js, I know I am missing something simple. Any guidance will be greatly appreciated.
const { Pool, Client } = require('pg')
const client = new Client({
    user: '*****',
    host: '****',
    database: '*****',
    password: '******#',
    port: 5432,
})

client.connect()
    .then(() => {
        console.log("Connected");
        console.log("Processing file");
        const fs = require('fs');
        const readline = require('readline');
        const instream = fs.createReadStream("input.txt");
        const outstream = new (require('stream'))();
        const rl = readline.createInterface(instream, outstream);
        rl.on('line', async function (line) {
            var callResult;
            if (line.length > 0) {
                var words = line.replace(/[^0-9a-z ]/gi, '').split(" ");
                var len = words.length;
                for (var i = 0; i < words.length; i++) {
                    if (words[i].length === 0) {
                        words.splice(i, 1);
                        i--;
                    } else {
                        words[i] = words[i].toLowerCase();
                    }
                }
                for (var i = 0; i < words.length; i++) {
                    if (i <= words.length - 3) {
                        callResult = await isKeyPhrase(words[i].trim() + " " + words[i + 1].trim() + " " + words[i + 2].trim());
                        if (!callResult) {
                            callResult = await isKeyPhrase(words[i].trim() + " " + words[i + 1].trim());
                            if (!callResult) {
                                callResult = await isKeyPhrase(words[i].trim());
                            }
                        };
                    } else if (i <= words.length - 2) {
                        callResult = await isKeyPhrase(words[i].trim() + " " + words[i + 1].trim());
                        if (!callResult) {
                            callResult = await isKeyPhrase(words[i].trim());
                        };
                    } else if (i < words.length) {
                        callResult = await isKeyPhrase(words[i].trim());
                    }
                }
            } // (line.length > 0)
        });
        rl.on('close', function (line) {
            console.log('done reading file.');
            // stubbed out because queries are still running
            //client.end();
        });
    }).catch((err) => {
        console.error('connection error', err.stack);
    });
async function isKeyPhrase(keyPhraseText) {
    var callResult = false;
    return new Promise(async function (resolve, reject) {
        const query = {
            name: 'get-name',
            text: 'select KP.EntryID from KeyPhrase KP where (KP.KeyPhraseText = $1) and (Active = true)',
            values: [keyPhraseText],
            rowMode: 'array'
        }
        // promise
        await client.query(query)
            .then(result => {
                if (result.rowCount == 1) {
                    console.log(`Key phrase '${keyPhraseText}' found in table with Phase ID = ${result.rows}`);
                    callResult = true; // note: this was misspelled 'calResult', so callResult was never set
                }
            }).catch(e => {
                console.error(e.stack)
                reject(e);
            });
        resolve(callResult);
    });
}
Welcome to StackOverflow. :)
Indeed, there's no (sensible) way to read a file synchronously while interacting with a database per line, and no feasible way at all if the file is bigger than, say, 1/8th of your memory.
This doesn't mean, however, that there's no sane way to write this code. The only problem is that standard Node streams (including readline) do not wait for async code.
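(A side note: on recent Node versions, v12 and later, async iteration over the readline interface does wait for the loop body, which is often enough for this case. A minimal sketch, with handleLine standing in for your per-line lookup logic:)

const fs = require('fs');
const readline = require('readline');

async function processFile(handleLine) {
    const rl = readline.createInterface({
        input: fs.createReadStream('input.txt'),
        crlfDelay: Infinity
    });
    // Unlike the 'line' event, for await...of does not read ahead:
    // each iteration (and its database queries) completes before the next line is read.
    for await (const line of rl) {
        await handleLine(line);
    }
}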
I'd recommend using scramjet, a functional stream programming framework pretty much designed for your use case (disclaimer: I'm the author). Here's how the code would look:
const { Pool, Client } = require('pg')
const { StringStream } = require("scramjet");
const fs = require("fs");

const client = new Client({
    user: '*****',
    host: '****',
    database: '*****',
    password: '******#',
    port: 5432,
})

client.connect()
    .then(async () => {
        console.log("Connected, processing file");
        return StringStream
            // this creates a "scramjet" stream from the input
            .from(fs.createReadStream("input.txt"))
            // this splits the file line by line
            .lines()
            // the next line is just to show when the file is fully read
            .use(stream => stream.whenEnd.then(() => console.log("done reading file.")))
            // this splits the words like the first "for" loop in your code
            .map(line => line.toLowerCase().replace(/[^0-9a-z ]+/g, '').split(" "))
            // this one gets rid of empty lines (i.e. no words)
            .filter(line => line.length > 0)
            // this looks up the key phrases like the second "for" loop in your code
            .map(async words => {
                for (var i = 0; i < words.length; i++) {
                    const callResult = await isKeyPhrase(words.slice(i, i + 3).join(" "));
                    if (callResult) return callResult;
                }
            })
            // this runs the above list of operations to the end and returns a promise
            .run();
    })
    .then(() => {
        console.log("done processing file.");
        client.end();
    })
    .catch((e) => {
        console.error(e.stack);
    });

async function isKeyPhrase(keyPhraseText) {
    const query = {
        name: 'get-name',
        text: 'select KP.EntryID from KeyPhrase KP where (KP.KeyPhraseText = $1) and (Active = true)',
        values: [keyPhraseText],
        rowMode: 'array'
    };
    const result = await client.query(query);
    if (result.rowCount > 0) {
        console.log(`Key phrase '${keyPhraseText}' found in table with Phase ID = ${result.rows}`);
        return true;
    }
    return false;
}
I compacted and optimized your code in some places, but in general this should get you what you want: scramjet adds an asynchronous mode to each operation and waits until all the operations have finished.

RecordRTC upload video to Node.js server

I am using RecordRTC to record a WebRTC meeting. After implementing recording, when I test the application with both clients on the same system, it works fine; when the clients are on different systems, the meeting is not recorded.
Here is my client-side code that stops the recording:
recordRTC.stopRecording(function (videoURL) {
    console.log('recordRTC.stopRecording Function inside');
    SelectedFile = recordRTC.getBlob();
    $('#uploadForm').append('#attachmentFileId', recordRTC.getBlob());
    StartUpload();
});

var FReader;
var Name = "Meeting" + "_" + Date.now() + ".webm";

function StartUpload() {
    FReader = new FileReader();
    FReader.onload = function (evnt) {
        socket.emit('Upload', { 'Name': Name, Data: evnt.target.result });
    }
    socket.emit('Start', { 'Name': Name, 'Size': SelectedFile.size });
}

socket.on('MoreData', function (data) {
    var Place = data['Place'] * 524288; // the next block's starting position
    var NewFile; // the variable that will hold the new block of data
    if (SelectedFile.webkitSlice)
        NewFile = SelectedFile.webkitSlice(Place, Place + Math.min(524288, (SelectedFile.size - Place)));
    else
        NewFile = SelectedFile.slice(Place, Place + Math.min(524288, (SelectedFile.size - Place)));
    FReader.readAsBinaryString(NewFile);
});
Server-side code, which I got from here:
socket.on('Start', function (data) { // data contains the variables that we passed through in the html file
    var Name = data['Name'];
    Files[Name] = { // create a new entry in the Files variable
        FileSize: data['Size'],
        Data: "",
        Downloaded: 0
    }
    var Place = 0;
    try {
        var Stat = fs.statSync('Temp/' + Name);
        if (Stat.isFile()) {
            Files[Name]['Downloaded'] = Stat.size;
            Place = Stat.size / 524288;
        }
    }
    catch (er) { } // it's a new file
    fs.open("Temp/" + Name, 'a', 0755, function (err, fd) {
        if (err) {
            console.log(err);
        }
        else {
            Files[Name]['Handler'] = fd; // we store the file handler so we can write to it later
            socket.emit('MoreData', { 'Place': Place, Percent: 0 });
        }
    });
});

socket.on('Upload', function (data) {
    var Name = data['Name'];
    Files[Name]['Downloaded'] += data['Data'].length;
    Files[Name]['Data'] += data['Data'];
    if (Files[Name]['Downloaded'] == Files[Name]['FileSize']) { // if the file is fully uploaded
        fs.write(Files[Name]['Handler'], Files[Name]['Data'], null, 'Binary', function (err, Writen) {
            var input = fs.createReadStream("Temp/" + Name);
            var output = fs.createWriteStream("Video/" + Name);
            // util.pump(readableStream, writableStream, [callback])
            // deprecated: use readableStream.pipe(writableStream)
            input.pipe(output);
            input.on("end", function () {
                console.log("end");
                fs.unlink("Temp/" + Name, function () { // this deletes the temporary file
                    console.log("unlink this file:", Name);
                    //socket.emit('Done', {'Image' : 'Video/' + Name + '.jpg'});
                });
            });
        });
    }
    else if (Files[Name]['Data'].length > 10485760) { // if the data buffer reaches 10MB
        fs.write(Files[Name]['Handler'], Files[Name]['Data'], null, 'Binary', function (err, Writen) {
            Files[Name]['Data'] = ""; // reset the buffer
            var Place = Files[Name]['Downloaded'] / 524288;
            var Percent = (Files[Name]['Downloaded'] / Files[Name]['FileSize']) * 100;
            socket.emit('MoreData', { 'Place': Place, 'Percent': Percent });
        });
    }
    else {
        var Place = Files[Name]['Downloaded'] / 524288;
        var Percent = (Files[Name]['Downloaded'] / Files[Name]['FileSize']) * 100;
        socket.emit('MoreData', { 'Place': Place, 'Percent': Percent });
    }
});
If both clients are on the same machine it works fine, but if the clients are on different systems the meeting is not recorded.
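One thing worth checking regardless of the multi-system issue: readAsBinaryString plus string concatenation is fragile, because the binary string can be re-encoded as UTF-8 in transit and corrupt the webm data. socket.io (1.x and later) carries ArrayBuffers and Buffers natively, so here is a hedged sketch of the same chunking with binary payloads (same 512 KB block size as above):

// Client side: slice the Blob and send raw ArrayBuffers instead of binary strings.
var CHUNK = 524288;
socket.on('MoreData', function (data) {
    var Place = data['Place'] * CHUNK;
    var NewFile = SelectedFile.slice(Place, Place + Math.min(CHUNK, SelectedFile.size - Place));
    var reader = new FileReader();
    reader.onload = function (evnt) {
        socket.emit('Upload', { 'Name': Name, Data: evnt.target.result }); // ArrayBuffer
    };
    reader.readAsArrayBuffer(NewFile);
});

// Server side: the payload arrives as a Buffer; append it straight to the file.
socket.on('Upload', function (data) {
    var buf = Buffer.from(data['Data']);
    Files[data['Name']]['Downloaded'] += buf.length;
    fs.write(Files[data['Name']]['Handler'], buf, 0, buf.length, null, function (err) {
        // then request the next block or finish, as in the original code
    });
});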
