Downloading multiple files from an FTP site using Node.js - node.js

I am trying to download each file on the FTP server from the root folder.
What I did is this:
ftpClient.ls(".", function(err, res) {
  res.forEach(function(file) {
    console.log(file.name);
    ftpClient.get("./" + file.name, 'D:/styleinc/ftp/' + file.name, function(hadErr) {
      if (hadErr)
        console.log(hadErr);
      else
        console.log('File copied successfully!');
    });
  });
});
But on running it, it gives me this error:
{ [Error: connect ECONNREFUSED]
code: 'ECONNREFUSED',
errno: 'ECONNREFUSED',
syscall: 'connect',
msg: 'Probably trying a PASV operation while one is in progress'
}
I have already successfully logged in and authenticated myself on the FTP site.
I don't know what to do, please guide me.

This is the chunk of code I used with async.mapLimit to make it work with only one transfer at a time: the PASV error above comes from starting several gets concurrently on the same connection.
'use strict'
var JSFtp = require('jsftp');
var inspect = require('util').inspect;
var fs = require('fs');
var async = require('async');

var ftp = new JSFtp(require('./util/ftp'));
var local = 'EDI/mohawk/OUTBOX/';
var remote = 'OUTBOX';

// List the remote directory and resolve with just the file names.
var gatherFiles = function(dir) {
  return new Promise(function(resolve, reject) {
    ftp.ls(dir + '/*', function(err, res) {
      if (err) return reject(err);
      console.log(res);
      var files = [];
      res.forEach(function(file) {
        files.push(file.name);
      });
      resolve(files);
    });
  });
};

gatherFiles(remote).then(function(files) {
  console.log(files);
  // A limit of 1 keeps only one transfer (and one PASV data connection) open at a time.
  async.mapLimit(files, 1, function(file, callback) {
    console.log('attempting: ' + remote + file + ' -> ' + local + file);
    ftp.get(remote + '/' + file, local + '/' + file, function(err) {
      if (err) {
        console.log('Error getting ' + file);
        callback(err);
      } else {
        console.log('Got ' + file);
        callback();
      }
    });
  }, function(err, res) {
    if (err) {
      console.log(err);
    }
    console.log('updates complete ' + res);
  });
});

Related

NodeJs - Error while reading files using fs.createReadStream

I am trying to implement a feature in my web app where you can upload CSV files and insert the data into PostgreSQL. I have made my app endpoint and written some code:
const router = require('express').Router()
const uploadMid = require('./fileUpMid')
const pool = require('./db')
const fs = require("fs");
const fastcsv = require("fast-csv");
const upload = async (req, res) => {
if (req.files === null) {
return res.status(400).json({ msg: 'No file uploaded' });
}
const file = req.files.file;
file.mv(`${__dirname}/uploads/${file.name}`, err => {
if (err) {
console.error(err);
return res.status(500).send(err);
}
res.json({ fileName: file.name, filePath: `/uploads/${file.name}` });
});
let persons = [];
let path = __dirname + "/uploads/" +file.name;
fs.createReadStream(path)
.pipe(fastcsv.parse({ headers: true }))
.on("error", (error) => {
console.error(error.message);
})
.on("data", (row) => {
persons.push(row);
})
.on("end", () => {
//remove head
persons.shift();
const q = "some query here";
pool.connect((err, client, done) => {
if (err) throw err;
try {
persons.forEach(row => {
console.log(typeof row)
var obj = JSON.parse(JSON.stringify(row));
var values = Object.keys(obj).map(function (key) { return obj[key]; });
console.log(values)
client.query(q, values, (err, res) => {
if (err) {
console.log(err.stack);
} else {
console.log("inserted " + res.rowCount + " row:", row);
}
});
});
} finally {
done();
}
});
})
// fs.unlinkSync(path)
}
router.post('/file', uploadMid.single("file") ,upload)
module.exports = router
Everything seemed to work fine, but when I try to upload a second file I always get an error in the terminal:
Error: ENOENT: no such file or directory, open 'filename here with full path'
Emitted 'error' event on ReadStream instance at:
    at internal/fs/streams.js:126:14
    at FSReqCallback.oncomplete (fs.js:180:23) {
  errno: -4058,
  code: 'ENOENT',
  syscall: 'open',
  path: 'filename here with full path'
}
I know this is not a safe nor secure way to upload data, but this app is intended to be run only locally. Even when the first file is uploaded successfully, the DevTools console logs
GET http://localhost:3000/uploads/filename [HTTP/1.1 404 Not Found 8ms]
But the file is created, with all its content, in the uploads directory.
Any tips on what to look for?
Thank you in advance :)
Judging by the error (Error: ENOENT: no such file or directory, open 'filename here with full path'), here is the suggested way of defining paths in Node.js apps, using the path module.
const path = require('path');

// Inside the `upload` handler
const filePath = path.join(__dirname, 'uploads', file.name);
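Building on that, here is a minimal sketch of the upload handler (assuming the express-fileupload-style file.mv API and the fast-csv parsing from the question) that builds the path with path.join and only starts reading the CSV once the move has finished, which also avoids the ENOENT on a second upload:
const path = require('path');
const fs = require('fs');
const fastcsv = require('fast-csv');

const upload = async (req, res) => {
  if (req.files === null) {
    return res.status(400).json({ msg: 'No file uploaded' });
  }
  const file = req.files.file;
  const filePath = path.join(__dirname, 'uploads', file.name);

  file.mv(filePath, err => {
    if (err) {
      console.error(err);
      return res.status(500).send(err);
    }
    // The file now exists on disk, so the read stream cannot hit ENOENT.
    const persons = [];
    fs.createReadStream(filePath)
      .pipe(fastcsv.parse({ headers: true }))
      .on('error', error => console.error(error.message))
      .on('data', row => persons.push(row))
      .on('end', () => {
        // run the INSERT queries from the original code here, then respond
        res.json({ fileName: file.name, filePath: `/uploads/${file.name}` });
      });
  });
};
The separate 404 on GET /uploads/filename is a different issue: the uploads directory would also need to be exposed, for example with express.static, which the posted code does not appear to do.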

Nodejs: ENOENT on Twitter Image Bot

I'm a beginner-level programmer. I used some online guides as well as my starter knowledge. I'm attempting to create a bot that posts to Twitter every hour. I keep getting the error ENOENT, which as I understand means it can't find the directory. Here is the error I'm getting (all I censored was personal information; it's not censored in the actual code or error):
opening an image...
15.jpg
internal/fs/utils.js:269
throw err;
^
Error: ENOENT: no such file or directory, open 'C:\Users\####\Desktop\####\bot\images15.jpg'
at Object.openSync (fs.js:462:3)
at Object.readFileSync (fs.js:364:35)
at C:\Users\####\Desktop\####\bot\server.js:32:29
at FSReqCallback.oncomplete (fs.js:156:23) {
errno: -4058,
syscall: 'open',
code: 'ENOENT',
path: 'C:\\Users\\#####\\Desktop\\####\\bot\\images15.jpg'
}
It looks like it's changing the name of the file to have "images" in front. I can't figure out why it's doing this, though. Does anyone know what I'm doing wrong? Here's my code:
const fs = require('fs'),
path = require('path'),
Twit = require('twit'),
config = require(path.join(__dirname, 'config.js')),
images = require(path.join(__dirname, 'images.js'));
const T = new Twit(config);
function randomFromArray(images) {
return images[Math.floor(Math.random() * images.length)];
}
function tweetRandomImage() {
fs.readdir(__dirname + '/images', function (err, files) {
if (err) {
console.log('error:', err);
}
else {
let images = [];
files.forEach(function (f) {
images.push(f);
});
console.log('opening an image...');
const image = randomFromArray(images);
console.log(image);
const imagePath = path.join(__dirname, '/images' + image);
const imageSource = image.source
b64content = fs.readFileSync(imagePath, { encoding: 'base64' });
console.log('uploading an image...');
T.post('media/upload', { media_data: b64content }, function (err, data, response) {
if (err) {
console.log('error:', err);
}
else {
console.log('image uploaded, now tweeting it...');
T.post('statuses/update', {
media_ids: new Array(data.media_id_string)
},
function (err, data, response) {
if (err) {
console.log('error:', err);
}
else {
console.log('posted an image!');
}
}
);
}
});
}
});
}
setInterval(function () {
tweetRandomImage();
}, 10000);
Any help would be appreciated, thank you!
Lesson of the day: always watch the syntax.
const fs = require('fs'),
path = require('path'),
Twit = require('twit'),
config = require(path.join(__dirname, 'config.js')),
images = require(path.join(__dirname, 'images.js'));
const T = new Twit(config);
function randomFromArray(images) {
return images[Math.floor(Math.random() * images.length)];
}
function tweetRandomImage() {
fs.readdir(__dirname + '/images', function (err, files) {
if (err) {
console.log('error:', err);
}
else {
let images = [];
files.forEach(function (f) {
images.push(f);
});
console.log('opening an image...');
const image = randomFromArray(images);
console.log(image);
// THE ONLY CHANGE I MADE IS THE LINE BELOW
const imagePath = path.join(__dirname, '/images/' + image);
// The only problem was that the folder and the file name were joined without a "/" (or "\\"),
// so Node looked for a file called "images15.jpg" instead of "images/15.jpg".
const imageSource = image.source
b64content = fs.readFileSync(imagePath, { encoding: 'base64' });
console.log('uploading an image...');
T.post('media/upload', { media_data: b64content }, function (err, data, response) {
if (err) {
console.log('error:', err);
}
else {
console.log('image uploaded, now tweeting it...');
T.post('statuses/update', {
media_ids: new Array(data.media_id_string)
},
function (err, data, response) {
if (err) {
console.log('error:', err);
}
else {
console.log('posted an image!');
}
}
);
}
});
}
});
}
setInterval(function () {
tweetRandomImage();
}, 10000);
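As a side note, passing the directory and the file name to path.join as separate arguments sidesteps the missing-separator problem entirely:
const imagePath = path.join(__dirname, 'images', image);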

How do I download directory with all files and folders inside using npm module ssh2 for nodejs?

I am trying to download files and folders via SFTP to my local machine. I am using the code below and am able to download the files in a particular directory, but I am unable to download the folders (with their respective files and folders, recursively) in the same directory.
const folderDir=moment().format('MMM YYYY/D');
const remoteDir = '/var/www/html/view';
const remoteDir = 'Backup';
const download=(remoteDir,folderDir,FolderName)=>{
conn.on('ready', function() {
conn.sftp((err, sftp) => {
if (err) throw err;
sftp.readdir(remoteDir, (err, list) => {
if (err) throw err;
let count = list.length;
list.forEach(item => {
let remoteFile = remoteDir + '/' + item.filename;
var localFile = 'C:/Users/Desktop/'+folderDir+'/'+FolderName+'/' + item.filename;
//console.log('Downloading ' + remoteFile);
sftp.fastGet(remoteFile, localFile, (err) => {
if (err) throw err;
//console.log('Downloaded to ' + localFile);
count--;
if (count <= 0) {
conn.end();
}
});
});
});
});
}).connect({
host: '0.0.0.0',
port: 0,
username: 'test',
privateKey: require('fs').readFileSync('')
});
}
Install the module by running:
npm i ssh2-sftp-client
You can do it like this:
let fs = require('fs');
let Client = require('ssh2-sftp-client');
let sftp = new Client();

sftp.connect({
  host: '',
  port: '22',
  username: 'your-username',
  password: 'your-password'
}).then(() => {
  // will return an array of objects with information about all files in the remote folder
  return sftp.list('/');
}).then(async (data) => {
  // data is the array of objects
  len = data.length;
  // x is one element of the array
  await data.forEach(x => {
    let remoteFilePath = '/' + x.name;
    sftp.get(remoteFilePath).then((stream) => {
      // save to local folder ftp
      let file = './ftp/' + x.name;
      fs.writeFile(file, stream, (err) => {
        if (err) console.log(err);
      });
    });
  });
}).catch((err) => {
  console.log(err, 'catch error');
});
For more info, see the ssh2-sftp-client module documentation.
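The snippet above only copies files that sit directly in the remote root and does not recurse into sub-folders, which is what the question asks about. Recent versions of ssh2-sftp-client also provide a downloadDir(src, dst) method that walks a remote directory tree recursively; a minimal sketch, assuming a current version of the module and placeholder connection details:
let Client = require('ssh2-sftp-client');
let sftp = new Client();

sftp.connect({
  host: 'example.com',      // placeholder connection details
  port: 22,
  username: 'your-username',
  password: 'your-password'
}).then(() => {
  // Recursively copies the remote directory tree into the local destination folder.
  return sftp.downloadDir('/var/www/html/view', 'C:/Users/Desktop/Backup');
}).then(() => {
  return sftp.end();
}).catch(err => {
  console.log(err, 'catch error');
});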

formidable handle no uploaded file errors

Everything is working as expected in my code, except that if the user has not uploaded a file, it crashes my Node app.
The error arises at the fs.rename part: since nothing was uploaded, the app can't rename anything, which causes the following:
Error:
EPERM: operation not permitted, rename 'C:\Users\xxx\AppData\Local\Temp\upload_0a145049089fa69e9df64f8d20abb362' -> 'C:\Users\xxx\Dropbox\Automate NodeJS\Login_Register+Submit - 0.01\data\5be13231807fe33f14b2834a\sdS9m'
I'm having difficulty searching for my error and how to handle it. If anyone can point me to some resources on how to handle formidable errors, and how to stop the process if the user hasn't uploaded anything, that would be fantastic.
It's not a production app or anything like that; it's just me learning how to deal with multiple functions and a database at once.
router.post('/submit', userisSubbed, userhasTime, (req, res, next0) => {
var userId = req.user._id;
var username = req.user.email;
var isCompleted = 'No'
var jobNumber = generator.generate({
length: 5,
numbers: true
});
// Validate
const errors = req.validationErrors();
if (errors) {
res.render('index', {
errors: errors
});}
else {
var form = new formidable.IncomingForm();
form.parse(req, function (err, fields, files) {
var userPath = req.user._id
var dir = './data/' + userPath + '/' + jobNumber + '/';
if (!fs.existsSync(dir)){
fs.mkdirSync(dir);
}else
{
console.log("Directory already exist");
}
var oldpath = files.filetoupload.path;
var newpath = dir + files.filetoupload.name;
// copy the file to a new location
fs.rename(oldpath, newpath, function (err) {
if (err) throw err;
console.log('renamed complete');
const newJob = new Job({
userId: userId,
username: username,
isCompleted: isCompleted,
filepath: newpath,
jobNumber: jobNumber,
});
Job.createJob(newJob, function(err, job){
if (err) throw err;
console.log(job);
});
req.flash('success_msg', 'Job Submitted...');
res.redirect('/')
});
});
}
});
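Since the crash comes from calling fs.rename when nothing was uploaded, a guard on the parsed files object before touching files.filetoupload stops the request early instead of throwing. A minimal sketch against the formidable callback used above (filetoupload is the field name from the question; the flash message and redirect target are assumptions, and depending on the formidable version an empty upload may appear as a missing entry or as a zero-size file, so both are checked):
form.parse(req, function (err, fields, files) {
  if (err) {
    // formidable itself failed to parse the request
    req.flash('error_msg', 'Upload failed');
    return res.redirect('/');
  }
  if (!files.filetoupload || !files.filetoupload.name || files.filetoupload.size === 0) {
    // nothing was uploaded, so skip the rename entirely
    req.flash('error_msg', 'Please choose a file to upload');
    return res.redirect('/');
  }
  // safe to continue with fs.rename(files.filetoupload.path, newpath, ...) as in the original code
});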

nodejs download files from sftp

I need to download some .txt.pgp files from an SFTP server. I've tried the npm modules ssh2, ssh2-sftp-client, and node-ssh without any success.
The closest I've got so far is listing the files in the remote folder using sftp.readdir (ssh2) or sftp.list (ssh2-sftp-client).
I've tried pipe, fs.createWriteStream, and sftp.fastGet, but no file is saved on my local machine.
const conn = new Client();
conn.on('ready', () => {
console.log('Client :: ready');
conn.sftp((err, sftp) => {
if (err) throw err;
sftp.readdir('out', (err, list) => {
if (err) throw err;
list.forEach(item => {
console.log(item.filename);
const fileName = item.filename;
sftp.fastGet(fileName, fileName, {}, downloadError => {
if(downloadError) throw downloadError;
console.log("Succesfully uploaded");
});
})
conn.end();
});
});
}).connect(config);
OR
const Client = require('ssh2-sftp-client');
const sftp = new Client();
sftp.connect(config).then(() => {
return sftp.list('out');
})
.then(files => {
// console.log(files);
if (files.length > 0) {
console.log('got list of files!');
}
files.map(file => {
const fileName = file.name;
sftp.get(fileName)
.then(() => {
fs.writeFile(fileName);
});
})
})
.then(() => {
sftp.end();
}).catch((err) => {
console.log(err);
});
Regarding your first attempt (with the ssh2 module), there are three issues that I can see:
You are calling conn.end() outside of a series of preceding async functions, which is almost definitely causing the SSH session to close before you've finished downloading the files.
You are not providing the sftp.fastGet() function with the correct path to the remote file. Earlier in the code, you call sftp.readdir() with the remote directory argument 'out', which returns a list of files relative to the remote directory. (Point is: you need to prepend the remote directory to the file name to create a correctly qualified path.)
You're not handling the connection's error event, so I suspect you're not getting useful error messages to help troubleshoot.
Try something like:
const Client = require('ssh2').Client;
const conn = new Client();
const sshOpt = someFunctionThatPopulatesSshOptions();
const remoteDir = 'out';

conn.on('ready', () => {
  conn.sftp((err, sftp) => {
    if (err) throw err;
    sftp.readdir(remoteDir, (err, list) => {
      if (err) throw err;
      let count = list.length;
      list.forEach(item => {
        let remoteFile = remoteDir + '/' + item.filename;
        let localFile = '/tmp/' + item.filename;
        console.log('Downloading ' + remoteFile);
        sftp.fastGet(remoteFile, localFile, (err) => {
          if (err) throw err;
          console.log('Downloaded to ' + localFile);
          count--;
          if (count <= 0) {
            conn.end();
          }
        });
      });
    });
  });
});

conn.on('error', (err) => {
  console.error('SSH connection stream problem');
  throw err;
});

conn.connect(sshOpt);
This should address all the issues I mentioned. Specifically:
We are using a count variable to ensure the SSH session is closed only after all files are downloaded. (I know it is not pretty.)
We are prepending remoteDir to all of our remote file downloads.
We're listening for the error event in our conn stream.
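For the second attempt (ssh2-sftp-client), the same path issue applies: sftp.get(fileName) is given a bare file name instead of 'out/' + fileName, and fs.writeFile(fileName) is called without the data returned by get(), so nothing is written. A minimal sketch using the module's promise-based fastGet instead, assuming the same config object from the question and an existing local ./downloads folder:
const Client = require('ssh2-sftp-client');
const sftp = new Client();

sftp.connect(config)
  .then(() => sftp.list('out'))
  .then(files => {
    // Download one file at a time so the connection is closed only after the last one finishes.
    return files.reduce((chain, file) => {
      return chain.then(() => sftp.fastGet('out/' + file.name, './downloads/' + file.name));
    }, Promise.resolve());
  })
  .then(() => sftp.end())
  .catch(err => console.log(err));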
