node.js (or nwjs) ftp download

I need to periodically download new files from a public FTP server:
ftp://ftp.cmegroup.com/bulletin/
But I cannot get the ftp module to connect to the server. My code is:
var url = "ftp://ftp.cmegroup.com/bulletin/";
var path = require('path');
var fs = require('fs');
//var Promise = require('bluebird');
var Client = require('ftp');
var c = new Client();
var connectionProperties = {
    host: "ftp.cmegroup.com",
};
c.on('ready', function () {
    console.log('ready');
    c.list(function (err, list) {
        if (err) throw err;
        list.forEach(function (element, index, array) {
            // Ignore directories
            if (element.type === 'd') {
                console.log('ignoring directory ' + element.name);
                return;
            }
            // Ignore non-zips
            if (path.extname(element.name) !== '.zip') {
                console.log('ignoring file ' + element.name);
                return;
            }
            // Download files
            c.get(element.name, function (err, stream) {
                if (err) throw err;
                stream.once('close', function () {
                    c.end();
                });
                stream.pipe(fs.createWriteStream(element.name));
            });
        });
    });
});
c.connect(connectionProperties);
Error:
Uncaught Error: connect ECONNREFUSED 127.0.0.1:21
I can't understand why it connects to localhost even though I specified the connection parameters.

One line was missing:
c.connect(connectionProperties);
Without passing the connection properties, c.connect() falls back to the ftp module's defaults (host 127.0.0.1, port 21), which is why the error points at localhost.
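For reference, a minimal sketch of the fallback behaviour (the defaults below are the ones documented in the ftp module's README):

var Client = require('ftp');
var c = new Client();
// With no options object, connect() behaves roughly like:
// c.connect({ host: '127.0.0.1', port: 21, user: 'anonymous', password: 'anonymous@' });
// which is exactly the ECONNREFUSED 127.0.0.1:21 target seen above.
c.connect({ host: 'ftp.cmegroup.com' }); // an explicit host avoids the default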

Related

Can't hide file extension in NODE.js

I want to hide the file extension and folder from the URL, so I wrote the following code, which I assumed would work.
The physical path to the file is /data/users.json. In the URL I want a 200 response if the user enters localhost:3000/api/users.
But I get a 404. And if I enter localhost:3000/api/users.json I get a 200 instead of a 404.
const express = require('express');
const path = require('path');
const fs = require('fs');

const app = express();
const PORT = 3000;

app.use('/api', express.static(path.join(__dirname, 'data')));

app.get('/api/:name', (req, res) => { // http://expressjs.com/en/guide/routing.html#route-parameters
    let { name } = req.params;
    const filename = name + '.json'; // adding the extension back
    fs.open(filename, 'r', (err, fd) => { // https://nodejs.org/api/fs.html#fsopenpath-flags-mode-callback
        if (err) {
            if (err.code === 'ENOENT') {
                console.log(filename);
                res.status(404);
                res.end('404 ' + filename);
                return;
            }
            throw err;
        }
        try {
            res.json(fd); // note: fd is a numeric file descriptor, not the file contents
        } finally {
            fs.close(fd, (err) => {
                if (err) throw err;
            });
        }
    });
});

app.listen(PORT); // start the server
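For what it's worth, a minimal sketch of one way to get the desired mapping, assuming the JSON files live in ./data next to the script. Note that the express.static mount at /api is what makes localhost:3000/api/users.json answer with 200; dropping it removes that route, and res.sendFile takes care of reading and streaming the file:

const express = require('express');
const path = require('path');
const app = express();

app.get('/api/:name', (req, res) => {
    // map /api/users -> ./data/users.json
    const file = path.join(__dirname, 'data', req.params.name + '.json');
    res.sendFile(file, (err) => {
        if (err) res.status(404).end('404 ' + req.params.name);
    });
});

app.listen(3000);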

Error: server instance pool gets destroyed in nested db collection functions

I have searched for a solution to the error named in the title:
MongoError: server instance pool was destroyed
I believe it is caused by a misplaced db.close(), but because I am nesting dbo.collection calls I have not been able to find the exact fix.
First I fetch data (an array of ids with status 0) from the database. Then I concatenate each app id with a URL to build the appUrl to crawl, and the crawled data is stored into another collection in MongoDB. This repeats for each id in the array. But the code hits the "server instance pool was destroyed" error before the data is stored. Where should db.close() go?
Here is my code:
///* global sitehead */
const request = require('request');
const cheerio = require('cheerio');
//const response = require('response');
const fs = require('fs');
const express = require('express');
const app = express();
var MongoClient = require('mongodb').MongoClient;
var url = "mongodb://localhost:27017/";
var dateTime = require('node-datetime');

MongoClient.connect(url, {useNewUrlParser: true}, function (err, db) {
    if (err) {
        throw err;
    } else {
        var dbo = db.db("WebCrawler");
        var app_id;
        var appUrl;
        let arr = [];
        dbo.collection("Unique_Apps").find({"Post_Status": 0}, {projection: {_id: 0, App_Id: 1}}).toArray(function (err, result) {
            if (err) {
                throw err;
            } else {
                for (var i = 0; i < result.length; i++) {
                    arr[i] = result[i];
                }
                arr.forEach((el) => {
                    app_id = el.App_Id;
                    appUrl = 'https://play.google.com/store/apps/details?id=' + app_id;
                    console.log(appUrl);
                    request(appUrl, function (error, response, html) {
                        if (!error && response.statusCode === 200) {
                            // START crawling
                            const $ = cheerio.load(html); // cheerio
                            const appTitle = $('.AHFaub');
                            const iconUrl = $('.T75of.sHb2Xb').attr("src");
                            const developedBy = $('.T32cc.UAO9ie').children().eq(0);
                            const category = $('.T32cc.UAO9ie').children().eq(1);
                            // store in database collection "Single_App_Data_Post"
                            var curr_obj = {
                                App_Id: app_id, App_Name: appTitle.text(),
                                Icon_Url: iconUrl, Price: "Free", Developed_By: developedBy.text(),
                                Category: category.text()
                            };
                            dbo.collection("Single_App_Data_Post").insertOne(curr_obj, function (err, res) {
                                if (err) {
                                    throw err;
                                } else {
                                    console.log("inserted....");
                                }
                            });
                            dbo.collection("Unique_Apps").updateOne({App_Id: app_id}, {$set: {Post_Status: 0}}, function (err, res) {
                                if (err)
                                    throw err;
                                console.log("1 document updated");
                                //dbo.close();
                            });
                        } else {
                            throw error;
                        }
                    });
                });
            }
            db.close(); // runs before the async request/insertOne/updateOne callbacks above have fired
        });
    } //else
}); //mongoClient connect db
The following is a good start on how to turn callbacks into promises. Try it: execute the code block by block, understand it, and then add your updateOne/insertOne requests into it.
const request = require('request');
const cheerio = require('cheerio');
const fs = require('fs');
const express = require('express');
const app = express();
const MongoClient = require('mongodb').MongoClient;
const dateTime = require('node-datetime');

// Class used to handle the basic database interactions
class DB {
    constructor() {
        this.db = false;
        this.url = "mongodb://localhost:27017/";
    }

    // Connect to the database
    connect() {
        return new Promise((resolve, reject) => {
            MongoClient.connect(this.url, {
                useNewUrlParser: true,
            }, (err, db) => {
                if (err) {
                    console.log('error mongodb connect');
                    return reject(err);
                }
                this.db = db;
                return resolve(db);
            });
        });
    }

    disconnect() {
        this.db.close();
        this.db = false;
    }

    getCollection(name) {
        return this.db.db(name); // returns a Db handle; the collection is picked later
    }
}

// Get the data from the database
function getAppsIds(dbObj) {
    return new Promise((resolve, reject) => {
        const dbo = dbObj.getCollection('WebCrawler');
        dbo.collection('Unique_Apps').find({
            'Post_Status': 0,
        }, {
            projection: {
                _id: 0,
                App_Id: 1,
            }
        }).toArray(function (err, result) {
            if (err) {
                return reject(err);
            }
            return resolve(result);
        });
    });
}

function requestPlayStore(idApp) {
    return new Promise((resolve, reject) => {
        const appUrl = `https://play.google.com/store/apps/details?id=${idApp}`;
        request(appUrl, function (error, response, html) {
            if (error || response.statusCode !== 200) {
                return reject(error);
            }
            return resolve({
                response,
                html,
            });
        });
    });
}

// Treat one app id at a time
function treatOneIdApp(dbObj, idApp) {
    return requestPlayStore(idApp)
        .then(({
            response,
            html,
        }) => {
            // Perform your updateOne and insertOne requests here...
        });
}

const dbObj = new DB();

dbObj.connect()
    .then(() => getAppsIds(dbObj))
    .then(rets => Promise.all(rets.map(x => treatOneIdApp(dbObj, x.App_Id))))
    .then(() => dbObj.disconnect())
    .catch((err) => {
        console.log(err);
    });
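As a sketch of that last step, assuming the same cheerio selectors as the question (the Post_Status: 1 write is my assumption that 1 marks an id as processed; the original wrote 0 back). With no callback argument, insertOne and updateOne return promises, so they chain directly:

function treatOneIdApp(dbObj, idApp) {
    return requestPlayStore(idApp)
        .then(({ html }) => {
            const $ = cheerio.load(html);
            const doc = {
                App_Id: idApp,
                App_Name: $('.AHFaub').text(),
                Icon_Url: $('.T75of.sHb2Xb').attr('src'),
                Price: 'Free',
                Developed_By: $('.T32cc.UAO9ie').children().eq(0).text(),
                Category: $('.T32cc.UAO9ie').children().eq(1).text(),
            };
            const dbo = dbObj.getCollection('WebCrawler');
            return dbo.collection('Single_App_Data_Post').insertOne(doc)
                .then(() => dbo.collection('Unique_Apps')
                    .updateOne({ App_Id: idApp }, { $set: { Post_Status: 1 } })); // assumption: 1 = processed
        });
}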

Node.js - copy file from FTP A to FTP B by stream

I want to copy data from FTP A to FTP B, using a stream instead of a local temp file.
But I cannot read the data from the stream.
transformComponent.js
const Transform = require('stream').Transform;
const util = require('util');

const transformComponent = module.exports = function (options) {
    if (!(this instanceof transformComponent))
        return new transformComponent(options); // support calling without `new`
    Transform.call(this, options);
};
util.inherits(transformComponent, Transform);

transformComponent.prototype._transform = function (data, encoding, callback) {
    this.push(data);
    callback();
};

transformComponent.prototype._flush = function (callback) {
    callback();
};
ftpComponent.js
const Client = require('ftp');

var ftp = new Client(); // a single Client instance, shared by every transfer

var ftpComponent = function (host) {
    this.host = host;
    this.port = 21;
    this.user = "";
    this.password = "";
};

function connectionProperties() {
    let properties = {
        host: this.host,
        port: this.port
    };
    if (this.user && this.user !== "")
        properties.user = this.user;
    if (this.password && this.password !== "")
        properties.password = this.password;
    return properties;
}

function download(filepath, target) {
    ftp.on('ready', function () {
        ftp.get(filepath, function (err, stream) {
            if (err) throw err;
            stream.once('close', function () { ftp.end(); });
            stream.pipe(target);
        });
    });
    ftp.connect(this.connectionProperties());
}

function upload(source, filename) {
    ftp.on('ready', function () {
        // Upload files to the server:
        ftp.put(source, filename, function (err) {
            if (err) throw err;
            ftp.end();
        });
    });
    ftp.connect(this.connectionProperties());
}

ftpComponent.prototype = {
    connectionProperties: connectionProperties,
    pull: download,
    push: upload
};

module.exports = ftpComponent;
My usage:
const ftp = require('./node_component/ftpComponent.js');
const transform = require('./node_component/transformComponent.js');
var ftpSourceObject = new ftp('host A');
var ftpTargetObject = new ftp('host B');
ftpSourceObject.user = usernameA;
ftpSourceObject.password = passwordA;
ftpTargetObject.user = usernameB;
ftpTargetObject.password = passwordB;
var temp = new transform();
ftpTargetObject.push(temp, 'file-cp.txt');
ftpSourceObject.pull('file.txt', temp);
I can write data into the stream from FTP A, but when I read data from the stream and put it on FTP B, I get the error below.
Error: Unable to parse PASV server response
at Object.reentry [as cb] (/home/locadmin/fileshareservice/app/node_modules/ftp/lib/connection.js:857:19)
at Parser.<anonymous> (/home/locadmin/fileshareservice/app/node_modules/ftp/lib/connection.js:117:20)
at Parser.emit (events.js:182:13)
at Parser._write (/home/locadmin/fileshareservice/app/node_modules/ftp/lib/parser.js:59:10)
at doWrite (_stream_writable.js:410:12)
at writeOrBuffer (_stream_writable.js:394:5)
at Parser.Writable.write (_stream_writable.js:294:11)
at Socket.ondata (/home/locadmin/fileshareservice/app/node_modules/ftp/lib/connection.js:273:20)
at Socket.emit (events.js:182:13)
at addChunk (_stream_readable.js:283:12)
But when I run just the upload, writing to the stream by hand:
var temp = new transform();
ftpTargetObject.push(temp, 'file-cp.txt');
temp.write('123');
temp.end();
//ftpSourceObject.pull('file.txt', temp);
It works. How can I pass the stream as a parameter to the function?
My code is based on https://bountify.co/node-js-script-for-copying-files-between-two-ftp-locations.
I fixed it. The problem was the single Client instance created at module level in ftpComponent.js: both the download and the upload shared one FTP connection, so the PASV responses could not be parsed. Creating a new Client per transfer fixes it.
transformComponent.js is unchanged from above.
ftpComponent.js
const Client = require('ftp');

var ftpComponent = function (host) {
    this.host = host;
    this.port = 21;
    this.user = "";
    this.password = "";
};

function connectionProperties() {
    let properties = {
        host: this.host,
        port: this.port
    };
    if (this.user && this.user !== "")
        properties.user = this.user;
    if (this.password && this.password !== "")
        properties.password = this.password;
    return properties;
}

function download(filepath, target) {
    var ftp = new Client(); // the fix: a fresh Client (and connection) per transfer
    ftp.on('ready', function () {
        ftp.get(filepath, function (err, stream) {
            if (err) throw err;
            stream.once('close', function () { ftp.end(); });
            stream.pipe(target);
        });
    });
    ftp.connect(this.connectionProperties());
}

function upload(source, filename) {
    var ftp = new Client(); // likewise, a dedicated Client for the upload
    ftp.on('ready', function () {
        // Upload files to the server:
        ftp.put(source, filename, function (err) {
            if (err) throw err;
            ftp.end();
        });
    });
    ftp.connect(this.connectionProperties());
}

ftpComponent.prototype = {
    connectionProperties: connectionProperties,
    pull: download,
    push: upload
};

module.exports = ftpComponent;
My usage:
const ftp = require('./node_component/ftpComponent.js');
const transform = require('./node_component/transformComponent.js');
var ftpSourceObject = new ftp('host A');
var ftpTargetObject = new ftp('host B');
ftpSourceObject.user = usernameA;
ftpSourceObject.password = passwordA;
ftpTargetObject.user = usernameB;
ftpTargetObject.password = passwordB;
var temp = new transform();
ftpTargetObject.push(temp, 'file-cp.txt');
ftpSourceObject.pull('file.txt', temp);
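As an aside, Node's built-in stream.PassThrough is an identity transform, so it can stand in for the hand-rolled transformComponent; a minimal sketch, assuming the same ftpComponent API as above:

const { PassThrough } = require('stream');
const ftp = require('./node_component/ftpComponent.js');

const source = new ftp('host A');
const target = new ftp('host B');
// set user/password on both objects as above

const pipe = new PassThrough(); // passes every chunk through unchanged
target.push(pipe, 'file-cp.txt'); // the upload reads from the stream
source.pull('file.txt', pipe);    // the download writes into the stream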

SFTP Server for reading directory using node js

I have created a Node-based SSH2 SFTP server and client. My objective is to read the directory structure of the SFTP server. Suppose the server has a folder temp; I want to read the files inside that directory. I am using the ssh2 npm module for both the server and the client. The client connects to the server, but the directory is never listed.
Below is the code
CLIENT SIDE SFTP
var Client = require('ssh2').Client;

var connSettings = {
    host: 'localhost',
    port: 22,
    username: 'ankit',
    password: 'shruti',
    method: 'password'
    // You can use a key file too, read the ssh2 documentation
};

var conn = new Client();
conn.on('ready', function () {
    console.log("connected to sftp server");
    conn.sftp(function (err, sftp) {
        if (err)
            throw err;
        sftp.readdir('', function (err, list) {
            console.log('Inside read');
            if (err) {
                console.log(err);
                throw err;
            }
            console.log('showing directory listing');
            console.log(list);
            conn.end();
        });
        /**
        var moveFrom = "/remote/file/path/file.txt";
        var moveTo = __dirname + 'file1.txt';
        sftp.fastGet(moveFrom, moveTo, {}, function (downloadError) {
            if (downloadError) throw downloadError;
            console.log("Succesfully uploaded");
        });
        */
    });
}).connect(connSettings);
SERVER SIDE SFTP:
var constants = require('constants');
var fs = require('fs');
var ssh2 = require('ssh2');

var OPEN_MODE = ssh2.SFTP_OPEN_MODE;
var STATUS_CODE = ssh2.SFTP_STATUS_CODE;

var srv = new ssh2.Server({
    hostKeys: [fs.readFileSync(__dirname + '/key/id_rsa')], debug: console.log
}, function (client) {
    console.log('Client connected!');
    client.on('authentication', function (ctx) {
        if (ctx.method === 'password'
            // Note: Don't do this in production code, see
            // https://www.brendanlong.com/timing-attacks-and-usernames.html
            // In node v6.0.0+, you can use `crypto.timingSafeEqual()` to safely
            // compare two values.
            && ctx.username === 'ankit'
            && ctx.password === 'shruti')
            ctx.accept();
        else
            ctx.reject();
    }).on('ready', function () {
        console.log('Client authenticated!');
    });
    client.on('session', function (accept, reject) {
        console.log("Client asking for session");
        var session = accept();
        var handleCount = 0;
        var names = [];
        session.on('sftp', function (accept, reject) {
            console.log('Client sftp connection');
            var sftpStream = accept();
            sftpStream.on('OPENDIR', function (reqID, path) {
                var handle = new Buffer(4);
                handle.writeUInt32BE(handleCount++, 0, true);
                sftpStream.handle(reqID, handle);
                console.log(handle);
                console.log('Opening read directory for --' + path);
                console.log(reqID);
                console.log(path);
            }).on('READDIR', function (reqID, handle) {
                console.log('Read request');
                console.log(reqID);
                if (handle.length !== 4)
                    return sftpStream.status(reqID, STATUS_CODE.FAILURE, 'There was failure');
                // `names` is never filled, and OK is sent on every request,
                // so the client never receives a listing or an EOF
                sftpStream.status(reqID, STATUS_CODE.OK, 'Everything ok');
                sftpStream.name(reqID, names);
            });
        });
    });
}).listen(0, '127.0.0.1', function () {
    console.log('Listening on port ' + this.address().port);
});
srv.listen(22); // note: the server is already listening (on a random port) from the call above
The client will send multiple READDIR requests. On the first request you should send the file listing; on the second you should send an EOF status to tell the client the listing has finished.
An example would be:
let listSent = false;
let names = [];

sftpStream.on('OPENDIR', function (reqID, path) {
    listSent = false;
    sftpStream.handle(reqID, Buffer.alloc(4)); // handle() takes the request id plus a handle buffer
});

sftpStream.on('READDIR', function (reqID, handle) {
    if (listSent) {
        sftpStream.status(reqID, STATUS_CODE.EOF);
        return;
    }
    sftpStream.name(reqID, names);
    listSent = true;
});
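For completeness, a sketch of how the names array might be filled from a real directory. The { filename, longname, attrs } shape is what ssh2's sftpStream.name() expects; the attrs conversion below is my assumption about mapping fs.Stats onto it:

const fs = require('fs');
const path = require('path');

function buildNames(dir) {
    return fs.readdirSync(dir).map(function (filename) {
        const stats = fs.statSync(path.join(dir, filename));
        return {
            filename: filename,
            longname: filename, // an `ls -l`-style line would also do here
            attrs: {
                mode: stats.mode,
                uid: stats.uid,
                gid: stats.gid,
                size: stats.size,
                atime: Math.floor(stats.atimeMs / 1000),
                mtime: Math.floor(stats.mtimeMs / 1000)
            }
        };
    });
}

// e.g. inside the OPENDIR handler: names = buildNames('/tmp');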

Fire callback after cursor's forEach function is done?

I am using Node.js to iterate over a large product collection with the MongoDB native driver. Everything works, but I want to write a footer line to a file after all documents have been processed. How can I accomplish this?
var MongoClient = require('mongodb').MongoClient;
var assert = require('assert');

var filename = '/tmp/' + feed.outputFilename;
// note: writeFileSync is synchronous and does not take a callback
fs.writeFileSync(filename, feed.header, feed.encoding);

var url = process.env.DB_HOST;

MongoClient.connect(url, function (err, db) {
    assert.equal(null, err);
    var collection = db.collection('products');
    var cursor = collection.find({ "catalog": "electronics" }, { "batchSize": 1, fields: {} }).forEach(function (product) {
        if (product != null) {
            var child = workers[Math.floor(Math.random() * workers.length)];
            var data = {};
            data.product = product;
            data.feed = feed;
            child.send(data);
        }
    }, function (err) {
        assert.equal(null, err);
        db.close();
    });
    // This doesn't work for me (Error: Cannot read property 'on' of undefined)
    /*cursor.on('end', function() {
        // note: filename already starts with '/tmp/'
        fs.appendFile(filename, feed.footer, function(err) {
            if (err) throw err;
        });
        db.close();
    })*/
});
What is likely happening is that the value returned from your call to forEach (which is undefined, not the cursor) is what ends up in the cursor var, so cursor.on fails. Assign the value returned from find to cursor first, then call cursor.forEach; its second argument is an end callback that fires once iteration is finished, which is where the footer can be written.
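A sketch of that rearrangement, using the same driver API as the question: keep the cursor returned by find, and move the footer write into forEach's end callback:

MongoClient.connect(url, function (err, db) {
    assert.equal(null, err);
    var collection = db.collection('products');

    // find() returns the cursor; forEach itself returns undefined
    var cursor = collection.find({ "catalog": "electronics" }, { "batchSize": 1, fields: {} });

    cursor.forEach(function (product) {
        // ... dispatch each product to a worker, as before ...
    }, function (err) {
        assert.equal(null, err);
        // every document has been processed; append the footer, then close
        fs.appendFile(filename, feed.footer, function (err) {
            if (err) throw err;
            db.close();
        });
    });
});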
