node-imap module: fetch Gmail folders (labels) list - node.js

I'm trying to fetch the list of Gmail folders (labels, actually).
I'm using Node.js and this module: https://github.com/mscdex/node-imap
I want to fetch all folders and subfolders.
The documentation the author left is not very clear.
Any idea how to do this?

Finally, after some hard work, I found the answer.
This is how I get folders, not only for Gmail but for every email system that uses the IMAP standard.
After connecting to the IMAP server, get all folders with this function:
function getFolders(username, callback) {
    var folders = [];
    if (Connection[username]) {
        Connection[username].once('ready', function () {
            Connection[username].getBoxes(function (err, boxes) {
                if (err) {
                    // TODO : parse some error here
                } else {
                    folders = imapNestedFolders(boxes);
                }
                return callback(err, folders);
            });
        });
    } else {
        return framework.httpError(500, self);
    }
}
Then parse the folders into a pretty nested tree JSON object with this function:
function imapNestedFolders(folders) {
    var FOLDERS = [];
    var folder = {};

    for (var key in folders) {
        if (folders[key].attribs.indexOf('\\HasChildren') > -1) {
            var children = imapNestedFolders(folders[key].children);
            folder = {
                name: key,
                children: children
            };
        } else {
            folder = {
                name: key,
                children: null
            };
        }
        FOLDERS.push(folder);
    }

    return FOLDERS;
}
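For illustration, the resulting structure looks roughly like this (a hypothetical Gmail-style mailbox; the actual names depend on the account):

[
    { name: 'INBOX', children: null },
    { name: '[Gmail]', children: [
        { name: 'All Mail', children: null },
        { name: 'Drafts', children: null },
        { name: 'Sent Mail', children: null }
    ] }
]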
You might also change the connection variable to whatever you want.
These functions work with multiple connections; that is why the connection variable is a collection keyed by username. You can read more about this here,
I wrote about how to use multiple connections in node-imap here:
How can i handle multiple imap connections in node.js?
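For context, a minimal sketch of the Connection map assumed above (the host and options are assumptions; adjust for your provider):

var Imap = require('imap');
var Connection = {};

// Open one IMAP connection per logged-in user
function connect(username, password) {
    Connection[username] = new Imap({
        user: username,
        password: password,
        host: 'imap.gmail.com',
        port: 993,
        tls: true
    });
    Connection[username].connect();
}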

Related

Correct way to organise this process in Node

I need some advice on how to structure this function, as at the moment things are not happening in the correct order due to Node being asynchronous.
This is the flow I want to achieve; I don't need help with the code itself, but with the order needed to achieve the end result and any suggestions on how to make it efficient:
1. Node routes a GET request to my controller.
2. Controller reads a .csv file on the local system and opens a read stream using the fs module.
3. Then use the csv-parse module to convert that to an array line by line (many 100,000's of lines).
4. Start a try/catch block.
5. With the current row from the csv, take a value and try to find it in a MongoDB.
6. If found, take the ID and store the line from the CSV and this id as a foreign ID in a separate database.
7. If not found, create an entry into the DB, take the new ID, and then do 6.
8. Print out to the terminal the row number being worked on (ideally, at some point I would like to be able to send this value to the page and have it update like a progress bar as the rows are completed).
Here is a small part of the code structure that I am currently using:
const fs = require('fs');
const parse = require('csv-parse');

function addDataOne(req, id) {
    const modelOneInstance = new InstanceOne({ ...code });
    const resultOne = modelOneInstance.save();
    return resultOne;
}

function addDataTwo(req, id) {
    const modelTwoInstance = new InstanceTwo({ ...code });
    const resultTwo = modelTwoInstance.save();
    return resultTwo;
}

exports.add_data = (req, res) => {
    const fileSys = 'public/data/';
    const parsedData = [];
    let i = 0;
    fs.createReadStream(`${fileSys}${req.query.file}`)
        .pipe(parse({}))
        .on('data', (dataRow) => {
            let RowObj = {
                one: dataRow[0],
                two: dataRow[1],
                three: dataRow[2],
                etc,
                etc
            };
            try {
                ModelOne.find(
                    { propertyone: RowObj.one, propertytwo: RowObj.two },
                    '_id, foreign_id'
                ).exec((err, searchProp) => {
                    if (err) {
                        console.log(err);
                    } else {
                        if (searchProp.length > 1) {
                            console.log('too many returned from find function');
                        }
                        if (searchProp.length === 1) {
                            addDataOne(RowObj, searchProp[0]).then((result) => {
                                searchProp[0].foreign_id.push(result._id);
                                searchProp[0].save();
                            });
                        }
                        if (searchProp.length === 0) {
                            let resultAddProp = null;
                            addDataTwo(RowObj).then((result) => {
                                resultAddProp = result;
                                addDataOne(req, resultAddProp._id).then((result) => {
                                    resultAddProp.foreign_id.push(result._id);
                                    resultAddProp.save();
                                });
                            });
                        }
                    }
                });
            } catch (error) {
                console.log(error);
            }
            i++;
            let iString = i.toString();
            process.stdout.clearLine();
            process.stdout.cursorTo(0);
            process.stdout.write(iString);
        })
        .on('end', () => {
            res.send('added');
        });
};
I have tried to make the functions use async/await, but it seems to conflict with the fs.createReadStream or csv-parse functionality, probably due to my inexperience and incorrect use of the code...
I appreciate that this is a long question about the fundamentals of the code, but some tips/advice/pointers on how to get this going would be appreciated. I had it working when the data was sent one at a time via a POST request from Postman, but I can't implement the next stage, which is to read from the CSV file containing many records.
First of all, you can merge the following checks into one query:
if (searchProp.length === 1) {
if (searchProp.length === 0) {
Use the upsert option in a MongoDB findOneAndUpdate query to update or insert in a single operation.
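A minimal sketch of what that could look like with Mongoose (the field names follow the question's code; the $setOnInsert payload is an assumption):

ModelOne.findOneAndUpdate(
    { propertyone: RowObj.one, propertytwo: RowObj.two }, // match criteria
    { $setOnInsert: { propertyone: RowObj.one, propertytwo: RowObj.two } },
    { upsert: true, new: true }, // create the document if none matches
    (err, doc) => {
        // doc is either the existing document or the newly created one,
        // so the length === 1 and length === 0 branches collapse into one
    }
);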
Secondly, don't do this on the main thread. Use a queue mechanism; it will be much more efficient.
The queue I personally use is Bull Queue:
https://github.com/OptimalBits/bull#basic-usage
This also provides the progress-reporting functionality you need.
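A minimal sketch of that setup (the queue name is an assumption; Bull connects to a local Redis instance by default):

const Queue = require('bull');
const csvQueue = new Queue('csv-rows'); // uses redis://127.0.0.1:6379 by default

// Producer: enqueue each parsed row instead of processing it inline
csvQueue.add({ row: RowObj });

// Consumer: process rows off the request path and report progress
csvQueue.process(async (job) => {
    // ...find-or-upsert logic for job.data.row goes here...
    job.progress(100); // report progress, e.g. as a percentage
});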
Also, regarding using async/await with a read stream, many examples can be found online, such as: https://humanwhocodes.com/snippets/2019/05/nodejs-read-stream-promise/
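For instance, on Node 10+ the parse stream can be consumed with for await...of, which makes each row awaitable in sequence (a sketch; handleRow is a hypothetical per-row handler):

const fs = require('fs');
const parse = require('csv-parse');

async function processFile(path) {
    const parser = fs.createReadStream(path).pipe(parse({}));
    let i = 0;
    for await (const dataRow of parser) {
        await handleRow(dataRow); // hypothetical: find-or-upsert for one row
        i++;
    }
    return i; // total rows processed
}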

How to create a service in Angular 7 that uses the FS module to list files from a directory?

I'm coding an app based on Node v10.13.0, Electron 3.0.7 & Angular 7.
A service is in charge of file processing; a function named listFiles reads and returns an array of filenames contained in a directory.
listFiles(path): Observable<string[]> {
    this.files_in_directories = [];
    return of(this.fs.readdir(path, function (err, items) {
        if (err) {
            console.log(err);
            return;
        }
        for (var i = 0; i < items.length; i++) {
            this.files_in_directories.push(items[i]);
            console.log(items[i]);
        }
        return this.files_in_directories;
    }.bind(this)));
}
My code is based on Angular Guide: https://angular.io/tutorial/toh-pt4#observable-heroservice
In the component, the service is called:
ngOnInit() {
    this.getScripts('Project');
}

getScripts(project) {
    this.generalService.listFiles(this.config.location.datastore + '\\' + project).subscribe(files => {
        this.scripts = files;
        this.cdref.detectChanges();
    });
}
When this code is executed, the files are read but never returned as an Observable; an undefined value is obtained instead (see the debugger screenshot).
I'm a little bit lost about how to proceed from this point forward; the files in the directory are correctly listed in the console.
I believe the problem comes from returning the array inside a callback. Any ideas?
@NishKal Kashyap, you are right. Before seeing your response, I modified the code in the service to read the files in the directory synchronously, and it worked just fine. Thank you for the suggestion.
listFiles(path): Observable<string[]> {
    this.files_in_directories = [];
    this.fs.readdirSync(path).forEach(file => {
        this.files_in_directories.push(file);
    });
    return of(this.files_in_directories);
}
You identified the problem correctly; perhaps modify your code like this (an arrow function keeps this bound to the service):
listFiles(path): Observable<string[]> {
    this.files_in_directories = [];
    this.fs.readdir(path, (err, items) => {
        if (err) {
            console.log(err);
            return;
        }
        for (var i = 0; i < items.length; i++) {
            this.files_in_directories.push(items[i]);
            console.log(items[i]);
        }
    });
    return of(this.files_in_directories);
}
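Note that readdir is asynchronous, so the array may still be empty at the moment the Observable emits. A common way to wrap a Node-style callback API in a properly asynchronous Observable is RxJS's bindNodeCallback (a sketch, assuming RxJS 6 is available):

import { bindNodeCallback, Observable } from 'rxjs';

listFiles(path): Observable<string[]> {
    // Convert fs.readdir(path, cb) into a function returning an Observable
    // that emits the string[] result once the directory has been read
    const readdir$ = bindNodeCallback(this.fs.readdir);
    return readdir$(path);
}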

Using socketio-file-upload to upload multiple files

I'm using NodeJS with socket.io and socketio-file-upload to upload multiple files, and it works great! However, I'm having an issue: I'm trying to save the name attribute of the input the files come from, so I can store it in my DB.
When I upload one or more files, I can't seem to access the input field name or anything that tells me which file came from which input field.
Here is my front:
var uploader = new SocketIOFileUpload(socket);
var array_files_lvl_3 = [
    document.getElementById("l3_id_front"),
    document.getElementById("l3_id_back"),
    document.getElementById("l3_address_proof_1"),
    document.getElementById("l3_address_proof_2"),
    document.getElementById("l3_passport")
];
uploader.listenOnArraySubmit(document.getElementById("save_level_3"), array_files_lvl_3);
And here is my back:
var uploader = new siofu();
uploader.dir = "uploads/userL3";
uploader.listen(socket);
uploader.on('saved', function (evnt) {
    console.log(evnt);
    // this "evnt" variable has a lot of information,
    // but none of it tells me the input name where it came from
});
This is what the "evnt" variable holds: [screenshot omitted]
Unfortunately, the library doesn't send that information, and there is no existing configuration that exposes it, so this needs a code modification.
client.js:374
var _fileSelectCallback = function (event) {
    var files = event.target.files || event.dataTransfer.files;
    event.preventDefault();
    var source = event.target;
    _baseFileSelectCallback(files, source);
};

client.js:343
var _baseFileSelectCallback = function (files, source) {
    if (files.length === 0) return;
    // Ensure existence of meta property on each file,
    // recording which input element the file came from
    for (var i = 0; i < files.length; i++) {
        if (source) {
            if (!files[i].meta) files[i].meta = {
                sourceElementId: source.id || "",
                sourceElementName: source.name || ""
            };
        } else {
            if (!files[i].meta) files[i].meta = {};
        }
    }
};
After these changes, I am able to get the details in event.file.meta.
I'm the author of socketio-file-upload.
It looks like the specific input field is not currently being recorded, but this would not be a hard feature to add. Someone opened a new issue and left a backpointer to this SO question.
A workaround would be to use submitFiles directly instead of listenOnArraySubmit. Something like this might work (untested):
// add a manual listener on your submit button
document.getElementById("save_level_3").addEventListener("click", () => {
    let index = 0;
    for (let element of array_files_lvl_3) {
        let files = element.files;
        for (let file of files) {
            file.meta = { index };
        }
        uploader.submitFiles(files);
        index++;
    }
});
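If that works, the index set on the client should then show up on the server in the saved handler as evnt.file.meta.index, since meta set on a file client-side is transmitted along with the upload (the same mechanism the patched snippet above relies on).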

Persist data on disk using chrome extension API

I am trying to save some data which should be available even after the browser is restarted, so the data needs to persist. I am using the Chrome Storage Sync API for this, but when I restart my browser, I get an empty object back from chrome.storage.sync.get.
Here is my sample code:
SW.methods.saveTaskListStore = function() {
    chrome.storage.sync.set({
        'taskListStore': SW.stores.taskListStore
    }, function() {
        if (SW.callbacks.watchProcessSuccessCallback) {
            SW.callbacks.watchProcessSuccessCallback(SW.messages.INFO_DATA_SAVED);
            SW.callbacks.watchProcessSuccessCallback = null;
        }
    });
};

SW.methods.loadTaskListStore = function() {
    SW.stores.loadTaskListStore = [];
    chrome.storage.sync.get('taskListStore', function(taskFeed) {
        var tasks = taskFeed.tasks;
        if (tasks && !tasks.length) {
            SW.stores.loadTaskListStore = tasks;
        }
    });
};
I guess I am using the API wrong.
If this is not a copy-paste error, you are storing under the key taskListStore and trying to read the data back under the name loadTaskListStore.
Besides that, according to the documentation on StorageArea.get(), the result is an object mapping the requested keys to their values. Thus, in your case, you should do:
chrome.storage.sync.get("taskListStore", function(items) {
    if (items.taskListStore) {
        var tasks = items.taskListStore.tasks;
        ...
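Putting both points together, a corrected loader might look like this (a sketch; it assumes the stored value is the array itself, as in the question's set() call):

SW.methods.loadTaskListStore = function() {
    SW.stores.taskListStore = [];
    // get() passes an object keyed by the requested storage keys
    chrome.storage.sync.get('taskListStore', function(items) {
        var tasks = items.taskListStore;
        if (tasks && tasks.length) {
            SW.stores.taskListStore = tasks;
        }
    });
};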

Get a collection and add a value to the response

I want to create a function in the server script that can return a collection plus some extra values.
For example:
Meteor.publish("users", function () {
var users;
users = Meteor.users.find();
users.forEach(function (user){
user.profile.image = "some-url";
});
return users;
});
But this doesn't work properly. My question is: what is the right way to add a value to a collection response in a publish function?
There are two ways you can implement a publish function:
By returning a cursor (or an array of cursors)
By using this.added(), this.changed() and this.removed()
Only method 2 allows you to modify the returned documents.
Please refer to the Meteor documentation here. However, since the provided sample code might look complex, here is another one:
// server: publish the rooms collection
Meteor.publish("rooms", function () {
    return Rooms.find({});
});
is equivalent to:
// server: publish the rooms collection
Meteor.publish("rooms", function () {
    var self = this;
    var handle = Rooms.find({}).observeChanges({
        added: function (id, fields) { self.added("rooms", id, fields); },
        changed: function (id, fields) { self.changed("rooms", id, fields); },
        removed: function (id) { self.removed("rooms", id); }
    });
    self.ready();
    self.onStop(function () { handle.stop(); });
});
In the second sample, you can modify the fields parameter before sending it for publication, like this:
added: function (id, fields) {
    fields.newField = 12;
    self.added("rooms", id, fields);
},
Source: this post.
Is it important to do this on the server? You could use the transform function on the client instead:
Client JS
// Somewhere where it can run before anything else (make sure you have access
// to the other bits of the document, i.e. services.facebook.id, otherwise
// you'll get a "services is undefined" error)
Meteor.users._transform = function (doc) {
    doc.profile.image = "http://graph.facebook.com/" + doc.services.facebook.id + "/picture";
    return doc;
};
Now when you do:
Meteor.user().profile.image
=> "http://graph.facebook.com/55592/picture"
I have opened an issue before with regards to sharing a transform onto the client: https://github.com/meteor/meteor/issues/821
