My goal is to use IMAP to retrieve and display emails (pushed to an array called 'email') from a Gmail account as long as they have a specific string in the subject line. This is my first time using Node-IMAP. So far, I am able to return all emails found in the inbox, but as soon as I try to use imap.search it returns an empty array. I assume I'm missing something, but I can't figure out what.
var Imap = require('imap');
var inspect = require('util').inspect;
// (imap connection setup omitted)

var email = [];
imap.once('ready', function() {
openInbox(function(err, box) {
if (err) throw err;
imap.search([[ 'HEADER', 'SUBJECT', 'ABCDEFG'] ], function(err, results) {
if (err) throw err;
// note: imap.search returns UIDs, so fetch with imap.fetch, not imap.seq.fetch
var f = imap.fetch(results, {
bodies: 'HEADER.FIELDS (FROM SUBJECT DATE)'
});
f.on('message', function(msg, seqno) {
var prefix = '(#' + seqno + ') ';
msg.on('body', function(stream, info) {
var buffer = '';
stream.on('data', function(chunk) {
buffer += chunk.toString('utf8');
});
stream.once('end', function() {
email.push(prefix + inspect(Imap.parseHeader(buffer)));
});
});
});
});
});
});
I have code that reads unseen emails and creates a PDF.
The problem is that I cannot pull a new unseen email without executing the code again.
var Imap = require('imap');
const MailParser = require('mailparser').MailParser;
var pdf = require('html-pdf');
var fs = require('fs');
var Promise = require("bluebird");
Promise.longStackTraces();
var imapConfig = {
user: '*****',
password: '*****',
host: 'imap.gmail.com',
port: 993,
tls: true
};
var imap = new Imap(imapConfig);
Promise.promisifyAll(imap);
imap.once("ready", execute);
imap.once("error", function(err) {
console.error("Connection error: " + err.stack);
});
imap.connect();
function execute() {
imap.openBox("INBOX", false, function(err, mailBox) {
if (err) {
console.error(err);
return;
}
imap.search(["UNSEEN"], function(err, results) {
if(!results || !results.length){console.log("No unread mails");imap.end();return;}
var f = imap.fetch(results, { bodies: "" });
f.on("message", processMessage);
f.once("error", function(err) {
return Promise.reject(err);
});
f.once("end", function() {
console.log("Done fetching all unseen messages.");
imap.end();
});
});
});
}
const options = { format: 'A2', width:"19in", height:"17in", orientation: "portrait" };
function processMessage(msg, seqno) {
console.log("Processing msg #" + seqno);
// console.log(msg);
var parser = new MailParser();
parser.on("headers", function(headers) {
console.log("Header: " + JSON.stringify(headers));
});
parser.on('data', data => {
if (data.type === 'text') {
console.log(seqno);
console.log(data.html);
var test = data.html;
pdf.create(test, options).toStream(function(err, stream){
if (err) { return console.error(err); }
stream.pipe(fs.createWriteStream('./foo.pdf'));
});
}
});
msg.on("body", function(stream) {
stream.on("data", function(chunk) {
parser.write(chunk.toString("utf8"));
});
});
msg.once("end", function() {
// console.log("Finished msg #" + seqno);
parser.end();
});
}
Also, I have tried to use setInterval to check for new unseen emails, but I get
'Error: Not authenticated'
How can I pull new unseen emails in a loop and create pdf from that email?
Your observation is correct. You must poll your IMAP (or POP3) server on a regular schedule to keep up with incoming messages. Depending on your requirements, a schedule of once every few minutes is usually enough.
Continuous polling, or polling every second or so, is very rude. The operator of the IMAP server may block your application if you try to do that: to them it looks like an attempt to overload the server.
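The 'Error: Not authenticated' from the setInterval attempt most likely happens because imap.end() has already closed the connection when the next interval fires. A minimal sketch of one way around it, reusing the question's imapConfig and execute (and assuming execute is changed to accept the connection as a parameter): open a fresh connection on every poll.

var Imap = require('imap');

function poll() {
  var imap = new Imap(imapConfig); // fresh connection, so we are always authenticated
  imap.once('ready', function() {
    execute(imap); // execute() calls imap.end() when it is done
  });
  imap.once('error', function(err) {
    console.error('Connection error: ' + err.stack);
  });
  imap.connect();
}

setInterval(poll, 5 * 60 * 1000); // every 5 minutes; keep the schedule polite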
Here is a module retrieve_email.js which connects to my gmail account and downloads the UNSEEN emails after a date. The code is pretty much copied from the example in the imap module's documentation.
const Imap = require('imap');
const inspect = require('util').inspect;
const simpleParser = require('mailparser').simpleParser;
const imap = new Imap({
user: 'mygmail@gmail.com',
password: 'mypassword',
host: 'imap.gmail.com',
port: 993,
tls: true
});
function openInbox(callback) {
imap.openBox('INBOX', true, callback);
};
async function parse_email(body) {
let parsed = await simpleParser(body);
// ...
};
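// Note (illustration, not the author's elided code): the object that
// simpleParser resolves to exposes fields such as parsed.subject,
// parsed.from.text, parsed.text, parsed.html and parsed.attachments.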
module.exports = function() {
imap.once('ready', function() {
openInbox(function(err, box) {
if (err) throw err;
imap.search(['UNSEEN', ['SINCE', 'May 20, 2018']], function(err, results){
if (err) throw err;
var f = imap.fetch(results, {bodies: ''});
f.on('message', function(msg, seqno) {
console.log('Message #%d', seqno);
var prefix = '(#' + seqno + ') ';
msg.on('body', function(stream, info) {
if (info.which === 'TEXT')
console.log(prefix + 'Body [%s] found, %d total bytes', inspect(info.which), info.size);
var buffer = '', count = 0;
stream.on('data', function(chunk) {
count += chunk.length;
buffer += chunk.toString('utf8');
if (info.which === 'TEXT')
console.log(prefix + 'Body [%s] (%d/%d)', inspect(info.which), count, info.size);
});
stream.once('end', function() {
parse_email(buffer); // parse once, after the whole body has arrived
if (info.which !== 'TEXT')
console.log(prefix + 'Parsed header: %s', inspect(Imap.parseHeader(buffer)));
else
console.log(prefix + 'Body [%s] Finished', inspect(info.which));
});
});
msg.once('attributes', function(attrs) {
console.log(prefix + 'Attributes: %s', inspect(attrs, false, 8));
});
msg.once('end', function() {
console.log(prefix + 'Finished');
});
});
f.once('error', function(err) {
console.log('Fetch error: ' + err);
});
f.once('end', function() {
console.log('Done fetching all messages');
imap.end();
});
});
});
});
imap.once('error', function(err) {
console.log(err);
});
imap.once('end', function() {
console.log('Connection ended');
});
imap.connect();
};
When the module is called in index.js, I can see in the debugger that the code is scanned from top to bottom; the last line scanned is imap.connect(), and then control goes back to the next line in index.js, with no connection to the gmail account and no emails retrieved. What is wrong with the code above?
UPDATED: status after socket.connect() in debug (screenshot omitted).
Have a look at this; it is the Gmail API reference from Google. On that page there is an example of how to connect to it using Node.js.
https://developers.google.com/gmail/api/quickstart/nodejs
And here is an example from the same docs that shows you how to search and retrieve a message list using the q parameter:
https://developers.google.com/gmail/api/v1/reference/users/messages/list
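For illustration, a minimal sketch of such a search with the googleapis package, assuming you already have an authorized OAuth2 client (the auth variable) from the quickstart:

const {google} = require('googleapis');

const gmail = google.gmail({version: 'v1', auth}); // `auth` comes from the quickstart flow

gmail.users.messages.list({
  userId: 'me',
  q: 'is:unread subject:ABCDEFG' // same syntax as the Gmail search box
}, (err, res) => {
  if (err) return console.error(err);
  console.log(res.data.messages); // array of {id, threadId}
});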
P.S. In my comment I was just asking whether you were sure you had done all the other configuration needed to access your Gmail account from code: creating the app, authorizing OAuth, or, in what seemed to be your case, allowing less secure application access. Have a look at the links; you might find you are missing something.
And do you really need to use the IMAP package?
The problem turned out to be Avast Mail Shield sitting in the middle, intercepting the IMAP traffic and causing the TLS connection to fail. Also, the IDE debugger stopped somewhere that kept the connection active but never ready. Here is the detail of the solution.
I am using express-ws (https://www.npmjs.com/package/express-ws), an API which helps create servers for express and websocket clients.
app.ws('/', function(ws, req) {
console.log("New connection")
if (content.length > 0) {
console.log(content)
ws.send(content)
}
ws.on('message', function(msg, flags) {
console.log("Received "+ msg);
});
ws.on('data', function(msg, flags) {
var data = []; // List of Buffer objects
res.on("data", function(chunk) {
data.push(chunk); // Append Buffer object
console.log(data)
})
})
});
Now as you can see from the code above, whenever a connection is created it checks the length of content and sends content to the client if it is more than 0.
The following router code updates the file on a web request.
The issue is that if the file is modified some time after a connection was created, that connection doesn't know about it, and hence the send function is not called.
I also tried fs.watch but I am not able to make it work.
router.post('/run_restart', function(req, res, next) {
text = '{"to_do": "run_test", "devices":"all", "argv": { "test": "' + req.body.cmd + '", "cycles": "' + req.body.cycles + '", "awake_for": "' + req.body.wt + '" }}'
path = process.env['HOME']+'/Desktop/automation/Stressem/StressemWeb/bin/task.txt'
fs.writeFile(path, text)
res.render('home.jade', { title: 'Stressem' });
});
fs.watch(file, function (event) {
fs.stat(file, function (err, stats) {
if(stats.size>80){
console.log("Event: " + event);
fs.readFile(file, 'utf8', function (err, data) {
if (err) throw err;
content = data.toString();
});
}
});
});
What I would like is that whenever the file is updated, ws.send is called for one of the websocket connections.
Since your server is the one that changes the file, there's no need to use fs.watch as you already know when a file changes. All that's left to do is iterate over a list of open connections and send them the new contents.
var connections = []; // Keeps track of all connections
app.ws('/', function(ws, req) {
console.log("New connection")
connections.push(ws); // Add the new connection to the list
if (content.length > 0) {
console.log(content)
ws.send(content)
}
ws.on('message', function(msg, flags) {
console.log("Received "+ msg);
});
ws.on('data', function(msg, flags) {
var data = []; // List of Buffer objects
res.on("data", function(chunk) {
data.push(chunk); // Append Buffer object
console.log(data)
})
})
// TODO: Make sure you remove closed connections from `connections`
// by listening for the ws `close` event (see the sketch after this block).
});
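One way to address that TODO, as a small sketch (inside the same app.ws handler, using the connections array defined above):

ws.on('close', function() {
  var i = connections.indexOf(ws);
  if (i !== -1) connections.splice(i, 1); // forget the closed socket
});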
router.post('/run_restart', function(req, res, next) {
text = '{"to_do": "run_test", "devices":"all", "argv": { "test": "' + req.body.cmd + '", "cycles": "' + req.body.cycles + '", "awake_for": "' + req.body.wt + '" }}'
path = process.env['HOME']+'/Desktop/automation/Stressem/StressemWeb/bin/task.txt'
fs.writeFile(path, text)
res.render('home.jade', { title: 'Stressem' });
connections.forEach(function(c){
c.send(text); // Send the new text to all open connections
});
});
Please note: this won't work if you have multiple processes or servers, but since you're writing to the local file system instead of a database, I assume this is not a requirement.
This simple approach works well with express. If a small delay is not a problem for you, you can use this:
setInterval(checkFunction, millisecondsToCheck)
For more details:
http://www.w3schools.com/jsref/met_win_setinterval.asp
https://developer.mozilla.org/en-US/docs/Web/API/WindowTimers/setInterval
If you use it like this, you can stop it after your job is done:
var timer = setInterval(checkFunction, millisecondsToCheck);
To clear it:
clearInterval(timer);
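Note that setInterval takes the callback first and the interval in milliseconds second. A minimal sketch applied to this question (checkFunction being whatever re-reads the file and sends it):

function checkFunction() {
  // re-read the file and ws.send() the new content here
}

var timer = setInterval(checkFunction, 5000); // check every 5 seconds

// when the job is done:
clearInterval(timer);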
I solved this with something like this:
var conn_array = [];
app.ws('/', function(ws, req) {
conn_array.push(ws)
console.log("New connection")
fs.readFile(file, 'utf8', function (err, data) {
if (err) throw err;
content = data.toString();
if (content.length > 0) {
console.log(content.length)
conn_array[0].send(content)
}
});
ws.on('message', function(msg, flags) {
console.log("Received "+ msg);
});
ws.on('data', function(msg, flags) {
var data = []; // List of Buffer objects
res.on("data", function(chunk) {
data.push(chunk); // Append Buffer object
console.log(data)
})
})
});
function readFile(){
console.log("I am here")
fs.readFile(file, 'utf8', function (err, data) {
if (err) throw err;
content = data.toString();
if (content.length > 0 && conn_array.length>0) conn_array[0].send(content);
})
}
var interval = setInterval(readFile, 100000);
For now I have assumed there is just one client.
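To lift that assumption, a small sketch that broadcasts to every open connection instead of conn_array[0]; readFile could then call broadcast(content) instead of conn_array[0].send(content):

function broadcast(content) {
  conn_array.forEach(function(ws) {
    if (ws.readyState === 1) ws.send(content); // 1 === OPEN in the ws library
  });
}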
I'm trying to get a file from S3 and store it in Couchbase (NoSQL).
I'm trying to store the outputStream with the following code:
var outputStream1 = fs.createWriteStream("./tmp/" + url);
// if statusCode == 200, then we have the file, lets save it in out cache and then send it to the client
res.on('data', function (chunk) {
outputStream1.write(chunk);
});
res.on('end', function () {
var value = outputStream1;
cb.set(key, value, function (err, result) {
if (err) { console.log(err); }
console.log("Set item for key with CAS: " + result.cas);
cb.get(key, function (err, result) {
if (err) { console.log(err); }
console.log("Value for key is: " + result.value);
var readStream = result.value;
readStream.on('open', function () {
console.log(response);
});
readStream.on('end', function () {
readStream.close();
});
});
});
});
The problem is that I store the outputStream and then try to read it.
I'm searching for a way to store the data object we get from S3 in Couchbase, and then to be able to send it to the client. Is that possible?
I think you can just set a Buffer as the value and get it back later. First you need to collect the data in a buffer:
var buffers = [];
res.on('data', function (chunk) {
buffers.push(chunk);
});
res.on('end', function () {
var value = Buffer.concat(buffers);
cb.set(key, value, function (err, result) {
if (err) { console.log(err); }
console.log("Set item for key with CAS: " + result.cas);
cb.get(key, function (err, result) {
if (err) { console.log(err); }
console.log("Value for key is: " + result.value); // You should get your value in here });
});
You can also use the concat-stream module to help you (see the sketch below). I've answered a similar question about buffering streams.
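A minimal sketch with concat-stream, reusing the question's res, cb and key:

var concat = require('concat-stream');

res.pipe(concat(function (body) {
  // body is one Buffer containing the whole S3 response
  cb.set(key, body, function (err, result) {
    if (err) { return console.log(err); }
    console.log("Stored " + body.length + " bytes");
  });
}));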
I am quite a newbie with node.js. What I am trying to achieve is the following:
1. Connect to my postgresql database and get the info of a place (id, coordinates).
2. Call a weather API and get the info for that spot, using the coordinates obtained in the previous step.
3. Insert the returned JSON into the database. I get 8 hourly objects, with the weather info every 3 hours (0, 3, 6, 9, 12, 15, 18, 21). I need to iterate through these objects and store them as 8 records in the database.
I wrote the following code:
app.get('/getapi', function(req, res){
var json_bbdd;
//------------ BBDD CONNECTION----------------
var pg = require('pg');
var conString = "postgres://postgres:postgres2@localhost/places";
var client = new pg.Client(conString);
client.connect(function(err) {
if(err) {
console.log('could not connect to postgres');
}
client.query('SELECT * from places where id=3276', function(err, result) {
if(err) {
console.log('error running query');
}
json_bbdd=result.rows[0];
var coords = JSON.parse(json_bbdd.json).coordinates;
var id = json_bbdd.id;
var input = {
query: coords[1] + ',' + coords[0] ,
format: 'JSON',
fx: '',
callback: 'MarineWeatherCallback'
};
var url = _PremiumApiBaseURL + "marine.ashx?q=" + input.query + "&format=" + input.format + "&fx=" + input.fx + "&key=" + _PremiumApiKey + "&tide=yes";
$.ajax({
type: 'GET',
url: url,
async: false,
contentType: "application/json",
dataType: 'jsonp',
success: function (json) {
var date= json.data.weather[0].date;
for (var i=0; i < 8; i++){
var hourly = json.data.weather[0].hourly[i];
var time= hourly.time;
client.query('INSERT into parte (id, date, time) VALUES($1, $2, $3)', [id, date, time],
function(err, result) {
if (err) {
console.log(err);
} else {
console.log('row inserted: ' + id + ' ' + time);
}
});
} // FOR
},
error: function (e) {
console.log(e.message);
}
});
client.end();
});
});
});
Steps 1 and 2 are performed perfectly. The third step, on the other hand, does nothing and doesn't even throw an error.
I read in this post: node-postgres will not insert data, but doesn't throw errors either, that using the async module could help, but I have no idea how to rewrite the code. I need some help.
Regards,
Aitor
I didn't test your snippet; I can only point out the things that look wrong to my eyes.
It is better not to use jQuery on a node server. There is an excellent library called request for making remote http requests.
You should handle database errors better, because in your example the code continues after a DB error.
You are calling client.end() too early, so at the time you try to insert data into the database the connection is already closed. You have to move client.end() to the end of the success and error functions and wait until all callbacks are done.
I think it is also better to use a connection pool instead of Client.
You can possibly use the JSON type in PostgreSQL to avoid serializing/deserializing JSON data in your code (see the sketch right after this list).
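On that last point, a minimal sketch, assuming the places.json column is migrated to the json (or jsonb) type; node-postgres then returns it already parsed:

client.query('SELECT id, json FROM places WHERE id = $1', [3276], function(err, result) {
  if (err) { return console.error(err); }
  var coords = result.rows[0].json.coordinates; // no JSON.parse needed
  console.log(coords);
});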
Here is a revised example (untested). I didn't replace jQuery here; some minor tweaking is included.
var pg = require('pg');
var conString = "postgres://postgres:postgres2@localhost/places";
app.get('/getapi', function(req, res, next){
var json_bbdd;
//------------ BBDD CONNECTION----------------
pg.connect(conString, function(err, client, done) {
if(err) {
// example how can you handle errors
console.error('could not connect to postgres');
return next(new Error('Database error'));
}
client.query('SELECT * from places where id=3276', function(err, result) {
if(err) {
console.error('error running query');
done();
return next(new Error('Database error'));
}
json_bbdd = result.rows[0];
var coords = JSON.parse(json_bbdd.json).coordinates;
var id = json_bbdd.id;
var input = {
query: coords[1] + ',' + coords[0] ,
format: 'JSON',
fx: '',
callback: 'MarineWeatherCallback'
};
var url = _PremiumApiBaseURL + "marine.ashx?q=" +
input.query + "&format=" + input.format +
"&fx=" + input.fx + "&key=" +
_PremiumApiKey + "&tide=yes";
$.ajax({
type: 'GET',
url: url,
async: false,
contentType: "application/json",
dataType: 'jsonp',
success: function (json) {
var date = json.data.weather[0].date;
var callbacks = 0;
for (var i=0; i < 8; i++) {
var hourly = json.data.weather[0].hourly[i];
var time= hourly.time;
client.query(
'INSERT into parte (id, date, time) VALUES($1, $2, $3)',
[id, date, time],
function(err, result) {
if (err) {
console.log(err);
} else {
console.log('row inserted: ' + id + ' ' + time);
}
callbacks++;
if (callbacks === 8) {
console.log('All callbacks done!');
done(); // done(); is rough equivalent of client.end();
}
});
} // FOR
},
error: function (e) {
console.error(e.message);
done(); // done(); is rough equivalent of client.end();
return next(new Error('Http error'));
}
});
});
});
});
OK, now another problem came up... I was hesitant to create a new post, but I think it may be related to the previous one.
The aim is to read three places from the database instead of one, and do the same process as before for each one.
The code is as follows (with the changes proposed by ivoszz):
app.get('/getapi', function(req, res, next){
//------------ BBDD CONNECTION----------------
pg.connect(conString, function(err, client, done) {
if(err) {
// example how can you handle errors
console.error('could not connect to postgres',err);
return next(new Error('Database error'));
}
client.query('SELECT * from places where id>3274 and id<3278', function(err, result) {
if(err) {
console.error('error running query',err);
done();
return next(new Error('Database error'));
}
var first_callback = 0;
for (var y=0; y<result.rows.length; y++) {
var coords = JSON.parse(result.rows[y].json).coordinates;
var id = result.rows[y].id;
var input = {
query: coords[1] + ',' + coords[0] ,
format: 'JSON',
fx: ''
};
var url = _PremiumApiBaseURL + "marine.ashx?q=" + input.query + "&format=" + input.format + "&fx=" + input.fx + "&key=" + _PremiumApiKey;
request(url, function(err, resp, body) {
body = JSON.parse(body);
if (!err && resp.statusCode == 200) {
var date = body.data.weather[0].date;
var callbacks = 0;
for (var i=0; i < 8; i++) {
var hourly = body.data.weather[0].hourly[i];
client.query(
'INSERT into parte (id, date, time) VALUES($1, $2, $3)',
[id, date, hourly.time],
function(err, result) {
if (err) {
console.log(err);
} else {
console.log('row inserted: ' + id + ' iteration ' + i);
}
callbacks++;
if (callbacks === 8) {
console.log('All callbacks done!from id '+id);
//done(); // done(); is rough equivalent of client.end();
//res.send("done");
}
});
} // FOR
}
else { // if the API http request throws an error
console.error(err);
done(); // done(); is rough equivalent of client.end();
return next(new Error('Http API error'));
}
}); // REQUEST API URL
first_callback++;
if (first_callback === result.rows.length-1) {
console.log('All global callbacks done!');
done(); // done(); is rough equivalent of client.end();
res.send("done");
}}
}); // SELECT from pg
}); // CONNECT to pg
}); // app.get
I don't know why it tries to insert id=3277 three times instead of inserting id=3275, id=3276 and then id=3277... What it does instead is insert the first 8 records fine the first time (id=3277), but then it throws an error saying the records are already inserted (primary key = id, date, time) with id 3277...
It seems it first does the 3 iterations of the outer FOR and then does the 3 iterations of the inner FOR, but with the info of the last iteration (place). I can't understand it very well...
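As a hedged note on the likely cause: var is function-scoped, so every request and query callback in the loop shares the same id and coords variables, which have already advanced to the last row (id=3277) by the time the asynchronous callbacks run. A minimal sketch of the usual fix, giving each row its own scope:

result.rows.forEach(function(row) {
  var coords = JSON.parse(row.json).coordinates; // captured per row
  var id = row.id;                               // no longer shared across callbacks
  // build the url and call request(url, ...) exactly as before;
  // the INSERT callbacks now see the right id
});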