var twitter = require('ntwitter');

// Configure twitter (credentials omitted)
var twit = new twitter({
  // consumer_key, consumer_secret, access_token_key, access_token_secret go here
});

var keywords = ['hello', 'world'];

twit.stream('statuses/filter', { track: keywords.join(',') }, function (stream) {
  stream.on('data', function (data) {
    console.log(data);
  });
  stream.on('end', function (response) {
    console.log("\n====================================================");
    console.log("DESTROYING");
    console.log("====================================================\n");
  });
  setTimeout(function () {
    stream.destroy();
  }, 60000);
});
I'm new to Node.js. What is the best way to stop this stream and start it again with a different set of keywords?
I can destroy() the stream and then create a new one, but is there any way to change the tracked keywords without disconnecting?
I'm still fairly new to this, so this approach may not be a good one and may waste resources. I don't know how to verify that, so I'll post it here and hope that someone more experienced can tell us whether it's OK or wrong, and most importantly, why.
The idea is to put the tw.stream call inside a function and call that function with the array of words you want to track. It will start tracking the new words and stop tracking the removed ones:
// Array to store the tracked words
var TwitWords = [];

// Tracker function
function TrackWords(array) {
  tw.stream('statuses/filter', { track: array }, function (stream) {
    stream.on('data', function (data) {
      console.log(data.text);
    });
  });
}

// Add word
function AddTwitWord(word) {
  if (TwitWords.indexOf(word) == -1) {
    TwitWords.push(word);
    TrackWords(TwitWords);
  }
}

// Remove word
function RemoveTwitWord(word) {
  if (TwitWords.indexOf(word) != -1) {
    TwitWords.splice(TwitWords.indexOf(word), 1);
    TrackWords(TwitWords);
  }
}
I hope it's ok, because it's the only way I found.
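One caveat with the approach above (my own observation, not something stated in the original answer): every call to TrackWords opens another stream without closing the previous one, so connections pile up. A minimal sketch that keeps a reference to the current stream and destroys it before re-tracking might look like this (currentStream is a name I introduced for illustration):

// Hypothetical variation: keep a handle to the open stream and destroy it
// before opening a new one, so only a single connection stays active.
var currentStream = null;

function TrackWords(array) {
  if (currentStream) {
    currentStream.destroy(); // close the previous connection first
  }
  tw.stream('statuses/filter', { track: array }, function (stream) {
    currentStream = stream;
    stream.on('data', function (data) {
      console.log(data.text);
    });
  });
}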
First of all, I am a newbie in Node.js and want to improve my skills.
I have a table in Airtable and want to get all the records from it.
That's easy with the Airtable API for Node.js.
But what I want to do is push these records into an array (tab) and save them for later use (JSON, Excel, ...).
To do so, I am using callbacks since the call is async. I have heard about Promises, but they are very new to me and I barely understand them.
Here is my code for now:
var Airtable = require('airtable');
Airtable.configure({
endpointUrl: 'https://api.airtable.com',
apiKey: 'keyKWYJPOEObWhNt2'
});
var base = Airtable.base('app4qIwfmG0ZKAdBH');
var view = "Main View";
var tab = [];
base('Table 1').select({
  view: view
}).eachPage(function page(records, fetchNextPage) {
  records.forEach(function (record) {
    tab.push({
      "Name": record.get('Name'),
      "Notes": record.get('Notes')
    });
  });
  fetchNextPage();
  pushToArray(tab);
}, function done(error) {
  if (error) {
    console.log(error);
    console.log(tab);
  }
});

function pushToArray(tab) {
  TabToJson(tab);
  return tab;
};

function TabToJson(tab) {
  console.log(tab);
  return JSON.stringify(tab);
};
How can I implement Promises? Are they necessary here? I don't want to end up with dozens of callback functions.
Thank you all and have a nice day!
Careful here! You're on the right track in realizing that this function is async and that you want to wait until every iteration of #eachPage has resolved before writing out your JSON, much as a Promise would let you do. But Airtable was kind enough to already provide what you're looking for: the callback
function done(error) {
  if (error) {
    console.log(error);
    console.log(tab);
  }
}
will run immediately after the last successful call to #fetchNextPage. This is where you should have your JSON-writing logic. You would want something like
function done(error) {
  if (error) {
    console.log(error);
    return;
  }
  TabToJson(tab);
}
You don't need your pushToArray function, as you've already pushed the individual records from Airtable into your tab array in each call to #page. Furthermore, if you want to do more than log your JSON output, which your question suggests, you should look into Node's File System library; check out the fs#writeFile method.
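For example, a minimal sketch of a done callback that writes the JSON to disk could look like this (output.json is a file name I made up for illustration):

var fs = require('fs');

function done(error) {
  if (error) {
    console.log(error);
    return;
  }
  // Write the collected records to a file instead of just logging them.
  fs.writeFile('output.json', JSON.stringify(tab, null, 2), function (err) {
    if (err) {
      console.log(err);
      return;
    }
    console.log('Saved ' + tab.length + ' records to output.json');
  });
}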
You can use async/await for this. Just make sure the await call is inside an async function.
async function loadRecords() {
  const tab = [];
  try {
    // .all() fetches every page and resolves with the full list of records
    const records = await base('Table 1').select({ view }).all();
    records.forEach((record) => {
      tab.push({
        "Name": record.get('Name'),
        "Notes": record.get('Notes')
      });
    });
    console.log(JSON.stringify(tab));
  } catch (e) {
    console.error(e);
  }
  return tab;
}
I have a node app that reads two files as streams. I listen for each stream's 'end' event and then work with the results. The problem is I don't really know how to wait for BOTH streams to emit 'end'.
What I have now is:
reader1.on('end', function () {
  reader2.on('end', function () {
    doSomething();
  });
});
With small files this works, but if one of the files is very large the app aborts.
Your execution logic is somewhat flawed. You ought to do something like this instead
var checklist = [];
// checklist will contain sort of a counter

function reader_end() {
  // doSomething only if both have been added to the checklist
  if (checklist.length == 2) {
    doSomething();
  }
}

reader1.on('end', function () {
  checklist.push('reader1'); // increment the counter
  reader_end();
});

reader2.on('end', function () {
  checklist.push('reader2');
  reader_end();
});
There are also libraries that handle this sort of thing more cleanly, though, like Async and Promises (e.g. Bluebird).
With Async you'll need to use compose
var r12_done = async.compose(reader1.on, reader2.on);

r12_done('end', function () {
  doSomething();
});
Edit: I just noticed that reader1.on registers a Stream 'end' handler, which doesn't follow the standard (err, results) callback signature, so this probably won't work. In that case you should just go with Promises.
With Promises (e.g. Bluebird) you'll need to first promisify and then join:
var reader1Promise = Promise.promisify(reader1.on)('end');
var reader2Promise = Promise.promisify(reader2.on)('end');
var reader12Promise = Promise.join(reader1Promise, reader2Promise);

reader12Promise.then(function () {
  doSomething();
});
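As a side note (my own addition, not part of the original answer): since 'end' handlers don't follow the (err, result) callback convention, it may be simpler to wrap each stream in a plain Promise by hand and combine them with Promise.all. A minimal sketch, assuming reader1 and reader2 are the streams from the question:

// Wrap a stream's 'end' event (and 'error') in a native Promise.
function streamEnd(stream) {
  return new Promise(function (resolve, reject) {
    stream.on('end', resolve);
    stream.on('error', reject);
  });
}

Promise.all([streamEnd(reader1), streamEnd(reader2)])
  .then(function () {
    doSomething(); // both streams have finished
  })
  .catch(function (err) {
    console.error(err);
  });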
I keep banging my head against the wall because of tons of different errors. This is the code I'm trying to use:
fs.readFile("balance.txt", function (err, data) //At the beginning of the script (checked, it works)
{
if (err) throw err;
balance=JSON.parse(data);;
});
fs.readFile("pick.txt", function (err, data)
{
if (err) throw err;
pick=JSON.parse(data);;
});
/*....
.... balance and pick are modified
....*/
if (shutdown)
{
  fs.writeFile("balance2.txt", JSON.stringify(balance));
  fs.writeFile("pick2.txt", JSON.stringify(pick));
  process.exit(0);
}
At the end of the script, the files have not been modified in the slightest. I then found out on this site that the files were being opened twice simultaneously, or something like that, so I tried this:
var balance, pick;

var stream = fs.createReadStream("balance.txt");
stream.on("readable", function ()
{
  balance = JSON.parse(stream.read());
});

var stream2 = fs.createReadStream("pick.txt");
stream2.on("readable", function ()
{
  pick = JSON.parse(stream2.read());
});

/****
 ****/

fs.unlink("pick.txt");
fs.unlink("balance.txt");

var stream = fs.createWriteStream("balance.txt", {flags: 'w'});
var stream2 = fs.createWriteStream("pick.txt", {flags: 'w'});
stream.write(JSON.stringify(balance));
stream2.write(JSON.stringify(pick));
process.exit(0);
But this time, both files are empty... I know I should catch errors, but I just don't see where the problem is. I don't mind storing the two objects in the same file if that helps. Besides that, I had never written any JavaScript in my life before yesterday, so please give me a simple explanation if you know what failed here.
What I think you want to do is use readFileSync rather than readFile to read your files, since you need them to be read before doing anything else in your program (http://nodejs.org/api/fs.html#fs_fs_readfilesync_filename_options).
This will make sure you have read both the files before you execute any of the rest of your code.
Make your code look like this:
try
{
  balance = JSON.parse(fs.readFileSync("balance.txt"));
  pick = JSON.parse(fs.readFileSync("pick.txt"));
}
catch (err)
{
  throw err;
}
I think you will get the functionality you are looking for by doing this.
Note, you will not be able to check for an error in the same way you can with readFile. Instead you will need to wrap each call in a try catch or use existsSync before each operation to make sure you aren't trying to read a file that doesn't exist.
How to capture no file for fs.readFileSync()?
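As an illustration of the existsSync option mentioned above (a sketch of mine, not from the original answer; falling back to an empty object is just an example default):

// Guard each read so a missing file doesn't crash the script.
var balance = fs.existsSync("balance.txt")
  ? JSON.parse(fs.readFileSync("balance.txt"))
  : {};
var pick = fs.existsSync("pick.txt")
  ? JSON.parse(fs.readFileSync("pick.txt"))
  : {};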
Furthermore, you have the same problem with the writes. You are kicking off async writes and then immediately calling process.exit(0). A better way would be to either chain the asynchronous writes and exit in the final callback, or write them synchronously and then exit.
Async option:
if (shutdown)
{
  fs.writeFile("balance2.txt", JSON.stringify(balance), function (err) {
    fs.writeFile("pick2.txt", JSON.stringify(pick), function (err) {
      process.exit(0);
    });
  });
}
Sync option:
if (shutdown)
{
  fs.writeFileSync("balance2.txt", JSON.stringify(balance));
  fs.writeFileSync("pick2.txt", JSON.stringify(pick));
  process.exit(0);
}
I'm using Mongoose with Node.js and have the following code, which calls the callback after all the save() calls have finished. However, I feel this is a very dirty way of doing it and would like to see the proper way to get this done.
function setup(callback) {
  // Clear the DB and load fixtures
  Account.remove({}, addFixtureData);

  function addFixtureData() {
    // Load the fixtures
    fs.readFile('./fixtures/account.json', 'utf8', function (err, data) {
      if (err) { throw err; }
      var jsonData = JSON.parse(data);
      var count = 0;
      jsonData.forEach(function (json) {
        count++;
        var account = new Account(json);
        account.save(function (err) {
          if (err) { throw err; }
          if (--count == 0 && callback) callback();
        });
      });
    });
  }
}
You can clean up the code a bit by using a library like async or Step.
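For instance, a rough sketch of the fixture-loading part using async's each (my illustration, assuming the same Account model and fixture file as in the question):

var async = require('async');

function addFixtureData(callback) {
  fs.readFile('./fixtures/account.json', 'utf8', function (err, data) {
    if (err) return callback(err);
    // Save every fixture in parallel; async.each calls the final callback
    // once all saves have completed (or on the first error).
    async.each(JSON.parse(data), function (json, done) {
      new Account(json).save(done);
    }, callback);
  });
}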
Also, I've written a small module that handles loading fixtures for you, so you just do:
var fixtures = require('./mongoose-fixtures');

fixtures.load('./fixtures/account.json', function (err) {
  // Fixtures loaded, you're ready to go
});
Github:
https://github.com/powmedia/mongoose-fixtures
It will also load a directory of fixture files, or objects.
I did a talk about common asynchronous patterns (serial and parallel) and ways to solve them:
https://github.com/masylum/i-love-async
I hope it's useful.
I've recently created a simpler abstraction called wait.for to call async functions in sync mode (based on Fibers). It's at an early stage but it works. It is at:
https://github.com/luciotato/waitfor
Using wait.for, you can call any standard Node.js async function as if it were a sync function, without blocking Node's event loop. You can code sequentially when you need to.
Using wait.for, your code would be:
// in a fiber
function setup(callback) {
  // Clear the DB and load fixtures
  wait.for(Account.remove, {});
  // Load the fixtures
  var data = wait.for(fs.readFile, './fixtures/account.json', 'utf8');
  var jsonData = JSON.parse(data);
  jsonData.forEach(function (json) {
    var account = new Account(json);
    wait.forMethod(account, 'save');
  });
  callback();
}
That's actually the proper way of doing it, more or less. What you're doing there is a parallel loop. You can abstract it into its own "async parallel foreach" function if you want (and many do), but that's really the only way of doing a parallel loop.
Depending on what you intended, one thing that could be done differently is the error handling. Because you're throwing, if there's a single error, that callback will never get executed (count won't be decremented). So it might be better to do:
account.save(function (err) {
  if (err) return callback(err);
  if (!--count) callback();
});
And handle the error in the callback. It's better node-convention-wise.
I would also change another thing to save you the trouble of incrementing count on every iteration:
var jsonData = JSON.parse(data)
  , count = jsonData.length;

jsonData.forEach(function (json) {
  var account = new Account(json);
  account.save(function (err) {
    if (err) return callback(err);
    if (!--count) callback();
  });
});
If you are already using underscore.js anywhere in your project, you can leverage its _.after method. You need to know in advance how many async calls will be made, but aside from that it's a pretty elegant solution.
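A minimal sketch of what that could look like (my own illustration, reusing the data, Account, and callback names from the question):

var _ = require('underscore');

var jsonData = JSON.parse(data);
// _.after returns a function that only runs its wrapped callback
// once it has been invoked jsonData.length times.
var finished = _.after(jsonData.length, callback);

jsonData.forEach(function (json) {
  new Account(json).save(function (err) {
    if (err) return callback(err);
    finished();
  });
});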
So I'm trying to use the Node.js fs module in my Express app to iterate over a directory, store each filename in an array, and pass that array to my Express view to iterate through the list, but I'm struggling to do so. When I do a console.log within the files.forEach loop, it prints the filename just fine, but as soon as I try to do anything such as:
var myfiles = [];
var fs = require('fs');
fs.readdir('./myfiles/', function (err, files) {
  if (err) throw err;
  files.forEach(function (file) {
    myfiles.push(file);
  });
});
console.log(myfiles);
it fails and just logs an empty array. I'm not sure exactly what is going on; I think it has to do with callback functions, but if someone could walk me through what I'm doing wrong, why it's not working, and how to make it work, it would be much appreciated.
The myfiles array is empty because the callback hasn't been called yet by the time you call console.log().
You'll need to do something like:
var fs = require('fs');

fs.readdir('./myfiles/', function (err, files) {
  if (err) throw err;
  files.forEach(function (file) {
    // do something with each file HERE!
  });
});
// Trying to do something with files here won't work,
// because the callback hasn't fired yet.
Remember, Node's I/O is asynchronous: unless you do your processing inside your callbacks, you cannot guarantee that the asynchronous functions have completed yet.
One way around this problem for you would be to use an EventEmitter:
var fs = require('fs'),
    EventEmitter = require('events').EventEmitter,
    filesEE = new EventEmitter(),
    myfiles = [];

// this event will be called when all files have been added to myfiles
filesEE.on('files_ready', function () {
  console.dir(myfiles);
});

// read all files from the current directory
fs.readdir('.', function (err, files) {
  if (err) throw err;
  files.forEach(function (file) {
    myfiles.push(file);
  });
  filesEE.emit('files_ready'); // trigger files_ready event
});
As several have mentioned, you are using an async method, so you have a nondeterministic execution path.
However, there is an easy way around this. Simply use the Sync version of the method:
var myfiles = [];
var fs = require('fs');

var arrayOfFiles = fs.readdirSync('./myfiles/');
// Yes, the following is not super-smart, but you might want to process the files. This is how:
arrayOfFiles.forEach(function (file) {
  myfiles.push(file);
});

console.log(myfiles);
That should work as you want. However, synchronous calls block the event loop, so you should not use them unless it is vitally important for the operation to be synchronous.
Read more here: fs.readdirSync
fs.readdir is asynchronous (as with many operations in node.js). This means that the console.log line is going to run before readdir has a chance to call the function passed to it.
You need to either:
Put the console.log line within the callback function given to readdir, i.e:
fs.readdir('./myfiles/', function (err, files) {
  if (err) throw err;
  files.forEach(function (file) {
    myfiles.push(file);
  });
  console.log(myfiles);
});
Or simply perform some action with each file inside the forEach.
I think it has to do with callback functions,
Exactly.
fs.readdir makes an asynchronous request to the file system for that information, and calls the callback at some later time with the results.
So function (err, files) { ... } doesn't run immediately, but console.log(myfiles) does.
At some later point in time, myfiles will contain the desired information.
You should note BTW that files is already an Array, so there is really no point in manually appending each element to some other blank array. If the idea is to put together the results from several calls, then use .concat; if you just want to get the data once, then you can just assign myfiles = files directly.
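For example (a quick sketch of mine illustrating both options; './otherfiles/' is just a made-up second directory):

// Get the data once: just keep the array readdir gives you.
fs.readdir('./myfiles/', function (err, files) {
  if (err) throw err;
  myfiles = files;
  console.log(myfiles);
});

// Combine results from several calls: concatenate each batch.
fs.readdir('./otherfiles/', function (err, files) {
  if (err) throw err;
  myfiles = myfiles.concat(files);
  console.log(myfiles);
});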
Overall, you really ought to read up on "Continuation-passing style".
I faced the same problem, and based on the answers given in this post I've solved it with Promises, which seem to be a perfect fit for this situation:
router.get('/', (req, res) => {
  var viewBag = {}; // It's just my little habit from .NET MVC ;)

  var readFiles = new Promise((resolve, reject) => {
    fs.readdir('./myfiles/', (err, files) => {
      if (err) {
        reject(err);
      } else {
        resolve(files);
      }
    });
  });

  // showcase, just in case you need to run more async operations before the route responds
  var anotherPromise = new Promise((resolve, reject) => {
    doAsyncStuff((err, anotherResult) => {
      if (err) {
        reject(err);
      } else {
        resolve(anotherResult);
      }
    });
  });

  Promise.all([readFiles, anotherPromise]).then((values) => {
    viewBag.files = values[0];
    viewBag.otherStuff = values[1];
    console.log(viewBag.files); // logs e.g. [ 'file.txt' ]
    res.render('your_view', viewBag);
  }).catch((errors) => {
    // you can use the 'errors' property to render errors in the view
    // or implement a different error-handling scheme
    res.render('your_view', { errors: errors });
  });
});
Note: you don't have to push the found files into a new array because you already get an array from fs.readdir()'s callback. According to the Node docs:
The callback gets two arguments (err, files) where files is an array
of the names of the files in the directory excluding '.' and '..'.
I believe this is a very elegant and handy solution, and most of all, it doesn't require you to bring any new modules into your script.
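As a small follow-up of my own (assuming you're on Node 8 or newer): the built-in util.promisify can build the readFiles promise for you instead of wrapping fs.readdir by hand:

const util = require('util');
const fs = require('fs');

// readdirAsync(path) returns a Promise that resolves with the file list.
const readdirAsync = util.promisify(fs.readdir);

readdirAsync('./myfiles/')
  .then((files) => console.log(files))
  .catch((err) => console.error(err));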