I have this code which just reads in data from a .csv file and converts it to json and logs the data:
// Read the CSV fixture as a UTF-8 text stream.
const fs = require('fs');
const path = require('path');
const sd = path.resolve(__dirname + '/fixtures/SampleData.csv');
const strm = fs.createReadStream(sd).setEncoding('utf8');
const Rx = require('rxjs/Rx');
const csv2json = require('csv2json');
// Pipe the raw CSV through csv2json; `dest` emits 'data' events with JSON rows.
const dest = strm
.pipe(csv2json({
separator: ','
}));
dest.on('error', function(e){
console.error(e.stack || e);
})
// NOTE(review): .flatMap() subscribes to every inner timer as soon as its row
// arrives; since the stream delivers the rows almost simultaneously, all the
// 100 ms timers overlap and everything prints at once (see hypothesis below).
const obs = Rx.Observable.fromEvent(dest, 'data')
.flatMap(d => Rx.Observable.timer(100).mapTo(d))
obs.subscribe(v => {
console.log(String(v));
})
What the code is doing is logging all the data after a 100 ms delay. I actually want to delay on each line of data and log each line after a small delay.
The above code doesn't achieve that - what is the best way to control the rate at which the data is logged?
Hypothesis: All the lines of data come in approximately at the same time, so all are delayed 100 ms, so they end up getting printed at pretty much the same time. I need to only start delaying the next line after the previous has been logged.
the following code seems to do the same thing as using the timer above:
// NOTE(review): .delay(100) shifts the whole sequence by one fixed 100 ms
// offset; it does not space emissions apart, so it behaves like the timer above.
const obs = Rx.Observable.fromEvent(dest, 'data')
.delay(100)
Hypothesis: All the lines of data come in approximately at the same
time, so all are delayed 100 ms, so they end up getting printed at
pretty much the same time. I need to only start delaying the next line
after the previous has been logged.
Your hypothesis is correct
Solution
Swap out the .flatMap() in your original solution with .concatMap()
// mergeMap (alias of flatMap): all four inner timers start immediately, so
// every value arrives together after ~500 ms.
Rx.Observable.from([1,2,3,4])
.mergeMap(i => Rx.Observable.timer(500).mapTo(i))
.subscribe(val => console.log('mergeMap value: ' + val));
// concatMap: each inner timer is subscribed only after the previous one
// completes, so the values arrive spaced 500 ms apart.
Rx.Observable.from([1,2,3,4])
.concatMap(i => Rx.Observable.timer(500).mapTo(i))
.subscribe(val => console.log('concatMap value: ' + val));
<script src="https://cdnjs.cloudflare.com/ajax/libs/rxjs/5.0.3/Rx.js"></script>
This will ensure that every emission completes before the next emission is subscribed to and starts delaying its value.
I couldn't find the functionality I needed in the RxJS library (although it might be there, I just couldn't find it, let me know if there is a better, more idiomatic, way).
So I wrote this, which seems to do the job:
const fs = require('fs');
const path = require('path');
const sd = path.resolve(__dirname + '/fixtures/SampleData.csv');
const strm = fs.createReadStream(sd).setEncoding('utf8');
const Rx = require('rxjs/Rx');
const csv2json = require('csv2json');
// Custom operator: re-emits each source value spaced `timeout` ms apart --
// the next delay starts only after the previous value has been delivered.
// NOTE(review): patching Rx.Observable.prototype works but is fragile across
// library versions; .concatMap(d => Rx.Observable.timer(t).mapTo(d)) is the
// idiomatic equivalent.
const p = Rx.Observable.prototype;
p.eachWait = function(timeout){
  const source = this;
  return Rx.Observable.create(sub => {
    // State lives inside create() so each subscription gets its own queue
    // (the original shared one queue across all subscribers).
    const queue = [];
    let draining = false;
    let timer = null;
    const drain = function(){
      draining = true;
      timer = setTimeout(() => {
        if(queue.length > 0){
          // Emit unconditionally -- falsy values ('' or 0) must not be
          // dropped (the original's `if(c)` silently skipped them).
          sub.next(queue.shift());
        }
        if(queue.length > 0){
          drain();
        }
        else{
          draining = false;
        }
      }, timeout);
    };
    const inner = source.subscribe(
      function next(v){
        queue.push(v);
        if(!draining){
          drain();
        }
      },
      sub.error.bind(sub),
      // NOTE(review): as in the original, completion is forwarded
      // immediately; queued values may still be emitted afterwards.
      sub.complete.bind(sub)
    );
    // Teardown: cancel any pending timer and the upstream subscription so
    // unsubscribing does not leak a live setTimeout.
    return () => {
      if(timer) clearTimeout(timer);
      inner.unsubscribe();
    };
  });
}
// Convert the CSV stream to JSON rows, then log one row per second.
const dest = strm.pipe(csv2json({ separator: ',' }));
dest.on('error', (err) => {
  console.error(err.stack || err);
});
const obs = Rx.Observable.fromEvent(dest, 'data').eachWait(1000);
obs.subscribe((row) => {
  console.log(String(row));
});
I assume this is about as performant as you can make it - only one timer should be running at any given moment.
Related
Error: 'The write action you are performing on the channel has hit the write rate limit.',
How can I make my loop send at a slower rate. It seems to be sending all at once, although I'm trying to use an increment in a loop one by one.
It's still causing the send rate to throttle and break. Is using an interval or timeout a good idea? I'm not sure how I should set it up.
Simple index.js using node.js
const autosend = require("discord-autosender")
const fs = require("fs");
// Reads every line of mr_robot.txt and posts each one to the Discord channel.
function send(){
// NOTE(review): fill these in -- empty IDs will fail the API call.
var channelID = ""
var tokenID = ""
const data = fs.readFileSync('mr_robot.txt', 'UTF-8');
const lines = data.split(/\r?\n/);
// BUG(review): this loop fires every Post back-to-back with no delay,
// which is exactly what trips the channel's write rate limit.
for (let l_indx = 0; l_indx < lines.length; l_indx++) {
var message = lines[l_indx];
autosend.Post(message, channelID, tokenID)
}
}
send();
Mr.Robot.txt
A dog of that house shall move me to stand. I
will take the wall of any man or maid of Montague’s.
That shows thee a weak slave, for the weakest
goes to the wall.
’Tis true, and therefore women, being the
weaker vessels, are ever thrust to the wall. Therefore
I will push Montague’s men from the wall and
thrust his maids to the wall.
The quarrel is between our masters and us
their men.
You can combine Promise, setTimeout, and async-await to reduce the speed of the loop.
// Posts each line of mr_robot.txt to the channel, pausing 1 s between posts
// so the Discord write rate limit is not exceeded.
// Returns the promise for the whole run so callers can await it or attach
// a .catch() (the original fired newFun() and dropped the promise, leaving
// any rejection unhandled).
function send() {
  const channelID = '';
  const tokenID = '';
  const data = fs.readFileSync('mr_robot.txt', 'UTF-8');
  const lines = data.split(/\r?\n/);
  const postAll = async () => {
    for (const message of lines) {
      // Throttle: wait 1 s before each post.
      await new Promise((resolve) => setTimeout(resolve, 1000));
      autosend.Post(message, channelID, tokenID);
    }
  };
  return postAll();
}
send();
I have the following code
const readline = require('readline');
const {stdin, stderr} = process;
// readline interface: prompts go to stderr, input comes from stdin.
const rl = readline.createInterface({
output: stderr,
input: stdin
})
// NOTE(review): getCursorPos() reports a position relative to readline's own
// prompt/input tracking -- it does not query the terminal, so unrelated
// console.log output does not move it (see the answer below).
console.log(rl.getCursorPos());
let i = 5;
while (i > 0) {
console.log('fill up with logs');
i--
}
console.log(rl.getCursorPos())
// NOTE(review): rl is never closed; without rl.close() the process keeps
// stdin open and will not exit on its own.
and its output
{ cols: 2, rows: 0 }
fill up with logs
fill up with logs
fill up with logs
fill up with logs
{ cols: 2, rows: 0 }
As shown above, the last 'getCursorPos()' should return a new position since the stderr output has been filled up with logs; instead, it returns its default value. Did I misunderstand the way it works?
Or is there any way I can get the cursor position and save it multiple times using readline? I've looked into using ANSI escape codes, but it seems like they can only store one cursor position at a time.
The position returned by readline is relative to the current cursor location, not absolute on the screen.
I was able to create a promise to get this information using the answer below containing the escape sequence for requesting terminal cursor position:
How can I get position of cursor in terminal?
// Queries the terminal for the real cursor position using the ANSI Device
// Status Report escape (ESC[6n); the terminal replies on stdin with
// ESC[<row>;<col>R, which is parsed out of the raw input.
const getCursorPos = () => new Promise((resolve) => {
const termcodes = { cursorGetPosition: '\u001b[6n' };
process.stdin.setEncoding('utf8');
// Raw mode so the terminal's reply reaches us instead of being echoed/buffered.
process.stdin.setRawMode(true);
const readfx = function () {
const buf = process.stdin.read();
const str = JSON.stringify(buf); // "\u001b[9;1R"
// Extract the "<row>;<col>" portion between '[' and 'R'.
const regex = /\[(.*)/g;
const xy = regex.exec(str)[0].replace(/\[|R"/g, '').split(';');
// NOTE(review): rows/cols are returned as strings here; convert with
// Number() if arithmetic on the position is needed.
const pos = { rows: xy[0], cols: xy[1] };
process.stdin.setRawMode(false);
resolve(pos);
}
// Register the reply handler first, then send the position request.
process.stdin.once('readable', readfx);
process.stdout.write(termcodes.cursorGetPosition);
})
// Demo: write some text, then ask the terminal where the cursor ended up.
const AppMain = async function () {
process.stdout.write('HELLO');
const pos = await getCursorPos();
process.stdout.write("\n");
console.log({ pos });
}
AppMain();
I am using OpenCV4NodeJS-prebuilt for my project to use match template.
I created two files, one being Index.js and the other named matchTemplate.js.
In Index.js I call match template:
const { matchTemplate } = require("./matchTemplate");
...
// Times two template-match calls that are started together via Promise.all.
let a = async function () {
let tm = performance.now();
try {
await Promise.all([
matchTemplate(baseImage, templateR),
matchTemplate(baseImage, templateL)
]).then(result => {
const c = result.map((ob) => (ob.C)) // confidence
// NOTE(review): `top` is assigned without let/const -- an implicit
// global; declare it (or return it from this function) explicitly.
top = c[0] > c[1] ? result[0].Y + 8 : result[1].Y + 11
})
} catch (error) {
console.log(error)
}
// Total wall-clock time for both matches.
tm = performance.now() - tm;
console.log(tm)
}
and this is matchTemplate.js
const cv = require('opencv4nodejs-prebuilt')
// Decodes the template image and runs OpenCV template matching (method 3 =
// TM_CCORR_NORMED) against inputFile; resolves with the best match's Y
// position and its confidence score.
exports.matchTemplate = async function (inputFile, templateImage) {
// eslint-disable-next-line no-unused-expressions
const matS = await cv.imdecodeAsync(templateImage)
console.time('templateMatching')
// NOTE(review): this appears to be the synchronous binding -- it blocks the
// event loop, so two "parallel" calls still run one after the other (the
// timings above add up to the total); matchTemplateAsync would allow real
// concurrency under Promise.all.
const matched = inputFile.matchTemplate(matS, 3)
console.timeEnd('templateMatching')
const minMax = matched.minMaxLoc()
return ({ Y: minMax.maxLoc.y, C: minMax.maxVal })
}
The log output of matchTemplate is:
templateMatching: 892.648ms templateMatching: 890.387ms
and The Log output of index.js is:
TemplateMatching: 1824.8019220000133
Why is there no improvement in speed?
While the execution is done in parallel, why is it still taking time equal to the time taken by both calls combined?
I tried Promise.all method to call ghostscript via gs4fb npm package and convert PDF to Image and the time improvement was there.
By time improvement i mean the difference of total time taken by Promise.all method and calling the functions one by one.
I am trying to create a file with dummy data. Since the file will be huge with 32^5 data points, I was using the write-stream. But I cannot see any data being written to the file. What could be the reason for this?
const faker = require('faker');
const fs = require('fs');
const os = require('os');
const MAX_DATA_POINTS = Math.pow(32, 5)
const stream = fs.createWriteStream('sample-data.csv', { flags : 'a' });
// BUG(review): write()'s return value is ignored -- ~33M rows get buffered
// in memory with no wait for 'drain', and the stream is never end()ed, so
// the buffered data may never be flushed to disk (see the answer below).
for(let i = 0; i < MAX_DATA_POINTS; i++) {
console.log(i)
stream.write(`${faker.name.findName()}, ${i} ${os.EOL}`);
}
console.log(`Written ${MAX_DATA_POINTS} .. `);
The write method returns false if the stream wishes for the calling code to wait for the drain event to be emitted before continuing to write additional data and true otherwise. It is quite possible that the stream is not draining, and calls to write() are just buffering chunks and returning false.
You will need to wait till all the buffered chunks are drained (accepted for delivery by the operating system). When that happens, the drain event will be emitted.
Note: It is recommended that once write() returns false, no more chunks be written until the 'drain' event is emitted.
You could modify your dummy-csv file creator code in the way I have given.
const faker = require('faker');
const fs = require('fs');
const os = require('os');
const MAX_DATA_POINTS = Math.pow(32, 5);
let c = 0;
const stream = fs.createWriteStream('sample-data.csv', { flags : 'a' });
// Writes rows until the stream's internal buffer is full (write() returns
// false), then pauses and resumes on 'drain' so memory stays bounded.
function write() {
  let ok = true;
  do {
    c++;
    ok = stream.write(`${faker.name.findName()}, ${c} ${os.EOL}`);
  } while (c < MAX_DATA_POINTS && ok);
  if (c < MAX_DATA_POINTS) {
    // Back-pressure: resume once the buffered chunks have been accepted.
    stream.once('drain', write);
  } else {
    // All rows queued: close the stream so the remaining buffer is flushed
    // and the file handle is released (the original never called end()).
    stream.end(() => console.log(`Written ${MAX_DATA_POINTS} ..`));
  }
}
write()
I have a list of words with pronunciation data in a text file. What I would like to do is have the user enter a word, and then have the program check to see if I have data on that word in that file. I'd like to do it in RxJs, which I am new to.
The code below is the closest I can get to what I want. Within my main stream I have a filter call that creates a dependent stream, called 'checkstream'. What I don't understand is how to use the results of that dependent stream in the filter method of my main stream. Currently that filter method fails, but I still get the data to the screen by console logging it.
If there is data in my text file for a word, then the checkstream will end up being an observable containing just the data I want to retrieve and show to the user. I want to somehow pipe that data down to the consumer of my main stream, but I don't understand how to do that.
I would appreciate any help you can provide. Even just some intuition would be useful.
var Rx = require('rx');
var RxNode = require('rx-node');
var fs = require('fs');
var split = require('split');
// For each word typed on stdin, scan dict.txt (one JSON record per line)
// for a matching entry.
RxNode.fromReadableStream(process.stdin)
.map( (inputData) => {
var inputWord = inputData.toString().trim();
return inputWord;
})
.map( (inputWord) => {
// BUG(review): this inner stream is fire-and-forget -- its matches go
// straight to console.log and never reach the outer pipeline.
var checkStream = fs.createReadStream('./dict.txt');
RxNode.fromReadableStream(checkStream.pipe(split()))
.map( (lineFromFile) => {
return JSON.parse(lineFromFile);
})
.filter((parsedDataToCheck) => {
return parsedDataToCheck.word.toLowerCase().trim() === inputWord; })
.subscribe((dataThatMatches) => { console.log(dataThatMatches) });
// BUG(review): dataToReturn is never declared, so this throws a
// ReferenceError; the inner observable must be RETURNED and flattened
// (flatMap) instead of subscribed here -- see the answer below.
return dataToReturn;
})
.subscribe(function(dataToReturn) {
console.log(dataToReturn);
});
Maybe something like this:
var Rx = require('rx');
var RxNode = require('rx-node');
var fs = require('fs');
var split = require('split');
// For each word typed on stdin, flatten an inner scan of dict.txt into the
// outer stream and emit the first matching record.
RxNode.fromReadableStream(process.stdin)
  .map(function (chunk) {
    return chunk.toString().trim();
  })
  .flatMap(function (word) {
    var lines = RxNode.fromReadableStream(
      fs.createReadStream('./dict.txt').pipe(split())
    );
    return lines
      .map(function (line) {
        return JSON.parse(line);
      })
      .find(function (entry) {
        return entry.word.toLowerCase().trim() === word;
      });
  })
  .subscribe(function (match) {
    console.log(match);
  });
Note that it is possible that the input words from stdin get reordered after filtering due to the asynchronous fs reads.