How to play back time-related events - node.js

To get familiar with Node.js I'm working on a little project where users can play a piano in their browser and all connected users get to hear the keys played. For this I'm using Express with Socket.io.
So far so good: communication works and sounds are heard by all users. My next challenge is to let a user record himself playing the piano and then play it back to all connected users.
The recording itself is rather straightforward:
user.socket.on('keyDown', function (message) {
    user.socket.broadcast.emit('keyDownReceived', JSON.stringify(message));

    if (user.isRecording) {
        var key = message.key;
        var time = currentTime() - user.recordingStartTime;

        user.recording.timeOffsets.push(time);
        user.recording.keys.push(key);
    }
});
Every time a key is pressed, I fill two arrays: one with the time offsets of each key since the record button was pressed, and one with the keys themselves. This later gets saved to MongoDB.
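For reference, a recording captured this way might look like the following (a sketch; the exact document shape saved to MongoDB is an assumption):

// Hypothetical shape of a stored recording: two parallel arrays, where
// timeOffsets[i] is the number of milliseconds after recording started
// at which keys[i] was pressed.
var recording = {
    timeOffsets: [0, 480, 950, 1430],
    keys: ['C4', 'E4', 'G4', 'C5']
};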
My question is how I would go about playing this back to the user. My initial idea was to do the following:
user.socket.on('playRecording', function () {
    user.playRecordingStartTime = currentTime();

    var interval = setInterval(function () {
        var time = currentTime() - user.playRecordingStartTime;
        var index = user.recording.timeOffsets.indexOf(time);

        if (index != -1) {
            var message = {key: user.recording.keys[index]};
            user.socket.broadcast.emit('keyDownReceived', JSON.stringify(message));
        }
    }, 1);
});
So essentially I'm firing a timer every millisecond and doing a reverse lookup in the array to see if a key was pressed at that time. As you might imagine, this is not ideal, and keys are often skipped because the execution takes too long.
How should such a problem be tackled in Node.js?
Edit: As suggested by Steven, I could just use setTimeout. How embarrassing!
user.socket.on('playRecording', function () {
    user.recording.timeOffsets.forEach(function (offset, index) {
        setTimeout(function () {
            var message = {key: user.recording.keys[index]};
            user.socket.emit('playKey', JSON.stringify(message));
            user.socket.broadcast.emit('keyDownReceived', JSON.stringify(message));
        }, offset);
    });
});

In your specific case I would recommend looping over the note array you are playing and creating a new setTimeout() for each note, with that note's time offset as the delay.
As an example:
var arrNotes = [{ note: "do", time: 1500 }, { note: "re", time: 2500 }],
    length = arrNotes.length;

for (var i = 0; i < length; i++) {
    // Wrap in an IIFE so each timeout captures its own copy of i.
    (function () {
        var t = i;
        setTimeout(function () {
            playNote(arrNotes[t].note);
        }, arrNotes[t].time);
    })();
}

function playNote(note) {
    console.log(note);
}
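As a side note: with ES6 block scoping, let gives each loop iteration its own binding, so the IIFE is no longer needed. A minimal sketch of the same idea:

// Same playback logic with block-scoped `let`: each iteration captures
// its own copy of i, so no wrapper function is required.
const arrNotes = [{ note: "do", time: 1500 }, { note: "re", time: 2500 }];

for (let i = 0; i < arrNotes.length; i++) {
    setTimeout(function () {
        playNote(arrNotes[i].note);
    }, arrNotes[i].time);
}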

Related

Nodejs - Fire multiple API calls while limiting the rate and wait until they are all done

My issues
Launch 1000+ calls to an online API that limits the number of API calls to 10 calls/sec.
Wait for all the API calls to give back a result (or retry); it can take 5 seconds before the API sends its data.
Use the combined data in the rest of my app.
What I have tried, after looking at a lot of different questions and answers here on the site:
Use a promise to wait for one API request
const https = require("https");

function myRequest(param) {
    const options = {
        host: "api.xxx.io",
        port: 443,
        path: "/custom/path/" + param,
        method: "GET"
    };

    return new Promise(function(resolve, reject) {
        https.request(options, function(result) {
            let str = "";
            result.on('data', function(chunk) { str += chunk; });
            result.on('end', function() { resolve(JSON.parse(str)); });
            result.on('error', function(err) { console.log("Error: ", err); });
        }).end();
    });
}
Use Promise.all to do all the requests and wait for them to finish
const params = [{item: "param0"}, ... , {item: "param1000+"}]; // imagine 1000+ items
const promises = [];

params.map(function(param) {
    promises.push(myRequest(param.item));
});

const result = Promise.all(promises).then(function(data) {
    // doing some funky stuff with data
});
So far so good, sort of
It works when I limit the number of API requests to a maximum of 10, because otherwise the API's rate limiter kicks in. When I console.log(promises), it gives back an array of 'request'.
I have tried to add setTimeout in different places, like:
...
params.map(function(param) {
    promises.push(setTimeout(function() {
        myRequest(param.item);
    }, 100));
});
...
But that does not seem to work. When I console.log(promises), it gives back an array of 'function'.
My questions
Now I am stuck ... any ideas?
How do I build in retries for when the API gives back an error?
Thank you for reading up to here, you are already a hero in my book!
When you have a complicated control flow, using async/await helps a lot to clarify its logic.
Let's start with the following simple algorithm to limit everything to 10 requests per second:
make 10 requests
wait 1 second
repeat until no more requests
For this the following simple implementation will work:
async function rateLimitedRequests (params) {
    let results = [];

    while (params.length > 0) {
        let batch = [];

        // Use shift() instead of pop() if you want to process
        // the params in their original order.
        for (let i = 0; i < 10; i++) {
            let thisParam = params.pop();
            if (thisParam) {
                batch.push(myRequest(thisParam.item));
            }
        }

        results = results.concat(await Promise.all(batch));
        await delayOneSecond();
    }

    return results;
}
Now we just need to implement the one second delay. We can simply promisify setTimeout for this:
function delayOneSecond() {
    return new Promise(ok => setTimeout(ok, 1000));
}
This will definitely give you a rate limit of at most 10 requests each second. In fact it performs somewhat slower than that, because each batch executes in request time + one second. This is perfectly fine and already meets your original intent, but we can improve it to squeeze in a few more requests and get as close as possible to exactly 10 requests per second.
We can try the following algorithm:
remember the start time
make 10 requests
compare end time with start time
delay one second minus request time
repeat until no more requests
Again, we can use almost exactly the same logic as the simple code above, but tweak it to do the time calculations:
const ONE_SECOND = 1000;

async function rateLimitedRequests (params) {
    let results = [];

    while (params.length > 0) {
        let batch = [];
        let startTime = Date.now();

        for (let i = 0; i < 10; i++) {
            let thisParam = params.pop();
            if (thisParam) {
                batch.push(myRequest(thisParam.item));
            }
        }

        results = results.concat(await Promise.all(batch));

        let endTime = Date.now();
        let requestTime = endTime - startTime;
        let delayTime = ONE_SECOND - requestTime;

        if (delayTime > 0) {
            await delay(delayTime);
        }
    }

    return results;
}
Now, instead of hardcoding the one-second delay function, we can write one that accepts a delay period:
function delay(milliseconds) {
    return new Promise(ok => setTimeout(ok, milliseconds));
}
We now have a simple, easy-to-understand function that rate limits as close as possible to 10 requests per second. It is rather bursty, in that it makes 10 parallel requests at the beginning of each one-second period, but it works. We could of course implement ever more complicated algorithms to smooth out the request pattern, but I leave that to your creativity and as homework for the reader.
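As for the retry part of the question: the promisified myRequest above never rejects, so as a sketch (assuming you wire request errors to reject), you could wrap it in a retrying helper and call that from the batch loop instead:

// Hypothetical retry wrapper: tries myRequest up to maxRetries times,
// waiting a second between attempts, and rethrows the last error.
async function myRequestWithRetry(param, maxRetries) {
    for (let attempt = 1; attempt <= maxRetries; attempt++) {
        try {
            return await myRequest(param);
        } catch (err) {
            if (attempt === maxRetries) throw err;
            await delay(ONE_SECOND); // back off before the next attempt
        }
    }
}

Then replace myRequest(thisParam.item) in the batch loop with myRequestWithRetry(thisParam.item, 3).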

How to stop re-buffering when rewind or fast forward with AVPlay API on Tizen?

I am building an application for Tizen OS that has archive playback functionality. I want to fast-forward or rewind the video, but after clicking the button it always starts buffering. This is bad for the user experience, so I want to somehow eliminate it. How can I do that?
Here is the code for rewind:
var successCallback = function () {
    console.log("Media seek successful");
};

var errorCallback = function () {
    console.log("Media seek failed");
};

var currentTime = webapis.avplay.getCurrentTime();
var newTime = currentTime - length;

webapis.avplay.seekTo(newTime, successCallback, errorCallback);
I also tried to add the code to set the buffer size but it doesn't seem to work:
webapis.avplay.setBufferingParam(
    "PLAYER_BUFFER_FOR_PLAY",
    "PLAYER_BUFFER_SIZE_IN_SECOND",
    60
);

Sorting data into appropriate key in an object

I'm trying to loop over & sort a large amount of data (the whole Ethereum blockchain, lol).
I'm trying to create a record of all transactions for every address.
Obviously this is a very intensive process and I'm not sure how to make it more efficient beyond what I have (which isn't that efficient).
It starts out quick, but I think it has now slowed because of the address lookup in the txs object.
Any help or opinions are greatly appreciated.
var txs = {};
var i = 0;

// Loop over blocks
(function loop () {
    setTimeout(function () {
        // Get current block
        var block = web3.eth.getBlock(i, true, (error, block) => {
            // Loop over transactions in block
            for (var j = 0; j < block.transactions.length; j++) {
                if (txs[block.transactions[j].to]) {
                    txs[block.transactions[j].to].transactions.push(block.transactions[j]);
                } else if (txs[block.transactions[j].to]) {
                    txs[block.transactions[j].from].transactions.push(block.transactions[j]);
                } else {
                    txs[block.transactions[j].to] = {
                        transactions: [block.transactions[j]]
                    };
                    txs[block.transactions[j].from] = {
                        transactions: [block.transactions[j]]
                    };
                }
            }
        });
        i++;
        if (i < highestBlock) {
            loop();
        }
    }, 50);
})();
I think that your code has an error in it, located at the "else if": it seems that you should test the txs[block.transactions[j].from] property instead of txs[block.transactions[j].to]. If you simply want to accomplish a recursive pattern, you could use the setImmediate function or the process.nextTick method. If you use Node.js v6+, you could use a Map instead of a plain object.
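As a sketch of both fixes (this assumes the intent is to record every transaction under both its to and from address, and uses setImmediate instead of setTimeout):

// Hypothetical helper that removes the duplicated branches.
function record(address, tx) {
    if (txs[address]) {
        txs[address].transactions.push(tx);
    } else {
        txs[address] = { transactions: [tx] };
    }
}

(function loop () {
    setImmediate(function () {
        web3.eth.getBlock(i, true, (error, block) => {
            for (var j = 0; j < block.transactions.length; j++) {
                var tx = block.transactions[j];
                record(tx.to, tx);
                record(tx.from, tx);
            }
        });
        i++;
        if (i < highestBlock) {
            loop();
        }
    });
})();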

Node.js dialling back API requests

I have a list of 125,000+ ID numbers.
I am making a request to an API to get more information for each one.
But my problem is that the API will stop giving me a response if I request more than 6 per second.
I need a way to control the speed of the requests.
Just use a function called by setInterval to do the actual API querying?
Simple example:
var ids = [ /* big array here */ ];

function queryAllIds(ids, callback) {
    var processedIds = [];
    var lastProcessedId = 0;
    var processedIdCount = 0;
    var intervalId;

    function queryApi() {
        var idToProcess = lastProcessedId++;

        // doActualQuery is a placeholder for the real API call.
        doActualQuery(ids[idToProcess], function(result) {
            processedIds[idToProcess] = result;
            processedIdCount++;
            if (processedIdCount === ids.length) {
                process.nextTick(callback, processedIds);
            }
        });

        // Stop the interval once every id has been dispatched.
        if (intervalId && lastProcessedId === ids.length)
            clearInterval(intervalId);
    }

    intervalId = setInterval(queryApi, 1000 / 6); // 6 queries per second
}

queryAllIds(ids, function (processedIds) {
    // Do stuff here
});
We ended up using the limiter package, which provided the rate limiting we needed right out of the box: https://www.npmjs.com/package/limiter
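For reference, a minimal sketch of what that can look like (this assumes limiter's promise-based API, and that doActualQuery, the placeholder from above, returns a promise here):

// Throttle to 6 API calls per second with the limiter package.
const { RateLimiter } = require('limiter');
const limiter = new RateLimiter({ tokensPerInterval: 6, interval: 'second' });

async function queryId(id) {
    await limiter.removeTokens(1); // resolves once a token is available
    return doActualQuery(id);      // placeholder for the real API call
}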

Node socket.io, anything to prevent flooding?

How can I prevent someone from simply doing
while(true){client.emit('i am spammer', true)};
This sure proves to be a problem when someone has the urge to crash my Node server!
Like tsrurzl said, you need to implement a rate limiter (throttling sockets).
The following code example only works reliably if your socket returns a Buffer (instead of a string). It assumes that you will first call addRatingEntry(), and then call evalRating() immediately afterwards. Otherwise you risk a memory leak in the case where evalRating() doesn't get called at all, or too late.
var rating, limit, interval;

rating = [];     // rating: [*{'timestamp', 'size'}]
limit = 1048576; // limit: maximum number of bytes/characters.
interval = 1000; // interval: interval in milliseconds.
// Together these describe a rate limit of 1 MB/s.

function addRatingEntry (size) {
    // Pushes a new entry and returns it.
    return rating[rating.push({
        'timestamp': Date.now(),
        'size': size
    }) - 1];
}

function evalRating () {
    // Removes outdated entries, computes the combined size, and compares it
    // with the limit variable. Returns true if the connection is NOT
    // flooding, false if you need to disconnect.
    var i, newRating, totalSize;

    // totalSize is in bytes for an underlying Buffer value, and in number of
    // characters for strings. The actual byte size of a string is variable,
    // so it is not reliable.
    newRating = [];
    for (i = rating.length - 1; i >= 0; i -= 1) {
        if ((Date.now() - rating[i].timestamp) < interval) {
            newRating.push(rating[i]);
        }
    }
    rating = newRating;

    totalSize = 0;
    for (i = newRating.length - 1; i >= 0; i -= 1) {
        totalSize += newRating[i].size; // sum the sizes, not the timestamps
    }

    return (totalSize > limit ? false : true);
}

// Assume the connection variable already exists and has a readable stream
// interface.
connection.on('data', function (chunk) {
    addRatingEntry(chunk.length);

    if (evalRating()) {
        // Continue processing chunk.
    } else {
        // Disconnect due to flooding.
    }
});
You can add extra checks, like checking whether or not the size parameter really is a number etc.
Addendum: Make sure the rating, limit and interval variables are enclosed (in a closure) per connection, and that they don't define a global rate (where each connection manipulates the same rating).
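A minimal sketch of that per-connection enclosure (the factory framing here is my own, not part of the code above):

// Hypothetical factory: each connection gets its own private rating state.
function createFloodCheck(limit, interval) {
    var rating = [];

    return function (size) {
        var now = Date.now();
        rating.push({ timestamp: now, size: size });
        // Keep only entries inside the current interval window.
        rating = rating.filter(function (entry) {
            return now - entry.timestamp < interval;
        });
        var totalSize = rating.reduce(function (sum, entry) {
            return sum + entry.size;
        }, 0);
        return totalSize <= limit; // false means the connection is flooding
    };
}

// Usage: one check per connection.
connection.floodCheck = createFloodCheck(1048576, 1000);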
I implemented a little flood function; it's not perfect (see improvements below), but it will disconnect a user when he makes too many requests.
// Not more than 100 requests in 10 seconds
let FLOOD_TIME = 10000;
let FLOOD_MAX = 100;

let flood = {
    floods: {},
    lastFloodClear: new Date(),
    protect: (io, socket) => {
        // Reset flood protection
        if (Math.abs(new Date() - flood.lastFloodClear) > FLOOD_TIME) {
            flood.floods = {};
            flood.lastFloodClear = new Date();
        }

        if (flood.floods[socket.id] == undefined) flood.floods[socket.id] = {};
        if (flood.floods[socket.id].count == undefined) flood.floods[socket.id].count = 0;
        flood.floods[socket.id].count++;

        // Disconnect the socket if it went over FLOOD_MAX within FLOOD_TIME
        if (flood.floods[socket.id].count > FLOOD_MAX) {
            console.log('FLOODPROTECTION ', socket.id);
            io.sockets.connected[socket.id].disconnect();
            return false;
        }

        return true;
    }
}

exports = module.exports = flood;
And then use it like this:
let flood = require('../modules/flood')
// ... init socket io...
socket.on('message', function () {
if(flood.protect(io, socket)){
//do stuff
}
});
Improvements would be to track, next to the count, how often a user got disconnected, and then build a ban list and not let him connect anymore. Also, when a user refreshes the page he gets a new socket.id, so maybe use a unique cookie value here instead of the socket.id.
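A rough sketch of that ban-list idea (hypothetical names; the clientId cookie and the counting threshold are assumptions):

// Hypothetical ban list keyed by a client identifier such as a cookie value.
let bans = {};          // clientId -> number of flood disconnects
const BAN_THRESHOLD = 3;

// socket.io middleware: refuse banned clients before they can emit anything.
io.use((socket, next) => {
    const clientId = socket.handshake.auth.clientId; // assumed unique cookie value
    if (bans[clientId] >= BAN_THRESHOLD) {
        return next(new Error('banned'));
    }
    next();
});

// Call this wherever the flood protection disconnects a socket.
function registerFloodDisconnect(clientId) {
    bans[clientId] = (bans[clientId] || 0) + 1;
}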
Here is a simple rate-limiter-flexible package example.
const app = require('http').createServer();
const io = require('socket.io')(app);
const { RateLimiterMemory } = require('rate-limiter-flexible');

app.listen(3000);

const rateLimiter = new RateLimiterMemory({
    points: 5,   // 5 points
    duration: 1, // per second
});

io.on('connection', (socket) => {
    socket.on('bcast', async (data) => {
        try {
            // consume 1 point per event from this IP
            await rateLimiter.consume(socket.handshake.address);
            socket.emit('news', { 'data': data });
            socket.broadcast.emit('news', { 'data': data });
        } catch (rejRes) {
            // no available points to consume
            // emit error or warning message
            socket.emit('blocked', { 'retry-ms': rejRes.msBeforeNext });
        }
    });
});
Read more in the official docs.
