I'm trying to use socket.io with phaser, and I'm having trouble with spawning players on connection. I'm pretty sure it's because the player's image is not loaded when the connection happens. I basically just need a way to check if the preload function is done. Something like this:
// Sketch from the question: poll until some loaded flag flips true.
// NOTE(review): `LOADED` is never defined anywhere, and the flag actually
// read by the spawner below is lowercase `loaded` — as written this
// interval's condition can never become true.
function preload() {
game.load.image('player', 'client/assets/player.png');
let interval = setInterval(() => {
if(LOADED) {
loaded = true;
clearInterval(interval);
}
}, 1);
}
Is something like this possible, or is there another approach? Thanks.
EDIT:
I'm able to make it work like this:
// Workaround from the question: assume the asset has finished loading
// after a fixed 100 ms delay.
// NOTE(review): a fixed timeout is a race — nothing here confirms the
// image actually loaded before `loaded` flips true.
function preload() {
game.load.image('player', 'client/assets/player.png');
setTimeout(() => {
loaded = true;
}, 100);
};
And in the function spawning the player:
// Spawner side of the workaround: poll `loaded` every millisecond, and as
// soon as it is set, create the physics-enabled sprite and stop polling.
let testI = setInterval(() => {
console.log(loaded);
if(loaded) {
let test = game.add.sprite(0, 0, 'player');
game.physics.enable(test, Phaser.Physics.ARCADE);
test.body.gravity.y = 200;
clearInterval(testI);
}
}, 1);
But it's very messy and not a good solution.
Related
I'm working on an interview project for which I have to add an endpoint that lets me POST an array of products (listings) and it should create them (MongoDB + Mongoose) or update them accordingly. The problem is I'm clearly not dealing with Promises properly and I'm getting a timeout on my test.
Here's the spec:
// Mocha + SuperTest spec: POST two listings and expect the endpoint to
// report one created and one updated. (The `{ ... }` fixture bodies were
// elided in the post, so this snippet is not runnable as pasted.)
it.only('should create listings or update them if they already exist, incrementing the quantity with the difference ' +
'between the current sold_quantity and the initial_sold_quantity', (done) => {
var iPhone = { ... };
var samsung = { ... };
request(app).post('/listings/myEndpoint').send([iPhone, samsung]).expect(200, {
created: 1,
updated: 1
}, done);
});
// Endpoint exactly as posted in the question.  The thread goes on to
// identify three defects, annotated below:
//  1. `var created, updated = 0;` initialises only `updated`; `created`
//     starts undefined, so `created++` produces NaN.
//  2. The `return {...}` sits inside the forEach callback, whose return
//     value is discarded — the endpoint itself returns nothing, and no
//     response is ever sent via `res`, hence the test timeout.
//  3. The findOne(...).then(...) promises are never awaited, so the
//     counters are read before any of them settle.
exports.myEndpoint = (req, res) => {
var listings = req.body;
// Bug (1): only `updated` is initialised here.
var created, updated = 0;
listings.forEach(reqListing => {
// Bug (3): this promise chain is fired and forgotten.
Listing.findOne({ listing_id: reqListing.listing_id })
.then(foundListing => {
if (!foundListing) {
var newListing = reqListing;
newListing.initial_sold_quantity = newListing.sold_quantity;
Listing.create(newListing);
created++;
} else {
var newQuantity = reqListing.sold_quantity - foundListing._doc.initial_sold_quantity;
if (foundListing._doc.quantity != newQuantity) {
foundListing._doc.quantity = newQuantity;
foundListing.save();
updated++;
}
}
});
// Bug (2): this returns to forEach's callback, not from the endpoint —
// the value is thrown away.
return {
created: created,
updated: updated
};
});
};
THINGS I'VE TRIED:
Giving it more time. I tried changing the default timeout for Mocha tests but it doesn't really matter if it's 2 seconds or 20, it'll still timeout.
Isolating the update vs the creation. Really doesn't matter either if I'm only updating a product or if I'm only creating one, it'll still timeout.
Removing the logic. As far as I've checked it doesn't really matter what happens inside the if/else blocks because it'll still give me a timeout. So even if the code looks like this:
// Stripped-down repro from the question: even with the create/update
// logic replaced by console.log, the promises are still un-awaited and
// nothing is ever sent through `res`, so the request times out just the
// same — demonstrating the timeout is about control flow, not the logic.
exports.myEndpoint = (req, res) => {
var listings = req.body;
var created, updated = 0;
listings.forEach(reqListing => {
Listing.findOne({ listing_id: reqListing.listing_id })
.then(foundListing => {
if (!foundListing) {
console.log("creating");
} else {
console.log("updating");
}
});
// Still returned to the forEach callback — discarded.
return {
created: created,
updated: updated
};
});
};
it'll still timeout.
After asking some questions in the Nodeiflux Discord server I managed to find a solution, maybe not the prettiest because it doesn't make use of async/await but I'm not supposed to change the style of the project too much so I'll leave it without async/await.
First, the silly problem which came after fixing the post's question:
var created = 0, updated = 0;
instead of not initializing created.
Second, making use of forEach with Promises inside didn't make too much sense because it would discard whatever was returning inside so I put the return outside the forEach clause and changed the forEach iteration for a map instead. I also made use of Promise.all() to get all promises to resolve before returning:
exports.upsert = (req, res) => {
var listings = req.body;
var created = 0, updated = 0;
var allowedArrayLength = 50;
return Promise.all(listings.map(reqListing =>
Listing.findOne({
listing_id: reqListing.listing_id
})
.then(foundListing => {
if (!foundListing) {
createListing(reqListing);
created++;
} else {
var prevQuantity = foundListing.quantity;
if (updateListing(reqListing, foundListing).quantity > prevQuantity) {
updated++;
}
}
})
)).then(() => ({ created: created, updated: updated }));
};
I'm trying to achieve something like this:
describe("TEST",function() {
Offer.find({},{_id:1, title:1}).exec(function(error, offers) {
for (var i = 0; i < offers.length; i++) {
it("Ask transaction : " + offers[i].title, function(done) {
// do something with offers[i];
}
}
...
But Mocha does not even detect any tests in the file. Why?
Every test case starts with the it("", function(){ /* write test code here */ } ) code block.
If you are looking at performing some test setup like inserting data then you can use the before function to do those.
Example:
describe("TEST",function() {
before(function() {
// runs before all tests in this block
});
it("should blah", function(done) {
// Your test case starts here.
}
}
There are examples on Mocha's official site that you can refer to.
See:
https://mochajs.org/
So, thanks to your answer and some research, I managed to do exactly what I wanted.
describe("TRANSACTIONS TESTS",function() {
var offers;
before(function(done) {
Offer.find({},{_id:1, title:1}).exec(function(error, result) {
offers = result;
done();
});
});
it("TEST ALL OFFERS", function(done) {
for (var i = 0; i < offers.length; i++) {
const tmp_i = i;
server
.post('/transactions')
.send(data)
.expect("Content-type",/json/)
.expect(200)
.end(function(err,res) {
// DO TEST STUFF HERE
if (tmp_i == offers.length - 1) {
done();
}
});
}
});
The const variable is necessary to avoid a bug where i is always equal to the maximum size of the array by the time the callbacks run, instead of taking each incremented value.
I'm learning node and would like to optimize the code I did. I tried using Async.parallel to perform the operations and when finished return a json.
I am new to Node.js and I'm trying to do this with async.parallel, but in some other code I'm studying, the result I got back was [Function], and I'm trying to understand why.
// Controller method exactly as posted (Sails/Waterline style).
// NOTE(review): kept byte-identical — the closing brackets on the last
// line are garbled as pasted, `next` is called (L-err paths) but is not a
// parameter of this action, the three inner forEach filters repeat the
// same removed == 'no' logic, and a fixed 2-second setTimeout is used to
// wait for the nested async queries — see the answer below for the
// suggested restructuring with async.waterfall/each and Array.filter.
getTabletIntRout: function(req, res) {
var reqMAC = req.param('id_tablet');
Tablet.findOne(reqMAC).populate('rout_tablet').exec(function(err, tablet) {
// NOTE(review): `next` is not defined in this scope.
if (err) return next(err);
if (!tablet) return next();
var arrRoutes = tablet.rout_tablet;
if (arrRoutes.length > 0) {
var routesNotRemoved = [];
arrRoutes.forEach(function(route) {
if (route.removed == 'no') {
// One extra query per non-removed route; results are pushed into
// routesNotRemoved whenever each callback happens to fire.
Rout.findOne(route.id)
.populate('rout_assigned') // Pin
.populate('in_rout') // Tablet
.populate('rout_description_assigned')
.exec(function(err, rout2) {
// Keep only the assigned pins not marked removed.
var arrRout = rout2.rout_assigned;
var routsNotRemoved = [];
if (arrRout.length > 0) {
arrRout.forEach(function(ruta) {
if (ruta.removed == 'no') {
routsNotRemoved.push(ruta);
}
});
}
// Keep only the tablets not marked removed.
var arrTablets = rout2.in_rout;
var tabletsNotRemoved = [];
if (arrTablets.length > 0) {
arrTablets.forEach(function(tab) {
if (tab.removed == 'no') {
tabletsNotRemoved.push(tab);
}
});
}
// Keep only the descriptions not marked removed.
var arrDesc = rout2.rout_description_assigned;
var descripNotRemoved = [];
if (arrDesc.length > 0) {
arrDesc.forEach(function(desc) {
if (desc.removed == 'no') {
descripNotRemoved.push(desc);
}
});
}
rout2.rout_assigned = routsNotRemoved;
rout2.in_rout = tabletsNotRemoved;
rout2.rout_description_assigned = descripNotRemoved;
routesNotRemoved.push(rout2);
});
}
});
// Race: blindly assumes all Rout.findOne callbacks above finish
// within 2 seconds before the response is sent.
setTimeout(function() {
if (routesNotRemoved.length > 0) {
res.json({ info: routesNotRemoved });
} else {
return res.json({"error": "-1", "message": "Todas las rutas asociadas a esa tablet están eliminadas"});
}
}, 2000);
} else {
return res.json({"error": "-2", "message": "No existen rutas asociadas en esa tablet"});
}
// Garbled closers as pasted in the question.
}););});}},
I will try to provide some thoughts, hopefully some will make sense in your domain.
Extract a function to make people understand what you're doing in that big function
So instead of
Tablet.findOne(reqMAC).populate('rout_tablet').exec(function(err, tablet) { // ...
You would have
Tablet.findOne(reqMAC).populate('rout_tablet').exec(meaningfulFunctionName);
Don't repeat yourself
So your code becomes shorter and whenever the reader of your code finds a function name he / she already knows what is happening inside that
// Excerpt quoted from the question: a manual filter that keeps only the
// not-removed entries.  (The length check is redundant — forEach on an
// empty array simply does nothing, which is the point being made here.)
if (arrRout.length > 0) {
arrRout.forEach(function(ruta) {
if (ruta.removed == 'no') {
routsNotRemoved.push(ruta);
}
});
}
No need to check for empty arrRout as the argument function to arrRout.forEach will simply not run in the case of length being zero.
What you wrote is just a filter function, so why not using filter? Like so
// Same predicate expressed with Array.prototype.filter: keeps entries
// whose removed flag is 'no'.  (Result must be captured by the caller.)
arrRout.filter((ruta) => ruta.removed == 'no');
You can also reuse this, if you extract the anonymous function, for arrTablets and arrDesc.
On the argument: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/filter
Don't use a huge if else
Either check for the inverse or return a default or something that makes sense in your domain, but don't have that big chunk of logic, it makes it hard to reason about your code.
Extract more function so that it's easier to use async
You might want to have something like this
// Suggested shape: waterfall runs the steps in order, handing each step's
// results to the next; the trailing callback decides the JSON response.
async.waterfall([
function(next) {
// here you can put
// Tablet.findOne(reqMAC).populate('rout_tablet').exec
// invoke next with err, tablet
},
function(tablet, next) {
// Process each route; call nextEach when an individual route is done.
async.each(arrRoutes, function(arrRoute, nextEach) {
// write your code logic here
});
}
], function() {
// decide what to invoke res.json with
});
Remember to extract functions after you're done putting the logic inside the async steps, I didn't do it so it is more clear where to put what.
I hope this makes sense, feel free to ask if you have any doubts.
Next time you post a question please make sure to properly indent it, don't just paste it here.
I'm trying to convert an existing API to work with RxJS... fairly new to node, and very new to RxJs, so please bear with me.
I have an existing API (getNextMessage), that either blocks (asynchronously), or returns a new item or error via a node-style (err, val) callback, when the something becomes available.
so it looks something like:
getNextMessage(nodeStyleCompletionCallback);
You could think of getNextMessage like an http request, that completes in the future, when the server responds, but you do need to call getNextMessage again, once a message is received, to keep getting new items from the server.
So, in order to make it into an observable collection, I have to get RxJs to keep calling my getNextMessage function until the subscriber is disposed();
Basically, I'm trying to create my own RxJs observable collection.
The problems are:
I don't know how to make subscriber.dispose() kill the async.forever
I probably shouldn't be using async.forever in the first place
I'm not sure I should be even getting 'completed' for each message - shouldn't that be at the end of a sequence
I'd like to eventually remove the need for using fromNodeCallback, to have a first class RxJS observable
Clearly I'm a little confused.
Would love a bit of help, thanks!
Here is my existing code:
var Rx = require('rx');
var port = require('../lib/port');
var async = require('async');
// Question's attempt: returns a function suitable for fromNodeCallback
// that, once invoked, loops forever pulling messages from the port and
// forwarding each (error, json) pair to the outer callback.
function observableReceive(portName)
{
// Shared mutable state: set once when the returned function is invoked.
var observerCallback;
var listenPort = new port(portName);
// NOTE(review): nothing in this function ever sets `disposed` to true,
// so the async.forever loop can never stop — exactly the dispose problem
// the question asks about.
var disposed = false;
var asyncReceive = function(asyncCallback)
{
listenPort.getNextMessage(
function(error, json)
{
// Forward the message, then re-arm the loop on the next tick.
observerCallback(error, json);
if (!disposed)
setImmediate(asyncCallback);
}
);
}
return function(outerCallback)
{
observerCallback = outerCallback;
async.forever(asyncReceive);
}
}
// Driver from the question.  NOTE(review): the completion handler runs
// per message here (the question's problem #3) and then disposes the
// subscription, which stops the stream after the first item.
var receive = Rx.Observable.fromNodeCallback(observableReceive('rxtest'));
var source = receive();
var subscription = source.forEach(
function (json)
{
console.log('receive completed: ' + JSON.stringify(json));
},
function (error) {
console.log("receive failed: " + error.toString());
},
function () {
console.log('Completed');
subscription.dispose();
}
);
So here's probably what I would do.
var Rx = require('Rx');
// This is just for kicks. You have your own getNextMessage to use. ;)
// Stand-in message source for the example: each call asynchronously
// yields the next integer 1..10 via a node-style (err, value) callback,
// and reports an error on every call after the tenth.
var getNextMessage = (function () {
  var nextValue = 1;
  function deliver(callback) {
    if (nextValue > 10) {
      callback("lawdy lawd it's ova' ten, ya'll.");
      return;
    }
    var value = nextValue;
    nextValue += 1;
    callback(undefined, value);
  }
  return function (callback) {
    setTimeout(function () { deliver(callback); }, 5);
  };
}());
// This just makes an observable version of getNextMessage.
// Wraps a single getNextMessage call in an Observable: emits exactly one
// value (or an error) and completes immediately afterwards.
var nextMessageAsObservable = Rx.Observable.create(function (observer) {
  getNextMessage(function (err, message) {
    if (err) {
      observer.onError(err);
      return;
    }
    observer.onNext(message);
    observer.onCompleted();
  });
});
// This repeats the call to getNextMessage as many times (11) as you want.
// "take" will cancel the subscription after receiving 11 items.
// repeat() re-subscribes after each completion to pull messages one at a
// time; take(11) caps the stream at eleven items and then disposes.
var firstElevenMessages = nextMessageAsObservable.repeat().take(11);
firstElevenMessages.subscribe(
  function (x) { console.log('next', x); },
  function (err) { console.log('error', err); },
  function () { console.log('done'); }
);
I realize this is over a year old, but I think a better solution for this would be to make use of recursive scheduling instead:
// Adapts a node-style "get next item" function into an endless Observable.
// Each emitted value comes from one invocation of `next`; the following
// invocation is scheduled recursively only after the previous one has
// produced a value, so calls never overlap.  Errors propagate via onError;
// the stream never completes on its own (dispose to stop it).
// Fix: the original chained the assignments with the comma operator
// (`scheduler = ... , next = ...;` and a comma-chained var declaration),
// which is error-prone — split into separate statements.
Rx.Observable.forever = function (next, scheduler) {
  scheduler = scheduler || Rx.Scheduler.default;
  // Internally wrap the callback-style function into an observable factory.
  next = Rx.Observable.fromNodeCallback(next);
  return Rx.Observable.create(function (observer) {
    var disposable = new Rx.SingleAssignmentDisposable();
    var hasState = false;
    disposable.setDisposable(scheduler.scheduleRecursiveWithState(null,
      function (state, self) {
        // The first pass has no state yet; afterwards, emit the value
        // produced by the previous invocation.
        hasState && observer.onNext(state);
        hasState = false;
        next().subscribe(function (x) {
          hasState = true;
          self(x); // reschedule, carrying the new value as state
        }, observer.onError.bind(observer));
      }));
    return disposable;
  });
};
The idea here is that you can schedule new items once the previous one has completed. You call next() which invokes the passed in method and when it returns a value, you schedule the next item for invocation.
You can then use it like so:
// Pull messages indefinitely, but cap the stream at 11 items — take()
// disposes the underlying recursive schedule once the limit is reached.
Rx.Observable.forever(getNextMessage)
.take(11)
.subscribe(function(message) {
console.log(message);
});
See a working example here
I've got a Node.js app that gets a list of file locally and uploads them to a server. This list could contain thousands of files.
// Naive version from the question: fires every upload immediately — with
// thousands of files this launches thousands of concurrent operations at
// once, which is exactly the problem being asked about.
for (var i = 0; i < files.length; i++) {
upload_file(files[i]);
}
If I execute this with thousands of files, upload_file will get called thousands of times all at once, and most likely die (or at least struggle). In the synchronous world, we'd create a thread pool and limit it to a certain number of threads. Is there a simple way to limit how many asynchronous calls get executed at once?
As usual, I recommend Caolan McMahon's async module.
Make your upload_file function take a callback as its second parameter:
var async = require("async");
function upload_file(file, callback) {
// Do funky stuff with file
callback();
}
var queue = async.queue(upload_file, 10); // Run ten simultaneous uploads
queue.drain = function() {
console.log("All files are uploaded");
};
// Queue your files for upload
queue.push(files);
queue.concurrency = 20; // Increase to twenty simultaneous uploads
The answer above, re: async on NPM is the best answer, but if you'd like to learn more about control flow:
You should look into control flow patterns. There's a wonderful discussion on control flow patterns in Chapter 7 of Mixu's Node Book. Namely, I'd look at the example in 7.2.3: Limited parallel - an asynchronous, parallel, concurrency limited for loop.
I've adapted his example:
// Placeholder for the real read-and-upload work; the adapted code below
// calls it as doUpload(file, doneCallback).
function doUpload() {
// perform file read & upload here...
}
var files = [...]; // (elided in the post — the list of files to upload)
var limit = 10; // concurrent read / upload limit
var running = 0; // number of running async file operations
// Starts uploads until the concurrency limit is hit; each completion
// callback decrements `running` and re-enters uploader() to top the pool
// back up while files remain.
// NOTE(review): there is no overall "all done" hook in this adaptation —
// the last callback simply finds files empty and returns.
function uploader() {
while(running < limit && files.length > 0) {
var file = files.shift();
doUpload(file, function() {
running--;
if(files.length > 0)
uploader();
});
running++;
}
}
uploader();
You should try queueing. I assume that a callback is fired when upload_file() finishes. Something like this should do the trick (untested):
/**
 * Uploads `files` with at most `maxSimultaneousUploads` uploads in flight
 * at once, invoking `callback` once every upload has finished.
 * Relies on upload_file(file, done) calling `done` exactly once per file.
 *
 * Fix: with an empty `files` array the original never invoked `callback`
 * (queue() started nothing, and the completion check lived only in
 * next()) — report completion immediately in that case.
 *
 * @param {Array} files - items to hand to upload_file one by one
 * @param {number} maxSimultaneousUploads - concurrency cap (>= 1)
 * @param {Function} callback - invoked once after the last upload finishes
 */
function upload_files(files, maxSimultaneousUploads, callback) {
    var runningUploads = 0,
        startedUploads = 0,
        finishedUploads = 0;

    // Nothing to upload: complete right away instead of hanging.
    if (files.length === 0) {
        callback();
        return;
    }

    function next() {
        runningUploads--;
        finishedUploads++;
        if (finishedUploads == files.length) {
            callback();
        } else {
            // Make sure that we are running at the maximum capacity.
            queue();
        }
    }

    function queue() {
        // Run as many uploads as possible while not exceeding the given limit.
        while (startedUploads < files.length && runningUploads < maxSimultaneousUploads) {
            runningUploads++;
            upload_file(files[startedUploads++], next);
        }
    }

    // Start the upload!
    queue();
}
The other answers seem to be outdated. This can be solved easily using parallelLimit from async. Below is how to use it. I haven't tested it.
// One wrapper task per file; each task hands its completion callback
// straight to upload_file.
var tasks = files.map(function(f) {
return function(callback) {
upload_file(f, callback)
}
});
// Run at most 10 tasks concurrently.
// NOTE(review): parallelLimit comes from the `async` package — it must be
// required (e.g. var { parallelLimit } = require('async')) for this to run,
// and the final callback here ignores any error reported by the tasks.
parallelLimit(tasks, 10, function(){
});
No external libraries. Just plain JS.
It can be resolved using recursion.
The idea is that initially we immediately start the maximum allowed number of uploads and each of these requests should recursively initiate a new upload on its completion.
In this example I populate successful responses together with errors and I execute all requests but it's possible to slightly modify algorithm if you want to terminate batch upload on the first failure.
/**
 * Uploads `files` with at most `limit` concurrent uploads in flight.
 *
 * Primes `limit` uploads immediately; each completed upload recursively
 * starts the next pending one.  Every file is attempted even when some
 * fail; results land in input order.
 *
 * Fix: with an empty `files` array the original never settled the promise
 * (nothing was started, so the completion check never ran) — resolve with
 * an empty array immediately in that case.
 *
 * @param {Array} files - items accepted by uploadFile()
 * @param {number} limit - maximum simultaneous uploads (assumed >= 1)
 * @returns {Promise<Array>} resolves with all responses in input order,
 *   or rejects with the mixed responses/errors array if any upload failed
 */
async function batchUpload(files, limit) {
  limit = Math.min(files.length, limit);
  return new Promise((resolve, reject) => {
    // Nothing to do: settle right away instead of pending forever.
    if (files.length === 0) {
      resolve([]);
      return;
    }
    const responsesOrErrors = new Array(files.length);
    let startedCount = 0;
    let finishedCount = 0;
    let hasErrors = false;
    function recursiveUpload() {
      let index = startedCount++;
      uploadFile(files[index])
        .then(res => {
          responsesOrErrors[index] = res;
        })
        .catch(error => {
          responsesOrErrors[index] = error;
          hasErrors = true;
        })
        .finally(() => {
          finishedCount++;
          if (finishedCount === files.length) {
            hasErrors ? reject(responsesOrErrors) : resolve(responsesOrErrors);
          } else if (startedCount < files.length) {
            // A slot freed up — start the next pending upload.
            recursiveUpload();
          }
        });
    }
    // Prime the pool with `limit` simultaneous uploads.
    for (let i = 0; i < limit; i++) {
      recursiveUpload();
    }
  });
}
// Demo upload used by the example above: logs progress, waits a random
// 0–1499 ms, then succeeds when the wait was <= 1000 ms and fails
// otherwise (rejecting with a plain string, as the batch code expects).
async function uploadFile(file) {
  console.log(`${file} started`);
  const waitMs = Math.floor(Math.random() * 1500);
  return new Promise((resolve, reject) => {
    setTimeout(() => {
      if (waitMs > 1000) {
        console.log(`${file} finished with error`);
        reject(`${file} error`);
      } else {
        console.log(`${file} finished successfully`);
        resolve(`${file} success`);
      }
    }, waitMs);
  });
}
// Ten sample names: file_1 … file_10.
const files = Array.from({ length: 10 }, (_, index) => `file_${index + 1}`);

// Kick off the demo batch with a concurrency limit of 3 and report the
// aggregate outcome either way.
batchUpload(files, 3)
  .then((responses) => console.log('All successfull', responses))
  .catch((responsesWithErrors) => console.log('All with several failed', responsesWithErrors));