face detection with OpenCV and nodejs

I'm trying to do face detection with Node.js and OpenCV.
var cv = require('opencv');

// camera properties
var camWidth = 320;
var camHeight = 240;
var camFps = 10;
var camInterval = 1000 / camFps;

// face detection properties
var rectColor = [0, 255, 0];
var rectThickness = 1;

// initialize camera
var camera = new cv.VideoCapture(0);
camera.setWidth(camWidth);
camera.setHeight(camHeight);

module.exports = function (socket) {
  setInterval(function() {
    sTime = new Date();
    camera.read(function(err, im) {
      if (err) throw err;
      im.detectObject('/usr/lib/node_modules/opencv/data/lbpcascades/lbpcascade_frontalface.xml', {}, function(err, faces) {
        if (err) throw err;
        for (var i = 0; i < faces.length; i++) {
          face = faces[i];
          im.rectangle([face.x, face.y], [face.width, face.height], rectColor, rectThickness);
        }
        socket.emit('frame', { buffer: im.toBuffer() });
      });
    });
  }, camInterval);
};
im.detectObject takes 80/120 seconds to execute, and over time this creates a big delay between the actual image the camera sees and what I see on the PC with the rectangle around my face. How can I improve that and get rid of the "lag"?

Once you have the first match you have a set of ROIs. At this point you could stop running the detection algorithm and switch to a tracking algorithm (with motion estimation it will work even better).
If you don't want/need the performance of a tracking algorithm, you can fall back on template matching, using the detected faces as templates and the current frame as the destination image.
I did the same in a C++ project. Here's the code I used to "track" the detected faces (stored in _camFaces, which plays the same role as your `faces` array).
The code below runs after a detection has been triggered and _camFaces has been filled with a set of pairs. Every pair consists of:
- a rectangle that contains the size and position of the ROI in the previous frame;
- the ROI itself, in grayscale, which is used as the template for the template matching algorithm.
// convert the current frame to grayscale once per frame
cv::Mat1b grayFrame = Preprocessor::gray(frame);

bool noneTracked = true;
for (auto& pair : _camFaces) {
    // match the stored face template against the whole gray frame
    cv::Mat1f dst;
    cv::matchTemplate(grayFrame, pair.second, dst, CV_TM_SQDIFF_NORMED);

    double minval, maxval;
    cv::Point minloc, maxloc;
    cv::minMaxLoc(dst, &minval, &maxval, &minloc, &maxloc);

    if (minval <= 0.2) {
        // good match: move the ROI rectangle to the new position
        pair.first.x = minloc.x;
        pair.first.y = minloc.y;
        noneTracked = false;
    } else {
        // no match: reset the rectangle so the face gets detected again later
        pair.first.x = pair.first.y = pair.first.width = pair.first.height = 0;
    }
}

// draw rectangles on a copy of the frame and show it
cv::Mat frame2;
frame.copyTo(frame2);
for (const auto& pair : _camFaces) {
    cv::rectangle(frame2, pair.first, cv::Scalar(255, 255, 0), 2);
}
_updateCamView(frame2);
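If you stay in Node, a lighter variant of the same idea is to not run the detector on every frame: detect only every Nth frame and reuse the last rectangles in between. A rough sketch reusing the variables from the question (the detectEvery counter and lastFaces cache are hypothetical, not from the original code):

var lastFaces = [];      // rectangles from the most recent detection
var frameCount = 0;
var detectEvery = 5;     // run the detector on every 5th frame only

module.exports = function (socket) {
  setInterval(function () {
    camera.read(function (err, im) {
      if (err) throw err;

      var drawAndSend = function () {
        for (var i = 0; i < lastFaces.length; i++) {
          var face = lastFaces[i];
          im.rectangle([face.x, face.y], [face.width, face.height], rectColor, rectThickness);
        }
        socket.emit('frame', { buffer: im.toBuffer() });
      };

      if (frameCount++ % detectEvery === 0) {
        im.detectObject(cv.FACE_CASCADE, {}, function (err, faces) {
          if (err) throw err;
          lastFaces = faces;   // cache the result for the next few frames
          drawAndSend();
        });
      } else {
        drawAndSend();         // reuse the cached rectangles, no detection cost
      }
    });
  }, camInterval);
};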

Try This
var fs = require('fs'); // needed for readFile below

im.detectObject(cv.FACE_CASCADE, {}, function(err, faces) {
  if (err) throw err;

  var f1 = faces.length; // number of faces detected
  for (var i = 0; i < faces.length; i++) {
    face = faces[i];
    im.rectangle([face.x, face.y], [face.width, face.height], rectColor, rectThickness);
  }

  im.save('image.jpg');
  console.log('image saved');
  console.log(f1);

  fs.readFile('image.jpg', function (err, buffer) {
    socket.emit('image', { buffer: buffer, faces: f1 });
  });
});
im.toBuffer was the reason for that lag. I just save the image to a file, read it back, and send that buffer instead. I've also added the number of detected faces to the emit.

Related

Emitting an array using socket.io

For a school project I'm making a multiplayer snake game with socket.io. I tried to sync the body of the snake you play (snake1; the body is an array with vectors as locations, and the snakes themselves are objects) and send it with socket.io to the other player. To send the body I use socket.emit('snakeBody', snake1.body). But when I load the page I get the error "Uncaught RangeError: Maximum call stack size exceeded". I first thought it was the array, but when I try to sync a normal variable containing a vector I still get the error (syncing normal variables or arrays without a vector in them does work). My question is whether it is possible to sync an array with vectors as values using socket.io.
The index.js file (the file where all the socket things happen):
var express = require('express');
var app = express();
var server = app.listen(3000);
app.use(express.static('public'));
console.log("The server is live");

var socket = require('socket.io');
var io = socket(server);

io.sockets.on('connection', newConnection);

function newConnection(socket) {
  socket.on('snakeBody', body);

  function body(data) {
    socket.broadcast.emit('testBody', data);
  }
}
The game.js file (the base of the game, where the socket sends and receives):
//Defines both snakes
var snake1;
var snake2;

var socket;

function setup() {
  //The canvas for p5js to show something
  createCanvas(400, 400);

  //The starting location for the snakes (the snakes are objects in a class)
  snake1 = new Snake(200, 200);
  snake2 = new Snake(150, 250);

  //Socket server ip
  socket = io.connect('https://myIP.com');
  socket.on('snakeBody', newSnake);
}

function draw() {
  background(0);
  snake1.loop(255, 0, 0, 1, 340);

  //Sends all the players data to the server to be send to the other player
  socket.emit('snakeBody', snake1.body);
}

function newSnake(newSnake) {
  //The function that will do thing when it receives something from the socket
}
The snake class (it may call functions that don't exist in this excerpt; I removed them because they are not directly relevant to this question):
class Snake {
  //----------------------------------------------//
  //                 Snake Setup:                 //
  //----------------------------------------------//

  //Contains all building info
  constructor(x, y) {
    //Snake elements:
    this.body = [];
    this.body[0] = createVector(x, y);
    this.head = '';
    this.part = '';

    //Game elements:
    //Dimension
    this.dim = 10;
    //Direction
    this.x = 0;
    this.y = 0;
    //Speed
    this.s = 2;
    //Score
    this.scoreLeng = 0;
  }

  //Contains all functions that need to be looped
  loop(r, g, b, p, t) {
    //Move and update
    this.move(p);
    this.update();

    //If snake is dead
    if (this.gameOver()) {
      //Respawn
      this.respawn(p);
    }

    //If snake eats
    if (this.eat(food)) {
      //Grow
      this.grow();
      //Update food location
      food.update();
      //Play eat sound
      // eatSound.play();
    }

    //Show snake
    this.show(r, g, b, t);
  }

  //----------------------------------------------//
  //             All snake functions:             //
  //----------------------------------------------//

  show(r, g, b, t) {
    //Loop thru every body part of array
    for (var i = 0; i < this.body.length; i++) {
      //Rectangle with rgb color:
      fill(r, g, b);
      noStroke();
      rect(this.body[i].x, this.body[i].y, this.dim, this.dim);
    }

    //Score text:
    textSize(17);
    text("score:" + this.scoreLeng, t, 395);
  }

  dir(x, y) {
    //Directions:
    this.x = x;
    this.y = y;
  }

  update() {
    //Copy of the last element of the array:
    this.head = this.body[this.body.length - 1].copy();
    //Shift the array
    this.body.shift();
    //Add direction to snake location
    this.head.x += this.x;
    this.head.y += this.y;
    //Push head to end of array
    this.body.push(this.head);
  }

  gameOver() {
    //If snake is outside play area
    if (this.head.x == 400 || this.head.y == 400 || this.head.x < 0 || this.head.y < 0) {
      return true;
    }

    //Loop thru body parts in array
    for (var i = 0; i < this.body.length - 1; i++) {
      //All body parts in part variable
      this.part = this.body[i];
      //If head of snake hits part
      if (this.part.x == this.head.x && this.part.y == this.head.y) {
        return true;
      }
    }

    //Loop thru body array
    for (var j = 0; j < this.body.length - 1; j++) {
      //If snake 1 or snake 2 head hits parts of other snake
      if (snake1.head.x == this.body[j].x && snake1.head.y == this.body[j].y) {
        console.log("snake 1 is dead");
      }
      if (snake2.head.x == this.body[j].x && snake2.head.y == this.body[j].y) {
        console.log("snake 2 is dead");
      }
    }

    return false;
  }
}
When you get a "Maximum call stack size exceeded" error, it means that somewhere in your code you are calling a function which in turn calls another function, and so forth, until you hit the call stack limit. I'm not sure how that applies in your case; you're not sharing much.
My quick suggestion would be to send your data as a string:
socket.emit('snakeBody', JSON.stringify(snake1.body))
Then on the other end, parse it:
const snakeBody = JSON.parse(data) // "data" being the payload received in the socket handler
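A likely reason for the error is that snake1.body holds p5.Vector instances, which carry more than plain x/y data and can trip up serialization. A sketch of an alternative that avoids both the error and the stringify step: strip the body down to plain objects before emitting (the toPlainBody helper below is hypothetical, not part of the original code):

// Convert p5.Vector entries to plain {x, y} objects so socket.io can
// serialize them without choking on extra references.
function toPlainBody(body) {
  return body.map(function (v) {
    return { x: v.x, y: v.y };
  });
}

function draw() {
  background(0);
  snake1.loop(255, 0, 0, 1, 340);

  // Send only the data the other player actually needs
  socket.emit('snakeBody', toPlainBody(snake1.body));
}

On the receiving side the plain objects can be turned back into vectors with createVector(obj.x, obj.y) if needed.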

streaming audio from mic across websocket. I can see the data being sent but cannot hear it on the receiving client side

I'm trying to broadcast captured mic audio across a websocket. I can see the buffer array being sent, and the array has actual valid data, but the receiving client side cannot hear it. I'm pretty sure my playback function is correct, because I can generate white noise by filling an array with random numbers and using the playback function to hear it. I'm thinking maybe the audio being broadcast is too quiet to hear, because the numbers in the array seem to mostly be in the .000### range. Any ideas? Capturing mic audio and broadcasting it seems to be overcomplicated... :/
//broadcasting side
navigator.mediaDevices.getUserMedia({ audio: true, video: false }) // request cam
  .then(stream => {
    vid.srcObject = stream;
    context = new AudioContext();
    var source = context.createMediaStreamSource(stream);
    var processor = context.createScriptProcessor(1024, 2, 2);
    source.connect(processor);
    processor.connect(context.destination);

    processor.onaudioprocess = function(e) {
      audiodata = e.inputBuffer.getChannelData(1);
      socket.send(JSON.stringify({ sound: audiodata, to: to, from: '$username', text: '' }));
    };

    return vid.play(); // returns a Promise
  });

//receiving side: object to array
if (typeof (message.sound) != "undefined") {
  //$('#video_stream_btn').trigger('click');
  var json_sound = message.sound;
  var array_sound = [];
  for (var i in json_sound) {
    array_sound.push([i, json_sound[i]]);
  }

  if (typeof (context) == 'undefined') {
    context = new AudioContext();
  }

  play_sound(array_sound, context);
  return;
}

// receiving side play sound function
function play_sound(raw, context) {
  //alert(raw.length);
  var audioBuffer = context.createBuffer(1, raw.length, context.sampleRate);
  audioBuffer.getChannelData(0).set(raw);
  var source = context.createBufferSource();
  source.buffer = audioBuffer;
  source.connect(context.destination);
  source.start(0);
}
For anyone out there trying to figure this out: I ended up converting the audio to an Int16Array, sending that across the socket, and on the client converting it back into a Float32Array and passing it to the play_sound function. I basically just stole a bunch of stuff off Stack Overflow and faked it until I made it, because I'm not that smart :)
Capturing the mic, converting it to an Int16Array, then sending it across the socket:
navigator.mediaDevices.getUserMedia({ audio: { sampleSize: 16, channelCount: 2 }, video: true }) // request cam
  .then(stream => {
    vid.srcObject = stream; // don't use createObjectURL(MediaStream)
    context = new AudioContext();
    var source = context.createMediaStreamSource(stream);
    var processor = context.createScriptProcessor(1024, 2, 2);
    source.connect(processor);
    processor.connect(context.destination);

    processor.onaudioprocess = function(e) {
      // Do something with the data, i.e Convert this to WAV
      audiodata = new Int16Array(convertFloat32ToInt16(e.inputBuffer.getChannelData(0)));
      console.log(audiodata);
      socket.send(JSON.stringify({ sound: audiodata, to: to, from: '$username', text: '' }));
    };

    return vid.play(); // returns a Promise
  });
relevant function for converting captured mic to int16array:
function convertFloat32ToInt16(buffer) {
  var l = buffer.length;
  var buf = new Int16Array(l);
  while (l--) {
    // clamp to [-1, 1] before scaling to the 16-bit range
    buf[l] = Math.max(-1, Math.min(1, buffer[l])) * 0x7FFF;
  }
  return buf.buffer;
}
receiving client side json object to int16array, then int16array back to float32array:
if (typeof (message.sound) != "undefined") {
  //$('#video_stream_btn').trigger('click');
  //var json_sound = message.sound;
  if (typeof (context) == 'undefined') {
    context = new AudioContext();
  }

  sound_array = [];
  for (i in message.sound) {
    sound_array[i] = (message.sound[i]);
  }

  //sound_array16 = new Int16Array(sound_array);
  sound_array32 = int16ToFloat32(sound_array);

  play_sound(sound_array32, context);
  return;
}
relevant receiving side int16array to float32array function:
function int16ToFloat32(inputArray) {
  let int16arr = new Int16Array(inputArray);
  var output = new Float32Array(int16arr.length);
  for (var i = 0; i < int16arr.length; i++) {
    var int = int16arr[i];
    var float = (int >= 0x8000) ? -(0x10000 - int) / 0x8000 : int / 0x7FFF;
    output[i] = float;
  }
  return output;
}
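As a quick sanity check, the two helpers should roughly round-trip a buffer of samples in [-1, 1]. A small sketch, assuming both functions above are loaded (the 440 Hz tone and 44100 Hz rate are just example values):

// Fill a fake "mic" buffer with a low-amplitude sine wave
var samples = new Float32Array(1024);
for (var i = 0; i < samples.length; i++) {
  samples[i] = 0.25 * Math.sin(2 * Math.PI * 440 * i / 44100);
}

// Float32 -> Int16 -> Float32, as done on the two ends of the socket
var sent = convertFloat32ToInt16(samples);            // ArrayBuffer of int16 samples
var received = int16ToFloat32(new Int16Array(sent));  // back to floats

// The round-tripped values should be close to the originals
console.log(samples[10], received[10]);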

fabricjs on retina: new object jumps to left top

I'm continuing my work on a collaborative sketch tool and am trying to add support for retina devices. Currently I get the following behavior when a user draws on an iPad Air:
small movie
Here is my code:
this.getZoomLevel = function (height) {
    if (height > 1024) {
        return 1024 / height;
    } else {
        return height / 1024;
    }
};

this.calculateCanvasSize = function (pHeight, pWidth) {
    var result = {
        height: 0,
        width: 0
    };
    while (result.width < pWidth - 1 && result.height < pHeight - 1) {
        result.height = result.height + 1;
        result.width = result.height * 4 / 3;
    }
    return result;
};

this.initCanvas = function () {
    try {
        var parent = document.getElementsByClassName('komaso-canvas-container')[0];
        var canvasSize = this.calculateCanvasSize(parent.clientHeight, parent.clientWidth);
        var canvasHtml = "<div id='wrapper-" + this.Id + "' class='whiteboard-canvas-wrapper' data-ng-show='CurrentPage.Id==" + this.Id + "'><canvas width='" + canvasSize.width + "' height='" + canvasSize.height + "' id='whiteboard-" + this.Id + "' class='whiteboard'><p>Your browser does not support Canvas</p></canvas></div>";
        $(parent).append($compile(canvasHtml)(scope));

        this.Canvaso = document.getElementById(this.HtmlId);
        if (!this.Canvaso) {
            console.log('Error: Cannot find the imageView canvas element!');
            return;
        }
        if (!this.Canvaso.getContext) {
            console.log('Error: no canvas.getContext!');
            return;
        }

        this.FabricCanvas = new fabric.Canvas(this.HtmlId, { selectionColor: 'transparent' });
        this.FabricCanvas.setWidth(canvasSize.width);
        this.FabricCanvas.setHeight(canvasSize.height);

        fabric.Object.prototype.transparentCorners = false;

        this.FabricCanvas.on('mouse:down', this.onMouseDown);
        this.FabricCanvas.on('mouse:up', this.onMouseUp);
        this.FabricCanvas.on('mouse:move', this.onMouseMove);
        this.FabricCanvas.on('object:added', this.onObjectAdded);
        this.FabricCanvas.on('text:editing:exited', self.onTextObjectEdited);

        if (window.devicePixelRatio !== 1) {
            var c = this.FabricCanvas.getElement();
            var w = c.width, h = c.height;
            c.setAttribute('width', w * window.devicePixelRatio);
            c.setAttribute('height', h * window.devicePixelRatio);
            $(c).width(canvasSize.width);
            $(c).height(canvasSize.height);
            c.getContext('2d').scale(window.devicePixelRatio, window.devicePixelRatio);
        }

        this.FabricCanvas.setZoom(this.getZoomLevel(this.Canvaso.height));
        this.ToggleTool(self.CurrentTool.ToolName);
        this.WhiteboardInitiated = true;
    } catch (e) {
        console.log(e);
    }
};
getZoomLevel returns the value to pass into the setZoom method of the Fabric.js canvas object. We decided that all clients' canvases have a 4:3 aspect ratio with a default dimension of 1024x768, so the zoom factor is calculated from these dimensions.
calculateCanvasSize returns the width and height for the canvas according to the 4:3 rule.
If you have any idea how to fix this wrong behavior, please post a comment. Thank you in advance!
I would suggest you update to a retina-enabled version of Fabric.js (grab 1.6.2).
If, for any reason, you can't, I think the problem is here:
if (window.devicePixelRatio !== 1) {
    var c = this.FabricCanvas.getElement();
    ...
    c.getContext('2d').scale(window.devicePixelRatio, window.devicePixelRatio);
}
getContext returns a new context. This is not the context fabric is going to render to later. If you want a retina-enabled lowerCanvas you have to scale this.FabricCanvas.contextContainer, which gets created and referenced on fabric.Canvas initialization.
I suggest you switch to a newer fabric anyway.
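For reference, a minimal sketch of what that change could look like inside the question's initCanvas, assuming Fabric.js 1.x where contextContainer is the 2D context of the lower canvas:

if (window.devicePixelRatio !== 1) {
    var ratio = window.devicePixelRatio;
    var c = this.FabricCanvas.getElement();        // lower canvas element
    var w = canvasSize.width, h = canvasSize.height;

    // enlarge the backing store, keep the CSS size unchanged
    c.setAttribute('width', w * ratio);
    c.setAttribute('height', h * ratio);
    $(c).width(w);
    $(c).height(h);

    // scale the context fabric actually renders with,
    // not a context obtained separately via getContext('2d')
    this.FabricCanvas.contextContainer.scale(ratio, ratio);
}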

Move the sprite but don't collide with other sprites in the scene in Phaser

I'm referring to the official example on the Phaser.io site, copied here for reference below. What I want, and repeatedly fail to achieve, is that the moving (with keyboard keys) starfield sprite does not collide with the other veggies sprites.
I did go through the docs and looked here on SO and on their forum, and it seems the solution should be easy enough: just put the following code in the update() function:
game.world.bringToTop(sprite);
But, for some reason this is not working for me, so please tell me what I'm doing wrong.
var game = new Phaser.Game(800, 600, Phaser.CANVAS, 'phaser-example', { preload: preload, create: create, update: update });

function preload() {
    game.load.image('sky', 'assets/skies/sky4.png');
    game.load.image('starfield', 'assets/misc/starfield.jpg');
    game.load.spritesheet('veggies', 'assets/sprites/fruitnveg64wh37.png', 64, 64);
}

var sprite;
var cursors;
var veggies;

function create() {
    game.add.image(0, 0, 'sky');

    // Enable p2 physics
    game.physics.startSystem(Phaser.Physics.P2JS);

    // Make things a bit more bouncey
    game.physics.p2.defaultRestitution = 0.8;

    // Add a sprite
    sprite = game.add.tileSprite(300, 450, 200, 50, 'starfield');

    // Enable if for physics. This creates a default rectangular body.
    game.physics.p2.enable(sprite);

    veggies = game.add.group();
    veggies.enableBody = true;
    veggies.physicsBodyType = Phaser.Physics.P2JS;

    var vegFrames = [ 1, 3, 4, 8 ];

    for (var i = 0; i < 10; i++)
    {
        var veg = veggies.create(game.world.randomX, game.world.randomY, 'veggies', game.rnd.pick(vegFrames));
        veg.body.setCircle(26);
    }

    text = game.add.text(20, 20, 'move with arrow keys', { fill: '#ffffff' });

    cursors = game.input.keyboard.createCursorKeys();
}

function update() {
    sprite.body.setZeroVelocity();
    game.world.bringToTop(veggies);

    if (cursors.left.isDown)
    {
        sprite.body.moveLeft(400);
        sprite.tilePosition.x -= 8;
    }
    else if (cursors.right.isDown)
    {
        sprite.body.moveRight(400);
        sprite.tilePosition.x += 8;
    }

    if (cursors.up.isDown)
    {
        sprite.body.moveUp(400);
        sprite.tilePosition.y -= 8;
    }
    else if (cursors.down.isDown)
    {
        sprite.body.moveDown(400);
        sprite.tilePosition.y += 8;
    }
}
edit: Solution which worked in the end thanks to SirSandman's answer:
var game = new Phaser.Game(800, 600, Phaser.AUTO, 'phaser-example', { preload: preload, create: create, update: update, render: render });

function preload() {
    game.load.image('stars', 'assets/misc/starfield.jpg');
    game.load.spritesheet('ship', 'assets/sprites/humstar.png', 32, 32);
    game.load.image('panda', 'assets/sprites/spinObj_01.png');
    game.load.image('sweet', 'assets/sprites/spinObj_06.png');
}

var ship;
var starfield;
var cursors;

function create() {
    // Enable P2
    game.physics.startSystem(Phaser.Physics.P2JS);

    // Turn on impact events for the world, without this we get no collision callbacks
    game.physics.p2.setImpactEvents(true);

    game.physics.p2.restitution = 0.8;

    // Create our collision groups. One for the player, one for the pandas
    var playerCollisionGroup = game.physics.p2.createCollisionGroup();
    var pandaCollisionGroup = game.physics.p2.createCollisionGroup();

    // This part is vital if you want the objects with their own collision groups to still collide with the world bounds
    // (which we do) - what this does is adjust the bounds to use its own collision group.
    game.physics.p2.updateBoundsCollisionGroup();

    starfield = game.add.tileSprite(0, 0, 800, 600, 'stars');
    starfield.fixedToCamera = true;

    var pandas = game.add.group();
    pandas.enableBody = true;
    pandas.physicsBodyType = Phaser.Physics.P2JS;

    for (var i = 0; i < 4; i++)
    {
        var panda = pandas.create(game.world.randomX, game.world.randomY, 'panda');
        panda.body.setRectangle(40, 40);

        // Tell the panda to use the pandaCollisionGroup
        panda.body.setCollisionGroup(pandaCollisionGroup);

        // Pandas will collide against themselves and the player
        // If you don't set this they'll not collide with anything.
        // The first parameter is either an array or a single collision group.
        panda.body.collides(pandaCollisionGroup);

        panda.body.velocity.x = 500;
        panda.body.velocity.y = 500;
    }

    // Create our ship sprite
    ship = game.add.sprite(200, 200, 'ship');

    ship.scale.set(2);
    ship.smoothed = false;
    ship.animations.add('fly', [0,1,2,3,4,5], 10, true);
    ship.play('fly');

    game.physics.p2.enable(ship, false);
    ship.body.setCircle(28);
    ship.body.fixedRotation = true;

    // Set the ships collision group
    ship.body.setCollisionGroup(playerCollisionGroup);

    // The ship will collide with the pandas, and when it strikes one the hitPanda callback will fire, causing it to alpha out a bit
    // When pandas collide with each other, nothing happens to them.
    game.camera.follow(ship);

    cursors = game.input.keyboard.createCursorKeys();
}

function hitPanda(body1, body2) {
    // body1 is the space ship (as it's the body that owns the callback)
    // body2 is the body it impacted with, in this case our panda
    // As body2 is a Phaser.Physics.P2.Body object, you access its own (the sprite) via the sprite property:
    body2.sprite.alpha -= 0.1;
}

function update() {
    ship.body.setZeroVelocity();

    if (cursors.left.isDown)
    {
        ship.body.moveLeft(200);
    }
    else if (cursors.right.isDown)
    {
        ship.body.moveRight(200);
    }

    if (cursors.up.isDown)
    {
        ship.body.moveUp(200);
    }
    else if (cursors.down.isDown)
    {
        ship.body.moveDown(200);
    }

    if (!game.camera.atLimit.x)
    {
        starfield.tilePosition.x += (ship.body.velocity.x * 16) * game.time.physicsElapsed;
    }

    if (!game.camera.atLimit.y)
    {
        starfield.tilePosition.y += (ship.body.velocity.y * 16) * game.time.physicsElapsed;
    }
}

function render() {
    game.debug.text('Collide with the Pandas!', 32, 32);
}
In P2 you have to set the collision groups, in contrast to Arcade physics.
I think you have to create a collision group for the veggies like this:
var veggCollisionGroup = game.physics.p2.createCollisionGroup();
and then, inside the loop, define which other groups that group should collide with:
veg.body.setCollisionGroup(veggCollisionGroup);
veg.body.collides(veggCollisionGroup);
After that your tile sprite should no longer collide with the veggie sprites.
Source:
http://phaser.io/examples/v2/p2-physics/collision-groups
If I'm wrong, you will find your answer in the examples. :)
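Applied to the create() function from the question, the suggestion would look roughly like this (a sketch, not a tested drop-in; whether you also need game.physics.p2.updateBoundsCollisionGroup() depends on whether the veggies should still hit the world bounds):

// in create(), before the loop that spawns the veggies
var veggCollisionGroup = game.physics.p2.createCollisionGroup();

for (var i = 0; i < 10; i++)
{
    var veg = veggies.create(game.world.randomX, game.world.randomY, 'veggies', game.rnd.pick(vegFrames));
    veg.body.setCircle(26);

    // veggies get their own group and only collide with each other,
    // so the starfield sprite (which is not in this group) passes through them
    veg.body.setCollisionGroup(veggCollisionGroup);
    veg.body.collides(veggCollisionGroup);
}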

leap motion JS, pitch undefined

I have a hello-world setup with Leap Motion using Node.js.
I want to read the pitch value for a hand, but it returns "undefined".
var Leap = require('leapjs');
var webSocket = require('ws'),
    ws = new webSocket('ws://127.0.0.1:6437');

ws.on('message', function(data, flags) {
  frame = JSON.parse(data);

  var controller = Leap.loop(function(frame) {
    if (frame.hands.length == 1) {
      var hand = frame.hands[0];
      var position = hand.palmPosition;
      var velocity = hand.palmVelocity;
      var direction = hand.direction;
      var finger = hand.fingers[0];
      var pitch = direction.pitch;
      var type = hand.type;

      if (type == "left") {
        console.log("Left hand.");
        console.log("pitch ::" + pitch);
      }
      if (type == "right") {
        console.log("Right hand.");
      }
    }
  });

  if (frame.hands && frame.hands.length > 1) {
    console.log("two");
  }
  if (frame.hands && frame.hands.length == 0) {
    console.log("apaga too");
  }
});
So, when I log my left hand I get:
Left hand.
pitch ::undefined
I believe pitch is a method belonging to hand, not an attribute belonging to direction (see the docs). Have you tried hand.pitch()?
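If that is the case, the lookup inside the loop would change to something like this (a sketch against the question's code; treating pitch() as a method on the hand object is my reading of the leapjs API):

var controller = Leap.loop(function (frame) {
  if (frame.hands.length == 1) {
    var hand = frame.hands[0];

    // pitch() is called as a method on the hand, not read off hand.direction
    var pitch = hand.pitch();

    if (hand.type == "left") {
      console.log("Left hand.");
      console.log("pitch ::" + pitch);
    }
  }
});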
