Wrong metadata when using cls-hooked - node.js

I have an app that connects to an MQTT broker. I want to publish a message for 1200 devices, attaching each device's id as metadata. The following is the code:
"use-strict"
const RSVP = require('rsvp');
const Mqtt = require('mqtt');
const cls = require('cls-hooked');
const namespace = "firstName";
let clsNamespace;
let client = Mqtt.connect("alis://test.mosquitto.org");
if (!client) {
logger.Error("Test", 'Init', 'No mqtt client provided');
throw new extError('No mqtt client created');
}
client.on('connect', async () => {
console.log("Connected");
try {
clsNamespace = cls.createNamespace(namespace);
main();
} catch (error) {
console.log(error);
}
});
function main() {
var devices = [];
for (var i = 0; i < 1200; i++) {
devices.push({ "id": i });
}
RSVP.all(devices.map(async (item) => await updateDevice(item)));
}
async function updateDevice(device) {
try {
return await wrapContext(clsNamespace, async () => {
setContext({ device: device.id });
console.log("update " + device.id + " metadata =" + JSON.stringify(__getMetadata()));
return publish("message", device.id);
});
} catch (error) {
console.log(error);
}
}
function setContext(context) {
try {
let ctxKeys = clsNamespace.get('contextKeys') ? clsNamespace.get('contextKeys') : [];
for (const key in context) {
clsNamespace.set(key, context[key]);
if (ctxKeys.indexOf(key) === -1) {
ctxKeys.push(key);
}
}
clsNamespace.set('contextKeys', ctxKeys);
} catch (error) {
console.error(error);
console.log('cannot set context', context);
throw error;
}
}
function publish(message, deviceId) {
return new RSVP.Promise((resolve, reject) => {
try {
client.publish(message,
deviceId,
(error) => {
if (error) {
console.log("error")
reject(error);
} else {
console.log("publish " + deviceId + " metadata" + JSON.stringify(__getMetadata()));
resolve();
}
});
} catch (error) {
console.log(error);
}
});
}
async function wrapContext(cls, callback) {
let defer = RSVP.defer();
let context = await cls.run(async (contextObj) => {
try {
let result = await callback(contextObj);
defer.resolve(result);
} catch (error) {
defer.reject(error);
}
});
return defer.promise;
};
function __getMetadata() {
const metadata = {};
let contextData = {};
for (const key of clsNamespace.get('contextKeys') || []) {
contextData[key] = clsNamespace.get(key);
}
for (const key in contextData) {
metadata[key] = contextData[key];
}
return metadata;
}
The output is the following:
update 0 metadata ={"device":0}
publish 0 metadata{"device":0}
update 1 metadata ={"device":1}
publish 1 metadata{"device":1}
... (same thing for 1165 devices)
update 1166 metadata ={"device":1166}
update 1167 metadata ={"device":1167}
update 1168 metadata ={"device":1168}
update 1169 metadata ={"device":1169}
... (same thing until 1199)
update 1199 metadata ={"device":1199}
publish 1166 metadata{"device":1199}
publish 1167 metadata{"device":1199}
publish 1168 metadata{"device":1199}
... (same thing until 1199)
As you can see, the metadata is correct for the publish logs up to device 1165, but once the iteration is interrupted and the remaining publishes complete asynchronously, the metadata in those publish logs is mismatched (they all show the last device's id, 1199).
Is there a way to fix this?
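For what it's worth, the mismatch pattern suggests that once the MQTT client starts queueing, the publish callbacks run outside the per-device context. One remedy commonly suggested with cls-hooked is to bind such callbacks to the active namespace explicitly. Below is a minimal sketch of the publish function along those lines (same clsNamespace as above; not tested against your setup):
function publish(message, deviceId) {
  return new RSVP.Promise((resolve, reject) => {
    // clsNamespace.bind keeps the context that was active when publish() was
    // called, even if the MQTT client invokes the callback later from its queue.
    // MQTT payloads must be strings or Buffers, hence String(deviceId).
    client.publish(message, String(deviceId), clsNamespace.bind((error) => {
      if (error) {
        reject(error);
      } else {
        console.log("publish " + deviceId + " metadata" + JSON.stringify(__getMetadata()));
        resolve();
      }
    }));
  });
}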

Related

How to get redis value for a given redis key using Nodejs + Redis

I am using Node.js + Redis to set and get key:value pairs. I have written sample code to set a key:value pair and then fetch it, following the default readme of the npm redis package.
My goal here is to get a value from the Redis server for any given key. I have followed the steps given in the npm redis readme. I am able to log the value, but since the plugin is async I can't figure out a way to make a synchronous client.get request.
My code is
var redis = require("redis"),
client = redis.createClient();
const bluebird = require("bluebird");
bluebird.promisifyAll(redis.RedisClient.prototype);
bluebird.promisifyAll(redis.Multi.prototype);
const redisKey = "redis-set-key";
const redisValue = "hello-world";
client.set(redisKey, redisValue);
function getData(key) {
return client.get(key, function(err, result) {
console.log("1. Get key from redis - ", result.toString());
return result.toString();
});
}
const getRedisdata = getData(redisKey);
console.log("2. getRedisdata", getRedisdata);
Result
2. getRedisdata false
1. Get key from redis - hello-world
My goal is to get the result like this
1. Get key from redis - hello-world
2. getRedisdata hello-world
Please help me resolve this.
Found a solution; here is my resolved code:
const redis = require("redis");
const client = redis.createClient();
const bluebird = require("bluebird");
bluebird.promisifyAll(redis.RedisClient.prototype);
bluebird.promisifyAll(redis.Multi.prototype);
const redisKey = "redis-set-key";
const redisValue = "hello-world";
client.set(redisKey, redisValue);
async function setKey(key, value, expire = "EX", time = 300) {
return new Promise((resolve, reject) => {
return client.set(key, value, function(err, result) {
if (result === null) {
reject("set key fail promise");
} else {
resolve(result);
}
});
});
}
async function getKey(key) {
return new Promise((resolve, reject) => {
return client.getAsync(key).then(function(res) {
if (res == null) {
reject("fail promise");
} else {
resolve(res);
}
});
});
}
async function hashGetKey(hashKey, hashvalue) {
return new Promise((resolve, reject) => {
return client.hget(hashKey, hashvalue, function(err, res) {
if (res == null) {
reject("hash key fail promise");
} else {
resolve(res.toString());
}
});
});
}
async function hashGetAllKey(hashKey) {
return new Promise((resolve, reject) => {
return client.hgetall(hashKey, function(err, res) {
if (res == null) {
reject("hash key all fail promise");
} else {
resolve(res);
}
});
});
}
async function delKey(key) {
return new Promise((resolve, reject) => {
return client.del(key, function(err, result) {
if (result === null) {
reject("delete fail promise");
} else {
resolve(result);
}
});
});
}
(async () => {
// get single key value
try {
const keyData = await getKey("string key");
console.log("Single key data:-", keyData);
} catch (error) {
console.log("Single key data error:-", error);
}
// get single hash key value
try {
const hashKeyData = await hashGetKey("hashkey", "hashtest 1");
console.log("Single hash key data:-", hashKeyData);
} catch (error) {
console.log("Single hash key data error:-", error);
}
// get all hash key values
try {
const allHashKeyData = await hashGetAllKey("hashkey");
console.log("All hash key data:-", allHashKeyData);
} catch (error) {
console.log("All hash key data error:-", error);
}
// delete single key
try {
const checkDel = await delKey("XXYYZZ!!!!");
console.log("Check key delete:-", checkDel);
} catch (error) {
console.log("Check key delete error:-", error);
}
// set single key
try {
const checkSet = await setKey("XXYYZZ", "AABBCC");
console.log("Check data setkey", checkSet);
} catch (error) {
console.log("Check data setkey error", error);
}
})();
// hget hashkey "hashtest 1"
client.hset("hashkey", "hashtest 1", "some value", redis.print);
client.hset(["hashkey", "hashtest 2", "some other value"], redis.print);
Have you read the Redis module's readme? It shows another way, using util.promisify, to await the async Redis get as if it were synchronous:
const { promisify } = require("util");
const getAsync = promisify(client.get).bind(client);
getAsync(redisKey).then(console.log).catch(console.error);
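For completeness, a small sketch of consuming that promisified getter with async/await (reusing the redisKey defined earlier):
(async () => {
  try {
    const value = await getAsync(redisKey); // resolves once Redis replies
    console.log("2. getRedisdata", value);  // logs the actual value, not a boolean
  } catch (err) {
    console.error("get failed:", err);
  }
})();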

WebRTC P2P with browser and Android (Flutter) not working although everything seems fine

I tried to set up a WebRTC P2P video communication with Flutter, a Node.js backend as the signaling server, and the Kurento media server.
When I run the app in combination with the Kurento demo everything works fine, but as soon as I try to use my own backend the video stream doesn't start, although the log messages indicate that everything is ok.
Please let me know if more input is required to find a solution.
Relevant code snippets
Web Frontend:
call(username) {
const wrapper = this;
const remoteVideo = this.videoOutputFactory();
if (!remoteVideo) {
console.error('videoOutput not found');
}
const options = {
'remoteVideo': document.getElementById('testVideo'),
onicecandidate(candidate) {
console.debug('onicecandidate', candidate);
wrapper.rpc.onIceCandidate(candidate);
},
mediaConstraints: wrapper.remoteMediaConstraints
};
console.debug('WebRtcWrapper.call: options', options);
return new Promise((resolve, reject) => {
console.log('Creating WebRtcPeer');
this.webRtcPeer = kurentoUtils.WebRtcPeer.WebRtcPeerSendrecv(options, (error) => {
if (error) {
console.error('Error while creating WebRtcPeer', error);
reject(error);
return;
}
console.log('Generating WebRtcPeer offer');
this.webRtcPeer.generateOffer((offerError, offerSdp) => {
if (offerError) {
console.error('Error while generating WebRtcPeer offer', offerError);
reject(offerError);
return;
}
this.rpc.call(username, offerSdp).then((res) => {
console.log("Got call answer - Generated-SDPOffer: " + offerSdp);
if (res.response === 'rejected') {
console.log('Call rejected by peer');
reject(res.rejectionMessage);
return;
}
console.log('Processing peer SDP answer', res.sdpAnswer);
this.webRtcPeer.processAnswer(res.sdpAnswer);
});
});
});
});
}
App
TestController._()
: _channel = IOWebSocketChannel.connect('wss://server.marcostephan.at:443') {
_peer = jsonrpc.Peer(_channel.cast<String>());
_peer.registerMethod(
'rtc.incomingCall', (jsonrpc.Parameters message) async => await _onIncomingCall(message));
_peer.registerMethod(
'rtc.offerIceCandidate', (jsonrpc.Parameters message) => _onOfferIceCandidate(message));
_peer.registerMethod(
'rtc.startCommunication', (jsonrpc.Parameters message) => _onStartCommunication(message));
_peer.registerMethod('conn.heartbeat', (jsonrpc.Parameters message) => "");
_peer.registerFallback((jsonrpc.Parameters params) =>
print('Unknown request [${params.method}]: ${params.value}'));
_peer.listen();
_peer.sendRequest("auth.login", {'username': 'john.doe', 'role': 'actor'});
_peer.sendNotification("disp.helpMe", {'category': 'spareParts'});
}
_onIncomingCall(jsonrpc.Parameters message) async {
try{
print('Incoming call from ${message['username'].value}');
if (this.onStateChange != null) {
this.onStateChange(SignalingState.CallStateNew);
}
await _createPeerConnection();
RTCSessionDescription s = await _peerConnection
.createOffer(_constraints);
await _peerConnection.setLocalDescription(s);
return {
'from': message['username'].value,
'callResponse': 'accept',
'sdpOffer': s.sdp
};
}
catch(e){
print('TestController._onIncomingCall: ERROR: $e');
}
}
_onOfferIceCandidate(jsonrpc.Parameters message) {
try{
var candidateMap = message['candidate'].value;
print('Received IceCandidate $candidateMap');
if (_peerConnection != null) {
RTCIceCandidate candidate = new RTCIceCandidate(candidateMap['candidate'],
candidateMap['sdpMid'], candidateMap['sdpMLineIndex']);
_peerConnection.addCandidate(candidate);
}
}
catch(e){
print('TestController._onOfferIceCandidate: ERROR: $e');
}
}
_onStartCommunication(jsonrpc.Parameters message) {
try{
_peerConnection.setRemoteDescription(
RTCSessionDescription(message['sdpAnswer'].value, 'answer'));
}
catch(e){
print('TestController._onStartCommunication: ERROR: $e');
}
}
_createPeerConnection() async {
_localStream = await _createStream();
RTCPeerConnection pc = await createPeerConnection(_iceServers, _config);
_peerConnection = pc;
pc.addStream(_localStream);
pc.onAddStream = (stream) {
if (this.onRemoteStream != null) this.onRemoteStream(stream);
//_remoteStreams.add(stream);
};
pc.onIceConnectionState = (state) {
print(
'TestController._createPeerConnection: onIceConnectionState: $state');
};
pc.onIceCandidate = (candidate) {
_peer.sendNotification("rtc.onIceCandidate", {
'candidate': {
'sdpMLineIndex': candidate.sdpMlineIndex,
'sdpMid': candidate.sdpMid,
'candidate': candidate.candidate
}
});
};
}
Future<MediaStream> _createStream() async {
final Map<String, dynamic> mediaConstraints = {
'audio': true,
'video': {
'mandatory': {
'minWidth': '1980',
'minHeight': '1020',
'minFrameRate': '30',
},
'facingMode': 'environment',
'optional': [],
}
};
MediaStream stream = await navigator.getUserMedia(mediaConstraints);
if (this.onLocalStream != null) {
this.onLocalStream(stream);
}
return stream;
}
final Map<String, dynamic> _iceServers = {
'iceServers': [
{'url': 'stun:stun.l.google.com:19302'},
]
};
final Map<String, dynamic> _config = {
'mandatory': {},
'optional': [
{'DtlsSrtpKeyAgreement': true},
],
};
final Map<String, dynamic> _constraints = {
'mandatory': {
'OfferToReceiveAudio': true,
'OfferToReceiveVideo': true,
},
'optional': [],
};
Logs
Web Frontend
Pastebin
App
Pastebin

WebSocket and multiple Dyno's on Heroku Node.js app Using Redis

I'm building an App deployed to Heroku which uses WebSocket and Redis.
The WebSocket connection works properly when I use only 1 dyno, but when I scale to 2, every event I send is handled twice by my application.
const ws = require('ws')
const jwt = require('jsonwebtoken')
const redis = require('redis')
const User = require('../models/user')
function verifyClient (info, callback) {
let token = info.req.headers['sec-websocket-protocol']
if (!token) { callback(false, 401, 'Unauthorized') } else {
jwt.verify(token, Config.APP_SECRET, (err, decoded) => {
if (err) { callback(false, 401, 'Unauthorized') } else {
if (info.req.headers.gameId) { info.req.gameId = info.req.headers.gameId }
info.req.userId = decoded.aud
callback(true)
}
})
}
};
let websocketServer, pub, sub
let clients = {}
let namespaces = {}
exports.initialize = function (httpServer) {
websocketServer = new ws.Server({
server: httpServer,
verifyClient: verifyClient
})
pub = redis.createClient(Config.REDIS_URL, { no_ready_check: true, detect_buffers: true })
pub.auth(Config.REDIS_PASSWORD, function (err) {
if (err) throw err
})
sub = redis.createClient(Config.REDIS_URL, { no_ready_check: true, detect_buffers: true })
sub.auth(Config.REDIS_PASSWORD, function (err) {
if (err) throw err
})
function handleConnection (socket) {
// socket.send(socket.upgradeReq.userId);
socket.userId = socket.upgradeReq.userId // get the user id parsed from the decoded JWT in the middleware
socket.isAlive = true
socket.scope = socket.upgradeReq.url.split('/')[1] // url = "/scope/whatever" => ["", "scope", "whatever"]
console.log('New connection: ' + socket.userId + ', scope: ' + socket.scope)
socket.on('message', (data, flags) => { handleIncomingMessage(socket, data, flags) })
socket.once('close', (code, reason) => { handleClosedConnection(socket, code, reason) })
socket.on('pong', heartbeat)
if (socket.scope === 'gameplay') {
try {
User.findByIdAndUpdate(socket.userId, { $set: { isOnLine: 2, lastSeen: Date.now() } }).select('id').lean()
let key = [socket.userId, socket.scope].join(':')
clients[key] = socket
sub.psubscribe(['dispatch', '*', socket.userId, socket.scope].join(':'))
} catch (e) { console.log(e) }
} else {
console.log('Scope : ' + socket.scope)
}
console.log('Connected Users : ' + Object.keys(clients))
}
function handleIncomingMessage (socket, message, flags) {
let scope = socket.scope
let userId = socket.userId
let channel = ['dispatch', 'in', userId, scope].join(':')
pub.publish(channel, message)
}
function handleClosedConnection (socket, code, reason) {
console.log('Connection with ' + socket.userId + ' closed. Code: ' + code)
if (socket.scope === 'gameplay') {
try {
User.findByIdAndUpdate(socket.userId, { $set: { isOnLine: 1 } }).select('id').lean()
let key = [socket.userId, socket.scope].join(':')
delete clients[key]
} catch (e) {
console.log(e)
}
} else {
console.log('Scope : ' + socket.scope)
}
}
function heartbeat (socket) {
socket.isAlive = true
}
sub.on('pmessage', (pattern, channel, message) => {
let channelComponents = channel.split(':')
let dir = channelComponents[1]
let userId = channelComponents[2]
let scope = channelComponents[3]
if (dir === 'in') {
try {
let handlers = namespaces[scope] || []
if (handlers.length) {
handlers.forEach(h => {
h(userId, message)
})
}
} catch (e) {
console.log(e)
}
} else if (dir === 'out') {
try {
let key = [userId, scope].join(':')
if (clients[key]) { clients[key].send(message) }
} catch (e) {
console.log(e)
}
}
// otherwise ignore
})
websocketServer.on('connection', handleConnection)
}
exports.on = function (scope, callback) {
if (!namespaces[scope]) { namespaces[scope] = [callback] } else { namespaces[scope].push(callback) }
}
exports.send = function (userId, scope, data) {
let channel = ['dispatch', 'out', userId, scope].join(':')
if (typeof (data) === 'object') { data = JSON.stringify(data) } else if (typeof (data) !== 'string') { throw new Error('DispatcherError: Cannot send this type of message ' + typeof (data)) }
pub.publish(channel, data)
}
exports.clients = clients
This is working on localhost.
Please let me know if I need to provide more info or code. Any help on this would be greatly appreciated, thanks in advance!
You have a lot of extraneous info in the code you posted, so it's difficult to understand exactly what you mean.
However, if I understand correctly, you currently have multiple worker dyno instances subscribing to the same channels in some kind of pub/sub network. If you don't want all dynos to subscribe to the same channels, you need to put some logic in to make sure that your channels get distributed across dynos.
One simple way to do that might be to use something like the logic described in this answer.
In your case you might be able to use socket.userId as the key to distribute your channels across dynos.
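To make that concrete, here is a rough sketch of the distribution idea: hash socket.userId into a bucket and let each dyno handle only its own bucket. The DYNO_COUNT and DYNO_INDEX environment variables are assumptions you would have to provide yourself (for example derived from Heroku's $DYNO name such as "web.1"), so treat this as a starting point rather than a drop-in fix.
const crypto = require('crypto');

// Returns true when this dyno is responsible for the given user.
function ownsUser(userId) {
  const hash = crypto.createHash('md5').update(String(userId)).digest();
  const bucket = hash.readUInt32BE(0) % Number(process.env.DYNO_COUNT);
  return bucket === Number(process.env.DYNO_INDEX);
}

// In handleConnection, subscribe only when this dyno owns the user:
// if (ownsUser(socket.userId)) {
//   sub.psubscribe(['dispatch', '*', socket.userId, socket.scope].join(':'));
// }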

Bulk update to Postgres with node js performance issue

I'm facing a performance issue while trying to do a bulk update in PostgreSQL. It's taking more than 180 seconds to update around 23,000 records. Please find the code below. I'm using the pg-promise library. Is there anything I could do to improve the performance?
const pgp = require('pg-promise')();
const postgresDBConfig = {
host: Config.postgresDBHost,
port: Config.postgresDBPort,
database: Constants.postgresDBName,
user: Config.postgresDBUser,
password: 'pswd'
};
export async function getTransactionDetails(): Promise<any> {
return new Promise<any>(async function (resolve, reject) {
try {
let db = pgp(postgresDBConfig);
db.connect();
let query = "SELECT * FROM table_name";
db.any(query)
.then(data => {
console.log("Executed successfully::");
resolve(data);
})
.catch(error => {
console.log('ERROR:', error);
})
} catch (error) {
log.error("Error::" + error);
throw error;
}
});
}
export async function updateStatus(result: any, status: string) {
try {
let db = pgp(postgresDBConfig);
//db.connect();
let updateData = [];
_.forEach(result, function (row) {
let updateInfo = {};
updateInfo["sessionid"] = row.sessionid;
updateInfo["status"] = status;
updateData.push(updateInfo);
});
console.log("updateData::" + updateData.length);
const tableName = new pgp.helpers.TableName('table_name', 'schema_name');
let columnset = new pgp.helpers.ColumnSet(['?sessionid', 'status'], { table: tableName });
let update = pgp.helpers.update(updateData, columnset);
db.none(update).then(() => {
console.log("Updated successfully");
})
.catch(error => {
console.log("Error updating the status" + error);
});
}
catch (error) {
log.error("Error in function updateStatus::" + error);
throw error;
}
}
The code exhibits problems all over the place:
You should initialize the database object only once.
You should not use db.connect() at all, which you also use incorrectly for the async code.
You again use an async block incorrectly, skipping await, so it doesn't execute in the intended order.
You do not append any UPDATE condition clause, so it updates every row unconditionally, over and over, which may be resulting in the delayed mess you're in.
Here's an improved example, though it may need some more work from your side...
const pgp = require('pg-promise')();
const postgresDBConfig = {
host: Config.postgresDBHost,
port: Config.postgresDBPort,
database: Constants.postgresDBName,
user: Config.postgresDBUser,
password: 'pswd'
};
const db = pgp(postgresDBConfig);
const tableName = new pgp.helpers.TableName('table_name', 'schema_name');
const columnSet = new pgp.helpers.ColumnSet(['?sessionid', 'status'], {table: tableName});
export async function getTransactionDetails(): Promise<any> {
try {
const res = await db.any('SELECT * FROM table_name');
console.log('Executed successfully::');
return res;
} catch (error) {
console.log('ERROR:', error);
throw error;
}
}
export async function updateStatus(result: any, status: string) {
try {
let updateData = [];
_.forEach(result, row => {
let updateInfo = {};
updateInfo["sessionid"] = row.sessionid;
updateInfo["status"] = status;
updateData.push(updateInfo);
});
console.log('updateData::', updateData.length);
const update = pgp.helpers.update(updateData, columnSet) +
' WHERE v.sessionid = t.sessionid';
await db.none(update);
console.log('Updated successfully');
}
catch (error) {
console.log('Error in function updateStatus:', error);
throw error;
}
}
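In case it helps, a small usage sketch of the two functions above (the 'PROCESSED' status value is only a placeholder):
(async () => {
  try {
    const rows = await getTransactionDetails();
    await updateStatus(rows, 'PROCESSED'); // placeholder status
    console.log('Done');
  } catch (error) {
    console.log('Bulk update failed:', error);
  } finally {
    pgp.end(); // shut down the connection pool when the script is finished
  }
})();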

How to handle an array of promises

I built a database that contains a list of users who receive messages from Firebase, each message carrying a request key. Every time a new request with status 'open' is inserted, I try to sort all open requests by their 'TimeStamp' value and send them in that order to the receivers (each receiver gets one message).
If the list of receivers is empty I want to hold the request until another receiver is added to the list, and then continue to the next request.
I am not sure how to send each promise separately, one after another:
exports.notificationListener=functions.database.ref('requests')
.onWrite(event=>{
const ref = event.data.ref;
let requests= [];
var query=ref.orderByChild('TimeStamp');
query.once('value',function(snap){
snap.forEach(request=>{
if(request.val().Status==OPEN)
requests.push(request.key);
});
});
for (let key of requests) {
return getOnlinReceiversToken().then(token=>{
let msg = {
data: {
source: key
}
};
return admin.messaging().sendToDevice(token, msg);
});
}
});
function getOnlinReceiversToken() {
let result = new Promise((resolve, reject) => {
receiverRef.once('value', (snap) => {
resolve(snap);
},
(err) => {
reject(err);
});
});
return result.then(snap => {
snap.forEach(child => {
if(child.Status == ONLINE){
let token = helper.getToken(child.key,db);
break;
}
});
return token;
});
}
try something like this
var promisesArray = [];
for (let key of requests) {
var currentPromise = getOnlinReceiversToken().then(token=>{
let msg = {
data: {
source: key
}
};
return admin.messaging().sendToDevice(token, msg);
});
promisesArray.push(currentPromise);
}
return Promise.all(promisesArray);
You could use a function that calls itself to iterate through the promises sequentially to send them one after the other
function runPromise(index) {
// jump out of loop if there are no more requests
if (index >= requests.length) {
return console.log('finished!');
}
return getOnlinReceiversToken().then((token) => {
let msg = {
data: { source: requests[index] }
};
// iterate to the next item only after this send completes
return admin.messaging().sendToDevice(token, msg)
.then(() => runPromise(index + 1));
}).catch((err) => {
// break out of loop when an error occurs
console.log(err);
});
}
runPromise(0);
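If async/await is available in your Cloud Functions runtime, the same sequential idea can be written more directly. A sketch only, assuming requests has already been populated:
async function sendAllSequentially(requests) {
  for (const key of requests) {
    const token = await getOnlinReceiversToken();
    // Wait for each send to finish before moving on to the next request.
    await admin.messaging().sendToDevice(token, { data: { source: key } });
  }
  console.log('finished!');
}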
