Node.js generating duplicate HTTP requests

I hope someone can help with my issue. I'm using the Node.js code below from this SAP tutorial to read sensor values and post them over HTTP. It all works pretty well, except that every record is posted twice (see screenshot). I'm not well versed in server-side JS and don't know where the duplicates come from. Granted, the values are not always identical, but for further processing I'd like to have a single dataset per timestamp. Could someone please help me locate the issue and, if possible, provide a solution or workaround?
The script also reads and transmits the data every 10 s. I'm looking for a way to set the interval to maybe 3 minutes. I would appreciate every bit of help here as well.
/* sensorTag IR Temperature sensor example
* Craig Cmehil, SAP SE (c) 2015
*/
/* Choose the proper HTTP or HTTPS, SAP Cloud Platform requires HTTPS */
var http = require('https');
var SensorTag = require('sensortag');
var lv_temp;
var lv_humid;
var lv_deviceid = "";
var DEBUG_VALUE = true;
var xtimestamp;
var date = new Date();
var time = date.getTime();
// SAP Cloud Platform connection details
var portIoT = 443;
var pathIoT = '/com.sap.iotservices.mms/v1/api/http/data/';
var hostIoT = 'iotmmsXXXXXXXXXXtrial.hanatrial.ondemand.com';
var authStrIoT = 'Bearer XXXXXXXXXXXX';
var deviceId = 'XXXXXX-XXXX-XXXX-XXXX-XXXXXXXXX';
var messageTypeID = 'XXXXXXXXXXXX';
var options = {
host: hostIoT,
port: portIoT,
path: pathIoT + deviceId,
agent: false,
headers: {
'Authorization': authStrIoT,
'Content-Type': 'application/json;charset=utf-8',
'Accept': '*/*'
},
method: 'POST',
};
/***************************************************************/
/* Coding to access TI SensorTag and values of various sensors */
/***************************************************************/
console.log("If not yet activated, then press the power button.");
SensorTag.discover(function(tag) {
tag.on('disconnect', function() {
console.log('disconnected!');
process.exit(0);
});
function connectExecute() {
console.log('Connect Device and Execute Sensors');
tag.connectAndSetUp(enableSensors);
}
function enableSensors() {
/* Read device specifics */
tag.readDeviceName(function(error, deviceName) {
console.log('Device Name = ' + deviceName);
});
tag.readSystemId(function(error, systemId) {
console.log('System ID = ' + systemId);
lv_deviceid = systemId;
});
tag.readSerialNumber(function(error, serialNumber) {
console.log('Serial Number = ' + serialNumber);
});
tag.readFirmwareRevision(function(error, firmwareRevision) {
console.log('Firmware Rev = ' + firmwareRevision);
});
tag.readHardwareRevision(function(error, hardwareRevision) {
console.log('Hardware Rev = ' + hardwareRevision);
});
tag.readSoftwareRevision(function(error, softwareRevision) {
console.log('Software Revision = ' + softwareRevision);
});
tag.readManufacturerName(function(error, manufacturerName) {
console.log('Manufacturer = ' + manufacturerName);
});
/* Enable Sensors */
console.log("Enabling sensors:");
console.log('\tenableIRTemperatureSensor');
tag.enableIrTemperature(notifyMe);
console.log('\tenableHumidity');
tag.enableHumidity(notifyMe);
console.log("*********************************************");
console.log(" To stop press both buttons on the SensorTag ");
console.log("*********************************************");
}
function notifyMe() {
tag.notifySimpleKey(listenForButton);
setImmediate(function loop () {
tag.readIrTemperature(function(error, objectTemperature, ambientTemperature){
lv_obj = objectTemperature.toFixed(1);
lv_ambient = ambientTemperature.toFixed(1);
});
tag.readHumidity(function(error, temperature, humidity) {
lv_temp = temperature.toFixed(1);
lv_humid = humidity.toFixed(1);
});
if(DEBUG_VALUE)
console.log("Sending Data: " + lv_deviceid + " " + lv_temp + " " + lv_humid);
setSensorData(lv_temp, lv_humid);
setTimeout(loop, 10000);
});
}
function listenForButton() {
tag.on('simpleKeyChange', function(left, right) {
if (left && right) {
tag.disconnect();
}
});
}
connectExecute();
});
/******************************************************************/
/* FUNCTION to get Temperature from the Sensor & update into HANA */
/******************************************************************/
function setSensorData(lv_temp,lv_humid){
date = new Date();
time =date.getTime();
var data = {
"mode":"sync",
"messageType": messageTypeID,
"messages": [{
"timestamp": time,
"temperature": lv_temp,
"humidity": lv_humid
}]
};
var strData = JSON.stringify(data);
if(DEBUG_VALUE)
console.log("Data: " + strData);
if(strData.length > 46){
if(DEBUG_VALUE)
console.log("Sending Data to server");
/* Process HTTP or HTTPS request */
options.agent = new http.Agent(options);
var request_callback = function(response) {
var body = '';
response.on('data', function (data) {
body += data;
});
response.on('end', function () {
if(DEBUG_VALUE)
console.log("REQUEST END:", response.statusCode);
});
response.on('error', function(e) {
console.error(e);
});
}
var request = http.request(options, request_callback);
request.on('error', function(e) {
console.error(e);
});
request.write(strData);
request.end();
}else{
if(DEBUG_VALUE)
console.log("Incomplete Data");
}
}

It should only be posting once to the system, but printing twice to the screen.
In the function notifyMe you need to change the line
setTimeout(loop, 10000);
Change the number to the interval (in milliseconds) you want it to wait before posting again.
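For example, to post roughly every 3 minutes instead of every 10 seconds, the only change is the delay value (a small sketch of the relevant lines; the rest of notifyMe stays as it is above):
var SEND_INTERVAL_MS = 3 * 60 * 1000; // 180000 ms = 3 minutes
// ... inside notifyMe(), at the end of the loop:
setTimeout(loop, SEND_INTERVAL_MS); // was: setTimeout(loop, 10000);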

Related

Lambda function taking >3 seconds to run + 5-10 secs warmup each time

I have a simple Node.js function, hosted in an AWS Lambda, that makes 2 REST API calls and writes output over a socket connection. It takes 5-10 secs of warmup time and >3 secs of execution time.
When the code is run locally it executes both requests plus the socket connection and completes in about ~1300 ms. Why does AWS take more than double the execution time? I have set the timeout to 120 seconds and memory at 128 MB (the default).
I appreciate the code is not very tidy; I am working on cleaning it up but needed something going for the time being.
The project simply gets info from ServiceM8 via its API when called by a webhook subscription, then formats the info into ZPL strings and forwards them to a TCP server for printing on a thermal printer.
My questions are:
Is my code the bottleneck?
Can it be optimized to run faster?
Do I simply need to employ a warming plugin for my function to allow hot starting?
My function:
'use strict';
//Require libraries
var request = require("request");
var net = require('net');
exports.handler = (event, context, callback) => {
if (event.eventName != 'webhook_subscription') {
callback(null, {});
}
//Global Variables
var strAssetUUID;
var strAssetURL;
var strFormUUID;
var strTestDate;
var strRetestDate;
var appliancePass = true;
var strAccessToken;
var strResponseUUID;
//Printer Access
const tcpUrl = 'example.com';
const tcpPort = 12345;
var client = new net.Socket();
//UUID of Appliance Test Form.
const strTestFormUUID = 'UUID_of_form';
//Begin function
/**
* Inspect the `eventArgs.entry` argument to get details of the change that caused the webhook
* to fire.
*/
strResponseUUID = event.eventArgs.entry[0].uuid;
strAccessToken = event.auth.accessToken;
console.log('Response UUID: ' + strResponseUUID);
console.log('Access Token: ' + strAccessToken);
//URL Options for FormResponse UUID query
const urlFormResponse = {
url: 'https://api.servicem8.com/api_1.0/formresponse.json?%24filter=uuid%20eq%20' + strResponseUUID,
headers: {
// Use the temporary Access Token that was issued for this event
'Authorization': 'Bearer ' + strAccessToken
}
};
//Query form Response UUID to get information required.
request.get(urlFormResponse, function(err, res, body) {
//Check response code from API query
if (res.statusCode != 200) {
// Unable to query form response records
callback(null, {err: "Unable to query form response records, received HTTP " + res.statusCode + "\n\n" + body});
return;
}
//If we do receive a 200 status code, begin
var arrRecords = JSON.parse(body);
//Store the UUID of the form used for the form response.
strFormUUID = arrRecords[0].form_uuid;
console.log('Form UUID: ' + strFormUUID);
//Store the UUID of the asset the form response relates to.
strAssetUUID = arrRecords[0].asset_uuid;
console.log('Asset UUID: ' + strAssetUUID);
if (strFormUUID == strTestFormUUID){
//Get the edited date and parse it into a JSON date object.
var strEditDate = new Date(arrRecords[0].edit_date);
//Reassemble JSON date to dd-mm-yyyy.
strTestDate = strEditDate.getDate() + '/' + (strEditDate.getMonth() + 1) + '/' + strEditDate.getFullYear();
//Extract the response for retest period.
var strRetestAnswer = JSON.parse(arrRecords[0].field_data);
strRetestAnswer = strRetestAnswer[0].Response;
//Appropriate function based on retest response.
switch(strRetestAnswer) {
case '3 Months':
//Add x months to current test date object
strEditDate.setMonth(strEditDate.getMonth() + 3);
strRetestDate = strEditDate.getDate() + '/' + (strEditDate.getMonth() + 1) + '/' + strEditDate.getFullYear();
break;
case '6 Months':
strEditDate.setMonth(strEditDate.getMonth() + 6);
strRetestDate = strEditDate.getDate() + '/' + (strEditDate.getMonth() + 1) + '/' + strEditDate.getFullYear();
break;
case '12 Months':
strEditDate.setMonth(strEditDate.getMonth() + 12);
strRetestDate = strEditDate.getDate() + '/' + (strEditDate.getMonth() + 1) + '/' + strEditDate.getFullYear();
break;
case '2 Years':
strEditDate.setMonth(strEditDate.getMonth() + 24);
strRetestDate = strEditDate.getDate() + '/' + (strEditDate.getMonth() + 1) + '/' + strEditDate.getFullYear();
break;
case '5 Years':
strEditDate.setMonth(strEditDate.getMonth() + 60);
strRetestDate = strEditDate.getDate() + '/' + (strEditDate.getMonth() + 1) + '/' + strEditDate.getFullYear();
break;
default:
strRetestDate = "FAIL";
appliancePass = false;
}
console.log('Appliance Pass: ' + appliancePass);
console.log('Test Date: ' + strTestDate);
console.log('Retest Period: ' + strRetestAnswer);
console.log('Retest Date: ' + strRetestDate);
//URL Options for Asset UUID query
const urlAssetResponse = {
url: 'https://api.servicem8.com/api_1.0/asset/' + strAssetUUID + '.json',
headers: {
// Use the temporary Access Token that was issued for this event
'Authorization': 'Bearer ' + strAccessToken
}
};
//Query the api for the asset URL of the provided asset UUID.
request.get(urlAssetResponse, function(err, res, body) {
//Check response code from API query
if (res.statusCode != 200) {
// Unable to query asset records
callback(null, {err: "Unable to query asset records, received HTTP " + res.statusCode + "\n\n" + body});
return;
}
//If we do receive a 200 status code, begin
var strAssetResponse = JSON.parse(body);
//Store the asset URL
strAssetURL = 'https://sm8.io/' + strAssetResponse.asset_code;
console.log('Asset URL: ' + strAssetURL);
//generate tag and send to printer
var strZPLPass = ('^XA....^XZ\n');
var strZPLFail = ('^XA....^XZ\n');
//Now that we have our ZPL generated from our dates and URLs
//Send the correct ZPL to the printer.
client.connect(tcpPort, tcpUrl, function() {
console.log('Connected');
//Send Appropriate ZPL
if (appliancePass) {
client.write(strZPLPass);
}else {
client.write(strZPLFail);
}
console.log('Tag Successfully Printed!');
//As the tcp server receiving the string does not return any communication
//there is no way to know when the data has been succesfully received in full.
//So we simply timeout the connection after 750ms which is generally long enough
//to ensure complete transmission.
setTimeout(function () {
console.log('Timeout, connection closing...');
client.destroy();
}, 750);
});
});
}
});
};
First of all, I would suggest you stop using the request module and switch to the native http/https modules. Everything can be done without tons of lines these days. request is a module with 48 total dependencies; if you do the math, that's thousands of lines of code pulled in for a simple GET request.
You should always minimize the complexity of your dependencies. I use a Lambda to check the health of my sites, fetching the whole response and checking the HTML, on completely different servers. The VPS is located in Frankfurt, AWS in Ireland, and my time per request ranges between 100 and 150 ms.
Here's a simple promise request I'm using:
// note: this assumes the native module is already loaded, e.g. const http = require('https'); (the example below targets port 443)
function request(obj, timeout) {
return new Promise(function(res, rej) {
if (typeof obj !== "object") {
rej("Argument must be a valid http request options object")
}
obj.timeout = timeout;
obj.rejectUnauthorized = false;
let request = http.get(obj, (response) => {
if (response.statusCode !== 200) {
rej("Connection error");
}
var body = '';
response.on('data', (chunk) => {
body += chunk;
});
response.on('end', () => {
res(body);
});
response.on('error', (error) => {
rej(error);
});
});
request.setTimeout(timeout);
request.on('error', (error) => {
rej(error);
})
request.on('timeout', () => {
request.abort();
rej("Timeout!")
})
});
}
Example
const reqOpts = {
hostname: 'www.example.com',
port: 443,
path: '/hello',
method: 'GET',
headers: {
handshake: "eXTNxFMxQL4pRrj6JfzQycn3obHL",
remoteIpAddress: event.sourceIp || "lambda"
}
}
let httpTestCall;
try {
httpTestCall = await request(reqOpts, 250);
}
catch (e) {
console.error(e);
}
Now, based on that change, switch your handler to async using exports.handler = async (event, context, callback) => {} and use the console to measure the execution time of every request with console.time() and console.timeEnd(). From there you can see what's slowing your code down in the CloudWatch logs. Here's another example based on your code:
let reqOpts = {
hostname: 'api.servicem8.com',
port: 443,
path: '/api_1.0/formresponse.json?%24filter=uuid%20eq%20' + strResponseUUID,
method: 'GET',
headers: {
// Use the temporary Access Token that was issued for this event
'Authorization': 'Bearer ' + strAccessToken
}
}
console.time("=========MEASURE_servicem8=========")
let error = null;
await request(reqOpts, 5555).catch((e)=>{
error = e;
})
console.timeEnd("=========MEASURE_servicem8=========")
if (error){
callback(null, {err: "Unable to query form response records, received HTTP" + error}); /* or anything similar */
}
References
https://docs.aws.amazon.com/lambda/latest/dg/best-practices.html
https://docs.aws.amazon.com/lambda/latest/dg/nodejs-prog-model-handler.html
AWS Lambdas are not fast by nature (as of writing this answer). The startup time is not guaranteed, and it is known to be high.
If you need performance, you will not get it this way.

Socket.io fs.readFileSync for many connections at same time

This is not really a question, but I want to know whether what I did is correct, because it's working!
So, on to the question: I'm monitoring many interfaces (PPPoE clients) at the same time to track their traffic, by reading the statistics from Linux.
I'm using the npm packages express, socket.io and socket.io-stream.
Client:
var sessionsAccel = $('table.accel').DataTable([]);
sessionsAccel.on('preDraw', function() {
$('.interfaceAccel').each(function(i) {
var t = $(this).data();
sockets['socket' + t.id].disconnect();
delete speeds['tx_bytes' + t.id];
delete speeds['rx_bytes' + t.id];
});
})
.on('draw', function() {
$('.interfaceAccel').each(function(i) {
var t = $(this).data();
sockets['socket' + t.id] = io.connect('http://172.16.101.2:3000/status', {
query: 'interface=' + t.interface,
'forceNew': true
});
sockets['socket' + t.id].on("connect", function() {
ss(sockets['socket' + t.id]).on('sendStatus', function(stream, data) {
if (typeof speeds['tx_bytes' + t.id] != 'undefined') {
var speedtx = (data.tx_bytes - speeds['tx_bytes' + t.id]) * 8 / 1000;
var speedrx = (data.rx_bytes - speeds['rx_bytes' + t.id]) * 8 / 1000;
if (speedtx > 1000) {
speedtx = speedtx / 1000;
speedtx = speedtx.toFixed(2);
speedtx_info = speedtx + ' Mbps';
} else {
speedtx = speedtx.toFixed(2);
speedtx_info = speedtx + ' kbps';
}
if (speedrx > 1000) {
speedrx = speedrx / 1000;
speedrx = speedrx.toFixed(2);
speedrx_info = speedrx + ' Mbps';
} else {
speedrx = speedrx.toFixed(2);
speedrx_info = speedrx + ' kbps';
}
$('.tx_' + t.id).html(speedtx_info);
$('.rx_' + t.id).html(speedrx_info);
}
speeds['tx_bytes' + t.id] = data.tx_bytes;
speeds['rx_bytes' + t.id] = data.rx_bytes;
});
});
});
})
Server:
const app = require('express')();
const http = require('http').createServer(app);
const io = require('socket.io')(http);
const ss = require('socket.io-stream');
const path = require('path');
const fs = require('fs');
function getIntInfo(interface) {
if(fs.existsSync('/sys/class/net/'+ interface +'/statistics/tx_bytes')) {
var tx_bytes = fs.readFileSync('/sys/class/net/'+ interface +'/statistics/tx_bytes').toString();
var rx_bytes = fs.readFileSync('/sys/class/net/'+ interface +'/statistics/rx_bytes').toString();
var tx_packets = fs.readFileSync('/sys/class/net/'+ interface +'/statistics/tx_packets').toString();
var rx_packets = fs.readFileSync('/sys/class/net/'+ interface +'/statistics/rx_packets').toString();
return {tx_bytes : tx_bytes, rx_bytes : rx_bytes, tx_packets: tx_packets, rx_packets: rx_packets};
}else
return false;
}
io.of('/status').on('connection', function(socket) {
var query = socket.handshake.query['interface'];
var timer = setInterval(function() {
var stream = ss.createStream();
var info = getIntInfo(query);
ss(socket).emit('sendStatus', stream, info);
}, 1000);
socket.on('disconnect', function(){
socket.disconnect(true);
//console.info('disconnected user (id=' + socket.id + ').');
});
})
http.listen(3000, function(){
console.log('listening on *:3000');
});
That's it: every row of the DataTable (which is an interface) opens a socket connection and retrieves the statistics.
My question is, will this mess up my server with so much I/O from reading these files?
Since you're doing this every second for every connected client, it seems like you should probably cache this data so it doesn't have to be read from the disk or sent over the wire when it hasn't changed to save both server load and bandwidth usage. But, the details of how to best do that depend upon knowledge about your particular application that you haven't included.
You can at least use asynchronous I/O like this:
const util = require('util');
const fs = require('fs');
const readFile = util.promisify(fs.readFile);
function getIntInfo(interface) {
function readInfo(name) {
return readFile('/sys/class/net/'+ interface +'/statistics/' + name).then(data => data.toString());
}
return Promise.all([
readInfo('tx_bytes'),
readInfo('rx_bytes'),
readInfo('tx_packets'),
readInfo('rx_packets')
]).then(([tx_bytes, rx_bytes, tx_packets, rx_packets]) => {
return {tx_bytes, rx_bytes, tx_packets, rx_packets};
}).catch(err => {
console.log(err);
return false;
});
}
And, you have to stop the interval any time a client disconnects and change how it calls getIntInfo():
io.of('/status').on('connection', function(socket) {
var query = socket.handshake.query['interface'];
var timer = setInterval(function() {
getIntInfo(query).then(info => {
var stream = ss.createStream();
ss(socket).emit('sendStatus', stream, info);
});
}, 1000);
socket.on('disconnect', function(){
// stop the timer for this connection
clearInterval(timer);
});
});
Now that I think about it a bit more, you could improve scalability quite a bit by having just one interval timer that was reading the data and then sending that one set of data to all listening clients that had connected to the /status namespace. You would reduce the file reading from once per second for every client to just once per second for no matter how many clients.
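A rough sketch of that idea (variable names are mine, building on the promisified getIntInfo() above): keep one timer per interface, emit the result to every socket that joined a room for that interface, and drop socket.io-stream in favour of a plain emit since the payload is a small JSON object (the client would then listen with socket.on('sendStatus', ...)). Exact room bookkeeping varies a little between socket.io versions:
var statusNs = io.of('/status');
var timers = {}; // one polling timer per interface, shared by all watchers

statusNs.on('connection', function(socket) {
  var iface = socket.handshake.query['interface'];
  socket.join(iface); // group clients by the interface they watch
  if (!timers[iface]) {
    timers[iface] = setInterval(function() {
      getIntInfo(iface).then(function(info) {
        if (info) {
          statusNs.to(iface).emit('sendStatus', info); // one read per second, many receivers
        }
      });
    }, 1000);
  }
  socket.on('disconnect', function() {
    var room = statusNs.adapter.rooms[iface];
    if ((!room || room.length === 0) && timers[iface]) {
      clearInterval(timers[iface]); // nobody is watching this interface any more
      delete timers[iface];
    }
  });
});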

Create webrtc video, voice call and file transfer using node js step by step Q-A

If anyone needs to add or edit some information, they are welcome to.
Hi,
First of all, this question's main goal is to share my experience with other developers who are looking for good tutorials about WebRTC. I'm not going to explain WebRTC itself. Below I add source code for a working WebRTC video, voice call and file transfer example, tested by me.
I got the WebRTC information from https://webrtc.org/ and Node.js
from https://nodejs.org/en/
OK, let's get started.
Does WebRTC need an SSL certificate?
If you are experimenting on your local PC server, no need; but when you move it to a live server, yes, you need it.
How do I get an SSL certificate?
One of my friends helped me get the SSL certificate; there are plenty of tutorials for you to read and watch.
How do I get TURN and STUN servers?
If you go to production level you need to set up those servers yourself, but to test your project you can get STUN and TURN servers for free.
For a STUN server - https://gist.github.com/zziuni/3741933
For a TURN server - use this link and create a free one ( http://numb.viagenie.ca/ ).
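Once you have them, those free STUN/TURN details just go into the iceServers list handed to RTCPeerConnection on the client, something like this (placeholder credentials; the working code below uses the same shape):
var ICE_SERVERS = [
  { urls: "stun:stun.l.google.com:19302" },
  { urls: "turn:numb.viagenie.ca:3478", username: "your@email.com", credential: "your-password" }
];
var peer_connection = new RTCPeerConnection({ iceServers: ICE_SERVERS });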
I add my working code as an answer below.
This is working code for WebRTC with Node.js.
The code and the comments in it are not by me; they were already there when I got the code. I can't find the code's original owner, but I thank that developer. If someone finds the original author, please edit this and add a link to them :)
var express = require('express');
var socket = require('socket.io');
var app = express();
var fs = require('fs');
var https = require('https');
// link your https certicate path
var options = {
key: fs.readFileSync('/../../etc/ssl/private/apache-selfsigned.key'),
cert: fs.readFileSync('/../../etc/ssl/certs/apache-selfsigned.crt')
};
var main = https.createServer(options, app);
var server = main.listen(8443, function() {
console.log('server up and running at %s port', 8443);
});
/*var server = app.listen(443, function () {
});*/
app.use(express.static('public'));
var io = socket(server);
/*************************/
/*** INTERESTING STUFF ***/
/*************************/
var channels = {};
var sockets = {};
/**
* Users will connect to the signaling server, after which they'll issue a "join"
* to join a particular channel. The signaling server keeps track of all sockets
* who are in a channel, and on join will send out 'addPeer' events to each pair
* of users in a channel. When clients receive the 'addPeer' even they'll begin
* setting up an RTCPeerConnection with one another. During this process they'll
* need to relay ICECandidate information to one another, as well as SessionDescription
* information. After all of that happens, they'll finally be able to complete
* the peer connection and will be streaming audio/video between eachother.
*/
io.on('connection', function (socket) {
var channel;
socket.channels = {};
sockets[socket.id] = socket;
console.log("[" + socket.id + "] connection accepted");
socket.on('disconnect', function () {
for (var channel in socket.channels) {
part(channel);
}
console.log("[" + socket.id + "] disconnected");
delete sockets[socket.id];
});
socket.on('join-room', function (config) {
if (config) {
channel = config.channel;
var userdata = config.userdata;
var userID = config.userdata.userID;
if (channel in socket.channels) {
console.log("[" + socket.id + "] ERROR: already joined ", channel);
return;
}
if (!(channel in channels)) {
channels[channel] = {};
}
for (id in channels[channel]) {
channels[channel][id].emit('addPeer-room', {'peer_id': socket.id, 'should_create_offer': false});
socket.emit('addPeer-room', {'peer_id': id, 'should_create_offer': true});
console.log("what is this id -> ", id);
}
console.log(config.userdata.name, ' joining room', config.channel);
socket.join(config.channel);
socket.broadcast.in(config.channel).emit('room-users', config);
channels[channel][socket.id] = socket;
socket.channels[channel] = channel;
}
});
function part(channel) {
console.log("[" + socket.id + "] part ");
if (!(channel in socket.channels)) {
console.log("[" + socket.id + "] ERROR: not in ", channel);
return;
}
delete socket.channels[channel];
delete channels[channel][socket.id];
for (id in channels[channel]) {
channels[channel][id].emit('removePeer', {'peer_id': socket.id});
socket.emit('removePeer', {'peer_id': id});
}
}
socket.on('part', part);
socket.on('relayICECandidate-room', function (config) {
var peer_id = config.peer_id;
var ice_candidate = config.ice_candidate;
console.log("[" + socket.id + "] relaying ICE candidate to [" + peer_id + "] ", ice_candidate);
if (peer_id in sockets) {
sockets[peer_id].emit('iceCandidate-room', {'peer_id': socket.id, 'ice_candidate': ice_candidate});
}
});
socket.on('relaySessionDescription-room', function (config) {
var peer_id = config.peer_id;
var session_description = config.session_description;
console.log("[" + socket.id + "] relaying session description to [" + peer_id + "] ", session_description);
if (peer_id in sockets) {
sockets[peer_id].emit('sessionDescription-room', {
'peer_id': socket.id,
'session_description': session_description
});
}
});
// this for file transfer
socket.on('file-send-room', function (file) {
console.log(file);
socket.to(channel).emit('file-out-room', file);
});
socket.on('file-send-room-result', function (file) {
console.log(file);
socket.to(channel).emit('file-out-room-result', file);
});
});
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Title</title>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.10.2/jquery.min.js"></script>
<script src="https://cdn.socket.io/socket.io-1.4.5.js"></script>
<script>
function getParameterByName(name, url) {
if (!url) url = window.location.href;
name = name.replace(/[\[\]]/g, "\\$&");
var regex = new RegExp("[?&]" + name + "(=([^&#]*)|&|#|$)"),
results = regex.exec(url);
if (!results) return null;
if (!results[2]) return '';
return decodeURIComponent(results[2].replace(/\+/g, " "));
}
var fileInput = document.querySelector('input#fileInput');
var downloadAnchor = document.querySelector('a#download');
// this function use to get url parameters
var room = getParameterByName('room');
var userID = getParameterByName('userid');
var name = getParameterByName('name');
/** CONFIG **/
var SIGNALING_SERVER = "https://xxx.xx.xx.xxx:8443"; //your node server addres or IP adress
var USE_AUDIO = true;
var USE_VIDEO = true;
var MUTE_AUDIO_BY_DEFAULT = false;
/** You should probably use a different stun server doing commercial stuff **/
/** Also see: https://gist.github.com/zziuni/3741933 **/
var ICE_SERVERS = [
{urls: "stun:stun.l.google.com:19302"},{
urls: 'turn:numb.viagenie.ca:3478',
credential: '12344', //your password
username: 'your#email.com'
}
];
var socket = null;
/* our socket.io connection to our webserver */
var local_media_stream = null;
/* our own microphone / webcam */
var peers = {};
/* keep track of our peer connections, indexed by peer_id (aka socket.io id) */
var peer_media_elements = {};
/* keep track of our <video>/<audio> tags, indexed by peer_id */
$(document).ready(function (a) {
socket = io(SIGNALING_SERVER);
socket = io();
//----------------------------------------------------------------------->>>>> Files Send Start
const BYTES_PER_CHUNK = 1200;
var file;
var currentChunk;
var fileInput = $('input[type=file]');
var fileReader = new FileReader();
function readNextChunk() {
var start = BYTES_PER_CHUNK * currentChunk;
var end = Math.min(file.size, start + BYTES_PER_CHUNK);
fileReader.readAsArrayBuffer(file.slice(start, end));
}
fileReader.onload = function () {
socket.emit('file-send-room-result', fileReader.result);
//p2pConnection.send( fileReader.result );
currentChunk++;
if (BYTES_PER_CHUNK * currentChunk < file.size) {
readNextChunk();
}
};
fileInput.on('change', function () {
file = fileInput[0].files[0];
currentChunk = 0;
// send some metadata about our file
// to the receiver
socket.emit('file-send-room', JSON.stringify({
fileName: file.name,
fileSize: file.size
}));
readNextChunk();
});
var incomingFileInfo;
var incomingFileData;
var bytesReceived;
var downloadInProgress = false;
socket.on('file-out-room', function (data) {
startDownload(data);
console.log(data);
});
socket.on('file-out-room-result', function (data) {
progressDownload(data);
console.log(data); });
function startDownload(data) {
incomingFileInfo = JSON.parse(data.toString());
incomingFileData = [];
bytesReceived = 0;
downloadInProgress = true;
console.log('incoming file <b>' + incomingFileInfo.fileName + '</b> of ' + incomingFileInfo.fileSize + ' bytes');
}
function progressDownload(data) {
bytesReceived += data.byteLength;
incomingFileData.push(data);
console.log('progress: ' + ((bytesReceived / incomingFileInfo.fileSize ) * 100).toFixed(2) + '%');
if (bytesReceived === incomingFileInfo.fileSize) {
endDownload();
}
}
function endDownload() {
downloadInProgress = false;
var blob = new Blob(incomingFileData);
var a = document.createElement("a");
document.body.appendChild(a);
a.style = "display: none";
var blob = new Blob(incomingFileData);
var url = window.URL.createObjectURL(blob);
a.href = url;
a.download = incomingFileInfo.fileName;
a.click();
window.URL.revokeObjectURL(url);
}
//==================================================================<<< Filse Send End
//------------------------ Funtion
function join_chat_channel(channel, userdata) {
socket.emit('join-room', {"channel": channel, "userdata": userdata});
}
socket.on('connect', function (userID) {
console.log("Connected to signaling server");
setup_local_media(function () {
/* once the user has given us access to their
* microphone/camcorder, join the channel and start peering up */
join_chat_channel(room, {'name': name, 'userID': userID});
});
});
socket.on('room-user', function (data) {
console.log(data);
$("#online-user").append('<tr><td>Name = ' + data.userdata.name + ' <br> User ID= ' + data.userdata.userID + '</td><td><button class="call" id="' + data.userdata.userID + '">Call</button></td></tr>');
});
$('body').on('click', '.call', function () {
var callerID = $(this).attr('id');
socket.emit('call', {"callToId": callerID, "callFromId": userID});
});
/**
* When we join a group, our signaling server will send out 'addPeer' events to each pair
* of users in the group (creating a fully-connected graph of users, ie if there are 6 people
* in the channel you will connect directly to the other 5, so there will be a total of 15
* connections in the network).
*/
socket.on('addPeer-room', function (config) {
console.log('Signaling server said to add peer:', config);
var peer_id = config.peer_id;
if (peer_id in peers) {
/* This could happen if the user joins multiple channels where the other peer is also in. */
console.log("Already connected to peer ", peer_id);
return;
}
var peer_connection = new RTCPeerConnection(
{"iceServers": ICE_SERVERS},
{"optional": [{"DtlsSrtpKeyAgreement": true}]} /* this will no longer be needed by chrome
* eventually (supposedly), but is necessary
* for now to get firefox to talk to chrome */
);
peers[peer_id] = peer_connection;
peer_connection.onicecandidate = function (event) {
if (event.candidate) {
socket.emit('relayICECandidate-room', {
'peer_id': peer_id,
'ice_candidate': {
'sdpMLineIndex': event.candidate.sdpMLineIndex,
'candidate': event.candidate.candidate
}
});
}
}
peer_connection.onaddstream = function (event) {
console.log("onAddStream", event);
var remote_media = USE_VIDEO ? $("<video>") : $("<audio>");
remote_media.attr("autoplay", "autoplay");
if (MUTE_AUDIO_BY_DEFAULT) {
remote_media.attr("muted", "true");
}
remote_media.attr("controls", "");
peer_media_elements[peer_id] = remote_media;
$('body').append(remote_media);
attachMediaStream(remote_media[0], event.stream);
}
/* Add our local stream */
peer_connection.addStream(local_media_stream);
/* Only one side of the peer connection should create the
* offer, the signaling server picks one to be the offerer.
* The other user will get a 'sessionDescription' event and will
* create an offer, then send back an answer 'sessionDescription' to us
*/
if (config.should_create_offer) {
console.log("Creating RTC offer to ", peer_id);
peer_connection.createOffer(
function (local_description) {
console.log("Local offer description is: ", local_description);
peer_connection.setLocalDescription(local_description,
function () {
socket.emit('relaySessionDescription-room',
{'peer_id': peer_id, 'session_description': local_description});
console.log("Offer setLocalDescription succeeded");
},
function () {
Alert("Offer setLocalDescription failed!");
}
);
},
function (error) {
console.log("Error sending offer: ", error);
});
}
});
/**
* Peers exchange session descriptions which contains information
* about their audio / video settings and that sort of stuff. First
* the 'offerer' sends a description to the 'answerer' (with type
* "offer"), then the answerer sends one back (with type "answer").
*/
socket.on('sessionDescription-room', function (config) {
console.log('Remote description received: ', config);
var peer_id = config.peer_id;
var peer = peers[peer_id];
var remote_description = config.session_description;
console.log(config.session_description);
var desc = new RTCSessionDescription(remote_description);
var stuff = peer.setRemoteDescription(desc,
function () {
console.log("setRemoteDescription succeeded");
if (remote_description.type == "offer") {
console.log("Creating answer");
peer.createAnswer(
function (local_description) {
console.log("Answer description is: ", local_description);
peer.setLocalDescription(local_description,
function () {
socket.emit('relaySessionDescription-room',
{'peer_id': peer_id, 'session_description': local_description});
console.log("Answer setLocalDescription succeeded");
},
function () {
Alert("Answer setLocalDescription failed!");
}
);
},
function (error) {
console.log("Error creating answer: ", error);
console.log(peer);
});
}
},
function (error) {
console.log("setRemoteDescription error: ", error);
}
);
console.log("Description Object: ", desc);
});
/**
* The offerer will send a number of ICE Candidate blobs to the answerer so they
* can begin trying to find the best path to one another on the net.
*/
socket.on('iceCandidate-room', function (config) {
var peer = peers[config.peer_id];
var ice_candidate = config.ice_candidate;
peer.addIceCandidate(new RTCIceCandidate(ice_candidate));
});
/**
* When a user leaves a channel (or is disconnected from the
* signaling server) everyone will recieve a 'removePeer' message
* telling them to trash the media channels they have open for those
* that peer. If it was this client that left a channel, they'll also
* receive the removePeers. If this client was disconnected, they
* wont receive removePeers, but rather the
* signaling_socket.on('disconnect') code will kick in and tear down
* all the peer sessions.
*/
socket.on('removePeer-room', function (config) {
console.log('Signaling server said to remove peer:', config);
var peer_id = config.peer_id;
if (peer_id in peer_media_elements) {
peer_media_elements[peer_id].remove();
}
if (peer_id in peers) {
peers[peer_id].close();
}
delete peers[peer_id];
delete peer_media_elements[config.peer_id];
});
});
function setup_local_media(callback, errorback) {
if (local_media_stream != null) { /* ie, if we've already been initialized */
if (callback) callback();
return;
}
/* Ask user for permission to use the computers microphone and/or camera,
* attach it to an <audio> or <video> tag if they give us access. */
console.log("Requesting access to local audio / video inputs");
navigator.getUserMedia = ( navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia);
attachMediaStream = function (element, stream) {
console.log('DEPRECATED, attachMediaStream will soon be removed.');
element.srcObject = stream;
};
navigator.getUserMedia({"audio": USE_AUDIO, "video": USE_VIDEO},
function (stream) { /* user accepted access to a/v */
console.log("Access granted to audio/video");
local_media_stream = stream;
var local_media = USE_VIDEO ? $("<video>") : $("<audio>");
local_media.attr("autoplay", "autoplay");
local_media.attr("muted", "true");
/* always mute ourselves by default */
local_media.attr("controls", "");
$('body').append(local_media);
attachMediaStream(local_media[0], stream);
if (callback) callback();
},
function () { /* user denied access to a/v */
console.log("Access denied for audio/video");
alert("You chose not to provide access to the camera/microphone, demo will not work.");
if (errorback) errorback();
});
}
</script>
</head>
<body>
<form id="fileInfo">
<input type="file" id="fileInput" name="files"/>
</form>
<a id="download"></a>
</body>
</html>

How to get synchronous SerialPort Read in NodeJS

I'm currently working on an automation project (based on Arduino <-> Serial <-> Raspberry), and I seem to be stuck on the serial interface.
Basically, I'm using Express to read/write parameters on the Arduino via URLs with this kind of syntax (http://localhost:3000/parameter/6 or http://localhost:3000/parameter/6/set?value=10).
Since I want a simple, short result (as an API), I decided to render a simple JSON object on each request. There is also no way for me to use client-side scripts/frameworks like Socket.io/jQuery/... for this purpose (I'll often call those URLs from curl/wget/[other HTML parsers]).
Now, writing to the port is no problem, but for reading I'd like to wait for the buffer to be returned from serialport.on('data', function(buffer) { ... }) before rendering the page.
As serialport.on('data', ...) only seems to fire once per request, the only way I've found so far is to redirect to the same page until the buffer has been read, which seems like a nasty way of getting things done, and it's also incomplete ...
Here's a bit of code to get an idea of the actual logic :
Library :
// -> lib/serial.js - LIBRARY
var SerialPort = require("serialport");
var data = {'state': 0};
var serialPort = new SerialPort(port, {
baudrate: 115200,
parser: SerialPort.parsers.readline('\r\n'),
});
serialPort.on('open', function() {
console.log('Serial port Open');
serialPort.on('data', function(buffer) { data.buffer = buffer, data.state = 1 });
});
function readConfig(cmd, paramNb) {
var cmd = String.fromCharCode(cmd);
var param = String.fromCharCode(paramNb);
if (serialPort.isOpen() == true) {
serialPort.write(cmd + param + '\n', function(err) {
if (err) {
return console.log('Error on write: ', err.message);
}
console.log('message written');
});
}
return data;
};
function writeConfig(cmd, paramNb, value) {
var cmd = String.fromCharCode(cmd);
var param = String.fromCharCode(paramNb);
var value = String.fromCharCode(value);
if (serialPort.isOpen() == true) {
serialPort.write(cmd + param + value + '\n', function(err) {
if (err) {
return console.log('Error on write: ', err.message);
}
console.log('message written');
});
}
};
exports.readConfig = readConfig;
exports.writeConfig = writeConfig;
Route :
// -> parameter.js - ROUTE
var express = require('express');
var router = express.Router();
var serial = require('../../lib/serial');
var sleep = require('sleep');
/* SET Parameter. */
router.get('/:id/set', function(req, res) {
var id = req.params.id;
var value = req.query.value;
serial.writeConfig('b', id, value);
res.json({'result': value, 'id': id});
});
/* GET Parameter. */
router.get('/:id', function(req, res) {
var id = req.params.id;
var buffer = serial.readConfig('a', id);
if (buffer.state != 0) {
res.json({'result': buffer});
}
else {
sleep.usleep(100000);
res.redirect('/api/parameter/' + id); // <- Nasty Bulls**t
}
});
module.exports = router;
The first idea I came up with was to find a synchronous way to read the port so I can call something like this (pseudo-code) :
while (buffer == '') { var buffer = serial.read; }
res.json({buffer});
Anyway thanks everyone.
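For reference, one non-blocking direction (a rough sketch only, not tested against the hardware above; the helper name readConfigAsync is hypothetical) is to resolve a Promise from the next 'data' event, so the Express route can wait for the reply instead of redirecting:
// lib/serial.js - promise-based read built on the same serialPort instance as above
function readConfigAsync(cmd, paramNb, timeoutMs) {
  return new Promise(function(resolve, reject) {
    function onData(buffer) {
      clearTimeout(timer);
      resolve(buffer);
    }
    var timer = setTimeout(function() {
      serialPort.removeListener('data', onData);
      reject(new Error('Serial read timed out'));
    }, timeoutMs || 1000);
    serialPort.once('data', onData); // resolve on the next parsed line
    // same command framing as readConfig()/writeConfig() above
    serialPort.write(String.fromCharCode(cmd) + String.fromCharCode(paramNb) + '\n', function(err) {
      if (err) {
        clearTimeout(timer);
        serialPort.removeListener('data', onData);
        reject(err);
      }
    });
  });
}

// parameter.js - the route then waits for the promise instead of redirecting
router.get('/:id', function(req, res) {
  readConfigAsync('a', req.params.id, 1000)
    .then(function(buffer) { res.json({ result: buffer.toString() }); })
    .catch(function(err) { res.status(504).json({ error: err.message }); });
});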

Time web requests in node.js

Given Node's async nature it is difficult to time a series of web requests. How would I fire off 100 web requests and figure out how long each individual request takes? Knowing the OS will only allow a few concurrent web requests, how do I get the timing for each individual request, removing the time spent waiting for the other connections to complete? I was hoping the socket event was fired when the request launched, but it seems the socket event fires only after the connection has been established.
var http = require('http');
var urls = [
'/cameron',
'/sara',
'...'
];
// Time a url collection.
function timeUrl(url, calback) {
var options = {
host: 'www.examplesite.com',
port: 80,
path: ''
};
var times = [];
times.push({'text': 'start', 'time':Date.now()});
http.get(options, function(res) {
times.push({'text': 'response', 'time':Date.now()});
var result = '';
res.on('data', function(chunk) {
result += chunk.length ;
// result += chunk;
});
res.on('end', function() {
times.push({'text': 'end', 'time': Date.now(), 'body': result, 'statusCode': res.statusCode}); // ,
calback(times);
});
}).on('error', function(e) {
calback();
console.log("Got error: " + e.message);
times.push({'error':Date.now()});
}).on('socket', function (response) {
times.push({'text': 'socket', 'time':Date.now()});
});
}
for (var i = 0; i < urls.length; i++) {
var url = urls[i];
timeUrl(url, function(times) {
console.log(url);
for (var i = 0; i < times.length; i++) {
console.log(times[i].text, times[i].time - times[1].time , 'ms');
}
console.log('statusCode:', times[times.length -1].statusCode, 'Response Size:', times[times.length -1].body);
console.log('-');
});
}
If you're worried about OS concurrency, just introduce a maximum concurrency (throttling) into your requests instead of trying to guess when exactly the OS has started each one. I'm skipping over some minor details like error handling, and I'm using the excellent async.js library:
var http = require('http')
, async = require('async')
, CONCURRENCY = 5 // edit to fit your OS concurrency limit
, results = {}
, urls = [
'/cameron',
'/sara',
'/...'
];
// Time a url collection.
function timeUrl(url, callback) {
var options = { host: 'www.examplesite.com', port: 80 }
, start = Date.now()
, socket = null;
options.path = url;
http.get(options, function(res) {
var response = Date.now()
, size = 0;
res.on('data', function(chunk) { size += chunk.length; });
res.on('end', function() {
var end = Date.now();
results[url] = { start: start, socket: socket, response: response, end: end, size: size };
callback();
});
}).on('error', function(e) {
results[url] = { start: start, socket: socket, error: Date.now(), stack: e };
callback();
}).on('socket', function () {
socket = Date.now();
});
}
async.forEachLimit(urls, CONCURRENCY, timeUrl, function() {
console.log(JSON.stringify(results));
});
For ease of use, doing what you seem to want to do, I've not seen anything beat Nodetime.
