How to convert jquery function to nodejs module - node.js

I built a web page with Node.js, Koa, and EJS, and in webpage.ejs there's a function like the following
<a id="createACode"></a>
<script src="https://code.jquery.com/jquery-3.3.1.min.js"></script>
<script>
var code;
// Generate a random 6-character code and display it as the link's text.
function createCode() {
  code = "";
  var codeLength = 6;
  var checkCode = document.getElementById("createACode");
  // Character pool; ambiguous glyphs (0, 1, I, L, O) are excluded.
  var random = new Array(2,3,4,5,6,7,8,9,'A','B','C','D','E','F','G','H','J','K','M','N','P','Q','R',
  'S','T','U','V','W','X','Y','Z');
  for (var i = 0; i < codeLength; i++) {
    // Index by the pool's actual length instead of a hard-coded 30,
    // so editing the pool cannot cause undefined entries.
    var index = Math.floor(Math.random() * random.length);
    code += random[index];
  }
  // Use the element we actually looked up; the original relied on the
  // implicit global created from the element's id and left checkCode unused.
  checkCode.textContent = code;
}
$(document).ready(function() {
  $("#createACode").click(function() {
    createCode();
  });
});
</script>
When I click the link with ID createACode, its text will be changed, and app.js looks like this
// Koa router fragment (part of a method chain continued from outside this
// snippet): renders webpage.ejs with an empty locals object.
.get('/webpage', async ( ctx )=>{
await ctx.render('webpage', {
});
})
If I want to convert this function createCode into a Node.js module, how do I convert it, and how do I use it? Many thanks.

Related

How to parse with parse5?

Take parse5 as an example. Its parse function returns a document. But a querySelector function doesn't exist on it. https://developer.mozilla.org/en-US/docs/Web/API/Document.
import fetch from 'node-fetch';
import { parse } from 'parse5';
(async () => {
const options = {
redirect: "manual"
};
const response = await fetch('https://google.com', options);
const dom = parse(await response.text());
// Logs `undefined`: parse5's "document" is a plain tree structure, not a
// Web API Document, so it has no querySelector method — demonstrating the
// problem this question is about.
console.log(dom.querySelector('title'));
})();
The object that the parse function returns is called a Document, but it's not the same as the Web API Document you'd find in a browser. Instead, it's the root of a very simple tree data structure. The documentation for this data structure is at https://github.com/inikulin/parse5/blob/master/packages/parse5/docs/tree-adapter/default/interface-list.md
Despite its simplicity, the tree has everything you need to parse the document. It's likely that you'll have to recursively search for the element you're looking for. Try this code as an example:
const fetch = require('node-fetch');
const { parse } = require('parse5');
// Depth-first search of a parse5 tree: returns the first node whose tagName
// matches `tag`, or null if no descendant matches.
const getDescendantByTag = (node, tag) => {
  for (const child of node.childNodes ?? []) {
    // Check the child itself before descending into its subtree,
    // preserving document order.
    if (child.tagName === tag) return child;
    const match = getDescendantByTag(child, tag);
    if (match) return match;
  }
  return null;
};
// Fetch Google's home page (without following redirects) and print the
// <title> node found by the recursive tree search defined above.
fetch('https://google.com', { redirect: 'manual' })
.then((response) => response.text())
.then((text) => {
console.log(getDescendantByTag(parse(text), 'title'));
});
I'm not sure, but I think Node.js has no implementation for querySelector.
You can use polyfill to solve this, like this implementation at Github
/**
 * Polyfills the querySelector and querySelectorAll methods.
 * #see https://gist.github.com/Fusselwurm/4673695
 */
(function () {
  var sheet;

  // Core matcher: tags every element matching `selector` with a bogus
  // style rule, then scans document.all for elements carrying that rule.
  function collectMatches(selector, limit) {
    var elements = document.all;
    var total = elements.length;
    var matches = [];
    var idx;
    sheet.addRule(selector, "foo:bar");
    for (idx = 0; idx < total; idx += 1) {
      if (elements[idx].currentStyle.foo === "bar") {
        matches.push(elements[idx]);
        if (matches.length > limit) {
          break;
        }
      }
    }
    sheet.removeRule(0);
    return matches;
  }

  // Nothing to do when the browser already provides the selector APIs.
  if (document.querySelectorAll || document.querySelector) {
    return;
  }
  sheet = document.createStyleSheet();
  document.querySelectorAll = document.body.querySelectorAll = function (selector) {
    return collectMatches(selector, Infinity);
  };
  document.querySelector = document.body.querySelector = function (selector) {
    return collectMatches(selector, 1)[0] || null;
  };
}());
Polyfills help with browser support and implementations for NodeJS. You can use same technique for other functions (in example, to support non-existent functions in IE7 in a React application).
NOTE: Also available as npm package, see https://www.npmjs.com/package/polyfill-queryselector
hast-util-select can help
/*
npm i parse5 hast-util-from-parse5 hast-util-to-html hast-util-select
*/
import {parse} from 'parse5'
import {fromParse5} from 'hast-util-from-parse5'
import {toHtml} from 'hast-util-to-html'
import {matches, select, selectAll} from 'hast-util-select'
// Convert an HTML string to a hast tree (parse5 AST -> hast).
function parseHtml(source) {
const p5ast = parse(source, { sourceCodeLocationInfo: true })
return fromParse5(p5ast)
}
const html = `
<html>
<body>
<div class="some-class">find me 1</div>
<div class="some-class">find me 2</div>
<div>ignore me</div>
</body>
</html>
`
const tree = parseHtml(html)
// selectAll accepts CSS selectors against the hast tree, giving
// querySelector-like behavior in Node.
for (const div of selectAll("div.some-class", tree)) {
//console.dir(div)
console.log(toHtml(div))
}
gives
<div class="some-class">find me 1</div>
<div class="some-class">find me 2</div>

Socket.io online users list doesn't work properly in production

I'm trying to implement an online users page using Socket.io in an Express web server. The code I'm using is listed below. When I'm using the website on the local network it works fine, but in production Socket.io behaves in an inexplicable way.
app.js
...
// Express app with an HTTP server wrapped by Socket.io.
const app = express()
const http = require('http').createServer(app)
const io = require('socket.io')(http)
// In-memory presence list, one entry per username seen.
// NOTE(review): if PM2 runs this in cluster mode, each worker process keeps
// its own copy of this array, which would explain users missing in
// production — confirm the PM2 exec mode.
let activeUsers = []
...
io.on('connection', function(socket){
// Username reported by this socket via 'user viewing'; undefined until then.
let userName
socket.on('user viewing', function(user){
// Refresh the user's last-activity timestamp, or register a new entry.
const existing = activeUsers.find(function(entry) {
return entry.username == user
})
if (existing) {
existing.last_activity = MDate.getDateTime()
} else {
activeUsers.push({ "username": user, "last_activity": MDate.getDateTime() })
}
userName = user
io.emit('active users', activeUsers)
})
socket.on('disconnect', function(){
// Mark the departing user inactive ('NULL' last_activity is the sentinel
// the client uses to hide a row). The original indexed with the leaked
// loop variable `x` instead of `userIndex`, which only worked by accident
// of `var` scoping.
const userIndex = activeUsers.findIndex(function(entry) {
return entry.username == userName
})
if (userIndex !== -1) {
activeUsers[userIndex].last_activity = 'NULL'
}
io.emit('active users', activeUsers)
})
})
...
In every other page I have implemented the following client side JavaScript code
<script>
// Announce the logged-in user (interpolated server-side by EJS) so the
// server can add them to the active-users list.
var socket = io();
socket.emit('user viewing', '<%= user.username %>');
</script>
activeUsers.ejs
<script src="/socket.io/socket.io.js"></script>
<script>
var socket = io();
// Rebuild the users table from scratch on every presence broadcast.
socket.on('active users', function(activeUsers){
  var $users = $('#users');
  // Use jQuery consistently (the original mixed getElementById/innerHTML
  // with jQuery append on the same element).
  $users.empty();
  for (var x = 0; x < activeUsers.length; x++) {
    // 'NULL' last_activity marks users that have disconnected; skip them.
    if (activeUsers[x].last_activity != 'NULL') {
      // Build cells with .text() so usernames are HTML-escaped instead of
      // being concatenated into markup (avoids XSS via a crafted username).
      var $row = $('<tr>')
        .append($('<td>').text(activeUsers[x].username))
        .append($('<td>').text(activeUsers[x].last_activity));
      $users.append($row);
    }
  }
});
</script>
When I am on localhost the site shows all active users properly. But when I'm using the site in production, the site shows only a few of the users who are online, even though others are online too. Sometimes it doesn't even show the account that is viewing the page. I'm using PM2 for production. What could be the cause of this?

How to get all fonts used on a page using node.js?

I need to crawl all the pages on a site (the crawling part works fine), so I need to run THIS script on my server using Node.js. I tried implementing the following logic:
var request = require('request');
var cheerio = require('cheerio');
// Note: url-parse shadows Node's built-in global URL class in this file.
var URL = require('url-parse');
var jsdom = require("jsdom");
var { JSDOM } = jsdom;
var START_URL = "http://balneol.com/";
var SEARCH_FONT = "helvetica";
var MAX_PAGES_TO_VISIT = 100000;
// Crawl state: visited-URL set, visit counter, LIFO queue of pending URLs.
var pagesVisited = {};
var numPagesVisited = 0;
var pagesToVisit = [];
var url = new URL(START_URL);
var baseUrl = url.protocol + "//" + url.hostname;
// Seed the queue with the start URL and begin crawling.
pagesToVisit.push(START_URL);
crawl();
// Pop the next queued URL and visit it, or stop when the budget or the
// queue is exhausted.
function crawl() {
  if (numPagesVisited >= MAX_PAGES_TO_VISIT) {
    console.log("Reached max limit of number of pages to visit.");
    return;
  }
  var nextPage = pagesToVisit.pop();
  // Guard the empty queue: the original passed `undefined` to visitPage
  // and crashed once every reachable page had been crawled.
  if (nextPage === undefined) {
    console.log("No more pages to visit.");
    return;
  }
  if (nextPage in pagesVisited) {
    // Already visited this page; move on to the next queued URL.
    crawl();
  } else {
    // New page we haven't visited.
    visitPage(nextPage, crawl);
  }
}
// Fetch one page, scan it for the target font, harvest its internal links,
// then continue the crawl via `callback`.
function visitPage(url, callback) {
  // Record the visit before fetching so this URL is never re-queued.
  pagesVisited[url] = true;
  numPagesVisited++;
  console.log("Visiting page " + url);
  request(url, function(error, response, body) {
    // On a network error `response` is undefined — the original crashed on
    // response.statusCode here. Log and keep crawling.
    if (error || !response) {
      console.log("Request failed for " + url + (error ? ": " + error.message : ""));
      callback();
      return;
    }
    // Check status code (200 is HTTP OK)
    console.log("Status code: " + response.statusCode);
    if (response.statusCode !== 200) {
      callback();
      return;
    }
    // Build a jsdom window for style inspection and a cheerio handle for
    // link harvesting (the original commented out the cheerio load but
    // still passed `$` to collectInternalLinks — a ReferenceError).
    var { window } = new JSDOM(body);
    var $ = cheerio.load(body);
    var helveticaFound = searchForHelvetica(window, 'font-family');
    if (helveticaFound) {
      console.log('Word ' + SEARCH_FONT + ' found at page ' + url);
    } else {
      collectInternalLinks($);
    }
    // Continue the crawl in every case — the original never invoked the
    // callback on success, so the crawl stalled after one page.
    callback();
  });
}
// Scan every element under window.document.body and report whether the
// target font (global SEARCH_FONT) appears in any resolved `css` property.
// Returns true/false — the original returned nothing, so the caller's
// `helveticaFound` was always undefined; it also referenced an undeclared
// `verbose`, which threw a ReferenceError.
function searchForHelvetica( window , css) {
  // Older jsdom builds may lack getComputedStyle; fall back to currentStyle.
  if (typeof getComputedStyle == "undefined") {
    getComputedStyle = function(elem) {
      return elem.currentStyle;
    };
  }
  var nodes = window.document.body.getElementsByTagName('*');
  for (var i = 0; i < nodes.length; i++) {
    var who = nodes[i];
    if (!who.style) {
      continue;
    }
    // Inline style wins; otherwise consult the computed style for `css`.
    var val = who.style.fontFamily || getComputedStyle(who, '')[css];
    if (val && String(val).toLowerCase().indexOf(SEARCH_FONT.toLowerCase()) !== -1) {
      return true;
    }
  }
  return false;
}
// Queue every same-site link (href starting with '/') found on the current
// page for a future crawl visit.
function collectInternalLinks($) {
  var internalLinks = $("a[href^='/']");
  console.log("Found " + internalLinks.length + " relative links on page");
  internalLinks.each(function() {
    var href = $(this).attr('href');
    pagesToVisit.push(baseUrl + href);
  });
}
If you see my visit page function you will see the below two lines of code:
var { window } = new JSDOM(body);
var helveticaFound = searchForHelvetica(window, 'font-family');
as you can see on the 2nd line i am passing the window object to the searchForHelvetic function.
In my searchForHelvetica function, if I console.log(nodes[i]); I don't get the HTML element, and hence the rest of the script doesn't quite run as expected. Does the jsdom window differ from the window object in the browser? How do I get the script working — i.e., basically use the window object to run through all the pages on the site and print out all the fonts used on each page?
EDIT::-
To break the problem down to a micro level, if i console.log(who); inside searchForHelvetica function , i get the following result:
HTMLElement {}
HTMLDivElement {}
HTMLDivElement {}
HTMLDivElement {}
HTMLAnchorElement {}
HTMLImageElement {}
HTMLDivElement {}
HTMLFormElement {}
HTMLDivElement {}
HTMLLabelElement {}
HTMLInputElement {}
HTMLButtonElement {}
HTMLButtonElement {}
HTMLSpanElement {}
etc..
But if I were to do the same in a web browser the result would be different, e.g.
nodes = window.document.body.getElementsByTagName('*');
console.log(node[1]) // <div id="mobile-menu-box" class="hide">...</div>
How do i get a similar result in node.js ?

xls-parser returning an empty object after parsing a non-empty file

I'm using xls-parser to parse a csv file. I'm calling the onFileSelection API of the parser in handleFileChange event of FormControl component. But, I'm getting an empty object every time as a response from this API.
Code:
<FormControl
type="file"
name="myFile"
onChange={this.handleFileChange}
/>
'use strict'
var xlsParser = require('xls-parser');
handleFileChange(event) {
var file = _.get(event, 'taget.files[0], null);
var data = xlsParser.onFileSelection(file);
console.log(data); // This data is empty
}
xls-parser Code:
'use strict';
var sheetJs = require('xlsx');
// Module-level result holder; reset on every call.
var parsedXls;
// Starts an asynchronous FileReader parse of `file`.
// NOTE: this returns `parsedXls` immediately, BEFORE the 'loadend' listener
// below has run — which is exactly why callers always receive an empty
// array. The parsed content only exists later, inside onLoadEvent.
exports.onFileSelection = function(file) {
parsedXls = [];
var reader = new FileReader();
reader.readAsArrayBuffer(file);
reader.addEventListener('loadend', function() {
var binary = '';
var bytes = new Uint8Array(reader.result);
console.log("Entered");
// Convert the ArrayBuffer to a binary string one byte at a time.
for (var i = 0; i < bytes.byteLength; i++) {
binary += String.fromCharCode(bytes[i]);
}
// The return value of an event listener is discarded; this result never
// reaches the original caller.
return onLoadEvent(binary, reader);
});
return parsedXls;
};
The 'loadend' event is triggered only after handleFileChange function is completed. I want to get the file parsed within the handleFileChange function. How is the 'loadend' event being triggered here and can I get it triggered in handleFileChange function?

Looking for a better way to do real time stock updates

I started with this project for my real time stock price update project.
This project works well when I am working with one or two stocks, but not when I want to update the price of hundreds of stocks at the same time. I'd like to know if I'm doing this the right way. Right now I fetch the data for all stocks in a for loop on the server, but the price update is very very slow. I'd like to know how to improve this.
I'd like to know how to update hundreds of stock prices each second, without affecting server performance.
I don't know if I should be sending the server a list of stocks I need from the client like: var ids = [ '', '', '', ... ], or if I can run those ids from the server itself.
Which is best: Stocks request from client to server, or from server to client?
Note: I will be using a different url to get stock price.
My server side code :
////
// CONFIGURATION SETTINGS
///
var PORT = 4000;
var FETCH_INTERVAL = 5000;
var PRETTY_PRINT_JSON = true;
///
// START OF APPLICATION
///
var express = require('express');
var http = require('http');
var io = require('socket.io');
var app = express();
var server = http.createServer(app);
var io = io.listen(server);
io.set('log level', 1);
server.listen(PORT);
// Shared mutable ticker handed from the HTTP route to the next socket
// connection. NOTE(review): this is a race — two browsers loading pages at
// nearly the same time can pick up each other's ticker.
var ticker = "";
app.get('/:ticker', function(req, res) {
ticker = req.params.ticker;
// res.sendfile is the legacy (pre-Express 4) spelling of res.sendFile.
res.sendfile(__dirname + '/index.html');
});
io.sockets.on('connection', function(socket) {
// Capture the ticker stashed by the most recent GET /:ticker, then clear
// it. NOTE(review): pairing route state with "the next socket to connect"
// is racy when multiple clients connect concurrently.
var local_ticker = ticker;
ticker = "";
//Run the first time immediately
get_quote(socket, local_ticker);
//Every N seconds
// Fetches ~100 quotes per connected socket every FETCH_INTERVAL ms, so
// total request volume scales with the number of connected clients.
var timer = setInterval(function() {
var ids = ['AAPL', '' , ..........100 stocks];
var l = ids.length;
for(var i=0; i<l; i++){
get_quote(socket, ids[i])
}
}, FETCH_INTERVAL);
// Stop polling when the client goes away to avoid leaking the interval.
socket.on('disconnect', function () {
clearInterval(timer);
});
});
// Fetch a single quote for p_ticker from the Google Finance endpoint and
// emit it to p_socket as a 'quote' event. Malformed responses are skipped.
function get_quote(p_socket, p_ticker) {
  http.get({
    host: 'www.google.com',
    port: 80,
    path: '/finance/info?client=ig&q=' + p_ticker
  }, function(response) {
    response.setEncoding('utf8');
    var data = "";
    response.on('data', function(chunk) {
      data += chunk;
    });
    response.on('end', function() {
      if (data.length > 0) {
        try {
          // The feed prefixes its JSON payload with "// "; strip 3 chars.
          var data_object = JSON.parse(data.substring(3));
        } catch(e) {
          // Not JSON (e.g. an error page): deliberately skip this tick.
          return;
        }
        var quote = {};
        quote.ticker = data_object[0].t;
        quote.exchange = data_object[0].e;
        quote.price = data_object[0].l_cur;
        quote.change = data_object[0].c;
        quote.change_percent = data_object[0].cp;
        quote.last_trade_time = data_object[0].lt;
        quote.dividend = data_object[0].div;
        quote.yield = data_object[0].yld;
        // JSON.stringify's replacer must be a function, an array, or null;
        // the original passed `true`, which is silently ignored — pass
        // null explicitly to keep only the '\t' indentation behavior.
        p_socket.emit('quote', PRETTY_PRINT_JSON ? JSON.stringify(quote, null, '\t') : JSON.stringify(quote));
      }
    });
  });
}
My client side code :
<script type="text/javascript" src="https://ajax.googleapis.com/ajax/libs/jquery/1.7.2/jquery.js"></script>
<script type="text/javascript" src="http://localhost:4000/socket.io/socket.io.js"></script>
<script type="text/javascript">
$(document).ready(function() {
  // Connect back to the quote server.
  var socket = io.connect("http://localhost:4000");
  socket.on('quote', function(payload) {
    // Render each incoming quote as a <pre> block followed by a divider.
    var payload = $("<pre>" + payload + "</pre><hr />");
    $("#quotes").append(payload);
    // Keep the newest quote scrolled into view, then animate it in.
    $("html, body").animate({ scrollTop: $(document).height() }, 100);
    $(payload).show("slide", { direction: "up" }, 250);
    $(payload).effect("highlight", {}, 1500);
  });
});
</script>
<body>
<div id="quotes"></div>
</body>
I think that sending the desired ID's from the client side will make your application more flexible and easy to use. You can still write your server in a way that will be performant.
'For loops' will block Node's event loop. For async actions that need to iterate over an array I recommend:
https://github.com/caolan/async
Specifically 'async.each'
I haven't run your code but my gut tells me that my browser would not enjoy that much DOM manipulation all at once. I think that breaking the groups into smaller pieces would help. For instance:
Take your array of ID's and break it into 5. Then stagger the intervals of each.
var arr1 = [...]
var arr2 = [...]
var arr3 = [...]
var arr4 = [...]
var arr5 = [...]
setTimeout(doWorkOnArray(arr1), 4000)
setTimeout(doWorkOnArray(arr2), 3000)
setTimeout(doWorkOnArray(arr3), 2000)
setTimeout(doWorkOnArray(arr4), 1000)
setTimeout(doWorkOnArray(arr5), 0)
function doWorkOnArray(arr) {
setInterval(getData(arr), 5000)
}
Alternatively you could look at setting up a Master/Worker with something like Redis to queue the work. I think this would be the best performance. Check out:
https://github.com/Automattic/kue

Resources