How to access localStorage with nodeJS?

I have a simple nodeJS app that parses a YouTube URL, stores a user's watch history and lets them bookmark videos. I store the watch history and bookmarks in localStorage.
I need to provide two routes in nodeJS: one to list the current history and one to save a URL into a history table.
I have no back-end experience, but have managed to create the app using nodeJS. Any suggestions are welcome!
Relevant code:
//parse youtube url and change iframe src
function loadVideo(videoURL){
  //split youtube url and get ID
  var videoID = videoURL.split("v=")[1];
  //change iframe src
  videoSource = `https://www.youtube.com/embed/${videoID}`;
  videoPlayer.src = videoSource;
  logHistory(videoSource);
}

//create history list item and store it
function logHistory(videoSource){
  var newHistory = document.createElement("li");
  newHistory.classList.add("history-item");
  newHistory.innerText = videoSource;
  historyContainer.appendChild(newHistory);
  //store element in localStorage
  var historyCount = historyContainer.children.length;
  localStorage.setItem("History item " + historyCount, videoInput.value);
}

localStorage is a frontend concept: it is accessible to JavaScript running in the browser. Node.js runs on the backend and cannot access localStorage directly.
You'd need to access it through the scripts running in the frontend, which can communicate with the backend using Ajax requests.
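As a rough sketch of the two routes the question asks for, assuming Express and a plain in-memory array (the /history path and the history variable are made up for illustration; a real app would persist to a database):

// server.js - minimal sketch, not production code
const express = require('express');
const app = express();
app.use(express.json());

// In-memory store standing in for the "history table"
let history = [];

// List the current history
app.get('/history', (req, res) => {
  res.json(history);
});

// Save a URL into the history
app.post('/history', (req, res) => {
  history.push(req.body.url);
  res.status(201).json({ saved: req.body.url });
});

app.listen(3000);

The browser code would then call these routes with fetch instead of (or in addition to) writing to localStorage, for example:

// In the browser, e.g. inside logHistory():
fetch('/history', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ url: videoSource })
});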

Note: this answer makes use of socket.io, which was not part of the question, but it allows the Node server to communicate with client-side scripts.
I'm going to share an example from an app I've created where I used node.js (with socket.io) and also local storage. As the others said, this cannot be done directly from your node.js server.
This code is from my server.js (node.js server) file.
function setStartingPlayer(startingPlayer, privateUsers) {
  // send the starting player's name and the list of current players to all clients
  io.emit('set-starting-player', startingPlayer, privateUsers);
}
It emits to the sockets two pieces of data: startingPlayer & privateUsers. In this example startingPlayer is a player's user name and privateUsers is an array of names of current players.
In the client script there is a function like this:
socket.on('set-starting-player', function(name, privateUsers) {
  var username = getStorage('username');
  // remember whose turn it is
  setStorage('turnIndicator', name);
  if (name === username) {
    console.log('I am the starting player. My name is ' + username);
  }
  let playerInGame = checkPrivateUsers(privateUsers);
  if (playerInGame === true) {
    updateTurnIndicator(name);
  }
});
This function accesses local storage in two ways: it stores data to local storage and it reads data from it. This is an example of how you can pass data from Node.js to a client script and have that script access local storage.
Worth noting:
I use two small JavaScript helper functions to access local storage:
function setStorage(key, info) {
  localStorage.setItem(key, JSON.stringify(info));
}

function getStorage(key) {
  var item = localStorage.getItem(key);
  return JSON.parse(item);
}
If you have questions let me know. Hope this helps.
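For the opposite direction (getting data from local storage back to the Node server, which is what the original question needs), the client can read the value and emit it over the same socket. A rough sketch; the 'save-history' event name and the 'history' key are assumptions you would replace with your own:

// Browser side: send a stored value to the server
socket.emit('save-history', getStorage('history'));

// server.js: receive it and persist it however you like
io.on('connection', function(socket) {
  socket.on('save-history', function(history) {
    console.log('history received from client:', history);
    // write to a database or a file here
  });
});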

Related

Custom Computed Etag for Express.js

I'm working on a simple local image server that provides images to a web application along with some JSON. The web application has pagination that does a GET request like "/images?page=X&limit=200" to an express.js server, which returns the JSON files in a single array. I want to take advantage of the browser's internal caching so that if a user goes back to a previous page, express.js returns an ETag. How can this be achieved with express.js? For this application I really just want the computation of the ETag to take three parameters: the page, the directory, and the limit (it doesn't need to consider the whole JSON body). Also, this application is for local use only, so I want the server to do the heavy lifting, since I figure it is faster than the browser. I did see https://www.npmjs.com/package/etag, which seems promising, but I'm not sure how to use it with express.js.
Here's a boilerplate of the express.js code I have:
var express = require('express');
var app = express();
var fs = require('fs');

app.get('/', async (req, res) => {
  let files = [];
  let directory = fs.readdirSync("mypath");
  let page = parseInt(req.query.page);
  let limit = parseInt(req.query.limit);
  for (let i = 0; i < limit; ++i) {
    files.push(new Promise((resolve) => {
      // read the i-th file of the requested page
      fs.readFile(directory[i + page * limit], (err, data) => {
        // format the data so it is easy to use for the UI
        resolve(JSON.parse(data));
      });
    }));
  }
  let results = await Promise.all(files);
  // compute an etag here and attach it to the results.
  res.send(results);
});

app.listen(3000);
When your server sends an ETag to the client, it must also be prepared to check the ETag that the client sends back to the server in the If-None-Match header in a subsequent "conditional" request.
If it matches, the server shall respond with status 304; otherwise there is no benefit in using ETags.
var serverEtag = "<compute from page, directory and limit>";
var clientEtag = req.get("If-None-Match");
if (clientEtag === serverEtag) {
  res.status(304).end();
} else {
  // Your code from above
  res.set("ETag", serverEtag);
  res.send(results);
}
The computation of the serverEtag could be based on the time of the last modification in the directory, so that it changes whenever any of the images in that directory changes. Importantly, this could be done without carrying out the fs.readFile statements from your code.
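As a rough sketch of that idea using the etag package linked in the question; the computeEtag helper, the string being hashed, and the use of the newest file mtime are choices made here for illustration, not the only way to do it:

const fs = require('fs');
const path = require('path');
const etag = require('etag'); // https://www.npmjs.com/package/etag

function computeEtag(directory, page, limit) {
  // Use the most recent modification time of any file in the directory,
  // so the tag changes whenever an image is added, removed or replaced.
  const newest = fs.readdirSync(directory)
    .map(name => fs.statSync(path.join(directory, name)).mtimeMs)
    .reduce((a, b) => Math.max(a, b), 0);
  return etag(`${directory}:${page}:${limit}:${newest}`);
}

// Inside the route handler:
// var serverEtag = computeEtag("mypath", page, limit);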

confused about node-localstorage

I'm making a site with node js and I need to use localstorage, so I'm using the node-localstorage library. Basically, in one file I add data to it, and in another file I want to retrieve it. I'm not 100% sure how to retrieve it. I know I need to use localStorage.getItem, but do I need to include localStorage = new LocalStorage('./scratch'); again? I was wondering what localStorage = new LocalStorage('./scratch'); actually does. Here is my code for adding data:
const ls = require('node-localstorage');
const express = require("express");
const router = express.Router();

router.route("/").post((req, res, next) => {
  var localStorage = new ls.LocalStorage('./scratch');
  if (req.body.name != undefined) {
    localStorage.setItem("user", req.body.name);
    res.redirect('/');
  } else {
    console.log("undefined");
  }
});

module.exports = router;
If my question is confusing, I just want to know what var localStorage = new ls.LocalStorage('./scratch'); does.
A drop-in substitute for the browser native localStorage API that runs on node.js.
It creates an instance of the LocalStorage class that this library provides. The constructor expects the path of the directory in which the library stores the key/value pairs (as files on disk).
Opinion: This looks pointless to me - I guess it fits your use case.
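To answer the retrieval part of the question: yes, the other file needs its own LocalStorage instance pointing at the same directory, because that directory is where the values live. A minimal sketch; the /user route path is made up for illustration:

// another-file.js
const ls = require('node-localstorage');
const express = require("express");
const router = express.Router();

// Point at the same directory that was used when the data was saved
const localStorage = new ls.LocalStorage('./scratch');

router.route("/user").get((req, res) => {
  // Returns the stored value, or null if the key does not exist
  const name = localStorage.getItem("user");
  res.send({ name: name });
});

module.exports = router;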

keep fetching data up to date

I have a question I'd like to ask, if anybody has an idea about it.
I'm building a full stack application backed by nodejs and using typescript for it. In my nodejs app I'm fetching an API whose data I will later serve to the user, but I have one small issue: I'm using node-fetch for now, and the fetched data change all the time, e.g. now I have 10 entries and after 5 seconds I have 30 entries. Is there a way or mechanism to keep the data my nodejs app fetches up to date by fetching them in the background?
Thanks in advance!
The easiest solution to implement, and a good one for making your web app realtime, is Pusher: https://pusher.com/
This is how you can handle Pusher within your NodeJS app:
import Pusher from 'pusher'

// Below are the keys that you will get from Pusher when you go to
// "Getting started" within your dashboard
const pusher = new Pusher({
  appId: "<Your app id provided by pusher>",
  key: "<Key id provided by pusher>",
  secret: "<Secret key given by pusher>",
  cluster: "<cluster given by pusher>",
  useTLS: true
});
Now you want to set up a change stream for your collection in MongoDB:
const db = mongoose.connection;
db.once('open', () => {
  // Watch the collection you are interested in (here: 'posts');
  // make sure the collection name is accurate
  const postCollection = db.collection('posts');
  const changeStream = postCollection.watch();
  changeStream.on('change', (change) => {
    // change.fullDocument contains the document that changed in the collection
    const post = change.fullDocument;
    if (change.operationType === 'insert') {
      pusher.trigger('<write channel for your pusher>', '<event, in this case insert>', {
        newPost: post
      });
    }
  });
});
With that setup your Pusher and backend are working; now it's time to set up the frontend.
If you're using VanillaJS, the Pusher getting started guide has code for you.
If you're using ReactJS, here's the code:
import Pusher from 'pusher-js'

useEffect(() => {
  Pusher.logToConsole = true;
  const pusher = new Pusher('<Key received from pusher>', {
    cluster: '<cluster received from pusher>'
  });
  const channel = pusher.subscribe('<channel name that you wrote in server>');
  channel.bind('<event that you wrote in server>', (data) => {
    // These are the data entries coming in as soon as they hit the DB;
    // update your state here (e.g. with the spread operator) to keep what
    // you already have and add the new content
    alert(JSON.stringify(data));
  });
  // Very important to have a clean-up function and an empty dependency
  // array so this effect only runs once
  return () => {
    channel.unbind();
    pusher.unsubscribe('<channel name that you wrote in server>');
  };
}, []);
With this setup, everything updates in realtime.
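For completeness, if you'd rather not depend on a third-party service, the background fetching the question asks about can also be done with a simple polling loop in Node. A rough sketch assuming node-fetch v2 (CommonJS require); the URL and 5-second interval are placeholders:

const fetch = require('node-fetch');

let latestEntries = [];

// Re-fetch the upstream API every 5 seconds and cache the result in memory
async function refresh() {
  try {
    const res = await fetch('https://example.com/api/entries');
    latestEntries = await res.json();
  } catch (err) {
    console.error('background fetch failed', err);
  }
}

refresh();
setInterval(refresh, 5000);

// Serve the cached data to users, e.g. from an Express route:
// app.get('/entries', (req, res) => res.json(latestEntries));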

Server Side rendering in mongo with dust

Is it possible to fetch data from MongoDB and render a html template on the server side itself for a node-js project?
As of now in my serverside js file I've done the following.
// Failing array will be populated by a db.find later on.
var failing = [
  { name: "Pop" },
  { name: "BOB" }
];

/* Now I have to send a mail from the server, for which I'm using nodemailer.
   Where do I store the template? This is what I've done in the same file: */
var template = "<body>{#failing} <p>{.name}</p> {/failing}</body>"
// Add this as the body of the mail and send it.
I'm not sure how to render the data or how to get it displayed. I'm aware that storing the template in a variable isn't right, but I'm not sure what else to do.
If your template is that short, you can store it in a variable without problem. Obviously, you can store it in a file also.
Let's say you decide to store it in a file index.dust:
<body>{#failing} <p>{.name}</p> {/failing}</body>
Now, in your node controller you need to load the file and generate the html content from it:
const fs = require('fs');
const dust = require('dustjs-linkedin');

// Read the template
var src = fs.readFileSync('<rest_of_path>/index.dust', 'utf8');

// Compile and load it. Note that we give it the name 'index'.
var compiled = dust.compile(src, 'index');
dust.loadSource(compiled);

// Render the template with the context. Take into account that this is
// an async function.
dust.render('index', { failing: failing }, function(err, html) {
  // html contains the generated markup.
  console.log(html);
});
Check the documentation in order not to have to compile the template every time you have to use it.
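Since the question mentions nodemailer, the rendered html can then be used directly as the mail body. A minimal sketch building on the dust setup above; the SMTP settings and addresses are placeholders you would replace with your own:

const nodemailer = require('nodemailer');

// Placeholder transport configuration; replace with your own SMTP settings
const transporter = nodemailer.createTransport({
  host: 'smtp.example.com',
  port: 587,
  auth: { user: 'user@example.com', pass: 'password' }
});

// 'index' was compiled and loaded in the previous snippet
dust.render('index', { failing: failing }, function(err, html) {
  if (err) return console.error(err);
  transporter.sendMail({
    from: 'server@example.com',
    to: 'admin@example.com',
    subject: 'Failing report',
    html: html // the rendered dust template becomes the mail body
  }, function(mailErr, info) {
    if (mailErr) return console.error(mailErr);
    console.log('Mail sent:', info.messageId);
  });
});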

How can I get (Express's) sessionID for a SockJS connection

I am using SockJS on an Express server. Is there any way to get the associated HTTP session ID of the client?
I see there is a way to do it for raw web sockets and Socket.io, but I am struggling to find how to do it for SockJS.
This is how my server looks. I want a similar handler to fetch the session ID:
var sockjs_echo = sockjs.createServer(sockjs_opts);

sockjs_echo.on('connection', function(conn) {
  conn.on('data', function(message) {
    conn.write(message);
  });
});
This is a "hack", but it works for me:
sockjs_echo.on('connection', function(conn) {
  // Dig the raw Cookie header out of SockJS's internal request object
  var cookieHeader = conn._session.recv.ws._stream._readableState.pipes._driver._request.headers.cookie;
  var cookies = {};
  cookieHeader.split(';').forEach(function(cookie) {
    var parts = cookie.split('=');
    cookies[parts.shift().trim()] = decodeURI(parts.join('='));
  });
  conn.on('data', function(message) {
    conn.write(message);
  });
});
'cookies' variable (example):
{
"dev_cookie": "1mimec6rbcolp0ujkcbqq9pdq4uoa5v0p8a284v32tmd4q3k0qi9p4vjteoifdn9b0lsm238fghf974o9jfehfuhvm3ltrgq02ad6k0",
"session_cookie": "s%3AjkKYPKFFT8r60rXUsVYISoOF17o49GUl.pbpu6T1%2BcdrIu5uQPRxZUYOrl5GnC179GaI5pWyR7SA",
"other_cookie": "s%3AzRMiC3fjo4gxTXX1p2XSi_C_EydIa358.KAdP1gwtZBVfcbkmwi%2B3pa0L1pbOCzQ3lHnNEyFvvHc"
}
Thanks so much for asking this question, #darwinbaisa, and for the answer, c-toesca. This came after days of searching.
For XHR streaming, the cookies are at: conn._session.recv.request.headers.cookie.
The only other way I could think of doing this was to set the express session cookie to httpOnly: false, exposing it to javascript (and, of course, to the possibility of hacking), and then pass it back as a prefix on every message from the SockJS javascript client to the node server.
Or to assign the ID to a javascript variable as I dynamically wrote a web page response, so that javascript would have access to the variable and could return it to the server. But again, this would expose the ID, and even if the ID were hashed or encrypted it could still be used in a malicious call to the server from javascript.
Things like this are made a lot easier in the node WS library, but I need a fallback from that for websocket-challenged browsers.
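Once the cookie header has been parsed as in the accepted answer, the express session ID can be recovered from the signed cookie value. A sketch using the cookie-signature package that express-session itself relies on; it assumes the default cookie name connect.sid and your express-session secret, so adjust both to your own configuration (e.g. session_cookie in the example above):

const signature = require('cookie-signature');

// cookies['connect.sid'] looks like "s%3A<sid>.<signature>" (URL-encoded),
// where 'cookies' is the object built in the accepted answer
const raw = decodeURIComponent(cookies['connect.sid']);

// express-session prefixes signed cookies with "s:"
const signed = raw.slice(2);

// Returns the session ID, or false if the signature doesn't match the secret
const sessionID = signature.unsign(signed, 'your express-session secret');
console.log('session ID:', sessionID);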
