Cross-Domain requests with ShareJS - node.js

I am trying to develop a plugin for DokuWiki that will enable real-time collaborative editing of wiki pages. I am using Node.js and ShareJS to do so, but I am having some trouble since it's the first time I am using them...
What I have so far is based on this and this page.
ShareJS Server - http://localhost:3000
var express = require('express');
var app = express();
app.use(function(req, res, next) {
    res.header('Access-Control-Allow-Origin', 'http://localhost:3000');
    res.header('Access-Control-Allow-Methods', 'GET, OPTIONS');
    res.header('Access-Control-Allow-Headers', 'Content-Type, Authorization');
    res.header('Access-Control-Allow-Credentials', true);
    return next();
});
// public folder to store assets
app.use(express.static(__dirname + '/public'));
// get sharejs dependencies
var sharejs = require('share');
require('redis');
// options for sharejs
var options = {
    db: {type: 'redis'},
    browserChannel: { cors: "http://localhost/dokuwiki/" },
};
// attach the express server to sharejs
sharejs.server.attach(app, options);
// listen on port 3000 (for localhost) or the port defined for heroku
var port = process.env.PORT || 3000;
app.listen(port);
Which, by the way, outputs a DeprecationWarning when I run it.
DokuWiki
When editing a wiki page, say http://localhost/dokuwiki/doku.php?id=start&do=edit, the plugin includes these scripts:
http://localhost:3000/channel/bcsocket.js
http://localhost:3000/share/share.js
http://localhost:3000/share/textarea.js
And then executes this:
window.onload = function() {
    // get dokuwiki editor textarea element
    var pad = document.getElementById('wiki__text');
    if (pad) { // if a wiki page is being edited
        // Server options
        var options = {
            origin: "http://localhost:3000/channel"
        };
        // Connect to the server
        var connection = sharejs.open('test', 'text', options, function(error, doc) {
            doc.attach_textarea(pad);
        });
    }
};
Which led to CORS errors in the DokuWiki page editor.
What am I missing? Thanks in advance!

Managed to solve it! :) The DeprecationWarning is still there and I don't know why, but it's working now.
All it took was editing the ShareJS Server code:
Removed the browserChannel: { cors: "http://localhost/dokuwiki/" }, line from the ShareJS options
Tweaked the Access-Control-Allow-Origin middleware
Here's the "final" ShareJS Server code:
var express = require('express');
var app = express();
var port = 3000;
app.use(function (req, res, next) {
    res.setHeader('Access-Control-Allow-Origin', '*');
    res.setHeader('Access-Control-Allow-Methods', 'GET, POST, OPTIONS, PUT, PATCH, DELETE');
    res.setHeader('Access-Control-Allow-Headers', 'X-Requested-With, content-type');
    res.setHeader('Access-Control-Allow-Credentials', true);
    next();
});
// public folder to store assets
app.use(express.static(__dirname + '/public'));
// get sharejs dependencies
var sharejs = require('share');
require('redis');
// options for sharejs
var options = {
    db: {type: 'redis'},
};
// attach the express server to sharejs
sharejs.server.attach(app, options);
app.listen(port, function() {
    console.log('listening on *:' + port);
});
And that's it! Now I just need to edit this code a bit more to make different wiki pages have different ShareJS textareas.
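For instance, a minimal sketch of that next step, assuming DokuWiki's JSINFO global exposes the current page id (JSINFO.id) on the edit page, could use the page id as the ShareJS document name so every wiki page gets its own document:

window.onload = function() {
    var pad = document.getElementById('wiki__text');
    if (pad) {
        // Assumption: DokuWiki populates JSINFO.id with the current page id;
        // the id may need sanitising to be a valid ShareJS document name.
        var docName = (window.JSINFO && JSINFO.id) ? JSINFO.id : 'test';
        var options = {
            origin: "http://localhost:3000/channel"
        };
        // one ShareJS document per wiki page
        sharejs.open(docName, 'text', options, function(error, doc) {
            if (error) return console.error(error);
            doc.attach_textarea(pad);
        });
    }
};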

Related

How to solve cors error in socket.io laravel and nodejs signalling app

I have a chat app built with Laravel and Socket.IO. My Laravel app is located on one domain while my Node.js app is on another domain. Connecting to my Node.js signalling app gives a CORS error, and the Node.js app also returns a CORS error. Here is my Node.js app:
"use strict";
require('dotenv').config();
const express = require('express');
const app = express();
const fs = require('fs');
const options = {
    key: fs.readFileSync(process.env.KEY_PATH),
    cert: fs.readFileSync(process.env.CERT_PATH)
};
const https = require('https').Server(options, app);
const io = require('socket.io')(https);
io.origins('*:*');
const listner = https.listen(process.env.PORT, function() {
    console.log('Listening on ', listner.address().port);
});
//allow only the specified domain to connect
io.set('origins', process.env.DOMAIN + ':*');
require('./socket')(io);
app.get('/', function (req, res, next) {
    res.header("Access-Control-Allow-Origin", "*");
    res.header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept");
    // Add this
    if (req.method === 'OPTIONS') {
        res.header('Access-Control-Allow-Methods', 'PUT, POST, PATCH, DELETE, OPTIONS');
        res.header('Access-Control-Max-Age', 120);
        return res.status(200).json({});
    }
    res.send('Ok');
    next();
});
I have installed a CORS middleware on my Laravel backend, but it makes no difference whatsoever. Any help will be appreciated.
You need to remove these two lines, because you first allow any origin and then restrict it to the specific domain from your env:
io.origins('*:*');
io.set('origins', process.env.DOMAIN + ':*');
The exact way to allow an origin is:
// process.env.DOMAIN == "https://anydomain.com:port"
// process.env.DOMAIN != "anydomain.com:port"
const options = {
    cors: true,
    origins: [process.env.DOMAIN],
};
const io = require('socket.io')(https, options);
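Note that origins is the Socket.IO v2 way of doing this. If you are on Socket.IO v3 or later, that option was replaced by a cors object whose options are handed to the cors package; a minimal sketch, assuming the same process.env.DOMAIN value:

// Socket.IO v3+ sketch: CORS is configured via a `cors` object instead of `origins`
const io = require('socket.io')(https, {
    cors: {
        origin: process.env.DOMAIN, // e.g. "https://anydomain.com"
        credentials: true
    }
});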

Angular image not served properly from nodejs backend

I want to serve an image from my Node.js backend and show it in the frontend; my backend runs on port 8081 and the frontend on 8080. I can see images at http://localhost:8081/image.JPG, but in the frontend I am getting a 404 because it looks for http://localhost:8080/image.JPG.
var express = require('express');
var cors = require("cors");
var bodyParser = require('body-parser');
var app = express();
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: true }));
global.__basedir = __dirname;
app.use(express.static(__dirname + '/resources/static/assets/uploads/'));
app.use(function (req, res, next) {
    res.setHeader('Access-Control-Allow-Origin', 'http://localhost:8080');
    res.setHeader('Access-Control-Allow-Methods', 'GET, POST, OPTIONS, PUT, PATCH, DELETE');
    // Request headers you wish to allow
    res.setHeader('Access-Control-Allow-Headers', 'X-Requested-With,content-type');
    // Set to true if you need the website to include cookies in the requests sent
    // to the API (e.g. in case you use sessions)
    res.setHeader('Access-Control-Allow-Credentials', true);
    // Pass to next layer of middleware
    next();
});
const db = require('./app/config/db.config');
app.use(cors({origin: 'http://localhost:8080'}));
// force: true will drop the table if it already exists
db.sequelize.sync({force: false}).then(() => {
    console.log('Drop and Resync with { force: false }');
});
require('./app/routers/upload.router.js')(app);
// Create a Server
var server = app.listen(8081, function () {
    var host = server.address().address
    var port = server.address().port
    console.log("App listening at http://%s:%s", host, port)
})
I can see the image when I query it in the browser, since that request goes to port 8081. The frontend looks on port 8080, so it gets the 404 error. Can someone please suggest a solution? I don't have any clue. Thank you.
An approach you can take is to use the proxy config provided by Angular, like below:
{
    "/api/assets": {
        "target": "http://localhost:8081",
        "secure": false
    }
}
Now, from the frontend when you hit this URL: http://localhost:8080/api/assets/image.JPG it will proxy to http://localhost:8081/api/assets/image.JPG
your image tag will look like <img src="/api/assets/image.JPG">
More on Proxy config & how to configure: https://angular.io/guide/build#proxying-to-a-backend-server
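Assuming you save the config above as proxy.conf.json in the project root, you can point the dev server at it with ng serve --proxy-config proxy.conf.json, or by setting proxyConfig under the serve options in angular.json. Keep in mind the proxy only applies to the dev server, not to a production build.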

Socket.io and node JS resulting CORS blocked issue

I've been working on chat functionality. There are two kinds of clients: one is the frontend of my application and the other is some random other website.
I know there are plenty of issues like this, but I have tried all of the solutions and I'm still getting the following error:
Access to XMLHttpRequest at 'https://mydomain/socket.io/?EIO=3&transport=polling&t=NCjoM1w' from origin 'null' has been blocked by CORS policy: The value of the 'Access-Control-Allow-Origin' header in the response must not be the wildcard '*' when the request's credentials mode is 'include'. The credentials mode of requests initiated by the XMLHttpRequest is controlled by the withCredentials attribute.
and this is the error I'm getting on the client side of my own front end:
https://mydomain/socket.io/?EIO=3&transport=polling&t=NCjnJUX 404 (Not Found)
This is how I'm trying to connect from the client side:
var socket = io.connect("https://mydomain:443/", {secure: true, port: '443'});
and this is my server.js code
const express = require("express");
const mongoose = require("mongoose");
const bodyParser = require("body-parser");
const passport = require("passport");
const users = require("./routes/api/users");
const base = require("./routes/api/base");
const leads = require("./routes/api/leads");
const requests = require("./routes/api/requests");
const offApp = require("./routes/api/offApp");
const chat = require("./routes/api/chat");
const chatSocket = require("./routes/socket/chat");
const path = require("path"); // on top
const app = express();
// const client = require('socket.io').listen(4000).sockets;
const https = require('https');
const fs = require('fs');
var options = {
    pfx: fs.readFileSync('certificate.pfx'),
    passphrase: 'password'
};
app.all('/*', function(req, res, next) {
    res.header("Access-Control-Allow-Origin", "*");
    res.header('Access-Control-Allow-Methods', 'GET,PUT,POST,DELETE,OPTIONS');
    res.header('Access-Control-Allow-Headers', 'Content-Type, Authorization, Content-Length, X-Requested-With, *');
    next();
});
var server = https.createServer(options, app);
var client = require("socket.io").listen(server);
client.origins('*:*') ;
server.listen(443);
// Bodyparser middleware
app.use(
    bodyParser.urlencoded({
        extended: false
    })
);
app.use(bodyParser.json());
// DB Config
const db = require("./config/keys").mongoURI;
// Connect to MongoDB
mongoose
    .connect(
        db,
        { useNewUrlParser: true }, (err, db) => {
            if (err) {
                throw err;
            }
            console.log('MongoDB connected');
            chatSocket(db, client);
        });
// Passport middleware
app.use(passport.initialize());
// Passport config
require("./config/passport")(passport);
// Routes
app.use("/api/users", users);
app.use("/api/base", base);
app.use("/api/leads", leads);
app.use("/api/requests", requests);
app.use("/api/offapp", offApp);
app.use("/api/chat", chat);
const port = process.env.PORT || 5000;
app.use(express.static("client/build")); // change this if your dir structure is different
app.get("*", (req, res) => {
res.sendFile(path.resolve(__dirname, "client", "build", "index.html"));
});
app.listen(port, () => console.log(`Server up and running on port ${port} !`));
Please help me resolve this CORS issue and the other issues. I am using the Azure App Service; that's why I can't use any port other than 80 and 443.
Install the cors package using npm i cors.
Then, in your app.js file:
const cors = require('cors')
app.use(cors());
// use CORS like this -
// you need to use it as middleware
app.use(function(req, res, next) {
    res.header("Access-Control-Allow-Origin", "*"); // update to match the domain you will make the request from
    res.header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept");
    next();
});
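One caveat, since the error quoted in the question says the wildcard '*' is rejected when the request's credentials mode is 'include': in that case the cors middleware needs an explicit origin and credentials enabled. A minimal sketch (https://your-frontend.example is a placeholder, not taken from the question):

const cors = require('cors');
// Explicit origin instead of '*', because the browser sends credentials.
// 'https://your-frontend.example' is a placeholder origin.
app.use(cors({
    origin: 'https://your-frontend.example',
    credentials: true
}));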
Danish, try this solution, I hope it will work:
const client = require("socket.io")(server, {
handlePreflightRequest: (req, res) => {
const headers = {
"Access-Control-Allow-Headers": "Content-Type, Authorization",
"Access-Control-Allow-Origin": req.headers.origin, //or the specific origin you want to give access to,
"Access-Control-Allow-Credentials": true
};
res.writeHead(200, headers);
res.end();
}
});
client.on("connection", () => {
console.log("Connected!");
});
server.listen(443);

express route causes 404

Having issues with the express router
I'm getting a 404 for the login route, so it must be how I've initialised my routes?
I'm not handling "/" but I'm not sure how to??
My index file:
var express = require('express');
var app = express();
var bodyParser = require('body-parser');
var mongoose = require('mongoose');
var blogRoutes = require('./routes/blogs');
var loginRoute = require('./routes/login');
var port = process.env.PORT || '3000';
var http = require('http');
app.set('port', port);
var server = http.createServer(app);
server.listen(port);
mongoose.connect(myInfo);
app.set('view engine', 'hbs'); //Templating engine (HandleBars)
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({
    extended: false
}));
app.use(function(req, res, next) {
    res.setHeader('Access-Control-Allow-Origin', '*');
    res.setHeader('Access-Control-Allow-Headers', 'Origin, X-Requested-With, Content-Type, Accept, Authorization');
    res.setHeader('Access-Control-Expose-Headers', 'Authorization');
    res.setHeader('Access-Control-Allow-Methods', 'POST, GET, PATCH, DELETE, OPTIONS');
    next();
});
//ROUTES
//Handle specific routes first
app.use('/blogs', blogRoutes);
app.use('/login', loginRoute);
// catch 404 and forward to error handler
app.use(function(req, res, next) {
    return res.render('404');
});
module.exports = app;
My login.js, just simple logging for now:
var express = require('express');
var router = express.Router();
router.get('/', function(req, res, next) {
    console.log("LOG::::::::::::::::::::::::::")
});
And in Angular:
login(email, password) {
    return this.http.post(this.blogsURL + 'login', {email: email, password: password})
        .map((res) => {
            console.log(res)
            return res;
        });
}
Any pointers?
You are sending a POST call to a GET route /login.
(I'm sorry. I don't have enough reputation to comment.)
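Following that point, a minimal sketch of a login route that actually matches the client's POST (the JSON response body is just a placeholder) would be:

var express = require('express');
var router = express.Router();

// Handle POST /login to match the Angular http.post call, and send a response
router.post('/', function(req, res, next) {
    console.log("LOG::::::::::::::::::::::::::", req.body);
    res.json({ ok: true }); // placeholder response
});

module.exports = router;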
Apologies, I made a newbie error and didn't subscribe to the login function in the component:
this.authService.login(email, password)
    .subscribe(data => console.log(data));
Thanks for your help

Accessing internal API with React, Axios on Heroku

I am building a full stack React application that accesses its own back end API with Axios. In my local environment, the following works as expected, with the server responding with JSON data, which is then rendered properly.
axios.get('/api/questions/categories')
I deployed to Heroku, and the app is launching normally and MongoDB is connected. Now, when the same GET request is made, it is not reaching the back end. When I log the response from Axios to the console, it contains the actual HTML of the page, instead of the JSON object expected.
For further clarification, if I manually type 'http://localhost:8080/api/questions/categories' in the address bar, the expected JSON data is displayed. If I do the same with the app on Heroku, I see that a '#' is appended to the url and the page display does not change, no error messages. This leads me to think that react-router is involved, but I have not been able to figure out how/why.
My stack: Node, Express, Mongo, React
Not using Redux
Using Axios to call my own API
// Dependencies
var express = require('express');
var path = require('path');
var webpack = require('webpack');
var webpackMiddleware = require('webpack-dev-middleware');
var webpackHotMiddleware = require('webpack-hot-middleware');
var config = require('./webpack.config.js');
var bodyParser = require('body-parser');
var mongoose = require('mongoose');
var morgan = require('morgan');
var inDev = process.env.NODE_ENV !== 'production';
var port = inDev ? 8080 : process.env.PORT;
var app = express();
// MIDDLEWARE
if (inDev){
    var compiler = webpack(config);
    var middleware = webpackMiddleware(compiler, {
        publicPath: config.output.publicPath,
        contentBase: 'app',
        stats: {
            colors: true,
            hash: false,
            timings: true,
            chunks: false,
            chunkModules: false,
            modules: false
        }
    });
    app.use(morgan('dev'));
    app.use(middleware);
    app.use(webpackHotMiddleware(compiler));
    app.get('/', function response(req, res) {
        res.write(middleware.fileSystem.readFileSync(path.join(__dirname, 'dist/index.html')));
        res.end();
    });
} else {
    app.use(express.static(__dirname + '/dist'));
    app.get('*', function response(req, res) {
        res.sendFile(path.join(__dirname, 'dist/index.html'));
    });
}
app.use(bodyParser.urlencoded({extended: true}));
app.use(bodyParser.json());
app.use(function(req, res, next) {
    res.setHeader('Access-Control-Allow-Origin', '*');
    res.setHeader('Access-Control-Allow-Credentials', 'true');
    res.setHeader('Access-Control-Allow-Methods', 'GET,HEAD,OPTIONS,POST,PUT,DELETE');
    res.setHeader('Access-Control-Allow-Headers', 'Access-Control-Allow-Headers, Origin, Accept, X-Requested-With, Content-Type, Access-Control-Request-Method, Access-Control-Request-Headers');
    // and remove caching so we get the most recent comments
    res.setHeader('Cache-Control', 'no-cache');
    next();
});
// DATABASE
var dbPath = inDev ? 'mongodb://localhost/quizMe' : 'mongodb://heroku_pmjl5579:c28cf07fpf05uus13ipjeur5s7#ds143000.mlab.com:43000/heroku_pmjl5579';
mongoose.connect(dbPath);
// ROUTING / API
// var indexRoute = require('./routes/index');
var questionsRoute = require('./routes/api/questions');
// app.use('/', indexRoute);
app.use('/api/questions', questionsRoute);
app.listen(port, function(){
    console.log('Express server up on ' + port);
});
Thanks for any help!
Most single page applications route all requests to the root path and let the front end router take over. I suspect that is what is happening to your app.
Do you have any form of request redirection logic in your back end code or any server configuration code?
What you can do is to whitelist some paths that you don't want front end routing to take over, such as those that start with /api. Pasting your server side config here will be helpful.
In your server config, when inDev is false, you have a app.get('*', ...) that catches all requests and responds with the static single page app. Hence API requests will also give the same response. You will need to restructure your routes to match /api before the wildcard *. Some examples can be found here
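As a minimal sketch of that restructuring, reusing the names already in the question's server code, the API routes get mounted before the static assets and the single-page-app catch-all:

// Mount API routes first so they are matched before the wildcard
app.use('/api/questions', questionsRoute);

// Then serve the built front end, with '*' as the last resort
app.use(express.static(__dirname + '/dist'));
app.get('*', function response(req, res) {
    res.sendFile(path.join(__dirname, 'dist/index.html'));
});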
