How to get a reference to the Next.js server - node.js

I'm trying to use ws (WebSocket) in my Next.js app.
Instead of creating a new server, I want to pass the current server object to the ws initialization:
const { Server } = require('ws');
const wss = new Server({ server: /* my Next.js server instance here ... */ });
So: how do I get a reference to the Next.js server at run time?

You can create a custom server. See https://nextjs.org/docs/advanced-features/custom-server
Here is an example:
const express = require("express");
const next = require("next");
const dev = process.env.NODE_ENV !== "production";
const app = next({ dev });
const handle = app.getRequestHandler();
const server = express();
app.prepare().then(() => {
server.get("/some-random-path", (req, res) => res.send("Hello, world!"));
server.get("*", (req, res) => handle(req, res));
server.listen(3000, "0.0.0.0", () => {
console.log("Application started on http://localhost:3000");
});
});
Then run your new server file with Node (for example, node server.js) instead of next dev or next start.
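To connect this back to the question: Express's listen() returns the underlying Node http.Server, which is exactly the object ws expects. A rough, untested sketch building on the example above (note that attaching ws directly like this can clash with Next's hot-reload WebSocket in dev mode, which a later answer on this page deals with):

const express = require("express");
const next = require("next");
const { Server } = require("ws");

const dev = process.env.NODE_ENV !== "production";
const app = next({ dev });
const handle = app.getRequestHandler();

app.prepare().then(() => {
  const server = express();
  server.get("*", (req, res) => handle(req, res));

  // listen() returns the http.Server instance, which is what ws attaches to
  const httpServer = server.listen(3000, "0.0.0.0", () => {
    console.log("Application started on http://localhost:3000");
  });

  const wss = new Server({ server: httpServer });
  wss.on("connection", (ws) => {
    ws.send("hello from ws");
  });
});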

You can merge the code from my answer about socket.io
https://stackoverflow.com/a/62547135/2068876
with the example given on GitHub:
https://github.com/websockets/ws#multiple-servers-sharing-a-single-https-server
Try something like this (not tested, but it should be valid since the principle is the same):
./pages/api/wss1.js
import { WebSocketServer } from "ws";
const wss1Handler = (req, res) => {
if (!res.socket.server.wss1) {
console.log("*First use, starting wss1");
const wss1 = new WebSocketServer({ noServer: true });
// take over HTTP upgrade requests on the underlying server and hand them to ws
res.socket.server.on("upgrade", function upgrade(request, socket, head) {
wss1.handleUpgrade(request, socket, head, function done(ws) {
wss1.emit('connection', ws, request);
});
});
// stash the instance on the server object so it is only created once
res.socket.server.wss1 = wss1;
} else {
console.log("wss1 already running");
}
res.end();
}
export const config = {
api: {
bodyParser: false
}
}
export default wss1Handler;
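Since this runs as an API route, the upgrade listener does not exist until /api/wss1 has been hit at least once, so the client (or some init script) has to call the route before opening a socket; in dev mode you may also want to skip Next's /_next/webpack-hmr path inside the upgrade handler, as the answers further down this page do. A rough client-side sketch, assuming the default port 3000:

fetch("/api/wss1").then(() => {
  const ws = new WebSocket("ws://localhost:3000");
  ws.onmessage = (event) => console.log("message from wss1:", event.data);
});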

Related

Trying to reuse a socket.io instance in a controller but getting an error that the socket hasn't been initialized

I'm relatively new to Node.js and I'm trying to use socket.io in a controller. The idea is to respond to a client through Express's response object when an order is placed, but in addition I'd also like to emit an event so that the restaurant owner sees the orders from all the customers coming in 'live'.
I have an index.js file in an api folder with the following code, where I export api, server and PORT:
const express = require('express');
const morgan = require('morgan');
const http = require('http');
const cors = require('cors');
const api = express();
const server = http.createServer(api);
const PORT = process.env.PORT || 3000;
api.use(cors());
api.use(morgan('common'));
api.use(express.urlencoded({ extended: true }));
api.use(express.json({ extended: true }));
api.use('/api/v1', require('../routers'));
api.get('/', (req, res) => {
res.send('Backend running.');
});
module.exports = { api, server, PORT };
In the root of the project I have another index.js file with the following code:
/* eslint-disable no-console */
require('dotenv').config();
const mongoose = require('mongoose');
const { api, server, PORT } = require('./api');
const { MONGO_URI } = require('./config');
mongoose.connect(
MONGO_URI,
{ useNewUrlParser: true, useUnifiedTopology: true },
)
.then(() => console.log('Connected to DB'))
.catch((err) => console.log('Error occurred while trying to connect to DB', err));
api.listen(PORT, () => console.log(`Listening on ${PORT}`));
const io = require('./socket').init(server);
io.on('connection', (socket) => {
console.log('Connection success', socket.id);
socket.on('disconnect', () => {
console.log('Connection disconnected', socket.id);
});
});
I've placed the code to initialize socket.io and get an instance of it in a folder named socket with the following code in the index.js file:
/* eslint-disable consistent-return */
/* eslint-disable global-require */
const { Server } = require('socket.io');
let io;
module.exports = {
init: (server) => {
try {
io = new Server(server);
return io;
} catch (err) {
console.log(err);
}
},
get: () => {
if (!io) {
throw new Error('socket is not initialized');
}
return io;
},
};
Then I import the io instance in a controller but when I emit an event I get the error that the socket is not initialized. This is how I import the socket instance and emit an event:
const { OrdersService } = require('../services');
const io = require('../socket/index').get();
module.exports = {
create: async (req, res) => {
const { body } = req;
try {
const order = await OrdersService.create(body);
io.emit('new order', order);
res.status(201).json(order);
} catch (err) {
res.status(400).json(err);
}
},
};
What am I doing wrong? Any help would be greatly appreciated :)
I configured socket.io this way based on previous questions raised on this topic here on Stack Overflow.
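One thing worth checking (an assumption on my part, since no accepted answer is shown here): require('../socket/index').get() runs at module load time, i.e. before init(server) has been called in the root index.js, which is exactly when get() throws. Deferring the lookup until the request handler actually runs avoids that; a minimal sketch of the controller:

const { OrdersService } = require('../services');
const socket = require('../socket/index');

module.exports = {
  create: async (req, res) => {
    const { body } = req;
    try {
      const order = await OrdersService.create(body);
      // look up the io instance lazily, after init(server) has already run
      socket.get().emit('new order', order);
      res.status(201).json(order);
    } catch (err) {
      res.status(400).json(err);
    }
  },
};

It is also worth double-checking that the root index.js listens on server (the http.Server that socket.io was attached to) rather than calling api.listen, otherwise clients never connect to the instance io emits on.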

Node.js server returns error 404 in production

I am building my Next.js weather app project using the OpenWeather API. I set up a proxy server so the API key won't appear on the client side. It works perfectly on my localhost, but when I deploy it to Vercel or Heroku it returns a 404 error, and it does the same when requesting the endpoint directly in Postman or the browser.
PS: the environment variables are set in my Vercel dashboard.
PS: API URL example: localhost:3000/api/weather?q=london
My server.js:
const express = require('express');
const next = require('next');
const port = parseInt(process.env.PORT, 10) || 3000;
const dev = process.env.NODE_ENV !== 'production';
const app = next({ dev });
const handle = app.getRequestHandler();
const needle = require('needle');
const url = require('url');
const cors = require('cors');
app.prepare().then(() => {
const API_BASE_URL = process.env.API_BASE_URL;
const API_KEY_NAME = process.env.API_KEY_NAME;
const API_KEY_VALUE = process.env.API_KEY_VALUE;
const server = express();
server.use(
cors({
origin: '*',
})
);
server.get(
'/api/:stat',
async (req, res) => {
try {
const params = new URLSearchParams({
...url.parse(req.url, true).slashes,
...url.parse(req.url, true).query,
[API_KEY_NAME]: API_KEY_VALUE,
});
const apiRes = await needle(
'get',
`${API_BASE_URL}/${req.params.stat}?${params}`
);
const data = apiRes.body;
res.status(200).json(data);
} catch (error) {
res.status(500).json({ error });
}
}
);
server.all('*', (req, res) => {
return handle(req, res);
});
server.listen(port, (err) => {
if (err) throw err;
console.log(`> Ready on http://localhost:${port}`);
});
});
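A note on the deployment side (an assumption, since no answer is included here): Vercel does not run a custom Express server like this, which would at least explain the localhost/production difference there. The same proxying can be done in a regular Next.js API route, which keeps the key server-side; a rough sketch, with pages/api/[stat].js as a hypothetical file name and reusing the same environment variables:

// pages/api/[stat].js
import needle from 'needle';

export default async function handler(req, res) {
  // the dynamic segment plus any query string parameters
  const { stat, ...query } = req.query;
  try {
    const params = new URLSearchParams({
      ...query,
      [process.env.API_KEY_NAME]: process.env.API_KEY_VALUE,
    });
    const apiRes = await needle(
      'get',
      `${process.env.API_BASE_URL}/${stat}?${params}`
    );
    res.status(200).json(apiRes.body);
  } catch (error) {
    res.status(500).json({ error: String(error) });
  }
}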

How can I run a WebSocket server in a Next.js custom server in dev mode

Assuming I want to run a custom Next.js server and accept WebSocket connections on that same server, how can I avoid clobbering the Next.js dev server's hot reloading, which also uses WebSockets on the same server?
const { createServer } = require('http')
const WebSocket = require("ws")
const { parse } = require('url')
const next = require('next')
const port = parseInt(process.env.PORT, 10) || 3000
const dev = process.env.NODE_ENV !== 'production'
const app = next({ dev })
const handle = app.getRequestHandler()
app.prepare().then(() => {
const server = createServer((req, res) => handle(req, res, parse(req.url, true)))
// pass the same server instance that is used by next js to the websocket server
const wss = new WebSocket.Server({ server })
wss.on("connection", async function connection(ws) {
console.log('incoming connection', ws);
ws.onclose = () => {
console.log('connection closed', wss.clients.size);
};
});
server.listen(port, (err) => {
if (err) throw err
console.log(`> Ready on http://localhost:${port} and ws://localhost:${port}`)
})
})
I believe this server should work in the production build, so that the WebSocket server is created on the same server instance used to handle Next.js requests. But when I try this, hot module reloading stops working, and errors appear in the Chrome dev tools console because WebSocket connections that webpack expects to handle are now being handled by my custom WebSocket server.
How can I route WebSocket connections for the dev server to Next.js and webpack, and the others to my own handler?
I know I can run my WebSocket server on another port, but I want to run it on the same server instance and same port as Next.js.
So the trick is to create a WebSocket server with the noServer option set to true, then listen to the server's upgrade event and, depending on the pathname, either do nothing (so Next.js can do its thing) or pass the request on to the WebSocket server we created:
const wss = new WebSocket.Server({ noServer: true })
server.on('upgrade', function (req, socket, head) {
const { pathname } = parse(req.url, true);
if (pathname !== '/_next/webpack-hmr') {
wss.handleUpgrade(req, socket, head, function done(ws) {
wss.emit('connection', ws, req);
});
}
});
All together, something like this:
const { createServer } = require('http')
const WebSocket = require("ws")
const { parse } = require('url')
const next = require('next')
const port = parseInt(process.env.PORT, 10) || 3000
const dev = process.env.NODE_ENV !== 'production'
const app = next({ dev })
const handle = app.getRequestHandler()
app.prepare().then(() => {
const server = createServer((req, res) => handle(req, res, parse(req.url, true)))
const wss = new WebSocket.Server({ noServer: true })
wss.on("connection", async function connection(ws) {
console.log('incoming connection', ws);
ws.onclose = () => {
console.log('connection closed', wss.clients.size);
};
});
server.on('upgrade', function (req, socket, head) {
const { pathname } = parse(req.url, true);
if (pathname !== '/_next/webpack-hmr') {
wss.handleUpgrade(req, socket, head, function done(ws) {
wss.emit('connection', ws, req);
});
}
});
server.listen(port, (err) => {
if (err) throw err
console.log(`> Ready on http://localhost:${port} and ws://localhost:${port}`)
})
})
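To check it quickly from the browser console (assuming the default port 3000), something like:

const ws = new WebSocket("ws://localhost:3000");
ws.onopen = () => console.log("ws connected");
ws.onclose = () => console.log("ws closed");

Hot module reloading should keep working, since upgrade requests for /_next/webpack-hmr are left for Next.js to handle.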
Here is my answer for creating a WebSocket server in Next.js by using an API route instead of creating a custom server.
/pages/api/websocketserverinit.js:
import { WebSocketServer } from 'ws';
const SocketHandler = async (req, res) => {
if (res.socket.server.wss) {
console.log('Socket is already running')
} else {
console.log('Socket is initializing')
const server = res.socket.server
const wss = new WebSocketServer({ noServer: true })
res.socket.server.wss = wss
server.on('upgrade', (req, socket, head) => {
console.log("upgrade", req.url)
if (!req.url.includes('/_next/webpack-hmr')) {
wss.handleUpgrade(req, socket, head, (ws) => {
wss.emit('connection', ws, req);
});
}
});
wss.on('connection', (ws)=> {
console.log("connection", ws);
ws.on('message', (data) => {
console.log('received: %s', data);
})
ws.send('something');
});
}
res.end()
}
export default SocketHandler
You will have to call the API route to start the WebSocket server from the client (or a server init script):
fetch("http://localhost:3000/api/websocketserverinit")
And then connect to it:
const ws = new WebSocket("ws://localhost:3000")
Not super nice, but it may be useful in some cases.

How to stub function used on a server in mocha test suite

I'm trying to stub auth.session when testing endpoint /allowUser2 on an express server app.js.
//--auth.js--
module.exports.session = (req, res, next) => {
req.user = null;
next();
};
//--app.js--
const express = require('express');
const auth = require('./auth');
const app = express();
app.use(auth.session);
app.get('/allowUser2', (req, res) => {
if (!req.user) return res.status(401).send();
if (req.user.user === 2) return res.status(200).send();
});
app.listen(4001).on('listening', () => {
console.log(`HTTP server listening on port 4001`);
});
module.exports = app;
If I just have this one test file test1.js in my test suite, auth gets stubbed successfully.
//--test1.js--
let app;
const sinon = require('sinon');
const auth = require('../../auth.js');
const chai = require('chai');
const chaiHttp = require('chai-http');
const { expect } = chai;
chai.use(chaiHttp);
let agent;
describe('should allow access', () => {
before(async () => {
// delete require.cache[require.resolve('../../app.js')]; // causes Error: listen EADDRINUSE: address already in use
sinon.stub(auth, 'session').callsFake((req, res, next) => {
req.user = { user: 1 };
next();
});
app = require('../../app.js');
agent = chai.request.agent(app);
});
after(async () => {
auth.session.restore();
});
it('should not allow access', async function () {
const response = await agent.get('/allowUser2');
expect(response.status).to.be.equal(200);
});
});
However, if I have more than one test file that requires app.js then I have a problem. If app.js was already required in another test file, such as test2.js below, node doesn't reload app.js when it's required again in test1.js. This causes app.js to call the old auth.session function, not the new stubbed one. So the user isn't authenticated and the test fails.
//--test2.js--
const chai = require('chai');
const chaiHttp = require('chai-http');
const app = require('../../app.js');
const { expect } = chai;
chai.use(chaiHttp);
const agent = chai.request.agent(app);
describe('route /allowUser2', () => {
it("shouldn't allow access", async function () {
const response = await agent.get('/allowUser2');
expect(response.status).to.be.equal(401);
});
});
I tried to reload the app.js by using delete require.cache[require.resolve('../../app.js')];. This worked when reloading a file with a plain function, but when the file is a server like app.js this causes an error: Error: listen EADDRINUSE: address already in use.
To recreate:
download the repo
npm i
npm test
How do you stub a function on the server?
One solution is to turn app.js into a function that starts the server on a port number passed in as an argument, then change the port randomly when requiring. I do not like this option because there may be some reason to keep the app on a specific port.
app.js
const express = require('express');
const auth = require('./auth');
module.exports = (port) => {
const app = express();
app.use(auth.session);
app.get('/allowUser2', (req, res) => {
if (!req.user) return res.status(401).send();
if (req.user.user === 2) return res.status(200).send();
});
app.listen(port).on('listening', () => {
console.log(`HTTP server listening on port ${port}`);
});
return app;
};
when requiring
app = require('../../app.js')((Math.random() * 10000).toString().slice(0, 4));
Instead of exporting the app in app.js, I export a function that launches the server and returns both the server instance and the app. By exporting the server instance I can close the server, and the app is needed to pass into chai. Make sure const app = express(); is inside this function and not before it, or the app won't be recreated.
const express = require('express');
const auth = require('./auth');
const port = 4000;
module.exports = () => {
const app = express();
app.use(auth.session);
app.get('/allowUser2', (req, res) => {
if (!req.user) return res.status(401).send();
if (req.user.user === 2) return res.status(200).send();
});
app.post('/allowUser2', (req, res) => {
if (!req.user) return res.status(401).send();
if (req.user.user === 2) return res.status(200).send();
});
return {
server: app.listen(port).on('listening', () => {
console.log(`HTTP server listening on port ${port}`);
}),
app,
};
};
Then in my tests I can launch the server in before and close the server in after in both tests.
let app;
const sinon = require('sinon');
const auth = require('../../auth.js');
const chai = require('chai');
const chaiHttp = require('chai-http');
const { expect } = chai;
chai.use(chaiHttp);
let server;
describe('route /allowUser2', () => {
before(async () => {
// delete require.cache[require.resolve('../../app.js')]; // causes an error: `Error: listen EADDRINUSE: address already in use`.
sinon.stub(auth, 'session').callsFake((req, res, next) => {
req.user = { user: 2 };
next();
});
server = require('../../app.js')();
agent = chai.request.agent(server.app);
});
after(async () => {
server.server.close(() => {
console.log('Http server closed.');
});
auth.session.restore();
});
it('should allow access', async function () {
const response = await agent.get('/allowUser2');
expect(response.status).to.be.equal(200);
});
});
const chai = require('chai');
const chaiHttp = require('chai-http');
const { expect } = chai;
chai.use(chaiHttp);
let server;
let agent;
describe('route /allowUser2', () => {
before(async () => {
server = require('../../app.js')();
agent = chai.request.agent(server.app);
});
after(async () => {
server.server.close(() => {
console.log('Http server closed.');
});
});
it("shouldn't allow access", async function () {
const response = await agent.get('/allowUser2');
expect(response.status).to.be.equal(401);
});
});
working repo
UPDATE: Proposed Solution https://github.com/DashBarkHuss/mocha_stub_server/pull/1
One problem is that the way you are using a direct method reference in app.js prevents Sinon from working: https://gist.github.com/corlaez/12382f97b706c964c24c6e70b45a4991
The other problem (address in use) arises because each time we want to get a reference to app, we try to create a server on the same port. Breaking the app/server creation into a separate step alleviates that issue.
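To illustrate the first point (this is the usual Sinon pattern; it is not necessarily the exact code from the linked gist): app.use(auth.session) stores a reference to the original function at require time, so a later sinon.stub(auth, 'session') replaces the property on the module object but not the reference Express already holds. Wrapping the call so the property is looked up on every request keeps the stub effective:

// app.js
const express = require('express');
const auth = require('./auth');

const app = express();

// Resolve auth.session at call time rather than at require time,
// so sinon.stub(auth, 'session') takes effect even if the stub is
// installed after this file has been required.
app.use((req, res, next) => auth.session(req, res, next));

app.get('/allowUser2', (req, res) => {
  if (!req.user) return res.status(401).send();
  if (req.user.user === 2) return res.status(200).send();
});

module.exports = app;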

Next.js and Express as middleware. How do I set 'localhost:3000/newpage' and 'localhost:3000/newpage/' as the same route

I am new to Express and Next.js and was trying to set 'localhost:3000/newpage' and 'localhost:3000/newpage/' as the same route; however, when I add a '/' at the end it shows a 404 error.
I am using "next-routes" for dynamic routing and have created a routes.js file that looks like this:
const nextRoutes = require("next-routes");
const routes = (module.exports = nextRoutes());
routes.add("index", "/");
routes.add("newpage", "/newpage/:slug"); //with body parser this doesnt work
and my server.js file looks like this:
const express = require("express");
const next = require("next");
const routes = require("./routes");
const dev = process.env.NODE_ENV !== "production";
const port = process.env.PORT || 3000;
const app = next({ dev });
const handle = app.getRequestHandler();
const bodyParser = require("body-parser");
const handler = routes.getRequestHandler(app);
app
.prepare()
.then(() => {
const server = express();
server.use(bodyParser.json()); // with this, dynamic routes don't work
server.use(handler); // with this, dynamic routes work but URLs ending in / show 404
server.get("*", (req, res) => {
server.use(handler);
if (req.url.endsWith("/")) {
req.url = req.url.slice(0, -1); // works only when using body-parser
}
return handle(req, res);
});
server.listen(port, (err) => {
if (err) throw err;
console.log("> Ready on http://localhost:3000");
});
})
.catch((ex) => {
console.error(ex.stack);
process.exit(1);
});
You can modify the URL you get before passing it to Next's handler.
const next = require('next');
const express = require('express');
const routes = require('./routes');
const port = process.env.PORT || 3000;
const dev = process.env.NODE_ENV !== 'production';
const app = next({dev});
const handle = app.getRequestHandler();
// const handler = routes.getRequestHandler(app); // redundant line
app.prepare().then(() => {
const server = express();
// server.use(handler); // <-- this line is redundant since you need only one handler!
server.get('*', (req, res) => {
if (req.url.endsWith('/')) {
req.url = req.url.slice(0, -1); // remove the last slash
}
return handle(req, res);
});
server.listen(port, (err) => {
if (err) throw err;
console.log('> Ready on http://localhost:3000');
});
});
Working example: https://codesandbox.io/s/express-nextjs-react-c47y8?file=/src/index.js
Navigate to /form or /form/
I had to install the body-parser package and use body-parser. I also changed the folder structure so that I didn't have to import the routes. The final code in server.js looks like this:
const express = require("express");
const next = require("next");
const dev = process.env.NODE_ENV !== "production";
const port = process.env.PORT || 3000;
const app = next({ dev });
const handle = app.getRequestHandler();
app
.prepare()
.then(() => {
const server = express();
server.get("*", (req, res) => {
if (req.url.endsWith("/")) {
req.url = req.url.slice(0, -1); // remove the last slash
}
return handle(req, res);
});
server.listen(port, (err) => {
if (err) throw err;
console.log("> Ready on http://localhost:3000");
});
})
.catch((ex) => {
console.error(ex.stack);
process.exit(1);
});
