I'm just getting used to NodeJS and MongoDB, so please go easy on me! I'm trying to console.log the network interfaces to find the IP address.
No matter what I do, I keep getting an error:
"TypeError: Cannot read property '1' of undefined "
I suspect that it's this code in line 11:
const ip = networkInterfaces.Ethernet[1].address;
Here is what is in my app.js file:
const express = require("express");
const app = express();
const mongoose = require("mongoose");
const morgan = require("morgan");
const bodyParser = require("body-parser");
const path = require("path");
const cors = require("cors");
const os = require("os");
const fs = require("fs"); // needed by createDir below
const networkInterfaces = os.networkInterfaces();
const ip = networkInterfaces.Ethernet[1].address;
require("dotenv/config");
//import routes
const productRoute = require("./routes/product");
const cartRoute = require("./routes/cart");
const orderRoute = require("./routes/order");
const favoriteRoute = require("./routes/favorite");
const authRoute = require("./routes/auth");
const notification = require("./middlewares/pushNotification");
//Connect to DB
const dbURI = process.env.DB_CONNECTION;
mongoose.connect(
  dbURI,
  {
    useNewUrlParser: true,
    useUnifiedTopology: true,
    useFindAndModify: false,
    useCreateIndex: true,
  },
  () => {
    app.listen(process.env.PORT, ip);
    let dirPath = path.join(
      __dirname,
      "public/api/static/images/productPictures"
    );
    let dirPathUser = path.join(
      __dirname,
      "public/api/static/images/userprofile"
    );
    createDir(dirPath);
    createDir(dirPathUser);
    console.log("Connected to DB");
  }
);
function createDir(dirPath) {
  if (!fs.existsSync(dirPath)) {
    fs.mkdirSync(dirPath, { recursive: true }, (err) => {
      if (err) {
        console.error("createDir Error:", err);
      } else {
        console.log("Directory is made!");
      }
    });
  }
}
...
This answer will help you with network interfaces.
If you are trying to get the client IP, then you can use:
const RequestIp = require("@supercharge/request-ip");
const ip = RequestIp.getClientIp(req);
inside your API.
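For instance, a minimal sketch of using it inside an Express route (the /whoami path and the port are made up for illustration):
const express = require("express");
const RequestIp = require("@supercharge/request-ip");

const app = express();

// Hypothetical route that returns the caller's IP as seen by the server
app.get("/whoami", (req, res) => {
  res.json({ ip: RequestIp.getClientIp(req) });
});

app.listen(3000);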
Or, to get the current machine's address, you can use a child process.
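Instead of a child process, you can also scan os.networkInterfaces() for the first external IPv4 address, which avoids hard-coding an interface name like Ethernet or en0. A minimal sketch of my own, using only the built-in os module:
const os = require("os");

function getLocalIp() {
  const interfaces = os.networkInterfaces();
  for (const name of Object.keys(interfaces)) {
    for (const net of interfaces[name]) {
      // family is the string "IPv4" on most Node versions (the number 4 on a few)
      const isIPv4 = net.family === "IPv4" || net.family === 4;
      if (isIPv4 && !net.internal) {
        return net.address; // first non-loopback IPv4 address found
      }
    }
  }
  return "127.0.0.1"; // fall back to loopback if nothing else is available
}

console.log(getLocalIp());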
I solved the issue by replacing Ethernet[1].address with en0[1].address.
line 11 now reads:
const ip = networkInterfaces.en0[1].address;
I added this to the end of my App.js file:
const port = process.env.PORT || '3000';
const address = process.env.ADDRESS || '127.0.0.1';
app.listen(port, address, () => console.log('Server running on http://' + address + ':' + port + '/'));
Related
I am developing a REST API that runs behind a load balancer, with a MongoDB database.
The API works when I am not using the load balancer, but once the load balancer is in place I do not get a response in Postman/Insomnia when sending a GET request that touches the database; on the index ("/") route, however, there is no problem getting a response back.
As the title shows, I get this error when cancelling the request in Postman, as if the connection could not be established.
This is the full error:
Error: socket hang up
at connResetException (internal/errors.js:628:14)
at TLSSocket.socketCloseListener (_http_client.js:449:25)
at TLSSocket.emit (events.js:412:35)
at net.js:675:12
at TCP.done (_tls_wrap.js:563:7) {
code: 'ECONNRESET'
}
This is my load balancer:
const https = require ("https");
const httpProxy = require("http-proxy");
const seaport = require("seaport");
const express = require("express");
const fs = require("fs");
const HOST = 'localhost';
const path = require("path")
const PORT = 8080;
const connect = seaport.connect('localhost', 9090);
let i = - 1;
const certificate = {
key: fs.readFileSync(path.join(__dirname, 'cert', 'key.pem')),
cert: fs.readFileSync(path.join(__dirname, 'cert', 'cert.pem')),
secure: false
};
let proxy = httpProxy.createProxyServer({secure: false});
let server = https.createServer(certificate, function(req, res) {
  let addresses = connect.query('server');
  if (!addresses.length) {
    // stop here if no backend server is registered
    return res.end('Server failed');
  }
  i = (i + 1) % addresses.length;
  let host = addresses[i].host.split(":").reverse()[0];
  let port = addresses[i].port;
  proxy.web(req, res, { target: 'https://' + host + ':' + port });
});

server.listen(PORT, function() {
  console.log('loadbalancer listens on' + ":" + PORT)
});
And this is my app.js
const express = require("express");
const app = express();
const morgan = require("morgan")
const bodyParser = require("body-parser")
const https = require('https');
const fs = require('fs');
const seaport = require('seaport');
const path = require('path');
const mongoose = require("mongoose");
//Database
const db = require('./database/db');
const { MongooseDocument } = require('mongoose');
//Routes
const clientRoute = require ("./api/routes/client")
const reservationsRoute = require ("./api/routes/reservations")
app.use('/client', clientRoute);
app.use('/reservations', reservationsRoute);
//index route
app.use('/', (req, res) =>{
res.send('Welcome to the app');
});
//Error handling 1
app.use((req, res, next) => {
  const error = new Error("Not found" + " ");
  error.status = 400;
  next(error);
})

//Error handling 2
app.use((error, req, res, next) => {
  res.status(error.status || 500);
  res.json({
    error: {
      message: ("Error 2" + " - " + error.message)
    }
  });
});
//Create server with the https key and certificate
const sslServer = https.createServer({
  key: fs.readFileSync(path.join(__dirname, 'cert', 'key.pem')),
  cert: fs.readFileSync(path.join(__dirname, 'cert', 'cert.pem')),
}, app);

//Middleware
app.use(morgan("dev"));
app.use(bodyParser.json());

const seaportObject = seaport.connect('localhost', 9090);

//Start listening
let port = seaportObject.register('server');
sslServer.listen(port, () => {
  db.mongoConnection()
    .then(
      console.log('Localserver listens on: ' + port)
    )
    .catch(err => {
      console.log(err)
      res.status(500).json({
        error: err,
        message: "It went wrong here"
      })
    });
});
My Database folder
const mongoose = require('mongoose');
let connection;
const mongoConnection = async () => {
if (!connection) {
connection =
await mongoose.connect("removed",
() => console.log("Mongo is up and running")),{
useNewUrlParser: true,
useCreateIndex: true,
useUnifiedTopology: true
}
}
return connection;
}
module.exports = {
mongoConnection: mongoConnection
};
I am stuck on this, please share some guidance.
A solution has been found: the problem was that the database never connected.
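For anyone running into the same thing, the general idea is to connect to the database first and only start accepting requests once that succeeds. A minimal sketch using the names from the code above (db.mongoConnection, sslServer, port); the exact fix in the original project may have differed:
// Connect before listening, so no request can arrive while Mongo is unreachable
db.mongoConnection()
  .then(() => {
    sslServer.listen(port, () => {
      console.log('Localserver listens on: ' + port);
    });
  })
  .catch(err => {
    console.error('Could not connect to MongoDB:', err);
    process.exit(1); // let the process manager / load balancer notice the failure
  });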
I'm trying to configure/connect MongoDB to Express, but when I go to localhost:5000/contacts nothing shows up.
I'm thinking the problem is with the URI, but I can't be sure. Maybe incorrect syntax? There are no error messages, so I don't see what the problem is.
route file:
const router = require('express').Router()
let Contacts = require('../models/contacts.model')
router.route('/').get((req, res) => {
  Contacts.find()
    .then(contacts => res.json(contacts))
    .catch(err => res.status(400).json('Error: ' + err))
});

router.route('/add').post((req, res) => {
  const name = req.body.name;
  const email = req.body.email;
  const phone = Number(req.body.phone);

  const newContact = new Contacts({
    name,
    email,
    phone
  })

  newContact.save()
    .then(() => {res.json('Contacts added!')})
    .catch(err => res.status(400).json('Error: ' + err))
})
module.exports = router
And for the server.js:
const express = require('express');
const cors = require('cors');
const mongoose = require('mongoose')
require('dotenv').config();
const app = express();
const port = process.env.PORT || 5000;
app.use(cors());
app.use(express.json())
const uri = process.env.ATLAS_URI;
mongoose.connect(uri, {useNewUrlParser: true, useCreateIndex: true});
const connection = mongoose.connection;
connection.once('open', () => {
console.log("MongoDb database connection established succesfully")
})
const contactsRouter = require('./routes/contacts');
app.use('/contacts', contactsRouter)
app.listen(port, () => {
console.log(`Server is running on port ${port}`)
})
Try this one; the problem might be that the server was not created.
const http = require('http')
const express = require('express');
const cors = require('cors');
const mongoose = require('mongoose')
require('dotenv').config();
const app = express();
const port = process.env.PORT || 5000;
app.use(cors());
app.use(express.json())
const server = http.createServer(app);
const uri = process.env.ATLAS_URI;
server.on('listening', () => {
  mongoose.connect(uri, {useNewUrlParser: true, useCreateIndex: true});
  const connection = mongoose.connection;
  connection.once('open', () => {
    console.log("MongoDb database connection established successfully")
  })
})
const contactsRouter = require('./routes/contacts');
app.use('/contacts', contactsRouter)
server.listen(port, () => {
console.log(`Server is running on port ${port}`)
})
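Since you said there are no error messages, it may also help to subscribe to the connection's error event right after mongoose.connect, so a bad URI or an unreachable cluster actually gets logged (a small addition of my own, not part of the snippet above):
mongoose.connection.on('error', (err) => {
  // Surfaces bad URIs, auth failures, unreachable clusters, etc.
  console.error('MongoDb connection error:', err);
});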
I have a router and it should execute my function when the user accesses the /matches URL, but it doesn't.
This is my controller, where my function is written:
const matchModel = require(`./../models/matchModel`);
//This function will get matches
exports.getMatches = async (req, res) => {
  const matches = await matchModel.find();
  res.status(200).json({
    status: "success",
    results: matches.length,
    data: {
      matches,
    },
  });
};
This is my route handler, which should execute the function in the controller:
const express = require("express");
const matchController = require(`./../controllers/matchController`);
const router = express.Router();
router.route("/matches").get(matchController.getMatches);
module.exports = router;
And this is my app file, which adds the URLs to the middleware stack:
const express = require("express");
const matchRouter = require(`./routes/matchRoutes`);
const app = express();
app.use(express.json());
//importing routers
app.use("/matches", matchRouter);
module.exports = app;
Finally, I run this server.js file to start my app:
const mongoose = require("mongoose");
const dotenv = require("dotenv");
//setting up dotenv and reading the config values
dotenv.config({ path: "./config.env" });
const conString = process.env.DATABASE_CON_STRING;
const port = process.env.PORT;
const app = require(`./app`);
mongoose
.connect(conString, {
useNewUrlParser: true,
useCreateIndex: true,
useFindAndModify: false,
useUnifiedTopology: true,
})
.then(() => {
console.log("connection to DB has been established successfully!");
});
app.listen(port, () => {
console.log("server is running...");
});
You are loading the router under /matches
app.use("/matches", matchRouter);
Then you are loading the /matches route under that router
router.route("/matches").get(matchController.getMatches);
That is going to make the route /matches/matches.
You probably want it to be router.route('/'). This will be the index route under the /matches router.
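In other words, the route file would end up looking roughly like this (a sketch of the fix, reusing the file shown above):
const express = require("express");
const matchController = require(`./../controllers/matchController`);

const router = express.Router();

// The router is already mounted at /matches in app.js,
// so "/" here maps to GET /matches.
router.route("/").get(matchController.getMatches);

module.exports = router;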
I have a problem when I want to module.exports the pool variable to use it in other files. I have this program in src\db\index.js:
const {Pool} = require('pg');
const express = require('express');
//Initialize
const path = require('path');
const app = express();
const fetch = require('fetch');
const PORT = process.env.PORT || 5000;
//Global Variables
const pool = new Pool({
connectionString: process.env.DATABASE_URL,
ssl:true
});
//Setting
app.use(express.static(path.join(__dirname, 'public')));
//Routes
app.use(require('../Routes/logIn'));
app.use(require('../Routes/singIn'));
app.use(require('../Routes/forgotPass.js'));
app.listen(PORT, () => console.log(`Listening on ${PORT}`));
module.exports = pool;
And then I want to require the const pool in this file src\Routes\LogIn.js:
const express = require('express');
const pool = require('../db');
const router = express.Router();
router.get('/usuario/:user', function (req, res) {
  //console.log("GET usuario");
  var user = req.params.user;
  pool.query(
    `select * from users where email = '${user}' limit 1`,
    function (error, resq, fields) {
      if (error) {
        console.log(error);
      } else {
        console.log(user);
        res.send(resq.rows);
      }
    }
  );
});
module.exports = router;
But when I run index.js and go to the route url/usuario/:user, I see in the logs that the program throws an error that says "pool.query is not a function". I want to know how I could export the const pool to use it in other files.
You can use
module.exports = {
query: (text, params) => pool.query(text, params),
}
Use express-promise-router
const Router = require('express-promise-router')
const db = require('../db')
const router = new Router()
Use
await db.query(`
SELECT * from local
`)
instead of pool.query in your router.get or router.post
The above should solve your issue. You can check the same reference here:
https://node-postgres.com/guides/async-express
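Putting both suggestions together, a minimal sketch following that guide (the file split mirrors your src\db and src\Routes layout; treat it as a starting point rather than a drop-in):
// src/db/index.js - keep only the pool here, no Express code
const { Pool } = require('pg');

const pool = new Pool({
  connectionString: process.env.DATABASE_URL,
  ssl: true,
});

module.exports = {
  query: (text, params) => pool.query(text, params),
};

// src/Routes/logIn.js
const Router = require('express-promise-router');
const db = require('../db');

const router = new Router();

router.get('/usuario/:user', async (req, res) => {
  // Parameterized query instead of string interpolation
  const { rows } = await db.query(
    'select * from users where email = $1 limit 1',
    [req.params.user]
  );
  res.send(rows);
});

module.exports = router;
Keeping the pool in its own module, with the Express app and routes elsewhere, also avoids the circular require between src\db\index.js and the route files, which is likely why module.exports was still empty and pool.query came back undefined.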
I have a site implemented with NodeJS, a MongoDB database, and the Mongoose plugin. Recently, the site began to go down about once a day. I found out that this is due to a lack of memory, which in turn is caused by active connections accumulating (db.serverStatus().connections.current). Perhaps this is not related, but I have a NodeJS script that is executed by cron every minute. It checks whether there is a post with the current date among the documents. I do close the mongoose connection there, so I don't know what the problem could be. These are the file contents:
process.env.NODE_TLS_REJECT_UNAUTHORIZED = 0;
const { new_time } = require("lib/functions");
const push = require("lib/push");
const apiCallback = require("middleware/socket/apiCallback");
const mongoose = require("lib/mongoose");
const User = require("models/User");
const Post = require("models/Post");
(async () => {
  let currentPost = await Post.findCurrent(1);
  if (currentPost) {
    await currentPost.setPublished(1);
    await apiCallback.call({
      roomName: "index",
      event: "posts.new",
      data: {
        post: {
          id: currentPost._id.toString()
        }
      }
    });
    await push.sendAll({
      // unnecessary data
    });
  }
  await mongoose.connection.close();
  process.exit(0);
})();
app.js:
const path = require("path");
const express = require("express");
const app = express();
const bodyParser = require("body-parser");
const cookieParser = require("cookie-parser");
const expressSession = require("express-session");
const MongoStore = require("connect-mongo")(expressSession);
const conf = require("conf");
const mongoose = require("lib/mongoose");
const expressSessionConfig = conf.get("session");
expressSessionConfig.cookie.expires = new Date(new Date().getTime() + 60 * 60 * 24 * 30 * 1000);
expressSessionConfig.store = new MongoStore({
mongooseConnection: mongoose.connection
});
const templateDir = path.join(__dirname, conf.get("template_dir"));
app.engine("ejs", require("ejs-locals"));
app.set("views", templateDir);
app.set("view engine", "ejs")
app.use(express.static("frontend"));
app.use(cookieParser());
app.use(expressSession(expressSessionConfig));
app.use(bodyParser.urlencoded({
extended: true
}));
require("routes")(app);
app.listen(conf.get("app_port"));
app.io.js (socket server on socket.io):
const fs = require("fs");
const path = require("path");
const app = require("express")();
const bodyParser = require("body-parser");
const apiCallback = require("middleware/socket/apiCallback");
const conf = require("conf");
const sslPath = conf.get("sslPath");
const sslOptions = {
key : fs.readFileSync(path.join(sslPath, "key.key")),
cert: fs.readFileSync(path.join(sslPath, "crt.crt"))
};
const server = require("https").Server(sslOptions, app);
const io = require("socket.io")(server);
app.use(bodyParser.urlencoded({
extended: true
}));
app.use(conf.get("api_callback:path"), apiCallback.watch(io));
require("routes/socket")(io);
server.listen(conf.get("socket_port"));
routes/socket.js:
const { in_array } = require("lib/functions");
const loadUser = require("middleware/socket/loadUser");
const User = require("models/User");
module.exports = io => {
  io.on("connection", async socket => {
    let query = socket.handshake.query || {};
    let { ssid } = query;
    ssid = ssid || "";
    let user = socket.user = await loadUser(ssid);
    let oldPageName = null;
    User.setOnline(user._id, 1);
    socket.on("setPageName", pageName => {
      if (oldPageName) socket.leave(oldPageName);
      oldPageName = pageName;
      socket.join(pageName);
    });
    socket.on("disconnect", () => {
      socket.leave(oldPageName);
      User.setOnline(user._id, 0);
    });
  });
};
Tell me how to properly close connections so that they do not accumulate in memory and do not load the server to the point that the MongoDB daemon process gets killed.
Your code has no issues; you should go for connection pooling, and your issue will resolve automatically. You will have a pool of connections, and whenever an API needs a DB connection, one is picked up from the pool; after the DB operation completes, the connection is not destroyed but returned to the pool. This way your product's performance increases along with the resolution of this issue.
https://mongoosejs.com/docs/connections.html
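A minimal sketch of what a shared, pooled connection module might look like (the URI env var is a placeholder; option names follow the Mongoose 5 docs linked above):
// lib/mongoose.js - one shared, pooled connection for the whole process
const mongoose = require("mongoose");

mongoose.connect(process.env.MONGO_URI, {
  useNewUrlParser: true,
  useUnifiedTopology: true,
  poolSize: 10 // sockets the driver keeps open and reuses (maxPoolSize in Mongoose 6+)
});

module.exports = mongoose;
Every part of the app (app.js, app.io.js, the cron script) then requires this same module, so requests reuse sockets from the pool instead of piling up new connections; only the short-lived cron script needs to close its connection explicitly, as it already does.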