Express req.ip returning object - node.js

I have an express app:
const express = require('express');
const app = express();
const cors = require('cors');
app.use(cors({ optionsSuccessStatus: 200 }));
app.get('/api/whoami', (req, res) => {
  const ipaddress = req.ip;
  res.status(200).json({ ipaddress });
});
app.listen(process.env.PORT || 3000);
module.exports = app;
and a test file:
const chai = require('chai');
const chaiHttp = require('chai-http');
const chaiMatch = require('chai-match');
const { describe, it } = require('mocha');
const server = require('../../server');
const should = chai.should();
const { expect } = chai;
chai.use(chaiHttp);
chai.use(chaiMatch);
describe('/GET /api/whoami', () => {
  it('should return the IP address', (done) => {
    chai.request(server)
      .get('/api/whoami')
      .end((err, res) => {
        res.should.have.status(200);
        res.body.should.be.a('object');
        res.body.should.have.property('ipaddress');
        expect(res.body.ipaddress).should.match(/* very long regex */);
        done();
      });
  });
});
For some reason, I keep getting Uncaught AssertionError: expected Assertion{ __flags: { …(4) } } to match [my very long regex], and I didn't find anyone with the same error. How can I get my true IP with Express? Or what is the right way to test it?

The syntax is expect(something).to.match and not expect(something).should.match. See docs. Or, if you want to use should, you don't need expect, since the syntax for that is something.should.match.
The fix is therefore to change your code either as follows:
expect(res.body.ipaddress).to.match(/* very long regex */);
...or as follows:
res.body.ipaddress.should.match(/* very long regex */);
In the style guide, you get a good comparison between how to use expect and how to use should.
By mixing those two, you took expect(...), which returns an object containing things like to, and used it as the source of your should, so the should.match check operated on the object returned by expect(...) rather than on the IP address itself.
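As for getting the true client IP: req.ip is derived from the connection's remote address, so behind a proxy or load balancer it will report the proxy's address unless Express is told to trust the X-Forwarded-For header. A minimal sketch (whether you need this depends on your hosting, which the question doesn't specify):
// Only if the app sits behind a reverse proxy (Heroku, nginx, etc.); this is separate from the assertion error above.
app.set('trust proxy', true); // or a stricter value such as 'loopback'
app.get('/api/whoami', (req, res) => {
  res.status(200).json({ ipaddress: req.ip }); // now reflects the forwarded client address
});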

Related

How do I grab json from an external api (serp api) from my backend and then make that same data available for my front end application?

Right now I have a front end React application using axios and a backend server using Node.js and Express. I cannot for the life of me get my SerpApi data to post so that my front end can get it through axios and display the JSON data. I know how to get data to the front end, but I am not a backend developer, so this is proving to be incredibly difficult at the moment. I'm able to get the data from the external API, I just don't know how to post it once I get it. Also, I would not like to have all these requests running in server.js, so I created a controller, but I think that is where it is messing up. Any help is appreciated.
//pictures controller
const SerpApi = require('google-search-results-nodejs');
const {json} = require("express");
const search = new SerpApi.GoogleSearch("674d023b72e91fcdf3da14c730387dcbdb611f548e094bfeab2fff5bd86493fe");
const handlePictures = async (req, res) => {
  const params = {
    q: "Coffee",
    location: "Austin, Texas, United States",
    hl: "en",
    gl: "us",
    google_domain: "google.com"
  };
  const callback = function(data) {
    console.log(data);
    return res.send(data);
  };
  // Show result as JSON
  search.json(params, callback);
  //res.end();
}
// the above code works. how do i then post it to the server so that i can retrieve it to the backend?
module.exports = {handlePictures};
//server.js
const express = require('express');
const app = express();
const path = require('path');
const cors = require('cors');
const corsOptions = require('./config/corsOptions');
const { logger } = require('./middleware/logEvents');
const errorHandler = require('./middleware/errorHandler');
const cookieParser = require('cookie-parser');
const credentials = require('./middleware/credentials');
const PORT = process.env.PORT || 3500;
// custom middleware logger
app.use(logger);
// Handle options credentials check - before CORS!
// and fetch cookies credentials requirement
app.use(credentials);
// Cross Origin Resource Sharing
app.use(cors(corsOptions));
// built-in middleware to handle urlencoded form data
app.use(express.urlencoded({ extended: false }));
// built-in middleware for json
app.use(express.json());
//middleware for cookies
app.use(cookieParser());
//serve static files
app.use('/', express.static(path.join(__dirname, '/public')));
// routes
app.use('/', require('./routes/root'));
app.use('/pictures', require('./routes/api/pictures'));
app.all('*', (req, res) => {
  res.status(404);
  if (req.accepts('html')) {
    res.sendFile(path.join(__dirname, 'views', '404.html'));
  } else if (req.accepts('json')) {
    res.json({ "error": "404 Not Found" });
  } else {
    res.type('txt').send("404 Not Found");
  }
});
app.use(errorHandler);
app.listen(PORT, () => console.log(`Server running on port ${PORT}`));
//api/pictures.js
const picturesController = require('../../controllers/picturesController');
const express = require('express')
const router = express.Router();
// for POST request use app.post
router.route('/')
  .post( async (req, res) => {
    // use the controller to request external API
    const response = await picturesController.handlePictures()
    // send the response back to client
    res.json(response)
  })
module.exports = router;
You just need to return the result from SerpApi in your handlePictures function. To do this, wrap the call in a new Promise; when search.json runs the callback, do whatever you need with the results and pass them to resolve.
Here is your picturesController.js with an example of returning all results:
//pictures controller
const SerpApi = require("google-search-results-nodejs");
const { json } = require("express");
const search = new SerpApi.GoogleSearch(process.env.API_KEY); //your API key from serpapi.com
const handlePictures = async (req, res) => {
  return new Promise((resolve) => {
    const params = {
      q: "Coffee",
      location: "Austin, Texas, United States",
      hl: "en",
      gl: "us",
      google_domain: "google.com",
    };
    const callback = function(data) {
      resolve(data);
    };
    search.json(params, callback);
  });
};
module.exports = { handlePictures };
Also, I advise you to change your SerpApi API key, since you have posted it publicly, to prevent it from being used by others.
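A possible refinement on the route side (my sketch, not part of the original answer, reusing the router and picturesController from the question): guard the await so an unexpected failure in handlePictures doesn't leave the request hanging.
router.route('/')
  .post(async (req, res) => {
    try {
      const response = await picturesController.handlePictures();
      res.json(response); // send the SerpApi results back to the client
    } catch (err) {
      res.status(500).json({ error: 'Failed to fetch results from SerpApi' });
    }
  });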
Since I don't have the full context of your app, I can only assume it. But given that you have already wrapped the logic for calling the external API into a dedicated controller, you can use it in the following way in an Express app (based on the hello world example from Express):
// import your controller here
const express = require('express')
const app = express()
const port = 3000
// for POST request use app.post
app.get('/', async (req, res) => {
  // use the controller to request external API
  const response = await yourController.method()
  // send the response back to client
  res.json(response)
})
app.listen(port, () => {
  console.log(`Example app listening on port ${port}`)
})
Here's an example of how to execute the HTTP request from the frontend:
const response = await fetch('http://localhost:3000'); // calls the route defined above
const data = await response.json();                    // the body sent by res.json(response) on the server
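Since the question's frontend already uses axios, the equivalent request would be the following (axios parses the JSON body for you):
const { data } = await axios.get('http://localhost:3000'); // data holds the object sent by res.json(...)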

How to stub function used on a server in mocha test suite

I'm trying to stub auth.session when testing endpoint /allowUser2 on an express server app.js.
//--auth.js--
module.exports.session = (req, res, next) => {
  req.user = null;
  next();
};
//--app.js--
const express = require('express');
const auth = require('./auth');
const app = express();
app.use(auth.session);
app.get('/allowUser2', (req, res) => {
  if (!req.user) return res.status(401).send();
  if (req.user.user === 2) return res.status(200).send();
});
app.listen(4001).on('listening', () => {
  console.log(`HTTP server listening on port 4001`);
});
module.exports = app;
If I just have this one test file test1.js in my test suite, auth gets stubbed successfully.
//--test1.js--
let app;
const sinon = require('sinon');
const auth = require('../../auth.js');
const chai = require('chai');
const chaiHttp = require('chai-http');
const { expect } = chai;
chai.use(chaiHttp);
let agent;
describe('should allow access', () => {
  before(async () => {
    // delete require.cache[require.resolve('../../app.js')]; // causes Error: listen EADDRINUSE: address already in use
    sinon.stub(auth, 'session').callsFake((req, res, next) => {
      req.user = { user: 1 };
      next();
    });
    app = require('../../app.js');
    agent = chai.request.agent(app);
  });
  after(async () => {
    auth.session.restore();
  });
  it('should not allow access', async function () {
    const response = await agent.get('/allowUser2');
    expect(response.status).to.be.equal(200);
  });
});
However, if I have more than one test file that requires app.js then I have a problem. If app.js was already required in another test file, such as test2.js below, node doesn't reload app.js when it's required again in test1.js. This causes app.js to call the old auth.session function, not the new stubbed one. So the user isn't authenticated and the test fails.
//--test2.js--
const chai = require('chai');
const chaiHttp = require('chai-http');
const app = require('../../app.js');
const { expect } = chai;
chai.use(chaiHttp);
const agent = chai.request.agent(app);
describe('route /allowUser2', () => {
  it("shouldn't allow access", async function () {
    const response = await agent.get('/allowUser2');
    expect(response.status).to.be.equal(401);
  });
});
I tried to reload the app.js by using delete require.cache[require.resolve('../../app.js')];. This worked when reloading a file with a plain function, but when the file is a server like app.js this causes an error: Error: listen EADDRINUSE: address already in use.
Recreate:
download Repo
npm i
npm test
How do you stub a function on the server?
One solution is to turn app.js into a function that starts the server on a port number passed in as an argument, then change the port randomly when requiring. I do not like this option because there may be some reason to keep the app on a specific port.
app.js
const express = require('express');
const auth = require('./auth');
module.exports = (port) => {
  const app = express();
  app.use(auth.session);
  app.get('/allowUser2', (req, res) => {
    if (!req.user) return res.status(401).send();
    if (req.user.user === 2) return res.status(200).send();
  });
  app.listen(port).on('listening', () => {
    console.log(`HTTP server listening on port ${port}`);
  });
  return app;
};
When requiring:
app = require('../../app.js')((Math.random() * 10000).toString().slice(0, 4));
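A small variation (my note, not in the original answer): passing port 0 asks the operating system for any free ephemeral port, which avoids collisions without rolling your own random numbers.
app = require('../../app.js')(0); // Node binds the server to an OS-assigned free port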
Instead of exporting the app in app.js, I export a function that launches the server and returns both the server instance and the app. Exporting the server instance gives me the ability to close the server, and the app is needed to pass into chai. Make sure const app = express(); is inside this function and not before it, or the app won't be recreated on each call.
const express = require('express');
const auth = require('./auth');
const port = 4000;
module.exports = () => {
  const app = express();
  app.use(auth.session);
  app.get('/allowUser2', (req, res) => {
    if (!req.user) return res.status(401).send();
    if (req.user.user === 2) return res.status(200).send();
  });
  app.post('/allowUser2', (req, res) => {
    if (!req.user) return res.status(401).send();
    if (req.user.user === 2) return res.status(200).send();
  });
  return {
    server: app.listen(port).on('listening', () => {
      console.log(`HTTP server listening on port ${port}`);
    }),
    app,
  };
};
Then in my tests I can launch the server in before and close the server in after in both tests.
let app;
const sinon = require('sinon');
const auth = require('../../auth.js');
const chai = require('chai');
const chaiHttp = require('chai-http');
const { expect } = chai;
chai.use(chaiHttp);
let server;
describe('route /allowUser2', () => {
  before(async () => {
    // delete require.cache[require.resolve('../../app.js')]; // causes an error: `Error: listen EADDRINUSE: address already in use`.
    sinon.stub(auth, 'session').callsFake((req, res, next) => {
      req.user = { user: 2 };
      next();
    });
    server = require('../../app.js')();
    agent = chai.request.agent(server.app);
  });
  after(async () => {
    server.server.close(() => {
      console.log('Http server closed.');
    });
    auth.session.restore();
  });
  it('should allow access', async function () {
    const response = await agent.get('/allowUser2');
    expect(response.status).to.be.equal(200);
  });
});
const chai = require('chai');
const chaiHttp = require('chai-http');
const { expect } = chai;
chai.use(chaiHttp);
let server;
let agent;
describe('route /allowUser2', () => {
  before(async () => {
    server = require('../../app.js')();
    agent = chai.request.agent(server.app);
  });
  after(async () => {
    server.server.close(() => {
      console.log('Http server closed.');
    });
  });
  it("shouldn't allow access", async function () {
    const response = await agent.get('/allowUser2');
    expect(response.status).to.be.equal(401);
  });
});
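One optional tweak (my sketch, not part of the original answer): server.close is asynchronous, so the after hooks above return before the port is actually released. Awaiting the close callback avoids racing the next test file:
after(async () => {
  await new Promise((resolve) => server.server.close(resolve)); // resolves once the port is released
  auth.session.restore(); // only in the test file that stubbed auth.session
});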
working repo
UPDATE: Proposed Solution https://github.com/DashBarkHuss/mocha_stub_server/pull/1
One problem is that the way you use a direct method reference in app.js prevents Sinon from working: https://gist.github.com/corlaez/12382f97b706c964c24c6e70b45a4991
The other problem (address in use) is that each time we want to get a reference to app, we try to create a server on the same port. Breaking the app/server creation into a separate step alleviates that issue.
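What the direct-reference problem usually means in practice (my sketch, assuming the app.js from the question): app.use(auth.session) captures the original function at startup, so stubbing auth.session afterwards has no effect on requests. Deferring the lookup to request time lets the stub be picked up:
// Instead of: app.use(auth.session);
app.use((req, res, next) => auth.session(req, res, next)); // auth.session is looked up on every request, so a Sinon stub applied later is used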

axios first request succeeds second request fails

I have a simple Node Express server in which I get data from an API. It works fine on the first request but fails when I try to make a second request.
const express=require("express");
const axios =require("axios");
const cors = require('cors');
const app=express();
app.use(cors());
app.get("/devices",(req,res)=>{
  axios.get(
    'http://ipaddress/api/reports',
  ).then((response) => {
    res.status(200);
    res.json(response.data);
  }).catch((error) => {
    res.status(400)
    res.send("error")
  });
});
app.listen(3002,()=>{
  console.log("started on port 3002");
});
The problem I found here is that you have initialized the server inside your get route. The app.listen(...) code should be outside the get route implementation. I doubt your code even runs the first time. Change your code like this:
const express = require("express");
const axios = require("axios");
const cors = require('cors');
const app = express();
app.use(cors());
app.get("/devices", (req,res) => {
  axios.get(
    'http://ipaddress/api/reports',
  ).then((response) => {
    res.status(200).json(response.data);
  }).catch((error) => {
    res.status(400).send("error")
  });
});
app.listen(3002,() => {
  console.log("started on port 3002");
});
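One optional addition (my sketch, not part of the answer above): a variant of the /devices route that logs the underlying axios error and forwards its status code, which makes a failing second request much easier to diagnose.
app.get("/devices", (req, res) => {
  axios.get('http://ipaddress/api/reports')
    .then((response) => {
      res.status(200).json(response.data);
    })
    .catch((error) => {
      console.error(error.message); // see why the upstream call actually failed
      res.status(error.response ? error.response.status : 500).send("error"); // error.response exists when the upstream replied
    });
});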
Hope this helps :)

Why is express.urlencoded not a function while testing with Jest?

I am testing my app.js using Jest and it says express.urlencoded is not a function.
I'm using express 4.16.4, and I am writing a test for app.js that looks somewhat like this:
const express = require('express');
const app = express();
app.use(express.json());
app.use(express.urlencoded({ extended: true }));
app.use(require('./routes'));
module.exports = app;
and my test case is:
jest.mock('express');
it('has a parser', () => {
  const app = require('../src/app');
  const express = require('express');
  expect(app.use).toHaveBeenCalledWith(express.urlencoded());
});
This gives me an error saying: express.urlencoded is not a function.
I expect this test to pass because the app works perfectly fine, but only inside the test does it say that express.urlencoded is not a function.
You have to manually provide a module factory and mock the module methods you want to test. This would look something like this:
// Note: out-of-scope variables used inside a jest.mock factory must be prefixed with "mock",
// and the factory must return the object (hence the parentheses around the object literal).
const mockUrlencoded = jest.fn();
jest.mock('express', () => ({
  urlencoded: mockUrlencoded,
}));
it('has a parser', () => {
  const app = require('../src/app');
  const express = require('express');
  expect(app.use).toHaveBeenCalledWith(mockUrlencoded);
});
EDIT:
I am not familiar with Express, but you can try something like the example below. jest.mock() will mock express so that the express() call passes, and then inside the test, once that part has passed, jest.doMock() mocks it again to provide the json() and urlencoded() functions to express.
jest.mock('express', () => {
  return {
    use: () => {
      return;
    }
  }
});
it('has a parser', () => {
  const jsonMock = jest.fn();
  const urlencodedMock = jest.fn();
  const app = require('../src/app');
  jest.doMock('express', () => ({
    // return the object literal (wrapped in parentheses) rather than a block body
    json: jsonMock,
    urlencoded: urlencodedMock,
  }));
  expect(app.use).toHaveBeenCalledWith(urlencodedMock);
});
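Neither snippet above will fully satisfy the app.js from the question as written, because app.js calls express() as a function and also registers express.json() and require('./routes'). Here is a sketch of a factory that covers all of that; the module paths and mock names are my assumptions based on the question, and './routes' is mocked away so its own use of Express doesn't interfere:
// Out-of-scope variables referenced in the factories are prefixed with "mock" as Jest requires.
const mockUse = jest.fn();
const mockJsonParser = jest.fn();
const mockUrlencodedParser = jest.fn();

jest.mock('express', () => {
  const express = jest.fn(() => ({ use: mockUse })); // express() returns an app whose use() we can inspect
  express.json = jest.fn(() => mockJsonParser);
  express.urlencoded = jest.fn(() => mockUrlencodedParser);
  return express;
});
jest.mock('../src/routes', () => jest.fn()); // path assumed from require('./routes') in app.js

it('registers the urlencoded parser', () => {
  require('../src/app');
  expect(mockUse).toHaveBeenCalledWith(mockUrlencodedParser); // app.use received what urlencoded() returned
});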

Server instances, jest and 'listen EADDRINUSE :::3000'

I'm new to Jest, Node and Express, and am having a problem with testing my app.
The actual code seems to be working - it's just when passing the server instance to each of the test files (user.test.js and genres.test.js) and running jest, the port is being blocked. I assume it's because I'm creating duplicate instances of the server in each test file, without realising it.
Running jest with the flag --runInBand works, and so does only using one test file, but this doesn't help me understand exactly what's going on.
I've posted example code below, but I'm struggling to cut it down, however I do think most of the code is irrelevant, and it's just down to how I'm passing the server to each file.
Again, apologies for the length, but I think it should be very basic for anyone but me! Thanks.
index.js (.)
const express = require('express');
const app = express();
const genres = require('./routes/genres');
const users = require('./routes/users');
app.use(express.json());
app.use('/api/genres', genres);
app.use('/api/users', users);
const port = process.env.PORT || 3000;
const server = app.listen(port, () => console.log(`Listening on port ${port}...`));
console.log(typeof server);
// export server to be used in test file
module.exports = server;
genres.js (./routes)
const express = require('express');
const router = express.Router();
router.post('/', async (req, res) => {
  res.send('post genre ok');
});
module.exports = router;
users.js (./routes)
const express = require('express');
const router = express.Router();
router.post('/', async (req, res) => {
  res.send('post user ok');
});
module.exports = router;
genres.test.js (./tests)
const request = require('supertest');
let server;
describe('auth tests', () => {
  const exec = async function(){
    return await request(server)
      .post('/api/genres');
  };
  beforeEach(() => {
    server = require('../index');
  });
  afterEach(async () => {
    await server.close();
  });
  describe('POST /', () => {
    it('should return 200', async () => {
      const res = await request(server).post('/api/genres');
      expect(res.status).toBe(200);
    });
  });
});
user.test.js (./tests)
const request = require('supertest');
let server;
describe('user tests', () => {
  const exec = async function(){
    return await request(server)
      .post('/api/user');
  };
  beforeEach(() => {
    server = require('../index');
  });
  afterEach(async () => {
    await server.close();
  });
  describe('POST /', () => {
    it('should return 200', async () => {
      const res = await request(server).post('/api/users');
      expect(res.status).toBe(200);
    });
  });
});
Hopefully this (point 2) helps others with this question
It worked for me after splitting the app from the server. I'm not sure if this is the right approach, and I'm not 100% sure why it works with the app rather than the server, but all my tests are now passing.
index.js is now app.js:
const express = require('express');
const app = express();
const genres = require('./routes/genres');
const users = require('./routes/users');
app.use(express.json());
app.use('/api/genres', genres);
app.use('/api/users', users);
// export server to be used in test file
module.exports = app;
The server is separated into another file:
const app = require('./app');
const port = process.env.PORT || 3000;
app.listen(port, () => console.log(`Listening on port ${port}...`));
and then the test files import the app rather than the server; therefore each test doesn't create its own listening instance.
NB: I don't really know how correct this is, but as mentioned, it works.
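For what it's worth, this matches supertest's documented behaviour: when it is given an app that isn't already listening, it binds the app to an ephemeral port for you, so no fixed port is contested across test files. A sketch of what a test file looks like after the split (the '../app' path and the routes are assumptions taken from the question):
const request = require('supertest');
const app = require('../app'); // the app-only module, with no listen() call

describe('POST /api/genres', () => {
  it('should return 200', async () => {
    const res = await request(app).post('/api/genres'); // supertest starts and stops its own ephemeral server
    expect(res.status).toBe(200);
  });
});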
