Node.js/Express: Facing issues while working on routing

viewController.js
const Book = require('./../models/bookModel');
const APIFeatures = require('./../utils/apiFeatures.js');

exports.getOverview = async (req, res, next) => {
  const features = new APIFeatures(Book.find(), req.query).filter().paginations();
  let getItAllProd = await features.getBook;
  res.render('overview', {
    data: getItAllProd,
    title: "trending books"
  });
  next();
};

exports.getOneBook = async (req, res, next) => {
  const getThatBook = await Book.findOne({ slug: req.params.id });
  res.render('onebook', {
    data: getThatBook
  });
  next();
};

exports.categories = async (req, res, next) => {
  const getCate = await Book.find({ category: req.params.category });
  res.render('overview', {
    data: getCate,
    name: req.params
  });
  next();
};

exports.paginationOfBook = async (req, res, next) => {
  let pages = req.params.page;
  pages = pages * 6 || 6;
  const limit = 6;
  let skip = pages - 6;
  const getlimitedbook = await Book.find({}).limit(limit).skip(skip);
  res.render('overview', {
    data: getlimitedbook
  });
  next();
};
viewRoutes.js
const express = require('express');
const viewController = require('./../controllers/viewController')
const router = express.Router();
router.get('/overview',viewController.getOverview)
router.get('/overview/:category',viewController.categories)
router.get('/overview/:page',viewController.paginationOfBook)
router.get('/overview/:id',viewController.getOneBook)
module.exports = router;
Using http://localhost:3000/overview I can get all the books, and with http://localhost:3000/overview/:category I get the books by category. But if I try to load http://localhost:3000/overview/:page or http://localhost:3000/overview/:id, the data will not load. So I made the following changes to the routing:
router.get('/overview',viewController.getOverview)
router.get('/overview/:id',viewController.getOneBook)
router.get('/overview/:category',viewController.categories)
router.get('/overview/:page',viewController.paginationOfBook)
Now I have access to '/overview' and '/overview/:id', but not to the others; their data is not loading. Do you have a solution so I can access all the routes?

First of all, please elaborate your question a bit more. But I think the problem is in the following line:
const getThatBook = await Book.findOne({slug:req.params.id})
edit this line to:
const getThatBook = await Book.findById(req.params.id)
and call this route like this http://localhost:3000/overview/34234232...
Or change your "id" to "slug" in the following line.
const getThatBook = await Book.findOne({slug:req.params.slug})
and call this route like this http://localhost:3000/overview/xyz_book_slug...
router.get('/overview/:slug',viewController.getOneBook)
and pass the book's slug property value when calling this route.
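Beyond the slug/id mismatch, note that /overview/:id, /overview/:category and /overview/:page are the same pattern to Express, so whichever is registered first handles every /overview/<something> request and the others never run. A minimal sketch of one way around this, assuming you are free to change the URL structure (the category and page prefixes below are made-up names, not from the original code):

// viewRoutes.js -- sketch: give each parameterized lookup its own prefix
const express = require('express');
const viewController = require('./../controllers/viewController');
const router = express.Router();

router.get('/overview', viewController.getOverview);
router.get('/overview/category/:category', viewController.categories); // hypothetical prefix
router.get('/overview/page/:page', viewController.paginationOfBook);   // hypothetical prefix
router.get('/overview/:slug', viewController.getOneBook);              // matches everything the routes above did not

module.exports = router;

With distinct prefixes Express can tell the three lookups apart, and the :slug route stays last so it only catches plain /overview/<slug> URLs.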

Related

How can i get a client side cookie with next.js 13 before rendering from middleware

I am trying to get a cookie from the middleware of Next.js 13 and I can't find up-to-date information on this.
My question: is there a way to see the cookie before the HTML is rendered? Is window defined at that point, or something like that?
page.tsx
import { cookies } from 'next/headers'; // required for the cookies() helper

async function getData() {
  const nextCookies = cookies(); // Get cookies object
  try {
    let cityToken = nextCookies.get('regionCode'); // Find cookie city
    return cityToken;
  } catch (e) {
    console.log(e);
  }
}

export default async function Page() {
  const nextCookies = cookies(); // Get cookies object
  const token = nextCookies.get('new'); // Find cookie for is new user
  // const cityToken = await getData();
  const cityToken = cookies().get('regionCode')?.value;
  console.log(cityToken);
}
middleware.tsx
const myFunction = async () => {
  // https://geolocation.onetrust.com/cookieconsentpub/v1/geo/location
  // run asynchronous tasks here - ip location
  const response = await fetch('https://www.cloudflare.com/cdn-cgi/trace');
  const text = await response.text();
  const arr = text.split('\n');
  const ip = arr.filter((v) => v.includes('ip='))[0].split('=')[1];
  // https://stackoverflow.com/questions/65752416/how-can-i-most-easily-parse-this-http-response-into-json-to-then-access-response
  // https://stackoverflow.com/questions/64591296/access-to-fetch-at-https-www-cloudflare-com-cdn-cgi-trace-from-origin-http
  const ipCity = await fetch(`https://ipapi.co/${ip}/json/`);
  const textJson = await ipCity.json();
};

const data = await myFunction();
res.cookies.set('regionCode', 'textJson.region');
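There is no answer recorded here, but a minimal sketch of one way to wire this up in Next.js 13, assuming the same ipapi.co lookup as above and that it is acceptable to set the cookie from middleware (NextRequest/NextResponse come from next/server; geo.region mirrors the ipapi.co response field used in the question):

// middleware.ts -- sketch: set the cookie on the response instead of a string literal
import { NextResponse } from 'next/server';
import type { NextRequest } from 'next/server';

export async function middleware(request: NextRequest) {
  const response = NextResponse.next();

  // Only look up the region if the browser did not already send the cookie
  if (!request.cookies.get('regionCode')) {
    const trace = await fetch('https://www.cloudflare.com/cdn-cgi/trace').then((r) => r.text());
    const ip = trace.split('\n').find((line) => line.startsWith('ip='))?.split('=')[1];
    const geo = await fetch(`https://ipapi.co/${ip}/json/`).then((r) => r.json());
    response.cookies.set('regionCode', geo.region); // the real value, not the string 'textJson.region'
  }
  return response;
}

Once the browser sends the cookie back, a server component can read it with cookies().get('regionCode')?.value from next/headers before any HTML is rendered, so no client-side window access is needed.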

SWAPI Pagination with Node.JS, Express and Axios

I wanted to fetch all people/films/etc. from SWAPI.
I tried a lot of things before I finally got something usable; however, it only shows the first 10 people (in the people case).
const express = require("express");
const router = express.Router();
const axios = require("axios");

router.get("/people", (req, res) => {
  axios
    .get("https://swapi.dev/api/people/?format=json")
    .then((data) => {
      return res.send(data.data.results);
    })
    .catch((error) => {
      console.log("error: ", error);
    });
});

module.exports = router;
What I tried, thanks to a lot of searching on Stack Overflow, is this:
const express = require("express");
const router = express.Router();
const axios = require("axios");

router.get("/people", async (req, res) => {
  let nextPage = `https://swapi.dev/api/people/`;
  let people = [];
  while (nextPage) {
    res = await axios(nextPage);
    const { next, results } = await res.data;
    nextPage = next;
    people = [...people, ...results];
  }
  console.log(people.length); // 82
  console.log(people); // shows what I wanted, all people!
  return people;
});

module.exports = router;
When starting the server, the page never finishes loading (it is still loading as I write this), but the console.log shows exactly what I wanted.
So, how can I manage to show this on the page?
My goal is to use that route for axios API calls from a React front-end (searching for a specific name).
Do not overwrite the Express res object in your code, and finish by sending the result with res.send:
let nextPage = `https://swapi.dev/api/people/`;
let people = [];
while (nextPage) {
  let nextres = await axios(nextPage);
  const { next, results } = await nextres.data;
  nextPage = next;
  people = [...people, ...results];
}
console.log(people.length); // 82
console.log(people); // shows what I wanted, all people!
res.send(people);
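For completeness, a minimal sketch of the full route with that fix in place, assuming the same router/axios setup as the question (the try/catch and the 500 response are additions for illustration, not part of the original answer):

router.get("/people", async (req, res) => {
  try {
    let nextPage = "https://swapi.dev/api/people/";
    let people = [];
    while (nextPage) {
      // a differently named variable so the Express res is not overwritten
      const swapiRes = await axios(nextPage);
      const { next, results } = swapiRes.data;
      nextPage = next;
      people = [...people, ...results];
    }
    res.send(people); // send the accumulated array instead of returning it
  } catch (error) {
    console.log("error: ", error);
    res.status(500).send({ message: "Failed to fetch people from SWAPI" });
  }
});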

Puppeteer: use second match with page.evaluate

I'm using Puppeteer to retrieve data online and I'm facing an issue.
Two assignments in the page set a variable of the same name to a serialized object; the first one is an empty object, but the second one contains the data I'm targeting.
My question is: how can I select the second occurrence instead of the first one, which is the empty object?
Thanks.
My code:
const puppeteer = require('puppeteer');
const cheerio = require('cheerio');
const Variants = require('./variants.js');
const Feedback = require('./feedback.js');

async function Scraper(productId, feedbackLimit) {
  const browser = await puppeteer.launch();
  const page = await browser.newPage();

  /** Scrape page for details */
  await page.goto(`${productId}`);
  const data = (await page.evaluate()).match(/window.runParams = {"result/)
  const data = data.items

  await page.close();
  await browser.close();

  console.log(data);
  return data;
}

module.exports = Scraper;
Website source code :
window.runParams = {};
window.runParams = {"resultCount":19449,"seoFeaturedSnippet":};
Please try this, it should work.
const data = await page.content();
const regexp = /window.runParams/g;
const matches = data.matchAll(regexp); // match against the page HTML (the original snippet referenced an undefined "string" variable)
for (const match of matches) {
  console.log(match);
  console.log(match.index);
}
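An alternative worth noting: because the second assignment overwrites the first one when the page's scripts run, you can skip regex matching and read the final value from the browser context. A minimal sketch inside the Scraper function above, assuming the scripts that set window.runParams have executed once the page has finished loading:

await page.goto(`${productId}`, { waitUntil: 'networkidle0' }); // wait for the page's own scripts to run
const runParams = await page.evaluate(() => window.runParams);  // the populated object has overwritten the empty one
console.log(runParams.resultCount);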

Why aren't all values stored in MongoDB?

I want to store form data in MongoDB with a MEAN application.
When I post the form values through my Angular form or using Postman, not all values are stored; for some fields no space is even created.
Here is the Course model:
const mongoose = require('mongoose');

const CourseSchema = mongoose.Schema({
  degprog: String,
  session: String,
  semester: String,
  c_code: String,
  c_title: String,
  c_hours: String,
  c_goals: String,
  m_quiz: Number,
  m_assign: Number,
  m_lab: Number,
  m_mid: Number,
  m_final: Number,
  m_total: Number,
  c_coprdinator: String,
  c_url: String,
  c_catelog: String,
  c_tbook: String,
  c_reference: String,
  teacher: {
    type: mongoose.Schema.Types.ObjectId,
    ref: "teacher",
  }
});

module.exports = mongoose.model('course', CourseSchema);
This is the course controller file:
const Teacher = require('../models/teacher.model.js');
const mongoose = require('mongoose');

// Create and Save a new Course
exports.create = async (req, res) => {
  if (!req.body.courseCode) {
    return res.status(400).send({
      message: "Note Course Code can not be empty"
    });
  }

  // searching for a teacher to add course
  const teacher = await Teacher.findOne({ _id: req.params.teacherId });

  // Create a Course
  const course = new Course();
  course._id = mongoose.Types.ObjectId(),
  course.degprog = req.body.degreeProgram;
  course.session = req.body.session;
  course.semester = req.body.semester;
  course.c_code = req.body.courseCode;
  course.c_title = req.body.courseTitle;
  course.c_hours = req.body.creditHours;
  course.m_quiz = req.body.quiz;
  course.m_assign = req.body.assignment;
  course.c_coprdinator = req.body.courseCoordinator;
  course.c_url = req.body.url;
  course.c_catelog = req.body.courseCatelog;
  course.c_tbook = req.body.textbook;
  course.c_reference = req.body.reference;
  course.c_goals = req.body.goals;
  course.teacher = req.body.teacherId;
  course.m_lab = req.body.lab;
  course.m_mid = req.body.mid;
  course.m_final = req.body.final;
  course.m_total = req.body.total;

  // save course in dataBase and attach to particular teacher
  await course.save();
  await teacher.courses.push(course._id);
  await teacher.save();

  res.send(course);
};
This is the routing file:
app.post('/teacher/:teacherId/course', course.create);
Only till the "URL" values stores. why the remaining values not saving?
The incoming req.body field names were not matching. I fixed it with guidance from @DanStarns: after adding console.log(req.body), I could see that the incoming field names did not match the ones the controller was reading, which is why those values were not stored and no space was created for them.
Here is the working solution:
const Teacher = require('../models/teacher.model.js');
const Course = require('../models/course.model');
const mongoose = require('mongoose');

// Create and Save a new Course
exports.create = async (req, res) => {
  if (!req.body.courseCode) {
    return res.status(400).send({
      message: "Note Course Code can not be empty"
    });
  }

  // searching for a teacher to add course
  console.log(req.body);
  const teacher = await Teacher.findOne({ _id: req.params.teacherId });

  // Create a Course
  const course = new Course();
  course._id = mongoose.Types.ObjectId(),
  course.degprog = req.body.degreeProgram;
  course.session = req.body.session;
  course.semester = req.body.semester;
  course.c_code = req.body.courseCode;
  course.c_title = req.body.courseTitle;
  course.c_hours = req.body.creditHours;
  course.m_quiz = req.body.quiz;
  course.m_assign = req.body.assignment;
  course.c_coordinator = req.body.courseCoordinator;
  course.c_url = req.body.url;
  course.c_catelog = req.body.currentCatelogDescription;
  course.c_tbook = req.body.textBook;
  course.c_reference = req.body.referenceMaterial;
  course.c_goals = req.body.courseGoals;
  course.teacher = req.body.teacherId;
  course.c_pre = req.body.pre;
  course.m_lab = req.body.lab;
  course.m_mid = req.body.midTerm;
  course.m_final = req.body.finalTerm;
  course.m_total = req.body.totalMarks;

  // save course in dataBase and attach to particular teacher
  await course.save();
  await teacher.courses.push(course._id);
  await teacher.save();

  res.send(course);
};
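Since the root cause was a field-name mismatch, a quick diagnostic that can catch this early is to check which fields the controller expects but did not receive. A minimal sketch, assuming the field names used in the working solution above (the helper name is made up for illustration):

// Hypothetical helper: list expected request fields that are missing from req.body
const expectedBodyFields = [
  'degreeProgram', 'session', 'semester', 'courseCode', 'courseTitle', 'creditHours',
  'quiz', 'assignment', 'courseCoordinator', 'url', 'currentCatelogDescription',
  'textBook', 'referenceMaterial', 'courseGoals', 'lab', 'midTerm', 'finalTerm', 'totalMarks'
];

function logMissingFields(body) {
  const missing = expectedBodyFields.filter((key) => body[key] === undefined);
  if (missing.length) {
    console.log('Expected fields missing from req.body:', missing);
  }
}

// usage inside exports.create, right after console.log(req.body):
// logMissingFields(req.body);

Any field listed here ends up undefined on the course document, so Mongoose simply omits it when saving, which is the "not creating any space for them" behaviour described above.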

All my scraped text ends up in one big object instead of separate objects with Cheerio

I'm following a web scraping course that uses Cheerio. I'm practicing on a different website than the one used in the course, and now I run into the problem that all my scraped text ends up in one big object, while every title should end up in its own object. Can someone see what I did wrong? I've already been banging my head against this for two hours.
const request = require('request-promise');
const cheerio = require('cheerio');

const url = "https://huurgoed.nl/gehele-aanbod";
const scrapeResults = [];

async function scrapeHuurgoed() {
  try {
    const htmlResult = await request.get(url);
    const $ = await cheerio.load(htmlResult);
    $("div.aanbod").each((index, element) => {
      const result = $(element).children(".item");
      const title = result.find("h2").text().trim();
      const characteristics = result.find("h4").text();
      const scrapeResult = {title, characteristics};
      scrapeResults.push(scrapeResult);
    });
    console.log(scrapeResults);
  } catch(err) {
    console.error(err);
  }
}

scrapeHuurgoed();
This is the link to the repo: https://github.com/danielkroon/huurgoed-scraper/blob/master/index.js
Thanks!
That is because of the way you used the selectors: div.aanbod is a single wrapper, so .children(".item") returns every listing at once and .text() concatenates all of their titles into one string. Iterating over div.item instead gives you one element, and therefore one result object, per listing. I've modified your script to fetch the content as you expected; it currently collects titles and characteristics, so feel free to add the rest within your script.
This is how you can get the required output:
const request = require('request-promise');
const cheerio = require('cheerio');

const url = "https://huurgoed.nl/gehele-aanbod";
const scrapeResults = [];

async function scrapeHuurgoed() {
  try {
    const htmlResult = await request.get(url);
    const $ = await cheerio.load(htmlResult);
    $("div.item").each((index, element) => {
      const title = $(element).find(".kenmerken > h2").text().trim();
      const characteristics = $(element).find("h4").text().trim();
      scrapeResults.push({title, characteristics});
    });
    console.log(scrapeResults);
  } catch(err) {
    console.error(err);
  }
}

scrapeHuurgoed();
