how to resolve Cannot GET / in a kubernetes deployment? - node.js

I have a node.js application called "process-core" which contains the main logic to run another node.js app called "page-core". Through "process-core" I am serving the distribution folder (static contents) of the "page-core" application. I am deploying this with Kubernetes.
Dockerfile used to build the image for "process-core":
FROM node:8
RUN mkdir -p usr/src/app
WORKDIR /usr/src/app
COPY . .
WORKDIR /usr/src/app
RUN npm install
CMD [ "npm","start" ]
My server.js goes like this:
var express = require('express');
var cors = require('cors')
var app = express();
var path = require('path');
var router = express.Router();
var request = require('request');
var api = require('./server/api.js')
const base_url = 'http://localhost:8080/api/';
const static_dir = '/home/vignesh/page-core-devbranch/dist'; // location of dist folder
const assets_dir = path.join(__dirname + '/server/assets/');
const tools_dir = path.join(__dirname + '/server/tools/');
const index_file = path.join(static_dir + '/index.html');
/* static */
app.use(cors())
app.use(express.static(static_dir));
app.use('/assets', express.static(assets_dir));
app.use('/tools', express.static(tools_dir));
/* listen */
app.listen(5000);
node.yaml:
apiVersion: v1
kind: Service
metadata:
  labels:
    node: test
  name: nodetest
spec:
  ports:
    - nodePort: 30000
      port: 5000
      protocol: TCP
      targetPort: 5000
  selector:
    node: test
  type: NodePort
---
apiVersion: v1
kind: Pod
metadata:
  name: nodetest
  labels:
    node: test
spec:
  containers:
    - name: node-test
      image: suji165475/devops-sample:processcore
      ports:
        - containerPort: 5000
When I hit http://206.189.22.155:30000/ I get an error saying Cannot GET /.

FROM node:8
RUN mkdir -p usr/src/app
WORKDIR /usr/src/app
COPY . .
WORKDIR /usr/src/app
RUN npm install
EXPOSE 5000
CMD [ "npm","start" ]
You have not exposed the port from the Docker image in your Dockerfile. Please add the line EXPOSE 5000 to declare that the container listens on port 5000.
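If the error persists after rebuilding, note that Cannot GET / is generated by Express itself, so the request is reaching the container; a likely cause is that static_dir points to a path on the host (/home/vignesh/...) that does not exist inside the image, so express.static has no index.html to serve. A minimal sketch, assuming the built page-core dist folder is available in the build context:
Dockerfile (sketch):
# copy the built page-core assets into the image (path is an assumption)
COPY dist ./dist
server.js (sketch):
// resolve the static directory inside the container, not on the host
const static_dir = path.join(__dirname, 'dist');
app.use(express.static(static_dir));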

Related

Docker - React app can not fetch from the server

Hello, I am new to Docker and I am trying to dockerize my application, which uses React as the frontend, Node.js as the backend, and MySQL as the database. However, when I try to fetch data from the server in my React app, it gives me this error:
Access to fetch at 'http://localhost:3001/api' from origin 'http://localhost:3000' has been blocked by CORS policy: The 'Access-Control-Allow-Origin' header has a value 'http://127.0.0.1:3000' that is not equal to the supplied origin. Have the server send the header with a valid value, or, if an opaque response serves your needs, set the request's mode to 'no-cors' to fetch the resource with CORS disabled.
My React app renders, and when I go to http://localhost:3001/api I receive the data I expect. Just the communication between React and Node.js is somehow broken.
Here are my Docker files and env files:
.env:
DB_HOST=localhost
DB_USER=root
DB_PASSWORD=123456
DB_NAME=testdb
DB_PORT=3306
MYSQLDB_USER=root
MYSQLDB_ROOT_PASSWORD=123456
MYSQLDB_DATABASE=testdb
MYSQLDB_LOCAL_PORT=3306
MYSQLDB_DOCKER_PORT=3306
NODE_LOCAL_PORT=3001
NODE_DOCKER_PORT=3001
CLIENT_ORIGIN=http://127.0.0.1:3000
CLIENT_API_BASE_URL=http://127.0.0.1:3001/api
REACT_LOCAL_PORT=3000
REACT_DOCKER_PORT=80
Dockerfile for React:
FROM node:14.17.0 as build-stage
WORKDIR /frontend
COPY package.json .
RUN npm install
COPY . .
ARG REACT_APP_API_BASE_URL
ENV REACT_APP_API_BASE_URL=$REACT_APP_API_BASE_URL
RUN npm run build
FROM nginx:1.17.0-alpine
COPY --from=build-stage /frontend/build /usr/share/nginx/html
EXPOSE 80
CMD nginx -g 'daemon off;'
Dockerfile for Node.js:
FROM node:14.17.0
WORKDIR /
COPY package.json .
RUN npm install
COPY . .
EXPOSE 3001
CMD [ "node", "server.js" ]
docker-compose.yml:
version: '3.8'
services:
  mysqldb:
    image: mysql
    restart: unless-stopped
    env_file: ./.env
    environment:
      - MYSQL_ROOT_PASSWORD=$MYSQLDB_ROOT_PASSWORD
      - MYSQL_DATABASE=$MYSQLDB_DATABASE
    ports:
      - $MYSQLDB_LOCAL_PORT:$MYSQLDB_DOCKER_PORT
    volumes:
      - db:/var/lib/mysql
    networks:
      - backend
  server-api:
    depends_on:
      - mysqldb
    build: ./
    restart: unless-stopped
    env_file: ./.env
    ports:
      - $NODE_LOCAL_PORT:$NODE_DOCKER_PORT
    environment:
      - DB_HOST=mysqldb
      - DB_USER=$MYSQLDB_USER
      - DB_PASSWORD=$MYSQLDB_ROOT_PASSWORD
      - DB_NAME=$MYSQLDB_DATABASE
      - DB_PORT=$MYSQLDB_DOCKER_PORT
      - CLIENT_ORIGIN=$CLIENT_ORIGIN
    networks:
      - backend
      - frontend
  frontend-ui:
    depends_on:
      - server-api
    build:
      context: ./frontend
      args:
        - REACT_APP_API_BASE_URL=$CLIENT_API_BASE_URL
    ports:
      - $REACT_LOCAL_PORT:$REACT_DOCKER_PORT
    networks:
      - frontend
volumes:
  db:
networks:
  backend:
  frontend:
My project folder structure is a bit weird, as my server and its files (node_modules, package.json, ...) are in the root, which is also where docker-compose.yml, .env, and the Dockerfile for the server are located.
The React app and frontend are in the /frontend folder, where the Dockerfile for React is also located.
In React I call fetch("http://localhost:3001/api").
The server is created with Express:
const express = require('express');
const cors = require('cors');
const server = express();
var mysql = require('mysql2');
require("dotenv").config();
const port = 3001
server.use(express.static('public'));
var corsOptions = {
  origin: "http://127.0.0.1:3000"
}
server.use(cors(corsOptions));
var con = mysql.createConnection({
  host: process.env.DB_HOST,
  port: process.env.DB_PORT,
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD
});
server.get('/api', async (req, res) => {
  console.log("START");
  con.connect(function (err) {
    if (err) throw err;
    console.log("connected !");
    con.query("use testdb;", function (err, result, fields) {
      if (err) throw err;
      console.log(result);
    });
    con.query("select * from records;", function (err, result, fields) {
      if (err) throw err;
      res.send(result);
    });
  });
});
server.listen(port, () => {
  console.log(`Server listening on port ${port}`)
})
I created this by following this tutorial.
Thanks for any help.
Change this: origin: "http://127.0.0.1:3000"
to this: origin: "http://localhost:3000"
The browser compares the page's origin against the Access-Control-Allow-Origin header as an exact string, and localhost and 127.0.0.1 count as different origins even though they resolve to the same machine.
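If you want the API to accept both spellings of the local origin, the cors package also accepts an array of origins; a minimal sketch:
var corsOptions = {
  // allow either form of the local origin
  origin: ["http://localhost:3000", "http://127.0.0.1:3000"]
};
server.use(cors(corsOptions));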

localhost didn't send any data in posts service in Kubernetes

I have been working on Microservices with Node JS and React (a course from Udemy), but unfortunately I'm stuck on a Kubernetes problem.
I have created a posts-srv service.
posts-srv.yaml
apiVersion: v1
kind: Service
metadata:
  name: posts-srv
spec:
  type: NodePort
  selector:
    app: posts
  ports:
    - name: posts
      protocol: TCP
      port: 4000
      targetPort: 4000
posts.js (index.js)
const { randomBytes } = require('crypto');
const express = require('express')
const cors = require('cors')
const axios = require('axios')
const app = express();
app.use(express.json())
app.use(cors())
const posts = {};
app.get('/test', (req, res) => {
  res.status(200).json({
    success: true
  })
})
app.get('/posts', (req, res) => {
  res.send(posts)
})
app.post('/posts', async (req, res) => {
  console.log("app.post('/posts',(req,res)=> ")
  const id = randomBytes(4).toString('hex');
  const { title } = req.body;
  posts[id] = {
    id, title
  };
  await axios.post('http://localhost:4005/events', {
    type: 'PostCreated',
    data: {
      id, title
    }
  });
  res.status(201).send(posts[id])
})
app.post('/events', (req, res) => {
  console.log("received event", req.body.type);
  res.send({});
})
app.listen(4000, () => {
  console.log("server started for posts on 4000");
})
and the Dockerfile is
FROM node:alpine
WORKDIR /app
COPY package.json ./
RUN npm install
COPY ./ ./
CMD ["npm","start"]
Then I used the command kubectl apply -f posts-srv.yaml and the service was successfully created, but it is not accessible from my computer through the browser.
Service details:
Name:                     posts-srv
Namespace:                default
Labels:                   <none>
Annotations:              <none>
Selector:                 app=posts
Type:                     NodePort
IP Families:              <none>
IP:                       10.108.229.174
IPs:                      <none>
Port:                     posts  4001/TCP
TargetPort:               4001/TCP
NodePort:                 posts  30095/TCP
Endpoints:                <none>
External Traffic Policy:  Cluster
I have accessed it through localhost:30095, but I am getting the same error. Please suggest some solutions.
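One thing the describe output shows is Endpoints: <none>, meaning the service's selector app: posts matches no running pods, so there is nothing behind the NodePort. A minimal sketch of a Deployment whose pod labels satisfy that selector (the image name is a placeholder):
apiVersion: apps/v1
kind: Deployment
metadata:
  name: posts-depl
spec:
  replicas: 1
  selector:
    matchLabels:
      app: posts
  template:
    metadata:
      labels:
        app: posts
    spec:
      containers:
        - name: posts
          image: your-dockerhub-id/posts   # placeholder image
          ports:
            - containerPort: 4000
Also note the describe output shows port 4001 while the app listens on 4000; the service's targetPort must match the port the container actually binds.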

Dockerfile not working on backend with React

I'm trying to run a React app with two Node servers, one for the frontend and one for the backend, connected to a MySQL database.
I'm trying to use Docker for the containers, and I managed to get the database and the frontend server up. However, when the backend server is fired up, it seems like it doesn't acknowledge the Dockerfile:
node_server | npm WARN exec The following package was not found and will be installed: nodemon
node_server | Usage: nodemon [nodemon options] [script.js] [args]
node_server |
node_server | See "nodemon --help" for more.
node_server |
node_server exited with code 0
Dockerfile - client:
FROM node:latest
RUN mkdir -p /usr/src/app
WORKDIR /usr/scr/app
EXPOSE 3000
COPY package.json .
RUN npm install express body-parser nano nodemon cors
COPY . .
Dockerfile - server:
FROM node:latest
RUN mkdir -p /usr/src/app
WORKDIR /usr/src/app
RUN npm init -y
RUN npm install express body-parser nano nodemon cors
EXPOSE 5000
CMD ["npx", "nodemon", "src/server.js"]
docker-compose.yml:
version: '3'
services:
  backend:
    build:
      context: ./server
      dockerfile: ./Dockerfile
    depends_on:
      - mysql
    container_name: node_server
    image:
      raff/node_server
    ports:
      - "5000:5000"
    volumes:
      - "./server:/usr/src/app"
  frontend:
    build:
      context: ./client
      dockerfile: ./Dockerfile
    container_name: node_client
    image:
      raff/node_client
    ports:
      - "3000:3000"
    volumes:
      - "./client:/usr/src/app"
  mysql:
    image: mysql:5.7.31
    container_name: db
    ports:
      - "3306:3306"
    environment:
      MYSQL_ROOT_PASSWORD: admin
      MYSQL_DATABASE: assignment
The server side is not done yet, but I don't believe it's causing this error.
Server.js:
"use strict";
const path = require("path");
const express = require("express");
const app = express();
const bodyParser = require("body-parser");
app.use(bodyParser.urlencoded({ extended: true }));
app.use(express.json());
const mysql = require("mysql");
let con = mysql.createConnection({
  host: "mysql",
  port: "3306",
  user: "root",
  password: "admin",
});
const PORT = 5000;
const HOST = "0.0.0.0";
app.post("/posting", (req, res) => {
  var topic = req.body.param1;
  var data = req.body.param2;
  sql_insertion(topic, data);
});
// Helper
const panic = (err) => console.error(err);
// Connect to database
con.connect((err) => {
  if (err) {
    panic(err);
  }
  console.log("Connected!");
  con.query("CREATE DATABASE IF NOT EXISTS assignment", (err, result) => {
    if (err) {
      panic(err);
    } else {
      console.log("Database created!");
    }
  });
});
// select database
con.query("use assignment", (err, result) => {
  if (err) {
    panic(err);
  }
});
// Create table
let table =
  "CREATE TABLE IF NOT EXISTS posts (ID int NOT NULL AUTO_INCREMENT, Topic varchar(255), Data varchar(255), Timestamp varchar(255), PRIMARY KEY(ID));";
con.query(table, (err) => {
  if (err) {
    panic(err);
  } else {
    console.log("Table created!");
  }
});
app.get("*", (req, res) => {
  res.sendFile(path.join(__dirname, "client/build", "index.html"));
});
app.listen(PORT, HOST);
console.log("up!");
Modify this line:
CMD ["npx", "nodemon", "src/server.js"]
to:
CMD ["npx", "nodemon", "--exec", "node src/server.js"]
Though putting the command in package.json under the scripts section is better.
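A minimal sketch of that scripts approach (assuming nodemon is listed as a dependency in package.json):
package.json (excerpt):
"scripts": {
  "start": "nodemon src/server.js"
}
The Dockerfile can then end with CMD ["npm", "start"] instead.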
Your volumes: declarations are hiding everything that's in the image, including its node_modules directory. That's not normally required, and you should be able to trim the backend: container definition down to
backend:
  build: ./server           # default `dockerfile:` location
  depends_on:
    - mysql
  image: raff/node_server   # only if you plan to `docker-compose push`
  ports:
    - "5000:5000"
The image then contains a fixed copy of the application, so there's no particular need to use nodemon; just run the application directly.
FROM node:latest
# WORKDIR also creates the directory
WORKDIR /usr/src/app
COPY package.json package-lock.json ./
# do not `npm install` unmanaged packages
RUN npm ci
# CHECK: `.dockerignore` must include `node_modules`
COPY . .
EXPOSE 5000
CMD ["node", "src/server.js"]
This apparently isn't a problem for your frontend application, because there's a typo in WORKDIR -- the image installs and runs its code in /usr/scr/app but the bind mount is over /usr/src/app, so the actual application's /usr/scr/app/node_modules directory isn't hidden.
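For the CHECK note above, a minimal .dockerignore might look like this (an assumption; adjust to your repository):
node_modules
npm-debug.log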

Node.js Docker deployment on Google Cloud Run

I have been trying to deploy my Node.js app on Google Cloud Run over the last two days. I use cloudbuild.yaml, and the whole build and deployment process completes without error, but I somehow can't open the app after deployment.
Before, I was getting an error coming from the errorHandler middleware, but it went away after I set NODE_ENV=production. I still get this error.
The app works fine without error on my local machine with Docker and MongoDB.
Here is my cloudbuild.yaml
steps:
  - name: 'docker/compose:1.19.0'
    args: ['up', '-d']
  - name: 'gcr.io/cloud-builders/docker'
    args: ['build', '-t', 'gcr.io/$PROJECT_ID/ecommerce:latest', '-t', 'gcr.io/$PROJECT_ID/ecommerce:$COMMIT_SHA', '-t', 'gcr.io/$PROJECT_ID/ecommerce:$BUILD_ID', '.']
    id: 'build-image-ecommerce'
    timeout: 500s
  - name: 'gcr.io/cloud-builders/docker'
    args: ['push', 'gcr.io/$PROJECT_ID/ecommerce:$COMMIT_SHA']
    id: 'push-image-to-container-registry'
    waitFor: ['build-image-ecommerce']
  - name: 'gcr.io/cloud-builders/gcloud'
    args:
      - 'run'
      - 'deploy'
      - 'ecommerce'
      - '--image'
      - 'gcr.io/$PROJECT_ID/ecommerce:$COMMIT_SHA'
      - '--region'
      - 'us-east4'
      - '--platform'
      - 'managed'
      - '--allow-unauthenticated'
    waitFor: ['push-image-to-container-registry']
    id: 'deploy-to-cloud-run'
images:
  - 'gcr.io/$PROJECT_ID/ecommerce:latest'
  - 'gcr.io/$PROJECT_ID/ecommerce:$COMMIT_SHA'
  - 'gcr.io/$PROJECT_ID/ecommerce:$BUILD_ID'
Here is my server.js
const express = require('express')
const dotenv = require('dotenv')
const morgan = require('morgan')
const colors = require('colors')
const connectDB = require('./config/db')
const authRoute = require('./Routes/authRoute')
const productRoute = require('./Routes/productRoute')
const orderRoute = require('./Routes/orderRoute')
const { errorHandler, notFound } = require('./Middlewares/errorMiddleware')
// DB Connection
connectDB()
dotenv.config()
const app = express()
if (process.env.NODE_ENV === 'development') {
app.use(morgan('dev'))
}
app.use(express.json())
//Routes
app.use('/api/users', authRoute)
app.use('/api/products', productRoute)
app.use('/api/orders', orderRoute)
// Error Middlewares
app.use(errorHandler)
app.use(notFound)
const PORT = process.env.PORT || 5000
app.listen(PORT, console.log(`App is running in ${process.env.NODE_ENV} on port ${PORT}`.blue.underline))
Here is my docker-compose.yml, a very simple setup:
version: '3'
services:
  app:
    container_name: ecommerceApi
    restart: always
    build: .
    ports:
      - '80:5000'
    links:
      - mongodb
  mongodb:
    container_name: mongodb
    image: mongo
    ports:
      - '27017:27017'
    environment:
      - MONGO_INITDB_ROOT_USERNAME=admin
      - MONGO_INITDB_ROOT_PASSWORD=password
Here is the Dockerfile:
FROM node:14
WORKDIR /usr/src/app
COPY package*.json ./
# Install production dependencies.
RUN npm install --only=production
COPY . ./
EXPOSE 5000
CMD ["npm", "start"]
I am using MongoDB as the database (I do have another one as well). How do I migrate some data into the DB while working with Docker, i.e. run a command like npm run data:import to import data into the DB? I tried some approaches but they didn't work for me. I could docker exec -it into the container and run the command, but in production this process needs to be automated.
Please help with solving these issues, thanks in advance!
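For the local compose setup, a one-off seed command can be run against the running stack; a minimal sketch, assuming data:import is defined in package.json:
# run the import script in a throwaway container on the compose network
docker-compose run --rm app npm run data:import
Note that Cloud Run does not run docker-compose, so the mongodb service from the compose file will not exist there; the deployed container has to reach a MongoDB hosted elsewhere (for example a managed instance), and any seed step would have to run against that database, e.g. as an additional Cloud Build step.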

I am running 2 images with Docker Compose and I am having trouble hitting localhost from my Mac. I am exposing port 3000. Am I missing something?

I am building a Node/Mongo app using Docker, and I am having trouble hitting my localhost from my host computer running macOS when I run docker-compose up. Using Postman or curl -i localhost:3000 returns nothing. I have also tried inspecting the container and connecting with that IP. What am I doing wrong? Thanks!
docker-compose.yml:
version: "2"
services:
  web:
    build: .
    ports:
      - "3000:3000"
    volumes:
      - .:/app
    env_file:
      - todoListDocker.env
    links:
      - mongo
  mongo:
    image: mongo
    environment:
      - MONGO_INITDB_ROOT_USERNAME=root
      - MONGO_INITDB_ROOT_PASSWORD=tWwp3Fm4hZUsaLw4
    volumes:
      - mongo:/data/db
    ports:
      - "27017:27017"
    env_file:
      - todoListDocker.env
volumes:
  mongo:
Dockerfile:
FROM node:boron
MAINTAINER Clinton Medbery <clintomed@gmail.com>
RUN ["apt-get", "update"]
RUN ["apt-get", "install", "-y", "vim"]
RUN mkdir -p /app
WORKDIR /app
COPY package.json /app
RUN npm install
COPY . /app
EXPOSE 3000
CMD ["npm", "start"]
Index.js:
const express = require('express');
const morgan = require('morgan');
const bodyParser = require('body-parser');
const mongoose = require('mongoose');
var app = express();
var router = require('./services/router');
//Use ENV Variables
console.log("Connecting to Mongo");
mongoose.connect('mongodb://root:tWwp3Fm4hZUsaLw4@mongo:27017');
// mongoose.connect('mongodb://localhost:todoList/todoList');
console.log("Connected to Mongo");
app.use(morgan('combined'));
app.use(bodyParser.json());
app.use('/v1', router);
var PORT = process.env.PORT || 3000;
var HOST = process.env.HOST || '127.0.0.1';
app.get('/hello', function (req, res) {
  console.log("Hello World");
  res.send({hello:'Hello World!'});
});
console.log('Listening on port ', HOST, PORT);
app.listen(PORT, HOST);
Your express server is listening on localhost port 3000.
var PORT = process.env.PORT || 3000;
var HOST = process.env.HOST || '127.0.0.1';
This will bind to the container's localhost. That is independent from the Mac's localhost, and from any other container's localhost. You cannot reach it from outside the container.
You need to bind to the external interface of the container, which will let the Mac, or other containers, connect to the port. You can use the special address 0.0.0.0 for this.
var PORT = process.env.PORT || 3000;
var HOST = process.env.HOST || '0.0.0.0';
Now that the express server is reachable from the Mac, the port binding 3000:3000 will work. By default, that will be bound on all of the Mac's network interfaces, but you can limit it to the Mac's localhost if you prefer.
ports:
  - "127.0.0.1:3000:3000"
