Dockerfile:
FROM node:lts-slim AS base
# Install dependencies
RUN apt-get update \
&& apt-get install --no-install-recommends -y openssl
# Create app directory
WORKDIR /usr/src
FROM base AS builder
# Files required by npm install
COPY package*.json ./
# Files required by prisma
COPY prisma ./prisma
# Install app dependencies
RUN npm ci
# Bundle app source
COPY . .
# Build app
RUN npm install -g prisma --force
RUN prisma generate
RUN npm run build \
&& npm prune --omit=dev
FROM base AS runner
# Copy from build image
COPY --from=builder /usr/src/node_modules ./node_modules
COPY --from=builder /usr/src/dist ./dist
COPY --from=builder /usr/src/package*.json ./
COPY prisma ./prisma
RUN apt-get update \
&& apt-get install --no-install-recommends -y procps openssl
RUN chown -R node /usr/src/node_modules
RUN chown -R node /usr/src/dist
RUN chown -R node /usr/src/package*.json
USER node
# Start the app
EXPOSE 80
CMD ["node", "dist/index.js"]
docker-compose.yml:
version: '3'
services:
  mysql:
    image: mysql:latest
    container_name: mysql
    ports:
      - 3306:3306
  bot:
    container_name: bot
    build:
      context: .
    depends_on:
      - mysql
docker-compose.prod.yml:
version: '3'
services:
  mysql:
    volumes:
      - ./mysql:/var/lib/mysql
    environment:
      MYSQL_ROOT_PASSWORD: '123123'
      MYSQL_DATABASE: 'test'
      MYSQL_USER: 'test'
      MYSQL_PASSWORD: '123123'
  bot:
    ports:
      - "3000:80"
    env_file:
      - docker-compose.prod.bot.env
volumes:
  mysql:
For some reason, after running these commands:
docker-compose -f docker-compose.yml -f docker-compose.prod.yml run bot npx prisma migrate deploy
docker-compose -f docker-compose.yml -f docker-compose.prod.yml up
I'm getting an error when the bot container starts up: it can't find a node module.
I'm using Ubuntu 20.04 to run Docker. I installed Docker, and for some reason only this part is not working; when I run the build on a normal machine, without Docker, the build works fine.
The only problem is with Docker.
error:
bot | node:internal/modules/cjs/loader:936
bot | throw err;
bot | ^
bot |
bot | Error: Cannot find module 'envalid'
bot | Require stack:
bot | - /usr/src/dist/config.js
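One thing worth checking first (a guess, not a confirmed diagnosis): the npm prune --omit=dev step in the builder stage strips everything listed under devDependencies before node_modules is copied into the runner, so if envalid is declared there rather than under dependencies, this is exactly the error that would appear. A quick sketch to verify, reusing the compose files above:
# Is the module still present in the pruned node_modules?
docker-compose -f docker-compose.yml -f docker-compose.prod.yml \
  run --rm bot ls node_modules/envalid
# If it's missing, check which section of package.json declares it
grep -n "envalid" package.json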
Related
I'm trying to set up a ProfitTrailer Docker container through Docker Compose.
I've tested the Docker image using docker run, which launches the container just fine.
When using docker-compose up instead, PM2 (a NodeJS process manager) fails to find the configuration file. I believe this happens because the container is unable to write to the shared volume.
Below is my Dockerfile:
FROM eclipse-temurin:8-jdk-alpine
ARG PT_VERSION=2.5.32
ENV PT_VERSION ${PT_VERSION}
RUN mkdir -p /app/
# install tools
RUN apk update && apk add unzip curl
# install nodejs
RUN apk add --update nodejs npm
RUN npm install pm2@latest -g
# install profittrailer
RUN curl https://github.com/taniman/profit-trailer/releases/download/$PT_VERSION/ProfitTrailer-$PT_VERSION.zip -L -o /app/profittrailer.zip
RUN unzip /app/profittrailer.zip -d /app/ && mv /app/ProfitTrailer-$PT_VERSION /app/ProfitTrailer
WORKDIR /app/ProfitTrailer
RUN chmod +x ProfitTrailer.jar
VOLUME /app/ProfitTrailer
CMD pm2 start pm2-ProfitTrailer.json && pm2 log 0
EXPOSE 8081
And the docker-compose.yml file:
version: '3'
services:
  profittrailer:
    container_name: profittrailer
    image: "doccie/profittrailer:latest"
    volumes:
      - /home/[user]/.profittrailer:/app/ProfitTrailer
    restart: unless-stopped
    ports:
      - "8081:8081"
The output logged (inside the container) is:
profittrailer | [PM2] Spawning PM2 daemon with pm2_home=/root/.pm2
profittrailer | [PM2] PM2 Successfully daemonized
profittrailer | [PM2][ERROR] File pm2-ProfitTrailer.json not found
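One way to test that theory (a sketch; it checks whether the bind mount hides the image's files, since mounting an empty host directory over /app/ProfitTrailer would make pm2-ProfitTrailer.json disappear):
# What the image ships at that path, with no volume mounted
docker run --rm doccie/profittrailer:latest ls /app/ProfitTrailer
# The same path with the host directory mounted over it
docker run --rm -v /home/[user]/.profittrailer:/app/ProfitTrailer \
  doccie/profittrailer:latest ls /app/ProfitTrailer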
I managed to get it working by not sharing the entire app directory, but limiting it to the folders and files I needed for configuration editing and migration.
version: '3'
services:
  profittrailer:
    container_name: profittrailer
    image: "doccie/profittrailer:latest"
    volumes:
      - /home/[USERNAME]/.profittrailer/data:/app/ProfitTrailer/data
      - /home/[USERNAME]/.profittrailer/application.properties:/app/ProfitTrailer/application.properties
    restart: unless-stopped
    ports:
      - "8081:8081"
I needed to create the application.properties file first, since otherwise Docker would create it as a directory instead.
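A minimal sketch of that preparation step (paths as in the compose file above):
mkdir -p /home/[USERNAME]/.profittrailer/data
# The file must exist before 'docker-compose up', or Docker creates a directory at that path
touch /home/[USERNAME]/.profittrailer/application.properties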
I am a newbie to Docker, and while going through an online course I stumbled into a problem. While trying to build separate dev, prod and test images, only my dev one seems to build correctly.
My prod and test images build with their tag as "<none>", even though I run the build with the following command:
sudo docker build -t ultimatenode:test --target test .
Also, my prod image is supposed to be smaller in size, because I removed the original node_modules and ./test folder, but there seems to be some mistake on my part.
Would anyone be kind enough to check the problem in the following Dockerfile?
FROM node:16 as base
EXPOSE 80
WORKDIR /app
COPY package*.json ./
RUN npm config list
RUN npm ci \
&& npm cache clean --force
ENV PATH /app/node_modules/.bin:$PATH
CMD ["node", "server.js"]
#DEVELOPMENT
FROM base as dev
ENV NODE_ENV=development
# NOTE: these apt dependencies are only needed
# for testing. they shouldn't be in production
RUN apt-get update -qq \
&& apt-get install -qy --no-install-recommends \
bzip2 \
ca-certificates \
curl \
libfontconfig \
&& rm -rf /var/lib/apt/lists/*
RUN npm config list
RUN npm install --only=development \
&& npm cache clean --force
COPY . /app
CMD ["nodemon", "server.js"]
#TEST
FROM dev as test
COPY . .
RUN npm audit
#PREPROD
FROM test as preprod
# Removing unnecessary folders
RUN rm -rf ./tests && rm -rf ./node_modules
FROM base as prod
COPY --from=pre-prod /app /app
WORKDIR /app
HEALTHCHECK CMD curl http://127.0.0.1/ || exit 1
CMD ["node", "server.js"]
Also, here is my docker-compose.yml:
version: '2.4'
services:
  redis:
    image: redis:alpine
  db:
    image: postgres:9.6
    environment:
      - POSTGRES_HOST_AUTH_METHOD=trust
    volumes:
      - db-data:/var/lib/postgresql/data
  vote:
    image: bretfisher/examplevotingapp_vote
    ports:
      - '5000:80'
    depends_on:
      - redis
  result:
    build:
      context: .
      target: dev
    ports:
      - '5001:80'
    volumes:
      - .:/app
    environment:
      - NODE_ENV=development
    depends_on:
      - db
  worker:
    image: bretfisher/examplevotingapp_worker
    depends_on:
      - redis
      - db
volumes:
  db-data:
I'm new to docker so I'm sure I'm missing something.
I'm trying to create a container with a React app. I'm using Docker on a Windows 10 machine.
This is my Dockerfile:
FROM node:latest
EXPOSE 3000
ENV PATH /app/node_modules/.bin:$PATH
# install app dependencies
COPY package.json ./
COPY package-lock.json ./
RUN npm install --silent
RUN npm install react-scripts@3.4.1 -g --silent
COPY . /app
WORKDIR /app
CMD ["npm","run", "start"]
And this is my docker-compose.yml:
version: '3.7'
services:
  sample:
    container_name: prova-react1
    build:
      context: .
      dockerfile: Dockerfile
    volumes:
      - '.:/app'
      - '/app/node_modules'
    ports:
      - 3000:3000
    environment:
      - CHOKIDAR_USEPOLLING=true
      - COMPOSE_CONVERT_WINDOWS_PATHS=1
When I start the container, everything works fine in the browser. But when I go back to Visual Studio Code, modify the files, and save, nothing happens in the container or on the website.
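A couple of sanity checks that might narrow this down (a sketch; src is the usual create-react-app layout and an assumption here):
# Do host edits show up inside the container at all?
docker-compose exec sample ls -l /app/src
# Did the polling variable reach the container's environment?
docker-compose exec sample env | grep CHOKIDAR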
I'm trying to connect my Python-Flask app with a Postgres database in a docker environment. I am using a docker-compose file to build my web and db environment.
However, I am getting the following error:
psql: could not connect to server: No such file or directory
Is the server running locally and accepting
connections on Unix domain socket "/var/run/postgresql/.s.PGSQL.5432"?
Here is my Dockerfile:
FROM ubuntu:16.04 as base
RUN apt-get update -y && apt-get install -y python3-pip python3-dev postgresql libpq-dev libffi-dev jq
ENV LC_ALL=C.UTF-8 \
LANG=C.UTF-8
ENV FLASK_APP=manage.py \
FLASK_ENV=development \
APP_SETTINGS=config.DevelopmentConfig \
DATABASE_URL=postgresql://user:pw@postgres/database
COPY . /app
WORKDIR /app
RUN pip3 install -r requirements.txt
FROM base as development
EXPOSE 5000
CMD ["bash"]
Here is my docker-compose file:
version: "3.6"
services:
  development_default: &DEVELOPMENT_DEFAULT
    build:
      context: .
      target: development
    working_dir: /app
    volumes:
      - .:/app
    environment:
      - GOOGLE_CLIENT_ID=none
      - GOOGLE_CLIENT_SECRET=none
  web:
    <<: *DEVELOPMENT_DEFAULT
    ports:
      - "5000:5000"
    depends_on:
      - db
    command: flask run --host=0.0.0.0
  db:
    image: postgres:10.6
    environment:
      - POSTGRES_USER=user
      - POSTGRES_PASSWORD=db
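The Unix-socket message means the client never attempted a TCP connection; it looked for a server inside its own container rather than the db service. Two things stand out in the files above (observations, not a verified fix): the compose service is named db while DATABASE_URL points at a host called postgres, and psql falls back to the local socket unless a host is given explicitly:
# Reach Postgres over the network, addressing it by its compose service name
docker-compose run --rm web psql -h db -U user -d postgres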
I'm trying to figure out how to build static files with Node and webpack during the production build, and mount them as a volume that is served to the Django webapp and used for Django's collectstatic.
I've got all services separated into their own containers, and each has its own Dockerfile.
The current problem is that I can't access the files generated by webpack from inside the Django app. The question is: can I achieve this using separate Dockerfiles for Node and Django, or should this be done in one Dockerfile?
Node Dockerfile
FROM node:alpine
WORKDIR ./code
COPY ./package.json ./yarn.lock /code/
COPY ./webpack.base.config.js ./webpack.prod.config.js /code/
RUN yarn install --production
ADD static /code/static/
RUN yarn run prod
Python app Dockerfile
FROM python:3.6.2-alpine
ENV PYTHONUNBUFFERED 1
RUN apk update \
&& apk add \
bash \
curl \
build-base \
postgresql-dev \
postgresql-client \
libpq \
tzdata
WORKDIR /code
ADD requirements.txt /code
RUN pip install --upgrade pip
RUN pip install -r requirements.txt
ADD ./ /code
ENV TZ=Europe/London
EXPOSE 8000
Docker Compose production
version: '3'
services:
  frontend:
    build: docker/services/node
    volumes:
      - static_files:/code/static
  webapp:
    build: .
    env_file:
      - .env
    expose:
      - "8000"
    volumes:
      - ./public:/code/public/
      - static_files:/code/static
    command: ["./docker/scripts/wait-for-it.sh", "database:5432", "--", "./docker/services/webapp/run-prod.sh"]
    depends_on:
      - frontend
      - database
  database:
    image: postgres
    env_file:
      - .env
    expose:
      - "5432"
    volumes:
      - postgres_data:/var/lib/postgresql/data/
  nginx:
    build: docker/services/nginx
    env_file:
      - .env
    ports:
      - "80:80"
    volumes:
      - ./public:/www/public/
    depends_on:
      - webapp
    healthcheck:
      test: ["CMD", "curl", "-f", "http://0.0.0.0:8000"]
      interval: 30s
      timeout: 10s
      retries: 3
volumes:
  postgres_data:
  static_files:
You can use a multi-stage build for this. In your case, the first stage would generate your static files, while the second stage would package your Python app and copy the static files in from the Node.js stage. The resulting image will only contain the Python dependencies.
Here is the multi-stage Dockerfile; the documentation can be found at https://docs.docker.com/develop/develop-images/multistage-build/#use-multi-stage-builds
# static files build stage, named "static" for easy reference
FROM node:alpine as static
WORKDIR /code
COPY ./package.json ./yarn.lock /code/
COPY ./webpack.base.config.js ./webpack.prod.config.js /code/
RUN yarn install --production
ADD static /code/static/
RUN yarn run prod
# python app package stage, named "final" because it's final :)
FROM python:3.6.2-alpine as final
ENV PYTHONUNBUFFERED 1
RUN apk update \
&& apk add \
bash \
curl \
build-base \
postgresql-dev \
postgresql-client \
libpq \
tzdata
WORKDIR /code
ADD requirements.txt /code
RUN pip install --upgrade pip
RUN pip install -r requirements.txt
ADD ./ /code
# This next command has access to the file contents of the previous stage.
# Ideally you should rewrite the paths to copy the static files from where they have been generated to where they should end up
# The 'from' clause is used to reference the first build stage
COPY --from=static /code/static/path/to/static/files /code/desired/location
ENV TZ=Europe/London
EXPOSE 8000
You can then use this single image in your docker-compose file.
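For instance, the frontend service and the static_files volume in the compose file above could then be dropped, since the image already contains the generated assets (a sketch of the webapp service only; the other services stay unchanged):
version: '3'
services:
  webapp:
    build: .   # the multi-stage Dockerfile above
    env_file:
      - .env
    expose:
      - "8000"
    volumes:
      - ./public:/code/public/
    command: ["./docker/scripts/wait-for-it.sh", "database:5432", "--", "./docker/services/webapp/run-prod.sh"]
    depends_on:
      - database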