add basic docker support

MarcZierle 2022-10-31 09:23:18 +01:00
parent 06e90ec2d0
commit 3808c7d4d1
11 changed files with 144 additions and 64 deletions

.env.dist

@ -18,13 +18,16 @@ S3_ACCESS_SECRET=
S3_BUCKET_NAME=
S3_ENDPOINT_URL=
S3_ROOT_USER=
S3_ROOT_PASSWORD=
# Celery Message Broker
# e.g.: redis://127.0.0.1:6378/1
MSG_BROKER_URL=
MSG_BROKER_PREFIX=
# Celery task work directory to store temporary files
# use ./worker folder as absolute path: /home/user/app/worker
# use ./services_data/worker folder as absolute path: /home/user/app/worker
TASK_WORKER_DIR=
# Channels Layers Backend (Websocket)

.gitignore

@ -4,3 +4,7 @@ static/*
*/migrations/*
env/
static_bk/
**/.env
services_data/*/*
!services_data/*/.gitkeep

.gitmodules

@ -1,3 +0,0 @@
[submodule "scrapers/news-scrapers"]
	path = scrapers/news-scrapers
	url = https://github.com/MarcZierle/news-scrapers

Dockerfile

@ -0,0 +1,34 @@
FROM python:3.10.0-bullseye AS builder

# create user and group
RUN mkdir -p /home/app
RUN adduser app && adduser app app
WORKDIR /home/app

# install dependencies
RUN apt-get update && apt-get upgrade -y && \
    apt-get install -y postgresql gcc python3-dev \
    libgl1
#musl-dev libxml2-dev libxslt-dev

COPY ./requirements.txt .
RUN python3 -m pip install --upgrade pip && \
    pip3 install -r requirements.txt && \
    pip3 install psycopg2==2.9.4 && \
    pip3 install gunicorn==20.1.0

COPY . .
RUN chown -R app:app /home/app
USER app

FROM builder AS backend
# run gunicorn
CMD ["gunicorn", "config.wsgi:application", "--bind", "0.0.0.0:8000"]

FROM builder AS worker
# run celery worker
CMD ["celery", "-A", "config", "worker", "-l", "info"]

FROM builder AS websocket
# run daphne server
CMD ["daphne", "-b", "0.0.0.0", "-p", "8001", "config.asgi:application"]

README.md

@ -0,0 +1,25 @@
# Photolog Backend Services
Requires Docker and docker-compose.
## Usage
First, make sure all secrets in the `.env` file are filled in (see `.env.dist` for the available variables).
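A copy of the template is a reasonable starting point (assuming `.env.dist` sits in the repository root):
`cp .env.dist .env`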
### Django REST API Backend
`docker-compose up -d backend`
### Celery Worker Node
`docker-compose up -d worker`
### Daphne Websocket server
`docker-compose up -d websocket`
### Redis Cache and Celery Message Broker
`docker-compose up -d cache`
### S3 Object Storage
`docker-compose up -d s3`
### PostgreSQL Database
`docker-compose up -d db`
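### All Services
All of the services above can also be built and started in one go; `--build` simply forces an image rebuild:
`docker-compose up -d --build`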

package-lock.json

@ -1,6 +0,0 @@
{
  "name": "config",
  "lockfileVersion": 2,
  "requires": true,
  "packages": {}
}

config/settings.py

@ -13,7 +13,6 @@ https://docs.djangoproject.com/en/3.2/ref/settings/
from pathlib import Path
import os
import environ
from macpath import join
env = environ.Env(
    DEBUG=(bool, False)
@ -33,9 +32,9 @@ SECRET_KEY = env('SECRET_KEY')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env('DEBUG')
ALLOWED_HOSTS = env('ALLOWED_HOSTS')
ALLOWED_HOSTS = env('ALLOWED_HOSTS').split(',')
CORS_ALLOWED_ORIGINS = env('ALLOWED_HOSTS')
CORS_ALLOWED_ORIGINS = [ 'https://' + url for url in env('ALLOWED_HOSTS').split(',')]
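# e.g. with a hypothetical ALLOWED_HOSTS=example.com,api.example.com in the .env file,
# ALLOWED_HOSTS becomes ['example.com', 'api.example.com'] and
# CORS_ALLOWED_ORIGINS becomes ['https://example.com', 'https://api.example.com']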
# Application definition

supervisord config for the Celery worker (file deleted)

@ -1,24 +0,0 @@
[program:celery_zierle_training_staging]
directory=/home/marc/www-staging/backend
user=www-data
numprocs=1
stdout_logfile=/var/log/celery/worker.log
stderr_logfile=/var/log/celery/worker.log
autostart=true
autorestart=true
startsecs=10
command=/home/marc/www-staging/backend/env/bin/python3 -m celery -A config worker --loglevel=INFO
; Need to wait for currently executing tasks to finish at shutdown.
; Increase this if you have very long running tasks.
stopwaitsecs = 60
; Causes supervisor to send the termination signal (SIGTERM) to the whole process group.
stopasgroup=true
; Set Celery priority higher than default (999)
; so, if rabbitmq is supervised, it will start first.
priority=1000

supervisord config for the Daphne websocket server (file deleted)

@ -1,26 +0,0 @@
[fcgi-program:ws_zierle_training_staging]
# TCP socket used by Nginx backend upstream
socket=tcp://localhost:8001
user=www-data
# Directory where your site's project files are located
directory=/home/marc/www-staging/backend
# Each process needs to have a separate socket file, so we use process_num
# Make sure to update "mysite.asgi" to match your project name
command=/home/marc/www-staging/backend/env/bin/python3 -m daphne -u /run/daphne/daphne%(process_num)d.sock --fd 0 --access-log - --proxy-headers config.asgi:application
# Number of processes to startup, roughly the number of CPUs you have
numprocs=2
# Give each process a unique name so they can be told apart
process_name=ws_zierle_training_staging%(process_num)d
# Automatically start and recover processes
autostart=true
autorestart=true
# Choose where you want your log to go
stdout_logfile=/home/marc/www-staging/logs/daphne.log
redirect_stderr=true

docker-compose.yml

@ -0,0 +1,74 @@
version: '3.8'

services:
  backend:
    build:
      context: .
      target: backend
    #restart: always
    ports:
      - "8000:8000"
    env_file:
      - .env

  worker:
    build:
      context: .
      target: worker
    restart: always
    env_file:
      - .env

  websocket:
    build:
      context: .
      target: websocket
    restart: always
    ports:
      - "8001:8001"
    env_file:
      - .env

  db:
    image: postgres:15-alpine
    restart: always
    ports:
      - "5432:5432"
    env_file:
      - .env
    environment:
      - POSTGRES_PASSWORD=${DB_PASSWORD}
      - POSTGRES_USER=${DB_USER}
      - POSTGRES_DB=${DB_NAME}
    volumes:
      - db:/var/lib/postgresql/data

  cache:
    image: redis:7-alpine
    restart: always
    ports:
      - "6379:6379"
    command: redis-server --save 60 1 --loglevel warning
    volumes:
      - cache:/data

  s3:
    image: quay.io/minio/minio
    restart: always
    ports:
      - "9000:9000"
      - "9001:9001"
    env_file:
      - .env
    environment:
      - MINIO_ROOT_USER=${S3_ACCESS_ID}
      - MINIO_ROOT_PASSWORD=${S3_ACCESS_SECRET}
      #- MINIO_SCHEME=http
    volumes:
      - s3:/data
    command: server /data --console-address ":9001"

volumes:
  db:
  s3:
  cache:
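The ${...} substitutions above require the referenced names to be present in the .env file. A minimal sketch with purely placeholder values (hypothetical, not real credentials):
    DB_NAME=photolog
    DB_USER=photolog
    DB_PASSWORD=changeme
    S3_ACCESS_ID=minioadmin
    S3_ACCESS_SECRET=changeme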

requirements.txt

@ -71,7 +71,7 @@ ruamel.yaml==0.17.21
ruamel.yaml.clib==0.2.6
s3transfer==0.6.0
scikit-image==0.19.1
scipy==1.7.3
scipy
service-identity==21.1.0
six==1.16.0
soupsieve==2.2.1