diff --git a/.env.dist b/.env.dist
index 16833b8..d90f5b9 100644
--- a/.env.dist
+++ b/.env.dist
@@ -18,13 +18,16 @@ S3_ACCESS_SECRET=
 S3_BUCKET_NAME=
 S3_ENDPOINT_URL=
+S3_ROOT_USER=
+S3_ROOT_PASSWORD=
+
 
 # Celery Message Broker
 # e.g.: redis://127.0.0.1:6378/1
 MSG_BROKER_URL=
 MSG_BROKER_PREFIX=
 
 # Celery task work directory to store temporary files
-# use ./worker folder as absolute path: /home/user/app/worker
+# use ./services_data/worker folder as absolute path: /home/user/app/worker
 TASK_WORKER_DIR=
 
 # Channels Layers Backend (Websocket)
diff --git a/.gitignore b/.gitignore
index 957fdeb..37f40c2 100755
--- a/.gitignore
+++ b/.gitignore
@@ -4,3 +4,7 @@ static/*
 */migrations/*
 env/
 static_bk/
+**/.env
+
+services_data/*/*
+!services_data/*/.gitkeep
diff --git a/.gitmodules b/.gitmodules
deleted file mode 100755
index c89c320..0000000
--- a/.gitmodules
+++ /dev/null
@@ -1,3 +0,0 @@
-[submodule "scrapers/news-scrapers"]
-	path = scrapers/news-scrapers
-	url = https://github.com/MarcZierle/news-scrapers
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..6d6b046
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,34 @@
+FROM python:3.10.0-bullseye AS builder
+
+# create user and group
+RUN mkdir -p /home/app
+RUN adduser app && adduser app app
+WORKDIR /home/app
+
+# install dependencies
+RUN apt-get update && apt-get upgrade -y && \
+    apt-get install -y postgresql gcc python3-dev \
+    libgl1
+    #musl-dev libxml2-dev libxslt-dev
+COPY ./requirements.txt .
+RUN python3 -m pip install --upgrade pip && \
+    pip3 install -r requirements.txt && \
+    pip3 install psycopg2==2.9.4 && \
+    pip3 install gunicorn==20.1.0
+
+COPY . .
+RUN chown -R app:app /home/app
+
+USER app
+
+FROM builder AS backend
+# run gunicorn
+CMD ["gunicorn", "config.wsgi:application", "--bind", "0.0.0.0:8000"]
+
+FROM builder AS worker
+# run celery worker
+CMD ["celery", "-A", "config", "worker", "-l", "info"]
+
+FROM builder AS websocket
+# run daphne server
+CMD ["daphne", "-b", "0.0.0.0", "-p", "8001", "config.asgi:application"]
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..a5ec620
--- /dev/null
+++ b/README.md
@@ -0,0 +1,25 @@
+# Photolog Backend Services
+
+requires docker and docker-compose
+
+## Usage
+
+First make sure to fill in all .env file related secrets (see .env.dist).
+
+### Django REST API Backend
+`docker-compose up -d backend`
+
+### Celery Worker Node
+`docker-compose up -d worker`
+
+### Daphne Websocket server
+`docker-compose up -d websocket`
+
+### Redis Cache and Celery Message Broker
+`docker-compose up -d cache`
+
+### S3 Object Storage
+`docker-compose up -d s3`
+
+### PostgreSQL Database
+`docker-compose up -d db`
diff --git a/config/package-lock.json b/config/package-lock.json
deleted file mode 100755
index 2f2f358..0000000
--- a/config/package-lock.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
-  "name": "config",
-  "lockfileVersion": 2,
-  "requires": true,
-  "packages": {}
-}
diff --git a/config/settings.py b/config/settings.py
index 286427f..a411569 100755
--- a/config/settings.py
+++ b/config/settings.py
@@ -13,7 +13,6 @@ https://docs.djangoproject.com/en/3.2/ref/settings/
 from pathlib import Path
 import os
 import environ
-from macpath import join
 
 env = environ.Env(
     DEBUG=(bool, False)
@@ -33,9 +32,9 @@ SECRET_KEY = env('SECRET_KEY')
 # SECURITY WARNING: don't run with debug turned on in production!
 DEBUG = env('DEBUG')
 
-ALLOWED_HOSTS = env('ALLOWED_HOSTS')
+ALLOWED_HOSTS = env('ALLOWED_HOSTS').split(',')
 
-CORS_ALLOWED_ORIGINS = env('ALLOWED_HOSTS')
+CORS_ALLOWED_ORIGINS = [ 'https://' + url for url in env('ALLOWED_HOSTS').split(',')]
 
 
 # Application definition
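For context on the settings.py change above: ALLOWED_HOSTS is now read as a comma-separated environment variable, and the CORS origins are derived from the same list by prefixing `https://`. A minimal sketch of what the new parsing produces, assuming a hypothetical ALLOWED_HOSTS value (this snippet is illustration only, not part of the patch):

```python
# Illustration only: behaviour of the new parsing for a hypothetical
# ALLOWED_HOSTS=example.com,api.example.com entry in .env.
import environ

env = environ.Env()

ALLOWED_HOSTS = env('ALLOWED_HOSTS').split(',')
# -> ['example.com', 'api.example.com']

CORS_ALLOWED_ORIGINS = ['https://' + url for url in env('ALLOWED_HOSTS').split(',')]
# -> ['https://example.com', 'https://api.example.com']

# django-environ can also do the splitting itself:
# ALLOWED_HOSTS = env.list('ALLOWED_HOSTS')
```

Note that any whitespace around the commas would end up in the host names, so the .env value should be written without spaces.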
diff --git a/config/supervisor/celery_supervisor.conf b/config/supervisor/celery_supervisor.conf
deleted file mode 100644
index 1153a54..0000000
--- a/config/supervisor/celery_supervisor.conf
+++ /dev/null
@@ -1,24 +0,0 @@
-[program:celery_zierle_training_staging]
-
-directory=/home/marc/www-staging/backend
-
-user=www-data
-numprocs=1
-stdout_logfile=/var/log/celery/worker.log
-stderr_logfile=/var/log/celery/worker.log
-autostart=true
-autorestart=true
-startsecs=10
-
-command=/home/marc/www-staging/backend/env/bin/python3 -m celery -A config worker --loglevel=INFO
-
-; Need to wait for currently executing tasks to finish at shutdown.
-; Increase this if you have very long running tasks.
-stopwaitsecs = 60
-
-; Causes supervisor to send the termination signal (SIGTERM) to the whole process group.
-stopasgroup=true
-
-; Set Celery priority higher than default (999)
-; so, if rabbitmq is supervised, it will start first.
-priority=1000
diff --git a/config/supervisor/daphne_supervisor.conf b/config/supervisor/daphne_supervisor.conf
deleted file mode 100644
index eee5ca7..0000000
--- a/config/supervisor/daphne_supervisor.conf
+++ /dev/null
@@ -1,26 +0,0 @@
-[fcgi-program:ws_zierle_training_staging]
-# TCP socket used by Nginx backend upstream
-socket=tcp://localhost:8001
-
-user=www-data
-
-# Directory where your site's project files are located
-directory=/home/marc/www-staging/backend
-
-# Each process needs to have a separate socket file, so we use process_num
-# Make sure to update "mysite.asgi" to match your project name
-command=/home/marc/www-staging/backend/env/bin/python3 -m daphne -u /run/daphne/daphne%(process_num)d.sock --fd 0 --access-log - --proxy-headers config.asgi:application
-
-# Number of processes to startup, roughly the number of CPUs you have
-numprocs=2
-
-# Give each process a unique name so they can be told apart
-process_name=ws_zierle_training_staging%(process_num)d
-
-# Automatically start and recover processes
-autostart=true
-autorestart=true
-
-# Choose where you want your log to go
-stdout_logfile=/home/marc/www-staging/logs/daphne.log
-redirect_stderr=true
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000..e2000d3
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,74 @@
+version: '3.8'
+
+services:
+  backend:
+    build:
+      context: .
+      target: backend
+    #restart: always
+    ports:
+      - "8000:8000"
+    env_file:
+      - .env
+
+  worker:
+    build:
+      context: .
+      target: worker
+    restart: always
+    env_file:
+      - .env
+
+  websocket:
+    build:
+      context: .
+      target: websocket
+    restart: always
+    ports:
+      - "8001:8001"
+    env_file:
+      - .env
+
+  db:
+    image: postgres:15-alpine
+    restart: always
+    ports:
+      - "5432:5432"
+    env_file:
+      - .env
+    environment:
+      - POSTGRES_PASSWORD=${DB_PASSWORD}
+      - POSTGRES_USER=${DB_USER}
+      - POSTGRES_DB=${DB_NAME}
+    volumes:
+      - db:/var/lib/postgresql/data
+
+  cache:
+    image: redis:7-alpine
+    restart: always
+    ports:
+      - "6379:6379"
+    command: redis-server --save 60 1 --loglevel warning
+    volumes:
+      - cache:/data
+
+  s3:
+    image: quay.io/minio/minio
+    restart: always
+    ports:
+      - "9000:9000"
+      - "9001:9001"
+    env_file:
+      - .env
+    environment:
+      - MINIO_ROOT_USER=${S3_ACCESS_ID}
+      - MINIO_ROOT_PASSWORD=${S3_ACCESS_SECRET}
+      #- MINIO_SCHEME=http
+    volumes:
+      - s3:/data
+    command: server /data --console-address ":9001"
+
+volumes:
+  db:
+  s3:
+  cache:
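The s3 service above maps S3_ACCESS_ID and S3_ACCESS_SECRET onto MinIO's root credentials, so application code can reach MinIO with the same values it already reads from .env. A minimal sketch of such a connection, assuming boto3 as the client (the s3transfer pin in requirements.txt suggests it, but this snippet is not taken from the repository):

```python
# Sketch only: reaching the MinIO "s3" service with the variables that
# docker-compose injects from .env. Inside the compose network the endpoint
# would be http://s3:9000, so S3_ENDPOINT_URL should point there.
import os

import boto3

s3 = boto3.client(
    "s3",
    endpoint_url=os.environ["S3_ENDPOINT_URL"],
    aws_access_key_id=os.environ["S3_ACCESS_ID"],
    aws_secret_access_key=os.environ["S3_ACCESS_SECRET"],
)
print(s3.list_buckets()["Buckets"])  # lists the buckets visible to these credentials
```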
diff --git a/requirements.txt b/requirements.txt
index 668b345..3322599 100755
--- a/requirements.txt
+++ b/requirements.txt
@@ -71,7 +71,7 @@ ruamel.yaml==0.17.21
 ruamel.yaml.clib==0.2.6
 s3transfer==0.6.0
 scikit-image==0.19.1
-scipy==1.7.3
+scipy
 service-identity==21.1.0
 six==1.16.0
 soupsieve==2.2.1
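Not part of this diff, but it shows how the worker and cache services are meant to fit together: MSG_BROKER_URL from .env would point at the Redis cache service, and the worker container runs `celery -A config worker` against it. A sketch of how the variable might be consumed in config/settings.py, assuming the conventional `app.config_from_object('django.conf:settings', namespace='CELERY')` setup; the actual setting names in this repository may differ:

```python
# Sketch only: feeding the broker variables from .env into Celery settings.
# The CELERY_* names assume namespace='CELERY' in config/celery.py.
import environ

env = environ.Env()

# e.g. redis://cache:6379/1 when using the compose "cache" service
CELERY_BROKER_URL = env('MSG_BROKER_URL')

# hypothetical use of the prefix variable, e.g. as the default queue name
CELERY_TASK_DEFAULT_QUEUE = env('MSG_BROKER_PREFIX')
```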