add user authentication

MarcZierle 2022-10-31 09:20:43 +01:00
parent 9a39f3776d
commit b9c3ab93fe
10 changed files with 154 additions and 46 deletions

api/autocrop Submodule

@@ -0,0 +1 @@
Subproject commit 70828ba4e14e67a1db819de5c6371713145f868c


@@ -1,3 +1,4 @@
from django.contrib.auth import get_user_model
from rest_framework import serializers
from photo_log.models import (
    PhotoGroup,
@@ -23,7 +24,7 @@ class PhotoLogTemplateSerializer(serializers.ModelSerializer):
class PhotoGroupSerializer(serializers.ModelSerializer):
    class Meta:
        model = PhotoGroup
        fields = ('id', 'name', 'date')
        fields = ('id', 'name', 'date', 'parent')

class PhotosSerializer(serializers.ModelSerializer):
@@ -37,7 +38,7 @@ class PhotoSerializer(serializers.ModelSerializer):
    class Meta:
        model = Photo
        fields = ('id', 'legacy_id', 'group', 'bbox_coords', 'rotate', 'intersections', 'original_image', 'cropped_image', 'ocr_text', 'tag')
        fields = ('id', 'owner', 'legacy_id', 'group', 'bbox_coords', 'rotate', 'intersections', 'original_image', 'cropped_image', 'ocr_text', 'tag')

class AddPhotoSerializer(serializers.ModelSerializer):
@@ -62,3 +63,8 @@ class PhotoLogsSerializer(serializers.ModelSerializer):
    class Meta:
        model = PhotoLog
        fields = ('id', 'title', 'date', 'pdf')

class UserSerializer(serializers.ModelSerializer):
    class Meta:
        model = get_user_model()
        fields = ('id', 'email')


@@ -20,6 +20,7 @@ from .views import (
    PhotoLogTemplatesAPIView,
    CreatePhotoLogTemplateAPIView,
    RetrieveUpdateDestroyPhotoLogTemplateAPIView,
    UsersAPIView,
)
@@ -47,4 +48,6 @@ urlpatterns = [
    path('photolog/template/', CreatePhotoLogTemplateAPIView.as_view()),
    path('photolog/templates/', PhotoLogTemplatesAPIView.as_view()),
    path('photolog/template/<int:pk>/', RetrieveUpdateDestroyPhotoLogTemplateAPIView.as_view()),
    path('users/', UsersAPIView.as_view()),
]


@@ -1,6 +1,7 @@
from rest_framework import generics, views, status
from rest_framework.response import Response
from django.shortcuts import get_list_or_404
from rest_framework.permissions import IsAuthenticated
from photo_log.models import (
    PhotoGroup,
    Photo,
@@ -18,6 +19,7 @@ from .serializers import (
    PhotoLogsSerializer,
    PhotoTagSerializer,
    PhotoLogTemplateSerializer,
    UserSerializer,
)

from photo_log import tasks
@@ -25,6 +27,7 @@ from photo_log import tasks
from django.db.models import FileField
from django.core.files.uploadedfile import InMemoryUploadedFile
from django.core.files.base import ContentFile
from django.contrib.auth import get_user_model
from io import BytesIO
from PIL import Image, ExifTags
@@ -61,16 +64,31 @@ class RetrieveUpdateDestroyPhotoLogTemplateAPIView(generics.RetrieveUpdateDestro
class PhotoGroupAPIView(generics.ListAPIView):
    queryset = PhotoGroup.objects.all()
    serializer_class = PhotoGroupSerializer
    permission_classes = [IsAuthenticated]

    def get_queryset(self):
        queryset = PhotoGroup.objects.all()

        user = self.request.user
        if not user.is_superuser:
            queryset = queryset.filter(owner=user)

        return queryset


class PhotosAPIView(generics.ListAPIView):
    serializer_class = PhotoSerializer
    permission_classes = [IsAuthenticated]

    def get_queryset(self):
        queryset = Photo.objects.all()

        user = self.request.user
        if not user.is_superuser:
            queryset = queryset.filter(owner=user)

        self.serializer_class = PhotosSerializer
        photogroup = self.request.query_params.get('photogroup')
@@ -89,11 +107,19 @@ class PhotoAPIView(generics.RetrieveAPIView):
class AddPhotoAPIView(generics.CreateAPIView):
    queryset = Photo.objects.all()
    serializer_class = AddPhotoSerializer
    permission_classes = [IsAuthenticated]

    def perform_create(self, serializer):
        serializer.save(owner=self.request.user)


class AddPhotoGroupAPIView(generics.CreateAPIView):
    queryset = PhotoGroup.objects.all()
    serializer_class = PhotoGroupSerializer
    permission_classes = [IsAuthenticated]

    def perform_create(self, serializer):
        serializer.save(owner=self.request.user)


class PhotoLogAPIView(generics.RetrieveAPIView):
@@ -220,3 +246,8 @@ class AutoCropPhotoAPIView(generics.RetrieveAPIView):
            return self.retrieve(request, *args, **kwargs)

        return Response({"error": "Not Found"}, status=status.HTTP_404_NOT_FOUND)


class UsersAPIView(generics.ListAPIView):
    queryset = get_user_model().objects.all()
    serializer_class = UserSerializer


@@ -26,7 +26,8 @@ SECRET_KEY = 'django-insecure-z465dl_(vk55hxbm0bj*mp-ok3!*=ssw#!$5s2nrxa!9j+67z+

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = ['zierle-training-staging.riezel.com', 'localhost', '127.0.0.1', '192.168.1.244']
ALLOWED_HOSTS = ['zierle-training.riezel.com', 'localhost', '127.0.0.1', '192.168.1.244', '192.168.1.114']

CORS_ORIGIN_ALLOW_ALL = True

# Application definition
@@ -43,6 +44,7 @@ INSTALLED_APPS = [
    'channels', # as high as possible (channels overloads 'runserver', may conflict with e.g. whitenoise)
    'rest_framework',
    'corsheaders',
    'rest_framework_simplejwt',
    'drf_yasg',
    'storages',
    'django_extensions',
@@ -202,13 +204,14 @@ STATIC_ROOT = os.path.join(BASE_DIR, "static/")
# See https://docs.celeryq.dev/en/stable/django/first-steps-with-django.html
CELERY_CACHE_BACKEND = 'default'
CELERY_WORK_DIR = '/home/marc/www-staging/celery/'
CELERY_WORK_DIR = '/home/marc/www/celery/'
CELERY_BROKER_URL = 'redis://localhost:6378/1'
CELERY_RESULT_BACKEND= 'redis://localhost:6378/1'
CELERY_TIMEZONE = 'CET'
CELERY_TASK_DEFAULT_QUEUE = 'zierletraining_prod'
CELERY_BROKER_TRANSPORT_OPTIONS = {
    'visibility_timeout': 300,
}
@@ -224,7 +227,7 @@ CACHES = {
        "OPTIONS": {
            "CLIENT_CLASS": "django_redis.client.DefaultClient",
        },
        "KEY_PREFIX": "zierletraining",
        "KEY_PREFIX": "zierletraining_prod",
    }
}
@@ -238,8 +241,24 @@ CHANNEL_LAYERS = {
        "BACKEND": "channels_redis.core.RedisChannelLayer",
        "CONFIG": {
            "hosts": [("127.0.0.1", 6378)],
            "prefix": "asgi_zierle_training_staging:",
            "prefix": "asgi_zierle_training_prod:",
            "group_expiry": 7200,
        },
    },
}

REST_FRAMEWORK = {
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'rest_framework.authentication.SessionAuthentication',
        'rest_framework.authentication.BasicAuthentication',
        'rest_framework_simplejwt.authentication.JWTAuthentication',
    ),
    'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',
    'PAGE_SIZE': 9999,
}

from datetime import timedelta
SIMPLE_JWT = {
    'REFRESH_TOKEN_LIFETIME': timedelta(days=30),
}
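
A minimal client-side sketch of the JWT flow these settings enable, using the token routes this commit adds below under /api/v1/token/ and the new /api/v1/users/ endpoint; it is not part of the commit, the base URL and credentials are placeholders, and the 'email' login field is an assumption about the project's custom user model:

import requests

BASE_URL = 'http://localhost:8000'  # placeholder dev server

# Obtain an access/refresh token pair from SimpleJWT's TokenObtainPairView.
tokens = requests.post(f'{BASE_URL}/api/v1/token/', data={
    'email': 'user@example.com',   # hypothetical credentials
    'password': 'secret',
}).json()

# Call a protected endpoint with the access token (SimpleJWT's default
# "Bearer" header type).
users = requests.get(
    f'{BASE_URL}/api/v1/users/',
    headers={'Authorization': f'Bearer {tokens["access"]}'},
).json()

# Refresh the access token; the refresh token stays valid for 30 days
# per SIMPLE_JWT above.
tokens['access'] = requests.post(
    f'{BASE_URL}/api/v1/token/refresh/',
    data={'refresh': tokens['refresh']},
).json()['access']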


@@ -1,16 +1,16 @@
[program:celery_zierle_training_staging]
[program:celery_zierle_training]
directory=/home/marc/www-staging/backend
directory=/home/marc/www/backend
user=www-data
numprocs=1
stdout_logfile=/var/log/celery/worker.log
stderr_logfile=/var/log/celery/worker.log
stdout_logfile=/var/log/celery/worker_prod.log
stderr_logfile=/var/log/celery/worker_prod.log
autostart=true
autorestart=true
startsecs=10
command=/home/marc/www-staging/backend/env/bin/python3 -m celery -A config worker --loglevel=INFO
command=/home/marc/www/backend/env/bin/python3 -m celery -A config worker --loglevel=INFO
; Need to wait for currently executing tasks to finish at shutdown.
; Increase this if you have very long running tasks.


@@ -1,26 +1,26 @@
[fcgi-program:ws_zierle_training_staging]
[fcgi-program:ws_zierle_training]
# TCP socket used by Nginx backend upstream
socket=tcp://localhost:8001
socket=tcp://localhost:8002
user=www-data
# Directory where your site's project files are located
directory=/home/marc/www-staging/backend
directory=/home/marc/www/backend
# Each process needs to have a separate socket file, so we use process_num
# Make sure to update "mysite.asgi" to match your project name
command=/home/marc/www-staging/backend/env/bin/python3 -m daphne -u /run/daphne/daphne%(process_num)d.sock --fd 0 --access-log - --proxy-headers config.asgi:application
command=/home/marc/www/backend/env/bin/python3 -m daphne -u /run/daphne/daphne_prod%(process_num)d.sock --fd 0 --access-log - --proxy-headers config.asgi:application
# Number of processes to startup, roughly the number of CPUs you have
numprocs=2
# Give each process a unique name so they can be told apart
process_name=ws_zierle_training_staging%(process_num)d
process_name=ws_zierle_training%(process_num)d
# Automatically start and recover processes
autostart=true
autorestart=true
# Choose where you want your log to go
stdout_logfile=/home/marc/www-staging/logs/daphne.log
stdout_logfile=/home/marc/www/logs/daphne.log
redirect_stderr=true


@@ -18,6 +18,13 @@ from django.urls import path, include
from django.views.generic import TemplateView
from django.conf.urls.static import static
from django.conf import settings
from django.http import HttpResponse
from rest_framework_simplejwt.views import (
    TokenObtainPairView,
    TokenRefreshView,
    TokenVerifyView,
)

# API documentation
from rest_framework import permissions
@@ -27,6 +34,12 @@ from drf_yasg import openapi

api_patterns = [
    path('api/v1/', include('api.urls')),
    path('api/v1/api-auth/', include('rest_framework.urls')),
    path('api/v1/token/', TokenObtainPairView.as_view(), name='token_obtain_pair'),
    path('api/v1/token/refresh/', TokenRefreshView.as_view(), name='token_refresh'),
    path('api/v1/token/verify/', TokenVerifyView.as_view(), name='token_verify'),
    path('api/v1/ping/', lambda request: HttpResponse('pong'), name='ping_pong'),
]


@@ -1,6 +1,7 @@
from django.contrib.postgres.fields import ArrayField
from django.db import models
from django.dispatch import receiver
from django.contrib.auth import get_user_model

from model_utils import FieldTracker
@@ -14,6 +15,9 @@ import os
import uuid

UserModel = get_user_model()


class PhotoTag(models.Model):
    name = models.CharField(unique=True, null=False, blank=False, max_length=100)
    color = ColorField(default='#FFE5B4')
@@ -21,10 +25,20 @@ class PhotoTag(models.Model):

    def __str__(self):
        return self.name

    class Meta:
        ordering = ('name',)


class PhotoGroup(models.Model):
    name = models.CharField(unique=True, null=False, max_length=200)
    name = models.CharField(unique=False, null=False, max_length=200)
    date = models.DateField(null=True)
    parent = models.ForeignKey('self', blank=True, null=True, on_delete=models.SET_NULL)

    owner = models.ForeignKey(
        UserModel,
        on_delete=models.CASCADE,
        related_name='photogroups',
    )

    def __str__(self):
        return self.name
@@ -85,6 +99,12 @@ class Photo(models.Model):
        default=None,
    )

    owner = models.ForeignKey(
        UserModel,
        on_delete=models.CASCADE,
        related_name='photos',
    )

    tracker = FieldTracker()

    def __str__(self):


@@ -1,5 +1,6 @@
from celery import shared_task, chain, group, chord
import boto3
from boto3.s3.transfer import TransferConfig
import imghdr
from PIL import Image, ExifTags
@@ -19,6 +20,15 @@ from .photolog_layout import generate_tex
from .autocrop.autocrop import autocrop

s3_resource = boto3.resource(
    's3',
    endpoint_url=settings.AWS_S3_ENDPOINT_URL,
    aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
    aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
)
client = s3_resource.meta.client


@shared_task
def chordfinisher(*args, **kwargs):
    """
@@ -47,22 +57,21 @@ def download_s3_file(folder_path, s3_file_path, bucket):
    :param bucket The name of the bucket where the file is stored in.
    """
    global client

    # create local folder
    if not os.path.exists(folder_path):
        os.makedirs(folder_path, exist_ok=True) # mkdir -p

    s3_resource = boto3.resource(
        's3',
        endpoint_url=settings.AWS_S3_ENDPOINT_URL,
        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
    )
    client = s3_resource.meta.client

    # retrieve the file name if the file is stored in a sub dir on the S3
    file_name = s3_file_path.split('/')[-1]

    client.download_file(bucket, s3_file_path, os.path.join(folder_path, file_name))
    client.download_file(
        bucket,
        s3_file_path,
        os.path.join(folder_path, file_name),
        Config=TransferConfig(use_threads=False)
    )
@shared_task
@@ -73,14 +82,8 @@ def upload_s3_file(file_path, s3_file_path, bucket, content_type='application/oc
    :param s3_file_path Full file path in the S3 storage bucket (without the buckets name).
    :param bucket The name of the bucket where the file will be stored in.
    """
    s3_resource = boto3.resource(
        's3',
        endpoint_url=settings.AWS_S3_ENDPOINT_URL,
        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
    )
    client = s3_resource.meta.client
    global client

    client.upload_file(
        file_path,
@@ -89,7 +92,8 @@ def upload_s3_file(file_path, s3_file_path, bucket, content_type='application/oc
        ExtraArgs={
            'ContentDisposition': 'inline',
            'ContentType': content_type,
        }
        },
        Config=TransferConfig(use_threads=False)
    )
@@ -161,7 +165,8 @@ def crop_image_bbox(image_path, bbox, rotate_angle):
        bbox[2][0],
        bbox[2][1]
    ))

    img = img.convert('RGB')
    img.save(image_path)
@@ -192,15 +197,24 @@ def delete_folder(folder_path):

@shared_task
def generate_photolog_from_latex(title, date, render_date, start_slide_image, slides, id_to_name, work_dir, out_file):
    template_name = 'photolog.tex'
    context = {
        'content': generate_tex(title, date, render_date, start_slide_image, slides, id_to_name, work_dir)
    }
    try:
        template_name = 'photolog.tex'
        context = {
            'content': generate_tex(title, date, render_date, start_slide_image, slides, id_to_name, work_dir)
        }

    pdf_bytes = compile_template_to_pdf(template_name, context)
        pdf_bytes = compile_template_to_pdf(template_name, context)

    with open(out_file, 'wb+') as file:
        file.write(pdf_bytes)
        with open(out_file, 'wb+') as file:
            file.write(pdf_bytes)
    except Exception as e:
        notify_client(
            description='error',
            content={
                'exception': str(e),
            }
        )
        raise e
def max_resize_image_chain(full_file_name, max_width, work_folder_name=uuid4()):
@@ -381,7 +395,8 @@ def generate_photo_log_chain(photo_log_id, work_folder_name=uuid4()):
    download_files_tasks.extend(get_photo_log_assets_tasks())

    download_files_tasks = chord( group(download_files_tasks), chordfinisher.si() )
    #download_files_tasks = chord( group(download_files_tasks), chordfinisher.si() )
    download_files_tasks = chain(download_files_tasks)

    pdf_file = photo_log.pdf.name
    if pdf_file: