Mirror of https://github.com/MarcZierle/photo-log-backend.git, synced 2025-01-01 12:27:58 +00:00
add user authentication

This commit is contained in: parent 9a39f3776d, commit b9c3ab93fe
api/autocrop (new submodule, 1 line changed)
@@ -0,0 +1 @@
+Subproject commit 70828ba4e14e67a1db819de5c6371713145f868c
api/serializers.py (filename inferred from content)
@@ -1,3 +1,4 @@
+from django.contrib.auth import get_user_model
 from rest_framework import serializers
 from photo_log.models import (
     PhotoGroup,
@@ -23,7 +24,7 @@ class PhotoLogTemplateSerializer(serializers.ModelSerializer):
 class PhotoGroupSerializer(serializers.ModelSerializer):
     class Meta:
         model = PhotoGroup
-        fields = ('id', 'name', 'date')
+        fields = ('id', 'name', 'date', 'parent')


 class PhotosSerializer(serializers.ModelSerializer):
@@ -37,7 +38,7 @@ class PhotoSerializer(serializers.ModelSerializer):

     class Meta:
         model = Photo
-        fields = ('id', 'legacy_id', 'group', 'bbox_coords', 'rotate', 'intersections', 'original_image', 'cropped_image', 'ocr_text', 'tag')
+        fields = ('id', 'owner', 'legacy_id', 'group', 'bbox_coords', 'rotate', 'intersections', 'original_image', 'cropped_image', 'ocr_text', 'tag')


 class AddPhotoSerializer(serializers.ModelSerializer):
@@ -62,3 +63,8 @@ class PhotoLogsSerializer(serializers.ModelSerializer):
     class Meta:
         model = PhotoLog
         fields = ('id', 'title', 'date', 'pdf')
+
+class UserSerializer(serializers.ModelSerializer):
+    class Meta:
+        model = get_user_model()
+        fields = ('id', 'email')
api/urls.py (filename inferred from content)
@@ -20,6 +20,7 @@ from .views import (
     PhotoLogTemplatesAPIView,
     CreatePhotoLogTemplateAPIView,
     RetrieveUpdateDestroyPhotoLogTemplateAPIView,
+    UsersAPIView,
 )


@@ -47,4 +48,6 @@ urlpatterns = [
     path('photolog/template/', CreatePhotoLogTemplateAPIView.as_view()),
     path('photolog/templates/', PhotoLogTemplatesAPIView.as_view()),
     path('photolog/template/<int:pk>/', RetrieveUpdateDestroyPhotoLogTemplateAPIView.as_view()),
+
+    path('users/', UsersAPIView.as_view()),
 ]
api/views.py (33 lines changed)
@@ -1,6 +1,7 @@
 from rest_framework import generics, views, status
 from rest_framework.response import Response
 from django.shortcuts import get_list_or_404
+from rest_framework.permissions import IsAuthenticated
 from photo_log.models import (
     PhotoGroup,
     Photo,
@@ -18,6 +19,7 @@ from .serializers import (
     PhotoLogsSerializer,
     PhotoTagSerializer,
     PhotoLogTemplateSerializer,
+    UserSerializer,
 )

 from photo_log import tasks
@@ -25,6 +27,7 @@ from photo_log import tasks
 from django.db.models import FileField
 from django.core.files.uploadedfile import InMemoryUploadedFile
 from django.core.files.base import ContentFile
+from django.contrib.auth import get_user_model

 from io import BytesIO
 from PIL import Image, ExifTags
@@ -61,16 +64,31 @@ class RetrieveUpdateDestroyPhotoLogTemplateAPIView(generics.RetrieveUpdateDestro


 class PhotoGroupAPIView(generics.ListAPIView):
-    queryset = PhotoGroup.objects.all()
     serializer_class = PhotoGroupSerializer
+    permission_classes = [IsAuthenticated]
+
+    def get_queryset(self):
+        queryset = PhotoGroup.objects.all()
+
+        user = self.request.user
+        if not user.is_superuser:
+            queryset = queryset.filter(owner=user)
+
+        return queryset


 class PhotosAPIView(generics.ListAPIView):

     serializer_class = PhotoSerializer
+    permission_classes = [IsAuthenticated]
+
     def get_queryset(self):
         queryset = Photo.objects.all()
+
+        user = self.request.user
+        if not user.is_superuser:
+            queryset = queryset.filter(owner=user)

         self.serializer_class = PhotosSerializer

         photogroup = self.request.query_params.get('photogroup')
@@ -89,11 +107,19 @@ class PhotoAPIView(generics.RetrieveAPIView):
 class AddPhotoAPIView(generics.CreateAPIView):
     queryset = Photo.objects.all()
     serializer_class = AddPhotoSerializer
+    permission_classes = [IsAuthenticated]
+
+    def perform_create(self, serializer):
+        serializer.save(owner=self.request.user)


 class AddPhotoGroupAPIView(generics.CreateAPIView):
     queryset = PhotoGroup.objects.all()
     serializer_class = PhotoGroupSerializer
+    permission_classes = [IsAuthenticated]
+
+    def perform_create(self, serializer):
+        serializer.save(owner=self.request.user)


 class PhotoLogAPIView(generics.RetrieveAPIView):
@@ -220,3 +246,8 @@ class AutoCropPhotoAPIView(generics.RetrieveAPIView):
         return self.retrieve(request, *args, **kwargs)

         return Response({"error": "Not Found"}, status=status.HTTP_404_NOT_FOUND)
+
+
+class UsersAPIView(generics.ListAPIView):
+    queryset = get_user_model().objects.all()
+    serializer_class = UserSerializer
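The owner scoping above repeats the same two hooks across several views: get_queryset() narrows the queryset to request.user unless the user is a superuser, and perform_create() stamps request.user as owner. Purely as an illustration (not part of this commit), the pattern could be factored into a mixin; the name OwnerScopedMixin is hypothetical:

    from rest_framework.permissions import IsAuthenticated


    class OwnerScopedMixin:
        """Sketch: limit non-superusers to their own rows and set owner on create."""
        permission_classes = [IsAuthenticated]

        def get_queryset(self):
            queryset = super().get_queryset()
            user = self.request.user
            if not user.is_superuser:
                queryset = queryset.filter(owner=user)
            return queryset

        def perform_create(self, serializer):
            serializer.save(owner=self.request.user)

A view would then subclass it ahead of the generic view, e.g. class AddPhotoAPIView(OwnerScopedMixin, generics.CreateAPIView), keeping only queryset and serializer_class.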
config/settings.py (filename inferred from content)
@@ -26,7 +26,8 @@ SECRET_KEY = 'django-insecure-z465dl_(vk55hxbm0bj*mp-ok3!*=ssw#!$5s2nrxa!9j+67z+
 # SECURITY WARNING: don't run with debug turned on in production!
 DEBUG = True

-ALLOWED_HOSTS = ['zierle-training-staging.riezel.com', 'localhost', '127.0.0.1', '192.168.1.244']
+ALLOWED_HOSTS = ['zierle-training.riezel.com', 'localhost', '127.0.0.1', '192.168.1.244', '192.168.1.114']
+CORS_ORIGIN_ALLOW_ALL = True


 # Application definition
@@ -43,6 +44,7 @@ INSTALLED_APPS = [
     'channels', # as high as possible (channels overloads 'runserver', may conflict with e.g. whitenoise)
     'rest_framework',
     'corsheaders',
+    'rest_framework_simplejwt',
     'drf_yasg',
     'storages',
     'django_extensions',
@@ -202,13 +204,14 @@ STATIC_ROOT = os.path.join(BASE_DIR, "static/")
 # See https://docs.celeryq.dev/en/stable/django/first-steps-with-django.html
 CELERY_CACHE_BACKEND = 'default'

-CELERY_WORK_DIR = '/home/marc/www-staging/celery/'
+CELERY_WORK_DIR = '/home/marc/www/celery/'

 CELERY_BROKER_URL = 'redis://localhost:6378/1'
 CELERY_RESULT_BACKEND= 'redis://localhost:6378/1'

 CELERY_TIMEZONE = 'CET'

+CELERY_TASK_DEFAULT_QUEUE = 'zierletraining_prod'
 CELERY_BROKER_TRANSPORT_OPTIONS = {
     'visibility_timeout': 300,
 }
@@ -224,7 +227,7 @@ CACHES = {
         "OPTIONS": {
             "CLIENT_CLASS": "django_redis.client.DefaultClient",
         },
-        "KEY_PREFIX": "zierletraining",
+        "KEY_PREFIX": "zierletraining_prod",
     }
 }

@@ -238,8 +241,24 @@ CHANNEL_LAYERS = {
         "BACKEND": "channels_redis.core.RedisChannelLayer",
         "CONFIG": {
             "hosts": [("127.0.0.1", 6378)],
-            "prefix": "asgi_zierle_training_staging:",
+            "prefix": "asgi_zierle_training_prod:",
             "group_expiry": 7200,
         },
     },
 }
+
+REST_FRAMEWORK = {
+    'DEFAULT_AUTHENTICATION_CLASSES': (
+        'rest_framework.authentication.SessionAuthentication',
+        'rest_framework.authentication.BasicAuthentication',
+        'rest_framework_simplejwt.authentication.JWTAuthentication',
+    ),
+    'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',
+    'PAGE_SIZE': 9999,
+}
+
+from datetime import timedelta
+
+SIMPLE_JWT = {
+    'REFRESH_TOKEN_LIFETIME': timedelta(days=30),
+}
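Only REFRESH_TOKEN_LIFETIME is overridden in SIMPLE_JWT, so the remaining simplejwt options keep their library defaults; notably, access tokens still expire after five minutes and the expected Authorization header type is Bearer. A sketch of the same block with those defaults spelled out (the extra keys are illustrative, not part of the commit):

    from datetime import timedelta

    SIMPLE_JWT = {
        'ACCESS_TOKEN_LIFETIME': timedelta(minutes=5),  # library default, shown for clarity
        'REFRESH_TOKEN_LIFETIME': timedelta(days=30),   # value set in this commit
        'ROTATE_REFRESH_TOKENS': False,                 # library default
        'AUTH_HEADER_TYPES': ('Bearer',),               # library default
    }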
Supervisor config for the Celery worker (filename not shown in this view)
@@ -1,16 +1,16 @@
-[program:celery_zierle_training_staging]
+[program:celery_zierle_training]

-directory=/home/marc/www-staging/backend
+directory=/home/marc/www/backend

 user=www-data
 numprocs=1
-stdout_logfile=/var/log/celery/worker.log
-stderr_logfile=/var/log/celery/worker.log
+stdout_logfile=/var/log/celery/worker_prod.log
+stderr_logfile=/var/log/celery/worker_prod.log
 autostart=true
 autorestart=true
 startsecs=10

-command=/home/marc/www-staging/backend/env/bin/python3 -m celery -A config worker --loglevel=INFO
+command=/home/marc/www/backend/env/bin/python3 -m celery -A config worker --loglevel=INFO

 ; Need to wait for currently executing tasks to finish at shutdown.
 ; Increase this if you have very long running tasks.
Supervisor config for the Daphne websocket service (filename not shown in this view)
@@ -1,26 +1,26 @@
-[fcgi-program:ws_zierle_training_staging]
+[fcgi-program:ws_zierle_training]
 # TCP socket used by Nginx backend upstream
-socket=tcp://localhost:8001
+socket=tcp://localhost:8002

 user=www-data

 # Directory where your site's project files are located
-directory=/home/marc/www-staging/backend
+directory=/home/marc/www/backend

 # Each process needs to have a separate socket file, so we use process_num
 # Make sure to update "mysite.asgi" to match your project name
-command=/home/marc/www-staging/backend/env/bin/python3 -m daphne -u /run/daphne/daphne%(process_num)d.sock --fd 0 --access-log - --proxy-headers config.asgi:application
+command=/home/marc/www/backend/env/bin/python3 -m daphne -u /run/daphne/daphne_prod%(process_num)d.sock --fd 0 --access-log - --proxy-headers config.asgi:application

 # Number of processes to startup, roughly the number of CPUs you have
 numprocs=2

 # Give each process a unique name so they can be told apart
-process_name=ws_zierle_training_staging%(process_num)d
+process_name=ws_zierle_training%(process_num)d

 # Automatically start and recover processes
 autostart=true
 autorestart=true

 # Choose where you want your log to go
-stdout_logfile=/home/marc/www-staging/logs/daphne.log
+stdout_logfile=/home/marc/www/logs/daphne.log
 redirect_stderr=true
config/urls.py (filename inferred from content)
@@ -18,6 +18,13 @@ from django.urls import path, include
 from django.views.generic import TemplateView
 from django.conf.urls.static import static
 from django.conf import settings
+from django.http import HttpResponse
+
+from rest_framework_simplejwt.views import (
+    TokenObtainPairView,
+    TokenRefreshView,
+    TokenVerifyView,
+)

 # API documentation
 from rest_framework import permissions
@@ -27,6 +34,12 @@ from drf_yasg import openapi

 api_patterns = [
     path('api/v1/', include('api.urls')),
+    path('api/v1/api-auth/', include('rest_framework.urls')),
+    path('api/v1/token/', TokenObtainPairView.as_view(), name='token_obtain_pair'),
+    path('api/v1/token/refresh/', TokenRefreshView.as_view(), name='token_refresh'),
+    path('api/v1/token/verify/', TokenVerifyView.as_view(), name='token_verify'),
+
+    path('api/v1/ping/', lambda request: HttpResponse('pong'), name='ping_pong'),
 ]

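With these routes, a client obtains a token pair from api/v1/token/ and sends the access token as a Bearer header on later requests. A minimal sketch using the requests library, assuming a locally running server and placeholder credentials (the login field depends on the user model's USERNAME_FIELD):

    import requests

    BASE = 'http://localhost:8000/api/v1'  # hypothetical local address

    # Obtain an access/refresh token pair.
    tokens = requests.post(f'{BASE}/token/', json={
        'username': 'alice',   # placeholder credentials
        'password': 'secret',
    }).json()

    # Call an authenticated endpoint, e.g. the new users list.
    users = requests.get(f'{BASE}/users/', headers={
        'Authorization': f"Bearer {tokens['access']}",
    }).json()

    # Get a fresh access token once the old one expires.
    access = requests.post(f'{BASE}/token/refresh/', json={
        'refresh': tokens['refresh'],
    }).json()['access']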
photo_log/models.py (filename inferred from content)
@@ -1,6 +1,7 @@
 from django.contrib.postgres.fields import ArrayField
 from django.db import models
 from django.dispatch import receiver
+from django.contrib.auth import get_user_model

 from model_utils import FieldTracker

@@ -14,6 +15,9 @@ import os
 import uuid


+UserModel = get_user_model()
+
+
 class PhotoTag(models.Model):
     name = models.CharField(unique=True, null=False, blank=False, max_length=100)
     color = ColorField(default='#FFE5B4')
@@ -21,10 +25,20 @@ class PhotoTag(models.Model):
     def __str__(self):
         return self.name
+
+    class Meta:
+        ordering = ('name',)


 class PhotoGroup(models.Model):
-    name = models.CharField(unique=True, null=False, max_length=200)
+    name = models.CharField(unique=False, null=False, max_length=200)
     date = models.DateField(null=True)
+    parent = models.ForeignKey('self', blank=True, null=True, on_delete=models.SET_NULL)
+
+    owner = models.ForeignKey(
+        UserModel,
+        on_delete=models.CASCADE,
+        related_name='photogroups',
+    )

     def __str__(self):
         return self.name
@@ -85,6 +99,12 @@ class Photo(models.Model):
         default=None,
     )
+
+    owner = models.ForeignKey(
+        UserModel,
+        on_delete=models.CASCADE,
+        related_name='photos',
+    )

     tracker = FieldTracker()

     def __str__(self):
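The related_name values on the new owner foreign keys expose reverse relations on the user object. An illustrative shell snippet (not part of the commit):

    from django.contrib.auth import get_user_model

    user = get_user_model().objects.first()  # any existing user

    user.photos.all()        # Photos owned by this user (related_name='photos')
    user.photogroups.all()   # PhotoGroups owned by this user (related_name='photogroups')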
photo_log/tasks.py (filename inferred from content)
@@ -1,5 +1,6 @@
 from celery import shared_task, chain, group, chord
 import boto3
+from boto3.s3.transfer import TransferConfig
 import imghdr
 from PIL import Image, ExifTags

@@ -19,6 +20,15 @@ from .photolog_layout import generate_tex
 from .autocrop.autocrop import autocrop


+s3_resource = boto3.resource(
+    's3',
+    endpoint_url=settings.AWS_S3_ENDPOINT_URL,
+    aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
+    aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
+)
+client = s3_resource.meta.client
+
+
 @shared_task
 def chordfinisher(*args, **kwargs):
     """
@@ -47,22 +57,21 @@ def download_s3_file(folder_path, s3_file_path, bucket):
     :param bucket The name of the bucket where the file is stored in.
     """
+
+    global client

     # create local folder
     if not os.path.exists(folder_path):
         os.makedirs(folder_path, exist_ok=True)  # mkdir -p
-
-    s3_resource = boto3.resource(
-        's3',
-        endpoint_url=settings.AWS_S3_ENDPOINT_URL,
-        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
-        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
-    )
-    client = s3_resource.meta.client

     # retrieve the file name if the file is stored in a sub dir on the S3
     file_name = s3_file_path.split('/')[-1]

-    client.download_file(bucket, s3_file_path, os.path.join(folder_path, file_name))
+    client.download_file(
+        bucket,
+        s3_file_path,
+        os.path.join(folder_path, file_name),
+        Config=TransferConfig(use_threads=False)
+    )


 @shared_task
@@ -74,13 +83,7 @@ def upload_s3_file(file_path, s3_file_path, bucket, content_type='application/oc
     :param bucket The name of the bucket where the file will be stored in.
     """

-    s3_resource = boto3.resource(
-        's3',
-        endpoint_url=settings.AWS_S3_ENDPOINT_URL,
-        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
-        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
-    )
-    client = s3_resource.meta.client
+    global client

     client.upload_file(
         file_path,
@@ -89,7 +92,8 @@ def upload_s3_file(file_path, s3_file_path, bucket, content_type='application/oc
         ExtraArgs={
             'ContentDisposition': 'inline',
             'ContentType': content_type,
-        }
+        },
+        Config=TransferConfig(use_threads=False)
     )


@@ -162,6 +166,7 @@ def crop_image_bbox(image_path, bbox, rotate_angle):
         bbox[2][1]
     ))

+    img = img.convert('RGB')
     img.save(image_path)


@@ -192,15 +197,24 @@ def delete_folder(folder_path):

 @shared_task
 def generate_photolog_from_latex(title, date, render_date, start_slide_image, slides, id_to_name, work_dir, out_file):
-    template_name = 'photolog.tex'
-    context = {
-        'content': generate_tex(title, date, render_date, start_slide_image, slides, id_to_name, work_dir)
-    }
-
-    pdf_bytes = compile_template_to_pdf(template_name, context)
-
-    with open(out_file, 'wb+') as file:
-        file.write(pdf_bytes)
+    try:
+        template_name = 'photolog.tex'
+        context = {
+            'content': generate_tex(title, date, render_date, start_slide_image, slides, id_to_name, work_dir)
+        }
+
+        pdf_bytes = compile_template_to_pdf(template_name, context)
+
+        with open(out_file, 'wb+') as file:
+            file.write(pdf_bytes)
+    except Exception as e:
+        notify_client(
+            description='error',
+            content={
+                'exception': str(e),
+            }
+        )
+        raise e


 def max_resize_image_chain(full_file_name, max_width, work_folder_name=uuid4()):
@@ -381,7 +395,8 @@ def generate_photo_log_chain(photo_log_id, work_folder_name=uuid4()):

     download_files_tasks.extend(get_photo_log_assets_tasks())

-    download_files_tasks = chord( group(download_files_tasks), chordfinisher.si() )
+    #download_files_tasks = chord( group(download_files_tasks), chordfinisher.si() )
+    download_files_tasks = chain(download_files_tasks)

     pdf_file = photo_log.pdf.name
     if pdf_file: