backend/photo_log/tasks.py

from celery import shared_task, chain, group, chord
import boto3
import imghdr
from PIL import Image, ExifTags
import os, shutil
from uuid import uuid4
from django.conf import settings
from django.core.files import File
from django.apps import apps
from channels.layers import get_channel_layer
from asgiref.sync import async_to_sync
from django_tex.shortcuts import compile_template_to_pdf
from .photolog_layout import generate_tex
from .autocrop.autocrop import autocrop


@shared_task
def chordfinisher(*args, **kwargs):
"""
Used at the end of chord( group( ... ), chordfinisher.s())
to chain multiple groups together.
"""
return 'OK'
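
# Usage sketch (hypothetical task names): a group cannot be chained directly
# into a following group, but a chord can, so each group is wrapped in a chord
# whose callback is chordfinisher:
#
#   chain(
#       chord(group(step_a.si(), step_b.si()), chordfinisher.si()),
#       chord(group(step_c.si(), step_d.si()), chordfinisher.si()),
#   )()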


@shared_task
def notify_client(description, content):
channel_layer = get_channel_layer()
async_to_sync(channel_layer.group_send)("notifications", {
"type": "task.finished",
"description": description,
"content": content
})
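
# A minimal sketch of the receiving side (assumed consumer, not part of this
# module): Channels dispatches the "task.finished" message type to a
# task_finished() handler on every consumer subscribed to "notifications".
#
#   class NotificationConsumer(AsyncJsonWebsocketConsumer):
#       async def connect(self):
#           await self.channel_layer.group_add("notifications", self.channel_name)
#           await self.accept()
#
#       async def task_finished(self, event):
#           await self.send_json(event)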


@shared_task
def download_s3_file(folder_path, s3_file_path, bucket):
"""
Downloads a file stored in a S3 object storages.
:param folder_path Path on local disk where the file should be saved.
:param s3_file_path Full file path in the S3 storage bucket (without the buckets name).
:param bucket The name of the bucket where the file is stored in.
"""
    # create the local folder if it does not exist yet (mkdir -p)
    os.makedirs(folder_path, exist_ok=True)
s3_resource = boto3.resource(
's3',
endpoint_url=settings.AWS_S3_ENDPOINT_URL,
aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
)
client = s3_resource.meta.client
    # extract the bare file name in case the object sits in a subdirectory on S3
    file_name = s3_file_path.split('/')[-1]
client.download_file(bucket, s3_file_path, os.path.join(folder_path, file_name))


@shared_task
def upload_s3_file(file_path, s3_file_path, bucket):
"""
Uploads a file to a S3 object storages.
:param file_path Path on local disk where the is saved.
:param s3_file_path Full file path in the S3 storage bucket (without the buckets name).
:param bucket The name of the bucket where the file will be stored in.
"""
s3_resource = boto3.resource(
's3',
endpoint_url=settings.AWS_S3_ENDPOINT_URL,
aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
)
client = s3_resource.meta.client
client.upload_file(file_path, bucket, s3_file_path)


@shared_task
def update_model_field(app_name, model_name, pk, field_name, new_value):
    # generic task: resolve the model through the app registry so any
    # app/model/field combination can be updated from within a chain
    model = apps.get_model(app_name, model_name)
    instance = model.objects.get(pk=pk)
    setattr(instance, field_name, new_value)
    instance.save()


def rotateByExif(img):
    try:
        # find the EXIF tag id for 'Orientation'
        for orientation in ExifTags.TAGS.keys():
            if ExifTags.TAGS[orientation] == 'Orientation':
                break
        exif = img._getexif()
        if not exif:
            return img
        # EXIF orientation values: 3 = upside down, 6 = 90° CW, 8 = 90° CCW
        if exif[orientation] == 3:
            img = img.rotate(180, expand=True)
        elif exif[orientation] == 6:
            img = img.rotate(270, expand=True)
        elif exif[orientation] == 8:
            img = img.rotate(90, expand=True)
    except (AttributeError, KeyError, IndexError):
        # image has no EXIF data or no orientation tag
        pass
    return img
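
# Note: recent Pillow versions ship an equivalent helper that covers all eight
# EXIF orientation values and could replace rotateByExif:
#
#   from PIL import ImageOps
#   img = ImageOps.exif_transpose(img)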


@shared_task
def max_resize_image(image_path, max_width):
    if not imghdr.what(image_path):
        return
    img = Image.open(image_path)
    img = rotateByExif(img)
    if img.size[0] <= max_width:
        return
    # scale the height proportionally to the new width
    wpercent = max_width / float(img.size[0])
    hsize = int(float(img.size[1]) * wpercent)
    img = img.resize((max_width, hsize), Image.LANCZOS)  # Image.ANTIALIAS was removed in Pillow 10
    img.save(image_path)


@shared_task
def crop_image_bbox(image_path, bbox, rotate_angle):
if not imghdr.what(image_path):
return
img = Image.open(image_path)
img = rotateByExif(img)
    if rotate_angle:
        img = img.rotate(rotate_angle, expand=True)
    if bbox:
        # bbox holds four corner points; the first (top-left) and the
        # third (bottom-right) define the crop rectangle
        img = img.crop((
            bbox[0][0],
            bbox[0][1],
            bbox[2][0],
            bbox[2][1]
        ))
img.save(image_path)


@shared_task
def crop_image_auto(image_path):
if not imghdr.what(image_path):
return
img = Image.open(image_path)
img = rotateByExif(img)
    try:
        cropped_img, _, bbox, intersections = autocrop(img)
    except Exception:
        # fall back to the uncropped image if automatic detection fails
        cropped_img = img
        bbox = None
        intersections = None
    # TODO: save bbox and intersections in photo.bbox_cords and photo.intersections
cropped_img.save(image_path)


@shared_task
def delete_folder(folder_path):
shutil.rmtree(folder_path, ignore_errors=True)


@shared_task
def generate_photolog_from_latex(title, date, render_date, start_slide_image, slides, id_to_name, work_dir, out_file):
template_name = 'photolog.tex'
context = {
'content': generate_tex(title, date, render_date, start_slide_image, slides, id_to_name, work_dir)
}
pdf_bytes = compile_template_to_pdf(template_name, context)
with open(out_file, 'wb+') as file:
file.write(pdf_bytes)
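
# django_tex resolves 'photolog.tex' through Django's template machinery, so
# settings.TEMPLATES needs a TeX engine entry (sketch based on the django_tex
# docs; adjust to the project's actual configuration):
#
#   TEMPLATES += [{'NAME': 'tex', 'BACKEND': 'django_tex.engine.TeXEngine', 'APP_DIRS': True}]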


def max_resize_image_chain(full_file_name, max_width, work_folder_name=None):
    # a uuid4() default argument would be evaluated only once at import time,
    # so every call would share the same work folder; create one per call
    if work_folder_name is None:
        work_folder_name = uuid4()
    BASE_DIR = settings.CELERY_WORK_DIR
    folder = os.path.join(BASE_DIR, str(work_folder_name))
local_file = os.path.join(folder, full_file_name.split('/')[-1])
bucket = settings.AWS_STORAGE_BUCKET_NAME
chain(
download_s3_file.si(
folder,
full_file_name,
bucket
),
max_resize_image.si(
local_file,
max_width=max_width
),
upload_s3_file.si(
local_file,
full_file_name,
bucket
),
delete_folder.si(
folder
),
)()
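
# Example invocation (assumed S3 key): download, resize to 1920px width,
# re-upload, and clean up, all asynchronously on the Celery worker:
#
#   max_resize_image_chain('original_images/photo.jpg', max_width=1920)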


def crop_image_bbox_chain(photo_pk, full_file_name, bbox, rotate_angle, cropped_img_name=None, work_folder_name=None):
    # avoid a shared, import-time uuid4() default; create one per call
    if work_folder_name is None:
        work_folder_name = uuid4()
    BASE_DIR = settings.CELERY_WORK_DIR
    if not cropped_img_name:
        cropped_img_name = os.path.join('cropped_images', str(uuid4()) + '.png')
    folder = os.path.join(BASE_DIR, str(work_folder_name))
local_file = os.path.join(folder, os.path.basename(full_file_name))
bucket = settings.AWS_STORAGE_BUCKET_NAME
s3_upload_name = 'cropped_images/' + os.path.basename(cropped_img_name)
chain(
download_s3_file.si(
folder,
full_file_name,
bucket
),
crop_image_bbox.si(
local_file,
bbox,
rotate_angle
),
upload_s3_file.si(
local_file,
s3_upload_name,
bucket
),
        # photo_log.Photo(pk=photo_pk).cropped_image = s3_upload_name
        update_model_field.si(
'photo_log', 'Photo', photo_pk,
'cropped_image', s3_upload_name
),
delete_folder.si(
folder
),
)()
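
# Example invocation (assumed values): bbox is a list of four corner points,
# of which the first (top-left) and third (bottom-right) define the crop box:
#
#   crop_image_bbox_chain(
#       photo_pk=42,
#       full_file_name='original_images/photo.jpg',
#       bbox=[[10, 20], [500, 20], [500, 400], [10, 400]],
#       rotate_angle=0,
#   )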


def crop_image_auto_chain(photo_pk, full_file_name, cropped_img_name=None, work_folder_name=None):
    # avoid a shared, import-time uuid4() default; create one per call
    if work_folder_name is None:
        work_folder_name = uuid4()
    BASE_DIR = settings.CELERY_WORK_DIR
    if not cropped_img_name:
        cropped_img_name = os.path.join('cropped_images', str(uuid4()) + '.png')
    folder = os.path.join(BASE_DIR, str(work_folder_name))
local_file = os.path.join(folder, os.path.basename(full_file_name))
bucket = settings.AWS_STORAGE_BUCKET_NAME
s3_upload_name = 'cropped_images/' + os.path.basename(cropped_img_name)
chain(
download_s3_file.si(
folder,
full_file_name,
bucket
),
crop_image_auto.si(
local_file
),
upload_s3_file.si(
local_file,
s3_upload_name,
bucket
),
        # photo_log.Photo(pk=photo_pk).cropped_image = s3_upload_name
        update_model_field.si(
'photo_log', 'Photo', photo_pk,
'cropped_image', s3_upload_name
),
delete_folder.si(
folder
),
)()


def get_photo_log_assets_tasks():
BASE_DIR = settings.CELERY_WORK_DIR
folder = os.path.join(BASE_DIR, 'photolog_assets')
bucket = settings.AWS_STORAGE_BUCKET_NAME
download_tasks = []
asset_files = [
'Gill Sans MT Bold.ttf',
'Gill Sans MT Medium.ttf',
'smile.png',
'wood_floor.jpg'
]
for file in asset_files:
path = os.path.join(folder, file)
if not os.path.exists(path):
download_tasks.append(
download_s3_file.si(
folder,
'photolog_assets/' + file,
bucket
)
)
return download_tasks


def generate_photo_log_chain(photo_log_id, work_folder_name=None):
    from photo_log.models import Photo, PhotoLog
    # avoid a shared, import-time uuid4() default; create one per call
    if work_folder_name is None:
        work_folder_name = uuid4()
    BASE_DIR = settings.CELERY_WORK_DIR
    folder = os.path.join(BASE_DIR, str(work_folder_name))
bucket = settings.AWS_STORAGE_BUCKET_NAME
photo_log = PhotoLog.objects.get(pk=photo_log_id)
slides = photo_log.slides
slides = [photo for slide in slides for photo in slide] # flatten slides lists
if photo_log.start_slide_image:
slides.append(photo_log.start_slide_image)
photos = Photo.objects.filter(pk__in=slides).values('id', 'original_image', 'cropped_image')
photo_id_to_file_name = {}
download_files_tasks = []
for photo in photos:
image = photo['cropped_image']
if not image:
image = photo['original_image']
download_files_tasks.append(
download_s3_file.si(
folder,
image,
bucket
)
)
        # remember each photo's file name (without extension) for the TeX layout
        photo_id_to_file_name[photo['id']], _ = os.path.splitext(os.path.basename(image))
download_files_tasks.extend(get_photo_log_assets_tasks())
    download_files_tasks = chord(group(download_files_tasks), chordfinisher.si())
pdf_file = photo_log.pdf.name
if pdf_file:
pdf_file = os.path.basename(pdf_file)
    else:
        pdf_file = str(uuid4()) + '.pdf'
pdf_file = os.path.join(folder, pdf_file)
pdf_s3_path = 'photolog_pdf/' + os.path.basename(pdf_file)
chain(
download_files_tasks,
generate_photolog_from_latex.si(
photo_log.title,
photo_log.date,
photo_log.render_date,
photo_log.start_slide_image,
photo_log.slides,
photo_id_to_file_name,
folder,
pdf_file
),
upload_s3_file.si(
pdf_file,
pdf_s3_path,
bucket
),
update_model_field.si(
'photo_log', 'PhotoLog', photo_log.id,
'pdf', pdf_s3_path
),
notify_client.si(
description='update_photolog_pdf',
content={
'pdf': pdf_s3_path,
'id': photo_log.id
}
),
delete_folder.si(
folder
)
)()
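
# Example invocation (assumed id): downloads all referenced photos and assets,
# compiles the LaTeX photo log, uploads the PDF, updates the model, and
# notifies connected clients:
#
#   generate_photo_log_chain(photo_log_id=7)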