2019-12-18 22:27:06 +00:00
|
|
|
from YtManagerApp.services.scheduler.jobs.download_video_job import DownloadVideoJob
|
2018-10-27 00:33:45 +00:00
|
|
|
from YtManagerApp.models import Video, Subscription, VIDEO_ORDER_MAPPING
|
2018-12-09 23:15:10 +00:00
|
|
|
from YtManagerApp.utils import first_non_null
|
2018-10-27 00:33:45 +00:00
|
|
|
from django.conf import settings as srv_settings
|
2018-10-10 22:43:50 +00:00
|
|
|
import logging
|
|
|
|
import requests
|
|
|
|
import mimetypes
|
|
|
|
import os
|
2019-08-19 18:05:13 +00:00
|
|
|
import PIL.Image
|
|
|
|
import PIL.ImageOps
|
2018-10-10 22:43:50 +00:00
|
|
|
from urllib.parse import urljoin
|
|
|
|
|
|
|
|
# Module-level logger shared by all downloader functions in this file.
log = logging.getLogger('downloader')
|
|
|
|
|
|
|
|
|
|
|
|
def __get_subscription_config(sub: Subscription):
    """Resolve the effective download settings for a subscription.

    Per-subscription overrides (auto_download, download_limit,
    download_order) take precedence; the owning user's preferences
    supply the fallback values.

    :param sub: Subscription whose settings should be resolved.
    :return: Tuple (enabled, global_limit, limit, order) where *order*
        has already been mapped through VIDEO_ORDER_MAPPING to a value
        usable with QuerySet.order_by().
    """
    prefs = sub.user.preferences

    enabled = first_non_null(sub.auto_download, prefs['auto_download'])
    global_limit = prefs['download_global_limit']
    limit = first_non_null(sub.download_limit, prefs['download_subscription_limit'])
    order_key = first_non_null(sub.download_order, prefs['download_order'])

    return enabled, global_limit, limit, VIDEO_ORDER_MAPPING[order_key]
|
|
|
|
|
|
|
|
|
2018-10-20 22:20:31 +00:00
|
|
|
def downloader_process_subscription(sub: Subscription):
    """Enqueue download jobs for a subscription's pending videos.

    Honors the subscription's auto-download flag, the user's global
    download limit, and the per-subscription limit. Limits count videos
    that are already downloaded, so only the remaining allowance is
    enqueued.

    :param sub: Subscription to process.
    """
    log.info('Processing subscription %d [%s %s]', sub.id, sub.playlist_id, sub.id)

    enabled, global_limit, limit, order = __get_subscription_config(sub)
    log.info('Determined settings enabled=%s global_limit=%d limit=%d order="%s"', enabled, global_limit, limit, order)

    if enabled:
        # Candidates: not yet downloaded, not watched, in configured order.
        candidates = Video.objects \
            .filter(subscription=sub, downloaded_path__isnull=True, watched=False) \
            .order_by(order)
        log.info('%d download candidates.', len(candidates))

        # Cap by the user's account-wide download allowance.
        if global_limit > 0:
            already_downloaded = Video.objects.filter(subscription__user=sub.user, downloaded_path__isnull=False).count()
            remaining = max(global_limit - already_downloaded, 0)
            candidates = candidates[0:remaining]
            log.info('Global limit is set, can only download up to %d videos.', remaining)

        # Cap by this subscription's own allowance.
        if limit > 0:
            already_downloaded = Video.objects.filter(subscription=sub, downloaded_path__isnull=False).count()
            remaining = max(limit - already_downloaded, 0)
            candidates = candidates[0:remaining]
            log.info('Limit is set, can only download up to %d videos.', remaining)

        # Hand the surviving candidates off to the scheduler.
        for video in candidates:
            log.info('Enqueuing video %d [%s %s] index=%d', video.id, video.video_id, video.name, video.playlist_index)
            DownloadVideoJob.schedule(video)

    log.info('Finished processing subscription %d [%s %s]', sub.id, sub.playlist_id, sub.id)
|
|
|
|
|
|
|
|
|
|
|
|
def downloader_process_all():
    """Process every subscription in the database, one at a time."""
    all_subscriptions = Subscription.objects.all()
    for sub in all_subscriptions:
        downloader_process_subscription(sub)
|
2018-10-10 22:43:50 +00:00
|
|
|
|
|
|
|
|
2019-08-19 18:05:13 +00:00
|
|
|
def fetch_thumbnail(url, object_type, identifier, thumb_size):
    """Download a thumbnail image, fit it to *thumb_size* and store it
    under MEDIA_ROOT/thumbs/<object_type>/.

    :param url: Source URL of the thumbnail.
    :param object_type: Category subfolder name (e.g. per model type).
    :param identifier: Unique file name stem for the stored thumbnail.
    :param thumb_size: (width, height) the image is resized/cropped to.
    :return: Media URL of the stored thumbnail, or the original *url*
        unchanged if downloading or storing failed (best-effort fallback).
    """
    log.info('Fetching thumbnail url=%s object_type=%s identifier=%s', url, object_type, identifier)

    # Make request to obtain mime type
    try:
        response = requests.get(url, stream=True)
    except requests.exceptions.RequestException as e:
        log.error('Failed to fetch thumbnail %s. Error: %s', url, e)
        return url

    # guess_extension() returns None for unknown types, and the header may
    # be absent entirely; fall back to no extension rather than crashing or
    # producing a literal 'None' suffix in the file name.
    ext = mimetypes.guess_extension(response.headers.get('Content-Type', '')) or ''

    # Build file path
    file_name = f"{identifier}{ext}"
    abs_path_dir = os.path.join(srv_settings.MEDIA_ROOT, "thumbs", object_type)
    abs_path = os.path.join(abs_path_dir, file_name)
    # Place the temp file next to the final file; previously it was created
    # relative to the process working directory, which may be read-only,
    # shared between workers, or simply never cleaned up.
    abs_path_tmp = abs_path + '.tmp'

    # Store image
    try:
        os.makedirs(abs_path_dir, exist_ok=True)
        with open(abs_path_tmp, "wb") as f:
            for chunk in response.iter_content(chunk_size=1024):
                if chunk:
                    f.write(chunk)

        # Resize and crop to thumbnail size. The context manager guarantees
        # the image file handle is closed even if fit()/save() raises.
        with PIL.Image.open(abs_path_tmp) as image:
            fitted = PIL.ImageOps.fit(image, thumb_size)
            fitted.save(abs_path)

        # Delete temp file
        os.unlink(abs_path_tmp)

    except requests.exceptions.RequestException as e:
        log.error('Error while downloading stream for thumbnail %s. Error: %s', url, e)
        return url
    except OSError as e:
        log.error('Error while writing to file %s for thumbnail %s. Error: %s', abs_path, url, e)
        return url

    # Return
    media_url = urljoin(srv_settings.MEDIA_URL, f"thumbs/{object_type}/{file_name}")
    return media_url
|