bulk-downloader-for-reddit/bdfr/resource.py

80 lines
2.9 KiB
Python
Raw Normal View History

2021-02-11 07:09:49 +08:00
#!/usr/bin/env python3
# coding=utf-8
import hashlib
2021-03-11 11:20:39 +08:00
import logging
2021-02-11 07:09:49 +08:00
import re
2021-02-26 16:56:05 +08:00
import time
2021-04-18 19:24:11 +08:00
import urllib.parse
from collections import namedtuple
2021-07-27 11:39:49 +08:00
from typing import Callable, Optional
2021-02-11 07:09:49 +08:00
2021-02-26 16:56:05 +08:00
import _hashlib
import requests
2021-02-11 07:09:49 +08:00
from praw.models import Submission
2021-04-12 15:58:32 +08:00
from bdfr.exceptions import BulkDownloaderException
2021-02-26 16:56:05 +08:00
2021-03-11 11:20:39 +08:00
logger = logging.getLogger(__name__)
2021-02-11 07:09:49 +08:00
class Resource:
    """A single downloadable item (image, video, etc.) attached to a Reddit submission.

    Wraps the resource URL together with a download strategy, and lazily holds
    the downloaded bytes and their MD5 hash once fetched.
    """

    def __init__(self, source_submission: Submission, url: str, download_function: Callable,
                 extension: Optional[str] = None):
        """Create a resource for *url* belonging to *source_submission*.

        :param source_submission: praw submission this resource came from
        :param url: direct URL of the resource
        :param download_function: zero-argument callable returning the resource bytes
        :param extension: file extension including the leading dot; guessed
            from the URL path when omitted
        """
        self.source_submission = source_submission
        self.content: Optional[bytes] = None  # filled in by download()
        self.url = url
        self.hash: Optional[_hashlib.HASH] = None  # filled in by create_hash()
        self.extension = extension
        self.download_function = download_function
        if not self.extension:
            self.extension = self._determine_extension()

    @staticmethod
    def retry_download(url: str) -> Callable:
        """Return a zero-argument callable that downloads *url* over HTTP.

        Transient failures (connection errors, chunked-encoding errors, request
        timeouts, and HTTP 408/429 responses) are retried with a back-off that
        grows by 60 seconds per attempt up to ``max_wait_time``; once the cap
        is reached the last error is raised. Any other non-2xx status (or a
        2xx response with an empty body) aborts immediately with
        :class:`BulkDownloaderException`.
        """
        max_wait_time = 300

        def http_download() -> Optional[bytes]:
            current_wait_time = 60
            while True:
                try:
                    # A finite timeout prevents the downloader from hanging
                    # forever on a stalled connection; timeouts are treated as
                    # transient and retried below.
                    response = requests.get(url, timeout=60)
                    if 200 <= response.status_code < 300 and response.content:
                        return response.content
                    elif response.status_code in (408, 429):
                        # Request Timeout / Too Many Requests: transient, so
                        # funnel into the retry path below
                        raise requests.exceptions.ConnectionError(f'Response code {response.status_code}')
                    else:
                        raise BulkDownloaderException(
                            f'Unrecoverable error requesting resource: HTTP Code {response.status_code}')
                except (requests.exceptions.ConnectionError,
                        requests.exceptions.ChunkedEncodingError,
                        requests.exceptions.Timeout) as e:
                    logger.warning(f'Error occured downloading from {url}, waiting {current_wait_time} seconds: {e}')
                    time.sleep(current_wait_time)
                    if current_wait_time < max_wait_time:
                        current_wait_time += 60
                    else:
                        logger.error(f'Max wait time exceeded for resource at url {url}')
                        raise
        return http_download

    def download(self):
        """Fetch and store the resource content, then hash it, if not already done.

        :raises BulkDownloaderException: if the download fails irrecoverably
        """
        if not self.content:
            try:
                content = self.download_function()
            except requests.exceptions.ConnectionError as e:
                raise BulkDownloaderException(f'Could not download resource: {e}') from e
            if content:
                self.content = content
        if not self.hash and self.content:
            self.create_hash()

    def create_hash(self):
        """Compute and store the MD5 hash of the downloaded content.

        Used for duplicate detection, not for any security purpose.
        """
        self.hash = hashlib.md5(self.content)

    def _determine_extension(self) -> Optional[str]:
        """Guess the file extension (with leading dot) from the URL path.

        Returns None when the path does not end in a 3-5 character extension.
        """
        extension_pattern = re.compile(r'.*(\..{3,5})$')
        # Strip query string and fragment so e.g. '?width=100' is not matched
        stripped_url = urllib.parse.urlsplit(self.url).path
        match = re.search(extension_pattern, stripped_url)
        if match:
            return match.group(1)
        return None