#!/usr/bin/env python3
# coding=utf-8

import hashlib
import logging.handlers
import os
import time
from datetime import datetime
from multiprocessing import Pool
from pathlib import Path

import praw
import praw.exceptions
import praw.models

from bdfr import exceptions as errors
from bdfr.configuration import Configuration
from bdfr.connector import RedditConnector
from bdfr.site_downloaders.download_factory import DownloadFactory

logger = logging.getLogger(__name__)


def _calc_hash(existing_file: Path):
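    # Hash the file in 1 MiB chunks so large files are never read into memory whole; the path is
    # returned alongside the digest so results can be matched up when run through a process pool.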
    chunk_size = 1024 * 1024
    md5_hash = hashlib.md5()
    with open(existing_file, 'rb') as file:
        chunk = file.read(chunk_size)
        while chunk:
            md5_hash.update(chunk)
            chunk = file.read(chunk_size)
    file_hash = md5_hash.hexdigest()
    return existing_file, file_hash


class RedditDownloader(RedditConnector):
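    # RedditConnector (the parent class) supplies the parsed Configuration, the authenticator,
    # the download directory, and the submission generators consumed by download() below.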
    def __init__(self, args: Configuration):
        super(RedditDownloader, self).__init__(args)
        if self.args.search_existing:
            self.master_hash_list = self.scan_existing_files(self.download_directory)

    def download(self):
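        # Each entry in self.reddit_lists is a generator of praw submissions; every submission is
        # handed off to _download_submission in turn.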
        for generator in self.reddit_lists:
            for submission in generator:
                self._download_submission(submission)

    def _download_submission(self, submission: praw.models.Submission):
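        # Work through the skip conditions first (exclusion list, skipped subreddits, ignored or
        # deleted authors, non-submission objects, URL filter) before attempting any download.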
        if submission.id in self.excluded_submission_ids:
            logger.debug(f'Object {submission.id} in exclusion list, skipping')
            return
        elif submission.subreddit.display_name.lower() in self.args.skip_subreddit:
            logger.debug(f'Submission {submission.id} in {submission.subreddit.display_name} in skip list')
            return
        elif (submission.author and submission.author.name in self.args.ignore_user) or \
                (submission.author is None and 'DELETED' in self.args.ignore_user):
            logger.debug(
                f'Submission {submission.id} in {submission.subreddit.display_name} skipped'
                f' due to {submission.author.name if submission.author else "DELETED"} being an ignored user')
            return
        elif not isinstance(submission, praw.models.Submission):
            logger.warning(f'{submission.id} is not a submission')
            return
        elif not self.download_filter.check_url(submission.url):
            logger.debug(f'Submission {submission.id} filtered due to URL {submission.url}')
            return

        logger.debug(f'Attempting to download submission {submission.id}')
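        # DownloadFactory.pull_lever picks the site-downloader class for this URL; constructing the
        # downloader and discovering its resources can each fail, so both are wrapped and logged.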
        try:
            downloader_class = DownloadFactory.pull_lever(submission.url)
            downloader = downloader_class(submission)
            logger.debug(f'Using {downloader_class.__name__} with url {submission.url}')
        except errors.NotADownloadableLinkError as e:
            logger.error(f'Could not download submission {submission.id}: {e}')
            return
        if downloader_class.__name__.lower() in self.args.disable_module:
            logger.debug(f'Submission {submission.id} skipped due to disabled module {downloader_class.__name__}')
            return
        try:
            content = downloader.find_resources(self.authenticator)
        except errors.SiteDownloaderError as e:
            logger.error(f'Site {downloader_class.__name__} failed to download submission {submission.id}: {e}')
            return
        for destination, res in self.file_name_formatter.format_resource_paths(content, self.download_directory):
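            # Each resource is paired with the destination path chosen by the file name formatter;
            # existing files and filtered resources are skipped before anything is fetched.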
            if destination.exists():
                logger.debug(f'File {destination} from submission {submission.id} already exists, continuing')
                continue
            elif not self.download_filter.check_resource(res):
                logger.debug(f'Download filter removed {submission.id} file with URL {submission.url}')
                continue
            try:
                res.download({'max_wait_time': self.args.max_wait_time})
            except errors.BulkDownloaderException as e:
                logger.error(f'Failed to download resource {res.url} in submission {submission.id} '
                             f'with downloader {downloader_class.__name__}: {e}')
                return
            resource_hash = res.hash.hexdigest()
            destination.parent.mkdir(parents=True, exist_ok=True)
            if resource_hash in self.master_hash_list:
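                # A hash already in the master list means the same bytes were downloaded elsewhere:
                # the no_dupes option drops the copy outright, while make_hard_links links to the
                # existing file instead of writing it again. (Path.link_to() was removed in
                # Python 3.12; destination.hardlink_to(existing) is the modern equivalent.)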
                if self.args.no_dupes:
                    logger.info(
                        f'Resource hash {resource_hash} from submission {submission.id} downloaded elsewhere')
                    return
                elif self.args.make_hard_links:
                    self.master_hash_list[resource_hash].link_to(destination)
                    logger.info(
                        f'Hard link made linking {destination} to {self.master_hash_list[resource_hash]}'
                        f' in submission {submission.id}')
                    return
            try:
                with open(destination, 'wb') as file:
                    file.write(res.content)
                logger.debug(f'Written file to {destination}')
            except OSError as e:
                logger.exception(e)
                logger.error(f'Failed to write file in submission {submission.id} to {destination}: {e}')
                return
            creation_time = time.mktime(datetime.fromtimestamp(submission.created_utc).timetuple())
            os.utime(destination, (creation_time, creation_time))
            self.master_hash_list[resource_hash] = destination
            logger.debug(f'Hash added to master list: {resource_hash}')
        logger.info(f'Downloaded submission {submission.id} from {submission.subreddit.display_name}')

    @staticmethod
    def scan_existing_files(directory: Path) -> dict[str, Path]:
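        # Walk the download directory and hash every file in a fixed pool of 15 worker processes,
        # returning a map of MD5 digest -> path that seeds the duplicate detection above.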
        files = []
        for (dirpath, dirnames, filenames) in os.walk(directory):
            files.extend([Path(dirpath, file) for file in filenames])
        logger.info(f'Calculating hashes for {len(files)} files')

        pool = Pool(15)
        results = pool.map(_calc_hash, files)
        pool.close()

        hash_list = {res[1]: res[0] for res in results}
        return hash_list
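
# Illustrative usage sketch (not part of the original module): the downloader is normally driven by
# the bdfr CLI, but it can be invoked directly given a populated Configuration. The attribute names
# used below (directory, subreddit) are assumptions about Configuration fields, not taken from this file.
#
#     config = Configuration()
#     config.directory = './downloads'
#     config.subreddit = ['EarthPorn']
#     RedditDownloader(config).download()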