# bulk-downloader-for-reddit/bdfr/archiver.py
#!/usr/bin/env python3
# coding=utf-8
"""Archiver: writes Reddit submission/comment metadata records to disk."""

import json
import logging
import re
from typing import Iterator

import dict2xml
import praw.models
import yaml

from bdfr.archive_entry.base_archive_entry import BaseArchiveEntry
from bdfr.archive_entry.comment_archive_entry import CommentArchiveEntry
from bdfr.archive_entry.submission_archive_entry import SubmissionArchiveEntry
from bdfr.configuration import Configuration
from bdfr.connector import RedditConnector
from bdfr.exceptions import ArchiverError
from bdfr.resource import Resource

logger = logging.getLogger(__name__)
class Archiver(RedditConnector):
    """Archives Reddit submissions and comments as metadata records.

    Instead of downloading media, each item is compiled into an archive
    entry and serialised to disk in the format selected by
    ``args.format`` (``json``, ``xml``, or ``yaml``).
    """

    def __init__(self, args: Configuration):
        """Initialise the archiver with the parsed CLI configuration."""
        super().__init__(args)

    def download(self):
        """Iterate every configured source and archive each submission.

        Skips submissions whose author is in the ignore-user list
        (deleted authors match the literal string ``DELETED``) and
        submissions whose ID appears in the exclusion list.
        """
        for generator in self.reddit_lists:
            for submission in generator:
                author_name = submission.author.name if submission.author else None
                # A deleted author is represented by None; users can opt
                # to skip those via the sentinel 'DELETED'.
                if (author_name is not None and author_name in self.args.ignore_user) or \
                        (author_name is None and 'DELETED' in self.args.ignore_user):
                    logger.debug(
                        f'Submission {submission.id} in {submission.subreddit.display_name} skipped'
                        f' due to {author_name if author_name else "DELETED"} being an ignored user')
                    continue
                if submission.id in self.excluded_submission_ids:
                    logger.debug(f'Object {submission.id} in exclusion list, skipping')
                    continue
                logger.debug(f'Attempting to archive submission {submission.id}')
                self.write_entry(submission)

    def get_submissions_from_link(self) -> list[list[praw.models.Submission]]:
        """Resolve each ``--link`` argument into a PRAW object.

        Heuristic: a 6-character value is treated as a submission ID, a
        7-character word as a comment ID, and anything else as a full
        URL. NOTE(review): length-based ID detection is fragile — Reddit
        ID lengths grow over time; confirm against current ID widths.
        """
        supplied_submissions = []
        for sub_id in self.args.link:
            if len(sub_id) == 6:
                supplied_submissions.append(self.reddit_instance.submission(id=sub_id))
            elif re.match(r'^\w{7}$', sub_id):
                supplied_submissions.append(self.reddit_instance.comment(id=sub_id))
            else:
                supplied_submissions.append(self.reddit_instance.submission(url=sub_id))
        return [supplied_submissions]

    def get_user_data(self) -> list[Iterator]:
        """Extend the inherited user data with comment streams.

        When ``--all-comments`` is set, appends one sorted comment
        iterator per requested user to the base class's results.
        """
        results = super().get_user_data()
        if self.args.user and self.args.all_comments:
            sort = self.determine_sort_function()
            for user in self.args.user:
                logger.debug(f'Retrieving comments of user {user}')
                results.append(sort(self.reddit_instance.redditor(user).comments, limit=self.args.limit))
        return results

    @staticmethod
    def _pull_lever_entry_factory(praw_item: (praw.models.Submission, praw.models.Comment)) -> BaseArchiveEntry:
        """Wrap a PRAW item in the matching archive-entry class.

        Raises:
            ArchiverError: if the item is neither a Submission nor a Comment.
        """
        # NOTE(review): the tuple annotation above is not a valid type
        # expression (should be a Union); kept for signature compatibility.
        if isinstance(praw_item, praw.models.Submission):
            return SubmissionArchiveEntry(praw_item)
        elif isinstance(praw_item, praw.models.Comment):
            return CommentArchiveEntry(praw_item)
        else:
            raise ArchiverError(f'Factory failed to classify item of type {type(praw_item).__name__}')

    def write_entry(self, praw_item: (praw.models.Submission, praw.models.Comment)):
        """Serialise one item to disk in the configured format.

        With ``--comment-context``, a comment is replaced by its parent
        submission before archiving.

        Raises:
            ArchiverError: if ``args.format`` is not json/xml/yaml.
        """
        if self.args.comment_context and isinstance(praw_item, praw.models.Comment):
            logger.debug(f'Converting comment {praw_item.id} to submission {praw_item.submission.id}')
            praw_item = praw_item.submission
        archive_entry = self._pull_lever_entry_factory(praw_item)
        # Dispatch table replaces an if/elif chain; unknown formats still
        # raise ArchiverError with the same message as before.
        writers = {
            'json': self._write_entry_json,
            'xml': self._write_entry_xml,
            'yaml': self._write_entry_yaml,
        }
        try:
            writer = writers[self.args.format]
        except KeyError:
            raise ArchiverError(f'Unknown format {self.args.format} given') from None
        writer(archive_entry)
        logger.info(f'Record for entry item {praw_item.id} written to disk')

    def _write_entry_json(self, entry: BaseArchiveEntry):
        """Write the entry's compiled data as a .json file."""
        resource = Resource(entry.source, '', lambda: None, '.json')
        content = json.dumps(entry.compile())
        self._write_content_to_disk(resource, content)

    def _write_entry_xml(self, entry: BaseArchiveEntry):
        """Write the entry's compiled data as a .xml file under a 'root' element."""
        resource = Resource(entry.source, '', lambda: None, '.xml')
        content = dict2xml.dict2xml(entry.compile(), wrap='root')
        self._write_content_to_disk(resource, content)

    def _write_entry_yaml(self, entry: BaseArchiveEntry):
        """Write the entry's compiled data as a .yaml file."""
        resource = Resource(entry.source, '', lambda: None, '.yaml')
        content = yaml.dump(entry.compile())
        self._write_content_to_disk(resource, content)

    def _write_content_to_disk(self, resource: Resource, content: str):
        """Write serialised content to its formatted path, creating parents."""
        file_path = self.file_name_formatter.format_path(resource, self.download_directory)
        file_path.parent.mkdir(exist_ok=True, parents=True)
        with open(file_path, 'w', encoding="utf-8") as file:
            logger.debug(
                f'Writing entry {resource.source_submission.id} to file in {resource.extension[1:].upper()}'
                f' format at {file_path}')
            file.write(content)