  1. #!/usr/bin/env python3
  2. # coding=utf-8
  3. import json
  4. import logging
  5. import re
  6. from typing import Iterator
  7. import dict2xml
  8. import praw.models
  9. import yaml
  10. from bdfr.archive_entry.base_archive_entry import BaseArchiveEntry
  11. from bdfr.archive_entry.comment_archive_entry import CommentArchiveEntry
  12. from bdfr.archive_entry.submission_archive_entry import SubmissionArchiveEntry
  13. from bdfr.configuration import Configuration
  14. from bdfr.connector import RedditConnector
  15. from bdfr.exceptions import ArchiverError
  16. from bdfr.resource import Resource
# Module-level logger named after this module, per the logging convention.
logger = logging.getLogger(__name__)
  18. class Archiver(RedditConnector):
  19. def __init__(self, args: Configuration):
  20. super(Archiver, self).__init__(args)
  21. def download(self):
  22. for generator in self.reddit_lists:
  23. for submission in generator:
  24. if (submission.author and submission.author.name in self.args.ignore_user) or \
  25. (submission.author is None and 'DELETED' in self.args.ignore_user):
  26. logger.debug(
  27. f'Submission {submission.id} in {submission.subreddit.display_name} skipped'
  28. f' due to {submission.author.name if submission.author else "DELETED"} being an ignored user')
  29. continue
  30. logger.debug(f'Attempting to archive submission {submission.id}')
  31. self.write_entry(submission)
  32. def get_submissions_from_link(self):
  33. supplied_submissions = []
  34. for sub_id in self.args.link:
  35. if len(sub_id) == 6:
  36. supplied_submissions.append(self.reddit_instance.submission(id=sub_id))
  37. elif re.match(r'^\w{7}$', sub_id):
  38. supplied_submissions.append(self.reddit_instance.comment(id=sub_id))
  39. else:
  40. supplied_submissions.append(self.reddit_instance.submission(url=sub_id))
  41. return [supplied_submissions]
  42. def get_user_data(self):
  43. results = super(Archiver, self).get_user_data()
  44. if self.args.user and self.args.all_comments:
  45. sort = self.determine_sort_function()
  46. for user in self.args.user:
  47. logger.debug(f'Retrieving comments of user {user}')
  48. results.append(sort(self.reddit_instance.redditor(user).comments, limit=self.args.limit))
  49. return results
  50. def _write_entry_json(self, entry: BaseArchiveEntry):
  51. resource = Resource(entry.source, '', lambda: None, '.json')
  52. content = json.dumps(entry.compile())
  53. self._write_content_to_disk(resource, content)
  54. def _write_entry_xml(self, entry: BaseArchiveEntry):
  55. resource = Resource(entry.source, '', lambda: None, '.xml')
  56. content = dict2xml.dict2xml(entry.compile(), wrap='root')
  57. self._write_content_to_disk(resource, content)
  58. def _write_entry_yaml(self, entry: BaseArchiveEntry):
  59. resource = Resource(entry.source, '', lambda: None, '.yaml')
  60. content = yaml.dump(entry.compile())
  61. self._write_content_to_disk(resource, content)
  62. def _write_content_to_disk(self, resource: Resource, content: str):
  63. file_path = self.file_name_formatter.format_path(resource, self.download_directory)
  64. file_path.parent.mkdir(exist_ok=True, parents=True)
  65. with open(file_path, 'w', encoding="utf-8") as file:
  66. logger.debug(
  67. f'Writing entry {resource.source_submission.id} to file in {resource.extension[1:].upper()}'
  68. f' format at {file_path}')
  69. file.write(content)