# db_archiver_2.py
  1. def archive(host, archive_host, db_name, table_name, where_clause, column_name_to_log_in_file,
  2. transaction_size, optimize, index_hint):
  3. logging.info('')
  4. logging.info('')
  5. logging.info(f'------------- archiving {db_name}.{table_name} -------------')
  6. archive_db_name = db_name + '_archive'
  7. archive_table_name = table_name + '_archive'
  8. db_utils.create_archive_database(db_name, archive_db_name)
  9. try:
  10. db_utils.create_archive_table(
  11. db_name, table_name, archive_db_name, archive_table_name)
  12. except ProgrammingError as er:
  13. if er.errno == 1050:
  14. logging.info(
  15. f'Archive table {archive_db_name}.{archive_table_name} exists,'
  16. f' archiving older rows'
  17. )
  18. fetch_archived_data_upload_to_s3_and_delete(
  19. archive_host, db_name, table_name, archive_db_name, archive_table_name,
  20. column_name_to_log_in_file, transaction_size, '')
  21. archive(host, archive_host, db_name, table_name, where_clause, column_name_to_log_in_file, transaction_size,
  22. optimize, index_hint)
  23. return None
  24. else:
  25. raise er
  26. archive_utils.archive_to_db(host, archive_host, db_name, table_name, archive_db_name, archive_table_name,
  27. where_clause, transaction_size, optimize, index_hint)
  28. fetch_archived_data_upload_to_s3_and_delete(
  29. archive_host, db_name, table_name, archive_db_name, archive_table_name,
  30. column_name_to_log_in_file, transaction_size, where_clause)