1234567891011121314151617181920212223242526272829303132333435363738 |
def archive(host, archive_host, db_name, table_name, where_clause, column_name_to_log_in_file,
            transaction_size, optimize, index_hint):
    """Archive rows from ``db_name.table_name`` into ``<db>_archive.<table>_archive``,
    then upload the archived rows to S3 and delete them.

    If the archive table already exists (MySQL error 1050 "table already exists"),
    the leftover archived rows from a previous run are first flushed to S3 and
    deleted (empty where-clause = all rows), and the whole archive process is
    retried from the top.

    Args:
        host: connection target for the live database.
        archive_host: connection target for the archive database.
        db_name: source database name.
        table_name: source table name.
        where_clause: SQL predicate selecting which rows to archive.
        column_name_to_log_in_file: column whose values are recorded in the
            uploaded file (passed through to the fetch/upload helper).
        transaction_size: number of rows per transaction/batch.
        optimize: passed through to the archiver (presumably whether to
            OPTIMIZE TABLE afterwards — confirm in archive_utils).
        index_hint: index hint passed through to the archiver.

    Raises:
        ProgrammingError: re-raised for any error other than 1050.
    """
    logging.info('')
    logging.info('')
    logging.info(f'------------- archiving {db_name}.{table_name} -------------')
    archive_db_name = db_name + '_archive'
    archive_table_name = table_name + '_archive'
    db_utils.create_archive_database(db_name, archive_db_name)
    try:
        db_utils.create_archive_table(
            db_name, table_name, archive_db_name, archive_table_name)
    except ProgrammingError as er:
        if er.errno == 1050:
            # Archive table left over from a previous run: flush its contents
            # to S3 and delete them, then retry the full archive.
            # NOTE(review): termination of this recursion assumes the flush
            # removes the archive table (or its rows) so the retry's
            # create_archive_table succeeds — confirm against
            # fetch_archived_data_upload_to_s3_and_delete.
            logging.info(
                f'Archive table {archive_db_name}.{archive_table_name} exists,'
                f' archiving older rows'
            )
            fetch_archived_data_upload_to_s3_and_delete(
                archive_host, db_name, table_name, archive_db_name, archive_table_name,
                column_name_to_log_in_file, transaction_size, '')
            archive(host, archive_host, db_name, table_name, where_clause,
                    column_name_to_log_in_file, transaction_size, optimize, index_hint)
            return None
        # Bare `raise` preserves the original traceback; `raise er` would
        # restart the traceback at this line.
        raise
    archive_utils.archive_to_db(host, archive_host, db_name, table_name, archive_db_name,
                                archive_table_name, where_clause, transaction_size,
                                optimize, index_hint)
    fetch_archived_data_upload_to_s3_and_delete(
        archive_host, db_name, table_name, archive_db_name, archive_table_name,
        column_name_to_log_in_file, transaction_size, where_clause)
|