compose_s3_backup_2.py

import math
import os

import boto
from filechunkio import FileChunkIO


def upload_to_s3(s3key, filename, bucket_name, aws_key, aws_secret):
    conn = boto.connect_s3(aws_key, aws_secret)
    bucket = conn.get_bucket(bucket_name)

    # Get file info
    source_path = filename
    source_size = os.stat(source_path).st_size

    # Create a multipart upload request
    mp = bucket.initiate_multipart_upload(s3key)

    # Use a chunk size of 50 MiB (every part except the last must be
    # at least 5 MiB, per the S3 multipart upload limits)
    chunk_size = 52428800
    chunk_count = int(math.ceil(source_size / float(chunk_size)))

    # Send the file parts, using FileChunkIO to create a file-like object
    # that points to a certain byte range within the original file. We
    # set bytes to never exceed the original file size.
    for i in range(chunk_count):
        print('Uploading file chunk: {0} of {1}'.format(i + 1, chunk_count))
        offset = chunk_size * i
        bytes = min(chunk_size, source_size - offset)
        with FileChunkIO(source_path, 'r', offset=offset, bytes=bytes) as fp:
            mp.upload_part_from_file(fp, part_num=i + 1)

    # Finish the upload
    completed_upload = mp.complete_upload()
    return completed_upload
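
A minimal usage sketch follows; the bucket name, key, file path, and environment-variable names are placeholders, not values from the original script. In practice the credentials would come from the environment or an IAM role rather than being hard-coded:

import os

# Hypothetical invocation of upload_to_s3; all values below are
# illustrative placeholders.
upload_to_s3(
    s3key='backups/db-dump.tar.gz',
    filename='/tmp/db-dump.tar.gz',
    bucket_name='my-backup-bucket',
    aws_key=os.environ['AWS_ACCESS_KEY_ID'],
    aws_secret=os.environ['AWS_SECRET_ACCESS_KEY'],
)

One caveat worth knowing: if a part upload raises partway through, the incomplete multipart upload still exists on S3 and accrues storage charges until it is aborted. Wrapping the loop in a try/except that calls mp.cancel_upload() on failure is a reasonable safeguard.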