s3-mp-download.py 6.4 KB

#!/usr/bin/env python
import argparse
import logging
from math import ceil
from multiprocessing import Pool
import os
import time
import urlparse

import boto
from boto.s3.connection import OrdinaryCallingFormat

parser = argparse.ArgumentParser(description="Download a file from S3 in parallel",
                                 prog="s3-mp-download")
parser.add_argument("src", help="The S3 key to download")
parser.add_argument("dest", help="The destination file")
parser.add_argument("-np", "--num-processes", help="Number of processes to use",
                    type=int, default=2)
parser.add_argument("-s", "--split", help="Split size, in MB", type=int, default=32)
parser.add_argument("-f", "--force", help="Overwrite an existing file",
                    action="store_true")
parser.add_argument("--insecure", dest='secure', help="Use HTTP for connection",
                    default=True, action="store_false")
parser.add_argument("-t", "--max-tries", help="Max allowed retries for HTTP timeouts",
                    type=int, default=5)
parser.add_argument("-v", "--verbose", help="Be more verbose",
                    default=False, action="store_true")
parser.add_argument("-q", "--quiet", help="Be less verbose (for use in cron jobs)",
                    default=False, action="store_true")
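
# Example invocation (a sketch; the bucket name and paths below are hypothetical):
#   python s3-mp-download.py s3://my-bucket/path/to/large.bin ./large.bin -np 4 -s 64 -v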

logger = logging.getLogger("s3-mp-download")

def do_part_download(args):
    """
    Download one part of an S3 object using the Range header.

    We use the existing S3 GET request implemented by boto and tack on a
    Range header, then read the response in chunks and write them out at the
    correct offset in the target file.

    :type args: tuple
    :param args: (bucket name, key name, local file name, first byte,
        last byte, split size in MB, secure flag, max tries, current try
        count). multiprocessing.Pool only passes a single argument to the
        worker, so everything is packed into one tuple.
    """
    bucket_name, key_name, fname, min_byte, max_byte, split, secure, max_tries, current_tries = args
    conn = boto.connect_s3(calling_format=OrdinaryCallingFormat())
    conn.is_secure = secure

    # Make the S3 request for just this byte range, e.g. "Range: bytes=0-33554431"
    resp = conn.make_request("GET", bucket=bucket_name, key=key_name,
                             headers={'Range': "bytes=%d-%d" % (min_byte, max_byte)})

    # Open the target file and seek to this part's byte offset
    fd = os.open(fname, os.O_WRONLY)
    logger.debug("Opening file descriptor %d, seeking to %d" % (fd, min_byte))
    os.lseek(fd, min_byte, os.SEEK_SET)

    chunk_size = min((max_byte - min_byte), split * 1024 * 1024)
    logger.debug("Reading HTTP stream in %dM chunks" % (chunk_size / 1024. / 1024))
    t1 = time.time()
    s = 0
    try:
        while True:
            data = resp.read(chunk_size)
            if data == "":
                break
            os.write(fd, data)
            s += len(data)
        t2 = time.time() - t1
        os.close(fd)
        s = s / 1024 / 1024.
        logger.debug("Downloaded %0.2fM in %0.2fs at %0.2fMBps" % (s, t2, s / t2))
    except Exception as err:
        logger.debug("Retry request %d of max %d times" % (current_tries, max_tries))
        os.close(fd)
        if current_tries > max_tries:
            logger.error(err)
        else:
            time.sleep(3)
            current_tries += 1
            # The retry takes a single tuple, just like the original call from the pool
            do_part_download((bucket_name, key_name, fname, min_byte, max_byte,
                              split, secure, max_tries, current_tries))


def gen_byte_ranges(size, num_parts):
    part_size = int(ceil(1. * size / num_parts))
    for i in range(num_parts):
        yield (part_size * i, min(part_size * (i + 1) - 1, size - 1))
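
# Worked example (illustrative numbers): for size=100 bytes and num_parts=3,
# part_size = ceil(100/3) = 34, so the generator yields the inclusive ranges
# (0, 33), (34, 67), (68, 99), covering every byte exactly once.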

def main(src, dest, num_processes=2, split=32, force=False, verbose=False,
         quiet=False, secure=True, max_tries=5):
    # Check that src is a valid S3 URL
    split_rs = urlparse.urlsplit(src)
    if split_rs.scheme != "s3":
        raise ValueError("'%s' is not an S3 url" % src)

    # Check that dest does not exist
    if os.path.isdir(dest):
        filename = split_rs.path.split('/')[-1]
        dest = os.path.join(dest, filename)
    if os.path.exists(dest):
        if force:
            os.remove(dest)
        else:
            raise ValueError("Destination file '%s' exists, specify -f to"
                             " overwrite" % dest)

    # Split out the bucket and the key
    s3 = boto.connect_s3(calling_format=OrdinaryCallingFormat())
    s3.is_secure = secure
    logger.debug("split_rs: %s" % str(split_rs))
    bucket = s3.lookup(split_rs.netloc)
    if bucket is None:
        raise ValueError("'%s' is not a valid bucket" % split_rs.netloc)
    key = bucket.get_key(split_rs.path)
    if key is None:
        raise ValueError("'%s' does not exist." % split_rs.path)

    # Determine the total size and calculate byte ranges
    resp = s3.make_request("HEAD", bucket=bucket, key=key)
    if resp is None:
        raise ValueError("response is invalid.")
    size = int(resp.getheader("content-length"))
    logger.debug("Got headers: %s" % resp.getheaders())

    # Skip multipart if the file is less than 1MB
    if size < 1024 * 1024:
        t1 = time.time()
        key.get_contents_to_filename(dest)
        t2 = time.time() - t1
        size_mb = size / 1024. / 1024.
        logger.info("Finished single-part download of %0.2fM in %0.2fs (%0.2fMBps)" %
                    (size_mb, t2, size_mb / t2))
    else:
        # Touch the file so each worker process can open it for writing
        fd = os.open(dest, os.O_CREAT)
        os.close(fd)
        size_mb = size / 1024 / 1024
        # Round the size up to the next multiple of the split size
        num_parts = (size_mb + (-size_mb % split)) // split
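        # Worked example (illustrative numbers): with size_mb=100 and split=32,
        # -100 % 32 == 28, so num_parts = (100 + 28) // 32 = 4 parts of up to 32MB each.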

        def arg_iterator(num_parts):
            for min_byte, max_byte in gen_byte_ranges(size, num_parts):
                yield (bucket.name, key.name, dest, min_byte, max_byte, split,
                       secure, max_tries, 0)

        s = size / 1024 / 1024.
        try:
            t1 = time.time()
            pool = Pool(processes=num_processes)
            # map_async().get() with a large timeout (rather than a plain map())
            # keeps KeyboardInterrupt deliverable while the workers run
            pool.map_async(do_part_download, arg_iterator(num_parts)).get(9999999)
            t2 = time.time() - t1
            logger.info("Finished downloading %0.2fM in %0.2fs (%0.2fMBps)" %
                        (s, t2, s / t2))
        except KeyboardInterrupt:
            logger.warning("User terminated")
        except Exception as err:
            logger.error(err)


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    args = parser.parse_args()
    arg_dict = vars(args)
    if arg_dict['quiet']:
        logger.setLevel(logging.WARNING)
    if arg_dict['verbose']:
        logger.setLevel(logging.DEBUG)
    logger.debug("CLI args: %s" % args)
    main(**arg_dict)