```python
import base64
import hashlib
import os
import time

# SrcDir, DesBucket, StorageClass, MaxRetry, s3_dest_client, logger and
# size_to_str() are module-level settings/helpers defined elsewhere in the script.


def uploadThread_small(srcfile, prefix_and_key):
    """Upload a small file in a single PutObject call, verifying integrity via Content-MD5."""
    print(f'\033[0;32;1m--->Uploading\033[0m {srcfile["Key"]} - small file')
    with open(os.path.join(SrcDir, srcfile["Key"]), 'rb') as data:
        for retryTime in range(MaxRetry + 1):
            try:
                pstart_time = time.time()
                data.seek(0)  # rewind so a retry re-reads the file instead of getting empty bytes
                chunkdata = data.read()
                chunkdata_md5 = hashlib.md5(chunkdata)
                s3_dest_client.put_object(
                    Body=chunkdata,
                    Bucket=DesBucket,
                    Key=prefix_and_key,
                    # S3 verifies the body against this MD5 and rejects corrupted uploads
                    ContentMD5=base64.b64encode(chunkdata_md5.digest()).decode('utf-8'),
                    StorageClass=StorageClass
                )
                pload_time = time.time() - pstart_time
                pload_bytes = len(chunkdata)
                pload_speed = size_to_str(int(pload_bytes / pload_time)) + "/s"
                print(f'\033[0;34;1m --->Complete\033[0m {srcfile["Key"]} - small file - {pload_speed}')
                break
            except Exception as e:
                logger.warning(f'Upload small file Fail: {srcfile["Key"]}, '
                               f'{str(e)}, Attempts: {retryTime}')
                if retryTime >= MaxRetry:
                    logger.error(f'Fail MaxRetry Download/Upload small file: {srcfile["Key"]}')
                    return "MaxRetry"
                else:
                    time.sleep(5 * retryTime)  # linear backoff before the next attempt
    return
```
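The function above is a worker routine. The sketch below is a minimal, hypothetical driver showing how it might be fanned out over a thread pool; the `upload_all_small` helper and `max_threads` parameter are illustrative assumptions, not part of the original tool, and `prefix_and_key` is assumed here to equal the source key.

```python
from concurrent.futures import ThreadPoolExecutor


def upload_all_small(file_list, max_threads=5):
    """Hypothetical driver: submit each small file record to uploadThread_small."""
    with ThreadPoolExecutor(max_workers=max_threads) as pool:
        # file_list items are dicts with at least a "Key" field, matching the
        # srcfile shape expected by uploadThread_small.
        futures = {pool.submit(uploadThread_small, f, f["Key"]): f["Key"] for f in file_list}
        for fut, key in futures.items():
            if fut.result() == "MaxRetry":
                print(f'Giving up on {key} after exhausting retries')
```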