# s3_helper.py
"""
s3_helper.py
~~~~~~~~~~~~~~~~~
Helper functions for models.py
:copyright: (c) 2017 by Marc Lijour, brolycjw.
:license: MIT License, see LICENSE for more details.
"""
import boto3
# uncomment for debug mode:
# boto3.set_stream_logger('')
import botocore
from boto3.session import Session
from boto3.s3.transfer import S3Transfer
  14. def parse_bucket_url(bucket_url):
  15. scheme = bucket_url[:5]
  16. assert scheme == 's3://', \
  17. "Expecting an s3:// scheme, got {} instead.".format(scheme)
  18. # scheme:
  19. # s3://<Your-AWS-Access-Key-ID>:<Your-AWS-Secret-Key>@<Your-S3-Bucket-name>&<Your-DigitalOcean-base-url>+SSE
  20. # where +SSE is optional (meaning server-side encryption enabled)
  21. try:
  22. encryption_enabled = False
  23. remain = bucket_url.lstrip(scheme)
  24. access_key_id = remain.split(':')[0]
  25. remain = remain.lstrip(access_key_id).lstrip(':')
  26. secret_key = remain.split('@')[0]
  27. remain = remain.lstrip(secret_key).lstrip('@')
  28. bucket_name = remain.split('&')[0]
  29. remain = remain.lstrip(bucket_name).lstrip('&').split('+')
  30. do_space_url = remain[0]
  31. encryption_enabled = len(remain) > 1
  32. if not access_key_id or not secret_key:
  33. raise Exception(
  34. "No AWS access and secret keys were provided."
  35. " Unable to establish a connexion to S3."
  36. )
  37. except Exception:
  38. raise Exception("Unable to parse the S3 bucket url.")
  39. return (access_key_id, secret_key, bucket_name, do_space_url, encryption_enabled)
  40. def bucket_exists(s3, bucket_name):
  41. exists = True
  42. try:
  43. s3.meta.client.head_bucket(Bucket=bucket_name)
  44. except botocore.exceptions.ClientError as e:
  45. error_code = int(e.response['Error']['Code'])
  46. if error_code == 404:
  47. exists = False
  48. return exists
  49. def object_exists(s3, bucket_name, key):
  50. exists = True
  51. try:
  52. s3.meta.client.head_object(Bucket=bucket_name, Key=key)
  53. except botocore.exceptions.ClientError as e:
  54. error_code = int(e.response['Error']['Code'])
  55. if error_code == 404:
  56. exists = False
  57. return exists
  58. def get_resource(access_key_id, secret_key, endpoint_url):
  59. session = boto3.Session(access_key_id, secret_key)
  60. s3 = session.resource('s3', endpoint_url='https://' + endpoint_url)
  61. return s3
  62. # extra: works for files stored in the file system
  63. # (not called by models.py which only deal with in-memory)
  64. def upload(value, storage):
  65. access_key_id, secret_key, bucket_name, do_space_url, encryption_enabled = parse_bucket_url(storage)
  66. s3 = get_resource(access_key_id, secret_key)
  67. ### S3Transfer allows multi-part, call backs etc
  68. # http://boto3.readthedocs.io/en/latest/_modules/boto3/s3/transfer.html
  69. transfer = S3Transfer(s3.meta.client)
  70. if encryption_enabled:
  71. transfer.upload_file(value, bucket_name, do_space_url, value, extra_args={'ServerSideEncryption': 'AES256'})
  72. else:
  73. transfer.upload_file(value, bucket_name, do_space_url, value)