# flask_admin_s3_upload.py
  1. __version__ = '0.1.4'
  2. try:
  3. from PIL import Image, ImageOps
  4. except ImportError:
  5. Image = None
  6. ImageOps = None
  7. from io import BytesIO
  8. import os
  9. import os.path as op
  10. import re
  11. from boto.s3.connection import S3Connection
  12. from boto.exception import S3ResponseError
  13. from boto.s3.key import Key
  14. from werkzeug.datastructures import FileStorage
  15. from wtforms import ValidationError
  16. from flask_admin.form.upload import FileUploadField, ImageUploadInput, \
  17. thumbgen_filename
  18. from flask_admin._compat import urljoin
  19. from url_for_s3 import url_for_s3
  20. class S3FileUploadField(FileUploadField):
  21. """
  22. Inherits from flask-admin FileUploadField, to allow file uploading
  23. to Amazon S3 (as well as the default local storage).
  24. """
  25. def __init__(self, label=None, validators=None, storage_type=None,
  26. bucket_name=None, access_key_id=None,
  27. access_key_secret=None, acl='public-read',
  28. storage_type_field=None, bucket_name_field=None,
  29. static_root_parent=None, **kwargs):
  30. super(S3FileUploadField, self).__init__(label, validators, **kwargs)
  31. if storage_type and (storage_type != 's3'):
  32. raise ValueError(
  33. 'Storage type "%s" is invalid, the only supported storage type'
  34. ' (apart from default local storage) is s3.' % storage_type
  35. )
  36. self.storage_type = storage_type
  37. self.bucket_name = bucket_name
  38. self.access_key_id = access_key_id
  39. self.access_key_secret = access_key_secret
  40. self.acl = acl
  41. self.storage_type_field = storage_type_field
  42. self.bucket_name_field = bucket_name_field
  43. self.static_root_parent = static_root_parent
  44. def populate_obj(self, obj, name):
  45. field = getattr(obj, name, None)
  46. if field:
  47. # If field should be deleted, clean it up
  48. if self._should_delete:
  49. self._delete_file(field, obj)
  50. setattr(obj, name, '')
  51. if self.storage_type_field:
  52. setattr(obj, self.storage_type_field, '')
  53. if self.bucket_name_field:
  54. setattr(obj, self.bucket_name_field, '')
  55. return
  56. if (self.data and isinstance(self.data, FileStorage)
  57. and self.data.filename):
  58. if field:
  59. self._delete_file(field, obj)
  60. filename = self.generate_name(obj, self.data)
  61. temp_file = BytesIO()
  62. self.data.save(temp_file)
  63. filename = self._save_file(temp_file, filename)
  64. # update filename of FileStorage to our validated name
  65. self.data.filename = filename
  66. setattr(obj, name, filename)
  67. if self.storage_type == 's3':
  68. if self.storage_type_field:
  69. setattr(obj, self.storage_type_field, self.storage_type)
  70. if self.bucket_name_field:
  71. setattr(obj, self.bucket_name_field, self.bucket_name)
  72. else:
  73. if self.storage_type_field:
  74. setattr(obj, self.storage_type_field, '')
  75. if self.bucket_name_field:
  76. setattr(obj, self.bucket_name_field, '')
  77. def _get_s3_path(self, filename):
  78. if not self.static_root_parent:
  79. raise ValueError('S3FileUploadField field requires '
  80. 'static_root_parent to be set.')
  81. return re.sub('^\/', '', self._get_path(filename).replace(
  82. self.static_root_parent, ''))
  83. def _delete_file(self, filename, obj):
  84. storage_type = getattr(obj, self.storage_type_field, '')
  85. bucket_name = getattr(obj, self.bucket_name_field, '')
  86. if not (storage_type and bucket_name):
  87. return super(S3FileUploadField, self)._delete_file(filename)
  88. if storage_type != 's3':
  89. raise ValueError(
  90. 'Storage type "%s" is invalid, the only supported storage type'
  91. ' (apart from default local storage) is s3.' % storage_type)
  92. conn = S3Connection(self.access_key_id, self.access_key_secret)
  93. bucket = conn.get_bucket(bucket_name)
  94. path = self._get_s3_path(filename)
  95. k = Key(bucket)
  96. k.key = path
  97. try:
  98. bucket.delete_key(k)
  99. except S3ResponseError:
  100. pass
  101. def _save_file_local(self, temp_file, filename):
  102. path = self._get_path(filename)
  103. if not op.exists(op.dirname(path)):
  104. os.makedirs(os.path.dirname(path), self.permission | 0o111)
  105. fd = open(path, 'wb')
  106. # Thanks to:
  107. # http://stackoverflow.com/a/3253276/2066849
  108. temp_file.seek(0)
  109. t = temp_file.read(1048576)
  110. while t:
  111. fd.write(t)
  112. t = temp_file.read(1048576)
  113. fd.close()
  114. return filename
  115. def _save_file(self, temp_file, filename):
  116. if not (self.storage_type and self.bucket_name):
  117. return self._save_file_local(temp_file, filename)
  118. if self.storage_type != 's3':
  119. raise ValueError(
  120. 'Storage type "%s" is invalid, the only supported storage type'
  121. ' (apart from default local storage) is s3.'
  122. % self.storage_type)
  123. conn = S3Connection(self.access_key_id, self.access_key_secret)
  124. bucket = conn.get_bucket(self.bucket_name)
  125. path = self._get_s3_path(filename)
  126. k = bucket.new_key(path)
  127. k.set_contents_from_string(temp_file.getvalue())
  128. k.set_acl(self.acl)
  129. return filename
  130. class S3ImageUploadInput(ImageUploadInput):
  131. """
  132. Inherits from flask-admin ImageUploadInput, to render images
  133. uploaded to Amazon S3 (as well as the default local storage).
  134. """
  135. def get_url(self, field):
  136. if op.isfile(op.join(field.base_path, field.data)):
  137. return super(S3ImageUploadInput, self).get_url(field)
  138. if field.thumbnail_size:
  139. filename = field.thumbnail_fn(field.data)
  140. else:
  141. filename = field.data
  142. if field.url_relative_path:
  143. filename = urljoin(field.url_relative_path, filename)
  144. return url_for_s3(field.endpoint, bucket_name=field.bucket_name,
  145. filename=filename)
  146. class S3ImageUploadField(S3FileUploadField):
  147. """
  148. Revised version of flask-admin ImageUploadField, to allow image
  149. uploading to Amazon S3 (as well as the default local storage).
  150. Based loosely on code from:
  151. http://stackoverflow.com/a/29178240/2066849
  152. """
  153. widget = S3ImageUploadInput()
  154. keep_image_formats = ('PNG',)
  155. def __init__(self, label=None, validators=None,
  156. max_size=None, thumbgen=None, thumbnail_size=None,
  157. url_relative_path=None, endpoint='static',
  158. **kwargs):
  159. # Check if PIL is installed
  160. if Image is None:
  161. raise ImportError('PIL library was not found')
  162. self.max_size = max_size
  163. self.thumbnail_fn = thumbgen or thumbgen_filename
  164. self.thumbnail_size = thumbnail_size
  165. self.endpoint = endpoint
  166. self.image = None
  167. self.url_relative_path = url_relative_path
  168. if (not ('allowed_extensions' in kwargs)
  169. or not kwargs['allowed_extensions']):
  170. kwargs['allowed_extensions'] = \
  171. ('gif', 'jpg', 'jpeg', 'png', 'tiff')
  172. super(S3ImageUploadField, self).__init__(label, validators,
  173. **kwargs)
  174. def pre_validate(self, form):
  175. super(S3ImageUploadField, self).pre_validate(form)
  176. if (self.data and
  177. isinstance(self.data, FileStorage) and
  178. self.data.filename):
  179. try:
  180. self.image = Image.open(self.data)
  181. except Exception as e:
  182. raise ValidationError('Invalid image: %s' % e)
  183. # Deletion
  184. def _delete_file(self, filename, obj):
  185. storage_type = getattr(obj, self.storage_type_field, '')
  186. bucket_name = getattr(obj, self.bucket_name_field, '')
  187. super(S3ImageUploadField, self)._delete_file(filename, obj)
  188. self._delete_thumbnail(filename, storage_type, bucket_name)
  189. def _delete_thumbnail_local(self, filename):
  190. path = self._get_path(self.thumbnail_fn(filename))
  191. if op.exists(path):
  192. os.remove(path)
  193. def _delete_thumbnail(self, filename, storage_type, bucket_name):
  194. if not (storage_type and bucket_name):
  195. self._delete_thumbnail_local(filename)
  196. return
  197. if storage_type != 's3':
  198. raise ValueError(
  199. 'Storage type "%s" is invalid, the only supported storage type'
  200. ' (apart from default local storage) is s3.' % storage_type)
  201. conn = S3Connection(self.access_key_id, self.access_key_secret)
  202. bucket = conn.get_bucket(bucket_name)
  203. path = self._get_s3_path(self.thumbnail_fn(filename))
  204. k = Key(bucket)
  205. k.key = path
  206. try:
  207. bucket.delete_key(k)
  208. except S3ResponseError:
  209. pass
  210. # Saving
  211. def _save_file(self, temp_file, filename):
  212. if self.storage_type and (self.storage_type != 's3'):
  213. raise ValueError(
  214. 'Storage type "%s" is invalid, the only supported storage type'
  215. ' (apart from default local storage) is s3.' % (
  216. self.storage_type,))
  217. # Figure out format
  218. filename, format = self._get_save_format(filename, self.image)
  219. if self.image: # and (self.image.format != format or self.max_size):
  220. if self.max_size:
  221. image = self._resize(self.image, self.max_size)
  222. else:
  223. image = self.image
  224. temp_file = BytesIO()
  225. self._save_image(image, temp_file, format)
  226. super(S3ImageUploadField, self)._save_file(temp_file, filename)
  227. temp_file_thumbnail = BytesIO()
  228. self._save_thumbnail(temp_file_thumbnail, filename, format)
  229. return filename
  230. def _save_thumbnail(self, temp_file, filename, format):
  231. if self.image and self.thumbnail_size:
  232. self._save_image(self._resize(self.image, self.thumbnail_size),
  233. temp_file,
  234. format)
  235. super(S3ImageUploadField, self)._save_file(
  236. temp_file, self.thumbnail_fn(filename))
  237. def _resize(self, image, size):
  238. (width, height, force) = size
  239. if image.size[0] > width or image.size[1] > height:
  240. if force:
  241. return ImageOps.fit(self.image, (width, height),
  242. Image.ANTIALIAS)
  243. else:
  244. thumb = self.image.copy()
  245. thumb.thumbnail((width, height), Image.ANTIALIAS)
  246. return thumb
  247. return image
  248. def _save_image(self, image, temp_file, format='JPEG'):
  249. if image.mode not in ('RGB', 'RGBA'):
  250. image = image.convert('RGBA')
  251. image.save(temp_file, format)
  252. def _get_save_format(self, filename, image):
  253. if image.format not in self.keep_image_formats:
  254. name, ext = op.splitext(filename)
  255. filename = '%s.jpg' % name
  256. return filename, 'JPEG'
  257. return filename, image.format