storages_optimized.py

from storages.backends.s3boto3 import S3Boto3Storage


class S3OptimizedUploadStorage(S3Boto3Storage):
    """
    An optimized S3 storage backend.

    This storage avoids the unnecessary upload performed by the generic
    ``upload_fileobj`` call when ``content`` already is an S3 object, in
    which case the faster server-side copy command can be used instead.
    The assumption is that ``content`` wraps an S3 object we can copy from.
    See also the discussion here: https://github.com/codingjoe/django-s3file/discussions/126
    """

    def _save(self, name, content):
        # Basically copy the implementation of _save of S3Boto3Storage
        # and replace obj.upload_fileobj with a server-side copy.
        cleaned_name = self._clean_name(name)
        name = self._normalize_name(cleaned_name)
        params = self._get_write_parameters(name, content)

        if (
            self.gzip
            and params["ContentType"] in self.gzip_content_types
            and "ContentEncoding" not in params
        ):
            content = self._compress_content(content)
            params["ContentEncoding"] = "gzip"

        obj = self.bucket.Object(name)
        # content.seek(0, os.SEEK_SET)  # Disable unnecessary seek operation
        # obj.upload_fileobj(content, ExtraArgs=params)  # Disable upload function

        if not hasattr(content, "obj") or not hasattr(content.obj, "key"):
            raise TypeError(
                "The content object must be an S3 object and contain a valid key."
            )

        # Copy the file instead of uploading it.
        obj.copy({"Bucket": self.bucket.name, "Key": content.obj.key}, ExtraArgs=params)
        return cleaned_name
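
To use the backend, point Django's default file storage at the class. Below is a minimal sketch; the module path ``myapp.storages_optimized`` is a hypothetical placeholder for wherever this file lives in your project, and the class accepts the same settings as ``S3Boto3Storage`` from django-storages.

# settings.py -- minimal sketch; "myapp.storages_optimized" is a
# hypothetical module path, adjust it to your project layout.
DEFAULT_FILE_STORAGE = "myapp.storages_optimized.S3OptimizedUploadStorage"

# On Django 4.2+, the equivalent dict-based setting would be:
# STORAGES = {
#     "default": {
#         "BACKEND": "myapp.storages_optimized.S3OptimizedUploadStorage",
#     },
#     "staticfiles": {
#         "BACKEND": "django.contrib.staticfiles.storage.StaticFilesStorage",
#     },
# }

Note that the ``TypeError`` guard restricts this storage to content whose ``obj`` attribute exposes an S3 ``key``, such as files that were already placed on S3 by a direct browser upload (the django-s3file scenario from the linked discussion). Ordinary in-memory or local file uploads still need the regular ``S3Boto3Storage``.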