s3-raw-to-jpeg.py 3.1 KB

__author__ = 'niels'
# Fetch Nikon NEF raw files from S3, render JPEG versions with ufraw-batch,
# upload the JPEGs back to the same S3 path, and remove the local copies.
import sys, re, time, os
from boto.s3.connection import S3Connection
from boto.s3.key import Key
from subprocess import call

# AWS credentials come from the command line; bucket and prefix are hard-coded.
conn = S3Connection(sys.argv[1], sys.argv[2])
bucket = conn.get_bucket("owncloud-photos")
content = bucket.list(prefix="Kina/20141117")


class KeyMeta:
    """Holds the S3 path of a raw file and the names of the JPEGs derived from it."""
    def __init__(self, s3_path, filename):
        self.s3_path = s3_path
        self.filename = filename
        self.filename_tiny = ""
        self.filename_prev = ""
        self.filename_full = ""


def getRawFromS3(key):
    # Split the S3 key into directory and base filename; only .NEF keys match
    req = re.search(r'^(.+/)(.+)\.NEF$', key.name)
    if req:
        s3_path = req.group(1)
        filename = req.group(2)
        # Download the raw file to the working directory (binary mode)
        with open(filename + ".NEF", "wb") as fp:
            key.get_file(fp)
        return KeyMeta(s3_path, filename)
    else:
        return None


def produceJPGs(key_meta):
    params_def = ["ufraw-batch", key_meta.filename + ".NEF", "--embedded-image", "--overwrite"]
    # Tiny thumbnail (250px)
    key_meta.filename_tiny = key_meta.filename + "_250.jpg"
    params_tiny = list(params_def)
    params_tiny.extend(["--size=250", "--output=" + key_meta.filename_tiny])
    call(params_tiny)
    # Preview (1600px)
    key_meta.filename_prev = key_meta.filename + "_1600.jpg"
    params_prev = list(params_def)
    params_prev.extend(["--size=1600", "--output=" + key_meta.filename_prev])
    call(params_prev)
    # Full size
    key_meta.filename_full = key_meta.filename + ".jpg"
    params_full = list(params_def)
    params_full.extend(["--output=" + key_meta.filename_full])
    call(params_full)
    return key_meta


def uploadToS3(key_meta):
    # Tiny
    key_tiny = Key(bucket)
    key_tiny.key = key_meta.s3_path + key_meta.filename_tiny
    key_tiny.set_contents_from_filename(key_meta.filename_tiny)
    print("[%s] uploaded" % key_tiny.key)
    # Preview
    key_prev = Key(bucket)
    key_prev.key = key_meta.s3_path + key_meta.filename_prev
    key_prev.set_contents_from_filename(key_meta.filename_prev)
    print("[%s] uploaded" % key_prev.key)
    # Full
    key_full = Key(bucket)
    key_full.key = key_meta.s3_path + key_meta.filename_full
    key_full.set_contents_from_filename(key_meta.filename_full)
    print("[%s] uploaded" % key_full.key)


def cleanUp(key_meta):
    # Remove the downloaded raw and the generated JPEGs from the working directory
    os.remove(key_meta.filename + ".NEF")
    os.remove(key_meta.filename_tiny)
    os.remove(key_meta.filename_prev)
    os.remove(key_meta.filename_full)


start = time.time()
file_count = 0
for key in content:
    print("Getting raw from S3")
    key_meta = getRawFromS3(key)
    if key_meta:
        print("Raw [%s] retrieved" % key_meta.filename)
        print("Producing bitmaps")
        produceJPGs(key_meta)
        print("Bitmaps produced")
        print("Uploading bitmaps to S3")
        uploadToS3(key_meta)
        print("All bitmaps uploaded to S3")
        print("Cleaning up")
        cleanUp(key_meta)
        file_count += 1
    else:
        print("Non-RAW found, skipping")

end = time.time()
time_delta = end - start
print("Completed for [%s] files in [%s] seconds" % (file_count, time_delta))
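
Usage: judging from the argument handling, the script takes the AWS access key ID and secret access key as its first two command-line arguments (they are passed straight to S3Connection), while the bucket name ("owncloud-photos") and key prefix ("Kina/20141117") are hard-coded, so an invocation would look roughly like:

python s3-raw-to-jpeg.py <access_key_id> <secret_access_key>

ufraw-batch must be installed and on the PATH for the conversion step to run.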