buechner_3.py

import os
from datetime import datetime

from boto.s3.key import Key


def upload_new_files(staticdir, bucket):
    """
    Upload newer files recursively under `staticdir` to `bucket`.

    This assumes that the directory `staticdir` represents the root of
    the S3 bucket. `bucket` should be an instance of boto.s3.bucket.Bucket.
    Return a list of the files uploaded, with paths relative to `staticdir`.
    """
    allkeys = bucket.list()
    local_files_mtimes = get_keys_from_directory(staticdir)
    # `fmt` should be ISO 8601, but the time zone isn't parsed right when
    # given as %Z, so we hack it off below. Hopefully it's always Zulu time
    fmt = '%Y-%m-%dT%H:%M:%S.%f'
    # This is a dict of key_name -> [key_obj, key.last_modified]
    remote_files_mtimes_keys = dict(
        (k.name,
         [k,
          datetime.strptime(
              k.last_modified[:-1],  # strip off Z at end
              fmt)])
        for k in allkeys)
    uploaded_files = []
    for filepath, local_mtime in local_files_mtimes.iteritems():
        if filepath in remote_files_mtimes_keys:
            the_key, remote_mtime = remote_files_mtimes_keys[filepath]
            # Skip this file if the local copy is older than the remote one
            if remote_mtime > local_mtime:
                continue
        else:
            # No such key in the bucket yet; create a fresh one for it
            the_key = Key(bucket)
            the_key.key = filepath
        uploaded_files.append(filepath)
        the_key.set_contents_from_filename(os.path.join(staticdir, filepath))
        the_key.set_acl('public-read')
    return uploaded_files
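The helper get_keys_from_directory isn't part of this listing. Judging from how it's used above, it walks `staticdir` and returns a dict mapping each file's path, relative to `staticdir`, to its last-modified time as a naive UTC datetime, so the values compare cleanly against the parsed key.last_modified timestamps. A minimal sketch under those assumptions (not necessarily the original implementation):

def get_keys_from_directory(basedir):
    """Walk `basedir` and map relative file paths to naive UTC mtimes."""
    mtimes = {}
    for dirpath, dirnames, filenames in os.walk(basedir):
        for filename in filenames:
            fullpath = os.path.join(dirpath, filename)
            relpath = os.path.relpath(fullpath, basedir)
            # S3's last_modified is in UTC, so use UTC for the local side too
            mtimes[relpath] = datetime.utcfromtimestamp(
                os.path.getmtime(fullpath))
    return mtimes

Calling the uploader then looks roughly like this; the bucket name and local directory are placeholders, and boto picks up AWS credentials from the environment or its config file:

from boto.s3.connection import S3Connection

conn = S3Connection()  # credentials from env vars or ~/.boto
bucket = conn.get_bucket('example-bucket')  # placeholder bucket name
for path in upload_new_files('_site', bucket):
    print 'uploaded', path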