import os
from datetime import datetime

from boto.s3.key import Key


def upload_new_files(staticdir, bucket):
    """
    Upload newer files recursively under `staticdir` to `bucket`.
    This assumes that the directory `staticdir` represents the root of
    the S3 bucket. `bucket` should be an instance of boto.s3.bucket.Bucket.
    Return a list of the files uploaded, with paths relative to `staticdir`.
    """
    allkeys = bucket.list()
    local_files_mtimes = get_keys_from_directory(staticdir)
    # `fmt` should be ISO 8601, but the time zone isn't parsed right when
    # given as %Z, so we hack it off below. Hopefully it's always Zulu time.
    fmt = '%Y-%m-%dT%H:%M:%S.%f'
    # Map each key name to [key_obj, last_modified as a datetime].
    remote_files_mtimes_keys = {
        k.name: [k, datetime.strptime(k.last_modified[:-1], fmt)]  # strip trailing Z
        for k in allkeys
    }
    uploaded_files = []
    for filepath, local_mtime in local_files_mtimes.items():
        if filepath in remote_files_mtimes_keys:
            the_key, remote_mtime = remote_files_mtimes_keys[filepath]
            # Skip the file if the remote copy is at least as new.
            if remote_mtime > local_mtime:
                continue
        else:
            the_key = Key(bucket)
            the_key.key = filepath
        uploaded_files.append(filepath)
        the_key.set_contents_from_filename(os.path.join(staticdir, filepath))
        the_key.set_acl('public-read')
    return uploaded_files
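
The `get_keys_from_directory` helper isn't shown in this snippet. Here is a minimal sketch, assuming it walks `staticdir` recursively and returns a dict mapping each file's bucket-relative path (with forward slashes, so it lines up with S3 key names) to its modification time as a naive UTC datetime, which is what the comparison against the parsed `last_modified` values above requires:

def get_keys_from_directory(staticdir):
    """
    Map each file under `staticdir` (recursively) to its mtime.
    Paths are relative to `staticdir` and use '/' separators so they
    match S3 key names; mtimes are naive UTC datetimes, comparable
    with the parsed `last_modified` values in upload_new_files.
    """
    mtimes = {}
    for dirpath, _dirnames, filenames in os.walk(staticdir):
        for filename in filenames:
            fullpath = os.path.join(dirpath, filename)
            relpath = os.path.relpath(fullpath, staticdir).replace(os.sep, '/')
            mtimes[relpath] = datetime.utcfromtimestamp(os.path.getmtime(fullpath))
    return mtimes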
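
A quick usage sketch, assuming boto credentials are available from the environment or a ~/.boto config file; the bucket name here is a placeholder:

import boto

conn = boto.connect_s3()  # picks up AWS credentials from env vars or ~/.boto
bucket = conn.get_bucket('example-static-bucket')  # hypothetical bucket name
for path in upload_new_files('static', bucket):
    print('uploaded %s' % path)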