1234567891011121314151617181920212223242526272829303132333435363738394041 |
def upload_new_files(staticdir, bucket):
    """
    Upload newer files recursively under `staticdir` to `bucket`.

    This assumes that the directory `staticdir` represents the root of
    the S3 bucket. `bucket` should be an instance of boto.s3.bucket.Bucket.

    A local file is uploaded when it is absent from the bucket, or when
    its local mtime is at least as recent as the remote key's
    `last_modified` timestamp. Uploaded keys are made publicly readable.

    Return a list of the files uploaded, with paths relative to `staticdir`.
    """
    allkeys = bucket.list()
    # Mapping of relative path -> local mtime (as produced by the
    # sibling helper get_keys_from_directory).
    local_files_mtimes = get_keys_from_directory(staticdir)

    # S3 last_modified looks like '2013-01-01T12:00:00.000Z'; strip the
    # trailing 'Z' so strptime can parse it with this format.
    # NOTE(review): this treats the remote timestamp as naive — assumes
    # local mtimes are also UTC; confirm in get_keys_from_directory.
    fmt = '%Y-%m-%dT%H:%M:%S.%f'

    # Map each remote key name to [key_object, remote_mtime] so we can
    # both compare timestamps and reuse the existing key for re-upload.
    remote_files_mtimes_keys = {
        k.name: [k, datetime.strptime(k.last_modified[:-1], fmt)]
        for k in allkeys
    }

    uploaded_files = []
    # .items() instead of the Python-2-only .iteritems(): behaves the
    # same here and keeps the code Python 3 compatible.
    for filepath, local_mtime in local_files_mtimes.items():
        if filepath in remote_files_mtimes_keys:
            the_key, remote_mtime = remote_files_mtimes_keys[filepath]
            # Remote copy is strictly newer: nothing to do for this file.
            if remote_mtime > local_mtime:
                continue
        else:
            # No remote counterpart yet: create a fresh key for it.
            the_key = Key(bucket)
            the_key.key = filepath
        uploaded_files.append(filepath)
        the_key.set_contents_from_filename(os.path.join(staticdir, filepath))
        the_key.set_acl('public-read')
    return uploaded_files
|