def synchronize(self, delete=False):
    """Synchronize the localpath to S3.

    Upload new or changed files. If ``delete`` is True, also delete
    S3 files that no longer exist locally.
    """
    bucket = self.get_bucket()
    s3_paths = s3_util.get_paths_from_keys(bucket)
    local_files = set()
    for dirpath, dirnames, filenames in os.walk(self.localpath):
        for filename in filenames:
            file_path = os.path.join(dirpath, filename)
            file_key = os.path.relpath(file_path, self.localpath)
            if os.sep == "\\":
                # Windows paths need converting to forward slashes for S3 keys.
                file_key = file_key.replace("\\", "/")
            local_files.add(file_key)
            try:
                s3_key = s3_paths[file_key]
            except KeyError:
                # File is new: create the key and upload it.
                s3_key = bucket.new_key(file_key)
                logger.info("Uploading new file: {0}".format(file_key))
                s3_key.set_contents_from_filename(file_path)
                s3_key.set_acl("public-read")
            else:
                # File already exists; upload only if it has changed.
                local_md5 = util.md5_for_file(file_path)
                if local_md5 != s3_key.etag.replace("\"", ""):
                    # File has changed.
                    logger.info("Uploading changed file: {0}".format(file_key))
                    s3_key.set_contents_from_filename(file_path)
                    s3_key.set_acl("public-read")
    if delete:
        # Delete all S3 files that don't exist locally.
        for name, key in s3_paths.items():
            if name not in local_files:
                logger.info("Deleting old file: {0}".format(name))
                key.delete()
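

# The change check above compares a local MD5 digest against the key's ETag,
# which works because set_contents_from_filename uploads in a single PUT (for
# multipart uploads the ETag is not a plain MD5). util.md5_for_file is not
# shown in this snippet; the sketch below is one plausible implementation, an
# assumption rather than the project's actual helper, hashing the file in
# chunks so large files never need to fit in memory.

import hashlib


def md5_for_file(path, block_size=2 ** 20):
    """Return the hex MD5 digest of the file at ``path``."""
    md5 = hashlib.md5()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(block_size), b""):
            md5.update(chunk)
    return md5.hexdigest()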