# owner: Rohit Kumar
# Install the boto package:  sudo apt-get install python-boto
#
# Credentials: boto reads /etc/boto.cfg.  If that file is not set up yet:
#   1) AWS account -> (top-right account name) -> My Security Credentials ->
#      Access Keys -> Create New Access Keys -> download the CSV file.
#      Note: the secret cannot be retrieved from the account again, so keep
#      it saved someplace safe.
#   2) Go to the directory where the file was downloaded, open a terminal
#      there and run:  cp <your key name> /etc/boto.cfg
#   3) Open /etc/boto.cfg in an editor and make sure it has this format:
#
#        [Credentials]
#        aws_access_key_id = {ACCESS KEY ID}
#        aws_secret_access_key = {SECRET ACCESS KEY}
#
#      If not, edit it to match the format above.
import sys

import boto
import boto.s3
from boto.s3.key import Key

AWS_ACCESS_KEY_ID = ''
AWS_SECRET_ACCESS_KEY = ''

bucket_name = 'ember-s3'            # name of the bucket
testfile = "Memory_Discussion.pdf"  # local file used by the upload example
key_file = 'prediction.json'        # name of the file to be accessed on S3
download_path = '/home/user/Downloads/ember/prediction.json'  # change this to your local machine path


def percent_cb(complete, total):
    """Upload progress callback: emit one dot per transferred chunk."""
    sys.stdout.write('.')
    sys.stdout.flush()


# Connect using the access keys set in /etc/boto.cfg (see header above).
# To pass the keys explicitly instead, use:
#   conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
conn = boto.connect_s3()

# Count all the buckets.  get_all_buckets() returns a (possibly empty) list,
# never None, so test for emptiness rather than identity with None.
all_b = conn.get_all_buckets()
if not all_b:
    print("No bucket exists ")

## How to create a new bucket in a given location:
# bucket = conn.create_bucket(bucket_name, location=boto.s3.connection.Location.DEFAULT)
## How to create a key of a given name and upload a local file into it:
# k = Key(bucket)
# k.key = key_file
# k.set_contents_from_filename(testfile, cb=percent_cb, num_cb=10)
# k.set_acl('public-read')

## Download the file from S3.
# NOTE: conn.get_bucket() raises S3ResponseError when the bucket is missing;
# conn.lookup() returns None instead, which is what the check below expects.
conn_bucket = conn.lookup(bucket_name)
if conn_bucket is None:
    print("Bucket does not exist!")
else:
    for key_list in conn_bucket.list():  # print the name of every key
        print(key_list.name)
    all_files = conn_bucket.list()  # iterable over all keys inside the bucket
    # key_file = [i.name for i in conn_bucket.list()]
    # down_key = conn_bucket.get_key(key_file)
    # down_key.get_contents_to_filename(download_path)