upload.py

#!/usr/bin/python
import boto3
import os
import time
import bz2
import shutil
import gnupg
from botocore.client import Config

# AWS credentials and the passphrase used for symmetric AES encryption of the archives.
aws_access_key = 'aws_access_key_here'
aws_secret_key = 'aws_secret_key_here'
phrase = 'AES_key'

gpg = gnupg.GPG(gnupghome='/var/log/scribe/logger')

# Base log directory; keep the trailing slash, since paths below are built by
# plain string concatenation.
log_dir = '/path/to/your/logs/'
log_sub = ['subdir1',
           'subdir2']
bucket = 's3_bucket_name'

def upload_to_s3(file_list, bucket_name):
    # Upload every file in file_list (paths relative to log_dir) to the S3 bucket,
    # keeping the relative path as the object key.
    conn = boto3.resource('s3', aws_access_key_id=aws_access_key,
                          aws_secret_access_key=aws_secret_key,
                          config=Config(signature_version='s3v4'))
    for file in file_list:
        print('Uploading: ' + file + '\n')
        with open(log_dir + file, 'rb') as body:
            conn.Object(bucket_name, file).put(Body=body)

def is_older(filename):
    # True if the file was last modified more than four hours ago.
    now = time.time()
    try:
        mtime = os.path.getmtime(log_dir + filename)
    except OSError:
        print('File ' + filename + " doesn't exist")
        return False
    return mtime < (now - 4 * 3600)

def validate_object(bucket, file_list, access_key, secret_key):
    # Drop from file_list any object that is already present in the bucket.
    conn = boto3.resource('s3', aws_access_key_id=access_key,
                          aws_secret_access_key=secret_key,
                          config=Config(signature_version='s3v4'))
    my_bucket = conn.Bucket(bucket)
    # Iterate over a copy so removing items does not skip elements.
    for key in list(file_list):
        objs = list(my_bucket.objects.filter(Prefix=key))
        if len(objs) > 0 and objs[0].key == key:
            print(key + ' already uploaded')
            file_list.remove(key)
        else:
            print(key + " doesn't exist")

def encrypt(filename):
    # Symmetrically encrypt log_dir + filename with AES256, writing '<filename>.aes'.
    print('Encrypting: ' + log_dir + filename)
    with open(log_dir + filename, 'rb') as f:
        status = gpg.encrypt_file(f, None, passphrase=phrase, symmetric='AES256',
                                  output=log_dir + filename + '.aes')
    print('ok: ', status.ok)
    print('status: ', status.status)
    print('stderr: ', status.stderr)
    return status.ok

def decrypt(filename):
    # Decrypt log_dir + filename to '<filename>-decrypted'; the passphrase is needed
    # because the archives are symmetrically encrypted.
    with open(log_dir + filename, 'rb') as f:
        status = gpg.decrypt_file(f, passphrase=phrase,
                                  output=log_dir + filename + '-decrypted')
    print('ok: ', status.ok)
    print('status: ', status.status)
    print('stderr: ', status.stderr)
    return status.ok

def upload(filename):
    # filename is a path relative to log_dir, e.g. 'subdir1/app-2017-01-01.log'.
    parsed_name = filename.split('/')
    subdir = parsed_name[0]
    name_parts = parsed_name[1].split('-')
    # Build the archive name: '<first-part>-archive-<rest-of-name>.bz2'.
    gz_filename = name_parts[0] + '-archive'
    for el in name_parts[1:]:
        gz_filename = gz_filename + '-' + el
    gz_filename = gz_filename + '.bz2'
    full_file_path = log_dir + subdir + '/' + gz_filename
    # Compress the original log, then remove it.
    print('Compressing: ' + log_dir + filename)
    with open(log_dir + filename, 'rb') as f_in, bz2.BZ2File(full_file_path, 'w') as f_out:
        shutil.copyfileobj(f_in, f_out)
    print('Removing: ' + log_dir + filename)
    os.remove(log_dir + filename)
    # Encrypt the archive, upload the resulting .aes file and delete it locally.
    if encrypt(subdir + '/' + gz_filename):
        upload_to_s3([subdir + '/' + gz_filename + '.aes'], bucket)
        os.remove(log_dir + subdir + '/' + gz_filename + '.aes')
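
# get_files_from_dir() is called below but not defined anywhere in this listing.
# A minimal sketch, assuming it should return 'subdir/filename' paths (relative to
# base_dir) for every regular file found in the listed subdirectories:
def get_files_from_dir(base_dir, subdirs):
    result = []
    for sub in subdirs:
        full = os.path.join(base_dir, sub)
        if not os.path.isdir(full):
            continue
        for name in os.listdir(full):
            if os.path.isfile(os.path.join(full, name)):
                result.append(sub + '/' + name)
    return result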

# Collect candidate files from the configured subdirectories and process
# everything that has not been modified for four hours.
file_list = get_files_from_dir(log_dir, log_sub)
print('Objects: ' + str(len(file_list)))
for file in file_list:
    if is_older(file):
        upload(file)