#!/usr/bin/env python
"""Back up every MySQL database on localhost and ship the archive to Amazon S3.

Workflow:
  1. Connect to MySQL and enumerate databases, skipping system schemas.
  2. ``mysqldump`` each remaining database into a timestamped folder.
  3. Tar+gzip the folder and upload it to S3 under ``<server>/<month>/``,
     with a private ACL.
  4. Delete the local folder and archive.

Fill in the configuration constants below before running.
"""
import os
import shutil
import subprocess
from datetime import datetime

import MySQLdb as mdb
import boto

# --- Configuration --------------------------------------------------------
serverName = ''   # Server name (top-level prefix for the S3 key)
dbUser = ''       # DB user, e.g. root
dbPass = ''       # DB user password
# System schemas that must never be dumped.
skipdb = ['information_schema', 'mysql', 'performance_schema']
# Folder name for this run: <servername> + timestamp.
folderName = '' + datetime.now().strftime('%Y.%m.%d.%H.%M')
backupFolder = '/usr/local/bin/db_backup/' + folderName
currentMonth = datetime.now().strftime('%B')  # month-name prefix in the S3 key
access_key = ''   # Amazon S3 access key
secret_key = ''   # Amazon S3 secret key
bucket = ''       # Amazon S3 bucket name

# Equivalent of `mkdir -p` without shelling out.
if not os.path.isdir(backupFolder):
    os.makedirs(backupFolder)

print('CONNECTING TO MYSQL SERVER')
con = mdb.connect('localhost', dbUser, dbPass)
cur = con.cursor()
cur.execute("SHOW DATABASES")
print('BUILDING A LIST OF DATABASES')
dbList = []
for row in cur.fetchall():
    dbName = row[0]
    if dbName in skipdb:
        continue
    dbList.append(dbName)
    print('BACKING UP - ' + dbName)
    # Argument-list form avoids shell interpolation of db names/paths.
    # NOTE(review): the password still appears on the mysqldump command line
    # (visible in `ps`); consider a ~/.my.cnf or --defaults-extra-file.
    dumpPath = os.path.join(backupFolder, dbName + '.sql')
    with open(dumpPath, 'wb') as dumpFile:
        subprocess.call(
            ['mysqldump', '-h', 'localhost', '-u', dbUser, '-p' + dbPass, dbName],
            stdout=dumpFile)
cur.close()
con.close()

print('COMPRESSING BACKUP FOLDER')
archivePath = backupFolder + '.tar.gz'
subprocess.call(['tar', '-cvzf', archivePath, backupFolder])

print('TRANSFERRING BACKUP TO AMAZON S3')
s3 = boto.connect_s3(access_key, secret_key)
bucket = s3.get_bucket(bucket)
key = bucket.new_key("%s/%s/%s.tar.gz" % (serverName, currentMonth, folderName))
key.set_contents_from_filename(os.path.join(os.curdir, "%s.tar.gz" % (backupFolder)))
key.set_acl('private')

print('CLEANUP PROCESS')
os.remove(archivePath)
print('REMOVED ' + archivePath)
shutil.rmtree(backupFolder)
print('REMOVED ' + backupFolder + '/')