#!/usr/bin/env python
from azure_storage.methods import arg_dict_cleanup, create_batch_dict, create_parent_parser, parse_batch_file, \
    setup_arguments
from azure_storage.azure_upload import AzureUpload
from azure_storage.azure_sas import AzureContainerSAS, AzureSAS
from azure_storage.azure_move import AzureContainerMove, AzureMove
from azure_storage.azure_download import AzureContainerDownload, AzureDownload
from azure_storage.azure_tier import AzureContainerTier, AzureTier
from azure_storage.azure_delete import AzureContainerDelete, AzureDelete
from argparse import ArgumentParser, RawTextHelpFormatter
import coloredlogs
import logging
import sys
import os


def file_upload(args, batch_dict=None):
    """
    Read in the batch file, clean up the arguments to work with the code base, run the AzureUpload class for each file
    :param args: type ArgumentParser arguments
    :param batch_dict: type Pandas dataframe.transpose().to_dict()
    """
    # If batch_dict has not been supplied by the batch function, extract the batch information from the file
    if not batch_dict:
        batch_dict = create_batch_dict(batch_file=args.batch_file,
                                       headers=['container', 'file', 'reset_path', 'storage_tier'])
    # The format of the dictionary is: {primary key: {header: value, ...}, primary key: {header: value, ...}, ...}
    # e.g. {1: {container_name: $CONTAINER_NAME, file: $FILE_NAME...}, 2: {container_name: ...}, ...}
    for key, arg_dict in batch_dict.items():
        # Clean up the arguments, as some are optional, or not interpreted correctly
        arg_dict = arg_dict_cleanup(arg_dict=arg_dict)
        try:
            # Create the upload_file object
            upload_file = AzureUpload(
                object_name=arg_dict['file'],
                account_name=args.account_name,
                container_name=arg_dict['container'],
                passphrase=args.passphrase,
                path=arg_dict['reset_path'],
                storage_tier=arg_dict['storage_tier'],
                category='file'
            )
            # Run the file upload
            upload_file.main()
        # Don't crash on SystemExits
        except SystemExit:
            pass
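

# A minimal sketch of an 'upload file' batch file, assuming the tab-separated layout described by the
# upload_file_subparser help text below (<TAB> marks a literal tab character; the container and file
# names are hypothetical placeholders):
#
#   container-name<TAB>results/sample1.fastq.gz<TAB>renamed/path<TAB>Cool
#   container-name<TAB>results/sample2.fastq.gz
#
# The trailing optional columns (destination path, storage tier) may be omitted; arg_dict_cleanup is
# expected to normalize the missing values before AzureUpload is called.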


def folder_upload(args, batch_dict=None):
    """
    Read in the batch file, clean up the arguments to work with the code base, run the AzureUpload class for each folder
    :param args: type ArgumentParser arguments
    :param batch_dict: type Pandas dataframe.transpose().to_dict()
    """
    # If batch_dict has not been supplied by the batch function, extract the batch information from the file
    if not batch_dict:
        batch_dict = create_batch_dict(batch_file=args.batch_file,
                                       headers=['container', 'folder', 'reset_path', 'storage_tier'])
    # The format of the dictionary is: {primary key: {header: value, ...}, primary key: {header: value, ...}, ...}
    # e.g. {1: {container_name: $CONTAINER_NAME, folder: $FOLDER_NAME...}, 2: {container_name: ...}, ...}
    for key, arg_dict in batch_dict.items():
        # Clean up the arguments, as some are optional, or not interpreted correctly
        arg_dict = arg_dict_cleanup(arg_dict=arg_dict)
        try:
            # Create the upload_folder object
            upload_folder = AzureUpload(
                object_name=arg_dict['folder'],
                account_name=args.account_name,
                container_name=arg_dict['container'],
                passphrase=args.passphrase,
                path=arg_dict['reset_path'],
                storage_tier=arg_dict['storage_tier'],
                category='folder'
            )
            # Run the folder upload
            upload_folder.main()
        # Don't crash on SystemExits
        except SystemExit:
            pass


def container_sas(args, batch_dict=None):
    """
    Read in the batch file, clean up the arguments to work with the code base, run the AzureContainerSAS class for
    each container
    :param args: type ArgumentParser arguments
    :param batch_dict: type Pandas dataframe.transpose().to_dict()
    """
    # If batch_dict has not been supplied by the batch function, extract the batch information from the file
    if not batch_dict:
        batch_dict = create_batch_dict(batch_file=args.batch_file,
                                       headers=['container', 'expiry', 'output_file'])
    # The format of the dictionary is: {primary key: {header: value, ...}, primary key: {header: value, ...}, ...}
    # e.g. {1: {container_name: $CONTAINER_NAME, expiry: $EXPIRY...}, 2: {container_name: ...}, ...}
    for key, arg_dict in batch_dict.items():
        # Clean up the arguments, as some are optional, or not interpreted correctly
        arg_dict = arg_dict_cleanup(arg_dict=arg_dict)
        try:
            # Create the sas_container object
            sas_container = AzureContainerSAS(
                container_name=arg_dict['container'],
                output_file=arg_dict['output_file'],
                account_name=args.account_name,
                passphrase=args.passphrase,
                expiry=arg_dict['expiry'],
                verbosity=args.verbosity
            )
            # Run the container SAS URL creation
            sas_container.main()
        # Don't crash on SystemExits
        except SystemExit:
            pass
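

# A hypothetical 'sas container' batch file, following the ['container', 'expiry', 'output_file']
# headers used above (the container name, expiry value, and output file are placeholders):
#
#   container-name<TAB>10<TAB>sas_urls.txt
#   other-container
#
# The expiry and output file columns are optional; when they are omitted, AzureContainerSAS
# presumably falls back to its own defaults.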


def file_sas(args, batch_dict=None):
    """
    Read in the batch file, clean up the arguments to work with the code base, run the AzureSAS class for each file
    :param args: type ArgumentParser arguments
    :param batch_dict: type Pandas dataframe.transpose().to_dict()
    """
    # If batch_dict has not been supplied by the batch function, extract the batch information from the file
    if not batch_dict:
        batch_dict = create_batch_dict(batch_file=args.batch_file,
                                       headers=['container', 'file', 'expiry', 'output_file'])
    # The format of the dictionary is: {primary key: {header: value, ...}, primary key: {header: value, ...}, ...}
    # e.g. {1: {container_name: $CONTAINER_NAME, file: $FILE...}, 2: {container_name: ...}, ...}
    for key, arg_dict in batch_dict.items():
        # Clean up the arguments, as some are optional, or not interpreted correctly
        arg_dict = arg_dict_cleanup(arg_dict=arg_dict)
        try:
            # Create the sas_file object
            sas_file = AzureSAS(
                object_name=arg_dict['file'],
                container_name=arg_dict['container'],
                output_file=arg_dict['output_file'],
                account_name=args.account_name,
                passphrase=args.passphrase,
                expiry=arg_dict['expiry'],
                verbosity=args.verbosity,
                category='file',
            )
            # Run the file SAS URL creation
            sas_file.main()
        # Don't crash on SystemExits
        except SystemExit:
            pass


def folder_sas(args, batch_dict=None):
    """
    Read in the batch file, clean up the arguments to work with the code base, run the AzureSAS class for each folder
    :param args: type ArgumentParser arguments
    :param batch_dict: type Pandas dataframe.transpose().to_dict()
    """
    # If batch_dict has not been supplied by the batch function, extract the batch information from the file
    if not batch_dict:
        batch_dict = create_batch_dict(batch_file=args.batch_file,
                                       headers=['container', 'folder', 'expiry', 'output_file'])
    # The format of the dictionary is: {primary key: {header: value, ...}, primary key: {header: value, ...}, ...}
    # e.g. {1: {container_name: $CONTAINER_NAME, folder: $FOLDER...}, 2: {container_name: ...}, ...}
    for key, arg_dict in batch_dict.items():
        # Clean up the arguments, as some are optional, or not interpreted correctly
        arg_dict = arg_dict_cleanup(arg_dict=arg_dict)
        try:
            # Create the sas_folder object
            sas_folder = AzureSAS(
                object_name=arg_dict['folder'],
                container_name=arg_dict['container'],
                output_file=arg_dict['output_file'],
                account_name=args.account_name,
                passphrase=args.passphrase,
                expiry=arg_dict['expiry'],
                verbosity=args.verbosity,
                category='folder',
            )
            # Run the folder SAS URL creation
            sas_folder.main()
        # Don't crash on SystemExits
        except SystemExit:
            pass


def container_move(args, batch_dict=None):
    """
    Read in the batch file, clean up the arguments to work with the code base, run the AzureContainerMove class
    for each container
    :param args: type ArgumentParser arguments
    :param batch_dict: type Pandas dataframe.transpose().to_dict()
    """
    # If batch_dict has not been supplied by the batch function, extract the batch information from the file
    if not batch_dict:
        batch_dict = create_batch_dict(batch_file=args.batch_file,
                                       headers=['container', 'target', 'reset_path', 'storage_tier'])
    # The format of the dictionary is: {primary key: {header: value, ...}, primary key: {header: value, ...}, ...}
    # e.g. {1: {container_name: $CONTAINER_NAME, target: $TARGET...}, 2: {container_name: ...}, ...}
    for key, arg_dict in batch_dict.items():
        # Clean up the arguments, as some are optional, or not interpreted correctly
        arg_dict = arg_dict_cleanup(arg_dict=arg_dict)
        try:
            # Create the move_container object
            move_container = AzureContainerMove(
                container_name=arg_dict['container'],
                account_name=args.account_name,
                passphrase=args.passphrase,
                target_container=arg_dict['target'],
                path=arg_dict['reset_path'],
                storage_tier=arg_dict['storage_tier']
            )
            # Run the container move
            move_container.main()
        # Don't crash on SystemExits
        except SystemExit:
            pass
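

# A sketch of a 'move container' batch file, assuming the ['container', 'target', 'reset_path',
# 'storage_tier'] headers used above (names are illustrative):
#
#   source-container<TAB>target-container<TAB>nested/path<TAB>Hot
#
# Only the source and target containers are required; the reset path and storage tier columns may be
# left off, with arg_dict_cleanup presumably handling the resulting missing values.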


def file_move(args, batch_dict=None):
    """
    Read in the batch file, clean up the arguments to work with the code base, run the AzureMove class for each file
    :param args: type ArgumentParser arguments
    :param batch_dict: type Pandas dataframe.transpose().to_dict()
    """
    # If batch_dict has not been supplied by the batch function, extract the batch information from the file
    if not batch_dict:
        batch_dict = create_batch_dict(batch_file=args.batch_file,
                                       headers=['container', 'target', 'file', 'reset_path', 'storage_tier'])
    # The format of the dictionary is: {primary key: {header: value, ...}, primary key: {header: value, ...}, ...}
    # e.g. {1: {container_name: $CONTAINER_NAME, target: $TARGET...}, 2: {container_name: ...}, ...}
    for key, arg_dict in batch_dict.items():
        # Clean up the arguments, as some are optional, or not interpreted correctly
        arg_dict = arg_dict_cleanup(arg_dict=arg_dict)
        try:
            # Create the move_file object
            move_file = AzureMove(
                object_name=arg_dict['file'],
                container_name=arg_dict['container'],
                account_name=args.account_name,
                passphrase=args.passphrase,
                target_container=arg_dict['target'],
                path=arg_dict['reset_path'],
                storage_tier=arg_dict['storage_tier'],
                category='file'
            )
            # Run the file move
            move_file.main()
        # Don't crash on SystemExits
        except SystemExit:
            pass


def folder_move(args, batch_dict=None):
    """
    Read in the batch file, clean up the arguments to work with the code base, run the AzureMove class for each folder
    :param args: type ArgumentParser arguments
    :param batch_dict: type Pandas dataframe.transpose().to_dict()
    """
    # If batch_dict has not been supplied by the batch function, extract the batch information from the file
    if not batch_dict:
        batch_dict = create_batch_dict(batch_file=args.batch_file,
                                       headers=['container', 'target', 'folder', 'reset_path', 'storage_tier'])
    # The format of the dictionary is: {primary key: {header: value, ...}, primary key: {header: value, ...}, ...}
    # e.g. {1: {container_name: $CONTAINER_NAME, target: $TARGET...}, 2: {container_name: ...}, ...}
    for key, arg_dict in batch_dict.items():
        # Clean up the arguments, as some are optional, or not interpreted correctly
        arg_dict = arg_dict_cleanup(arg_dict=arg_dict)
        try:
            # Create the move_folder object
            move_folder = AzureMove(
                object_name=arg_dict['folder'],
                container_name=arg_dict['container'],
                account_name=args.account_name,
                passphrase=args.passphrase,
                target_container=arg_dict['target'],
                path=arg_dict['reset_path'],
                storage_tier=arg_dict['storage_tier'],
                category='folder'
            )
            # Run the folder move
            move_folder.main()
        # Don't crash on SystemExits
        except SystemExit:
            pass


def container_download(args, batch_dict=None):
    """
    Read in the batch file, clean up the arguments, run the AzureContainerDownload class for each container
    :param args: type ArgumentParser arguments
    :param batch_dict: type Pandas dataframe.transpose().to_dict()
    """
    # If batch_dict has not been supplied by the batch function, extract the batch information from the file
    if not batch_dict:
        batch_dict = create_batch_dict(batch_file=args.batch_file,
                                       headers=['container', 'output_path'])
    # The format of the dictionary is: {primary key: {header: value, ...}, primary key: {header: value, ...}, ...}
    # e.g. {1: {container_name: $CONTAINER_NAME, output_path: $OUTPUT_PATH...}, 2: {container_name: ...}, ...}
    for key, arg_dict in batch_dict.items():
        # Clean up the arguments, as some are optional, or not interpreted correctly
        arg_dict = arg_dict_cleanup(arg_dict=arg_dict)
        try:
            # Create the download_container object
            download_container = AzureContainerDownload(
                container_name=arg_dict['container'],
                account_name=args.account_name,
                passphrase=args.passphrase,
                output_path=arg_dict['output_path']
            )
            # Run the container download
            download_container.main()
        # Don't crash on SystemExits
        except SystemExit:
            pass


def file_download(args, batch_dict=None):
    """
    Read in the batch file, clean up the arguments, run the AzureDownload class for each file
    :param args: type ArgumentParser arguments
    :param batch_dict: type Pandas dataframe.transpose().to_dict()
    """
    # If batch_dict has not been supplied by the batch function, extract the batch information from the file
    if not batch_dict:
        batch_dict = create_batch_dict(batch_file=args.batch_file,
                                       headers=['container', 'file', 'output_path'])
    # The format of the dictionary is: {primary key: {header: value, ...}, primary key: {header: value, ...}, ...}
    # e.g. {1: {container_name: $CONTAINER_NAME, file: $FILE...}, 2: {container_name: ...}, ...}
    for key, arg_dict in batch_dict.items():
        # Clean up the arguments, as some are optional, or not interpreted correctly
        arg_dict = arg_dict_cleanup(arg_dict=arg_dict)
        try:
            # Create the download_file object
            download_file = AzureDownload(
                container_name=arg_dict['container'],
                object_name=arg_dict['file'],
                account_name=args.account_name,
                passphrase=args.passphrase,
                output_path=arg_dict['output_path'],
                category='file'
            )
            # Run the file download
            download_file.main()
        # Don't crash on SystemExits
        except SystemExit:
            pass
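

# An illustrative 'download file' batch file matching the ['container', 'file', 'output_path']
# headers above (container, file, and path values are placeholders):
#
#   container-name<TAB>reports/report.pdf<TAB>/home/user/downloads
#   container-name<TAB>reports/report2.pdf
#
# The output path column is optional; when it is omitted, AzureDownload presumably falls back to
# its default output location.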


def folder_download(args, batch_dict=None):
    """
    Read in the batch file, clean up the arguments, run the AzureDownload class for each folder
    :param args: type ArgumentParser arguments
    :param batch_dict: type Pandas dataframe.transpose().to_dict()
    """
    # If batch_dict has not been supplied by the batch function, extract the batch information from the file
    if not batch_dict:
        batch_dict = create_batch_dict(batch_file=args.batch_file,
                                       headers=['container', 'folder', 'output_path'])
    # The format of the dictionary is: {primary key: {header: value, ...}, primary key: {header: value, ...}, ...}
    # e.g. {1: {container_name: $CONTAINER_NAME, folder: $FOLDER...}, 2: {container_name: ...}, ...}
    for key, arg_dict in batch_dict.items():
        # Clean up the arguments, as some are optional, or not interpreted correctly
        arg_dict = arg_dict_cleanup(arg_dict=arg_dict)
        try:
            # Create the download_folder object
            download_folder = AzureDownload(
                container_name=arg_dict['container'],
                object_name=arg_dict['folder'],
                account_name=args.account_name,
                passphrase=args.passphrase,
                output_path=arg_dict['output_path'],
                category='folder'
            )
            # Run the folder download
            download_folder.main()
        # Don't crash on SystemExits
        except SystemExit:
            pass


def container_tier(args, batch_dict=None):
    """
    Read in the batch file, clean up the arguments, run the AzureContainerTier class for each container
    :param args: type ArgumentParser arguments
    :param batch_dict: type Pandas dataframe.transpose().to_dict()
    """
    # If batch_dict has not been supplied by the batch function, extract the batch information from the file
    if not batch_dict:
        batch_dict = create_batch_dict(batch_file=args.batch_file,
                                       headers=['container', 'storage_tier'])
    # The format of the dictionary is: {primary key: {header: value, ...}, primary key: {header: value, ...}, ...}
    # e.g. {1: {container_name: $CONTAINER_NAME, storage_tier: $STORAGE_TIER ...}, 2: {container_name: ...}, ...}
    for key, arg_dict in batch_dict.items():
        # Clean up the arguments, as some are optional, or not interpreted correctly
        arg_dict = arg_dict_cleanup(arg_dict=arg_dict)
        try:
            # Create the tier_container object
            tier_container = AzureContainerTier(
                container_name=arg_dict['container'],
                account_name=args.account_name,
                passphrase=args.passphrase,
                storage_tier=arg_dict['storage_tier']
            )
            # Run the container tier
            tier_container.main()
        # Don't crash on SystemExits
        except SystemExit:
            pass


def file_tier(args, batch_dict=None):
    """
    Read in the batch file, clean up the arguments, run the AzureTier class for each file
    :param args: type ArgumentParser arguments
    :param batch_dict: type Pandas dataframe.transpose().to_dict()
    """
    # If batch_dict has not been supplied by the batch function, extract the batch information from the file
    if not batch_dict:
        batch_dict = create_batch_dict(batch_file=args.batch_file,
                                       headers=['container', 'file', 'storage_tier'])
    # The format of the dictionary is: {primary key: {header: value, ...}, primary key: {header: value, ...}, ...}
    # e.g. {1: {container_name: $CONTAINER_NAME, file: $FILE ...}, 2: {container_name: ...}, ...}
    for key, arg_dict in batch_dict.items():
        # Clean up the arguments, as some are optional, or not interpreted correctly
        arg_dict = arg_dict_cleanup(arg_dict=arg_dict)
        try:
            # Create the tier_file object
            tier_file = AzureTier(
                container_name=arg_dict['container'],
                object_name=arg_dict['file'],
                account_name=args.account_name,
                passphrase=args.passphrase,
                storage_tier=arg_dict['storage_tier'],
                category='file'
            )
            # Run the file tier
            tier_file.main()
        # Don't crash on SystemExits
        except SystemExit:
            pass


def folder_tier(args, batch_dict=None):
    """
    Read in the batch file, clean up the arguments, run the AzureTier class for each folder
    :param args: type ArgumentParser arguments
    :param batch_dict: type Pandas dataframe.transpose().to_dict()
    """
    # If batch_dict has not been supplied by the batch function, extract the batch information from the file
    if not batch_dict:
        batch_dict = create_batch_dict(batch_file=args.batch_file,
                                       headers=['container', 'folder', 'storage_tier'])
    # The format of the dictionary is: {primary key: {header: value, ...}, primary key: {header: value, ...}, ...}
    # e.g. {1: {container_name: $CONTAINER_NAME, folder: $FOLDER ...}, 2: {container_name: ...}, ...}
    for key, arg_dict in batch_dict.items():
        # Clean up the arguments, as some are optional, or not interpreted correctly
        arg_dict = arg_dict_cleanup(arg_dict=arg_dict)
        try:
            # Create the tier_folder object
            tier_folder = AzureTier(
                container_name=arg_dict['container'],
                object_name=arg_dict['folder'],
                account_name=args.account_name,
                passphrase=args.passphrase,
                storage_tier=arg_dict['storage_tier'],
                category='folder'
            )
            # Run the folder tier
            tier_folder.main()
        # Don't crash on SystemExits
        except SystemExit:
            pass


def container_delete(args, batch_dict=None):
    """
    Read in the batch file, clean up the arguments, run the AzureContainerDelete class for each container
    :param args: type ArgumentParser arguments
    :param batch_dict: type Pandas dataframe.transpose().to_dict()
    """
    # If batch_dict has not been supplied by the batch function, extract the batch information from the file
    if not batch_dict:
        batch_dict = create_batch_dict(batch_file=args.batch_file,
                                       headers=['container'])
    # The format of the dictionary is: {primary key: {header: value, ...}, primary key: {header: value, ...}, ...}
    # e.g. {1: {container_name: $CONTAINER_NAME}, 2: {container_name: $CONTAINER_NAME}, ...}
    for key, arg_dict in batch_dict.items():
        # Clean up the arguments, as some are optional, or not interpreted correctly
        arg_dict = arg_dict_cleanup(arg_dict=arg_dict)
        try:
            # Create the delete_container object
            delete_container = AzureContainerDelete(
                container_name=arg_dict['container'],
                account_name=args.account_name,
                passphrase=args.passphrase,
            )
            # Run the container delete
            delete_container.main()
        # Don't crash on SystemExits
        except SystemExit:
            pass
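

# A minimal 'delete container' batch file needs only one container name per line, matching the
# single ['container'] header defined above (names are placeholders), e.g.:
#
#   old-container-1
#   old-container-2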


def file_delete(args, batch_dict=None):
    """
    Read in the batch file, clean up the arguments, run the AzureDelete class for each file
    :param args: type ArgumentParser arguments
    :param batch_dict: type Pandas dataframe.transpose().to_dict()
    """
    # If batch_dict has not been supplied by the batch function, extract the batch information from the file
    if not batch_dict:
        batch_dict = create_batch_dict(batch_file=args.batch_file,
                                       headers=['container', 'file', 'retention_time'])
    # The format of the dictionary is: {primary key: {header: value, ...}, primary key: {header: value, ...}, ...}
    # e.g. {1: {container_name: $CONTAINER_NAME, file: $FILE ...}, 2: {container_name: ...}, ...}
    for key, arg_dict in batch_dict.items():
        # Clean up the arguments, as some are optional, or not interpreted correctly
        arg_dict = arg_dict_cleanup(arg_dict=arg_dict)
        try:
            # Create the delete_file object
            delete_file = AzureDelete(
                container_name=arg_dict['container'],
                object_name=arg_dict['file'],
                account_name=args.account_name,
                passphrase=args.passphrase,
                retention_time=arg_dict['retention_time'],
                category='file'
            )
            # Run the file delete
            delete_file.main()
        # Don't crash on SystemExits
        except SystemExit:
            pass


def folder_delete(args, batch_dict=None):
    """
    Read in the batch file, clean up the arguments, run the AzureDelete class for each folder
    :param args: type ArgumentParser arguments
    :param batch_dict: type Pandas dataframe.transpose().to_dict()
    """
    # If batch_dict has not been supplied by the batch function, extract the batch information from the file
    if not batch_dict:
        batch_dict = create_batch_dict(batch_file=args.batch_file,
                                       headers=['container', 'folder', 'retention_time'])
    # The format of the dictionary is: {primary key: {header: value, ...}, primary key: {header: value, ...}, ...}
    # e.g. {1: {container_name: $CONTAINER_NAME, folder: $FOLDER ...}, 2: {container_name: ...}, ...}
    for key, arg_dict in batch_dict.items():
        # Clean up the arguments, as some are optional, or not interpreted correctly
        arg_dict = arg_dict_cleanup(arg_dict=arg_dict)
        try:
            # Create the delete_folder object
            delete_folder = AzureDelete(
                container_name=arg_dict['container'],
                object_name=arg_dict['folder'],
                account_name=args.account_name,
                passphrase=args.passphrase,
                retention_time=arg_dict['retention_time'],
                category='folder'
            )
            # Run the folder delete
            delete_folder.main()
        # Don't crash on SystemExits
        except SystemExit:
            pass


def batch(args):
    """
    Read in the batch file, and run the appropriate function for each requested command and subcommand combination
    :param args: type ArgumentParser arguments
    """
    # Ensure that the batch file exists
    try:
        assert os.path.isfile(args.batch_file)
    except AssertionError:
        logging.error(f'Could not locate the supplied batch file {args.batch_file}. Please ensure that you entered '
                      f'the name and path correctly')
        raise SystemExit
    # Create a dictionary of all the functions with the corresponding command and subcommands as keys
    function_dict = {
        'upload': {
            'file': file_upload,
            'folder': folder_upload
        },
        'sas': {
            'container': container_sas,
            'file': file_sas,
            'folder': folder_sas
        },
        'move': {
            'container': container_move,
            'file': file_move,
            'folder': folder_move
        },
        'download': {
            'container': container_download,
            'file': file_download,
            'folder': folder_download
        },
        'tier': {
            'container': container_tier,
            'file': file_tier,
            'folder': folder_tier
        },
        'delete': {
            'container': container_delete,
            'file': file_delete,
            'folder': folder_delete
        }
    }
    # Read in the batch file
    with open(args.batch_file, 'r') as batch_doc:
        for line in batch_doc:
            # Ignore commented lines
            if not line.startswith('#'):
                # Convert the line to a dictionary with the appropriate header: value pairs. Extract the command
                # and subcommand
                command, subcommand, batch_dict = parse_batch_file(line=line)
                # Run the appropriate function for the supplied command, subcommand combination
                function_dict[command][subcommand](args, batch_dict=batch_dict)
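

# A hypothetical combined batch file for the 'batch' subcommand. Each line starts with the command
# and subcommand, followed by that operation's own tab-separated arguments, mirroring the listing in
# the batch_subparser help text below. Lines starting with '#' are skipped, and all container, file,
# and expiry values here are placeholders:
#
#   # comment lines like this one are ignored
#   upload<TAB>file<TAB>container-name<TAB>results/sample1.fastq.gz
#   sas<TAB>container<TAB>container-name<TAB>10<TAB>sas_urls.txt
#   move<TAB>folder<TAB>container-name<TAB>target-container<TAB>results
#   delete<TAB>container<TAB>old-container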


def cli():
    parser = ArgumentParser(
        description='Automate the submission of multiple AzureStorage commands'
    )
    # Create the parental parser, and the subparser
    subparsers, parent_parser = create_parent_parser(
        parser=parser,
        container=False
    )
    # Upload parser
    upload = subparsers.add_parser(
        parents=[],
        name='upload',
        description='Upload files/folders to Azure storage',
        formatter_class=RawTextHelpFormatter,
        help='Upload files/folders to Azure storage'
    )
    # Upload subparser
    upload_subparsers = upload.add_subparsers(
        title='Upload functionality',
        dest='upload'
    )
    # File upload subparser
    upload_file_subparser = upload_subparsers.add_parser(
        parents=[parent_parser],
        name='file',
        description='Upload files to Azure storage',
        formatter_class=RawTextHelpFormatter,
        help='Upload files to Azure storage'
    )
    upload_file_subparser.add_argument(
        '-b', '--batch_file',
        required=True,
        type=str,
        help='Tab-separated file with the following fields:\n '
             'container name, file name, destination path (optional), storage tier (optional)'
    )
    upload_file_subparser.set_defaults(func=file_upload)
    # Folder upload subparser
    upload_folder_subparser = upload_subparsers.add_parser(
        parents=[parent_parser],
        name='folder',
        description='Upload folders to Azure storage',
        formatter_class=RawTextHelpFormatter,
        help='Upload folders to Azure storage'
    )
    upload_folder_subparser.add_argument(
        '-b', '--batch_file',
        required=True,
        type=str,
        help='Tab-separated file with the following fields (one entry per line):\n '
             'container name, folder name, destination path (optional), storage tier (optional)'
    )
    upload_folder_subparser.set_defaults(func=folder_upload)
    # SAS URLs subparser
    sas_urls = subparsers.add_parser(
        parents=[],
        name='sas',
        description='Create SAS URLs for containers/files/folders in Azure storage',
        formatter_class=RawTextHelpFormatter,
        help='Create SAS URLs for containers/files/folders in Azure storage'
    )
    sas_url_subparsers = sas_urls.add_subparsers(
        title='SAS URL creation functionality',
        dest='sas'
    )
    # Container SAS URL subparser
    sas_url_container_subparser = sas_url_subparsers.add_parser(
        parents=[parent_parser],
        name='container',
        description='Create SAS URLs for containers in Azure storage',
        formatter_class=RawTextHelpFormatter,
        help='Create SAS URLs for containers in Azure storage'
    )
    sas_url_container_subparser.add_argument(
        '-b', '--batch_file',
        required=True,
        type=str,
        help='Tab-separated file with the following fields:\n '
             'container name, expiry (optional), output file (optional)'
    )
    sas_url_container_subparser.set_defaults(func=container_sas)
    # File SAS URL subparser
    sas_url_file_subparser = sas_url_subparsers.add_parser(
        parents=[parent_parser],
        name='file',
        description='Create SAS URLs for files in Azure storage',
        formatter_class=RawTextHelpFormatter,
        help='Create SAS URLs for files in Azure storage'
    )
    sas_url_file_subparser.add_argument(
        '-b', '--batch_file',
        required=True,
        type=str,
        help='Tab-separated file with the following fields:\n '
             'container name, file name and path, expiry (optional), output file (optional)'
    )
    sas_url_file_subparser.set_defaults(func=file_sas)
    # Folder SAS URL subparser
    sas_url_folder_subparser = sas_url_subparsers.add_parser(
        parents=[parent_parser],
        name='folder',
        description='Create SAS URLs for folders in Azure storage',
        formatter_class=RawTextHelpFormatter,
        help='Create SAS URLs for folders in Azure storage'
    )
    sas_url_folder_subparser.add_argument(
        '-b', '--batch_file',
        required=True,
        type=str,
        help='Tab-separated file with the following fields:\n '
             'container name, folder name and path, expiry (optional), output file (optional)'
    )
    sas_url_folder_subparser.set_defaults(func=folder_sas)
    # Move subparser
    move = subparsers.add_parser(
        parents=[],
        name='move',
        description='Move containers/files/folders in Azure storage',
        formatter_class=RawTextHelpFormatter,
        help='Move containers/files/folders in Azure storage'
    )
    move_subparsers = move.add_subparsers(
        title='Move functionality',
        dest='move'
    )
    # Container move subparser
    move_container_subparser = move_subparsers.add_parser(
        parents=[parent_parser],
        name='container',
        description='Move containers in Azure storage',
        formatter_class=RawTextHelpFormatter,
        help='Move containers in Azure storage'
    )
    move_container_subparser.add_argument(
        '-b', '--batch_file',
        required=True,
        type=str,
        help='Tab-separated file with the following fields:\n '
             'container name, target container, destination path (optional), storage tier (optional)'
    )
    move_container_subparser.set_defaults(func=container_move)
    # File move subparser
    move_file_subparser = move_subparsers.add_parser(
        parents=[parent_parser],
        name='file',
        description='Move files in Azure storage',
        formatter_class=RawTextHelpFormatter,
        help='Move files in Azure storage'
    )
    move_file_subparser.add_argument(
        '-b', '--batch_file',
        required=True,
        type=str,
        help='Tab-separated file with the following fields:\n '
             'container name, target container, file name, destination path (optional), storage tier (optional)'
    )
    move_file_subparser.set_defaults(func=file_move)
    # Folder move subparser
    move_folder_subparser = move_subparsers.add_parser(
        parents=[parent_parser],
        name='folder',
        description='Move folders in Azure storage',
        formatter_class=RawTextHelpFormatter,
        help='Move folders in Azure storage'
    )
    move_folder_subparser.add_argument(
        '-b', '--batch_file',
        required=True,
        type=str,
        help='Tab-separated file with the following fields:\n '
             'container name, target container, folder name, destination path (optional), storage tier (optional)'
    )
    move_folder_subparser.set_defaults(func=folder_move)
    # Download subparser
    download = subparsers.add_parser(
        parents=[],
        name='download',
        description='Download containers/files/folders in Azure storage',
        formatter_class=RawTextHelpFormatter,
        help='Download containers/files/folders in Azure storage'
    )
    download_subparsers = download.add_subparsers(
        title='Download functionality',
        dest='download'
    )
    # Container download subparser
    download_container_subparser = download_subparsers.add_parser(
        parents=[parent_parser],
        name='container',
        description='Download containers from Azure storage',
        formatter_class=RawTextHelpFormatter,
        help='Download containers from Azure storage'
    )
    download_container_subparser.add_argument(
        '-b', '--batch_file',
        required=True,
        type=str,
        help='Tab-separated file with the following fields:\n '
             'container name, output path (optional)'
    )
    download_container_subparser.set_defaults(func=container_download)
    # File download subparser
    download_file_subparser = download_subparsers.add_parser(
        parents=[parent_parser],
        name='file',
        description='Download files from Azure storage',
        formatter_class=RawTextHelpFormatter,
        help='Download files from Azure storage'
    )
    download_file_subparser.add_argument(
        '-b', '--batch_file',
        required=True,
        type=str,
        help='Tab-separated file with the following fields:\n '
             'container name, file name, output path (optional)'
    )
    download_file_subparser.set_defaults(func=file_download)
    # Folder download subparser
    download_folder_subparser = download_subparsers.add_parser(
        parents=[parent_parser],
        name='folder',
        description='Download folders from Azure storage',
        formatter_class=RawTextHelpFormatter,
        help='Download folders from Azure storage'
    )
    download_folder_subparser.add_argument(
        '-b', '--batch_file',
        required=True,
        type=str,
        help='Tab-separated file with the following fields:\n '
             'container name, folder name, output path (optional)'
    )
    download_folder_subparser.set_defaults(func=folder_download)
    # Storage tier subparser
    tier = subparsers.add_parser(
        parents=[],
        name='tier',
        description='Set the storage tier of containers/files/folders in Azure storage',
        formatter_class=RawTextHelpFormatter,
        help='Set the storage tier of containers/files/folders in Azure storage'
    )
    tier_subparsers = tier.add_subparsers(
        title='Storage tier setting functionality',
        dest='tier'
    )
    # Container storage tier subparser
    tier_container_subparser = tier_subparsers.add_parser(
        parents=[parent_parser],
        name='container',
        description='Set the storage tier of containers in Azure storage',
        formatter_class=RawTextHelpFormatter,
        help='Set the storage tier of containers in Azure storage'
    )
    tier_container_subparser.add_argument(
        '-b', '--batch_file',
        required=True,
        type=str,
        help='Tab-separated file with the following fields:\n '
             'container name, storage tier'
    )
    tier_container_subparser.set_defaults(func=container_tier)
    # File storage tier subparser
    tier_file_subparser = tier_subparsers.add_parser(
        parents=[parent_parser],
        name='file',
        description='Set the storage tier of files in Azure storage',
        formatter_class=RawTextHelpFormatter,
        help='Set the storage tier of files in Azure storage'
    )
    tier_file_subparser.add_argument(
        '-b', '--batch_file',
        required=True,
        type=str,
        help='Tab-separated file with the following fields:\n '
             'container name, file name, storage tier'
    )
    tier_file_subparser.set_defaults(func=file_tier)
    # Folder storage tier subparser
    tier_folder_subparser = tier_subparsers.add_parser(
        parents=[parent_parser],
        name='folder',
        description='Set the storage tier of folders in Azure storage',
        formatter_class=RawTextHelpFormatter,
        help='Set the storage tier of folders in Azure storage'
    )
    tier_folder_subparser.add_argument(
        '-b', '--batch_file',
        required=True,
        type=str,
        help='Tab-separated file with the following fields:\n '
             'container name, folder name, storage tier'
    )
    tier_folder_subparser.set_defaults(func=folder_tier)
    # Delete subparser
    delete = subparsers.add_parser(
        parents=[],
        name='delete',
        description='Delete containers/files/folders in Azure storage',
        formatter_class=RawTextHelpFormatter,
        help='Delete containers/files/folders in Azure storage'
    )
    delete_subparsers = delete.add_subparsers(
        title='Delete functionality',
        dest='delete'
    )
    # Container delete subparser
    delete_container_subparser = delete_subparsers.add_parser(
        parents=[parent_parser],
        name='container',
        description='Delete containers in Azure storage',
        formatter_class=RawTextHelpFormatter,
        help='Delete containers in Azure storage'
    )
    delete_container_subparser.add_argument(
        '-b', '--batch_file',
        required=True,
        type=str,
        help='File with the following field:\n '
             'container name'
    )
    delete_container_subparser.set_defaults(func=container_delete)
    # File delete subparser
    delete_file_subparser = delete_subparsers.add_parser(
        parents=[parent_parser],
        name='file',
        description='Delete files in Azure storage',
        formatter_class=RawTextHelpFormatter,
        help='Delete files in Azure storage'
    )
    delete_file_subparser.add_argument(
        '-b', '--batch_file',
        required=True,
        type=str,
        help='Tab-separated file with the following fields:\n '
             'container name, file name, retention time (optional)'
    )
    delete_file_subparser.set_defaults(func=file_delete)
    # Folder delete subparser
    delete_folder_subparser = delete_subparsers.add_parser(
        parents=[parent_parser],
        name='folder',
        description='Delete folders in Azure storage',
        formatter_class=RawTextHelpFormatter,
        help='Delete folders in Azure storage'
    )
    delete_folder_subparser.add_argument(
        '-b', '--batch_file',
        required=True,
        type=str,
        help='Tab-separated file with the following fields:\n '
             'container name, folder name, retention time (optional)'
    )
    delete_folder_subparser.set_defaults(func=folder_delete)
    # Batch subparser
    batch_subparser = subparsers.add_parser(
        parents=[parent_parser],
        name='batch',
        description='Perform multiple different operations in batch',
        formatter_class=RawTextHelpFormatter,
        help='Perform multiple different operations in batch'
    )
    batch_subparser.add_argument(
        '-b', '--batch_file',
        required=True,
        type=str,
        help='Tab-separated file in the following format:\n'
             'command, sub-command, arguments\n\n'
             'Below is the complete list of functionalities:\n'
             'upload, file, container name, file name, destination path (optional), storage tier (optional)\n'
             'upload, folder, container name, folder name, destination path (optional), storage tier (optional)\n'
             'sas, container, container name, expiry (optional), output file (optional)\n'
             'sas, file, container name, file name and path, expiry (optional), output file (optional)\n'
             'sas, folder, container name, folder name and path, expiry (optional), output file (optional)\n'
             'move, container, container name, target container, destination path (optional), storage tier (optional)\n'
             'move, file, container name, target container, file name, destination path (optional), '
             'storage tier (optional)\n'
             'move, folder, container name, target container, folder name, destination path (optional), '
             'storage tier (optional)\n'
             'download, container, container name, output path (optional)\n'
             'download, file, container name, file name, output path (optional)\n'
             'download, folder, container name, folder name, output path (optional)\n'
             'tier, container, container name, storage tier\n'
             'tier, file, container name, file name, storage tier\n'
             'tier, folder, container name, folder name, storage tier\n'
             'delete, container, container name\n'
             'delete, file, container name, file name, retention time (optional)\n'
             'delete, folder, container name, folder name, retention time (optional)'
    )
    batch_subparser.set_defaults(func=batch)
    # Set up the arguments, and run the appropriate subparser
    arguments = setup_arguments(parser=parser)
    # Return to the requested logging level, as it has been increased to WARNING to suppress the log being filled with
    # information from azure.core.pipeline.policies.http_logging_policy
    coloredlogs.install(level=arguments.verbosity.upper())
    logging.info('Operations complete')
    # Prevent the arguments being printed to the console (they are returned in order for the tests to work)
    sys.stderr = open(os.devnull, 'w')
    return arguments


if __name__ == '__main__':
    cli()
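

# Example invocations (a sketch only: the account, passphrase, and verbosity options are defined in
# create_parent_parser, which is not shown in this file, so the option names below are assumptions,
# and the account name and batch file names are placeholders):
#
#   python azure_automate.py upload file --account_name myaccount --batch_file uploads.tsv
#   python azure_automate.py batch --account_name myaccount --batch_file operations.tsv
#
# Both calls read the supplied tab-separated batch file and dispatch each line to the corresponding
# Azure* class via the functions defined above.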