# Copyright (C) 2010 Ion Torrent Systems, Inc. All Rights Reserved
"""
Tasks
=====
The ``publishers`` module contains Django views and their helper functions
related to the processing of publisher uploads.
Not all functions contained in ``publishers`` are actual Django views, only
those that take ``request`` as their first argument and appear in a ``urls``
module are in fact Django views.
"""
from __future__ import absolute_import
import datetime
import subprocess
import logging
import traceback
import os
import os.path
import time
import httplib
import mimetypes
import shutil
import dateutil.parser
from django.utils.encoding import force_unicode
from django.utils.translation import ugettext_lazy
from iondb.utils import validation, i18n_errors
from tastypie.bundle import Bundle
from django.core.exceptions import ObjectDoesNotExist
from django.shortcuts import render_to_response
from django.http import (
    HttpResponse,
    StreamingHttpResponse,
    HttpResponseRedirect,
    HttpResponseBadRequest,
)
from django import forms
from django.contrib.auth.decorators import login_required
from django.template import RequestContext, Context
from django.conf import settings
from iondb.rundb import models, labels
from iondb.rundb import publisher_types
from iondb.celery import app
import json
from iondb.rundb.ajax import render_to_json
from celery.utils.log import get_task_logger
import httplib2
import urllib
from iondb.rundb.json_field import JSONEncoder
from iondb.utils.verify_types import RepresentsJSON

logger = logging.getLogger(__name__)


# ============================================================================
# Publisher Management
# ============================================================================


def search_for_publishers(pub_dir="/results/publishers/"):
    """
    Searches for new publishers, reads their publisher_meta.json, and makes any
    necessary updates to the publisher's record.
    """

    def create_new(name, version, path):
        pub = models.Publisher()
        pub.name = name
        pub.version = version
        pub.date = datetime.datetime.now()
        pub.path = path
        pub.save()

    def update_version(pub, version):
        pub.version = version
        pub.save()

    if os.path.exists(pub_dir):
        # only list files in the 'publishers' directory if they are actually folders
        folder_list = [
            i
            for i in os.listdir(pub_dir)
            if (os.path.isdir(os.path.join(pub_dir, i)) and i != "scratch")
        ]
        for pname in folder_list:
            full_path = os.path.join(pub_dir, pname)
            pub_meta_path = os.path.join(full_path, "publisher_meta.json")
            try:
                with open(pub_meta_path) as pub_meta_file:
                    publisher_meta = json.load(pub_meta_file)
                version = str(publisher_meta["version"])
            # Begin Righteous error reporting!
            except NameError:
                logger.error("Publisher %s is missing publisher_meta.json" % pname)
            except IOError as error:
                logger.error(
                    "Publisher %s failed to read publisher_meta.json with %s"
                    % (pname, error)
                )
            except (ValueError, KeyError) as error:
                logger.error(
                    "Publisher %s has an improperly formatted publisher_meta.json with %s"
                    % (pname, error)
                )
            else:
                try:
                    p = models.Publisher.objects.get(name=pname.strip())
                    if p.version != version:
                        update_version(p, version)
                        logger.info(
                            "Publisher %s updated to version %s" % (pname, version)
                        )
                except ObjectDoesNotExist:
                    create_new(pname, version, full_path)
                    logger.info("Publisher %s version %s added" % (pname, version))
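

# For reference, a minimal publisher_meta.json that the scan above can parse.
# Only the "version" key is read here; any other keys are simply ignored by
# this function (the value shown is illustrative, not taken from a real
# publisher):
#
#     {
#         "version": "1.0"
#     }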


def purge_publishers():
    """Removes records from the publisher table which no longer have a
    corresponding folder on the file system.  If the folder does not exist, we
    assume that the publisher has been deleted.  In any case, one cannot
    execute the publisher if the publisher's folder has been removed.
    """
    pubs = models.Publisher.objects.all()
    # for each record, test for a corresponding folder
    for pub in pubs:
        # if the folder does not exist, remove the record
        if not os.path.isdir(pub.path):
            pub.delete()
            logger.info(
                "Deleting publisher %s which no longer exists at %s"
                % (pub.name, pub.path)
            )


# ============================================================================
# Content Upload Publication
# ============================================================================


class PublisherContentUploadValidator(forms.Form):
    file = forms.FileField()
    meta = forms.CharField(widget=forms.HiddenInput)


def write_file(file_data, destination):
    """Write a Django uploaded file object to disk incrementally in order to
    avoid sucking up all of the system's RAM by reading the whole thing into
    memory at once.
    """
    with open(destination, "wb+") as out:
        for chunk in file_data.chunks():
            out.write(chunk)


def validate_plupload(request, pub_name, file_name, labelsContent):
    file_name = os.path.basename(file_name).strip().replace(" ", "_")
    if pub_name == "BED":
        # validate bed file name is unique
        if models.Content.objects.filter(file__endswith="/" + file_name).count() > 0:
            raise Exception(
                validation.invalid_entity_field_unique_value(
                    labelsContent.verbose_name,
                    labelsContent.pickfile.verbose_name,
                    file_name,
                )
            )
        # validate file name
        if not validation.is_valid_chars(file_name):
            raise Exception(validation.invalid_chars_error("BED file name"))
    return file_name


def write_plupload(request, pub_name):
    """file upload for plupload"""
    logger.info("Starting write plupload")
    pub = models.Publisher.objects.get(name=pub_name)
    meta = request.POST.get("meta", "{}")
    logger.debug("%s" % meta)
    if request.method == "POST":
        name = request.REQUEST.get("name", "")
        uploaded_file = request.FILES["file"]
        if not name:
            name = uploaded_file.name
        logger.debug("plupload name = '%s'" % name)

        try:
            labelsContent = labels.TargetRegionsContent  # default case
            if RepresentsJSON(meta):
                metaJson = json.loads(meta)
                labelsContent = (
                    labels.HotspotsContent
                    if metaJson and metaJson["hotspot"]
                    else labels.TargetRegionsContent
                )
            file_name = validate_plupload(request, pub_name, name, labelsContent)
        except Exception as err:
            return HttpResponseBadRequest(unicode(err))

        upload_dir = "/results/referenceLibrary/temp"
        if not os.path.exists(upload_dir):
            return render_to_json({"error": validation.missing_error(upload_dir)})

        dest_path = os.path.join(upload_dir, file_name)
        logger.debug("plupload destination = '%s'" % dest_path)
        chunk = request.REQUEST.get("chunk", "0")
        chunks = request.REQUEST.get("chunks", "0")
        logger.debug(
            "plupload chunk %s %s of %s" % (str(type(chunk)), str(chunk), str(chunks))
        )
        debug = [chunk, chunks]
        with open(dest_path, ("wb" if chunk == "0" else "ab")) as f:
            for content in uploaded_file.chunks():
                logger.debug("content chunk = '%d'" % len(content))
                f.write(content)

        my_contentupload_id = None
        if int(chunk) + 1 >= int(chunks):
            try:
                meta = json.loads(meta)
                meta["username"] = request.user.username
                upload = move_upload(pub, dest_path, file_name, json.dumps(meta))
                async_upload = run_pub_scripts.delay(pub, upload)
                my_contentupload_id = upload.id
            except Exception as err:
                logger.exception(
                    "There was a problem during upload of a file for a publisher."
                )
            else:
                logger.info("Successfully pluploaded %s" % name)
        logger.debug("plupload done")
        return render_to_json(
            {"chunk posted": debug, "contentupload_id": my_contentupload_id}
        )
    else:
        return render_to_json(
            {
                "method": i18n_errors.fatal_unsupported_http_method_expected(
                    request.method, "POST"
                )
            }
        )
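

# For reference, plupload POSTs each chunk to the view above with (at least)
# the following form fields, which are the ones read by write_plupload()
# (values shown are illustrative):
#
#     name   -- original file name (falls back to the uploaded file's name)
#     file   -- the chunk payload itself
#     chunk  -- zero-based index of this chunk
#     chunks -- total number of chunks
#     meta   -- JSON string of metadata, e.g. '{"hotspot": false}'
#
# When the final chunk arrives, the assembled file is handed to move_upload()
# and processing is kicked off with run_pub_scripts.delay().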


def new_upload(pub, file_name, meta_data=None):
    upload_date = dateutil.parser.parse(time.asctime()).isoformat()
    # set up meta.json
    meta_data_dict = json.loads(meta_data)
    meta_data_dict["upload_date"] = upload_date
    meta_data = json.dumps(meta_data_dict)

    # create ContentUpload
    upload = models.ContentUpload()
    upload.status = "Saving"
    upload.publisher = pub
    upload.meta = meta_data
    upload.username = meta_data_dict.get("username", "")
    upload.source = meta_data_dict.get("source") or meta_data_dict.get("url", "")
    upload.upload_date = upload_date
    if "upload_type" in meta_data_dict:
        upload.upload_type = meta_data_dict["upload_type"]
    elif pub.name == "BED" and "hotspot" in meta_data_dict:
        upload.upload_type = (
            publisher_types.HOTSPOT
            if meta_data_dict["hotspot"]
            else publisher_types.TARGET
        )
    else:
        upload.upload_type = pub.name
    upload.save()

    upload_dir = os.path.join("/results/uploads", pub.name, str(upload.pk))
    upload.file_path = os.path.join(upload_dir, file_name)
    upload.save()

    try:
        meta_path = os.path.join(upload_dir, "meta.json")
        # set both the user and group to read/write to allow the celery tasks to write to this directory
        original_umask = os.umask(0)
        os.makedirs(upload_dir, 0o0775)
        open(meta_path, "w").write(meta_data)
    except OSError as err:
        logger.exception("File error while saving new %s upload" % pub)
        upload.status = "Error: %s" % err
        upload.save()
        raise
    finally:
        os.umask(original_umask)
    return upload


def move_upload(pub, file_path, file_name, meta_data=None):
    upload = new_upload(pub, file_name, meta_data)
    shutil.move(file_path, upload.file_path)
    upload.status = "Queued for processing"
    upload.save()
    return upload


def store_upload(pub, file_data, file_name, meta_data=None):
    """Create a unique folder for an uploaded file and begin editing it for
    publication.
    """
    upload = new_upload(pub, file_name, meta_data)
    write_file(file_data, upload.file_path)
    upload.status = "Queued for processing"
    upload.save()
    return upload
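

# For reference, the meta_data argument to new_upload()/move_upload()/
# store_upload() is a JSON string.  new_upload() itself only consults a few
# keys; a minimal hedged example (values are illustrative only):
#
#     {"username": "ionadmin", "source": "/path/or/url/of/original/file",
#      "upload_type": "target", "hotspot": false}
#
# new_upload() adds "upload_date" automatically, and for the "BED" publisher
# the "hotspot" flag selects between the hotspot and target-regions upload
# types when "upload_type" is not given explicitly.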


@app.task
def run_pub_scripts(pub, upload):
    """Spawn subshells in which the Publisher's editing scripts are run, with
    the upload's folder and the script's output folder as command line args.
    """
    task_logger = get_task_logger(__name__)
    try:
        # TODO: Handle unique file upload instance particulars
        task_logger.info("Editing upload for %s" % pub.name)
        previous_status = upload.status
        upload_path = upload.file_path
        upload_dir = os.path.dirname(upload_path)
        meta_path = os.path.join(upload_dir, "meta.json")
        pub_dir = pub.path
        pub_scripts = pub.get_editing_scripts()
        for script_path, stage_name in pub_scripts:
            # If at some point in the loop, one of the scripts changes the status,
            # then we cease updating it automatically.
            if upload.status == previous_status:
                previous_status = stage_name
                upload.status = stage_name
                upload.save()

            upload_id = str(upload.id)
            cmd = [script_path, upload_id, upload_dir, upload_path, meta_path]
            logpath = os.path.join(upload_dir, "publisher.log")
            # Spawn the subprocess and wait for it to complete.
            with open(logpath, "a") as log_out:
                proc = subprocess.Popen(
                    cmd, stdout=log_out, stderr=subprocess.STDOUT, cwd=pub_dir
                )
                success = proc.wait() == 0
            # success = run_script(pub_dir, script_path, str(upload.id), upload_dir, upload_path, meta_path)
            # The script may have updated the upload during execution, so we reload it.
            upload = models.ContentUpload.objects.get(pk=upload.pk)
            if success:
                task_logger.info(
                    "Editing upload for %s finished %s" % (pub.name, script_path)
                )
            else:
                task_logger.error(
                    "Editing for %s died during %s." % (pub.name, script_path)
                )
                upload.status = "Error: %s" % stage_name
                upload.save()
            # If either the script itself or we set the status to anything starting
            # with "Error" then we abort further processing here.
            if upload.status.startswith("Error") or upload.status.startswith("Waiting"):
                return
        # At this point every script has finished running and we have not returned
        # early due to an error, alright!
        upload.status = "Successfully Completed"
        upload.save()
    except Exception:
        tb = "\n".join("    " + s for s in traceback.format_exc().split("\n"))
        task_logger.error(
            "Exception in %s upload %d during %s\n%s"
            % (pub.name, upload.id, stage_name, tb)
        )
        upload.status = "Error: processing failed."
        upload.save()
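

# Each editing script returned by pub.get_editing_scripts() is run by the task
# above as a plain subprocess from the publisher's own directory, roughly:
#
#     cd <pub.path>
#     <script_path> <upload.id> <upload_dir> <upload_path> <meta_path> \
#         >> <upload_dir>/publisher.log 2>&1
#
# so a publisher script only needs to accept those four positional arguments.
# A script may also change the ContentUpload status itself (for example via
# the REST API; see the call_api helper at the bottom of this module), in
# which case the loop above stops updating the status automatically.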


def edit_upload(pub, upload, meta=None):
    """Editing is the process which converts an uploaded file into one or more
    files of published content.
    """
    upload = store_upload(pub, upload, upload.name, meta)
    async_upload = run_pub_scripts.delay(pub, upload)
    return upload, async_upload


def publisher_upload(request, pub_name):
    """Display the publisher's upload.html template on a GET of the page.
    If the view is POSTed to, pass the uploaded data to the publisher.
    """
    pub = models.Publisher.objects.get(name=pub_name)
    if request.method == "POST":
        meta = request.POST.dict()
        files = list(request.FILES.values())
        if len(files) == 0:
            return render_to_json({"error": "Error: No file selected for upload"})
        else:
            try:
                meta["username"] = request.user.username
                upload, async_task = edit_upload(pub, files[0], json.dumps(meta))
                return render_to_json({"status": upload.status, "id": upload.id})
            except Exception as e:
                return render_to_json({"error": str(e)})
    else:
        action = request.get_full_path()
        error = ""
        contents = ""
        try:
            path = os.path.join(pub.path, "upload.html")
            with open(path, "r") as f:
                contents = f.read()
        except Exception:
            error = "Error: Unable to read %s" % path
        ctx = RequestContext(
            request, {"contents": contents, "action": action, "error": error}
        )
        return render_to_response(
            "rundb/configure/modal_publisher_upload.html", context_instance=ctx
        )


def publisher_api_upload(request, pub_name):
    """TastyPie does not support file uploads, so for now, this is handled
    outside of the normal API space.
    """
    if request.method == "POST":
        pub = models.Publisher.objects.get(name=pub_name)
        form = PublisherContentUploadValidator(request.POST, request.FILES)
        if form.is_valid():
            upload, async_task = edit_upload(
                pub, form.cleaned_data["file"], form.cleaned_data["meta"]
            )
            from iondb.rundb.api import ContentUploadResource

            resource = ContentUploadResource()
            bundle = Bundle(upload)
            serialized_upload = resource.serialize(
                None, resource.full_dehydrate(bundle), "application/json"
            )
            return HttpResponse(serialized_upload, mimetype="application/json")
        else:
            logger.warning(form.errors)
    else:
        return HttpResponseRedirect("/rundb/publish/%s/" % pub_name)


def upload_status(request, contentupload_id, frame=False):
    """If we're in an iframe, we can skip basically everything, and tell the
    template to redirect the parent window to the normal page.
    """
    if frame:
        return render_to_response(
            "rundb/ion_jailbreak.html",
            dictionary={
                "go": "/rundb/uploadstatus/%s/" % contentupload_id,
                "contentupload_id": contentupload_id,
            },
            context_instance=RequestContext(request),
        )
    upload = models.ContentUpload.objects.get(pk=contentupload_id)
    source = upload.source
    filemonitor = None
    try:
        filemonitor = models.FileMonitor.objects.get(url=source, status="Complete")
    except Exception as err:
        logger.error(err)

    logs = list(upload.logs.all())
    logs.sort(key=lambda x: x.timeStamp)

    file_log = ""
    try:
        with open(os.path.dirname(upload.file_path) + "/publisher.log", "r") as f:
            file_log = f.read()
    except Exception as err:
        # file_log = str(err)
        pass

    try:
        file_size_string = "(%s bytes)" % "{:,}".format(
            os.stat(upload.file_path).st_size
        )
    except Exception:
        file_size_string = ""

    processed_uploads = []
    for content in upload.contents.filter(type__in=["target", "hotspot"]):
        if "unmerged/detail" not in content.file:
            continue
        try:
            content_file_size_string = "(%s bytes)" % "{:,}".format(
                os.stat(content.file).st_size
            )
        except Exception:
            content_file_size_string = ""

        content_type = publisher_types.BED_TYPES.get(content.type) or content.type
        bonus_fields = []
        if content.type == "hotspot":
            if "reference" in content.meta:
                bonus_fields.append(
                    {
                        "label": labels.HotspotsContent.reference.verbose_name,
                        "value": content.meta["reference"],
                    }
                )
            if "num_loci" in content.meta:
                bonus_fields.append(
                    {
                        "label": labels.HotspotsContent.num_loci.verbose_name,
                        "value": "{:,}".format(content.meta["num_loci"]),
                    }
                )
            content_type_hash = "hotspots"
        else:
            if "reference" in content.meta:
                bonus_fields.append(
                    {
                        "label": labels.TargetRegionsContent.reference.verbose_name,
                        "value": content.meta["reference"],
                    }
                )
            if "num_targets" in content.meta:
                bonus_fields.append(
                    {
                        "label": labels.TargetRegionsContent.num_targets.verbose_name,
                        "value": "{:,}".format(content.meta["num_targets"]),
                    }
                )
            if "num_genes" in content.meta:
                bonus_fields.append(
                    {
                        "label": labels.TargetRegionsContent.num_genes.verbose_name,
                        "value": "{:,}".format(content.meta["num_genes"]),
                    }
                )
            if "num_bases" in content.meta:
                bonus_fields.append(
                    {
                        "label": labels.TargetRegionsContent.num_bases.verbose_name,
                        "value": "{:,}".format(content.meta["num_bases"]),
                    }
                )
            content_type_hash = "target-regions"

        enabled = content.meta.get("enabled", True)
        content_name = content.get_file_name()
        processed_uploads.append(
            {
                "file_name": content.file,
                "file_size_string": content_file_size_string,
                "file_name_label": labels.Content.file_name.verbose_name,  # 'Processed File'
                "content_type_hash": content_type_hash,
                "description": content.description,
                "description_label": labels.Content.description.verbose_name,  # 'Description'
                "notes": content.notes,
                "notes_label": labels.Content.notes.verbose_name,  # 'Notes'
                "enabled": content.enabled,
                "enabled_label": labels.Content.enabled.verbose_name,  # 'Enabled'
                "bonus_fields": bonus_fields,
                "content_id": content.id,
                "title": ugettext_lazy("Content.details.title").format(
                    content_type=content_type, content_name=content_name
                ),  # u'{content_type} Details - {content_name}'
                "action_back_label": ugettext_lazy("global.action.backto").format(
                    name=content_type
                ),
                "action_back": "/configure/references/#{hash}".format(
                    hash=content_type_hash
                ),
            }
        )

    return render_to_response(
        "rundb/ion_publisher_upload_status.html",
        {
            "contentupload": upload,
            "upload_name": upload.get_file_name(),
            "logs": logs,
            "file_log": file_log,
            "file_path_label": labels.ContentUpload.file_path.verbose_name,
            "upload_type": publisher_types.BED_TYPES.get(upload.upload_type)
            or upload.upload_type,
            "upload_type_label": labels.ContentUpload.upload_type.verbose_name,
            "upload_date": upload.upload_date,
            "upload_date_label": labels.ContentUpload.upload_date.verbose_name,
            "file_size_string": file_size_string,
            "status_line": upload.status,
            "status_line_label": labels.ContentUpload.status.verbose_name,
            "processed_uploads": processed_uploads,
            "filemonitor": filemonitor,
        },
        context_instance=RequestContext(request),
    )


def content_download(request, content_id):
    content = models.Content.objects.get(pk=content_id)
    response = StreamingHttpResponse(open(content.file, "r"))
    response["Content-Type"] = "application/octet-stream"
    response["Content-Disposition"] = 'attachment; filename="%s"' % os.path.basename(
        content.file
    )
    return response


def upload_download(request, contentupload_id):
    upload = models.ContentUpload.objects.get(pk=contentupload_id)
    response = StreamingHttpResponse(open(upload.file_path, "r"))
    response["Content-Type"] = "application/octet-stream"
    response["Content-Disposition"] = 'attachment; filename="%s"' % os.path.basename(
        upload.file_path
    )
    return response


def content_add(request, hotspot=False):
    active_ref = None
    if request.method == "GET":
        active_ref = request.GET.get("reference", None)

    references = []
    # for ref in models.ReferenceGenome.objects.all():
    for ref in models.ReferenceGenome.objects.filter(
        index_version=settings.TMAP_VERSION
    ):
        references.append(
            {
                "long_name": ref.short_name + " - " + ref.name,
                "short_name": ref.short_name,
                "selected": ref.short_name == active_ref,
            }
        )

    if hotspot:
        title = ugettext_lazy("content_add_hotspots.title")
    else:
        title = ugettext_lazy("content_add_targetregions.title")

    return render_to_response(
        "rundb/ion_publisher_content_add.html",
        {
            "hotspot": hotspot,
            "references": references,
            "Content": labels.Content,
            "HotspotsContent": labels.HotspotsContent,
            "TargetRegionsContent": labels.TargetRegionsContent,
        },
        context_instance=RequestContext(request),
    )


def list_content(request):
    publishers = models.Publisher.objects.all()
    selected = request.GET.get("from", "")
    ctxd = {"publishers": publishers, "selected": selected}
    return render_to_response(
        "rundb/configure/contentupload_history.html",
        dictionary=ctxd,
        context_instance=RequestContext(request),
    )


def post_multipart(host, selector, fields, files):
    """Post fields and files to an HTTP host as multipart/form-data.
    fields is a sequence of (name, value) elements for regular form fields.
    files is a sequence of (name, filename, value) elements for data to be
    uploaded as files.
    Return the server's response page.
    """
    content_type, body = encode_multipart_formdata(fields, files)
    h = httplib.HTTP(host)
    h.putrequest("POST", selector)
    h.putheader("content-type", content_type)
    h.putheader("content-length", str(len(body)))
    h.endheaders()
    h.send(body)
    errcode, errmsg, headers = h.getreply()
    return h.file.read()


def encode_multipart_formdata(fields, files):
    """fields is a sequence of (name, value) elements for regular form fields.
    files is a sequence of (name, filename, value) elements for data to be
    uploaded as files.
    Return (content_type, body) ready for an httplib.HTTP instance.
    """
    BOUNDARY = "GlobalNumberOfPiratesDecreasing-GlobalTemperatureIncreasing"
    CRLF = "\r\n"
    request = []
    for (key, value) in fields:
        request.extend(
            [
                "--" + BOUNDARY,
                'Content-Disposition: form-data; name="%s"' % key,
                "",
                value,
            ]
        )
    for (key, filename, value) in files:
        request.extend(
            [
                "--" + BOUNDARY,
                'Content-Disposition: form-data; name="%s"; filename="%s"'
                % (key, filename),
                "Content-Type: %s" % get_content_type(filename),
                "",
                value,
            ]
        )
    request.append("--" + BOUNDARY + "--")
    request.append("")
    body = CRLF.join(request)
    content_type = "multipart/form-data; boundary=%s" % BOUNDARY
    return content_type, body


def get_content_type(filename):
    return mimetypes.guess_type(filename)[0] or "application/octet-stream"
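

# Usage sketch for the two helpers above (the host, selector, and field values
# are hypothetical; adjust them to the actual endpoint being posted to):
#
#     fields = [("meta", '{"hotspot": false}')]
#     files = [("file", "my_targets.bed", open("my_targets.bed", "rb").read())]
#     response_body = post_multipart("localhost", "/some/upload/endpoint/", fields, files)
#
# encode_multipart_formdata() can also be used on its own if the request is to
# be sent with a different HTTP client.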


@app.task
def publish_file(args, pub_name, meta):
    """This task will process a file downloaded via FileMonitor."""
    pub = models.Publisher.objects.get(name=pub_name)
    full_path, monitor_id = args
    if full_path:
        monitor = models.FileMonitor.objects.get(id=monitor_id)
        upload = move_upload(pub, full_path, monitor.name, meta)
        run_pub_scripts(pub, upload)


class call_api:
    def __init__(self):
        self.url = "http://localhost/rundb/api/v1/%s/"
        self.uri = "http://localhost/rundb/api/v1/%s/%s/"
        self.headers = {"Content-type": "application/json"}

    def post(self, where, **query):
        """Returns the API URI for the newly created item."""
        body = json.dumps(query, cls=JSONEncoder)
        item_url = self.url % where
        h = httplib2.Http()
        response, content = h.request(
            item_url, method="POST", body=body, headers=self.headers
        )
        return response["status"] == "201", response, content

    def patch(self, where, item_id, **update):
        """Returns True if successful; otherwise, False."""
        body = json.dumps(update, cls=JSONEncoder)
        item_uri = self.uri % (where, str(item_id))
        h = httplib2.Http()
        response, content = h.request(
            item_uri, method="PATCH", body=body, headers=self.headers
        )
        return response["status"] == "202", response, content

    def update_meta(self, meta, args):
        print("Updating Meta")
        meta_file_handle = open(args.meta_file, "w")
        json.dump(meta, meta_file_handle, cls=JSONEncoder, sort_keys=True, indent=4)
        meta_file_handle.close()
        self.patch("contentupload", args.upload_id, meta=meta)

    def get(self, where, **query):
        """Returns a JSON API result object."""
        body = urllib.urlencode(query)
        query_string = "%s?%s" % (self.url % where, body)
        h = httplib2.Http()
        response, content = h.request(
            query_string, method="GET", body=body, headers=self.headers
        )
        return json.loads(content), response, content

    def delete(self, where, item_id):
        """Returns a JSON API result object."""
        item_uri = self.uri % (where, str(item_id))
        h = httplib2.Http()
        response, content = h.request(item_uri, method="DELETE", headers=self.headers)
        return response["status"] == "204", response, content
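

# Usage sketch: how a publisher script might use this helper to update its
# ContentUpload record (the upload_id and field values are illustrative only):
#
#     api = call_api()
#     ok, response, content = api.patch("contentupload", upload_id, status="Validating")
#     result, response, content = api.get("contentupload", id=upload_id)
#     ok, response, content = api.delete("content", some_content_id)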