import os
import subprocess
import traceback

from celery.utils.log import get_task_logger

from . import models  # the app's models module (assumed import path)


def run_pub_scripts(pub, upload):
    """Spawn subshells in which the Publisher's editing scripts are run, passing
    the upload's id, its folder, the uploaded file, and the metadata file as
    command-line args.
    """
    task_logger = get_task_logger(__name__)
    # Default stage label so the except block can always report a stage, even
    # if an exception is raised before the loop assigns one.
    stage_name = "setup"
    try:
        # TODO: Handle unique file upload instance particulars
        task_logger.info("Editing upload for %s" % pub.name)
        previous_status = upload.status
        upload_path = upload.file_path
        upload_dir = os.path.dirname(upload_path)
        meta_path = os.path.join(upload_dir, "meta.json")
        pub_dir = pub.path
        pub_scripts = pub.get_editing_scripts()
        for script_path, stage_name in pub_scripts:
            # If at some point in the loop one of the scripts changes the status,
            # we cease updating it automatically.
            if upload.status == previous_status:
                previous_status = stage_name
                upload.status = stage_name
                upload.save()
            upload_id = str(upload.id)
            cmd = [script_path, upload_id, upload_dir, upload_path, meta_path]
            logpath = os.path.join(upload_dir, "publisher.log")
            # Spawn the script subprocess and wait for it to complete, appending
            # its combined stdout/stderr to the upload's publisher.log.
            with open(logpath, "a") as log_out:
                proc = subprocess.Popen(
                    cmd, stdout=log_out, stderr=subprocess.STDOUT, cwd=pub_dir
                )
                success = proc.wait() == 0
            # The script may have updated the upload during execution, so reload it.
            upload = models.ContentUpload.objects.get(pk=upload.pk)
            if success:
                task_logger.info(
                    "Editing upload for %s finished %s" % (pub.name, script_path)
                )
            else:
                task_logger.error(
                    "Editing for %s died during %s." % (pub.name, script_path)
                )
                upload.status = "Error: %s" % stage_name
                upload.save()
            # If either the script or the code above set the status to anything
            # starting with "Error" or "Waiting", abort further processing here.
            if upload.status.startswith("Error") or upload.status.startswith("Waiting"):
                return
        # At this point every script has finished running and we have not returned
        # early due to an error, alright!
        upload.status = "Successfully Completed"
        upload.save()
    except Exception:
        tb = "\n".join("    " + s for s in traceback.format_exc().split("\n"))
        task_logger.error(
            "Exception in %s upload %d during %s\n%s"
            % (pub.name, upload.id, stage_name, tb)
        )
        upload.status = "Error: processing failed."
        upload.save()
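For context, here is roughly what the other side of that command-line contract looks like. The sketch below is a hypothetical editing script (not part of this codebase) that unpacks its arguments in the same order `cmd` builds them: the upload id, the upload's folder, the uploaded file's path, and the path to `meta.json`. A non-zero exit status is what marks the stage as failed above.

    import json
    import sys


    def main(argv):
        # Argument order mirrors cmd above: upload id, upload dir, upload path, meta path.
        upload_id, upload_dir, upload_path, meta_path = argv[1:5]
        with open(meta_path) as meta_file:
            meta = json.load(meta_file)
        # ... edit the files under upload_dir; anything printed here ends up in
        # publisher.log, because run_pub_scripts redirects stdout and stderr ...
        print("Processed upload %s (%d metadata keys)" % (upload_id, len(meta)))
        return 0  # non-zero would leave the upload in "Error: <stage name>"


    if __name__ == "__main__":
        sys.exit(main(sys.argv))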
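The use of `get_task_logger` suggests this function runs inside a Celery worker. A minimal sketch of one way it might be wired up, assuming a `Publisher` model and a `shared_task` wrapper that are not shown in this snippet:

    from celery import shared_task


    @shared_task
    def edit_upload(pub_id, upload_id):
        # Passing primary keys rather than model instances keeps the task
        # arguments serializable; fresh objects are fetched inside the worker.
        pub = models.Publisher.objects.get(pk=pub_id)  # hypothetical model name
        upload = models.ContentUpload.objects.get(pk=upload_id)
        run_pub_scripts(pub, upload)


    # Caller side, e.g. in the view that accepted the upload:
    # edit_upload.delay(pub.id, upload.id)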