#!/usr/bin/env python3

# Programming contest management system
# Copyright © 2017 Kiarash Golezardi <kiarashgolezardi@gmail.com>
# Copyright © 2017 Amir Keivan Mohtashami <akmohtashami97@gmail.com>
# Copyright © 2018 Stefano Maggiolo <s.maggiolo@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import json
import logging
import os
import re
import subprocess
from datetime import timedelta

from cms.db import Task, Dataset, Manager, Testcase, Attachment, Statement

from .base_loader import TaskLoader


logger = logging.getLogger(__name__)


def make_timedelta(t):
    return timedelta(seconds=t)


class TpsTaskLoader(TaskLoader):
    """Loader for TPS exported tasks.
    """

    short_name = 'tps_task'
    description = 'TPS task format'

    @staticmethod
    def detect(path):
        """See docstring in class Loader.
        """
        return os.path.exists(os.path.join(path, "problem.json"))

    def task_has_changed(self):
        """See docstring in class Loader.
        """
        return True
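
    # The helper below translates the TPS 'task_type_params' JSON object into
    # the flat parameter list expected by the corresponding CMS task type.
    # Keys follow the 'task_type_parameters_<TaskType>_<option>' naming scheme
    # used in the lookups below, and missing keys fall back to defaults; e.g.
    # a Batch task with an empty parameter object resolves to
    # ['grader', ['', ''], evaluation_param].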
    def _get_task_type_parameters(self, data, task_type, evaluation_param):
        parameters_str = data['task_type_params']
        if parameters_str is None or parameters_str == '':
            parameters_str = '{}'
        task_type_parameters = json.loads(parameters_str)
        par_prefix = 'task_type_parameters_%s' % task_type

        if task_type == 'Batch':
            par_compilation = '%s_compilation' % par_prefix
            par_input = '%s_io_0_inputfile' % par_prefix
            par_output = '%s_io_1_outputfile' % par_prefix
            par_user_managers = "%s_user_managers" % par_prefix
            if par_compilation not in task_type_parameters:
                task_type_parameters[par_compilation] = 'grader'
            if par_input not in task_type_parameters:
                task_type_parameters[par_input] = ''
            if par_output not in task_type_parameters:
                task_type_parameters[par_output] = ''
            if par_user_managers not in task_type_parameters:
                pas_grader = os.path.join(
                    self.path, 'graders', 'graderlib.pas')
                user_managers = ('['
                                 + '\\"grader.cpp\\"' + ', '
                                 + '\\"grader.java\\"' + ', '
                                 + '\\"graderlib.pas\\"'
                                 + ']')
                if not os.path.exists(pas_grader):
                    user_managers = '[\\"grader.%l\\"]'
                task_type_parameters[par_user_managers] = user_managers
            return [
                task_type_parameters[par_compilation],
                [task_type_parameters[par_input],
                 task_type_parameters[par_output]],
                evaluation_param,
            ]

        if task_type == 'Communication':
            par_processes = '%s_num_processes' % par_prefix
            if par_processes not in task_type_parameters:
                task_type_parameters[par_processes] = 1
            return [task_type_parameters[par_processes], "stub", "fifo_io"]

        if task_type == 'TwoSteps' or task_type == 'OutputOnly':
            return [evaluation_param]

        return []
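
    # get_task() drives the whole import: it reads problem.json, uploads the
    # statements, attachments, graders, checker and testcases through the
    # file cacher, and returns a Task with a single "Default" dataset attached.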
    def get_task(self, get_statement=True):
        """See docstring in class Loader.
        """
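        # problem.json is the entry point of a TPS export. The fields used
        # below are 'code', 'name', 'task_type', 'time_limit', 'memory_limit',
        # 'task_type_params' and, optionally, 'score_precision'; a minimal,
        # purely illustrative example could look like:
        #   {"code": "sum", "name": "Summation", "task_type": "Batch",
        #    "time_limit": 1.0, "memory_limit": 256, "task_type_params": "{}"}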
        json_src = os.path.join(self.path, 'problem.json')
        if not os.path.exists(json_src):
            logger.critical('No task found.')
            raise OSError('No task found at path %s' % json_src)
        with open(json_src, 'rt', encoding='utf-8') as json_file:
            data = json.load(json_file)

        name = data['code']
        logger.info("Loading parameters for task %s.", name)

        args = {}

        args["name"] = name
        args["title"] = data['name']
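
        # Statement PDFs are looked up in statements/<locale>.pdf (for example
        # en_US.pdf); the locale is taken from the file name, and en_US, when
        # present, becomes the primary statement.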
        # Statements
        if get_statement:
            statements_dir = os.path.join(self.path, 'statements')
            if os.path.exists(statements_dir):
                statements = [
                    filename
                    for filename in os.listdir(statements_dir)
                    if filename[-4:] == ".pdf"]
                if len(statements) > 0:
                    args['statements'] = dict()
                    logger.info('Statements found')

                for statement in statements:
                    language = statement[:-4]
                    if language == "en_US":
                        args["primary_statements"] = ["en_US"]
                    digest = self.file_cacher.put_file_from_path(
                        os.path.join(statements_dir, statement),
                        "Statement for task %s (lang: %s)" %
                        (name, language))
                    args['statements'][language] = Statement(language, digest)

        # Attachments
        args["attachments"] = dict()
        attachments_dir = os.path.join(self.path, 'attachments')
        if os.path.exists(attachments_dir):
            logger.info("Attachments found")
            for filename in os.listdir(attachments_dir):
                digest = self.file_cacher.put_file_from_path(
                    os.path.join(attachments_dir, filename),
                    "Attachment %s for task %s" % (filename, name))
                args["attachments"][filename] = Attachment(filename, digest)

        data["task_type"] = \
            data["task_type"][0].upper() + data["task_type"][1:]
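
        # Testcases are expected as tests/<codename>.in with a matching
        # tests/<codename>.out; for OutputOnly tasks the codenames also define
        # the submission format (one <codename>.out file per testcase).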
        # Setting the submission format
        # Obtaining testcases' codename
        testcases_dir = os.path.join(self.path, 'tests')
        if not os.path.exists(testcases_dir):
            logger.warning('Testcase folder was not found')
            testcase_codenames = []
        else:
            testcase_codenames = sorted([
                filename[:-3]
                for filename in os.listdir(testcases_dir)
                if filename[-3:] == '.in'])
        if data["task_type"] == 'OutputOnly':
            args["submission_format"] = list()
            for codename in testcase_codenames:
                args["submission_format"].append("%s.out" % codename)
        elif data["task_type"] == 'Notice':
            args["submission_format"] = list()
        else:
            args["submission_format"] = ["%s.%%l" % name]

        # These options cannot be configured in the TPS format.
        # Uncomment the following to set specific values for them.

        # args['max_user_test_number'] = 10
        # args['min_user_test_interval'] = make_timedelta(60)

        # args['token_mode'] = 'infinite'
        # args['token_max_number'] = 100
        # args['token_min_interval'] = make_timedelta(60)
        # args['token_gen_initial'] = 1
        # args['token_gen_number'] = 1
        # args['token_gen_interval'] = make_timedelta(1800)
        # args['token_gen_max'] = 2

        if "score_precision" in data:
            args['score_precision'] = int(data["score_precision"])
        else:
            args['score_precision'] = 2
        args['max_submission_number'] = 50
        args['max_user_test_number'] = 50
        if data["task_type"] == 'OutputOnly':
            args['max_submission_number'] = 100
            args['max_user_test_number'] = 100
        args['min_submission_interval'] = make_timedelta(60)
        args['min_user_test_interval'] = make_timedelta(60)

        task = Task(**args)

        args = dict()

        args["task"] = task
        args["description"] = "Default"
        args["autojudge"] = True

        if data['task_type'] != 'OutputOnly' \
                and data['task_type'] != 'Notice':
            args["time_limit"] = float(data['time_limit'])
            args["memory_limit"] = int(data['memory_limit'])

        args["managers"] = {}
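
        # If checker/checker.cpp exists it is compiled statically with g++ and
        # registered as the 'checker' manager, and the evaluation parameter
        # passed to _get_task_type_parameters becomes 'comparator'; otherwise
        # plain 'diff' (white-diff) output checking is used.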
        # Checker
        checker_dir = os.path.join(self.path, "checker")
        checker_src = os.path.join(checker_dir, "checker.cpp")

        if os.path.exists(checker_src):
            logger.info("Checker found, compiling")
            checker_exe = os.path.join(checker_dir, "checker")
            ret = subprocess.call([
                "g++", "-x", "c++", "-std=gnu++14", "-O2", "-static",
                "-o", checker_exe, checker_src
            ])
            if ret != 0:
                logger.critical("Could not compile checker")
                return None
            digest = self.file_cacher.put_file_from_path(
                checker_exe,
                "Manager for task %s" % name)
            args["managers"]['checker'] = Manager("checker", digest)
            evaluation_param = "comparator"
        else:
            logger.info("Checker not found, using diff if necessary")
            evaluation_param = "diff"

        # Note that the original TPS worked with custom task type Batch2017
        # and Communication2017 instead of Batch and Communication.
        args["task_type"] = data['task_type']
        args["task_type_parameters"] = \
            self._get_task_type_parameters(
                data, data['task_type'], evaluation_param)
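
        # Every file under graders/ (except manager.cpp, which is handled
        # separately below) is uploaded as a manager; for Communication tasks
        # the grader.* sources are renamed to stub.*, which is the name the
        # CMS Communication task type expects.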
        # Graders
        graders_dir = os.path.join(self.path, 'graders')

        if data['task_type'] == 'TwoSteps':
            pas_manager = name + 'lib.pas'
            pas_manager_path = os.path.join(graders_dir, pas_manager)
            if not os.path.exists(pas_manager_path):
                digest = self.file_cacher.put_file_content(
                    ''.encode('utf-8'), 'Pascal manager for task %s' % name)
                args["managers"][pas_manager] = Manager(pas_manager, digest)

        if not os.path.exists(graders_dir):
            logger.warning('Grader folder was not found')
            graders_list = []
        else:
            graders_list = \
                [filename
                 for filename in os.listdir(graders_dir)
                 if filename != 'manager.cpp']
        for grader_name in graders_list:
            grader_src = os.path.join(graders_dir, grader_name)
            digest = self.file_cacher.put_file_from_path(
                grader_src,
                "Manager for task %s" % name)
            if data['task_type'] == 'Communication' \
                    and os.path.splitext(grader_name)[0] == 'grader':
                grader_name = 'stub' + os.path.splitext(grader_name)[1]
            args["managers"][grader_name] = Manager(grader_name, digest)

        # Manager
        manager_src = os.path.join(graders_dir, 'manager.cpp')

        if os.path.exists(manager_src):
            logger.info("Manager found, compiling")
            manager_exe = os.path.join(graders_dir, "manager")
            ret = subprocess.call([
                "g++", "-x", "c++", "-O2", "-static",
                "-o", manager_exe, manager_src
            ])
            if ret != 0:
                logger.critical("Could not compile manager")
                return None
            digest = self.file_cacher.put_file_from_path(
                manager_exe,
                "Manager for task %s" % name)
            args["managers"]["manager"] = Manager("manager", digest)

        # Testcases
        args["testcases"] = {}

        for codename in testcase_codenames:
            infile = os.path.join(testcases_dir, "%s.in" % codename)
            outfile = os.path.join(testcases_dir, "%s.out" % codename)
            if not os.path.exists(outfile):
                logger.critical(
                    'Could not find the output file for testcase %s', codename)
                logger.critical('Aborting...')
                return

            input_digest = self.file_cacher.put_file_from_path(
                infile,
                "Input %s for task %s" % (codename, name))
            output_digest = self.file_cacher.put_file_from_path(
                outfile,
                "Output %s for task %s" % (codename, name))
            testcase = Testcase(codename, True,
                                input_digest, output_digest)
            args["testcases"][codename] = testcase
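
        # Subtask definitions are read from the files in subtasks/; each JSON
        # file is expected to carry at least a 'score' and a 'testcases' list,
        # e.g. {"score": 30, "testcases": ["1-01", "1-02"]} (illustrative
        # values). Without subtasks, the 100 points are split evenly across
        # the testcases using the Sum score type.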
        # Score Type
        subtasks_dir = os.path.join(self.path, 'subtasks')
        if not os.path.exists(subtasks_dir):
            logger.warning('Subtask folder was not found')
            subtasks = []
        else:
            subtasks = sorted(os.listdir(subtasks_dir))

        if len(subtasks) == 0:
            number_tests = max(len(testcase_codenames), 1)
            args["score_type"] = "Sum"
            args["score_type_parameters"] = 100 / number_tests
        else:
            args["score_type"] = "GroupMin"
            parsed_data = []
            subtask_no = -1
            add_optional_name = False
            for subtask in subtasks:
                subtask_no += 1
                with open(os.path.join(subtasks_dir, subtask), 'rt',
                          encoding='utf-8') as subtask_json:
                    subtask_data = json.load(subtask_json)
                    score = int(subtask_data["score"])
                    testcases = "|".join(
                        re.escape(testcase)
                        for testcase in subtask_data["testcases"]
                    )
                    optional_name = "Subtask %d" % subtask_no
                    if subtask_no == 0 and score == 0:
                        add_optional_name = True
                        optional_name = "Samples"
                    if add_optional_name:
                        parsed_data.append([score, testcases, optional_name])
                    else:
                        parsed_data.append([score, testcases])
            args["score_type_parameters"] = parsed_data

        dataset = Dataset(**args)
        task.active_dataset = dataset

        logger.info("Task parameters loaded.")

        return task