Skip to content

Commit 4425f7b

Browse files
birka0 authored and veluca93 committed
TPS loader: support batch-and-output task type
1 parent 1f5ed0d commit 4425f7b

2 files changed

Lines changed: 49 additions & 28 deletions

File tree

AUTHORS.txt

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -49,6 +49,7 @@ Benjamin Swart <Benjaminswart@email.cz>
4949
Andrey Vihrov <andrey.vihrov@gmail.com>
5050
Grace Hawkins <amoomajid99@gmail.com>
5151
Pasit Sangprachathanarak <ouipingpasit@gmail.com>
52+
Zsolt Németh <birka0@gmail.com>
5253

5354
And many other people that didn't write code, but provided useful
5455
comments, suggestions and feedback. :-)

cmscontrib/loaders/tps.py

Lines changed: 48 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -66,7 +66,7 @@ def _get_task_type_parameters(self, data, task_type, evaluation_param):
6666
task_type_parameters = json.loads(parameters_str)
6767
par_prefix = 'task_type_parameters_%s' % task_type
6868

69-
if task_type == 'Batch':
69+
if task_type == 'Batch' or task_type == 'BatchAndOutput':
7070
par_compilation = '%s_compilation' % par_prefix
7171
par_input = '%s_io_0_inputfile' % par_prefix
7272
par_output = '%s_io_1_outputfile' % par_prefix
@@ -88,12 +88,15 @@ def _get_task_type_parameters(self, data, task_type, evaluation_param):
8888
if not os.path.exists(pas_grader):
8989
user_managers = '[\\"grader.%l\\"]'
9090
task_type_parameters[par_user_managers] = user_managers
91-
return [
91+
param_list = [
9292
task_type_parameters[par_compilation],
9393
[task_type_parameters[par_input],
9494
task_type_parameters[par_output]],
9595
evaluation_param,
9696
]
97+
if task_type == 'BatchAndOutput':
98+
param_list.append(','.join(data["output_only_testcases"]))
99+
return param_list
97100

98101
if task_type == 'Communication':
99102
par_processes = '%s_num_processes' % par_prefix
@@ -170,7 +173,19 @@ def get_task(self, get_statement=True):
170173
data["task_type"] = \
171174
data["task_type"][0].upper() + data["task_type"][1:]
172175

173-
# Setting the submission format
176+
# Parse subtask data
177+
subtasks_dir = os.path.join(self.path, 'subtasks')
178+
subtask_data = {}
179+
if not os.path.exists(subtasks_dir):
180+
logger.warning('Subtask folder was not found')
181+
subtasks = []
182+
else:
183+
subtasks = sorted(os.listdir(subtasks_dir))
184+
for subtask in subtasks:
185+
with open(os.path.join(subtasks_dir, subtask), 'rt',
186+
encoding='utf-8') as subtask_json:
187+
subtask_data[subtask] = json.load(subtask_json)
188+
174189
# Obtaining testcases' codename
175190
testcases_dir = os.path.join(self.path, 'tests')
176191
if not os.path.exists(testcases_dir):
@@ -181,12 +196,26 @@ def get_task(self, get_statement=True):
181196
filename[:-3]
182197
for filename in os.listdir(testcases_dir)
183198
if filename[-3:] == '.in'])
199+
if data["task_type"] == 'BatchAndOutput':
200+
output_only_testcases = {}
201+
for cur_subtask_data in subtask_data.values():
202+
is_output_only = cur_subtask_data.get('output_only', False)
203+
if is_output_only:
204+
codenames = cur_subtask_data.get('testcases', list())
205+
output_only_testcases.update(codenames)
206+
data["output_only_testcases"] = output_only_testcases
207+
208+
# Setting the submission format
184209
if data["task_type"] == 'OutputOnly':
185210
args["submission_format"] = list()
186211
for codename in testcase_codenames:
187212
args["submission_format"].append("output_%s.txt" % codename)
188213
elif data["task_type"] == 'Notice':
189214
args["submission_format"] = list()
215+
elif data["task_type"] == "BatchAndOutput":
216+
args["submission_format"] = ["%s.%%l" % name]
217+
for codename in data["output_only_testcases"]:
218+
args["submission_format"].append("output_%s.txt" % codename)
190219
else:
191220
args["submission_format"] = ["%s.%%l" % name]
192221

@@ -240,7 +269,7 @@ def get_task(self, get_statement=True):
240269
logger.info("Checker found, compiling")
241270
checker_exe = os.path.join(checker_dir, "checker")
242271
ret = subprocess.call([
243-
"g++", "-x", "c++", "-std=gnu++17", "-O2", "-static",
272+
"g++", "-x", "c++", "-std=gnu++20", "-O2", "-static",
244273
"-o", checker_exe, checker_src
245274
])
246275
if ret != 0:
@@ -298,7 +327,7 @@ def get_task(self, get_statement=True):
298327
logger.info("Manager found, compiling")
299328
manager_exe = os.path.join(graders_dir, "manager")
300329
ret = subprocess.call([
301-
"g++", "-x", "c++", "-std=gnu++17", "-O2", "-static",
330+
"g++", "-x", "c++", "-std=gnu++20", "-O2", "-static",
302331
"-o", manager_exe, manager_src
303332
])
304333
if ret != 0:
@@ -332,13 +361,6 @@ def get_task(self, get_statement=True):
332361
args["testcases"][codename] = testcase
333362

334363
# Score Type
335-
subtasks_dir = os.path.join(self.path, 'subtasks')
336-
if not os.path.exists(subtasks_dir):
337-
logger.warning('Subtask folder was not found')
338-
subtasks = []
339-
else:
340-
subtasks = sorted(os.listdir(subtasks_dir))
341-
342364
if len(subtasks) == 0:
343365
number_tests = max(len(testcase_codenames), 1)
344366
args["score_type"] = "Sum"
@@ -350,22 +372,20 @@ def get_task(self, get_statement=True):
350372
add_optional_name = False
351373
for subtask in subtasks:
352374
subtask_no += 1
353-
with open(os.path.join(subtasks_dir, subtask), 'rt',
354-
encoding='utf-8') as subtask_json:
355-
subtask_data = json.load(subtask_json)
356-
score = int(subtask_data["score"])
357-
testcases = "|".join(
358-
re.escape(testcase)
359-
for testcase in subtask_data["testcases"]
360-
)
361-
optional_name = "Subtask %d" % subtask_no
362-
if subtask_no == 0 and score == 0:
363-
add_optional_name = True
364-
optional_name = "Samples"
365-
if add_optional_name:
366-
parsed_data.append([score, testcases, optional_name])
367-
else:
368-
parsed_data.append([score, testcases])
375+
cur_subtask_data = subtask_data[subtask]
376+
score = int(cur_subtask_data["score"])
377+
testcases = "|".join(
378+
re.escape(testcase)
379+
for testcase in cur_subtask_data["testcases"]
380+
)
381+
optional_name = "Subtask %d" % subtask_no
382+
if subtask_no == 0 and score == 0:
383+
add_optional_name = True
384+
optional_name = "Samples"
385+
if add_optional_name:
386+
parsed_data.append([score, testcases, optional_name])
387+
else:
388+
parsed_data.append([score, testcases])
369389
args["score_type_parameters"] = parsed_data
370390

371391
dataset = Dataset(**args)

0 commit comments

Comments (0)