Skip to content

Commit 7d01efe

Browse files
committed
fixes
1 parent 898ee8b commit 7d01efe

2 files changed

Lines changed: 33 additions & 5 deletions

File tree

scoring/performance_profile.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -227,6 +227,9 @@ def get_workloads_time_to_target(submission,
227227
else:
228228
time_val = float('inf')
229229
time_vals_per_study.append(time_val)
230+
num_s = len(time_vals_per_study)
231+
print(f'TIME VALS PER STUDY: {num_s}')
232+
print(time_vals_per_study)
230233

231234
workloads.append({
232235
'submission': submission_name,

scoring/score_submissions.py

Lines changed: 30 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@
3232
'Path to submission directory containing experiment directories.')
3333
flags.DEFINE_string('output_dir',
3434
'scoring_results',
35-
'Path to save performance profile table and plot.')
35+
'Path to save performance profile artifacts, submission_summaries and results files.')
3636
flags.DEFINE_boolean('compute_performance_profiles',
3737
False,
3838
'Whether or not to compute the performance profiles.')
@@ -51,11 +51,16 @@
5151
None,
5252
'Filename to save the processed results that are fed into the performance profile functions.'
5353
)
54-
flags.DEFINE_boolean(
54+
flags.DEFINE_string(
5555
'load_results_from_filename',
5656
None,
5757
'Filename to load processed results from that are fed into performance profile functions'
5858
)
59+
flags.DEFINE_string(
60+
'exclude_submissions',
61+
'',
62+
    'Optional comma separated list of names of submissions to exclude from scoring.'
63+
)
5964
FLAGS = flags.FLAGS
6065

6166

@@ -128,6 +133,21 @@ def get_submission_summary(df, include_test_split=True):
128133
logging.info('\n' + tabulate(df, headers='keys', tablefmt='psql'))
129134
return df
130135

136+
def compute_leaderboard_score(df, normalize=False):
  """Score each submission as the area under its performance profile.

  Each row of ``df`` is integrated (trapezoidal rule) over the tau values
  given by the DataFrame's columns.

  Args:
    df: pd.DataFrame returned from `compute_performance_profiles`; rows are
      submissions and columns are tau values.
    normalize: if True, divide each integral by the span of the tau range.

  Returns:
    pd.DataFrame with a single 'score' column indexed by submission.
  """
  taus = df.columns
  area = np.trapz(df, x=taus)
  if normalize:
    tau_span = taus.max() - taus.min()
    area = area / tau_span
  return pd.DataFrame(area, columns=['score'], index=df.index)
150+
131151

132152
def main(_):
133153
results = {}
@@ -144,6 +164,8 @@ def main(_):
144164
for submission in os.listdir(
145165
os.path.join(FLAGS.submission_directory, team)):
146166
print(submission)
167+
if submission in FLAGS.exclude_submissions.split(','):
168+
continue
147169
experiment_path = os.path.join(FLAGS.submission_directory,
148170
team,
149171
submission)
@@ -185,10 +207,13 @@ def main(_):
185207
os.mkdir(FLAGS.output_dir)
186208
performance_profile.plot_performance_profiles(
187209
performance_profile_df, 'score', save_dir=FLAGS.output_dir)
188-
perf_df = tabulate(
210+
performance_profile_str = tabulate(
189211
performance_profile_df.T, headers='keys', tablefmt='psql')
190-
logging.info(f'Performance profile:\n {perf_df}')
191-
212+
logging.info(f'Performance profile:\n {performance_profile_str}')
213+
scores = compute_leaderboard_score(performance_profile_df)
214+
scores.to_csv(os.path.join(FLAGS.output_dir, 'scores.csv'))
215+
scores_str = tabulate(scores, headers='keys', tablefmt='psql')
216+
logging.info(f'Scores: \n {scores_str}')
192217

193218
if __name__ == '__main__':
194219
# flags.mark_flag_as_required('submission_directory')

0 commit comments

Comments
 (0)