From 2c8cd7a550faca0fc450e4159a4a874d4795ac25 Mon Sep 17 00:00:00 2001
From: Kuang Lu
Date: Fri, 16 Nov 2018 16:18:23 -0500
Subject: [PATCH] Fine tuning tiebreaker + performances update (#479)

* add filename tiebreaker

* update performances

* added comments specifying the sorting order
---
 src/main/python/fine_tuning/xfold.py       |  3 ++-
 src/main/resources/fine_tuning/models.yaml | 12 ++++++------
 2 files changed, 8 insertions(+), 7 deletions(-)

diff --git a/src/main/python/fine_tuning/xfold.py b/src/main/python/fine_tuning/xfold.py
index 870a352dda..04e2931133 100644
--- a/src/main/python/fine_tuning/xfold.py
+++ b/src/main/python/fine_tuning/xfold.py
@@ -106,8 +106,9 @@ def tune(self,verbose):
             if param not in training_data:
                 training_data[param] = .0
             training_data[param] += fold_performance[param]
+        # sort in descending order based on performance first, then use filenames(x[0]) to break ties
         sorted_training_performance = sorted(training_data.items(),
-                                             key=lambda x:x[1],
+                                             key=lambda x:(x[1], x[0]),
                                              reverse=True)
         best_param = sorted_training_performance[0][0]
         if verbose:
diff --git a/src/main/resources/fine_tuning/models.yaml b/src/main/resources/fine_tuning/models.yaml
index afe5c89d92..28eb306881 100644
--- a/src/main/resources/fine_tuning/models.yaml
+++ b/src/main/resources/fine_tuning/models.yaml
@@ -208,16 +208,16 @@ models:
   robust04:
     map:
       best_avg: 0.3020
-      oracles_per_topic: 0.4343
+      oracles_per_topic: 0.4402
       2-fold: 0.2973
       5-fold: 0.2956
     P_20:
       best_avg: 0.4012
-      oracles_per_topic: 0.5960
-      2-fold: 0.3779
+      oracles_per_topic: 0.6054
+      2-fold: 0.3871
       5-fold: 0.3931
     ndcg20:
       best_avg: 0.44958
-      oracles_per_topic: 0.6606
-      2-fold: 0.4386
-      5-fold: 0.4410
+      oracles_per_topic: 0.6702
+      2-fold: 0.4358
+      5-fold: 0.4402