import matplotlib.pyplot as plt
 
def f(x):
    """Returns the figure of merit for the optimization.
    The function trains the classifier with the given hyperparameters on the training sample and
    calculates the AUC on the independent test sample.
    """
    g_options = general_options
    g_options.m_identifier = "test.xml"
    options = basf2_mva.FastBDTOptions()
    options.m_nTrees = int(x[0])
    options.m_nLevels = int(x[1])
    basf2_mva.teacher(g_options, options)
    # Apply the trained method to the independent test sample
    # (basf2_mva_util is assumed to be imported in the preamble above)
    m = basf2_mva_util.Method(g_options.m_identifier)
    p, t = m.apply_expert(test_data, general_options.m_treename)
    # gp_minimize minimizes the objective, so return the negative AUC to maximize it
    return -basf2_mva_util.calculate_auc_efficiency_vs_background_retention(p, t)
 
if __name__ == "__main__":

    training_data = basf2_mva.vector("train.root")
    test_data = basf2_mva.vector("test.root")

    general_options = basf2_mva.GeneralOptions()
    general_options.m_datafiles = training_data
    general_options.m_treename = "tree"
    general_options.m_variables = basf2_mva.vector('p', 'pz', 'daughter(0, kaonID)', 'chiProb', 'M')
    general_options.m_target_variable = "isSignal"

    # Start the Bayesian optimization of the hyperparameters.
    # The search ranges and the number of evaluations below are example values
    # (an assumption), not taken from the original listing.
    res = skopt.gp_minimize(f,                     # the function to minimize
                            [(10, 1000), (2, 6)],  # bounds for nTrees and nLevels
                            n_calls=20)            # number of evaluations of f

    # Visualize the optimization result
    skopt.plots.plot_convergence(res)
    plt.savefig('convergence.png')
    skopt.plots.plot_evaluations(res)
    plt.savefig('evaluations.png')
    skopt.plots.plot_objective(res)
    plt.savefig('objective.png')

    # Store the result of the optimization
    skopt.dump(res, 'opt-result.pkl')
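
    # Usage sketch (an assumption, not part of the original listing): the stored
    # result can be loaded back with skopt.load to inspect the best point found.
    loaded = skopt.load('opt-result.pkl')
    print('Best hyperparameters (nTrees, nLevels):', loaded.x)
    print('Best objective value (negative AUC):', loaded.fun)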
 
def calculate_auc_efficiency_vs_background_retention(p, t, w=None):