# Belle II Software light-2406-ragdoll
# xgboost_default.py
#!/usr/bin/env python3

import basf2_mva
import basf2_mva_util
import time

if __name__ == "__main__":
    from basf2 import conditions, find_file
    # NOTE: do not use testing payloads in production! Any results obtained like this WILL NOT BE PUBLISHED
    conditions.testing_payloads = [
        'localdb/database.txt'
    ]

    # Candidate-level input features used to separate signal from background.
    variables = ['M', 'p', 'pt', 'pz',
                 'daughter(0, p)', 'daughter(0, pz)', 'daughter(0, pt)',
                 'daughter(1, p)', 'daughter(1, pz)', 'daughter(1, pt)',
                 'daughter(2, p)', 'daughter(2, pz)', 'daughter(2, pt)',
                 'chiProb', 'dr', 'dz',
                 'daughter(0, dr)', 'daughter(1, dr)',
                 'daughter(0, dz)', 'daughter(1, dz)',
                 'daughter(0, chiProb)', 'daughter(1, chiProb)', 'daughter(2, chiProb)',
                 'daughter(0, kaonID)', 'daughter(0, pionID)',
                 'daughterInvM(0, 1)', 'daughterInvM(0, 2)', 'daughterInvM(1, 2)']

    train_file = find_file("mva/train_D0toKpipi.root", "examples")
    test_file = find_file("mva/test_D0toKpipi.root", "examples")

    training_data = basf2_mva.vector(train_file)
    testing_data = basf2_mva.vector(test_file)

    # Options common to every MVA method: input files, tree name,
    # weightfile identifier, feature list and the truth target.
    general_options = basf2_mva.GeneralOptions()
    general_options.m_datafiles = training_data
    general_options.m_treename = "tree"
    general_options.m_identifier = "XGBoost"
    general_options.m_variables = basf2_mva.vector(*variables)
    general_options.m_target_variable = "isSignal"

    # Python-framework options: delegate the training to xgboost,
    # configured through a JSON-encoded parameter string.
    specific_options = basf2_mva.PythonOptions()
    specific_options.m_steering_file = 'mva/examples/python/xgboost_default.py'
    specific_options.m_framework = "xgboost"
    param = ('{"max_depth": 3, "eta": 0.1, "silent": 1, "objective": "binary:logistic",'
             '"subsample": 0.5, "nthread": 1, "nTrees": 100}')
    specific_options.m_config = param

    # Train on the training sample, then apply the trained expert to the
    # independent test sample, timing both steps for the summary line below.
    training_start = time.time()
    basf2_mva.teacher(general_options, specific_options)
    training_stop = time.time()
    training_time = training_stop - training_start
    method = basf2_mva_util.Method(general_options.m_identifier)
    inference_start = time.time()
    p, t = method.apply_expert(testing_data, general_options.m_treename)
    inference_stop = time.time()
    inference_time = inference_stop - inference_start
    # FIX: 'auc' was printed below but never assigned (the computation line
    # was missing), which raised a NameError at runtime. Compute it from the
    # expert's predictions (p) and the truth labels (t).
    auc = basf2_mva_util.calculate_auc_efficiency_vs_background_retention(p, t)
    print("XGBoost", training_time, inference_time, auc)
# NOTE(review): truncated fragment left over from extraction — the full
# definition lives in basf2_mva_util; kept here as a comment for reference:
# def calculate_auc_efficiency_vs_background_retention(p, t, w=None)