from torch.nn.init import kaiming_uniform_
from torch.nn.init import xavier_uniform_
from torch.utils.data import Dataset
from torch.utils.data import random_split
from sklearn.preprocessing import LabelEncoder
from sklearn.preprocessing import PolynomialFeatures
Dataloader for PID prior probability training.

Attributes:
    x (np.array): Array containing feature data with a second-order combination of momentum, cos(theta) and transverse momentum.
    y (np.array): Array containing the label-encoded PDG values.
def __init__(self, path: str, key: str, particlelist: list, labels: list):

Initialize the dataloader for PID prior training.

Parameters:
    path (str): Path to the ROOT file containing the data.
    key (str): Key (i.e. path) of the tree within the ROOT file.
    particlelist (list(int)): List of particle PDG values for which the model has to be trained.
    labels (list(str)): Labels of the pandas columns containing cos(theta), momentum and PDG values (in this order).
# read the requested columns from the ROOT tree into a pandas dataframe
data = data[key].pandas.df(labels)
df = data.dropna().reset_index(drop=True)
# use absolute PDG values and keep only the particles requested for training
df.loc[:, labels[2]] = df.loc[:, labels[2]].abs()
droplist = np.setdiff1d(np.unique(df[labels[2]].values), particlelist)
for i in droplist:
    df = df.drop(df.loc[df[labels[2]] == i].index).reset_index(drop=True)
# base features: cos(theta), momentum and the derived transverse momentum
x = df[[labels[0], labels[1]]].values
x = np.hstack((x, (np.sin(np.arccos(x[:, 0])) * x[:, 1]).reshape(-1, 1)))
# second-order polynomial combinations of the three base features
pol = PolynomialFeatures(2, include_bias=False)
x = pol.fit_transform(x)
self.x = x.astype("float32")
# label-encode the PDG values for classification
y = df[labels[2]].values
le = LabelEncoder()
y = le.fit_transform(y)
self.y = y.astype("int64")
def __getitem__(self, index):

Function to get feature and label tensors at the given index location.

Parameters:
    index (int): The index of the required tensors.

Returns:
    Tensors of features and labels at the given index.

return [self.x[index], self.y[index]]
def __len__(self):

Function to obtain the length of the dataset.

Returns:
    Number of feature sets.

return len(self.x)
def get_split(self, n_test: float = 0.1) -> list:

Split the input data into training and validation sets.

Parameters:
    n_test (float): Fraction of the data to be placed in the validation set.

Returns:
    A random split of the data set into training and validation subsets, with the validation size given by 'n_test'.

test_size = round(n_test * len(self.x))
train_size = len(self.x) - test_size
return random_split(self, [train_size, test_size])
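A minimal usage sketch: the two subsets returned by get_split can be fed directly to PyTorch DataLoaders. The class name PriorDataLoader and the file, tree, column and PDG values below are illustrative assumptions, not fixed by this listing.

from torch.utils.data import DataLoader

# all names below (class, file, tree, columns, PDG codes) are illustrative assumptions
dataset = PriorDataLoader("priors.root", "tree", [211, 321, 2212],
                          ["cosTheta", "p", "mcPDG"])
train_set, val_set = dataset.get_split(n_test=0.1)

train_loader = DataLoader(train_set, batch_size=256, shuffle=True)
val_loader = DataLoader(val_set, batch_size=256, shuffle=False)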
PyTorch model for PID prior probability calculation.

Attributes:
    hidden1: Linear layer with 9 inputs and 128 outputs.
    act1: A ReLU activation layer.
    hidden2: A batch normalization layer.
    hidden3: Linear layer with 128 inputs and 64 outputs.
    act2: A ReLU activation layer.
    hidden4: A batch normalization layer.
    hidden5: Linear layer with 64 inputs and 32 outputs.
    act3: A ReLU activation layer.
    hidden6: A batch normalization layer.
    hidden7: Linear layer with 32 inputs and one output for each particle in the particlelist.
    act4: A softmax activation layer.
def __init__(self, n_output: int):

Initialize the PID prior probability model.

Parameters:
    n_output (int): Number of output nodes.
# Kaiming initialization for the ReLU-activated hidden layers
kaiming_uniform_(self.hidden1.weight, nonlinearity="relu")
kaiming_uniform_(self.hidden3.weight, nonlinearity="relu")
kaiming_uniform_(self.hidden5.weight, nonlinearity="relu")
# Xavier initialization for the softmax output layer
xavier_uniform_(self.hidden7.weight)
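The layer constructions that these initializations refer to are not part of this excerpt. A minimal sketch consistent with the documented layer sizes (an assumption, not the verbatim source; the batch-norm feature counts are inferred from the preceding linear layers):

from torch.nn import Linear, ReLU, BatchNorm1d, Softmax

self.hidden1 = Linear(9, 128)
self.act1 = ReLU()
self.hidden2 = BatchNorm1d(128)
self.hidden3 = Linear(128, 64)
self.act2 = ReLU()
self.hidden4 = BatchNorm1d(64)
self.hidden5 = Linear(64, 32)
self.act3 = ReLU()
self.hidden6 = BatchNorm1d(32)
self.hidden7 = Linear(32, n_output)
self.act4 = Softmax(dim=1)       # normalize the outputs to prior probabilities per particle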
def forward(self, x: torch.tensor) -> torch.tensor:

Gives PID prior probabilities for the input features.

Parameters:
    x (torch.tensor): A 2D tensor containing the features for a particle as a row.

Returns:
    A torch tensor containing PID prior probabilities for the provided features.
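A sketch of the forward chain implied by the documented layer order, followed by a toy call; the exact sequence, the class name PriorModel and the reuse of the hypothetical 'dataset' from the earlier sketch are assumptions for illustration.

def forward(self, x: torch.tensor) -> torch.tensor:
    # linear -> ReLU -> batch norm, repeated three times, then softmax over the output nodes
    x = self.hidden2(self.act1(self.hidden1(x)))
    x = self.hidden4(self.act2(self.hidden3(x)))
    x = self.hidden6(self.act3(self.hidden5(x)))
    return self.act4(self.hidden7(x))

# toy usage: one output node per particle species in 'particlelist'
model = PriorModel(n_output=3)
model.eval()                                          # freeze batch-norm statistics for inference
with torch.no_grad():
    priors = model(torch.from_numpy(dataset.x[:5]))   # each row sums to 1 across the species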