Analysis Software
Documentation for sPHENIX simulation software
eback_PDEFoam.class.C
Go to the documentation of this file, or view the newest version of eback_PDEFoam.class.C in the sPHENIX GitHub repository.
// Class: ReadPDEFoam
// Automatically generated by MethodBase::MakeClass
//

/* configuration options =====================================================

#GEN -*-*-*-*-*-*-*-*-*-*-*- general info -*-*-*-*-*-*-*-*-*-*-*-

Method : PDEFoam::PDEFoam
TMVA Release : 4.2.0 [262656]
ROOT Release : 5.34/38 [336422]
Creator : vassalli
Date : Wed Jan 23 17:52:07 2019
Host : Linux cvmfswrite02.sdcc.bnl.gov 3.10.0-693.11.6.el7.x86_64 #1 SMP Wed Jan 3 18:09:42 CST 2018 x86_64 x86_64 x86_64 GNU/Linux
Dir : /direct/phenix+u/vassalli/sphenix/single/Training
Training events: 2540
Analysis type : [Classification]


#OPT -*-*-*-*-*-*-*-*-*-*-*-*- options -*-*-*-*-*-*-*-*-*-*-*-*-

# Set by User:
SigBgSeparate: "True" [Separate foams for signal and background]
VolFrac: "5.880000e-02" [Size of sampling box, used for density calculation during foam build-up (maximum value: 1.0 is equivalent to volume of entire foam)]
# Default:
V: "False" [Verbose output (short form of "VerbosityLevel" below - overrides the latter one)]
VerbosityLevel: "Default" [Verbosity level]
VarTransform: "None" [List of variable transformations performed before training, e.g., "D_Background,P_Signal,G,N_AllClasses" for: "Decorrelation, PCA-transformation, Gaussianisation, Normalisation, each for the given class of events ('AllClasses' denotes all events of all classes, if no class indication is given, 'All' is assumed)"]
H: "False" [Print method-specific help message]
CreateMVAPdfs: "False" [Create PDFs for classifier outputs (signal and background)]
IgnoreNegWeightsInTraining: "False" [Events with negative weights are ignored in the training (but are included for testing and performance evaluation)]
TailCut: "1.000000e-03" [Fraction of outlier events that are excluded from the foam in each dimension]
nActiveCells: "500" [Maximum number of active cells to be created by the foam]
nSampl: "2000" [Number of generated MC events per cell]
nBin: "5" [Number of bins in edge histograms]
Compress: "True" [Compress foam output file]
MultiTargetRegression: "False" [Do regression with multiple targets]
Nmin: "100" [Number of events in cell required to split cell]
MaxDepth: "0" [Maximum depth of cell tree (0=unlimited)]
FillFoamWithOrigWeights: "False" [Fill foam with original or boost weights]
UseYesNoCell: "False" [Return -1 or 1 for bkg or signal like events]
DTLogic: "None" [Use decision tree algorithm to split cells]
Kernel: "None" [Kernel type used]
TargetSelection: "Mean" [Target selection method]
##


#VAR -*-*-*-*-*-*-*-*-*-*-*-* variables *-*-*-*-*-*-*-*-*-*-*-*-

NVar 10
track_deta track_deta track_deta track_deta 'F' [4.76837158203e-07,1.92375802994]
track_dlayer track_dlayer track_dlayer track_dlayer 'I' [0,14]
track_layer track_layer track_layer track_layer 'I' [0,23]
track_pT track_pT track_pT track_pT 'F' [0.238087445498,34.1584281921]
approach_dist approach_dist approach_dist approach_dist 'F' [1.40332131195e-05,12.8133029938]
vtx_radius vtx_radius vtx_radius vtx_radius 'F' [5.07211098011e-06,20.9999389648]
vtxTrack_dist vtxTrack_dist vtxTrack_dist vtxTrack_dist 'F' [0.0258899498731,8.61121940613]
photon_m photon_m photon_m photon_m 'F' [1.044480443,713.936157227]
photon_pT photon_pT photon_pT photon_pT 'F' [0.0209014415741,5008.76708984]
cluster_prob cluster_prob cluster_prob cluster_prob 'F' [0,0.999874174595]
NSpec 1
vtx_chi2 vtx_chi2 vtx_chi2 F 'F' [0,3.33078734987e-36]


============================================================================ */
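
// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the TMVA-generated output): the option keys
// listed under #OPT above are the ones passed when booking the PDEFoam method
// with a TMVA::Factory. A booking that reproduces the user-set options could
// look roughly like the commented lines below; the factory name, output file
// and data-loading calls are placeholders for the user's own training macro.
//
//    #include "TMVA/Factory.h"   // plus TMVA/Types.h and TFile.h
//
//    TMVA::Factory factory( "TMVAClassification", outputFile,
//                           "AnalysisType=Classification" );
//    factory.AddVariable( "track_deta",   'F' );
//    factory.AddVariable( "track_dlayer", 'I' );
//    // ... the remaining eight input variables listed under #VAR ...
//    factory.AddSpectator( "vtx_chi2" );
//    factory.BookMethod( TMVA::Types::kPDEFoam, "PDEFoam",
//                        "SigBgSeparate=True:VolFrac=5.880000e-02" );
// ---------------------------------------------------------------------------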

#include <vector>
#include <cmath>
#include <string>
#include <iostream>

#ifndef IClassifierReader__def
#define IClassifierReader__def

class IClassifierReader {

 public:

   // constructor
   IClassifierReader() : fStatusIsClean( true ) {}
   virtual ~IClassifierReader() {}

   // return classifier response
   virtual double GetMvaValue( const std::vector<double>& inputValues ) const = 0;

   // returns classifier status
   bool IsStatusClean() const { return fStatusIsClean; }

 protected:

   bool fStatusIsClean;
};

#endif

class ReadPDEFoam : public IClassifierReader {

 public:

   // constructor
   ReadPDEFoam( std::vector<std::string>& theInputVars )
      : IClassifierReader(),
        fClassName( "ReadPDEFoam" ),
        fNvars( 10 ),
        fIsNormalised( false )
   {
      // the training input variables
      const char* inputVars[] = { "track_deta", "track_dlayer", "track_layer", "track_pT", "approach_dist", "vtx_radius", "vtxTrack_dist", "photon_m", "photon_pT", "cluster_prob" };

      // sanity checks
      if (theInputVars.size() <= 0) {
         std::cout << "Problem in class \"" << fClassName << "\": empty input vector" << std::endl;
         fStatusIsClean = false;
      }

      if (theInputVars.size() != fNvars) {
         std::cout << "Problem in class \"" << fClassName << "\": mismatch in number of input values: "
                   << theInputVars.size() << " != " << fNvars << std::endl;
         fStatusIsClean = false;
      }

      // validate input variables
      for (size_t ivar = 0; ivar < theInputVars.size(); ivar++) {
         if (theInputVars[ivar] != inputVars[ivar]) {
            std::cout << "Problem in class \"" << fClassName << "\": mismatch in input variable names" << std::endl
                      << " for variable [" << ivar << "]: " << theInputVars[ivar].c_str() << " != " << inputVars[ivar] << std::endl;
            fStatusIsClean = false;
         }
      }

      // initialize min and max vectors (for normalisation)
      fVmin[0] = 0;
      fVmax[0] = 0;
      fVmin[1] = 0;
      fVmax[1] = 0;
      fVmin[2] = 0;
      fVmax[2] = 0;
      fVmin[3] = 0;
      fVmax[3] = 0;
      fVmin[4] = 0;
      fVmax[4] = 0;
      fVmin[5] = 0;
      fVmax[5] = 0;
      fVmin[6] = 0;
      fVmax[6] = 0;
      fVmin[7] = 0;
      fVmax[7] = 0;
      fVmin[8] = 0;
      fVmax[8] = 0;
      fVmin[9] = 0;
      fVmax[9] = 0;

      // initialize input variable types
      fType[0] = 'F';
      fType[1] = 'I';
      fType[2] = 'I';
      fType[3] = 'F';
      fType[4] = 'F';
      fType[5] = 'F';
      fType[6] = 'F';
      fType[7] = 'F';
      fType[8] = 'F';
      fType[9] = 'F';

      // initialize constants
      Initialize();

   }

   // destructor
   virtual ~ReadPDEFoam() {
      Clear(); // method-specific
   }

   // the classifier response
   // "inputValues" is a vector of input values in the same order as the
   // variables given to the constructor
   double GetMvaValue( const std::vector<double>& inputValues ) const;

 private:

   // method-specific destructor
   void Clear();

   // common member variables
   const char* fClassName;

   const size_t fNvars;
   size_t GetNvar() const { return fNvars; }
   char GetType( int ivar ) const { return fType[ivar]; }

   // normalisation of input variables
   const bool fIsNormalised;
   bool IsNormalised() const { return fIsNormalised; }
   double fVmin[10];
   double fVmax[10];
   double NormVariable( double x, double xmin, double xmax ) const {
      // normalise to output range: [-1, 1]
      return 2*(x - xmin)/(xmax - xmin) - 1.0;
   }

   // type of input variable: 'F' or 'I'
   char fType[10];

   // initialize internal variables
   void Initialize();
   double GetMvaValue__( const std::vector<double>& inputValues ) const;

   // private members (method specific)
};

inline double ReadPDEFoam::GetMvaValue( const std::vector<double>& inputValues ) const
{
   // classifier response value
   double retval = 0;

   // classifier response, sanity check first
   if (!IsStatusClean()) {
      std::cout << "Problem in class \"" << fClassName << "\": cannot return classifier response"
                << " because status is dirty" << std::endl;
      retval = 0;
   }
   else {
      if (IsNormalised()) {
         // normalise variables
         std::vector<double> iV;
         iV.reserve(inputValues.size());
         int ivar = 0;
         for (std::vector<double>::const_iterator varIt = inputValues.begin();
              varIt != inputValues.end(); varIt++, ivar++) {
            iV.push_back(NormVariable( *varIt, fVmin[ivar], fVmax[ivar] ));
         }
         retval = GetMvaValue__( iV );
      }
      else {
         retval = GetMvaValue__( inputValues );
      }
   }

   return retval;
}
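
// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the TMVA-generated output): the
// reader is constructed with the training variable names, in the same order
// as listed in the constructor above, and GetMvaValue() is then called once
// per candidate. The candidate values below are placeholders.
//
//    const char* names[] = { "track_deta", "track_dlayer", "track_layer",
//                            "track_pT", "approach_dist", "vtx_radius",
//                            "vtxTrack_dist", "photon_m", "photon_pT",
//                            "cluster_prob" };
//    std::vector<std::string> inputVars( names, names + 10 );
//    ReadPDEFoam reader( inputVars );
//
//    std::vector<double> candidate( 10, 0.0 );   // fill with one candidate's values
//    double response = reader.IsStatusClean() ? reader.GetMvaValue( candidate ) : 0.0;
// ---------------------------------------------------------------------------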