CMS 3D CMS Logo

 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Properties Friends Macros Pages
Forest.cc
Go to the documentation of this file.
1 // Forest.cxx //
3 // =====================================================================//
4 // This is the object implementation of a forest of decision trees. //
5 // We need this to implement gradient boosting. //
6 // References include //
7 // *Elements of Statistical Learning by Hastie, //
8 // Tibshirani, and Friedman. //
9 // *Greedy Function Approximation: A Gradient Boosting Machine. //
10 // Friedman. The Annals of Statistics, Vol. 29, No. 5. Oct 2001. //
11 // *Inductive Learning of Tree-based Regression Models. Luis Torgo. //
12 // //
14 
16 // _______________________Includes_______________________________________//
18 
21 
22 #include "TStopwatch.h"
23 
24 #include <iostream>
25 #include <sstream>
26 #include <algorithm>
27 #include <fstream>
28 #include <utility>
29 
31 // _______________________Constructor(s)________________________________//
33 
{
 // Default-constructed forest: start with a single empty row in the
 // event matrix so that events[0] is always safe to access.
 events = std::vector< std::vector<Event*> >(1);
}
38 
40 // ----------------------------------------------------------------------
42 
Forest::Forest(std::vector<Event*>& trainingEvents)
{
 // Build the forest from a training sample. setTrainingEvents replicates
 // the sample once per feature variable stored in the events.
 setTrainingEvents(trainingEvents);
}
47 
49 // _______________________Destructor____________________________________//
51 
{
// When the forest is destroyed it will delete the trees as well as the
// events from the training and testing sets.
// The user may want the events to remain after they destroy the forest
// this should be changed in future upgrades.
// NOTE(review): only the trees are freed below — the Event objects
// themselves are NOT deleted here, despite the comment above; confirm
// event ownership with the callers.

 for(unsigned int i=0; i < trees.size(); i++)
 {
 delete trees[i];
 }
}
65 // ______________________Get/Set_Functions______________________________//
67 
68 void Forest::setTrainingEvents(std::vector<Event*>& trainingEvents)
69 {
70 // tell the forest which events to use for training
71 
72  Event* e = trainingEvents[0];
73  // Unused variable
74  // unsigned int numrows = e->data.size();
75 
76  // Reset the events matrix.
77  events = std::vector< std::vector<Event*> >();
78 
79  for(unsigned int i=0; i<e->data.size(); i++)
80  {
81  events.push_back(trainingEvents);
82  }
83 }
84 
86 // ----------------------------------------------------------------------
88 
89 // return a copy of the training events
90 std::vector<Event*> Forest::getTrainingEvents(){ return events[0]; }
91 
93 // ----------------------------------------------------------------------
95 
96 // return the ith tree
97 Tree* Forest::getTree(unsigned int i)
98 {
99  if(/*i>=0 && */i<trees.size()) return trees[i];
100  else
101  {
102  //std::cout << i << "is an invalid input for getTree. Out of range." << std::endl;
103  return 0;
104  }
105 }
106 
108 // ______________________Various_Helpful_Functions______________________//
110 
111 unsigned int Forest::size()
112 {
113 // Return the number of trees in the forest.
114  return trees.size();
115 }
116 
118 //*** Need to make a data structure that includes the next few functions ***
119 //*** pertaining to events. These don't really have much to do with the ***
120 //*** forest class. ***
122 
124 // ----------------------------------------------------------------------
126 
127 void Forest::listEvents(std::vector< std::vector<Event*> >& e)
128 {
129 // Simply list the events in each event vector. We have multiple copies
130 // of the events vector. Each copy is sorted according to a different
131 // determining variable.
132  std::cout << std::endl << "Listing Events... " << std::endl;
133 
134  for(unsigned int i=0; i < e.size(); i++)
135  {
136  std::cout << std::endl << "Variable " << i << " vector contents: " << std::endl;
137  for(unsigned int j=0; j<e[i].size(); j++)
138  {
139  e[i][j]->outputEvent();
140  }
141  std::cout << std::endl;
142  }
143 }
144 
146 // ----------------------------------------------------------------------
148 
// We have to initialize Event::sortingIndex outside of a function since
// it is a static member. It selects which feature variable compareEvents
// sorts by; index 0 is the target/residual and is not used for splitting,
// so the initial sorting variable is 1.
Int_t Event::sortingIndex = 1;
152 
{
// Sort the events according to the variable given by the sortingIndex.
// NOTE(review): this listing shows an empty body — the comparison
// (presumably e1->data[Event::sortingIndex] < e2->data[Event::sortingIndex])
// appears to have been lost during extraction; confirm against the
// original source before relying on this listing.
}
159 // ----------------------------------------------------------------------
161 
{
// Sort the events by ID. We need this to produce rate plots.
// Strict weak ordering on the event's integer id.
 return e1->id < e2->id;
}
168 // ----------------------------------------------------------------------
170 
void Forest::sortEventVectors(std::vector< std::vector<Event*> >& e)
{
// When a node chooses the optimum split point and split variable it needs
// the events to be sorted according to the variable it is considering.

 for(unsigned int i=0; i<e.size(); i++)
 {
 // compareEvents sorts by the static Event::sortingIndex.
 // NOTE(review): this listing appears to omit a line setting
 // Event::sortingIndex = i before the sort — confirm against the
 // original source; without it every row sorts by the same variable.
 std::sort(e[i].begin(), e[i].end(), compareEvents);
 }
}
182 
184 // ----------------------------------------------------------------------
186 
187 void Forest::rankVariables(std::vector<int>& rank)
188 {
189 // This function ranks the determining variables according to their importance
190 // in determining the fit. Use a low learning rate for better results.
191 // Separates completely useless variables from useful ones well,
192 // but isn't the best at separating variables of similar importance.
193 // This is calculated using the error reduction on the training set. The function
194 // should be changed to use the testing set, but this works fine for now.
195 // I will try to change this in the future.
196 
197  // Initialize the vector v, which will store the total error reduction
198  // for each variable i in v[i].
199  std::vector<double> v(events.size(), 0);
200 
201  //std::cout << std::endl << "Ranking Variables by Net Error Reduction... " << std::endl;
202 
203  for(unsigned int j=0; j < trees.size(); j++)
204  {
205  trees[j]->rankVariables(v);
206  }
207 
208  double max = *std::max_element(v.begin(), v.end());
209 
210  // Scale the importance. Maximum importance = 100.
211  for(unsigned int i=0; i < v.size(); i++)
212  {
213  v[i] = 100*v[i]/max;
214  }
215 
216  // Change the storage format so that we can keep the index
217  // and the value associated after sorting.
218  std::vector< std::pair<double, Int_t> > w(events.size());
219 
220  for(unsigned int i=0; i<v.size(); i++)
221  {
222  w[i] = std::pair<double, Int_t>(v[i],i);
223  }
224 
225  // Sort so that we can output in order of importance.
226  std::sort(w.begin(),w.end());
227 
228  // Output the results.
229  for(int i=(v.size()-1); i>=0; i--)
230  {
231  rank.push_back(w[i].second);
232  // std::cout << "x" << w[i].second << ": " << w[i].first << std::endl;
233  }
234 
235  //std::cout << std::endl << "Done." << std::endl << std::endl;
236 }
237 
239 // ----------------------------------------------------------------------
241 
242 void Forest::saveSplitValues(const char* savefilename)
243 {
244 // This function gathers all of the split values from the forest and puts them into lists.
245 
246  std::ofstream splitvaluefile;
247  splitvaluefile.open(savefilename);
248 
249  // Initialize the matrix v, which will store the list of split values
250  // for each variable i in v[i].
251  std::vector<std::vector<double>> v(events.size(), std::vector<double>());
252 
253  //std::cout << std::endl << "Gathering split values... " << std::endl;
254 
255  // Gather the split values from each tree in the forest.
256  for(unsigned int j=0; j<trees.size(); j++)
257  {
258  trees[j]->getSplitValues(v);
259  }
260 
261  // Sort the lists of split values and remove the duplicates.
262  for(unsigned int i=0; i<v.size(); i++)
263  {
264  std::sort(v[i].begin(),v[i].end());
265  v[i].erase( unique( v[i].begin(), v[i].end() ), v[i].end() );
266  }
267 
268  // Output the results after removing duplicates.
269  // The 0th variable is special and is not used for splitting, so we start at 1.
270  for(unsigned int i=1; i<v.size(); i++)
271  {
272  TString splitValues;
273  for(unsigned int j=0; j<v[i].size(); j++)
274  {
275  std::stringstream ss;
276  ss.precision(14);
277  ss << std::scientific << v[i][j];
278  splitValues+=",";
279  splitValues+=ss.str().c_str();
280  }
281 
282  splitValues=splitValues(1,splitValues.Length());
283  splitvaluefile << splitValues << std::endl << std::endl;;
284  }
285 }
287 // ______________________Update_Events_After_Fitting____________________//
289 
290 void Forest::updateRegTargets(Tree* tree, double learningRate, LossFunction* l)
291 {
292 // Prepare the global vector of events for the next tree.
293 // Update the fit for each event and set the new target value
294 // for the next tree.
295 
296  // Get the list of terminal nodes for this tree.
297  std::list<Node*>& tn = tree->getTerminalNodes();
298 
299  // Loop through the terminal nodes.
300  for(std::list<Node*>::iterator it=tn.begin(); it!=tn.end(); it++)
301  {
302  // Get the events in the current terminal region.
303  std::vector<Event*>& v = (*it)->getEvents()[0];
304 
305  // Fit the events depending on the loss function criteria.
306  double fit = l->fit(v);
307 
308  // Scale the rate at which the algorithm converges.
309  fit = learningRate*fit;
310 
311  // Store the official fit value in the terminal node.
312  (*it)->setFitValue(fit);
313 
314  // Loop through each event in the terminal region and update the
315  // the target for the next tree.
316  for(unsigned int j=0; j<v.size(); j++)
317  {
318  Event* e = v[j];
319  e->predictedValue += fit;
320  e->data[0] = l->target(e);
321  }
322 
323  // Release memory.
324  (*it)->getEvents() = std::vector< std::vector<Event*> >();
325  }
326 }
327 
329 // ----------------------------------------------------------------------
331 
{
// Prepare the test events for the next tree: each event picks up the
// fit value stored in the terminal node it was filtered into.

 // Get the list of terminal nodes for this tree.
 std::list<Node*>& tn = tree->getTerminalNodes();

 // Loop through the terminal nodes.
 for(std::list<Node*>::iterator it=tn.begin(); it!=tn.end(); it++)
 {
 // Events filtered into this terminal region, and the fit value
 // previously stored in the node.
 std::vector<Event*>& v = (*it)->getEvents()[0];
 double fit = (*it)->getFitValue();

 // Loop through each event in the terminal region and update the
 // the global event it maps to.
 for(unsigned int j=0; j<v.size(); j++)
 {
 Event* e = v[j];
 e->predictedValue += fit;
 }

 // Release memory.
 (*it)->getEvents() = std::vector< std::vector<Event*> >();
 }
}
357 
359 // ____________________Do/Test_the Regression___________________________//
361 
void Forest::doRegression(Int_t nodeLimit, Int_t treeLimit, double learningRate, LossFunction* l, const char* savetreesdirectory, bool saveTrees)
{
// Build the forest using the training sample: grow treeLimit trees of up
// to nodeLimit terminal nodes each, updating the residual targets after
// every tree (gradient boosting), and optionally save each tree as XML.

 // The trees work with a matrix of events where the rows have the same set of events. Each row however
 // is sorted according to the feature variable given by event->data[row].
 // If we only had one set of events we would have to sort it according to the
 // feature variable every time we want to calculate the best split point for that feature.
 // By keeping sorted copies we avoid the sorting operation during splint point calculation
 // and save computation time. If we do not sort each of the rows the regression will fail.
 // NOTE(review): this listing appears to omit a line here (presumably
 // sortEventVectors(events)) — confirm against the original source.

 // See how long the regression takes.
 TStopwatch timer;
 timer.Start(kTRUE);

 for(unsigned int i=0; i< (unsigned) treeLimit; i++)
 {
 // Grow the next tree on the current residual targets.
 Tree* tree = new Tree(events);
 trees.push_back(tree);
 tree->buildTree(nodeLimit);

 // Update the targets for the next tree to fit.
 updateRegTargets(tree, learningRate, l);

 // Save trees to xml in some directory.
 std::ostringstream ss;
 ss << savetreesdirectory << "/" << i << ".xml";
 std::string s = ss.str();
 const char* c = s.c_str();

 if(saveTrees) tree->saveToXML(c);
 }
}
404 
406 // ----------------------------------------------------------------------
408 
409 void Forest::predictEvents(std::vector<Event*>& eventsp, unsigned int numtrees)
410 {
411 // Predict values for eventsp by running them through the forest up to numtrees.
412 
413  //std::cout << "Using " << numtrees << " trees from the forest to predict events ... " << std::endl;
414  if(numtrees > trees.size())
415  {
416  //std::cout << std::endl << "!! Input greater than the forest size. Using forest.size() = " << trees.size() << " to predict instead." << std::endl;
417  numtrees = trees.size();
418  }
419 
420  // i iterates through the trees in the forest. Each tree corrects the last prediction.
421  for(unsigned int i=0; i < numtrees; i++)
422  {
423  //std::cout << "++Tree " << i << "..." << std::endl;
424  appendCorrection(eventsp, i);
425  }
426 }
427 
429 // ----------------------------------------------------------------------
431 
432 void Forest::appendCorrection(std::vector<Event*>& eventsp, Int_t treenum)
433 {
434 // Update the prediction by appending the next correction.
435 
436  Tree* tree = trees[treenum];
437  tree->filterEvents(eventsp);
438 
439  // Update the events with their new prediction.
440  updateEvents(tree);
441 }
442 
444 // ----------------------------------------------------------------------
446 
447 void Forest::predictEvent(Event* e, unsigned int numtrees)
448 {
449 // Predict values for eventsp by running them through the forest up to numtrees.
450 
451  //std::cout << "Using " << numtrees << " trees from the forest to predict events ... " << std::endl;
452  if(numtrees > trees.size())
453  {
454  //std::cout << std::endl << "!! Input greater than the forest size. Using forest.size() = " << trees.size() << " to predict instead." << std::endl;
455  numtrees = trees.size();
456  }
457 
458  // i iterates through the trees in the forest. Each tree corrects the last prediction.
459  for(unsigned int i=0; i < numtrees; i++)
460  {
461  //std::cout << "++Tree " << i << "..." << std::endl;
462  appendCorrection(e, i);
463  }
464 }
465 
467 // ----------------------------------------------------------------------
469 
470 void Forest::appendCorrection(Event* e, Int_t treenum)
471 {
472 // Update the prediction by appending the next correction.
473 
474  Tree* tree = trees[treenum];
475  Node* terminalNode = tree->filterEvent(e);
476 
477  // Update the event with its new prediction.
478  double fit = terminalNode->getFitValue();
479  e->predictedValue += fit;
480 }
482 // ----------------------------------------------------------------------------------
484 
485 void Forest::loadForestFromXML(const char* directory, unsigned int numTrees)
486 {
487 // Load a forest that has already been created and stored into XML somewhere.
488 
489  // Initialize the vector of trees.
490  trees = std::vector<Tree*>(numTrees);
491 
492  // Load the Forest.
493  //std::cout << std::endl << "Loading Forest from XML ... " << std::endl;
494  for(unsigned int i=0; i < numTrees; i++)
495  {
496  trees[i] = new Tree();
497 
498  std::stringstream ss;
499  ss << directory << "/" << i << ".xml";
500 
501  //trees[i]->loadFromXML(ss.str().c_str());
502  trees[i]->loadFromXML(edm::FileInPath(ss.str().c_str()).fullPath().c_str());
503  }
504 
505  // std::cout << "Done." << std::endl << std::endl;
506 }
507 
509 // ___________________Stochastic_Sampling_&_Regression__________________//
511 
{
// We use this for Stochastic Gradient Boosting. Basically you
// take a subsample of the training events and build a tree using
// those. Then use the tree built from the subsample to update
// the predictions for all the events.

 // One row per feature variable, mirroring the layout of 'events'.
 subSample = std::vector< std::vector<Event*> >(events.size()) ;
 size_t subSampleSize = fraction*events[0].size();

 // Randomize the first subSampleSize events in events[0].
 shuffle(events[0].begin(), events[0].end(), subSampleSize);

 // Get a copy of the random subset we just made.
 std::vector<Event*> v(events[0].begin(), events[0].begin()+subSampleSize);

 // Initialize and sort the subSample collection.
 // NOTE(review): every row is filled with the same (unsorted) copy here;
 // the per-variable sort presumably happens afterwards — this listing
 // appears to omit a trailing sortEventVectors(subSample) call; confirm
 // against the original source.
 for(unsigned int i=0; i<subSample.size(); i++)
 {
 subSample[i] = v;
 }

}
536 
538 // ----------------------------------------------------------------------
540 
void Forest::doStochasticRegression(Int_t nodeLimit, Int_t treeLimit, double learningRate, double fraction, LossFunction* l)
{
// Stochastic gradient boosting: train each tree on a random subsample
// (of size fraction*N) of the training events, then use that tree to
// update the predictions and targets of ALL events.
//
// If the fraction of events to use is one then this algorithm is slower than doRegression due to the fact
// that we have to sort the events every time we extract a subsample. Without random sampling we simply
// use all of the events and keep them sorted.

 // Prepare some things.
 // NOTE(review): reassigning 'trees' here would leak any previously
 // built trees — confirm callers run this only on a fresh forest.
 // (This listing also appears to omit a line just above; confirm.)
 trees = std::vector<Tree*>(treeLimit);

 // See how long the regression takes.
 TStopwatch timer;
 timer.Start(kTRUE);

 for(unsigned int i=0; i< (unsigned) treeLimit; i++)
 {
 // Build the tree using a random subsample.
 prepareRandomSubsample(fraction);
 trees[i] = new Tree(subSample);
 trees[i]->buildTree(nodeLimit);

 // Fit all of the events based upon the tree we built using
 // the subsample of events.
 trees[i]->filterEvents(events[0]);

 // Update the targets for the next tree to fit.
 updateRegTargets(trees[i], learningRate, l);

 // Save trees to xml in some directory.
 // NOTE(review): unlike doRegression, the directory is hard-coded to
 // "trees/" and saving is unconditional — confirm this is intended.
 std::ostringstream ss;
 ss << "trees/" << i << ".xml";
 std::string s = ss.str();
 const char* c = s.c_str();

 trees[i]->saveToXML(c);
 }

}
Node * filterEvent(Event *e)
Definition: Tree.cc:202
Double_t predictedValue
Definition: Event.h:20
Double_t getFitValue()
Definition: Node.cc:155
int i
Definition: DBlmapReader.cc:9
Forest()
Definition: Forest.cc:34
Int_t id
Definition: Event.h:29
void doRegression(Int_t nodeLimit, Int_t treeLimit, double learningRate, LossFunction *l, const char *savetreesdirectory, bool saveTrees)
Definition: Forest.cc:362
std::vector< std::vector< Event * > > events
Definition: Forest.h:58
virtual Double_t fit(std::vector< Event * > &v)=0
void saveSplitValues(const char *savefilename)
Definition: Forest.cc:242
virtual Double_t target(Event *e)=0
void predictEvents(std::vector< Event * > &eventsp, unsigned int trees)
Definition: Forest.cc:409
void predictEvent(Event *e, unsigned int trees)
Definition: Forest.cc:447
const double w
Definition: UKUtility.cc:23
Definition: Node.h:10
Tree * getTree(unsigned int i)
Definition: Forest.cc:97
void doStochasticRegression(Int_t nodeLimit, Int_t treeLimit, double learningRate, double fraction, LossFunction *l)
Definition: Forest.cc:541
bidiiter shuffle(bidiiter begin, bidiiter end, size_t num_random)
Definition: Utilities.h:26
Definition: Event.h:16
void buildTree(Int_t nodeLimit)
Definition: Tree.cc:107
U second(std::pair< T, U > const &p)
static Int_t sortingIndex
Definition: Event.h:28
void loadForestFromXML(const char *directory, unsigned int numTrees)
Definition: Forest.cc:485
int j
Definition: DBlmapReader.cc:9
~Forest()
Definition: Forest.cc:52
#define end
Definition: vmac.h:37
void updateEvents(Tree *tree)
Definition: Forest.cc:332
Definition: Tree.h:17
void sortEventVectors(std::vector< std::vector< Event * > > &e)
Definition: Forest.cc:171
bool compareEvents(Event *e1, Event *e2)
Definition: Forest.cc:153
Float e1
Definition: deltaR.h:20
void updateRegTargets(Tree *tree, double learningRate, LossFunction *l)
Definition: Forest.cc:290
std::vector< Tree * > trees
Definition: Forest.h:60
std::vector< std::vector< Event * > > subSample
Definition: Forest.h:59
std::list< Node * > & getTerminalNodes()
Definition: Tree.cc:75
Float e2
Definition: deltaR.h:21
void filterEvents(std::vector< Event * > &tEvents)
Definition: Tree.cc:168
#define begin
Definition: vmac.h:30
void listEvents(std::vector< std::vector< Event * > > &e)
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
Definition: Forest.cc:127
unsigned int size()
Definition: Forest.cc:111
void appendCorrection(std::vector< Event * > &eventsp, Int_t treenum)
Definition: Forest.cc:432
void rankVariables(std::vector< int > &rank)
Definition: Forest.cc:187
tuple cout
Definition: gather_cfg.py:145
void setTrainingEvents(std::vector< Event * > &trainingEvents)
Definition: Forest.cc:68
void saveToXML(const char *filename)
Definition: Tree.cc:332
std::vector< Event * > getTrainingEvents()
Definition: Forest.cc:90
bool compareEventsById(Event *e1, Event *e2)
Definition: Forest.cc:162
std::vector< Double_t > data
Definition: Event.h:30
void prepareRandomSubsample(double fraction)
Definition: Forest.cc:512