ProcLikelihood.cc
#include <algorithm>
#include <iostream>
#include <numeric>
#include <iomanip>
#include <cstring>
#include <vector>
#include <string>
#include <memory>
#include <map>

#include <xercesc/dom/DOM.hpp>

#include <TH1.h>

// The CMSSW includes on the following lines were dropped from the original
// listing; this set is reconstructed from the symbols used in this file.
#include "FWCore/Utilities/interface/Exception.h"

#include "PhysicsTools/MVAComputer/interface/AtomicId.h"

#include "PhysicsTools/MVATrainer/interface/XMLSimpleStr.h"
#include "PhysicsTools/MVATrainer/interface/XMLUniStr.h"
#include "PhysicsTools/MVATrainer/interface/XMLDocument.h"
#include "PhysicsTools/MVATrainer/interface/MVATrainer.h"
#include "PhysicsTools/MVATrainer/interface/TrainProcessor.h"

XERCES_CPP_NAMESPACE_USE

using namespace PhysicsTools;

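// ProcLikelihood trains a per-variable likelihood-ratio discriminator: it
// books signal and background PDFs for every input variable (optionally split
// into categories), fills them from the training sample, and finally emits a
// Calibration::ProcLikelihood object for the MVAComputer.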
namespace { // anonymous

class ProcLikelihood : public TrainProcessor {
  public:
    // Registry typedef reconstructed (dropped from the listing); it follows
    // the pattern used by the other TrainProcessor plugins.
    typedef TrainProcessor::Registry<ProcLikelihood>::Type Registry;

    ProcLikelihood(const char *name, const AtomicId *id,
                   MVATrainer *trainer);
    virtual ~ProcLikelihood();

    virtual void configure(DOMElement *elem) override;
    virtual Calibration::VarProcessor *getCalibration() const override;

    virtual void trainBegin() override;
    virtual void trainData(const std::vector<double> *values,
                           bool target, double weight) override;
    virtual void trainEnd() override;

    virtual bool load() override;
    virtual void save() override;

    struct PDF {
        std::vector<double>            distr;
        // range member reconstructed (dropped from the listing)
        Calibration::HistogramD::Range range;
    };

  private:
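    // Each input variable (times each category) owns one SigBkg entry.  The
    // Iteration state machine drives training: ITER_EMPTY/ITER_RANGE scan the
    // data to determine the histogram range, ITER_FILL accumulates the PDFs,
    // and ITER_DONE marks a finished pair.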
    enum Iteration {
        ITER_EMPTY,
        ITER_RANGE,
        ITER_FILL,
        ITER_DONE
    };

    struct SigBkg {
        PDF          signal;
        PDF          background;
        unsigned int smooth;
        Iteration    iteration;
    };

    std::vector<SigBkg> pdfs;
    std::vector<double> sigSum;
    std::vector<double> bkgSum;
    std::vector<double> bias;
    int                 categoryIdx;
    bool                logOutput;
    bool                individual;
    bool                neverUndefined;
    bool                keepEmpty;
    unsigned int        nCategories;
    bool                doCategoryBias;
    bool                doGivenBias;
    bool                doGlobalBias;
    Iteration           iteration;
};

static ProcLikelihood::Registry registry("ProcLikelihood");

ProcLikelihood::ProcLikelihood(const char *name, const AtomicId *id,
                               MVATrainer *trainer) :
    TrainProcessor(name, id, trainer),
    categoryIdx(-1),
    logOutput(false),
    individual(false),
    neverUndefined(true),
    keepEmpty(false),
    nCategories(1),
    doCategoryBias(false),
    doGivenBias(false),
    doGlobalBias(false),
    iteration(ITER_FILL)
{
}

ProcLikelihood::~ProcLikelihood()
{
}

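// configure() parses the <general>, <bias_table>, <category> and <sigbkg>
// tags of the processor's trainer configuration and books one SigBkg entry
// per input variable and category.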
void ProcLikelihood::configure(DOMElement *elem)
{
    int i = 0;
    bool first = true;
    for(DOMNode *node = elem->getFirstChild();
        node; node = node->getNextSibling()) {
        if (node->getNodeType() != DOMNode::ELEMENT_NODE)
            continue;

        DOMElement *elem = static_cast<DOMElement*>(node);

        XMLSimpleStr nodeName(node->getNodeName());

        if (std::strcmp(nodeName, "general") == 0) {
            if (!first)
                throw cms::Exception("ProcLikelihood")
                    << "Config tag general needs to come "
                       "first." << std::endl;

            if (XMLDocument::hasAttribute(elem, "bias")) {
                double globalBias =
                    XMLDocument::readAttribute<double>(
                        elem, "bias");
                bias.push_back(globalBias);
                doGivenBias = true;
            } else
                doGivenBias = false;

            doCategoryBias = XMLDocument::readAttribute<bool>(
                elem, "category_bias", false);
            doGlobalBias = XMLDocument::readAttribute<bool>(
                elem, "global_bias", false);
            logOutput = XMLDocument::readAttribute<bool>(
                elem, "log", false);
            individual = XMLDocument::readAttribute<bool>(
                elem, "individual", false);
            neverUndefined = !XMLDocument::readAttribute<bool>(
                elem, "strict", false);
            keepEmpty = !XMLDocument::readAttribute<bool>(
                elem, "ignore_empty", true);

            first = false;
            continue;
        }
        first = false;

        if (std::strcmp(nodeName, "bias_table") == 0) {
            if (!bias.empty())
                throw cms::Exception("ProcLikelihood")
                    << "Bias can be only specified once."
                    << std::endl;

            for(DOMNode *subNode = node->getFirstChild();
                subNode; subNode = subNode->getNextSibling()) {
                if (subNode->getNodeType() !=
                    DOMNode::ELEMENT_NODE)
                    continue;

                if (std::strcmp(XMLSimpleStr(
                        subNode->getNodeName()),
                        "bias") != 0)
                    throw cms::Exception("ProcLikelihood")
                        << "Expected bias tag in "
                           "config." << std::endl;

                bias.push_back(
                    XMLDocument::readContent<double>(
                        subNode));
            }

            continue;
        }

        if (std::strcmp(nodeName, "category") != 0) {
            i++;
            continue;
        }

        if (categoryIdx >= 0)
            throw cms::Exception("ProcLikelihood")
                << "More than one category variable given."
                << std::endl;

        unsigned int count = XMLDocument::readAttribute<unsigned int>(
            elem, "count");

        categoryIdx = i;
        nCategories = count;
    }

    for(DOMNode *node = elem->getFirstChild();
        node; node = node->getNextSibling()) {
        if (node->getNodeType() != DOMNode::ELEMENT_NODE)
            continue;

        XMLSimpleStr nodeName(node->getNodeName());
        if (std::strcmp(nodeName, "general") == 0 ||
            std::strcmp(nodeName, "bias_table") == 0 ||
            std::strcmp(nodeName, "category") == 0)
            continue;

        if (std::strcmp(nodeName, "sigbkg") != 0)
            throw cms::Exception("ProcLikelihood")
                << "Expected sigbkg tag in config section."
                << std::endl;
        elem = static_cast<DOMElement*>(node);

        SigBkg pdf;

        unsigned int size = XMLDocument::readAttribute<unsigned int>(
            elem, "size", 50);
        pdf.signal.distr.resize(size);
        pdf.background.distr.resize(size);

        pdf.smooth = XMLDocument::readAttribute<unsigned int>(
            elem, "smooth", 0);

        if (XMLDocument::hasAttribute(elem, "lower") &&
            XMLDocument::hasAttribute(elem, "upper")) {
            pdf.signal.range.min =
                XMLDocument::readAttribute<double>(
                    elem, "lower");
            pdf.signal.range.max =
                XMLDocument::readAttribute<double>(
                    elem, "upper");
            pdf.background.range = pdf.signal.range;
            pdf.iteration = ITER_FILL;
        } else
            pdf.iteration = ITER_EMPTY;

        for(unsigned int i = 0; i < nCategories; i++)
            pdfs.push_back(pdf);
    }

    unsigned int nInputs = getInputs().size();
    if (categoryIdx >= 0)
        nInputs--;

    sigSum.resize(nCategories);
    bkgSum.resize(nCategories);

    if (!doGivenBias && !bias.empty()) {
        doGivenBias = true;
        if (bias.size() != nCategories)
            throw cms::Exception("ProcLikelihood")
                << "Invalid number of category bias entries."
                << std::endl;
    }
    while (doGivenBias && bias.size() < nCategories)
        bias.push_back(bias.front());

    if (pdfs.size() != nInputs * nCategories)
        throw cms::Exception("ProcLikelihood")
            << "Got " << (pdfs.size() / nCategories)
            << " pdf configs for " << nInputs
            << " input variables." << std::endl;
}

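// getCalibration() converts the accumulated histograms into the persistent
// calibration object: pdfMap flattens the per-category ordering, each
// distribution is normalised to unit integral before being stored (the
// values vector is two entries larger than the histogram, presumably to
// carry the under- and overflow slots expected by setValues()), and the
// optional per-category bias factors are derived from the global,
// per-category and user-supplied bias settings.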
Calibration::VarProcessor *ProcLikelihood::getCalibration() const
{
    typedef Calibration::ProcLikelihood Calib;

    // reconstructed: the calibration object is allocated here and filled below
    Calib *calib = new Calib;

    std::vector<unsigned int> pdfMap;
    for(unsigned int i = 0; i < nCategories; i++)
        for(unsigned int j = i; j < pdfs.size(); j += nCategories)
            pdfMap.push_back(j);

    double totalSig = std::accumulate(sigSum.begin(), sigSum.end(), 0.0);
    double totalBkg = std::accumulate(bkgSum.begin(), bkgSum.end(), 0.0);

    for(unsigned int i = 0; i < pdfs.size(); i++) {
        const SigBkg *iter = &pdfs[pdfMap[i]];
        // reconstructed: one persistent signal/background PDF pair per entry
        Calibration::ProcLikelihood::SigBkgPDF pdf;

        pdf.signal = Calibration::HistogramF(iter->signal.distr.size(),
                                             iter->signal.range);
        double factor = std::accumulate(iter->signal.distr.begin(),
                                        iter->signal.distr.end(), 0.0);
        if (factor < 1e-20)
            factor = 1.0;
        else
            factor = 1.0 / factor;
        std::vector<double> values(iter->signal.distr.size() + 2);
        std::transform(iter->signal.distr.begin(),
                       iter->signal.distr.end(),
                       values.begin() + 1,
                       std::bind1st(std::multiplies<double>(),
                                    factor));
        pdf.signal.setValues(values);

        pdf.background =
            Calibration::HistogramF(iter->background.distr.size(),
                                    iter->background.range.min,
                                    iter->background.range.max);
        factor = std::accumulate(iter->background.distr.begin(),
                                 iter->background.distr.end(), 0.0);
        if (factor < 1e-20)
            factor = 1.0;
        else
            factor = 1.0 / factor;
        std::transform(iter->background.distr.begin(),
                       iter->background.distr.end(),
                       values.begin() + 1,
                       std::bind1st(std::multiplies<double>(),
                                    factor));
        pdf.background.setValues(values);

        pdf.useSplines = true;

        calib->pdfs.push_back(pdf);
    }

    calib->categoryIdx = categoryIdx;
    calib->logOutput = logOutput;
    calib->individual = individual;
    calib->neverUndefined = neverUndefined;
    calib->keepEmpty = keepEmpty;

    if (doGlobalBias || doCategoryBias || doGivenBias) {
        for(unsigned int i = 0; i < nCategories; i++) {
            double bias = doGlobalBias
                ? totalSig / totalBkg
                : 1.0;
            if (doGivenBias)
                bias *= this->bias[i];
            if (doCategoryBias)
                bias *= (sigSum[i] / totalSig) /
                        (bkgSum[i] / totalBkg);
            calib->bias.push_back(bias);
        }
    }

    return calib;
}

void ProcLikelihood::trainBegin()
{
}

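// trainData() receives one event per call.  The category variable (if any)
// selects which slice of "pdfs" is filled; depending on the per-variable
// iteration state the event is used either to widen the histogram range or
// to fill the signal/background distribution, weighted by the event weight.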
void ProcLikelihood::trainData(const std::vector<double> *values,
                               bool target, double weight)
{
    int category = 0;
    if (categoryIdx >= 0)
        category = (int)values[categoryIdx].front();
    if (category < 0 || category >= (int)nCategories)
        return;

    if (iteration == ITER_FILL) {
        if (target)
            sigSum[category] += weight;
        else
            bkgSum[category] += weight;
    }

    int i = 0;
    for(std::vector<SigBkg>::iterator iter = pdfs.begin() + category;
        iter < pdfs.end(); iter += nCategories, values++) {
        if (i++ == categoryIdx)
            values++;

        switch(iter->iteration) {
        case ITER_EMPTY:
            for(std::vector<double>::const_iterator value =
                    values->begin();
                value != values->end(); value++) {
                iter->signal.range.min =
                    iter->signal.range.max = *value;
                iter->iteration = ITER_RANGE;
                break;
            }
        case ITER_RANGE:
            for(std::vector<double>::const_iterator value =
                    values->begin();
                value != values->end(); value++) {
                iter->signal.range.min =
                    std::min(iter->signal.range.min, *value);
                iter->signal.range.max =
                    std::max(iter->signal.range.max, *value);
            }
            continue;
        case ITER_FILL:
            break;
        default:
            continue;
        }

        PDF &pdf = target ? iter->signal : iter->background;
        unsigned int n = pdf.distr.size() - 1;
        double mult = 1.0 / pdf.range.width();

        for(std::vector<double>::const_iterator value =
                values->begin(); value != values->end(); value++) {
            double x = (*value - pdf.range.min) * mult;
            if (x < 0.0)
                x = 0.0;
            else if (x >= 1.0)
                x = 1.0;

            pdf.distr[(unsigned int)(x * n + 0.5)] += weight;
        }
    }
}

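// smoothArray() applies a simple iterative smoothing: on every pass each bin
// donates 10% of its pre-pass content to each existing neighbour, which
// gently washes out statistical fluctuations in the binned PDFs.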
static void smoothArray(unsigned int n, double *values, unsigned int nTimes)
{
    for(unsigned int iter = 0; iter < nTimes; iter++) {
        double hold = n > 0 ? values[0] : 0.0;
        for(unsigned int i = 0; i < n; i++) {
            double delta = hold * 0.1;
            double rem = 0.0;
            if (i > 0) {
                values[i - 1] += delta;
                rem -= delta;
            }
            if (i < n - 1) {
                hold = values[i + 1];
                values[i + 1] += delta;
                rem -= delta;
            }
            values[i] += rem;
        }
    }
}

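// trainEnd() advances the per-variable state machine: entries still in the
// range-finding phase receive their final range and trigger another training
// pass, while filled entries get their edge bins doubled (presumably to
// compensate for the half-width outer bins of the rounding binning used in
// trainData()), are smoothed and marked done.  Once all entries are done,
// monitoring histograms are booked if monitoring is enabled.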
void ProcLikelihood::trainEnd()
{
    bool done = true;
    if (iteration == ITER_FILL)
        iteration = ITER_DONE;

    for(std::vector<SigBkg>::iterator iter = pdfs.begin();
        iter != pdfs.end(); iter++) {
        switch(iter->iteration) {
        case ITER_EMPTY:
        case ITER_RANGE:
            iter->background.range = iter->signal.range;
            iter->iteration = ITER_FILL;
            done = false;
            break;
        case ITER_FILL:
            iter->signal.distr.front() *= 2;
            iter->signal.distr.back() *= 2;
            smoothArray(iter->signal.distr.size(),
                        &iter->signal.distr.front(),
                        iter->smooth);

            iter->background.distr.front() *= 2;
            iter->background.distr.back() *= 2;
            smoothArray(iter->background.distr.size(),
                        &iter->background.distr.front(),
                        iter->smooth);

            iter->iteration = ITER_DONE;
            break;
        default:
            /* shut up */;
        }
    }

    if (done)
        trained = true;

    if (done && monitoring) {
        std::vector<SourceVariable*> inputs = getInputs().get();
        if (categoryIdx >= 0)
            inputs.erase(inputs.begin() + categoryIdx);

        for(std::vector<SigBkg>::iterator iter = pdfs.begin();
            iter != pdfs.end(); iter++) {
            unsigned int idx = iter - pdfs.begin();
            unsigned int catIdx = idx % nCategories;
            unsigned int varIdx = idx / nCategories;
            SourceVariable *var = inputs[varIdx];
            std::string name =
                (const char*)var->getSource()->getName()
                + std::string("_")
                + (const char*)var->getName();
            // reconstructed: the histogram title, dropped from the listing
            std::string title = name;
            if (categoryIdx >= 0) {
                name += Form("_CAT%d", catIdx);
                title += Form(" (cat. %d)", catIdx);
            }

            unsigned int n = iter->signal.distr.size() - 1;
            double min = iter->signal.range.min -
                         0.5 * iter->signal.range.width() / n;
            double max = iter->signal.range.max +
                         0.5 * iter->signal.range.width() / n;
            TH1F *histo = monitoring->book<TH1F>(name + "_sig",
                (name + "_sig").c_str(),
                (title + " signal").c_str(), n + 1, min, max);
            for(unsigned int i = 0; i < n; i++)
                histo->SetBinContent(
                    i + 1, iter->signal.distr[i]);

            n = iter->background.distr.size() - 1;
            min = iter->background.range.min -
                  0.5 * iter->background.range.width() / n;
            max = iter->background.range.max +
                  0.5 * iter->background.range.width() / n;
            histo = monitoring->book<TH1F>(name + "_bkg",
                (name + "_bkg").c_str(),
                (title + " background").c_str(),
                n + 1, min, max);
            for(unsigned int i = 0; i < n; i++)
                histo->SetBinContent(
                    i + 1, iter->background.distr[i]);
        }
    }
}

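// xmlParsePDF() restores one PDF (range plus binned values) from the <pdf>
// element of a previously saved training file.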
static void xmlParsePDF(ProcLikelihood::PDF &pdf, DOMElement *elem)
{
    if (!elem ||
        std::strcmp(XMLSimpleStr(elem->getNodeName()), "pdf") != 0)
        throw cms::Exception("ProcLikelihood")
            << "Expected pdf tag in sigbkg train data."
            << std::endl;

    pdf.range.min = XMLDocument::readAttribute<double>(elem, "lower");
    pdf.range.max = XMLDocument::readAttribute<double>(elem, "upper");

    pdf.distr.clear();
    for(DOMNode *node = elem->getFirstChild();
        node; node = node->getNextSibling()) {
        if (node->getNodeType() != DOMNode::ELEMENT_NODE)
            continue;

        if (std::strcmp(XMLSimpleStr(node->getNodeName()),
                        "value") != 0)
            throw cms::Exception("ProcLikelihood")
                << "Expected value tag in train file."
                << std::endl;

        pdf.distr.push_back(XMLDocument::readContent<double>(node));
    }
}

namespace {
    struct Id {
        AtomicId     source;
        AtomicId     name;
        unsigned int category;

        inline Id(AtomicId source, AtomicId name,
                  unsigned int category) :
            source(source), name(name), category(category) {}

        inline bool operator == (const Id &other) const
        {
            return source == other.source &&
                   name == other.name &&
                   category == other.category;
        }

        inline bool operator < (const Id &other) const
        {
            if (source < other.source)
                return true;
            if (!(source == other.source))
                return false;
            if (name < other.name)
                return true;
            if (!(name == other.name))
                return false;
            return category < other.category;
        }
    };
}

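// load() restores the training state from the processor's train file, or
// returns false if no such file exists yet.  Two formats are supported:
// version 1 stores the PDFs in configuration order, version 2 identifies
// each PDF by its source/name/category attributes.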
bool ProcLikelihood::load()
{
    std::string filename = trainer->trainFileName(this, "xml");
    if (!exists(filename))
        return false;

    XMLDocument xml(filename);
    DOMElement *elem = xml.getRootNode();
    if (std::strcmp(XMLSimpleStr(elem->getNodeName()),
                    "ProcLikelihood") != 0)
        throw cms::Exception("ProcLikelihood")
            << "XML training data file has bad root node."
            << std::endl;

    unsigned int version = XMLDocument::readAttribute<unsigned int>(
        elem, "version", 1);

    if (version < 1 || version > 2)
        throw cms::Exception("ProcLikelihood")
            << "Unsupported version " << version
            << " in train file." << std::endl;

    DOMNode *node;
    for(node = elem->getFirstChild();
        node; node = node->getNextSibling()) {
        if (node->getNodeType() != DOMNode::ELEMENT_NODE)
            continue;

        if (std::strcmp(XMLSimpleStr(node->getNodeName()),
                        "categories") != 0)
            throw cms::Exception("ProcLikelihood")
                << "Expected categories tag in train file."
                << std::endl;

        unsigned int i = 0;
        for(DOMNode *subNode = node->getFirstChild();
            subNode; subNode = subNode->getNextSibling()) {
            if (subNode->getNodeType() != DOMNode::ELEMENT_NODE)
                continue;

            if (i >= nCategories)
                throw cms::Exception("ProcLikelihood")
                    << "Too many categories in train "
                       "file." << std::endl;

            if (std::strcmp(XMLSimpleStr(subNode->getNodeName()),
                            "category") != 0)
                throw cms::Exception("ProcLikelihood")
                    << "Expected category tag in train "
                       "file." << std::endl;

            elem = static_cast<DOMElement*>(subNode);

            sigSum[i] = XMLDocument::readAttribute<double>(
                elem, "signal");
            bkgSum[i] = XMLDocument::readAttribute<double>(
                elem, "background");
            i++;
        }
        if (i < nCategories)
            throw cms::Exception("ProcLikelihood")
                << "Too few categories in train file."
                << std::endl;

        break;
    }

    std::map<Id, SigBkg*> pdfMap;

    for(std::vector<SigBkg>::iterator iter = pdfs.begin();
        iter != pdfs.end(); ++iter) {
        SigBkg *ptr = &*iter;
        unsigned int idx = iter - pdfs.begin();
        unsigned int catIdx = idx % nCategories;
        unsigned int varIdx = idx / nCategories;
        if (categoryIdx >= 0 && (int)varIdx >= categoryIdx)
            varIdx++;
        const SourceVariable *var = getInputs().get()[varIdx];
        Id id(var->getSource()->getName(), var->getName(), catIdx);

        pdfMap[id] = ptr;
    }

    std::vector<SigBkg>::iterator cur = pdfs.begin();

    for(node = node->getNextSibling();
        node; node = node->getNextSibling()) {
        if (node->getNodeType() != DOMNode::ELEMENT_NODE)
            continue;

        if (std::strcmp(XMLSimpleStr(node->getNodeName()),
                        "sigbkg") != 0)
            throw cms::Exception("ProcLikelihood")
                << "Expected sigbkg tag in train file."
                << std::endl;
        elem = static_cast<DOMElement*>(node);

        SigBkg *pdf = 0;
        switch(version) {
        case 1:
            if (cur == pdfs.end())
                throw cms::Exception("ProcLikelihood")
                    << "Superfluous SigBkg in train data."
                    << std::endl;
            pdf = &*cur++;
            break;
        case 2: {
            Id id(XMLDocument::readAttribute<std::string>(
                      elem, "source"),
                  XMLDocument::readAttribute<std::string>(
                      elem, "name"),
                  XMLDocument::readAttribute<unsigned int>(
                      elem, "category", 0));
            std::map<Id, SigBkg*>::const_iterator pos =
                pdfMap.find(id);
            if (pos == pdfMap.end())
                continue;
            else
                pdf = pos->second;
        }   break;
        }

        for(node = elem->getFirstChild();
            node && node->getNodeType() != DOMNode::ELEMENT_NODE;
            node = node->getNextSibling());
        DOMElement *elemSig =
            node ? static_cast<DOMElement*>(node) : 0;

        for(node = node->getNextSibling();
            node && node->getNodeType() != DOMNode::ELEMENT_NODE;
            node = node->getNextSibling());
        while(node && node->getNodeType() != DOMNode::ELEMENT_NODE)
            node = node->getNextSibling();
        DOMElement *elemBkg =
            node ? static_cast<DOMElement*>(node) : 0;

        for(node = node->getNextSibling();
            node && node->getNodeType() != DOMNode::ELEMENT_NODE;
            node = node->getNextSibling());
        if (node)
            throw cms::Exception("ProcLikelihood")
                << "Superfluous tags in sigbkg train data."
                << std::endl;

        xmlParsePDF(pdf->signal, elemSig);
        xmlParsePDF(pdf->background, elemBkg);

        pdf->iteration = ITER_DONE;

        node = elem;
    }

    if (version == 1 && cur != pdfs.end())
        throw cms::Exception("ProcLikelihood")
            << "Missing SigBkg in train data." << std::endl;

    iteration = ITER_DONE;
    trained = true;
    for(std::vector<SigBkg>::const_iterator iter = pdfs.begin();
        iter != pdfs.end(); ++iter) {
        if (iter->iteration != ITER_DONE) {
            trained = false;
            break;
        }
    }

    return true;
}

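// xmlStorePDF() serialises one PDF as a <pdf> element with lower/upper range
// attributes and one <value> child per bin.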
static DOMElement *xmlStorePDF(DOMDocument *doc,
                               const ProcLikelihood::PDF &pdf)
{
    DOMElement *elem = doc->createElement(XMLUniStr("pdf"));

    XMLDocument::writeAttribute(elem, "lower", pdf.range.min);
    XMLDocument::writeAttribute(elem, "upper", pdf.range.max);

    for(std::vector<double>::const_iterator iter =
            pdf.distr.begin(); iter != pdf.distr.end(); iter++) {
        DOMElement *value = doc->createElement(XMLUniStr("value"));
        elem->appendChild(value);

        XMLDocument::writeContent<double>(value, doc, *iter);
    }

    return elem;
}

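// save() writes the version-2 train file: per-category signal/background
// sums followed by one <sigbkg> element (tagged with source, name and, if
// applicable, category) containing the signal and background PDFs.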
void ProcLikelihood::save()
{
    XMLDocument xml(trainer->trainFileName(this, "xml"), true);
    DOMDocument *doc = xml.createDocument("ProcLikelihood");
    XMLDocument::writeAttribute(doc->getDocumentElement(), "version", 2);

    DOMElement *elem = doc->createElement(XMLUniStr("categories"));
    xml.getRootNode()->appendChild(elem);
    for(unsigned int i = 0; i < nCategories; i++) {
        DOMElement *category = doc->createElement(XMLUniStr("category"));
        elem->appendChild(category);
        XMLDocument::writeAttribute(category, "signal", sigSum[i]);
        XMLDocument::writeAttribute(category, "background", bkgSum[i]);
    }

    for(std::vector<SigBkg>::const_iterator iter = pdfs.begin();
        iter != pdfs.end(); iter++) {
        elem = doc->createElement(XMLUniStr("sigbkg"));
        xml.getRootNode()->appendChild(elem);

        unsigned int idx = iter - pdfs.begin();
        unsigned int catIdx = idx % nCategories;
        unsigned int varIdx = idx / nCategories;
        if (categoryIdx >= 0 && (int)varIdx >= categoryIdx)
            varIdx++;
        const SourceVariable *var = getInputs().get()[varIdx];
        XMLDocument::writeAttribute(elem, "source",
            (const char*)var->getSource()->getName());
        XMLDocument::writeAttribute(elem, "name",
            (const char*)var->getName());
        if (categoryIdx >= 0)
            XMLDocument::writeAttribute(elem, "category", catIdx);

        elem->appendChild(xmlStorePDF(doc, iter->signal));
        elem->appendChild(xmlStorePDF(doc, iter->background));
    }
}

} // anonymous namespace