CMS 3D CMS Logo

DAClusterizerInZ.cc
Go to the documentation of this file.
5 
6 using namespace std;
7 
8 namespace {
9 
10  bool recTrackLessZ1(const DAClusterizerInZ::track_t& tk1, const DAClusterizerInZ::track_t& tk2) {
11  return tk1.z < tk2.z;
12  }
13 } // namespace
14 
15 vector<DAClusterizerInZ::track_t> DAClusterizerInZ::fill(const vector<reco::TransientTrack>& tracks) const {
16  // prepare track data for clustering
17  vector<track_t> tks;
18  for (vector<reco::TransientTrack>::const_iterator it = tracks.begin(); it != tracks.end(); it++) {
19  track_t t;
20  t.z = ((*it).stateAtBeamLine().trackStateAtPCA()).position().z();
21  double tantheta = tan(((*it).stateAtBeamLine().trackStateAtPCA()).momentum().theta());
22  double phi = ((*it).stateAtBeamLine().trackStateAtPCA()).momentum().phi();
23  // get the beam-spot
24  reco::BeamSpot beamspot = (it->stateAtBeamLine()).beamSpot();
25  t.dz2 = pow((*it).track().dzError(), 2) // track errror
26  + (pow(beamspot.BeamWidthX() * cos(phi), 2) + pow(beamspot.BeamWidthY() * sin(phi), 2)) /
27  pow(tantheta, 2) // beam-width induced
28  + pow(vertexSize_, 2); // intrinsic vertex size, safer for outliers and short lived decays
29  if (d0CutOff_ > 0) {
30  Measurement1D IP = (*it).stateAtBeamLine().transverseImpactParameter(); // error constains beamspot
31  t.pi = 1. / (1. + exp(pow(IP.value() / IP.error(), 2) - pow(d0CutOff_, 2))); // reduce weight for high ip tracks
32  } else {
33  t.pi = 1.;
34  }
35  t.tt = &(*it);
36  t.Z = 1.;
37  tks.push_back(t);
38  }
39  return tks;
40 }
41 
42 double DAClusterizerInZ::Eik(const track_t& t, const vertex_t& k) const { return pow(t.z - k.z, 2) / t.dz2; }
43 
44 double DAClusterizerInZ::update(double beta, vector<track_t>& tks, vector<vertex_t>& y) const {
45  //update weights and vertex positions
46  // mass constrained annealing without noise
47  // returns the squared sum of changes of vertex positions
48 
49  unsigned int nt = tks.size();
50 
51  //initialize sums
52  double sumpi = 0;
53  for (vector<vertex_t>::iterator k = y.begin(); k != y.end(); k++) {
54  k->se = 0;
55  k->sw = 0;
56  k->swz = 0;
57  k->swE = 0;
58  k->Tc = 0;
59  }
60 
61  // loop over tracks
62  for (unsigned int i = 0; i < nt; i++) {
63  // update pik and Zi
64  double Zi = 0.;
65  for (vector<vertex_t>::iterator k = y.begin(); k != y.end(); k++) {
66  k->ei = exp(-beta * Eik(tks[i], *k)); // cache exponential for one track at a time
67  Zi += k->pk * k->ei;
68  }
69  tks[i].Z = Zi;
70 
71  // normalization for pk
72  if (tks[i].Z > 0) {
73  sumpi += tks[i].pi;
74  // accumulate weighted z and weights for vertex update
75  for (vector<vertex_t>::iterator k = y.begin(); k != y.end(); k++) {
76  k->se += tks[i].pi * k->ei / Zi;
77  double w = k->pk * tks[i].pi * k->ei / Zi / tks[i].dz2;
78  k->sw += w;
79  k->swz += w * tks[i].z;
80  k->swE += w * Eik(tks[i], *k);
81  }
82  } else {
83  sumpi += tks[i].pi;
84  }
85 
86  } // end of track loop
87 
88  // now update z and pk
89  double delta = 0;
90  for (vector<vertex_t>::iterator k = y.begin(); k != y.end(); k++) {
91  if (k->sw > 0) {
92  double znew = k->swz / k->sw;
93  delta += pow(k->z - znew, 2);
94  k->z = znew;
95  k->Tc = 2 * k->swE / k->sw;
96  } else {
97  edm::LogInfo("sumw") << "invalid sum of weights in fit: " << k->sw << endl;
98  if (verbose_) {
99  cout << " a cluster melted away ? pk=" << k->pk << " sumw=" << k->sw << endl;
100  }
101  k->Tc = -1;
102  }
103 
104  k->pk = k->pk * k->se / sumpi;
105  }
106 
107  // return how much the prototypes moved
108  return delta;
109 }
110 
111 double DAClusterizerInZ::update(double beta, vector<track_t>& tks, vector<vertex_t>& y, double& rho0) const {
112  // MVF style, no more vertex weights, update tracks weights and vertex positions, with noise
113  // returns the squared sum of changes of vertex positions
114 
115  unsigned int nt = tks.size();
116 
117  //initialize sums
118  for (vector<vertex_t>::iterator k = y.begin(); k != y.end(); k++) {
119  k->se = 0;
120  k->sw = 0;
121  k->swz = 0;
122  k->swE = 0;
123  k->Tc = 0;
124  }
125 
126  // loop over tracks
127  for (unsigned int i = 0; i < nt; i++) {
128  // update pik and Zi
129  double Zi = rho0 * exp(-beta * dzCutOff_ * dzCutOff_); // cut-off
130  for (vector<vertex_t>::iterator k = y.begin(); k != y.end(); k++) {
131  k->ei = exp(-beta * Eik(tks[i], *k)); // cache exponential for one track at a time
132  Zi += k->pk * k->ei;
133  }
134  tks[i].Z = Zi;
135 
136  // normalization
137  if (tks[i].Z > 0) {
138  // accumulate weighted z and weights for vertex update
139  for (vector<vertex_t>::iterator k = y.begin(); k != y.end(); k++) {
140  k->se += tks[i].pi * k->ei / Zi;
141  double w = k->pk * tks[i].pi * k->ei / Zi / tks[i].dz2;
142  k->sw += w;
143  k->swz += w * tks[i].z;
144  k->swE += w * Eik(tks[i], *k);
145  }
146  }
147 
148  } // end of track loop
149 
150  // now update z
151  double delta = 0;
152  for (vector<vertex_t>::iterator k = y.begin(); k != y.end(); k++) {
153  if (k->sw > 0) {
154  double znew = k->swz / k->sw;
155  delta += pow(k->z - znew, 2);
156  k->z = znew;
157  k->Tc = 2 * k->swE / k->sw;
158  } else {
159  edm::LogInfo("sumw") << "invalid sum of weights in fit: " << k->sw << endl;
160  if (verbose_) {
161  cout << " a cluster melted away ? pk=" << k->pk << " sumw=" << k->sw << endl;
162  }
163  k->Tc = 0;
164  }
165  }
166 
167  // return how much the prototypes moved
168  return delta;
169 }
170 
171 bool DAClusterizerInZ::merge(vector<vertex_t>& y, int nt) const {
172  // merge clusters that collapsed or never separated, return true if vertices were merged, false otherwise
173 
174  if (y.size() < 2)
175  return false;
176 
177  for (vector<vertex_t>::iterator k = y.begin(); (k + 1) != y.end(); k++) {
178  if (fabs((k + 1)->z - k->z) < 1.e-3) { // with fabs if only called after freeze-out (splitAll() at highter T)
179  double rho = k->pk + (k + 1)->pk;
180  if (rho > 0) {
181  k->z = (k->pk * k->z + (k + 1)->z * (k + 1)->pk) / rho;
182  } else {
183  k->z = 0.5 * (k->z + (k + 1)->z);
184  }
185  k->pk = rho;
186 
187  y.erase(k + 1);
188  return true;
189  }
190  }
191 
192  return false;
193 }
194 
195 bool DAClusterizerInZ::merge(vector<vertex_t>& y, double& beta) const {
196  // merge clusters that collapsed or never separated,
197  // only merge if the estimated critical temperature of the merged vertex is below the current temperature
198  // return true if vertices were merged, false otherwise
199  if (y.size() < 2)
200  return false;
201 
202  for (vector<vertex_t>::iterator k = y.begin(); (k + 1) != y.end(); k++) {
203  if (fabs((k + 1)->z - k->z) < 2.e-3) {
204  double rho = k->pk + (k + 1)->pk;
205  double swE = k->swE + (k + 1)->swE - k->pk * (k + 1)->pk / rho * pow((k + 1)->z - k->z, 2);
206  double Tc = 2 * swE / (k->sw + (k + 1)->sw);
207 
208  if (Tc * beta < 1) {
209  if (rho > 0) {
210  k->z = (k->pk * k->z + (k + 1)->z * (k + 1)->pk) / rho;
211  } else {
212  k->z = 0.5 * (k->z + (k + 1)->z);
213  }
214  k->pk = rho;
215  k->sw += (k + 1)->sw;
216  k->swE = swE;
217  k->Tc = Tc;
218  y.erase(k + 1);
219  return true;
220  }
221  }
222  }
223 
224  return false;
225 }
226 
227 bool DAClusterizerInZ::purge(vector<vertex_t>& y, vector<track_t>& tks, double& rho0, const double beta) const {
228  // eliminate clusters with only one significant/unique track
229  if (y.size() < 2)
230  return false;
231 
232  unsigned int nt = tks.size();
233  double sumpmin = nt;
234  vector<vertex_t>::iterator k0 = y.end();
235  for (vector<vertex_t>::iterator k = y.begin(); k != y.end(); k++) {
236  int nUnique = 0;
237  double sump = 0;
238  double pmax = k->pk / (k->pk + rho0 * exp(-beta * dzCutOff_ * dzCutOff_));
239  for (unsigned int i = 0; i < nt; i++) {
240  if (tks[i].Z > 0) {
241  double p = k->pk * exp(-beta * Eik(tks[i], *k)) / tks[i].Z;
242  sump += p;
243  if ((p > 0.9 * pmax) && (tks[i].pi > 0)) {
244  nUnique++;
245  }
246  }
247  }
248 
249  if ((nUnique < 2) && (sump < sumpmin)) {
250  sumpmin = sump;
251  k0 = k;
252  }
253  }
254 
255  if (k0 != y.end()) {
256  if (verbose_) {
257  cout << "eliminating prototype at " << k0->z << " with sump=" << sumpmin << endl;
258  }
259  //rho0+=k0->pk;
260  y.erase(k0);
261  return true;
262  } else {
263  return false;
264  }
265 }
266 
267 double DAClusterizerInZ::beta0(double betamax, vector<track_t>& tks, vector<vertex_t>& y) const {
268  double T0 = 0; // max Tc for beta=0
269  // estimate critical temperature from beta=0 (T=inf)
270  unsigned int nt = tks.size();
271 
272  for (vector<vertex_t>::iterator k = y.begin(); k != y.end(); k++) {
273  // vertex fit at T=inf
274  double sumwz = 0;
275  double sumw = 0;
276  for (unsigned int i = 0; i < nt; i++) {
277  double w = tks[i].pi / tks[i].dz2;
278  sumwz += w * tks[i].z;
279  sumw += w;
280  }
281  k->z = sumwz / sumw;
282 
283  // estimate Tcrit, eventually do this in the same loop
284  double a = 0, b = 0;
285  for (unsigned int i = 0; i < nt; i++) {
286  double dx = tks[i].z - (k->z);
287  double w = tks[i].pi / tks[i].dz2;
288  a += w * pow(dx, 2) / tks[i].dz2;
289  b += w;
290  }
291  double Tc = 2. * a / b; // the critical temperature of this vertex
292  if (Tc > T0)
293  T0 = Tc;
294  } // vertex loop (normally there should be only one vertex at beta=0)
295 
296  if (T0 > 1. / betamax) {
297  return betamax / pow(coolingFactor_, int(log(T0 * betamax) / log(coolingFactor_)) - 1);
298  } else {
299  // ensure at least one annealing step
300  return betamax / coolingFactor_;
301  }
302 }
303 
bool DAClusterizerInZ::split(double beta, vector<track_t>& tks, vector<vertex_t>& y, double threshold) const {
  // split only critical vertices (Tc >~ T=1/beta <==> beta*Tc>~1)
  // an update must have been made just before doing this (same beta, no merging)
  // returns true if at least one cluster was split
  // NOTE(review): 'threshold' is not used in this implementation.

  double epsilon = 1e-3;  // split all single vertices by 10 um
  bool split = false;

  // avoid left-right biases by splitting highest Tc first

  // collect (Tc, index) pairs of critical vertices, then sort by descending Tc
  std::vector<std::pair<double, unsigned int> > critical;
  for (unsigned int ik = 0; ik < y.size(); ik++) {
    if (beta * y[ik].Tc > 1.) {
      critical.push_back(make_pair(y[ik].Tc, ik));
    }
  }
  stable_sort(critical.begin(), critical.end(), std::greater<std::pair<double, unsigned int> >());

  for (unsigned int ic = 0; ic < critical.size(); ic++) {
    unsigned int ik = critical[ic].second;
    // estimate subcluster positions and weight:
    // tracks left of the prototype feed (p1,z1,w1), tracks right feed (p2,z2,w2)
    double p1 = 0, z1 = 0, w1 = 0;
    double p2 = 0, z2 = 0, w2 = 0;
    //double sumpi=0;
    for (unsigned int i = 0; i < tks.size(); i++) {
      if (tks[i].Z > 0) {
        //sumpi+=tks[i].pi;
        double p = y[ik].pk * exp(-beta * Eik(tks[i], y[ik])) / tks[i].Z * tks[i].pi;
        double w = p / tks[i].dz2;
        if (tks[i].z < y[ik].z) {
          p1 += p;
          z1 += w * tks[i].z;
          w1 += w;
        } else {
          p2 += p;
          z2 += w * tks[i].z;
          w2 += w;
        }
      }
    }
    // fall back to a fixed +-epsilon offset when one side carries no weight
    if (w1 > 0) {
      z1 = z1 / w1;
    } else {
      z1 = y[ik].z - epsilon;
    }
    if (w2 > 0) {
      z2 = z2 / w2;
    } else {
      z2 = y[ik].z + epsilon;
    }

    // reduce split size if there is not enough room between the neighbours
    if ((ik > 0) && (y[ik - 1].z >= z1)) {
      z1 = 0.5 * (y[ik].z + y[ik - 1].z);
    }
    if ((ik + 1 < y.size()) && (y[ik + 1].z <= z2)) {
      z2 = 0.5 * (y[ik].z + y[ik + 1].z);
    }

    // split if the new subclusters are significantly separated
    if ((z2 - z1) > epsilon) {
      split = true;
      vertex_t vnew;
      // share the weight pk in proportion to the two side populations
      vnew.pk = p1 * y[ik].pk / (p1 + p2);
      y[ik].pk = p2 * y[ik].pk / (p1 + p2);
      vnew.z = z1;
      y[ik].z = z2;
      y.insert(y.begin() + ik, vnew);

      // adjust remaining pointers: the insertion shifted every index above ik
      for (unsigned int jc = ic; jc < critical.size(); jc++) {
        if (critical[jc].second > ik) {
          critical[jc].second++;
        }
      }
    }
  }

  // stable_sort(y.begin(), y.end(), clusterLessZ);
  return split;
}
385 
void DAClusterizerInZ::splitAll(vector<vertex_t>& y) const {
  // Split every prototype, used by the legacy (non-Tc) annealing scheme:
  // isolated vertices are replaced by a pair offset by +-epsilon with the
  // weight shared equally; crowded ones are pushed apart instead.
  double epsilon = 1e-3;  // split all single vertices by 10 um
  double zsep = 2 * epsilon;  // split vertices that are isolated by at least zsep (vertices that haven't collapsed)
  vector<vertex_t> y1;

  for (vector<vertex_t>::iterator k = y.begin(); k != y.end(); k++) {
    // isolated = no neighbour within zsep on either side (ends count as isolated)
    if (((k == y.begin()) || (k - 1)->z < k->z - zsep) && (((k + 1) == y.end()) || (k + 1)->z > k->z + zsep)) {
      // isolated prototype, split it into two halves of equal weight
      vertex_t vnew;
      vnew.z = k->z - epsilon;
      (*k).z = k->z + epsilon;
      vnew.pk = 0.5 * (*k).pk;
      (*k).pk = 0.5 * (*k).pk;
      y1.push_back(vnew);
      y1.push_back(*k);

    } else if (y1.empty() || (y1.back().z < k->z - zsep)) {
      // not isolated but far enough from the previous output vertex: keep as is
      y1.push_back(*k);
    } else {
      // too close to the previously emitted prototype: nudge the pair apart
      y1.back().z -= epsilon;
      k->z += epsilon;
      y1.push_back(*k);
    }
  }  // vertex loop

  y = y1;
}
413 
  // NOTE(review): the constructor signature line is missing from this view
  // (the extraction jumps from line 413 to 415); the body below reads the
  // configuration from an edm::ParameterSet named 'conf' — confirm against
  // the header/original source.

  // some defaults to avoid uninitialized variables
  verbose_ = conf.getUntrackedParameter<bool>("verbose", false);
  useTc_ = true;
  betamax_ = 0.1;
  betastop_ = 1.0;
  coolingFactor_ = 0.8;
  maxIterations_ = 100;
  vertexSize_ = 0.05;  // 0.5 mm
  dzCutOff_ = 4.0;  // Adaptive Fitter uses 3.0 but that appears to be a bit tight here sometimes

  // configure

  double Tmin = conf.getParameter<double>("Tmin");
  vertexSize_ = conf.getParameter<double>("vertexSize");
  coolingFactor_ = conf.getParameter<double>("coolingFactor");
  d0CutOff_ = conf.getParameter<double>("d0CutOff");
  dzCutOff_ = conf.getParameter<double>("dzCutOff");
  maxIterations_ = 100;
  if (Tmin == 0) {
    // keep the default betamax_ when the configured Tmin is invalid
    cout << "DAClusterizerInZ: invalid Tmin" << Tmin << " reset do default " << 1. / betamax_ << endl;
  } else {
    betamax_ = 1. / Tmin;
  }

  // for testing, negative cooling factor: revert to old splitting scheme
  if (coolingFactor_ < 0) {
    coolingFactor_ = -coolingFactor_;
    useTc_ = false;
  }
}
445 
void DAClusterizerInZ::dump(const double beta,
                            const vector<vertex_t>& y,
                            const vector<track_t>& tks0,
                            int verbosity) const {
  // Debug printout of the current annealing state: vertex positions, critical
  // temperatures and weights; for verbosity > 0 also a per-track table with
  // hit-pattern summary, impact parameter and assignment probabilities.
  // copy and sort for nicer printout
  vector<track_t> tks;
  for (vector<track_t>::const_iterator t = tks0.begin(); t != tks0.end(); t++) {
    tks.push_back(*t);
  }
  stable_sort(tks.begin(), tks.end(), recTrackLessZ1);

  cout << "-----DAClusterizerInZ::dump ----" << endl;
  cout << "beta=" << beta << " betamax= " << betamax_ << endl;
  cout << " z= ";
  cout.precision(4);
  // one column per vertex prototype
  for (vector<vertex_t>::const_iterator k = y.begin(); k != y.end(); k++) {
    cout << setw(8) << fixed << k->z;
  }
  cout << endl << "T=" << setw(15) << 1. / beta << " Tc= ";
  for (vector<vertex_t>::const_iterator k = y.begin(); k != y.end(); k++) {
    cout << setw(8) << fixed << k->Tc;
  }

  cout << endl << " pk=";
  double sumpk = 0;
  for (vector<vertex_t>::const_iterator k = y.begin(); k != y.end(); k++) {
    cout << setw(8) << setprecision(3) << fixed << k->pk;
    sumpk += k->pk;
  }
  cout << endl;

  if (verbosity > 0) {
    double E = 0, F = 0;  // E: summed p-weighted energy, F: free energy
    cout << endl;
    cout << "---- z +/- dz ip +/-dip pt phi eta weights ----" << endl;
    cout.precision(4);
    for (unsigned int i = 0; i < tks.size(); i++) {
      if (tks[i].Z > 0) {
        F -= log(tks[i].Z) / beta;  // free-energy contribution of this track
      }
      double tz = tks[i].z;
      cout << setw(3) << i << ")" << setw(8) << fixed << setprecision(4) << tz << " +/-" << setw(6) << sqrt(tks[i].dz2);

      // '*' marks high-purity tracks
      if (tks[i].tt->track().quality(reco::TrackBase::highPurity)) {
        cout << " *";
      } else {
        cout << " ";
      }
      // '+'/'-': valid hit in the first pixel barrel layer or not
      if (tks[i].tt->track().hitPattern().hasValidHitInPixelLayer(PixelSubdetector::SubDetector::PixelBarrel, 1)) {
        cout << "+";
      } else {
        cout << "-";
      }
      cout << setw(1)
           << tks[i]
                  .tt->track()
                  .hitPattern()
                  .pixelBarrelLayersWithMeasurement();  // see DataFormats/TrackReco/interface/HitPattern.h
      cout << setw(1) << tks[i].tt->track().hitPattern().pixelEndcapLayersWithMeasurement();
      // strip-tracker layer count, printed in hex to keep it one character wide
      cout << setw(1) << hex
           << tks[i].tt->track().hitPattern().trackerLayersWithMeasurement() -
                  tks[i].tt->track().hitPattern().pixelLayersWithMeasurement()
           << dec;
      cout << "=" << setw(1) << hex
           << tks[i].tt->track().hitPattern().numberOfLostHits(reco::HitPattern::MISSING_OUTER_HITS) << dec;

      Measurement1D IP = tks[i].tt->stateAtBeamLine().transverseImpactParameter();
      cout << setw(8) << IP.value() << "+/-" << setw(6) << IP.error();
      // pt is printed signed: the sign carries the charge
      cout << " " << setw(6) << setprecision(2) << tks[i].tt->track().pt() * tks[i].tt->track().charge();
      cout << " " << setw(5) << setprecision(2) << tks[i].tt->track().phi() << " " << setw(5) << setprecision(2)
           << tks[i].tt->track().eta();

      // per-vertex assignment probabilities for this track
      double sump = 0.;
      for (vector<vertex_t>::const_iterator k = y.begin(); k != y.end(); k++) {
        if ((tks[i].pi > 0) && (tks[i].Z > 0)) {
          //double p=pik(beta,tks[i],*k);
          double p = k->pk * exp(-beta * Eik(tks[i], *k)) / tks[i].Z;
          if (p > 0.0001) {
            cout << setw(8) << setprecision(3) << p;
          } else {
            cout << " . ";
          }
          E += p * Eik(tks[i], *k);
          sump += p;
        } else {
          cout << " ";
        }
      }
      cout << endl;
    }
    cout << endl << "T=" << 1 / beta << " E=" << E << " n=" << y.size() << " F= " << F << endl << "----------" << endl;
  }
}
539 
vector<TransientVertex> DAClusterizerInZ::vertices(const vector<reco::TransientTrack>& tracks,
                                                   const int verbosity) const {
  // Deterministic-annealing driver: cool from the first critical temperature
  // down to Tmin (beta = betamax_), splitting/merging prototypes on the way,
  // then switch on outlier rejection, purge insignificant vertices while
  // cooling further to Tstop, and finally assign tracks to vertices.
  // NOTE(review): 'verbosity' is not used here; dump() is called with a
  // fixed verbosity of 2 when verbose_ is set.
  vector<track_t> tks = fill(tracks);
  unsigned int nt = tracks.size();
  double rho0 = 0.0;  // start with no outlier rejection

  vector<TransientVertex> clusters;
  if (tks.empty())
    return clusters;

  vector<vertex_t> y;  // the vertex prototypes

  // initialize: single vertex at infinite temperature
  vertex_t vstart;
  vstart.z = 0.;
  vstart.pk = 1.;
  y.push_back(vstart);
  int niter = 0;  // number of iterations

  // estimate first critical temperature
  double beta = beta0(betamax_, tks, y);
  niter = 0;
  // equilibrate at the starting temperature
  while ((update(beta, tks, y) > 1.e-6) && (niter++ < maxIterations_)) {
  }

  // annealing loop, stop when T<Tmin (i.e. beta>1/Tmin)
  while (beta < betamax_) {
    if (useTc_) {
      // Tc-based scheme: merge collapsed prototypes, split critical ones, cool
      update(beta, tks, y);
      while (merge(y, beta)) {
        update(beta, tks, y);
      }
      split(beta, tks, y, 1.);
      beta = beta / coolingFactor_;
    } else {
      // legacy scheme: cool first, then split every prototype
      beta = beta / coolingFactor_;
      splitAll(y);
    }

    // make sure we are not too far from equilibrium before cooling further
    niter = 0;
    while ((update(beta, tks, y) > 1.e-6) && (niter++ < maxIterations_)) {
    }
  }

  if (useTc_) {
    // last round of splitting, make sure no critical clusters are left
    update(beta, tks, y);
    while (merge(y, beta)) {
      update(beta, tks, y);
    }
    unsigned int ntry = 0;
    while (split(beta, tks, y, 1.) && (ntry++ < 10)) {
      niter = 0;
      while ((update(beta, tks, y) > 1.e-6) && (niter++ < maxIterations_)) {
      }
      merge(y, beta);
      update(beta, tks, y);
    }
  } else {
    // merge collapsed clusters
    while (merge(y, beta)) {
      update(beta, tks, y);
    }
    if (verbose_) {
      cout << "dump after 1st merging " << endl;
      dump(beta, y, tks, 2);
    }
  }

  // switch on outlier rejection
  rho0 = 1. / nt;
  for (vector<vertex_t>::iterator k = y.begin(); k != y.end(); k++) {
    k->pk = 1.;
  }  // democratic
  niter = 0;
  while ((update(beta, tks, y, rho0) > 1.e-8) && (niter++ < maxIterations_)) {
  }
  if (verbose_) {
    cout << "rho0=" << rho0 << " niter=" << niter << endl;
    dump(beta, y, tks, 2);
  }

  // merge again (some cluster split by outliers collapse here)
  while (merge(y, tks.size())) {
  }
  if (verbose_) {
    cout << "dump after 2nd merging " << endl;
    dump(beta, y, tks, 2);
  }

  // continue from freeze-out to Tstop (=1) without splitting, eliminate insignificant vertices
  while (beta <= betastop_) {
    while (purge(y, tks, rho0, beta)) {
      niter = 0;
      while ((update(beta, tks, y, rho0) > 1.e-6) && (niter++ < maxIterations_)) {
      }
    }
    beta /= coolingFactor_;
    niter = 0;
    while ((update(beta, tks, y, rho0) > 1.e-6) && (niter++ < maxIterations_)) {
    }
  }

  // // new, one last round of cleaning at T=Tstop
  // while(purge(y,tks,rho0, beta)){
  //   niter=0; while((update(beta, tks,y,rho0) > 1.e-6) && (niter++ < maxIterations_)){ }
  // }

  if (verbose_) {
    cout << "Final result, rho0=" << rho0 << endl;
    dump(beta, y, tks, 2);
  }

  // select significant tracks and use a TransientVertex as a container
  GlobalError dummyError;

  // ensure correct normalization of probabilities, should make double assginment reasonably impossible
  for (unsigned int i = 0; i < nt; i++) {
    tks[i].Z = rho0 * exp(-beta * dzCutOff_ * dzCutOff_);
    for (vector<vertex_t>::iterator k = y.begin(); k != y.end(); k++) {
      tks[i].Z += k->pk * exp(-beta * Eik(tks[i], *k));
    }
  }

  // assign each track to the first vertex claiming it with p > 0.5
  for (vector<vertex_t>::iterator k = y.begin(); k != y.end(); k++) {
    GlobalPoint pos(0, 0, k->z);
    vector<reco::TransientTrack> vertexTracks;
    for (unsigned int i = 0; i < nt; i++) {
      if (tks[i].Z > 0) {
        double p = k->pk * exp(-beta * Eik(tks[i], *k)) / tks[i].Z;
        if ((tks[i].pi > 0) && (p > 0.5)) {
          vertexTracks.push_back(*(tks[i].tt));
          tks[i].Z = 0;
        }  // setting Z=0 excludes double assignment
      }
    }
    TransientVertex v(pos, dummyError, vertexTracks, 5);
    clusters.push_back(v);
  }

  return clusters;
}
683 
684 vector<vector<reco::TransientTrack> > DAClusterizerInZ::clusterize(const vector<reco::TransientTrack>& tracks) const {
685  if (verbose_) {
686  cout << "###################################################" << endl;
687  cout << "# DAClusterizerInZ::clusterize nt=" << tracks.size() << endl;
688  cout << "###################################################" << endl;
689  }
690 
691  vector<vector<reco::TransientTrack> > clusters;
692  vector<TransientVertex> pv = vertices(tracks);
693 
694  if (verbose_) {
695  cout << "# DAClusterizerInZ::clusterize pv.size=" << pv.size() << endl;
696  }
697  if (pv.empty()) {
698  return clusters;
699  }
700 
701  // fill into clusters and merge
702  vector<reco::TransientTrack> aCluster = pv.begin()->originalTracks();
703 
704  for (vector<TransientVertex>::iterator k = pv.begin() + 1; k != pv.end(); k++) {
705  if (fabs(k->position().z() - (k - 1)->position().z()) > (2 * vertexSize_)) {
706  // close a cluster
707  clusters.push_back(aCluster);
708  aCluster.clear();
709  }
710  for (unsigned int i = 0; i < k->originalTracks().size(); i++) {
711  aCluster.push_back(k->originalTracks().at(i));
712  }
713  }
714  clusters.push_back(aCluster);
715 
716  return clusters;
717 }
DAClusterizerInZ::purge
bool purge(std::vector< vertex_t > &, std::vector< track_t > &, double &, const double) const
Definition: DAClusterizerInZ.cc:227
HIPAlignmentAlgorithm_cfi.verbosity
verbosity
Definition: HIPAlignmentAlgorithm_cfi.py:7
reco::HitPattern::MISSING_OUTER_HITS
Definition: HitPattern.h:155
alignBH_cfg.fixed
fixed
Definition: alignBH_cfg.py:54
PDWG_EXOHSCP_cff.tracks
tracks
Definition: PDWG_EXOHSCP_cff.py:28
Measurement1D
Definition: Measurement1D.h:11
w2
common ppss p3p6s2 common epss epspn46 common const1 w2
Definition: inclppp.h:1
mps_fire.i
i
Definition: mps_fire.py:428
pwdgSkimBPark_cfi.beamSpot
beamSpot
Definition: pwdgSkimBPark_cfi.py:5
MessageLogger.h
reco::ParticleMasses::k0
const double k0
Definition: ParticleMasses.h:7
groupFilesInBlocks.tt
int tt
Definition: groupFilesInBlocks.py:144
nt
int nt
Definition: AMPTWrapper.h:42
DAClusterizerInZ::clusterize
std::vector< std::vector< reco::TransientTrack > > clusterize(const std::vector< reco::TransientTrack > &tracks) const override
Definition: DAClusterizerInZ.cc:684
zMuMuMuonUserData.beta
beta
Definition: zMuMuMuonUserData.py:10
DAClusterizerInZ::DAClusterizerInZ
DAClusterizerInZ(const edm::ParameterSet &conf)
Definition: DAClusterizerInZ.cc:414
AlCaHLTBitMon_ParallelJobs.p
p
Definition: AlCaHLTBitMon_ParallelJobs.py:153
HLT_FULL_cff.Tmin
Tmin
Definition: HLT_FULL_cff.py:34839
gather_cfg.cout
cout
Definition: gather_cfg.py:144
pos
Definition: PixelAliasList.h:18
Measurement1D.h
DAClusterizerInZ::dump
void dump(const double beta, const std::vector< vertex_t > &y, const std::vector< track_t > &tks, const int verbosity=0) const
Definition: DAClusterizerInZ.cc:446
edm::second
U second(std::pair< T, U > const &p)
Definition: ParameterSet.cc:222
DAClusterizerInZ::Eik
double Eik(const track_t &t, const vertex_t &k) const
Definition: DAClusterizerInZ.cc:42
edm::ParameterSet::getUntrackedParameter
T getUntrackedParameter(std::string const &, T const &) const
edm::LogInfo
Log< level::Info, false > LogInfo
Definition: MessageLogger.h:125
DAClusterizerInZ::merge
bool merge(std::vector< vertex_t > &, int) const
Definition: DAClusterizerInZ.cc:171
findQualityFiles.v
v
Definition: findQualityFiles.py:179
geometryDiff.epsilon
int epsilon
Definition: geometryDiff.py:26
F
static uInt32 F(BLOWFISH_CTX *ctx, uInt32 x)
Definition: blowfish.cc:163
funct::sin
Sin< T >::type sin(const T &t)
Definition: Sin.h:22
testProducerWithPsetDescEmpty_cfi.z2
z2
Definition: testProducerWithPsetDescEmpty_cfi.py:41
GeomDetEnumerators::PixelBarrel
Definition: GeomDetEnumerators.h:11
DAClusterizerInZ::track_t
Definition: DAClusterizerInZ.h:20
testProducerWithPsetDescEmpty_cfi.y1
y1
Definition: testProducerWithPsetDescEmpty_cfi.py:29
funct::cos
Cos< T >::type cos(const T &t)
Definition: Cos.h:22
submitPVValidationJobs.split
def split(sequence, size)
Definition: submitPVValidationJobs.py:352
DAClusterizerInZ.h
w
const double w
Definition: UKUtility.cc:23
mathSSE::sqrt
T sqrt(T t)
Definition: SSEVec.h:19
reco::BeamSpot
Definition: BeamSpot.h:21
p2
double p2[4]
Definition: TauolaWrapper.h:90
DAClusterizerInZ::vertex_t::z
double z
Definition: DAClusterizerInZ.h:29
beamspot
Definition: BeamSpotWrite2Txt.h:8
dqmdumpme.k
k
Definition: dqmdumpme.py:60
Point3DBase< float, GlobalTag >
b
double b
Definition: hdecay.h:118
DAClusterizerInZ::vertex_t
Definition: DAClusterizerInZ.h:28
bsc_activity_cfg.clusters
clusters
Definition: bsc_activity_cfg.py:36
listHistos.IP
IP
Definition: listHistos.py:76
ntuplemaker.fill
fill
Definition: ntuplemaker.py:304
edm::ParameterSet
Definition: ParameterSet.h:47
a
double a
Definition: hdecay.h:119
dumpMFGeometry_cfg.delta
delta
Definition: dumpMFGeometry_cfg.py:25
position
static int position[264][3]
Definition: ReadPGInfo.cc:289
funct::tan
Tan< T >::type tan(const T &t)
Definition: Tan.h:22
FrontierConditions_GlobalTag_cff.dump
dump
Definition: FrontierConditions_GlobalTag_cff.py:12
MetAnalyzer.pv
def pv(vc)
Definition: MetAnalyzer.py:7
hlt_jetmet_dqm_QT_fromfile_cfg.critical
critical
Definition: hlt_jetmet_dqm_QT_fromfile_cfg.py:109
p1
double p1[4]
Definition: TauolaWrapper.h:89
GlobalErrorBase< double, ErrorMatrixTag >
TransientVertex
Definition: TransientVertex.h:18
DAClusterizerInZ::track_t::z
double z
Definition: DAClusterizerInZ.h:21
std
Definition: JetResolutionObject.h:76
DAClusterizerInZ::vertices
std::vector< TransientVertex > vertices(const std::vector< reco::TransientTrack > &tracks, const int verbosity=0) const
Definition: DAClusterizerInZ.cc:540
DAClusterizerInZ::splitAll
void splitAll(std::vector< vertex_t > &y) const
Definition: DAClusterizerInZ.cc:386
DAClusterizerInZ::update
double update(double beta, std::vector< track_t > &tks, std::vector< vertex_t > &y) const
Definition: DAClusterizerInZ.cc:44
edm::ParameterSet::getParameter
T getParameter(std::string const &) const
Definition: ParameterSet.h:303
VertexException.h
dqm-mbProfile.log
log
Definition: dqm-mbProfile.py:17
funct::pow
Power< A, B >::type pow(const A &a, const B &b)
Definition: Power.h:29
DAClusterizerInZ::split
bool split(double beta, std::vector< track_t > &tks, std::vector< vertex_t > &y, double threshold) const
Definition: DAClusterizerInZ.cc:304
pi
const Double_t pi
Definition: trackSplitPlot.h:36
BeamSpotPI::Z
Definition: BeamSpotPayloadInspectorHelper.h:32
JetChargeProducer_cfi.exp
exp
Definition: JetChargeProducer_cfi.py:6
remoteMonitoring_LED_IterMethod_cfg.threshold
threshold
Definition: remoteMonitoring_LED_IterMethod_cfg.py:426
MatrixUtil.merge
def merge(dictlist, TELL=False)
Definition: MatrixUtil.py:201
submitPVValidationJobs.t
string t
Definition: submitPVValidationJobs.py:644
DAClusterizerInZ::beta0
double beta0(const double betamax, std::vector< track_t > &tks, std::vector< vertex_t > &y) const
Definition: DAClusterizerInZ.cc:267
TauDecayModes.dec
dec
Definition: TauDecayModes.py:143
PVValHelper::dx
Definition: PVValidationHelpers.h:48
update
#define update(a, b)
Definition: TrackClassifier.cc:10
pwdgSkimBPark_cfi.vertices
vertices
Definition: pwdgSkimBPark_cfi.py:7
MillePedeFileConverter_cfg.e
e
Definition: MillePedeFileConverter_cfg.py:37
DAClusterizerInZ::vertex_t::pk
double pk
Definition: DAClusterizerInZ.h:30
DAClusterizerInZ::fill
std::vector< track_t > fill(const std::vector< reco::TransientTrack > &tracks) const
Definition: DAClusterizerInZ.cc:15
reco::TrackBase::highPurity
Definition: TrackBase.h:154