
HGVHistoProducerAlgo.cc
1 #include <numeric>
2 #include <iomanip>
3 
7 #include "TMath.h"
8 #include "TLatex.h"
9 #include "TF1.h"
10 
11 using namespace std;
12 
 1 #include <numeric>
15 const double ScoreCutLCtoCP_ = 0.1;
16 const double ScoreCutCPtoLC_ = 0.1;
17 const double ScoreCutMCLtoCPFakeMerge_ = 0.6;
18 const double ScoreCutCPtoMCLDup_ = 0.2;
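 //Illustration of how the cuts above are used further down: an object is counted as
 //associated when its score is *below* the cut, e.g.
 //  assoc = std::count_if(cps.begin(), cps.end(),
 //                        [](const auto& obj) { return obj.second < ScoreCutLCtoCP_; });
 //assoc >= 1 fills the "Num" histograms and assoc > 1 the merge/duplicate ones.
 //The multicluster cuts (ScoreCutMCLtoCPFakeMerge_, ScoreCutCPtoMCLDup_) are presumably
 //used the same way in the multicluster filling code, which is not shown in this section.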
19 
21  : //parameters for eta
22  minEta_(pset.getParameter<double>("minEta")),
23  maxEta_(pset.getParameter<double>("maxEta")),
24  nintEta_(pset.getParameter<int>("nintEta")),
25  useFabsEta_(pset.getParameter<bool>("useFabsEta")),
26 
27  //parameters for energy
28  minEne_(pset.getParameter<double>("minEne")),
29  maxEne_(pset.getParameter<double>("maxEne")),
30  nintEne_(pset.getParameter<int>("nintEne")),
31 
32  //parameters for pt
33  minPt_(pset.getParameter<double>("minPt")),
34  maxPt_(pset.getParameter<double>("maxPt")),
35  nintPt_(pset.getParameter<int>("nintPt")),
36 
37  //parameters for phi
38  minPhi_(pset.getParameter<double>("minPhi")),
39  maxPhi_(pset.getParameter<double>("maxPhi")),
40  nintPhi_(pset.getParameter<int>("nintPhi")),
41 
42  //parameters for counting mixed hits clusters
43  minMixedHitsCluster_(pset.getParameter<double>("minMixedHitsCluster")),
44  maxMixedHitsCluster_(pset.getParameter<double>("maxMixedHitsCluster")),
45  nintMixedHitsCluster_(pset.getParameter<int>("nintMixedHitsCluster")),
46 
47  //parameters for the total amount of energy clustered by all layer clusters (fraction over caloparticles)
48  minEneCl_(pset.getParameter<double>("minEneCl")),
49  maxEneCl_(pset.getParameter<double>("maxEneCl")),
50  nintEneCl_(pset.getParameter<int>("nintEneCl")),
51 
52  //parameters for the longitudinal depth barycenter.
53  minLongDepBary_(pset.getParameter<double>("minLongDepBary")),
54  maxLongDepBary_(pset.getParameter<double>("maxLongDepBary")),
55  nintLongDepBary_(pset.getParameter<int>("nintLongDepBary")),
56 
57  //parameters for z position of vertex plots
58  minZpos_(pset.getParameter<double>("minZpos")),
59  maxZpos_(pset.getParameter<double>("maxZpos")),
60  nintZpos_(pset.getParameter<int>("nintZpos")),
61 
62  //Parameters for the total number of layer clusters per layer
63  minTotNClsperlay_(pset.getParameter<double>("minTotNClsperlay")),
64  maxTotNClsperlay_(pset.getParameter<double>("maxTotNClsperlay")),
65  nintTotNClsperlay_(pset.getParameter<int>("nintTotNClsperlay")),
66 
67  //Parameters for the energy clustered by layer clusters per layer (fraction over caloparticles)
68  minEneClperlay_(pset.getParameter<double>("minEneClperlay")),
69  maxEneClperlay_(pset.getParameter<double>("maxEneClperlay")),
70  nintEneClperlay_(pset.getParameter<int>("nintEneClperlay")),
71 
72  //Parameters for the score both for:
73  //1. calo particle to layer clusters association per layer
74  //2. layer cluster to calo particles association per layer
75  minScore_(pset.getParameter<double>("minScore")),
76  maxScore_(pset.getParameter<double>("maxScore")),
77  nintScore_(pset.getParameter<int>("nintScore")),
78 
79  //Parameters for shared energy fraction. That is:
80  //1. Fraction of each of the layer clusters energy related to a
81  //calo particle over that calo particle's energy.
82  //2. Fraction of each of the calo particles energy
83  //related to a layer cluster over that layer cluster's energy.
84  minSharedEneFrac_(pset.getParameter<double>("minSharedEneFrac")),
85  maxSharedEneFrac_(pset.getParameter<double>("maxSharedEneFrac")),
86  nintSharedEneFrac_(pset.getParameter<int>("nintSharedEneFrac")),
87 
88  //Same as above for multiclusters
89  minMCLSharedEneFrac_(pset.getParameter<double>("minMCLSharedEneFrac")),
90  maxMCLSharedEneFrac_(pset.getParameter<double>("maxMCLSharedEneFrac")),
91  nintMCLSharedEneFrac_(pset.getParameter<int>("nintMCLSharedEneFrac")),
92 
93  //Parameters for the total number of layer clusters per thickness
94  minTotNClsperthick_(pset.getParameter<double>("minTotNClsperthick")),
95  maxTotNClsperthick_(pset.getParameter<double>("maxTotNClsperthick")),
96  nintTotNClsperthick_(pset.getParameter<int>("nintTotNClsperthick")),
97 
98  //Parameters for the total number of cells per thickness per layer
99  minTotNcellsperthickperlayer_(pset.getParameter<double>("minTotNcellsperthickperlayer")),
100  maxTotNcellsperthickperlayer_(pset.getParameter<double>("maxTotNcellsperthickperlayer")),
101  nintTotNcellsperthickperlayer_(pset.getParameter<int>("nintTotNcellsperthickperlayer")),
102 
103  //Parameters for the distance of cluster cells to seed cell per thickness per layer
104  minDisToSeedperthickperlayer_(pset.getParameter<double>("minDisToSeedperthickperlayer")),
105  maxDisToSeedperthickperlayer_(pset.getParameter<double>("maxDisToSeedperthickperlayer")),
106  nintDisToSeedperthickperlayer_(pset.getParameter<int>("nintDisToSeedperthickperlayer")),
107 
108  //Parameters for the energy weighted distance of cluster cells to seed cell per thickness per layer
109  minDisToSeedperthickperlayerenewei_(pset.getParameter<double>("minDisToSeedperthickperlayerenewei")),
110  maxDisToSeedperthickperlayerenewei_(pset.getParameter<double>("maxDisToSeedperthickperlayerenewei")),
111  nintDisToSeedperthickperlayerenewei_(pset.getParameter<int>("nintDisToSeedperthickperlayerenewei")),
112 
113  //Parameters for the distance of cluster cells to max cell per thickness per layer
114  minDisToMaxperthickperlayer_(pset.getParameter<double>("minDisToMaxperthickperlayer")),
115  maxDisToMaxperthickperlayer_(pset.getParameter<double>("maxDisToMaxperthickperlayer")),
116  nintDisToMaxperthickperlayer_(pset.getParameter<int>("nintDisToMaxperthickperlayer")),
117 
118  //Parameters for the energy weighted distance of cluster cells to max cell per thickness per layer
119  minDisToMaxperthickperlayerenewei_(pset.getParameter<double>("minDisToMaxperthickperlayerenewei")),
120  maxDisToMaxperthickperlayerenewei_(pset.getParameter<double>("maxDisToMaxperthickperlayerenewei")),
121  nintDisToMaxperthickperlayerenewei_(pset.getParameter<int>("nintDisToMaxperthickperlayerenewei")),
122 
123  //Parameters for the distance of seed cell to max cell per thickness per layer
124  minDisSeedToMaxperthickperlayer_(pset.getParameter<double>("minDisSeedToMaxperthickperlayer")),
125  maxDisSeedToMaxperthickperlayer_(pset.getParameter<double>("maxDisSeedToMaxperthickperlayer")),
126  nintDisSeedToMaxperthickperlayer_(pset.getParameter<int>("nintDisSeedToMaxperthickperlayer")),
127 
128  //Parameters for the energy of a cluster per thickness per layer
129  minClEneperthickperlayer_(pset.getParameter<double>("minClEneperthickperlayer")),
130  maxClEneperthickperlayer_(pset.getParameter<double>("maxClEneperthickperlayer")),
131  nintClEneperthickperlayer_(pset.getParameter<int>("nintClEneperthickperlayer")),
132 
133  //Parameters for the energy density of cluster cells per thickness
134  minCellsEneDensperthick_(pset.getParameter<double>("minCellsEneDensperthick")),
135  maxCellsEneDensperthick_(pset.getParameter<double>("maxCellsEneDensperthick")),
136  nintCellsEneDensperthick_(pset.getParameter<int>("nintCellsEneDensperthick")),
137 
138  //Parameters for the total number of multiclusters per event
139  //We always treat one event as two events, one in +z and one in -z
140  minTotNMCLs_(pset.getParameter<double>("minTotNMCLs")),
141  maxTotNMCLs_(pset.getParameter<double>("maxTotNMCLs")),
142  nintTotNMCLs_(pset.getParameter<int>("nintTotNMCLs")),
143 
144  //Parameters for the total number of layer clusters in multicluster
145  minTotNClsinMCLs_(pset.getParameter<double>("minTotNClsinMCLs")),
146  maxTotNClsinMCLs_(pset.getParameter<double>("maxTotNClsinMCLs")),
147  nintTotNClsinMCLs_(pset.getParameter<int>("nintTotNClsinMCLs")),
148 
149  //Parameters for the total number of layer clusters in multicluster per layer
150  minTotNClsinMCLsperlayer_(pset.getParameter<double>("minTotNClsinMCLsperlayer")),
151  maxTotNClsinMCLsperlayer_(pset.getParameter<double>("maxTotNClsinMCLsperlayer")),
152  nintTotNClsinMCLsperlayer_(pset.getParameter<int>("nintTotNClsinMCLsperlayer")),
153 
154  //Parameters for the multiplicity of layer clusters in multicluster
155  minMplofLCs_(pset.getParameter<double>("minMplofLCs")),
156  maxMplofLCs_(pset.getParameter<double>("maxMplofLCs")),
157  nintMplofLCs_(pset.getParameter<int>("nintMplofLCs")),
158 
159  //Parameters for cluster size
160  minSizeCLsinMCLs_(pset.getParameter<double>("minSizeCLsinMCLs")),
161  maxSizeCLsinMCLs_(pset.getParameter<double>("maxSizeCLsinMCLs")),
162  nintSizeCLsinMCLs_(pset.getParameter<int>("nintSizeCLsinMCLs")),
163 
164  //Parameters for the energy of a layer cluster per multiplicity
165  minClEnepermultiplicity_(pset.getParameter<double>("minClEnepermultiplicity")),
166  maxClEnepermultiplicity_(pset.getParameter<double>("maxClEnepermultiplicity")),
167  nintClEnepermultiplicity_(pset.getParameter<int>("nintClEnepermultiplicity")),
168 
169  //parameters for x
170  minX_(pset.getParameter<double>("minX")),
171  maxX_(pset.getParameter<double>("maxX")),
172  nintX_(pset.getParameter<int>("nintX")),
173 
174  //parameters for y
175  minY_(pset.getParameter<double>("minY")),
176  maxY_(pset.getParameter<double>("maxY")),
177  nintY_(pset.getParameter<int>("nintY")),
178 
179  //parameters for z
180  minZ_(pset.getParameter<double>("minZ")),
181  maxZ_(pset.getParameter<double>("maxZ")),
182  nintZ_(pset.getParameter<int>("nintZ")) {}
183 
185 
187  histograms.lastLayerEEzm = ibook.bookInt("lastLayerEEzm");
188  histograms.lastLayerFHzm = ibook.bookInt("lastLayerFHzm");
189  histograms.maxlayerzm = ibook.bookInt("maxlayerzm");
190  histograms.lastLayerEEzp = ibook.bookInt("lastLayerEEzp");
191  histograms.lastLayerFHzp = ibook.bookInt("lastLayerFHzp");
192  histograms.maxlayerzp = ibook.bookInt("maxlayerzp");
193 }
194 
196  histograms.h_caloparticle_eta[pdgid] =
197  ibook.book1D("num_caloparticle_eta", "N of caloparticle vs eta", nintEta_, minEta_, maxEta_);
198  histograms.h_caloparticle_eta_Zorigin[pdgid] =
199  ibook.book2D("Eta vs Zorigin", "Eta vs Zorigin", nintEta_, minEta_, maxEta_, nintZpos_, minZpos_, maxZpos_);
200 
201  histograms.h_caloparticle_energy[pdgid] =
202  ibook.book1D("caloparticle_energy", "Energy of caloparticle", nintEne_, minEne_, maxEne_);
203  histograms.h_caloparticle_pt[pdgid] = ibook.book1D("caloparticle_pt", "Pt of caloparticle", nintPt_, minPt_, maxPt_);
204  histograms.h_caloparticle_phi[pdgid] =
205  ibook.book1D("caloparticle_phi", "Phi of caloparticle", nintPhi_, minPhi_, maxPhi_);
206 }
207 
210  unsigned layers,
211  std::vector<int> thicknesses,
212  std::string pathtomatbudfile) {
213  //---------------------------------------------------------------------------------------------------------------------------
214  histograms.h_cluster_eta.push_back(
215  ibook.book1D("num_reco_cluster_eta", "N of reco clusters vs eta", nintEta_, minEta_, maxEta_));
216 
217  //---------------------------------------------------------------------------------------------------------------------------
218  //z-
219  histograms.h_mixedhitscluster_zminus.push_back(
220  ibook.book1D("mixedhitscluster_zminus",
221  "N of reco clusters that contain hits of more than one kind in z-",
225  //z+
226  histograms.h_mixedhitscluster_zplus.push_back(
227  ibook.book1D("mixedhitscluster_zplus",
228  "N of reco clusters that contain hits of more than one kind in z+",
232 
233  //---------------------------------------------------------------------------------------------------------------------------
234  //z-
235  histograms.h_energyclustered_zminus.push_back(
236  ibook.book1D("energyclustered_zminus",
237  "percent of total energy clustered by all layer clusters over caloparticles energy in z-",
238  nintEneCl_,
239  minEneCl_,
240  maxEneCl_));
241  //z+
242  histograms.h_energyclustered_zplus.push_back(
243  ibook.book1D("energyclustered_zplus",
244  "percent of total energy clustered by all layer clusters over caloparticles energy in z+",
245  nintEneCl_,
246  minEneCl_,
247  maxEneCl_));
248 
249  //---------------------------------------------------------------------------------------------------------------------------
250  //z-
251  std::string subpathtomat = pathtomatbudfile.substr(pathtomatbudfile.find("Validation"));
252  histograms.h_longdepthbarycentre_zminus.push_back(
253  ibook.book1D("longdepthbarycentre_zminus",
254  "The longitudinal depth barycentre in z- for " + subpathtomat,
257  maxLongDepBary_));
258  //z+
259  histograms.h_longdepthbarycentre_zplus.push_back(
260  ibook.book1D("longdepthbarycentre_zplus",
261  "The longitudinal depth barycentre in z+ for " + subpathtomat,
264  maxLongDepBary_));
265 
266  //---------------------------------------------------------------------------------------------------------------------------
267  for (unsigned ilayer = 0; ilayer < 2 * layers; ++ilayer) {
268  auto istr1 = std::to_string(ilayer);
269  while (istr1.size() < 2) {
270  istr1.insert(0, "0");
271  }
272  //We will make a mapping to the regular layer naming plus z- or z+ for convenience
273  std::string istr2 = "";
274  //First with the -z endcap
275  if (ilayer < layers) {
276  istr2 = std::to_string(ilayer + 1) + " in z-";
277  } else { //Then for the +z
278  istr2 = std::to_string(ilayer - (layers - 1)) + " in z+";
279  }
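 //Example of the resulting naming (assuming 52 layers per endcap, as in the
 //-z: 0->51 / +z: 52->103 mapping used further down):
 //  ilayer   0 -> istr1 = "00",  istr2 = "1 in z-"
 //  ilayer  51 -> istr1 = "51",  istr2 = "52 in z-"
 //  ilayer  52 -> istr1 = "52",  istr2 = "1 in z+"
 //  ilayer 103 -> istr1 = "103", istr2 = "52 in z+"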
280  histograms.h_clusternum_perlayer[ilayer] = ibook.book1D("totclusternum_layer_" + istr1,
281  "total number of layer clusters for layer " + istr2,
285  histograms.h_energyclustered_perlayer[ilayer] =
286  ibook.book1D("energyclustered_perlayer" + istr1,
287  "percent of total energy clustered by layer clusters over caloparticles energy for layer " + istr2,
291  histograms.h_score_layercl2caloparticle_perlayer[ilayer] =
292  ibook.book1D("Score_layercl2caloparticle_perlayer" + istr1,
293  "Score of Layer Cluster per CaloParticle for layer " + istr2,
294  nintScore_,
295  minScore_,
296  maxScore_);
297  histograms.h_score_caloparticle2layercl_perlayer[ilayer] =
298  ibook.book1D("Score_caloparticle2layercl_perlayer" + istr1,
299  "Score of CaloParticle per Layer Cluster for layer " + istr2,
300  nintScore_,
301  minScore_,
302  maxScore_);
303  histograms.h_energy_vs_score_caloparticle2layercl_perlayer[ilayer] =
304  ibook.book2D("Energy_vs_Score_caloparticle2layer_perlayer" + istr1,
305  "Energy vs Score of CaloParticle per Layer Cluster for layer " + istr2,
306  nintScore_,
307  minScore_,
308  maxScore_,
312  histograms.h_energy_vs_score_layercl2caloparticle_perlayer[ilayer] =
313  ibook.book2D("Energy_vs_Score_layer2caloparticle_perlayer" + istr1,
314  "Energy vs Score of Layer Cluster per CaloParticle Layer for layer " + istr2,
315  nintScore_,
316  minScore_,
317  maxScore_,
321  histograms.h_sharedenergy_caloparticle2layercl_perlayer[ilayer] =
322  ibook.book1D("SharedEnergy_caloparticle2layercl_perlayer" + istr1,
323  "Shared Energy of CaloParticle per Layer Cluster for layer " + istr2,
327  histograms.h_sharedenergy_caloparticle2layercl_vs_eta_perlayer[ilayer] =
328  ibook.bookProfile("SharedEnergy_caloparticle2layercl_vs_eta_perlayer" + istr1,
329  "Shared Energy of CaloParticle vs #eta per best Layer Cluster for layer " + istr2,
330  nintEta_,
331  minEta_,
332  maxEta_,
335  histograms.h_sharedenergy_caloparticle2layercl_vs_phi_perlayer[ilayer] =
336  ibook.bookProfile("SharedEnergy_caloparticle2layercl_vs_phi_perlayer" + istr1,
337  "Shared Energy of CaloParticle vs #phi per best Layer Cluster for layer " + istr2,
338  nintPhi_,
339  minPhi_,
340  maxPhi_,
343  histograms.h_sharedenergy_layercl2caloparticle_perlayer[ilayer] =
344  ibook.book1D("SharedEnergy_layercluster2caloparticle_perlayer" + istr1,
345  "Shared Energy of Layer Cluster per Layer Calo Particle for layer " + istr2,
349  histograms.h_sharedenergy_layercl2caloparticle_vs_eta_perlayer[ilayer] =
350  ibook.bookProfile("SharedEnergy_layercl2caloparticle_vs_eta_perlayer" + istr1,
351  "Shared Energy of LayerCluster vs #eta per best Calo Particle for layer " + istr2,
352  nintEta_,
353  minEta_,
354  maxEta_,
357  histograms.h_sharedenergy_layercl2caloparticle_vs_phi_perlayer[ilayer] =
358  ibook.bookProfile("SharedEnergy_layercl2caloparticle_vs_phi_perlayer" + istr1,
359  "Shared Energy of LayerCluster vs #phi per best Calo Particle for layer " + istr2,
360  nintPhi_,
361  minPhi_,
362  maxPhi_,
365  histograms.h_num_caloparticle_eta_perlayer[ilayer] =
366  ibook.book1D("Num_CaloParticle_Eta_perlayer" + istr1,
367  "Num CaloParticle Eta per Layer Cluster for layer " + istr2,
368  nintEta_,
369  minEta_,
370  maxEta_);
371  histograms.h_numDup_caloparticle_eta_perlayer[ilayer] =
372  ibook.book1D("NumDup_CaloParticle_Eta_perlayer" + istr1,
373  "Num Duplicate CaloParticle Eta per Layer Cluster for layer " + istr2,
374  nintEta_,
375  minEta_,
376  maxEta_);
377  histograms.h_denom_caloparticle_eta_perlayer[ilayer] =
378  ibook.book1D("Denom_CaloParticle_Eta_perlayer" + istr1,
379  "Denom CaloParticle Eta per Layer Cluster for layer " + istr2,
380  nintEta_,
381  minEta_,
382  maxEta_);
383  histograms.h_num_caloparticle_phi_perlayer[ilayer] =
384  ibook.book1D("Num_CaloParticle_Phi_perlayer" + istr1,
385  "Num CaloParticle Phi per Layer Cluster for layer " + istr2,
386  nintPhi_,
387  minPhi_,
388  maxPhi_);
389  histograms.h_numDup_caloparticle_phi_perlayer[ilayer] =
390  ibook.book1D("NumDup_CaloParticle_Phi_perlayer" + istr1,
391  "Num Duplicate CaloParticle Phi per Layer Cluster for layer " + istr2,
392  nintPhi_,
393  minPhi_,
394  maxPhi_);
395  histograms.h_denom_caloparticle_phi_perlayer[ilayer] =
396  ibook.book1D("Denom_CaloParticle_Phi_perlayer" + istr1,
397  "Denom CaloParticle Phi per Layer Cluster for layer " + istr2,
398  nintPhi_,
399  minPhi_,
400  maxPhi_);
401  histograms.h_num_layercl_eta_perlayer[ilayer] =
402  ibook.book1D("Num_LayerCluster_Eta_perlayer" + istr1,
403  "Num LayerCluster Eta per Layer Cluster for layer " + istr2,
404  nintEta_,
405  minEta_,
406  maxEta_);
407  histograms.h_numMerge_layercl_eta_perlayer[ilayer] =
408  ibook.book1D("NumMerge_LayerCluster_Eta_perlayer" + istr1,
409  "Num Merge LayerCluster Eta per Layer Cluster for layer " + istr2,
410  nintEta_,
411  minEta_,
412  maxEta_);
413  histograms.h_denom_layercl_eta_perlayer[ilayer] =
414  ibook.book1D("Denom_LayerCluster_Eta_perlayer" + istr1,
415  "Denom LayerCluster Eta per Layer Cluster for layer " + istr2,
416  nintEta_,
417  minEta_,
418  maxEta_);
419  histograms.h_num_layercl_phi_perlayer[ilayer] =
420  ibook.book1D("Num_LayerCluster_Phi_perlayer" + istr1,
421  "Num LayerCluster Phi per Layer Cluster for layer " + istr2,
422  nintPhi_,
423  minPhi_,
424  maxPhi_);
425  histograms.h_numMerge_layercl_phi_perlayer[ilayer] =
426  ibook.book1D("NumMerge_LayerCluster_Phi_perlayer" + istr1,
427  "Num Merge LayerCluster Phi per Layer Cluster for layer " + istr2,
428  nintPhi_,
429  minPhi_,
430  maxPhi_);
431  histograms.h_denom_layercl_phi_perlayer[ilayer] =
432  ibook.book1D("Denom_LayerCluster_Phi_perlayer" + istr1,
433  "Denom LayerCluster Phi per Layer Cluster for layer " + istr2,
434  nintPhi_,
435  minPhi_,
436  maxPhi_);
437  histograms.h_cellAssociation_perlayer[ilayer] =
438  ibook.book1D("cellAssociation_perlayer" + istr1, "Cell Association for layer " + istr2, 5, -4., 1.);
439  histograms.h_cellAssociation_perlayer[ilayer]->setBinLabel(2, "TN(purity)");
440  histograms.h_cellAssociation_perlayer[ilayer]->setBinLabel(3, "FN(ineff.)");
441  histograms.h_cellAssociation_perlayer[ilayer]->setBinLabel(4, "FP(fake)");
442  histograms.h_cellAssociation_perlayer[ilayer]->setBinLabel(5, "TP(eff.)");
443  }
444 
445  //---------------------------------------------------------------------------------------------------------------------------
446  for (std::vector<int>::iterator it = thicknesses.begin(); it != thicknesses.end(); ++it) {
447  auto istr = std::to_string(*it);
448  histograms.h_clusternum_perthick[(*it)] = ibook.book1D("totclusternum_thick_" + istr,
449  "total number of layer clusters for thickness " + istr,
453  //---
454  histograms.h_cellsenedens_perthick[(*it)] = ibook.book1D("cellsenedens_thick_" + istr,
455  "energy density of cluster cells for thickness " + istr,
459  }
460 
461  //---------------------------------------------------------------------------------------------------------------------------
462  //Not all combinations exist, but we keep them all for cross-checking reasons.
463  for (std::vector<int>::iterator it = thicknesses.begin(); it != thicknesses.end(); ++it) {
464  for (unsigned ilayer = 0; ilayer < 2 * layers; ++ilayer) {
465  auto istr1 = std::to_string(*it);
466  auto istr2 = std::to_string(ilayer);
467  while (istr2.size() < 2)
468  istr2.insert(0, "0");
469  auto istr = istr1 + "_" + istr2;
470  //We will make a mapping to the regular layer naming plus z- or z+ for convenience
471  std::string istr3 = "";
472  //First with the -z endcap
473  if (ilayer < layers) {
474  istr3 = std::to_string(ilayer + 1) + " in z- ";
475  } else { //Then for the +z
476  istr3 = std::to_string(ilayer - (layers - 1)) + " in z+ ";
477  }
478  //---
479  histograms.h_cellsnum_perthickperlayer[istr] =
480  ibook.book1D("cellsnum_perthick_perlayer_" + istr,
481  "total number of cells for layer " + istr3 + " for thickness " + istr1,
485  //---
486  histograms.h_distancetoseedcell_perthickperlayer[istr] =
487  ibook.book1D("distancetoseedcell_perthickperlayer_" + istr,
488  "distance of cluster cells to seed cell for layer " + istr3 + " for thickness " + istr1,
492  //---
493  histograms.h_distancetoseedcell_perthickperlayer_eneweighted[istr] = ibook.book1D(
494  "distancetoseedcell_perthickperlayer_eneweighted_" + istr,
495  "energy weighted distance of cluster cells to seed cell for layer " + istr3 + " for thickness " + istr1,
499  //---
500  histograms.h_distancetomaxcell_perthickperlayer[istr] =
501  ibook.book1D("distancetomaxcell_perthickperlayer_" + istr,
502  "distance of cluster cells to max cell for layer " + istr3 + " for thickness " + istr1,
506  //---
507  histograms.h_distancetomaxcell_perthickperlayer_eneweighted[istr] = ibook.book1D(
508  "distancetomaxcell_perthickperlayer_eneweighted_" + istr,
509  "energy weighted distance of cluster cells to max cell for layer " + istr3 + " for thickness " + istr1,
513  //---
514  histograms.h_distancebetseedandmaxcell_perthickperlayer[istr] =
515  ibook.book1D("distancebetseedandmaxcell_perthickperlayer_" + istr,
516  "distance of seed cell to max cell for layer " + istr3 + " for thickness " + istr1,
520  //---
521  histograms.h_distancebetseedandmaxcellvsclusterenergy_perthickperlayer[istr] = ibook.book2D(
522  "distancebetseedandmaxcellvsclusterenergy_perthickperlayer_" + istr,
523  "distance of seed cell to max cell vs cluster energy for layer " + istr3 + " for thickness " + istr1,
530  }
531  }
532  //---------------------------------------------------------------------------------------------------------------------------
533 }
534 
536  histograms.h_score_multicl2caloparticle.push_back(ibook.book1D(
537  "Score_multicl2caloparticle", "Score of Multi Cluster per CaloParticle", nintScore_, minScore_, maxScore_));
538  histograms.h_score_caloparticle2multicl.push_back(ibook.book1D(
539  "Score_caloparticle2multicl", "Score of CaloParticle per Multi Cluster", nintScore_, minScore_, maxScore_));
540  histograms.h_energy_vs_score_multicl2caloparticle.push_back(
541  ibook.book2D("Energy_vs_Score_multi2caloparticle",
542  "Energy vs Score of Multi Cluster per CaloParticle",
543  nintScore_,
544  minScore_,
545  maxScore_,
549  histograms.h_energy_vs_score_caloparticle2multicl.push_back(
550  ibook.book2D("Energy_vs_Score_caloparticle2multi",
551  "Energy vs Score of CaloParticle per Multi Cluster",
552  nintScore_,
553  minScore_,
554  maxScore_,
558 
559  //back to all multiclusters
560  histograms.h_num_multicl_eta.push_back(
561  ibook.book1D("Num_MultiCluster_Eta", "Num MultiCluster Eta per Multi Cluster ", nintEta_, minEta_, maxEta_));
562  histograms.h_numMerge_multicl_eta.push_back(ibook.book1D(
563  "NumMerge_MultiCluster_Eta", "Num Merge MultiCluster Eta per Multi Cluster ", nintEta_, minEta_, maxEta_));
564  histograms.h_denom_multicl_eta.push_back(
565  ibook.book1D("Denom_MultiCluster_Eta", "Denom MultiCluster Eta per Multi Cluster", nintEta_, minEta_, maxEta_));
566  histograms.h_num_multicl_phi.push_back(
567  ibook.book1D("Num_MultiCluster_Phi", "Num MultiCluster Phi per Multi Cluster ", nintPhi_, minPhi_, maxPhi_));
568  histograms.h_numMerge_multicl_phi.push_back(ibook.book1D(
569  "NumMerge_MultiCluster_Phi", "Num Merge MultiCluster Phi per Multi Cluster", nintPhi_, minPhi_, maxPhi_));
570  histograms.h_denom_multicl_phi.push_back(
571  ibook.book1D("Denom_MultiCluster_Phi", "Denom MultiCluster Phi per Multi Cluster", nintPhi_, minPhi_, maxPhi_));
572  histograms.h_sharedenergy_multicl2caloparticle.push_back(
573  ibook.book1D("SharedEnergy_multicluster2caloparticle",
574  "Shared Energy of Multi Cluster per Calo Particle in each layer",
578  histograms.h_sharedenergy_multicl2caloparticle_vs_eta.push_back(
579  ibook.bookProfile("SharedEnergy_multicl2caloparticle_vs_eta",
580  "Shared Energy of MultiCluster vs #eta per best Calo Particle in each layer",
581  nintEta_,
582  minEta_,
583  maxEta_,
586  histograms.h_sharedenergy_multicl2caloparticle_vs_phi.push_back(
587  ibook.bookProfile("SharedEnergy_multicl2caloparticle_vs_phi",
588  "Shared Energy of MultiCluster vs #phi per best Calo Particle in each layer",
589  nintPhi_,
590  minPhi_,
591  maxPhi_,
594  histograms.h_sharedenergy_caloparticle2multicl.push_back(
595  ibook.book1D("SharedEnergy_caloparticle2multicl",
596  "Shared Energy of CaloParticle per Multi Cluster",
600  histograms.h_sharedenergy_caloparticle2multicl_vs_eta.push_back(
601  ibook.bookProfile("SharedEnergy_caloparticle2multicl_vs_eta",
602  "Shared Energy of CaloParticle vs #eta per best Multi Cluster",
603  nintEta_,
604  minEta_,
605  maxEta_,
608  histograms.h_sharedenergy_caloparticle2multicl_vs_phi.push_back(
609  ibook.bookProfile("SharedEnergy_caloparticle2multicl_vs_phi",
610  "Shared Energy of CaloParticle vs #phi per best Multi Cluster",
611  nintPhi_,
612  minPhi_,
613  maxPhi_,
616  histograms.h_num_caloparticle_eta.push_back(
617  ibook.book1D("Num_CaloParticle_Eta", "Num CaloParticle Eta per Multi Cluster", nintEta_, minEta_, maxEta_));
618  histograms.h_numDup_multicl_eta.push_back(
619  ibook.book1D("NumDup_MultiCluster_Eta", "Num Duplicate MultiCl vs Eta", nintEta_, minEta_, maxEta_));
620  histograms.h_denom_caloparticle_eta.push_back(
621  ibook.book1D("Denom_CaloParticle_Eta", "Denom CaloParticle Eta per Multi Cluster", nintEta_, minEta_, maxEta_));
622  histograms.h_num_caloparticle_phi.push_back(
623  ibook.book1D("Num_CaloParticle_Phi", "Num CaloParticle Phi per Multi Cluster", nintPhi_, minPhi_, maxPhi_));
624  histograms.h_numDup_multicl_phi.push_back(
625  ibook.book1D("NumDup_MultiCluster_Phi", "Num Duplicate MultiCl vs Phi", nintPhi_, minPhi_, maxPhi_));
626  histograms.h_denom_caloparticle_phi.push_back(
627  ibook.book1D("Denom_CaloParticle_Phi", "Denom CaloParticle Phi per Multi Cluster", nintPhi_, minPhi_, maxPhi_));
628 
629  std::unordered_map<int, dqm::reco::MonitorElement*> clusternum_in_multicluster_perlayer;
630  clusternum_in_multicluster_perlayer.clear();
631 
632  for (unsigned ilayer = 0; ilayer < 2 * layers; ++ilayer) {
633  auto istr1 = std::to_string(ilayer);
634  while (istr1.size() < 2) {
635  istr1.insert(0, "0");
636  }
637  //We will make a mapping to the regular layer naming plus z- or z+ for convenience
638  std::string istr2 = "";
639  //First with the -z endcap
640  if (ilayer < layers) {
641  istr2 = std::to_string(ilayer + 1) + " in z-";
642  } else { //Then for the +z
643  istr2 = std::to_string(ilayer - (layers - 1)) + " in z+";
644  }
645 
646  clusternum_in_multicluster_perlayer[ilayer] =
647  ibook.book1D("clusternum_in_multicluster_perlayer" + istr1,
648  "Number of layer clusters in multicluster for layer " + istr2,
652  }
653 
654  histograms.h_clusternum_in_multicluster_perlayer.push_back(std::move(clusternum_in_multicluster_perlayer));
655 
656  histograms.h_multiclusternum.push_back(
657  ibook.book1D("totmulticlusternum", "total number of multiclusters", nintTotNMCLs_, minTotNMCLs_, maxTotNMCLs_));
658 
659  histograms.h_contmulticlusternum.push_back(ibook.book1D("contmulticlusternum",
660  "number of multiclusters with 3 contiguous layers",
662  minTotNMCLs_,
663  maxTotNMCLs_));
664 
665  histograms.h_noncontmulticlusternum.push_back(ibook.book1D("noncontmulticlusternum",
666  "number of multiclusters without 3 contiguous layers",
668  minTotNMCLs_,
669  maxTotNMCLs_));
670 
671  histograms.h_clusternum_in_multicluster.push_back(ibook.book1D("clusternum_in_multicluster",
672  "total number of layer clusters in multicluster",
676 
677  histograms.h_clusternum_in_multicluster_vs_layer.push_back(
678  ibook.bookProfile("clusternum_in_multicluster_vs_layer",
679  "Profile of 2d layer clusters in multicluster vs layer number",
680  2 * layers,
681  0.,
682  2. * layers,
685 
686  histograms.h_multiplicityOfLCinMCL.push_back(ibook.book2D("multiplicityOfLCinMCL",
687  "Multiplicity vs Layer cluster size in Multiclusters",
689  minMplofLCs_,
690  maxMplofLCs_,
694 
695  histograms.h_multiplicity_numberOfEventsHistogram.push_back(ibook.book1D("multiplicity_numberOfEventsHistogram",
696  "multiplicity numberOfEventsHistogram",
698  minMplofLCs_,
699  maxMplofLCs_));
700 
701  histograms.h_multiplicity_zminus_numberOfEventsHistogram.push_back(
702  ibook.book1D("multiplicity_zminus_numberOfEventsHistogram",
703  "multiplicity numberOfEventsHistogram in z-",
705  minMplofLCs_,
706  maxMplofLCs_));
707 
708  histograms.h_multiplicity_zplus_numberOfEventsHistogram.push_back(
709  ibook.book1D("multiplicity_zplus_numberOfEventsHistogram",
710  "multiplicity numberOfEventsHistogram in z+",
712  minMplofLCs_,
713  maxMplofLCs_));
714 
715  histograms.h_multiplicityOfLCinMCL_vs_layercluster_zminus.push_back(
716  ibook.book2D("multiplicityOfLCinMCL_vs_layercluster_zminus",
717  "Multiplicity vs Layer number in z-",
719  minMplofLCs_,
720  maxMplofLCs_,
721  layers,
722  0.,
723  (float)layers));
724 
725  histograms.h_multiplicityOfLCinMCL_vs_layercluster_zplus.push_back(
726  ibook.book2D("multiplicityOfLCinMCL_vs_layercluster_zplus",
727  "Multiplicity vs Layer number in z+",
729  minMplofLCs_,
730  maxMplofLCs_,
731  layers,
732  0.,
733  (float)layers));
734 
735  histograms.h_multiplicityOfLCinMCL_vs_layerclusterenergy.push_back(
736  ibook.book2D("multiplicityOfLCinMCL_vs_layerclusterenergy",
737  "Multiplicity vs Layer cluster energy",
739  minMplofLCs_,
740  maxMplofLCs_,
744 
745  histograms.h_multicluster_pt.push_back(
746  ibook.book1D("multicluster_pt", "Pt of the multicluster", nintPt_, minPt_, maxPt_));
747  histograms.h_multicluster_eta.push_back(
748  ibook.book1D("multicluster_eta", "Eta of the multicluster", nintEta_, minEta_, maxEta_));
749  histograms.h_multicluster_phi.push_back(
750  ibook.book1D("multicluster_phi", "Phi of the multicluster", nintPhi_, minPhi_, maxPhi_));
751  histograms.h_multicluster_energy.push_back(
752  ibook.book1D("multicluster_energy", "Energy of the multicluster", nintEne_, minEne_, maxEne_));
753  histograms.h_multicluster_x.push_back(
754  ibook.book1D("multicluster_x", "X position of the multicluster", nintX_, minX_, maxX_));
755  histograms.h_multicluster_y.push_back(
756  ibook.book1D("multicluster_y", "Y position of the multicluster", nintY_, minY_, maxY_));
757  histograms.h_multicluster_z.push_back(
758  ibook.book1D("multicluster_z", "Z position of the multicluster", nintZ_, minZ_, maxZ_));
759  histograms.h_multicluster_firstlayer.push_back(
760  ibook.book1D("multicluster_firstlayer", "First layer of the multicluster", 2 * layers, 0., (float)2 * layers));
761  histograms.h_multicluster_lastlayer.push_back(
762  ibook.book1D("multicluster_lastlayer", "Last layer of the multicluster", 2 * layers, 0., (float)2 * layers));
763  histograms.h_multicluster_layersnum.push_back(ibook.book1D(
764  "multicluster_layersnum", "Number of layers of the multicluster", 2 * layers, 0., (float)2 * layers));
765 }
766 
768  //We will save some info straight from geometry to avoid mistakes from updates
769  //----------- TODO ----------------------------------------------------------
770  //For now the values returned for 'lastLayerFHzp': '104', 'lastLayerFHzm': '52' are not the ones expected.
771  //Will come back to this when there is info in CMSSW to put in the DQM file.
772  histograms.lastLayerEEzm->Fill(recHitTools_->lastLayerEE());
773  histograms.lastLayerFHzm->Fill(recHitTools_->lastLayerFH());
774  histograms.maxlayerzm->Fill(layers);
775  histograms.lastLayerEEzp->Fill(recHitTools_->lastLayerEE() + layers);
776  histograms.lastLayerFHzp->Fill(recHitTools_->lastLayerFH() + layers);
777  histograms.maxlayerzp->Fill(layers + layers);
778 }
779 
781  int pdgid,
782  const CaloParticle& caloparticle,
783  std::vector<SimVertex> const& simVertices) const {
784  const auto eta = getEta(caloparticle.eta());
785  if (histograms.h_caloparticle_eta.count(pdgid)) {
786  histograms.h_caloparticle_eta.at(pdgid)->Fill(eta);
787  }
788  if (histograms.h_caloparticle_eta_Zorigin.count(pdgid)) {
789  histograms.h_caloparticle_eta_Zorigin.at(pdgid)->Fill(
790  simVertices.at(caloparticle.g4Tracks()[0].vertIndex()).position().z(), eta);
791  }
792 
793  if (histograms.h_caloparticle_energy.count(pdgid)) {
794  histograms.h_caloparticle_energy.at(pdgid)->Fill(caloparticle.energy());
795  }
796  if (histograms.h_caloparticle_pt.count(pdgid)) {
797  histograms.h_caloparticle_pt.at(pdgid)->Fill(caloparticle.pt());
798  }
799  if (histograms.h_caloparticle_phi.count(pdgid)) {
800  histograms.h_caloparticle_phi.at(pdgid)->Fill(caloparticle.phi());
801  }
802 }
803 
805  int count,
806  const reco::CaloCluster& cluster) const {
807  const auto eta = getEta(cluster.eta());
808  histograms.h_cluster_eta[count]->Fill(eta);
809 }
810 
812  const Histograms& histograms,
815  edm::Handle<std::vector<CaloParticle>> caloParticleHandle,
816  std::vector<CaloParticle> const& cP,
817  std::vector<size_t> const& cPIndices,
818  std::vector<size_t> const& cPSelectedIndices,
819  std::unordered_map<DetId, const HGCRecHit*> const& hitMap,
820  unsigned layers,
821  const edm::Handle<hgcal::LayerClusterToCaloParticleAssociator>& LCAssocByEnergyScoreHandle) const {
822  auto nLayerClusters = clusters.size();
823 
824  std::unordered_map<DetId, std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>> detIdToCaloParticleId_Map;
825  std::unordered_map<DetId, std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>> detIdToLayerClusterId_Map;
826 
827  // The association has to be done in an all-vs-all fashion.
828  // For this reason we use the full set of CaloParticles, with the only filter being on the bunch crossing (bx)
829  for (const auto& cpId : cPIndices) {
830  const SimClusterRefVector& simClusterRefVector = cP[cpId].simClusters();
831  for (const auto& it_sc : simClusterRefVector) {
832  const SimCluster& simCluster = (*(it_sc));
833  const auto& hits_and_fractions = simCluster.hits_and_fractions();
834  for (const auto& it_haf : hits_and_fractions) {
835  DetId hitid = (it_haf.first);
836  std::unordered_map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(hitid);
837  if (itcheck != hitMap.end()) {
838  auto hit_find_it = detIdToCaloParticleId_Map.find(hitid);
839  if (hit_find_it == detIdToCaloParticleId_Map.end()) {
840  detIdToCaloParticleId_Map[hitid] = std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>();
841  detIdToCaloParticleId_Map[hitid].emplace_back(
842  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
843  } else {
844  auto findHitIt = std::find(detIdToCaloParticleId_Map[hitid].begin(),
845  detIdToCaloParticleId_Map[hitid].end(),
846  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
847  if (findHitIt != detIdToCaloParticleId_Map[hitid].end()) {
848  findHitIt->fraction += it_haf.second;
849  } else {
850  detIdToCaloParticleId_Map[hitid].emplace_back(
851  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
852  }
853  }
854  }
855  }
856  }
857  }
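  // At this point detIdToCaloParticleId_Map maps every simhit DetId that also has a
  // rechit in hitMap to the list of contributing CaloParticles, with repeated
  // contributions from the same CaloParticle summed into a single fraction.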
858 
859  for (unsigned int lcId = 0; lcId < nLayerClusters; ++lcId) {
860  const std::vector<std::pair<DetId, float>>& hits_and_fractions = clusters[lcId].hitsAndFractions();
861  unsigned int numberOfHitsInLC = hits_and_fractions.size();
862 
863  // This vector will store, for each hit in the Layercluster, the index of
864  // the CaloParticle that contributed the most, in terms of energy, to it.
865  // Special values are:
866  //
867  // -2 --> the reconstruction fraction of the RecHit is 0 (used in the past to monitor Halo Hits)
868  // -3 --> same as before with the added condition that no CaloParticle has been linked to this RecHit
869  // -1 --> the reco fraction is >0, but no CaloParticle has been linked to it
870  // >=0 --> index of the linked CaloParticle
871  std::vector<int> hitsToCaloParticleId(numberOfHitsInLC);
872  const auto firstHitDetId = hits_and_fractions[0].first;
873  int lcLayerId =
874  recHitTools_->getLayerWithOffset(firstHitDetId) + layers * ((recHitTools_->zside(firstHitDetId) + 1) >> 1) - 1;
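  // Worked example of the layer index: zside() returns -1 or +1, so
  // ((zside + 1) >> 1) is 0 in z- and 1 in z+. The resulting lcLayerId therefore
  // runs from 0 to layers-1 in z- and from layers to 2*layers-1 in z+.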
875 
876  // This will store the fraction of the CaloParticle energy shared with the LayerCluster: e_shared/cp_energy
877  std::unordered_map<unsigned, float> CPEnergyInLC;
878 
879  for (unsigned int hitId = 0; hitId < numberOfHitsInLC; hitId++) {
880  DetId rh_detid = hits_and_fractions[hitId].first;
881  auto rhFraction = hits_and_fractions[hitId].second;
882 
883  std::unordered_map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(rh_detid);
884  const HGCRecHit* hit = itcheck->second;
885 
886  auto hit_find_in_LC = detIdToLayerClusterId_Map.find(rh_detid);
887  if (hit_find_in_LC == detIdToLayerClusterId_Map.end()) {
888  detIdToLayerClusterId_Map[rh_detid] = std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>();
889  }
890  detIdToLayerClusterId_Map[rh_detid].emplace_back(HGVHistoProducerAlgo::detIdInfoInCluster{lcId, rhFraction});
891 
892  auto hit_find_in_CP = detIdToCaloParticleId_Map.find(rh_detid);
893 
894  // If the fraction is zero or the hit does not belong to any CaloParticle,
895  // set the caloParticleId for the hit to -1; this will
896  // contribute to the number of noise hits.
897 
898  // MR Remove the case in which the fraction is 0, since this could be a
899  // real hit that has been marked as halo.
900  if (rhFraction == 0.) {
901  hitsToCaloParticleId[hitId] = -2;
902  }
903  if (hit_find_in_CP == detIdToCaloParticleId_Map.end()) {
904  hitsToCaloParticleId[hitId] -= 1;
905  } else {
906  auto maxCPEnergyInLC = 0.f;
907  auto maxCPId = -1;
908  for (auto& h : hit_find_in_CP->second) {
909  CPEnergyInLC[h.clusterId] += h.fraction * hit->energy();
910  // Keep track of which CaloParticle contributed the most, in terms
911  // of energy, to this specific LayerCluster.
912  if (CPEnergyInLC[h.clusterId] > maxCPEnergyInLC) {
913  maxCPEnergyInLC = CPEnergyInLC[h.clusterId];
914  maxCPId = h.clusterId;
915  }
916  }
917  hitsToCaloParticleId[hitId] = maxCPId;
918  }
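  // The fill below maps the special values onto the bin labels booked above:
  // -3 -> TN(purity), -2 -> FN(ineff.), -1 -> FP(fake), while any matched hit
  // (id >= 0) is filled as 0 -> TP(eff.).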
919  histograms.h_cellAssociation_perlayer.at(lcLayerId)->Fill(
920  hitsToCaloParticleId[hitId] > 0. ? 0. : hitsToCaloParticleId[hitId]);
921  } // End loop over hits on a LayerCluster
922 
923  } // End of loop over LayerClusters
924 
925  hgcal::RecoToSimCollection cpsInLayerClusterMap =
926  LCAssocByEnergyScoreHandle->associateRecoToSim(clusterHandle, caloParticleHandle);
927  hgcal::SimToRecoCollection cPOnLayerMap =
928  LCAssocByEnergyScoreHandle->associateSimToReco(clusterHandle, caloParticleHandle);
929  // Here we do fill the plots to compute the different metrics linked to
930  // reco-level, namely fake rate and merge rate. In this loop we should *not*
931  // restrict only to the selected caloParticles.
932  for (unsigned int lcId = 0; lcId < nLayerClusters; ++lcId) {
933  const std::vector<std::pair<DetId, float>>& hits_and_fractions = clusters[lcId].hitsAndFractions();
934  const auto firstHitDetId = hits_and_fractions[0].first;
935  const int lcLayerId =
936  recHitTools_->getLayerWithOffset(firstHitDetId) + layers * ((recHitTools_->zside(firstHitDetId) + 1) >> 1) - 1;
937  histograms.h_denom_layercl_eta_perlayer.at(lcLayerId)->Fill(clusters[lcId].eta());
938  histograms.h_denom_layercl_phi_perlayer.at(lcLayerId)->Fill(clusters[lcId].phi());
939  //
940  const edm::Ref<reco::CaloClusterCollection> lcRef(clusterHandle, lcId);
941  const auto& cpsIt = cpsInLayerClusterMap.find(lcRef);
942  if (cpsIt == cpsInLayerClusterMap.end())
943  continue;
944 
945  const auto& cps = cpsIt->val;
946  if (clusters[lcId].energy() == 0. && !cps.empty()) {
947  for (const auto& cpPair : cps) {
948  histograms.h_score_layercl2caloparticle_perlayer.at(lcLayerId)->Fill(cpPair.second);
949  }
950  continue;
951  }
952  for (const auto& cpPair : cps) {
953  LogDebug("HGCalValidator") << "layerCluster Id: \t" << lcId << "\t CP id: \t" << cpPair.first.index()
954  << "\t score \t" << cpPair.second << std::endl;
955  histograms.h_score_layercl2caloparticle_perlayer.at(lcLayerId)->Fill(cpPair.second);
956  auto const& cp_linked =
957  std::find_if(std::begin(cPOnLayerMap[cpPair.first]),
958  std::end(cPOnLayerMap[cpPair.first]),
959  [&lcRef](const std::pair<edm::Ref<reco::CaloClusterCollection>, std::pair<float, float>>& p) {
960  return p.first == lcRef;
961  });
962  if (cp_linked ==
963  cPOnLayerMap[cpPair.first].end()) // This should never happen by construction of the association maps
964  continue;
965  histograms.h_sharedenergy_layercl2caloparticle_perlayer.at(lcLayerId)->Fill(
966  cp_linked->second.first / clusters[lcId].energy(), clusters[lcId].energy());
967  histograms.h_energy_vs_score_layercl2caloparticle_perlayer.at(lcLayerId)->Fill(
968  cpPair.second, cp_linked->second.first / clusters[lcId].energy());
969  }
970  const auto assoc =
971  std::count_if(std::begin(cps), std::end(cps), [](const auto& obj) { return obj.second < ScoreCutLCtoCP_; });
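  // assoc counts the CaloParticles associated to this layer cluster with a score
  // below ScoreCutLCtoCP_: assoc >= 1 fills the "Num" (fake-rate numerator) plots,
  // assoc > 1 additionally fills the "NumMerge" plots.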
972  if (assoc) {
973  histograms.h_num_layercl_eta_perlayer.at(lcLayerId)->Fill(clusters[lcId].eta());
974  histograms.h_num_layercl_phi_perlayer.at(lcLayerId)->Fill(clusters[lcId].phi());
975  if (assoc > 1) {
976  histograms.h_numMerge_layercl_eta_perlayer.at(lcLayerId)->Fill(clusters[lcId].eta());
977  histograms.h_numMerge_layercl_phi_perlayer.at(lcLayerId)->Fill(clusters[lcId].phi());
978  }
979  const auto& best = std::min_element(
980  std::begin(cps), std::end(cps), [](const auto& obj1, const auto& obj2) { return obj1.second < obj2.second; });
981  const auto& best_cp_linked =
982  std::find_if(std::begin(cPOnLayerMap[best->first]),
983  std::end(cPOnLayerMap[best->first]),
984  [&lcRef](const std::pair<edm::Ref<reco::CaloClusterCollection>, std::pair<float, float>>& p) {
985  return p.first == lcRef;
986  });
987  if (best_cp_linked ==
988  cPOnLayerMap[best->first].end()) // This should never happen by construction of the association maps
989  continue;
990  histograms.h_sharedenergy_layercl2caloparticle_vs_eta_perlayer.at(lcLayerId)->Fill(
991  clusters[lcId].eta(), best_cp_linked->second.first / clusters[lcId].energy());
992  histograms.h_sharedenergy_layercl2caloparticle_vs_phi_perlayer.at(lcLayerId)->Fill(
993  clusters[lcId].phi(), best_cp_linked->second.first / clusters[lcId].energy());
994  }
995  } // End of loop over LayerClusters
996 
997  // Here we do fill the plots to compute the different metrics linked to
998  // gen-level, namely efficiency and duplicate rate. In this loop we should restrict
999  // only to the selected caloParticles.
1000  for (const auto& cpId : cPSelectedIndices) {
1001  const edm::Ref<CaloParticleCollection> cpRef(caloParticleHandle, cpId);
1002  const auto& lcsIt = cPOnLayerMap.find(cpRef);
1003 
1004  std::map<unsigned int, float> cPEnergyOnLayer;
1005  for (unsigned int layerId = 0; layerId < layers * 2; ++layerId)
1006  cPEnergyOnLayer[layerId] = 0;
1007 
1008  const SimClusterRefVector& simClusterRefVector = cP[cpId].simClusters();
1009  for (const auto& it_sc : simClusterRefVector) {
1010  const SimCluster& simCluster = (*(it_sc));
1011  const auto& hits_and_fractions = simCluster.hits_and_fractions();
1012  for (const auto& it_haf : hits_and_fractions) {
1013  const DetId hitid = (it_haf.first);
1014  const int cpLayerId =
1015  recHitTools_->getLayerWithOffset(hitid) + layers * ((recHitTools_->zside(hitid) + 1) >> 1) - 1;
1016  std::unordered_map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(hitid);
1017  if (itcheck != hitMap.end()) {
1018  const HGCRecHit* hit = itcheck->second;
1019  cPEnergyOnLayer[cpLayerId] += it_haf.second * hit->energy();
1020  }
1021  }
1022  }
1023 
1024  for (unsigned int layerId = 0; layerId < layers * 2; ++layerId) {
1025  if (!cPEnergyOnLayer[layerId])
1026  continue;
1027 
1028  histograms.h_denom_caloparticle_eta_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().eta());
1029  histograms.h_denom_caloparticle_phi_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().phi());
1030 
1031  if (lcsIt == cPOnLayerMap.end())
1032  continue;
1033  const auto& lcs = lcsIt->val;
1034 
1035  auto getLCLayerId = [&](const unsigned int lcId) {
1036  const std::vector<std::pair<DetId, float>>& hits_and_fractions = clusters[lcId].hitsAndFractions();
1037  const auto firstHitDetId = hits_and_fractions[0].first;
1038  const unsigned int lcLayerId = recHitTools_->getLayerWithOffset(firstHitDetId) +
1039  layers * ((recHitTools_->zside(firstHitDetId) + 1) >> 1) - 1;
1040  return lcLayerId;
1041  };
1042 
1043  for (const auto& lcPair : lcs) {
1044  if (getLCLayerId(lcPair.first.index()) != layerId)
1045  continue;
1046  histograms.h_score_caloparticle2layercl_perlayer.at(layerId)->Fill(lcPair.second.second);
1047  histograms.h_sharedenergy_caloparticle2layercl_perlayer.at(layerId)->Fill(
1048  lcPair.second.first / cPEnergyOnLayer[layerId], cPEnergyOnLayer[layerId]);
1049  histograms.h_energy_vs_score_caloparticle2layercl_perlayer.at(layerId)->Fill(
1050  lcPair.second.second, lcPair.second.first / cPEnergyOnLayer[layerId]);
1051  }
1052  const auto assoc = std::count_if(std::begin(lcs), std::end(lcs), [&](const auto& obj) {
1053  if (getLCLayerId(obj.first.index()) != layerId)
1054  return false;
1055  else
1056  return obj.second.second < ScoreCutCPtoLC_;
1057  });
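  // Same logic on the CaloParticle side: assoc counts the layer clusters on this
  // layer associated to the CaloParticle with a score below ScoreCutCPtoLC_;
  // assoc >= 1 fills the "Num" (efficiency numerator) plots, assoc > 1 the "NumDup" ones.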
1058  if (assoc) {
1059  histograms.h_num_caloparticle_eta_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().eta());
1060  histograms.h_num_caloparticle_phi_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().phi());
1061  if (assoc > 1) {
1062  histograms.h_numDup_caloparticle_eta_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().eta());
1063  histograms.h_numDup_caloparticle_phi_perlayer.at(layerId)->Fill(cP[cpId].g4Tracks()[0].momentum().phi());
1064  }
1065  const auto best = std::min_element(std::begin(lcs), std::end(lcs), [&](const auto& obj1, const auto& obj2) {
1066  if (getLCLayerId(obj1.first.index()) != layerId)
1067  return false;
1068  else if (getLCLayerId(obj2.first.index()) == layerId)
1069  return obj1.second.second < obj2.second.second;
1070  else
1071  return true;
1072  });
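  // The comparator above restricts the search to layer clusters on this layer:
  // an off-layer obj1 never wins and an off-layer obj2 always loses, so "best" ends
  // up being the on-layer layer cluster with the lowest (i.e. best) score.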
1073  histograms.h_sharedenergy_caloparticle2layercl_vs_eta_perlayer.at(layerId)->Fill(
1074  cP[cpId].g4Tracks()[0].momentum().eta(), best->second.first / cPEnergyOnLayer[layerId]);
1075  histograms.h_sharedenergy_caloparticle2layercl_vs_phi_perlayer.at(layerId)->Fill(
1076  cP[cpId].g4Tracks()[0].momentum().phi(), best->second.first / cPEnergyOnLayer[layerId]);
1077  }
1078  }
1079  }
1080 }
1081 
1083  const Histograms& histograms,
1084  int count,
1087  const Density& densities,
1088  edm::Handle<std::vector<CaloParticle>> caloParticleHandle,
1089  std::vector<CaloParticle> const& cP,
1090  std::vector<size_t> const& cPIndices,
1091  std::vector<size_t> const& cPSelectedIndices,
1092  std::unordered_map<DetId, const HGCRecHit*> const& hitMap,
1093  std::map<double, double> cummatbudg,
1094  unsigned layers,
1095  std::vector<int> thicknesses,
1096  edm::Handle<hgcal::LayerClusterToCaloParticleAssociator>& LCAssocByEnergyScoreHandle) const {
1097  //Each event to be treated as two events: an event in +ve endcap,
1098  //plus another event in -ve endcap. In this spirit there will be
1099  //a layer variable (layerid) that maps the layers as:
1100  //-z: 0->51
1101  //+z: 52->103
1102 
1103  //To keep track of total num of layer clusters per layer
1104  //tnlcpl[layerid]
1105  std::vector<int> tnlcpl(1000, 0); //tnlcpl.clear(); tnlcpl.reserve(1000);
1106 
1107  //To keep track of the total num of clusters per thickness in plus and in minus endcaps
1108  std::map<std::string, int> tnlcpthplus;
1109  tnlcpthplus.clear();
1110  std::map<std::string, int> tnlcpthminus;
1111  tnlcpthminus.clear();
1112  //At the beginning of the event the total-clusters-per-thickness counters should be initialized to zero in both endcaps
1113  for (std::vector<int>::iterator it = thicknesses.begin(); it != thicknesses.end(); ++it) {
1114  tnlcpthplus.insert(std::pair<std::string, int>(std::to_string(*it), 0));
1115  tnlcpthminus.insert(std::pair<std::string, int>(std::to_string(*it), 0));
1116  }
1117  //To keep track of the total num of clusters with mixed thickness hits per event
1118  tnlcpthplus.insert(std::pair<std::string, int>("mixed", 0));
1119  tnlcpthminus.insert(std::pair<std::string, int>("mixed", 0));
1120 
1122  clusterHandle,
1123  clusters,
1124  caloParticleHandle,
1125  cP,
1126  cPIndices,
1127  cPSelectedIndices,
1128  hitMap,
1129  layers,
1130  LCAssocByEnergyScoreHandle);
1131 
1132  //To find out the total amount of energy clustered per layer
1133  //Initialize with zeros because clear() gives weird numbers.
1134  std::vector<double> tecpl(1000, 0.0); //tecpl.clear(); tecpl.reserve(1000);
1135  //for the longitudinal depth barycenter
1136  std::vector<double> ldbar(1000, 0.0); //ldbar.clear(); ldbar.reserve(1000);
1137 
1138  //We need to compare with the total amount of energy coming from caloparticles
1139  double caloparteneplus = 0.;
1140  double caloparteneminus = 0.;
1141  for (const auto& cpId : cPIndices) {
1142  if (cP[cpId].eta() >= 0.) {
1143  caloparteneplus = caloparteneplus + cP[cpId].energy();
1144  }
1145  if (cP[cpId].eta() < 0.) {
1146  caloparteneminus = caloparteneminus + cP[cpId].energy();
1147  }
1148  }
1149 
1150  //loop through clusters of the event
1151  for (unsigned int layerclusterIndex = 0; layerclusterIndex < clusters.size(); layerclusterIndex++) {
1152  const std::vector<std::pair<DetId, float>> hits_and_fractions = clusters[layerclusterIndex].hitsAndFractions();
1153 
1154  const DetId seedid = clusters[layerclusterIndex].seed();
1155  const double seedx = recHitTools_->getPosition(seedid).x();
1156  const double seedy = recHitTools_->getPosition(seedid).y();
1157  DetId maxid = findmaxhit(clusters[layerclusterIndex], hitMap);
1158 
1159  // const DetId maxid = clusters[layerclusterIndex].max();
1160  double maxx = recHitTools_->getPosition(maxid).x();
1161  double maxy = recHitTools_->getPosition(maxid).y();
1162 
1163  //Auxiliary variables to count the number of different kinds of hits in each cluster
1164  int nthhits120p = 0;
1165  int nthhits200p = 0;
1166  int nthhits300p = 0;
1167  int nthhitsscintp = 0;
1168  int nthhits120m = 0;
1169  int nthhits200m = 0;
1170  int nthhits300m = 0;
1171  int nthhitsscintm = 0;
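 //nthhitsXXXp / nthhitsXXXm count the hits of each silicon thickness (120, 200, 300 um)
 //and of the scintillator in the +z and -z endcaps respectively; they are used below both
 //to flag "mixed" clusters and to pick the dominant thickness of the cluster.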
1172  //For the thickness of the hits in the layer cluster.
1173  double thickness = 0.;
1174  //The layer the cluster belongs to. As mentioned in the mapping above, it takes into account -z and +z.
1175  int layerid = 0;
1176  //We will need another layer variable for the longitudinal material budget file reading.
1177  //In this case we need no distinction between -z and +z.
1178  int lay = 0;
1179  //We will need here to save the combination thick_lay
1180  std::string istr = "";
1181  //boolean to check for the layer that the cluster belongs to. Maybe later we will check all the layer hits.
1182  bool cluslay = true;
1183  //zside that the current cluster belongs to.
1184  int zside = 0;
1185 
1186  for (std::vector<std::pair<DetId, float>>::const_iterator it_haf = hits_and_fractions.begin();
1187  it_haf != hits_and_fractions.end();
1188  ++it_haf) {
1189  const DetId rh_detid = it_haf->first;
1190  //The layer that the current hit belongs to
1191  layerid = recHitTools_->getLayerWithOffset(rh_detid) + layers * ((recHitTools_->zside(rh_detid) + 1) >> 1) - 1;
1192  lay = recHitTools_->getLayerWithOffset(rh_detid);
1193  zside = recHitTools_->zside(rh_detid);
1194  if (rh_detid.det() == DetId::Forward || rh_detid.det() == DetId::HGCalEE || rh_detid.det() == DetId::HGCalHSi) {
1195  thickness = recHitTools_->getSiThickness(rh_detid);
1196  } else if (rh_detid.det() == DetId::HGCalHSc) {
1197  thickness = -1;
1198  } else {
1199  LogDebug("HGCalValidator") << "These are HGCal layer clusters, you shouldn't be here !!! " << layerid << "\n";
1200  continue;
1201  }
1202 
1203  //Count the layer cluster only once here and save the combination thick_layerid
1204  std::string curistr = std::to_string((int)thickness);
1205  std::string lay_string = std::to_string(layerid);
1206  while (lay_string.size() < 2)
1207  lay_string.insert(0, "0");
1208  curistr += "_" + lay_string;
1209  if (cluslay) {
1210  tnlcpl[layerid]++;
1211  istr = curistr;
1212  cluslay = false;
1213  }
1214 
1215  if ((thickness == 120.) && (recHitTools_->zside(rh_detid) > 0.)) {
1216  nthhits120p++;
1217  } else if ((thickness == 120.) && (recHitTools_->zside(rh_detid) < 0.)) {
1218  nthhits120m++;
1219  } else if ((thickness == 200.) && (recHitTools_->zside(rh_detid) > 0.)) {
1220  nthhits200p++;
1221  } else if ((thickness == 200.) && (recHitTools_->zside(rh_detid) < 0.)) {
1222  nthhits200m++;
1223  } else if ((thickness == 300.) && (recHitTools_->zside(rh_detid) > 0.)) {
1224  nthhits300p++;
1225  } else if ((thickness == 300.) && (recHitTools_->zside(rh_detid) < 0.)) {
1226  nthhits300m++;
1227  } else if ((thickness == -1) && (recHitTools_->zside(rh_detid) > 0.)) {
1228  nthhitsscintp++;
1229  } else if ((thickness == -1) && (recHitTools_->zside(rh_detid) < 0.)) {
1230  nthhitsscintm++;
1231  } else { //assert(0);
1232  LogDebug("HGCalValidator")
1233  << " You are running a geometry that contains thicknesses different than the normal ones. "
1234  << "\n";
1235  }
1236 
1237  std::unordered_map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(rh_detid);
1238  if (itcheck == hitMap.end()) {
1239  LogDebug("HGCalValidator") << " You shouldn't be here - Unable to find a hit " << rh_detid.rawId() << " "
1240  << rh_detid.det() << " " << HGCalDetId(rh_detid) << "\n";
1241  continue;
1242  }
1243 
1244  const HGCRecHit* hit = itcheck->second;
1245 
1246  //Here for the per cell plots
1247  //----
1248  const double hit_x = recHitTools_->getPosition(rh_detid).x();
1249  const double hit_y = recHitTools_->getPosition(rh_detid).y();
1250  double distancetoseed = distance(seedx, seedy, hit_x, hit_y);
1251  double distancetomax = distance(maxx, maxy, hit_x, hit_y);
1252  if (distancetoseed != 0. && histograms.h_distancetoseedcell_perthickperlayer.count(curistr)) {
1253  histograms.h_distancetoseedcell_perthickperlayer.at(curistr)->Fill(distancetoseed);
1254  }
1255  //----
1256  if (distancetoseed != 0. && histograms.h_distancetoseedcell_perthickperlayer_eneweighted.count(curistr)) {
1257  histograms.h_distancetoseedcell_perthickperlayer_eneweighted.at(curistr)->Fill(distancetoseed, hit->energy());
1258  }
1259  //----
1260  if (distancetomax != 0. && histograms.h_distancetomaxcell_perthickperlayer.count(curistr)) {
1261  histograms.h_distancetomaxcell_perthickperlayer.at(curistr)->Fill(distancetomax);
1262  }
1263  //----
1264  if (distancetomax != 0. && histograms.h_distancetomaxcell_perthickperlayer_eneweighted.count(curistr)) {
1265  histograms.h_distancetomaxcell_perthickperlayer_eneweighted.at(curistr)->Fill(distancetomax, hit->energy());
1266  }
1267 
1268  //Let's check the density
1269  std::map<DetId, float>::const_iterator dit = densities.find(rh_detid);
1270  if (dit == densities.end()) {
1271  LogDebug("HGCalValidator") << " You shouldn't be here - Unable to find a density " << rh_detid.rawId() << " "
1272  << rh_detid.det() << " " << HGCalDetId(rh_detid) << "\n";
1273  continue;
1274  }
1275 
1276  if (histograms.h_cellsenedens_perthick.count((int)thickness)) {
1277  histograms.h_cellsenedens_perthick.at((int)thickness)->Fill(dit->second);
1278  }
1279 
1280  } // end of loop through hits and fractions
1281 
1282  //Check for clusters that simultaneously contain hits of different kinds. Checking the pairwise combinations is sufficient.
1283  if ((nthhits120p != 0 && nthhits200p != 0) || (nthhits120p != 0 && nthhits300p != 0) ||
1284  (nthhits120p != 0 && nthhitsscintp != 0) || (nthhits200p != 0 && nthhits300p != 0) ||
1285  (nthhits200p != 0 && nthhitsscintp != 0) || (nthhits300p != 0 && nthhitsscintp != 0)) {
1286  tnlcpthplus["mixed"]++;
1287  } else if ((nthhits120p != 0 || nthhits200p != 0 || nthhits300p != 0 || nthhitsscintp != 0)) {
1288  //This is a cluster with hits of one kind
1289  tnlcpthplus[std::to_string((int)thickness)]++;
1290  }
1291  if ((nthhits120m != 0 && nthhits200m != 0) || (nthhits120m != 0 && nthhits300m != 0) ||
1292  (nthhits120m != 0 && nthhitsscintm != 0) || (nthhits200m != 0 && nthhits300m != 0) ||
1293  (nthhits200m != 0 && nthhitsscintm != 0) || (nthhits300m != 0 && nthhitsscintm != 0)) {
1294  tnlcpthminus["mixed"]++;
1295  } else if ((nthhits120m != 0 || nthhits200m != 0 || nthhits300m != 0 || nthhitsscintm != 0)) {
1296  //This is a cluster with hits of one kind
1297  tnlcpthminus[std::to_string((int)thickness)]++;
1298  }
1299 
1300  //Find the thickness with the largest number of cells
1301  std::vector<int> bigamoth;
1302  bigamoth.clear();
1303  if (zside > 0) {
1304  bigamoth.push_back(nthhits120p);
1305  bigamoth.push_back(nthhits200p);
1306  bigamoth.push_back(nthhits300p);
1307  bigamoth.push_back(nthhitsscintp);
1308  }
1309  if (zside < 0) {
1310  bigamoth.push_back(nthhits120m);
1311  bigamoth.push_back(nthhits200m);
1312  bigamoth.push_back(nthhits300m);
1313  bigamoth.push_back(nthhitsscintm);
1314  }
1315  auto bgth = std::max_element(bigamoth.begin(), bigamoth.end());
1316  istr = std::to_string(thicknesses[std::distance(bigamoth.begin(), bgth)]);
1317  std::string lay_string = std::to_string(layerid);
1318  while (lay_string.size() < 2)
1319  lay_string.insert(0, "0");
1320  istr += "_" + lay_string;
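      //Illustrative note (values are hypothetical, not taken from the algorithm): assuming the
      //thicknesses vector is ordered as {120, 200, 300, -1} to match the push_back order above,
      //a +z cluster on layer 7 dominated by 200 um hits would get istr = "200_07", i.e. the
      //dominant thickness followed by the zero-padded layer id used as the histogram-map key.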
1321 
1322  //Here for the per cluster plots that need the thickness_layer info
1323  if (histograms.h_cellsnum_perthickperlayer.count(istr)) {
1324  histograms.h_cellsnum_perthickperlayer.at(istr)->Fill(hits_and_fractions.size());
1325  }
1326 
1327  //Now, the distance between the seed cell and the max-energy cell.
1328  double distancebetseedandmax = distance(seedx, seedy, maxx, maxy);
1329  //The thickness_layer combination in this case uses the thickness of the seed cell as a convention.
1330  std::string seedstr = std::to_string((int)recHitTools_->getSiThickness(seedid));
1331  seedstr += "_" + lay_string;
1332  if (histograms.h_distancebetseedandmaxcell_perthickperlayer.count(seedstr)) {
1333  histograms.h_distancebetseedandmaxcell_perthickperlayer.at(seedstr)->Fill(distancebetseedandmax);
1334  }
1335  if (histograms.h_distancebetseedandmaxcellvsclusterenergy_perthickperlayer.count(seedstr)) {
1336  histograms.h_distancebetseedandmaxcellvsclusterenergy_perthickperlayer.at(seedstr)->Fill(
1337  distancebetseedandmax, clusters[layerclusterIndex].energy());
1338  }
1339 
1340  //Energy clustered per layer
1341  tecpl[layerid] = tecpl[layerid] + clusters[layerclusterIndex].energy();
1342  ldbar[layerid] = ldbar[layerid] + clusters[layerclusterIndex].energy() * cummatbudg[(double)lay];
1343 
1344  } //end of loop through clusters of the event
1345 
1346  //After the end of the event we can now fill the histograms with the results.
1347  //First, a couple of variables to keep the sum of the energy of all clusters
1348  double sumeneallcluspl = 0.;
1349  double sumeneallclusmi = 0.;
1350  //And the longitudinal variable
1351  double sumldbarpl = 0.;
1352  double sumldbarmi = 0.;
1353  //Per layer: loop over all layers of both endcaps (V9: 0->103, V10: 0->99)
1354  for (unsigned ilayer = 0; ilayer < layers * 2; ++ilayer) {
1355  if (histograms.h_clusternum_perlayer.count(ilayer)) {
1356  histograms.h_clusternum_perlayer.at(ilayer)->Fill(tnlcpl[ilayer]);
1357  }
1358  // Two times one for plus and one for minus
1359  //First with the -z endcap
1360  if (ilayer < layers) {
1361  if (histograms.h_energyclustered_perlayer.count(ilayer)) {
1362  if (caloparteneminus != 0.) {
1363  histograms.h_energyclustered_perlayer.at(ilayer)->Fill(100. * tecpl[ilayer] / caloparteneminus);
1364  }
1365  }
1366  //Keep here the total energy for the event in -z
1367  sumeneallclusmi = sumeneallclusmi + tecpl[ilayer];
1368  //And for the longitudinal variable
1369  sumldbarmi = sumldbarmi + ldbar[ilayer];
1370  } else { //Then for the +z
1371  if (histograms.h_energyclustered_perlayer.count(ilayer)) {
1372  if (caloparteneplus != 0.) {
1373  histograms.h_energyclustered_perlayer.at(ilayer)->Fill(100. * tecpl[ilayer] / caloparteneplus);
1374  }
1375  }
1376  //Keep here the total energy for the event in +z
1377  sumeneallcluspl = sumeneallcluspl + tecpl[ilayer];
1378  //And for the longitudinal variable
1379  sumldbarpl = sumldbarpl + ldbar[ilayer];
1380  } //end of +z loop
1381 
1382  } //end of loop over layers
1383 
1384  //Per thickness
1385  for (std::vector<int>::iterator it = thicknesses.begin(); it != thicknesses.end(); ++it) {
1386  if (histograms.h_clusternum_perthick.count(*it)) {
1387  histograms.h_clusternum_perthick.at(*it)->Fill(tnlcpthplus[std::to_string(*it)]);
1388  histograms.h_clusternum_perthick.at(*it)->Fill(tnlcpthminus[std::to_string(*it)]);
1389  }
1390  }
1391  //Mixed thickness clusters
1392  histograms.h_mixedhitscluster_zplus[count]->Fill(tnlcpthplus["mixed"]);
1393  histograms.h_mixedhitscluster_zminus[count]->Fill(tnlcpthminus["mixed"]);
1394 
1395  //Total energy clustered from all layer clusters (fraction)
1396  if (caloparteneplus != 0.) {
1397  histograms.h_energyclustered_zplus[count]->Fill(100. * sumeneallcluspl / caloparteneplus);
1398  }
1399  if (caloparteneminus != 0.) {
1400  histograms.h_energyclustered_zminus[count]->Fill(100. * sumeneallclusmi / caloparteneminus);
1401  }
1402 
1403  //For the longitudinal depth barycenter
1404  histograms.h_longdepthbarycentre_zplus[count]->Fill(sumldbarpl / sumeneallcluspl);
1405  histograms.h_longdepthbarycentre_zminus[count]->Fill(sumldbarmi / sumeneallclusmi);
1406 }
1407 
1409  int count,
1410  const std::vector<reco::HGCalMultiCluster>& multiClusters,
1411  std::vector<CaloParticle> const& cP,
1412  std::vector<size_t> const& cPIndices,
1413  std::vector<size_t> const& cPSelectedIndices,
1414  std::unordered_map<DetId, const HGCRecHit*> const& hitMap,
1415  unsigned layers) const {
1416  auto nMultiClusters = multiClusters.size();
1417  //Consider CaloParticles coming from the hard scatterer, excluding the PU contribution.
1418  auto nCaloParticles = cPIndices.size();
1419 
1420  std::unordered_map<DetId, std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>> detIdToCaloParticleId_Map;
1421  std::unordered_map<DetId, std::vector<HGVHistoProducerAlgo::detIdInfoInMultiCluster>> detIdToMultiClusterId_Map;
1422  std::vector<int> tracksters_fakemerge(nMultiClusters, 0);
1423  std::vector<int> tracksters_duplicate(nMultiClusters, 0);
1424 
1425  // this contains the ids of the caloparticles contributing with at least one hit to the multi cluster and the reconstruction error
1426  //cpsInMultiCluster[multicluster][CPids]
1427  //Connects a multicluster with all related caloparticles.
1428  std::vector<std::vector<std::pair<unsigned int, float>>> cpsInMultiCluster;
1429  cpsInMultiCluster.resize(nMultiClusters);
1430 
1431  //cPOnLayer[caloparticle][layer]
1432  //This defines a "calo particle on layer" concept. It is only filled in case
1433  //that calo particle has a reconstructed hit related via detid. So, a cPOnLayer[i][j] connects a
1434  //specific calo particle i in layer j with:
1435  //1. the sum of all rechits energy times fraction of the relevant simhit in layer j related to that calo particle i.
1436  //2. the hits and fractions of that calo particle i in layer j.
1437  //3. the layer clusters with matched rechit id.
1438  std::unordered_map<int, std::vector<caloParticleOnLayer>> cPOnLayer;
1439  for (unsigned int i = 0; i < nCaloParticles; ++i) {
1440  auto cpIndex = cPIndices[i];
1441  cPOnLayer[cpIndex].resize(layers * 2);
1442  for (unsigned int j = 0; j < layers * 2; ++j) {
1443  cPOnLayer[cpIndex][j].caloParticleId = cpIndex;
1444  cPOnLayer[cpIndex][j].energy = 0.f;
1445  cPOnLayer[cpIndex][j].hits_and_fractions.clear();
1446  }
1447  }
1448 
1449  for (const auto& cpId : cPIndices) {
1450  //take sim clusters
1451  const SimClusterRefVector& simClusterRefVector = cP[cpId].simClusters();
1452  //loop through sim clusters
1453  for (const auto& it_sc : simClusterRefVector) {
1454  const SimCluster& simCluster = (*(it_sc));
1455  const auto& hits_and_fractions = simCluster.hits_and_fractions();
1456  for (const auto& it_haf : hits_and_fractions) {
1457  DetId hitid = (it_haf.first);
1458  //V9:maps the layers in -z: 0->51 and in +z: 52->103
1459  //V10:maps the layers in -z: 0->49 and in +z: 50->99
1460  int cpLayerId = recHitTools_->getLayerWithOffset(hitid) + layers * ((recHitTools_->zside(hitid) + 1) >> 1) - 1;
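          //Worked example of the index convention just above (illustrative, assuming
          //getLayerWithOffset() is 1-based as the V9/V10 comments imply and zside() returns +/-1):
          //with layers = 50 (V10), a simhit on physical layer 3 maps to 3 - 1 = 2 in the -z endcap,
          //where (zside+1)>>1 = 0, and to 3 + 50 - 1 = 52 in the +z endcap, where that term is 1.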
1461  std::unordered_map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(hitid);
1462  //Checks whether the current hit belonging to sim cluster has a reconstructed hit.
1463  if (itcheck != hitMap.end()) {
1464  const HGCRecHit* hit = itcheck->second;
1465  //Since the current hit from sim cluster has a reconstructed hit with the same detid,
1466  //make a map that will connect a detid with:
1467  //1. the caloparticles that have a simcluster with sim hits in that cell via caloparticle id.
1468  //2. the sum of all simhits fractions that contributes to that detid.
1469  //So, keep in mind that in case of multiple caloparticles contributing in the same cell
1470  //the fraction is the sum over all calo particles. So, something like:
1471  //detid: (caloparticle 1, sum of hits fractions in that detid over all cp) , (caloparticle 2, sum of hits fractions in that detid over all cp), (caloparticle 3, sum of hits fractions in that detid over all cp) ...
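                //Illustrative sketch of the resulting layout (hypothetical ids and fractions):
                //  detIdToCaloParticleId_Map[someDetId] = { {cpId: 3, fraction: 0.7},
                //                                           {cpId: 9, fraction: 0.3} }
                //i.e. one entry per contributing CaloParticle, carrying its summed simhit fraction.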
1472  auto hit_find_it = detIdToCaloParticleId_Map.find(hitid);
1473  if (hit_find_it == detIdToCaloParticleId_Map.end()) {
1474  detIdToCaloParticleId_Map[hitid] = std::vector<HGVHistoProducerAlgo::detIdInfoInCluster>();
1475  detIdToCaloParticleId_Map[hitid].emplace_back(
1476  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
1477  } else {
1478  auto findHitIt = std::find(detIdToCaloParticleId_Map[hitid].begin(),
1479  detIdToCaloParticleId_Map[hitid].end(),
1480  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
1481  if (findHitIt != detIdToCaloParticleId_Map[hitid].end()) {
1482  findHitIt->fraction += it_haf.second;
1483  } else {
1484  detIdToCaloParticleId_Map[hitid].emplace_back(
1485  HGVHistoProducerAlgo::detIdInfoInCluster{cpId, it_haf.second});
1486  }
1487  }
1488  //Since the current hit from sim cluster has a reconstructed hit with the same detid,
1489  //fill the cPOnLayer[caloparticle][layer] object with energy (sum of all rechits energy times fraction
1490  //of the relevant simhit) and keep the hit (detid and fraction) that contributed.
1491  cPOnLayer[cpId][cpLayerId].energy += it_haf.second * hit->energy();
1492  // We need to compress the hits and fractions in order to have a
1493  // reasonable score between CP and LC. Imagine, for example, that a
1494  // CP has detID X used by 2 SimClusters with different fractions. If
1495  // a single LC uses X with fraction 1 and is compared to the 2
1496  // contributions separately, it will be assigned a score != 0, which
1497  // is wrong.
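              // Small numeric sketch of why this compression matters (hypothetical numbers): if two
              // SimClusters of the same CP contribute fractions 0.4 and 0.6 to detId X, the compressed
              // entry becomes (X, 1.0); a layer cluster using X with fraction 1 then adds
              // (1 - 1)^2 = 0 to the score, as it should, rather than (1 - 0.4)^2 + (1 - 0.6)^2
              // from the two separate contributions.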
1498  auto& haf = cPOnLayer[cpId][cpLayerId].hits_and_fractions;
1499  auto found = std::find_if(
1500  std::begin(haf), std::end(haf), [&hitid](const std::pair<DetId, float>& v) { return v.first == hitid; });
1501  if (found != haf.end()) {
1502  found->second += it_haf.second;
1503  } else {
1504  cPOnLayer[cpId][cpLayerId].hits_and_fractions.emplace_back(hitid, it_haf.second);
1505  }
1506  }
1507  } // end of loop through simhits
1508  } // end of loop through simclusters
1509  } // end of loop through caloparticles
1510 
1511  //Loop through multiclusters
1512  for (unsigned int mclId = 0; mclId < nMultiClusters; ++mclId) {
1513  const auto& hits_and_fractions = multiClusters[mclId].hitsAndFractions();
1514  if (!hits_and_fractions.empty()) {
1515  std::unordered_map<unsigned, float> CPEnergyInMCL;
1516  int maxCPId_byNumberOfHits = -1;
1517  unsigned int maxCPNumberOfHitsInMCL = 0;
1518  int maxCPId_byEnergy = -1;
1519  float maxEnergySharedMCLandCP = 0.f;
1520  float energyFractionOfMCLinCP = 0.f;
1521  float energyFractionOfCPinMCL = 0.f;
1522 
1523  //In the case of a matched rechit-simhit, i.e. a matched
1524  //caloparticle-layercluster-multicluster, we count and save the number of
1525  //rechits related to the maximum energy CaloParticle out of all
1526  //CaloParticles related to that layer cluster and multicluster.
1527 
1528  std::unordered_map<unsigned, unsigned> occurrencesCPinMCL;
1529  unsigned int numberOfNoiseHitsInMCL = 0;
1530  unsigned int numberOfHaloHitsInMCL = 0;
1531  unsigned int numberOfHitsInMCL = 0;
1532 
1533  //number of hits related to that cluster.
1534  unsigned int numberOfHitsInLC = hits_and_fractions.size();
1535  numberOfHitsInMCL += numberOfHitsInLC;
1536  std::unordered_map<unsigned, float> CPEnergyInLC;
1537 
1538  //hitsToCaloParticleId is a vector of ints, one for each rechit of the
1539  //layer cluster under study. If negative, there is no related simhit from any CaloParticle.
1540  //If zero or positive, at least one CaloParticle has been found with a matched simhit.
1541  //In more detail:
1542  // 1. hitsToCaloParticleId[hitId] = -3
1543  // TN: These represent Halo Cells(N) that have not been
1544  // assigned to any CaloParticle (hence the T).
1545  // 2. hitsToCaloParticleId[hitId] = -2
1546  // FN: These represent Halo Cells(N) that have been assigned
1547  // to a CaloParticle (hence the F, since those should not have been marked as halo)
1548  // 3. hitsToCaloParticleId[hitId] = -1
1549  // FP: These represent Real Cells(P) that have not been
1550  // assigned to any CaloParticle (hence the F, since these are fakes)
1551  // 4. hitsToCaloParticleId[hitId] >= 0
1552  // TP: These represent Real Cells(P) that have been assigned
1553  // to a CaloParticle (hence the T)
1554 
1555  std::vector<int> hitsToCaloParticleId(numberOfHitsInLC);
1556  //det id of the first hit just to make the lcLayerId variable
1557  //which maps the layers in -z: 0->51 and in +z: 52->103
1558  const auto firstHitDetId = hits_and_fractions[0].first;
1559  int lcLayerId = recHitTools_->getLayerWithOffset(firstHitDetId) +
1560  layers * ((recHitTools_->zside(firstHitDetId) + 1) >> 1) - 1;
1561 
1562  //Loop through the hits of the layer cluster under study
1563  for (unsigned int hitId = 0; hitId < numberOfHitsInLC; hitId++) {
1564  DetId rh_detid = hits_and_fractions[hitId].first;
1565  auto rhFraction = hits_and_fractions[hitId].second;
1566 
1567  //Since the hit belongs to the layer cluster, it must also be in the rechits map.
1568  std::unordered_map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(rh_detid);
1569  const HGCRecHit* hit = itcheck->second;
1570 
1571  //Make a map that will connect a detid (that belongs to a rechit of the layer cluster under study,
1572  //no need to save others) with:
1573  //1. the layer clusters that have rechits in that detid
1574  //2. the fraction of the rechit of each layer cluster that contributes to that detid.
1575  //So, something like:
1576  //detid: (layer cluster 1, hit fraction) , (layer cluster 2, hit fraction), (layer cluster 3, hit fraction) ...
1577  //Compare with the corresponding calo particle map above.
1578  auto hit_find_in_LC = detIdToMultiClusterId_Map.find(rh_detid);
1579  if (hit_find_in_LC == detIdToMultiClusterId_Map.end()) {
1580  detIdToMultiClusterId_Map[rh_detid] = std::vector<HGVHistoProducerAlgo::detIdInfoInMultiCluster>();
1581  }
1582  detIdToMultiClusterId_Map[rh_detid].emplace_back(
1583  HGVHistoProducerAlgo::detIdInfoInMultiCluster{mclId, mclId, rhFraction});
1584 
1585  //Check whether the rechit of the layer cluster under study has a sim hit in the same cell.
1586  auto hit_find_in_CP = detIdToCaloParticleId_Map.find(rh_detid);
1587 
1588  // if the fraction is zero or the hit does not belong to any calo
1589  // particle, set the caloparticleId for the hit to -1; this will
1590  // contribute to the number of noise hits.
1591 
1592  // MR Remove the case in which the fraction is 0, since this could be a
1593  // real hit that has been marked as halo.
1594  if (rhFraction == 0.) {
1595  hitsToCaloParticleId[hitId] = -2;
1596  numberOfHaloHitsInMCL++;
1597  }
1598  if (hit_find_in_CP == detIdToCaloParticleId_Map.end()) {
1599  hitsToCaloParticleId[hitId] -= 1;
1600  } else {
1601  auto maxCPEnergyInLC = 0.f;
1602  auto maxCPId = -1;
1603  for (auto& h : hit_find_in_CP->second) {
1604  auto shared_fraction = std::min(rhFraction, h.fraction);
1605  //We are in the case where there are calo particles with simhits connected via detid with the rechit under study
1606  //So, from all layers clusters, find the rechits that are connected with a calo particle and save/calculate the
1607  //energy of that calo particle as the sum over all rechits of the rechits energy weighted
1608  //by the caloparticle's fraction related to that rechit.
1609  CPEnergyInMCL[h.clusterId] += shared_fraction * hit->energy();
1610  //Same but for layer clusters for the cell association per layer
1611  CPEnergyInLC[h.clusterId] += shared_fraction * hit->energy();
1612  //Here the cPOnLayer[caloparticle][layer] object described above is filled.
1613  //For multi clusters with a matched rechit, the CP fraction times the hit energy is added and saved.
1614  cPOnLayer[h.clusterId][lcLayerId].layerClusterIdToEnergyAndScore[mclId].first +=
1615  shared_fraction * hit->energy();
1616  cPOnLayer[h.clusterId][lcLayerId].layerClusterIdToEnergyAndScore[mclId].second = FLT_MAX;
1617  //cpsInMultiCluster[multicluster][CPids]
1618  //Connects a multi cluster with all related caloparticles.
1619  cpsInMultiCluster[mclId].emplace_back(h.clusterId, FLT_MAX);
1620  //From all CaloParticles related to a layer cluster, we save the id and energy of the calo particle
1621  //that, after simhit-rechit matching in the layer, has the maximum energy.
1622  if (shared_fraction > maxCPEnergyInLC) {
1623  //energy is used only here. cpid is saved for multiclusters
1624  maxCPEnergyInLC = CPEnergyInLC[h.clusterId];
1625  maxCPId = h.clusterId;
1626  }
1627  }
1628  //Keep in mind that maxCPId could be zero. So, below we require strictly negative values (not zero) to count noise.
1629  hitsToCaloParticleId[hitId] = maxCPId;
1630  }
1631 
1632  } //end of loop through rechits of the layer cluster.
1633 
1634  //Loop through all rechits to count how many of them are noise and how many are matched.
1635  //In the case of a matched rechit-simhit, we count and save the number of rechits related to the maximum-energy CaloParticle.
1636  for (auto c : hitsToCaloParticleId) {
1637  if (c < 0) {
1638  numberOfNoiseHitsInMCL++;
1639  } else {
1640  occurrencesCPinMCL[c]++;
1641  }
1642  }
1643 
1644  //Below, from all maximum-energy CaloParticles, we save the one with the largest number
1645  //of related rechits.
1646  for (auto& c : occurrencesCPinMCL) {
1647  if (c.second > maxCPNumberOfHitsInMCL) {
1648  maxCPId_byNumberOfHits = c.first;
1649  maxCPNumberOfHitsInMCL = c.second;
1650  }
1651  }
1652 
1653  //Find the CaloParticle that has the maximum energy shared with the multicluster under study.
1654  for (auto& c : CPEnergyInMCL) {
1655  if (c.second > maxEnergySharedMCLandCP) {
1656  maxCPId_byEnergy = c.first;
1657  maxEnergySharedMCLandCP = c.second;
1658  }
1659  }
1660  //The energy of the CaloParticle that was found to have the maximum energy shared with the multicluster under study.
1661  float totalCPEnergyFromLayerCP = 0.f;
1662  if (maxCPId_byEnergy >= 0) {
1663  //Loop through all layers
1664  for (unsigned int j = 0; j < layers * 2; ++j) {
1665  totalCPEnergyFromLayerCP = totalCPEnergyFromLayerCP + cPOnLayer[maxCPId_byEnergy][j].energy;
1666  }
1667  energyFractionOfCPinMCL = maxEnergySharedMCLandCP / totalCPEnergyFromLayerCP;
1668  if (multiClusters[mclId].energy() > 0.f) {
1669  energyFractionOfMCLinCP = maxEnergySharedMCLandCP / multiClusters[mclId].energy();
1670  }
1671  }
1672 
1673  LogDebug("HGCalValidator") << std::setw(12) << "multiCluster"
1674  << "\t" //LogDebug("HGCalValidator")
1675  << std::setw(10) << "mulcl energy"
1676  << "\t" << std::setw(5) << "nhits"
1677  << "\t" << std::setw(12) << "noise hits"
1678  << "\t" << std::setw(22) << "maxCPId_byNumberOfHits"
1679  << "\t" << std::setw(8) << "nhitsCP"
1680  << "\t" << std::setw(16) << "maxCPId_byEnergy"
1681  << "\t" << std::setw(23) << "maxEnergySharedMCLandCP"
1682  << "\t" << std::setw(22) << "totalCPEnergyFromAllLayerCP"
1683  << "\t" << std::setw(22) << "energyFractionOfMCLinCP"
1684  << "\t" << std::setw(25) << "energyFractionOfCPinMCL"
1685  << "\t" << std::endl;
1686  LogDebug("HGCalValidator") << std::setw(12) << mclId << "\t" //LogDebug("HGCalValidator")
1687  << std::setw(10) << multiClusters[mclId].energy() << "\t" << std::setw(5)
1688  << numberOfHitsInMCL << "\t" << std::setw(12) << numberOfNoiseHitsInMCL << "\t"
1689  << std::setw(22) << maxCPId_byNumberOfHits << "\t" << std::setw(8)
1690  << maxCPNumberOfHitsInMCL << "\t" << std::setw(16) << maxCPId_byEnergy << "\t"
1691  << std::setw(23) << maxEnergySharedMCLandCP << "\t" << std::setw(22)
1692  << totalCPEnergyFromLayerCP << "\t" << std::setw(22) << energyFractionOfMCLinCP << "\t"
1693  << std::setw(25) << energyFractionOfCPinMCL << std::endl;
1694 
1695  } //end of loop through multi clusters
1696  }
1697  //Loop through multiclusters
1698  for (unsigned int mclId = 0; mclId < nMultiClusters; ++mclId) {
1699  const auto& hits_and_fractions = multiClusters[mclId].hitsAndFractions();
1700  if (!hits_and_fractions.empty()) {
1701  // find the unique caloparticle ids contributing to the multi cluster
1702  //cpsInMultiCluster[multicluster][CPids]
1703  std::sort(cpsInMultiCluster[mclId].begin(), cpsInMultiCluster[mclId].end());
1704  auto last = std::unique(cpsInMultiCluster[mclId].begin(), cpsInMultiCluster[mclId].end());
1705  cpsInMultiCluster[mclId].erase(last, cpsInMultiCluster[mclId].end());
1706 
1707  if (multiClusters[mclId].energy() == 0. && !cpsInMultiCluster[mclId].empty()) {
1708  //Loop through all CaloParticles contributing to multicluster mclId.
1709  for (auto& cpPair : cpsInMultiCluster[mclId]) {
1710  //In case of a multi cluster with zero energy but related CaloParticles the score is set to 1.
1711  cpPair.second = 1.;
1712  LogDebug("HGCalValidator") << "multiCluster Id: \t" << mclId << "\t CP id: \t" << cpPair.first
1713  << "\t score \t" << cpPair.second << std::endl;
1714  histograms.h_score_multicl2caloparticle[count]->Fill(cpPair.second);
1715  }
1716  continue;
1717  }
1718 
1719  // Compute the correct normalization
1720  float invMultiClusterEnergyWeight = 0.f;
1721  for (auto const& haf : multiClusters[mclId].hitsAndFractions()) {
1722  invMultiClusterEnergyWeight +=
1723  (haf.second * hitMap.at(haf.first)->energy()) * (haf.second * hitMap.at(haf.first)->energy());
1724  }
1725  invMultiClusterEnergyWeight = 1.f / invMultiClusterEnergyWeight;
1726 
1727  unsigned int numberOfHitsInLC = hits_and_fractions.size();
1728  for (unsigned int i = 0; i < numberOfHitsInLC; ++i) {
1729  DetId rh_detid = hits_and_fractions[i].first;
1730  float rhFraction = hits_and_fractions[i].second;
1731  bool hitWithNoCP = false;
1732 
1733  auto hit_find_in_CP = detIdToCaloParticleId_Map.find(rh_detid);
1734  if (hit_find_in_CP == detIdToCaloParticleId_Map.end())
1735  hitWithNoCP = true;
1736  auto itcheck = hitMap.find(rh_detid);
1737  const HGCRecHit* hit = itcheck->second;
1738  float hitEnergyWeight = hit->energy() * hit->energy();
1739 
1740  for (auto& cpPair : cpsInMultiCluster[mclId]) {
1741  float cpFraction = 0.f;
1742  if (!hitWithNoCP) {
1743  auto findHitIt = std::find(detIdToCaloParticleId_Map[rh_detid].begin(),
1744  detIdToCaloParticleId_Map[rh_detid].end(),
1745  HGVHistoProducerAlgo::detIdInfoInCluster{cpPair.first, 0.f});
1746  if (findHitIt != detIdToCaloParticleId_Map[rh_detid].end()) {
1747  cpFraction = findHitIt->fraction;
1748  }
1749  }
1750  if (cpPair.second == FLT_MAX) {
1751  cpPair.second = 0.f;
1752  }
1753  cpPair.second +=
1754  (rhFraction - cpFraction) * (rhFraction - cpFraction) * hitEnergyWeight * invMultiClusterEnergyWeight;
1755  }
1756  } //end of loop through rechits of layer cluster
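        //Summary of what the loop above accumulates (notation introduced here only for clarity):
        //for each rechit i of the multicluster, with multicluster fraction f_mcl(i), CaloParticle
        //fraction f_cp(i) and rechit energy E(i), the multicluster-to-CaloParticle score is
        //  score(MCL -> CP) = sum_i (f_mcl(i) - f_cp(i))^2 * E(i)^2 / sum_i (f_mcl(i) * E(i))^2
        //so a multicluster whose hits are fully matched to the CaloParticle gets a score of 0.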
1757 
1758  //In the case of a multi cluster with some energy but no related CaloParticles, print some info.
1759  if (cpsInMultiCluster[mclId].empty())
1760  LogDebug("HGCalValidator") << "multiCluster Id: \t" << mclId << "\tCP id:\t-1 "
1761  << "\t score \t-1"
1762  << "\n";
1763 
1764  auto score = std::min_element(std::begin(cpsInMultiCluster[mclId]),
1765  std::end(cpsInMultiCluster[mclId]),
1766  [](const auto& obj1, const auto& obj2) { return obj1.second < obj2.second; });
1767  for (auto& cpPair : cpsInMultiCluster[mclId]) {
1768  // LogDebug("HGCalValidator") << "multiCluster Id: \t" << mclId
1769  // << "\t CP id: \t" << cpPair.first
1770  // << "\t score \t" << cpPair.second
1771  // << "\n";
1772  LogDebug("HGCalValidator") << "multiCluster Id: \t" << mclId << "\t CP id: \t" << cpPair.first << "\t score \t"
1773  << cpPair.second << std::endl;
1774  if (cpPair.first == score->first) {
1775  histograms.h_score_multicl2caloparticle[count]->Fill(score->second);
1776  }
1777  float sharedeneCPallLayers = 0.;
1778  //Loop through all layers
1779  for (unsigned int j = 0; j < layers * 2; ++j) {
1780  auto const& cp_linked = cPOnLayer[cpPair.first][j].layerClusterIdToEnergyAndScore[mclId];
1781  sharedeneCPallLayers += cp_linked.first;
1782  } //end of loop through layers
1783  LogDebug("HGCalValidator") << "sharedeneCPallLayers " << sharedeneCPallLayers << std::endl;
1784  if (cpPair.first == score->first) {
1785  histograms.h_sharedenergy_multicl2caloparticle[count]->Fill(sharedeneCPallLayers /
1786  multiClusters[mclId].energy());
1787  histograms.h_energy_vs_score_multicl2caloparticle[count]->Fill(
1788  score->second, sharedeneCPallLayers / multiClusters[mclId].energy());
1789  }
1790  }
1791  auto assocFakeMerge = std::count_if(std::begin(cpsInMultiCluster[mclId]),
1792  std::end(cpsInMultiCluster[mclId]),
1793  [](const auto& obj) { return obj.second < ScoreCutMCLtoCPFakeMerge_; });
1794  tracksters_fakemerge[mclId] = assocFakeMerge;
1795  }
1796  } //end of loop through multiclusters
1797 
1798  std::unordered_map<int, std::vector<float>> score3d;
1799  std::unordered_map<int, std::vector<float>> mclsharedenergy;
1800  std::unordered_map<int, std::vector<float>> mclsharedenergyfrac;
1801 
1802  for (unsigned int i = 0; i < nCaloParticles; ++i) {
1803  auto cpIndex = cPIndices[i];
1804  score3d[cpIndex].resize(nMultiClusters);
1805  mclsharedenergy[cpIndex].resize(nMultiClusters);
1806  mclsharedenergyfrac[cpIndex].resize(nMultiClusters);
1807  for (unsigned int j = 0; j < nMultiClusters; ++j) {
1808  score3d[cpIndex][j] = FLT_MAX;
1809  mclsharedenergy[cpIndex][j] = 0.f;
1810  mclsharedenergyfrac[cpIndex][j] = 0.f;
1811  }
1812  }
1813 
1814  // Here we fill the plots used to compute the different metrics linked to
1815  // gen-level, namely efficiency and duplicate rate. In this loop we restrict
1816  // ourselves to the selected caloParticles only.
1817  for (const auto& cpId : cPSelectedIndices) {
1818  //We need to keep the multiclusters ids that are related to
1819  //CaloParticle under study for the final filling of the score.
1820  std::vector<unsigned int> cpId_mclId_related;
1821  cpId_mclId_related.clear();
1822 
1823  float CPenergy = 0.f;
1824  for (unsigned int layerId = 0; layerId < layers * 2; ++layerId) {
1825  unsigned int CPNumberOfHits = cPOnLayer[cpId][layerId].hits_and_fractions.size();
1826  //Accumulate the CP energy, i.e. its rechit-matched energy summed over the layers.
1827  CPenergy += cPOnLayer[cpId][layerId].energy;
1828  if (CPNumberOfHits == 0)
1829  continue;
1830  int mclWithMaxEnergyInCP = -1;
1831  //This is the maximum energy related to multicluster per layer.
1832  float maxEnergyMCLperlayerinCP = 0.f;
1833  float CPEnergyFractionInMCLperlayer = 0.f;
1834  //Remember, and do not be confused by the name: layerClusterIdToEnergyAndScore contains the multicluster id.
1835  for (const auto& mcl : cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore) {
1836  if (mcl.second.first > maxEnergyMCLperlayerinCP) {
1837  maxEnergyMCLperlayerinCP = mcl.second.first;
1838  mclWithMaxEnergyInCP = mcl.first;
1839  }
1840  }
1841  if (CPenergy > 0.f)
1842  CPEnergyFractionInMCLperlayer = maxEnergyMCLperlayerinCP / CPenergy;
1843 
1844  LogDebug("HGCalValidator") << std::setw(8) << "LayerId:\t" << std::setw(12) << "caloparticle\t" << std::setw(15)
1845  << "cp total energy\t" << std::setw(15) << "cpEnergyOnLayer\t" << std::setw(14)
1846  << "CPNhitsOnLayer\t" << std::setw(18) << "mclWithMaxEnergyInCP\t" << std::setw(15)
1847  << "maxEnergyMCLinCP\t" << std::setw(20) << "CPEnergyFractionInMCL"
1848  << "\n";
1849  LogDebug("HGCalValidator") << std::setw(8) << layerId << "\t" << std::setw(12) << cpId << "\t" << std::setw(15)
1850  << cP[cpId].energy() << "\t" << std::setw(15) << CPenergy << "\t" << std::setw(14)
1851  << CPNumberOfHits << "\t" << std::setw(18) << mclWithMaxEnergyInCP << "\t"
1852  << std::setw(15) << maxEnergyMCLperlayerinCP << "\t" << std::setw(20)
1853  << CPEnergyFractionInMCLperlayer << "\n";
1854 
1855  for (unsigned int i = 0; i < CPNumberOfHits; ++i) {
1856  auto& cp_hitDetId = cPOnLayer[cpId][layerId].hits_and_fractions[i].first;
1857  auto& cpFraction = cPOnLayer[cpId][layerId].hits_and_fractions[i].second;
1858 
1859  bool hitWithNoMCL = false;
1860  if (cpFraction == 0.f)
1861  continue; //hopefully this should never happen
1862  auto hit_find_in_MCL = detIdToMultiClusterId_Map.find(cp_hitDetId);
1863  if (hit_find_in_MCL == detIdToMultiClusterId_Map.end())
1864  hitWithNoMCL = true;
1865  auto itcheck = hitMap.find(cp_hitDetId);
1866  const HGCRecHit* hit = itcheck->second;
1867  float hitEnergyWeight = hit->energy() * hit->energy();
1868  for (auto& lcPair : cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore) {
1869  unsigned int multiClusterId = lcPair.first;
1870  if (std::find(std::begin(cpId_mclId_related), std::end(cpId_mclId_related), multiClusterId) ==
1871  std::end(cpId_mclId_related)) {
1872  cpId_mclId_related.push_back(multiClusterId);
1873  }
1874  float mclFraction = 0.f;
1875 
1876  if (!hitWithNoMCL) {
1877  auto findHitIt = std::find(detIdToMultiClusterId_Map[cp_hitDetId].begin(),
1878  detIdToMultiClusterId_Map[cp_hitDetId].end(),
1879  HGVHistoProducerAlgo::detIdInfoInMultiCluster{multiClusterId, 0, 0.f});
1880  if (findHitIt != detIdToMultiClusterId_Map[cp_hitDetId].end())
1881  mclFraction = findHitIt->fraction;
1882  }
1883  //Observe here that we do not divide by the layer cluster energy weight as before. We first sum
1884  //over all layers and then divide by the total CP energy over all layers.
1885  if (lcPair.second.second == FLT_MAX) {
1886  lcPair.second.second = 0.f;
1887  }
1888  lcPair.second.second += (mclFraction - cpFraction) * (mclFraction - cpFraction) * hitEnergyWeight;
1889  LogDebug("HGCalValidator") << "multiClusterId:\t" << multiClusterId << "\t"
1890  << "mclfraction,cpfraction:\t" << mclFraction << ", " << cpFraction << "\t"
1891  << "hitEnergyWeight:\t" << hitEnergyWeight << "\t"
1892  << "currect score numerator:\t" << lcPair.second.second << "\n";
1893  }
1894  } //end of loop through sim hits of current calo particle
1895 
1896  if (cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore.empty())
1897  LogDebug("HGCalValidator") << "CP Id: \t" << cpId << "\t MCL id:\t-1 "
1898  << "\t layer \t " << layerId << " Sub score in \t -1"
1899  << "\n";
1900 
1901  for (const auto& lcPair : cPOnLayer[cpId][layerId].layerClusterIdToEnergyAndScore) {
1902  //3d score here without the denominator at this point
1903  if (score3d[cpId][lcPair.first] == FLT_MAX) {
1904  score3d[cpId][lcPair.first] = 0.f;
1905  }
1906  score3d[cpId][lcPair.first] += lcPair.second.second;
1907  mclsharedenergy[cpId][lcPair.first] += lcPair.second.first;
1908  }
1909  } //end of loop through layers
1910 
1911  // Compute the correct normalization
1912  // We need to loop over the cPOnLayer data structure since this is the
1913  // only one that has the compressed information for multiple uses
1914  // of the same DetId by different SimClusters of a single CaloParticle.
1915  float invCPEnergyWeight = 0.f;
1916  for (const auto& layer : cPOnLayer[cpId]) {
1917  for (const auto& haf : layer.hits_and_fractions) {
1918  invCPEnergyWeight +=
1919  (haf.second * hitMap.at(haf.first)->energy()) * (haf.second * hitMap.at(haf.first)->energy());
1920  }
1921  }
1922  invCPEnergyWeight = 1.f / invCPEnergyWeight;
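      //Summary of the normalization (notation introduced here only for clarity): the per-layer
      //numerators accumulated into score3d in the layer loop above are multiplied below by
      //invCPEnergyWeight, so the CaloParticle-to-multicluster score becomes
      //  score(CP -> MCL) = sum_i (f_mcl(i) - f_cp(i))^2 * E(i)^2 / sum_i (f_cp(i) * E(i))^2
      //where the sums run over the compressed hits of the CaloParticle on all layers.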
1923 
1924  //Loop here through the multiclusters related to this CaloParticle.
1925  //Use a plain vector for access because it is faster.
1926  std::vector<int> cpId_mclId_related_vec(cpId_mclId_related.begin(), cpId_mclId_related.end());
1927  for (unsigned int i = 0; i < cpId_mclId_related_vec.size(); ++i) {
1928  auto mclId = cpId_mclId_related_vec[i];
1929  //Now time for the denominator
1930  score3d[cpId][mclId] = score3d[cpId][mclId] * invCPEnergyWeight;
1931  mclsharedenergyfrac[cpId][mclId] = (mclsharedenergy[cpId][mclId] / CPenergy);
1932 
1933  LogDebug("HGCalValidator") << "CP Id: \t" << cpId << "\t MCL id: \t" << mclId << "\t score \t" //
1934  << score3d[cpId][mclId] << "\t"
1935  << "invCPEnergyWeight \t" << invCPEnergyWeight << "\t"
1936  << "shared energy:\t" << mclsharedenergy[cpId][mclId] << "\t"
1937  << "shared energy fraction:\t" << mclsharedenergyfrac[cpId][mclId] << "\n";
1938 
1939  histograms.h_score_caloparticle2multicl[count]->Fill(score3d[cpId][mclId]);
1940 
1941  histograms.h_sharedenergy_caloparticle2multicl[count]->Fill(mclsharedenergyfrac[cpId][mclId]);
1942  histograms.h_energy_vs_score_caloparticle2multicl[count]->Fill(score3d[cpId][mclId],
1943  mclsharedenergyfrac[cpId][mclId]);
1944  } //end of loop through multiclusters
1945 
1946  auto is_assoc = [&](const auto& v) -> bool { return v < ScoreCutCPtoMCLDup_; };
1947 
1948  auto assocDup = std::count_if(std::begin(score3d[cpId]), std::end(score3d[cpId]), is_assoc);
1949 
1950  if (assocDup > 0) {
1951  histograms.h_num_caloparticle_eta[count]->Fill(cP[cpId].g4Tracks()[0].momentum().eta());
1952  histograms.h_num_caloparticle_phi[count]->Fill(cP[cpId].g4Tracks()[0].momentum().phi());
1953  auto best = std::min_element(std::begin(score3d[cpId]), std::end(score3d[cpId]));
1954  auto bestmclId = std::distance(std::begin(score3d[cpId]), best);
1955 
1956  histograms.h_sharedenergy_caloparticle2multicl_vs_eta[count]->Fill(cP[cpId].g4Tracks()[0].momentum().eta(),
1957  multiClusters[bestmclId].energy() / CPenergy);
1958  histograms.h_sharedenergy_caloparticle2multicl_vs_phi[count]->Fill(cP[cpId].g4Tracks()[0].momentum().phi(),
1959  multiClusters[bestmclId].energy() / CPenergy);
1960  }
1961  if (assocDup >= 2) {
1962  auto match = std::find_if(std::begin(score3d[cpId]), std::end(score3d[cpId]), is_assoc);
1963  while (match != score3d[cpId].end()) {
1964  tracksters_duplicate[std::distance(std::begin(score3d[cpId]), match)] = 1;
1965  match = std::find_if(std::next(match), std::end(score3d[cpId]), is_assoc);
1966  }
1967  }
1968  histograms.h_denom_caloparticle_eta[count]->Fill(cP[cpId].g4Tracks()[0].momentum().eta());
1969  histograms.h_denom_caloparticle_phi[count]->Fill(cP[cpId].g4Tracks()[0].momentum().phi());
1970 
1971  } //end of loop through caloparticles
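    // Hedged note on how these fills are meant to be used: the h_num_caloparticle_* histograms are
    // filled only for CaloParticles with at least one associated multicluster (score below
    // ScoreCutCPtoMCLDup_), while h_denom_caloparticle_* is filled for every selected CaloParticle;
    // presumably a later harvesting step divides them to obtain the efficiency versus eta and phi,
    // and a CaloParticle with two or more associations flags the corresponding multiclusters as duplicates.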
1972 
1973  // Here we fill the plots used to compute the different metrics linked to
1974  // reco-level, namely fake-rate and merge-rate. In this loop we should *not*
1975  // restrict ourselves to the selected caloParticles.
1976  for (unsigned int mclId = 0; mclId < nMultiClusters; ++mclId) {
1977  const auto& hits_and_fractions = multiClusters[mclId].hitsAndFractions();
1978  if (!hits_and_fractions.empty()) {
1979  auto assocFakeMerge = tracksters_fakemerge[mclId];
1980  auto assocDuplicate = tracksters_duplicate[mclId];
1981  if (assocDuplicate) {
1982  histograms.h_numDup_multicl_eta[count]->Fill(multiClusters[mclId].eta());
1983  histograms.h_numDup_multicl_phi[count]->Fill(multiClusters[mclId].phi());
1984  }
1985  if (assocFakeMerge > 0) {
1986  histograms.h_num_multicl_eta[count]->Fill(multiClusters[mclId].eta());
1987  histograms.h_num_multicl_phi[count]->Fill(multiClusters[mclId].phi());
1988  auto best = std::min_element(std::begin(cpsInMultiCluster[mclId]),
1989  std::end(cpsInMultiCluster[mclId]),
1990  [](const auto& obj1, const auto& obj2) { return obj1.second < obj2.second; });
1991 
1992  //This is the shared energy taking the best caloparticle in each layer
1993  float sharedeneCPallLayers = 0.;
1994  //Loop through all layers
1995  for (unsigned int j = 0; j < layers * 2; ++j) {
1996  auto const& best_cp_linked = cPOnLayer[best->first][j].layerClusterIdToEnergyAndScore[mclId];
1997  sharedeneCPallLayers += best_cp_linked.first;
1998  } //end of loop through layers
1999  histograms.h_sharedenergy_multicl2caloparticle_vs_eta[count]->Fill(
2000  multiClusters[mclId].eta(), sharedeneCPallLayers / multiClusters[mclId].energy());
2001  histograms.h_sharedenergy_multicl2caloparticle_vs_phi[count]->Fill(
2002  multiClusters[mclId].phi(), sharedeneCPallLayers / multiClusters[mclId].energy());
2003  }
2004  if (assocFakeMerge >= 2) {
2005  histograms.h_numMerge_multicl_eta[count]->Fill(multiClusters[mclId].eta());
2006  histograms.h_numMerge_multicl_phi[count]->Fill(multiClusters[mclId].phi());
2007  }
2008  histograms.h_denom_multicl_eta[count]->Fill(multiClusters[mclId].eta());
2009  histograms.h_denom_multicl_phi[count]->Fill(multiClusters[mclId].phi());
2010  }
2011  }
2012 }
2013 
2015  int count,
2016  const std::vector<reco::HGCalMultiCluster>& multiClusters,
2017  std::vector<CaloParticle> const& cP,
2018  std::vector<size_t> const& cPIndices,
2019  std::vector<size_t> const& cPSelectedIndices,
2020  std::unordered_map<DetId, const HGCRecHit*> const& hitMap,
2021  unsigned layers) const {
2022  //Each event to be treated as two events:
2023  //an event in +ve endcap, plus another event in -ve endcap.
2024 
2025  //To keep track of total num of multiclusters
2026  int tnmclmz = 0; //-z
2027  int tnmclpz = 0; //+z
2028  //To count the number of multiclusters with 3 contiguous layers per event.
2029  int tncontmclpz = 0; //+z
2030  int tncontmclmz = 0; //-z
2031  //For the number of multiclusters without 3 contiguous layers per event.
2032  int tnnoncontmclpz = 0; //+z
2033  int tnnoncontmclmz = 0; //-z
2034  //We want to check below the score of cont and non cont multiclusters
2035  std::vector<bool> contmulti;
2036  contmulti.clear();
2037 
2038  //[mclId] -> vector of 2d layer cluster sizes (number of hits per layer cluster)
2039  std::unordered_map<unsigned int, std::vector<unsigned int>> multiplicity;
2040  //[mclId] -> vector of the layer ids of the 2d layer clusters
2041  std::unordered_map<unsigned int, std::vector<unsigned int>> multiplicity_vs_layer;
2042  //We will need this for the scale text option
2043  // unsigned int totallcinmcls = 0;
2044  // for (unsigned int mclId = 0; mclId < nMultiClusters; ++mclId) {
2045  // totallcinmcls = totallcinmcls + multiClusters[mclId].clusters().size();
2046  // }
2047 
2048  auto nMultiClusters = multiClusters.size();
2049  //loop through multiclusters of the event
2050  for (unsigned int mclId = 0; mclId < nMultiClusters; ++mclId) {
2051  const auto layerClusters = multiClusters[mclId].clusters();
2052  auto nLayerClusters = layerClusters.size();
2053 
2054  if (nLayerClusters == 0)
2055  continue;
2056 
2057  if (multiClusters[mclId].z() < 0.) {
2058  tnmclmz++;
2059  }
2060  if (multiClusters[mclId].z() > 0.) {
2061  tnmclpz++;
2062  }
2063 
2064  //Total number of layer clusters in multicluster
2065  int tnlcinmcl = 0;
2066 
2067  //To keep track of the total number of layer clusters per layer in this multicluster:
2068  //tnlcinmclperlay[layerid], indexed over both endcaps
2069  std::vector<int> tnlcinmclperlay(1000, 0);
2070 
2071  //For the layers the multicluster extends over. Use a set because there would be many
2072  //duplicates, and then go back to a vector for random access, which is faster.
2073  std::set<int> multicluster_layers;
2074 
2075  bool multiclusterInZplus = false;
2076  bool multiclusterInZminus = false;
2077 
2078  //Loop through layer clusters
2079  for (unsigned int lcId = 0; lcId < nLayerClusters; ++lcId) {
2080  //take the hits and their fraction of the specific layer cluster.
2081  const std::vector<std::pair<DetId, float>>& hits_and_fractions = layerClusters[lcId]->hitsAndFractions();
2082 
2083  //For the multiplicity of the 2d layer clusters in multiclusters
2084  multiplicity[mclId].emplace_back(hits_and_fractions.size());
2085 
2086  const auto firstHitDetId = hits_and_fractions[0].first;
2087  //The layer that the layer cluster belongs to
2088  int layerid = recHitTools_->getLayerWithOffset(firstHitDetId) +
2089  layers * ((recHitTools_->zside(firstHitDetId) + 1) >> 1) - 1;
2090  multicluster_layers.insert(layerid);
2091  multiplicity_vs_layer[mclId].emplace_back(layerid);
2092 
2093  tnlcinmclperlay[layerid]++;
2094  tnlcinmcl++;
2095 
2096  if (recHitTools_->zside(firstHitDetId) > 0.) {
2097  multiclusterInZplus = true;
2098  }
2099  if (recHitTools_->zside(firstHitDetId) < 0.) {
2100  multiclusterInZminus = true;
2101  }
2102 
2103  } //end of loop through layerclusters
2104 
2105  //Per layer: loop over all layers of both endcaps (V9: 0->103, V10: 0->99)
2106  for (unsigned ilayer = 0; ilayer < layers * 2; ++ilayer) {
2107  if (histograms.h_clusternum_in_multicluster_perlayer[count].count(ilayer) && tnlcinmclperlay[ilayer] != 0) {
2108  histograms.h_clusternum_in_multicluster_perlayer[count].at(ilayer)->Fill((float)tnlcinmclperlay[ilayer]);
2109  }
2110  //For the profile now of 2d layer cluster in multiclusters vs layer number.
2111  if (tnlcinmclperlay[ilayer] != 0) {
2112  histograms.h_clusternum_in_multicluster_vs_layer[count]->Fill((float)ilayer, (float)tnlcinmclperlay[ilayer]);
2113  }
2114  } //end of loop over layers
2115 
2116  //Looking for multiclusters with 3 contiguous layers per event.
2117  std::vector<int> multicluster_layers_vec(multicluster_layers.begin(), multicluster_layers.end());
2118  //Since we want to also check for non contiguous multiclusters
2119  bool contimulti = false;
2120  //Observe that we start from 1 and go up to size - 1 element.
2121  if (multicluster_layers_vec.size() >= 3) {
2122  for (unsigned int i = 1; i < multicluster_layers_vec.size() - 1; ++i) {
2123  if ((multicluster_layers_vec[i - 1] + 1 == multicluster_layers_vec[i]) &&
2124  (multicluster_layers_vec[i + 1] - 1 == multicluster_layers_vec[i])) {
2125  //So, this is a multicluster with 3 contiguous layers per event
2126  if (multiclusterInZplus) {
2127  tncontmclpz++;
2128  }
2129  if (multiclusterInZminus) {
2130  tncontmclmz++;
2131  }
2132  contimulti = true;
2133  break;
2134  }
2135  }
2136  }
2137  //Count non contiguous multiclusters
2138  if (!contimulti) {
2139  if (multiclusterInZplus) {
2140  tnnoncontmclpz++;
2141  }
2142  if (multiclusterInZminus) {
2143  tnnoncontmclmz++;
2144  }
2145  }
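      //Worked example of the contiguity check above (hypothetical layer sets): a multicluster
      //spanning layers {10, 11, 12, 20} counts as contiguous because 10, 11 and 12 are three
      //consecutive layers, while one spanning {10, 12, 14, 16} counts as non-contiguous since
      //no layer has both its predecessor and its successor present.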
2146 
2147  //Save for the score
2148  contmulti.push_back(contimulti);
2149 
2150  histograms.h_clusternum_in_multicluster[count]->Fill(tnlcinmcl);
2151 
2152  for (unsigned int lc = 0; lc < multiplicity[mclId].size(); ++lc) {
2153  //multiplicity of the current LC
2154  float mlp = std::count(std::begin(multiplicity[mclId]), std::end(multiplicity[mclId]), multiplicity[mclId][lc]);
2155  //LogDebug("HGCalValidator") << "mlp %" << (100. * mlp)/ ((float) nLayerClusters) << std::endl;
2156  // histograms.h_multiplicityOfLCinMCL[count]->Fill( mlp , multiplicity[mclId][lc] , 100. / (float) totallcinmcls );
2157  histograms.h_multiplicityOfLCinMCL[count]->Fill(mlp, multiplicity[mclId][lc]);
2158  //When we plot with the text option we want the entries to be the same
2159  //as the percentage of the current cell over the total number of clusters. For this we need an extra histo.
2160  histograms.h_multiplicity_numberOfEventsHistogram[count]->Fill(mlp);
2161  //For the cluster multiplicity vs layer
2162  //First with the -z endcap (V10:0->49)
2163  if (multiplicity_vs_layer[mclId][lc] < layers) {
2164  histograms.h_multiplicityOfLCinMCL_vs_layercluster_zminus[count]->Fill(mlp, multiplicity_vs_layer[mclId][lc]);
2165  histograms.h_multiplicity_zminus_numberOfEventsHistogram[count]->Fill(mlp);
2166  } else { //Then for the +z (V10:50->99)
2167  histograms.h_multiplicityOfLCinMCL_vs_layercluster_zplus[count]->Fill(
2168  mlp, multiplicity_vs_layer[mclId][lc] - layers);
2169  histograms.h_multiplicity_zplus_numberOfEventsHistogram[count]->Fill(mlp);
2170  }
2171  //For the cluster multiplicity vs cluster energy
2172  histograms.h_multiplicityOfLCinMCL_vs_layerclusterenergy[count]->Fill(mlp, layerClusters[lc]->energy());
2173  }
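    //Worked example of the multiplicity variable above (hypothetical sizes): if the layer clusters
    //of this multicluster contain {5, 5, 7, 5} hits, then mlp = 3 for each of the three 5-hit
    //clusters and mlp = 1 for the 7-hit cluster, i.e. mlp counts how many layer clusters in the
    //multicluster have the same size as the current one.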
2174 
2175  if (!multicluster_layers.empty()) {
2176  histograms.h_multicluster_x[count]->Fill(multiClusters[mclId].x());
2177  histograms.h_multicluster_y[count]->Fill(multiClusters[mclId].y());
2178  histograms.h_multicluster_z[count]->Fill(multiClusters[mclId].z());
2179  histograms.h_multicluster_eta[count]->Fill(multiClusters[mclId].eta());
2180  histograms.h_multicluster_phi[count]->Fill(multiClusters[mclId].phi());
2181 
2182  histograms.h_multicluster_firstlayer[count]->Fill((float)*multicluster_layers.begin());
2183  histograms.h_multicluster_lastlayer[count]->Fill((float)*multicluster_layers.rbegin());
2184  histograms.h_multicluster_layersnum[count]->Fill((float)multicluster_layers.size());
2185 
2186  histograms.h_multicluster_pt[count]->Fill(multiClusters[mclId].pt());
2187 
2188  histograms.h_multicluster_energy[count]->Fill(multiClusters[mclId].energy());
2189  }
2190 
2191  } //end of loop through multiclusters
2192 
2193  histograms.h_multiclusternum[count]->Fill(tnmclmz + tnmclpz);
2194  histograms.h_contmulticlusternum[count]->Fill(tncontmclpz + tncontmclmz);
2195  histograms.h_noncontmulticlusternum[count]->Fill(tnnoncontmclpz + tnnoncontmclmz);
2196 
2197  multiClusters_to_CaloParticles(histograms, count, multiClusters, cP, cPIndices, cPSelectedIndices, hitMap, layers);
2198 }
2199 
2201  const double y1,
2202  const double x2,
2203  const double y2) const { //distance squared
2204  const double dx = x1 - x2;
2205  const double dy = y1 - y2;
2206  return (dx * dx + dy * dy);
2207 } //distance squared
2209  const double y1,
2210  const double x2,
2211  const double y2) const { //2-d distance on the layer (x-y)
2212  return std::sqrt(distance2(x1, y1, x2, y2));
2213 }
2214 
2215 void HGVHistoProducerAlgo::setRecHitTools(std::shared_ptr<hgcal::RecHitTools> recHitTools) {
2216  recHitTools_ = recHitTools;
2217 }
2218 
2220  std::unordered_map<DetId, const HGCRecHit*> const& hitMap) const {
2221  DetId themaxid;
2222  const std::vector<std::pair<DetId, float>>& hits_and_fractions = cluster.hitsAndFractions();
2223 
2224  double maxene = 0.;
2225  for (std::vector<std::pair<DetId, float>>::const_iterator it_haf = hits_and_fractions.begin();
2226  it_haf != hits_and_fractions.end();
2227  ++it_haf) {
2228  DetId rh_detid = it_haf->first;
2229 
2230  std::unordered_map<DetId, const HGCRecHit*>::const_iterator itcheck = hitMap.find(rh_detid);
2231  const HGCRecHit* hit = itcheck->second;
2232 
2233  if (maxene < hit->energy()) {
2234  maxene = hit->energy();
2235  themaxid = rh_detid;
2236  }
2237  }
2238 
2239  return themaxid;
2240 }
2241 
2242 double HGVHistoProducerAlgo::getEta(double eta) const {
2243  if (useFabsEta_)
2244  return fabs(eta);
2245  else
2246  return eta;
2247 }
double maxClEneperthickperlayer_
Definition: HGVHistoProducerAlgo.h:288
CaloParticle
Definition: CaloParticle.h:16
reco::CaloCluster::eta
double eta() const
pseudorapidity of cluster centroid
Definition: CaloCluster.h:181
reco::CaloCluster::hitsAndFractions
const std::vector< std::pair< DetId, float > > & hitsAndFractions() const
Definition: CaloCluster.h:210
edm::AssociationMap
Definition: AssociationMap.h:48
HGVHistoProducerAlgo::nintEneClperlay_
int nintEneClperlay_
Definition: HGVHistoProducerAlgo.h:267
HGVHistoProducerAlgo::maxMCLSharedEneFrac_
double maxMCLSharedEneFrac_
Definition: HGVHistoProducerAlgo.h:272
HGVHistoProducerAlgo::detIdInfoInMultiCluster
Definition: HGVHistoProducerAlgo.h:227
match
std::pair< typename Association::data_type::first_type, double > match(Reference key, Association association, bool bestMatchByMaxValue)
Generic matching function.
Definition: Utils.h:10
HGVHistoProducerAlgo::maxZ_
double maxZ_
Definition: HGVHistoProducerAlgo.h:308
ScoreCutCPtoLC_
const double ScoreCutCPtoLC_
Definition: HGVHistoProducerAlgo.cc:16
HGVHistoProducerAlgo::nintTotNClsinMCLsperlayer_
int nintTotNClsinMCLsperlayer_
Definition: HGVHistoProducerAlgo.h:297
position
static int position[264][3]
Definition: ReadPGInfo.cc:289
dqm::implementation::IBooker::bookInt
MonitorElement * bookInt(TString const &name, FUNC onbooking=NOOP())
Definition: DQMStore.h:73
HGVHistoProducerAlgo::nintClEneperthickperlayer_
int nintClEneperthickperlayer_
Definition: HGVHistoProducerAlgo.h:289
HGVHistoProducerAlgo::maxSharedEneFrac_
double maxSharedEneFrac_
Definition: HGVHistoProducerAlgo.h:270
HGVHistoProducerAlgo::detIdInfoInCluster
Definition: HGVHistoProducerAlgo.h:221
createfilelist.int
int
Definition: createfilelist.py:10
HGVHistoProducerAlgo::fill_generic_cluster_histos
void fill_generic_cluster_histos(const Histograms &histograms, int count, edm::Handle< reco::CaloClusterCollection > clusterHandle, const reco::CaloClusterCollection &clusters, const Density &densities, edm::Handle< std::vector< CaloParticle >> caloParticleHandle, std::vector< CaloParticle > const &cP, std::vector< size_t > const &cPIndices, std::vector< size_t > const &cPSelectedIndices, std::unordered_map< DetId, const HGCRecHit * > const &, std::map< double, double > cummatbudg, unsigned layers, std::vector< int > thicknesses, edm::Handle< hgcal::LayerClusterToCaloParticleAssociator > &LCAssocByEnergyScoreHandle) const
Definition: HGVHistoProducerAlgo.cc:1082
HGVHistoProducerAlgo::minEta_
double minEta_
Definition: HGVHistoProducerAlgo.h:247
HGVHistoProducerAlgo::maxPhi_
double maxPhi_
Definition: HGVHistoProducerAlgo.h:254
trackerHitRTTI::vector
Definition: trackerHitRTTI.h:21
HGVHistoProducerAlgo::maxX_
double maxX_
Definition: HGVHistoProducerAlgo.h:304
PVValHelper::dy
Definition: PVValidationHelpers.h:49
histograms
Definition: histograms.py:1
HltBtagPostValidation_cff.c
c
Definition: HltBtagPostValidation_cff.py:31
HGVHistoProducerAlgo::minPt_
double minPt_
Definition: HGVHistoProducerAlgo.h:252
HGVHistoProducerAlgo::nintDisToMaxperthickperlayer_
int nintDisToMaxperthickperlayer_
Definition: HGVHistoProducerAlgo.h:283
CaloParticle::pt
float pt() const
Transverse momentum. Note this is taken from the first SimTrack only.
Definition: CaloParticle.h:130
HGVHistoProducerAlgo::nintTotNClsperthick_
int nintTotNClsperthick_
Definition: HGVHistoProducerAlgo.h:275
HGVHistoProducerAlgo::nintMixedHitsCluster_
int nintMixedHitsCluster_
Definition: HGVHistoProducerAlgo.h:257
HGVHistoProducerAlgo::maxTotNClsperlay_
double maxTotNClsperlay_
Definition: HGVHistoProducerAlgo.h:264
HGVHistoProducerAlgo::minY_
double minY_
Definition: HGVHistoProducerAlgo.h:306
DDAxes::phi
HGVHistoProducerAlgo::maxTotNcellsperthickperlayer_
double maxTotNcellsperthickperlayer_
Definition: HGVHistoProducerAlgo.h:276
HGCalDetId
Definition: HGCalDetId.h:8
HGVHistoProducerAlgo::bookCaloParticleHistos
void bookCaloParticleHistos(DQMStore::IBooker &ibook, Histograms &histograms, int pdgid)
Definition: HGVHistoProducerAlgo.cc:195
eostools.move
def move(src, dest)
Definition: eostools.py:511
std
Definition: JetResolutionObject.h:76
DetId::rawId
constexpr uint32_t rawId() const
get the raw id
Definition: DetId.h:57
HGVHistoProducerAlgo::maxDisSeedToMaxperthickperlayer_
double maxDisSeedToMaxperthickperlayer_
Definition: HGVHistoProducerAlgo.h:286
HGVHistoProducerAlgo::nintEta_
int nintEta_
Definition: HGVHistoProducerAlgo.h:248
HGVHistoProducerAlgo::maxCellsEneDensperthick_
double maxCellsEneDensperthick_
Definition: HGVHistoProducerAlgo.h:290
tier0.unique
def unique(seq, keepstr=True)
Definition: tier0.py:24
HGVHistoProducerAlgo::maxTotNMCLs_
double maxTotNMCLs_
Definition: HGVHistoProducerAlgo.h:292
HGVHistoProducerAlgo::nintY_
int nintY_
Definition: HGVHistoProducerAlgo.h:307
DetId::HGCalHSc
Definition: DetId.h:34
HGVHistoProducerAlgo::minTotNClsperlay_
double minTotNClsperlay_
Definition: HGVHistoProducerAlgo.h:264
HGVHistoProducerAlgo::minDisToMaxperthickperlayerenewei_
double minDisToMaxperthickperlayerenewei_
Definition: HGVHistoProducerAlgo.h:284
dqm::implementation::IBooker::book2D
MonitorElement * book2D(TString const &name, TString const &title, int nchX, double lowX, double highX, int nchY, double lowY, double highY, FUNC onbooking=NOOP())
Definition: DQMStore.h:177
relativeConstraints.empty
bool empty
Definition: relativeConstraints.py:46
HGVHistoProducerAlgo::fill_multi_cluster_histos
void fill_multi_cluster_histos(const Histograms &histograms, int count, const std::vector< reco::HGCalMultiCluster > &multiClusters, std::vector< CaloParticle > const &cP, std::vector< size_t > const &cPIndices, std::vector< size_t > const &cPSelectedIndices, std::unordered_map< DetId, const HGCRecHit * > const &, unsigned layers) const
Definition: HGVHistoProducerAlgo.cc:2014
HGVHistoProducerAlgo::minCellsEneDensperthick_
double minCellsEneDensperthick_
Definition: HGVHistoProducerAlgo.h:290
HGVHistoProducerAlgo::minZpos_
double minZpos_
Definition: HGVHistoProducerAlgo.h:262
HGVHistoProducerAlgo::minEne_
double minEne_
Definition: HGVHistoProducerAlgo.h:250
HGVHistoProducerAlgo::findmaxhit
DetId findmaxhit(const reco::CaloCluster &cluster, std::unordered_map< DetId, const HGCRecHit * > const &) const
Definition: HGVHistoProducerAlgo.cc:2219
HGVHistoProducerAlgo::minTotNcellsperthickperlayer_
double minTotNcellsperthickperlayer_
Definition: HGVHistoProducerAlgo.h:276
dqm::implementation::IBooker
Definition: DQMStore.h:43
HGVHistoProducerAlgo::distance2
double distance2(const double x1, const double y1, const double x2, const double y2) const
Definition: HGVHistoProducerAlgo.cc:2200
SimCluster::hits_and_fractions
std::vector< std::pair< uint32_t, float > > hits_and_fractions() const
Returns list of rechit IDs and fractions for this SimCluster.
Definition: SimCluster.h:184
HGVHistoProducerAlgo::maxDisToMaxperthickperlayer_
double maxDisToMaxperthickperlayer_
Definition: HGVHistoProducerAlgo.h:282
HGVHistoProducerAlgo::maxEne_
double maxEne_
Definition: HGVHistoProducerAlgo.h:250
HGVHistoProducerAlgo::maxScore_
double maxScore_
Definition: HGVHistoProducerAlgo.h:268
HGCalValidator_cfi.simVertices
simVertices
Definition: HGCalValidator_cfi.py:43
offlineSlimmedPrimaryVertices_cfi.score
score
Definition: offlineSlimmedPrimaryVertices_cfi.py:6
HGVHistoProducerAlgo::nintDisToSeedperthickperlayerenewei_
int nintDisToSeedperthickperlayerenewei_
Definition: HGVHistoProducerAlgo.h:281
dqmiolumiharvest.j
j
Definition: dqmiolumiharvest.py:66
DetId::Forward
Definition: DetId.h:30
HGVHistoProducerAlgo::nintX_
int nintX_
Definition: HGVHistoProducerAlgo.h:305
EgammaValidation_cff.pdgid
pdgid
Definition: EgammaValidation_cff.py:30
HGVHistoProducerAlgo::minTotNClsperthick_
double minTotNClsperthick_
Definition: HGVHistoProducerAlgo.h:274
HLT_FULL_cff.distance
distance
Definition: HLT_FULL_cff.py:7799
HGVHistoProducerAlgo::minTotNMCLs_
double minTotNMCLs_
Definition: HGVHistoProducerAlgo.h:292
HGVHistoProducerAlgoHistograms
Definition: HGVHistoProducerAlgo.h:30
HGVHistoProducerAlgo::fill_info_histos
void fill_info_histos(const Histograms &histograms, unsigned layers) const
Definition: HGVHistoProducerAlgo.cc:767
PVValHelper::dx
Definition: PVValidationHelpers.h:48
HGVHistoProducerAlgo::minScore_
double minScore_
Definition: HGVHistoProducerAlgo.h:268
hgcalTopologyTester_cfi.layers
layers
Definition: hgcalTopologyTester_cfi.py:8
HGVHistoProducerAlgo::minDisSeedToMaxperthickperlayer_
double minDisSeedToMaxperthickperlayer_
Definition: HGVHistoProducerAlgo.h:286
HGVHistoProducerAlgo::minDisToSeedperthickperlayerenewei_
double minDisToSeedperthickperlayerenewei_
Definition: HGVHistoProducerAlgo.h:280
hit
Definition: SiStripHitEffFromCalibTree.cc:88
muonDTDigis_cfi.pset
pset
Definition: muonDTDigis_cfi.py:27
HGVHistoProducerAlgo::layerClusters_to_CaloParticles
void layerClusters_to_CaloParticles(const Histograms &histograms, edm::Handle< reco::CaloClusterCollection > clusterHandle, const reco::CaloClusterCollection &clusters, edm::Handle< std::vector< CaloParticle >> caloParticleHandle, std::vector< CaloParticle > const &cP, std::vector< size_t > const &cPIndices, std::vector< size_t > const &cPSelectedIndices, std::unordered_map< DetId, const HGCRecHit * > const &, unsigned layers, const edm::Handle< hgcal::LayerClusterToCaloParticleAssociator > &LCAssocByEnergyScoreHandle) const
Definition: HGVHistoProducerAlgo.cc:811
dqm::implementation::IBooker::book1D
MonitorElement * book1D(TString const &name, TString const &title, int const nchX, double const lowX, double const highX, FUNC onbooking=NOOP())
Definition: DQMStore.h:98
HGVHistoProducerAlgo::minMplofLCs_
double minMplofLCs_
Definition: HGVHistoProducerAlgo.h:298
HGVHistoProducerAlgo::minMCLSharedEneFrac_
double minMCLSharedEneFrac_
Definition: HGVHistoProducerAlgo.h:272
Density
hgcal_clustering::Density Density
Definition: HGCalImagingAlgo.h:29
GetRecoTauVFromDQM_MC_cff.next
next
Definition: GetRecoTauVFromDQM_MC_cff.py:31
HGVHistoProducerAlgo::maxTotNClsinMCLs_
double maxTotNClsinMCLs_
Definition: HGVHistoProducerAlgo.h:294